From 31b3d07dcfc2381673aeb5da849f83eb82b58b68 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 8 Jun 2023 18:07:35 +0300 Subject: [PATCH 001/179] properly set version Signed-off-by: Gabriel Adrian Samfira --- scripts/build-static.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/build-static.sh b/scripts/build-static.sh index 70cc4114..debcd5e4 100755 --- a/scripts/build-static.sh +++ b/scripts/build-static.sh @@ -11,11 +11,11 @@ USER_ID=${USER_ID:-$UID} USER_GROUP=${USER_GROUP:-$(id -g)} cd $GARM_SOURCE/cmd/garm -go build -mod vendor -o $BIN_DIR/garm -tags osusergo,netgo,sqlite_omit_load_extension -ldflags "-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --always --dirty)" . +go build -mod vendor -o $BIN_DIR/garm -tags osusergo,netgo,sqlite_omit_load_extension -ldflags "-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" . # GOOS=windows CC=x86_64-w64-mingw32-cc go build -mod vendor -o $BIN_DIR/garm.exe -tags osusergo,netgo,sqlite_omit_load_extension -ldflags "-s -w -X main.Version=$(git describe --always --dirty)" . cd $GARM_SOURCE/cmd/garm-cli -go build -mod vendor -o $BIN_DIR/garm-cli -tags osusergo,netgo -ldflags "-linkmode external -extldflags '-static' -s -w -X garm/cmd/garm-cli/cmd.Version=$(git describe --always --dirty)" . +go build -mod vendor -o $BIN_DIR/garm-cli -tags osusergo,netgo -ldflags "-linkmode external -extldflags '-static' -s -w -X garm/cmd/garm-cli/cmd.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" . # GOOS=windows CGO_ENABLED=0 go build -mod vendor -o $BIN_DIR/garm-cli.exe -ldflags "-s -w -X garm/cmd/garm-cli/cmd.Version=$(git describe --always --dirty)" . chown $USER_ID:$USER_GROUP -R "$BIN_DIR" From 79b9a1583c77b35e2d31dacb670ec2ce5df902b2 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 4 Apr 2025 20:44:57 +0000 Subject: [PATCH 002/179] Add scaleset client This change moves the github client to a subpackage in utils and adds the scaleset github client code. 
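For illustration, a minimal sketch of how the relocated client and the new
scale set client are expected to be wired together after this change. The
listScaleSets helper below, the example package name and the way the entity
value is obtained are assumptions made for the example, not part of this
patch; the package paths and function signatures follow the code added here.

    package example

    import (
        "context"
        "fmt"

        "github.com/cloudbase/garm/params"
        ghClient "github.com/cloudbase/garm/util/github"
        "github.com/cloudbase/garm/util/github/scalesets"
    )

    // listScaleSets is a hypothetical helper showing the intended call flow.
    func listScaleSets(ctx context.Context, entity params.GithubEntity) error {
        // The github client now lives in util/github instead of util.
        ghc, err := ghClient.GithubClient(ctx, entity)
        if err != nil {
            return fmt.Errorf("creating github client: %w", err)
        }
        // The scale set client wraps the github client and talks to the
        // Actions service endpoints on behalf of the entity.
        ssCli, err := scalesets.NewClient(ghc)
        if err != nil {
            return fmt.Errorf("creating scale set client: %w", err)
        }
        scaleSets, err := ssCli.ListRunnerScaleSets(ctx)
        if err != nil {
            return fmt.Errorf("listing runner scale sets: %w", err)
        }
        fmt.Printf("found %d runner scale sets\n", scaleSets.Count)
        return nil
    }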
Signed-off-by: Gabriel Adrian Samfira --- database/watcher/watcher.go | 6 +- runner/pool/pool.go | 5 +- runner/pool/watcher.go | 4 +- util/github/client.go | 471 ++++++++++++++++++++++ util/github/scalesets/client.go | 95 +++++ util/github/scalesets/jobs.go | 88 ++++ util/github/scalesets/message_sessions.go | 265 ++++++++++++ util/github/scalesets/runners.go | 129 ++++++ util/github/scalesets/scalesets.go | 204 ++++++++++ util/github/scalesets/token.go | 105 +++++ util/github/scalesets/util.go | 54 +++ util/logging.go | 2 +- util/util.go | 436 +------------------- 13 files changed, 1432 insertions(+), 432 deletions(-) create mode 100644 util/github/client.go create mode 100644 util/github/scalesets/client.go create mode 100644 util/github/scalesets/jobs.go create mode 100644 util/github/scalesets/message_sessions.go create mode 100644 util/github/scalesets/runners.go create mode 100644 util/github/scalesets/scalesets.go create mode 100644 util/github/scalesets/token.go create mode 100644 util/github/scalesets/util.go diff --git a/database/watcher/watcher.go b/database/watcher/watcher.go index ec81d5bd..2ef1aeee 100644 --- a/database/watcher/watcher.go +++ b/database/watcher/watcher.go @@ -17,7 +17,7 @@ func InitWatcher(ctx context.Context) { if databaseWatcher != nil { return } - ctx = garmUtil.WithContext(ctx, slog.Any("watcher", "database")) + ctx = garmUtil.WithSlogContext(ctx, slog.Any("watcher", "database")) w := &watcher{ producers: make(map[string]*producer), consumers: make(map[string]*consumer), @@ -33,7 +33,7 @@ func RegisterProducer(ctx context.Context, id string) (common.Producer, error) { if databaseWatcher == nil { return nil, common.ErrWatcherNotInitialized } - ctx = garmUtil.WithContext(ctx, slog.Any("producer_id", id)) + ctx = garmUtil.WithSlogContext(ctx, slog.Any("producer_id", id)) return databaseWatcher.RegisterProducer(ctx, id) } @@ -41,7 +41,7 @@ func RegisterConsumer(ctx context.Context, id string, filters ...common.PayloadF if databaseWatcher == nil { return nil, common.ErrWatcherNotInitialized } - ctx = garmUtil.WithContext(ctx, slog.Any("consumer_id", id)) + ctx = garmUtil.WithSlogContext(ctx, slog.Any("consumer_id", id)) return databaseWatcher.RegisterConsumer(ctx, id, filters...) 
} diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 7e2a6080..9e86c415 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -41,6 +41,7 @@ import ( "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" garmUtil "github.com/cloudbase/garm/util" + ghClient "github.com/cloudbase/garm/util/github" ) var ( @@ -65,8 +66,8 @@ const ( ) func NewEntityPoolManager(ctx context.Context, entity params.GithubEntity, instanceTokenGetter auth.InstanceTokenGetter, providers map[string]common.Provider, store dbCommon.Store) (common.PoolManager, error) { - ctx = garmUtil.WithContext(ctx, slog.Any("pool_mgr", entity.String()), slog.Any("pool_type", entity.EntityType)) - ghc, err := garmUtil.GithubClient(ctx, entity, entity.Credentials) + ctx = garmUtil.WithSlogContext(ctx, slog.Any("pool_mgr", entity.String()), slog.Any("pool_type", entity.EntityType)) + ghc, err := ghClient.GithubClient(ctx, entity) if err != nil { return nil, errors.Wrap(err, "getting github client") } diff --git a/runner/pool/watcher.go b/runner/pool/watcher.go index b17494d5..29950748 100644 --- a/runner/pool/watcher.go +++ b/runner/pool/watcher.go @@ -9,7 +9,7 @@ import ( "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" runnerCommon "github.com/cloudbase/garm/runner/common" - garmUtil "github.com/cloudbase/garm/util" + ghClient "github.com/cloudbase/garm/util/github" ) // entityGetter is implemented by all github entities (repositories, organizations and enterprises) @@ -28,7 +28,7 @@ func (r *basePoolManager) handleControllerUpdateEvent(controllerInfo params.Cont func (r *basePoolManager) getClientOrStub() runnerCommon.GithubClient { var err error var ghc runnerCommon.GithubClient - ghc, err = garmUtil.GithubClient(r.ctx, r.entity, r.entity.Credentials) + ghc, err = ghClient.GithubClient(r.ctx, r.entity) if err != nil { slog.WarnContext(r.ctx, "failed to create github client", "error", err) ghc = &stubGithubClient{ diff --git a/util/github/client.go b/util/github/client.go new file mode 100644 index 00000000..800c5b00 --- /dev/null +++ b/util/github/client.go @@ -0,0 +1,471 @@ +// Copyright 2024 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ +package github + +import ( + "context" + "encoding/base64" + "encoding/json" + "fmt" + "log/slog" + "net/http" + "net/url" + + "github.com/google/go-github/v57/github" + "github.com/pkg/errors" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/metrics" + "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/runner/common" +) + +type githubClient struct { + *github.ActionsService + org *github.OrganizationsService + repo *github.RepositoriesService + enterprise *github.EnterpriseService + + entity params.GithubEntity + cli *github.Client +} + +func (g *githubClient) ListEntityHooks(ctx context.Context, opts *github.ListOptions) (ret []*github.Hook, response *github.Response, err error) { + metrics.GithubOperationCount.WithLabelValues( + "ListHooks", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "ListHooks", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, response, err = g.repo.ListHooks(ctx, g.entity.Owner, g.entity.Name, opts) + case params.GithubEntityTypeOrganization: + ret, response, err = g.org.ListHooks(ctx, g.entity.Owner, opts) + default: + return nil, nil, fmt.Errorf("invalid entity type: %s", g.entity.EntityType) + } + return ret, response, err +} + +func (g *githubClient) GetEntityHook(ctx context.Context, id int64) (ret *github.Hook, err error) { + metrics.GithubOperationCount.WithLabelValues( + "GetHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "GetHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, _, err = g.repo.GetHook(ctx, g.entity.Owner, g.entity.Name, id) + case params.GithubEntityTypeOrganization: + ret, _, err = g.org.GetHook(ctx, g.entity.Owner, id) + default: + return nil, errors.New("invalid entity type") + } + return ret, err +} + +func (g *githubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) { + metrics.GithubOperationCount.WithLabelValues( + "CreateHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "CreateHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, _, err = g.repo.CreateHook(ctx, g.entity.Owner, g.entity.Name, hook) + case params.GithubEntityTypeOrganization: + ret, _, err = g.org.CreateHook(ctx, g.entity.Owner, hook) + default: + return nil, errors.New("invalid entity type") + } + return ret, err +} + +func (g *githubClient) DeleteEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) { + metrics.GithubOperationCount.WithLabelValues( + "DeleteHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "DeleteHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, err = g.repo.DeleteHook(ctx, g.entity.Owner, g.entity.Name, 
id) + case params.GithubEntityTypeOrganization: + ret, err = g.org.DeleteHook(ctx, g.entity.Owner, id) + default: + return nil, errors.New("invalid entity type") + } + return ret, err +} + +func (g *githubClient) PingEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) { + metrics.GithubOperationCount.WithLabelValues( + "PingHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "PingHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, err = g.repo.PingHook(ctx, g.entity.Owner, g.entity.Name, id) + case params.GithubEntityTypeOrganization: + ret, err = g.org.PingHook(ctx, g.entity.Owner, id) + default: + return nil, errors.New("invalid entity type") + } + return ret, err +} + +func (g *githubClient) ListEntityRunners(ctx context.Context, opts *github.ListOptions) (*github.Runners, *github.Response, error) { + var ret *github.Runners + var response *github.Response + var err error + + metrics.GithubOperationCount.WithLabelValues( + "ListEntityRunners", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "ListEntityRunners", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, response, err = g.ListRunners(ctx, g.entity.Owner, g.entity.Name, opts) + case params.GithubEntityTypeOrganization: + ret, response, err = g.ListOrganizationRunners(ctx, g.entity.Owner, opts) + case params.GithubEntityTypeEnterprise: + ret, response, err = g.enterprise.ListRunners(ctx, g.entity.Owner, opts) + default: + return nil, nil, errors.New("invalid entity type") + } + + return ret, response, err +} + +func (g *githubClient) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) { + var ret []*github.RunnerApplicationDownload + var response *github.Response + var err error + + metrics.GithubOperationCount.WithLabelValues( + "ListEntityRunnerApplicationDownloads", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "ListEntityRunnerApplicationDownloads", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, response, err = g.ListRunnerApplicationDownloads(ctx, g.entity.Owner, g.entity.Name) + case params.GithubEntityTypeOrganization: + ret, response, err = g.ListOrganizationRunnerApplicationDownloads(ctx, g.entity.Owner) + case params.GithubEntityTypeEnterprise: + ret, response, err = g.enterprise.ListRunnerApplicationDownloads(ctx, g.entity.Owner) + default: + return nil, nil, errors.New("invalid entity type") + } + + return ret, response, err +} + +func (g *githubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) { + var response *github.Response + var err error + + metrics.GithubOperationCount.WithLabelValues( + "RemoveEntityRunner", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "RemoveEntityRunner", // label: 
operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + response, err = g.RemoveRunner(ctx, g.entity.Owner, g.entity.Name, runnerID) + case params.GithubEntityTypeOrganization: + response, err = g.RemoveOrganizationRunner(ctx, g.entity.Owner, runnerID) + case params.GithubEntityTypeEnterprise: + response, err = g.enterprise.RemoveRunner(ctx, g.entity.Owner, runnerID) + default: + return nil, errors.New("invalid entity type") + } + + return response, err +} + +func (g *githubClient) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) { + var ret *github.RegistrationToken + var response *github.Response + var err error + + metrics.GithubOperationCount.WithLabelValues( + "CreateEntityRegistrationToken", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "CreateEntityRegistrationToken", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, response, err = g.CreateRegistrationToken(ctx, g.entity.Owner, g.entity.Name) + case params.GithubEntityTypeOrganization: + ret, response, err = g.CreateOrganizationRegistrationToken(ctx, g.entity.Owner) + case params.GithubEntityTypeEnterprise: + ret, response, err = g.enterprise.CreateRegistrationToken(ctx, g.entity.Owner) + default: + return nil, nil, errors.New("invalid entity type") + } + + return ret, response, err +} + +func (g *githubClient) getOrganizationRunnerGroupIDByName(ctx context.Context, entity params.GithubEntity, rgName string) (int64, error) { + opts := github.ListOrgRunnerGroupOptions{ + ListOptions: github.ListOptions{ + PerPage: 100, + }, + } + + for { + metrics.GithubOperationCount.WithLabelValues( + "ListOrganizationRunnerGroups", // label: operation + entity.LabelScope(), // label: scope + ).Inc() + runnerGroups, ghResp, err := g.ListOrganizationRunnerGroups(ctx, entity.Owner, &opts) + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "ListOrganizationRunnerGroups", // label: operation + entity.LabelScope(), // label: scope + ).Inc() + if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { + return 0, errors.Wrap(runnerErrors.ErrUnauthorized, "fetching runners") + } + return 0, errors.Wrap(err, "fetching runners") + } + for _, runnerGroup := range runnerGroups.RunnerGroups { + if runnerGroup.Name != nil && *runnerGroup.Name == rgName { + return *runnerGroup.ID, nil + } + } + if ghResp.NextPage == 0 { + break + } + opts.Page = ghResp.NextPage + } + return 0, runnerErrors.NewNotFoundError("runner group %s not found", rgName) +} + +func (g *githubClient) getEnterpriseRunnerGroupIDByName(ctx context.Context, entity params.GithubEntity, rgName string) (int64, error) { + opts := github.ListEnterpriseRunnerGroupOptions{ + ListOptions: github.ListOptions{ + PerPage: 100, + }, + } + + for { + metrics.GithubOperationCount.WithLabelValues( + "ListRunnerGroups", // label: operation + entity.LabelScope(), // label: scope + ).Inc() + runnerGroups, ghResp, err := g.enterprise.ListRunnerGroups(ctx, entity.Owner, &opts) + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "ListRunnerGroups", // label: operation + entity.LabelScope(), // label: scope + ).Inc() + if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { + return 0, 
errors.Wrap(runnerErrors.ErrUnauthorized, "fetching runners") + } + return 0, errors.Wrap(err, "fetching runners") + } + for _, runnerGroup := range runnerGroups.RunnerGroups { + if runnerGroup.Name != nil && *runnerGroup.Name == rgName { + return *runnerGroup.ID, nil + } + } + if ghResp.NextPage == 0 { + break + } + opts.Page = ghResp.NextPage + } + return 0, runnerErrors.NewNotFoundError("runner group not found") +} + +func (g *githubClient) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (jitConfigMap map[string]string, runner *github.Runner, err error) { + // If no runner group is set, use the default runner group ID. This is also the default for + // repository level runners. + var rgID int64 = 1 + + if pool.GitHubRunnerGroup != "" { + switch g.entity.EntityType { + case params.GithubEntityTypeOrganization: + rgID, err = g.getOrganizationRunnerGroupIDByName(ctx, g.entity, pool.GitHubRunnerGroup) + case params.GithubEntityTypeEnterprise: + rgID, err = g.getEnterpriseRunnerGroupIDByName(ctx, g.entity, pool.GitHubRunnerGroup) + } + + if err != nil { + return nil, nil, fmt.Errorf("getting runner group ID: %w", err) + } + } + + req := github.GenerateJITConfigRequest{ + Name: instance, + RunnerGroupID: rgID, + Labels: labels, + // nolint:golangci-lint,godox + // TODO(gabriel-samfira): Should we make this configurable? + WorkFolder: github.String("_work"), + } + + metrics.GithubOperationCount.WithLabelValues( + "GetEntityJITConfig", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + + var ret *github.JITRunnerConfig + var response *github.Response + + switch g.entity.EntityType { + case params.GithubEntityTypeRepository: + ret, response, err = g.GenerateRepoJITConfig(ctx, g.entity.Owner, g.entity.Name, &req) + case params.GithubEntityTypeOrganization: + ret, response, err = g.GenerateOrgJITConfig(ctx, g.entity.Owner, &req) + case params.GithubEntityTypeEnterprise: + ret, response, err = g.enterprise.GenerateEnterpriseJITConfig(ctx, g.entity.Owner, &req) + } + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "GetEntityJITConfig", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + if response != nil && response.StatusCode == http.StatusUnauthorized { + return nil, nil, fmt.Errorf("failed to get JIT config: %w", err) + } + return nil, nil, fmt.Errorf("failed to get JIT config: %w", err) + } + + defer func(run *github.Runner) { + if err != nil && run != nil { + _, innerErr := g.RemoveEntityRunner(ctx, run.GetID()) + slog.With(slog.Any("error", innerErr)).ErrorContext( + ctx, "failed to remove runner", + "runner_id", run.GetID(), string(g.entity.EntityType), g.entity.String()) + } + }(ret.Runner) + + decoded, err := base64.StdEncoding.DecodeString(*ret.EncodedJITConfig) + if err != nil { + return nil, nil, fmt.Errorf("failed to decode JIT config: %w", err) + } + + var jitConfig map[string]string + if err := json.Unmarshal(decoded, &jitConfig); err != nil { + return nil, nil, fmt.Errorf("failed to unmarshal JIT config: %w", err) + } + + return jitConfig, ret.Runner, nil +} + +func (g *githubClient) GetEntity() params.GithubEntity { + return g.entity +} + +func (g *githubClient) GithubBaseURL() *url.URL { + return g.cli.BaseURL +} + +func GithubClient(ctx context.Context, entity params.GithubEntity) (common.GithubClient, error) { + // func GithubClient(ctx context.Context, entity params.GithubEntity) (common.GithubClient, error) { + httpClient, err := entity.Credentials.GetHTTPClient(ctx) 
+	if err != nil {
+		return nil, errors.Wrap(err, "fetching http client")
+	}
+
+	ghClient, err := github.NewClient(httpClient).WithEnterpriseURLs(
+		entity.Credentials.APIBaseURL, entity.Credentials.UploadBaseURL)
+	if err != nil {
+		return nil, errors.Wrap(err, "fetching github client")
+	}
+
+	cli := &githubClient{
+		ActionsService: ghClient.Actions,
+		org:            ghClient.Organizations,
+		repo:           ghClient.Repositories,
+		enterprise:     ghClient.Enterprise,
+		cli:            ghClient,
+		entity:         entity,
+	}
+
+	return cli, nil
+}
diff --git a/util/github/scalesets/client.go b/util/github/scalesets/client.go
new file mode 100644
index 00000000..f0b2deac
--- /dev/null
+++ b/util/github/scalesets/client.go
@@ -0,0 +1,95 @@
+// Copyright 2024 Cloudbase Solutions SRL
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+
+package scalesets
+
+import (
+	"fmt"
+	"io"
+	"net/http"
+	"sync"
+
+	"github.com/google/go-github/v57/github"
+
+	runnerErrors "github.com/cloudbase/garm-provider-common/errors"
+	"github.com/cloudbase/garm/params"
+	"github.com/cloudbase/garm/runner/common"
+)
+
+func NewClient(cli common.GithubClient) (*ScaleSetClient, error) {
+	return &ScaleSetClient{
+		ghCli:      cli,
+		httpClient: &http.Client{},
+	}, nil
+}
+
+type ScaleSetClient struct {
+	ghCli      common.GithubClient
+	httpClient *http.Client
+
+	// scale sets are apparently available through the same security
+	// context that a normal runner would use. We connect to the same
+	// API endpoint a runner would connect to, in order to fetch jobs.
+	// To do this, we use a runner registration token.
+	runnerRegistrationToken *github.RegistrationToken
+	// actionsServiceInfo holds the pipeline URL and the JWT token to
+	// access it. The pipeline URL is the base URL where we can access
+	// the scale set endpoints.
+ actionsServiceInfo *params.ActionsServiceAdminInfoResponse + + mux sync.Mutex +} + +func (s *ScaleSetClient) SetGithubClient(cli common.GithubClient) { + s.mux.Lock() + defer s.mux.Unlock() + s.ghCli = cli +} + +func (s *ScaleSetClient) Do(req *http.Request) (*http.Response, error) { + if s.httpClient == nil { + return nil, fmt.Errorf("http client is not initialized") + } + + resp, err := s.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to dispatch HTTP request: %w", err) + } + + if resp.StatusCode >= 200 && resp.StatusCode < 300 { + return resp, nil + } + + var body []byte + if resp != nil { + defer resp.Body.Close() + body, err = io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read body: %w", err) + } + } + + switch resp.StatusCode { + case 404: + return nil, runnerErrors.NewNotFoundError("resource %s not found: %q", req.URL.String(), string(body)) + case 400: + return nil, runnerErrors.NewBadRequestError("bad request while calling %s: %q", req.URL.String(), string(body)) + case 409: + return nil, runnerErrors.NewConflictError("conflict while calling %s: %q", req.URL.String(), string(body)) + case 401, 403: + return nil, runnerErrors.ErrUnauthorized + default: + return nil, fmt.Errorf("request to %s failed with status code %d: %q", req.URL.String(), resp.StatusCode, string(body)) + } +} diff --git a/util/github/scalesets/jobs.go b/util/github/scalesets/jobs.go new file mode 100644 index 00000000..b087ad63 --- /dev/null +++ b/util/github/scalesets/jobs.go @@ -0,0 +1,88 @@ +// Copyright 2024 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ +package scalesets + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + + "github.com/cloudbase/garm/params" +) + +type acquireJobsResult struct { + Count int `json:"count"` + Value []int64 `json:"value"` +} + +func (s *ScaleSetClient) AcquireJobs(ctx context.Context, runnerScaleSetId int, messageQueueAccessToken string, requestIds []int64) ([]int64, error) { + u := fmt.Sprintf("%s/%d/acquirejobs?api-version=6.0-preview", scaleSetEndpoint, runnerScaleSetId) + + body, err := json.Marshal(requestIds) + if err != nil { + return nil, err + } + + req, err := s.newActionsRequest(ctx, http.MethodPost, u, bytes.NewBuffer(body)) + if err != nil { + return nil, fmt.Errorf("failed to construct request: %w", err) + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", messageQueueAccessToken)) + + resp, err := s.Do(req) + if err != nil { + return nil, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) + } + defer resp.Body.Close() + + var acquiredJobs acquireJobsResult + err = json.NewDecoder(resp.Body).Decode(&acquiredJobs) + if err != nil { + return nil, fmt.Errorf("failed to decode response: %w", err) + } + + return acquiredJobs.Value, nil +} + +func (s *ScaleSetClient) GetAcquirableJobs(ctx context.Context, runnerScaleSetId int) (params.AcquirableJobList, error) { + path := fmt.Sprintf("%d/acquirablejobs", runnerScaleSetId) + + req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) + if err != nil { + return params.AcquirableJobList{}, fmt.Errorf("failed to construct request: %w", err) + } + + resp, err := s.Do(req) + if err != nil { + return params.AcquirableJobList{}, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) + } + defer resp.Body.Close() + + if resp.StatusCode == http.StatusNoContent { + return params.AcquirableJobList{Count: 0, Jobs: []params.AcquirableJob{}}, nil + } + + var acquirableJobList params.AcquirableJobList + err = json.NewDecoder(resp.Body).Decode(&acquirableJobList) + if err != nil { + return params.AcquirableJobList{}, fmt.Errorf("failed to decode response: %w", err) + } + + return acquirableJobList, nil +} diff --git a/util/github/scalesets/message_sessions.go b/util/github/scalesets/message_sessions.go new file mode 100644 index 00000000..ae70239e --- /dev/null +++ b/util/github/scalesets/message_sessions.go @@ -0,0 +1,265 @@ +// Copyright 2024 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ +package scalesets + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "log/slog" + "math/rand/v2" + "net/http" + "net/url" + "strconv" + "sync" + "time" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/params" +) + +const maxCapacityHeader = "X-ScaleSetMaxCapacity" + +func NewMessageSession(ctx context.Context, cli *ScaleSetClient, session *params.RunnerScaleSetSession) (*MessageSession, error) { + sess := &MessageSession{ + ssCli: cli, + session: session, + ctx: ctx, + done: make(chan struct{}), + closed: false, + } + go sess.loop() + return sess, nil +} + +type MessageSession struct { + ssCli *ScaleSetClient + session *params.RunnerScaleSetSession + ctx context.Context + + done chan struct{} + closed bool + lastErr error + + mux sync.Mutex +} + +func (m *MessageSession) Close() error { + m.mux.Lock() + defer m.mux.Unlock() + if m.closed { + return nil + } + close(m.done) + m.closed = true + return nil +} + +func (m *MessageSession) LastError() error { + return m.lastErr +} + +func (m *MessageSession) loop() { + timer := time.NewTimer(1 * time.Minute) + defer timer.Stop() + if m.closed { + return + } + for { + select { + case <-m.ctx.Done(): + return + case <-m.done: + return + case <-timer.C: + if err := m.maybeRefreshToken(m.ctx); err != nil { + // We endlessly retry. If it's a transient error, it should eventually + // work, if it's credentials issues, users can update them. + slog.With(slog.Any("error", err)).ErrorContext(m.ctx, "failed to refresh message queue token") + m.lastErr = err + } + } + } +} + +func (m *MessageSession) SessionsRelativeURL() (string, error) { + if m.session == nil { + return "", fmt.Errorf("session is nil") + } + if m.session.RunnerScaleSet == nil { + return "", fmt.Errorf("runner scale set is nil") + } + relativePath := fmt.Sprintf("%s/%d/sessions/%s", scaleSetEndpoint, m.session.RunnerScaleSet.Id, m.session.SessionId.String()) + return relativePath, nil +} + +func (m *MessageSession) Refresh(ctx context.Context) error { + m.mux.Lock() + defer m.mux.Unlock() + + relPath, err := m.SessionsRelativeURL() + if err != nil { + return fmt.Errorf("failed to get session URL: %w", err) + } + req, err := m.ssCli.newActionsRequest(ctx, http.MethodPatch, relPath, nil) + if err != nil { + return fmt.Errorf("failed to create message delete request: %w", err) + } + resp, err := m.ssCli.Do(req) + if err != nil { + return fmt.Errorf("failed to delete message session: %w", err) + } + + var refreshedSession params.RunnerScaleSetSession + if err := json.NewDecoder(resp.Body).Decode(&refreshedSession); err != nil { + return fmt.Errorf("failed to decode response: %w", err) + } + + m.session = &refreshedSession + return nil +} + +func (m *MessageSession) maybeRefreshToken(ctx context.Context) error { + if m.session == nil { + return fmt.Errorf("session is nil") + } + // add some jitter + jitter := time.Duration(rand.IntN(10000)) * time.Millisecond + if m.session.ExpiresIn(2*time.Minute + jitter) { + if err := m.Refresh(ctx); err != nil { + return fmt.Errorf("failed to refresh message queue token: %w", err) + } + } + return nil +} + +func (m *MessageSession) GetMessage(ctx context.Context, lastMessageId int64, maxCapacity uint) (params.RunnerScaleSetMessage, error) { + u, err := url.Parse(m.session.MessageQueueUrl) + if err != nil { + return params.RunnerScaleSetMessage{}, err + } + + if lastMessageId > 0 { + q := u.Query() + q.Set("lastMessageId", strconv.FormatInt(lastMessageId, 10)) + u.RawQuery = q.Encode() 
+ } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) + if err != nil { + return params.RunnerScaleSetMessage{}, fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("Accept", "application/json; api-version=6.0-preview") + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", m.session.MessageQueueAccessToken)) + req.Header.Set(maxCapacityHeader, fmt.Sprintf("%d", maxCapacity)) + + resp, err := m.ssCli.Do(req) + if err != nil { + return params.RunnerScaleSetMessage{}, fmt.Errorf("request to %s failed: %w", req.URL.String(), err) + } + defer resp.Body.Close() + + if resp.StatusCode == http.StatusAccepted { + return params.RunnerScaleSetMessage{}, nil + } + + var message params.RunnerScaleSetMessage + if err := json.NewDecoder(resp.Body).Decode(&message); err != nil { + return params.RunnerScaleSetMessage{}, fmt.Errorf("failed to decode response: %w", err) + } + return message, nil +} + +func (m *MessageSession) DeleteMessage(ctx context.Context, messageId int64) error { + u, err := url.Parse(m.session.MessageQueueUrl) + if err != nil { + return err + } + + u.Path = fmt.Sprintf("%s/%d", u.Path, messageId) + + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, u.String(), nil) + if err != nil { + return err + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", m.session.MessageQueueAccessToken)) + + resp, err := m.ssCli.Do(req) + if err != nil { + return err + } + + resp.Body.Close() + return nil +} + +func (s *ScaleSetClient) CreateMessageSession(ctx context.Context, runnerScaleSetId int, owner string) (*MessageSession, error) { + path := fmt.Sprintf("%s/%d/sessions", scaleSetEndpoint, runnerScaleSetId) + + newSession := params.RunnerScaleSetSession{ + OwnerName: owner, + } + + requestData, err := json.Marshal(newSession) + if err != nil { + return nil, fmt.Errorf("failed to marshal session data: %w", err) + } + + req, err := s.newActionsRequest(ctx, http.MethodPost, path, bytes.NewBuffer(requestData)) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + resp, err := s.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute request to %s: %w", req.URL.String(), err) + } + defer resp.Body.Close() + + var createdSession params.RunnerScaleSetSession + if err := json.NewDecoder(resp.Body).Decode(&createdSession); err != nil { + return nil, fmt.Errorf("failed to decode response: %w", err) + } + + return &MessageSession{ + ssCli: s, + session: &createdSession, + }, nil +} + +func (s *ScaleSetClient) DeleteMessageSession(ctx context.Context, session *MessageSession) error { + path, err := session.SessionsRelativeURL() + if err != nil { + return fmt.Errorf("failed to delete session: %w", err) + } + + req, err := s.newActionsRequest(ctx, http.MethodDelete, path, nil) + if err != nil { + return fmt.Errorf("failed to create message delete request: %w", err) + } + + _, err = s.Do(req) + if err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + return fmt.Errorf("failed to delete message session: %w", err) + } + } + return nil +} diff --git a/util/github/scalesets/runners.go b/util/github/scalesets/runners.go new file mode 100644 index 00000000..2d1519dc --- /dev/null +++ b/util/github/scalesets/runners.go @@ -0,0 +1,129 @@ +// Copyright 2024 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package scalesets + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/params" +) + +type scaleSetJitRunnerConfig struct { + Name string `json:"name"` + WorkFolder string `json:"workFolder"` +} + +func (s *ScaleSetClient) GenerateJitRunnerConfig(ctx context.Context, runnerName string, scaleSet params.RunnerScaleSet) (params.RunnerScaleSetJitRunnerConfig, error) { + runnerSettings := scaleSetJitRunnerConfig{ + Name: runnerName, + WorkFolder: "_work", + } + + body, err := json.Marshal(runnerSettings) + if err != nil { + return params.RunnerScaleSetJitRunnerConfig{}, err + } + + req, err := s.newActionsRequest(ctx, http.MethodPost, scaleSet.RunnerJitConfigUrl, bytes.NewBuffer(body)) + if err != nil { + return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to create request: %w", err) + } + + resp, err := s.Do(req) + if err != nil { + return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) + } + defer resp.Body.Close() + + var runnerJitConfig params.RunnerScaleSetJitRunnerConfig + if err := json.NewDecoder(resp.Body).Decode(&runnerJitConfig); err != nil { + return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to decode response: %w", err) + } + return runnerJitConfig, nil +} + +func (s *ScaleSetClient) GetRunner(ctx context.Context, runnerId int64) (params.RunnerReference, error) { + path := fmt.Sprintf("%s/%d", runnerEndpoint, runnerId) + + req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) + if err != nil { + return params.RunnerReference{}, fmt.Errorf("failed to construct request: %w", err) + } + + resp, err := s.Do(req) + if err != nil { + return params.RunnerReference{}, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) + } + defer resp.Body.Close() + + var runnerReference params.RunnerReference + if err := json.NewDecoder(resp.Body).Decode(&runnerReference); err != nil { + return params.RunnerReference{}, fmt.Errorf("failed to decode response: %w", err) + } + + return runnerReference, nil +} + +func (s *ScaleSetClient) GetRunnerByName(ctx context.Context, runnerName string) (params.RunnerReference, error) { + path := fmt.Sprintf("%s?agentName=%s", runnerEndpoint, runnerName) + + req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) + if err != nil { + return params.RunnerReference{}, fmt.Errorf("failed to construct request: %w", err) + } + + resp, err := s.Do(req) + if err != nil { + return params.RunnerReference{}, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) + } + defer resp.Body.Close() + + var runnerList params.RunnerReferenceList + if err := json.NewDecoder(resp.Body).Decode(&runnerList); err != nil { + return params.RunnerReference{}, fmt.Errorf("failed to decode response: %w", err) + } + + if runnerList.Count == 0 { + return params.RunnerReference{}, fmt.Errorf("could not find runner with name %q: %w", runnerName, runnerErrors.ErrNotFound) + } + + if runnerList.Count > 1 { + return params.RunnerReference{}, 
fmt.Errorf("failed to decode response: %w", err) + } + + return runnerList.RunnerReferences[0], nil +} + +func (s *ScaleSetClient) RemoveRunner(ctx context.Context, runnerId int64) error { + path := fmt.Sprintf("%s/%d", runnerEndpoint, runnerId) + + req, err := s.newActionsRequest(ctx, http.MethodDelete, path, nil) + if err != nil { + return fmt.Errorf("failed to construct request: %w", err) + } + + resp, err := s.Do(req) + if err != nil { + return fmt.Errorf("request failed for %s: %w", req.URL.String(), err) + } + + resp.Body.Close() + return nil +} diff --git a/util/github/scalesets/scalesets.go b/util/github/scalesets/scalesets.go new file mode 100644 index 00000000..7c70daec --- /dev/null +++ b/util/github/scalesets/scalesets.go @@ -0,0 +1,204 @@ +// Copyright 2024 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package scalesets + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/http/httputil" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/params" +) + +const ( + runnerEndpoint = "_apis/distributedtask/pools/0/agents" + scaleSetEndpoint = "_apis/runtime/runnerscalesets" +) + +const ( + HeaderActionsActivityID = "ActivityId" + HeaderGitHubRequestID = "X-GitHub-Request-Id" +) + +func (s *ScaleSetClient) GetRunnerScaleSetByNameAndRunnerGroup(ctx context.Context, runnerGroupId int, name string) (params.RunnerScaleSet, error) { + path := fmt.Sprintf("%s?runnerGroupId=%d&name=%s", scaleSetEndpoint, runnerGroupId, name) + req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) + if err != nil { + return params.RunnerScaleSet{}, err + } + + resp, err := s.Do(req) + if err != nil { + return params.RunnerScaleSet{}, err + } + + var runnerScaleSetList *params.RunnerScaleSetsResponse + if err := json.NewDecoder(resp.Body).Decode(&runnerScaleSetList); err != nil { + return params.RunnerScaleSet{}, fmt.Errorf("failed to decode response: %w", err) + } + if runnerScaleSetList.Count == 0 { + return params.RunnerScaleSet{}, runnerErrors.NewNotFoundError("runner scale set with name %s and runner group ID %d was not found", name, runnerGroupId) + } + + // Runner scale sets must have a uniqe name. Attempting to create a runner scale set with the same name as + // an existing scale set will result in a Bad Request (400) error. 
+ return runnerScaleSetList.RunnerScaleSets[0], nil +} + +func (s *ScaleSetClient) GetRunnerScaleSetById(ctx context.Context, runnerScaleSetId int) (params.RunnerScaleSet, error) { + path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetId) + req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) + if err != nil { + return params.RunnerScaleSet{}, err + } + + resp, err := s.Do(req) + if err != nil { + return params.RunnerScaleSet{}, fmt.Errorf("failed to get runner scaleset with ID %d: %w", runnerScaleSetId, err) + } + + var runnerScaleSet params.RunnerScaleSet + if err := json.NewDecoder(resp.Body).Decode(&runnerScaleSet); err != nil { + return params.RunnerScaleSet{}, fmt.Errorf("failed to decode response: %w", err) + } + return runnerScaleSet, nil +} + +// ListRunnerScaleSets lists all runner scale sets in a github entity. +func (s *ScaleSetClient) ListRunnerScaleSets(ctx context.Context) (*params.RunnerScaleSetsResponse, error) { + req, err := s.newActionsRequest(ctx, http.MethodGet, scaleSetEndpoint, nil) + if err != nil { + return nil, err + } + data, err := httputil.DumpRequest(req, false) + if err == nil { + fmt.Println(string(data)) + } + resp, err := s.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to list runner scale sets: %w", err) + } + + var runnerScaleSetList params.RunnerScaleSetsResponse + if err := json.NewDecoder(resp.Body).Decode(&runnerScaleSetList); err != nil { + return nil, fmt.Errorf("failed to decode response: %w", err) + } + + return &runnerScaleSetList, nil +} + +// CreateRunnerScaleSet creates a new runner scale set in the target GitHub entity. +func (s *ScaleSetClient) CreateRunnerScaleSet(ctx context.Context, runnerScaleSet *params.RunnerScaleSet) (params.RunnerScaleSet, error) { + body, err := json.Marshal(runnerScaleSet) + if err != nil { + return params.RunnerScaleSet{}, err + } + + req, err := s.newActionsRequest(ctx, http.MethodPost, scaleSetEndpoint, bytes.NewReader(body)) + if err != nil { + return params.RunnerScaleSet{}, err + } + + resp, err := s.Do(req) + if err != nil { + return params.RunnerScaleSet{}, fmt.Errorf("failed to create runner scale set: %w", err) + } + + var createdRunnerScaleSet params.RunnerScaleSet + if err := json.NewDecoder(resp.Body).Decode(&createdRunnerScaleSet); err != nil { + return params.RunnerScaleSet{}, fmt.Errorf("failed to decode response: %w", err) + } + return createdRunnerScaleSet, nil +} + +func (s *ScaleSetClient) UpdateRunnerScaleSet(ctx context.Context, runnerScaleSetId int, runnerScaleSet params.RunnerScaleSet) (params.RunnerScaleSet, error) { + path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetId) + + body, err := json.Marshal(runnerScaleSet) + if err != nil { + return params.RunnerScaleSet{}, fmt.Errorf("failed to marshal request: %w", err) + } + + req, err := s.newActionsRequest(ctx, http.MethodPatch, path, bytes.NewReader(body)) + if err != nil { + return params.RunnerScaleSet{}, fmt.Errorf("failed to create request: %w", err) + } + + resp, err := s.Do(req) + if err != nil { + return params.RunnerScaleSet{}, fmt.Errorf("failed to make request: %w", err) + } + + var ret params.RunnerScaleSet + if err := json.NewDecoder(resp.Body).Decode(&ret); err != nil { + return params.RunnerScaleSet{}, fmt.Errorf("failed to decode response: %w", err) + } + return ret, nil +} + +func (s *ScaleSetClient) DeleteRunnerScaleSet(ctx context.Context, runnerScaleSetId int) error { + path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetId) + req, err := s.newActionsRequest(ctx, 
http.MethodDelete, path, nil) + if err != nil { + return err + } + + client := &http.Client{} + resp, err := client.Do(req) + if err != nil { + return err + } + + if resp.StatusCode != http.StatusNoContent { + return fmt.Errorf("failed to delete scale set with code %d", resp.StatusCode) + } + + resp.Body.Close() + return nil +} + +func (s *ScaleSetClient) GetRunnerGroupByName(ctx context.Context, runnerGroup string) (params.RunnerGroup, error) { + path := fmt.Sprintf("_apis/runtime/runnergroups/?groupName=%s", runnerGroup) + req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) + if err != nil { + return params.RunnerGroup{}, err + } + + resp, err := s.Do(req) + if err != nil { + return params.RunnerGroup{}, fmt.Errorf("failed to make request: %w", err) + } + defer resp.Body.Close() + + var runnerGroupList params.RunnerGroupList + err = json.NewDecoder(resp.Body).Decode(&runnerGroupList) + if err != nil { + return params.RunnerGroup{}, fmt.Errorf("failed to decode response: %w", err) + } + + if runnerGroupList.Count == 0 { + return params.RunnerGroup{}, runnerErrors.NewNotFoundError("runner group %s does not exist", runnerGroup) + } + + if runnerGroupList.Count > 1 { + return params.RunnerGroup{}, runnerErrors.NewConflictError("multiple runner groups exist with the same name (%s)", runnerGroup) + } + + return runnerGroupList.RunnerGroups[0], nil +} diff --git a/util/github/scalesets/token.go b/util/github/scalesets/token.go new file mode 100644 index 00000000..47aa764f --- /dev/null +++ b/util/github/scalesets/token.go @@ -0,0 +1,105 @@ +// Copyright 2024 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ +package scalesets + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/cloudbase/garm/params" +) + +func (s *ScaleSetClient) getActionServiceInfo(ctx context.Context) (params.ActionsServiceAdminInfoResponse, error) { + regPath := "/actions/runner-registration" + baseURL := s.ghCli.GithubBaseURL() + url, err := baseURL.Parse(regPath) + if err != nil { + return params.ActionsServiceAdminInfoResponse{}, fmt.Errorf("failed to parse url: %w", err) + } + + entity := s.ghCli.GetEntity() + body := params.ActionsServiceAdminInfoRequest{ + URL: entity.GithubURL(), + RunnerEvent: "register", + } + + buf := &bytes.Buffer{} + enc := json.NewEncoder(buf) + enc.SetEscapeHTML(false) + + if err := enc.Encode(body); err != nil { + return params.ActionsServiceAdminInfoResponse{}, err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, url.String(), buf) + if err != nil { + return params.ActionsServiceAdminInfoResponse{}, fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", fmt.Sprintf("RemoteAuth %s", *s.runnerRegistrationToken.Token)) + + resp, err := s.Do(req) + if err != nil { + return params.ActionsServiceAdminInfoResponse{}, fmt.Errorf("failed to get actions service admin info: %w", err) + } + defer resp.Body.Close() + + data, err := io.ReadAll(resp.Body) + if err != nil { + return params.ActionsServiceAdminInfoResponse{}, fmt.Errorf("failed to read response body: %w", err) + } + data = bytes.TrimPrefix(data, []byte("\xef\xbb\xbf")) + + var info params.ActionsServiceAdminInfoResponse + if err := json.Unmarshal(data, &info); err != nil { + return params.ActionsServiceAdminInfoResponse{}, fmt.Errorf("failed to decode response: %w", err) + } + + return info, nil +} + +func (s *ScaleSetClient) ensureAdminInfo(ctx context.Context) error { + s.mux.Lock() + defer s.mux.Unlock() + + var expiresAt time.Time + if s.runnerRegistrationToken != nil { + expiresAt = s.runnerRegistrationToken.GetExpiresAt().Time + } + + now := time.Now().UTC().Add(2 * time.Minute) + if now.After(expiresAt) || s.runnerRegistrationToken == nil { + token, _, err := s.ghCli.CreateEntityRegistrationToken(ctx) + if err != nil { + return fmt.Errorf("failed to fetch runner registration token: %w", err) + } + s.runnerRegistrationToken = token + } + + if s.actionsServiceInfo == nil || s.actionsServiceInfo.ExpiresIn(2*time.Minute) { + info, err := s.getActionServiceInfo(ctx) + if err != nil { + return fmt.Errorf("failed to get action service info: %w", err) + } + s.actionsServiceInfo = &info + } + + return nil +} diff --git a/util/github/scalesets/util.go b/util/github/scalesets/util.go new file mode 100644 index 00000000..4f79098b --- /dev/null +++ b/util/github/scalesets/util.go @@ -0,0 +1,54 @@ +// Copyright 2024 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ +package scalesets + +import ( + "context" + "fmt" + "io" + "net/http" +) + +func (s *ScaleSetClient) newActionsRequest(ctx context.Context, method, path string, body io.Reader) (*http.Request, error) { + if err := s.ensureAdminInfo(ctx); err != nil { + return nil, fmt.Errorf("failed to update token: %w", err) + } + + actionsUri, err := s.actionsServiceInfo.GetURL() + if err != nil { + return nil, fmt.Errorf("failed to get pipeline URL: %w", err) + } + + uri, err := actionsUri.Parse(path) + if err != nil { + return nil, fmt.Errorf("failed to parse path: %w", err) + } + + q := uri.Query() + if q.Get("api-version") == "" { + q.Set("api-version", "6.0-preview") + } + uri.RawQuery = q.Encode() + + req, err := http.NewRequestWithContext(ctx, method, uri.String(), body) + if err != nil { + return nil, err + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", s.actionsServiceInfo.Token)) + + return req, nil +} diff --git a/util/logging.go b/util/logging.go index ac35863b..bb7b0562 100644 --- a/util/logging.go +++ b/util/logging.go @@ -25,6 +25,6 @@ func (h ContextHandler) Handle(ctx context.Context, r slog.Record) error { return h.Handler.Handle(ctx, r) } -func WithContext(ctx context.Context, attrs ...slog.Attr) context.Context { +func WithSlogContext(ctx context.Context, attrs ...slog.Attr) context.Context { return context.WithValue(ctx, slogCtxFields, attrs) } diff --git a/util/util.go b/util/util.go index eb390743..da1264d2 100644 --- a/util/util.go +++ b/util/util.go @@ -16,442 +16,30 @@ package util import ( "context" - "encoding/base64" - "encoding/json" - "fmt" - "log/slog" "net/http" - "github.com/google/go-github/v57/github" "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/metrics" - "github.com/cloudbase/garm/params" + commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/runner/common" ) -type githubClient struct { - *github.ActionsService - org *github.OrganizationsService - repo *github.RepositoriesService - enterprise *github.EnterpriseService - - entity params.GithubEntity -} - -func (g *githubClient) ListEntityHooks(ctx context.Context, opts *github.ListOptions) (ret []*github.Hook, response *github.Response, err error) { - metrics.GithubOperationCount.WithLabelValues( - "ListHooks", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "ListHooks", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - switch g.entity.EntityType { - case params.GithubEntityTypeRepository: - ret, response, err = g.repo.ListHooks(ctx, g.entity.Owner, g.entity.Name, opts) - case params.GithubEntityTypeOrganization: - ret, response, err = g.org.ListHooks(ctx, g.entity.Owner, opts) - default: - return nil, nil, fmt.Errorf("invalid entity type: %s", g.entity.EntityType) - } - return ret, response, err -} - -func (g *githubClient) GetEntityHook(ctx context.Context, id int64) (ret *github.Hook, err error) { - metrics.GithubOperationCount.WithLabelValues( - "GetHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "GetHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - switch g.entity.EntityType { - case params.GithubEntityTypeRepository: - ret, _, err 
= g.repo.GetHook(ctx, g.entity.Owner, g.entity.Name, id) - case params.GithubEntityTypeOrganization: - ret, _, err = g.org.GetHook(ctx, g.entity.Owner, id) - default: - return nil, errors.New("invalid entity type") - } - return ret, err -} - -func (g *githubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) { - metrics.GithubOperationCount.WithLabelValues( - "CreateHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "CreateHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - switch g.entity.EntityType { - case params.GithubEntityTypeRepository: - ret, _, err = g.repo.CreateHook(ctx, g.entity.Owner, g.entity.Name, hook) - case params.GithubEntityTypeOrganization: - ret, _, err = g.org.CreateHook(ctx, g.entity.Owner, hook) - default: - return nil, errors.New("invalid entity type") - } - return ret, err -} - -func (g *githubClient) DeleteEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) { - metrics.GithubOperationCount.WithLabelValues( - "DeleteHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "DeleteHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - switch g.entity.EntityType { - case params.GithubEntityTypeRepository: - ret, err = g.repo.DeleteHook(ctx, g.entity.Owner, g.entity.Name, id) - case params.GithubEntityTypeOrganization: - ret, err = g.org.DeleteHook(ctx, g.entity.Owner, id) - default: - return nil, errors.New("invalid entity type") - } - return ret, err -} - -func (g *githubClient) PingEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) { - metrics.GithubOperationCount.WithLabelValues( - "PingHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "PingHook", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - switch g.entity.EntityType { - case params.GithubEntityTypeRepository: - ret, err = g.repo.PingHook(ctx, g.entity.Owner, g.entity.Name, id) - case params.GithubEntityTypeOrganization: - ret, err = g.org.PingHook(ctx, g.entity.Owner, id) - default: - return nil, errors.New("invalid entity type") - } - return ret, err -} - -func (g *githubClient) ListEntityRunners(ctx context.Context, opts *github.ListOptions) (*github.Runners, *github.Response, error) { - var ret *github.Runners - var response *github.Response - var err error - - metrics.GithubOperationCount.WithLabelValues( - "ListEntityRunners", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "ListEntityRunners", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - - switch g.entity.EntityType { - case params.GithubEntityTypeRepository: - ret, response, err = g.ListRunners(ctx, g.entity.Owner, g.entity.Name, opts) - case params.GithubEntityTypeOrganization: - ret, response, err = g.ListOrganizationRunners(ctx, g.entity.Owner, opts) - case params.GithubEntityTypeEnterprise: - ret, response, err = g.enterprise.ListRunners(ctx, g.entity.Owner, opts) - default: - return nil, nil, errors.New("invalid entity type") - } - - return ret, response, err 
-} - -func (g *githubClient) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) { - var ret []*github.RunnerApplicationDownload - var response *github.Response - var err error - - metrics.GithubOperationCount.WithLabelValues( - "ListEntityRunnerApplicationDownloads", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "ListEntityRunnerApplicationDownloads", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - - switch g.entity.EntityType { - case params.GithubEntityTypeRepository: - ret, response, err = g.ListRunnerApplicationDownloads(ctx, g.entity.Owner, g.entity.Name) - case params.GithubEntityTypeOrganization: - ret, response, err = g.ListOrganizationRunnerApplicationDownloads(ctx, g.entity.Owner) - case params.GithubEntityTypeEnterprise: - ret, response, err = g.enterprise.ListRunnerApplicationDownloads(ctx, g.entity.Owner) - default: - return nil, nil, errors.New("invalid entity type") - } - - return ret, response, err -} - -func (g *githubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) { - var response *github.Response - var err error - - metrics.GithubOperationCount.WithLabelValues( - "RemoveEntityRunner", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "RemoveEntityRunner", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - - switch g.entity.EntityType { - case params.GithubEntityTypeRepository: - response, err = g.RemoveRunner(ctx, g.entity.Owner, g.entity.Name, runnerID) - case params.GithubEntityTypeOrganization: - response, err = g.RemoveOrganizationRunner(ctx, g.entity.Owner, runnerID) - case params.GithubEntityTypeEnterprise: - response, err = g.enterprise.RemoveRunner(ctx, g.entity.Owner, runnerID) - default: - return nil, errors.New("invalid entity type") - } - - return response, err -} - -func (g *githubClient) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) { - var ret *github.RegistrationToken - var response *github.Response - var err error - - metrics.GithubOperationCount.WithLabelValues( - "CreateEntityRegistrationToken", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - defer func() { - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "CreateEntityRegistrationToken", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - } - }() - - switch g.entity.EntityType { - case params.GithubEntityTypeRepository: - ret, response, err = g.CreateRegistrationToken(ctx, g.entity.Owner, g.entity.Name) - case params.GithubEntityTypeOrganization: - ret, response, err = g.CreateOrganizationRegistrationToken(ctx, g.entity.Owner) - case params.GithubEntityTypeEnterprise: - ret, response, err = g.enterprise.CreateRegistrationToken(ctx, g.entity.Owner) - default: - return nil, nil, errors.New("invalid entity type") - } - - return ret, response, err -} - -func (g *githubClient) getOrganizationRunnerGroupIDByName(ctx context.Context, entity params.GithubEntity, rgName string) (int64, error) { - opts := github.ListOrgRunnerGroupOptions{ - ListOptions: github.ListOptions{ - PerPage: 100, - }, - } - - for { - metrics.GithubOperationCount.WithLabelValues( - 
"ListOrganizationRunnerGroups", // label: operation - entity.LabelScope(), // label: scope - ).Inc() - runnerGroups, ghResp, err := g.ListOrganizationRunnerGroups(ctx, entity.Owner, &opts) - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "ListOrganizationRunnerGroups", // label: operation - entity.LabelScope(), // label: scope - ).Inc() - if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return 0, errors.Wrap(runnerErrors.ErrUnauthorized, "fetching runners") - } - return 0, errors.Wrap(err, "fetching runners") - } - for _, runnerGroup := range runnerGroups.RunnerGroups { - if runnerGroup.Name != nil && *runnerGroup.Name == rgName { - return *runnerGroup.ID, nil - } - } - if ghResp.NextPage == 0 { - break - } - opts.Page = ghResp.NextPage - } - return 0, runnerErrors.NewNotFoundError("runner group %s not found", rgName) -} - -func (g *githubClient) getEnterpriseRunnerGroupIDByName(ctx context.Context, entity params.GithubEntity, rgName string) (int64, error) { - opts := github.ListEnterpriseRunnerGroupOptions{ - ListOptions: github.ListOptions{ - PerPage: 100, - }, - } - - for { - metrics.GithubOperationCount.WithLabelValues( - "ListRunnerGroups", // label: operation - entity.LabelScope(), // label: scope - ).Inc() - runnerGroups, ghResp, err := g.enterprise.ListRunnerGroups(ctx, entity.Owner, &opts) - if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "ListRunnerGroups", // label: operation - entity.LabelScope(), // label: scope - ).Inc() - if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return 0, errors.Wrap(runnerErrors.ErrUnauthorized, "fetching runners") - } - return 0, errors.Wrap(err, "fetching runners") - } - for _, runnerGroup := range runnerGroups.RunnerGroups { - if runnerGroup.Name != nil && *runnerGroup.Name == rgName { - return *runnerGroup.ID, nil - } - } - if ghResp.NextPage == 0 { - break - } - opts.Page = ghResp.NextPage - } - return 0, runnerErrors.NewNotFoundError("runner group not found") -} - -func (g *githubClient) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (jitConfigMap map[string]string, runner *github.Runner, err error) { - // If no runner group is set, use the default runner group ID. This is also the default for - // repository level runners. - var rgID int64 = 1 - - if pool.GitHubRunnerGroup != "" { - switch g.entity.EntityType { - case params.GithubEntityTypeOrganization: - rgID, err = g.getOrganizationRunnerGroupIDByName(ctx, g.entity, pool.GitHubRunnerGroup) - case params.GithubEntityTypeEnterprise: - rgID, err = g.getEnterpriseRunnerGroupIDByName(ctx, g.entity, pool.GitHubRunnerGroup) - } - - if err != nil { - return nil, nil, fmt.Errorf("getting runner group ID: %w", err) - } - } - - req := github.GenerateJITConfigRequest{ - Name: instance, - RunnerGroupID: rgID, - Labels: labels, - // nolint:golangci-lint,godox - // TODO(gabriel-samfira): Should we make this configurable? 
- WorkFolder: github.String("_work"), - } - - metrics.GithubOperationCount.WithLabelValues( - "GetEntityJITConfig", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - - var ret *github.JITRunnerConfig - var response *github.Response - - switch g.entity.EntityType { - case params.GithubEntityTypeRepository: - ret, response, err = g.GenerateRepoJITConfig(ctx, g.entity.Owner, g.entity.Name, &req) - case params.GithubEntityTypeOrganization: - ret, response, err = g.GenerateOrgJITConfig(ctx, g.entity.Owner, &req) - case params.GithubEntityTypeEnterprise: - ret, response, err = g.enterprise.GenerateEnterpriseJITConfig(ctx, g.entity.Owner, &req) - } +func FetchTools(ctx context.Context, cli common.GithubClient) ([]commonParams.RunnerApplicationDownload, error) { + tools, ghResp, err := cli.ListEntityRunnerApplicationDownloads(ctx) if err != nil { - metrics.GithubOperationFailedCount.WithLabelValues( - "GetEntityJITConfig", // label: operation - g.entity.LabelScope(), // label: scope - ).Inc() - if response != nil && response.StatusCode == http.StatusUnauthorized { - return nil, nil, fmt.Errorf("failed to get JIT config: %w", err) + if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { + return nil, errors.Wrap(runnerErrors.ErrUnauthorized, "fetching tools") } - return nil, nil, fmt.Errorf("failed to get JIT config: %w", err) + return nil, errors.Wrap(err, "fetching runner tools") } - defer func(run *github.Runner) { - if err != nil && run != nil { - _, innerErr := g.RemoveEntityRunner(ctx, run.GetID()) - slog.With(slog.Any("error", innerErr)).ErrorContext( - ctx, "failed to remove runner", - "runner_id", run.GetID(), string(g.entity.EntityType), g.entity.String()) + ret := []commonParams.RunnerApplicationDownload{} + for _, tool := range tools { + if tool == nil { + continue } - }(ret.Runner) - - decoded, err := base64.StdEncoding.DecodeString(*ret.EncodedJITConfig) - if err != nil { - return nil, nil, fmt.Errorf("failed to decode JIT config: %w", err) + ret = append(ret, commonParams.RunnerApplicationDownload(*tool)) } - - var jitConfig map[string]string - if err := json.Unmarshal(decoded, &jitConfig); err != nil { - return nil, nil, fmt.Errorf("failed to unmarshal JIT config: %w", err) - } - - return jitConfig, ret.Runner, nil -} - -func GithubClient(ctx context.Context, entity params.GithubEntity, credsDetails params.GithubCredentials) (common.GithubClient, error) { - httpClient, err := credsDetails.GetHTTPClient(ctx) - if err != nil { - return nil, errors.Wrap(err, "fetching http client") - } - - ghClient, err := github.NewClient(httpClient).WithEnterpriseURLs(credsDetails.APIBaseURL, credsDetails.UploadBaseURL) - if err != nil { - return nil, errors.Wrap(err, "fetching github client") - } - - cli := &githubClient{ - ActionsService: ghClient.Actions, - org: ghClient.Organizations, - repo: ghClient.Repositories, - enterprise: ghClient.Enterprise, - entity: entity, - } - return cli, nil + return ret, nil } From 51c7d2a80667bda28876cecd38aa63656a147747 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 6 Apr 2025 16:18:42 +0000 Subject: [PATCH 003/179] Add scaleset types Signed-off-by: Gabriel Adrian Samfira --- params/github.go | 246 ++++++++++++++++++++++++++++++++++++- params/params.go | 12 ++ runner/common/util.go | 6 + runner/pool/stub_client.go | 9 ++ 4 files changed, 272 insertions(+), 1 deletion(-) diff --git a/params/github.go b/params/github.go index cb9cecf0..379b0a68 100644 --- a/params/github.go +++ b/params/github.go @@ -14,7 +14,14 
@@ package params -import "time" +import ( + "fmt" + "net/url" + "time" + + jwt "github.com/golang-jwt/jwt/v5" + "github.com/google/uuid" +) type Event string @@ -208,3 +215,240 @@ type WorkflowJob struct { SiteAdmin bool `json:"site_admin"` } `json:"sender"` } + +type RunnerSetting struct { + Ephemeral bool `json:"ephemeral,omitempty"` + IsElastic bool `json:"isElastic,omitempty"` + DisableUpdate bool `json:"disableUpdate,omitempty"` +} + +type Label struct { + Type string `json:"type"` + Name string `json:"name"` +} + +type RunnerScaleSetStatistic struct { + TotalAvailableJobs int `json:"totalAvailableJobs"` + TotalAcquiredJobs int `json:"totalAcquiredJobs"` + TotalAssignedJobs int `json:"totalAssignedJobs"` + TotalRunningJobs int `json:"totalRunningJobs"` + TotalRegisteredRunners int `json:"totalRegisteredRunners"` + TotalBusyRunners int `json:"totalBusyRunners"` + TotalIdleRunners int `json:"totalIdleRunners"` +} + +type RunnerScaleSet struct { + Id int `json:"id,omitempty"` + Name string `json:"name,omitempty"` + RunnerGroupId int `json:"runnerGroupId,omitempty"` + RunnerGroupName string `json:"runnerGroupName,omitempty"` + Labels []Label `json:"labels,omitempty"` + RunnerSetting RunnerSetting `json:"RunnerSetting,omitempty"` + CreatedOn time.Time `json:"createdOn,omitempty"` + RunnerJitConfigUrl string `json:"runnerJitConfigUrl,omitempty"` + GetAcquirableJobsUrl string `json:"getAcquirableJobsUrl,omitempty"` + AcquireJobsUrl string `json:"acquireJobsUrl,omitempty"` + Statistics *RunnerScaleSetStatistic `json:"statistics,omitempty"` + Status string `json:"status,omitempty"` + Enabled *bool `json:"enabled,omitempty"` +} + +type RunnerScaleSetsResponse struct { + Count int `json:"count"` + RunnerScaleSets []RunnerScaleSet `json:"value"` +} + +type ActionsServiceAdminInfoResponse struct { + URL string `json:"url,omitempty"` + Token string `json:"token,omitempty"` +} + +func (a ActionsServiceAdminInfoResponse) GetURL() (*url.URL, error) { + if a.URL == "" { + return nil, fmt.Errorf("no url specified") + } + u, err := url.ParseRequestURI(a.URL) + if err != nil { + return nil, fmt.Errorf("failed to parse URL: %w", err) + } + return u, nil +} + +func (a ActionsServiceAdminInfoResponse) getJWT() (*jwt.Token, error) { + // We're parsing a token we got from the GitHub API. We can't verify its signature. + // We do need the expiration date however, or other info. 
+ token, _, err := jwt.NewParser().ParseUnverified(a.Token, &jwt.RegisteredClaims{}) + if err != nil { + return nil, fmt.Errorf("failed to parse jwt token: %w", err) + } + return token, nil +} + +func (a ActionsServiceAdminInfoResponse) ExiresAt() (time.Time, error) { + jwt, err := a.getJWT() + if err != nil { + return time.Time{}, fmt.Errorf("failed to decode jwt token: %w", err) + } + expiration, err := jwt.Claims.GetExpirationTime() + if err != nil { + return time.Time{}, fmt.Errorf("failed to get expiration time: %w", err) + } + + return expiration.Time, nil +} + +func (a ActionsServiceAdminInfoResponse) IsExpired() bool { + if exp, err := a.ExiresAt(); err == nil { + return time.Now().UTC().After(exp) + } + return true +} + +func (a ActionsServiceAdminInfoResponse) TimeRemaining() (time.Duration, error) { + exp, err := a.ExiresAt() + if err != nil { + return 0, fmt.Errorf("failed to get expiration: %w", err) + } + now := time.Now().UTC() + return exp.Sub(now), nil +} + +func (a ActionsServiceAdminInfoResponse) ExpiresIn(t time.Duration) bool { + remaining, err := a.TimeRemaining() + if err != nil { + return true + } + return remaining <= t +} + +type ActionsServiceAdminInfoRequest struct { + URL string `json:"url,omitempty"` + RunnerEvent string `json:"runner_event,omitempty"` +} + +type RunnerScaleSetSession struct { + SessionId *uuid.UUID `json:"sessionId,omitempty"` + OwnerName string `json:"ownerName,omitempty"` + RunnerScaleSet *RunnerScaleSet `json:"runnerScaleSet,omitempty"` + MessageQueueUrl string `json:"messageQueueUrl,omitempty"` + MessageQueueAccessToken string `json:"messageQueueAccessToken,omitempty"` + Statistics *RunnerScaleSetStatistic `json:"statistics,omitempty"` +} + +func (a RunnerScaleSetSession) GetURL() (*url.URL, error) { + if a.MessageQueueUrl == "" { + return nil, fmt.Errorf("no url specified") + } + u, err := url.ParseRequestURI(a.MessageQueueUrl) + if err != nil { + return nil, fmt.Errorf("failed to parse URL: %w", err) + } + return u, nil +} + +func (a RunnerScaleSetSession) getJWT() (*jwt.Token, error) { + // We're parsing a token we got from the GitHub API. We can't verify its signature. + // We do need the expiration date however, or other info. 
+ token, _, err := jwt.NewParser().ParseUnverified(a.MessageQueueAccessToken, &jwt.RegisteredClaims{}) + if err != nil { + return nil, fmt.Errorf("failed to parse jwt token: %w", err) + } + return token, nil +} + +func (a RunnerScaleSetSession) ExiresAt() (time.Time, error) { + jwt, err := a.getJWT() + if err != nil { + return time.Time{}, fmt.Errorf("failed to decode jwt token: %w", err) + } + expiration, err := jwt.Claims.GetExpirationTime() + if err != nil { + return time.Time{}, fmt.Errorf("failed to get expiration time: %w", err) + } + + return expiration.Time, nil +} + +func (a RunnerScaleSetSession) IsExpired() bool { + if exp, err := a.ExiresAt(); err == nil { + return time.Now().UTC().After(exp) + } + return true +} + +func (a RunnerScaleSetSession) TimeRemaining() (time.Duration, error) { + exp, err := a.ExiresAt() + if err != nil { + return 0, fmt.Errorf("failed to get expiration: %w", err) + } + now := time.Now().UTC() + return exp.Sub(now), nil +} + +func (a RunnerScaleSetSession) ExpiresIn(t time.Duration) bool { + remaining, err := a.TimeRemaining() + if err != nil { + return true + } + return remaining <= t +} + +type RunnerScaleSetMessage struct { + MessageId int64 `json:"messageId"` + MessageType string `json:"messageType"` + Body string `json:"body"` + Statistics *RunnerScaleSetStatistic `json:"statistics"` +} + +type RunnerReference struct { + Id int `json:"id"` + Name string `json:"name"` + RunnerScaleSetId int `json:"runnerScaleSetId"` + CreatedOn time.Time `json:"createdOn"` + RunnerGroupID uint64 `json:"runnerGroupId"` + RunnerGroupName string `json:"runnerGroupName"` + Version string `json:"version"` + Enabled bool `json:"enabled"` + Ephemeral bool `json:"ephemeral"` + Status RunnerStatus `json:"status"` + DisableUpdate bool `json:"disableUpdate"` + ProvisioningState string `json:"provisioningState"` +} + +type RunnerScaleSetJitRunnerConfig struct { + Runner *RunnerReference `json:"runner"` + EncodedJITConfig string `json:"encodedJITConfig"` +} + +type RunnerReferenceList struct { + Count int `json:"count"` + RunnerReferences []RunnerReference `json:"value"` +} + +type AcquirableJobList struct { + Count int `json:"count"` + Jobs []AcquirableJob `json:"value"` +} + +type AcquirableJob struct { + AcquireJobUrl string `json:"acquireJobUrl"` + MessageType string `json:"messageType"` + RunnerRequestId int64 `json:"run0ne00rRequestId"` + RepositoryName string `json:"repositoryName"` + OwnerName string `json:"ownerName"` + JobWorkflowRef string `json:"jobWorkflowRef"` + EventName string `json:"eventName"` + RequestLabels []string `json:"requestLabels"` +} + +type RunnerGroup struct { + ID int64 `json:"id"` + Name string `json:"name"` + Size int64 `json:"size"` + IsDefault bool `json:"isDefaultGroup"` +} + +type RunnerGroupList struct { + Count int `json:"count"` + RunnerGroups []RunnerGroup `json:"value"` +} diff --git a/params/params.go b/params/params.go index 3a337dba..c7a66a05 100644 --- a/params/params.go +++ b/params/params.go @@ -837,6 +837,18 @@ type GithubEntity struct { WebhookSecret string `json:"-"` } +func (g *GithubEntity) GithubURL() string { + switch g.EntityType { + case GithubEntityTypeRepository: + return fmt.Sprintf("%s/%s/%s", g.Credentials.BaseURL, g.Owner, g.Name) + case GithubEntityTypeOrganization: + return fmt.Sprintf("%s/%s", g.Credentials.BaseURL, g.Owner) + case GithubEntityTypeEnterprise: + return fmt.Sprintf("%s/enterprises/%s", g.Credentials.BaseURL, g.Owner) + } + return "" +} + func (g GithubEntity) GetPoolBalancerType() PoolBalancerType 
{ if g.PoolBalancerType == "" { return PoolBalancerTypeRoundRobin diff --git a/runner/common/util.go b/runner/common/util.go index d2e6c16b..ee5110e1 100644 --- a/runner/common/util.go +++ b/runner/common/util.go @@ -2,6 +2,7 @@ package common import ( "context" + "net/url" "github.com/google/go-github/v57/github" @@ -19,6 +20,11 @@ type GithubEntityOperations interface { RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (jitConfigMap map[string]string, runner *github.Runner, err error) + + // GetEntity returns the GitHub entity for which the github client was instanciated. + GetEntity() params.GithubEntity + // GithubBaseURL returns the base URL for the github or GHES API. + GithubBaseURL() *url.URL } // GithubClient that describes the minimum list of functions we need to interact with github. diff --git a/runner/pool/stub_client.go b/runner/pool/stub_client.go index df547501..d291e736 100644 --- a/runner/pool/stub_client.go +++ b/runner/pool/stub_client.go @@ -2,6 +2,7 @@ package pool import ( "context" + "net/url" "github.com/google/go-github/v57/github" @@ -55,3 +56,11 @@ func (s *stubGithubClient) GetEntityJITConfig(_ context.Context, _ string, _ par func (s *stubGithubClient) GetWorkflowJobByID(_ context.Context, _, _ string, _ int64) (*github.WorkflowJob, *github.Response, error) { return nil, nil, s.err } + +func (s *stubGithubClient) GetEntity() params.GithubEntity { + return params.GithubEntity{} +} + +func (s *stubGithubClient) GithubBaseURL() *url.URL { + return nil +} From d7d6d1e31a3b58eb2a6033ab4d3d534b09a2491a Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 6 Apr 2025 16:20:07 +0000 Subject: [PATCH 004/179] Add mocks Signed-off-by: Gabriel Adrian Samfira --- runner/common/mocks/GithubClient.go | 40 +++++++++++++++++++ runner/common/mocks/GithubEntityOperations.go | 40 +++++++++++++++++++ 2 files changed, 80 insertions(+) diff --git a/runner/common/mocks/GithubClient.go b/runner/common/mocks/GithubClient.go index c59c631b..4ca73de3 100644 --- a/runner/common/mocks/GithubClient.go +++ b/runner/common/mocks/GithubClient.go @@ -9,6 +9,8 @@ import ( mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" + + url "net/url" ) // GithubClient is an autogenerated mock type for the GithubClient type @@ -115,6 +117,24 @@ func (_m *GithubClient) DeleteEntityHook(ctx context.Context, id int64) (*github return r0, r1 } +// GetEntity provides a mock function with given fields: +func (_m *GithubClient) GetEntity() params.GithubEntity { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetEntity") + } + + var r0 params.GithubEntity + if rf, ok := ret.Get(0).(func() params.GithubEntity); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(params.GithubEntity) + } + + return r0 +} + // GetEntityHook provides a mock function with given fields: ctx, id func (_m *GithubClient) GetEntityHook(ctx context.Context, id int64) (*github.Hook, error) { ret := _m.Called(ctx, id) @@ -223,6 +243,26 @@ func (_m *GithubClient) GetWorkflowJobByID(ctx context.Context, owner string, re return r0, r1, r2 } +// GithubBaseURL provides a mock function with given fields: +func (_m *GithubClient) GithubBaseURL() *url.URL { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GithubBaseURL") + } 
+ + var r0 *url.URL + if rf, ok := ret.Get(0).(func() *url.URL); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*url.URL) + } + } + + return r0 +} + // ListEntityHooks provides a mock function with given fields: ctx, opts func (_m *GithubClient) ListEntityHooks(ctx context.Context, opts *github.ListOptions) ([]*github.Hook, *github.Response, error) { ret := _m.Called(ctx, opts) diff --git a/runner/common/mocks/GithubEntityOperations.go b/runner/common/mocks/GithubEntityOperations.go index 488387f6..599a04a0 100644 --- a/runner/common/mocks/GithubEntityOperations.go +++ b/runner/common/mocks/GithubEntityOperations.go @@ -9,6 +9,8 @@ import ( mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" + + url "net/url" ) // GithubEntityOperations is an autogenerated mock type for the GithubEntityOperations type @@ -115,6 +117,24 @@ func (_m *GithubEntityOperations) DeleteEntityHook(ctx context.Context, id int64 return r0, r1 } +// GetEntity provides a mock function with given fields: +func (_m *GithubEntityOperations) GetEntity() params.GithubEntity { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetEntity") + } + + var r0 params.GithubEntity + if rf, ok := ret.Get(0).(func() params.GithubEntity); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(params.GithubEntity) + } + + return r0 +} + // GetEntityHook provides a mock function with given fields: ctx, id func (_m *GithubEntityOperations) GetEntityHook(ctx context.Context, id int64) (*github.Hook, error) { ret := _m.Called(ctx, id) @@ -184,6 +204,26 @@ func (_m *GithubEntityOperations) GetEntityJITConfig(ctx context.Context, instan return r0, r1, r2 } +// GithubBaseURL provides a mock function with given fields: +func (_m *GithubEntityOperations) GithubBaseURL() *url.URL { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GithubBaseURL") + } + + var r0 *url.URL + if rf, ok := ret.Get(0).(func() *url.URL); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*url.URL) + } + } + + return r0 +} + // ListEntityHooks provides a mock function with given fields: ctx, opts func (_m *GithubEntityOperations) ListEntityHooks(ctx context.Context, opts *github.ListOptions) ([]*github.Hook, *github.Response, error) { ret := _m.Called(ctx, opts) From e51f19acc834316533f9e7a02f22f641b8d3e91c Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 6 Apr 2025 17:54:35 +0000 Subject: [PATCH 005/179] Fix lint errors Signed-off-by: Gabriel Adrian Samfira --- Makefile | 2 +- auth/instance_middleware.go | 9 ++++++- params/github.go | 28 ++++++++++----------- params/params.go | 24 ++++++++++++++++++ runner/pool/pool.go | 21 ++++++++++------ runner/pool/watcher.go | 2 +- test/integration/credentials_test.go | 12 ++++----- test/integration/endpoints_test.go | 2 +- test/integration/external_provider_test.go | 2 +- util/github/client.go | 2 +- util/github/scalesets/jobs.go | 10 ++++---- util/github/scalesets/message_sessions.go | 29 +++++++++++++--------- util/github/scalesets/runners.go | 10 ++++---- util/github/scalesets/scalesets.go | 20 +++++++-------- util/github/scalesets/util.go | 4 +-- 15 files changed, 109 insertions(+), 68 deletions(-) diff --git a/Makefile b/Makefile index a655e9f5..ad9ed795 100644 --- a/Makefile +++ b/Makefile @@ -106,7 +106,7 @@ $(LOCALBIN): GOLANGCI_LINT ?= $(LOCALBIN)/golangci-lint ## Tool Versions -GOLANGCI_LINT_VERSION ?= v1.61.0 +GOLANGCI_LINT_VERSION ?= v1.64.8 .PHONY: golangci-lint 
golangci-lint: $(GOLANGCI_LINT) ## Download golangci-lint locally if necessary. If wrong version is installed, it will be overwritten. diff --git a/auth/instance_middleware.go b/auth/instance_middleware.go index c21be3e7..b7194d5c 100644 --- a/auth/instance_middleware.go +++ b/auth/instance_middleware.go @@ -18,6 +18,7 @@ import ( "context" "fmt" "log/slog" + "math" "net/http" "strings" "time" @@ -63,7 +64,13 @@ func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity str // Token expiration is equal to the bootstrap timeout set on the pool plus the polling // interval garm uses to check for timed out runners. Runners that have not sent their info // by the end of this interval are most likely failed and will be reaped by garm anyway. - expireToken := time.Now().Add(time.Duration(ttlMinutes)*time.Minute + common.PoolReapTimeoutInterval) + var ttl int + if ttlMinutes > math.MaxInt { + ttl = math.MaxInt + } else { + ttl = int(ttlMinutes) + } + expireToken := time.Now().Add(time.Duration(ttl)*time.Minute + common.PoolReapTimeoutInterval) expires := &jwt.NumericDate{ Time: expireToken, } diff --git a/params/github.go b/params/github.go index 379b0a68..81540683 100644 --- a/params/github.go +++ b/params/github.go @@ -238,16 +238,16 @@ type RunnerScaleSetStatistic struct { } type RunnerScaleSet struct { - Id int `json:"id,omitempty"` + ID int `json:"id,omitempty"` Name string `json:"name,omitempty"` - RunnerGroupId int `json:"runnerGroupId,omitempty"` + RunnerGroupID int `json:"runnerGroupId,omitempty"` RunnerGroupName string `json:"runnerGroupName,omitempty"` Labels []Label `json:"labels,omitempty"` RunnerSetting RunnerSetting `json:"RunnerSetting,omitempty"` CreatedOn time.Time `json:"createdOn,omitempty"` - RunnerJitConfigUrl string `json:"runnerJitConfigUrl,omitempty"` - GetAcquirableJobsUrl string `json:"getAcquirableJobsUrl,omitempty"` - AcquireJobsUrl string `json:"acquireJobsUrl,omitempty"` + RunnerJitConfigURL string `json:"runnerJitConfigUrl,omitempty"` + GetAcquirableJobsURL string `json:"getAcquirableJobsUrl,omitempty"` + AcquireJobsURL string `json:"acquireJobsUrl,omitempty"` Statistics *RunnerScaleSetStatistic `json:"statistics,omitempty"` Status string `json:"status,omitempty"` Enabled *bool `json:"enabled,omitempty"` @@ -327,19 +327,19 @@ type ActionsServiceAdminInfoRequest struct { } type RunnerScaleSetSession struct { - SessionId *uuid.UUID `json:"sessionId,omitempty"` + SessionID *uuid.UUID `json:"sessionId,omitempty"` OwnerName string `json:"ownerName,omitempty"` RunnerScaleSet *RunnerScaleSet `json:"runnerScaleSet,omitempty"` - MessageQueueUrl string `json:"messageQueueUrl,omitempty"` + MessageQueueURL string `json:"messageQueueUrl,omitempty"` MessageQueueAccessToken string `json:"messageQueueAccessToken,omitempty"` Statistics *RunnerScaleSetStatistic `json:"statistics,omitempty"` } func (a RunnerScaleSetSession) GetURL() (*url.URL, error) { - if a.MessageQueueUrl == "" { + if a.MessageQueueURL == "" { return nil, fmt.Errorf("no url specified") } - u, err := url.ParseRequestURI(a.MessageQueueUrl) + u, err := url.ParseRequestURI(a.MessageQueueURL) if err != nil { return nil, fmt.Errorf("failed to parse URL: %w", err) } @@ -394,16 +394,16 @@ func (a RunnerScaleSetSession) ExpiresIn(t time.Duration) bool { } type RunnerScaleSetMessage struct { - MessageId int64 `json:"messageId"` + MessageID int64 `json:"messageId"` MessageType string `json:"messageType"` Body string `json:"body"` Statistics *RunnerScaleSetStatistic `json:"statistics"` } type RunnerReference 
struct { - Id int `json:"id"` + ID int `json:"id"` Name string `json:"name"` - RunnerScaleSetId int `json:"runnerScaleSetId"` + RunnerScaleSetID int `json:"runnerScaleSetId"` CreatedOn time.Time `json:"createdOn"` RunnerGroupID uint64 `json:"runnerGroupId"` RunnerGroupName string `json:"runnerGroupName"` @@ -431,9 +431,9 @@ type AcquirableJobList struct { } type AcquirableJob struct { - AcquireJobUrl string `json:"acquireJobUrl"` + AcquireJobURL string `json:"acquireJobUrl"` MessageType string `json:"messageType"` - RunnerRequestId int64 `json:"run0ne00rRequestId"` + RunnerRequestID int64 `json:"run0ne00rRequestId"` RepositoryName string `json:"repositoryName"` OwnerName string `json:"ownerName"` JobWorkflowRef string `json:"jobWorkflowRef"` diff --git a/params/params.go b/params/params.go index c7a66a05..375edc10 100644 --- a/params/params.go +++ b/params/params.go @@ -22,6 +22,7 @@ import ( "encoding/json" "encoding/pem" "fmt" + "math" "net/http" "time" @@ -326,6 +327,21 @@ type Pool struct { Priority uint `json:"priority,omitempty"` } +func (p Pool) MinIdleRunnersAsInt() int { + if p.MinIdleRunners > math.MaxInt { + return math.MaxInt + } + + return int(p.MinIdleRunners) +} + +func (p Pool) MaxRunnersAsInt() int { + if p.MaxRunners > math.MaxInt { + return math.MaxInt + } + return int(p.MaxRunners) +} + func (p Pool) GithubEntity() (GithubEntity, error) { switch p.PoolType() { case GithubEntityTypeRepository: @@ -611,6 +627,14 @@ type ControllerInfo struct { Version string `json:"version,omitempty"` } +func (c *ControllerInfo) JobBackoff() time.Duration { + if math.MaxInt64 > c.MinimumJobAgeBackoff { + return time.Duration(math.MaxInt64) + } + + return time.Duration(int64(c.MinimumJobAgeBackoff)) +} + type GithubCredentials struct { ID uint `json:"id,omitempty"` Name string `json:"name,omitempty"` diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 9e86c415..18e2d0a4 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -67,7 +67,7 @@ const ( func NewEntityPoolManager(ctx context.Context, entity params.GithubEntity, instanceTokenGetter auth.InstanceTokenGetter, providers map[string]common.Provider, store dbCommon.Store) (common.PoolManager, error) { ctx = garmUtil.WithSlogContext(ctx, slog.Any("pool_mgr", entity.String()), slog.Any("pool_type", entity.EntityType)) - ghc, err := ghClient.GithubClient(ctx, entity) + ghc, err := ghClient.Client(ctx, entity) if err != nil { return nil, errors.Wrap(err, "getting github client") } @@ -1044,7 +1044,7 @@ func (r *basePoolManager) scaleDownOnePool(ctx context.Context, pool params.Pool return nil } - surplus := float64(len(idleWorkers) - int(pool.MinIdleRunners)) + surplus := float64(len(idleWorkers) - pool.MinIdleRunnersAsInt()) if surplus <= 0 { return nil @@ -1124,7 +1124,7 @@ func (r *basePoolManager) addRunnerToPool(pool params.Pool, aditionalLabels []st return fmt.Errorf("failed to list pool instances: %w", err) } - if poolInstanceCount >= int64(pool.MaxRunners) { + if poolInstanceCount >= int64(pool.MaxRunnersAsInt()) { return fmt.Errorf("max workers (%d) reached for pool %s", pool.MaxRunners, pool.ID) } @@ -1160,14 +1160,19 @@ func (r *basePoolManager) ensureIdleRunnersForOnePool(pool params.Pool) error { } var required int - if len(idleOrPendingWorkers) < int(pool.MinIdleRunners) { + if len(idleOrPendingWorkers) < pool.MinIdleRunnersAsInt() { // get the needed delta. 
- required = int(pool.MinIdleRunners) - len(idleOrPendingWorkers) + required = pool.MinIdleRunnersAsInt() - len(idleOrPendingWorkers) projectedInstanceCount := len(existingInstances) + required - if uint(projectedInstanceCount) > pool.MaxRunners { + + var projected uint + if projectedInstanceCount > 0 { + projected = uint(projectedInstanceCount) + } + if projected > pool.MaxRunners { // ensure we don't go above max workers - delta := projectedInstanceCount - int(pool.MaxRunners) + delta := projectedInstanceCount - pool.MaxRunnersAsInt() required -= delta } } @@ -1748,7 +1753,7 @@ func (r *basePoolManager) consumeQueuedJobs() error { continue } - if time.Since(job.UpdatedAt) < time.Second*time.Duration(r.controllerInfo.MinimumJobAgeBackoff) { + if time.Since(job.UpdatedAt) < time.Second*r.controllerInfo.JobBackoff() { // give the idle runners a chance to pick up the job. slog.DebugContext( r.ctx, "job backoff not reached", "backoff_interval", r.controllerInfo.MinimumJobAgeBackoff, diff --git a/runner/pool/watcher.go b/runner/pool/watcher.go index 29950748..7f05d93b 100644 --- a/runner/pool/watcher.go +++ b/runner/pool/watcher.go @@ -28,7 +28,7 @@ func (r *basePoolManager) handleControllerUpdateEvent(controllerInfo params.Cont func (r *basePoolManager) getClientOrStub() runnerCommon.GithubClient { var err error var ghc runnerCommon.GithubClient - ghc, err = ghClient.GithubClient(r.ctx, r.entity) + ghc, err = ghClient.Client(r.ctx, r.entity) if err != nil { slog.WarnContext(r.ctx, "failed to create github client", "error", err) ghc = &stubGithubClient{ diff --git a/test/integration/credentials_test.go b/test/integration/credentials_test.go index b83f131f..8d92bf22 100644 --- a/test/integration/credentials_test.go +++ b/test/integration/credentials_test.go @@ -18,7 +18,7 @@ func (suite *GarmSuite) TestGithubCredentialsErrorOnDuplicateCredentialsName() { creds, err := suite.createDummyCredentials(dummyCredentialsName, defaultEndpointName) suite.NoError(err) t.Cleanup(func() { - suite.DeleteGithubCredential(int64(creds.ID)) + suite.DeleteGithubCredential(int64(creds.ID)) //nolint:gosec }) createCredsParams := params.CreateGithubCredentialsParams{ @@ -54,10 +54,10 @@ func (suite *GarmSuite) TestGithubCredentialsFailsToDeleteWhenInUse() { suite.NoError(err) t.Cleanup(func() { deleteRepo(suite.cli, suite.authToken, repo.ID) - deleteGithubCredentials(suite.cli, suite.authToken, int64(creds.ID)) + deleteGithubCredentials(suite.cli, suite.authToken, int64(creds.ID)) //nolint:gosec }) - err = deleteGithubCredentials(suite.cli, suite.authToken, int64(creds.ID)) + err = deleteGithubCredentials(suite.cli, suite.authToken, int64(creds.ID)) //nolint:gosec suite.Error(err, "expected error when deleting credentials in use") } @@ -120,7 +120,7 @@ func (suite *GarmSuite) TestGithubCredentialsUpdateFailsWhenBothPATAndAppAreSupp creds, err := suite.createDummyCredentials(dummyCredentialsName, defaultEndpointName) suite.NoError(err) t.Cleanup(func() { - suite.DeleteGithubCredential(int64(creds.ID)) + suite.DeleteGithubCredential(int64(creds.ID)) //nolint:gosec }) privateKeyBytes, err := getTestFileContents("certs/srv-key.pem") @@ -135,7 +135,7 @@ func (suite *GarmSuite) TestGithubCredentialsUpdateFailsWhenBothPATAndAppAreSupp PrivateKeyBytes: privateKeyBytes, }, } - _, err = updateGithubCredentials(suite.cli, suite.authToken, int64(creds.ID), updateCredsParams) + _, err = updateGithubCredentials(suite.cli, suite.authToken, int64(creds.ID), updateCredsParams) //nolint:gosec suite.Error(err, "expected error when 
updating credentials with both PAT and App") expectAPIStatusCode(err, 400) } @@ -182,7 +182,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailsOnDuplicateName() { creds, err := suite.createDummyCredentials(dummyCredentialsName, defaultEndpointName) suite.NoError(err) t.Cleanup(func() { - suite.DeleteGithubCredential(int64(creds.ID)) + suite.DeleteGithubCredential(int64(creds.ID)) //nolint:gosec }) createCredsParams := params.CreateGithubCredentialsParams{ diff --git a/test/integration/endpoints_test.go b/test/integration/endpoints_test.go index c6295349..e09916bc 100644 --- a/test/integration/endpoints_test.go +++ b/test/integration/endpoints_test.go @@ -100,7 +100,7 @@ func (suite *GarmSuite) TestGithubEndpointDeletionFailsWhenCredentialsExist() { err = deleteGithubEndpoint(suite.cli, suite.authToken, endpoint.Name) suite.Error(err, "expected error when deleting endpoint with credentials") - err = suite.DeleteGithubCredential(int64(creds.ID)) + err = suite.DeleteGithubCredential(int64(creds.ID)) //nolint:gosec suite.NoError(err, "error deleting credentials") err = suite.DeleteGithubEndpoint(endpoint.Name) suite.NoError(err, "error deleting endpoint") diff --git a/test/integration/external_provider_test.go b/test/integration/external_provider_test.go index cc6b18aa..ceb5b162 100644 --- a/test/integration/external_provider_test.go +++ b/test/integration/external_provider_test.go @@ -73,7 +73,7 @@ func (suite *GarmSuite) WaitPoolInstances(poolID string, status commonParams.Ins "Pool instance with pool_id %s reached status %v and runner_status %v, desired_instance_count %d, pool_instance_count %d", poolID, status, runnerStatus, instancesCount, len(poolInstances)) - if int(pool.MinIdleRunners) == instancesCount { + if pool.MinIdleRunnersAsInt() == instancesCount { return nil } time.Sleep(5 * time.Second) diff --git a/util/github/client.go b/util/github/client.go index 800c5b00..d4251764 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -445,7 +445,7 @@ func (g *githubClient) GithubBaseURL() *url.URL { return g.cli.BaseURL } -func GithubClient(ctx context.Context, entity params.GithubEntity) (common.GithubClient, error) { +func Client(ctx context.Context, entity params.GithubEntity) (common.GithubClient, error) { // func GithubClient(ctx context.Context, entity params.GithubEntity) (common.GithubClient, error) { httpClient, err := entity.Credentials.GetHTTPClient(ctx) if err != nil { diff --git a/util/github/scalesets/jobs.go b/util/github/scalesets/jobs.go index b087ad63..defc9506 100644 --- a/util/github/scalesets/jobs.go +++ b/util/github/scalesets/jobs.go @@ -29,10 +29,10 @@ type acquireJobsResult struct { Value []int64 `json:"value"` } -func (s *ScaleSetClient) AcquireJobs(ctx context.Context, runnerScaleSetId int, messageQueueAccessToken string, requestIds []int64) ([]int64, error) { - u := fmt.Sprintf("%s/%d/acquirejobs?api-version=6.0-preview", scaleSetEndpoint, runnerScaleSetId) +func (s *ScaleSetClient) AcquireJobs(ctx context.Context, runnerScaleSetID int, messageQueueAccessToken string, requestIDs []int64) ([]int64, error) { + u := fmt.Sprintf("%s/%d/acquirejobs?api-version=6.0-preview", scaleSetEndpoint, runnerScaleSetID) - body, err := json.Marshal(requestIds) + body, err := json.Marshal(requestIDs) if err != nil { return nil, err } @@ -60,8 +60,8 @@ func (s *ScaleSetClient) AcquireJobs(ctx context.Context, runnerScaleSetId int, return acquiredJobs.Value, nil } -func (s *ScaleSetClient) GetAcquirableJobs(ctx context.Context, runnerScaleSetId int) 
(params.AcquirableJobList, error) { - path := fmt.Sprintf("%d/acquirablejobs", runnerScaleSetId) +func (s *ScaleSetClient) GetAcquirableJobs(ctx context.Context, runnerScaleSetID int) (params.AcquirableJobList, error) { + path := fmt.Sprintf("%d/acquirablejobs", runnerScaleSetID) req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) if err != nil { diff --git a/util/github/scalesets/message_sessions.go b/util/github/scalesets/message_sessions.go index ae70239e..8af10173 100644 --- a/util/github/scalesets/message_sessions.go +++ b/util/github/scalesets/message_sessions.go @@ -17,11 +17,12 @@ package scalesets import ( "bytes" "context" + "crypto/rand" "encoding/json" "errors" "fmt" "log/slog" - "math/rand/v2" + "math/big" "net/http" "net/url" "strconv" @@ -103,7 +104,7 @@ func (m *MessageSession) SessionsRelativeURL() (string, error) { if m.session.RunnerScaleSet == nil { return "", fmt.Errorf("runner scale set is nil") } - relativePath := fmt.Sprintf("%s/%d/sessions/%s", scaleSetEndpoint, m.session.RunnerScaleSet.Id, m.session.SessionId.String()) + relativePath := fmt.Sprintf("%s/%d/sessions/%s", scaleSetEndpoint, m.session.RunnerScaleSet.ID, m.session.SessionID.String()) return relativePath, nil } @@ -138,7 +139,11 @@ func (m *MessageSession) maybeRefreshToken(ctx context.Context) error { return fmt.Errorf("session is nil") } // add some jitter - jitter := time.Duration(rand.IntN(10000)) * time.Millisecond + randInt, err := rand.Int(rand.Reader, big.NewInt(1000)) + if err != nil { + return fmt.Errorf("failed to get a random number") + } + jitter := time.Duration(randInt.Int64()) * time.Millisecond if m.session.ExpiresIn(2*time.Minute + jitter) { if err := m.Refresh(ctx); err != nil { return fmt.Errorf("failed to refresh message queue token: %w", err) @@ -147,15 +152,15 @@ func (m *MessageSession) maybeRefreshToken(ctx context.Context) error { return nil } -func (m *MessageSession) GetMessage(ctx context.Context, lastMessageId int64, maxCapacity uint) (params.RunnerScaleSetMessage, error) { - u, err := url.Parse(m.session.MessageQueueUrl) +func (m *MessageSession) GetMessage(ctx context.Context, lastMessageID int64, maxCapacity uint) (params.RunnerScaleSetMessage, error) { + u, err := url.Parse(m.session.MessageQueueURL) if err != nil { return params.RunnerScaleSetMessage{}, err } - if lastMessageId > 0 { + if lastMessageID > 0 { q := u.Query() - q.Set("lastMessageId", strconv.FormatInt(lastMessageId, 10)) + q.Set("lastMessageId", strconv.FormatInt(lastMessageID, 10)) u.RawQuery = q.Encode() } @@ -185,13 +190,13 @@ func (m *MessageSession) GetMessage(ctx context.Context, lastMessageId int64, ma return message, nil } -func (m *MessageSession) DeleteMessage(ctx context.Context, messageId int64) error { - u, err := url.Parse(m.session.MessageQueueUrl) +func (m *MessageSession) DeleteMessage(ctx context.Context, messageID int64) error { + u, err := url.Parse(m.session.MessageQueueURL) if err != nil { return err } - u.Path = fmt.Sprintf("%s/%d", u.Path, messageId) + u.Path = fmt.Sprintf("%s/%d", u.Path, messageID) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, u.String(), nil) if err != nil { @@ -210,8 +215,8 @@ func (m *MessageSession) DeleteMessage(ctx context.Context, messageId int64) err return nil } -func (s *ScaleSetClient) CreateMessageSession(ctx context.Context, runnerScaleSetId int, owner string) (*MessageSession, error) { - path := fmt.Sprintf("%s/%d/sessions", scaleSetEndpoint, runnerScaleSetId) +func (s *ScaleSetClient) CreateMessageSession(ctx 
context.Context, runnerScaleSetID int, owner string) (*MessageSession, error) { + path := fmt.Sprintf("%s/%d/sessions", scaleSetEndpoint, runnerScaleSetID) newSession := params.RunnerScaleSetSession{ OwnerName: owner, diff --git a/util/github/scalesets/runners.go b/util/github/scalesets/runners.go index 2d1519dc..d4d2b3f6 100644 --- a/util/github/scalesets/runners.go +++ b/util/github/scalesets/runners.go @@ -41,7 +41,7 @@ func (s *ScaleSetClient) GenerateJitRunnerConfig(ctx context.Context, runnerName return params.RunnerScaleSetJitRunnerConfig{}, err } - req, err := s.newActionsRequest(ctx, http.MethodPost, scaleSet.RunnerJitConfigUrl, bytes.NewBuffer(body)) + req, err := s.newActionsRequest(ctx, http.MethodPost, scaleSet.RunnerJitConfigURL, bytes.NewBuffer(body)) if err != nil { return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to create request: %w", err) } @@ -59,8 +59,8 @@ func (s *ScaleSetClient) GenerateJitRunnerConfig(ctx context.Context, runnerName return runnerJitConfig, nil } -func (s *ScaleSetClient) GetRunner(ctx context.Context, runnerId int64) (params.RunnerReference, error) { - path := fmt.Sprintf("%s/%d", runnerEndpoint, runnerId) +func (s *ScaleSetClient) GetRunner(ctx context.Context, runnerID int64) (params.RunnerReference, error) { + path := fmt.Sprintf("%s/%d", runnerEndpoint, runnerID) req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) if err != nil { @@ -111,8 +111,8 @@ func (s *ScaleSetClient) GetRunnerByName(ctx context.Context, runnerName string) return runnerList.RunnerReferences[0], nil } -func (s *ScaleSetClient) RemoveRunner(ctx context.Context, runnerId int64) error { - path := fmt.Sprintf("%s/%d", runnerEndpoint, runnerId) +func (s *ScaleSetClient) RemoveRunner(ctx context.Context, runnerID int64) error { + path := fmt.Sprintf("%s/%d", runnerEndpoint, runnerID) req, err := s.newActionsRequest(ctx, http.MethodDelete, path, nil) if err != nil { diff --git a/util/github/scalesets/scalesets.go b/util/github/scalesets/scalesets.go index 7c70daec..f7ef2763 100644 --- a/util/github/scalesets/scalesets.go +++ b/util/github/scalesets/scalesets.go @@ -36,8 +36,8 @@ const ( HeaderGitHubRequestID = "X-GitHub-Request-Id" ) -func (s *ScaleSetClient) GetRunnerScaleSetByNameAndRunnerGroup(ctx context.Context, runnerGroupId int, name string) (params.RunnerScaleSet, error) { - path := fmt.Sprintf("%s?runnerGroupId=%d&name=%s", scaleSetEndpoint, runnerGroupId, name) +func (s *ScaleSetClient) GetRunnerScaleSetByNameAndRunnerGroup(ctx context.Context, runnerGroupID int, name string) (params.RunnerScaleSet, error) { + path := fmt.Sprintf("%s?runnerGroupId=%d&name=%s", scaleSetEndpoint, runnerGroupID, name) req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) if err != nil { return params.RunnerScaleSet{}, err @@ -53,7 +53,7 @@ func (s *ScaleSetClient) GetRunnerScaleSetByNameAndRunnerGroup(ctx context.Conte return params.RunnerScaleSet{}, fmt.Errorf("failed to decode response: %w", err) } if runnerScaleSetList.Count == 0 { - return params.RunnerScaleSet{}, runnerErrors.NewNotFoundError("runner scale set with name %s and runner group ID %d was not found", name, runnerGroupId) + return params.RunnerScaleSet{}, runnerErrors.NewNotFoundError("runner scale set with name %s and runner group ID %d was not found", name, runnerGroupID) } // Runner scale sets must have a uniqe name. 
Attempting to create a runner scale set with the same name as @@ -61,8 +61,8 @@ func (s *ScaleSetClient) GetRunnerScaleSetByNameAndRunnerGroup(ctx context.Conte return runnerScaleSetList.RunnerScaleSets[0], nil } -func (s *ScaleSetClient) GetRunnerScaleSetById(ctx context.Context, runnerScaleSetId int) (params.RunnerScaleSet, error) { - path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetId) +func (s *ScaleSetClient) GetRunnerScaleSetByID(ctx context.Context, runnerScaleSetID int) (params.RunnerScaleSet, error) { + path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetID) req, err := s.newActionsRequest(ctx, http.MethodGet, path, nil) if err != nil { return params.RunnerScaleSet{}, err @@ -70,7 +70,7 @@ func (s *ScaleSetClient) GetRunnerScaleSetById(ctx context.Context, runnerScaleS resp, err := s.Do(req) if err != nil { - return params.RunnerScaleSet{}, fmt.Errorf("failed to get runner scaleset with ID %d: %w", runnerScaleSetId, err) + return params.RunnerScaleSet{}, fmt.Errorf("failed to get runner scaleset with ID %d: %w", runnerScaleSetID, err) } var runnerScaleSet params.RunnerScaleSet @@ -127,8 +127,8 @@ func (s *ScaleSetClient) CreateRunnerScaleSet(ctx context.Context, runnerScaleSe return createdRunnerScaleSet, nil } -func (s *ScaleSetClient) UpdateRunnerScaleSet(ctx context.Context, runnerScaleSetId int, runnerScaleSet params.RunnerScaleSet) (params.RunnerScaleSet, error) { - path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetId) +func (s *ScaleSetClient) UpdateRunnerScaleSet(ctx context.Context, runnerScaleSetID int, runnerScaleSet params.RunnerScaleSet) (params.RunnerScaleSet, error) { + path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetID) body, err := json.Marshal(runnerScaleSet) if err != nil { @@ -152,8 +152,8 @@ func (s *ScaleSetClient) UpdateRunnerScaleSet(ctx context.Context, runnerScaleSe return ret, nil } -func (s *ScaleSetClient) DeleteRunnerScaleSet(ctx context.Context, runnerScaleSetId int) error { - path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetId) +func (s *ScaleSetClient) DeleteRunnerScaleSet(ctx context.Context, runnerScaleSetID int) error { + path := fmt.Sprintf("%s/%d", scaleSetEndpoint, runnerScaleSetID) req, err := s.newActionsRequest(ctx, http.MethodDelete, path, nil) if err != nil { return err diff --git a/util/github/scalesets/util.go b/util/github/scalesets/util.go index 4f79098b..15c3a5cf 100644 --- a/util/github/scalesets/util.go +++ b/util/github/scalesets/util.go @@ -26,12 +26,12 @@ func (s *ScaleSetClient) newActionsRequest(ctx context.Context, method, path str return nil, fmt.Errorf("failed to update token: %w", err) } - actionsUri, err := s.actionsServiceInfo.GetURL() + actionsURI, err := s.actionsServiceInfo.GetURL() if err != nil { return nil, fmt.Errorf("failed to get pipeline URL: %w", err) } - uri, err := actionsUri.Parse(path) + uri, err := actionsURI.Parse(path) if err != nil { return nil, fmt.Errorf("failed to parse path: %w", err) } From 5ba53adf84f5e60feba71868927f344765845485 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 7 Apr 2025 16:45:05 +0000 Subject: [PATCH 006/179] Switch to locking package The locking logic was added to its own package as it may need to be used by other parts of the code. 
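
For illustration only, a minimal sketch of how a caller is expected to use the
new package-level helpers, assuming the locking package is imported and a
locker was registered at startup (e.g. locking.RegisterLocker with
locking.NewLocalLocker, as wired up in cmd/garm/main.go in this change):

    // Illustrative sketch, not part of this change: guard per-runner work
    // with the shared locker registered at startup.
    func doWork(instanceName string) error {
        locked, err := locking.TryLock(instanceName)
        if err != nil || !locked {
            // No locker registered yet, or another worker holds the lock.
            return err
        }
        defer locking.Unlock(instanceName, false)
        // ... operate on the runner instance ...
        return nil
    }
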
Signed-off-by: Gabriel Adrian Samfira --- cmd/garm/main.go | 12 +++ locking/interface.go | 16 ++++ .../locking.go => locking/local_locker.go | 14 +++- locking/locking.go | 46 ++++++++++ runner/pool/pool.go | 83 ++++++++----------- util/github/scalesets/message_sessions.go | 22 ++--- 6 files changed, 131 insertions(+), 62 deletions(-) create mode 100644 locking/interface.go rename runner/pool/locking.go => locking/local_locker.go (84%) create mode 100644 locking/locking.go diff --git a/cmd/garm/main.go b/cmd/garm/main.go index 45f8fe82..ebb30d55 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -41,6 +41,7 @@ import ( "github.com/cloudbase/garm/database" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/locking" "github.com/cloudbase/garm/metrics" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner" //nolint:typecheck @@ -214,6 +215,17 @@ func main() { log.Fatal(err) } + // Local locker for now. Will be configurable in the future, + // as we add scale-out capability to GARM. + lock, err := locking.NewLocalLocker(ctx, db) + if err != nil { + log.Fatalf("failed to create locker: %q", err) + } + + if err := locking.RegisterLocker(lock); err != nil { + log.Fatalf("failed to register locker: %q", err) + } + if err := maybeUpdateURLsFromConfig(*cfg, db); err != nil { log.Fatal(err) } diff --git a/locking/interface.go b/locking/interface.go new file mode 100644 index 00000000..fd547830 --- /dev/null +++ b/locking/interface.go @@ -0,0 +1,16 @@ +package locking + +import "time" + +// TODO(gabriel-samfira): needs owner attribute. +type Locker interface { + TryLock(key string) bool + Unlock(key string, remove bool) + Delete(key string) +} + +type InstanceDeleteBackoff interface { + ShouldProcess(key string) (bool, time.Time) + Delete(key string) + RecordFailure(key string) +} diff --git a/runner/pool/locking.go b/locking/local_locker.go similarity index 84% rename from runner/pool/locking.go rename to locking/local_locker.go index 70471f98..5298c9e7 100644 --- a/runner/pool/locking.go +++ b/locking/local_locker.go @@ -1,9 +1,11 @@ -package pool +package locking import ( + "context" "sync" "time" + dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/runner/common" ) @@ -11,10 +13,16 @@ const ( maxBackoffSeconds float64 = 1200 // 20 minutes ) +func NewLocalLocker(_ context.Context, _ dbCommon.Store) (Locker, error) { + return &keyMutex{}, nil +} + type keyMutex struct { muxes sync.Map } +var _ Locker = &keyMutex{} + func (k *keyMutex) TryLock(key string) bool { mux, _ := k.muxes.LoadOrStore(key, &sync.Mutex{}) keyMux := mux.(*sync.Mutex) @@ -37,6 +45,10 @@ func (k *keyMutex) Delete(key string) { k.muxes.Delete(key) } +func NewInstanceDeleteBackoff(_ context.Context) (InstanceDeleteBackoff, error) { + return &instanceDeleteBackoff{}, nil +} + type instanceBackOff struct { backoffSeconds float64 lastRecordedFailureTime time.Time diff --git a/locking/locking.go b/locking/locking.go new file mode 100644 index 00000000..793edb4e --- /dev/null +++ b/locking/locking.go @@ -0,0 +1,46 @@ +package locking + +import ( + "fmt" + "sync" +) + +var locker Locker +var lockerMux = sync.Mutex{} + +func TryLock(key string) (bool, error) { + if locker == nil { + return false, fmt.Errorf("no locker is registered") + } + + return locker.TryLock(key), nil +} +func Unlock(key string, remove bool) error { + if locker == nil { + return fmt.Errorf("no locker is registered") + } + + locker.Unlock(key, remove) + 
return nil +} + +func Delete(key string) error { + if locker == nil { + return fmt.Errorf("no locker is registered") + } + + locker.Delete(key) + return nil +} + +func RegisterLocker(lock Locker) error { + lockerMux.Lock() + defer lockerMux.Unlock() + + if locker != nil { + return fmt.Errorf("locker already registered") + } + + locker = lock + return nil +} diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 18e2d0a4..f6c97633 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -38,6 +38,7 @@ import ( "github.com/cloudbase/garm/auth" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/locking" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" garmUtil "github.com/cloudbase/garm/util" @@ -92,8 +93,10 @@ func NewEntityPoolManager(ctx context.Context, entity params.GithubEntity, insta } wg := &sync.WaitGroup{} - keyMuxes := &keyMutex{} - backoff := &instanceDeleteBackoff{} + backoff, err := locking.NewInstanceDeleteBackoff(ctx) + if err != nil { + return nil, errors.Wrap(err, "creating backoff") + } repo := &basePoolManager{ ctx: ctx, @@ -106,7 +109,6 @@ func NewEntityPoolManager(ctx context.Context, entity params.GithubEntity, insta providers: providers, quit: make(chan struct{}), wg: wg, - keyMux: keyMuxes, backoff: backoff, consumer: consumer, } @@ -132,8 +134,7 @@ type basePoolManager struct { mux sync.Mutex wg *sync.WaitGroup - keyMux *keyMutex - backoff *instanceDeleteBackoff + backoff locking.InstanceDeleteBackoff } func (r *basePoolManager) getProviderBaseParams(pool params.Pool) common.ProviderBaseParams { @@ -414,14 +415,14 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []*github.Runne } for _, instance := range dbInstances { - lockAcquired := r.keyMux.TryLock(instance.Name) - if !lockAcquired { + lockAcquired, err := locking.TryLock(instance.Name) + if !lockAcquired || err != nil { slog.DebugContext( r.ctx, "failed to acquire lock for instance", - "runner_name", instance.Name) + "runner_name", instance.Name, "error", err) continue } - defer r.keyMux.Unlock(instance.Name, false) + defer locking.Unlock(instance.Name, false) switch instance.Status { case commonParams.InstancePendingCreate, @@ -493,14 +494,14 @@ func (r *basePoolManager) reapTimedOutRunners(runners []*github.Runner) error { slog.DebugContext( r.ctx, "attempting to lock instance", "runner_name", instance.Name) - lockAcquired := r.keyMux.TryLock(instance.Name) - if !lockAcquired { + lockAcquired, err := locking.TryLock(instance.Name) + if !lockAcquired || err != nil { slog.DebugContext( r.ctx, "failed to acquire lock for instance", - "runner_name", instance.Name) + "runner_name", instance.Name, "error", err) continue } - defer r.keyMux.Unlock(instance.Name, false) + defer locking.Unlock(instance.Name, false) pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) if err != nil { @@ -624,11 +625,11 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) poolInstanceCache[pool.ID] = poolInstances } - lockAcquired := r.keyMux.TryLock(dbInstance.Name) - if !lockAcquired { + lockAcquired, err := locking.TryLock(dbInstance.Name) + if !lockAcquired || err != nil { slog.DebugContext( r.ctx, "failed to acquire lock for instance", - "runner_name", dbInstance.Name) + "runner_name", dbInstance.Name, "error", err) continue } @@ -637,7 +638,7 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) g.Go(func() error { deleteMux 
:= false defer func() { - r.keyMux.Unlock(dbInstance.Name, deleteMux) + locking.Unlock(dbInstance.Name, deleteMux) }() providerInstance, ok := instanceInList(dbInstance.Name, poolInstances) if !ok { @@ -877,7 +878,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error bootstrapArgs := commonParams.BootstrapInstance{ Name: instance.Name, Tools: r.tools, - RepoURL: r.GithubURL(), + RepoURL: r.entity.GithubURL(), MetadataURL: instance.MetadataURL, CallbackURL: instance.CallbackURL, InstanceToken: jwtToken, @@ -1062,14 +1063,14 @@ func (r *basePoolManager) scaleDownOnePool(ctx context.Context, pool params.Pool for _, instanceToDelete := range idleWorkers[:numScaleDown] { instanceToDelete := instanceToDelete - lockAcquired := r.keyMux.TryLock(instanceToDelete.Name) - if !lockAcquired { + lockAcquired, err := locking.TryLock(instanceToDelete.Name) + if !lockAcquired || err != nil { slog.With(slog.Any("error", err)).ErrorContext( ctx, "failed to acquire lock for instance", - "provider_id", instanceToDelete.Name) + "provider_id", instanceToDelete.Name, "error", err) continue } - defer r.keyMux.Unlock(instanceToDelete.Name, false) + defer locking.Unlock(instanceToDelete.Name, false) g.Go(func() error { slog.InfoContext( @@ -1215,16 +1216,16 @@ func (r *basePoolManager) retryFailedInstancesForOnePool(ctx context.Context, po slog.DebugContext( ctx, "attempting to retry failed instance", "runner_name", instance.Name) - lockAcquired := r.keyMux.TryLock(instance.Name) - if !lockAcquired { + lockAcquired, err := locking.TryLock(instance.Name) + if !lockAcquired || err != nil { slog.DebugContext( ctx, "failed to acquire lock for instance", - "runner_name", instance.Name) + "runner_name", instance.Name, "error", err) continue } g.Go(func() error { - defer r.keyMux.Unlock(instance.Name, false) + defer locking.Unlock(instance.Name, false) slog.DebugContext( ctx, "attempting to clean up any previous instance", "runner_name", instance.Name) @@ -1394,8 +1395,8 @@ func (r *basePoolManager) deletePendingInstances() error { r.ctx, "removing instance from pool", "runner_name", instance.Name, "pool_id", instance.PoolID) - lockAcquired := r.keyMux.TryLock(instance.Name) - if !lockAcquired { + lockAcquired, err := locking.TryLock(instance.Name) + if !lockAcquired || err != nil { slog.InfoContext( r.ctx, "failed to acquire lock for instance", "runner_name", instance.Name) @@ -1407,7 +1408,7 @@ func (r *basePoolManager) deletePendingInstances() error { slog.DebugContext( r.ctx, "backoff in effect for instance", "runner_name", instance.Name, "deadline", deadline) - r.keyMux.Unlock(instance.Name, false) + locking.Unlock(instance.Name, false) continue } @@ -1424,7 +1425,7 @@ func (r *basePoolManager) deletePendingInstances() error { currentStatus := instance.Status deleteMux := false defer func() { - r.keyMux.Unlock(instance.Name, deleteMux) + locking.Unlock(instance.Name, deleteMux) if deleteMux { // deleteMux is set only when the instance was successfully removed. 
// We can use it as a marker to signal that the backoff is no longer @@ -1501,11 +1502,11 @@ func (r *basePoolManager) addPendingInstances() error { r.ctx, "attempting to acquire lock for instance", "runner_name", instance.Name, "action", "create_pending") - lockAcquired := r.keyMux.TryLock(instance.Name) - if !lockAcquired { + lockAcquired, err := locking.TryLock(instance.Name) + if !lockAcquired || err != nil { slog.DebugContext( r.ctx, "failed to acquire lock for instance", - "runner_name", instance.Name) + "runner_name", instance.Name, "error", err) continue } @@ -1515,14 +1516,14 @@ func (r *basePoolManager) addPendingInstances() error { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner status", "runner_name", instance.Name) - r.keyMux.Unlock(instance.Name, false) + locking.Unlock(instance.Name, false) // We failed to transition the instance to Creating. This means that garm will retry to create this instance // when the loop runs again and we end up with multiple instances. continue } go func(instance params.Instance) { - defer r.keyMux.Unlock(instance.Name, false) + defer locking.Unlock(instance.Name, false) slog.InfoContext( r.ctx, "creating instance in pool", "runner_name", instance.Name, @@ -2027,18 +2028,6 @@ func (r *basePoolManager) GetGithubRunners() ([]*github.Runner, error) { return allRunners, nil } -func (r *basePoolManager) GithubURL() string { - switch r.entity.EntityType { - case params.GithubEntityTypeRepository: - return fmt.Sprintf("%s/%s/%s", r.entity.Credentials.BaseURL, r.entity.Owner, r.entity.Name) - case params.GithubEntityTypeOrganization: - return fmt.Sprintf("%s/%s", r.entity.Credentials.BaseURL, r.entity.Owner) - case params.GithubEntityTypeEnterprise: - return fmt.Sprintf("%s/enterprises/%s", r.entity.Credentials.BaseURL, r.entity.Owner) - } - return "" -} - func (r *basePoolManager) GetWebhookInfo(ctx context.Context) (params.HookInfo, error) { allHooks, err := r.listHooks(ctx) if err != nil { diff --git a/util/github/scalesets/message_sessions.go b/util/github/scalesets/message_sessions.go index 8af10173..5ecdd94d 100644 --- a/util/github/scalesets/message_sessions.go +++ b/util/github/scalesets/message_sessions.go @@ -35,18 +35,6 @@ import ( const maxCapacityHeader = "X-ScaleSetMaxCapacity" -func NewMessageSession(ctx context.Context, cli *ScaleSetClient, session *params.RunnerScaleSetSession) (*MessageSession, error) { - sess := &MessageSession{ - ssCli: cli, - session: session, - ctx: ctx, - done: make(chan struct{}), - closed: false, - } - go sess.loop() - return sess, nil -} - type MessageSession struct { ssCli *ScaleSetClient session *params.RunnerScaleSetSession @@ -243,10 +231,16 @@ func (s *ScaleSetClient) CreateMessageSession(ctx context.Context, runnerScaleSe return nil, fmt.Errorf("failed to decode response: %w", err) } - return &MessageSession{ + sess := &MessageSession{ ssCli: s, session: &createdSession, - }, nil + ctx: ctx, + done: make(chan struct{}), + closed: false, + } + go sess.loop() + + return sess, nil } func (s *ScaleSetClient) DeleteMessageSession(ctx context.Context, session *MessageSession) error { From 85eac363d5b81208b34fa56d556d5dd587eafa96 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 8 Apr 2025 09:15:54 +0000 Subject: [PATCH 007/179] Add ScaleSet models, functions and types Signed-off-by: Gabriel Adrian Samfira --- database/common/watcher.go | 1 + database/sql/instances.go | 19 ++ database/sql/models.go | 54 +++++ database/sql/scalesets.go | 381 
++++++++++++++++++++++++++++++++++++ database/sql/sql.go | 1 + database/sql/util.go | 78 ++++++++ database/watcher/filters.go | 74 +++++-- params/params.go | 103 ++++++++++ params/requests.go | 73 +++++++ 9 files changed, 772 insertions(+), 12 deletions(-) create mode 100644 database/sql/scalesets.go diff --git a/database/common/watcher.go b/database/common/watcher.go index d8700189..85df1151 100644 --- a/database/common/watcher.go +++ b/database/common/watcher.go @@ -19,6 +19,7 @@ const ( ControllerEntityType DatabaseEntityType = "controller" GithubCredentialsEntityType DatabaseEntityType = "github_credentials" // #nosec G101 GithubEndpointEntityType DatabaseEntityType = "github_endpoint" + ScaleSetEntityType DatabaseEntityType = "scaleset" ) const ( diff --git a/database/sql/instances.go b/database/sql/instances.go index 864e7ba2..65cf0dba 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -288,6 +288,25 @@ func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]par return ret, nil } +func (s *sqlDatabase) ListScaleSetInstances(_ context.Context, scalesetID uint) ([]params.Instance, error) { + var instances []Instance + query := s.conn.Model(&Instance{}).Preload("Job").Where("scale_set_id = ?", scalesetID) + + if err := query.Find(&instances); err.Error != nil { + return nil, errors.Wrap(err.Error, "fetching instances") + } + + var err error + ret := make([]params.Instance, len(instances)) + for idx, inst := range instances { + ret[idx], err = s.sqlToParamsInstance(inst) + if err != nil { + return nil, errors.Wrap(err, "converting instance") + } + } + return ret, nil +} + func (s *sqlDatabase) ListAllInstances(_ context.Context) ([]params.Instance, error) { var instances []Instance diff --git a/database/sql/models.go b/database/sql/models.go index ac7a056a..d040760c 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -86,6 +86,54 @@ type Pool struct { Priority uint `gorm:"index:idx_pool_priority"` } +// ScaleSet represents a github scale set. Scale sets are almost identical to pools with a few +// notable exceptions: +// - Labels are no longer relevant +// - Workflows will use the scaleset name to target runners. +// - A scale set is a stand alone unit. If a workflow targets a scale set, no other runner will pick up that job. +type ScaleSet struct { + gorm.Model + + // ScaleSetID is the github ID of the scale set. This field may not be set if + // the scale set was ceated in GARM but has not yet been created in GitHub. + ScaleSetID int `gorm:"index:idx_scale_set"` + Name string `gorm:"index:idx_name"` + DisableUpdate bool + + // State stores the provisioning state of the scale set in GitHub + State params.ScaleSetState + // ExtendedState stores a more detailed message regarding the State. + // If an error occurs, the reason for the error will be stored here. + ExtendedState string + + ProviderName string + RunnerPrefix string + MaxRunners uint + MinIdleRunners uint + RunnerBootstrapTimeout uint + Image string + Flavor string + OSType commonParams.OSType + OSArch commonParams.OSArch + Enabled bool + // ExtraSpecs is an opaque json that gets sent to the provider + // as part of the bootstrap params for instances. It can contain + // any kind of data needed by providers. 
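+	// For illustration only, a value stored here might look like
+	// {"disk_size_gb": 100, "network_id": "private-net-0"}. The keys are
+	// hypothetical and entirely provider defined; GARM only validates that
+	// the blob is proper JSON and otherwise passes it through untouched.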
+ ExtraSpecs datatypes.JSON + GitHubRunnerGroup string + + RepoID *uuid.UUID `gorm:"index"` + Repository Repository `gorm:"foreignKey:RepoID;"` + + OrgID *uuid.UUID `gorm:"index"` + Organization Organization `gorm:"foreignKey:OrgID"` + + EnterpriseID *uuid.UUID `gorm:"index"` + Enterprise Enterprise `gorm:"foreignKey:EnterpriseID"` + + Instances []Instance `gorm:"foreignKey:ScaleSetFkID"` +} + type Repository struct { Base @@ -98,6 +146,7 @@ type Repository struct { Name string `gorm:"index:idx_owner_nocase,unique,collate:nocase"` WebhookSecret []byte Pools []Pool `gorm:"foreignKey:RepoID"` + ScaleSets []ScaleSet `gorm:"foreignKey:RepoID"` Jobs []WorkflowJob `gorm:"foreignKey:RepoID;constraint:OnDelete:SET NULL"` PoolBalancerType params.PoolBalancerType `gorm:"type:varchar(64)"` @@ -116,6 +165,7 @@ type Organization struct { Name string `gorm:"index:idx_org_name_nocase,collate:nocase"` WebhookSecret []byte Pools []Pool `gorm:"foreignKey:OrgID"` + ScaleSet []ScaleSet `gorm:"foreignKey:OrgID"` Jobs []WorkflowJob `gorm:"foreignKey:OrgID;constraint:OnDelete:SET NULL"` PoolBalancerType params.PoolBalancerType `gorm:"type:varchar(64)"` @@ -134,6 +184,7 @@ type Enterprise struct { Name string `gorm:"index:idx_ent_name_nocase,collate:nocase"` WebhookSecret []byte Pools []Pool `gorm:"foreignKey:EnterpriseID"` + ScaleSet []ScaleSet `gorm:"foreignKey:EnterpriseID"` Jobs []WorkflowJob `gorm:"foreignKey:EnterpriseID;constraint:OnDelete:SET NULL"` PoolBalancerType params.PoolBalancerType `gorm:"type:varchar(64)"` @@ -187,6 +238,9 @@ type Instance struct { PoolID uuid.UUID Pool Pool `gorm:"foreignKey:PoolID"` + ScaleSetFkID *uint + ScaleSet ScaleSet `gorm:"foreignKey:ScaleSetFkID"` + StatusMessages []InstanceStatusUpdate `gorm:"foreignKey:InstanceID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` Job *WorkflowJob `gorm:"foreignKey:InstanceID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go new file mode 100644 index 00000000..3a5d8431 --- /dev/null +++ b/database/sql/scalesets.go @@ -0,0 +1,381 @@ +// Copyright 2024 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package sql + +import ( + "context" + "fmt" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/params" + "github.com/google/uuid" + "github.com/pkg/errors" + "gorm.io/datatypes" + "gorm.io/gorm" +) + +func (s *sqlDatabase) ListAllScaleSets(_ context.Context) ([]params.ScaleSet, error) { + var scaleSets []ScaleSet + + q := s.conn.Model(&ScaleSet{}). + Preload("Organization"). + Preload("Repository"). + Preload("Enterprise"). + Omit("extra_specs"). + Omit("status_messages"). 
+ Find(&scaleSets) + if q.Error != nil { + return nil, errors.Wrap(q.Error, "fetching all scale sets") + } + + ret := make([]params.ScaleSet, len(scaleSets)) + var err error + for idx, val := range scaleSets { + ret[idx], err = s.sqlToCommonScaleSet(val) + if err != nil { + return nil, errors.Wrap(err, "converting scale sets") + } + } + return ret, nil +} + +func (s *sqlDatabase) CreateEntityScaleSet(_ context.Context, entity params.GithubEntity, param params.CreateScaleSetParams) (scaleSet params.ScaleSet, err error) { + if err := param.Validate(); err != nil { + return params.ScaleSet{}, fmt.Errorf("failed to validate create params: %w", err) + } + + defer func() { + if err == nil { + s.sendNotify(common.ScaleSetEntityType, common.CreateOperation, scaleSet) + } + }() + + newScaleSet := ScaleSet{ + Name: param.Name, + ScaleSetID: param.ScaleSetID, + DisableUpdate: param.DisableUpdate, + ProviderName: param.ProviderName, + RunnerPrefix: param.GetRunnerPrefix(), + MaxRunners: param.MaxRunners, + MinIdleRunners: param.MinIdleRunners, + RunnerBootstrapTimeout: param.RunnerBootstrapTimeout, + Image: param.Image, + Flavor: param.Flavor, + OSType: param.OSType, + OSArch: param.OSArch, + Enabled: param.Enabled, + GitHubRunnerGroup: param.GitHubRunnerGroup, + State: params.ScaleSetPendingCreate, + } + + if len(param.ExtraSpecs) > 0 { + newScaleSet.ExtraSpecs = datatypes.JSON(param.ExtraSpecs) + } + + entityID, err := uuid.Parse(entity.ID) + if err != nil { + return params.ScaleSet{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + } + + switch entity.EntityType { + case params.GithubEntityTypeRepository: + newScaleSet.RepoID = &entityID + case params.GithubEntityTypeOrganization: + newScaleSet.OrgID = &entityID + case params.GithubEntityTypeEnterprise: + newScaleSet.EnterpriseID = &entityID + } + err = s.conn.Transaction(func(tx *gorm.DB) error { + if err := s.hasGithubEntity(tx, entity.EntityType, entity.ID); err != nil { + return errors.Wrap(err, "checking entity existence") + } + + q := tx.Create(&newScaleSet) + if q.Error != nil { + return errors.Wrap(q.Error, "creating scale set") + } + + return nil + }) + if err != nil { + return params.ScaleSet{}, err + } + + dbScaleSet, err := s.getScaleSetByID(s.conn, newScaleSet.ID, "Instances", "Enterprise", "Organization", "Repository") + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "fetching scale set") + } + + return s.sqlToCommonScaleSet(dbScaleSet) +} + +func (s *sqlDatabase) listEntityScaleSets(tx *gorm.DB, entityType params.GithubEntityType, entityID string, preload ...string) ([]ScaleSet, error) { + if _, err := uuid.Parse(entityID); err != nil { + return nil, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + } + + if err := s.hasGithubEntity(tx, entityType, entityID); err != nil { + return nil, errors.Wrap(err, "checking entity existence") + } + + var preloadEntity string + var fieldName string + switch entityType { + case params.GithubEntityTypeRepository: + fieldName = entityTypeRepoName + preloadEntity = "Repository" + case params.GithubEntityTypeOrganization: + fieldName = entityTypeOrgName + preloadEntity = "Organization" + case params.GithubEntityTypeEnterprise: + fieldName = entityTypeEnterpriseName + preloadEntity = "Enterprise" + default: + return nil, fmt.Errorf("invalid entityType: %v", entityType) + } + + q := tx + q = q.Preload(preloadEntity) + if len(preload) > 0 { + for _, item := range preload { + q = q.Preload(item) + } + } + + var scaleSets []ScaleSet + condition := fmt.Sprintf("%s = ?", 
fieldName) + err := q.Model(&ScaleSet{}). + Where(condition, entityID). + Omit("extra_specs"). + Omit("status_messages"). + Find(&scaleSets).Error + if err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return []ScaleSet{}, nil + } + return nil, errors.Wrap(err, "fetching scale sets") + } + + return scaleSets, nil +} + +func (s *sqlDatabase) ListEntityScaleSets(_ context.Context, entity params.GithubEntity) ([]params.ScaleSet, error) { + scaleSets, err := s.listEntityScaleSets(s.conn, entity.EntityType, entity.ID) + if err != nil { + return nil, errors.Wrap(err, "fetching scale sets") + } + + ret := make([]params.ScaleSet, len(scaleSets)) + for idx, set := range scaleSets { + ret[idx], err = s.sqlToCommonScaleSet(set) + if err != nil { + return nil, errors.Wrap(err, "conbverting scale set") + } + } + + return ret, nil +} + +func (s *sqlDatabase) UpdateEntityScaleSet(_ context.Context, entity params.GithubEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, new params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) { + defer func() { + if err == nil { + s.sendNotify(common.ScaleSetEntityType, common.UpdateOperation, updatedScaleSet) + } + }() + err = s.conn.Transaction(func(tx *gorm.DB) error { + scaleSet, err := s.getEntityScaleSet(tx, entity.EntityType, entity.ID, scaleSetID, "Instances") + if err != nil { + return errors.Wrap(err, "fetching scale set") + } + + old, err := s.sqlToCommonScaleSet(scaleSet) + if err != nil { + return errors.Wrap(err, "converting scale set") + } + + updatedScaleSet, err = s.updateScaleSet(tx, scaleSet, param) + if err != nil { + return errors.Wrap(err, "updating scale set") + } + + if callback != nil { + if err := callback(old, updatedScaleSet); err != nil { + return errors.Wrap(err, "executing update callback") + } + } + return nil + }) + if err != nil { + return params.ScaleSet{}, err + } + return updatedScaleSet, nil +} + +func (s *sqlDatabase) getEntityScaleSet(tx *gorm.DB, entityType params.GithubEntityType, entityID string, scaleSetID uint, preload ...string) (ScaleSet, error) { + if entityID == "" { + return ScaleSet{}, errors.Wrap(runnerErrors.ErrBadRequest, "missing entity id") + } + + if scaleSetID == 0 { + return ScaleSet{}, errors.Wrap(runnerErrors.ErrBadRequest, "missing scaleset id") + } + + var fieldName string + var entityField string + switch entityType { + case params.GithubEntityTypeRepository: + fieldName = entityTypeRepoName + entityField = "Repository" + case params.GithubEntityTypeOrganization: + fieldName = entityTypeOrgName + entityField = "Organization" + case params.GithubEntityTypeEnterprise: + fieldName = entityTypeEnterpriseName + entityField = "Enterprise" + default: + return ScaleSet{}, fmt.Errorf("invalid entityType: %v", entityType) + } + + q := tx + q = q.Preload(entityField) + if len(preload) > 0 { + for _, item := range preload { + q = q.Preload(item) + } + } + + var scaleSet ScaleSet + condition := fmt.Sprintf("id = ? and %s = ?", fieldName) + err := q.Model(&ScaleSet{}). + Where(condition, scaleSetID, entityID). 
+ First(&scaleSet).Error + if err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return ScaleSet{}, errors.Wrap(runnerErrors.ErrNotFound, "finding scale set") + } + return ScaleSet{}, errors.Wrap(err, "fetching scale set") + } + + return scaleSet, nil +} + +func (s *sqlDatabase) updateScaleSet(tx *gorm.DB, scaleSet ScaleSet, param params.UpdateScaleSetParams) (params.ScaleSet, error) { + if param.Enabled != nil && scaleSet.Enabled != *param.Enabled { + scaleSet.Enabled = *param.Enabled + } + + if param.State != nil && *param.State != scaleSet.State { + scaleSet.State = *param.State + } + + if param.ExtendedState != nil && *param.ExtendedState != scaleSet.ExtendedState { + scaleSet.ExtendedState = *param.ExtendedState + } + + if param.Name != "" { + scaleSet.Name = param.Name + } + + if param.GitHubRunnerGroup != nil && *param.GitHubRunnerGroup != "" { + scaleSet.GitHubRunnerGroup = *param.GitHubRunnerGroup + } + + if param.Flavor != "" { + scaleSet.Flavor = param.Flavor + } + + if param.Image != "" { + scaleSet.Image = param.Image + } + + if param.Prefix != "" { + scaleSet.RunnerPrefix = param.Prefix + } + + if param.MaxRunners != nil { + scaleSet.MaxRunners = *param.MaxRunners + } + + if param.MinIdleRunners != nil { + scaleSet.MinIdleRunners = *param.MinIdleRunners + } + + if param.OSArch != "" { + scaleSet.OSArch = param.OSArch + } + + if param.OSType != "" { + scaleSet.OSType = param.OSType + } + + if param.ExtraSpecs != nil { + scaleSet.ExtraSpecs = datatypes.JSON(param.ExtraSpecs) + } + + if param.RunnerBootstrapTimeout != nil && *param.RunnerBootstrapTimeout > 0 { + scaleSet.RunnerBootstrapTimeout = *param.RunnerBootstrapTimeout + } + + if param.GitHubRunnerGroup != nil { + scaleSet.GitHubRunnerGroup = *param.GitHubRunnerGroup + } + + if q := tx.Save(&scaleSet); q.Error != nil { + return params.ScaleSet{}, errors.Wrap(q.Error, "saving database entry") + } + + return s.sqlToCommonScaleSet(scaleSet) +} + +func (s *sqlDatabase) GetScaleSetByID(_ context.Context, scaleSet uint) (params.ScaleSet, error) { + set, err := s.getScaleSetByID(s.conn, scaleSet, "Instances", "Enterprise", "Organization", "Repository") + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "fetching scale set by ID") + } + return s.sqlToCommonScaleSet(set) +} + +func (s *sqlDatabase) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) (err error) { + var scaleSet params.ScaleSet + defer func() { + if err == nil && scaleSet.ID != 0 { + s.sendNotify(common.ScaleSetEntityType, common.DeleteOperation, scaleSet) + } + }() + err = s.conn.Transaction(func(tx *gorm.DB) error { + dbSet, err := s.getScaleSetByID(tx, scaleSetID, "Instances") + if err != nil { + return errors.Wrap(err, "fetching scale set") + } + + if len(dbSet.Instances) > 0 { + return runnerErrors.NewBadRequestError("cannot delete scaleset with runners") + } + scaleSet, err = s.sqlToCommonScaleSet(dbSet) + if err != nil { + return errors.Wrap(err, "converting scale set") + } + + if q := tx.Unscoped().Delete(&dbSet); q.Error != nil { + return errors.Wrap(q.Error, "deleting scale set") + } + return nil + }) + if err != nil { + return errors.Wrap(err, "removing scale set") + } + return nil +} diff --git a/database/sql/sql.go b/database/sql/sql.go index 1a024516..4d23d253 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -428,6 +428,7 @@ func (s *sqlDatabase) migrateDB() error { &Instance{}, &ControllerInfo{}, &WorkflowJob{}, + &ScaleSet{}, ); err != nil { return errors.Wrap(err, "running auto migrate") } diff --git 
a/database/sql/util.go b/database/sql/util.go index cc2bbcb9..c5e412a9 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -73,6 +73,10 @@ func (s *sqlDatabase) sqlToParamsInstance(instance Instance) (params.Instance, e AditionalLabels: labels, } + if instance.ScaleSetFkID != nil { + ret.ScaleSetID = *instance.ScaleSetFkID + } + if instance.Job != nil { paramJob, err := sqlWorkflowJobToParamsJob(*instance.Job) if err != nil { @@ -265,6 +269,60 @@ func (s *sqlDatabase) sqlToCommonPool(pool Pool) (params.Pool, error) { return ret, nil } +func (s *sqlDatabase) sqlToCommonScaleSet(scaleSet ScaleSet) (params.ScaleSet, error) { + ret := params.ScaleSet{ + ID: scaleSet.ID, + ScaleSetID: scaleSet.ScaleSetID, + Name: scaleSet.Name, + DisableUpdate: scaleSet.DisableUpdate, + + ProviderName: scaleSet.ProviderName, + MaxRunners: scaleSet.MaxRunners, + MinIdleRunners: scaleSet.MinIdleRunners, + RunnerPrefix: params.RunnerPrefix{ + Prefix: scaleSet.RunnerPrefix, + }, + Image: scaleSet.Image, + Flavor: scaleSet.Flavor, + OSArch: scaleSet.OSArch, + OSType: scaleSet.OSType, + Enabled: scaleSet.Enabled, + Instances: make([]params.Instance, len(scaleSet.Instances)), + RunnerBootstrapTimeout: scaleSet.RunnerBootstrapTimeout, + ExtraSpecs: json.RawMessage(scaleSet.ExtraSpecs), + GitHubRunnerGroup: scaleSet.GitHubRunnerGroup, + State: scaleSet.State, + ExtendedState: scaleSet.ExtendedState, + } + + if scaleSet.RepoID != nil { + ret.RepoID = scaleSet.RepoID.String() + if scaleSet.Repository.Owner != "" && scaleSet.Repository.Name != "" { + ret.RepoName = fmt.Sprintf("%s/%s", scaleSet.Repository.Owner, scaleSet.Repository.Name) + } + } + + if scaleSet.OrgID != nil && scaleSet.Organization.Name != "" { + ret.OrgID = scaleSet.OrgID.String() + ret.OrgName = scaleSet.Organization.Name + } + + if scaleSet.EnterpriseID != nil && scaleSet.Enterprise.Name != "" { + ret.EnterpriseID = scaleSet.EnterpriseID.String() + ret.EnterpriseName = scaleSet.Enterprise.Name + } + + var err error + for idx, inst := range scaleSet.Instances { + ret.Instances[idx], err = s.sqlToParamsInstance(inst) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "converting instance") + } + } + + return ret, nil +} + func (s *sqlDatabase) sqlToCommonTags(tag Tag) params.Tag { return params.Tag{ ID: tag.ID.String(), @@ -452,6 +510,26 @@ func (s *sqlDatabase) getPoolByID(tx *gorm.DB, poolID string, preload ...string) return pool, nil } +func (s *sqlDatabase) getScaleSetByID(tx *gorm.DB, scaleSetID uint, preload ...string) (ScaleSet, error) { + var scaleSet ScaleSet + q := tx.Model(&ScaleSet{}) + if len(preload) > 0 { + for _, item := range preload { + q = q.Preload(item) + } + } + + q = q.Where("id = ?", scaleSetID).First(&scaleSet) + + if q.Error != nil { + if errors.Is(q.Error, gorm.ErrRecordNotFound) { + return ScaleSet{}, runnerErrors.ErrNotFound + } + return ScaleSet{}, errors.Wrap(q.Error, "fetching scale set from database") + } + return scaleSet, nil +} + func (s *sqlDatabase) hasGithubEntity(tx *gorm.DB, entityType params.GithubEntityType, entityID string) error { u, err := uuid.Parse(entityID) if err != nil { diff --git a/database/watcher/filters.go b/database/watcher/filters.go index af1852dc..aa5131b1 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -5,7 +5,7 @@ import ( "github.com/cloudbase/garm/params" ) -type idGetter interface { +type IDGetter interface { GetID() string } @@ -72,21 +72,41 @@ func WithEntityPoolFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilterFu } switch 
ghEntity.EntityType { case params.GithubEntityTypeRepository: - if pool.RepoID != ghEntity.ID { - return false - } + return pool.RepoID == ghEntity.ID case params.GithubEntityTypeOrganization: - if pool.OrgID != ghEntity.ID { - return false - } + return pool.OrgID == ghEntity.ID case params.GithubEntityTypeEnterprise: - if pool.EnterpriseID != ghEntity.ID { - return false - } + return pool.EnterpriseID == ghEntity.ID + default: + return false + } + default: + return false + } + } +} + +// WithEntityPoolFilter returns true if the change payload is a pool that belongs to the +// supplied Github entity. This is useful when an entity worker wants to watch for changes +// in pools that belong to it. +func WithEntityScaleSetFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilterFunc { + return func(payload dbCommon.ChangePayload) bool { + switch payload.EntityType { + case dbCommon.ScaleSetEntityType: + scaleSet, ok := payload.Payload.(params.ScaleSet) + if !ok { + return false + } + switch ghEntity.EntityType { + case params.GithubEntityTypeRepository: + return scaleSet.RepoID == ghEntity.ID + case params.GithubEntityTypeOrganization: + return scaleSet.OrgID == ghEntity.ID + case params.GithubEntityTypeEnterprise: + return scaleSet.EnterpriseID == ghEntity.ID default: return false } - return true default: return false } @@ -100,7 +120,7 @@ func WithEntityFilter(entity params.GithubEntity) dbCommon.PayloadFilterFunc { if params.GithubEntityType(payload.EntityType) != entity.EntityType { return false } - var ent idGetter + var ent IDGetter var ok bool switch payload.EntityType { case dbCommon.RepositoryEntityType: @@ -210,3 +230,33 @@ func WithExcludeEntityTypeFilter(entityType dbCommon.DatabaseEntityType) dbCommo return payload.EntityType != entityType } } + +// WithScaleSetFilter returns a filter function that matches a particular scale set. +func WithScaleSetFilter(scaleset params.ScaleSet) dbCommon.PayloadFilterFunc { + return func(payload dbCommon.ChangePayload) bool { + if payload.EntityType != dbCommon.ScaleSetEntityType { + return false + } + + ss, ok := payload.Payload.(params.ScaleSet) + if !ok { + return false + } + + return ss.ID == scaleset.ID + } +} + +func WithScaleSetInstanceFilter(scaleset params.ScaleSet) dbCommon.PayloadFilterFunc { + return func(payload dbCommon.ChangePayload) bool { + if payload.EntityType != dbCommon.InstanceEntityType { + return false + } + + instance, ok := payload.Payload.(params.Instance) + if !ok { + return false + } + return instance.ScaleSetID == scaleset.ID + } +} diff --git a/params/params.go b/params/params.go index 375edc10..68227dd2 100644 --- a/params/params.go +++ b/params/params.go @@ -45,6 +45,7 @@ type ( WebhookEndpointType string GithubAuthType string PoolBalancerType string + ScaleSetState string ) const ( @@ -128,6 +129,14 @@ func (e GithubEntityType) String() string { return string(e) } +const ( + ScaleSetPendingCreate ScaleSetState = "pending_create" + ScaleSetCreated ScaleSetState = "created" + ScaleSetError ScaleSetState = "error" + ScaleSetPendingDelete ScaleSetState = "pending_delete" + ScaleSetPendingForceDelete ScaleSetState = "pending_force_delete" +) + type StatusMessage struct { CreatedAt time.Time `json:"created_at,omitempty"` Message string `json:"message,omitempty"` @@ -179,6 +188,9 @@ type Instance struct { // PoolID is the ID of the garm pool to which a runner belongs. PoolID string `json:"pool_id,omitempty"` + // ScaleSetID is the ID of the scale set to which a runner belongs. 
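As an aside on the scale set filters added to database/watcher/filters.go above: they are presumably meant to be passed to the watcher when a scale set worker subscribes to database events, the same way the existing pool and entity filters are used. A minimal sketch, assuming a worker package of this shape exists (the package, consumer ID and function are illustrative and not part of this patch):

	package scalesetworker

	import (
		"context"
		"fmt"

		"github.com/cloudbase/garm/database/watcher"
		"github.com/cloudbase/garm/params"
	)

	// subscribe registers a consumer that only receives change payloads
	// referring to the given scale set.
	func subscribe(ctx context.Context, scaleSet params.ScaleSet) error {
		consumer, err := watcher.RegisterConsumer(ctx, "scaleset-worker", watcher.WithScaleSetFilter(scaleSet))
		if err != nil {
			return fmt.Errorf("registering consumer: %w", err)
		}
		// Instance events for runners that belong to the scale set can be
		// subscribed to in the same way via watcher.WithScaleSetInstanceFilter.
		_ = consumer
		return nil
	}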
+ ScaleSetID uint `json:"scale_set_id,omitempty"` + // ProviderFault holds any error messages captured from the IaaS provider that is // responsible for managing the lifecycle of the runner. ProviderFault []byte `json:"provider_fault,omitempty"` @@ -403,6 +415,97 @@ func (p *Pool) HasRequiredLabels(set []string) bool { // used by swagger client generated code type Pools []Pool +type ScaleSet struct { + RunnerPrefix + + ID uint `json:"id,omitempty"` + ScaleSetID int `json:"scale_set_id,omitempty"` + Name string `json:"name,omitempty"` + DisableUpdate bool `json:"disable_update"` + + State ScaleSetState `json:"state"` + ExtendedState string `json:"extended_state,omitempty"` + + ProviderName string `json:"provider_name,omitempty"` + MaxRunners uint `json:"max_runners,omitempty"` + MinIdleRunners uint `json:"min_idle_runners,omitempty"` + Image string `json:"image,omitempty"` + Flavor string `json:"flavor,omitempty"` + OSType commonParams.OSType `json:"os_type,omitempty"` + OSArch commonParams.OSArch `json:"os_arch,omitempty"` + Enabled bool `json:"enabled,omitempty"` + Instances []Instance `json:"instances,omitempty"` + + RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` + // ExtraSpecs is an opaque raw json that gets sent to the provider + // as part of the bootstrap params for instances. It can contain + // any kind of data needed by providers. The contents of this field means + // nothing to garm itself. We don't act on the information in this field at + // all. We only validate that it's a proper json. + ExtraSpecs json.RawMessage `json:"extra_specs,omitempty"` + // GithubRunnerGroup is the github runner group in which the runners will be added. + // The runner group must be created by someone with access to the enterprise. + GitHubRunnerGroup string `json:"github-runner-group,omitempty"` + + StatusMessages []StatusMessage `json:"status_messages"` + + RepoID string `json:"repo_id,omitempty"` + RepoName string `json:"repo_name,omitempty"` + + OrgID string `json:"org_id,omitempty"` + OrgName string `json:"org_name,omitempty"` + + EnterpriseID string `json:"enterprise_id,omitempty"` + EnterpriseName string `json:"enterprise_name,omitempty"` +} + +func (p ScaleSet) GithubEntity() (GithubEntity, error) { + switch p.ScaleSetType() { + case GithubEntityTypeRepository: + return GithubEntity{ + ID: p.RepoID, + EntityType: GithubEntityTypeRepository, + }, nil + case GithubEntityTypeOrganization: + return GithubEntity{ + ID: p.OrgID, + EntityType: GithubEntityTypeOrganization, + }, nil + case GithubEntityTypeEnterprise: + return GithubEntity{ + ID: p.EnterpriseID, + EntityType: GithubEntityTypeEnterprise, + }, nil + } + return GithubEntity{}, fmt.Errorf("pool has no associated entity") +} + +func (p *ScaleSet) ScaleSetType() GithubEntityType { + switch { + case p.RepoID != "": + return GithubEntityTypeRepository + case p.OrgID != "": + return GithubEntityTypeOrganization + case p.EnterpriseID != "": + return GithubEntityTypeEnterprise + } + return "" +} + +func (p ScaleSet) GetID() uint { + return p.ID +} + +func (p *ScaleSet) RunnerTimeout() uint { + if p.RunnerBootstrapTimeout == 0 { + return appdefaults.DefaultRunnerBootstrapTimeout + } + return p.RunnerBootstrapTimeout +} + +// used by swagger client generated code +type ScaleSets []ScaleSet + type Repository struct { ID string `json:"id,omitempty"` Owner string `json:"owner,omitempty"` diff --git a/params/requests.go b/params/requests.go index c7c46821..1166418f 100644 --- a/params/requests.go +++ b/params/requests.go @@ 
-533,3 +533,76 @@ func (u UpdateControllerParams) Validate() error { return nil } + +type CreateScaleSetParams struct { + RunnerPrefix + + Name string `json:"name"` + DisableUpdate bool `json:"disable_update"` + ScaleSetID int `json:"scale_set_id"` + + ProviderName string `json:"provider_name,omitempty"` + MaxRunners uint `json:"max_runners,omitempty"` + MinIdleRunners uint `json:"min_idle_runners,omitempty"` + Image string `json:"image,omitempty"` + Flavor string `json:"flavor,omitempty"` + OSType commonParams.OSType `json:"os_type,omitempty"` + OSArch commonParams.OSArch `json:"os_arch,omitempty"` + Tags []string `json:"tags,omitempty"` + Enabled bool `json:"enabled,omitempty"` + RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` + ExtraSpecs json.RawMessage `json:"extra_specs,omitempty"` + // GithubRunnerGroup is the github runner group in which the runners of this + // pool will be added to. + // The runner group must be created by someone with access to the enterprise. + GitHubRunnerGroup string `json:"github-runner-group,omitempty"` +} + +func (s *CreateScaleSetParams) Validate() error { + if s.ProviderName == "" { + return fmt.Errorf("missing provider") + } + + if s.MinIdleRunners > s.MaxRunners { + return fmt.Errorf("min_idle_runners cannot be larger than max_runners") + } + + if s.MaxRunners == 0 { + return fmt.Errorf("max_runners cannot be 0") + } + + if s.Flavor == "" { + return fmt.Errorf("missing flavor") + } + + if s.Image == "" { + return fmt.Errorf("missing image") + } + + if s.Name == "" { + return fmt.Errorf("missing scale set name") + } + + return nil +} + +type UpdateScaleSetParams struct { + RunnerPrefix + + Name string `json:"name,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + MaxRunners *uint `json:"max_runners,omitempty"` + MinIdleRunners *uint `json:"min_idle_runners,omitempty"` + RunnerBootstrapTimeout *uint `json:"runner_bootstrap_timeout,omitempty"` + Image string `json:"image,omitempty"` + Flavor string `json:"flavor,omitempty"` + OSType commonParams.OSType `json:"os_type,omitempty"` + OSArch commonParams.OSArch `json:"os_arch,omitempty"` + ExtraSpecs json.RawMessage `json:"extra_specs,omitempty"` + // GithubRunnerGroup is the github runner group in which the runners of this + // pool will be added to. + // The runner group must be created by someone with access to the enterprise. 
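Taken together, the create and update parameter types in this hunk behave roughly as follows; a minimal sketch with illustrative values (the provider, image and flavor names are placeholders that would have to exist in a real deployment):

	package main

	import (
		"encoding/json"
		"fmt"

		"github.com/cloudbase/garm/params"
	)

	func main() {
		create := params.CreateScaleSetParams{
			Name:           "linux-amd64",
			ProviderName:   "my-provider",
			Image:          "ubuntu-22.04",
			Flavor:         "large",
			MaxRunners:     10,
			MinIdleRunners: 2, // must not exceed MaxRunners
			ExtraSpecs:     json.RawMessage(`{"hypothetical_key": "value"}`),
		}
		// Validate rejects a missing name, provider, image or flavor, a zero
		// MaxRunners, and MinIdleRunners larger than MaxRunners.
		if err := create.Validate(); err != nil {
			fmt.Println("invalid scale set definition:", err)
		}
	}

UpdateScaleSetParams, by contrast, relies on pointer and zero-value checks, so updateScaleSet in database/sql/scalesets.go applies only the fields that were explicitly set.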
+ GitHubRunnerGroup *string `json:"runner_group,omitempty"` + State *ScaleSetState `json:"state"` + ExtendedState *string `json:"extended_state"` +} From 7e1a83c79ab16322d25d18a0b2f843d473f00652 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 11 Apr 2025 10:42:31 +0000 Subject: [PATCH 008/179] Add API endpoint for some scaleset ops Signed-off-by: Gabriel Adrian Samfira --- apiserver/controllers/instances.go | 48 +++ apiserver/controllers/scalesets.go | 211 ++++++++++++ apiserver/routers/routers.go | 19 ++ apiserver/swagger-models.yaml | 30 ++ apiserver/swagger.yaml | 131 ++++++++ client/garm_api_client.go | 5 + client/instances/instances_client.go | 40 +++ .../list_scale_set_instances_parameters.go | 151 +++++++++ .../list_scale_set_instances_responses.go | 184 +++++++++++ .../scalesets/delete_scale_set_parameters.go | 151 +++++++++ .../scalesets/delete_scale_set_responses.go | 106 ++++++ client/scalesets/get_scale_set_parameters.go | 151 +++++++++ client/scalesets/get_scale_set_responses.go | 184 +++++++++++ client/scalesets/list_scalesets_parameters.go | 128 ++++++++ client/scalesets/list_scalesets_responses.go | 184 +++++++++++ client/scalesets/scalesets_client.go | 217 +++++++++++++ .../scalesets/update_scale_set_parameters.go | 173 ++++++++++ .../scalesets/update_scale_set_responses.go | 184 +++++++++++ database/common/store.go | 12 + database/sql/instances.go | 12 +- database/sql/pools.go | 5 +- database/sql/util.go | 42 ++- database/watcher/filters.go | 21 ++ params/interfaces.go | 7 + params/params.go | 15 + runner/enterprises.go | 9 + runner/organizations.go | 9 + runner/pool/pool.go | 47 +-- runner/repositories.go | 9 + runner/scalesets.go | 306 ++++++++++++++++++ util/github/scalesets/message_sessions.go | 2 + 31 files changed, 2768 insertions(+), 25 deletions(-) create mode 100644 apiserver/controllers/scalesets.go create mode 100644 client/instances/list_scale_set_instances_parameters.go create mode 100644 client/instances/list_scale_set_instances_responses.go create mode 100644 client/scalesets/delete_scale_set_parameters.go create mode 100644 client/scalesets/delete_scale_set_responses.go create mode 100644 client/scalesets/get_scale_set_parameters.go create mode 100644 client/scalesets/get_scale_set_responses.go create mode 100644 client/scalesets/list_scalesets_parameters.go create mode 100644 client/scalesets/list_scalesets_responses.go create mode 100644 client/scalesets/scalesets_client.go create mode 100644 client/scalesets/update_scale_set_parameters.go create mode 100644 client/scalesets/update_scale_set_responses.go create mode 100644 params/interfaces.go create mode 100644 runner/scalesets.go diff --git a/apiserver/controllers/instances.go b/apiserver/controllers/instances.go index 962264f9..fd6d2c45 100644 --- a/apiserver/controllers/instances.go +++ b/apiserver/controllers/instances.go @@ -69,6 +69,54 @@ func (a *APIController) ListPoolInstancesHandler(w http.ResponseWriter, r *http. } } +// swagger:route GET /scalesets/{scalesetID}/instances instances ListScaleSetInstances +// +// List runner instances in a scale set. +// +// Parameters: +// + name: scalesetID +// description: Runner scale set ID. 
+// type: string +// in: path +// required: true +// +// Responses: +// 200: Instances +// default: APIErrorResponse +func (a *APIController) ListScaleSetInstancesHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + vars := mux.Vars(r) + scalesetID, ok := vars["scalesetID"] + if !ok { + w.WriteHeader(http.StatusBadRequest) + if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ + Error: "Bad Request", + Details: "No pool ID specified", + }); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } + return + } + id, err := strconv.ParseUint(scalesetID, 10, 64) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + instances, err := a.r.ListScaleSetInstances(ctx, uint(id)) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing pool instances") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(instances); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + // swagger:route GET /instances/{instanceName} instances GetInstance // // Get runner instance by name. diff --git a/apiserver/controllers/scalesets.go b/apiserver/controllers/scalesets.go new file mode 100644 index 00000000..d12928f0 --- /dev/null +++ b/apiserver/controllers/scalesets.go @@ -0,0 +1,211 @@ +// Copyright 2022 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package controllers + +import ( + "encoding/json" + "log/slog" + "net/http" + "strconv" + + "github.com/gorilla/mux" + + gErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/apiserver/params" + runnerParams "github.com/cloudbase/garm/params" +) + +// swagger:route GET /scalesets scalesets ListScalesets +// +// List all scalesets. +// +// Responses: +// 200: ScaleSets +// default: APIErrorResponse +func (a *APIController) ListAllScaleSetsHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + scalesets, err := a.r.ListAllScaleSets(ctx) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing scale sets") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(scalesets); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + +// swagger:route GET /scalesets/{scalesetID} scalesets GetScaleSet +// +// Get scale set by ID. +// +// Parameters: +// + name: scalesetID +// description: ID of the scale set to fetch. 
+// type: string +// in: path +// required: true +// +// Responses: +// 200: ScaleSet +// default: APIErrorResponse +func (a *APIController) GetScaleSetByIDHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + vars := mux.Vars(r) + scaleSetID, ok := vars["scalesetID"] + if !ok { + w.WriteHeader(http.StatusBadRequest) + if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ + Error: "Bad Request", + Details: "No scale set ID specified", + }); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } + return + } + id, err := strconv.ParseUint(scaleSetID, 10, 64) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + scaleSet, err := a.r.GetScaleSetByID(ctx, uint(id)) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "fetching scale set") + handleError(ctx, w, err) + return + } + + scaleSet.RunnerBootstrapTimeout = scaleSet.RunnerTimeout() + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(scaleSet); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + +// swagger:route DELETE /scalesets/{scalesetID} scalesets DeleteScaleSet +// +// Delete scale set by ID. +// +// Parameters: +// + name: scalesetID +// description: ID of the scale set to delete. +// type: string +// in: path +// required: true +// +// Responses: +// default: APIErrorResponse +func (a *APIController) DeleteScaleSetByIDHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + vars := mux.Vars(r) + scalesetID, ok := vars["scalesetID"] + if !ok { + w.WriteHeader(http.StatusBadRequest) + if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ + Error: "Bad Request", + Details: "No scale set ID specified", + }); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } + return + } + + id, err := strconv.ParseUint(scalesetID, 10, 64) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + if err := a.r.DeleteScaleSetByID(ctx, uint(id)); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "removing scale set") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) +} + +// swagger:route PUT /scalesets/{scalesetID} scalesets UpdateScaleSet +// +// Update scale set by ID. +// +// Parameters: +// + name: scalesetID +// description: ID of the scale set to update. +// type: string +// in: path +// required: true +// +// + name: Body +// description: Parameters to update the scale set with. 
+// type: UpdateScaleSetParams +// in: body +// required: true +// +// Responses: +// 200: ScaleSet +// default: APIErrorResponse +func (a *APIController) UpdateScaleSetByIDHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + vars := mux.Vars(r) + scalesetID, ok := vars["scalesetID"] + if !ok { + w.WriteHeader(http.StatusBadRequest) + if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ + Error: "Bad Request", + Details: "No scale set ID specified", + }); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } + return + } + + id, err := strconv.ParseUint(scalesetID, 10, 64) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + var scaleSetData runnerParams.UpdateScaleSetParams + if err := json.NewDecoder(r.Body).Decode(&scaleSetData); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + scaleSet, err := a.r.UpdateScaleSetByID(ctx, uint(id), scaleSetData) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "updating scale set") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(scaleSet); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} diff --git a/apiserver/routers/routers.go b/apiserver/routers/routers.go index 8c0434bc..13c9a2f9 100644 --- a/apiserver/routers/routers.go +++ b/apiserver/routers/routers.go @@ -214,6 +214,25 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware apiRouter.Handle("/pools/{poolID}/instances/", http.HandlerFunc(han.ListPoolInstancesHandler)).Methods("GET", "OPTIONS") apiRouter.Handle("/pools/{poolID}/instances", http.HandlerFunc(han.ListPoolInstancesHandler)).Methods("GET", "OPTIONS") + //////////////// + // Scale sets // + //////////////// + // List all pools + apiRouter.Handle("/scalesets/", http.HandlerFunc(han.ListAllScaleSetsHandler)).Methods("GET", "OPTIONS") + apiRouter.Handle("/scalesets", http.HandlerFunc(han.ListAllScaleSetsHandler)).Methods("GET", "OPTIONS") + // Get one pool + apiRouter.Handle("/scalesets/{scalesetID}/", http.HandlerFunc(han.GetScaleSetByIDHandler)).Methods("GET", "OPTIONS") + apiRouter.Handle("/scalesets/{scalesetID}", http.HandlerFunc(han.GetScaleSetByIDHandler)).Methods("GET", "OPTIONS") + // Delete one pool + apiRouter.Handle("/scalesets/{scalesetID}/", http.HandlerFunc(han.DeleteScaleSetByIDHandler)).Methods("DELETE", "OPTIONS") + apiRouter.Handle("/scalesets/{scalesetID}", http.HandlerFunc(han.DeleteScaleSetByIDHandler)).Methods("DELETE", "OPTIONS") + // Update one pool + apiRouter.Handle("/scalesets/{scalesetID}/", http.HandlerFunc(han.UpdateScaleSetByIDHandler)).Methods("PUT", "OPTIONS") + apiRouter.Handle("/scalesets/{scalesetID}", http.HandlerFunc(han.UpdateScaleSetByIDHandler)).Methods("PUT", "OPTIONS") + // List pool instances + apiRouter.Handle("/scalesets/{scalesetID}/instances/", http.HandlerFunc(han.ListScaleSetInstancesHandler)).Methods("GET", "OPTIONS") + apiRouter.Handle("/scalesets/{scalesetID}/instances", http.HandlerFunc(han.ListScaleSetInstancesHandler)).Methods("GET", "OPTIONS") + ///////////// // Runners // ///////////// diff --git a/apiserver/swagger-models.yaml b/apiserver/swagger-models.yaml index 88c6bd8d..ad83d6c8 100644 --- 
a/apiserver/swagger-models.yaml +++ b/apiserver/swagger-models.yaml @@ -130,6 +130,22 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params + ScaleSets: + type: array + x-go-type: + type: ScaleSets + import: + package: github.com/cloudbase/garm/params + alias: garm_params + items: + $ref: '#/definitions/ScaleSet' + ScaleSet: + type: object + x-go-type: + type: ScaleSet + import: + package: github.com/cloudbase/garm/params + alias: garm_params Repositories: type: array x-go-type: @@ -213,6 +229,13 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params + CreateScaleSetParams: + type: object + x-go-type: + type: CreateScaleSetParams + import: + package: github.com/cloudbase/garm/params + alias: garm_params UpdatePoolParams: type: object x-go-type: @@ -220,6 +243,13 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params + UpdateScaleSetParams: + type: object + x-go-type: + type: UpdateScaleSetParams + import: + package: github.com/cloudbase/garm/params + alias: garm_params APIErrorResponse: type: object x-go-type: diff --git a/apiserver/swagger.yaml b/apiserver/swagger.yaml index 42c573f0..cf4287e9 100644 --- a/apiserver/swagger.yaml +++ b/apiserver/swagger.yaml @@ -65,6 +65,13 @@ definitions: alias: garm_params package: github.com/cloudbase/garm/params type: CreateRepoParams + CreateScaleSetParams: + type: object + x-go-type: + import: + alias: garm_params + package: github.com/cloudbase/garm/params + type: CreateScaleSetParams Credentials: items: $ref: '#/definitions/GithubCredentials' @@ -244,6 +251,22 @@ definitions: alias: garm_params package: github.com/cloudbase/garm/params type: Repository + ScaleSet: + type: object + x-go-type: + import: + alias: garm_params + package: github.com/cloudbase/garm/params + type: ScaleSet + ScaleSets: + items: + $ref: '#/definitions/ScaleSet' + type: array + x-go-type: + import: + alias: garm_params + package: github.com/cloudbase/garm/params + type: ScaleSets UpdateControllerParams: type: object x-go-type: @@ -279,6 +302,13 @@ definitions: alias: garm_params package: github.com/cloudbase/garm/params type: UpdatePoolParams + UpdateScaleSetParams: + type: object + x-go-type: + import: + alias: garm_params + package: github.com/cloudbase/garm/params + type: UpdateScaleSetParams User: type: object x-go-type: @@ -1718,6 +1748,107 @@ paths: tags: - repositories - hooks + /scalesets: + get: + operationId: ListScalesets + responses: + "200": + description: ScaleSets + schema: + $ref: '#/definitions/ScaleSets' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: List all scalesets. + tags: + - scalesets + /scalesets/{scalesetID}: + delete: + operationId: DeleteScaleSet + parameters: + - description: ID of the scale set to delete. + in: path + name: scalesetID + required: true + type: string + responses: + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Delete scale set by ID. + tags: + - scalesets + get: + operationId: GetScaleSet + parameters: + - description: ID of the scale set to fetch. + in: path + name: scalesetID + required: true + type: string + responses: + "200": + description: ScaleSet + schema: + $ref: '#/definitions/ScaleSet' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Get scale set by ID. 
+ tags: + - scalesets + put: + operationId: UpdateScaleSet + parameters: + - description: ID of the scale set to update. + in: path + name: scalesetID + required: true + type: string + - description: Parameters to update the scale set with. + in: body + name: Body + required: true + schema: + $ref: '#/definitions/UpdateScaleSetParams' + description: Parameters to update the scale set with. + type: object + responses: + "200": + description: ScaleSet + schema: + $ref: '#/definitions/ScaleSet' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Update scale set by ID. + tags: + - scalesets + /scalesets/{scalesetID}/instances: + get: + operationId: ListScaleSetInstances + parameters: + - description: Runner scale set ID. + in: path + name: scalesetID + required: true + type: string + responses: + "200": + description: Instances + schema: + $ref: '#/definitions/Instances' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: List runner instances in a scale set. + tags: + - instances produces: - application/json security: diff --git a/client/garm_api_client.go b/client/garm_api_client.go index cbc65dfc..f5bc51b2 100644 --- a/client/garm_api_client.go +++ b/client/garm_api_client.go @@ -24,6 +24,7 @@ import ( "github.com/cloudbase/garm/client/pools" "github.com/cloudbase/garm/client/providers" "github.com/cloudbase/garm/client/repositories" + "github.com/cloudbase/garm/client/scalesets" ) // Default garm API HTTP client. @@ -82,6 +83,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *GarmAPI { cli.Pools = pools.New(transport, formats) cli.Providers = providers.New(transport, formats) cli.Repositories = repositories.New(transport, formats) + cli.Scalesets = scalesets.New(transport, formats) return cli } @@ -154,6 +156,8 @@ type GarmAPI struct { Repositories repositories.ClientService + Scalesets scalesets.ClientService + Transport runtime.ClientTransport } @@ -174,4 +178,5 @@ func (c *GarmAPI) SetTransport(transport runtime.ClientTransport) { c.Pools.SetTransport(transport) c.Providers.SetTransport(transport) c.Repositories.SetTransport(transport) + c.Scalesets.SetTransport(transport) } diff --git a/client/instances/instances_client.go b/client/instances/instances_client.go index 5b6af6f3..2c41f919 100644 --- a/client/instances/instances_client.go +++ b/client/instances/instances_client.go @@ -62,6 +62,8 @@ type ClientService interface { ListPoolInstances(params *ListPoolInstancesParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListPoolInstancesOK, error) + ListScaleSetInstances(params *ListScaleSetInstancesParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListScaleSetInstancesOK, error) + SetTransport(transport runtime.ClientTransport) } @@ -211,6 +213,44 @@ func (a *Client) ListPoolInstances(params *ListPoolInstancesParams, authInfo run return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } +/* +ListScaleSetInstances lists runner instances in a scale set +*/ +func (a *Client) ListScaleSetInstances(params *ListScaleSetInstancesParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListScaleSetInstancesOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewListScaleSetInstancesParams() + } + op := &runtime.ClientOperation{ + ID: "ListScaleSetInstances", + Method: "GET", + 
PathPattern: "/scalesets/{scalesetID}/instances", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &ListScaleSetInstancesReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*ListScaleSetInstancesOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ListScaleSetInstancesDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + // SetTransport changes the transport on the client func (a *Client) SetTransport(transport runtime.ClientTransport) { a.transport = transport diff --git a/client/instances/list_scale_set_instances_parameters.go b/client/instances/list_scale_set_instances_parameters.go new file mode 100644 index 00000000..7b38ef82 --- /dev/null +++ b/client/instances/list_scale_set_instances_parameters.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package instances + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewListScaleSetInstancesParams creates a new ListScaleSetInstancesParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewListScaleSetInstancesParams() *ListScaleSetInstancesParams { + return &ListScaleSetInstancesParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewListScaleSetInstancesParamsWithTimeout creates a new ListScaleSetInstancesParams object +// with the ability to set a timeout on a request. +func NewListScaleSetInstancesParamsWithTimeout(timeout time.Duration) *ListScaleSetInstancesParams { + return &ListScaleSetInstancesParams{ + timeout: timeout, + } +} + +// NewListScaleSetInstancesParamsWithContext creates a new ListScaleSetInstancesParams object +// with the ability to set a context for a request. +func NewListScaleSetInstancesParamsWithContext(ctx context.Context) *ListScaleSetInstancesParams { + return &ListScaleSetInstancesParams{ + Context: ctx, + } +} + +// NewListScaleSetInstancesParamsWithHTTPClient creates a new ListScaleSetInstancesParams object +// with the ability to set a custom HTTPClient for a request. +func NewListScaleSetInstancesParamsWithHTTPClient(client *http.Client) *ListScaleSetInstancesParams { + return &ListScaleSetInstancesParams{ + HTTPClient: client, + } +} + +/* +ListScaleSetInstancesParams contains all the parameters to send to the API endpoint + + for the list scale set instances operation. + + Typically these are written to a http.Request. +*/ +type ListScaleSetInstancesParams struct { + + /* ScalesetID. + + Runner scale set ID. + */ + ScalesetID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the list scale set instances params (not the query body). 
+// +// All values with no default are reset to their zero value. +func (o *ListScaleSetInstancesParams) WithDefaults() *ListScaleSetInstancesParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the list scale set instances params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListScaleSetInstancesParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the list scale set instances params +func (o *ListScaleSetInstancesParams) WithTimeout(timeout time.Duration) *ListScaleSetInstancesParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the list scale set instances params +func (o *ListScaleSetInstancesParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the list scale set instances params +func (o *ListScaleSetInstancesParams) WithContext(ctx context.Context) *ListScaleSetInstancesParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the list scale set instances params +func (o *ListScaleSetInstancesParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the list scale set instances params +func (o *ListScaleSetInstancesParams) WithHTTPClient(client *http.Client) *ListScaleSetInstancesParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the list scale set instances params +func (o *ListScaleSetInstancesParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithScalesetID adds the scalesetID to the list scale set instances params +func (o *ListScaleSetInstancesParams) WithScalesetID(scalesetID string) *ListScaleSetInstancesParams { + o.SetScalesetID(scalesetID) + return o +} + +// SetScalesetID adds the scalesetId to the list scale set instances params +func (o *ListScaleSetInstancesParams) SetScalesetID(scalesetID string) { + o.ScalesetID = scalesetID +} + +// WriteToRequest writes these params to a swagger request +func (o *ListScaleSetInstancesParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param scalesetID + if err := r.SetPathParam("scalesetID", o.ScalesetID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/instances/list_scale_set_instances_responses.go b/client/instances/list_scale_set_instances_responses.go new file mode 100644 index 00000000..a966a9e7 --- /dev/null +++ b/client/instances/list_scale_set_instances_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package instances + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// ListScaleSetInstancesReader is a Reader for the ListScaleSetInstances structure. +type ListScaleSetInstancesReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *ListScaleSetInstancesReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewListScaleSetInstancesOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewListScaleSetInstancesDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewListScaleSetInstancesOK creates a ListScaleSetInstancesOK with default headers values +func NewListScaleSetInstancesOK() *ListScaleSetInstancesOK { + return &ListScaleSetInstancesOK{} +} + +/* +ListScaleSetInstancesOK describes a response with status code 200, with default header values. + +Instances +*/ +type ListScaleSetInstancesOK struct { + Payload garm_params.Instances +} + +// IsSuccess returns true when this list scale set instances o k response has a 2xx status code +func (o *ListScaleSetInstancesOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this list scale set instances o k response has a 3xx status code +func (o *ListScaleSetInstancesOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this list scale set instances o k response has a 4xx status code +func (o *ListScaleSetInstancesOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this list scale set instances o k response has a 5xx status code +func (o *ListScaleSetInstancesOK) IsServerError() bool { + return false +} + +// IsCode returns true when this list scale set instances o k response a status code equal to that given +func (o *ListScaleSetInstancesOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the list scale set instances o k response +func (o *ListScaleSetInstancesOK) Code() int { + return 200 +} + +func (o *ListScaleSetInstancesOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets/{scalesetID}/instances][%d] listScaleSetInstancesOK %s", 200, payload) +} + +func (o *ListScaleSetInstancesOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets/{scalesetID}/instances][%d] listScaleSetInstancesOK %s", 200, payload) +} + +func (o *ListScaleSetInstancesOK) GetPayload() garm_params.Instances { + return o.Payload +} + +func (o *ListScaleSetInstancesOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewListScaleSetInstancesDefault creates a ListScaleSetInstancesDefault with default headers values +func NewListScaleSetInstancesDefault(code int) *ListScaleSetInstancesDefault { + return &ListScaleSetInstancesDefault{ + _statusCode: code, + } +} + +/* +ListScaleSetInstancesDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type ListScaleSetInstancesDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this list scale set instances default response has a 2xx status code +func (o *ListScaleSetInstancesDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this list scale set instances default response has a 3xx status code +func (o *ListScaleSetInstancesDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this list scale set instances default response has a 4xx status code +func (o *ListScaleSetInstancesDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this list scale set instances default response has a 5xx status code +func (o *ListScaleSetInstancesDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this list scale set instances default response a status code equal to that given +func (o *ListScaleSetInstancesDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the list scale set instances default response +func (o *ListScaleSetInstancesDefault) Code() int { + return o._statusCode +} + +func (o *ListScaleSetInstancesDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets/{scalesetID}/instances][%d] ListScaleSetInstances default %s", o._statusCode, payload) +} + +func (o *ListScaleSetInstancesDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets/{scalesetID}/instances][%d] ListScaleSetInstances default %s", o._statusCode, payload) +} + +func (o *ListScaleSetInstancesDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *ListScaleSetInstancesDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/scalesets/delete_scale_set_parameters.go b/client/scalesets/delete_scale_set_parameters.go new file mode 100644 index 00000000..640f95a8 --- /dev/null +++ b/client/scalesets/delete_scale_set_parameters.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package scalesets + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewDeleteScaleSetParams creates a new DeleteScaleSetParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewDeleteScaleSetParams() *DeleteScaleSetParams { + return &DeleteScaleSetParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewDeleteScaleSetParamsWithTimeout creates a new DeleteScaleSetParams object +// with the ability to set a timeout on a request. 
+func NewDeleteScaleSetParamsWithTimeout(timeout time.Duration) *DeleteScaleSetParams { + return &DeleteScaleSetParams{ + timeout: timeout, + } +} + +// NewDeleteScaleSetParamsWithContext creates a new DeleteScaleSetParams object +// with the ability to set a context for a request. +func NewDeleteScaleSetParamsWithContext(ctx context.Context) *DeleteScaleSetParams { + return &DeleteScaleSetParams{ + Context: ctx, + } +} + +// NewDeleteScaleSetParamsWithHTTPClient creates a new DeleteScaleSetParams object +// with the ability to set a custom HTTPClient for a request. +func NewDeleteScaleSetParamsWithHTTPClient(client *http.Client) *DeleteScaleSetParams { + return &DeleteScaleSetParams{ + HTTPClient: client, + } +} + +/* +DeleteScaleSetParams contains all the parameters to send to the API endpoint + + for the delete scale set operation. + + Typically these are written to a http.Request. +*/ +type DeleteScaleSetParams struct { + + /* ScalesetID. + + ID of the scale set to delete. + */ + ScalesetID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the delete scale set params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *DeleteScaleSetParams) WithDefaults() *DeleteScaleSetParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the delete scale set params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *DeleteScaleSetParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the delete scale set params +func (o *DeleteScaleSetParams) WithTimeout(timeout time.Duration) *DeleteScaleSetParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the delete scale set params +func (o *DeleteScaleSetParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the delete scale set params +func (o *DeleteScaleSetParams) WithContext(ctx context.Context) *DeleteScaleSetParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the delete scale set params +func (o *DeleteScaleSetParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the delete scale set params +func (o *DeleteScaleSetParams) WithHTTPClient(client *http.Client) *DeleteScaleSetParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the delete scale set params +func (o *DeleteScaleSetParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithScalesetID adds the scalesetID to the delete scale set params +func (o *DeleteScaleSetParams) WithScalesetID(scalesetID string) *DeleteScaleSetParams { + o.SetScalesetID(scalesetID) + return o +} + +// SetScalesetID adds the scalesetId to the delete scale set params +func (o *DeleteScaleSetParams) SetScalesetID(scalesetID string) { + o.ScalesetID = scalesetID +} + +// WriteToRequest writes these params to a swagger request +func (o *DeleteScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param scalesetID + if err := r.SetPathParam("scalesetID", o.ScalesetID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} diff --git a/client/scalesets/delete_scale_set_responses.go b/client/scalesets/delete_scale_set_responses.go new file mode 100644 index 00000000..dd0f7334 --- /dev/null +++ b/client/scalesets/delete_scale_set_responses.go @@ -0,0 +1,106 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package scalesets + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" +) + +// DeleteScaleSetReader is a Reader for the DeleteScaleSet structure. +type DeleteScaleSetReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *DeleteScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + result := NewDeleteScaleSetDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result +} + +// NewDeleteScaleSetDefault creates a DeleteScaleSetDefault with default headers values +func NewDeleteScaleSetDefault(code int) *DeleteScaleSetDefault { + return &DeleteScaleSetDefault{ + _statusCode: code, + } +} + +/* +DeleteScaleSetDefault describes a response with status code -1, with default header values. + +APIErrorResponse +*/ +type DeleteScaleSetDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this delete scale set default response has a 2xx status code +func (o *DeleteScaleSetDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this delete scale set default response has a 3xx status code +func (o *DeleteScaleSetDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this delete scale set default response has a 4xx status code +func (o *DeleteScaleSetDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this delete scale set default response has a 5xx status code +func (o *DeleteScaleSetDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this delete scale set default response a status code equal to that given +func (o *DeleteScaleSetDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the delete scale set default response +func (o *DeleteScaleSetDefault) Code() int { + return o._statusCode +} + +func (o *DeleteScaleSetDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /scalesets/{scalesetID}][%d] DeleteScaleSet default %s", o._statusCode, payload) +} + +func (o *DeleteScaleSetDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /scalesets/{scalesetID}][%d] DeleteScaleSet default %s", o._statusCode, payload) +} + +func (o *DeleteScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *DeleteScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} 
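The snippet below is an illustrative sketch and not part of the generated code or of this patch: it shows how the fluent parameter builders above could be driven together with the scalesets client that is added later in this change. The host, base path and bearer token are placeholders, and the import path is an assumption derived from the client/scalesets package layout.

package main

import (
	"log"
	"time"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/cloudbase/garm/client/scalesets" // assumed import path for the generated package above
)

func main() {
	// Placeholder GARM endpoint and credentials; a real deployment supplies its own
	// host, base path and JWT token.
	transport := httptransport.New("garm.example.com", "/api/v1", []string{"http"})
	authInfo := httptransport.BearerToken("<garm-jwt-token>")

	cli := scalesets.New(transport, strfmt.Default)

	// Build the request with the generated fluent setters and delete scale set "1".
	params := scalesets.NewDeleteScaleSetParams().
		WithScalesetID("1").
		WithTimeout(30 * time.Second)

	if err := cli.DeleteScaleSet(params, authInfo); err != nil {
		log.Fatalf("deleting scale set: %v", err)
	}
	log.Println("scale set deleted")
}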
diff --git a/client/scalesets/get_scale_set_parameters.go b/client/scalesets/get_scale_set_parameters.go new file mode 100644 index 00000000..9e31b46e --- /dev/null +++ b/client/scalesets/get_scale_set_parameters.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package scalesets + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewGetScaleSetParams creates a new GetScaleSetParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewGetScaleSetParams() *GetScaleSetParams { + return &GetScaleSetParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewGetScaleSetParamsWithTimeout creates a new GetScaleSetParams object +// with the ability to set a timeout on a request. +func NewGetScaleSetParamsWithTimeout(timeout time.Duration) *GetScaleSetParams { + return &GetScaleSetParams{ + timeout: timeout, + } +} + +// NewGetScaleSetParamsWithContext creates a new GetScaleSetParams object +// with the ability to set a context for a request. +func NewGetScaleSetParamsWithContext(ctx context.Context) *GetScaleSetParams { + return &GetScaleSetParams{ + Context: ctx, + } +} + +// NewGetScaleSetParamsWithHTTPClient creates a new GetScaleSetParams object +// with the ability to set a custom HTTPClient for a request. +func NewGetScaleSetParamsWithHTTPClient(client *http.Client) *GetScaleSetParams { + return &GetScaleSetParams{ + HTTPClient: client, + } +} + +/* +GetScaleSetParams contains all the parameters to send to the API endpoint + + for the get scale set operation. + + Typically these are written to a http.Request. +*/ +type GetScaleSetParams struct { + + /* ScalesetID. + + ID of the scale set to fetch. + */ + ScalesetID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the get scale set params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *GetScaleSetParams) WithDefaults() *GetScaleSetParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the get scale set params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *GetScaleSetParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the get scale set params +func (o *GetScaleSetParams) WithTimeout(timeout time.Duration) *GetScaleSetParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the get scale set params +func (o *GetScaleSetParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the get scale set params +func (o *GetScaleSetParams) WithContext(ctx context.Context) *GetScaleSetParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the get scale set params +func (o *GetScaleSetParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the get scale set params +func (o *GetScaleSetParams) WithHTTPClient(client *http.Client) *GetScaleSetParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the get scale set params +func (o *GetScaleSetParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithScalesetID adds the scalesetID to the get scale set params +func (o *GetScaleSetParams) WithScalesetID(scalesetID string) *GetScaleSetParams { + o.SetScalesetID(scalesetID) + return o +} + +// SetScalesetID adds the scalesetId to the get scale set params +func (o *GetScaleSetParams) SetScalesetID(scalesetID string) { + o.ScalesetID = scalesetID +} + +// WriteToRequest writes these params to a swagger request +func (o *GetScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param scalesetID + if err := r.SetPathParam("scalesetID", o.ScalesetID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/scalesets/get_scale_set_responses.go b/client/scalesets/get_scale_set_responses.go new file mode 100644 index 00000000..5b30e16f --- /dev/null +++ b/client/scalesets/get_scale_set_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package scalesets + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// GetScaleSetReader is a Reader for the GetScaleSet structure. +type GetScaleSetReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *GetScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewGetScaleSetOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewGetScaleSetDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewGetScaleSetOK creates a GetScaleSetOK with default headers values +func NewGetScaleSetOK() *GetScaleSetOK { + return &GetScaleSetOK{} +} + +/* +GetScaleSetOK describes a response with status code 200, with default header values. + +ScaleSet +*/ +type GetScaleSetOK struct { + Payload garm_params.ScaleSet +} + +// IsSuccess returns true when this get scale set o k response has a 2xx status code +func (o *GetScaleSetOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this get scale set o k response has a 3xx status code +func (o *GetScaleSetOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this get scale set o k response has a 4xx status code +func (o *GetScaleSetOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this get scale set o k response has a 5xx status code +func (o *GetScaleSetOK) IsServerError() bool { + return false +} + +// IsCode returns true when this get scale set o k response a status code equal to that given +func (o *GetScaleSetOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the get scale set o k response +func (o *GetScaleSetOK) Code() int { + return 200 +} + +func (o *GetScaleSetOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets/{scalesetID}][%d] getScaleSetOK %s", 200, payload) +} + +func (o *GetScaleSetOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets/{scalesetID}][%d] getScaleSetOK %s", 200, payload) +} + +func (o *GetScaleSetOK) GetPayload() garm_params.ScaleSet { + return o.Payload +} + +func (o *GetScaleSetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewGetScaleSetDefault creates a GetScaleSetDefault with default headers values +func NewGetScaleSetDefault(code int) *GetScaleSetDefault { + return &GetScaleSetDefault{ + _statusCode: code, + } +} + +/* +GetScaleSetDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type GetScaleSetDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this get scale set default response has a 2xx status code +func (o *GetScaleSetDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this get scale set default response has a 3xx status code +func (o *GetScaleSetDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this get scale set default response has a 4xx status code +func (o *GetScaleSetDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this get scale set default response has a 5xx status code +func (o *GetScaleSetDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this get scale set default response a status code equal to that given +func (o *GetScaleSetDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the get scale set default response +func (o *GetScaleSetDefault) Code() int { + return o._statusCode +} + +func (o *GetScaleSetDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets/{scalesetID}][%d] GetScaleSet default %s", o._statusCode, payload) +} + +func (o *GetScaleSetDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets/{scalesetID}][%d] GetScaleSet default %s", o._statusCode, payload) +} + +func (o *GetScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *GetScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/scalesets/list_scalesets_parameters.go b/client/scalesets/list_scalesets_parameters.go new file mode 100644 index 00000000..b6fd1ccb --- /dev/null +++ b/client/scalesets/list_scalesets_parameters.go @@ -0,0 +1,128 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package scalesets + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewListScalesetsParams creates a new ListScalesetsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewListScalesetsParams() *ListScalesetsParams { + return &ListScalesetsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewListScalesetsParamsWithTimeout creates a new ListScalesetsParams object +// with the ability to set a timeout on a request. +func NewListScalesetsParamsWithTimeout(timeout time.Duration) *ListScalesetsParams { + return &ListScalesetsParams{ + timeout: timeout, + } +} + +// NewListScalesetsParamsWithContext creates a new ListScalesetsParams object +// with the ability to set a context for a request. 
+func NewListScalesetsParamsWithContext(ctx context.Context) *ListScalesetsParams { + return &ListScalesetsParams{ + Context: ctx, + } +} + +// NewListScalesetsParamsWithHTTPClient creates a new ListScalesetsParams object +// with the ability to set a custom HTTPClient for a request. +func NewListScalesetsParamsWithHTTPClient(client *http.Client) *ListScalesetsParams { + return &ListScalesetsParams{ + HTTPClient: client, + } +} + +/* +ListScalesetsParams contains all the parameters to send to the API endpoint + + for the list scalesets operation. + + Typically these are written to a http.Request. +*/ +type ListScalesetsParams struct { + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the list scalesets params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListScalesetsParams) WithDefaults() *ListScalesetsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the list scalesets params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListScalesetsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the list scalesets params +func (o *ListScalesetsParams) WithTimeout(timeout time.Duration) *ListScalesetsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the list scalesets params +func (o *ListScalesetsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the list scalesets params +func (o *ListScalesetsParams) WithContext(ctx context.Context) *ListScalesetsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the list scalesets params +func (o *ListScalesetsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the list scalesets params +func (o *ListScalesetsParams) WithHTTPClient(client *http.Client) *ListScalesetsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the list scalesets params +func (o *ListScalesetsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WriteToRequest writes these params to a swagger request +func (o *ListScalesetsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/scalesets/list_scalesets_responses.go b/client/scalesets/list_scalesets_responses.go new file mode 100644 index 00000000..05064308 --- /dev/null +++ b/client/scalesets/list_scalesets_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package scalesets + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// ListScalesetsReader is a Reader for the ListScalesets structure. +type ListScalesetsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *ListScalesetsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewListScalesetsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewListScalesetsDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewListScalesetsOK creates a ListScalesetsOK with default headers values +func NewListScalesetsOK() *ListScalesetsOK { + return &ListScalesetsOK{} +} + +/* +ListScalesetsOK describes a response with status code 200, with default header values. + +ScaleSets +*/ +type ListScalesetsOK struct { + Payload garm_params.ScaleSets +} + +// IsSuccess returns true when this list scalesets o k response has a 2xx status code +func (o *ListScalesetsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this list scalesets o k response has a 3xx status code +func (o *ListScalesetsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this list scalesets o k response has a 4xx status code +func (o *ListScalesetsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this list scalesets o k response has a 5xx status code +func (o *ListScalesetsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this list scalesets o k response a status code equal to that given +func (o *ListScalesetsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the list scalesets o k response +func (o *ListScalesetsOK) Code() int { + return 200 +} + +func (o *ListScalesetsOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets][%d] listScalesetsOK %s", 200, payload) +} + +func (o *ListScalesetsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets][%d] listScalesetsOK %s", 200, payload) +} + +func (o *ListScalesetsOK) GetPayload() garm_params.ScaleSets { + return o.Payload +} + +func (o *ListScalesetsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewListScalesetsDefault creates a ListScalesetsDefault with default headers values +func NewListScalesetsDefault(code int) *ListScalesetsDefault { + return &ListScalesetsDefault{ + _statusCode: code, + } +} + +/* +ListScalesetsDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type ListScalesetsDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this list scalesets default response has a 2xx status code +func (o *ListScalesetsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this list scalesets default response has a 3xx status code +func (o *ListScalesetsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this list scalesets default response has a 4xx status code +func (o *ListScalesetsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this list scalesets default response has a 5xx status code +func (o *ListScalesetsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this list scalesets default response a status code equal to that given +func (o *ListScalesetsDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the list scalesets default response +func (o *ListScalesetsDefault) Code() int { + return o._statusCode +} + +func (o *ListScalesetsDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets][%d] ListScalesets default %s", o._statusCode, payload) +} + +func (o *ListScalesetsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /scalesets][%d] ListScalesets default %s", o._statusCode, payload) +} + +func (o *ListScalesetsDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *ListScalesetsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/scalesets/scalesets_client.go b/client/scalesets/scalesets_client.go new file mode 100644 index 00000000..5375750d --- /dev/null +++ b/client/scalesets/scalesets_client.go @@ -0,0 +1,217 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package scalesets + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "github.com/go-openapi/runtime" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// New creates a new scalesets API client. +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { + return &Client{transport: transport, formats: formats} +} + +// New creates a new scalesets API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new scalesets API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). 
+// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + +/* +Client for scalesets API +*/ +type Client struct { + transport runtime.ClientTransport + formats strfmt.Registry +} + +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + DeleteScaleSet(params *DeleteScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error + + GetScaleSet(params *GetScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetScaleSetOK, error) + + ListScalesets(params *ListScalesetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListScalesetsOK, error) + + UpdateScaleSet(params *UpdateScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateScaleSetOK, error) + + SetTransport(transport runtime.ClientTransport) +} + +/* +DeleteScaleSet deletes scale set by ID +*/ +func (a *Client) DeleteScaleSet(params *DeleteScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error { + // TODO: Validate the params before sending + if params == nil { + params = NewDeleteScaleSetParams() + } + op := &runtime.ClientOperation{ + ID: "DeleteScaleSet", + Method: "DELETE", + PathPattern: "/scalesets/{scalesetID}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &DeleteScaleSetReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + _, err := a.transport.Submit(op) + if err != nil { + return err + } + return nil +} + +/* +GetScaleSet gets scale set by ID +*/ +func (a *Client) GetScaleSet(params *GetScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetScaleSetOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewGetScaleSetParams() + } + op := &runtime.ClientOperation{ + ID: "GetScaleSet", + Method: "GET", + PathPattern: "/scalesets/{scalesetID}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &GetScaleSetReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*GetScaleSetOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*GetScaleSetDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +ListScalesets lists all scalesets +*/ +func (a *Client) ListScalesets(params *ListScalesetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListScalesetsOK, error) { + // TODO: 
Validate the params before sending + if params == nil { + params = NewListScalesetsParams() + } + op := &runtime.ClientOperation{ + ID: "ListScalesets", + Method: "GET", + PathPattern: "/scalesets", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &ListScalesetsReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*ListScalesetsOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ListScalesetsDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +UpdateScaleSet updates scale set by ID +*/ +func (a *Client) UpdateScaleSet(params *UpdateScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateScaleSetOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewUpdateScaleSetParams() + } + op := &runtime.ClientOperation{ + ID: "UpdateScaleSet", + Method: "PUT", + PathPattern: "/scalesets/{scalesetID}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &UpdateScaleSetReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*UpdateScaleSetOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*UpdateScaleSetDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +// SetTransport changes the transport on the client +func (a *Client) SetTransport(transport runtime.ClientTransport) { + a.transport = transport +} diff --git a/client/scalesets/update_scale_set_parameters.go b/client/scalesets/update_scale_set_parameters.go new file mode 100644 index 00000000..39668e9b --- /dev/null +++ b/client/scalesets/update_scale_set_parameters.go @@ -0,0 +1,173 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package scalesets + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + garm_params "github.com/cloudbase/garm/params" +) + +// NewUpdateScaleSetParams creates a new UpdateScaleSetParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewUpdateScaleSetParams() *UpdateScaleSetParams { + return &UpdateScaleSetParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewUpdateScaleSetParamsWithTimeout creates a new UpdateScaleSetParams object +// with the ability to set a timeout on a request. 
+func NewUpdateScaleSetParamsWithTimeout(timeout time.Duration) *UpdateScaleSetParams { + return &UpdateScaleSetParams{ + timeout: timeout, + } +} + +// NewUpdateScaleSetParamsWithContext creates a new UpdateScaleSetParams object +// with the ability to set a context for a request. +func NewUpdateScaleSetParamsWithContext(ctx context.Context) *UpdateScaleSetParams { + return &UpdateScaleSetParams{ + Context: ctx, + } +} + +// NewUpdateScaleSetParamsWithHTTPClient creates a new UpdateScaleSetParams object +// with the ability to set a custom HTTPClient for a request. +func NewUpdateScaleSetParamsWithHTTPClient(client *http.Client) *UpdateScaleSetParams { + return &UpdateScaleSetParams{ + HTTPClient: client, + } +} + +/* +UpdateScaleSetParams contains all the parameters to send to the API endpoint + + for the update scale set operation. + + Typically these are written to a http.Request. +*/ +type UpdateScaleSetParams struct { + + /* Body. + + Parameters to update the scale set with. + */ + Body garm_params.UpdateScaleSetParams + + /* ScalesetID. + + ID of the scale set to update. + */ + ScalesetID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the update scale set params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *UpdateScaleSetParams) WithDefaults() *UpdateScaleSetParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the update scale set params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *UpdateScaleSetParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the update scale set params +func (o *UpdateScaleSetParams) WithTimeout(timeout time.Duration) *UpdateScaleSetParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the update scale set params +func (o *UpdateScaleSetParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the update scale set params +func (o *UpdateScaleSetParams) WithContext(ctx context.Context) *UpdateScaleSetParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the update scale set params +func (o *UpdateScaleSetParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the update scale set params +func (o *UpdateScaleSetParams) WithHTTPClient(client *http.Client) *UpdateScaleSetParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the update scale set params +func (o *UpdateScaleSetParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the update scale set params +func (o *UpdateScaleSetParams) WithBody(body garm_params.UpdateScaleSetParams) *UpdateScaleSetParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the update scale set params +func (o *UpdateScaleSetParams) SetBody(body garm_params.UpdateScaleSetParams) { + o.Body = body +} + +// WithScalesetID adds the scalesetID to the update scale set params +func (o *UpdateScaleSetParams) WithScalesetID(scalesetID string) *UpdateScaleSetParams { + o.SetScalesetID(scalesetID) + return o +} + +// SetScalesetID adds the scalesetId to the update scale set params +func (o *UpdateScaleSetParams) SetScalesetID(scalesetID string) { + o.ScalesetID = scalesetID +} + +// WriteToRequest writes 
these params to a swagger request +func (o *UpdateScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + + // path param scalesetID + if err := r.SetPathParam("scalesetID", o.ScalesetID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/scalesets/update_scale_set_responses.go b/client/scalesets/update_scale_set_responses.go new file mode 100644 index 00000000..666e8256 --- /dev/null +++ b/client/scalesets/update_scale_set_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package scalesets + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// UpdateScaleSetReader is a Reader for the UpdateScaleSet structure. +type UpdateScaleSetReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *UpdateScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewUpdateScaleSetOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewUpdateScaleSetDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewUpdateScaleSetOK creates a UpdateScaleSetOK with default headers values +func NewUpdateScaleSetOK() *UpdateScaleSetOK { + return &UpdateScaleSetOK{} +} + +/* +UpdateScaleSetOK describes a response with status code 200, with default header values. 
+ +ScaleSet +*/ +type UpdateScaleSetOK struct { + Payload garm_params.ScaleSet +} + +// IsSuccess returns true when this update scale set o k response has a 2xx status code +func (o *UpdateScaleSetOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this update scale set o k response has a 3xx status code +func (o *UpdateScaleSetOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this update scale set o k response has a 4xx status code +func (o *UpdateScaleSetOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this update scale set o k response has a 5xx status code +func (o *UpdateScaleSetOK) IsServerError() bool { + return false +} + +// IsCode returns true when this update scale set o k response a status code equal to that given +func (o *UpdateScaleSetOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the update scale set o k response +func (o *UpdateScaleSetOK) Code() int { + return 200 +} + +func (o *UpdateScaleSetOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PUT /scalesets/{scalesetID}][%d] updateScaleSetOK %s", 200, payload) +} + +func (o *UpdateScaleSetOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PUT /scalesets/{scalesetID}][%d] updateScaleSetOK %s", 200, payload) +} + +func (o *UpdateScaleSetOK) GetPayload() garm_params.ScaleSet { + return o.Payload +} + +func (o *UpdateScaleSetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewUpdateScaleSetDefault creates a UpdateScaleSetDefault with default headers values +func NewUpdateScaleSetDefault(code int) *UpdateScaleSetDefault { + return &UpdateScaleSetDefault{ + _statusCode: code, + } +} + +/* +UpdateScaleSetDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type UpdateScaleSetDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this update scale set default response has a 2xx status code +func (o *UpdateScaleSetDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this update scale set default response has a 3xx status code +func (o *UpdateScaleSetDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this update scale set default response has a 4xx status code +func (o *UpdateScaleSetDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this update scale set default response has a 5xx status code +func (o *UpdateScaleSetDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this update scale set default response a status code equal to that given +func (o *UpdateScaleSetDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the update scale set default response +func (o *UpdateScaleSetDefault) Code() int { + return o._statusCode +} + +func (o *UpdateScaleSetDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PUT /scalesets/{scalesetID}][%d] UpdateScaleSet default %s", o._statusCode, payload) +} + +func (o *UpdateScaleSetDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PUT /scalesets/{scalesetID}][%d] UpdateScaleSet default %s", o._statusCode, payload) +} + +func (o *UpdateScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *UpdateScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/database/common/store.go b/database/common/store.go index 4d91e6cd..860ed8ac 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -135,6 +135,16 @@ type ControllerStore interface { UpdateController(info params.UpdateControllerParams) (params.ControllerInfo, error) } +type ScaleSetsStore interface { + ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error) + CreateEntityScaleSet(_ context.Context, entity params.GithubEntity, param params.CreateScaleSetParams) (scaleSet params.ScaleSet, err error) + ListEntityScaleSets(_ context.Context, entity params.GithubEntity) ([]params.ScaleSet, error) + UpdateEntityScaleSet(_ context.Context, entity params.GithubEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, new params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) + GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) + DeleteScaleSetByID(ctx context.Context, scaleSetID uint) (err error) + ListScaleSetInstances(_ context.Context, scalesetID uint) ([]params.Instance, error) +} + //go:generate mockery --name=Store type Store interface { RepoStore @@ -148,7 +158,9 @@ type Store interface { GithubCredentialsStore ControllerStore EntityPoolStore + ScaleSetsStore ControllerInfo() (params.ControllerInfo, error) InitController() (params.ControllerInfo, error) + GetGithubEntity(_ context.Context, entityType params.GithubEntityType, entityID string) (params.GithubEntity, error) } diff --git a/database/sql/instances.go 
b/database/sql/instances.go
index 65cf0dba..d4bfd019 100644
--- a/database/sql/instances.go
+++ b/database/sql/instances.go
@@ -97,6 +97,8 @@ func (s *sqlDatabase) getPoolInstanceByName(poolID string, instanceName string)
 		}
 		return Instance{}, errors.Wrap(q.Error, "fetching pool instance by name")
 	}
+
+	instance.Pool = pool
 	return instance, nil
 }
 
@@ -134,7 +136,7 @@ func (s *sqlDatabase) GetPoolInstanceByName(_ context.Context, poolID string, in
 }
 
 func (s *sqlDatabase) GetInstanceByName(ctx context.Context, instanceName string) (params.Instance, error) {
-	instance, err := s.getInstanceByName(ctx, instanceName, "StatusMessages")
+	instance, err := s.getInstanceByName(ctx, instanceName, "StatusMessages", "Pool")
 	if err != nil {
 		return params.Instance{}, errors.Wrap(err, "fetching instance")
 	}
@@ -194,7 +196,7 @@ func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string,
 }
 
 func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, param params.UpdateInstanceParams) (params.Instance, error) {
-	instance, err := s.getInstanceByName(ctx, instanceName)
+	instance, err := s.getInstanceByName(ctx, instanceName, "Pool")
 	if err != nil {
 		return params.Instance{}, errors.Wrap(err, "updating instance")
 	}
@@ -272,7 +274,7 @@ func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]par
 	}
 
 	var instances []Instance
-	query := s.conn.Model(&Instance{}).Preload("Job").Where("pool_id = ?", u)
+	query := s.conn.Model(&Instance{}).Preload("Job").Preload("Pool").Where("pool_id = ?", u)
 
 	if err := query.Find(&instances); err.Error != nil {
 		return nil, errors.Wrap(err.Error, "fetching instances")
@@ -290,7 +292,7 @@ func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]par
 
 func (s *sqlDatabase) ListScaleSetInstances(_ context.Context, scalesetID uint) ([]params.Instance, error) {
 	var instances []Instance
-	query := s.conn.Model(&Instance{}).Preload("Job").Where("scale_set_id = ?", scalesetID)
+	query := s.conn.Model(&Instance{}).Preload("Job").Preload("ScaleSet").Where("scale_set_fk_id = ?", scalesetID)
 
 	if err := query.Find(&instances); err.Error != nil {
 		return nil, errors.Wrap(err.Error, "fetching instances")
@@ -310,7 +312,7 @@ func (s *sqlDatabase) ListScaleSetInstances(_ context.Context, scalesetID uint)
 
 func (s *sqlDatabase) ListAllInstances(_ context.Context) ([]params.Instance, error) {
 	var instances []Instance
-	q := s.conn.Model(&Instance{}).Preload("Job").Find(&instances)
+	q := s.conn.Model(&Instance{}).Preload("Job").Preload("Pool").Preload("ScaleSet").Find(&instances)
 	if q.Error != nil {
 		return nil, errors.Wrap(q.Error, "fetching instances")
 	}
diff --git a/database/sql/pools.go b/database/sql/pools.go
index fdcf3f5a..7454b1ef 100644
--- a/database/sql/pools.go
+++ b/database/sql/pools.go
@@ -427,7 +427,10 @@ func (s *sqlDatabase) ListEntityInstances(_ context.Context, entity params.Githu
 	}
 	ret := []params.Instance{}
 	for _, pool := range pools {
-		for _, instance := range pool.Instances {
+		instances := pool.Instances
+		pool.Instances = nil
+		for _, instance := range instances {
+			instance.Pool = pool
 			paramsInstance, err := s.sqlToParamsInstance(instance)
 			if err != nil {
 				return nil, errors.Wrap(err, "fetching instance")
diff --git a/database/sql/util.go b/database/sql/util.go
index c5e412a9..fb627814 100644
--- a/database/sql/util.go
+++ b/database/sql/util.go
@@ -15,6 +15,7 @@
 package sql
 
 import (
+	"context"
 	"encoding/json"
 	"fmt"
 
@@ -60,7 +61,6 @@ func (s *sqlDatabase) sqlToParamsInstance(instance Instance) (params.Instance, e
 		OSArch:
instance.OSArch, Status: instance.Status, RunnerStatus: instance.RunnerStatus, - PoolID: instance.PoolID.String(), CallbackURL: instance.CallbackURL, MetadataURL: instance.MetadataURL, StatusMessages: []params.StatusMessage{}, @@ -75,6 +75,22 @@ func (s *sqlDatabase) sqlToParamsInstance(instance Instance) (params.Instance, e if instance.ScaleSetFkID != nil { ret.ScaleSetID = *instance.ScaleSetFkID + ret.ProviderName = instance.ScaleSet.ProviderName + ret.RunnerBootstrapTimeout = instance.ScaleSet.RunnerBootstrapTimeout + } + + if instance.PoolID != uuid.Nil { + ret.PoolID = instance.PoolID.String() + ret.ProviderName = instance.Pool.ProviderName + ret.RunnerBootstrapTimeout = instance.Pool.RunnerBootstrapTimeout + } + + if ret.ScaleSetID == 0 && ret.PoolID == "" { + return params.Instance{}, errors.New("missing pool or scale set id") + } + + if ret.ScaleSetID != 0 && ret.PoolID != "" { + return params.Instance{}, errors.New("both pool and scale set ids are set") } if instance.Job != nil { @@ -591,3 +607,27 @@ func (s *sqlDatabase) sendNotify(entityType dbCommon.DatabaseEntityType, op dbCo } return s.producer.Notify(message) } + +func (s *sqlDatabase) GetGithubEntity(_ context.Context, entityType params.GithubEntityType, entityID string) (params.GithubEntity, error) { + var ghEntity params.EntityGetter + var err error + switch entityType { + case params.GithubEntityTypeEnterprise: + ghEntity, err = s.GetEnterpriseByID(s.ctx, entityID) + case params.GithubEntityTypeOrganization: + ghEntity, err = s.GetOrganizationByID(s.ctx, entityID) + case params.GithubEntityTypeRepository: + ghEntity, err = s.GetRepositoryByID(s.ctx, entityID) + default: + return params.GithubEntity{}, errors.Wrap(runnerErrors.ErrBadRequest, "invalid entity type") + } + if err != nil { + return params.GithubEntity{}, errors.Wrap(err, "failed to get ") + } + + entity, err := ghEntity.GetEntity() + if err != nil { + return params.GithubEntity{}, errors.Wrap(err, "failed to get entity") + } + return entity, nil +} diff --git a/database/watcher/filters.go b/database/watcher/filters.go index aa5131b1..0c259bce 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -260,3 +260,24 @@ func WithScaleSetInstanceFilter(scaleset params.ScaleSet) dbCommon.PayloadFilter return instance.ScaleSetID == scaleset.ID } } + +// EntityTypeCallbackFilter is a callback function that takes a ChangePayload and returns a boolean. +// This callback type is used in the WithEntityTypeAndCallbackFilter (and potentially others) when +// a filter needs to delegate logic to a specific callback function. +type EntityTypeCallbackFilter func(payload dbCommon.ChangePayload) (bool, error) + +// WithEntityTypeAndCallbackFilter returns a filter function that filters payloads by entity type and the +// result of a callback function. +func WithEntityTypeAndCallbackFilter(entityType dbCommon.DatabaseEntityType, callback EntityTypeCallbackFilter) dbCommon.PayloadFilterFunc { + return func(payload dbCommon.ChangePayload) bool { + if payload.EntityType != entityType { + return false + } + + ok, err := callback(payload) + if err != nil { + return false + } + return ok + } +} diff --git a/params/interfaces.go b/params/interfaces.go new file mode 100644 index 00000000..95f02a9a --- /dev/null +++ b/params/interfaces.go @@ -0,0 +1,7 @@ +package params + +// EntityGetter is implemented by all github entities (repositories, organizations and enterprises). +// It defines the GetEntity() function which returns a github entity. 
+type EntityGetter interface { + GetEntity() (GithubEntity, error) +} diff --git a/params/params.go b/params/params.go index 68227dd2..b0a6492e 100644 --- a/params/params.go +++ b/params/params.go @@ -153,6 +153,10 @@ type Instance struct { // instance in the provider. ProviderID string `json:"provider_id,omitempty"` + // ProviderName is the name of the IaaS where the instance was + // created. + ProviderName string `json:"provider_name"` + // AgentID is the github runner agent ID. AgentID int64 `json:"agent_id,omitempty"` @@ -212,6 +216,10 @@ type Instance struct { // Job is the current job that is being serviced by this runner. Job *Job `json:"job,omitempty"` + // RunnerBootstrapTimeout is the timeout in minutes after which the runner deployment + // will be considered failed. This value is caried over from the pool or scale set. + RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` + // Do not serialize sensitive info. CallbackURL string `json:"-"` MetadataURL string `json:"-"` @@ -229,6 +237,13 @@ func (i Instance) GetID() string { return i.ID } +func (i Instance) RunnerTimeout() uint { + if i.RunnerBootstrapTimeout == 0 { + return appdefaults.DefaultRunnerBootstrapTimeout + } + return i.RunnerBootstrapTimeout +} + // used by swagger client generated code type Instances []Instance diff --git a/runner/enterprises.go b/runner/enterprises.go index 3e9e3b8c..fb3f528b 100644 --- a/runner/enterprises.go +++ b/runner/enterprises.go @@ -145,6 +145,15 @@ func (r *Runner) DeleteEnterprise(ctx context.Context, enterpriseID string) erro return runnerErrors.NewBadRequestError("enterprise has pools defined (%s)", strings.Join(poolIDs, ", ")) } + scaleSets, err := r.store.ListEntityScaleSets(ctx, entity) + if err != nil { + return errors.Wrap(err, "fetching enterprise scale sets") + } + + if len(scaleSets) > 0 { + return runnerErrors.NewBadRequestError("enterprise has scale sets defined; delete them first") + } + if err := r.poolManagerCtrl.DeleteEnterprisePoolManager(enterprise); err != nil { return errors.Wrap(err, "deleting enterprise pool manager") } diff --git a/runner/organizations.go b/runner/organizations.go index 39aa788b..4b5e3fd7 100644 --- a/runner/organizations.go +++ b/runner/organizations.go @@ -159,6 +159,15 @@ func (r *Runner) DeleteOrganization(ctx context.Context, orgID string, keepWebho return runnerErrors.NewBadRequestError("org has pools defined (%s)", strings.Join(poolIDs, ", ")) } + scaleSets, err := r.store.ListEntityScaleSets(ctx, entity) + if err != nil { + return errors.Wrap(err, "fetching organization scale sets") + } + + if len(scaleSets) > 0 { + return runnerErrors.NewBadRequestError("organization has scale sets defined; delete them first") + } + if !keepWebhook && r.config.Default.EnableWebhookManagement { poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) if err != nil { diff --git a/runner/pool/pool.go b/runner/pool/pool.go index f6c97633..3ec72dad 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -415,6 +415,11 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []*github.Runne } for _, instance := range dbInstances { + if instance.ScaleSetID != 0 { + // ignore scale set instances. 
+ continue + } + lockAcquired, err := locking.TryLock(instance.Name) if !lockAcquired || err != nil { slog.DebugContext( @@ -433,14 +438,9 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []*github.Runne continue } - pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) - if err != nil { - return errors.Wrap(err, "fetching instance pool info") - } - switch instance.RunnerStatus { case params.RunnerPending, params.RunnerInstalling: - if time.Since(instance.UpdatedAt).Minutes() < float64(pool.RunnerTimeout()) { + if time.Since(instance.UpdatedAt).Minutes() < float64(instance.RunnerTimeout()) { // runner is still installing. We give it a chance to finish. slog.DebugContext( r.ctx, "runner is still installing, give it a chance to finish", @@ -491,6 +491,11 @@ func (r *basePoolManager) reapTimedOutRunners(runners []*github.Runner) error { } for _, instance := range dbInstances { + if instance.ScaleSetID != 0 { + // ignore scale set instances. + continue + } + slog.DebugContext( r.ctx, "attempting to lock instance", "runner_name", instance.Name) @@ -503,11 +508,7 @@ func (r *basePoolManager) reapTimedOutRunners(runners []*github.Runner) error { } defer locking.Unlock(instance.Name, false) - pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) - if err != nil { - return errors.Wrap(err, "fetching instance pool info") - } - if time.Since(instance.UpdatedAt).Minutes() < float64(pool.RunnerTimeout()) { + if time.Since(instance.UpdatedAt).Minutes() < float64(instance.RunnerTimeout()) { continue } @@ -602,13 +603,13 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) } // check if the provider still has the instance. - provider, ok := r.providers[pool.ProviderName] + provider, ok := r.providers[dbInstance.ProviderName] if !ok { - return fmt.Errorf("unknown provider %s for pool %s", pool.ProviderName, pool.ID) + return fmt.Errorf("unknown provider %s for pool %s", dbInstance.ProviderName, dbInstance.PoolID) } var poolInstances []commonParams.ProviderInstance - poolInstances, ok = poolInstanceCache[pool.ID] + poolInstances, ok = poolInstanceCache[dbInstance.PoolID] if !ok { slog.DebugContext( r.ctx, "updating instances cache for pool", @@ -620,9 +621,9 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) } poolInstances, err = provider.ListInstances(r.ctx, pool.ID, listInstancesParams) if err != nil { - return errors.Wrapf(err, "fetching instances for pool %s", pool.ID) + return errors.Wrapf(err, "fetching instances for pool %s", dbInstance.PoolID) } - poolInstanceCache[pool.ID] = poolInstances + poolInstanceCache[dbInstance.PoolID] = poolInstances } lockAcquired, err := locking.TryLock(dbInstance.Name) @@ -1348,9 +1349,9 @@ func (r *basePoolManager) deleteInstanceFromProvider(ctx context.Context, instan return errors.Wrap(err, "fetching pool") } - provider, ok := r.providers[pool.ProviderName] + provider, ok := r.providers[instance.ProviderName] if !ok { - return fmt.Errorf("unknown provider %s for pool %s", pool.ProviderName, pool.ID) + return fmt.Errorf("unknown provider %s for pool %s", instance.ProviderName, instance.PoolID) } identifier := instance.ProviderID @@ -1386,6 +1387,11 @@ func (r *basePoolManager) deletePendingInstances() error { slog.DebugContext( r.ctx, "removing instances in pending_delete") for _, instance := range instances { + if instance.ScaleSetID != 0 { + // instance is part of a scale set. Skip. 
+ continue + } + if instance.Status != commonParams.InstancePendingDelete && instance.Status != commonParams.InstancePendingForceDelete { // not in pending_delete status. Skip. continue @@ -1493,6 +1499,11 @@ func (r *basePoolManager) addPendingInstances() error { return fmt.Errorf("failed to fetch instances from store: %w", err) } for _, instance := range instances { + if instance.ScaleSetID != 0 { + // instance is part of a scale set. Skip. + continue + } + if instance.Status != commonParams.InstancePendingCreate { // not in pending_create status. Skip. continue diff --git a/runner/repositories.go b/runner/repositories.go index 5edff6ff..ab4f8e90 100644 --- a/runner/repositories.go +++ b/runner/repositories.go @@ -158,6 +158,15 @@ func (r *Runner) DeleteRepository(ctx context.Context, repoID string, keepWebhoo return runnerErrors.NewBadRequestError("repo has pools defined (%s)", strings.Join(poolIDs, ", ")) } + scaleSets, err := r.store.ListEntityScaleSets(ctx, entity) + if err != nil { + return errors.Wrap(err, "fetching repo scale sets") + } + + if len(scaleSets) > 0 { + return runnerErrors.NewBadRequestError("repo has scale sets defined; delete them first") + } + if !keepWebhook && r.config.Default.EnableWebhookManagement { poolMgr, err := r.poolManagerCtrl.GetRepoPoolManager(repo) if err != nil { diff --git a/runner/scalesets.go b/runner/scalesets.go new file mode 100644 index 00000000..5e8123b0 --- /dev/null +++ b/runner/scalesets.go @@ -0,0 +1,306 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ +package runner + +import ( + "context" + "encoding/json" + "fmt" + "log/slog" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/util/appdefaults" + "github.com/cloudbase/garm/util/github" + "github.com/cloudbase/garm/util/github/scalesets" + "github.com/pkg/errors" +) + +func (r *Runner) ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error) { + if !auth.IsAdmin(ctx) { + return []params.ScaleSet{}, runnerErrors.ErrUnauthorized + } + + scalesets, err := r.store.ListAllScaleSets(ctx) + if err != nil { + return nil, errors.Wrap(err, "fetching pools") + } + return scalesets, nil +} + +func (r *Runner) GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) { + if !auth.IsAdmin(ctx) { + return params.ScaleSet{}, runnerErrors.ErrUnauthorized + } + + set, err := r.store.GetScaleSetByID(ctx, scaleSet) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "fetching scale set") + } + return set, nil +} + +func (r *Runner) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error { + if !auth.IsAdmin(ctx) { + return runnerErrors.ErrUnauthorized + } + + scaleSet, err := r.store.GetScaleSetByID(ctx, scaleSetID) + if err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + return errors.Wrap(err, "fetching scale set") + } + return nil + } + + if len(scaleSet.Instances) > 0 { + return runnerErrors.NewBadRequestError("scale set has runners") + } + + if scaleSet.Enabled { + return runnerErrors.NewBadRequestError("scale set is enabled; disable it first") + } + + paramEntity, err := scaleSet.GithubEntity() + if err != nil { + return errors.Wrap(err, "getting entity") + } + + entity, err := r.store.GetGithubEntity(ctx, paramEntity.EntityType, paramEntity.ID) + if err != nil { + return errors.Wrap(err, "getting entity") + } + + ghCli, err := github.Client(ctx, entity) + if err != nil { + return errors.Wrap(err, "creating github client") + } + + scalesetCli, err := scalesets.NewClient(ghCli) + if err != nil { + return errors.Wrap(err, "getting scaleset client") + } + + if err := scalesetCli.DeleteRunnerScaleSet(ctx, scaleSet.ScaleSetID); err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + slog.InfoContext(ctx, "scale set not found", "scale_set_id", scaleSet.ScaleSetID) + return nil + } + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to delete scale set from github") + return errors.Wrap(err, "deleting scale set from github") + } + if err := r.store.DeleteScaleSetByID(ctx, scaleSetID); err != nil { + return errors.Wrap(err, "deleting scale set") + } + return nil +} + +func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param params.UpdateScaleSetParams) (params.ScaleSet, error) { + if !auth.IsAdmin(ctx) { + return params.ScaleSet{}, runnerErrors.ErrUnauthorized + } + + scaleSet, err := r.store.GetScaleSetByID(ctx, scaleSetID) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "fetching scale set") + } + + maxRunners := scaleSet.MaxRunners + minIdleRunners := scaleSet.MinIdleRunners + + if param.MaxRunners != nil { + maxRunners = *param.MaxRunners + } + if param.MinIdleRunners != nil { + minIdleRunners = *param.MinIdleRunners + } + + if param.RunnerBootstrapTimeout != nil && *param.RunnerBootstrapTimeout == 0 { + return params.ScaleSet{}, runnerErrors.NewBadRequestError("runner_bootstrap_timeout cannot be 0") + } + + if minIdleRunners > maxRunners { + return params.ScaleSet{}, 
runnerErrors.NewBadRequestError("min_idle_runners cannot be larger than max_runners") + } + + paramEntity, err := scaleSet.GithubEntity() + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "getting entity") + } + + entity, err := r.store.GetGithubEntity(ctx, paramEntity.EntityType, paramEntity.ID) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "getting entity") + } + + ghCli, err := github.Client(ctx, entity) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "creating github client") + } + + callback := func(old, new params.ScaleSet) error { + scalesetCli, err := scalesets.NewClient(ghCli) + if err != nil { + return errors.Wrap(err, "getting scaleset client") + } + + updateParams := params.RunnerScaleSet{} + hasUpdates := false + if old.Name != new.Name { + updateParams.Name = new.Name + hasUpdates = true + } + + if old.Enabled != new.Enabled { + updateParams.Enabled = &new.Enabled + hasUpdates = true + } + + if old.GitHubRunnerGroup != new.GitHubRunnerGroup { + runnerGroup, err := scalesetCli.GetRunnerGroupByName(ctx, new.GitHubRunnerGroup) + if err != nil { + return fmt.Errorf("error fetching runner group from github: %w", err) + } + updateParams.RunnerGroupID = int(runnerGroup.ID) + hasUpdates = true + } + + if old.DisableUpdate != new.DisableUpdate { + updateParams.RunnerSetting.DisableUpdate = new.DisableUpdate + hasUpdates = true + } + + if hasUpdates { + result, err := scalesetCli.UpdateRunnerScaleSet(ctx, new.ScaleSetID, updateParams) + if err != nil { + return fmt.Errorf("failed to update scaleset in github: %w", err) + } + asJs, _ := json.MarshalIndent(result, "", " ") + slog.Info("update result", "data", string(asJs)) + } + return nil + } + + newScaleSet, err := r.store.UpdateEntityScaleSet(ctx, entity, scaleSetID, param, callback) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "updating pool") + } + return newScaleSet, nil +} + +func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.GithubEntityType, entityID string, param params.CreateScaleSetParams) (scaleSetRet params.ScaleSet, err error) { + if !auth.IsAdmin(ctx) { + return params.ScaleSet{}, runnerErrors.ErrUnauthorized + } + + if param.RunnerBootstrapTimeout == 0 { + param.RunnerBootstrapTimeout = appdefaults.DefaultRunnerBootstrapTimeout + } + + if param.GitHubRunnerGroup == "" { + param.GitHubRunnerGroup = "Default" + } + + entity, err := r.store.GetGithubEntity(ctx, entityType, entityID) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "getting entity") + } + + ghCli, err := github.Client(ctx, entity) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "creating github client") + } + + scalesetCli, err := scalesets.NewClient(ghCli) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "getting scaleset client") + } + var runnerGroupID int = 1 + if param.GitHubRunnerGroup != "Default" { + runnerGroup, err := scalesetCli.GetRunnerGroupByName(ctx, param.GitHubRunnerGroup) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "getting runner group") + } + runnerGroupID = int(runnerGroup.ID) + } + + createParam := ¶ms.RunnerScaleSet{ + Name: param.Name, + RunnerGroupID: runnerGroupID, + Labels: []params.Label{ + { + Name: param.Name, + Type: "System", + }, + }, + RunnerSetting: params.RunnerSetting{ + Ephemeral: true, + DisableUpdate: param.DisableUpdate, + }, + Enabled: ¶m.Enabled, + } + + runnerScaleSet, err := scalesetCli.CreateRunnerScaleSet(ctx, createParam) + if err != nil { + return 
params.ScaleSet{}, errors.Wrap(err, "creating runner scale set") + } + + asJs, _ := json.MarshalIndent(runnerScaleSet, "", " ") + slog.InfoContext(ctx, "scale set", "data", string(asJs)) + + defer func() { + if err != nil { + if innerErr := scalesetCli.DeleteRunnerScaleSet(ctx, runnerScaleSet.ID); innerErr != nil { + slog.With(slog.Any("error", innerErr)).ErrorContext(ctx, "failed to cleanup scale set") + } + } + }() + param.ScaleSetID = runnerScaleSet.ID + + scaleSet, err := r.store.CreateEntityScaleSet(ctx, entity, param) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "creating scale set") + } + + return scaleSet, nil +} + +func (r *Runner) ListScaleSetInstances(ctx context.Context, scalesetID uint) ([]params.Instance, error) { + if !auth.IsAdmin(ctx) { + return nil, runnerErrors.ErrUnauthorized + } + + instances, err := r.store.ListScaleSetInstances(ctx, scalesetID) + if err != nil { + return []params.Instance{}, errors.Wrap(err, "fetching instances") + } + return instances, nil +} + +func (r *Runner) ListEntityScaleSets(ctx context.Context, entityType params.GithubEntityType, entityID string) ([]params.ScaleSet, error) { + if !auth.IsAdmin(ctx) { + return []params.ScaleSet{}, runnerErrors.ErrUnauthorized + } + entity := params.GithubEntity{ + ID: entityID, + EntityType: entityType, + } + scaleSets, err := r.store.ListEntityScaleSets(ctx, entity) + if err != nil { + return nil, errors.Wrap(err, "fetching scale sets") + } + return scaleSets, nil +} diff --git a/util/github/scalesets/message_sessions.go b/util/github/scalesets/message_sessions.go index 5ecdd94d..e4152e08 100644 --- a/util/github/scalesets/message_sessions.go +++ b/util/github/scalesets/message_sessions.go @@ -80,7 +80,9 @@ func (m *MessageSession) loop() { // work, if it's credentials issues, users can update them. 
slog.With(slog.Any("error", err)).ErrorContext(m.ctx, "failed to refresh message queue token") m.lastErr = err + continue } + m.lastErr = nil } } } From 7174e030e2be0b6055bb2a6b9c3321a230525838 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 11 Apr 2025 13:27:35 +0000 Subject: [PATCH 009/179] Add scaleset commands Signed-off-by: Gabriel Adrian Samfira --- apiserver/controllers/enterprises.go | 98 ++++ apiserver/controllers/organizations.go | 98 ++++ apiserver/controllers/repositories.go | 98 ++++ apiserver/routers/routers.go | 24 + apiserver/swagger.yaml | 153 ++++++ .../create_enterprise_scale_set_parameters.go | 173 ++++++ .../create_enterprise_scale_set_responses.go | 184 +++++++ client/enterprises/enterprises_client.go | 80 +++ .../list_enterprise_scale_sets_parameters.go | 151 +++++ .../list_enterprise_scale_sets_responses.go | 184 +++++++ .../create_org_scale_set_parameters.go | 173 ++++++ .../create_org_scale_set_responses.go | 184 +++++++ .../list_org_scale_sets_parameters.go | 151 +++++ .../list_org_scale_sets_responses.go | 184 +++++++ client/organizations/organizations_client.go | 80 +++ .../create_repo_scale_set_parameters.go | 173 ++++++ .../create_repo_scale_set_responses.go | 184 +++++++ .../list_repo_scale_sets_parameters.go | 151 +++++ .../list_repo_scale_sets_responses.go | 184 +++++++ client/repositories/repositories_client.go | 80 +++ cmd/garm-cli/cmd/scalesets.go | 518 ++++++++++++++++++ database/sql/enterprise.go | 2 +- database/sql/organizations.go | 2 +- database/sql/repositories.go | 2 +- runner/scalesets.go | 5 - util/github/client.go | 5 + 26 files changed, 3313 insertions(+), 8 deletions(-) create mode 100644 client/enterprises/create_enterprise_scale_set_parameters.go create mode 100644 client/enterprises/create_enterprise_scale_set_responses.go create mode 100644 client/enterprises/list_enterprise_scale_sets_parameters.go create mode 100644 client/enterprises/list_enterprise_scale_sets_responses.go create mode 100644 client/organizations/create_org_scale_set_parameters.go create mode 100644 client/organizations/create_org_scale_set_responses.go create mode 100644 client/organizations/list_org_scale_sets_parameters.go create mode 100644 client/organizations/list_org_scale_sets_responses.go create mode 100644 client/repositories/create_repo_scale_set_parameters.go create mode 100644 client/repositories/create_repo_scale_set_responses.go create mode 100644 client/repositories/list_repo_scale_sets_parameters.go create mode 100644 client/repositories/list_repo_scale_sets_responses.go create mode 100644 cmd/garm-cli/cmd/scalesets.go diff --git a/apiserver/controllers/enterprises.go b/apiserver/controllers/enterprises.go index d4b20826..9be1f1bc 100644 --- a/apiserver/controllers/enterprises.go +++ b/apiserver/controllers/enterprises.go @@ -277,6 +277,62 @@ func (a *APIController) CreateEnterprisePoolHandler(w http.ResponseWriter, r *ht } } +// swagger:route POST /enterprises/{enterpriseID}/scalesets enterprises scalesets CreateEnterpriseScaleSet +// +// Create enterprise pool with the parameters given. +// +// Parameters: +// + name: enterpriseID +// description: Enterprise ID. +// type: string +// in: path +// required: true +// +// + name: Body +// description: Parameters used when creating the enterprise scale set. 
+// type: CreateScaleSetParams +// in: body +// required: true +// +// Responses: +// 200: ScaleSet +// default: APIErrorResponse +func (a *APIController) CreateEnterpriseScaleSetHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + vars := mux.Vars(r) + enterpriseID, ok := vars["enterpriseID"] + if !ok { + w.WriteHeader(http.StatusBadRequest) + if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ + Error: "Bad Request", + Details: "No enterprise ID specified", + }); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } + return + } + + var scaleSetData runnerParams.CreateScaleSetParams + if err := json.NewDecoder(r.Body).Decode(&scaleSetData); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.GithubEntityTypeEnterprise, enterpriseID, scaleSetData) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "error creating enterprise scale set") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(scaleSet); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + // swagger:route GET /enterprises/{enterpriseID}/pools enterprises pools ListEnterprisePools // // List enterprise pools. @@ -319,6 +375,48 @@ func (a *APIController) ListEnterprisePoolsHandler(w http.ResponseWriter, r *htt } } +// swagger:route GET /enterprises/{enterpriseID}/scalesets enterprises scalesets ListEnterpriseScaleSets +// +// List enterprise scale sets. +// +// Parameters: +// + name: enterpriseID +// description: Enterprise ID. +// type: string +// in: path +// required: true +// +// Responses: +// 200: ScaleSets +// default: APIErrorResponse +func (a *APIController) ListEnterpriseScaleSetsHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + vars := mux.Vars(r) + enterpriseID, ok := vars["enterpriseID"] + if !ok { + w.WriteHeader(http.StatusBadRequest) + if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ + Error: "Bad Request", + Details: "No enterprise ID specified", + }); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } + return + } + + scaleSets, err := a.r.ListEntityScaleSets(ctx, runnerParams.GithubEntityTypeEnterprise, enterpriseID) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing scale sets") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(scaleSets); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + // swagger:route GET /enterprises/{enterpriseID}/pools/{poolID} enterprises pools GetEnterprisePool // // Get enterprise pool by ID. diff --git a/apiserver/controllers/organizations.go b/apiserver/controllers/organizations.go index ca2ef3b5..149dd490 100644 --- a/apiserver/controllers/organizations.go +++ b/apiserver/controllers/organizations.go @@ -287,6 +287,62 @@ func (a *APIController) CreateOrgPoolHandler(w http.ResponseWriter, r *http.Requ } } +// swagger:route POST /organizations/{orgID}/scalesets organizations scalesets CreateOrgScaleSet +// +// Create organization scale set with the parameters given. +// +// Parameters: +// + name: orgID +// description: Organization ID. 
+// type: string +// in: path +// required: true +// +// + name: Body +// description: Parameters used when creating the organization scale set. +// type: CreateScaleSetParams +// in: body +// required: true +// +// Responses: +// 200: ScaleSet +// default: APIErrorResponse +func (a *APIController) CreateOrgScaleSetHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + vars := mux.Vars(r) + orgID, ok := vars["orgID"] + if !ok { + w.WriteHeader(http.StatusBadRequest) + if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ + Error: "Bad Request", + Details: "No org ID specified", + }); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } + return + } + + var scalesetData runnerParams.CreateScaleSetParams + if err := json.NewDecoder(r.Body).Decode(&scalesetData); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.GithubEntityTypeOrganization, orgID, scalesetData) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "error creating organization scale set") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(scaleSet); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + // swagger:route GET /organizations/{orgID}/pools organizations pools ListOrgPools // // List organization pools. @@ -329,6 +385,48 @@ func (a *APIController) ListOrgPoolsHandler(w http.ResponseWriter, r *http.Reque } } +// swagger:route GET /organizations/{orgID}/scalesets organizations scalesets ListOrgScaleSets +// +// List organization scale sets. +// +// Parameters: +// + name: orgID +// description: Organization ID. +// type: string +// in: path +// required: true +// +// Responses: +// 200: ScaleSets +// default: APIErrorResponse +func (a *APIController) ListOrgScaleSetsHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + vars := mux.Vars(r) + orgID, ok := vars["orgID"] + if !ok { + w.WriteHeader(http.StatusBadRequest) + if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ + Error: "Bad Request", + Details: "No org ID specified", + }); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } + return + } + + scaleSets, err := a.r.ListEntityScaleSets(ctx, runnerParams.GithubEntityTypeOrganization, orgID) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing scale sets") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(scaleSets); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + // swagger:route GET /organizations/{orgID}/pools/{poolID} organizations pools GetOrgPool // // Get organization pool by ID. diff --git a/apiserver/controllers/repositories.go b/apiserver/controllers/repositories.go index 7cc3c4f5..14693aac 100644 --- a/apiserver/controllers/repositories.go +++ b/apiserver/controllers/repositories.go @@ -286,6 +286,62 @@ func (a *APIController) CreateRepoPoolHandler(w http.ResponseWriter, r *http.Req } } +// swagger:route POST /repositories/{repoID}/scalesets repositories scalesets CreateRepoScaleSet +// +// Create repository scale set with the parameters given. 
+// +// Parameters: +// + name: repoID +// description: Repository ID. +// type: string +// in: path +// required: true +// +// + name: Body +// description: Parameters used when creating the repository scale set. +// type: CreateScaleSetParams +// in: body +// required: true +// +// Responses: +// 200: ScaleSet +// default: APIErrorResponse +func (a *APIController) CreateRepoScaleSetHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + vars := mux.Vars(r) + repoID, ok := vars["repoID"] + if !ok { + w.WriteHeader(http.StatusBadRequest) + if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ + Error: "Bad Request", + Details: "No repo ID specified", + }); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } + return + } + + var scaleSetData runnerParams.CreateScaleSetParams + if err := json.NewDecoder(r.Body).Decode(&scaleSetData); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.GithubEntityTypeRepository, repoID, scaleSetData) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "error creating repository scale set") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(scaleSet); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + // swagger:route GET /repositories/{repoID}/pools repositories pools ListRepoPools // // List repository pools. @@ -328,6 +384,48 @@ func (a *APIController) ListRepoPoolsHandler(w http.ResponseWriter, r *http.Requ } } +// swagger:route GET /repositories/{repoID}/scalesets repositories scalesets ListRepoScaleSets +// +// List repository scale sets. +// +// Parameters: +// + name: repoID +// description: Repository ID. +// type: string +// in: path +// required: true +// +// Responses: +// 200: ScaleSets +// default: APIErrorResponse +func (a *APIController) ListRepoScaleSetsHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + vars := mux.Vars(r) + repoID, ok := vars["repoID"] + if !ok { + w.WriteHeader(http.StatusBadRequest) + if err := json.NewEncoder(w).Encode(params.APIErrorResponse{ + Error: "Bad Request", + Details: "No repo ID specified", + }); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } + return + } + + scaleSets, err := a.r.ListEntityScaleSets(ctx, runnerParams.GithubEntityTypeRepository, repoID) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing scale sets") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(scaleSets); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + // swagger:route GET /repositories/{repoID}/pools/{poolID} repositories pools GetRepoPool // // Get repository pool by ID. 
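For orientation, here is a minimal usage sketch (not part of this change) showing how the scale-set endpoints introduced above might be exercised from Go through the generated go-swagger client that this patch adds. Only the enterprises client methods (NewCreateEnterpriseScaleSetParams, CreateEnterpriseScaleSet, NewListEnterpriseScaleSetsParams, ListEnterpriseScaleSets) and the CreateScaleSetParams fields referenced elsewhere in the patch (Name, Enabled) are taken from this series; the host, base path, token and the enterprises.New constructor wiring are placeholders assumed from stock go-openapi client boilerplate.

package main

import (
	"fmt"
	"log"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/cloudbase/garm/client/enterprises"
	garmParams "github.com/cloudbase/garm/params"
)

func main() {
	// Placeholder endpoint, base path and JWT; adjust for a real GARM deployment.
	transport := httptransport.New("garm.example.com", "/api/v1", []string{"http"})
	authInfo := httptransport.BearerToken("REPLACE_WITH_JWT")
	cli := enterprises.New(transport, strfmt.Default)

	// Build the create request. Only Name and Enabled are set here; they are the
	// CreateScaleSetParams fields this patch itself references in runner/scalesets.go.
	var spec garmParams.CreateScaleSetParams
	spec.Name = "example-scale-set"
	spec.Enabled = true

	// Create a scale set on an enterprise.
	created, err := cli.CreateEnterpriseScaleSet(
		enterprises.NewCreateEnterpriseScaleSetParams().
			WithEnterpriseID("REPLACE_WITH_ENTERPRISE_ID").
			WithBody(spec),
		authInfo)
	if err != nil {
		log.Fatalf("creating scale set: %v", err)
	}
	fmt.Printf("created scale set: %+v\n", created.Payload)

	// List the scale sets defined on the same enterprise.
	sets, err := cli.ListEnterpriseScaleSets(
		enterprises.NewListEnterpriseScaleSetsParams().
			WithEnterpriseID("REPLACE_WITH_ENTERPRISE_ID"),
		authInfo)
	if err != nil {
		log.Fatalf("listing scale sets: %v", err)
	}
	fmt.Printf("scale sets: %+v\n", sets.Payload)
}

The same pattern applies to the repositories and organizations clients added in this patch (CreateRepoScaleSet/ListRepoScaleSets and CreateOrgScaleSet/ListOrgScaleSets); they differ only in the ID path parameter they take.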
diff --git a/apiserver/routers/routers.go b/apiserver/routers/routers.go index 13c9a2f9..ec135292 100644 --- a/apiserver/routers/routers.go +++ b/apiserver/routers/routers.go @@ -265,6 +265,14 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware apiRouter.Handle("/repositories/{repoID}/pools/", http.HandlerFunc(han.CreateRepoPoolHandler)).Methods("POST", "OPTIONS") apiRouter.Handle("/repositories/{repoID}/pools", http.HandlerFunc(han.CreateRepoPoolHandler)).Methods("POST", "OPTIONS") + // Create scale set + apiRouter.Handle("/repositories/{repoID}/scalesets/", http.HandlerFunc(han.CreateRepoScaleSetHandler)).Methods("POST", "OPTIONS") + apiRouter.Handle("/repositories/{repoID}/scalesets", http.HandlerFunc(han.CreateRepoScaleSetHandler)).Methods("POST", "OPTIONS") + + // List scale sets + apiRouter.Handle("/repositories/{repoID}/scalesets/", http.HandlerFunc(han.ListRepoScaleSetsHandler)).Methods("GET", "OPTIONS") + apiRouter.Handle("/repositories/{repoID}/scalesets", http.HandlerFunc(han.ListRepoScaleSetsHandler)).Methods("GET", "OPTIONS") + // Repo instances list apiRouter.Handle("/repositories/{repoID}/instances/", http.HandlerFunc(han.ListRepoInstancesHandler)).Methods("GET", "OPTIONS") apiRouter.Handle("/repositories/{repoID}/instances", http.HandlerFunc(han.ListRepoInstancesHandler)).Methods("GET", "OPTIONS") @@ -315,6 +323,14 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware apiRouter.Handle("/organizations/{orgID}/pools/", http.HandlerFunc(han.CreateOrgPoolHandler)).Methods("POST", "OPTIONS") apiRouter.Handle("/organizations/{orgID}/pools", http.HandlerFunc(han.CreateOrgPoolHandler)).Methods("POST", "OPTIONS") + // Create org scale set + apiRouter.Handle("/organizations/{orgID}/scalesets/", http.HandlerFunc(han.CreateOrgScaleSetHandler)).Methods("POST", "OPTIONS") + apiRouter.Handle("/organizations/{orgID}/scalesets", http.HandlerFunc(han.CreateOrgScaleSetHandler)).Methods("POST", "OPTIONS") + + // List org scale sets + apiRouter.Handle("/organizations/{orgID}/scalesets/", http.HandlerFunc(han.ListOrgScaleSetsHandler)).Methods("GET", "OPTIONS") + apiRouter.Handle("/organizations/{orgID}/scalesets", http.HandlerFunc(han.ListOrgScaleSetsHandler)).Methods("GET", "OPTIONS") + // Org instances list apiRouter.Handle("/organizations/{orgID}/instances/", http.HandlerFunc(han.ListOrgInstancesHandler)).Methods("GET", "OPTIONS") apiRouter.Handle("/organizations/{orgID}/instances", http.HandlerFunc(han.ListOrgInstancesHandler)).Methods("GET", "OPTIONS") @@ -365,6 +381,14 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware apiRouter.Handle("/enterprises/{enterpriseID}/pools/", http.HandlerFunc(han.CreateEnterprisePoolHandler)).Methods("POST", "OPTIONS") apiRouter.Handle("/enterprises/{enterpriseID}/pools", http.HandlerFunc(han.CreateEnterprisePoolHandler)).Methods("POST", "OPTIONS") + // Create enterprise scale sets + apiRouter.Handle("/enterprises/{enterpriseID}/scalesets/", http.HandlerFunc(han.CreateEnterpriseScaleSetHandler)).Methods("POST", "OPTIONS") + apiRouter.Handle("/enterprises/{enterpriseID}/scalesets", http.HandlerFunc(han.CreateEnterpriseScaleSetHandler)).Methods("POST", "OPTIONS") + + // List enterprise scale sets + apiRouter.Handle("/enterprises/{enterpriseID}/scalesets/", http.HandlerFunc(han.ListEnterpriseScaleSetsHandler)).Methods("GET", "OPTIONS") + apiRouter.Handle("/enterprises/{enterpriseID}/scalesets", http.HandlerFunc(han.ListEnterpriseScaleSetsHandler)).Methods("GET", 
"OPTIONS") + // Enterprise instances list apiRouter.Handle("/enterprises/{enterpriseID}/instances/", http.HandlerFunc(han.ListEnterpriseInstancesHandler)).Methods("GET", "OPTIONS") apiRouter.Handle("/enterprises/{enterpriseID}/instances", http.HandlerFunc(han.ListEnterpriseInstancesHandler)).Methods("GET", "OPTIONS") diff --git a/apiserver/swagger.yaml b/apiserver/swagger.yaml index cf4287e9..2f89ab77 100644 --- a/apiserver/swagger.yaml +++ b/apiserver/swagger.yaml @@ -646,6 +646,57 @@ paths: tags: - enterprises - pools + /enterprises/{enterpriseID}/scalesets: + get: + operationId: ListEnterpriseScaleSets + parameters: + - description: Enterprise ID. + in: path + name: enterpriseID + required: true + type: string + responses: + "200": + description: ScaleSets + schema: + $ref: '#/definitions/ScaleSets' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: List enterprise scale sets. + tags: + - enterprises + - scalesets + post: + operationId: CreateEnterpriseScaleSet + parameters: + - description: Enterprise ID. + in: path + name: enterpriseID + required: true + type: string + - description: Parameters used when creating the enterprise scale set. + in: body + name: Body + required: true + schema: + $ref: '#/definitions/CreateScaleSetParams' + description: Parameters used when creating the enterprise scale set. + type: object + responses: + "200": + description: ScaleSet + schema: + $ref: '#/definitions/ScaleSet' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Create enterprise pool with the parameters given. + tags: + - enterprises + - scalesets /first-run: post: operationId: FirstRun @@ -1229,6 +1280,57 @@ paths: tags: - organizations - pools + /organizations/{orgID}/scalesets: + get: + operationId: ListOrgScaleSets + parameters: + - description: Organization ID. + in: path + name: orgID + required: true + type: string + responses: + "200": + description: ScaleSets + schema: + $ref: '#/definitions/ScaleSets' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: List organization scale sets. + tags: + - organizations + - scalesets + post: + operationId: CreateOrgScaleSet + parameters: + - description: Organization ID. + in: path + name: orgID + required: true + type: string + - description: Parameters used when creating the organization scale set. + in: body + name: Body + required: true + schema: + $ref: '#/definitions/CreateScaleSetParams' + description: Parameters used when creating the organization scale set. + type: object + responses: + "200": + description: ScaleSet + schema: + $ref: '#/definitions/ScaleSet' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Create organization scale set with the parameters given. + tags: + - organizations + - scalesets /organizations/{orgID}/webhook: delete: operationId: UninstallOrgWebhook @@ -1678,6 +1780,57 @@ paths: tags: - repositories - pools + /repositories/{repoID}/scalesets: + get: + operationId: ListRepoScaleSets + parameters: + - description: Repository ID. + in: path + name: repoID + required: true + type: string + responses: + "200": + description: ScaleSets + schema: + $ref: '#/definitions/ScaleSets' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: List repository scale sets. 
+ tags: + - repositories + - scalesets + post: + operationId: CreateRepoScaleSet + parameters: + - description: Repository ID. + in: path + name: repoID + required: true + type: string + - description: Parameters used when creating the repository scale set. + in: body + name: Body + required: true + schema: + $ref: '#/definitions/CreateScaleSetParams' + description: Parameters used when creating the repository scale set. + type: object + responses: + "200": + description: ScaleSet + schema: + $ref: '#/definitions/ScaleSet' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Create repository scale set with the parameters given. + tags: + - repositories + - scalesets /repositories/{repoID}/webhook: delete: operationId: UninstallRepoWebhook diff --git a/client/enterprises/create_enterprise_scale_set_parameters.go b/client/enterprises/create_enterprise_scale_set_parameters.go new file mode 100644 index 00000000..76fe13ec --- /dev/null +++ b/client/enterprises/create_enterprise_scale_set_parameters.go @@ -0,0 +1,173 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package enterprises + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + garm_params "github.com/cloudbase/garm/params" +) + +// NewCreateEnterpriseScaleSetParams creates a new CreateEnterpriseScaleSetParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewCreateEnterpriseScaleSetParams() *CreateEnterpriseScaleSetParams { + return &CreateEnterpriseScaleSetParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewCreateEnterpriseScaleSetParamsWithTimeout creates a new CreateEnterpriseScaleSetParams object +// with the ability to set a timeout on a request. +func NewCreateEnterpriseScaleSetParamsWithTimeout(timeout time.Duration) *CreateEnterpriseScaleSetParams { + return &CreateEnterpriseScaleSetParams{ + timeout: timeout, + } +} + +// NewCreateEnterpriseScaleSetParamsWithContext creates a new CreateEnterpriseScaleSetParams object +// with the ability to set a context for a request. +func NewCreateEnterpriseScaleSetParamsWithContext(ctx context.Context) *CreateEnterpriseScaleSetParams { + return &CreateEnterpriseScaleSetParams{ + Context: ctx, + } +} + +// NewCreateEnterpriseScaleSetParamsWithHTTPClient creates a new CreateEnterpriseScaleSetParams object +// with the ability to set a custom HTTPClient for a request. +func NewCreateEnterpriseScaleSetParamsWithHTTPClient(client *http.Client) *CreateEnterpriseScaleSetParams { + return &CreateEnterpriseScaleSetParams{ + HTTPClient: client, + } +} + +/* +CreateEnterpriseScaleSetParams contains all the parameters to send to the API endpoint + + for the create enterprise scale set operation. + + Typically these are written to a http.Request. +*/ +type CreateEnterpriseScaleSetParams struct { + + /* Body. + + Parameters used when creating the enterprise scale set. + */ + Body garm_params.CreateScaleSetParams + + /* EnterpriseID. + + Enterprise ID. 
+ */ + EnterpriseID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the create enterprise scale set params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateEnterpriseScaleSetParams) WithDefaults() *CreateEnterpriseScaleSetParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the create enterprise scale set params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateEnterpriseScaleSetParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the create enterprise scale set params +func (o *CreateEnterpriseScaleSetParams) WithTimeout(timeout time.Duration) *CreateEnterpriseScaleSetParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the create enterprise scale set params +func (o *CreateEnterpriseScaleSetParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the create enterprise scale set params +func (o *CreateEnterpriseScaleSetParams) WithContext(ctx context.Context) *CreateEnterpriseScaleSetParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the create enterprise scale set params +func (o *CreateEnterpriseScaleSetParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the create enterprise scale set params +func (o *CreateEnterpriseScaleSetParams) WithHTTPClient(client *http.Client) *CreateEnterpriseScaleSetParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the create enterprise scale set params +func (o *CreateEnterpriseScaleSetParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the create enterprise scale set params +func (o *CreateEnterpriseScaleSetParams) WithBody(body garm_params.CreateScaleSetParams) *CreateEnterpriseScaleSetParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the create enterprise scale set params +func (o *CreateEnterpriseScaleSetParams) SetBody(body garm_params.CreateScaleSetParams) { + o.Body = body +} + +// WithEnterpriseID adds the enterpriseID to the create enterprise scale set params +func (o *CreateEnterpriseScaleSetParams) WithEnterpriseID(enterpriseID string) *CreateEnterpriseScaleSetParams { + o.SetEnterpriseID(enterpriseID) + return o +} + +// SetEnterpriseID adds the enterpriseId to the create enterprise scale set params +func (o *CreateEnterpriseScaleSetParams) SetEnterpriseID(enterpriseID string) { + o.EnterpriseID = enterpriseID +} + +// WriteToRequest writes these params to a swagger request +func (o *CreateEnterpriseScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + + // path param enterpriseID + if err := r.SetPathParam("enterpriseID", o.EnterpriseID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} diff --git a/client/enterprises/create_enterprise_scale_set_responses.go b/client/enterprises/create_enterprise_scale_set_responses.go new file mode 100644 index 00000000..46107fc3 --- /dev/null +++ b/client/enterprises/create_enterprise_scale_set_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package enterprises + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// CreateEnterpriseScaleSetReader is a Reader for the CreateEnterpriseScaleSet structure. +type CreateEnterpriseScaleSetReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *CreateEnterpriseScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewCreateEnterpriseScaleSetOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewCreateEnterpriseScaleSetDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewCreateEnterpriseScaleSetOK creates a CreateEnterpriseScaleSetOK with default headers values +func NewCreateEnterpriseScaleSetOK() *CreateEnterpriseScaleSetOK { + return &CreateEnterpriseScaleSetOK{} +} + +/* +CreateEnterpriseScaleSetOK describes a response with status code 200, with default header values. 
+ +ScaleSet +*/ +type CreateEnterpriseScaleSetOK struct { + Payload garm_params.ScaleSet +} + +// IsSuccess returns true when this create enterprise scale set o k response has a 2xx status code +func (o *CreateEnterpriseScaleSetOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this create enterprise scale set o k response has a 3xx status code +func (o *CreateEnterpriseScaleSetOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this create enterprise scale set o k response has a 4xx status code +func (o *CreateEnterpriseScaleSetOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this create enterprise scale set o k response has a 5xx status code +func (o *CreateEnterpriseScaleSetOK) IsServerError() bool { + return false +} + +// IsCode returns true when this create enterprise scale set o k response a status code equal to that given +func (o *CreateEnterpriseScaleSetOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the create enterprise scale set o k response +func (o *CreateEnterpriseScaleSetOK) Code() int { + return 200 +} + +func (o *CreateEnterpriseScaleSetOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /enterprises/{enterpriseID}/scalesets][%d] createEnterpriseScaleSetOK %s", 200, payload) +} + +func (o *CreateEnterpriseScaleSetOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /enterprises/{enterpriseID}/scalesets][%d] createEnterpriseScaleSetOK %s", 200, payload) +} + +func (o *CreateEnterpriseScaleSetOK) GetPayload() garm_params.ScaleSet { + return o.Payload +} + +func (o *CreateEnterpriseScaleSetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewCreateEnterpriseScaleSetDefault creates a CreateEnterpriseScaleSetDefault with default headers values +func NewCreateEnterpriseScaleSetDefault(code int) *CreateEnterpriseScaleSetDefault { + return &CreateEnterpriseScaleSetDefault{ + _statusCode: code, + } +} + +/* +CreateEnterpriseScaleSetDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type CreateEnterpriseScaleSetDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this create enterprise scale set default response has a 2xx status code +func (o *CreateEnterpriseScaleSetDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this create enterprise scale set default response has a 3xx status code +func (o *CreateEnterpriseScaleSetDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this create enterprise scale set default response has a 4xx status code +func (o *CreateEnterpriseScaleSetDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this create enterprise scale set default response has a 5xx status code +func (o *CreateEnterpriseScaleSetDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this create enterprise scale set default response a status code equal to that given +func (o *CreateEnterpriseScaleSetDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the create enterprise scale set default response +func (o *CreateEnterpriseScaleSetDefault) Code() int { + return o._statusCode +} + +func (o *CreateEnterpriseScaleSetDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /enterprises/{enterpriseID}/scalesets][%d] CreateEnterpriseScaleSet default %s", o._statusCode, payload) +} + +func (o *CreateEnterpriseScaleSetDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /enterprises/{enterpriseID}/scalesets][%d] CreateEnterpriseScaleSet default %s", o._statusCode, payload) +} + +func (o *CreateEnterpriseScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *CreateEnterpriseScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/enterprises/enterprises_client.go b/client/enterprises/enterprises_client.go index 91ab1bff..0014ca96 100644 --- a/client/enterprises/enterprises_client.go +++ b/client/enterprises/enterprises_client.go @@ -58,6 +58,8 @@ type ClientService interface { CreateEnterprisePool(params *CreateEnterprisePoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateEnterprisePoolOK, error) + CreateEnterpriseScaleSet(params *CreateEnterpriseScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateEnterpriseScaleSetOK, error) + DeleteEnterprise(params *DeleteEnterpriseParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error DeleteEnterprisePool(params *DeleteEnterprisePoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error @@ -70,6 +72,8 @@ type ClientService interface { ListEnterprisePools(params *ListEnterprisePoolsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListEnterprisePoolsOK, error) + ListEnterpriseScaleSets(params *ListEnterpriseScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListEnterpriseScaleSetsOK, error) + ListEnterprises(params *ListEnterprisesParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) 
(*ListEnterprisesOK, error) UpdateEnterprise(params *UpdateEnterpriseParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateEnterpriseOK, error) @@ -155,6 +159,44 @@ func (a *Client) CreateEnterprisePool(params *CreateEnterprisePoolParams, authIn return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } +/* +CreateEnterpriseScaleSet creates enterprise pool with the parameters given +*/ +func (a *Client) CreateEnterpriseScaleSet(params *CreateEnterpriseScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateEnterpriseScaleSetOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewCreateEnterpriseScaleSetParams() + } + op := &runtime.ClientOperation{ + ID: "CreateEnterpriseScaleSet", + Method: "POST", + PathPattern: "/enterprises/{enterpriseID}/scalesets", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &CreateEnterpriseScaleSetReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*CreateEnterpriseScaleSetOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*CreateEnterpriseScaleSetDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + /* DeleteEnterprise deletes enterprise by ID */ @@ -371,6 +413,44 @@ func (a *Client) ListEnterprisePools(params *ListEnterprisePoolsParams, authInfo return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } +/* +ListEnterpriseScaleSets lists enterprise scale sets +*/ +func (a *Client) ListEnterpriseScaleSets(params *ListEnterpriseScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListEnterpriseScaleSetsOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewListEnterpriseScaleSetsParams() + } + op := &runtime.ClientOperation{ + ID: "ListEnterpriseScaleSets", + Method: "GET", + PathPattern: "/enterprises/{enterpriseID}/scalesets", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &ListEnterpriseScaleSetsReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*ListEnterpriseScaleSetsOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ListEnterpriseScaleSetsDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + /* ListEnterprises lists all enterprises */ diff --git a/client/enterprises/list_enterprise_scale_sets_parameters.go b/client/enterprises/list_enterprise_scale_sets_parameters.go new file mode 100644 index 00000000..f835717c --- /dev/null +++ 
b/client/enterprises/list_enterprise_scale_sets_parameters.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package enterprises + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewListEnterpriseScaleSetsParams creates a new ListEnterpriseScaleSetsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewListEnterpriseScaleSetsParams() *ListEnterpriseScaleSetsParams { + return &ListEnterpriseScaleSetsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewListEnterpriseScaleSetsParamsWithTimeout creates a new ListEnterpriseScaleSetsParams object +// with the ability to set a timeout on a request. +func NewListEnterpriseScaleSetsParamsWithTimeout(timeout time.Duration) *ListEnterpriseScaleSetsParams { + return &ListEnterpriseScaleSetsParams{ + timeout: timeout, + } +} + +// NewListEnterpriseScaleSetsParamsWithContext creates a new ListEnterpriseScaleSetsParams object +// with the ability to set a context for a request. +func NewListEnterpriseScaleSetsParamsWithContext(ctx context.Context) *ListEnterpriseScaleSetsParams { + return &ListEnterpriseScaleSetsParams{ + Context: ctx, + } +} + +// NewListEnterpriseScaleSetsParamsWithHTTPClient creates a new ListEnterpriseScaleSetsParams object +// with the ability to set a custom HTTPClient for a request. +func NewListEnterpriseScaleSetsParamsWithHTTPClient(client *http.Client) *ListEnterpriseScaleSetsParams { + return &ListEnterpriseScaleSetsParams{ + HTTPClient: client, + } +} + +/* +ListEnterpriseScaleSetsParams contains all the parameters to send to the API endpoint + + for the list enterprise scale sets operation. + + Typically these are written to a http.Request. +*/ +type ListEnterpriseScaleSetsParams struct { + + /* EnterpriseID. + + Enterprise ID. + */ + EnterpriseID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the list enterprise scale sets params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListEnterpriseScaleSetsParams) WithDefaults() *ListEnterpriseScaleSetsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the list enterprise scale sets params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ListEnterpriseScaleSetsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the list enterprise scale sets params +func (o *ListEnterpriseScaleSetsParams) WithTimeout(timeout time.Duration) *ListEnterpriseScaleSetsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the list enterprise scale sets params +func (o *ListEnterpriseScaleSetsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the list enterprise scale sets params +func (o *ListEnterpriseScaleSetsParams) WithContext(ctx context.Context) *ListEnterpriseScaleSetsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the list enterprise scale sets params +func (o *ListEnterpriseScaleSetsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the list enterprise scale sets params +func (o *ListEnterpriseScaleSetsParams) WithHTTPClient(client *http.Client) *ListEnterpriseScaleSetsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the list enterprise scale sets params +func (o *ListEnterpriseScaleSetsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithEnterpriseID adds the enterpriseID to the list enterprise scale sets params +func (o *ListEnterpriseScaleSetsParams) WithEnterpriseID(enterpriseID string) *ListEnterpriseScaleSetsParams { + o.SetEnterpriseID(enterpriseID) + return o +} + +// SetEnterpriseID adds the enterpriseId to the list enterprise scale sets params +func (o *ListEnterpriseScaleSetsParams) SetEnterpriseID(enterpriseID string) { + o.EnterpriseID = enterpriseID +} + +// WriteToRequest writes these params to a swagger request +func (o *ListEnterpriseScaleSetsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param enterpriseID + if err := r.SetPathParam("enterpriseID", o.EnterpriseID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/enterprises/list_enterprise_scale_sets_responses.go b/client/enterprises/list_enterprise_scale_sets_responses.go new file mode 100644 index 00000000..9c2564c2 --- /dev/null +++ b/client/enterprises/list_enterprise_scale_sets_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package enterprises + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// ListEnterpriseScaleSetsReader is a Reader for the ListEnterpriseScaleSets structure. +type ListEnterpriseScaleSetsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *ListEnterpriseScaleSetsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewListEnterpriseScaleSetsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewListEnterpriseScaleSetsDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewListEnterpriseScaleSetsOK creates a ListEnterpriseScaleSetsOK with default headers values +func NewListEnterpriseScaleSetsOK() *ListEnterpriseScaleSetsOK { + return &ListEnterpriseScaleSetsOK{} +} + +/* +ListEnterpriseScaleSetsOK describes a response with status code 200, with default header values. + +ScaleSets +*/ +type ListEnterpriseScaleSetsOK struct { + Payload garm_params.ScaleSets +} + +// IsSuccess returns true when this list enterprise scale sets o k response has a 2xx status code +func (o *ListEnterpriseScaleSetsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this list enterprise scale sets o k response has a 3xx status code +func (o *ListEnterpriseScaleSetsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this list enterprise scale sets o k response has a 4xx status code +func (o *ListEnterpriseScaleSetsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this list enterprise scale sets o k response has a 5xx status code +func (o *ListEnterpriseScaleSetsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this list enterprise scale sets o k response a status code equal to that given +func (o *ListEnterpriseScaleSetsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the list enterprise scale sets o k response +func (o *ListEnterpriseScaleSetsOK) Code() int { + return 200 +} + +func (o *ListEnterpriseScaleSetsOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /enterprises/{enterpriseID}/scalesets][%d] listEnterpriseScaleSetsOK %s", 200, payload) +} + +func (o *ListEnterpriseScaleSetsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /enterprises/{enterpriseID}/scalesets][%d] listEnterpriseScaleSetsOK %s", 200, payload) +} + +func (o *ListEnterpriseScaleSetsOK) GetPayload() garm_params.ScaleSets { + return o.Payload +} + +func (o *ListEnterpriseScaleSetsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewListEnterpriseScaleSetsDefault creates a ListEnterpriseScaleSetsDefault with default headers values +func NewListEnterpriseScaleSetsDefault(code int) *ListEnterpriseScaleSetsDefault { + return &ListEnterpriseScaleSetsDefault{ + _statusCode: code, + } +} + +/* +ListEnterpriseScaleSetsDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type ListEnterpriseScaleSetsDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this list enterprise scale sets default response has a 2xx status code +func (o *ListEnterpriseScaleSetsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this list enterprise scale sets default response has a 3xx status code +func (o *ListEnterpriseScaleSetsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this list enterprise scale sets default response has a 4xx status code +func (o *ListEnterpriseScaleSetsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this list enterprise scale sets default response has a 5xx status code +func (o *ListEnterpriseScaleSetsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this list enterprise scale sets default response a status code equal to that given +func (o *ListEnterpriseScaleSetsDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the list enterprise scale sets default response +func (o *ListEnterpriseScaleSetsDefault) Code() int { + return o._statusCode +} + +func (o *ListEnterpriseScaleSetsDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /enterprises/{enterpriseID}/scalesets][%d] ListEnterpriseScaleSets default %s", o._statusCode, payload) +} + +func (o *ListEnterpriseScaleSetsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /enterprises/{enterpriseID}/scalesets][%d] ListEnterpriseScaleSets default %s", o._statusCode, payload) +} + +func (o *ListEnterpriseScaleSetsDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *ListEnterpriseScaleSetsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/organizations/create_org_scale_set_parameters.go b/client/organizations/create_org_scale_set_parameters.go new file mode 100644 index 00000000..0e222693 --- /dev/null +++ b/client/organizations/create_org_scale_set_parameters.go @@ -0,0 +1,173 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package organizations + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + garm_params "github.com/cloudbase/garm/params" +) + +// NewCreateOrgScaleSetParams creates a new CreateOrgScaleSetParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewCreateOrgScaleSetParams() *CreateOrgScaleSetParams { + return &CreateOrgScaleSetParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewCreateOrgScaleSetParamsWithTimeout creates a new CreateOrgScaleSetParams object +// with the ability to set a timeout on a request. 
+func NewCreateOrgScaleSetParamsWithTimeout(timeout time.Duration) *CreateOrgScaleSetParams { + return &CreateOrgScaleSetParams{ + timeout: timeout, + } +} + +// NewCreateOrgScaleSetParamsWithContext creates a new CreateOrgScaleSetParams object +// with the ability to set a context for a request. +func NewCreateOrgScaleSetParamsWithContext(ctx context.Context) *CreateOrgScaleSetParams { + return &CreateOrgScaleSetParams{ + Context: ctx, + } +} + +// NewCreateOrgScaleSetParamsWithHTTPClient creates a new CreateOrgScaleSetParams object +// with the ability to set a custom HTTPClient for a request. +func NewCreateOrgScaleSetParamsWithHTTPClient(client *http.Client) *CreateOrgScaleSetParams { + return &CreateOrgScaleSetParams{ + HTTPClient: client, + } +} + +/* +CreateOrgScaleSetParams contains all the parameters to send to the API endpoint + + for the create org scale set operation. + + Typically these are written to a http.Request. +*/ +type CreateOrgScaleSetParams struct { + + /* Body. + + Parameters used when creating the organization scale set. + */ + Body garm_params.CreateScaleSetParams + + /* OrgID. + + Organization ID. + */ + OrgID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the create org scale set params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateOrgScaleSetParams) WithDefaults() *CreateOrgScaleSetParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the create org scale set params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateOrgScaleSetParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the create org scale set params +func (o *CreateOrgScaleSetParams) WithTimeout(timeout time.Duration) *CreateOrgScaleSetParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the create org scale set params +func (o *CreateOrgScaleSetParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the create org scale set params +func (o *CreateOrgScaleSetParams) WithContext(ctx context.Context) *CreateOrgScaleSetParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the create org scale set params +func (o *CreateOrgScaleSetParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the create org scale set params +func (o *CreateOrgScaleSetParams) WithHTTPClient(client *http.Client) *CreateOrgScaleSetParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the create org scale set params +func (o *CreateOrgScaleSetParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the create org scale set params +func (o *CreateOrgScaleSetParams) WithBody(body garm_params.CreateScaleSetParams) *CreateOrgScaleSetParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the create org scale set params +func (o *CreateOrgScaleSetParams) SetBody(body garm_params.CreateScaleSetParams) { + o.Body = body +} + +// WithOrgID adds the orgID to the create org scale set params +func (o *CreateOrgScaleSetParams) WithOrgID(orgID string) *CreateOrgScaleSetParams { + o.SetOrgID(orgID) + return o +} + +// SetOrgID adds the orgId to the create org scale set params +func (o 
*CreateOrgScaleSetParams) SetOrgID(orgID string) { + o.OrgID = orgID +} + +// WriteToRequest writes these params to a swagger request +func (o *CreateOrgScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + + // path param orgID + if err := r.SetPathParam("orgID", o.OrgID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/organizations/create_org_scale_set_responses.go b/client/organizations/create_org_scale_set_responses.go new file mode 100644 index 00000000..3a91d03f --- /dev/null +++ b/client/organizations/create_org_scale_set_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package organizations + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// CreateOrgScaleSetReader is a Reader for the CreateOrgScaleSet structure. +type CreateOrgScaleSetReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *CreateOrgScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewCreateOrgScaleSetOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewCreateOrgScaleSetDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewCreateOrgScaleSetOK creates a CreateOrgScaleSetOK with default headers values +func NewCreateOrgScaleSetOK() *CreateOrgScaleSetOK { + return &CreateOrgScaleSetOK{} +} + +/* +CreateOrgScaleSetOK describes a response with status code 200, with default header values. 
+ +ScaleSet +*/ +type CreateOrgScaleSetOK struct { + Payload garm_params.ScaleSet +} + +// IsSuccess returns true when this create org scale set o k response has a 2xx status code +func (o *CreateOrgScaleSetOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this create org scale set o k response has a 3xx status code +func (o *CreateOrgScaleSetOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this create org scale set o k response has a 4xx status code +func (o *CreateOrgScaleSetOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this create org scale set o k response has a 5xx status code +func (o *CreateOrgScaleSetOK) IsServerError() bool { + return false +} + +// IsCode returns true when this create org scale set o k response a status code equal to that given +func (o *CreateOrgScaleSetOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the create org scale set o k response +func (o *CreateOrgScaleSetOK) Code() int { + return 200 +} + +func (o *CreateOrgScaleSetOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /organizations/{orgID}/scalesets][%d] createOrgScaleSetOK %s", 200, payload) +} + +func (o *CreateOrgScaleSetOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /organizations/{orgID}/scalesets][%d] createOrgScaleSetOK %s", 200, payload) +} + +func (o *CreateOrgScaleSetOK) GetPayload() garm_params.ScaleSet { + return o.Payload +} + +func (o *CreateOrgScaleSetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewCreateOrgScaleSetDefault creates a CreateOrgScaleSetDefault with default headers values +func NewCreateOrgScaleSetDefault(code int) *CreateOrgScaleSetDefault { + return &CreateOrgScaleSetDefault{ + _statusCode: code, + } +} + +/* +CreateOrgScaleSetDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type CreateOrgScaleSetDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this create org scale set default response has a 2xx status code +func (o *CreateOrgScaleSetDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this create org scale set default response has a 3xx status code +func (o *CreateOrgScaleSetDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this create org scale set default response has a 4xx status code +func (o *CreateOrgScaleSetDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this create org scale set default response has a 5xx status code +func (o *CreateOrgScaleSetDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this create org scale set default response a status code equal to that given +func (o *CreateOrgScaleSetDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the create org scale set default response +func (o *CreateOrgScaleSetDefault) Code() int { + return o._statusCode +} + +func (o *CreateOrgScaleSetDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /organizations/{orgID}/scalesets][%d] CreateOrgScaleSet default %s", o._statusCode, payload) +} + +func (o *CreateOrgScaleSetDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /organizations/{orgID}/scalesets][%d] CreateOrgScaleSet default %s", o._statusCode, payload) +} + +func (o *CreateOrgScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *CreateOrgScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/organizations/list_org_scale_sets_parameters.go b/client/organizations/list_org_scale_sets_parameters.go new file mode 100644 index 00000000..711ec788 --- /dev/null +++ b/client/organizations/list_org_scale_sets_parameters.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package organizations + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewListOrgScaleSetsParams creates a new ListOrgScaleSetsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewListOrgScaleSetsParams() *ListOrgScaleSetsParams { + return &ListOrgScaleSetsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewListOrgScaleSetsParamsWithTimeout creates a new ListOrgScaleSetsParams object +// with the ability to set a timeout on a request. 
+func NewListOrgScaleSetsParamsWithTimeout(timeout time.Duration) *ListOrgScaleSetsParams { + return &ListOrgScaleSetsParams{ + timeout: timeout, + } +} + +// NewListOrgScaleSetsParamsWithContext creates a new ListOrgScaleSetsParams object +// with the ability to set a context for a request. +func NewListOrgScaleSetsParamsWithContext(ctx context.Context) *ListOrgScaleSetsParams { + return &ListOrgScaleSetsParams{ + Context: ctx, + } +} + +// NewListOrgScaleSetsParamsWithHTTPClient creates a new ListOrgScaleSetsParams object +// with the ability to set a custom HTTPClient for a request. +func NewListOrgScaleSetsParamsWithHTTPClient(client *http.Client) *ListOrgScaleSetsParams { + return &ListOrgScaleSetsParams{ + HTTPClient: client, + } +} + +/* +ListOrgScaleSetsParams contains all the parameters to send to the API endpoint + + for the list org scale sets operation. + + Typically these are written to a http.Request. +*/ +type ListOrgScaleSetsParams struct { + + /* OrgID. + + Organization ID. + */ + OrgID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the list org scale sets params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListOrgScaleSetsParams) WithDefaults() *ListOrgScaleSetsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the list org scale sets params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListOrgScaleSetsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the list org scale sets params +func (o *ListOrgScaleSetsParams) WithTimeout(timeout time.Duration) *ListOrgScaleSetsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the list org scale sets params +func (o *ListOrgScaleSetsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the list org scale sets params +func (o *ListOrgScaleSetsParams) WithContext(ctx context.Context) *ListOrgScaleSetsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the list org scale sets params +func (o *ListOrgScaleSetsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the list org scale sets params +func (o *ListOrgScaleSetsParams) WithHTTPClient(client *http.Client) *ListOrgScaleSetsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the list org scale sets params +func (o *ListOrgScaleSetsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithOrgID adds the orgID to the list org scale sets params +func (o *ListOrgScaleSetsParams) WithOrgID(orgID string) *ListOrgScaleSetsParams { + o.SetOrgID(orgID) + return o +} + +// SetOrgID adds the orgId to the list org scale sets params +func (o *ListOrgScaleSetsParams) SetOrgID(orgID string) { + o.OrgID = orgID +} + +// WriteToRequest writes these params to a swagger request +func (o *ListOrgScaleSetsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param orgID + if err := r.SetPathParam("orgID", o.OrgID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} diff --git a/client/organizations/list_org_scale_sets_responses.go b/client/organizations/list_org_scale_sets_responses.go new file mode 100644 index 00000000..0b470fa1 --- /dev/null +++ b/client/organizations/list_org_scale_sets_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package organizations + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// ListOrgScaleSetsReader is a Reader for the ListOrgScaleSets structure. +type ListOrgScaleSetsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *ListOrgScaleSetsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewListOrgScaleSetsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewListOrgScaleSetsDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewListOrgScaleSetsOK creates a ListOrgScaleSetsOK with default headers values +func NewListOrgScaleSetsOK() *ListOrgScaleSetsOK { + return &ListOrgScaleSetsOK{} +} + +/* +ListOrgScaleSetsOK describes a response with status code 200, with default header values. + +ScaleSets +*/ +type ListOrgScaleSetsOK struct { + Payload garm_params.ScaleSets +} + +// IsSuccess returns true when this list org scale sets o k response has a 2xx status code +func (o *ListOrgScaleSetsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this list org scale sets o k response has a 3xx status code +func (o *ListOrgScaleSetsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this list org scale sets o k response has a 4xx status code +func (o *ListOrgScaleSetsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this list org scale sets o k response has a 5xx status code +func (o *ListOrgScaleSetsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this list org scale sets o k response a status code equal to that given +func (o *ListOrgScaleSetsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the list org scale sets o k response +func (o *ListOrgScaleSetsOK) Code() int { + return 200 +} + +func (o *ListOrgScaleSetsOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /organizations/{orgID}/scalesets][%d] listOrgScaleSetsOK %s", 200, payload) +} + +func (o *ListOrgScaleSetsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /organizations/{orgID}/scalesets][%d] listOrgScaleSetsOK %s", 200, payload) +} + +func (o *ListOrgScaleSetsOK) GetPayload() garm_params.ScaleSets { + return o.Payload +} + +func (o *ListOrgScaleSetsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err 
!= io.EOF { + return err + } + + return nil +} + +// NewListOrgScaleSetsDefault creates a ListOrgScaleSetsDefault with default headers values +func NewListOrgScaleSetsDefault(code int) *ListOrgScaleSetsDefault { + return &ListOrgScaleSetsDefault{ + _statusCode: code, + } +} + +/* +ListOrgScaleSetsDefault describes a response with status code -1, with default header values. + +APIErrorResponse +*/ +type ListOrgScaleSetsDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this list org scale sets default response has a 2xx status code +func (o *ListOrgScaleSetsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this list org scale sets default response has a 3xx status code +func (o *ListOrgScaleSetsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this list org scale sets default response has a 4xx status code +func (o *ListOrgScaleSetsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this list org scale sets default response has a 5xx status code +func (o *ListOrgScaleSetsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this list org scale sets default response a status code equal to that given +func (o *ListOrgScaleSetsDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the list org scale sets default response +func (o *ListOrgScaleSetsDefault) Code() int { + return o._statusCode +} + +func (o *ListOrgScaleSetsDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /organizations/{orgID}/scalesets][%d] ListOrgScaleSets default %s", o._statusCode, payload) +} + +func (o *ListOrgScaleSetsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /organizations/{orgID}/scalesets][%d] ListOrgScaleSets default %s", o._statusCode, payload) +} + +func (o *ListOrgScaleSetsDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *ListOrgScaleSetsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/organizations/organizations_client.go b/client/organizations/organizations_client.go index 5ce5d647..cd3e1211 100644 --- a/client/organizations/organizations_client.go +++ b/client/organizations/organizations_client.go @@ -58,6 +58,8 @@ type ClientService interface { CreateOrgPool(params *CreateOrgPoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateOrgPoolOK, error) + CreateOrgScaleSet(params *CreateOrgScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateOrgScaleSetOK, error) + DeleteOrg(params *DeleteOrgParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error DeleteOrgPool(params *DeleteOrgPoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error @@ -74,6 +76,8 @@ type ClientService interface { ListOrgPools(params *ListOrgPoolsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListOrgPoolsOK, error) + ListOrgScaleSets(params *ListOrgScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListOrgScaleSetsOK, error) + 
ListOrgs(params *ListOrgsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListOrgsOK, error) UninstallOrgWebhook(params *UninstallOrgWebhookParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error @@ -161,6 +165,44 @@ func (a *Client) CreateOrgPool(params *CreateOrgPoolParams, authInfo runtime.Cli return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } +/* +CreateOrgScaleSet creates organization scale set with the parameters given +*/ +func (a *Client) CreateOrgScaleSet(params *CreateOrgScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateOrgScaleSetOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewCreateOrgScaleSetParams() + } + op := &runtime.ClientOperation{ + ID: "CreateOrgScaleSet", + Method: "POST", + PathPattern: "/organizations/{orgID}/scalesets", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &CreateOrgScaleSetReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*CreateOrgScaleSetOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*CreateOrgScaleSetDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + /* DeleteOrg deletes organization by ID */ @@ -455,6 +497,44 @@ func (a *Client) ListOrgPools(params *ListOrgPoolsParams, authInfo runtime.Clien return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } +/* +ListOrgScaleSets lists organization scale sets +*/ +func (a *Client) ListOrgScaleSets(params *ListOrgScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListOrgScaleSetsOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewListOrgScaleSetsParams() + } + op := &runtime.ClientOperation{ + ID: "ListOrgScaleSets", + Method: "GET", + PathPattern: "/organizations/{orgID}/scalesets", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &ListOrgScaleSetsReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*ListOrgScaleSetsOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ListOrgScaleSetsDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + /* ListOrgs lists organizations */ diff --git a/client/repositories/create_repo_scale_set_parameters.go b/client/repositories/create_repo_scale_set_parameters.go new file mode 100644 index 00000000..9b8784dc --- /dev/null +++ b/client/repositories/create_repo_scale_set_parameters.go @@ -0,0 +1,173 @@ +// Code 
generated by go-swagger; DO NOT EDIT. + +package repositories + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + garm_params "github.com/cloudbase/garm/params" +) + +// NewCreateRepoScaleSetParams creates a new CreateRepoScaleSetParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewCreateRepoScaleSetParams() *CreateRepoScaleSetParams { + return &CreateRepoScaleSetParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewCreateRepoScaleSetParamsWithTimeout creates a new CreateRepoScaleSetParams object +// with the ability to set a timeout on a request. +func NewCreateRepoScaleSetParamsWithTimeout(timeout time.Duration) *CreateRepoScaleSetParams { + return &CreateRepoScaleSetParams{ + timeout: timeout, + } +} + +// NewCreateRepoScaleSetParamsWithContext creates a new CreateRepoScaleSetParams object +// with the ability to set a context for a request. +func NewCreateRepoScaleSetParamsWithContext(ctx context.Context) *CreateRepoScaleSetParams { + return &CreateRepoScaleSetParams{ + Context: ctx, + } +} + +// NewCreateRepoScaleSetParamsWithHTTPClient creates a new CreateRepoScaleSetParams object +// with the ability to set a custom HTTPClient for a request. +func NewCreateRepoScaleSetParamsWithHTTPClient(client *http.Client) *CreateRepoScaleSetParams { + return &CreateRepoScaleSetParams{ + HTTPClient: client, + } +} + +/* +CreateRepoScaleSetParams contains all the parameters to send to the API endpoint + + for the create repo scale set operation. + + Typically these are written to a http.Request. +*/ +type CreateRepoScaleSetParams struct { + + /* Body. + + Parameters used when creating the repository scale set. + */ + Body garm_params.CreateScaleSetParams + + /* RepoID. + + Repository ID. + */ + RepoID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the create repo scale set params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateRepoScaleSetParams) WithDefaults() *CreateRepoScaleSetParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the create repo scale set params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *CreateRepoScaleSetParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the create repo scale set params +func (o *CreateRepoScaleSetParams) WithTimeout(timeout time.Duration) *CreateRepoScaleSetParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the create repo scale set params +func (o *CreateRepoScaleSetParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the create repo scale set params +func (o *CreateRepoScaleSetParams) WithContext(ctx context.Context) *CreateRepoScaleSetParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the create repo scale set params +func (o *CreateRepoScaleSetParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the create repo scale set params +func (o *CreateRepoScaleSetParams) WithHTTPClient(client *http.Client) *CreateRepoScaleSetParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the create repo scale set params +func (o *CreateRepoScaleSetParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the create repo scale set params +func (o *CreateRepoScaleSetParams) WithBody(body garm_params.CreateScaleSetParams) *CreateRepoScaleSetParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the create repo scale set params +func (o *CreateRepoScaleSetParams) SetBody(body garm_params.CreateScaleSetParams) { + o.Body = body +} + +// WithRepoID adds the repoID to the create repo scale set params +func (o *CreateRepoScaleSetParams) WithRepoID(repoID string) *CreateRepoScaleSetParams { + o.SetRepoID(repoID) + return o +} + +// SetRepoID adds the repoId to the create repo scale set params +func (o *CreateRepoScaleSetParams) SetRepoID(repoID string) { + o.RepoID = repoID +} + +// WriteToRequest writes these params to a swagger request +func (o *CreateRepoScaleSetParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + + // path param repoID + if err := r.SetPathParam("repoID", o.RepoID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/repositories/create_repo_scale_set_responses.go b/client/repositories/create_repo_scale_set_responses.go new file mode 100644 index 00000000..4d02d5c1 --- /dev/null +++ b/client/repositories/create_repo_scale_set_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package repositories + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// CreateRepoScaleSetReader is a Reader for the CreateRepoScaleSet structure. +type CreateRepoScaleSetReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *CreateRepoScaleSetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewCreateRepoScaleSetOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewCreateRepoScaleSetDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewCreateRepoScaleSetOK creates a CreateRepoScaleSetOK with default headers values +func NewCreateRepoScaleSetOK() *CreateRepoScaleSetOK { + return &CreateRepoScaleSetOK{} +} + +/* +CreateRepoScaleSetOK describes a response with status code 200, with default header values. + +ScaleSet +*/ +type CreateRepoScaleSetOK struct { + Payload garm_params.ScaleSet +} + +// IsSuccess returns true when this create repo scale set o k response has a 2xx status code +func (o *CreateRepoScaleSetOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this create repo scale set o k response has a 3xx status code +func (o *CreateRepoScaleSetOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this create repo scale set o k response has a 4xx status code +func (o *CreateRepoScaleSetOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this create repo scale set o k response has a 5xx status code +func (o *CreateRepoScaleSetOK) IsServerError() bool { + return false +} + +// IsCode returns true when this create repo scale set o k response a status code equal to that given +func (o *CreateRepoScaleSetOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the create repo scale set o k response +func (o *CreateRepoScaleSetOK) Code() int { + return 200 +} + +func (o *CreateRepoScaleSetOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /repositories/{repoID}/scalesets][%d] createRepoScaleSetOK %s", 200, payload) +} + +func (o *CreateRepoScaleSetOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /repositories/{repoID}/scalesets][%d] createRepoScaleSetOK %s", 200, payload) +} + +func (o *CreateRepoScaleSetOK) GetPayload() garm_params.ScaleSet { + return o.Payload +} + +func (o *CreateRepoScaleSetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewCreateRepoScaleSetDefault creates a CreateRepoScaleSetDefault with default headers values +func NewCreateRepoScaleSetDefault(code int) *CreateRepoScaleSetDefault { + return &CreateRepoScaleSetDefault{ + _statusCode: code, + } +} + +/* +CreateRepoScaleSetDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type CreateRepoScaleSetDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this create repo scale set default response has a 2xx status code +func (o *CreateRepoScaleSetDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this create repo scale set default response has a 3xx status code +func (o *CreateRepoScaleSetDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this create repo scale set default response has a 4xx status code +func (o *CreateRepoScaleSetDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this create repo scale set default response has a 5xx status code +func (o *CreateRepoScaleSetDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this create repo scale set default response a status code equal to that given +func (o *CreateRepoScaleSetDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the create repo scale set default response +func (o *CreateRepoScaleSetDefault) Code() int { + return o._statusCode +} + +func (o *CreateRepoScaleSetDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /repositories/{repoID}/scalesets][%d] CreateRepoScaleSet default %s", o._statusCode, payload) +} + +func (o *CreateRepoScaleSetDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /repositories/{repoID}/scalesets][%d] CreateRepoScaleSet default %s", o._statusCode, payload) +} + +func (o *CreateRepoScaleSetDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *CreateRepoScaleSetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/repositories/list_repo_scale_sets_parameters.go b/client/repositories/list_repo_scale_sets_parameters.go new file mode 100644 index 00000000..2582c498 --- /dev/null +++ b/client/repositories/list_repo_scale_sets_parameters.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package repositories + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewListRepoScaleSetsParams creates a new ListRepoScaleSetsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewListRepoScaleSetsParams() *ListRepoScaleSetsParams { + return &ListRepoScaleSetsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewListRepoScaleSetsParamsWithTimeout creates a new ListRepoScaleSetsParams object +// with the ability to set a timeout on a request. 
+func NewListRepoScaleSetsParamsWithTimeout(timeout time.Duration) *ListRepoScaleSetsParams { + return &ListRepoScaleSetsParams{ + timeout: timeout, + } +} + +// NewListRepoScaleSetsParamsWithContext creates a new ListRepoScaleSetsParams object +// with the ability to set a context for a request. +func NewListRepoScaleSetsParamsWithContext(ctx context.Context) *ListRepoScaleSetsParams { + return &ListRepoScaleSetsParams{ + Context: ctx, + } +} + +// NewListRepoScaleSetsParamsWithHTTPClient creates a new ListRepoScaleSetsParams object +// with the ability to set a custom HTTPClient for a request. +func NewListRepoScaleSetsParamsWithHTTPClient(client *http.Client) *ListRepoScaleSetsParams { + return &ListRepoScaleSetsParams{ + HTTPClient: client, + } +} + +/* +ListRepoScaleSetsParams contains all the parameters to send to the API endpoint + + for the list repo scale sets operation. + + Typically these are written to a http.Request. +*/ +type ListRepoScaleSetsParams struct { + + /* RepoID. + + Repository ID. + */ + RepoID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the list repo scale sets params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListRepoScaleSetsParams) WithDefaults() *ListRepoScaleSetsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the list repo scale sets params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListRepoScaleSetsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the list repo scale sets params +func (o *ListRepoScaleSetsParams) WithTimeout(timeout time.Duration) *ListRepoScaleSetsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the list repo scale sets params +func (o *ListRepoScaleSetsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the list repo scale sets params +func (o *ListRepoScaleSetsParams) WithContext(ctx context.Context) *ListRepoScaleSetsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the list repo scale sets params +func (o *ListRepoScaleSetsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the list repo scale sets params +func (o *ListRepoScaleSetsParams) WithHTTPClient(client *http.Client) *ListRepoScaleSetsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the list repo scale sets params +func (o *ListRepoScaleSetsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithRepoID adds the repoID to the list repo scale sets params +func (o *ListRepoScaleSetsParams) WithRepoID(repoID string) *ListRepoScaleSetsParams { + o.SetRepoID(repoID) + return o +} + +// SetRepoID adds the repoId to the list repo scale sets params +func (o *ListRepoScaleSetsParams) SetRepoID(repoID string) { + o.RepoID = repoID +} + +// WriteToRequest writes these params to a swagger request +func (o *ListRepoScaleSetsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param repoID + if err := r.SetPathParam("repoID", o.RepoID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} diff --git a/client/repositories/list_repo_scale_sets_responses.go b/client/repositories/list_repo_scale_sets_responses.go new file mode 100644 index 00000000..4e2d98a2 --- /dev/null +++ b/client/repositories/list_repo_scale_sets_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package repositories + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// ListRepoScaleSetsReader is a Reader for the ListRepoScaleSets structure. +type ListRepoScaleSetsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *ListRepoScaleSetsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewListRepoScaleSetsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewListRepoScaleSetsDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewListRepoScaleSetsOK creates a ListRepoScaleSetsOK with default headers values +func NewListRepoScaleSetsOK() *ListRepoScaleSetsOK { + return &ListRepoScaleSetsOK{} +} + +/* +ListRepoScaleSetsOK describes a response with status code 200, with default header values. 
+ +ScaleSets +*/ +type ListRepoScaleSetsOK struct { + Payload garm_params.ScaleSets +} + +// IsSuccess returns true when this list repo scale sets o k response has a 2xx status code +func (o *ListRepoScaleSetsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this list repo scale sets o k response has a 3xx status code +func (o *ListRepoScaleSetsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this list repo scale sets o k response has a 4xx status code +func (o *ListRepoScaleSetsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this list repo scale sets o k response has a 5xx status code +func (o *ListRepoScaleSetsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this list repo scale sets o k response a status code equal to that given +func (o *ListRepoScaleSetsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the list repo scale sets o k response +func (o *ListRepoScaleSetsOK) Code() int { + return 200 +} + +func (o *ListRepoScaleSetsOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /repositories/{repoID}/scalesets][%d] listRepoScaleSetsOK %s", 200, payload) +} + +func (o *ListRepoScaleSetsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /repositories/{repoID}/scalesets][%d] listRepoScaleSetsOK %s", 200, payload) +} + +func (o *ListRepoScaleSetsOK) GetPayload() garm_params.ScaleSets { + return o.Payload +} + +func (o *ListRepoScaleSetsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewListRepoScaleSetsDefault creates a ListRepoScaleSetsDefault with default headers values +func NewListRepoScaleSetsDefault(code int) *ListRepoScaleSetsDefault { + return &ListRepoScaleSetsDefault{ + _statusCode: code, + } +} + +/* +ListRepoScaleSetsDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type ListRepoScaleSetsDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this list repo scale sets default response has a 2xx status code +func (o *ListRepoScaleSetsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this list repo scale sets default response has a 3xx status code +func (o *ListRepoScaleSetsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this list repo scale sets default response has a 4xx status code +func (o *ListRepoScaleSetsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this list repo scale sets default response has a 5xx status code +func (o *ListRepoScaleSetsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this list repo scale sets default response a status code equal to that given +func (o *ListRepoScaleSetsDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the list repo scale sets default response +func (o *ListRepoScaleSetsDefault) Code() int { + return o._statusCode +} + +func (o *ListRepoScaleSetsDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /repositories/{repoID}/scalesets][%d] ListRepoScaleSets default %s", o._statusCode, payload) +} + +func (o *ListRepoScaleSetsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /repositories/{repoID}/scalesets][%d] ListRepoScaleSets default %s", o._statusCode, payload) +} + +func (o *ListRepoScaleSetsDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *ListRepoScaleSetsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/repositories/repositories_client.go b/client/repositories/repositories_client.go index 56a0a684..017bf0f8 100644 --- a/client/repositories/repositories_client.go +++ b/client/repositories/repositories_client.go @@ -58,6 +58,8 @@ type ClientService interface { CreateRepoPool(params *CreateRepoPoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateRepoPoolOK, error) + CreateRepoScaleSet(params *CreateRepoScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateRepoScaleSetOK, error) + DeleteRepo(params *DeleteRepoParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error DeleteRepoPool(params *DeleteRepoPoolParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error @@ -74,6 +76,8 @@ type ClientService interface { ListRepoPools(params *ListRepoPoolsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListRepoPoolsOK, error) + ListRepoScaleSets(params *ListRepoScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListRepoScaleSetsOK, error) + ListRepos(params *ListReposParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListReposOK, error) UninstallRepoWebhook(params *UninstallRepoWebhookParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error @@ -161,6 +165,44 @@ func (a *Client) CreateRepoPool(params *CreateRepoPoolParams, authInfo runtime.C return 
nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } +/* +CreateRepoScaleSet creates repository scale set with the parameters given +*/ +func (a *Client) CreateRepoScaleSet(params *CreateRepoScaleSetParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateRepoScaleSetOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewCreateRepoScaleSetParams() + } + op := &runtime.ClientOperation{ + ID: "CreateRepoScaleSet", + Method: "POST", + PathPattern: "/repositories/{repoID}/scalesets", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &CreateRepoScaleSetReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*CreateRepoScaleSetOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*CreateRepoScaleSetDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + /* DeleteRepo deletes repository by ID */ @@ -455,6 +497,44 @@ func (a *Client) ListRepoPools(params *ListRepoPoolsParams, authInfo runtime.Cli return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } +/* +ListRepoScaleSets lists repository scale sets +*/ +func (a *Client) ListRepoScaleSets(params *ListRepoScaleSetsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListRepoScaleSetsOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewListRepoScaleSetsParams() + } + op := &runtime.ClientOperation{ + ID: "ListRepoScaleSets", + Method: "GET", + PathPattern: "/repositories/{repoID}/scalesets", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &ListRepoScaleSetsReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*ListRepoScaleSetsOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ListRepoScaleSetsDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + /* ListRepos lists repositories */ diff --git a/cmd/garm-cli/cmd/scalesets.go b/cmd/garm-cli/cmd/scalesets.go new file mode 100644 index 00000000..04c537ee --- /dev/null +++ b/cmd/garm-cli/cmd/scalesets.go @@ -0,0 +1,518 @@ +// Copyright 2022 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package cmd + +import ( + "fmt" + "os" + + "github.com/jedib0t/go-pretty/v6/table" + "github.com/spf13/cobra" + + commonParams "github.com/cloudbase/garm-provider-common/params" + apiClientEnterprises "github.com/cloudbase/garm/client/enterprises" + apiClientOrgs "github.com/cloudbase/garm/client/organizations" + apiClientRepos "github.com/cloudbase/garm/client/repositories" + apiClientScaleSets "github.com/cloudbase/garm/client/scalesets" + "github.com/cloudbase/garm/cmd/garm-cli/common" + "github.com/cloudbase/garm/params" +) + +var ( + scalesetProvider string + scalesetMaxRunners uint + scalesetMinIdleRunners uint + scalesetRunnerPrefix string + scalesetName string + scalesetImage string + scalesetFlavor string + scalesetOSType string + scalesetOSArch string + scalesetEnabled bool + scalesetRunnerBootstrapTimeout uint + scalesetRepository string + scalesetOrganization string + scalesetEnterprise string + scalesetExtraSpecsFile string + scalesetExtraSpecs string + scalesetAll bool + scalesetGitHubRunnerGroup string +) + +type scalesetPayloadGetter interface { + GetPayload() params.ScaleSet +} + +type scalesetsPayloadGetter interface { + GetPayload() params.ScaleSets +} + +// scalesetCmd represents the scale set command +var scalesetCmd = &cobra.Command{ + Use: "scaleset", + SilenceUsage: true, + Short: "List scale sets", + Long: `Query information or perform operations on scale sets.`, + Run: nil, +} + +var scalesetListCmd = &cobra.Command{ + Use: "list", + Aliases: []string{"ls"}, + Short: "List scale sets", + Long: `List scale sets of repositories, orgs or all of the above. + +This command will list scale sets from one repo, one org or all scale sets +on the system. The list flags are mutually exclusive. You must however +specify one of them. 
+ +Example: + + List scalesets from one repo: + garm-cli scaleset list --repo=05e7eac6-4705-486d-89c9-0170bbb576af + + List scalesets from one org: + garm-cli scaleset list --org=5493e51f-3170-4ce3-9f05-3fe690fc6ec6 + + List scalesets from one enterprise: + garm-cli scaleset list --enterprise=a8ee4c66-e762-4cbe-a35d-175dba2c9e62 + + List all scalesets from all repos, orgs and enterprises: + garm-cli scaleset list --all + +`, + SilenceUsage: true, + RunE: func(cmd *cobra.Command, args []string) error { + if needsInit { + return errNeedsInitError + } + + var response scalesetsPayloadGetter + var err error + + switch len(args) { + case 0: + if cmd.Flags().Changed("repo") { + listRepoScaleSetsReq := apiClientRepos.NewListRepoScaleSetsParams() + listRepoScaleSetsReq.RepoID = scalesetRepository + response, err = apiCli.Repositories.ListRepoScaleSets(listRepoScaleSetsReq, authToken) + } else if cmd.Flags().Changed("org") { + listOrgScaleSetsReq := apiClientOrgs.NewListOrgScaleSetsParams() + listOrgScaleSetsReq.OrgID = scalesetOrganization + response, err = apiCli.Organizations.ListOrgScaleSets(listOrgScaleSetsReq, authToken) + } else if cmd.Flags().Changed("enterprise") { + listEnterpriseScaleSetsReq := apiClientEnterprises.NewListEnterpriseScaleSetsParams() + listEnterpriseScaleSetsReq.EnterpriseID = scalesetEnterprise + response, err = apiCli.Enterprises.ListEnterpriseScaleSets(listEnterpriseScaleSetsReq, authToken) + } else if cmd.Flags().Changed("all") { + listScaleSetsReq := apiClientScaleSets.NewListScalesetsParams() + response, err = apiCli.Scalesets.ListScalesets(listScaleSetsReq, authToken) + } else { + cmd.Help() //nolint + os.Exit(0) + } + default: + cmd.Help() //nolint + os.Exit(0) + } + + if err != nil { + return err + } + formatScaleSets(response.GetPayload()) + return nil + }, +} + +var scaleSetShowCmd = &cobra.Command{ + Use: "show", + Short: "Show details for a scale set", + Long: `Displays a detailed view of a single scale set.`, + SilenceUsage: true, + RunE: func(_ *cobra.Command, args []string) error { + if needsInit { + return errNeedsInitError + } + + if len(args) == 0 { + return fmt.Errorf("requires a scale set ID") + } + + if len(args) > 1 { + return fmt.Errorf("too many arguments") + } + + getScaleSetReq := apiClientScaleSets.NewGetScaleSetParams() + getScaleSetReq.ScalesetID = args[0] + response, err := apiCli.Scalesets.GetScaleSet(getScaleSetReq, authToken) + if err != nil { + return err + } + formatOneScaleSet(response.Payload) + return nil + }, +} + +var scaleSetDeleteCmd = &cobra.Command{ + Use: "delete", + Aliases: []string{"remove", "rm", "del"}, + Short: "Delete scale set by ID", + Long: `Delete one scale set by referencing it's ID, regardless of repo or org.`, + SilenceUsage: true, + RunE: func(_ *cobra.Command, args []string) error { + if needsInit { + return errNeedsInitError + } + + if len(args) == 0 { + return fmt.Errorf("requires a scale set ID") + } + + if len(args) > 1 { + return fmt.Errorf("too many arguments") + } + + deleteScaleSetReq := apiClientScaleSets.NewDeleteScaleSetParams() + deleteScaleSetReq.ScalesetID = args[0] + if err := apiCli.Scalesets.DeleteScaleSet(deleteScaleSetReq, authToken); err != nil { + return err + } + return nil + }, +} + +var scaleSetAddCmd = &cobra.Command{ + Use: "add", + Aliases: []string{"create"}, + Short: "Add scale set", + Long: `Add a new scale set.`, + SilenceUsage: true, + RunE: func(cmd *cobra.Command, _ []string) error { + if needsInit { + return errNeedsInitError + } + + newScaleSetParams := 
params.CreateScaleSetParams{ + RunnerPrefix: params.RunnerPrefix{ + Prefix: scalesetRunnerPrefix, + }, + ProviderName: scalesetProvider, + Name: scalesetName, + MaxRunners: scalesetMaxRunners, + MinIdleRunners: scalesetMinIdleRunners, + Image: scalesetImage, + Flavor: scalesetFlavor, + OSType: commonParams.OSType(scalesetOSType), + OSArch: commonParams.OSArch(scalesetOSArch), + Enabled: scalesetEnabled, + RunnerBootstrapTimeout: scalesetRunnerBootstrapTimeout, + GitHubRunnerGroup: scalesetGitHubRunnerGroup, + } + + if cmd.Flags().Changed("extra-specs") { + data, err := asRawMessage([]byte(scalesetExtraSpecs)) + if err != nil { + return err + } + newScaleSetParams.ExtraSpecs = data + } + + if scalesetExtraSpecsFile != "" { + data, err := extraSpecsFromFile(scalesetExtraSpecsFile) + if err != nil { + return err + } + newScaleSetParams.ExtraSpecs = data + } + + if err := newScaleSetParams.Validate(); err != nil { + return err + } + + var err error + var response scalesetPayloadGetter + if cmd.Flags().Changed("repo") { + newRepoScaleSetReq := apiClientRepos.NewCreateRepoScaleSetParams() + newRepoScaleSetReq.RepoID = scalesetRepository + newRepoScaleSetReq.Body = newScaleSetParams + response, err = apiCli.Repositories.CreateRepoScaleSet(newRepoScaleSetReq, authToken) + } else if cmd.Flags().Changed("org") { + newOrgScaleSetReq := apiClientOrgs.NewCreateOrgScaleSetParams() + newOrgScaleSetReq.OrgID = scalesetOrganization + newOrgScaleSetReq.Body = newScaleSetParams + response, err = apiCli.Organizations.CreateOrgScaleSet(newOrgScaleSetReq, authToken) + } else if cmd.Flags().Changed("enterprise") { + newEnterpriseScaleSetReq := apiClientEnterprises.NewCreateEnterpriseScaleSetParams() + newEnterpriseScaleSetReq.EnterpriseID = scalesetEnterprise + newEnterpriseScaleSetReq.Body = newScaleSetParams + response, err = apiCli.Enterprises.CreateEnterpriseScaleSet(newEnterpriseScaleSetReq, authToken) + } else { + cmd.Help() //nolint + os.Exit(0) + } + + if err != nil { + return err + } + + formatOneScaleSet(response.GetPayload()) + return nil + }, +} + +var scaleSetUpdateCmd = &cobra.Command{ + Use: "update", + Short: "Update one scale set", + Long: `Updates scale set characteristics. + +This command updates the scale set characteristics. Runners already created prior to updating +the scale set, will not be recreated. If they no longer suit your needs, you will need to +explicitly remove them using the runner delete command. 
+ `, + SilenceUsage: true, + RunE: func(cmd *cobra.Command, args []string) error { + if needsInit { + return errNeedsInitError + } + + if len(args) == 0 { + return fmt.Errorf("command requires a scale set ID") + } + + if len(args) > 1 { + return fmt.Errorf("too many arguments") + } + + updateScaleSetReq := apiClientScaleSets.NewUpdateScaleSetParams() + scaleSetUpdateParams := params.UpdateScaleSetParams{} + + if cmd.Flags().Changed("image") { + scaleSetUpdateParams.Image = scalesetImage + } + + if cmd.Flags().Changed("name") { + scaleSetUpdateParams.Name = scalesetName + } + + if cmd.Flags().Changed("flavor") { + scaleSetUpdateParams.Flavor = scalesetFlavor + } + + if cmd.Flags().Changed("os-type") { + scaleSetUpdateParams.OSType = commonParams.OSType(scalesetOSType) + } + + if cmd.Flags().Changed("os-arch") { + scaleSetUpdateParams.OSArch = commonParams.OSArch(scalesetOSArch) + } + + if cmd.Flags().Changed("max-runners") { + scaleSetUpdateParams.MaxRunners = &scalesetMaxRunners + } + + if cmd.Flags().Changed("min-idle-runners") { + scaleSetUpdateParams.MinIdleRunners = &scalesetMinIdleRunners + } + + if cmd.Flags().Changed("runner-prefix") { + scaleSetUpdateParams.RunnerPrefix = params.RunnerPrefix{ + Prefix: scalesetRunnerPrefix, + } + } + + if cmd.Flags().Changed("runner-group") { + scaleSetUpdateParams.GitHubRunnerGroup = &scalesetGitHubRunnerGroup + } + + if cmd.Flags().Changed("enabled") { + scaleSetUpdateParams.Enabled = &scalesetEnabled + } + + if cmd.Flags().Changed("runner-bootstrap-timeout") { + scaleSetUpdateParams.RunnerBootstrapTimeout = &scalesetRunnerBootstrapTimeout + } + + if cmd.Flags().Changed("extra-specs") { + data, err := asRawMessage([]byte(scalesetExtraSpecs)) + if err != nil { + return err + } + scaleSetUpdateParams.ExtraSpecs = data + } + + if scalesetExtraSpecsFile != "" { + data, err := extraSpecsFromFile(scalesetExtraSpecsFile) + if err != nil { + return err + } + scaleSetUpdateParams.ExtraSpecs = data + } + + updateScaleSetReq.ScalesetID = args[0] + updateScaleSetReq.Body = scaleSetUpdateParams + response, err := apiCli.Scalesets.UpdateScaleSet(updateScaleSetReq, authToken) + if err != nil { + return err + } + + formatOneScaleSet(response.Payload) + return nil + }, +} + +func init() { + scalesetListCmd.Flags().StringVarP(&scalesetRepository, "repo", "r", "", "List all scale sets within this repository.") + scalesetListCmd.Flags().StringVarP(&scalesetOrganization, "org", "o", "", "List all scale sets within this organization.") + scalesetListCmd.Flags().StringVarP(&scalesetEnterprise, "enterprise", "e", "", "List all scale sets within this enterprise.") + scalesetListCmd.Flags().BoolVarP(&scalesetAll, "all", "a", false, "List all scale sets, regardless of org or repo.") + scalesetListCmd.MarkFlagsMutuallyExclusive("repo", "org", "all", "enterprise") + + scaleSetUpdateCmd.Flags().StringVar(&scalesetImage, "image", "", "The provider-specific image name to use for runners in this scale set.") + scaleSetUpdateCmd.Flags().StringVar(&scalesetFlavor, "flavor", "", "The flavor to use for the runners in this scale set.") + scaleSetUpdateCmd.Flags().StringVar(&scalesetName, "name", "", "The name of the scale set. 
This option is mandatory.")
+	scaleSetUpdateCmd.Flags().StringVar(&scalesetOSType, "os-type", "linux", "Operating system type (windows, linux, etc).")
+	scaleSetUpdateCmd.Flags().StringVar(&scalesetOSArch, "os-arch", "amd64", "Operating system architecture (amd64, arm, etc).")
+	scaleSetUpdateCmd.Flags().StringVar(&scalesetRunnerPrefix, "runner-prefix", "", "The name prefix to use for runners in this scale set.")
+	scaleSetUpdateCmd.Flags().UintVar(&scalesetMaxRunners, "max-runners", 5, "The maximum number of runners this scale set will create.")
+	scaleSetUpdateCmd.Flags().UintVar(&scalesetMinIdleRunners, "min-idle-runners", 1, "Attempt to maintain a minimum of idle self-hosted runners of this type.")
+	scaleSetUpdateCmd.Flags().StringVar(&scalesetGitHubRunnerGroup, "runner-group", "", "The GitHub runner group in which all runners of this scale set will be added.")
+	scaleSetUpdateCmd.Flags().BoolVar(&scalesetEnabled, "enabled", false, "Enable this scale set.")
+	scaleSetUpdateCmd.Flags().UintVar(&scalesetRunnerBootstrapTimeout, "runner-bootstrap-timeout", 20, "Duration in minutes after which a runner is considered failed if it does not join GitHub.")
+	scaleSetUpdateCmd.Flags().StringVar(&scalesetExtraSpecsFile, "extra-specs-file", "", "A file containing a valid json which will be passed to the IaaS provider managing the scale set.")
+	scaleSetUpdateCmd.Flags().StringVar(&scalesetExtraSpecs, "extra-specs", "", "A valid json which will be passed to the IaaS provider managing the scale set.")
+	scaleSetUpdateCmd.MarkFlagsMutuallyExclusive("extra-specs-file", "extra-specs")
+
+	scaleSetAddCmd.Flags().StringVar(&scalesetProvider, "provider-name", "", "The name of the provider where runners will be created.")
+	scaleSetAddCmd.Flags().StringVar(&scalesetImage, "image", "", "The provider-specific image name to use for runners in this scale set.")
+	scaleSetAddCmd.Flags().StringVar(&scalesetName, "name", "", "The name of the scale set. 
This option is mandatory.")
+	scaleSetAddCmd.Flags().StringVar(&scalesetFlavor, "flavor", "", "The flavor to use for the runners in this scale set.")
+	scaleSetAddCmd.Flags().StringVar(&scalesetRunnerPrefix, "runner-prefix", "", "The name prefix to use for runners in this scale set.")
+	scaleSetAddCmd.Flags().StringVar(&scalesetOSType, "os-type", "linux", "Operating system type (windows, linux, etc).")
+	scaleSetAddCmd.Flags().StringVar(&scalesetOSArch, "os-arch", "amd64", "Operating system architecture (amd64, arm, etc).")
+	scaleSetAddCmd.Flags().StringVar(&scalesetExtraSpecsFile, "extra-specs-file", "", "A file containing a valid json which will be passed to the IaaS provider managing the scale set.")
+	scaleSetAddCmd.Flags().StringVar(&scalesetExtraSpecs, "extra-specs", "", "A valid json which will be passed to the IaaS provider managing the scale set.")
+	scaleSetAddCmd.Flags().StringVar(&scalesetGitHubRunnerGroup, "runner-group", "", "The GitHub runner group in which all runners of this scale set will be added.")
+	scaleSetAddCmd.Flags().UintVar(&scalesetMaxRunners, "max-runners", 5, "The maximum number of runners this scale set will create.")
+	scaleSetAddCmd.Flags().UintVar(&scalesetRunnerBootstrapTimeout, "runner-bootstrap-timeout", 20, "Duration in minutes after which a runner is considered failed if it does not join GitHub.")
+	scaleSetAddCmd.Flags().UintVar(&scalesetMinIdleRunners, "min-idle-runners", 1, "Attempt to maintain a minimum of idle self-hosted runners of this type.")
+	scaleSetAddCmd.Flags().BoolVar(&scalesetEnabled, "enabled", false, "Enable this scale set.")
+	scaleSetAddCmd.MarkFlagRequired("provider-name") //nolint
+	scaleSetAddCmd.MarkFlagRequired("name")          //nolint
+	scaleSetAddCmd.MarkFlagRequired("image")         //nolint
+	scaleSetAddCmd.MarkFlagRequired("flavor")        //nolint
+
+	scaleSetAddCmd.Flags().StringVarP(&scalesetRepository, "repo", "r", "", "Add the new scale set within this repository.")
+	scaleSetAddCmd.Flags().StringVarP(&scalesetOrganization, "org", "o", "", "Add the new scale set within this organization.")
+	scaleSetAddCmd.Flags().StringVarP(&scalesetEnterprise, "enterprise", "e", "", "Add the new scale set within this enterprise.")
+	scaleSetAddCmd.MarkFlagsMutuallyExclusive("repo", "org", "enterprise")
+	scaleSetAddCmd.MarkFlagsMutuallyExclusive("extra-specs-file", "extra-specs")
+
+	scalesetCmd.AddCommand(
+		scalesetListCmd,
+		scaleSetShowCmd,
+		scaleSetDeleteCmd,
+		scaleSetUpdateCmd,
+		scaleSetAddCmd,
+	)
+
+	rootCmd.AddCommand(scalesetCmd)
+}
+
+func formatScaleSets(scaleSets []params.ScaleSet) {
+	if outputFormat == common.OutputFormatJSON {
+		printAsJSON(scaleSets)
+		return
+	}
+	t := table.NewWriter()
+	header := table.Row{"ID", "Scale Set Name", "Image", "Flavor", "Belongs to", "Level", "Enabled", "Runner Prefix", "Provider"}
+	t.AppendHeader(header)
+
+	for _, scaleSet := range scaleSets {
+		var belongsTo string
+		var level string
+
+		switch {
+		case scaleSet.RepoID != "" && scaleSet.RepoName != "":
+			belongsTo = scaleSet.RepoName
+			level = "repo"
+		case scaleSet.OrgID != "" && scaleSet.OrgName != "":
+			belongsTo = scaleSet.OrgName
+			level = "org"
+		case scaleSet.EnterpriseID != "" && scaleSet.EnterpriseName != "":
+			belongsTo = scaleSet.EnterpriseName
+			level = "enterprise"
+		}
+		t.AppendRow(table.Row{scaleSet.ID, scaleSet.Name, scaleSet.Image, scaleSet.Flavor, belongsTo, level, scaleSet.Enabled, scaleSet.GetRunnerPrefix(), scaleSet.ProviderName})
+		t.AppendSeparator()
+	}
+	fmt.Println(t.Render())
+}
+
+func formatOneScaleSet(scaleSet params.ScaleSet) {
+	if 
outputFormat == common.OutputFormatJSON { + printAsJSON(scaleSet) + return + } + t := table.NewWriter() + rowConfigAutoMerge := table.RowConfig{AutoMerge: true} + + header := table.Row{"Field", "Value"} + + var belongsTo string + var level string + + switch { + case scaleSet.RepoID != "" && scaleSet.RepoName != "": + belongsTo = scaleSet.RepoName + level = "repo" + case scaleSet.OrgID != "" && scaleSet.OrgName != "": + belongsTo = scaleSet.OrgName + level = "org" + case scaleSet.EnterpriseID != "" && scaleSet.EnterpriseName != "": + belongsTo = scaleSet.EnterpriseName + level = "enterprise" + } + + t.AppendHeader(header) + t.AppendRow(table.Row{"ID", scaleSet.ID}) + t.AppendRow(table.Row{"Scale Set ID", scaleSet.ScaleSetID}) + t.AppendRow(table.Row{"Scale Name", scaleSet.Name}) + t.AppendRow(table.Row{"Provider Name", scaleSet.ProviderName}) + t.AppendRow(table.Row{"Image", scaleSet.Image}) + t.AppendRow(table.Row{"Flavor", scaleSet.Flavor}) + t.AppendRow(table.Row{"OS Type", scaleSet.OSType}) + t.AppendRow(table.Row{"OS Architecture", scaleSet.OSArch}) + t.AppendRow(table.Row{"Max Runners", scaleSet.MaxRunners}) + t.AppendRow(table.Row{"Min Idle Runners", scaleSet.MinIdleRunners}) + t.AppendRow(table.Row{"Runner Bootstrap Timeout", scaleSet.RunnerBootstrapTimeout}) + t.AppendRow(table.Row{"Belongs to", belongsTo}) + t.AppendRow(table.Row{"Level", level}) + t.AppendRow(table.Row{"Enabled", scaleSet.Enabled}) + t.AppendRow(table.Row{"Runner Prefix", scaleSet.GetRunnerPrefix()}) + t.AppendRow(table.Row{"Extra specs", string(scaleSet.ExtraSpecs)}) + t.AppendRow(table.Row{"GitHub Runner Group", scaleSet.GitHubRunnerGroup}) + + if len(scaleSet.Instances) > 0 { + for _, instance := range scaleSet.Instances { + t.AppendRow(table.Row{"Instances", fmt.Sprintf("%s (%s)", instance.Name, instance.ID)}, rowConfigAutoMerge) + } + } + + t.SetColumnConfigs([]table.ColumnConfig{ + {Number: 1, AutoMerge: true}, + {Number: 2, AutoMerge: false, WidthMax: 100}, + }) + fmt.Println(t.Render()) +} diff --git a/database/sql/enterprise.go b/database/sql/enterprise.go index dfcb10a2..414a7aaf 100644 --- a/database/sql/enterprise.go +++ b/database/sql/enterprise.go @@ -96,7 +96,7 @@ func (s *sqlDatabase) GetEnterprise(ctx context.Context, name, endpointName stri } func (s *sqlDatabase) GetEnterpriseByID(ctx context.Context, enterpriseID string) (params.Enterprise, error) { - enterprise, err := s.getEnterpriseByID(ctx, s.conn, enterpriseID, "Pools", "Credentials", "Endpoint") + enterprise, err := s.getEnterpriseByID(ctx, s.conn, enterpriseID, "Pools", "Credentials", "Endpoint", "Credentials.Endpoint") if err != nil { return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") } diff --git a/database/sql/organizations.go b/database/sql/organizations.go index c41b9269..07ce32d8 100644 --- a/database/sql/organizations.go +++ b/database/sql/organizations.go @@ -216,7 +216,7 @@ func (s *sqlDatabase) UpdateOrganization(ctx context.Context, orgID string, para } func (s *sqlDatabase) GetOrganizationByID(ctx context.Context, orgID string) (params.Organization, error) { - org, err := s.getOrgByID(ctx, s.conn, orgID, "Pools", "Credentials", "Endpoint") + org, err := s.getOrgByID(ctx, s.conn, orgID, "Pools", "Credentials", "Endpoint", "Credentials.Endpoint") if err != nil { return params.Organization{}, errors.Wrap(err, "fetching org") } diff --git a/database/sql/repositories.go b/database/sql/repositories.go index c1eaef3b..6b744163 100644 --- a/database/sql/repositories.go +++ b/database/sql/repositories.go @@ -216,7 
+216,7 @@ func (s *sqlDatabase) UpdateRepository(ctx context.Context, repoID string, param } func (s *sqlDatabase) GetRepositoryByID(ctx context.Context, repoID string) (params.Repository, error) { - repo, err := s.getRepoByID(ctx, s.conn, repoID, "Pools", "Credentials", "Endpoint") + repo, err := s.getRepoByID(ctx, s.conn, repoID, "Pools", "Credentials", "Endpoint", "Credentials.Endpoint") if err != nil { return params.Repository{}, errors.Wrap(err, "fetching repo") } diff --git a/runner/scalesets.go b/runner/scalesets.go index 5e8123b0..34ba699a 100644 --- a/runner/scalesets.go +++ b/runner/scalesets.go @@ -164,11 +164,6 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param hasUpdates = true } - if old.Enabled != new.Enabled { - updateParams.Enabled = &new.Enabled - hasUpdates = true - } - if old.GitHubRunnerGroup != new.GitHubRunnerGroup { runnerGroup, err := scalesetCli.GetRunnerGroupByName(ctx, new.GitHubRunnerGroup) if err != nil { diff --git a/util/github/client.go b/util/github/client.go index d4251764..f8c04d28 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -452,6 +452,11 @@ func Client(ctx context.Context, entity params.GithubEntity) (common.GithubClien return nil, errors.Wrap(err, "fetching http client") } + slog.InfoContext( + ctx, "creating client with", + "entity", entity.String(), "base_url", entity.Credentials.APIBaseURL, + "upload_url", entity.Credentials.UploadBaseURL) + ghClient, err := github.NewClient(httpClient).WithEnterpriseURLs( entity.Credentials.APIBaseURL, entity.Credentials.UploadBaseURL) if err != nil { From 6a5c309399599838db6f53beb2a3d67a7bed8102 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 15 Apr 2025 09:36:28 +0000 Subject: [PATCH 010/179] Add some worker code Signed-off-by: Gabriel Adrian Samfira --- cmd/garm/main.go | 14 ++ database/sql/models.go | 2 + params/github.go | 17 ++ util/github/client.go | 4 +- workers/common/interfaces.go | 9 + workers/entity/controller.go | 208 ++++++++++++++++++++++ workers/entity/controller_watcher.go | 98 ++++++++++ workers/entity/util.go | 35 ++++ workers/entity/worker.go | 110 ++++++++++++ workers/entity/worker_watcher.go | 76 ++++++++ workers/scaleset/controller.go | 237 +++++++++++++++++++++++++ workers/scaleset/controller_watcher.go | 207 +++++++++++++++++++++ workers/scaleset/scaleset.go | 76 ++++++++ workers/scaleset/status.go | 13 ++ workers/scaleset/util.go | 28 +++ 15 files changed, 1132 insertions(+), 2 deletions(-) create mode 100644 workers/common/interfaces.go create mode 100644 workers/entity/controller.go create mode 100644 workers/entity/controller_watcher.go create mode 100644 workers/entity/util.go create mode 100644 workers/entity/worker.go create mode 100644 workers/entity/worker_watcher.go create mode 100644 workers/scaleset/controller.go create mode 100644 workers/scaleset/controller_watcher.go create mode 100644 workers/scaleset/scaleset.go create mode 100644 workers/scaleset/status.go create mode 100644 workers/scaleset/util.go diff --git a/cmd/garm/main.go b/cmd/garm/main.go index ebb30d55..3ffcdc1f 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -49,6 +49,7 @@ import ( garmUtil "github.com/cloudbase/garm/util" "github.com/cloudbase/garm/util/appdefaults" "github.com/cloudbase/garm/websocket" + "github.com/cloudbase/garm/workers/entity" ) var ( @@ -230,6 +231,14 @@ func main() { log.Fatal(err) } + entityController, err := entity.NewController(ctx, db, *cfg) + if err != nil { + log.Fatalf("failed to create entity 
controller: %+v", err) + } + if err := entityController.Start(); err != nil { + log.Fatalf("failed to start entity controller: %+v", err) + } + runner, err := runner.NewRunner(ctx, *cfg, db) if err != nil { log.Fatalf("failed to create controller: %+v", err) @@ -326,6 +335,11 @@ func main() { <-ctx.Done() + slog.InfoContext(ctx, "shutting down entity controller") + if err := entityController.Stop(); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop entity controller") + } + shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 60*time.Second) defer shutdownCancel() if err := srv.Shutdown(shutdownCtx); err != nil { diff --git a/database/sql/models.go b/database/sql/models.go index d040760c..c44baceb 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -96,6 +96,8 @@ type ScaleSet struct { // ScaleSetID is the github ID of the scale set. This field may not be set if // the scale set was ceated in GARM but has not yet been created in GitHub. + // The scale set ID is also not globally unique. It is only unique within the context + // of an entity. ScaleSetID int `gorm:"index:idx_scale_set"` Name string `gorm:"index:idx_name"` DisableUpdate bool diff --git a/params/github.go b/params/github.go index 81540683..2d132d50 100644 --- a/params/github.go +++ b/params/github.go @@ -15,6 +15,8 @@ package params import ( + "encoding/base64" + "encoding/json" "fmt" "net/url" "time" @@ -420,6 +422,21 @@ type RunnerScaleSetJitRunnerConfig struct { EncodedJITConfig string `json:"encodedJITConfig"` } +func (r RunnerScaleSetJitRunnerConfig) DecodedJITConfig() (map[string]string, error) { + if r.EncodedJITConfig == "" { + return nil, fmt.Errorf("no encoded JIT config specified") + } + decoded, err := base64.StdEncoding.DecodeString(r.EncodedJITConfig) + if err != nil { + return nil, fmt.Errorf("failed to decode JIT config: %w", err) + } + jitConfig := make(map[string]string) + if err := json.Unmarshal(decoded, &jitConfig); err != nil { + return nil, fmt.Errorf("failed to unmarshal JIT config: %w", err) + } + return jitConfig, nil +} + type RunnerReferenceList struct { Count int `json:"count"` RunnerReferences []RunnerReference `json:"value"` diff --git a/util/github/client.go b/util/github/client.go index f8c04d28..ae0b6485 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -452,8 +452,8 @@ func Client(ctx context.Context, entity params.GithubEntity) (common.GithubClien return nil, errors.Wrap(err, "fetching http client") } - slog.InfoContext( - ctx, "creating client with", + slog.DebugContext( + ctx, "creating client for entity", "entity", entity.String(), "base_url", entity.Credentials.APIBaseURL, "upload_url", entity.Credentials.UploadBaseURL) diff --git a/workers/common/interfaces.go b/workers/common/interfaces.go new file mode 100644 index 00000000..4791a500 --- /dev/null +++ b/workers/common/interfaces.go @@ -0,0 +1,9 @@ +package common + +import ( + commonParams "github.com/cloudbase/garm-provider-common/params" +) + +type ToolsGetter interface { + GetTools() ([]commonParams.RunnerApplicationDownload, error) +} diff --git a/workers/entity/controller.go b/workers/entity/controller.go new file mode 100644 index 00000000..c1547302 --- /dev/null +++ b/workers/entity/controller.go @@ -0,0 +1,208 @@ +package entity + +import ( + "context" + "fmt" + "log/slog" + "sync" + + "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/config" + dbCommon "github.com/cloudbase/garm/database/common" + 
"github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/runner/common" + "github.com/cloudbase/garm/runner/providers" + garmUtil "github.com/cloudbase/garm/util" +) + +func NewController(ctx context.Context, store dbCommon.Store, cfg config.Config) (*Controller, error) { + consumerID := "entity-controller" + ctrlID, err := store.ControllerInfo() + if err != nil { + return nil, fmt.Errorf("getting controller info: %w", err) + } + + ctx = garmUtil.WithSlogContext( + ctx, + slog.Any("worker", consumerID)) + ctx = auth.GetAdminContext(ctx) + + providers, err := providers.LoadProvidersFromConfig(ctx, cfg, ctrlID.ControllerID.String()) + if err != nil { + return nil, fmt.Errorf("loading providers: %w", err) + } + + return &Controller{ + consumerID: consumerID, + ctx: ctx, + store: store, + providers: providers, + Entities: make(map[string]*Worker), + }, nil +} + +type Controller struct { + consumerID string + ctx context.Context + + consumer dbCommon.Consumer + store dbCommon.Store + + providers map[string]common.Provider + Entities map[string]*Worker + + running bool + quit chan struct{} + + mux sync.Mutex +} + +func (c *Controller) loadAllRepositories() error { + c.mux.Lock() + defer c.mux.Unlock() + repos, err := c.store.ListRepositories(c.ctx) + if err != nil { + return fmt.Errorf("fetching repositories: %w", err) + } + + for _, repo := range repos { + entity, err := repo.GetEntity() + if err != nil { + return fmt.Errorf("getting entity: %w", err) + } + worker, err := NewWorker(c.ctx, c.store, entity, c.providers) + if err != nil { + return fmt.Errorf("creating worker: %w", err) + } + slog.DebugContext(c.ctx, "starting entity worker", "entity_id", entity.ID, "entity_type", entity.EntityType) + if err := worker.Start(); err != nil { + return fmt.Errorf("starting worker: %w", err) + } + c.Entities[entity.ID] = worker + } + return nil +} + +func (c *Controller) loadAllOrganizations() error { + c.mux.Lock() + defer c.mux.Unlock() + orgs, err := c.store.ListOrganizations(c.ctx) + if err != nil { + return fmt.Errorf("fetching organizations: %w", err) + } + for _, org := range orgs { + entity, err := org.GetEntity() + if err != nil { + return fmt.Errorf("getting entity: %w", err) + } + worker, err := NewWorker(c.ctx, c.store, entity, c.providers) + if err != nil { + return fmt.Errorf("creating worker: %w", err) + } + slog.DebugContext(c.ctx, "starting entity worker", "entity_id", entity.ID, "entity_type", entity.EntityType) + if err := worker.Start(); err != nil { + return fmt.Errorf("starting worker: %w", err) + } + c.Entities[entity.ID] = worker + } + return nil +} + +func (c *Controller) loadAllEnterprises() error { + c.mux.Lock() + defer c.mux.Unlock() + enterprises, err := c.store.ListEnterprises(c.ctx) + if err != nil { + return fmt.Errorf("fetching enterprises: %w", err) + } + for _, enterprise := range enterprises { + entity, err := enterprise.GetEntity() + if err != nil { + return fmt.Errorf("getting entity: %w", err) + } + worker, err := NewWorker(c.ctx, c.store, entity, c.providers) + if err != nil { + return fmt.Errorf("creating worker: %w", err) + } + slog.DebugContext(c.ctx, "starting entity worker", "entity_id", entity.ID, "entity_type", entity.EntityType) + if err := worker.Start(); err != nil { + return fmt.Errorf("starting worker: %w", err) + } + c.Entities[entity.ID] = worker + } + return nil +} + +func (c *Controller) Start() error { + c.mux.Lock() + if c.running { + c.mux.Unlock() + return nil + } + c.mux.Unlock() + + if err := c.loadAllRepositories(); err != 
nil { + return fmt.Errorf("loading repositories: %w", err) + } + if err := c.loadAllOrganizations(); err != nil { + return fmt.Errorf("loading organizations: %w", err) + } + if err := c.loadAllEnterprises(); err != nil { + return fmt.Errorf("loading enterprises: %w", err) + } + + consumer, err := watcher.RegisterConsumer( + c.ctx, c.consumerID, + composeControllerWatcherFilters(), + ) + + if err != nil { + return fmt.Errorf("failed to create consumer for entity controller: %w", err) + } + + c.mux.Lock() + c.consumer = consumer + c.running = true + c.quit = make(chan struct{}) + c.mux.Unlock() + + go c.loop() + + return nil +} + +func (c *Controller) Stop() error { + c.mux.Lock() + defer c.mux.Unlock() + if !c.running { + return nil + } + slog.DebugContext(c.ctx, "stopping entity controller") + + for entityID, worker := range c.Entities { + if err := worker.Stop(); err != nil { + slog.ErrorContext(c.ctx, "stopping worker for entity", "entity_id", entityID, "error", err) + } + } + + c.running = false + close(c.quit) + c.quit = nil + c.consumer.Close() + return nil +} + +func (c *Controller) loop() { + defer c.Stop() + for { + select { + case payload := <-c.consumer.Watch(): + slog.InfoContext(c.ctx, "received payload", slog.Any("payload", payload)) + go c.handleWatcherEvent(payload) + case <-c.ctx.Done(): + return + case <-c.quit: + return + } + } +} diff --git a/workers/entity/controller_watcher.go b/workers/entity/controller_watcher.go new file mode 100644 index 00000000..ace63702 --- /dev/null +++ b/workers/entity/controller_watcher.go @@ -0,0 +1,98 @@ +package entity + +import ( + "log/slog" + + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/params" +) + +func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) { + var entityGetter params.EntityGetter + switch event.EntityType { + case dbCommon.RepositoryEntityType: + slog.DebugContext(c.ctx, "got repository payload event") + repo, ok := event.Payload.(params.Repository) + if !ok { + slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + } + entityGetter = repo + case dbCommon.OrganizationEntityType: + slog.DebugContext(c.ctx, "got organization payload event") + org, ok := event.Payload.(params.Organization) + if !ok { + slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + } + entityGetter = org + case dbCommon.EnterpriseEntityType: + slog.DebugContext(c.ctx, "got enterprise payload event") + ent, ok := event.Payload.(params.Enterprise) + if !ok { + slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + } + entityGetter = ent + default: + slog.ErrorContext(c.ctx, "invalid entity type", "entity_type", event.EntityType) + return + } + + if entityGetter == nil { + return + } + + switch event.Operation { + case dbCommon.CreateOperation: + slog.DebugContext(c.ctx, "got create operation") + c.handleWatcherCreateOperation(entityGetter, event) + case dbCommon.DeleteOperation: + slog.DebugContext(c.ctx, "got delete operation") + c.handleWatcherDeleteOperation(entityGetter, event) + default: + slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation) + return + } +} + +func (c *Controller) handleWatcherCreateOperation(entityGetter params.EntityGetter, event dbCommon.ChangePayload) { + c.mux.Lock() + defer c.mux.Unlock() + entity, err := entityGetter.GetEntity() + if err != nil 
{ + slog.ErrorContext(c.ctx, "getting entity from repository", "entity_type", event.EntityType, "payload", event.Payload, "error", err) + return + } + worker, err := NewWorker(c.ctx, c.store, entity, c.providers) + if err != nil { + slog.ErrorContext(c.ctx, "creating worker from repository", "entity_type", event.EntityType, "payload", event.Payload, "error", err) + return + } + + slog.InfoContext(c.ctx, "starting entity worker", "entity_id", entity.ID, "entity_type", entity.EntityType) + if err := worker.Start(); err != nil { + slog.ErrorContext(c.ctx, "starting worker", "entity_id", entity.ID, "error", err) + return + } + + c.Entities[entity.ID] = worker +} + +func (c *Controller) handleWatcherDeleteOperation(entityGetter params.EntityGetter, event dbCommon.ChangePayload) { + c.mux.Lock() + defer c.mux.Unlock() + entity, err := entityGetter.GetEntity() + if err != nil { + slog.ErrorContext(c.ctx, "getting entity from repository", "entity_type", event.EntityType, "payload", event.Payload, "error", err) + return + } + worker, ok := c.Entities[entity.ID] + if !ok { + slog.InfoContext(c.ctx, "entity not found in worker list", "entity_id", entity.ID) + return + } + slog.InfoContext(c.ctx, "stopping entity worker", "entity_id", entity.ID, "entity_type", entity.EntityType) + if err := worker.Stop(); err != nil { + slog.ErrorContext(c.ctx, "stopping worker", "entity_id", entity.ID, "error", err) + return + } + delete(c.Entities, entity.ID) +} diff --git a/workers/entity/util.go b/workers/entity/util.go new file mode 100644 index 00000000..28b9f955 --- /dev/null +++ b/workers/entity/util.go @@ -0,0 +1,35 @@ +package entity + +import ( + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/params" +) + +func composeControllerWatcherFilters() dbCommon.PayloadFilterFunc { + return watcher.WithAll( + watcher.WithAny( + watcher.WithEntityTypeFilter(dbCommon.RepositoryEntityType), + watcher.WithEntityTypeFilter(dbCommon.OrganizationEntityType), + watcher.WithEntityTypeFilter(dbCommon.EnterpriseEntityType), + ), + watcher.WithAny( + watcher.WithOperationTypeFilter(dbCommon.CreateOperation), + watcher.WithOperationTypeFilter(dbCommon.DeleteOperation), + ), + ) +} + +func composeWorkerWatcherFilters(entity params.GithubEntity) dbCommon.PayloadFilterFunc { + return watcher.WithAny( + watcher.WithAll( + watcher.WithEntityFilter(entity), + watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), + ), + // Watch for credentials updates. 
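+		// The worker caches the entity credentials, so updates to the credentials
+		// object itself (not just to the entity) must also be observed.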
+ watcher.WithAll( + watcher.WithGithubCredentialsFilter(entity.Credentials), + watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), + ), + ) +} diff --git a/workers/entity/worker.go b/workers/entity/worker.go new file mode 100644 index 00000000..2a8591cb --- /dev/null +++ b/workers/entity/worker.go @@ -0,0 +1,110 @@ +package entity + +import ( + "context" + "fmt" + "log/slog" + "sync" + + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/runner/common" + garmUtil "github.com/cloudbase/garm/util" + "github.com/cloudbase/garm/workers/scaleset" +) + +func NewWorker(ctx context.Context, store dbCommon.Store, entity params.GithubEntity, providers map[string]common.Provider) (*Worker, error) { + consumerID := fmt.Sprintf("entity-worker-%s", entity.String()) + + ctx = garmUtil.WithSlogContext( + ctx, + slog.Any("worker", consumerID)) + + return &Worker{ + ctx: ctx, + consumerID: consumerID, + store: store, + Entity: entity, + providers: providers, + }, nil +} + +type Worker struct { + ctx context.Context + consumerID string + + consumer dbCommon.Consumer + store dbCommon.Store + + Entity params.GithubEntity + providers map[string]common.Provider + scaleSetController *scaleset.Controller + // TODO(gabriel-samfira): replace current pool manager with something similar + // to the scale set controller. + // poolManager *pool.Controller + + mux sync.Mutex + running bool + quit chan struct{} +} + +func (w *Worker) Stop() error { + w.mux.Lock() + defer w.mux.Unlock() + + if !w.running { + return nil + } + slog.DebugContext(w.ctx, "stopping entity worker") + + if err := w.scaleSetController.Stop(); err != nil { + return fmt.Errorf("stopping scale set controller: %w", err) + } + w.scaleSetController = nil + + w.running = false + close(w.quit) + w.consumer.Close() + return nil +} + +func (w *Worker) Start() error { + w.mux.Lock() + defer w.mux.Unlock() + + scaleSetController, err := scaleset.NewController(w.ctx, w.store, w.Entity, w.providers) + if err != nil { + return fmt.Errorf("creating scale set controller: %w", err) + } + w.scaleSetController = scaleSetController + + consumer, err := watcher.RegisterConsumer( + w.ctx, w.consumerID, + composeWorkerWatcherFilters(w.Entity), + ) + if err != nil { + return fmt.Errorf("registering consumer: %w", err) + } + w.consumer = consumer + + w.running = true + w.quit = make(chan struct{}) + go w.loop() + return nil +} + +func (w *Worker) loop() { + defer w.Stop() + for { + select { + case payload := <-w.consumer.Watch(): + slog.InfoContext(w.ctx, "received payload", slog.Any("payload", payload)) + go w.handleWorkerWatcherEvent(payload) + case <-w.ctx.Done(): + return + case <-w.quit: + return + } + } +} diff --git a/workers/entity/worker_watcher.go b/workers/entity/worker_watcher.go new file mode 100644 index 00000000..7e00112e --- /dev/null +++ b/workers/entity/worker_watcher.go @@ -0,0 +1,76 @@ +package entity + +import ( + "log/slog" + + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/params" +) + +func (w *Worker) handleWorkerWatcherEvent(event dbCommon.ChangePayload) { + // This worker may be for a repo, org or enterprise. React only to the entity type + // that this worker is for. 
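+	// Credential update events are also handled here, as the worker keeps a cached
+	// copy of the entity credentials which must be refreshed when they change.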
+	entityType := dbCommon.DatabaseEntityType(w.Entity.EntityType)
+	switch event.EntityType {
+	case entityType:
+		entityGetter, ok := event.Payload.(params.EntityGetter)
+		if !ok {
+			slog.ErrorContext(w.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload)
+			return
+		}
+		entity, err := entityGetter.GetEntity()
+		if err != nil {
+			slog.ErrorContext(w.ctx, "getting entity from repository", "entity_type", event.EntityType, "payload", event.Payload, "error", err)
+			return
+		}
+		w.handleEntityEventPayload(entity, event)
+		return
+	case dbCommon.GithubCredentialsEntityType:
+		slog.DebugContext(w.ctx, "got github credentials payload event")
+		credentials, ok := event.Payload.(params.GithubCredentials)
+		if !ok {
+			slog.ErrorContext(w.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload)
+			return
+		}
+		w.handleEntityCredentialsEventPayload(credentials, event)
+	default:
+		slog.DebugContext(w.ctx, "invalid entity type; ignoring", "entity_type", event.EntityType)
+	}
+}
+
+func (w *Worker) handleEntityEventPayload(entity params.GithubEntity, event dbCommon.ChangePayload) {
+	switch event.Operation {
+	case dbCommon.UpdateOperation:
+		slog.DebugContext(w.ctx, "got update operation")
+		w.mux.Lock()
+		defer w.mux.Unlock()
+
+		credentials := entity.Credentials
+		if w.Entity.Credentials.ID != credentials.ID {
+			// credentials were swapped on the entity. We need to recompose the watcher
+			// filters.
+			w.consumer.SetFilters(composeWorkerWatcherFilters(entity))
+		}
+		w.Entity = entity
+	default:
+		slog.ErrorContext(w.ctx, "invalid operation type", "operation_type", event.Operation)
+	}
+}
+
+func (w *Worker) handleEntityCredentialsEventPayload(credentials params.GithubCredentials, event dbCommon.ChangePayload) {
+	switch event.Operation {
+	case dbCommon.UpdateOperation:
+		slog.DebugContext(w.ctx, "got credentials update operation")
+		w.mux.Lock()
+		defer w.mux.Unlock()
+		if w.Entity.Credentials.ID != credentials.ID {
+			// The channel is buffered. We may get an old update. If credentials get updated
+			// immediately after they are swapped on the entity, we may still get an update
+			// pushed to the channel before the filters are swapped. We can ignore the update. 
+ return + } + w.Entity.Credentials = credentials + default: + slog.ErrorContext(w.ctx, "invalid operation type", "operation_type", event.Operation) + } +} diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go new file mode 100644 index 00000000..de426621 --- /dev/null +++ b/workers/scaleset/controller.go @@ -0,0 +1,237 @@ +package scaleset + +import ( + "context" + "fmt" + "log/slog" + "sync" + "time" + + commonParams "github.com/cloudbase/garm-provider-common/params" + + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/runner/common" + garmUtil "github.com/cloudbase/garm/util" + "github.com/cloudbase/garm/util/github" + "github.com/cloudbase/garm/util/github/scalesets" +) + +func NewController(ctx context.Context, store dbCommon.Store, entity params.GithubEntity, providers map[string]common.Provider) (*Controller, error) { + + consumerID := fmt.Sprintf("scaleset-worker-%s", entity.String()) + + ctx = garmUtil.WithSlogContext( + ctx, + slog.Any("worker", consumerID)) + + return &Controller{ + ctx: ctx, + consumerID: consumerID, + ScaleSets: make(map[uint]*scaleSet), + Entity: entity, + providers: providers, + store: store, + statusUpdates: make(chan scaleSetStatus, 10), + }, nil +} + +type scaleSet struct { + scaleSet params.ScaleSet + status scaleSetStatus + worker *Worker + + mux sync.Mutex +} + +func (s *scaleSet) updateStatus(status scaleSetStatus) { + s.mux.Lock() + defer s.mux.Unlock() + + s.status = status +} + +func (s *scaleSet) Stop() error { + s.mux.Lock() + defer s.mux.Unlock() + + if s.worker == nil { + return nil + } + + return s.worker.Stop() +} + +// Controller is responsible for managing scale sets for one github entity. 
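+// The controller loads all scale sets for the entity on startup, reacts to scale set
+// create, update and delete events from the database watcher, periodically refreshes
+// the runner tools and collects status updates from its workers via the statusUpdates
+// channel.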
+type Controller struct { + ctx context.Context + consumerID string + + ScaleSets map[uint]*scaleSet + + Entity params.GithubEntity + + consumer dbCommon.Consumer + store dbCommon.Store + providers map[string]common.Provider + + tools []commonParams.RunnerApplicationDownload + ghCli common.GithubClient + scaleSetCli *scalesets.ScaleSetClient + forgeCredsAreValid bool + + statusUpdates chan scaleSetStatus + + mux sync.Mutex + running bool + quit chan struct{} +} + +func (c *Controller) loadAllScaleSets() error { + scaleSets, err := c.store.ListEntityScaleSets(c.ctx, c.Entity) + if err != nil { + return fmt.Errorf("listing scale sets: %w", err) + } + + for _, sSet := range scaleSets { + if err := c.handleScaleSetCreateOperation(sSet); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set create operation") + continue + } + } + return nil +} + +func (c *Controller) Start() (err error) { + c.mux.Lock() + if c.running { + c.mux.Unlock() + return nil + } + c.mux.Unlock() + + if err := c.loadAllScaleSets(); err != nil { + return fmt.Errorf("loading all scale sets: %w", err) + } + + ghCli, err := github.Client(c.ctx, c.Entity) + if err != nil { + return fmt.Errorf("creating github client: %w", err) + } + + consumer, err := watcher.RegisterConsumer( + c.ctx, c.consumerID, + composeControllerWatcherFilters(c.Entity), + ) + if err != nil { + return fmt.Errorf("registering consumer: %w", err) + } + + c.mux.Lock() + c.ghCli = ghCli + c.consumer = consumer + c.running = true + c.quit = make(chan struct{}) + c.mux.Unlock() + + go c.loop() + return nil +} + +func (c *Controller) Stop() error { + c.mux.Lock() + defer c.mux.Unlock() + + if !c.running { + return nil + } + slog.DebugContext(c.ctx, "stopping scaleset controller", "entity", c.Entity.String()) + + for scaleSetID, scaleSet := range c.ScaleSets { + if err := scaleSet.Stop(); err != nil { + slog.ErrorContext(c.ctx, "stopping worker for scale set", "scale_set_id", scaleSetID, "error", err) + continue + } + delete(c.ScaleSets, scaleSetID) + } + + c.running = false + close(c.quit) + c.quit = nil + close(c.statusUpdates) + c.statusUpdates = nil + c.consumer.Close() + + return nil +} + +func (c *Controller) updateTools() error { + c.mux.Lock() + defer c.mux.Unlock() + + tools, err := garmUtil.FetchTools(c.ctx, c.ghCli) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext( + c.ctx, "failed to update tools for entity", "entity", c.Entity.String()) + c.forgeCredsAreValid = false + return fmt.Errorf("failed to update tools for entity %s: %w", c.Entity.String(), err) + } + c.forgeCredsAreValid = true + c.tools = tools + return nil +} + +func (c *Controller) handleScaleSetStatusUpdates(status scaleSetStatus) { + if status.scaleSet.ID == 0 { + slog.DebugContext(c.ctx, "invalid scale set ID; ignoring") + return + } + + scaleSet, ok := c.ScaleSets[status.scaleSet.ID] + if !ok { + slog.DebugContext(c.ctx, "scale set not found; ignoring") + return + } + + scaleSet.updateStatus(status) +} + +func (c *Controller) loop() { + defer c.Stop() + updateToolsTicker := time.NewTicker(common.PoolToolUpdateInterval) + initialToolUpdate := make(chan struct{}, 1) + go func() { + slog.Info("running initial tool update") + if err := c.updateTools(); err != nil { + slog.With(slog.Any("error", err)).Error("failed to update tools") + } + initialToolUpdate <- struct{}{} + }() + + for { + select { + case payload := <-c.consumer.Watch(): + slog.InfoContext(c.ctx, "received payload", slog.Any("payload", payload)) + go 
c.handleWatcherEvent(payload) + case <-c.ctx.Done(): + return + case _, ok := <-initialToolUpdate: + if ok { + // channel received the initial update slug. We can close it now. + close(initialToolUpdate) + } + case update, ok := <-c.statusUpdates: + if !ok { + return + } + go c.handleScaleSetStatusUpdates(update) + case <-updateToolsTicker.C: + if err := c.updateTools(); err != nil { + slog.With(slog.Any("error", err)).Error("failed to update tools") + } + case <-c.quit: + return + } + } +} diff --git a/workers/scaleset/controller_watcher.go b/workers/scaleset/controller_watcher.go new file mode 100644 index 00000000..4b347f59 --- /dev/null +++ b/workers/scaleset/controller_watcher.go @@ -0,0 +1,207 @@ +package scaleset + +import ( + "fmt" + "log/slog" + + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/util/github" + scaleSetCli "github.com/cloudbase/garm/util/github/scalesets" +) + +func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) { + entityType := dbCommon.DatabaseEntityType(c.Entity.EntityType) + switch event.EntityType { + case dbCommon.ScaleSetEntityType: + slog.DebugContext(c.ctx, "got scale set payload event") + c.handleScaleSet(event) + case entityType: + slog.DebugContext(c.ctx, "got entity payload event") + c.handleEntityEvent(event) + case dbCommon.GithubCredentialsEntityType: + slog.DebugContext(c.ctx, "got github credentials payload event") + c.handleCredentialsEvent(event) + default: + slog.ErrorContext(c.ctx, "invalid entity type", "entity_type", event.EntityType) + return + } +} + +func (c *Controller) handleScaleSet(event dbCommon.ChangePayload) { + scaleSet, ok := event.Payload.(params.ScaleSet) + if !ok { + slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + return + } + + switch event.Operation { + case dbCommon.CreateOperation: + slog.DebugContext(c.ctx, "got create operation for scale set", "scale_set_id", scaleSet.ID, "scale_set_name", scaleSet.Name) + if err := c.handleScaleSetCreateOperation(scaleSet); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set create operation") + } + case dbCommon.UpdateOperation: + slog.DebugContext(c.ctx, "got update operation for scale set", "scale_set_id", scaleSet.ID, "scale_set_name", scaleSet.Name) + if err := c.handleScaleSetUpdateOperation(scaleSet); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set update operation") + } + case dbCommon.DeleteOperation: + slog.DebugContext(c.ctx, "got delete operation") + if err := c.handleScaleSetDeleteOperation(scaleSet); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set delete operation") + } + default: + slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation) + return + } +} + +func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet) error { + c.mux.Lock() + defer c.mux.Unlock() + + if _, ok := c.ScaleSets[sSet.ID]; ok { + slog.DebugContext(c.ctx, "scale set already exists in worker list", "scale_set_id", sSet.ID) + return nil + } + + provider, ok := c.providers[sSet.ProviderName] + if !ok { + // Providers are currently static, set in the config and cannot be updated without a restart. + // ScaleSets and pools also do not allow updating the provider. 
This condition is not recoverable + // without a restart, so we don't need to instantiate a worker for this scale set. + return fmt.Errorf("provider %s not found for scale set %s", sSet.ProviderName, sSet.Name) + } + + worker, err := NewWorker(c.ctx, c.store, sSet, provider) + if err != nil { + return fmt.Errorf("creating scale set worker: %w", err) + } + if err := worker.Start(); err != nil { + // The Start() function should only return an error if an unrecoverable error occurs. + // For transient errors, it should mark the scale set as being in error, but continue + // to retry fixing the condition. For example, not being able to retrieve tools due to bad + // credentials should not stop the worker. The credentials can be fixed and the worker + // can continue to work. + return fmt.Errorf("starting scale set worker: %w", err) + } + c.ScaleSets[sSet.ID] = &scaleSet{ + scaleSet: sSet, + status: scaleSetStatus{}, + worker: worker, + } + return nil +} + +func (c *Controller) handleScaleSetDeleteOperation(sSet params.ScaleSet) error { + c.mux.Lock() + defer c.mux.Unlock() + + set, ok := c.ScaleSets[sSet.ID] + if !ok { + slog.DebugContext(c.ctx, "scale set not found in worker list", "scale_set_id", sSet.ID) + return nil + } + + slog.DebugContext(c.ctx, "stopping scale set worker", "scale_set_id", sSet.ID) + if err := set.worker.Stop(); err != nil { + return fmt.Errorf("stopping scale set worker: %w", err) + } + delete(c.ScaleSets, sSet.ID) + return nil +} + +func (c *Controller) handleScaleSetUpdateOperation(sSet params.ScaleSet) error { + c.mux.Lock() + defer c.mux.Unlock() + + if _, ok := c.ScaleSets[sSet.ID]; !ok { + // Some error may have occured when the scale set was first created, so we + // attempt to create it after the user updated the scale set, hopefully + // fixing the reason for the failure. + return c.handleScaleSetCreateOperation(sSet) + } + // We let the watcher in the scale set worker handle the update operation. + return nil +} + +func (c *Controller) handleCredentialsEvent(event dbCommon.ChangePayload) { + credentials, ok := event.Payload.(params.GithubCredentials) + if !ok { + slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + return + } + + switch event.Operation { + case dbCommon.UpdateOperation: + slog.DebugContext(c.ctx, "got update operation") + c.mux.Lock() + defer c.mux.Unlock() + + if c.Entity.Credentials.ID != credentials.ID { + // credentials were swapped on the entity. We need to recompose the watcher + // filters. + return + } + c.Entity.Credentials = credentials + + if err := c.updateAndBroadcastCredentials(c.Entity); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to update credentials") + return + } + default: + slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation) + return + } +} + +func (c *Controller) handleEntityEvent(event dbCommon.ChangePayload) { + entity, ok := event.Payload.(params.GithubEntity) + if !ok { + slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + return + } + + switch event.Operation { + case dbCommon.UpdateOperation: + slog.DebugContext(c.ctx, "got update operation") + c.mux.Lock() + defer c.mux.Unlock() + + if c.Entity.Credentials.ID != entity.Credentials.ID { + // credentials were swapped on the entity. We need to recompose the watcher + // filters. 
+ c.consumer.SetFilters(composeControllerWatcherFilters(entity)) + if err := c.updateAndBroadcastCredentials(c.Entity); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to update credentials") + } + } + c.Entity = entity + default: + slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation) + return + } +} + +func (c *Controller) updateAndBroadcastCredentials(entity params.GithubEntity) error { + ghCli, err := github.Client(c.ctx, entity) + if err != nil { + return fmt.Errorf("creating github client: %w", err) + } + + setCli, err := scaleSetCli.NewClient(ghCli) + if err != nil { + return fmt.Errorf("creating scaleset client: %w", err) + } + c.ghCli = ghCli + c.scaleSetCli = setCli + + for _, scaleSet := range c.ScaleSets { + if err := scaleSet.worker.SetGithubClient(ghCli, setCli); err != nil { + slog.ErrorContext(c.ctx, "setting github client on worker", "error", err) + continue + } + } + return nil +} diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go new file mode 100644 index 00000000..0592a8cf --- /dev/null +++ b/workers/scaleset/scaleset.go @@ -0,0 +1,76 @@ +package scaleset + +import ( + "context" + "sync" + + commonParams "github.com/cloudbase/garm-provider-common/params" + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/runner/common" + "github.com/cloudbase/garm/util/github/scalesets" +) + +func NewWorker(ctx context.Context, store dbCommon.Store, scaleSet params.ScaleSet, provider common.Provider) (*Worker, error) { + return &Worker{ + ctx: ctx, + store: store, + provider: provider, + Entity: scaleSet, + }, nil +} + +type Worker struct { + ctx context.Context + + provider common.Provider + store dbCommon.Store + Entity params.ScaleSet + tools []commonParams.RunnerApplicationDownload + + ghCli common.GithubClient + scaleSetCli *scalesets.ScaleSetClient + + mux sync.Mutex + running bool + quit chan struct{} +} + +func (w *Worker) Stop() error { + return nil +} + +func (w *Worker) Start() error { + w.mux.Lock() + defer w.mux.Unlock() + + go w.loop() + return nil +} + +func (w *Worker) SetTools(tools []commonParams.RunnerApplicationDownload) { + w.mux.Lock() + defer w.mux.Unlock() + + w.tools = tools +} + +func (w *Worker) SetGithubClient(client common.GithubClient, scaleSetCli *scalesets.ScaleSetClient) error { + w.mux.Lock() + defer w.mux.Unlock() + + // TODO: + // * stop current listener if any + + w.ghCli = client + w.scaleSetCli = scaleSetCli + + // TODO: + // * start new listener + + return nil +} + +func (w *Worker) loop() { + +} diff --git a/workers/scaleset/status.go b/workers/scaleset/status.go new file mode 100644 index 00000000..29d9ae4f --- /dev/null +++ b/workers/scaleset/status.go @@ -0,0 +1,13 @@ +package scaleset + +import ( + "time" + + "github.com/cloudbase/garm/params" +) + +type scaleSetStatus struct { + err error + heartbeat time.Time + scaleSet params.ScaleSet +} diff --git a/workers/scaleset/util.go b/workers/scaleset/util.go new file mode 100644 index 00000000..a594f88c --- /dev/null +++ b/workers/scaleset/util.go @@ -0,0 +1,28 @@ +package scaleset + +import ( + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/params" +) + +func composeControllerWatcherFilters(entity params.GithubEntity) dbCommon.PayloadFilterFunc { + return watcher.WithAny( + watcher.WithAll( + watcher.WithEntityScaleSetFilter(entity), + watcher.WithAny( + 
watcher.WithOperationTypeFilter(dbCommon.CreateOperation), + watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), + watcher.WithOperationTypeFilter(dbCommon.DeleteOperation), + ), + ), + watcher.WithAll( + watcher.WithEntityFilter(entity), + watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), + ), + watcher.WithAll( + watcher.WithGithubCredentialsFilter(entity.Credentials), + watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), + ), + ) +} From a2aeac731cda50750e4a7d7d8f5fff0a6ce01a9d Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 16 Apr 2025 16:39:16 +0000 Subject: [PATCH 011/179] Scale set workers properly come online This adds the workers needed to start listening for scale set messages. There is no handling of messages yet. Signed-off-by: Gabriel Adrian Samfira --- cache/cache.go | 65 ++++++++ database/common/store.go | 1 + database/sql/models.go | 38 +++++ database/sql/sql.go | 3 + database/sql/util.go | 133 ++++++++++++++++ params/github.go | 32 ++++ runner/scalesets.go | 1 + workers/entity/controller.go | 4 +- workers/entity/worker.go | 16 +- workers/scaleset/controller.go | 41 +++-- workers/scaleset/controller_watcher.go | 20 +-- workers/scaleset/interfaces.go | 12 ++ workers/scaleset/scaleset.go | 205 +++++++++++++++++++++---- workers/scaleset/scaleset_helper.go | 20 +++ workers/scaleset/scaleset_listener.go | 134 ++++++++++++++++ 15 files changed, 668 insertions(+), 57 deletions(-) create mode 100644 cache/cache.go create mode 100644 workers/scaleset/interfaces.go create mode 100644 workers/scaleset/scaleset_helper.go create mode 100644 workers/scaleset/scaleset_listener.go diff --git a/cache/cache.go b/cache/cache.go new file mode 100644 index 00000000..2fa52456 --- /dev/null +++ b/cache/cache.go @@ -0,0 +1,65 @@ +package cache + +import ( + "sync" + "time" + + commonParams "github.com/cloudbase/garm-provider-common/params" + + "github.com/cloudbase/garm/params" +) + +var githubToolsCache *GithubToolsCache + +func init() { + ghToolsCache := &GithubToolsCache{ + entities: make(map[string]GithubEntityTools), + } + githubToolsCache = ghToolsCache +} + +type GithubEntityTools struct { + updatedAt time.Time + entity params.GithubEntity + tools []commonParams.RunnerApplicationDownload +} + +type GithubToolsCache struct { + mux sync.Mutex + // entity IDs are UUID4s. It is highly unlikely they will collide (🤞). + entities map[string]GithubEntityTools +} + +func (g *GithubToolsCache) Get(entity params.GithubEntity) ([]commonParams.RunnerApplicationDownload, bool) { + g.mux.Lock() + defer g.mux.Unlock() + + if cache, ok := g.entities[entity.String()]; ok { + if time.Since(cache.updatedAt) > 1*time.Hour { + // Stale cache, remove it. 
+ delete(g.entities, entity.String()) + return nil, false + } + return cache.tools, true + } + return nil, false +} + +func (g *GithubToolsCache) Set(entity params.GithubEntity, tools []commonParams.RunnerApplicationDownload) { + g.mux.Lock() + defer g.mux.Unlock() + + g.entities[entity.String()] = GithubEntityTools{ + updatedAt: time.Now(), + entity: entity, + tools: tools, + } +} + +func SetGithubToolsCache(entity params.GithubEntity, tools []commonParams.RunnerApplicationDownload) { + githubToolsCache.Set(entity, tools) +} + +func GetGithubToolsCache(entity params.GithubEntity) ([]commonParams.RunnerApplicationDownload, bool) { + return githubToolsCache.Get(entity) +} diff --git a/database/common/store.go b/database/common/store.go index 860ed8ac..c732400a 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -163,4 +163,5 @@ type Store interface { ControllerInfo() (params.ControllerInfo, error) InitController() (params.ControllerInfo, error) GetGithubEntity(_ context.Context, entityType params.GithubEntityType, entityID string) (params.GithubEntity, error) + AddEntityEvent(ctx context.Context, entity params.GithubEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error } diff --git a/database/sql/models.go b/database/sql/models.go index c44baceb..e443e75a 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -136,6 +136,17 @@ type ScaleSet struct { Instances []Instance `gorm:"foreignKey:ScaleSetFkID"` } +type RepositoryEvent struct { + gorm.Model + + EventType params.EventType + EventLevel params.EventLevel + Message string `gorm:"type:text"` + + RepoID uuid.UUID `gorm:"index:idx_repo_event"` + Repo Repository `gorm:"foreignKey:RepoID"` +} + type Repository struct { Base @@ -154,8 +165,20 @@ type Repository struct { EndpointName *string `gorm:"index:idx_owner_nocase,unique,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` + + Events []RepositoryEvent `gorm:"foreignKey:RepoID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` } +type OrganizationEvent struct { + gorm.Model + + EventType params.EventType + EventLevel params.EventLevel + Message string `gorm:"type:text"` + + OrgID uuid.UUID `gorm:"index:idx_org_event"` + Org Organization `gorm:"foreignKey:OrgID"` +} type Organization struct { Base @@ -173,6 +196,19 @@ type Organization struct { EndpointName *string `gorm:"index:idx_org_name_nocase,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` + + Events []OrganizationEvent `gorm:"foreignKey:OrgID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` +} + +type EnterpriseEvent struct { + gorm.Model + + EventType params.EventType + EventLevel params.EventLevel + Message string `gorm:"type:text"` + + EnterpriseID uuid.UUID `gorm:"index:idx_enterprise_event"` + Enterprise Enterprise `gorm:"foreignKey:EnterpriseID"` } type Enterprise struct { @@ -192,6 +228,8 @@ type Enterprise struct { EndpointName *string `gorm:"index:idx_ent_name_nocase,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` + + Events []EnterpriseEvent `gorm:"foreignKey:EnterpriseID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` } type Address struct { diff --git a/database/sql/sql.go b/database/sql/sql.go index 4d23d253..a704d9c3 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -423,6 +423,9 @@ func (s *sqlDatabase) migrateDB() error { &Repository{}, &Organization{}, &Enterprise{}, + 
&EnterpriseEvent{},
+	&OrganizationEvent{},
+	&RepositoryEvent{},
 	&Address{},
 	&InstanceStatusUpdate{},
 	&Instance{},
diff --git a/database/sql/util.go b/database/sql/util.go
index fb627814..c1a44cb8 100644
--- a/database/sql/util.go
+++ b/database/sql/util.go
@@ -631,3 +631,136 @@ func (s *sqlDatabase) GetGithubEntity(_ context.Context, entityType params.Githu
 	}
 	return entity, nil
 }
+
+func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error {
+	repo, err := s.GetRepositoryByID(ctx, repoID)
+	if err != nil {
+		return errors.Wrap(err, "fetching repository")
+	}
+
+	msg := RepositoryEvent{
+		Message:    statusMessage,
+		EventType:  event,
+		EventLevel: eventLevel,
+	}
+
+	if err := s.conn.Model(&repo).Association("Events").Append(&msg); err != nil {
+		return errors.Wrap(err, "adding status message")
+	}
+
+	if maxEvents > 0 {
+		repoID, err := uuid.Parse(repo.ID)
+		if err != nil {
+			return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id")
+		}
+		var latestEvents []RepositoryEvent
+		q := s.conn.Model(&RepositoryEvent{}).
+			Limit(maxEvents).Order("id desc").
+			Where("repo_id = ?", repoID).Find(&latestEvents)
+		if q.Error != nil {
+			return errors.Wrap(q.Error, "fetching latest events")
+		}
+		if len(latestEvents) == maxEvents {
+			lastInList := latestEvents[len(latestEvents)-1]
+			if err := s.conn.Where("repo_id = ? and id < ?", repoID, lastInList.ID).Unscoped().Delete(&RepositoryEvent{}).Error; err != nil {
+				return errors.Wrap(err, "deleting old events")
+			}
+		}
+	}
+	return nil
+}
+
+func (s *sqlDatabase) addOrgEvent(ctx context.Context, orgID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error {
+	org, err := s.GetOrganizationByID(ctx, orgID)
+	if err != nil {
+		return errors.Wrap(err, "fetching organization")
+	}
+
+	msg := OrganizationEvent{
+		Message:    statusMessage,
+		EventType:  event,
+		EventLevel: eventLevel,
+	}
+
+	if err := s.conn.Model(&org).Association("Events").Append(&msg); err != nil {
+		return errors.Wrap(err, "adding status message")
+	}
+
+	if maxEvents > 0 {
+		orgID, err := uuid.Parse(org.ID)
+		if err != nil {
+			return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id")
+		}
+		var latestEvents []OrganizationEvent
+		q := s.conn.Model(&OrganizationEvent{}).
+			Limit(maxEvents).Order("id desc").
+			Where("org_id = ?", orgID).Find(&latestEvents)
+		if q.Error != nil {
+			return errors.Wrap(q.Error, "fetching latest events")
+		}
+		if len(latestEvents) == maxEvents {
+			lastInList := latestEvents[len(latestEvents)-1]
+			if err := s.conn.Where("org_id = ? and id < ?", orgID, lastInList.ID).Unscoped().Delete(&OrganizationEvent{}).Error; err != nil {
+				return errors.Wrap(err, "deleting old events")
+			}
+		}
+	}
+	return nil
+}
+
+func (s *sqlDatabase) addEnterpriseEvent(ctx context.Context, entID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error {
+	ent, err := s.GetEnterpriseByID(ctx, entID)
+	if err != nil {
+		return errors.Wrap(err, "fetching enterprise")
+	}
+
+	msg := EnterpriseEvent{
+		Message:    statusMessage,
+		EventType:  event,
+		EventLevel: eventLevel,
+	}
+
+	if err := s.conn.Model(&ent).Association("Events").Append(&msg); err != nil {
+		return errors.Wrap(err, "adding status message")
+	}
+
+	if maxEvents > 0 {
+		entID, err := uuid.Parse(ent.ID)
+		if err != nil {
+			return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id")
+		}
+		var latestEvents []EnterpriseEvent
+		q := s.conn.Model(&EnterpriseEvent{}).
+			Limit(maxEvents).Order("id desc").
+			Where("enterprise_id = ?", entID).Find(&latestEvents)
+		if q.Error != nil {
+			return errors.Wrap(q.Error, "fetching latest events")
+		}
+		if len(latestEvents) == maxEvents {
+			lastInList := latestEvents[len(latestEvents)-1]
+			if err := s.conn.Where("enterprise_id = ? and id < ?", entID, lastInList.ID).Unscoped().Delete(&EnterpriseEvent{}).Error; err != nil {
+				return errors.Wrap(err, "deleting old events")
+			}
+		}
+	}
+
+	return nil
+}
+
+func (s *sqlDatabase) AddEntityEvent(ctx context.Context, entity params.GithubEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error {
+	if maxEvents == 0 {
+		return errors.Wrap(runnerErrors.ErrBadRequest, "max events cannot be 0")
+	}
+	// TODO(gabriel-samfira): Should we send watcher notifications for events?
+	// Not sure it's of any value.
+ switch entity.EntityType { + case params.GithubEntityTypeRepository: + return s.addRepositoryEvent(ctx, entity.ID, event, eventLevel, statusMessage, maxEvents) + case params.GithubEntityTypeOrganization: + return s.addOrgEvent(ctx, entity.ID, event, eventLevel, statusMessage, maxEvents) + case params.GithubEntityTypeEnterprise: + return s.addEnterpriseEvent(ctx, entity.ID, event, eventLevel, statusMessage, maxEvents) + default: + return errors.Wrap(runnerErrors.ErrBadRequest, "invalid entity type") + } +} diff --git a/params/github.go b/params/github.go index 2d132d50..9eec6e8c 100644 --- a/params/github.go +++ b/params/github.go @@ -402,6 +402,18 @@ type RunnerScaleSetMessage struct { Statistics *RunnerScaleSetStatistic `json:"statistics"` } +func (r RunnerScaleSetMessage) GetJobsFromBody() ([]ScaleSetJobMessage, error) { + var body []ScaleSetJobMessage + if r.Body == "" { + return nil, fmt.Errorf("no body specified") + } + + if err := json.Unmarshal([]byte(r.Body), &body); err != nil { + return nil, fmt.Errorf("failed to unmarshal body: %w", err) + } + return body, nil +} + type RunnerReference struct { ID int `json:"id"` Name string `json:"name"` @@ -469,3 +481,23 @@ type RunnerGroupList struct { Count int `json:"count"` RunnerGroups []RunnerGroup `json:"value"` } + +type ScaleSetJobMessage struct { + MessageType string `json:"messageType,omitempty"` + RunnerRequestId int64 `json:"runnerRequestId,omitempty"` + RepositoryName string `json:"repositoryName,omitempty"` + OwnerName string `json:"ownerName,omitempty"` + JobWorkflowRef string `json:"jobWorkflowRef,omitempty"` + JobDisplayName string `json:"jobDisplayName,omitempty"` + WorkflowRunId int64 `json:"workflowRunId,omitempty"` + EventName string `json:"eventName,omitempty"` + RequestLabels []string `json:"requestLabels,omitempty"` + QueueTime time.Time `json:"queueTime,omitempty"` + ScaleSetAssignTime time.Time `json:"scaleSetAssignTime,omitempty"` + RunnerAssignTime time.Time `json:"runnerAssignTime,omitempty"` + FinishTime time.Time `json:"finishTime,omitempty"` + Result string `json:"result,omitempty"` + RunnerId int `json:"runnerId,omitempty"` + RunnerName string `json:"runnerName,omitempty"` + AcquireJobUrl string `json:"acquireJobUrl,omitempty"` +} diff --git a/runner/scalesets.go b/runner/scalesets.go index 34ba699a..ef45a783 100644 --- a/runner/scalesets.go +++ b/runner/scalesets.go @@ -94,6 +94,7 @@ func (r *Runner) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error return errors.Wrap(err, "getting scaleset client") } + slog.DebugContext(ctx, "deleting scale set", "scale_set_id", scaleSet.ScaleSetID) if err := scalesetCli.DeleteRunnerScaleSet(ctx, scaleSet.ScaleSetID); err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { slog.InfoContext(ctx, "scale set not found", "scale_set_id", scaleSet.ScaleSetID) diff --git a/workers/entity/controller.go b/workers/entity/controller.go index c1547302..1e0035c0 100644 --- a/workers/entity/controller.go +++ b/workers/entity/controller.go @@ -74,7 +74,6 @@ func (c *Controller) loadAllRepositories() error { if err != nil { return fmt.Errorf("creating worker: %w", err) } - slog.DebugContext(c.ctx, "starting entity worker", "entity_id", entity.ID, "entity_type", entity.EntityType) if err := worker.Start(); err != nil { return fmt.Errorf("starting worker: %w", err) } @@ -99,7 +98,6 @@ func (c *Controller) loadAllOrganizations() error { if err != nil { return fmt.Errorf("creating worker: %w", err) } - slog.DebugContext(c.ctx, "starting entity worker", "entity_id", entity.ID, 
"entity_type", entity.EntityType) if err := worker.Start(); err != nil { return fmt.Errorf("starting worker: %w", err) } @@ -124,7 +122,6 @@ func (c *Controller) loadAllEnterprises() error { if err != nil { return fmt.Errorf("creating worker: %w", err) } - slog.DebugContext(c.ctx, "starting entity worker", "entity_id", entity.ID, "entity_type", entity.EntityType) if err := worker.Start(); err != nil { return fmt.Errorf("starting worker: %w", err) } @@ -172,6 +169,7 @@ func (c *Controller) Start() error { } func (c *Controller) Stop() error { + slog.DebugContext(c.ctx, "stopping entity controller", "entity", c.consumerID) c.mux.Lock() defer c.mux.Unlock() if !c.running { diff --git a/workers/entity/worker.go b/workers/entity/worker.go index 2a8591cb..49fb75cb 100644 --- a/workers/entity/worker.go +++ b/workers/entity/worker.go @@ -50,6 +50,7 @@ type Worker struct { } func (w *Worker) Stop() error { + slog.DebugContext(w.ctx, "stopping entity worker", "entity", w.consumerID) w.mux.Lock() defer w.mux.Unlock() @@ -69,7 +70,8 @@ func (w *Worker) Stop() error { return nil } -func (w *Worker) Start() error { +func (w *Worker) Start() (err error) { + slog.DebugContext(w.ctx, "starting entity worker", "entity", w.consumerID) w.mux.Lock() defer w.mux.Unlock() @@ -77,8 +79,19 @@ func (w *Worker) Start() error { if err != nil { return fmt.Errorf("creating scale set controller: %w", err) } + + if err := scaleSetController.Start(); err != nil { + return fmt.Errorf("starting scale set controller: %w", err) + } w.scaleSetController = scaleSetController + defer func() { + if err != nil { + w.scaleSetController.Stop() + w.scaleSetController = nil + } + }() + consumer, err := watcher.RegisterConsumer( w.ctx, w.consumerID, composeWorkerWatcherFilters(w.Entity), @@ -90,6 +103,7 @@ func (w *Worker) Start() error { w.running = true w.quit = make(chan struct{}) + go w.loop() return nil } diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index de426621..809a2cba 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -2,20 +2,21 @@ package scaleset import ( "context" + "errors" "fmt" "log/slog" "sync" "time" - commonParams "github.com/cloudbase/garm-provider-common/params" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" garmUtil "github.com/cloudbase/garm/util" "github.com/cloudbase/garm/util/github" - "github.com/cloudbase/garm/util/github/scalesets" ) func NewController(ctx context.Context, store dbCommon.Store, entity params.GithubEntity, providers map[string]common.Provider) (*Controller, error) { @@ -76,9 +77,7 @@ type Controller struct { store dbCommon.Store providers map[string]common.Provider - tools []commonParams.RunnerApplicationDownload ghCli common.GithubClient - scaleSetCli *scalesets.ScaleSetClient forgeCredsAreValid bool statusUpdates chan scaleSetStatus @@ -88,14 +87,15 @@ type Controller struct { quit chan struct{} } -func (c *Controller) loadAllScaleSets() error { +func (c *Controller) loadAllScaleSets(cli common.GithubClient) error { scaleSets, err := c.store.ListEntityScaleSets(c.ctx, c.Entity) if err != nil { return fmt.Errorf("listing scale sets: %w", err) } for _, sSet := range scaleSets { - if err := c.handleScaleSetCreateOperation(sSet); err != nil { + slog.DebugContext(c.ctx, "loading scale set", "scale_set", 
sSet.ID) + if err := c.handleScaleSetCreateOperation(sSet, cli); err != nil { slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set create operation") continue } @@ -104,6 +104,7 @@ func (c *Controller) loadAllScaleSets() error { } func (c *Controller) Start() (err error) { + slog.DebugContext(c.ctx, "starting scale set controller", "scale_set", c.consumerID) c.mux.Lock() if c.running { c.mux.Unlock() @@ -111,15 +112,16 @@ func (c *Controller) Start() (err error) { } c.mux.Unlock() - if err := c.loadAllScaleSets(); err != nil { - return fmt.Errorf("loading all scale sets: %w", err) - } - ghCli, err := github.Client(c.ctx, c.Entity) if err != nil { return fmt.Errorf("creating github client: %w", err) } + slog.DebugContext(c.ctx, "loaging scale sets", "entity", c.Entity.String()) + if err := c.loadAllScaleSets(ghCli); err != nil { + return fmt.Errorf("loading all scale sets: %w", err) + } + consumer, err := watcher.RegisterConsumer( c.ctx, c.consumerID, composeControllerWatcherFilters(c.Entity), @@ -140,6 +142,7 @@ func (c *Controller) Start() (err error) { } func (c *Controller) Stop() error { + slog.DebugContext(c.ctx, "stopping scale set controller", "scale_set", c.consumerID) c.mux.Lock() defer c.mux.Unlock() @@ -170,15 +173,21 @@ func (c *Controller) updateTools() error { c.mux.Lock() defer c.mux.Unlock() + slog.DebugContext(c.ctx, "updating tools for entity", "entity", c.Entity.String()) + tools, err := garmUtil.FetchTools(c.ctx, c.ghCli) if err != nil { slog.With(slog.Any("error", err)).ErrorContext( c.ctx, "failed to update tools for entity", "entity", c.Entity.String()) - c.forgeCredsAreValid = false + if errors.Is(err, runnerErrors.ErrUnauthorized) { + // TODO: block all scale sets + c.forgeCredsAreValid = false + } return fmt.Errorf("failed to update tools for entity %s: %w", c.Entity.String(), err) } + slog.DebugContext(c.ctx, "tools successfully updated for entity", "entity", c.Entity.String()) c.forgeCredsAreValid = true - c.tools = tools + cache.SetGithubToolsCache(c.Entity, tools) return nil } @@ -202,7 +211,7 @@ func (c *Controller) loop() { updateToolsTicker := time.NewTicker(common.PoolToolUpdateInterval) initialToolUpdate := make(chan struct{}, 1) go func() { - slog.Info("running initial tool update") + slog.InfoContext(c.ctx, "running initial tool update") if err := c.updateTools(); err != nil { slog.With(slog.Any("error", err)).Error("failed to update tools") } @@ -211,7 +220,11 @@ func (c *Controller) loop() { for { select { - case payload := <-c.consumer.Watch(): + case payload, ok := <-c.consumer.Watch(): + if !ok { + slog.InfoContext(c.ctx, "consumer channel closed") + return + } slog.InfoContext(c.ctx, "received payload", slog.Any("payload", payload)) go c.handleWatcherEvent(payload) case <-c.ctx.Done(): diff --git a/workers/scaleset/controller_watcher.go b/workers/scaleset/controller_watcher.go index 4b347f59..591e768e 100644 --- a/workers/scaleset/controller_watcher.go +++ b/workers/scaleset/controller_watcher.go @@ -6,8 +6,8 @@ import ( dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/runner/common" "github.com/cloudbase/garm/util/github" - scaleSetCli "github.com/cloudbase/garm/util/github/scalesets" ) func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) { @@ -38,7 +38,7 @@ func (c *Controller) handleScaleSet(event dbCommon.ChangePayload) { switch event.Operation { case dbCommon.CreateOperation: slog.DebugContext(c.ctx, "got create operation for 
scale set", "scale_set_id", scaleSet.ID, "scale_set_name", scaleSet.Name) - if err := c.handleScaleSetCreateOperation(scaleSet); err != nil { + if err := c.handleScaleSetCreateOperation(scaleSet, c.ghCli); err != nil { slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set create operation") } case dbCommon.UpdateOperation: @@ -57,7 +57,7 @@ func (c *Controller) handleScaleSet(event dbCommon.ChangePayload) { } } -func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet) error { +func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet, ghCli common.GithubClient) error { c.mux.Lock() defer c.mux.Unlock() @@ -74,7 +74,7 @@ func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet) error { return fmt.Errorf("provider %s not found for scale set %s", sSet.ProviderName, sSet.Name) } - worker, err := NewWorker(c.ctx, c.store, sSet, provider) + worker, err := NewWorker(c.ctx, c.store, sSet, provider, ghCli) if err != nil { return fmt.Errorf("creating scale set worker: %w", err) } @@ -120,7 +120,7 @@ func (c *Controller) handleScaleSetUpdateOperation(sSet params.ScaleSet) error { // Some error may have occured when the scale set was first created, so we // attempt to create it after the user updated the scale set, hopefully // fixing the reason for the failure. - return c.handleScaleSetCreateOperation(sSet) + return c.handleScaleSetCreateOperation(sSet, c.ghCli) } // We let the watcher in the scale set worker handle the update operation. return nil @@ -140,8 +140,7 @@ func (c *Controller) handleCredentialsEvent(event dbCommon.ChangePayload) { defer c.mux.Unlock() if c.Entity.Credentials.ID != credentials.ID { - // credentials were swapped on the entity. We need to recompose the watcher - // filters. + // stale update event. 
return } c.Entity.Credentials = credentials @@ -190,15 +189,10 @@ func (c *Controller) updateAndBroadcastCredentials(entity params.GithubEntity) e return fmt.Errorf("creating github client: %w", err) } - setCli, err := scaleSetCli.NewClient(ghCli) - if err != nil { - return fmt.Errorf("creating scaleset client: %w", err) - } c.ghCli = ghCli - c.scaleSetCli = setCli for _, scaleSet := range c.ScaleSets { - if err := scaleSet.worker.SetGithubClient(ghCli, setCli); err != nil { + if err := scaleSet.worker.SetGithubClient(ghCli); err != nil { slog.ErrorContext(c.ctx, "setting github client on worker", "error", err) continue } diff --git a/workers/scaleset/interfaces.go b/workers/scaleset/interfaces.go new file mode 100644 index 00000000..365ac0be --- /dev/null +++ b/workers/scaleset/interfaces.go @@ -0,0 +1,12 @@ +package scaleset + +import ( + "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/util/github/scalesets" +) + +type scaleSetHelper interface { + ScaleSetCLI() *scalesets.ScaleSetClient + GetScaleSet() params.ScaleSet + Owner() string +} diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index 0592a8cf..c5e31b5d 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -2,34 +2,54 @@ package scaleset import ( "context" + "fmt" + "log/slog" "sync" + "time" - commonParams "github.com/cloudbase/garm-provider-common/params" dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" "github.com/cloudbase/garm/util/github/scalesets" ) -func NewWorker(ctx context.Context, store dbCommon.Store, scaleSet params.ScaleSet, provider common.Provider) (*Worker, error) { +func NewWorker(ctx context.Context, store dbCommon.Store, scaleSet params.ScaleSet, provider common.Provider, ghCli common.GithubClient) (*Worker, error) { + consumerID := fmt.Sprintf("scaleset-worker-%s-%d", scaleSet.Name, scaleSet.ID) + controllerInfo, err := store.ControllerInfo() + if err != nil { + return nil, fmt.Errorf("getting controller info: %w", err) + } + scaleSetCli, err := scalesets.NewClient(ghCli) + if err != nil { + return nil, fmt.Errorf("creating scale set client: %w", err) + } return &Worker{ - ctx: ctx, - store: store, - provider: provider, - Entity: scaleSet, + ctx: ctx, + controllerInfo: controllerInfo, + consumerID: consumerID, + store: store, + provider: provider, + Entity: scaleSet, + ghCli: ghCli, + scaleSetCli: scaleSetCli, }, nil } type Worker struct { - ctx context.Context + ctx context.Context + consumerID string + controllerInfo params.ControllerInfo provider common.Provider store dbCommon.Store Entity params.ScaleSet - tools []commonParams.RunnerApplicationDownload ghCli common.GithubClient scaleSetCli *scalesets.ScaleSetClient + consumer dbCommon.Consumer + + listener *scaleSetListener mux sync.Mutex running bool @@ -37,40 +57,173 @@ type Worker struct { } func (w *Worker) Stop() error { - return nil -} - -func (w *Worker) Start() error { + slog.DebugContext(w.ctx, "stopping scale set worker", "scale_set", w.consumerID) w.mux.Lock() defer w.mux.Unlock() + if !w.running { + return nil + } + + w.consumer.Close() + w.running = false + if w.quit != nil { + close(w.quit) + w.quit = nil + } + w.listener.Stop() + w.listener = nil + return nil +} + +func (w *Worker) Start() (err error) { + slog.DebugContext(w.ctx, "starting scale set worker", "scale_set", w.consumerID) + w.mux.Lock() + defer w.mux.Unlock() + + if w.running { + return 
nil + } + + consumer, err := watcher.RegisterConsumer( + w.ctx, w.consumerID, + watcher.WithAll( + watcher.WithScaleSetFilter(w.Entity), + watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), + ), + ) + if err != nil { + return fmt.Errorf("error registering consumer: %w", err) + } + defer func() { + if err != nil { + consumer.Close() + w.consumer = nil + } + }() + + slog.DebugContext(w.ctx, "creating scale set listener") + listener := newListener(w.ctx, w) + + slog.DebugContext(w.ctx, "starting scale set listener") + if err := listener.Start(); err != nil { + return fmt.Errorf("error starting listener: %w", err) + } + + w.listener = listener + w.consumer = consumer + w.running = true + w.quit = make(chan struct{}) + + slog.DebugContext(w.ctx, "starting scale set worker loops", "scale_set", w.consumerID) go w.loop() + go w.keepListenerAlive() return nil } -func (w *Worker) SetTools(tools []commonParams.RunnerApplicationDownload) { +func (w *Worker) SetGithubClient(client common.GithubClient) error { w.mux.Lock() defer w.mux.Unlock() - w.tools = tools -} - -func (w *Worker) SetGithubClient(client common.GithubClient, scaleSetCli *scalesets.ScaleSetClient) error { - w.mux.Lock() - defer w.mux.Unlock() - - // TODO: - // * stop current listener if any + if err := w.listener.Stop(); err != nil { + slog.ErrorContext(w.ctx, "error stopping listener", "error", err) + } w.ghCli = client + scaleSetCli, err := scalesets.NewClient(client) + if err != nil { + return fmt.Errorf("error creating scale set client: %w", err) + } w.scaleSetCli = scaleSetCli - - // TODO: - // * start new listener - return nil } +func (w *Worker) handleEvent(event dbCommon.ChangePayload) { + scaleSet, ok := event.Payload.(params.ScaleSet) + if !ok { + slog.ErrorContext(w.ctx, "invalid payload for scale set type", "scale_set_type", event.EntityType, "payload", event.Payload) + return + } + switch event.Operation { + case dbCommon.UpdateOperation: + slog.DebugContext(w.ctx, "got update operation") + w.mux.Lock() + w.Entity = scaleSet + w.mux.Unlock() + default: + slog.DebugContext(w.ctx, "invalid operation type; ignoring", "operation_type", event.Operation) + } +} + func (w *Worker) loop() { + defer w.Stop() + for { + select { + case <-w.quit: + return + case event, ok := <-w.consumer.Watch(): + if !ok { + slog.InfoContext(w.ctx, "consumer channel closed") + return + } + go w.handleEvent(event) + case <-w.ctx.Done(): + slog.DebugContext(w.ctx, "context done") + return + } + } +} + +func (w *Worker) sleepWithCancel(sleepTime time.Duration) (canceled bool) { + ticker := time.NewTicker(sleepTime) + defer ticker.Stop() + + select { + case <-ticker.C: + return false + case <-w.quit: + return true + case <-w.ctx.Done(): + return true + } +} + +func (w *Worker) keepListenerAlive() { + var backoff time.Duration + for { + select { + case <-w.quit: + return + case <-w.ctx.Done(): + return + case <-w.listener.Wait(): + slog.DebugContext(w.ctx, "listener is stopped; attempting to restart") + for { + w.mux.Lock() + w.listener.Stop() //cleanup + slog.DebugContext(w.ctx, "attempting to restart") + if err := w.listener.Start(); err != nil { + w.mux.Unlock() + slog.ErrorContext(w.ctx, "error restarting listener", "error", err) + if backoff > 60*time.Second { + backoff = 60 * time.Second + } else if backoff == 0 { + backoff = 5 * time.Second + slog.InfoContext(w.ctx, "backing off restart attempt", "backoff", backoff) + } else { + backoff *= 2 + } + slog.ErrorContext(w.ctx, "error restarting listener", "error", err, "backoff", backoff) + if 
canceled := w.sleepWithCancel(backoff); canceled { + slog.DebugContext(w.ctx, "listener restart canceled") + return + } + continue + } + w.mux.Unlock() + break + } + } + } } diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go new file mode 100644 index 00000000..8cfa9264 --- /dev/null +++ b/workers/scaleset/scaleset_helper.go @@ -0,0 +1,20 @@ +package scaleset + +import ( + "fmt" + + "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/util/github/scalesets" +) + +func (w *Worker) ScaleSetCLI() *scalesets.ScaleSetClient { + return w.scaleSetCli +} + +func (w *Worker) GetScaleSet() params.ScaleSet { + return w.Entity +} + +func (w *Worker) Owner() string { + return fmt.Sprintf("garm-%s", w.controllerInfo.ControllerID) +} diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go new file mode 100644 index 00000000..f92eaff1 --- /dev/null +++ b/workers/scaleset/scaleset_listener.go @@ -0,0 +1,134 @@ +package scaleset + +import ( + "context" + "errors" + "fmt" + "log/slog" + "sync" + + "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/util/github/scalesets" +) + +func newListener(ctx context.Context, scaleSetHelper scaleSetHelper) *scaleSetListener { + return &scaleSetListener{ + ctx: ctx, + scaleSetHelper: scaleSetHelper, + } +} + +type scaleSetListener struct { + // ctx is the global context for the worker + ctx context.Context + // listenerCtx is the context for the listener. We pass this + // context to GetMessages() which blocks until a message is + // available. We need to be able to cancel that longpoll request + // independent of the worker context, in case we need to restart + // the listener without restarting the worker. + listenerCtx context.Context + cancelFunc context.CancelFunc + lastMessageID int64 + + scaleSetHelper scaleSetHelper + messageSession *scalesets.MessageSession + + mux sync.Mutex + running bool + quit chan struct{} +} + +func (l *scaleSetListener) Start() error { + slog.DebugContext(l.ctx, "starting scale set listener", "scale_set", l.scaleSetHelper.GetScaleSet().ScaleSetID) + l.mux.Lock() + defer l.mux.Unlock() + + l.listenerCtx, l.cancelFunc = context.WithCancel(context.Background()) + scaleSet := l.scaleSetHelper.GetScaleSet() + slog.DebugContext(l.ctx, "creating new message session", "scale_set", scaleSet.ScaleSetID) + session, err := l.scaleSetHelper.ScaleSetCLI().CreateMessageSession( + l.listenerCtx, scaleSet.ScaleSetID, + l.scaleSetHelper.Owner(), + ) + if err != nil { + return fmt.Errorf("creating message session: %w", err) + } + l.messageSession = session + l.quit = make(chan struct{}) + l.running = true + go l.loop() + + return nil +} + +func (l *scaleSetListener) Stop() error { + l.mux.Lock() + defer l.mux.Unlock() + + if !l.running { + return nil + } + + if l.messageSession != nil { + slog.DebugContext(l.ctx, "closing message session", "scale_set", l.scaleSetHelper.GetScaleSet().ScaleSetID) + if err := l.messageSession.Close(); err != nil { + slog.ErrorContext(l.ctx, "closing message session", "error", err) + } + if err := l.scaleSetHelper.ScaleSetCLI().DeleteMessageSession(context.Background(), l.messageSession); err != nil { + slog.ErrorContext(l.ctx, "error deleting message session", "error", err) + } + } + l.cancelFunc() + l.messageSession.Close() + l.running = false + close(l.quit) + return nil +} + +func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage) { + l.mux.Lock() + defer l.mux.Unlock() + body, err := msg.GetJobsFromBody() 
+ if err != nil { + slog.ErrorContext(l.ctx, "getting jobs from body", "error", err) + return + } + slog.InfoContext(l.ctx, "handling message", "message", msg, "body", body) + l.lastMessageID = msg.MessageID +} + +func (l *scaleSetListener) loop() { + defer l.Stop() + + slog.DebugContext(l.ctx, "starting scale set listener loop", "scale_set", l.scaleSetHelper.GetScaleSet().ScaleSetID) + for { + select { + case <-l.quit: + return + case <-l.listenerCtx.Done(): + slog.DebugContext(l.ctx, "stopping scale set listener") + return + case <-l.ctx.Done(): + slog.DebugContext(l.ctx, "scaleset worker has stopped") + return + default: + slog.DebugContext(l.ctx, "getting message") + msg, err := l.messageSession.GetMessage( + l.listenerCtx, l.lastMessageID, l.scaleSetHelper.GetScaleSet().MaxRunners) + if err != nil { + if !errors.Is(err, context.Canceled) { + slog.ErrorContext(l.ctx, "getting message", "error", err) + } + return + } + l.handleSessionMessage(msg) + } + } +} + +func (l *scaleSetListener) Wait() <-chan struct{} { + if !l.running { + return nil + } + return l.listenerCtx.Done() +} From 19ba210804069d08cbfb22793fb6bf5d3f9704d7 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 16 Apr 2025 23:07:29 +0000 Subject: [PATCH 012/179] Several fixes * Close response body in scaleset client * Wait for message listener loop to exit before attempting restart * Add LastMessageID field to scaleset model and function to update it Signed-off-by: Gabriel Adrian Samfira --- database/common/store.go | 1 + database/sql/models.go | 1 + database/sql/scalesets.go | 12 +++++ database/sql/util.go | 1 + params/github.go | 4 ++ params/params.go | 2 + util/github/scalesets/message_sessions.go | 40 +++++++++++++---- util/github/scalesets/scalesets.go | 7 ++- workers/scaleset/interfaces.go | 1 + workers/scaleset/scaleset.go | 6 +++ workers/scaleset/scaleset_helper.go | 7 +++ workers/scaleset/scaleset_listener.go | 55 +++++++++++++++++++---- 12 files changed, 118 insertions(+), 19 deletions(-) diff --git a/database/common/store.go b/database/common/store.go index c732400a..a2b2cf77 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -143,6 +143,7 @@ type ScaleSetsStore interface { GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) (err error) ListScaleSetInstances(_ context.Context, scalesetID uint) ([]params.Instance, error) + SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error } //go:generate mockery --name=Store diff --git a/database/sql/models.go b/database/sql/models.go index e443e75a..5b4d86f9 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -118,6 +118,7 @@ type ScaleSet struct { OSType commonParams.OSType OSArch commonParams.OSArch Enabled bool + LastMessageID int64 // ExtraSpecs is an opaque json that gets sent to the provider // as part of the bootstrap params for instances. It can contain // any kind of data needed by providers. 
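The "wait for the message listener loop to exit before attempting restart" change in this patch follows a common Go supervision pattern: the loop closes a dedicated channel when it returns, and the supervisor blocks on that channel before it restarts anything. A minimal, self-contained sketch of the pattern follows; the listener type, timings and messages are illustrative only, not garm's actual code:

package main

import (
	"fmt"
	"time"
)

// listener is a hypothetical stand-in for the scale set listener: Start
// launches the loop, loopExited is closed when that loop returns, and Wait
// exposes it so a supervisor can block until the loop has fully stopped.
type listener struct {
	quit       chan struct{}
	loopExited chan struct{}
}

func (l *listener) Start() {
	l.quit = make(chan struct{})
	l.loopExited = make(chan struct{})
	go func() {
		// Closing loopExited here signals that the goroutine has really
		// returned, not merely that it was asked to stop.
		defer close(l.loopExited)
		for {
			select {
			case <-l.quit:
				return
			case <-time.After(100 * time.Millisecond):
				// one unit of work (long poll for messages, etc.)
			}
		}
	}()
}

func (l *listener) Stop() { close(l.quit) }

// Wait returns a channel that is closed only after the loop has exited.
func (l *listener) Wait() <-chan struct{} { return l.loopExited }

func main() {
	l := &listener{}
	l.Start()
	time.Sleep(250 * time.Millisecond)
	l.Stop()
	<-l.Wait() // restart only after the old loop is gone
	fmt.Println("listener loop exited; safe to restart")
}

Waiting on the loop's own exit channel (rather than on quit) guarantees the old goroutine has fully returned before a new one starts, so two loops never poll the same message session at once.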
diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go index 3a5d8431..7a67f2d6 100644 --- a/database/sql/scalesets.go +++ b/database/sql/scalesets.go @@ -379,3 +379,15 @@ func (s *sqlDatabase) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) ( } return nil } + +func (s *sqlDatabase) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error { + if err := s.conn.Transaction(func(tx *gorm.DB) error { + if q := tx.Model(&ScaleSet{}).Where("id = ?", scaleSetID).Update("last_message_id", lastMessageID); q.Error != nil { + return errors.Wrap(q.Error, "saving database entry") + } + return nil + }); err != nil { + return errors.Wrap(err, "setting last message ID") + } + return nil +} diff --git a/database/sql/util.go b/database/sql/util.go index c1a44cb8..c7b64961 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -309,6 +309,7 @@ func (s *sqlDatabase) sqlToCommonScaleSet(scaleSet ScaleSet) (params.ScaleSet, e GitHubRunnerGroup: scaleSet.GitHubRunnerGroup, State: scaleSet.State, ExtendedState: scaleSet.ExtendedState, + LastMessageID: scaleSet.LastMessageID, } if scaleSet.RepoID != nil { diff --git a/params/github.go b/params/github.go index 9eec6e8c..b609e682 100644 --- a/params/github.go +++ b/params/github.go @@ -402,6 +402,10 @@ type RunnerScaleSetMessage struct { Statistics *RunnerScaleSetStatistic `json:"statistics"` } +func (r RunnerScaleSetMessage) IsNil() bool { + return r.MessageID == 0 && r.MessageType == "" && r.Body == "" && r.Statistics == nil +} + func (r RunnerScaleSetMessage) GetJobsFromBody() ([]ScaleSetJobMessage, error) { var body []ScaleSetJobMessage if r.Body == "" { diff --git a/params/params.go b/params/params.go index b0a6492e..3ac0c0c5 100644 --- a/params/params.go +++ b/params/params.go @@ -472,6 +472,8 @@ type ScaleSet struct { EnterpriseID string `json:"enterprise_id,omitempty"` EnterpriseName string `json:"enterprise_name,omitempty"` + + LastMessageID int64 `json:"-"` } func (p ScaleSet) GithubEntity() (GithubEntity, error) { diff --git a/util/github/scalesets/message_sessions.go b/util/github/scalesets/message_sessions.go index e4152e08..5e260b96 100644 --- a/util/github/scalesets/message_sessions.go +++ b/util/github/scalesets/message_sessions.go @@ -31,6 +31,7 @@ import ( runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/params" + garmUtil "github.com/cloudbase/garm/util" ) const maxCapacityHeader = "X-ScaleSetMaxCapacity" @@ -63,16 +64,22 @@ func (m *MessageSession) LastError() error { } func (m *MessageSession) loop() { - timer := time.NewTimer(1 * time.Minute) + slog.DebugContext(m.ctx, "starting message session refresh loop", "session_id", m.session.SessionID.String()) + timer := time.NewTicker(1 * time.Minute) defer timer.Stop() + defer m.Close() + if m.closed { + slog.DebugContext(m.ctx, "message session refresh loop closed") return } for { select { case <-m.ctx.Done(): + slog.DebugContext(m.ctx, "message session refresh loop context done") return case <-m.done: + slog.DebugContext(m.ctx, "message session refresh loop done") return case <-timer.C: if err := m.maybeRefreshToken(m.ctx); err != nil { @@ -99,6 +106,7 @@ func (m *MessageSession) SessionsRelativeURL() (string, error) { } func (m *MessageSession) Refresh(ctx context.Context) error { + slog.DebugContext(ctx, "refreshing message session token", "session_id", m.session.SessionID.String()) m.mux.Lock() defer m.mux.Unlock() @@ -114,13 +122,15 @@ func (m *MessageSession) Refresh(ctx context.Context) 
error { if err != nil { return fmt.Errorf("failed to delete message session: %w", err) } + defer resp.Body.Close() var refreshedSession params.RunnerScaleSetSession if err := json.NewDecoder(resp.Body).Decode(&refreshedSession); err != nil { return fmt.Errorf("failed to decode response: %w", err) } - - m.session = &refreshedSession + slog.DebugContext(ctx, "refreshed message session token", "session_id", refreshedSession.SessionID.String()) + m.session.MessageQueueAccessToken = refreshedSession.MessageQueueAccessToken + m.session.Statistics = refreshedSession.Statistics return nil } @@ -129,16 +139,23 @@ func (m *MessageSession) maybeRefreshToken(ctx context.Context) error { return fmt.Errorf("session is nil") } // add some jitter - randInt, err := rand.Int(rand.Reader, big.NewInt(1000)) + randInt, err := rand.Int(rand.Reader, big.NewInt(5000)) if err != nil { return fmt.Errorf("failed to get a random number") } - jitter := time.Duration(randInt.Int64()) * time.Millisecond - if m.session.ExpiresIn(2*time.Minute + jitter) { + expiresAt, err := m.session.ExiresAt() + if err != nil { + return fmt.Errorf("failed to get expires at: %w", err) + } + expiresIn := time.Duration(randInt.Int64())*time.Millisecond + 10*time.Minute + slog.DebugContext(ctx, "checking if message session token needs refresh", "expires_at", expiresAt) + if m.session.ExpiresIn(expiresIn) { + slog.DebugContext(ctx, "refreshing message session token") if err := m.Refresh(ctx); err != nil { return fmt.Errorf("failed to refresh message queue token: %w", err) } } + return nil } @@ -170,6 +187,7 @@ func (m *MessageSession) GetMessage(ctx context.Context, lastMessageID int64, ma defer resp.Body.Close() if resp.StatusCode == http.StatusAccepted { + slog.DebugContext(ctx, "no messages available in queue") return params.RunnerScaleSetMessage{}, nil } @@ -200,8 +218,8 @@ func (m *MessageSession) DeleteMessage(ctx context.Context, messageID int64) err if err != nil { return err } - resp.Body.Close() + return nil } @@ -233,10 +251,13 @@ func (s *ScaleSetClient) CreateMessageSession(ctx context.Context, runnerScaleSe return nil, fmt.Errorf("failed to decode response: %w", err) } + msgSessionCtx := garmUtil.WithSlogContext( + ctx, + slog.Any("session_id", createdSession.SessionID.String())) sess := &MessageSession{ ssCli: s, session: &createdSession, - ctx: ctx, + ctx: msgSessionCtx, done: make(chan struct{}), closed: false, } @@ -256,11 +277,12 @@ func (s *ScaleSetClient) DeleteMessageSession(ctx context.Context, session *Mess return fmt.Errorf("failed to create message delete request: %w", err) } - _, err = s.Do(req) + resp, err := s.Do(req) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { return fmt.Errorf("failed to delete message session: %w", err) } } + defer resp.Body.Close() return nil } diff --git a/util/github/scalesets/scalesets.go b/util/github/scalesets/scalesets.go index f7ef2763..2aae493a 100644 --- a/util/github/scalesets/scalesets.go +++ b/util/github/scalesets/scalesets.go @@ -47,6 +47,7 @@ func (s *ScaleSetClient) GetRunnerScaleSetByNameAndRunnerGroup(ctx context.Conte if err != nil { return params.RunnerScaleSet{}, err } + defer resp.Body.Close() var runnerScaleSetList *params.RunnerScaleSetsResponse if err := json.NewDecoder(resp.Body).Decode(&runnerScaleSetList); err != nil { @@ -72,6 +73,7 @@ func (s *ScaleSetClient) GetRunnerScaleSetByID(ctx context.Context, runnerScaleS if err != nil { return params.RunnerScaleSet{}, fmt.Errorf("failed to get runner scaleset with ID %d: %w", runnerScaleSetID, err) } 
+ defer resp.Body.Close() var runnerScaleSet params.RunnerScaleSet if err := json.NewDecoder(resp.Body).Decode(&runnerScaleSet); err != nil { @@ -94,6 +96,7 @@ func (s *ScaleSetClient) ListRunnerScaleSets(ctx context.Context) (*params.Runne if err != nil { return nil, fmt.Errorf("failed to list runner scale sets: %w", err) } + defer resp.Body.Close() var runnerScaleSetList params.RunnerScaleSetsResponse if err := json.NewDecoder(resp.Body).Decode(&runnerScaleSetList); err != nil { @@ -119,6 +122,7 @@ func (s *ScaleSetClient) CreateRunnerScaleSet(ctx context.Context, runnerScaleSe if err != nil { return params.RunnerScaleSet{}, fmt.Errorf("failed to create runner scale set: %w", err) } + defer resp.Body.Close() var createdRunnerScaleSet params.RunnerScaleSet if err := json.NewDecoder(resp.Body).Decode(&createdRunnerScaleSet); err != nil { @@ -144,6 +148,7 @@ func (s *ScaleSetClient) UpdateRunnerScaleSet(ctx context.Context, runnerScaleSe if err != nil { return params.RunnerScaleSet{}, fmt.Errorf("failed to make request: %w", err) } + defer resp.Body.Close() var ret params.RunnerScaleSet if err := json.NewDecoder(resp.Body).Decode(&ret); err != nil { @@ -164,12 +169,12 @@ func (s *ScaleSetClient) DeleteRunnerScaleSet(ctx context.Context, runnerScaleSe if err != nil { return err } + defer resp.Body.Close() if resp.StatusCode != http.StatusNoContent { return fmt.Errorf("failed to delete scale set with code %d", resp.StatusCode) } - resp.Body.Close() return nil } diff --git a/workers/scaleset/interfaces.go b/workers/scaleset/interfaces.go index 365ac0be..7b96168d 100644 --- a/workers/scaleset/interfaces.go +++ b/workers/scaleset/interfaces.go @@ -8,5 +8,6 @@ import ( type scaleSetHelper interface { ScaleSetCLI() *scalesets.ScaleSetClient GetScaleSet() params.ScaleSet + SetLastMessageID(id int64) error Owner() string } diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index c5e31b5d..c392d5cd 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -148,6 +148,12 @@ func (w *Worker) handleEvent(event dbCommon.ChangePayload) { case dbCommon.UpdateOperation: slog.DebugContext(w.ctx, "got update operation") w.mux.Lock() + if scaleSet.MaxRunners < w.Entity.MaxRunners { + slog.DebugContext(w.ctx, "max runners changed; stopping listener") + if err := w.listener.Stop(); err != nil { + slog.ErrorContext(w.ctx, "error stopping listener", "error", err) + } + } w.Entity = scaleSet w.mux.Unlock() default: diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index 8cfa9264..abfd37c4 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -18,3 +18,10 @@ func (w *Worker) GetScaleSet() params.ScaleSet { func (w *Worker) Owner() string { return fmt.Sprintf("garm-%s", w.controllerInfo.ControllerID) } + +func (w *Worker) SetLastMessageID(id int64) error { + if err := w.store.SetScaleSetLastMessageID(w.ctx, w.Entity.ID, id); err != nil { + return fmt.Errorf("setting last message ID: %w", err) + } + return nil +} diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index f92eaff1..bf22b61b 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -7,6 +7,7 @@ import ( "log/slog" "sync" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/util/github/scalesets" ) @@ -15,6 +16,7 @@ func newListener(ctx context.Context, scaleSetHelper scaleSetHelper) 
*scaleSetLi return &scaleSetListener{ ctx: ctx, scaleSetHelper: scaleSetHelper, + lastMessageID: scaleSetHelper.GetScaleSet().LastMessageID, } } @@ -33,9 +35,10 @@ type scaleSetListener struct { scaleSetHelper scaleSetHelper messageSession *scalesets.MessageSession - mux sync.Mutex - running bool - quit chan struct{} + mux sync.Mutex + running bool + quit chan struct{} + loopExited chan struct{} } func (l *scaleSetListener) Start() error { @@ -56,6 +59,7 @@ func (l *scaleSetListener) Start() error { l.messageSession = session l.quit = make(chan struct{}) l.running = true + l.loopExited = make(chan struct{}) go l.loop() return nil @@ -78,10 +82,12 @@ func (l *scaleSetListener) Stop() error { slog.ErrorContext(l.ctx, "error deleting message session", "error", err) } } - l.cancelFunc() + l.messageSession.Close() l.running = false + l.listenerCtx = nil close(l.quit) + l.cancelFunc() return nil } @@ -91,14 +97,22 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage body, err := msg.GetJobsFromBody() if err != nil { slog.ErrorContext(l.ctx, "getting jobs from body", "error", err) - return } slog.InfoContext(l.ctx, "handling message", "message", msg, "body", body) - l.lastMessageID = msg.MessageID + if msg.MessageID < l.lastMessageID { + slog.DebugContext(l.ctx, "message is older than last message, ignoring") + } else { + l.lastMessageID = msg.MessageID + if err := l.scaleSetHelper.SetLastMessageID(msg.MessageID); err != nil { + slog.ErrorContext(l.ctx, "setting last message ID", "error", err) + } + } } func (l *scaleSetListener) loop() { + defer close(l.loopExited) defer l.Stop() + retryAfterUnauthorized := false slog.DebugContext(l.ctx, "starting scale set listener loop", "scale_set", l.scaleSetHelper.GetScaleSet().ScaleSetID) for { @@ -112,23 +126,46 @@ func (l *scaleSetListener) loop() { slog.DebugContext(l.ctx, "scaleset worker has stopped") return default: - slog.DebugContext(l.ctx, "getting message") + slog.DebugContext(l.ctx, "getting message", "last_message_id", l.lastMessageID, "max_runners", l.scaleSetHelper.GetScaleSet().MaxRunners) msg, err := l.messageSession.GetMessage( l.listenerCtx, l.lastMessageID, l.scaleSetHelper.GetScaleSet().MaxRunners) if err != nil { + if errors.Is(err, runnerErrors.ErrUnauthorized) { + if retryAfterUnauthorized { + slog.DebugContext(l.ctx, "unauthorized, stopping listener") + return + } + // The session manager refreshes the token automatically, but once we call + // GetMessage(), it blocks until a new message is sent on the longpoll. + // If there are no messages for a while, the token used to longpoll expires + // and we get an unauthorized error. We simply need to retry the request + // and it should use the refreshed token. If we fail a second time, we can + // return and the scaleset worker will attempt to restart the listener. 
+ retryAfterUnauthorized = true + slog.DebugContext(l.ctx, "got unauthorized error, retrying") + continue + } if !errors.Is(err, context.Canceled) { slog.ErrorContext(l.ctx, "getting message", "error", err) } + slog.DebugContext(l.ctx, "stopping scale set listener") return } - l.handleSessionMessage(msg) + retryAfterUnauthorized = false + if !msg.IsNil() { + l.handleSessionMessage(msg) + } } } } func (l *scaleSetListener) Wait() <-chan struct{} { + l.mux.Lock() if !l.running { + slog.DebugContext(l.ctx, "scale set listener is not running") + l.mux.Unlock() return nil } - return l.listenerCtx.Done() + l.mux.Unlock() + return l.loopExited } From 12f40a53525e475b93721138f2973c48559c10a4 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 17 Apr 2025 10:52:39 +0000 Subject: [PATCH 013/179] Fix refresh session It seems that Status is a string when you create a session, but a number when you refresh it (?). Signed-off-by: Gabriel Adrian Samfira --- params/github.go | 2 +- util/github/scalesets/message_sessions.go | 10 ++++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/params/github.go b/params/github.go index b609e682..9b0a1e43 100644 --- a/params/github.go +++ b/params/github.go @@ -251,7 +251,7 @@ type RunnerScaleSet struct { GetAcquirableJobsURL string `json:"getAcquirableJobsUrl,omitempty"` AcquireJobsURL string `json:"acquireJobsUrl,omitempty"` Statistics *RunnerScaleSetStatistic `json:"statistics,omitempty"` - Status string `json:"status,omitempty"` + Status interface{} `json:"status,omitempty"` Enabled *bool `json:"enabled,omitempty"` } diff --git a/util/github/scalesets/message_sessions.go b/util/github/scalesets/message_sessions.go index 5e260b96..efd684d4 100644 --- a/util/github/scalesets/message_sessions.go +++ b/util/github/scalesets/message_sessions.go @@ -59,6 +59,10 @@ func (m *MessageSession) Close() error { return nil } +func (m *MessageSession) MessageQueueAccessToken() string { + return m.session.MessageQueueAccessToken +} + func (m *MessageSession) LastError() error { return m.lastErr } @@ -128,9 +132,8 @@ func (m *MessageSession) Refresh(ctx context.Context) error { if err := json.NewDecoder(resp.Body).Decode(&refreshedSession); err != nil { return fmt.Errorf("failed to decode response: %w", err) } - slog.DebugContext(ctx, "refreshed message session token", "session_id", refreshedSession.SessionID.String()) - m.session.MessageQueueAccessToken = refreshedSession.MessageQueueAccessToken - m.session.Statistics = refreshedSession.Statistics + slog.DebugContext(ctx, "refreshed message session token", "session", refreshedSession) + m.session = &refreshedSession return nil } @@ -150,7 +153,6 @@ func (m *MessageSession) maybeRefreshToken(ctx context.Context) error { expiresIn := time.Duration(randInt.Int64())*time.Millisecond + 10*time.Minute slog.DebugContext(ctx, "checking if message session token needs refresh", "expires_at", expiresAt) if m.session.ExpiresIn(expiresIn) { - slog.DebugContext(ctx, "refreshing message session token") if err := m.Refresh(ctx); err != nil { return fmt.Errorf("failed to refresh message queue token: %w", err) } From c177c311478db625589f9f33ba56dd947e3c1c59 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 17 Apr 2025 10:53:22 +0000 Subject: [PATCH 014/179] Add some message handling Signed-off-by: Gabriel Adrian Samfira --- params/params.go | 12 +++++ workers/scaleset/interfaces.go | 2 + workers/scaleset/scaleset_helper.go | 13 +++++ workers/scaleset/scaleset_listener.go | 74 +++++++++++++++++++++++++-- 
4 files changed, 98 insertions(+), 3 deletions(-) diff --git a/params/params.go b/params/params.go index 3ac0c0c5..5f7cf60a 100644 --- a/params/params.go +++ b/params/params.go @@ -46,6 +46,7 @@ type ( GithubAuthType string PoolBalancerType string ScaleSetState string + ScaleSetMessageType string ) const ( @@ -137,6 +138,17 @@ const ( ScaleSetPendingForceDelete ScaleSetState = "pending_force_delete" ) +const ( + MessageTypeRunnerScaleSetJobMessages ScaleSetMessageType = "RunnerScaleSetJobMessages" +) + +const ( + MessageTypeJobAssigned = "JobAssigned" + MessageTypeJobCompleted = "JobCompleted" + MessageTypeJobStarted = "JobStarted" + MessageTypeJobAvailable = "JobAvailable" +) + type StatusMessage struct { CreatedAt time.Time `json:"created_at,omitempty"` Message string `json:"message,omitempty"` diff --git a/workers/scaleset/interfaces.go b/workers/scaleset/interfaces.go index 7b96168d..077a35e5 100644 --- a/workers/scaleset/interfaces.go +++ b/workers/scaleset/interfaces.go @@ -10,4 +10,6 @@ type scaleSetHelper interface { GetScaleSet() params.ScaleSet SetLastMessageID(id int64) error Owner() string + HandleJobsCompleted(jobs []params.ScaleSetJobMessage) error + HandleJobsStarted(jobs []params.ScaleSetJobMessage) error } diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index abfd37c4..4604a919 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -25,3 +25,16 @@ func (w *Worker) SetLastMessageID(id int64) error { } return nil } + +// HandleJobCompleted handles a job completed message. If a job had a runner +// assigned and was not canceled before it had a chance to run, then we mark +// that runner as pending_delete. +func (w *Worker) HandleJobsCompleted(jobs []params.ScaleSetJobMessage) error { + return nil +} + +// HandleJobStarted updates the runners from idle to active in the DB and +// assigns the job to them. 
+func (w *Worker) HandleJobsStarted(jobs []params.ScaleSetJobMessage) error { + return nil +} diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index bf22b61b..80ba67c3 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -94,6 +94,12 @@ func (l *scaleSetListener) Stop() error { func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage) { l.mux.Lock() defer l.mux.Unlock() + + if params.ScaleSetMessageType(msg.MessageType) != params.MessageTypeRunnerScaleSetJobMessages { + slog.DebugContext(l.ctx, "message is not a job message, ignoring") + return + } + body, err := msg.GetJobsFromBody() if err != nil { slog.ErrorContext(l.ctx, "getting jobs from body", "error", err) @@ -101,11 +107,68 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage slog.InfoContext(l.ctx, "handling message", "message", msg, "body", body) if msg.MessageID < l.lastMessageID { slog.DebugContext(l.ctx, "message is older than last message, ignoring") + return + } + + var completedJobs []params.ScaleSetJobMessage + var availableJobs []params.ScaleSetJobMessage + var startedJobs []params.ScaleSetJobMessage + + for _, job := range body { + switch job.MessageType { + case params.MessageTypeJobAssigned: + slog.InfoContext(l.ctx, "new job assigned", "job_id", job.RunnerRequestId, "job_name", job.JobDisplayName) + case params.MessageTypeJobStarted: + slog.InfoContext(l.ctx, "job started", "job_id", job.RunnerRequestId, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) + startedJobs = append(startedJobs, job) + case params.MessageTypeJobCompleted: + slog.InfoContext(l.ctx, "job completed", "job_id", job.RunnerRequestId, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) + completedJobs = append(completedJobs, job) + case params.MessageTypeJobAvailable: + slog.InfoContext(l.ctx, "job available", "job_id", job.RunnerRequestId, "job_name", job.JobDisplayName) + availableJobs = append(availableJobs, job) + default: + slog.DebugContext(l.ctx, "unknown message type", "message_type", job.MessageType) + } + } + + if len(availableJobs) > 0 { + jobIds := make([]int64, len(availableJobs)) + for idx, job := range availableJobs { + jobIds[idx] = job.RunnerRequestId + } + idsAcquired, err := l.scaleSetHelper.ScaleSetCLI().AcquireJobs( + l.listenerCtx, l.scaleSetHelper.GetScaleSet().ScaleSetID, + l.messageSession.MessageQueueAccessToken(), jobIds) + if err != nil { + // don't mark message as processed. It will be requeued. 
+ slog.ErrorContext(l.ctx, "acquiring jobs", "error", err) + return + } + slog.DebugContext(l.ctx, "acquired jobs", "job_ids", idsAcquired) + } + + if len(completedJobs) > 0 { + if err := l.scaleSetHelper.HandleJobsCompleted(completedJobs); err != nil { + slog.ErrorContext(l.ctx, "error handling completed jobs", "error", err) + return + } + } + + if len(startedJobs) > 0 { + if err := l.scaleSetHelper.HandleJobsStarted(startedJobs); err != nil { + slog.ErrorContext(l.ctx, "error handling started jobs", "error", err) + return + } + } + + if err := l.scaleSetHelper.SetLastMessageID(msg.MessageID); err != nil { + slog.ErrorContext(l.ctx, "setting last message ID", "error", err) } else { l.lastMessageID = msg.MessageID - if err := l.scaleSetHelper.SetLastMessageID(msg.MessageID); err != nil { - slog.ErrorContext(l.ctx, "setting last message ID", "error", err) - } + } + if err := l.messageSession.DeleteMessage(l.listenerCtx, msg.MessageID); err != nil { + slog.ErrorContext(l.ctx, "deleting message", "error", err) } } @@ -127,6 +190,9 @@ func (l *scaleSetListener) loop() { return default: slog.DebugContext(l.ctx, "getting message", "last_message_id", l.lastMessageID, "max_runners", l.scaleSetHelper.GetScaleSet().MaxRunners) + // TODO: consume initial message on startup and consolidate. + // The scale set may have undergone several messages while GARM was + // down. msg, err := l.messageSession.GetMessage( l.listenerCtx, l.lastMessageID, l.scaleSetHelper.GetScaleSet().MaxRunners) if err != nil { @@ -153,6 +219,8 @@ func (l *scaleSetListener) loop() { } retryAfterUnauthorized = false if !msg.IsNil() { + // Longpoll returns after 50 seconds. If no message arrives during that interval + // we get a nil message. We can simply ignore it and continue. l.handleSessionMessage(msg) } } From fc4bd863aa4e442f6857a80ae85e4631b4ee14a7 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 17 Apr 2025 15:13:24 +0000 Subject: [PATCH 015/179] Add some db functions to handle scaleset instances Signed-off-by: Gabriel Adrian Samfira --- database/common/store.go | 7 ++- database/sql/instances.go | 23 +--------- database/sql/scaleset_instances.go | 69 +++++++++++++++++++++++++++++ workers/scaleset/scaleset.go | 10 ++--- workers/scaleset/scaleset_helper.go | 4 +- 5 files changed, 84 insertions(+), 29 deletions(-) create mode 100644 database/sql/scaleset_instances.go diff --git a/database/common/store.go b/database/common/store.go index a2b2cf77..2ac55a4b 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -142,10 +142,14 @@ type ScaleSetsStore interface { UpdateEntityScaleSet(_ context.Context, entity params.GithubEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, new params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) (err error) - ListScaleSetInstances(_ context.Context, scalesetID uint) ([]params.Instance, error) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error } +type ScaleSetInstanceStore interface { + ListScaleSetInstances(_ context.Context, scalesetID uint) ([]params.Instance, error) + CreateScaleSetInstance(_ context.Context, scaleSetID uint, param params.CreateInstanceParams) (instance params.Instance, err error) +} + //go:generate mockery --name=Store type Store interface { RepoStore @@ -160,6 +164,7 @@ type Store interface { ControllerStore EntityPoolStore 
ScaleSetsStore + ScaleSetInstanceStore ControllerInfo() (params.ControllerInfo, error) InitController() (params.ControllerInfo, error) diff --git a/database/sql/instances.go b/database/sql/instances.go index d4bfd019..f88cd33b 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -136,7 +136,7 @@ func (s *sqlDatabase) GetPoolInstanceByName(_ context.Context, poolID string, in } func (s *sqlDatabase) GetInstanceByName(ctx context.Context, instanceName string) (params.Instance, error) { - instance, err := s.getInstanceByName(ctx, instanceName, "StatusMessages", "Pool") + instance, err := s.getInstanceByName(ctx, instanceName, "StatusMessages", "Pool", "ScaleSet") if err != nil { return params.Instance{}, errors.Wrap(err, "fetching instance") } @@ -196,7 +196,7 @@ func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string, } func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, param params.UpdateInstanceParams) (params.Instance, error) { - instance, err := s.getInstanceByName(ctx, instanceName, "Pool") + instance, err := s.getInstanceByName(ctx, instanceName, "Pool", "ScaleSet") if err != nil { return params.Instance{}, errors.Wrap(err, "updating instance") } @@ -290,25 +290,6 @@ func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]par return ret, nil } -func (s *sqlDatabase) ListScaleSetInstances(_ context.Context, scalesetID uint) ([]params.Instance, error) { - var instances []Instance - query := s.conn.Model(&Instance{}).Preload("Job", "ScaleSet").Where("scale_set_fk_id = ?", scalesetID) - - if err := query.Find(&instances); err.Error != nil { - return nil, errors.Wrap(err.Error, "fetching instances") - } - - var err error - ret := make([]params.Instance, len(instances)) - for idx, inst := range instances { - ret[idx], err = s.sqlToParamsInstance(inst) - if err != nil { - return nil, errors.Wrap(err, "converting instance") - } - } - return ret, nil -} - func (s *sqlDatabase) ListAllInstances(_ context.Context) ([]params.Instance, error) { var instances []Instance diff --git a/database/sql/scaleset_instances.go b/database/sql/scaleset_instances.go new file mode 100644 index 00000000..3278b934 --- /dev/null +++ b/database/sql/scaleset_instances.go @@ -0,0 +1,69 @@ +package sql + +import ( + "context" + + "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/params" + "github.com/pkg/errors" +) + +func (s *sqlDatabase) CreateScaleSetInstance(_ context.Context, scaleSetID uint, param params.CreateInstanceParams) (instance params.Instance, err error) { + scaleSet, err := s.getScaleSetByID(s.conn, scaleSetID) + if err != nil { + return params.Instance{}, errors.Wrap(err, "fetching scale set") + } + + defer func() { + if err == nil { + s.sendNotify(common.InstanceEntityType, common.CreateOperation, instance) + } + }() + + var secret []byte + if len(param.JitConfiguration) > 0 { + secret, err = s.marshalAndSeal(param.JitConfiguration) + if err != nil { + return params.Instance{}, errors.Wrap(err, "marshalling jit config") + } + } + + newInstance := Instance{ + ScaleSet: scaleSet, + Name: param.Name, + Status: param.Status, + RunnerStatus: param.RunnerStatus, + OSType: param.OSType, + OSArch: param.OSArch, + CallbackURL: param.CallbackURL, + MetadataURL: param.MetadataURL, + GitHubRunnerGroup: param.GitHubRunnerGroup, + JitConfiguration: secret, + AgentID: param.AgentID, + } + q := s.conn.Create(&newInstance) + if q.Error != nil { + return params.Instance{}, errors.Wrap(q.Error, "creating 
instance") + } + + return s.sqlToParamsInstance(newInstance) +} + +func (s *sqlDatabase) ListScaleSetInstances(_ context.Context, scalesetID uint) ([]params.Instance, error) { + var instances []Instance + query := s.conn.Model(&Instance{}).Preload("Job", "ScaleSet").Where("scale_set_fk_id = ?", scalesetID) + + if err := query.Find(&instances); err.Error != nil { + return nil, errors.Wrap(err.Error, "fetching instances") + } + + var err error + ret := make([]params.Instance, len(instances)) + for idx, inst := range instances { + ret[idx], err = s.sqlToParamsInstance(inst) + if err != nil { + return nil, errors.Wrap(err, "converting instance") + } + } + return ret, nil +} diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index c392d5cd..f2fc36af 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -30,7 +30,7 @@ func NewWorker(ctx context.Context, store dbCommon.Store, scaleSet params.ScaleS consumerID: consumerID, store: store, provider: provider, - Entity: scaleSet, + scaleSet: scaleSet, ghCli: ghCli, scaleSetCli: scaleSetCli, }, nil @@ -43,7 +43,7 @@ type Worker struct { provider common.Provider store dbCommon.Store - Entity params.ScaleSet + scaleSet params.ScaleSet ghCli common.GithubClient scaleSetCli *scalesets.ScaleSetClient @@ -88,7 +88,7 @@ func (w *Worker) Start() (err error) { consumer, err := watcher.RegisterConsumer( w.ctx, w.consumerID, watcher.WithAll( - watcher.WithScaleSetFilter(w.Entity), + watcher.WithScaleSetFilter(w.scaleSet), watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), ), ) @@ -148,13 +148,13 @@ func (w *Worker) handleEvent(event dbCommon.ChangePayload) { case dbCommon.UpdateOperation: slog.DebugContext(w.ctx, "got update operation") w.mux.Lock() - if scaleSet.MaxRunners < w.Entity.MaxRunners { + if scaleSet.MaxRunners < w.scaleSet.MaxRunners { slog.DebugContext(w.ctx, "max runners changed; stopping listener") if err := w.listener.Stop(); err != nil { slog.ErrorContext(w.ctx, "error stopping listener", "error", err) } } - w.Entity = scaleSet + w.scaleSet = scaleSet w.mux.Unlock() default: slog.DebugContext(w.ctx, "invalid operation type; ignoring", "operation_type", event.Operation) diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index 4604a919..4d84a76b 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -12,7 +12,7 @@ func (w *Worker) ScaleSetCLI() *scalesets.ScaleSetClient { } func (w *Worker) GetScaleSet() params.ScaleSet { - return w.Entity + return w.scaleSet } func (w *Worker) Owner() string { @@ -20,7 +20,7 @@ func (w *Worker) Owner() string { } func (w *Worker) SetLastMessageID(id int64) error { - if err := w.store.SetScaleSetLastMessageID(w.ctx, w.Entity.ID, id); err != nil { + if err := w.store.SetScaleSetLastMessageID(w.ctx, w.scaleSet.ID, id); err != nil { return fmt.Errorf("setting last message ID: %w", err) } return nil From 8d10dd4716b859c1db39a0cd819aa17b6c01916c Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 17 Apr 2025 15:53:58 +0000 Subject: [PATCH 016/179] Update garm-provider-common Signed-off-by: Gabriel Adrian Samfira --- database/common/store.go | 1 + database/sql/models.go | 1 + database/sql/scalesets.go | 12 ++++++++++++ database/sql/util.go | 1 + go.mod | 2 +- go.sum | 4 ++-- params/params.go | 19 ++++++++++--------- .../garm-provider-common/params/params.go | 1 + vendor/modules.txt | 4 ++-- workers/scaleset/scaleset.go | 1 + 10 files changed, 32 insertions(+), 14 deletions(-) 
diff --git a/database/common/store.go b/database/common/store.go index 2ac55a4b..b7222d1c 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -143,6 +143,7 @@ type ScaleSetsStore interface { GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) (err error) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error + SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int64) error } type ScaleSetInstanceStore interface { diff --git a/database/sql/models.go b/database/sql/models.go index 5b4d86f9..ce156ccd 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -119,6 +119,7 @@ type ScaleSet struct { OSArch commonParams.OSArch Enabled bool LastMessageID int64 + DesiredRunnerCount int64 // ExtraSpecs is an opaque json that gets sent to the provider // as part of the bootstrap params for instances. It can contain // any kind of data needed by providers. diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go index 7a67f2d6..03c34800 100644 --- a/database/sql/scalesets.go +++ b/database/sql/scalesets.go @@ -391,3 +391,15 @@ func (s *sqlDatabase) SetScaleSetLastMessageID(ctx context.Context, scaleSetID u } return nil } + +func (s *sqlDatabase) SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int64) error { + if err := s.conn.Transaction(func(tx *gorm.DB) error { + if q := tx.Model(&ScaleSet{}).Where("id = ?", scaleSetID).Update("desired_runner_count", desiredRunnerCount); q.Error != nil { + return errors.Wrap(q.Error, "saving database entry") + } + return nil + }); err != nil { + return errors.Wrap(err, "setting desired runner count") + } + return nil +} diff --git a/database/sql/util.go b/database/sql/util.go index c7b64961..dda3e9cf 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -310,6 +310,7 @@ func (s *sqlDatabase) sqlToCommonScaleSet(scaleSet ScaleSet) (params.ScaleSet, e State: scaleSet.State, ExtendedState: scaleSet.ExtendedState, LastMessageID: scaleSet.LastMessageID, + DesiredRunnerCount: scaleSet.DesiredRunnerCount, } if scaleSet.RepoID != nil { diff --git a/go.mod b/go.mod index 79a09894..bd2a0c86 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ toolchain go1.23.6 require ( github.com/BurntSushi/toml v1.5.0 github.com/bradleyfalzon/ghinstallation/v2 v2.15.0 - github.com/cloudbase/garm-provider-common v0.1.4 + github.com/cloudbase/garm-provider-common v0.1.5-0.20250417155201-8ef03502d06e github.com/felixge/httpsnoop v1.0.4 github.com/go-openapi/errors v0.22.1 github.com/go-openapi/runtime v0.28.0 diff --git a/go.sum b/go.sum index e14f0c22..1deb1931 100644 --- a/go.sum +++ b/go.sum @@ -19,8 +19,8 @@ github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObk github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04= github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= -github.com/cloudbase/garm-provider-common v0.1.4 h1:spRjl0PV4r8vKaCTNp6xBQbRKfls/cmbBEl/i/eGWSo= -github.com/cloudbase/garm-provider-common v0.1.4/go.mod h1:sK26i2NpjjAjhanNKiWw8iPkqt+XeohTKpFnEP7JdZ4= +github.com/cloudbase/garm-provider-common v0.1.5-0.20250417155201-8ef03502d06e h1:giq2Prk9I/ez1dc4/r9jivf2jbhjX9apZ41TWQ5g3qE= +github.com/cloudbase/garm-provider-common 
v0.1.5-0.20250417155201-8ef03502d06e/go.mod h1:sSrTBtTc0q72MZdmS9EuLLdDhkmXZAqAwRIgEK0TqUo= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= diff --git a/params/params.go b/params/params.go index 5f7cf60a..8f63fecb 100644 --- a/params/params.go +++ b/params/params.go @@ -453,15 +453,16 @@ type ScaleSet struct { State ScaleSetState `json:"state"` ExtendedState string `json:"extended_state,omitempty"` - ProviderName string `json:"provider_name,omitempty"` - MaxRunners uint `json:"max_runners,omitempty"` - MinIdleRunners uint `json:"min_idle_runners,omitempty"` - Image string `json:"image,omitempty"` - Flavor string `json:"flavor,omitempty"` - OSType commonParams.OSType `json:"os_type,omitempty"` - OSArch commonParams.OSArch `json:"os_arch,omitempty"` - Enabled bool `json:"enabled,omitempty"` - Instances []Instance `json:"instances,omitempty"` + ProviderName string `json:"provider_name,omitempty"` + MaxRunners uint `json:"max_runners,omitempty"` + MinIdleRunners uint `json:"min_idle_runners,omitempty"` + Image string `json:"image,omitempty"` + Flavor string `json:"flavor,omitempty"` + OSType commonParams.OSType `json:"os_type,omitempty"` + OSArch commonParams.OSArch `json:"os_arch,omitempty"` + Enabled bool `json:"enabled,omitempty"` + Instances []Instance `json:"instances,omitempty"` + DesiredRunnerCount int64 `json:"desired_runner_count,omitempty"` RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` // ExtraSpecs is an opaque raw json that gets sent to the provider diff --git a/vendor/github.com/cloudbase/garm-provider-common/params/params.go b/vendor/github.com/cloudbase/garm-provider-common/params/params.go index 95a6e6bb..0a63f709 100644 --- a/vendor/github.com/cloudbase/garm-provider-common/params/params.go +++ b/vendor/github.com/cloudbase/garm-provider-common/params/params.go @@ -45,6 +45,7 @@ const ( InstancePendingDelete InstanceStatus = "pending_delete" InstancePendingForceDelete InstanceStatus = "pending_force_delete" InstanceDeleting InstanceStatus = "deleting" + InstanceDeleted InstanceStatus = "deleted" InstancePendingCreate InstanceStatus = "pending_create" InstanceCreating InstanceStatus = "creating" InstanceStatusUnknown InstanceStatus = "unknown" diff --git a/vendor/modules.txt b/vendor/modules.txt index a9d04a3c..c19620d9 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -21,8 +21,8 @@ github.com/cespare/xxhash/v2 # github.com/chzyer/readline v1.5.1 ## explicit; go 1.15 github.com/chzyer/readline -# github.com/cloudbase/garm-provider-common v0.1.4 -## explicit; go 1.22 +# github.com/cloudbase/garm-provider-common v0.1.5-0.20250417155201-8ef03502d06e +## explicit; go 1.23.0 github.com/cloudbase/garm-provider-common/defaults github.com/cloudbase/garm-provider-common/errors github.com/cloudbase/garm-provider-common/execution/common diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index f2fc36af..e6db9d57 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -154,6 +154,7 @@ func (w *Worker) handleEvent(event dbCommon.ChangePayload) { slog.ErrorContext(w.ctx, "error stopping listener", "error", err) } } + // TODO: should we kick off auto-scaling if desired runner count changes? 
w.scaleSet = scaleSet w.mux.Unlock() default: From 94f264d44403462820cb58df5e0f8a5883b226a3 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 17 Apr 2025 16:53:54 +0000 Subject: [PATCH 017/179] Handle JobStarted and JobCompleted Signed-off-by: Gabriel Adrian Samfira --- database/common/store.go | 2 +- database/sql/models.go | 2 +- database/sql/scalesets.go | 2 +- params/params.go | 2 +- workers/scaleset/interfaces.go | 1 + workers/scaleset/scaleset_helper.go | 48 +++++++++++++++++++++++++++ workers/scaleset/scaleset_listener.go | 5 +++ 7 files changed, 58 insertions(+), 4 deletions(-) diff --git a/database/common/store.go b/database/common/store.go index b7222d1c..82c5e4c0 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -143,7 +143,7 @@ type ScaleSetsStore interface { GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) (err error) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error - SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int64) error + SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int) error } type ScaleSetInstanceStore interface { diff --git a/database/sql/models.go b/database/sql/models.go index ce156ccd..45e329f6 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -119,7 +119,7 @@ type ScaleSet struct { OSArch commonParams.OSArch Enabled bool LastMessageID int64 - DesiredRunnerCount int64 + DesiredRunnerCount int // ExtraSpecs is an opaque json that gets sent to the provider // as part of the bootstrap params for instances. It can contain // any kind of data needed by providers. diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go index 03c34800..3adc423c 100644 --- a/database/sql/scalesets.go +++ b/database/sql/scalesets.go @@ -392,7 +392,7 @@ func (s *sqlDatabase) SetScaleSetLastMessageID(ctx context.Context, scaleSetID u return nil } -func (s *sqlDatabase) SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int64) error { +func (s *sqlDatabase) SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int) error { if err := s.conn.Transaction(func(tx *gorm.DB) error { if q := tx.Model(&ScaleSet{}).Where("id = ?", scaleSetID).Update("desired_runner_count", desiredRunnerCount); q.Error != nil { return errors.Wrap(q.Error, "saving database entry") diff --git a/params/params.go b/params/params.go index 8f63fecb..64a53984 100644 --- a/params/params.go +++ b/params/params.go @@ -462,7 +462,7 @@ type ScaleSet struct { OSArch commonParams.OSArch `json:"os_arch,omitempty"` Enabled bool `json:"enabled,omitempty"` Instances []Instance `json:"instances,omitempty"` - DesiredRunnerCount int64 `json:"desired_runner_count,omitempty"` + DesiredRunnerCount int `json:"desired_runner_count,omitempty"` RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` // ExtraSpecs is an opaque raw json that gets sent to the provider diff --git a/workers/scaleset/interfaces.go b/workers/scaleset/interfaces.go index 077a35e5..51d1d54b 100644 --- a/workers/scaleset/interfaces.go +++ b/workers/scaleset/interfaces.go @@ -9,6 +9,7 @@ type scaleSetHelper interface { ScaleSetCLI() *scalesets.ScaleSetClient GetScaleSet() params.ScaleSet SetLastMessageID(id int64) error + SetDesiredRunnerCount(count int) error Owner() string HandleJobsCompleted(jobs 
[]params.ScaleSetJobMessage) error HandleJobsStarted(jobs []params.ScaleSetJobMessage) error diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index 4d84a76b..ca673c4d 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -1,7 +1,12 @@ package scaleset import ( + "errors" "fmt" + "log/slog" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/util/github/scalesets" @@ -30,11 +35,54 @@ func (w *Worker) SetLastMessageID(id int64) error { // assigned and was not canceled before it had a chance to run, then we mark // that runner as pending_delete. func (w *Worker) HandleJobsCompleted(jobs []params.ScaleSetJobMessage) error { + for _, job := range jobs { + if job.RunnerName == "" { + // This job was not assigned to a runner, so we can skip it. + continue + } + // Set the runner to pending_delete. + runnerUpdateParams := params.UpdateInstanceParams{ + Status: commonParams.InstancePendingDelete, + RunnerStatus: params.RunnerTerminated, + } + _, err := w.store.UpdateInstance(w.ctx, job.RunnerName, runnerUpdateParams) + if err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + return fmt.Errorf("updating runner %s: %w", job.RunnerName, err) + } + } + } return nil } // HandleJobStarted updates the runners from idle to active in the DB and // assigns the job to them. func (w *Worker) HandleJobsStarted(jobs []params.ScaleSetJobMessage) error { + for _, job := range jobs { + if job.RunnerName == "" { + // This should not happen, but just in case. + continue + } + + updateParams := params.UpdateInstanceParams{ + RunnerStatus: params.RunnerActive, + } + + _, err := w.store.UpdateInstance(w.ctx, job.RunnerName, updateParams) + if err != nil { + if errors.Is(err, runnerErrors.ErrNotFound) { + slog.InfoContext(w.ctx, "runner not found; handled by some other controller?", "runner_name", job.RunnerName) + continue + } + return fmt.Errorf("updating runner %s: %w", job.RunnerName, err) + } + } + return nil +} + +func (w *Worker) SetDesiredRunnerCount(count int) error { + if err := w.store.SetScaleSetDesiredRunnerCount(w.ctx, w.scaleSet.ID, count); err != nil { + return fmt.Errorf("setting desired runner count: %w", err) + } return nil } diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index 80ba67c3..58d99bf3 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -167,6 +167,11 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage } else { l.lastMessageID = msg.MessageID } + + if err := l.scaleSetHelper.SetDesiredRunnerCount(msg.Statistics.TotalAssignedJobs); err != nil { + slog.ErrorContext(l.ctx, "setting desired runner count", "error", err) + } + if err := l.messageSession.DeleteMessage(l.listenerCtx, msg.MessageID); err != nil { slog.ErrorContext(l.ctx, "deleting message", "error", err) } From d949cecbe7f81ba57407afd9524b7c95724127f0 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 17 Apr 2025 17:02:06 +0000 Subject: [PATCH 018/179] Keep a cache of runners in the scaleset worker We will need to run various checks against the runners that are managed by a scale set. The runners are updated by the DB watcher, so we should always have an up to date view of their state. We can then confidently monitor them without needing to query the DB. 
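As a rough illustration of the caching approach described above (a minimal sketch with made-up types, not the code this patch adds): watcher events are folded into an in-memory map guarded by a mutex, so the worker can inspect its runners without querying the database.

package main

import (
	"fmt"
	"sync"
)

// instance and event are hypothetical stand-ins for the real watcher payloads.
type instance struct {
	ID     string
	Status string
}

type event struct {
	Op       string // "create", "update" or "delete"
	Instance instance
}

// runnerCache is the worker's in-memory view of its runners.
type runnerCache struct {
	mux     sync.Mutex
	runners map[string]instance
}

// apply folds a single watcher event into the cached view.
func (c *runnerCache) apply(ev event) {
	c.mux.Lock()
	defer c.mux.Unlock()
	switch ev.Op {
	case "create", "update":
		c.runners[ev.Instance.ID] = ev.Instance
	case "delete":
		delete(c.runners, ev.Instance.ID)
	}
}

func main() {
	cache := &runnerCache{runners: make(map[string]instance)}
	cache.apply(event{Op: "create", Instance: instance{ID: "runner-1", Status: "idle"}})
	cache.apply(event{Op: "delete", Instance: instance{ID: "runner-1"}})
	fmt.Println(len(cache.runners)) // 0: the deletion removed it from the cached view
}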
Signed-off-by: Gabriel Adrian Samfira --- workers/scaleset/scaleset.go | 57 +++++++++++++++++++++++++++++++++--- 1 file changed, 53 insertions(+), 4 deletions(-) diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index e6db9d57..a4648dc0 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -33,6 +33,7 @@ func NewWorker(ctx context.Context, store dbCommon.Store, scaleSet params.ScaleS scaleSet: scaleSet, ghCli: ghCli, scaleSetCli: scaleSetCli, + runners: make(map[string]params.Instance), }, nil } @@ -44,6 +45,7 @@ type Worker struct { provider common.Provider store dbCommon.Store scaleSet params.ScaleSet + runners map[string]params.Instance ghCli common.GithubClient scaleSetCli *scalesets.ScaleSetClient @@ -85,11 +87,23 @@ func (w *Worker) Start() (err error) { return nil } + instances, err := w.store.ListScaleSetInstances(w.ctx, w.scaleSet.ID) + if err != nil { + return fmt.Errorf("listing scale set instances: %w", err) + } + + for _, instance := range instances { + w.runners[instance.ID] = instance + } + consumer, err := watcher.RegisterConsumer( w.ctx, w.consumerID, - watcher.WithAll( - watcher.WithScaleSetFilter(w.scaleSet), - watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), + watcher.WithAny( + watcher.WithAll( + watcher.WithScaleSetFilter(w.scaleSet), + watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), + ), + watcher.WithScaleSetInstanceFilter(w.scaleSet), ), ) if err != nil { @@ -138,7 +152,7 @@ func (w *Worker) SetGithubClient(client common.GithubClient) error { return nil } -func (w *Worker) handleEvent(event dbCommon.ChangePayload) { +func (w *Worker) handleScaleSetEvent(event dbCommon.ChangePayload) { scaleSet, ok := event.Payload.(params.ScaleSet) if !ok { slog.ErrorContext(w.ctx, "invalid payload for scale set type", "scale_set_type", event.EntityType, "payload", event.Payload) @@ -162,6 +176,41 @@ func (w *Worker) handleEvent(event dbCommon.ChangePayload) { } } +func (w *Worker) handleInstanceEntityEvent(event dbCommon.ChangePayload) { + instance, ok := event.Payload.(params.Instance) + if !ok { + slog.ErrorContext(w.ctx, "invalid payload for instance type", "instance_type", event.EntityType, "payload", event.Payload) + return + } + switch event.Operation { + case dbCommon.UpdateOperation, dbCommon.CreateOperation: + slog.DebugContext(w.ctx, "got update operation") + w.mux.Lock() + w.runners[instance.ID] = instance + w.mux.Unlock() + case dbCommon.DeleteOperation: + slog.DebugContext(w.ctx, "got delete operation") + w.mux.Lock() + delete(w.runners, instance.ID) + w.mux.Unlock() + default: + slog.DebugContext(w.ctx, "invalid operation type; ignoring", "operation_type", event.Operation) + } +} + +func (w *Worker) handleEvent(event dbCommon.ChangePayload) { + switch event.EntityType { + case dbCommon.ScaleSetEntityType: + slog.DebugContext(w.ctx, "got scaleset event", "event", event) + w.handleScaleSetEvent(event) + case dbCommon.InstanceEntityType: + slog.DebugContext(w.ctx, "got instance event", "event", event) + w.handleInstanceEntityEvent(event) + default: + slog.DebugContext(w.ctx, "invalid entity type; ignoring", "entity_type", event.EntityType) + } +} + func (w *Worker) loop() { defer w.Stop() From 8c62b6de8c578b1b9d289ed857a0c915d3a13622 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 17 Apr 2025 22:03:29 +0000 Subject: [PATCH 019/179] Obey enabled/disabled status Signed-off-by: Gabriel Adrian Samfira --- workers/scaleset/scaleset.go | 42 +++++++++++++++++++++------ 
workers/scaleset/scaleset_listener.go | 6 ++++ 2 files changed, 39 insertions(+), 9 deletions(-) diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index a4648dc0..dee1c70c 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -119,9 +119,13 @@ func (w *Worker) Start() (err error) { slog.DebugContext(w.ctx, "creating scale set listener") listener := newListener(w.ctx, w) - slog.DebugContext(w.ctx, "starting scale set listener") - if err := listener.Start(); err != nil { - return fmt.Errorf("error starting listener: %w", err) + if w.scaleSet.Enabled { + slog.DebugContext(w.ctx, "starting scale set listener") + if err := listener.Start(); err != nil { + return fmt.Errorf("error starting listener: %w", err) + } + } else { + slog.InfoContext(w.ctx, "scale set is disabled; not starting listener") } w.listener = listener @@ -162,10 +166,19 @@ func (w *Worker) handleScaleSetEvent(event dbCommon.ChangePayload) { case dbCommon.UpdateOperation: slog.DebugContext(w.ctx, "got update operation") w.mux.Lock() - if scaleSet.MaxRunners < w.scaleSet.MaxRunners { - slog.DebugContext(w.ctx, "max runners changed; stopping listener") - if err := w.listener.Stop(); err != nil { - slog.ErrorContext(w.ctx, "error stopping listener", "error", err) + + if scaleSet.MaxRunners < w.scaleSet.MaxRunners || !scaleSet.Enabled { + // we stop the listener if the scale set is disabled or if the max runners + // is decreased. In the case where max runners changes but the scale set + // is still enabled, we rely on the keepListenerAlive to restart the listener + // which will listen for new messages with the changed max runners. This way + // we don't have to potentially wait for 50 second for the max runner value + // to be updated, in which time we might get more runners spawned than the + // new max runner value. + if w.listener.IsRunning() { + if err := w.listener.Stop(); err != nil { + slog.ErrorContext(w.ctx, "error stopping listener", "error", err) + } } } // TODO: should we kick off auto-scaling if desired runner count changes? 
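The restart strategy the comment above leans on can be sketched roughly as follows (hypothetical listener and enabled helpers, not the worker's actual keepListenerAlive loop): a supervisor restarts the listener whenever it exits, so stopping it is enough for new settings to take effect on the next long poll rather than after the current one times out.

package main

import "time"

// listener is a hypothetical stand-in for the scale set listener.
type listener interface {
	Start() error
	Wait() <-chan struct{}
}

// supervise restarts l whenever it exits, for as long as the scale set is
// enabled and quit has not been closed.
func supervise(l listener, enabled func() bool, quit <-chan struct{}) {
	for {
		select {
		case <-quit:
			return
		default:
		}
		if !enabled() {
			time.Sleep(2 * time.Second) // disabled; check again later
			continue
		}
		if err := l.Start(); err != nil {
			time.Sleep(5 * time.Second) // back off before retrying
			continue
		}
		select {
		case <-quit:
			return
		case <-l.Wait(): // listener stopped; loop around and restart it
		}
	}
}

// main is only here so the sketch compiles as a standalone program; the worker
// would run supervise in its own goroutine.
func main() {}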
@@ -239,15 +252,26 @@ func (w *Worker) sleepWithCancel(sleepTime time.Duration) (canceled bool) { case <-ticker.C: return false case <-w.quit: - return true case <-w.ctx.Done(): - return true } + return true } func (w *Worker) keepListenerAlive() { var backoff time.Duration for { + w.mux.Lock() + if !w.scaleSet.Enabled { + if canceled := w.sleepWithCancel(2 * time.Second); canceled { + slog.DebugContext(w.ctx, "worker is stopped; exiting keepListenerAlive") + w.mux.Unlock() + return + } + w.mux.Unlock() + continue + } + w.mux.Unlock() + select { case <-w.quit: return diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index 58d99bf3..43a2e5c1 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -91,6 +91,12 @@ func (l *scaleSetListener) Stop() error { return nil } +func (l *scaleSetListener) IsRunning() bool { + l.mux.Lock() + defer l.mux.Unlock() + return l.running +} + func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage) { l.mux.Lock() defer l.mux.Unlock() From bc470c5f7896eb2506dec3a9a6590065920a3833 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 17 Apr 2025 22:59:24 +0000 Subject: [PATCH 020/179] WiP Signed-off-by: Gabriel Adrian Samfira --- locking/interface.go | 1 + locking/local_locker.go | 6 ++++++ locking/locking.go | 9 ++++++++ workers/scaleset/scaleset.go | 33 +++++++++++++++++++++++++++++ workers/scaleset/scaleset_helper.go | 9 ++++++++ 5 files changed, 58 insertions(+) diff --git a/locking/interface.go b/locking/interface.go index fd547830..07380a7b 100644 --- a/locking/interface.go +++ b/locking/interface.go @@ -5,6 +5,7 @@ import "time" // TODO(gabriel-samfira): needs owner attribute. type Locker interface { TryLock(key string) bool + Lock(key string) Unlock(key string, remove bool) Delete(key string) } diff --git a/locking/local_locker.go b/locking/local_locker.go index 5298c9e7..ad41345c 100644 --- a/locking/local_locker.go +++ b/locking/local_locker.go @@ -29,6 +29,12 @@ func (k *keyMutex) TryLock(key string) bool { return keyMux.TryLock() } +func (k *keyMutex) Lock(key string) { + mux, _ := k.muxes.LoadOrStore(key, &sync.Mutex{}) + keyMux := mux.(*sync.Mutex) + keyMux.Lock() +} + func (k *keyMutex) Unlock(key string, remove bool) { mux, ok := k.muxes.Load(key) if !ok { diff --git a/locking/locking.go b/locking/locking.go index 793edb4e..6628d8b1 100644 --- a/locking/locking.go +++ b/locking/locking.go @@ -15,6 +15,15 @@ func TryLock(key string) (bool, error) { return locker.TryLock(key), nil } + +func Lock(key string) { + if locker == nil { + panic("no locker is registered") + } + + locker.Lock(key) +} + func Unlock(key string, remove bool) error { if locker == nil { return fmt.Errorf("no locker is registered") diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index dee1c70c..4a982ad4 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -136,6 +136,7 @@ func (w *Worker) Start() (err error) { slog.DebugContext(w.ctx, "starting scale set worker loops", "scale_set", w.consumerID) go w.loop() go w.keepListenerAlive() + go w.handleAutoScale() return nil } @@ -307,3 +308,35 @@ func (w *Worker) keepListenerAlive() { } } } + +func (w *Worker) handleAutoScale() { + ticker := time.NewTicker(5 * time.Second) + defer ticker.Stop() + + for { + select { + case <-w.quit: + return + case <-w.ctx.Done(): + return + case <-ticker.C: + var desiredRunners uint + if w.scaleSet.DesiredRunnerCount > 0 { + 
desiredRunners = uint(w.scaleSet.DesiredRunnerCount) + } + targetRunners := min(w.scaleSet.MinIdleRunners+desiredRunners, w.scaleSet.MaxRunners) + + currentRunners := uint(len(w.runners)) + if currentRunners == targetRunners { + slog.DebugContext(w.ctx, "desired runner count reached", "desired_runners", targetRunners) + continue + } + + if currentRunners < targetRunners { + slog.DebugContext(w.ctx, "scaling up", "current_runners", currentRunners, "target_runners", targetRunners) + } else { + slog.DebugContext(w.ctx, "scaling down", "current_runners", currentRunners, "target_runners", targetRunners) + } + } + } +} diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index ca673c4d..e6ae9197 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -8,6 +8,7 @@ import ( runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" + "github.com/cloudbase/garm/locking" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/util/github/scalesets" ) @@ -45,12 +46,16 @@ func (w *Worker) HandleJobsCompleted(jobs []params.ScaleSetJobMessage) error { Status: commonParams.InstancePendingDelete, RunnerStatus: params.RunnerTerminated, } + + locking.Lock(job.RunnerName) _, err := w.store.UpdateInstance(w.ctx, job.RunnerName, runnerUpdateParams) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { + locking.Unlock(job.RunnerName, false) return fmt.Errorf("updating runner %s: %w", job.RunnerName, err) } } + locking.Unlock(job.RunnerName, false) } return nil } @@ -68,14 +73,18 @@ func (w *Worker) HandleJobsStarted(jobs []params.ScaleSetJobMessage) error { RunnerStatus: params.RunnerActive, } + locking.Lock(job.RunnerName) _, err := w.store.UpdateInstance(w.ctx, job.RunnerName, updateParams) if err != nil { if errors.Is(err, runnerErrors.ErrNotFound) { slog.InfoContext(w.ctx, "runner not found; handled by some other controller?", "runner_name", job.RunnerName) + locking.Unlock(job.RunnerName, true) continue } + locking.Unlock(job.RunnerName, false) return fmt.Errorf("updating runner %s: %w", job.RunnerName, err) } + locking.Unlock(job.RunnerName, false) } return nil } From 7376a5fe741d0aa7dcf7adea230342af3974d84a Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sat, 19 Apr 2025 14:56:55 +0000 Subject: [PATCH 021/179] Fix scale set restart logic Signed-off-by: Gabriel Adrian Samfira --- workers/scaleset/scaleset.go | 31 ++++++++++++++++++++++++------- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index 4a982ad4..7e134adb 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -176,10 +176,8 @@ func (w *Worker) handleScaleSetEvent(event dbCommon.ChangePayload) { // we don't have to potentially wait for 50 second for the max runner value // to be updated, in which time we might get more runners spawned than the // new max runner value. - if w.listener.IsRunning() { - if err := w.listener.Stop(); err != nil { - slog.ErrorContext(w.ctx, "error stopping listener", "error", err) - } + if err := w.listener.Stop(); err != nil { + slog.ErrorContext(w.ctx, "error stopping listener", "error", err) } } // TODO: should we kick off auto-scaling if desired runner count changes? 
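Stepping back to the handleAutoScale logic added in the previous patch: the target it computes is min(MinIdleRunners + DesiredRunnerCount, MaxRunners). A tiny worked example with made-up numbers shows how the cap bounds the result:

package main

import "fmt"

func main() {
	// Hypothetical values: 2 idle runners are kept warm, 7 jobs are currently
	// assigned to the scale set, and the scale set is capped at 6 runners.
	minIdle := uint(2)
	desired := uint(7) // DesiredRunnerCount reported via the scale set messages
	maxRunners := uint(6)

	target := min(minIdle+desired, maxRunners) // min builtin, Go 1.21+
	fmt.Println(target)                        // prints 6: capped at MaxRunners
}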
@@ -271,6 +269,8 @@ func (w *Worker) keepListenerAlive() { w.mux.Unlock() continue } + // noop if already started + w.listener.Start() w.mux.Unlock() select { @@ -280,9 +280,19 @@ func (w *Worker) keepListenerAlive() { return case <-w.listener.Wait(): slog.DebugContext(w.ctx, "listener is stopped; attempting to restart") + w.mux.Lock() + if !w.scaleSet.Enabled { + w.mux.Unlock() + continue + } + w.mux.Unlock() for { w.mux.Lock() w.listener.Stop() //cleanup + if !w.scaleSet.Enabled { + w.mux.Unlock() + break + } slog.DebugContext(w.ctx, "attempting to restart") if err := w.listener.Start(); err != nil { w.mux.Unlock() @@ -313,6 +323,13 @@ func (w *Worker) handleAutoScale() { ticker := time.NewTicker(5 * time.Second) defer ticker.Stop() + lastMsg := "" + lastMsgDebugLog := func(msg string, targetRunners, currentRunners uint) { + if lastMsg != msg { + slog.DebugContext(w.ctx, msg, "current_runners", currentRunners, "target_runners", targetRunners) + lastMsg = msg + } + } for { select { case <-w.quit: @@ -328,14 +345,14 @@ func (w *Worker) handleAutoScale() { currentRunners := uint(len(w.runners)) if currentRunners == targetRunners { - slog.DebugContext(w.ctx, "desired runner count reached", "desired_runners", targetRunners) + lastMsgDebugLog("desired runner count reached", targetRunners, currentRunners) continue } if currentRunners < targetRunners { - slog.DebugContext(w.ctx, "scaling up", "current_runners", currentRunners, "target_runners", targetRunners) + lastMsgDebugLog("scaling up", targetRunners, currentRunners) } else { - slog.DebugContext(w.ctx, "scaling down", "current_runners", currentRunners, "target_runners", targetRunners) + lastMsgDebugLog("attempting to scale down", targetRunners, currentRunners) } } } From 020210d6adfd8cfd190445f25032e7a3458d5a1a Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 20 Apr 2025 17:39:52 +0000 Subject: [PATCH 022/179] Handle scale up and down; add provider worker Signed-off-by: Gabriel Adrian Samfira --- cmd/garm/main.go | 24 +++-- database/common/store.go | 1 + database/sql/instances.go | 35 ++++++- database/sql/models.go | 2 +- database/sql/scaleset_instances.go | 2 +- database/sql/util.go | 2 +- database/watcher/filters.go | 23 +++++ params/github.go | 24 ++--- util/github/scalesets/runners.go | 31 +++++- util/github/scalesets/util.go | 3 + workers/entity/controller.go | 14 +-- workers/provider/provider.go | 73 +++++++++++++ workers/provider/util.go | 18 ++++ workers/scaleset/scaleset.go | 159 +++++++++++++++++++++++++++++ 14 files changed, 372 insertions(+), 39 deletions(-) create mode 100644 workers/provider/provider.go create mode 100644 workers/provider/util.go diff --git a/cmd/garm/main.go b/cmd/garm/main.go index 3ffcdc1f..5879fd0a 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -46,6 +46,7 @@ import ( "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner" //nolint:typecheck runnerMetrics "github.com/cloudbase/garm/runner/metrics" + "github.com/cloudbase/garm/runner/providers" garmUtil "github.com/cloudbase/garm/util" "github.com/cloudbase/garm/util/appdefaults" "github.com/cloudbase/garm/websocket" @@ -62,16 +63,17 @@ var signals = []os.Signal{ syscall.SIGTERM, } -func maybeInitController(db common.Store) error { - if _, err := db.ControllerInfo(); err == nil { - return nil +func maybeInitController(db common.Store) (params.ControllerInfo, error) { + if info, err := db.ControllerInfo(); err == nil { + return info, nil } - if _, err := db.InitController(); err != nil { - return errors.Wrap(err, 
"initializing controller") + info, err := db.InitController() + if err != nil { + return params.ControllerInfo{}, errors.Wrap(err, "initializing controller") } - return nil + return info, nil } func setupLogging(ctx context.Context, logCfg config.Logging, hub *websocket.Hub) { @@ -212,7 +214,8 @@ func main() { log.Fatal(err) } - if err := maybeInitController(db); err != nil { + controllerInfo, err := maybeInitController(db) + if err != nil { log.Fatal(err) } @@ -231,7 +234,12 @@ func main() { log.Fatal(err) } - entityController, err := entity.NewController(ctx, db, *cfg) + providers, err := providers.LoadProvidersFromConfig(ctx, *cfg, controllerInfo.ControllerID.String()) + if err != nil { + log.Fatalf("loading providers: %+v", err) + } + + entityController, err := entity.NewController(ctx, db, providers) if err != nil { log.Fatalf("failed to create entity controller: %+v", err) } diff --git a/database/common/store.go b/database/common/store.go index 82c5e4c0..87804281 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -92,6 +92,7 @@ type UserStore interface { type InstanceStore interface { CreateInstance(ctx context.Context, poolID string, param params.CreateInstanceParams) (params.Instance, error) DeleteInstance(ctx context.Context, poolID string, instanceName string) error + DeleteInstanceByName(ctx context.Context, instanceName string) error UpdateInstance(ctx context.Context, instanceName string, param params.UpdateInstanceParams) (params.Instance, error) // Probably a bad idea without some king of filter or at least pagination diff --git a/database/sql/instances.go b/database/sql/instances.go index f88cd33b..cf0020b5 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -177,6 +177,39 @@ func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceN return nil } +func (s *sqlDatabase) DeleteInstanceByName(ctx context.Context, instanceName string) error { + instance, err := s.getInstanceByName(ctx, instanceName) + if err != nil { + return errors.Wrap(err, "deleting instance") + } + + defer func() { + if err == nil { + var providerID string + if instance.ProviderID != nil { + providerID = *instance.ProviderID + } + if notifyErr := s.sendNotify(common.InstanceEntityType, common.DeleteOperation, params.Instance{ + ID: instance.ID.String(), + Name: instance.Name, + ProviderID: providerID, + AgentID: instance.AgentID, + PoolID: instance.PoolID.String(), + }); notifyErr != nil { + slog.With(slog.Any("error", notifyErr)).Error("failed to send notify") + } + } + }() + + if q := s.conn.Unscoped().Delete(&instance); q.Error != nil { + if errors.Is(q.Error, gorm.ErrRecordNotFound) { + return nil + } + return errors.Wrap(q.Error, "deleting instance") + } + return nil +} + func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string, event params.EventType, eventLevel params.EventLevel, statusMessage string) error { instance, err := s.getInstanceByName(ctx, instanceName) if err != nil { @@ -293,7 +326,7 @@ func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]par func (s *sqlDatabase) ListAllInstances(_ context.Context) ([]params.Instance, error) { var instances []Instance - q := s.conn.Model(&Instance{}).Preload("Job", "Pool", "ScaleSet").Find(&instances) + q := s.conn.Model(&Instance{}).Preload("Job").Find(&instances) if q.Error != nil { return nil, errors.Wrap(q.Error, "fetching instances") } diff --git a/database/sql/models.go b/database/sql/models.go index 45e329f6..c1b6462d 100644 --- 
a/database/sql/models.go +++ b/database/sql/models.go @@ -277,7 +277,7 @@ type Instance struct { GitHubRunnerGroup string AditionalLabels datatypes.JSON - PoolID uuid.UUID + PoolID *uuid.UUID Pool Pool `gorm:"foreignKey:PoolID"` ScaleSetFkID *uint diff --git a/database/sql/scaleset_instances.go b/database/sql/scaleset_instances.go index 3278b934..106df956 100644 --- a/database/sql/scaleset_instances.go +++ b/database/sql/scaleset_instances.go @@ -51,7 +51,7 @@ func (s *sqlDatabase) CreateScaleSetInstance(_ context.Context, scaleSetID uint, func (s *sqlDatabase) ListScaleSetInstances(_ context.Context, scalesetID uint) ([]params.Instance, error) { var instances []Instance - query := s.conn.Model(&Instance{}).Preload("Job", "ScaleSet").Where("scale_set_fk_id = ?", scalesetID) + query := s.conn.Model(&Instance{}).Preload("Job").Where("scale_set_fk_id = ?", scalesetID) if err := query.Find(&instances); err.Error != nil { return nil, errors.Wrap(err.Error, "fetching instances") diff --git a/database/sql/util.go b/database/sql/util.go index dda3e9cf..112d0a76 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -79,7 +79,7 @@ func (s *sqlDatabase) sqlToParamsInstance(instance Instance) (params.Instance, e ret.RunnerBootstrapTimeout = instance.ScaleSet.RunnerBootstrapTimeout } - if instance.PoolID != uuid.Nil { + if instance.PoolID != nil { ret.PoolID = instance.PoolID.String() ret.ProviderName = instance.Pool.ProviderName ret.RunnerBootstrapTimeout = instance.Pool.RunnerBootstrapTimeout diff --git a/database/watcher/filters.go b/database/watcher/filters.go index 0c259bce..6a7e8abf 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -1,6 +1,8 @@ package watcher import ( + commonParams "github.com/cloudbase/garm-provider-common/params" + dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" ) @@ -281,3 +283,24 @@ func WithEntityTypeAndCallbackFilter(entityType dbCommon.DatabaseEntityType, cal return ok } } + +func WithInstanceStatusFilter(statuses ...commonParams.InstanceStatus) dbCommon.PayloadFilterFunc { + return func(payload dbCommon.ChangePayload) bool { + if payload.EntityType != dbCommon.InstanceEntityType { + return false + } + instance, ok := payload.Payload.(params.Instance) + if !ok { + return false + } + if len(statuses) == 0 { + return false + } + for _, status := range statuses { + if instance.Status == status { + return true + } + } + return false + } +} diff --git a/params/github.go b/params/github.go index 9b0a1e43..e0ad0452 100644 --- a/params/github.go +++ b/params/github.go @@ -419,18 +419,18 @@ func (r RunnerScaleSetMessage) GetJobsFromBody() ([]ScaleSetJobMessage, error) { } type RunnerReference struct { - ID int `json:"id"` - Name string `json:"name"` - RunnerScaleSetID int `json:"runnerScaleSetId"` - CreatedOn time.Time `json:"createdOn"` - RunnerGroupID uint64 `json:"runnerGroupId"` - RunnerGroupName string `json:"runnerGroupName"` - Version string `json:"version"` - Enabled bool `json:"enabled"` - Ephemeral bool `json:"ephemeral"` - Status RunnerStatus `json:"status"` - DisableUpdate bool `json:"disableUpdate"` - ProvisioningState string `json:"provisioningState"` + ID int64 `json:"id"` + Name string `json:"name"` + RunnerScaleSetID int `json:"runnerScaleSetId"` + CreatedOn interface{} `json:"createdOn"` + RunnerGroupID uint64 `json:"runnerGroupId"` + RunnerGroupName string `json:"runnerGroupName"` + Version string `json:"version"` + Enabled bool `json:"enabled"` + Ephemeral bool `json:"ephemeral"` + 
Status interface{} `json:"status"` + DisableUpdate bool `json:"disableUpdate"` + ProvisioningState string `json:"provisioningState"` } type RunnerScaleSetJitRunnerConfig struct { diff --git a/util/github/scalesets/runners.go b/util/github/scalesets/runners.go index d4d2b3f6..4d6434eb 100644 --- a/util/github/scalesets/runners.go +++ b/util/github/scalesets/runners.go @@ -30,7 +30,7 @@ type scaleSetJitRunnerConfig struct { WorkFolder string `json:"workFolder"` } -func (s *ScaleSetClient) GenerateJitRunnerConfig(ctx context.Context, runnerName string, scaleSet params.RunnerScaleSet) (params.RunnerScaleSetJitRunnerConfig, error) { +func (s *ScaleSetClient) GenerateJitRunnerConfig(ctx context.Context, runnerName string, scaleSetID int) (params.RunnerScaleSetJitRunnerConfig, error) { runnerSettings := scaleSetJitRunnerConfig{ Name: runnerName, WorkFolder: "_work", @@ -41,7 +41,14 @@ func (s *ScaleSetClient) GenerateJitRunnerConfig(ctx context.Context, runnerName return params.RunnerScaleSetJitRunnerConfig{}, err } - req, err := s.newActionsRequest(ctx, http.MethodPost, scaleSet.RunnerJitConfigURL, bytes.NewBuffer(body)) + serviceUrl, err := s.actionsServiceInfo.GetURL() + if err != nil { + return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to get pipeline URL: %w", err) + } + jitConfigPath := fmt.Sprintf("/%s/%d/generatejitconfig", scaleSetEndpoint, scaleSetID) + jitConfigURL := serviceUrl.JoinPath(jitConfigPath) + + req, err := s.newActionsRequest(ctx, http.MethodPost, jitConfigURL.String(), bytes.NewBuffer(body)) if err != nil { return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to create request: %w", err) } @@ -81,6 +88,26 @@ func (s *ScaleSetClient) GetRunner(ctx context.Context, runnerID int64) (params. return runnerReference, nil } +func (s *ScaleSetClient) ListAllRunners(ctx context.Context) (params.RunnerReferenceList, error) { + req, err := s.newActionsRequest(ctx, http.MethodGet, runnerEndpoint, nil) + if err != nil { + return params.RunnerReferenceList{}, fmt.Errorf("failed to construct request: %w", err) + } + + resp, err := s.Do(req) + if err != nil { + return params.RunnerReferenceList{}, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) + } + defer resp.Body.Close() + + var runnerList params.RunnerReferenceList + if err := json.NewDecoder(resp.Body).Decode(&runnerList); err != nil { + return params.RunnerReferenceList{}, fmt.Errorf("failed to decode response: %w", err) + } + + return runnerList, nil +} + func (s *ScaleSetClient) GetRunnerByName(ctx context.Context, runnerName string) (params.RunnerReference, error) { path := fmt.Sprintf("%s?agentName=%s", runnerEndpoint, runnerName) diff --git a/util/github/scalesets/util.go b/util/github/scalesets/util.go index 15c3a5cf..66171dd6 100644 --- a/util/github/scalesets/util.go +++ b/util/github/scalesets/util.go @@ -18,6 +18,7 @@ import ( "context" "fmt" "io" + "log/slog" "net/http" ) @@ -50,5 +51,7 @@ func (s *ScaleSetClient) newActionsRequest(ctx context.Context, method, path str req.Header.Set("Content-Type", "application/json") req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", s.actionsServiceInfo.Token)) + slog.DebugContext(ctx, "newActionsRequest", "method", method, "url", uri.String(), "body", body, "headers", req.Header) + return req, nil } diff --git a/workers/entity/controller.go b/workers/entity/controller.go index 1e0035c0..bfdcabfe 100644 --- a/workers/entity/controller.go +++ b/workers/entity/controller.go @@ -7,31 +7,19 @@ import ( "sync" 
"github.com/cloudbase/garm/auth" - "github.com/cloudbase/garm/config" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/runner/common" - "github.com/cloudbase/garm/runner/providers" garmUtil "github.com/cloudbase/garm/util" ) -func NewController(ctx context.Context, store dbCommon.Store, cfg config.Config) (*Controller, error) { +func NewController(ctx context.Context, store dbCommon.Store, providers map[string]common.Provider) (*Controller, error) { consumerID := "entity-controller" - ctrlID, err := store.ControllerInfo() - if err != nil { - return nil, fmt.Errorf("getting controller info: %w", err) - } - ctx = garmUtil.WithSlogContext( ctx, slog.Any("worker", consumerID)) ctx = auth.GetAdminContext(ctx) - providers, err := providers.LoadProvidersFromConfig(ctx, cfg, ctrlID.ControllerID.String()) - if err != nil { - return nil, fmt.Errorf("loading providers: %w", err) - } - return &Controller{ consumerID: consumerID, ctx: ctx, diff --git a/workers/provider/provider.go b/workers/provider/provider.go new file mode 100644 index 00000000..7f0784e9 --- /dev/null +++ b/workers/provider/provider.go @@ -0,0 +1,73 @@ +package provider + +import ( + "context" + "fmt" + "sync" + + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/runner/common" +) + +func NewWorker(ctx context.Context, store dbCommon.Store, providers map[string]common.Provider) (*provider, error) { + consumerID := "provider-worker" + return &provider{ + ctx: context.Background(), + store: store, + consumerID: consumerID, + providers: providers, + }, nil +} + +type provider struct { + ctx context.Context + consumerID string + + consumer dbCommon.Consumer + // TODO: not all workers should have access to the store. + // We need to implement way to RPC from workers to controllers + // and abstract that into something we can use to eventually + // scale out. 
+ store dbCommon.Store + + providers map[string]common.Provider + + mux sync.Mutex + running bool + quit chan struct{} +} + +func (p *provider) Start() error { + p.mux.Lock() + defer p.mux.Unlock() + + if p.running { + return nil + } + + consumer, err := watcher.RegisterConsumer( + p.ctx, p.consumerID, composeProviderWatcher()) + if err != nil { + return fmt.Errorf("registering consumer: %w", err) + } + p.consumer = consumer + + p.quit = make(chan struct{}) + p.running = true + return nil +} + +func (p *provider) Stop() error { + p.mux.Lock() + defer p.mux.Unlock() + + if !p.running { + return nil + } + + p.consumer.Close() + close(p.quit) + p.running = false + return nil +} diff --git a/workers/provider/util.go b/workers/provider/util.go new file mode 100644 index 00000000..2d84e25e --- /dev/null +++ b/workers/provider/util.go @@ -0,0 +1,18 @@ +package provider + +import ( + commonParams "github.com/cloudbase/garm-provider-common/params" + + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" +) + +func composeProviderWatcher() dbCommon.PayloadFilterFunc { + return watcher.WithAny( + watcher.WithInstanceStatusFilter( + commonParams.InstancePendingCreate, + commonParams.InstancePendingDelete, + commonParams.InstancePendingForceDelete, + ), + ) +} diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index 7e134adb..24df1cbb 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -2,13 +2,19 @@ package scaleset import ( "context" + "errors" "fmt" "log/slog" "sync" "time" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + commonParams "github.com/cloudbase/garm-provider-common/params" + + "github.com/cloudbase/garm-provider-common/util" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/locking" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" "github.com/cloudbase/garm/util/github/scalesets" @@ -188,6 +194,17 @@ func (w *Worker) handleScaleSetEvent(event dbCommon.ChangePayload) { } } +func (w *Worker) handleInstanceCleanup(instance params.Instance) error { + if instance.Status == commonParams.InstanceDeleted { + if err := w.store.DeleteInstanceByName(w.ctx, instance.Name); err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + return fmt.Errorf("deleting instance %s: %w", instance.ID, err) + } + } + } + return nil +} + func (w *Worker) handleInstanceEntityEvent(event dbCommon.ChangePayload) { instance, ok := event.Payload.(params.Instance) if !ok { @@ -319,6 +336,138 @@ func (w *Worker) keepListenerAlive() { } } +func (w *Worker) handleScaleUp(target, current uint) { + if !w.scaleSet.Enabled { + slog.DebugContext(w.ctx, "scale set is disabled; not scaling up") + return + } + + if target <= current { + slog.DebugContext(w.ctx, "target is less than or equal to current; not scaling up") + return + } + + controllerConfig, err := w.store.ControllerInfo() + if err != nil { + slog.ErrorContext(w.ctx, "error getting controller config", "error", err) + return + } + + for i := current; i < target; i++ { + newRunnerName := fmt.Sprintf("%s-%s", w.scaleSet.GetRunnerPrefix(), util.NewID()) + jitConfig, err := w.scaleSetCli.GenerateJitRunnerConfig(w.ctx, newRunnerName, w.scaleSet.ScaleSetID) + if err != nil { + slog.ErrorContext(w.ctx, "error generating jit config", "error", err) + continue + } + slog.DebugContext(w.ctx, "creating new runner", "runner_name", newRunnerName) + decodedJit, 
err := jitConfig.DecodedJITConfig() + if err != nil { + slog.ErrorContext(w.ctx, "error decoding jit config", "error", err) + continue + } + runnerParams := params.CreateInstanceParams{ + Name: newRunnerName, + Status: commonParams.InstancePendingCreate, + RunnerStatus: params.RunnerPending, + OSArch: w.scaleSet.OSArch, + OSType: w.scaleSet.OSType, + CallbackURL: controllerConfig.CallbackURL, + MetadataURL: controllerConfig.MetadataURL, + CreateAttempt: 1, + GitHubRunnerGroup: w.scaleSet.GitHubRunnerGroup, + JitConfiguration: decodedJit, + AgentID: int64(jitConfig.Runner.ID), + } + + if _, err := w.store.CreateScaleSetInstance(w.ctx, w.scaleSet.ID, runnerParams); err != nil { + slog.ErrorContext(w.ctx, "error creating instance", "error", err) + if err := w.scaleSetCli.RemoveRunner(w.ctx, jitConfig.Runner.ID); err != nil { + slog.ErrorContext(w.ctx, "error deleting runner", "error", err) + } + continue + } + + runnerDetails, err := w.scaleSetCli.GetRunner(w.ctx, jitConfig.Runner.ID) + if err != nil { + slog.ErrorContext(w.ctx, "error getting runner details", "error", err) + continue + } + slog.DebugContext(w.ctx, "runner details", "runner_details", runnerDetails) + } +} + +func (w *Worker) handleScaleDown(target, current uint) { + delta := current - target + if delta <= 0 { + return + } + w.mux.Lock() + defer w.mux.Unlock() + removed := 0 + for _, runner := range w.runners { + if removed >= int(delta) { + break + } + + locked, err := locking.TryLock(runner.Name) + if err != nil || !locked { + slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) + continue + } + + switch runner.Status { + case commonParams.InstancePendingCreate, commonParams.InstanceRunning: + case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete: + removed++ + locking.Unlock(runner.Name, true) + continue + default: + slog.DebugContext(w.ctx, "runner is not in a valid state; skipping", "runner_name", runner.Name, "runner_status", runner.Status) + locking.Unlock(runner.Name, false) + continue + } + + switch runner.RunnerStatus { + case params.RunnerTerminated, params.RunnerActive: + slog.DebugContext(w.ctx, "runner is not in a valid state; skipping", "runner_name", runner.Name, "runner_status", runner.RunnerStatus) + locking.Unlock(runner.Name, false) + continue + } + + slog.DebugContext(w.ctx, "removing runner", "runner_name", runner.Name) + if err := w.scaleSetCli.RemoveRunner(w.ctx, runner.AgentID); err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + slog.ErrorContext(w.ctx, "error removing runner", "runner_name", runner.Name, "error", err) + locking.Unlock(runner.Name, false) + continue + } + } + runnerUpdateParams := params.UpdateInstanceParams{ + Status: commonParams.InstancePendingDelete, + } + if _, err := w.store.UpdateInstance(w.ctx, runner.Name, runnerUpdateParams); err != nil { + if errors.Is(err, runnerErrors.ErrNotFound) { + // The error seems to be that the instance was removed from the database. We still had it in our + // state, so either the update never came from the watcher or something else happened. + // Remove it from the local cache. + delete(w.runners, runner.ID) + removed++ + locking.Unlock(runner.Name, true) + continue + } + // TODO: This should not happen, unless there is some issue with the database. + // The UpdateInstance() function should add tenacity, but even in that case, if it + // still errors out, we need to handle it somehow. 
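
// One way to add the "tenacity" mentioned in the comment above is to wrap the
// status update in a small retry loop with a backoff. This is an illustrative
// sketch, not part of this patch: it assumes the store exposes UpdateInstance
// exactly as it is used in this file, and the helper name is hypothetical.
func updateInstanceWithRetry(ctx context.Context, store dbCommon.Store, name string, update params.UpdateInstanceParams) (params.Instance, error) {
	var lastErr error
	for attempt := 1; attempt <= 3; attempt++ {
		instance, err := store.UpdateInstance(ctx, name, update)
		if err == nil {
			return instance, nil
		}
		if errors.Is(err, runnerErrors.ErrNotFound) {
			// Retrying will not help if the record is gone; surface the error.
			return params.Instance{}, err
		}
		lastErr = err
		select {
		case <-ctx.Done():
			return params.Instance{}, ctx.Err()
		case <-time.After(time.Duration(attempt) * time.Second):
			// Linear backoff between attempts.
		}
	}
	return params.Instance{}, lastErr
}
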
+ slog.ErrorContext(w.ctx, "error updating runner", "runner_name", runner.Name, "error", err) + locking.Unlock(runner.Name, false) + continue + } + removed++ + locking.Unlock(runner.Name, false) + } +} + func (w *Worker) handleAutoScale() { ticker := time.NewTicker(5 * time.Second) defer ticker.Stop() @@ -337,6 +486,14 @@ func (w *Worker) handleAutoScale() { case <-w.ctx.Done(): return case <-ticker.C: + w.mux.Lock() + for _, instance := range w.runners { + if err := w.handleInstanceCleanup(instance); err != nil { + slog.ErrorContext(w.ctx, "error cleaning up instance", "instance_id", instance.ID, "error", err) + } + } + w.mux.Unlock() + var desiredRunners uint if w.scaleSet.DesiredRunnerCount > 0 { desiredRunners = uint(w.scaleSet.DesiredRunnerCount) @@ -351,8 +508,10 @@ func (w *Worker) handleAutoScale() { if currentRunners < targetRunners { lastMsgDebugLog("scaling up", targetRunners, currentRunners) + w.handleScaleUp(targetRunners, currentRunners) } else { lastMsgDebugLog("attempting to scale down", targetRunners, currentRunners) + w.handleScaleDown(targetRunners, currentRunners) } } } From 436fd7746fa1bcd7c58b87bbafcfa4627c3a97bc Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 23 Apr 2025 08:59:33 +0000 Subject: [PATCH 023/179] WiP Signed-off-by: Gabriel Adrian Samfira --- apiserver/controllers/instances.go | 2 +- apiserver/controllers/scalesets.go | 6 +- database/sql/models.go | 17 +++- database/sql/sql.go | 1 + database/sql/util.go | 34 ++++++++ workers/provider/provider.go | 123 ++++++++++++++++++++++++++++ workers/scaleset/scaleset.go | 125 ++++++++++++++++++++++++++++- 7 files changed, 301 insertions(+), 7 deletions(-) diff --git a/apiserver/controllers/instances.go b/apiserver/controllers/instances.go index fd6d2c45..3209a5c2 100644 --- a/apiserver/controllers/instances.go +++ b/apiserver/controllers/instances.go @@ -97,7 +97,7 @@ func (a *APIController) ListScaleSetInstancesHandler(w http.ResponseWriter, r *h } return } - id, err := strconv.ParseUint(scalesetID, 10, 64) + id, err := strconv.ParseUint(scalesetID, 10, 32) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") handleError(ctx, w, gErrors.ErrBadRequest) diff --git a/apiserver/controllers/scalesets.go b/apiserver/controllers/scalesets.go index d12928f0..1d26221b 100644 --- a/apiserver/controllers/scalesets.go +++ b/apiserver/controllers/scalesets.go @@ -79,7 +79,7 @@ func (a *APIController) GetScaleSetByIDHandler(w http.ResponseWriter, r *http.Re } return } - id, err := strconv.ParseUint(scaleSetID, 10, 64) + id, err := strconv.ParseUint(scaleSetID, 10, 32) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") handleError(ctx, w, gErrors.ErrBadRequest) @@ -130,7 +130,7 @@ func (a *APIController) DeleteScaleSetByIDHandler(w http.ResponseWriter, r *http return } - id, err := strconv.ParseUint(scalesetID, 10, 64) + id, err := strconv.ParseUint(scalesetID, 10, 32) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") handleError(ctx, w, gErrors.ErrBadRequest) @@ -183,7 +183,7 @@ func (a *APIController) UpdateScaleSetByIDHandler(w http.ResponseWriter, r *http return } - id, err := strconv.ParseUint(scalesetID, 10, 64) + id, err := strconv.ParseUint(scalesetID, 10, 32) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") handleError(ctx, w, gErrors.ErrBadRequest) diff --git a/database/sql/models.go b/database/sql/models.go index c1b6462d..3b1dcc9b 100644 --- 
a/database/sql/models.go +++ b/database/sql/models.go @@ -86,6 +86,17 @@ type Pool struct { Priority uint `gorm:"index:idx_pool_priority"` } +type ScaleSetEvent struct { + gorm.Model + + EventType params.EventType + EventLevel params.EventLevel + Message string `gorm:"type:text"` + + ScaleSetID uint `gorm:"index:idx_scale_set_event"` + ScaleSet ScaleSet `gorm:"foreignKey:ScaleSetID"` +} + // ScaleSet represents a github scale set. Scale sets are almost identical to pools with a few // notable exceptions: // - Labels are no longer relevant @@ -135,7 +146,11 @@ type ScaleSet struct { EnterpriseID *uuid.UUID `gorm:"index"` Enterprise Enterprise `gorm:"foreignKey:EnterpriseID"` - Instances []Instance `gorm:"foreignKey:ScaleSetFkID"` + Status string + StatusReason string `gorm:"type:text"` + + Instances []Instance `gorm:"foreignKey:ScaleSetFkID"` + Events []ScaleSetEvent `gorm:"foreignKey:ScaleSetID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` } type RepositoryEvent struct { diff --git a/database/sql/sql.go b/database/sql/sql.go index a704d9c3..878224c6 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -432,6 +432,7 @@ func (s *sqlDatabase) migrateDB() error { &ControllerInfo{}, &WorkflowJob{}, &ScaleSet{}, + &ScaleSetEvent{}, ); err != nil { return errors.Wrap(err, "running auto migrate") } diff --git a/database/sql/util.go b/database/sql/util.go index 112d0a76..5bd8de01 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -634,6 +634,40 @@ func (s *sqlDatabase) GetGithubEntity(_ context.Context, entityType params.Githu return entity, nil } +func (s *sqlDatabase) AddScaleSetEvent(ctx context.Context, scaleSetID uint, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { + scaleSet, err := s.GetScaleSetByID(ctx, scaleSetID) + if err != nil { + return errors.Wrap(err, "updating instance") + } + + msg := InstanceStatusUpdate{ + Message: statusMessage, + EventType: event, + EventLevel: eventLevel, + } + + if err := s.conn.Model(&scaleSet).Association("Events").Append(&msg); err != nil { + return errors.Wrap(err, "adding status message") + } + + if maxEvents > 0 { + var latestEvents []ScaleSetEvent + q := s.conn.Model(&ScaleSetEvent{}). + Limit(maxEvents).Order("id desc"). + Where("scale_set_id = ?", scaleSetID).Find(&latestEvents) + if q.Error != nil { + return errors.Wrap(q.Error, "fetching latest events") + } + if len(latestEvents) == maxEvents { + lastInList := latestEvents[len(latestEvents)-1] + if err := s.conn.Where("scale_set_id = ? and id < ?", scaleSetID, lastInList.ID).Unscoped().Delete(&ScaleSetEvent{}).Error; err != nil { + return errors.Wrap(err, "deleting old events") + } + } + } + return nil +} + func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { repo, err := s.GetRepositoryByID(ctx, repoID) if err != nil { diff --git a/workers/provider/provider.go b/workers/provider/provider.go index 7f0784e9..969a373d 100644 --- a/workers/provider/provider.go +++ b/workers/provider/provider.go @@ -3,10 +3,12 @@ package provider import ( "context" "fmt" + "log/slog" "sync" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" ) @@ -32,12 +34,51 @@ type provider struct { store dbCommon.Store providers map[string]common.Provider + // A cache of all scale sets kept updated by the watcher. 
+ // This helps us avoid a bunch of queries to the database. + scaleSets map[uint]params.ScaleSet + runners map[string]params.Instance mux sync.Mutex running bool quit chan struct{} } +func (p *provider) loadAllScaleSets() error { + p.mux.Lock() + defer p.mux.Unlock() + + scaleSets, err := p.store.ListAllScaleSets(p.ctx) + if err != nil { + return fmt.Errorf("fetching scale sets: %w", err) + } + + for _, scaleSet := range scaleSets { + p.scaleSets[scaleSet.ID] = scaleSet + } + + return nil +} + +// loadAllRunners loads all runners from the database. At this stage we only +// care about runners created by scale sets, but in the future, we will migrate +// the pool manager to the same model. +func (p *provider) loadAllRunners() error { + p.mux.Lock() + defer p.mux.Unlock() + + runners, err := p.store.ListAllInstances(p.ctx) + if err != nil { + return fmt.Errorf("fetching runners: %w", err) + } + + for _, runner := range runners { + p.runners[runner.Name] = runner + } + + return nil +} + func (p *provider) Start() error { p.mux.Lock() defer p.mux.Unlock() @@ -46,6 +87,14 @@ func (p *provider) Start() error { return nil } + if err := p.loadAllScaleSets(); err != nil { + return fmt.Errorf("loading all scale sets: %w", err) + } + + if err := p.loadAllRunners(); err != nil { + return fmt.Errorf("loading all runners: %w", err) + } + consumer, err := watcher.RegisterConsumer( p.ctx, p.consumerID, composeProviderWatcher()) if err != nil { @@ -55,6 +104,8 @@ func (p *provider) Start() error { p.quit = make(chan struct{}) p.running = true + go p.loop() + return nil } @@ -71,3 +122,75 @@ func (p *provider) Stop() error { p.running = false return nil } + +func (p *provider) loop() { + defer p.Stop() + for { + select { + case payload := <-p.consumer.Watch(): + slog.InfoContext(p.ctx, "received payload", slog.Any("payload", payload)) + go p.handleWatcherEvent(payload) + case <-p.ctx.Done(): + return + case <-p.quit: + return + } + } +} + +func (p *provider) handleWatcherEvent(payload dbCommon.ChangePayload) { + switch payload.EntityType { + case dbCommon.ScaleSetEntityType: + p.handleScaleSetEvent(payload) + case dbCommon.InstanceEntityType: + p.handleInstanceEvent(payload) + default: + slog.ErrorContext(p.ctx, "invalid entity type", "entity_type", payload.EntityType) + } +} + +func (p *provider) handleScaleSetEvent(event dbCommon.ChangePayload) { + p.mux.Lock() + defer p.mux.Unlock() + + scaleSet, ok := event.Payload.(params.ScaleSet) + if !ok { + slog.ErrorContext(p.ctx, "invalid payload type", "payload_type", fmt.Sprintf("%T", event.Payload)) + return + } + + switch event.Operation { + case dbCommon.CreateOperation, dbCommon.UpdateOperation: + slog.DebugContext(p.ctx, "got create/update operation") + p.scaleSets[scaleSet.ID] = scaleSet + case dbCommon.DeleteOperation: + slog.DebugContext(p.ctx, "got delete operation") + delete(p.scaleSets, scaleSet.ID) + default: + slog.ErrorContext(p.ctx, "invalid operation type", "operation_type", event.Operation) + return + } +} + +func (p *provider) handleInstanceEvent(event dbCommon.ChangePayload) { + p.mux.Lock() + defer p.mux.Unlock() + + instance, ok := event.Payload.(params.Instance) + if !ok { + slog.ErrorContext(p.ctx, "invalid payload type", "payload_type", fmt.Sprintf("%T", event.Payload)) + return + } + + switch event.Operation { + case dbCommon.CreateOperation, dbCommon.UpdateOperation: + slog.DebugContext(p.ctx, "got create/update operation") + p.runners[instance.Name] = instance + case dbCommon.DeleteOperation: + slog.DebugContext(p.ctx, "got delete 
operation") + delete(p.runners, instance.Name) + default: + slog.ErrorContext(p.ctx, "invalid operation type", "operation_type", event.Operation) + return + } +} diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index 24df1cbb..012a41d1 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -99,7 +99,96 @@ func (w *Worker) Start() (err error) { } for _, instance := range instances { + if instance.Status == commonParams.InstanceCreating { + // We're just starting up. We found an instance stuck in creating. + // When a provider creates an instance, it sets the db instance to + // creating and then issues an API call to the IaaS to create the + // instance using some userdata it needs to come up. But the instance + // will still need to call back home to fetch aditional metadata and + // complete its setup. We should remove the instance as it is not + // possible to reliably determine the state of the instance (if it's in + // mid boot before it reached the phase where it runs the metadtata, or + // if it already failed). + instanceState := commonParams.InstancePendingDelete + locking.Lock(instance.Name) + if instance.AgentID != 0 { + if err := w.scaleSetCli.RemoveRunner(w.ctx, instance.AgentID); err != nil { + // scale sets use JIT runners. This means that we create the runner in github + // before we create the actual instance that will use the credentials. We need + // to remove the runner from github if it exists. + if !errors.Is(err, runnerErrors.ErrNotFound) { + if errors.Is(err, runnerErrors.ErrUnauthorized) { + // we don't have access to remove the runner. This implies that our + // credentials may have expired. + // + // TODO: we need to set the scale set as inactive and stop the listener (if any). + slog.ErrorContext(w.ctx, "error removing runner", "runner_name", instance.Name, "error", err) + w.runners[instance.ID] = instance + locking.Unlock(instance.Name, false) + continue + } + // The runner may have come up, registered and is currently running a + // job, in which case, github will not allow us to remove it. + runnerInstance, err := w.scaleSetCli.GetRunner(w.ctx, instance.AgentID) + if err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + // We could not get info about the runner and it wasn't not found + slog.ErrorContext(w.ctx, "error getting runner details", "error", err) + w.runners[instance.ID] = instance + locking.Unlock(instance.Name, false) + continue + } + } + if runnerInstance.Status == string(params.RunnerIdle) || + runnerInstance.Status == string(params.RunnerActive) { + // This is a highly unlikely scenario, but let's account for it anyway. + // + // The runner is running a job or is idle. Mark it as running, as + // it appears that it finished booting and is now running. + // + // NOTE: if the instance was in creating and it managed to boot, there + // is a high chance that the we do not have a provider ID for the runner + // inside our database. When removing the runner, the provider will attempt + // to use the instance name instead of the provider ID, the same as when + // creation of the instance fails and we try to clean up any lingering resources + // in the provider. 
+ slog.DebugContext(w.ctx, "runner is running a job or is idle; not removing", "runner_name", instance.Name) + instanceState = commonParams.InstanceRunning + } + } + } + } + runnerUpdateParams := params.UpdateInstanceParams{ + Status: instanceState, + } + instance, err = w.store.UpdateInstance(w.ctx, instance.Name, runnerUpdateParams) + if err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + locking.Unlock(instance.Name, false) + return fmt.Errorf("updating runner %s: %w", instance.Name, err) + } + } + locking.Unlock(instance.Name, false) + } else if instance.Status == commonParams.InstanceDeleting { + // Set the instance in deleting. It is assumed that the runner was already + // removed from github either by github or by garm. Deleting status indicates + // that it was already being handled by the provider. There should be no entry on + // github for the runner if that was the case. + // Setting it in pending_delete will cause the provider to try again, an operation + // which is idempotent (if it's already deleted, the provider reports success). + runnerUpdateParams := params.UpdateInstanceParams{ + Status: commonParams.InstancePendingDelete, + } + instance, err = w.store.UpdateInstance(w.ctx, instance.Name, runnerUpdateParams) + if err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + locking.Unlock(instance.Name, false) + return fmt.Errorf("updating runner %s: %w", instance.Name, err) + } + } + } w.runners[instance.ID] = instance + locking.Unlock(instance.Name, false) } consumer, err := watcher.RegisterConsumer( @@ -212,11 +301,43 @@ func (w *Worker) handleInstanceEntityEvent(event dbCommon.ChangePayload) { return } switch event.Operation { - case dbCommon.UpdateOperation, dbCommon.CreateOperation: - slog.DebugContext(w.ctx, "got update operation") + case dbCommon.CreateOperation: + slog.DebugContext(w.ctx, "got create operation") w.mux.Lock() w.runners[instance.ID] = instance w.mux.Unlock() + case dbCommon.UpdateOperation: + slog.DebugContext(w.ctx, "got update operation") + w.mux.Lock() + oldInstance, ok := w.runners[instance.ID] + w.runners[instance.ID] = instance + + if !ok { + slog.DebugContext(w.ctx, "instance not found in local cache; ignoring", "instance_id", instance.ID) + w.mux.Unlock() + return + } + if oldInstance.RunnerStatus != instance.RunnerStatus && instance.RunnerStatus == params.RunnerIdle { + serviceRuner, err := w.scaleSetCli.GetRunner(w.ctx, instance.AgentID) + if err != nil { + slog.ErrorContext(w.ctx, "error getting runner details", "error", err) + w.mux.Unlock() + return + } + status, ok := serviceRuner.Status.(string) + if !ok { + slog.ErrorContext(w.ctx, "error getting runner status", "runner_id", instance.AgentID) + w.mux.Unlock() + return + } + if status != string(params.RunnerIdle) && status != string(params.RunnerActive) { + // TODO: Wait for the status to change for a while (30 seconds?). Mark the instance as + // pending_delete if the runner never comes online. + w.mux.Unlock() + return + } + } + w.mux.Unlock() case dbCommon.DeleteOperation: slog.DebugContext(w.ctx, "got delete operation") w.mux.Lock() From 004ad1f12446252a4ed5bf47e997dd0bddd0e272 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 24 Apr 2025 23:29:40 +0000 Subject: [PATCH 024/179] Add provider worker code Runners now get created and cleaned up in scale sets. 
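
As a quick orientation for the changes below: the provider worker walks each runner through a small set of statuses. The helper below is an illustrative sketch only (it is not part of this patch) and uses the status constants from garm-provider-common that appear throughout the code.

// nextStatus shows the happy-path progression the provider worker drives.
// Error handling and force-delete are omitted for brevity.
func nextStatus(current commonParams.InstanceStatus) commonParams.InstanceStatus {
	switch current {
	case commonParams.InstancePendingCreate:
		return commonParams.InstanceCreating
	case commonParams.InstanceCreating:
		return commonParams.InstanceRunning
	case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete:
		return commonParams.InstanceDeleting
	case commonParams.InstanceDeleting:
		return commonParams.InstanceDeleted
	default:
		return current
	}
}
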
Signed-off-by: Gabriel Adrian Samfira --- auth/instance_middleware.go | 4 +- cmd/garm/main.go | 22 ++ database/sql/instances.go | 12 +- database/sql/models.go | 17 +- database/sql/scalesets.go | 42 ++- database/sql/sql.go | 1 - database/sql/util.go | 34 --- locking/interface.go | 4 +- locking/local_locker.go | 40 ++- locking/locking.go | 20 +- runner/metadata.go | 48 ++- runner/pool/pool.go | 16 +- util/github/scalesets/util.go | 3 - workers/entity/controller.go | 2 +- workers/entity/worker.go | 2 +- workers/provider/errors.go | 7 + workers/provider/instance_manager.go | 422 ++++++++++++++++++++++++++ workers/provider/provider.go | 117 +++++-- workers/provider/provider_helper.go | 81 +++++ workers/scaleset/controller.go | 15 +- workers/scaleset/scaleset.go | 61 +++- workers/scaleset/scaleset_helper.go | 13 +- workers/scaleset/scaleset_listener.go | 1 + 23 files changed, 837 insertions(+), 147 deletions(-) create mode 100644 workers/provider/errors.go create mode 100644 workers/provider/instance_manager.go create mode 100644 workers/provider/provider_helper.go diff --git a/auth/instance_middleware.go b/auth/instance_middleware.go index b7194d5c..dbd3cfb7 100644 --- a/auth/instance_middleware.go +++ b/auth/instance_middleware.go @@ -60,7 +60,7 @@ type instanceToken struct { jwtSecret string } -func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity string, poolType params.GithubEntityType, ttlMinutes uint) (string, error) { +func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity string, entityType params.GithubEntityType, ttlMinutes uint) (string, error) { // Token expiration is equal to the bootstrap timeout set on the pool plus the polling // interval garm uses to check for timed out runners. Runners that have not sent their info // by the end of this interval are most likely failed and will be reaped by garm anyway. 
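
// For illustration, minting a token with a TTL like the one described above
// generally looks as follows. This sketch assumes the golang-jwt/jwt/v5
// package; the claims struct and function name are hypothetical stand-ins,
// not garm's actual types.

import (
	"time"

	"github.com/golang-jwt/jwt/v5"
)

type instanceClaims struct {
	jwt.RegisteredClaims
	Name  string `json:"name"`
	Scope string `json:"scope"`
}

func mintInstanceToken(secret, name, scope string, ttlMinutes uint) (string, error) {
	now := time.Now()
	claims := instanceClaims{
		RegisteredClaims: jwt.RegisteredClaims{
			IssuedAt:  jwt.NewNumericDate(now),
			ExpiresAt: jwt.NewNumericDate(now.Add(time.Duration(ttlMinutes) * time.Minute)),
		},
		Name:  name,
		Scope: scope,
	}
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
	return token.SignedString([]byte(secret))
}
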
@@ -82,7 +82,7 @@ func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity str ID: instance.ID, Name: instance.Name, PoolID: instance.PoolID, - Scope: poolType, + Scope: entityType, Entity: entity, CreateAttempt: instance.CreateAttempt, } diff --git a/cmd/garm/main.go b/cmd/garm/main.go index 5879fd0a..d117dc6a 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -25,6 +25,7 @@ import ( "net/http" "os" "os/signal" + "runtime" "syscall" "time" @@ -51,6 +52,7 @@ import ( "github.com/cloudbase/garm/util/appdefaults" "github.com/cloudbase/garm/websocket" "github.com/cloudbase/garm/workers/entity" + "github.com/cloudbase/garm/workers/provider" ) var ( @@ -247,6 +249,19 @@ func main() { log.Fatalf("failed to start entity controller: %+v", err) } + instanceTokenGetter, err := auth.NewInstanceTokenGetter(cfg.JWTAuth.Secret) + if err != nil { + log.Fatalf("failed to create instance token getter: %+v", err) + } + + providerWorker, err := provider.NewWorker(ctx, db, providers, instanceTokenGetter) + if err != nil { + log.Fatalf("failed to create provider worker: %+v", err) + } + if err := providerWorker.Start(); err != nil { + log.Fatalf("failed to start provider worker: %+v", err) + } + runner, err := runner.NewRunner(ctx, *cfg, db) if err != nil { log.Fatalf("failed to create controller: %+v", err) @@ -305,6 +320,8 @@ func main() { } if cfg.Default.DebugServer { + runtime.SetBlockProfileRate(1) + runtime.SetMutexProfileFraction(1) slog.InfoContext(ctx, "setting up debug routes") router = routers.WithDebugServer(router) } @@ -348,6 +365,11 @@ func main() { slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop entity controller") } + slog.InfoContext(ctx, "shutting down provider worker") + if err := providerWorker.Stop(); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop provider worker") + } + shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 60*time.Second) defer shutdownCancel() if err := srv.Shutdown(shutdownCtx); err != nil { diff --git a/database/sql/instances.go b/database/sql/instances.go index cf0020b5..604682e9 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -189,13 +189,19 @@ func (s *sqlDatabase) DeleteInstanceByName(ctx context.Context, instanceName str if instance.ProviderID != nil { providerID = *instance.ProviderID } - if notifyErr := s.sendNotify(common.InstanceEntityType, common.DeleteOperation, params.Instance{ + payload := params.Instance{ ID: instance.ID.String(), Name: instance.Name, ProviderID: providerID, AgentID: instance.AgentID, - PoolID: instance.PoolID.String(), - }); notifyErr != nil { + } + if instance.PoolID != nil { + payload.PoolID = instance.PoolID.String() + } + if instance.ScaleSetFkID != nil { + payload.ScaleSetID = *instance.ScaleSetFkID + } + if notifyErr := s.sendNotify(common.InstanceEntityType, common.DeleteOperation, payload); notifyErr != nil { slog.With(slog.Any("error", notifyErr)).Error("failed to send notify") } } diff --git a/database/sql/models.go b/database/sql/models.go index 3b1dcc9b..c1b6462d 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -86,17 +86,6 @@ type Pool struct { Priority uint `gorm:"index:idx_pool_priority"` } -type ScaleSetEvent struct { - gorm.Model - - EventType params.EventType - EventLevel params.EventLevel - Message string `gorm:"type:text"` - - ScaleSetID uint `gorm:"index:idx_scale_set_event"` - ScaleSet ScaleSet `gorm:"foreignKey:ScaleSetID"` -} - // ScaleSet represents a github scale set. 
Scale sets are almost identical to pools with a few // notable exceptions: // - Labels are no longer relevant @@ -146,11 +135,7 @@ type ScaleSet struct { EnterpriseID *uuid.UUID `gorm:"index"` Enterprise Enterprise `gorm:"foreignKey:EnterpriseID"` - Status string - StatusReason string `gorm:"type:text"` - - Instances []Instance `gorm:"foreignKey:ScaleSetFkID"` - Events []ScaleSetEvent `gorm:"foreignKey:ScaleSetID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` + Instances []Instance `gorm:"foreignKey:ScaleSetFkID"` } type RepositoryEvent struct { diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go index 3adc423c..f168813b 100644 --- a/database/sql/scalesets.go +++ b/database/sql/scalesets.go @@ -380,10 +380,25 @@ func (s *sqlDatabase) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) ( return nil } -func (s *sqlDatabase) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error { +func (s *sqlDatabase) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) (err error) { + var scaleSet params.ScaleSet + defer func() { + if err == nil && scaleSet.ID != 0 { + s.sendNotify(common.ScaleSetEntityType, common.UpdateOperation, scaleSet) + } + }() if err := s.conn.Transaction(func(tx *gorm.DB) error { - if q := tx.Model(&ScaleSet{}).Where("id = ?", scaleSetID).Update("last_message_id", lastMessageID); q.Error != nil { - return errors.Wrap(q.Error, "saving database entry") + dbSet, err := s.getScaleSetByID(tx, scaleSetID) + if err != nil { + return errors.Wrap(err, "fetching scale set") + } + dbSet.LastMessageID = lastMessageID + if err := tx.Save(&dbSet).Error; err != nil { + return errors.Wrap(err, "saving database entry") + } + scaleSet, err = s.sqlToCommonScaleSet(dbSet) + if err != nil { + return errors.Wrap(err, "converting scale set") } return nil }); err != nil { @@ -392,10 +407,25 @@ func (s *sqlDatabase) SetScaleSetLastMessageID(ctx context.Context, scaleSetID u return nil } -func (s *sqlDatabase) SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int) error { +func (s *sqlDatabase) SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int) (err error) { + var scaleSet params.ScaleSet + defer func() { + if err == nil && scaleSet.ID != 0 { + s.sendNotify(common.ScaleSetEntityType, common.UpdateOperation, scaleSet) + } + }() if err := s.conn.Transaction(func(tx *gorm.DB) error { - if q := tx.Model(&ScaleSet{}).Where("id = ?", scaleSetID).Update("desired_runner_count", desiredRunnerCount); q.Error != nil { - return errors.Wrap(q.Error, "saving database entry") + dbSet, err := s.getScaleSetByID(tx, scaleSetID) + if err != nil { + return errors.Wrap(err, "fetching scale set") + } + dbSet.DesiredRunnerCount = desiredRunnerCount + if err := tx.Save(&dbSet).Error; err != nil { + return errors.Wrap(err, "saving database entry") + } + scaleSet, err = s.sqlToCommonScaleSet(dbSet) + if err != nil { + return errors.Wrap(err, "converting scale set") } return nil }); err != nil { diff --git a/database/sql/sql.go b/database/sql/sql.go index 878224c6..a704d9c3 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -432,7 +432,6 @@ func (s *sqlDatabase) migrateDB() error { &ControllerInfo{}, &WorkflowJob{}, &ScaleSet{}, - &ScaleSetEvent{}, ); err != nil { return errors.Wrap(err, "running auto migrate") } diff --git a/database/sql/util.go b/database/sql/util.go index 5bd8de01..112d0a76 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ 
-634,40 +634,6 @@ func (s *sqlDatabase) GetGithubEntity(_ context.Context, entityType params.Githu return entity, nil } -func (s *sqlDatabase) AddScaleSetEvent(ctx context.Context, scaleSetID uint, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { - scaleSet, err := s.GetScaleSetByID(ctx, scaleSetID) - if err != nil { - return errors.Wrap(err, "updating instance") - } - - msg := InstanceStatusUpdate{ - Message: statusMessage, - EventType: event, - EventLevel: eventLevel, - } - - if err := s.conn.Model(&scaleSet).Association("Events").Append(&msg); err != nil { - return errors.Wrap(err, "adding status message") - } - - if maxEvents > 0 { - var latestEvents []ScaleSetEvent - q := s.conn.Model(&ScaleSetEvent{}). - Limit(maxEvents).Order("id desc"). - Where("scale_set_id = ?", scaleSetID).Find(&latestEvents) - if q.Error != nil { - return errors.Wrap(q.Error, "fetching latest events") - } - if len(latestEvents) == maxEvents { - lastInList := latestEvents[len(latestEvents)-1] - if err := s.conn.Where("scale_set_id = ? and id < ?", scaleSetID, lastInList.ID).Unscoped().Delete(&ScaleSetEvent{}).Error; err != nil { - return errors.Wrap(err, "deleting old events") - } - } - } - return nil -} - func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { repo, err := s.GetRepositoryByID(ctx, repoID) if err != nil { diff --git a/locking/interface.go b/locking/interface.go index 07380a7b..d6a0b62d 100644 --- a/locking/interface.go +++ b/locking/interface.go @@ -4,8 +4,8 @@ import "time" // TODO(gabriel-samfira): needs owner attribute. type Locker interface { - TryLock(key string) bool - Lock(key string) + TryLock(key, identifier string) bool + Lock(key, identifier string) Unlock(key string, remove bool) Delete(key string) } diff --git a/locking/local_locker.go b/locking/local_locker.go index ad41345c..270138ef 100644 --- a/locking/local_locker.go +++ b/locking/local_locker.go @@ -2,6 +2,9 @@ package locking import ( "context" + "fmt" + "log/slog" + "runtime" "sync" "time" @@ -21,18 +24,29 @@ type keyMutex struct { muxes sync.Map } -var _ Locker = &keyMutex{} - -func (k *keyMutex) TryLock(key string) bool { - mux, _ := k.muxes.LoadOrStore(key, &sync.Mutex{}) - keyMux := mux.(*sync.Mutex) - return keyMux.TryLock() +type lockWithIdent struct { + mux sync.Mutex + ident string } -func (k *keyMutex) Lock(key string) { - mux, _ := k.muxes.LoadOrStore(key, &sync.Mutex{}) - keyMux := mux.(*sync.Mutex) - keyMux.Lock() +var _ Locker = &keyMutex{} + +func (k *keyMutex) TryLock(key, identifier string) bool { + mux, _ := k.muxes.LoadOrStore(key, &lockWithIdent{ + mux: sync.Mutex{}, + ident: identifier, + }) + keyMux := mux.(*lockWithIdent) + return keyMux.mux.TryLock() +} + +func (k *keyMutex) Lock(key, identifier string) { + mux, _ := k.muxes.LoadOrStore(key, &lockWithIdent{ + mux: sync.Mutex{}, + ident: identifier, + }) + keyMux := mux.(*lockWithIdent) + keyMux.mux.Lock() } func (k *keyMutex) Unlock(key string, remove bool) { @@ -40,11 +54,13 @@ func (k *keyMutex) Unlock(key string, remove bool) { if !ok { return } - keyMux := mux.(*sync.Mutex) + keyMux := mux.(*lockWithIdent) if remove { k.Delete(key) } - keyMux.Unlock() + _, filename, line, _ := runtime.Caller(1) + slog.Debug("unlocking", "key", key, "identifier", keyMux.ident, "caller", fmt.Sprintf("%s:%d", filename, line)) + keyMux.mux.Unlock() } func (k *keyMutex) Delete(key string) { diff --git 
a/locking/locking.go b/locking/locking.go index 6628d8b1..c7d99b1d 100644 --- a/locking/locking.go +++ b/locking/locking.go @@ -2,29 +2,41 @@ package locking import ( "fmt" + "log/slog" + "runtime" "sync" ) var locker Locker var lockerMux = sync.Mutex{} -func TryLock(key string) (bool, error) { +func TryLock(key, identifier string) (ok bool, err error) { + _, filename, line, _ := runtime.Caller(1) + slog.Debug("attempting to try lock", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) + defer slog.Debug("try lock returned", "key", key, "identifier", identifier, "locked", ok, "caller", fmt.Sprintf("%s:%d", filename, line)) if locker == nil { return false, fmt.Errorf("no locker is registered") } - return locker.TryLock(key), nil + ok = locker.TryLock(key, identifier) + return ok, nil } -func Lock(key string) { +func Lock(key, identifier string) { + _, filename, line, _ := runtime.Caller(1) + slog.Debug("attempting to lock", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) + defer slog.Debug("lock acquired", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) + if locker == nil { panic("no locker is registered") } - locker.Lock(key) + locker.Lock(key, identifier) } func Unlock(key string, remove bool) error { + _, filename, line, _ := runtime.Caller(1) + slog.Debug("attempting to unlock", "key", key, "remove", remove, "caller", fmt.Sprintf("%s:%d", filename, line)) if locker == nil { return fmt.Errorf("no locker is registered") } diff --git a/runner/metadata.go b/runner/metadata.go index 6b19c0d5..0be41fc7 100644 --- a/runner/metadata.go +++ b/runner/metadata.go @@ -7,7 +7,6 @@ import ( "fmt" "html/template" "log/slog" - "strings" "github.com/pkg/errors" @@ -57,24 +56,51 @@ func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { ctx, "failed to get instance params") return "", runnerErrors.ErrUnauthorized } + var entity params.GithubEntity - pool, err := r.store.GetPoolByID(r.ctx, instance.PoolID) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext( - ctx, "failed to get pool", - "pool_id", instance.PoolID) - return "", errors.Wrap(err, "fetching pool") + if instance.PoolID != "" { + pool, err := r.store.GetPoolByID(r.ctx, instance.PoolID) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext( + ctx, "failed to get pool", + "pool_id", instance.PoolID) + return "", errors.Wrap(err, "fetching pool") + } + entity, err = pool.GithubEntity() + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext( + ctx, "failed to get pool entity", + "pool_id", instance.PoolID) + return "", errors.Wrap(err, "fetching pool entity") + } + } else if instance.ScaleSetID != 0 { + scaleSet, err := r.store.GetScaleSetByID(r.ctx, instance.ScaleSetID) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext( + ctx, "failed to get scale set", + "scale_set_id", instance.ScaleSetID) + return "", errors.Wrap(err, "fetching scale set") + } + entity, err = scaleSet.GithubEntity() + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext( + ctx, "failed to get scale set entity", + "scale_set_id", instance.ScaleSetID) + return "", errors.Wrap(err, "fetching scale set entity") + } + } else { + return "", errors.New("instance not associated with a pool or scale set") } tpl := "actions.runner.%s.%s" var serviceName string - switch pool.PoolType() { + switch entity.EntityType { case params.GithubEntityTypeEnterprise: - serviceName = fmt.Sprintf(tpl, 
pool.EnterpriseName, instance.Name) + serviceName = fmt.Sprintf(tpl, entity.Owner, instance.Name) case params.GithubEntityTypeOrganization: - serviceName = fmt.Sprintf(tpl, pool.OrgName, instance.Name) + serviceName = fmt.Sprintf(tpl, entity.Owner, instance.Name) case params.GithubEntityTypeRepository: - serviceName = fmt.Sprintf(tpl, strings.ReplaceAll(pool.RepoName, "/", "-"), instance.Name) + serviceName = fmt.Sprintf(tpl, fmt.Sprintf("%s-%s", entity.Owner, entity.Name), instance.Name) } return serviceName, nil } diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 3ec72dad..88be9e97 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -100,6 +100,7 @@ func NewEntityPoolManager(ctx context.Context, entity params.GithubEntity, insta repo := &basePoolManager{ ctx: ctx, + consumerID: consumerID, entity: entity, ghcli: ghc, controllerInfo: controllerInfo, @@ -117,6 +118,7 @@ func NewEntityPoolManager(ctx context.Context, entity params.GithubEntity, insta type basePoolManager struct { ctx context.Context + consumerID string entity params.GithubEntity ghcli common.GithubClient controllerInfo params.ControllerInfo @@ -420,7 +422,7 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []*github.Runne continue } - lockAcquired, err := locking.TryLock(instance.Name) + lockAcquired, err := locking.TryLock(instance.Name, r.consumerID) if !lockAcquired || err != nil { slog.DebugContext( r.ctx, "failed to acquire lock for instance", @@ -499,7 +501,7 @@ func (r *basePoolManager) reapTimedOutRunners(runners []*github.Runner) error { slog.DebugContext( r.ctx, "attempting to lock instance", "runner_name", instance.Name) - lockAcquired, err := locking.TryLock(instance.Name) + lockAcquired, err := locking.TryLock(instance.Name, r.consumerID) if !lockAcquired || err != nil { slog.DebugContext( r.ctx, "failed to acquire lock for instance", @@ -626,7 +628,7 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) poolInstanceCache[dbInstance.PoolID] = poolInstances } - lockAcquired, err := locking.TryLock(dbInstance.Name) + lockAcquired, err := locking.TryLock(dbInstance.Name, r.consumerID) if !lockAcquired || err != nil { slog.DebugContext( r.ctx, "failed to acquire lock for instance", @@ -1064,7 +1066,7 @@ func (r *basePoolManager) scaleDownOnePool(ctx context.Context, pool params.Pool for _, instanceToDelete := range idleWorkers[:numScaleDown] { instanceToDelete := instanceToDelete - lockAcquired, err := locking.TryLock(instanceToDelete.Name) + lockAcquired, err := locking.TryLock(instanceToDelete.Name, r.consumerID) if !lockAcquired || err != nil { slog.With(slog.Any("error", err)).ErrorContext( ctx, "failed to acquire lock for instance", @@ -1217,7 +1219,7 @@ func (r *basePoolManager) retryFailedInstancesForOnePool(ctx context.Context, po slog.DebugContext( ctx, "attempting to retry failed instance", "runner_name", instance.Name) - lockAcquired, err := locking.TryLock(instance.Name) + lockAcquired, err := locking.TryLock(instance.Name, r.consumerID) if !lockAcquired || err != nil { slog.DebugContext( ctx, "failed to acquire lock for instance", @@ -1401,7 +1403,7 @@ func (r *basePoolManager) deletePendingInstances() error { r.ctx, "removing instance from pool", "runner_name", instance.Name, "pool_id", instance.PoolID) - lockAcquired, err := locking.TryLock(instance.Name) + lockAcquired, err := locking.TryLock(instance.Name, r.consumerID) if !lockAcquired || err != nil { slog.InfoContext( r.ctx, "failed to acquire lock for instance", @@ -1513,7 
+1515,7 @@ func (r *basePoolManager) addPendingInstances() error { r.ctx, "attempting to acquire lock for instance", "runner_name", instance.Name, "action", "create_pending") - lockAcquired, err := locking.TryLock(instance.Name) + lockAcquired, err := locking.TryLock(instance.Name, r.consumerID) if !lockAcquired || err != nil { slog.DebugContext( r.ctx, "failed to acquire lock for instance", diff --git a/util/github/scalesets/util.go b/util/github/scalesets/util.go index 66171dd6..15c3a5cf 100644 --- a/util/github/scalesets/util.go +++ b/util/github/scalesets/util.go @@ -18,7 +18,6 @@ import ( "context" "fmt" "io" - "log/slog" "net/http" ) @@ -51,7 +50,5 @@ func (s *ScaleSetClient) newActionsRequest(ctx context.Context, method, path str req.Header.Set("Content-Type", "application/json") req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", s.actionsServiceInfo.Token)) - slog.DebugContext(ctx, "newActionsRequest", "method", method, "url", uri.String(), "body", body, "headers", req.Header) - return req, nil } diff --git a/workers/entity/controller.go b/workers/entity/controller.go index bfdcabfe..424f9099 100644 --- a/workers/entity/controller.go +++ b/workers/entity/controller.go @@ -183,7 +183,7 @@ func (c *Controller) loop() { for { select { case payload := <-c.consumer.Watch(): - slog.InfoContext(c.ctx, "received payload", slog.Any("payload", payload)) + slog.InfoContext(c.ctx, "received payload") go c.handleWatcherEvent(payload) case <-c.ctx.Done(): return diff --git a/workers/entity/worker.go b/workers/entity/worker.go index 49fb75cb..070a9711 100644 --- a/workers/entity/worker.go +++ b/workers/entity/worker.go @@ -113,7 +113,7 @@ func (w *Worker) loop() { for { select { case payload := <-w.consumer.Watch(): - slog.InfoContext(w.ctx, "received payload", slog.Any("payload", payload)) + slog.InfoContext(w.ctx, "received payload") go w.handleWorkerWatcherEvent(payload) case <-w.ctx.Done(): return diff --git a/workers/provider/errors.go b/workers/provider/errors.go new file mode 100644 index 00000000..d46a721b --- /dev/null +++ b/workers/provider/errors.go @@ -0,0 +1,7 @@ +package provider + +import "fmt" + +var ( + ErrInstanceDeleted = fmt.Errorf("instance deleted") +) diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go new file mode 100644 index 00000000..c20c75ae --- /dev/null +++ b/workers/provider/instance_manager.go @@ -0,0 +1,422 @@ +package provider + +import ( + "context" + "errors" + "fmt" + "log/slog" + "sync" + "time" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + commonParams "github.com/cloudbase/garm-provider-common/params" + + "github.com/cloudbase/garm/cache" + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/runner/common" + garmUtil "github.com/cloudbase/garm/util" +) + +func NewInstanceManager(ctx context.Context, instance params.Instance, scaleSet params.ScaleSet, provider common.Provider, helper providerHelper) (*instanceManager, error) { + ctx = garmUtil.WithSlogContext(ctx, slog.Any("instance", instance.Name)) + + githubEntity, err := scaleSet.GithubEntity() + if err != nil { + return nil, fmt.Errorf("getting github entity: %w", err) + } + return &instanceManager{ + ctx: ctx, + instance: instance, + provider: provider, + deleteBackoff: time.Second * 0, + scaleSet: scaleSet, + helper: helper, + scaleSetEntity: githubEntity, + }, nil +} + +// instanceManager handles the lifecycle of a single instance. 
+// When an instance is created, a new instance manager is created +// for it. When the instance is placed in pending_create, the manager +// will attempt to create a new compute resource in the designated +// provider. Finally, when an instance is marked as pending_delete, it is removed +// from the provider and on success the instance is marked as deleted. Failure to +// delete, will place the instance back in pending delete. The removal process is +// retried after a backoff period. Instances placed in force_pending_delete will +// ignore provider errors and exit. +type instanceManager struct { + ctx context.Context + + instance params.Instance + provider common.Provider + helper providerHelper + + scaleSet params.ScaleSet + scaleSetEntity params.GithubEntity + + deleteBackoff time.Duration + + updates chan dbCommon.ChangePayload + mux sync.Mutex + running bool + quit chan struct{} +} + +func (i *instanceManager) Start() error { + i.mux.Lock() + defer i.mux.Unlock() + + if i.running { + return nil + } + + // switch i.instance.Status { + // case commonParams.InstancePendingCreate, + // commonParams.InstancePendingDelete, + // commonParams.InstancePendingForceDelete: + // if err := i.consolidateState(); err != nil { + // return fmt.Errorf("consolidating state: %w", err) + // } + // case commonParams.InstanceDeleted: + // return ErrInstanceDeleted + // } + i.running = true + i.quit = make(chan struct{}) + i.updates = make(chan dbCommon.ChangePayload) + + go i.loop() + return nil +} + +func (i *instanceManager) Stop() error { + i.mux.Lock() + defer i.mux.Unlock() + + if !i.running { + return nil + } + + i.running = false + close(i.quit) + close(i.updates) + return nil +} + +func (i *instanceManager) sleepForBackOffOrCanceled() bool { + timer := time.NewTimer(i.deleteBackoff) + defer timer.Stop() + + select { + case <-timer.C: + return false + case <-i.quit: + return true + case <-i.ctx.Done(): + return true + } +} + +func (i *instanceManager) incrementBackOff() { + if i.deleteBackoff == 0 { + i.deleteBackoff = time.Second * 1 + } else { + i.deleteBackoff *= 2 + } + if i.deleteBackoff > time.Minute*5 { + i.deleteBackoff = time.Minute * 5 + } +} + +func (i *instanceManager) getEntity() (params.GithubEntity, error) { + entity, err := i.scaleSet.GithubEntity() + if err != nil { + return params.GithubEntity{}, fmt.Errorf("getting entity: %w", err) + } + ghEntity, err := i.helper.GetGithubEntity(entity) + if err != nil { + return params.GithubEntity{}, fmt.Errorf("getting entity: %w", err) + } + return ghEntity, nil +} + +func (i *instanceManager) pseudoPoolID() string { + // This is temporary. We need to extend providers to know about scale sets. + return fmt.Sprintf("%s-%s", i.scaleSet.Name, i.scaleSetEntity.ID) +} + +func (i *instanceManager) handleCreateInstanceInProvider(instance params.Instance) error { + // TODO(gabriel-samfira): implement the creation of the instance in the provider. 
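
// In outline, the body of this function:
//  1. Resolves the github entity backing the scale set and mints a short-lived
//     instance JWT whose validity is derived from the runner's timeout.
//  2. Fetches the cached runner tools for the entity and assembles the
//     commonParams.BootstrapInstance arguments (image, flavor, extra specs,
//     JIT config enabled, and a pseudo pool ID, since providers do not yet
//     know about scale sets).
//  3. Calls the provider's CreateInstance. If the call fails, or the provider
//     reports the instance in an error state, a deferred DeleteInstance cleans
//     up any partially created resource.
//  4. On success, records the provider-reported details through the helper's
//     updateArgsFromProviderInstance.
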
+ entity, err := i.getEntity() + if err != nil { + return fmt.Errorf("getting entity: %w", err) + } + jwtValidity := instance.RunnerTimeout() + token, err := i.helper.InstanceTokenGetter().NewInstanceJWTToken( + instance, entity.String(), entity.EntityType, jwtValidity) + if err != nil { + return fmt.Errorf("creating instance token: %w", err) + } + tools, ok := cache.GetGithubToolsCache(entity) + if !ok { + return fmt.Errorf("tools not found in cache for entity %s", entity.String()) + } + + bootstrapArgs := commonParams.BootstrapInstance{ + Name: instance.Name, + Tools: tools, + RepoURL: entity.GithubURL(), + MetadataURL: instance.MetadataURL, + CallbackURL: instance.CallbackURL, + InstanceToken: token, + OSArch: i.scaleSet.OSArch, + OSType: i.scaleSet.OSType, + Flavor: i.scaleSet.Flavor, + Image: i.scaleSet.Image, + ExtraSpecs: i.scaleSet.ExtraSpecs, + // This is temporary. We need to extend providers to know about scale sets. + PoolID: i.pseudoPoolID(), + CACertBundle: entity.Credentials.CABundle, + GitHubRunnerGroup: i.scaleSet.GitHubRunnerGroup, + JitConfigEnabled: true, + } + + var instanceIDToDelete string + baseParams, err := i.getProviderBaseParams() + if err != nil { + return fmt.Errorf("getting provider base params: %w", err) + } + + defer func() { + if instanceIDToDelete != "" { + deleteInstanceParams := common.DeleteInstanceParams{ + DeleteInstanceV011: common.DeleteInstanceV011Params{ + ProviderBaseParams: baseParams, + }, + } + if err := i.provider.DeleteInstance(i.ctx, instanceIDToDelete, deleteInstanceParams); err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + slog.With(slog.Any("error", err)).ErrorContext( + i.ctx, "failed to cleanup instance", + "provider_id", instanceIDToDelete) + } + } + } + }() + + createInstanceParams := common.CreateInstanceParams{ + CreateInstanceV011: common.CreateInstanceV011Params{ + ProviderBaseParams: baseParams, + }, + } + + providerInstance, err := i.provider.CreateInstance(i.ctx, bootstrapArgs, createInstanceParams) + if err != nil { + instanceIDToDelete = instance.Name + return fmt.Errorf("creating instance in provider: %w", err) + } + + if providerInstance.Status == commonParams.InstanceError { + instanceIDToDelete = instance.ProviderID + if instanceIDToDelete == "" { + instanceIDToDelete = instance.Name + } + } + + updated, err := i.helper.updateArgsFromProviderInstance(instance.Name, providerInstance) + if err != nil { + return fmt.Errorf("updating instance args: %w", err) + } + i.instance = updated + + return nil +} + +func (i *instanceManager) getProviderBaseParams() (common.ProviderBaseParams, error) { + info, err := i.helper.GetControllerInfo() + if err != nil { + return common.ProviderBaseParams{}, fmt.Errorf("getting controller info: %w", err) + } + + return common.ProviderBaseParams{ + ControllerInfo: info, + }, nil +} + +func (i *instanceManager) handleDeleteInstanceInProvider(instance params.Instance) error { + slog.InfoContext(i.ctx, "deleting instance in provider", "runner_name", instance.Name) + identifier := instance.ProviderID + if identifier == "" { + // provider did not return a provider ID? 
+ // try with name + identifier = instance.Name + } + + baseParams, err := i.getProviderBaseParams() + if err != nil { + return fmt.Errorf("getting provider base params: %w", err) + } + + slog.DebugContext( + i.ctx, "calling delete instance on provider", + "runner_name", instance.Name, + "provider_id", identifier) + + deleteInstanceParams := common.DeleteInstanceParams{ + DeleteInstanceV011: common.DeleteInstanceV011Params{ + ProviderBaseParams: baseParams, + }, + } + if err := i.provider.DeleteInstance(i.ctx, identifier, deleteInstanceParams); err != nil { + return fmt.Errorf("deleting instance in provider: %w", err) + } + return nil +} + +func (i *instanceManager) consolidateState() error { + i.mux.Lock() + defer i.mux.Unlock() + + switch i.instance.Status { + case commonParams.InstancePendingCreate: + // kick off the creation process + if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstanceCreating, nil); err != nil { + return fmt.Errorf("setting instance status to creating: %w", err) + } + if err := i.handleCreateInstanceInProvider(i.instance); err != nil { + slog.ErrorContext(i.ctx, "creating instance in provider", "error", err) + if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstanceError, []byte(err.Error())); err != nil { + return fmt.Errorf("setting instance status to error: %w", err) + } + } + case commonParams.InstanceRunning: + // Nothing to do. The provider finished creating the instance. + case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete: + // Remove or force remove the runner. When force remove is specified, we ignore + // IaaS errors. + if i.instance.Status == commonParams.InstancePendingDelete { + // invoke backoff sleep. We only do this for non forced removals, + // as force delete will always return, regardless of whether or not + // the remove operation succeeded in the provider. A user may decide + // to force delete a runner if GARM fails to remove it normally. + if canceled := i.sleepForBackOffOrCanceled(); canceled { + // the worker is shutting down. Return here. + return nil + } + } + + if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstanceDeleting, nil); err != nil { + if errors.Is(err, runnerErrors.ErrNotFound) { + return nil + } + return fmt.Errorf("setting instance status to deleting: %w", err) + } + + if err := i.handleDeleteInstanceInProvider(i.instance); err != nil { + slog.ErrorContext(i.ctx, "deleting instance in provider", "error", err, "forced", i.instance.Status == commonParams.InstancePendingForceDelete) + if i.instance.Status == commonParams.InstancePendingDelete { + i.incrementBackOff() + if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstancePendingDelete, []byte(err.Error())); err != nil { + return fmt.Errorf("setting instance status to error: %w", err) + } + + return fmt.Errorf("error removing instance. Will retry: %w", err) + } + } + if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstanceDeleted, nil); err != nil { + return fmt.Errorf("setting instance status to deleted: %w", err) + } + case commonParams.InstanceError: + // Instance is in error state. We wait for next status or potentially retry + // spawning the instance with a backoff timer. 
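
// As written, an errored instance is simply moved to pending_delete below: the
// delete path then removes whatever the provider may have created, the instance
// ends up in "deleted", and the scale set worker can replace it on its next
// auto-scale pass. The retry-with-backoff mentioned above is not implemented
// at this point.
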
+ if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstancePendingDelete, nil); err != nil { + return fmt.Errorf("setting instance status to error: %w", err) + } + case commonParams.InstanceDeleted: + return ErrInstanceDeleted + } + return nil +} + +func (i *instanceManager) handleUpdate(update dbCommon.ChangePayload) error { + // We need a better way to handle instance state. Database updates may fail, and we + // end up with an inconsistent state between what we know about the instance and what + // is reflected in the database. + i.mux.Lock() + + if !i.running { + i.mux.Unlock() + return nil + } + + instance, ok := update.Payload.(params.Instance) + if !ok { + i.mux.Unlock() + return runnerErrors.NewBadRequestError("invalid payload type") + } + + i.instance = instance + if i.instance.Status == instance.Status { + // Nothing of interest happened. + i.mux.Unlock() + return nil + } + i.mux.Unlock() + return i.consolidateState() +} + +func (i *instanceManager) Update(instance dbCommon.ChangePayload) error { + i.mux.Lock() + defer i.mux.Unlock() + + if !i.running { + return runnerErrors.NewBadRequestError("instance manager is not running") + } + + timer := time.NewTimer(60 * time.Second) + defer timer.Stop() + + select { + case i.updates <- instance: + case <-i.quit: + return nil + case <-i.ctx.Done(): + return nil + case <-timer.C: + return fmt.Errorf("timeout while sending update to instance manager") + } + return nil +} + +func (i *instanceManager) loop() { + defer i.Stop() + ticker := time.NewTicker(5 * time.Second) + defer ticker.Stop() + for { + select { + case <-i.quit: + return + case <-i.ctx.Done(): + return + case <-ticker.C: + if err := i.consolidateState(); err != nil { + if errors.Is(err, ErrInstanceDeleted) { + // instance had been deleted, we can exit the loop. + return + } + slog.ErrorContext(i.ctx, "consolidating state", "error", err) + } + case update, ok := <-i.updates: + if !ok { + return + } + if err := i.handleUpdate(update); err != nil { + if errors.Is(err, ErrInstanceDeleted) { + // instance had been deleted, we can exit the loop. + return + } + slog.ErrorContext(i.ctx, "handling update", "error", err) + } + } + } +} diff --git a/workers/provider/provider.go b/workers/provider/provider.go index 969a373d..07f65b26 100644 --- a/workers/provider/provider.go +++ b/workers/provider/provider.go @@ -6,19 +6,25 @@ import ( "log/slog" "sync" + commonParams "github.com/cloudbase/garm-provider-common/params" + + "github.com/cloudbase/garm/auth" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" ) -func NewWorker(ctx context.Context, store dbCommon.Store, providers map[string]common.Provider) (*provider, error) { +func NewWorker(ctx context.Context, store dbCommon.Store, providers map[string]common.Provider, tokenGetter auth.InstanceTokenGetter) (*provider, error) { consumerID := "provider-worker" return &provider{ - ctx: context.Background(), - store: store, - consumerID: consumerID, - providers: providers, + ctx: context.Background(), + store: store, + consumerID: consumerID, + providers: providers, + tokenGetter: tokenGetter, + scaleSets: make(map[uint]params.ScaleSet), + runners: make(map[string]*instanceManager), }, nil } @@ -31,13 +37,14 @@ type provider struct { // We need to implement way to RPC from workers to controllers // and abstract that into something we can use to eventually // scale out. 
-	store      dbCommon.Store
+	store       dbCommon.Store
+	tokenGetter auth.InstanceTokenGetter
 providers map[string]common.Provider

 // A cache of all scale sets kept updated by the watcher.
 // This helps us avoid a bunch of queries to the database.
 scaleSets map[uint]params.ScaleSet
- runners map[string]params.Instance
+ runners map[string]*instanceManager

 mux sync.Mutex
 running bool
@@ -45,9 +52,6 @@
 }

 func (p *provider) loadAllScaleSets() error {
- p.mux.Lock()
- defer p.mux.Unlock()
-
 scaleSets, err := p.store.ListAllScaleSets(p.ctx)
 if err != nil {
 return fmt.Errorf("fetching scale sets: %w", err)
 }
@@ -64,16 +68,46 @@ // care about runners created by scale sets, but in the future, we will migrate
 // the pool manager to the same model.
 func (p *provider) loadAllRunners() error {
- p.mux.Lock()
- defer p.mux.Unlock()
-
 runners, err := p.store.ListAllInstances(p.ctx)
 if err != nil {
 return fmt.Errorf("fetching runners: %w", err)
 }

 for _, runner := range runners {
- p.runners[runner.Name] = runner
+ // Skip non scale set instances for now. This condition needs to be
+ // removed once we replace the current pool manager.
+ if runner.ScaleSetID == 0 {
+ continue
+ }
+ // Ignore runners in "creating" state. If we're just starting up and
+ // we find a runner in "creating", it was most likely interrupted while
+ // creating. It is unlikely that it is still usable. We allow the scale set
+ // worker to clean it up. It will eventually be marked as pending delete and
+ // this worker will get an update to clean up any resources left behind by
+ // an incomplete creation event.
+ if runner.Status == commonParams.InstanceCreating {
+ continue
+ }
+ scaleSet, ok := p.scaleSets[runner.ScaleSetID]
+ if !ok {
+ slog.ErrorContext(p.ctx, "scale set not found", "scale_set_id", runner.ScaleSetID)
+ continue
+ }
+ provider, ok := p.providers[scaleSet.ProviderName]
+ if !ok {
+ slog.ErrorContext(p.ctx, "provider not found", "provider_name", runner.ProviderName)
+ continue
+ }
+ instanceManager, err := NewInstanceManager(
+ p.ctx, runner, scaleSet, provider, p)
+ if err != nil {
+ return fmt.Errorf("creating instance manager: %w", err)
+ }
+ if err := instanceManager.Start(); err != nil {
+ return fmt.Errorf("starting instance manager: %w", err)
+ }
+
+ p.runners[runner.Name] = instanceManager
 }

 return nil
@@ -127,8 +161,12 @@ func (p *provider) loop() {
 defer p.Stop()
 for {
 select {
- case payload := <-p.consumer.Watch():
- slog.InfoContext(p.ctx, "received payload", slog.Any("payload", payload))
+ case payload, ok := <-p.consumer.Watch():
+ if !ok {
+ slog.ErrorContext(p.ctx, "watcher channel closed")
+ return
+ }
+ slog.InfoContext(p.ctx, "received payload")
 go p.handleWatcherEvent(payload)
 case <-p.ctx.Done():
 return
@@ -172,6 +210,23 @@ func (p *provider) handleScaleSetEvent(event dbCommon.ChangePayload) {
 }
 }

+func (p *provider) handleInstanceAdded(instance params.Instance) error {
+ scaleSet, ok := p.scaleSets[instance.ScaleSetID]
+ if !ok {
+ return fmt.Errorf("scale set not found for instance %s", instance.Name)
+ }
+ instanceManager, err := NewInstanceManager(
+ p.ctx, instance, scaleSet, p.providers[instance.ProviderName], p)
+ if err != nil {
+ return fmt.Errorf("creating instance manager: %w", err)
+ }
+ if err := instanceManager.Start(); err != nil {
+ return fmt.Errorf("starting instance manager: %w", err)
+ }
+ p.runners[instance.Name] = instanceManager
+ return nil
+}
+
 func (p *provider) handleInstanceEvent(event
dbCommon.ChangePayload) { p.mux.Lock() defer p.mux.Unlock() @@ -183,11 +238,35 @@ func (p *provider) handleInstanceEvent(event dbCommon.ChangePayload) { } switch event.Operation { - case dbCommon.CreateOperation, dbCommon.UpdateOperation: - slog.DebugContext(p.ctx, "got create/update operation") - p.runners[instance.Name] = instance + case dbCommon.CreateOperation: + slog.DebugContext(p.ctx, "got create operation") + if err := p.handleInstanceAdded(instance); err != nil { + slog.ErrorContext(p.ctx, "failed to handle instance added", "error", err) + return + } + case dbCommon.UpdateOperation: + slog.DebugContext(p.ctx, "got update operation") + existingInstance, ok := p.runners[instance.Name] + if !ok { + if err := p.handleInstanceAdded(instance); err != nil { + slog.ErrorContext(p.ctx, "failed to handle instance added", "error", err) + return + } + } else { + if err := existingInstance.Update(event); err != nil { + slog.ErrorContext(p.ctx, "failed to update instance", "error", err) + return + } + } case dbCommon.DeleteOperation: slog.DebugContext(p.ctx, "got delete operation") + existingInstance, ok := p.runners[instance.Name] + if ok { + if err := existingInstance.Stop(); err != nil { + slog.ErrorContext(p.ctx, "failed to stop instance", "error", err) + return + } + } delete(p.runners, instance.Name) default: slog.ErrorContext(p.ctx, "invalid operation type", "operation_type", event.Operation) diff --git a/workers/provider/provider_helper.go b/workers/provider/provider_helper.go new file mode 100644 index 00000000..d420cdad --- /dev/null +++ b/workers/provider/provider_helper.go @@ -0,0 +1,81 @@ +package provider + +import ( + "fmt" + + "github.com/cloudbase/garm-provider-common/errors" + commonParams "github.com/cloudbase/garm-provider-common/params" + "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/params" +) + +type providerHelper interface { + SetInstanceStatus(instanceName string, status commonParams.InstanceStatus, providerFault []byte) error + InstanceTokenGetter() auth.InstanceTokenGetter + updateArgsFromProviderInstance(instanceName string, providerInstance commonParams.ProviderInstance) (params.Instance, error) + GetControllerInfo() (params.ControllerInfo, error) + GetGithubEntity(entity params.GithubEntity) (params.GithubEntity, error) +} + +func (p *provider) updateArgsFromProviderInstance(instanceName string, providerInstance commonParams.ProviderInstance) (params.Instance, error) { + updateParams := params.UpdateInstanceParams{ + ProviderID: providerInstance.ProviderID, + OSName: providerInstance.OSName, + OSVersion: providerInstance.OSVersion, + Addresses: providerInstance.Addresses, + Status: providerInstance.Status, + ProviderFault: providerInstance.ProviderFault, + } + + updated, err := p.store.UpdateInstance(p.ctx, instanceName, updateParams) + if err != nil { + return params.Instance{}, fmt.Errorf("updating instance %s: %w", instanceName, err) + } + return updated, nil +} + +func (p *provider) GetControllerInfo() (params.ControllerInfo, error) { + p.mux.Lock() + defer p.mux.Unlock() + + info, err := p.store.ControllerInfo() + if err != nil { + return params.ControllerInfo{}, fmt.Errorf("getting controller info: %w", err) + } + + return info, nil +} + +func (p *provider) SetInstanceStatus(instanceName string, status commonParams.InstanceStatus, providerFault []byte) error { + p.mux.Lock() + defer p.mux.Unlock() + + if _, ok := p.runners[instanceName]; !ok { + return errors.ErrNotFound + } + + updateParams := params.UpdateInstanceParams{ + Status: status, + 
ProviderFault: providerFault,
+ }
+
+ _, err := p.store.UpdateInstance(p.ctx, instanceName, updateParams)
+ if err != nil {
+ return fmt.Errorf("updating instance %s: %w", instanceName, err)
+ }
+
+ return nil
+}
+
+func (p *provider) InstanceTokenGetter() auth.InstanceTokenGetter {
+ return p.tokenGetter
+}
+
+func (p *provider) GetGithubEntity(entity params.GithubEntity) (params.GithubEntity, error) {
+ ghEntity, err := p.store.GetGithubEntity(p.ctx, entity.EntityType, entity.ID)
+ if err != nil {
+ return params.GithubEntity{}, fmt.Errorf("getting github entity: %w", err)
+ }
+
+ return ghEntity, nil
+}
diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go
index 809a2cba..24d1aad3 100644
--- a/workers/scaleset/controller.go
+++ b/workers/scaleset/controller.go
@@ -210,6 +210,7 @@ func (c *Controller) loop() {
 defer c.Stop()
 updateToolsTicker := time.NewTicker(common.PoolToolUpdateInterval)
 initialToolUpdate := make(chan struct{}, 1)
+ defer close(initialToolUpdate)
 go func() {
 slog.InfoContext(c.ctx, "running initial tool update")
 if err := c.updateTools(); err != nil {
@@ -225,21 +226,21 @@
 slog.InfoContext(c.ctx, "consumer channel closed")
 return
 }
- slog.InfoContext(c.ctx, "received payload", slog.Any("payload", payload))
+ slog.InfoContext(c.ctx, "received payload")
 go c.handleWatcherEvent(payload)
 case <-c.ctx.Done():
 return
- case _, ok := <-initialToolUpdate:
- if ok {
- // channel received the initial update slug. We can close it now.
- close(initialToolUpdate)
- }
+ case <-initialToolUpdate:
 case update, ok := <-c.statusUpdates:
 if !ok {
 return
 }
 go c.handleScaleSetStatusUpdates(update)
- case <-updateToolsTicker.C:
+ case _, ok := <-updateToolsTicker.C:
+ if !ok {
+ slog.InfoContext(c.ctx, "update tools ticker closed")
+ return
+ }
 if err := c.updateTools(); err != nil {
 slog.With(slog.Any("error", err)).Error("failed to update tools")
 }
diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go
index 012a41d1..ba7701d7 100644
--- a/workers/scaleset/scaleset.go
+++ b/workers/scaleset/scaleset.go
@@ -110,7 +110,7 @@ func (w *Worker) Start() (err error) {
 // mid boot before it reached the phase where it runs the metadata, or
 // if it already failed).
 instanceState := commonParams.InstancePendingDelete
- locking.Lock(instance.Name)
+ locking.Lock(instance.Name, w.consumerID)
 if instance.AgentID != 0 {
 if err := w.scaleSetCli.RemoveRunner(w.ctx, instance.AgentID); err != nil {
 // scale sets use JIT runners. This means that we create the runner in github
@@ -119,9 +119,9 @@
 if !errors.Is(err, runnerErrors.ErrNotFound) {
 if errors.Is(err, runnerErrors.ErrUnauthorized) {
 // we don't have access to remove the runner. This implies that our
- // credentials may have expired.
+ // credentials may have expired or are incorrect.
 //
- // TODO: we need to set the scale set as inactive and stop the listener (if any).
+ // TODO(gabriel-samfira): we need to set the scale set as inactive and stop the listener (if any).
 slog.ErrorContext(w.ctx, "error removing runner", "runner_name", instance.Name, "error", err)
 w.runners[instance.ID] = instance
 locking.Unlock(instance.Name, false)
@@ -168,7 +168,6 @@
 return fmt.Errorf("updating runner %s: %w", instance.Name, err)
 }
 }
- locking.Unlock(instance.Name, false)
 } else if instance.Status == commonParams.InstanceDeleting {
 // Set the instance in deleting.
It is assumed that the runner was already // removed from github either by github or by garm. Deleting status indicates @@ -309,6 +308,13 @@ func (w *Worker) handleInstanceEntityEvent(event dbCommon.ChangePayload) { case dbCommon.UpdateOperation: slog.DebugContext(w.ctx, "got update operation") w.mux.Lock() + if instance.Status == commonParams.InstanceDeleted { + if err := w.handleInstanceCleanup(instance); err != nil { + slog.ErrorContext(w.ctx, "error cleaning up instance", "instance_id", instance.ID, "error", err) + } + w.mux.Unlock() + return + } oldInstance, ok := w.runners[instance.ID] w.runners[instance.ID] = instance @@ -351,10 +357,10 @@ func (w *Worker) handleInstanceEntityEvent(event dbCommon.ChangePayload) { func (w *Worker) handleEvent(event dbCommon.ChangePayload) { switch event.EntityType { case dbCommon.ScaleSetEntityType: - slog.DebugContext(w.ctx, "got scaleset event", "event", event) + slog.DebugContext(w.ctx, "got scaleset event") w.handleScaleSetEvent(event) case dbCommon.InstanceEntityType: - slog.DebugContext(w.ctx, "got instance event", "event", event) + slog.DebugContext(w.ctx, "got instance event") w.handleInstanceEntityEvent(event) default: slog.DebugContext(w.ctx, "invalid entity type; ignoring", "entity_type", event.EntityType) @@ -509,12 +515,11 @@ func (w *Worker) handleScaleUp(target, current uint) { continue } - runnerDetails, err := w.scaleSetCli.GetRunner(w.ctx, jitConfig.Runner.ID) + _, err = w.scaleSetCli.GetRunner(w.ctx, jitConfig.Runner.ID) if err != nil { slog.ErrorContext(w.ctx, "error getting runner details", "error", err) continue } - slog.DebugContext(w.ctx, "runner details", "runner_details", runnerDetails) } } @@ -523,15 +528,42 @@ func (w *Worker) handleScaleDown(target, current uint) { if delta <= 0 { return } - w.mux.Lock() - defer w.mux.Unlock() removed := 0 + candidates := []params.Instance{} for _, runner := range w.runners { + locked, err := locking.TryLock(runner.Name, w.consumerID) + if err != nil || !locked { + slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) + continue + } + switch runner.Status { + case commonParams.InstanceRunning: + if runner.RunnerStatus != params.RunnerActive { + candidates = append(candidates, runner) + } + case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete, + commonParams.InstanceDeleting, commonParams.InstanceDeleted: + removed++ + locking.Unlock(runner.Name, true) + continue + default: + slog.DebugContext(w.ctx, "runner is not in a valid state; skipping", "runner_name", runner.Name, "runner_status", runner.Status) + locking.Unlock(runner.Name, false) + continue + } + locking.Unlock(runner.Name, false) + } + + if removed >= int(delta) { + return + } + + for _, runner := range candidates { if removed >= int(delta) { break } - locked, err := locking.TryLock(runner.Name) + locked, err := locking.TryLock(runner.Name, w.consumerID) if err != nil || !locked { slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) continue @@ -539,7 +571,8 @@ func (w *Worker) handleScaleDown(target, current uint) { switch runner.Status { case commonParams.InstancePendingCreate, commonParams.InstanceRunning: - case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete: + case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete, + commonParams.InstanceDeleting, commonParams.InstanceDeleted: removed++ locking.Unlock(runner.Name, true) continue @@ -613,8 +646,6 @@ func (w *Worker) handleAutoScale() { 
slog.ErrorContext(w.ctx, "error cleaning up instance", "instance_id", instance.ID, "error", err)
 }
 }
- w.mux.Unlock()
-
 var desiredRunners uint
 if w.scaleSet.DesiredRunnerCount > 0 {
 desiredRunners = uint(w.scaleSet.DesiredRunnerCount)
@@ -624,6 +655,7 @@
 currentRunners := uint(len(w.runners))
 if currentRunners == targetRunners {
 lastMsgDebugLog("desired runner count reached", targetRunners, currentRunners)
+ w.mux.Unlock()
 continue
 }

@@ -634,6 +666,7 @@
 lastMsgDebugLog("attempting to scale down", targetRunners, currentRunners)
 w.handleScaleDown(targetRunners, currentRunners)
 }
+ w.mux.Unlock()
 }
 }
}
diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go
index e6ae9197..0cf01025 100644
--- a/workers/scaleset/scaleset_helper.go
+++ b/workers/scaleset/scaleset_helper.go
@@ -35,7 +35,10 @@ func (w *Worker) SetLastMessageID(id int64) error {
 // HandleJobsCompleted handles a job completed message. If a job had a runner
 // assigned and was not canceled before it had a chance to run, then we mark
 // that runner as pending_delete.
-func (w *Worker) HandleJobsCompleted(jobs []params.ScaleSetJobMessage) error {
+func (w *Worker) HandleJobsCompleted(jobs []params.ScaleSetJobMessage) (err error) {
+ slog.DebugContext(w.ctx, "handling job completed", "jobs", jobs)
+ defer slog.DebugContext(w.ctx, "finished handling job completed", "jobs", jobs, "error", err)
+
 for _, job := range jobs {
 if job.RunnerName == "" {
 // This job was not assigned to a runner, so we can skip it.
@@ -47,7 +50,7 @@
 RunnerStatus: params.RunnerTerminated,
 }

- locking.Lock(job.RunnerName)
+ locking.Lock(job.RunnerName, w.consumerID)
 _, err := w.store.UpdateInstance(w.ctx, job.RunnerName, runnerUpdateParams)
 if err != nil {
 if !errors.Is(err, runnerErrors.ErrNotFound) {
@@ -62,7 +65,9 @@
 // HandleJobsStarted updates the runners from idle to active in the DB and
 // assigns the job to them.
-func (w *Worker) HandleJobsStarted(jobs []params.ScaleSetJobMessage) error {
+func (w *Worker) HandleJobsStarted(jobs []params.ScaleSetJobMessage) (err error) {
+ slog.DebugContext(w.ctx, "handling job started", "jobs", jobs)
+ defer slog.DebugContext(w.ctx, "finished handling job started", "jobs", jobs, "error", err)
 for _, job := range jobs {
 if job.RunnerName == "" {
 // This should not happen, but just in case.
@@ -73,7 +78,7 @@
 RunnerStatus: params.RunnerActive,
 }

- locking.Lock(job.RunnerName)
+ locking.Lock(job.RunnerName, w.consumerID)
 _, err := w.store.UpdateInstance(w.ctx, job.RunnerName, updateParams)
 if err != nil {
 if errors.Is(err, runnerErrors.ErrNotFound) {
diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go
index 43a2e5c1..9fbf9a7e 100644
--- a/workers/scaleset/scaleset_listener.go
+++ b/workers/scaleset/scaleset_listener.go
@@ -232,6 +232,7 @@ func (l *scaleSetListener) loop() {
 if !msg.IsNil() {
 // Longpoll returns after 50 seconds. If no message arrives during that interval
 // we get a nil message. We can simply ignore it and continue.
+ slog.DebugContext(l.ctx, "handling message", "message_id", msg.MessageID) l.handleSessionMessage(msg) } } From f2ad7a3481cecdbdf9b8124834ee571b95042392 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 27 Apr 2025 19:34:44 +0000 Subject: [PATCH 025/179] Fix leftover instances and refactor Signed-off-by: Gabriel Adrian Samfira --- params/params.go | 4 +- runner/common/pool.go | 9 +-- runner/common/util.go | 2 +- runner/metadata.go | 4 +- runner/pool/pool.go | 71 ++++++++-------------- runner/pool/stub_client.go | 4 +- runner/pools.go | 2 +- runner/runner.go | 89 ++++++++++++++++++++++++++-- runner/scalesets.go | 4 +- util/github/client.go | 34 +++++++++-- workers/provider/instance_manager.go | 22 ++++--- workers/provider/provider.go | 4 ++ workers/scaleset/scaleset.go | 10 +++- 13 files changed, 178 insertions(+), 81 deletions(-) diff --git a/params/params.go b/params/params.go index 64a53984..43d1dd92 100644 --- a/params/params.go +++ b/params/params.go @@ -381,7 +381,7 @@ func (p Pool) MaxRunnersAsInt() int { return int(p.MaxRunners) } -func (p Pool) GithubEntity() (GithubEntity, error) { +func (p Pool) GetEntity() (GithubEntity, error) { switch p.PoolType() { case GithubEntityTypeRepository: return GithubEntity{ @@ -489,7 +489,7 @@ type ScaleSet struct { LastMessageID int64 `json:"-"` } -func (p ScaleSet) GithubEntity() (GithubEntity, error) { +func (p ScaleSet) GetEntity() (GithubEntity, error) { switch p.ScaleSetType() { case GithubEntityTypeRepository: return GithubEntity{ diff --git a/runner/common/pool.go b/runner/common/pool.go index 68a7ddf0..18f46a9d 100644 --- a/runner/common/pool.go +++ b/runner/common/pool.go @@ -54,13 +54,6 @@ type PoolManager interface { // for it and call this function with the WorkflowJob as a parameter. HandleWorkflowJob(job params.WorkflowJob) error - // DeleteRunner will attempt to remove a runner from the pool. If forceRemove is true, any error - // received from the provider will be ignored and we will proceed to remove the runner from the database. - // An error received while attempting to remove from GitHub (other than 404) will still stop the deletion - // process. This can happen if the runner is already processing a job. At which point, you can simply cancel - // the job in github. Doing so will prompt GARM to reap the runner automatically. - DeleteRunner(runner params.Instance, forceRemove, bypassGHUnauthorizedError bool) error - // InstallWebhook will create a webhook in github for the entity associated with this pool manager. InstallWebhook(ctx context.Context, param params.InstallWebhookParams) (params.HookInfo, error) // GetWebhookInfo will return information about the webhook installed in github for the entity associated @@ -74,6 +67,8 @@ type PoolManager interface { // may use internal or self signed certificates. RootCABundle() (params.CertificateBundle, error) + SetPoolRunningState(isRunning bool, failureReason string) + // Start will start the pool manager and all associated workers. Start() error // Stop will stop the pool manager and all associated workers. 
diff --git a/runner/common/util.go b/runner/common/util.go index ee5110e1..7dbec688 100644 --- a/runner/common/util.go +++ b/runner/common/util.go @@ -17,7 +17,7 @@ type GithubEntityOperations interface { PingEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) ListEntityRunners(ctx context.Context, opts *github.ListOptions) (*github.Runners, *github.Response, error) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) - RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) + RemoveEntityRunner(ctx context.Context, runnerID int64) error CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (jitConfigMap map[string]string, runner *github.Runner, err error) diff --git a/runner/metadata.go b/runner/metadata.go index 0be41fc7..1d75fba4 100644 --- a/runner/metadata.go +++ b/runner/metadata.go @@ -66,7 +66,7 @@ func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { "pool_id", instance.PoolID) return "", errors.Wrap(err, "fetching pool") } - entity, err = pool.GithubEntity() + entity, err = pool.GetEntity() if err != nil { slog.With(slog.Any("error", err)).ErrorContext( ctx, "failed to get pool entity", @@ -81,7 +81,7 @@ func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { "scale_set_id", instance.ScaleSetID) return "", errors.Wrap(err, "fetching scale set") } - entity, err = scaleSet.GithubEntity() + entity, err = scaleSet.GetEntity() if err != nil { slog.With(slog.Any("error", err)).ErrorContext( ctx, "failed to get scale set entity", diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 88be9e97..f17ba15f 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -349,7 +349,7 @@ func (r *basePoolManager) startLoopForFunction(f func() error, interval time.Dur r.ctx, "error in loop", "loop_name", name) if errors.Is(err, runnerErrors.ErrUnauthorized) { - r.setPoolRunningState(false, err.Error()) + r.SetPoolRunningState(false, err.Error()) } } case <-r.ctx.Done(): @@ -380,7 +380,7 @@ func (r *basePoolManager) updateTools() error { if err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update tools for entity", "entity", r.entity.String()) - r.setPoolRunningState(false, err.Error()) + r.SetPoolRunningState(false, err.Error()) return fmt.Errorf("failed to update tools for entity %s: %w", r.entity.String(), err) } r.mux.Lock() @@ -388,7 +388,7 @@ func (r *basePoolManager) updateTools() error { r.mux.Unlock() slog.DebugContext(r.ctx, "successfully updated tools") - r.setPoolRunningState(true, "") + r.SetPoolRunningState(true, "") return err } @@ -565,16 +565,19 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) slog.InfoContext( r.ctx, "Runner has no database entry in garm, removing from github", "runner_name", runner.GetName()) - resp, err := r.ghcli.RemoveEntityRunner(r.ctx, runner.GetID()) - if err != nil { + if err := r.ghcli.RemoveEntityRunner(r.ctx, runner.GetID()); err != nil { // Removed in the meantime? - if resp != nil && resp.StatusCode == http.StatusNotFound { + if errors.Is(err, runnerErrors.ErrNotFound) { continue } return errors.Wrap(err, "removing runner") } continue } + if dbInstance.ScaleSetID != 0 { + // ignore scale set instances. 
+ continue + } switch dbInstance.Status { case commonParams.InstancePendingDelete, commonParams.InstanceDeleting: @@ -650,10 +653,9 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) slog.InfoContext( r.ctx, "Runner instance is no longer on the provider, removing from github", "runner_name", dbInstance.Name) - resp, err := r.ghcli.RemoveEntityRunner(r.ctx, runner.GetID()) - if err != nil { + if err := r.ghcli.RemoveEntityRunner(r.ctx, runner.GetID()); err != nil { // Removed in the meantime? - if resp != nil && resp.StatusCode == http.StatusNotFound { + if errors.Is(err, runnerErrors.ErrNotFound) { slog.DebugContext( r.ctx, "runner disappeared from github", "runner_name", dbInstance.Name) @@ -806,7 +808,7 @@ func (r *basePoolManager) AddRunner(ctx context.Context, poolID string, aditiona } if runner != nil { - _, runnerCleanupErr := r.ghcli.RemoveEntityRunner(r.ctx, runner.GetID()) + runnerCleanupErr := r.ghcli.RemoveEntityRunner(r.ctx, runner.GetID()) if err != nil { slog.With(slog.Any("error", runnerCleanupErr)).ErrorContext( ctx, "failed to remove runner", @@ -840,7 +842,7 @@ func (r *basePoolManager) waitForTimeoutOrCancelled(timeout time.Duration) { } } -func (r *basePoolManager) setPoolRunningState(isRunning bool, failureReason string) { +func (r *basePoolManager) SetPoolRunningState(isRunning bool, failureReason string) { r.mux.Lock() r.managerErrorReason = failureReason r.managerIsRunning = isRunning @@ -1660,45 +1662,22 @@ func (r *basePoolManager) DeleteRunner(runner params.Instance, forceRemove, bypa if !r.managerIsRunning && !bypassGHUnauthorizedError { return runnerErrors.NewConflictError("pool manager is not running for %s", r.entity.String()) } + if runner.AgentID != 0 { - resp, err := r.ghcli.RemoveEntityRunner(r.ctx, runner.AgentID) - if err != nil { - if resp != nil { - switch resp.StatusCode { - case http.StatusUnprocessableEntity: - return errors.Wrapf(runnerErrors.ErrBadRequest, "removing runner: %q", err) - case http.StatusNotFound: - // Runner may have been deleted by a finished job, or manually by the user. - slog.DebugContext( - r.ctx, "runner was not found in github", - "agent_id", runner.AgentID) - case http.StatusUnauthorized: - slog.With(slog.Any("error", err)).ErrorContext(r.ctx, "failed to remove runner from github") - // Mark the pool as offline from this point forward - r.setPoolRunningState(false, fmt.Sprintf("failed to remove runner: %q", err)) - slog.With(slog.Any("error", err)).ErrorContext( - r.ctx, "failed to remove runner") - if bypassGHUnauthorizedError { - slog.Info("bypass github unauthorized error is set, marking runner for deletion") - break - } - // evaluate the next switch case. 
- fallthrough - default: + if err := r.ghcli.RemoveEntityRunner(r.ctx, runner.AgentID); err != nil { + if errors.Is(err, runnerErrors.ErrUnauthorized) { + slog.With(slog.Any("error", err)).ErrorContext(r.ctx, "failed to remove runner from github") + // Mark the pool as offline from this point forward + r.SetPoolRunningState(false, fmt.Sprintf("failed to remove runner: %q", err)) + slog.With(slog.Any("error", err)).ErrorContext( + r.ctx, "failed to remove runner") + if bypassGHUnauthorizedError { + slog.Info("bypass github unauthorized error is set, marking runner for deletion") + } else { return errors.Wrap(err, "removing runner") } } else { - errResp := &github.ErrorResponse{} - if errors.As(err, &errResp) { - if errResp.Response != nil && errResp.Response.StatusCode == http.StatusUnauthorized && bypassGHUnauthorizedError { - slog.Info("bypass github unauthorized error is set, marking runner for deletion") - } else { - return errors.Wrap(err, "removing runner") - } - } else { - // We got a nil response. Assume we are in error. - return errors.Wrap(err, "removing runner") - } + return errors.Wrap(err, "removing runner") } } } diff --git a/runner/pool/stub_client.go b/runner/pool/stub_client.go index d291e736..7a82567f 100644 --- a/runner/pool/stub_client.go +++ b/runner/pool/stub_client.go @@ -41,8 +41,8 @@ func (s *stubGithubClient) ListEntityRunnerApplicationDownloads(_ context.Contex return nil, nil, s.err } -func (s *stubGithubClient) RemoveEntityRunner(_ context.Context, _ int64) (*github.Response, error) { - return nil, s.err +func (s *stubGithubClient) RemoveEntityRunner(_ context.Context, _ int64) error { + return s.err } func (s *stubGithubClient) CreateEntityRegistrationToken(_ context.Context) (*github.RegistrationToken, *github.Response, error) { diff --git a/runner/pools.go b/runner/pools.go index f2eb3c25..15aecb5e 100644 --- a/runner/pools.go +++ b/runner/pools.go @@ -99,7 +99,7 @@ func (r *Runner) UpdatePoolByID(ctx context.Context, poolID string, param params return params.Pool{}, runnerErrors.NewBadRequestError("min_idle_runners cannot be larger than max_runners") } - entity, err := pool.GithubEntity() + entity, err := pool.GetEntity() if err != nil { return params.Pool{}, errors.Wrap(err, "getting entity") } diff --git a/runner/runner.go b/runner/runner.go index 5c0883aa..4032a94c 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -45,6 +45,8 @@ import ( "github.com/cloudbase/garm/runner/common" "github.com/cloudbase/garm/runner/pool" "github.com/cloudbase/garm/runner/providers" + "github.com/cloudbase/garm/util/github" + "github.com/cloudbase/garm/util/github/scalesets" ) func NewRunner(ctx context.Context, cfg config.Config, db dbCommon.Store) (*Runner, error) { @@ -849,13 +851,92 @@ func (r *Runner) DeleteRunner(ctx context.Context, instanceName string, forceDel return runnerErrors.NewBadRequestError("runner must be in one of the following states: %q", strings.Join(validStates, ", ")) } - poolMgr, err := r.getPoolManagerFromInstance(ctx, instance) + ghCli, ssCli, err := r.getGHCliFromInstance(ctx, instance) if err != nil { - return errors.Wrap(err, "fetching pool manager for instance") + return errors.Wrap(err, "fetching github client") } - if err := poolMgr.DeleteRunner(instance, forceDelete, bypassGithubUnauthorized); err != nil { - return errors.Wrap(err, "removing runner") + if instance.AgentID != 0 { + if instance.ScaleSetID != 0 { + err = ssCli.RemoveRunner(ctx, instance.AgentID) + } else if instance.PoolID != "" { + err = ghCli.RemoveEntityRunner(ctx, 
instance.AgentID) + } else { + return errors.New("instance does not have a pool or scale set") + } + + if err != nil { + if errors.Is(err, runnerErrors.ErrUnauthorized) && instance.PoolID != "" { + poolMgr, err := r.getPoolManagerFromInstance(ctx, instance) + if err != nil { + return errors.Wrap(err, "fetching pool manager for instance") + } + poolMgr.SetPoolRunningState(false, fmt.Sprintf("failed to remove runner: %q", err)) + } + if !bypassGithubUnauthorized { + return errors.Wrap(err, "removing runner from github") + } + } } + + instanceStatus := commonParams.InstancePendingDelete + if forceDelete { + instanceStatus = commonParams.InstancePendingForceDelete + } + + slog.InfoContext( + r.ctx, "setting instance status", + "runner_name", instance.Name, + "status", instanceStatus) + + updateParams := params.UpdateInstanceParams{ + Status: instanceStatus, + } + _, err = r.store.UpdateInstance(r.ctx, instance.Name, updateParams) + if err != nil { + return errors.Wrap(err, "updating runner state") + } + return nil } + +func (r *Runner) getGHCliFromInstance(ctx context.Context, instance params.Instance) (common.GithubClient, *scalesets.ScaleSetClient, error) { + // TODO(gabriel-samfira): We can probably cache the entity. + var entityGetter params.EntityGetter + var err error + if instance.PoolID != "" { + entityGetter, err = r.store.GetPoolByID(ctx, instance.PoolID) + if err != nil { + return nil, nil, errors.Wrap(err, "fetching pool") + } + } else if instance.ScaleSetID != 0 { + entityGetter, err = r.store.GetScaleSetByID(ctx, instance.ScaleSetID) + if err != nil { + return nil, nil, errors.Wrap(err, "fetching scale set") + } + } else { + return nil, nil, errors.New("instance does not have a pool or scale set") + } + + entity, err := entityGetter.GetEntity() + if err != nil { + return nil, nil, errors.Wrap(err, "fetching entity") + } + + // Fetching the entity from the database will populate all fields, including credentials. 
+ entity, err = r.store.GetGithubEntity(ctx, entity.EntityType, entity.ID) + if err != nil { + return nil, nil, errors.Wrap(err, "fetching entity") + } + + ghCli, err := github.Client(ctx, entity) + if err != nil { + return nil, nil, errors.Wrap(err, "creating github client") + } + + scaleSetCli, err := scalesets.NewClient(ghCli) + if err != nil { + return nil, nil, errors.Wrap(err, "creating scaleset client") + } + return ghCli, scaleSetCli, nil +} diff --git a/runner/scalesets.go b/runner/scalesets.go index ef45a783..7b93a662 100644 --- a/runner/scalesets.go +++ b/runner/scalesets.go @@ -74,7 +74,7 @@ func (r *Runner) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error return runnerErrors.NewBadRequestError("scale set is enabled; disable it first") } - paramEntity, err := scaleSet.GithubEntity() + paramEntity, err := scaleSet.GetEntity() if err != nil { return errors.Wrap(err, "getting entity") } @@ -137,7 +137,7 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param return params.ScaleSet{}, runnerErrors.NewBadRequestError("min_idle_runners cannot be larger than max_runners") } - paramEntity, err := scaleSet.GithubEntity() + paramEntity, err := scaleSet.GetEntity() if err != nil { return params.ScaleSet{}, errors.Wrap(err, "getting entity") } diff --git a/util/github/client.go b/util/github/client.go index ae0b6485..50f97d39 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -226,7 +226,7 @@ func (g *githubClient) ListEntityRunnerApplicationDownloads(ctx context.Context) return ret, response, err } -func (g *githubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) { +func (g *githubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) error { var response *github.Response var err error @@ -251,10 +251,36 @@ func (g *githubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) ( case params.GithubEntityTypeEnterprise: response, err = g.enterprise.RemoveRunner(ctx, g.entity.Owner, runnerID) default: - return nil, errors.New("invalid entity type") + return errors.New("invalid entity type") } - return response, err + switch response.StatusCode { + case http.StatusNotFound: + return runnerErrors.NewNotFoundError("runner %d not found", runnerID) + case http.StatusUnauthorized: + return runnerErrors.ErrUnauthorized + case http.StatusUnprocessableEntity: + return runnerErrors.NewBadRequestError("cannot remove runner %d in its current state", runnerID) + default: + if err != nil { + errResp := &github.ErrorResponse{} + if errors.As(err, &errResp) && errResp.Response != nil { + switch errResp.Response.StatusCode { + case http.StatusNotFound: + return runnerErrors.NewNotFoundError("runner %d not found", runnerID) + case http.StatusUnauthorized: + return runnerErrors.ErrUnauthorized + case http.StatusUnprocessableEntity: + return runnerErrors.NewBadRequestError("cannot remove runner %d in its current state", runnerID) + default: + return errors.Wrap(err, "removing runner") + } + } + return errors.Wrap(err, "removing runner") + } + } + + return nil } func (g *githubClient) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) { @@ -417,7 +443,7 @@ func (g *githubClient) GetEntityJITConfig(ctx context.Context, instance string, defer func(run *github.Runner) { if err != nil && run != nil { - _, innerErr := g.RemoveEntityRunner(ctx, run.GetID()) + innerErr := g.RemoveEntityRunner(ctx, run.GetID()) slog.With(slog.Any("error", 
innerErr)).ErrorContext( ctx, "failed to remove runner", "runner_id", run.GetID(), string(g.entity.EntityType), g.entity.String()) diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go index c20c75ae..c784b41f 100644 --- a/workers/provider/instance_manager.go +++ b/workers/provider/instance_manager.go @@ -21,7 +21,7 @@ import ( func NewInstanceManager(ctx context.Context, instance params.Instance, scaleSet params.ScaleSet, provider common.Provider, helper providerHelper) (*instanceManager, error) { ctx = garmUtil.WithSlogContext(ctx, slog.Any("instance", instance.Name)) - githubEntity, err := scaleSet.GithubEntity() + githubEntity, err := scaleSet.GetEntity() if err != nil { return nil, fmt.Errorf("getting github entity: %w", err) } @@ -129,7 +129,7 @@ func (i *instanceManager) incrementBackOff() { } func (i *instanceManager) getEntity() (params.GithubEntity, error) { - entity, err := i.scaleSet.GithubEntity() + entity, err := i.scaleSet.GetEntity() if err != nil { return params.GithubEntity{}, fmt.Errorf("getting entity: %w", err) } @@ -276,6 +276,9 @@ func (i *instanceManager) handleDeleteInstanceInProvider(instance params.Instanc func (i *instanceManager) consolidateState() error { i.mux.Lock() defer i.mux.Unlock() + if !i.running { + return nil + } switch i.instance.Status { case commonParams.InstancePendingCreate: @@ -305,6 +308,7 @@ func (i *instanceManager) consolidateState() error { } } + prevStatus := i.instance.Status if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstanceDeleting, nil); err != nil { if errors.Is(err, runnerErrors.ErrNotFound) { return nil @@ -314,7 +318,7 @@ func (i *instanceManager) consolidateState() error { if err := i.handleDeleteInstanceInProvider(i.instance); err != nil { slog.ErrorContext(i.ctx, "deleting instance in provider", "error", err, "forced", i.instance.Status == commonParams.InstancePendingForceDelete) - if i.instance.Status == commonParams.InstancePendingDelete { + if prevStatus == commonParams.InstancePendingDelete { i.incrementBackOff() if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstancePendingDelete, []byte(err.Error())); err != nil { return fmt.Errorf("setting instance status to error: %w", err) @@ -324,8 +328,11 @@ func (i *instanceManager) consolidateState() error { } } if err := i.helper.SetInstanceStatus(i.instance.Name, commonParams.InstanceDeleted, nil); err != nil { - return fmt.Errorf("setting instance status to deleted: %w", err) + if !errors.Is(err, runnerErrors.ErrNotFound) { + return fmt.Errorf("setting instance status to deleted: %w", err) + } } + return ErrInstanceDeleted case commonParams.InstanceError: // Instance is in error state. We wait for next status or potentially retry // spawning the instance with a backoff timer. @@ -343,26 +350,23 @@ func (i *instanceManager) handleUpdate(update dbCommon.ChangePayload) error { // end up with an inconsistent state between what we know about the instance and what // is reflected in the database. i.mux.Lock() + defer i.mux.Unlock() if !i.running { - i.mux.Unlock() return nil } instance, ok := update.Payload.(params.Instance) if !ok { - i.mux.Unlock() return runnerErrors.NewBadRequestError("invalid payload type") } i.instance = instance if i.instance.Status == instance.Status { // Nothing of interest happened. 
- i.mux.Unlock()
 return nil
 }
- i.mux.Unlock()
- return i.consolidateState()
+ return nil
 }

 func (i *instanceManager) Update(instance dbCommon.ChangePayload) error {
diff --git a/workers/provider/provider.go b/workers/provider/provider.go
index 07f65b26..0c2cf4df 100644
--- a/workers/provider/provider.go
+++ b/workers/provider/provider.go
@@ -88,6 +88,10 @@ func (p *provider) loadAllRunners() error {
 if runner.Status == commonParams.InstanceCreating {
 continue
 }
+ if runner.Status == commonParams.InstanceDeleting || runner.Status == commonParams.InstanceDeleted {
+ continue
+ }
+
 scaleSet, ok := p.scaleSets[runner.ScaleSetID]
 if !ok {
 slog.ErrorContext(p.ctx, "scale set not found", "scale_set_id", runner.ScaleSetID)
diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go
index ba7701d7..a0a5d657 100644
--- a/workers/scaleset/scaleset.go
+++ b/workers/scaleset/scaleset.go
@@ -64,6 +64,14 @@ type Worker struct {
 quit chan struct{}
 }

+func (w *Worker) RunnersAndStatuses() map[string]string {
+ runners := make(map[string]string)
+ for _, runner := range w.runners {
+ runners[runner.Name] = string(runner.Status)
+ }
+ return runners
+}
+
 func (w *Worker) Stop() error {
 slog.DebugContext(w.ctx, "stopping scale set worker", "scale_set", w.consumerID)
 w.mux.Lock()
@@ -629,7 +637,7 @@ func (w *Worker) handleAutoScale() {
 lastMsg := ""
 lastMsgDebugLog := func(msg string, targetRunners, currentRunners uint) {
 if lastMsg != msg {
- slog.DebugContext(w.ctx, msg, "current_runners", currentRunners, "target_runners", targetRunners)
+ slog.DebugContext(w.ctx, msg, "current_runners", currentRunners, "target_runners", targetRunners, "runner_statuses", w.RunnersAndStatuses())
 lastMsg = msg
 }
 }

From a4ac85aa4a6c39fad5f1e5615c89e6ac73c5d948 Mon Sep 17 00:00:00 2001
From: Gabriel Adrian Samfira
Date: Sun, 27 Apr 2025 19:38:32 +0000
Subject: [PATCH 026/179] Update CLI to show scale sets

Signed-off-by: Gabriel Adrian Samfira
---
 cmd/garm-cli/cmd/runner.go | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/cmd/garm-cli/cmd/runner.go b/cmd/garm-cli/cmd/runner.go
index 08b9a6db..adee2965 100644
--- a/cmd/garm-cli/cmd/runner.go
+++ b/cmd/garm-cli/cmd/runner.go
@@ -228,14 +228,14 @@ func formatInstances(param []params.Instance, detailed bool) {
 return
 }
 t := table.NewWriter()
- header := table.Row{"Nr", "Name", "Status", "Runner Status", "Pool ID"}
+ header := table.Row{"Nr", "Name", "Status", "Runner Status", "Pool ID", "Scale Set ID"}
 if detailed {
 header = append(header, "Created At", "Updated At", "Job Name", "Started At", "Run ID", "Repository")
 }
 t.AppendHeader(header)
 for idx, inst := range param {
- row := table.Row{idx + 1, inst.Name, inst.Status, inst.RunnerStatus, inst.PoolID}
+ row := table.Row{idx + 1, inst.Name, inst.Status, inst.RunnerStatus, inst.PoolID, inst.ScaleSetID}
 if detailed {
 row = append(row, inst.CreatedAt, inst.UpdatedAt)
 if inst.Job != nil {
@@ -270,7 +270,11 @@ func formatSingleInstance(instance params.Instance) {
 t.AppendRow(table.Row{"OS Version", instance.OSVersion}, table.RowConfig{AutoMerge: false})
 t.AppendRow(table.Row{"Status", instance.Status}, table.RowConfig{AutoMerge: false})
 t.AppendRow(table.Row{"Runner Status", instance.RunnerStatus}, table.RowConfig{AutoMerge: false})
- t.AppendRow(table.Row{"Pool ID", instance.PoolID}, table.RowConfig{AutoMerge: false})
+ if instance.PoolID != "" {
+ t.AppendRow(table.Row{"Pool ID", instance.PoolID}, table.RowConfig{AutoMerge: false})
+ } else if instance.ScaleSetID != 0 {
+
t.AppendRow(table.Row{"Scale Set ID", instance.ScaleSetID}, table.RowConfig{AutoMerge: false}) + } if len(instance.Addresses) > 0 { for _, addr := range instance.Addresses { From 884be62a4d0232970f1fcd38549cabd9afa8a943 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 27 Apr 2025 20:28:06 +0000 Subject: [PATCH 027/179] Fix lint errors Signed-off-by: Gabriel Adrian Samfira --- cache/cache.go | 1 - cmd/garm-cli/cmd/pool.go | 12 +++---- cmd/garm-cli/cmd/root.go | 6 ++++ cmd/garm-cli/cmd/scalesets.go | 12 +++---- cmd/garm/main.go | 1 + database/common/store.go | 2 +- database/sql/pools.go | 6 ++-- database/sql/scaleset_instances.go | 3 +- database/sql/scalesets.go | 21 +++++------ database/sql/sql.go | 6 ++++ database/sql/util.go | 3 +- database/watcher/filters.go | 1 - locking/interface.go | 1 - locking/locking.go | 1 + params/github.go | 8 ++--- runner/metadata.go | 7 ++-- runner/runner.go | 16 +++++---- runner/scalesets.go | 21 +++++------ util/github/scalesets/runners.go | 4 +-- workers/entity/controller.go | 1 - workers/entity/worker.go | 3 -- workers/provider/errors.go | 4 +-- workers/provider/instance_manager.go | 4 +-- workers/provider/provider.go | 30 ++++++++-------- workers/provider/provider_helper.go | 10 +++--- workers/provider/util.go | 1 - workers/scaleset/controller.go | 48 ++++---------------------- workers/scaleset/controller_watcher.go | 6 ++-- workers/scaleset/scaleset.go | 18 ++++++---- workers/scaleset/scaleset_helper.go | 1 - workers/scaleset/scaleset_listener.go | 17 ++++----- workers/scaleset/status.go | 13 ------- 32 files changed, 127 insertions(+), 161 deletions(-) delete mode 100644 workers/scaleset/status.go diff --git a/cache/cache.go b/cache/cache.go index 2fa52456..1960de38 100644 --- a/cache/cache.go +++ b/cache/cache.go @@ -5,7 +5,6 @@ import ( "time" commonParams "github.com/cloudbase/garm-provider-common/params" - "github.com/cloudbase/garm/params" ) diff --git a/cmd/garm-cli/cmd/pool.go b/cmd/garm-cli/cmd/pool.go index a4eee742..0b891e96 100644 --- a/cmd/garm-cli/cmd/pool.go +++ b/cmd/garm-cli/cmd/pool.go @@ -493,13 +493,13 @@ func formatPools(pools []params.Pool) { switch { case pool.RepoID != "" && pool.RepoName != "": belongsTo = pool.RepoName - level = "repo" + level = entityTypeRepo case pool.OrgID != "" && pool.OrgName != "": belongsTo = pool.OrgName - level = "org" + level = entityTypeOrg case pool.EnterpriseID != "" && pool.EnterpriseName != "": belongsTo = pool.EnterpriseName - level = "enterprise" + level = entityTypeEnterprise } row := table.Row{pool.ID, pool.Image, pool.Flavor, strings.Join(tags, " "), belongsTo, pool.Enabled} if long { @@ -532,13 +532,13 @@ func formatOnePool(pool params.Pool) { switch { case pool.RepoID != "" && pool.RepoName != "": belongsTo = pool.RepoName - level = "repo" + level = entityTypeRepo case pool.OrgID != "" && pool.OrgName != "": belongsTo = pool.OrgName - level = "org" + level = entityTypeOrg case pool.EnterpriseID != "" && pool.EnterpriseName != "": belongsTo = pool.EnterpriseName - level = "enterprise" + level = entityTypeEnterprise } t.AppendHeader(header) diff --git a/cmd/garm-cli/cmd/root.go b/cmd/garm-cli/cmd/root.go index d1370567..df3ef11b 100644 --- a/cmd/garm-cli/cmd/root.go +++ b/cmd/garm-cli/cmd/root.go @@ -31,6 +31,12 @@ import ( "github.com/cloudbase/garm/params" ) +const ( + entityTypeOrg string = "org" + entityTypeRepo string = "repo" + entityTypeEnterprise string = "enterprise" +) + var ( cfg *config.Config mgr config.Manager diff --git a/cmd/garm-cli/cmd/scalesets.go 
b/cmd/garm-cli/cmd/scalesets.go index 04c537ee..79486a0e 100644 --- a/cmd/garm-cli/cmd/scalesets.go +++ b/cmd/garm-cli/cmd/scalesets.go @@ -446,13 +446,13 @@ func formatScaleSets(scaleSets []params.ScaleSet) { switch { case scaleSet.RepoID != "" && scaleSet.RepoName != "": belongsTo = scaleSet.RepoName - level = "repo" + level = entityTypeRepo case scaleSet.OrgID != "" && scaleSet.OrgName != "": belongsTo = scaleSet.OrgName - level = "org" + level = entityTypeOrg case scaleSet.EnterpriseID != "" && scaleSet.EnterpriseName != "": belongsTo = scaleSet.EnterpriseName - level = "enterprise" + level = entityTypeEnterprise } t.AppendRow(table.Row{scaleSet.ID, scaleSet.Name, scaleSet.Image, scaleSet.Flavor, belongsTo, level, scaleSet.Enabled, scaleSet.GetRunnerPrefix(), scaleSet.ProviderName}) t.AppendSeparator() @@ -476,13 +476,13 @@ func formatOneScaleSet(scaleSet params.ScaleSet) { switch { case scaleSet.RepoID != "" && scaleSet.RepoName != "": belongsTo = scaleSet.RepoName - level = "repo" + level = entityTypeRepo case scaleSet.OrgID != "" && scaleSet.OrgName != "": belongsTo = scaleSet.OrgName - level = "org" + level = entityTypeOrg case scaleSet.EnterpriseID != "" && scaleSet.EnterpriseName != "": belongsTo = scaleSet.EnterpriseName - level = "enterprise" + level = entityTypeEnterprise } t.AppendHeader(header) diff --git a/cmd/garm/main.go b/cmd/garm/main.go index d117dc6a..c43e3c93 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -180,6 +180,7 @@ func maybeUpdateURLsFromConfig(cfg config.Config, store common.Store) error { return nil } +//gocyclo:ignore func main() { flag.Parse() if *version { diff --git a/database/common/store.go b/database/common/store.go index 87804281..65fd1343 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -140,7 +140,7 @@ type ScaleSetsStore interface { ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error) CreateEntityScaleSet(_ context.Context, entity params.GithubEntity, param params.CreateScaleSetParams) (scaleSet params.ScaleSet, err error) ListEntityScaleSets(_ context.Context, entity params.GithubEntity) ([]params.ScaleSet, error) - UpdateEntityScaleSet(_ context.Context, entity params.GithubEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, new params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) + UpdateEntityScaleSet(_ context.Context, entity params.GithubEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, newSet params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) (err error) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error diff --git a/database/sql/pools.go b/database/sql/pools.go index 7454b1ef..5cb6d136 100644 --- a/database/sql/pools.go +++ b/database/sql/pools.go @@ -101,13 +101,13 @@ func (s *sqlDatabase) getEntityPool(tx *gorm.DB, entityType params.GithubEntityT switch entityType { case params.GithubEntityTypeRepository: fieldName = entityTypeRepoName - entityField = "Repository" + entityField = repositoryFieldName case params.GithubEntityTypeOrganization: fieldName = entityTypeOrgName - entityField = "Organization" + entityField = organizationFieldName case params.GithubEntityTypeEnterprise: fieldName = entityTypeEnterpriseName - entityField = "Enterprise" + entityField = enterpriseFieldName default: return Pool{}, fmt.Errorf("invalid entityType: 
%v", entityType) } diff --git a/database/sql/scaleset_instances.go b/database/sql/scaleset_instances.go index 106df956..fcb9e1f2 100644 --- a/database/sql/scaleset_instances.go +++ b/database/sql/scaleset_instances.go @@ -3,9 +3,10 @@ package sql import ( "context" + "github.com/pkg/errors" + "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" - "github.com/pkg/errors" ) func (s *sqlDatabase) CreateScaleSetInstance(_ context.Context, scaleSetID uint, param params.CreateInstanceParams) (instance params.Instance, err error) { diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go index f168813b..ea4878bf 100644 --- a/database/sql/scalesets.go +++ b/database/sql/scalesets.go @@ -18,13 +18,14 @@ import ( "context" "fmt" - runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/params" "github.com/google/uuid" "github.com/pkg/errors" "gorm.io/datatypes" "gorm.io/gorm" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/params" ) func (s *sqlDatabase) ListAllScaleSets(_ context.Context) ([]params.ScaleSet, error) { @@ -136,13 +137,13 @@ func (s *sqlDatabase) listEntityScaleSets(tx *gorm.DB, entityType params.GithubE switch entityType { case params.GithubEntityTypeRepository: fieldName = entityTypeRepoName - preloadEntity = "Repository" + preloadEntity = repositoryFieldName case params.GithubEntityTypeOrganization: fieldName = entityTypeOrgName - preloadEntity = "Organization" + preloadEntity = organizationFieldName case params.GithubEntityTypeEnterprise: fieldName = entityTypeEnterpriseName - preloadEntity = "Enterprise" + preloadEntity = enterpriseFieldName default: return nil, fmt.Errorf("invalid entityType: %v", entityType) } @@ -189,7 +190,7 @@ func (s *sqlDatabase) ListEntityScaleSets(_ context.Context, entity params.Githu return ret, nil } -func (s *sqlDatabase) UpdateEntityScaleSet(_ context.Context, entity params.GithubEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, new params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) { +func (s *sqlDatabase) UpdateEntityScaleSet(_ context.Context, entity params.GithubEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, newSet params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) { defer func() { if err == nil { s.sendNotify(common.ScaleSetEntityType, common.UpdateOperation, updatedScaleSet) @@ -348,7 +349,7 @@ func (s *sqlDatabase) GetScaleSetByID(_ context.Context, scaleSet uint) (params. 
return s.sqlToCommonScaleSet(set) } -func (s *sqlDatabase) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) (err error) { +func (s *sqlDatabase) DeleteScaleSetByID(_ context.Context, scaleSetID uint) (err error) { var scaleSet params.ScaleSet defer func() { if err == nil && scaleSet.ID != 0 { @@ -380,7 +381,7 @@ func (s *sqlDatabase) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) ( return nil } -func (s *sqlDatabase) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) (err error) { +func (s *sqlDatabase) SetScaleSetLastMessageID(_ context.Context, scaleSetID uint, lastMessageID int64) (err error) { var scaleSet params.ScaleSet defer func() { if err == nil && scaleSet.ID != 0 { @@ -407,7 +408,7 @@ func (s *sqlDatabase) SetScaleSetLastMessageID(ctx context.Context, scaleSetID u return nil } -func (s *sqlDatabase) SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int) (err error) { +func (s *sqlDatabase) SetScaleSetDesiredRunnerCount(_ context.Context, scaleSetID uint, desiredRunnerCount int) (err error) { var scaleSet params.ScaleSet defer func() { if err == nil && scaleSet.ID != 0 { diff --git a/database/sql/sql.go b/database/sql/sql.go index a704d9c3..76495732 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -36,6 +36,12 @@ import ( "github.com/cloudbase/garm/util/appdefaults" ) +const ( + repositoryFieldName string = "Repository" + organizationFieldName string = "Organization" + enterpriseFieldName string = "Enterprise" +) + // newDBConn returns a new gorm db connection, given the config func newDBConn(dbCfg config.Database) (conn *gorm.DB, err error) { dbType, connURI, err := dbCfg.GormParams() diff --git a/database/sql/util.go b/database/sql/util.go index 112d0a76..12513ede 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -753,8 +753,7 @@ func (s *sqlDatabase) AddEntityEvent(ctx context.Context, entity params.GithubEn if maxEvents == 0 { return errors.Wrap(runnerErrors.ErrBadRequest, "max events cannot be 0") } - // TODO(gabriel-samfira): Should we send watcher notifications for events? - // Not sure it's of any value. + switch entity.EntityType { case params.GithubEntityTypeRepository: return s.addRepositoryEvent(ctx, entity.ID, event, eventLevel, statusMessage, maxEvents) diff --git a/database/watcher/filters.go b/database/watcher/filters.go index 6a7e8abf..251a6bc6 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -2,7 +2,6 @@ package watcher import ( commonParams "github.com/cloudbase/garm-provider-common/params" - dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" ) diff --git a/locking/interface.go b/locking/interface.go index d6a0b62d..7750167b 100644 --- a/locking/interface.go +++ b/locking/interface.go @@ -2,7 +2,6 @@ package locking import "time" -// TODO(gabriel-samfira): needs owner attribute. 
type Locker interface { TryLock(key, identifier string) bool Lock(key, identifier string) diff --git a/locking/locking.go b/locking/locking.go index c7d99b1d..c7ad89a3 100644 --- a/locking/locking.go +++ b/locking/locking.go @@ -8,6 +8,7 @@ import ( ) var locker Locker + var lockerMux = sync.Mutex{} func TryLock(key, identifier string) (ok bool, err error) { diff --git a/params/github.go b/params/github.go index e0ad0452..7f99750f 100644 --- a/params/github.go +++ b/params/github.go @@ -488,12 +488,12 @@ type RunnerGroupList struct { type ScaleSetJobMessage struct { MessageType string `json:"messageType,omitempty"` - RunnerRequestId int64 `json:"runnerRequestId,omitempty"` + RunnerRequestID int64 `json:"runnerRequestId,omitempty"` RepositoryName string `json:"repositoryName,omitempty"` OwnerName string `json:"ownerName,omitempty"` JobWorkflowRef string `json:"jobWorkflowRef,omitempty"` JobDisplayName string `json:"jobDisplayName,omitempty"` - WorkflowRunId int64 `json:"workflowRunId,omitempty"` + WorkflowRunID int64 `json:"workflowRunId,omitempty"` EventName string `json:"eventName,omitempty"` RequestLabels []string `json:"requestLabels,omitempty"` QueueTime time.Time `json:"queueTime,omitempty"` @@ -501,7 +501,7 @@ type ScaleSetJobMessage struct { RunnerAssignTime time.Time `json:"runnerAssignTime,omitempty"` FinishTime time.Time `json:"finishTime,omitempty"` Result string `json:"result,omitempty"` - RunnerId int `json:"runnerId,omitempty"` + RunnerID int `json:"runnerId,omitempty"` RunnerName string `json:"runnerName,omitempty"` - AcquireJobUrl string `json:"acquireJobUrl,omitempty"` + AcquireJobURL string `json:"acquireJobUrl,omitempty"` } diff --git a/runner/metadata.go b/runner/metadata.go index 1d75fba4..3892d350 100644 --- a/runner/metadata.go +++ b/runner/metadata.go @@ -58,7 +58,8 @@ func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { } var entity params.GithubEntity - if instance.PoolID != "" { + switch { + case instance.PoolID != "": pool, err := r.store.GetPoolByID(r.ctx, instance.PoolID) if err != nil { slog.With(slog.Any("error", err)).ErrorContext( @@ -73,7 +74,7 @@ func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { "pool_id", instance.PoolID) return "", errors.Wrap(err, "fetching pool entity") } - } else if instance.ScaleSetID != 0 { + case instance.ScaleSetID != 0: scaleSet, err := r.store.GetScaleSetByID(r.ctx, instance.ScaleSetID) if err != nil { slog.With(slog.Any("error", err)).ErrorContext( @@ -88,7 +89,7 @@ func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { "scale_set_id", instance.ScaleSetID) return "", errors.Wrap(err, "fetching scale set entity") } - } else { + default: return "", errors.New("instance not associated with a pool or scale set") } diff --git a/runner/runner.go b/runner/runner.go index 4032a94c..42a955fc 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -857,11 +857,12 @@ func (r *Runner) DeleteRunner(ctx context.Context, instanceName string, forceDel } if instance.AgentID != 0 { - if instance.ScaleSetID != 0 { + switch { + case instance.ScaleSetID != 0: err = ssCli.RemoveRunner(ctx, instance.AgentID) - } else if instance.PoolID != "" { + case instance.PoolID != "": err = ghCli.RemoveEntityRunner(ctx, instance.AgentID) - } else { + default: return errors.New("instance does not have a pool or scale set") } @@ -901,20 +902,23 @@ func (r *Runner) DeleteRunner(ctx context.Context, instanceName string, forceDel } func (r *Runner) getGHCliFromInstance(ctx context.Context, 
instance params.Instance) (common.GithubClient, *scalesets.ScaleSetClient, error) { + // nolint:golangci-lint,godox // TODO(gabriel-samfira): We can probably cache the entity. var entityGetter params.EntityGetter var err error - if instance.PoolID != "" { + + switch { + case instance.PoolID != "": entityGetter, err = r.store.GetPoolByID(ctx, instance.PoolID) if err != nil { return nil, nil, errors.Wrap(err, "fetching pool") } - } else if instance.ScaleSetID != 0 { + case instance.ScaleSetID != 0: entityGetter, err = r.store.GetScaleSetByID(ctx, instance.ScaleSetID) if err != nil { return nil, nil, errors.Wrap(err, "fetching scale set") } - } else { + default: return nil, nil, errors.New("instance does not have a pool or scale set") } diff --git a/runner/scalesets.go b/runner/scalesets.go index 7b93a662..f55b5dca 100644 --- a/runner/scalesets.go +++ b/runner/scalesets.go @@ -20,13 +20,14 @@ import ( "fmt" "log/slog" + "github.com/pkg/errors" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/util/appdefaults" "github.com/cloudbase/garm/util/github" "github.com/cloudbase/garm/util/github/scalesets" - "github.com/pkg/errors" ) func (r *Runner) ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error) { @@ -152,7 +153,7 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param return params.ScaleSet{}, errors.Wrap(err, "creating github client") } - callback := func(old, new params.ScaleSet) error { + callback := func(old, newSet params.ScaleSet) error { scalesetCli, err := scalesets.NewClient(ghCli) if err != nil { return errors.Wrap(err, "getting scaleset client") @@ -160,13 +161,13 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param updateParams := params.RunnerScaleSet{} hasUpdates := false - if old.Name != new.Name { - updateParams.Name = new.Name + if old.Name != newSet.Name { + updateParams.Name = newSet.Name hasUpdates = true } - if old.GitHubRunnerGroup != new.GitHubRunnerGroup { - runnerGroup, err := scalesetCli.GetRunnerGroupByName(ctx, new.GitHubRunnerGroup) + if old.GitHubRunnerGroup != newSet.GitHubRunnerGroup { + runnerGroup, err := scalesetCli.GetRunnerGroupByName(ctx, newSet.GitHubRunnerGroup) if err != nil { return fmt.Errorf("error fetching runner group from github: %w", err) } @@ -174,13 +175,13 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param hasUpdates = true } - if old.DisableUpdate != new.DisableUpdate { - updateParams.RunnerSetting.DisableUpdate = new.DisableUpdate + if old.DisableUpdate != newSet.DisableUpdate { + updateParams.RunnerSetting.DisableUpdate = newSet.DisableUpdate hasUpdates = true } if hasUpdates { - result, err := scalesetCli.UpdateRunnerScaleSet(ctx, new.ScaleSetID, updateParams) + result, err := scalesetCli.UpdateRunnerScaleSet(ctx, newSet.ScaleSetID, updateParams) if err != nil { return fmt.Errorf("failed to update scaleset in github: %w", err) } @@ -224,7 +225,7 @@ func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.Git if err != nil { return params.ScaleSet{}, errors.Wrap(err, "getting scaleset client") } - var runnerGroupID int = 1 + runnerGroupID := 1 if param.GitHubRunnerGroup != "Default" { runnerGroup, err := scalesetCli.GetRunnerGroupByName(ctx, param.GitHubRunnerGroup) if err != nil { diff --git a/util/github/scalesets/runners.go b/util/github/scalesets/runners.go index 4d6434eb..178361a1 100644 --- 
a/util/github/scalesets/runners.go +++ b/util/github/scalesets/runners.go @@ -41,12 +41,12 @@ func (s *ScaleSetClient) GenerateJitRunnerConfig(ctx context.Context, runnerName return params.RunnerScaleSetJitRunnerConfig{}, err } - serviceUrl, err := s.actionsServiceInfo.GetURL() + serviceURL, err := s.actionsServiceInfo.GetURL() if err != nil { return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to get pipeline URL: %w", err) } jitConfigPath := fmt.Sprintf("/%s/%d/generatejitconfig", scaleSetEndpoint, scaleSetID) - jitConfigURL := serviceUrl.JoinPath(jitConfigPath) + jitConfigURL := serviceURL.JoinPath(jitConfigPath) req, err := s.newActionsRequest(ctx, http.MethodPost, jitConfigURL.String(), bytes.NewBuffer(body)) if err != nil { diff --git a/workers/entity/controller.go b/workers/entity/controller.go index 424f9099..b0adcb36 100644 --- a/workers/entity/controller.go +++ b/workers/entity/controller.go @@ -140,7 +140,6 @@ func (c *Controller) Start() error { c.ctx, c.consumerID, composeControllerWatcherFilters(), ) - if err != nil { return fmt.Errorf("failed to create consumer for entity controller: %w", err) } diff --git a/workers/entity/worker.go b/workers/entity/worker.go index 070a9711..95026c73 100644 --- a/workers/entity/worker.go +++ b/workers/entity/worker.go @@ -40,9 +40,6 @@ type Worker struct { Entity params.GithubEntity providers map[string]common.Provider scaleSetController *scaleset.Controller - // TODO(gabriel-samfira): replace current pool manager with something similar - // to the scale set controller. - // poolManager *pool.Controller mux sync.Mutex running bool diff --git a/workers/provider/errors.go b/workers/provider/errors.go index d46a721b..40cfc9a8 100644 --- a/workers/provider/errors.go +++ b/workers/provider/errors.go @@ -2,6 +2,4 @@ package provider import "fmt" -var ( - ErrInstanceDeleted = fmt.Errorf("instance deleted") -) +var ErrInstanceDeleted = fmt.Errorf("instance deleted") diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go index c784b41f..506e6ef1 100644 --- a/workers/provider/instance_manager.go +++ b/workers/provider/instance_manager.go @@ -10,7 +10,6 @@ import ( runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" - "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" @@ -18,7 +17,7 @@ import ( garmUtil "github.com/cloudbase/garm/util" ) -func NewInstanceManager(ctx context.Context, instance params.Instance, scaleSet params.ScaleSet, provider common.Provider, helper providerHelper) (*instanceManager, error) { +func newInstanceManager(ctx context.Context, instance params.Instance, scaleSet params.ScaleSet, provider common.Provider, helper providerHelper) (*instanceManager, error) { ctx = garmUtil.WithSlogContext(ctx, slog.Any("instance", instance.Name)) githubEntity, err := scaleSet.GetEntity() @@ -146,7 +145,6 @@ func (i *instanceManager) pseudoPoolID() string { } func (i *instanceManager) handleCreateInstanceInProvider(instance params.Instance) error { - // TODO(gabriel-samfira): implement the creation of the instance in the provider. 
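As an aside on the serviceURL.JoinPath call in the scalesets runners.go hunk above: JoinPath is the net/url helper (Go 1.19+) that joins and cleans path segments onto an existing URL, which is why the JIT config endpoint is derived from the discovered actions service URL rather than built by string concatenation. A self-contained sketch with made-up host and path values (not GitHub's real actions service endpoint):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// stand-in for the actions service URL returned by GetURL()
	base, err := url.Parse("https://pipelines.example.test/_apis")
	if err != nil {
		panic(err)
	}
	// joins the segments onto /_apis and cleans the resulting path
	jitConfigURL := base.JoinPath("runnerscalesets", "42", "generatejitconfig")
	fmt.Println(jitConfigURL.String())
	// prints: https://pipelines.example.test/_apis/runnerscalesets/42/generatejitconfig
}
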
entity, err := i.getEntity() if err != nil { return fmt.Errorf("getting entity: %w", err) diff --git a/workers/provider/provider.go b/workers/provider/provider.go index 0c2cf4df..7d648bd7 100644 --- a/workers/provider/provider.go +++ b/workers/provider/provider.go @@ -7,7 +7,6 @@ import ( "sync" commonParams "github.com/cloudbase/garm-provider-common/params" - "github.com/cloudbase/garm/auth" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" @@ -15,9 +14,9 @@ import ( "github.com/cloudbase/garm/runner/common" ) -func NewWorker(ctx context.Context, store dbCommon.Store, providers map[string]common.Provider, tokenGetter auth.InstanceTokenGetter) (*provider, error) { +func NewWorker(ctx context.Context, store dbCommon.Store, providers map[string]common.Provider, tokenGetter auth.InstanceTokenGetter) (*Provider, error) { consumerID := "provider-worker" - return &provider{ + return &Provider{ ctx: context.Background(), store: store, consumerID: consumerID, @@ -28,11 +27,12 @@ func NewWorker(ctx context.Context, store dbCommon.Store, providers map[string]c }, nil } -type provider struct { +type Provider struct { ctx context.Context consumerID string consumer dbCommon.Consumer + // nolint:golangci-lint,godox // TODO: not all workers should have access to the store. // We need to implement way to RPC from workers to controllers // and abstract that into something we can use to eventually @@ -51,7 +51,7 @@ type provider struct { quit chan struct{} } -func (p *provider) loadAllScaleSets() error { +func (p *Provider) loadAllScaleSets() error { scaleSets, err := p.store.ListAllScaleSets(p.ctx) if err != nil { return fmt.Errorf("fetching scale sets: %w", err) @@ -67,7 +67,7 @@ func (p *provider) loadAllScaleSets() error { // loadAllRunners loads all runners from the database. At this stage we only // care about runners created by scale sets, but in the future, we will migrate // the pool manager to the same model. 
-func (p *provider) loadAllRunners() error { +func (p *Provider) loadAllRunners() error { runners, err := p.store.ListAllInstances(p.ctx) if err != nil { return fmt.Errorf("fetching runners: %w", err) @@ -102,7 +102,7 @@ func (p *provider) loadAllRunners() error { slog.ErrorContext(p.ctx, "provider not found", "provider_name", runner.ProviderName) continue } - instanceManager, err := NewInstanceManager( + instanceManager, err := newInstanceManager( p.ctx, runner, scaleSet, provider, p) if err != nil { return fmt.Errorf("creating instance manager: %w", err) @@ -117,7 +117,7 @@ func (p *provider) loadAllRunners() error { return nil } -func (p *provider) Start() error { +func (p *Provider) Start() error { p.mux.Lock() defer p.mux.Unlock() @@ -147,7 +147,7 @@ func (p *provider) Start() error { return nil } -func (p *provider) Stop() error { +func (p *Provider) Stop() error { p.mux.Lock() defer p.mux.Unlock() @@ -161,7 +161,7 @@ func (p *provider) Stop() error { return nil } -func (p *provider) loop() { +func (p *Provider) loop() { defer p.Stop() for { select { @@ -180,7 +180,7 @@ func (p *provider) loop() { } } -func (p *provider) handleWatcherEvent(payload dbCommon.ChangePayload) { +func (p *Provider) handleWatcherEvent(payload dbCommon.ChangePayload) { switch payload.EntityType { case dbCommon.ScaleSetEntityType: p.handleScaleSetEvent(payload) @@ -191,7 +191,7 @@ func (p *provider) handleWatcherEvent(payload dbCommon.ChangePayload) { } } -func (p *provider) handleScaleSetEvent(event dbCommon.ChangePayload) { +func (p *Provider) handleScaleSetEvent(event dbCommon.ChangePayload) { p.mux.Lock() defer p.mux.Unlock() @@ -214,12 +214,12 @@ func (p *provider) handleScaleSetEvent(event dbCommon.ChangePayload) { } } -func (p *provider) handleInstanceAdded(instance params.Instance) error { +func (p *Provider) handleInstanceAdded(instance params.Instance) error { scaleSet, ok := p.scaleSets[instance.ScaleSetID] if !ok { return fmt.Errorf("scale set not found for instance %s", instance.Name) } - instanceManager, err := NewInstanceManager( + instanceManager, err := newInstanceManager( p.ctx, instance, scaleSet, p.providers[instance.ProviderName], p) if err != nil { return fmt.Errorf("creating instance manager: %w", err) @@ -231,7 +231,7 @@ func (p *provider) handleInstanceAdded(instance params.Instance) error { return nil } -func (p *provider) handleInstanceEvent(event dbCommon.ChangePayload) { +func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) { p.mux.Lock() defer p.mux.Unlock() diff --git a/workers/provider/provider_helper.go b/workers/provider/provider_helper.go index d420cdad..6a53bab3 100644 --- a/workers/provider/provider_helper.go +++ b/workers/provider/provider_helper.go @@ -17,7 +17,7 @@ type providerHelper interface { GetGithubEntity(entity params.GithubEntity) (params.GithubEntity, error) } -func (p *provider) updateArgsFromProviderInstance(instanceName string, providerInstance commonParams.ProviderInstance) (params.Instance, error) { +func (p *Provider) updateArgsFromProviderInstance(instanceName string, providerInstance commonParams.ProviderInstance) (params.Instance, error) { updateParams := params.UpdateInstanceParams{ ProviderID: providerInstance.ProviderID, OSName: providerInstance.OSName, @@ -34,7 +34,7 @@ func (p *provider) updateArgsFromProviderInstance(instanceName string, providerI return updated, nil } -func (p *provider) GetControllerInfo() (params.ControllerInfo, error) { +func (p *Provider) GetControllerInfo() (params.ControllerInfo, error) { p.mux.Lock() 
defer p.mux.Unlock() @@ -46,7 +46,7 @@ func (p *provider) GetControllerInfo() (params.ControllerInfo, error) { return info, nil } -func (p *provider) SetInstanceStatus(instanceName string, status commonParams.InstanceStatus, providerFault []byte) error { +func (p *Provider) SetInstanceStatus(instanceName string, status commonParams.InstanceStatus, providerFault []byte) error { p.mux.Lock() defer p.mux.Unlock() @@ -67,11 +67,11 @@ func (p *provider) SetInstanceStatus(instanceName string, status commonParams.In return nil } -func (p *provider) InstanceTokenGetter() auth.InstanceTokenGetter { +func (p *Provider) InstanceTokenGetter() auth.InstanceTokenGetter { return p.tokenGetter } -func (p *provider) GetGithubEntity(entity params.GithubEntity) (params.GithubEntity, error) { +func (p *Provider) GetGithubEntity(entity params.GithubEntity) (params.GithubEntity, error) { ghEntity, err := p.store.GetGithubEntity(p.ctx, entity.EntityType, entity.ID) if err != nil { return params.GithubEntity{}, fmt.Errorf("getting github entity: %w", err) diff --git a/workers/provider/util.go b/workers/provider/util.go index 2d84e25e..1868611e 100644 --- a/workers/provider/util.go +++ b/workers/provider/util.go @@ -2,7 +2,6 @@ package provider import ( commonParams "github.com/cloudbase/garm-provider-common/params" - dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" ) diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index 24d1aad3..a5c198e7 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -9,7 +9,6 @@ import ( "time" runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" @@ -20,7 +19,6 @@ import ( ) func NewController(ctx context.Context, store dbCommon.Store, entity params.GithubEntity, providers map[string]common.Provider) (*Controller, error) { - consumerID := fmt.Sprintf("scaleset-worker-%s", entity.String()) ctx = garmUtil.WithSlogContext( @@ -28,31 +26,22 @@ func NewController(ctx context.Context, store dbCommon.Store, entity params.Gith slog.Any("worker", consumerID)) return &Controller{ - ctx: ctx, - consumerID: consumerID, - ScaleSets: make(map[uint]*scaleSet), - Entity: entity, - providers: providers, - store: store, - statusUpdates: make(chan scaleSetStatus, 10), + ctx: ctx, + consumerID: consumerID, + ScaleSets: make(map[uint]*scaleSet), + Entity: entity, + providers: providers, + store: store, }, nil } type scaleSet struct { scaleSet params.ScaleSet - status scaleSetStatus worker *Worker mux sync.Mutex } -func (s *scaleSet) updateStatus(status scaleSetStatus) { - s.mux.Lock() - defer s.mux.Unlock() - - s.status = status -} - func (s *scaleSet) Stop() error { s.mux.Lock() defer s.mux.Unlock() @@ -80,8 +69,6 @@ type Controller struct { ghCli common.GithubClient forgeCredsAreValid bool - statusUpdates chan scaleSetStatus - mux sync.Mutex running bool quit chan struct{} @@ -162,8 +149,6 @@ func (c *Controller) Stop() error { c.running = false close(c.quit) c.quit = nil - close(c.statusUpdates) - c.statusUpdates = nil c.consumer.Close() return nil @@ -180,6 +165,7 @@ func (c *Controller) updateTools() error { slog.With(slog.Any("error", err)).ErrorContext( c.ctx, "failed to update tools for entity", "entity", c.Entity.String()) if errors.Is(err, runnerErrors.ErrUnauthorized) { + // nolint:golangci-lint,godox // TODO: block all scale sets 
c.forgeCredsAreValid = false } @@ -191,21 +177,6 @@ func (c *Controller) updateTools() error { return nil } -func (c *Controller) handleScaleSetStatusUpdates(status scaleSetStatus) { - if status.scaleSet.ID == 0 { - slog.DebugContext(c.ctx, "invalid scale set ID; ignoring") - return - } - - scaleSet, ok := c.ScaleSets[status.scaleSet.ID] - if !ok { - slog.DebugContext(c.ctx, "scale set not found; ignoring") - return - } - - scaleSet.updateStatus(status) -} - func (c *Controller) loop() { defer c.Stop() updateToolsTicker := time.NewTicker(common.PoolToolUpdateInterval) @@ -231,11 +202,6 @@ func (c *Controller) loop() { case <-c.ctx.Done(): return case <-initialToolUpdate: - case update, ok := <-c.statusUpdates: - if !ok { - return - } - go c.handleScaleSetStatusUpdates(update) case _, ok := <-updateToolsTicker.C: if !ok { slog.InfoContext(c.ctx, "update tools ticker closed") diff --git a/workers/scaleset/controller_watcher.go b/workers/scaleset/controller_watcher.go index 591e768e..04cfe1cd 100644 --- a/workers/scaleset/controller_watcher.go +++ b/workers/scaleset/controller_watcher.go @@ -88,8 +88,8 @@ func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet, ghCli c } c.ScaleSets[sSet.ID] = &scaleSet{ scaleSet: sSet, - status: scaleSetStatus{}, - worker: worker, + // status: scaleSetStatus{}, + worker: worker, } return nil } @@ -117,7 +117,7 @@ func (c *Controller) handleScaleSetUpdateOperation(sSet params.ScaleSet) error { defer c.mux.Unlock() if _, ok := c.ScaleSets[sSet.ID]; !ok { - // Some error may have occured when the scale set was first created, so we + // Some error may have occurred when the scale set was first created, so we // attempt to create it after the user updated the scale set, hopefully // fixing the reason for the failure. return c.handleScaleSetCreateOperation(sSet, c.ghCli) diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index a0a5d657..a2ca2515 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -10,7 +10,6 @@ import ( runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" - "github.com/cloudbase/garm-provider-common/util" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" @@ -127,8 +126,9 @@ func (w *Worker) Start() (err error) { if !errors.Is(err, runnerErrors.ErrNotFound) { if errors.Is(err, runnerErrors.ErrUnauthorized) { // we don't have access to remove the runner. This implies that our - // credentials may have expired or ar incorect. + // credentials may have expired or ar incorrect. // + // nolint:golangci-lint,godox // TODO(gabriel-samfira): we need to set the scale set as inactive and stop the listener (if any). slog.ErrorContext(w.ctx, "error removing runner", "runner_name", instance.Name, "error", err) w.runners[instance.ID] = instance @@ -282,6 +282,7 @@ func (w *Worker) handleScaleSetEvent(event dbCommon.ChangePayload) { slog.ErrorContext(w.ctx, "error stopping listener", "error", err) } } + // nolint:golangci-lint,godox // TODO: should we kick off auto-scaling if desired runner count changes? w.scaleSet = scaleSet w.mux.Unlock() @@ -345,6 +346,7 @@ func (w *Worker) handleInstanceEntityEvent(event dbCommon.ChangePayload) { return } if status != string(params.RunnerIdle) && status != string(params.RunnerActive) { + // nolint:golangci-lint,godox // TODO: Wait for the status to change for a while (30 seconds?). 
Mark the instance as // pending_delete if the runner never comes online. w.mux.Unlock() @@ -440,7 +442,7 @@ func (w *Worker) keepListenerAlive() { w.mux.Unlock() for { w.mux.Lock() - w.listener.Stop() //cleanup + w.listener.Stop() // cleanup if !w.scaleSet.Enabled { w.mux.Unlock() break @@ -449,12 +451,13 @@ func (w *Worker) keepListenerAlive() { if err := w.listener.Start(); err != nil { w.mux.Unlock() slog.ErrorContext(w.ctx, "error restarting listener", "error", err) - if backoff > 60*time.Second { + switch { + case backoff > 60*time.Second: backoff = 60 * time.Second - } else if backoff == 0 { + case backoff == 0: backoff = 5 * time.Second slog.InfoContext(w.ctx, "backing off restart attempt", "backoff", backoff) - } else { + default: backoff *= 2 } slog.ErrorContext(w.ctx, "error restarting listener", "error", err, "backoff", backoff) @@ -512,7 +515,7 @@ func (w *Worker) handleScaleUp(target, current uint) { CreateAttempt: 1, GitHubRunnerGroup: w.scaleSet.GitHubRunnerGroup, JitConfiguration: decodedJit, - AgentID: int64(jitConfig.Runner.ID), + AgentID: jitConfig.Runner.ID, } if _, err := w.store.CreateScaleSetInstance(w.ctx, w.scaleSet.ID, runnerParams); err != nil { @@ -618,6 +621,7 @@ func (w *Worker) handleScaleDown(target, current uint) { locking.Unlock(runner.Name, true) continue } + // nolint:golangci-lint,godox // TODO: This should not happen, unless there is some issue with the database. // The UpdateInstance() function should add tenacity, but even in that case, if it // still errors out, we need to handle it somehow. diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index 0cf01025..b83351f2 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -7,7 +7,6 @@ import ( runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" - "github.com/cloudbase/garm/locking" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/util/github/scalesets" diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index 9fbf9a7e..77f4077b 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -123,15 +123,15 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage for _, job := range body { switch job.MessageType { case params.MessageTypeJobAssigned: - slog.InfoContext(l.ctx, "new job assigned", "job_id", job.RunnerRequestId, "job_name", job.JobDisplayName) + slog.InfoContext(l.ctx, "new job assigned", "job_id", job.RunnerRequestID, "job_name", job.JobDisplayName) case params.MessageTypeJobStarted: - slog.InfoContext(l.ctx, "job started", "job_id", job.RunnerRequestId, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) + slog.InfoContext(l.ctx, "job started", "job_id", job.RunnerRequestID, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) startedJobs = append(startedJobs, job) case params.MessageTypeJobCompleted: - slog.InfoContext(l.ctx, "job completed", "job_id", job.RunnerRequestId, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) + slog.InfoContext(l.ctx, "job completed", "job_id", job.RunnerRequestID, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) completedJobs = append(completedJobs, job) case params.MessageTypeJobAvailable: - slog.InfoContext(l.ctx, "job available", "job_id", job.RunnerRequestId, "job_name", job.JobDisplayName) + slog.InfoContext(l.ctx, "job available", "job_id", 
job.RunnerRequestID, "job_name", job.JobDisplayName) availableJobs = append(availableJobs, job) default: slog.DebugContext(l.ctx, "unknown message type", "message_type", job.MessageType) @@ -139,13 +139,13 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage } if len(availableJobs) > 0 { - jobIds := make([]int64, len(availableJobs)) + jobIDs := make([]int64, len(availableJobs)) for idx, job := range availableJobs { - jobIds[idx] = job.RunnerRequestId + jobIDs[idx] = job.RunnerRequestID } idsAcquired, err := l.scaleSetHelper.ScaleSetCLI().AcquireJobs( l.listenerCtx, l.scaleSetHelper.GetScaleSet().ScaleSetID, - l.messageSession.MessageQueueAccessToken(), jobIds) + l.messageSession.MessageQueueAccessToken(), jobIDs) if err != nil { // don't mark message as processed. It will be requeued. slog.ErrorContext(l.ctx, "acquiring jobs", "error", err) @@ -201,7 +201,8 @@ func (l *scaleSetListener) loop() { return default: slog.DebugContext(l.ctx, "getting message", "last_message_id", l.lastMessageID, "max_runners", l.scaleSetHelper.GetScaleSet().MaxRunners) - // TODO: consume initial message on startup and consolidate. + // nolint:golangci-lint,godox + // TODO(gabriel-samfira): consume initial message on startup and consolidate. // The scale set may have undergone several messages while GARM was // down. msg, err := l.messageSession.GetMessage( diff --git a/workers/scaleset/status.go b/workers/scaleset/status.go deleted file mode 100644 index 29d9ae4f..00000000 --- a/workers/scaleset/status.go +++ /dev/null @@ -1,13 +0,0 @@ -package scaleset - -import ( - "time" - - "github.com/cloudbase/garm/params" -) - -type scaleSetStatus struct { - err error - heartbeat time.Time - scaleSet params.ScaleSet -} From 55b4e74066b8f74acde95a279b047e62a5f2adc7 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 27 Apr 2025 20:34:04 +0000 Subject: [PATCH 028/179] Update mocks Signed-off-by: Gabriel Adrian Samfira --- database/common/mocks/Store.go | 326 +++++++++++++++++- runner/common/mocks/GithubClient.go | 28 +- runner/common/mocks/GithubEntityOperations.go | 28 +- runner/common/mocks/PoolManager.go | 41 +-- runner/common/mocks/Provider.go | 92 ++--- runner/mocks/PoolManagerController.go | 8 +- 6 files changed, 404 insertions(+), 119 deletions(-) diff --git a/database/common/mocks/Store.go b/database/common/mocks/Store.go index 0791ff36..f7f508b5 100644 --- a/database/common/mocks/Store.go +++ b/database/common/mocks/Store.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
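The regenerated Store mock below is standard mockery/testify output, so tests drive the new scale set methods through the usual On/Return expectation API. A minimal sketch, assuming the file also carries mockery's standard NewStore(t) constructor (its footer is outside this hunk) and using made-up IDs and field values:

package mocks_test

import (
	"context"
	"testing"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"github.com/cloudbase/garm/database/common/mocks"
	"github.com/cloudbase/garm/params"
)

func TestGetScaleSetByIDMock(t *testing.T) {
	// the constructor registers a cleanup that asserts all expectations were met
	store := mocks.NewStore(t)

	want := params.ScaleSet{ID: 42, Name: "my-scale-set"}
	store.On("GetScaleSetByID", mock.Anything, uint(42)).Return(want, nil).Once()

	got, err := store.GetScaleSetByID(context.Background(), 42)
	require.NoError(t, err)
	require.Equal(t, want, got)
}
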
package mocks @@ -14,6 +14,24 @@ type Store struct { mock.Mock } +// AddEntityEvent provides a mock function with given fields: ctx, entity, event, eventLevel, statusMessage, maxEvents +func (_m *Store) AddEntityEvent(ctx context.Context, entity params.GithubEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { + ret := _m.Called(ctx, entity, event, eventLevel, statusMessage, maxEvents) + + if len(ret) == 0 { + panic("no return value specified for AddEntityEvent") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, params.EventType, params.EventLevel, string, int) error); ok { + r0 = rf(ctx, entity, event, eventLevel, statusMessage, maxEvents) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // AddInstanceEvent provides a mock function with given fields: ctx, instanceName, event, eventLevel, eventMessage func (_m *Store) AddInstanceEvent(ctx context.Context, instanceName string, event params.EventType, eventLevel params.EventLevel, eventMessage string) error { ret := _m.Called(ctx, instanceName, event, eventLevel, eventMessage) @@ -50,7 +68,7 @@ func (_m *Store) BreakLockJobIsQueued(ctx context.Context, jobID int64) error { return r0 } -// ControllerInfo provides a mock function with given fields: +// ControllerInfo provides a mock function with no fields func (_m *Store) ControllerInfo() (params.ControllerInfo, error) { ret := _m.Called() @@ -134,6 +152,34 @@ func (_m *Store) CreateEntityPool(ctx context.Context, entity params.GithubEntit return r0, r1 } +// CreateEntityScaleSet provides a mock function with given fields: _a0, entity, param +func (_m *Store) CreateEntityScaleSet(_a0 context.Context, entity params.GithubEntity, param params.CreateScaleSetParams) (params.ScaleSet, error) { + ret := _m.Called(_a0, entity, param) + + if len(ret) == 0 { + panic("no return value specified for CreateEntityScaleSet") + } + + var r0 params.ScaleSet + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, params.CreateScaleSetParams) (params.ScaleSet, error)); ok { + return rf(_a0, entity, param) + } + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, params.CreateScaleSetParams) params.ScaleSet); ok { + r0 = rf(_a0, entity, param) + } else { + r0 = ret.Get(0).(params.ScaleSet) + } + + if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity, params.CreateScaleSetParams) error); ok { + r1 = rf(_a0, entity, param) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // CreateGithubCredentials provides a mock function with given fields: ctx, param func (_m *Store) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.GithubCredentials, error) { ret := _m.Called(ctx, param) @@ -302,6 +348,34 @@ func (_m *Store) CreateRepository(ctx context.Context, owner string, name string return r0, r1 } +// CreateScaleSetInstance provides a mock function with given fields: _a0, scaleSetID, param +func (_m *Store) CreateScaleSetInstance(_a0 context.Context, scaleSetID uint, param params.CreateInstanceParams) (params.Instance, error) { + ret := _m.Called(_a0, scaleSetID, param) + + if len(ret) == 0 { + panic("no return value specified for CreateScaleSetInstance") + } + + var r0 params.Instance + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, uint, params.CreateInstanceParams) (params.Instance, error)); ok { + return rf(_a0, scaleSetID, param) + } + if rf, ok := ret.Get(0).(func(context.Context, uint, 
params.CreateInstanceParams) params.Instance); ok { + r0 = rf(_a0, scaleSetID, param) + } else { + r0 = ret.Get(0).(params.Instance) + } + + if rf, ok := ret.Get(1).(func(context.Context, uint, params.CreateInstanceParams) error); ok { + r1 = rf(_a0, scaleSetID, param) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // CreateUser provides a mock function with given fields: ctx, user func (_m *Store) CreateUser(ctx context.Context, user params.NewUserParams) (params.User, error) { ret := _m.Called(ctx, user) @@ -438,6 +512,24 @@ func (_m *Store) DeleteInstance(ctx context.Context, poolID string, instanceName return r0 } +// DeleteInstanceByName provides a mock function with given fields: ctx, instanceName +func (_m *Store) DeleteInstanceByName(ctx context.Context, instanceName string) error { + ret := _m.Called(ctx, instanceName) + + if len(ret) == 0 { + panic("no return value specified for DeleteInstanceByName") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(ctx, instanceName) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // DeleteJob provides a mock function with given fields: ctx, jobID func (_m *Store) DeleteJob(ctx context.Context, jobID int64) error { ret := _m.Called(ctx, jobID) @@ -510,6 +602,24 @@ func (_m *Store) DeleteRepository(ctx context.Context, repoID string) error { return r0 } +// DeleteScaleSetByID provides a mock function with given fields: ctx, scaleSetID +func (_m *Store) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error { + ret := _m.Called(ctx, scaleSetID) + + if len(ret) == 0 { + panic("no return value specified for DeleteScaleSetByID") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, uint) error); ok { + r0 = rf(ctx, scaleSetID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // FindPoolsMatchingAllTags provides a mock function with given fields: ctx, entityType, entityID, tags func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params.GithubEntityType, entityID string, tags []string) ([]params.Pool, error) { ret := _m.Called(ctx, entityType, entityID, tags) @@ -736,6 +846,34 @@ func (_m *Store) GetGithubEndpoint(ctx context.Context, name string) (params.Git return r0, r1 } +// GetGithubEntity provides a mock function with given fields: _a0, entityType, entityID +func (_m *Store) GetGithubEntity(_a0 context.Context, entityType params.GithubEntityType, entityID string) (params.GithubEntity, error) { + ret := _m.Called(_a0, entityType, entityID) + + if len(ret) == 0 { + panic("no return value specified for GetGithubEntity") + } + + var r0 params.GithubEntity + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string) (params.GithubEntity, error)); ok { + return rf(_a0, entityType, entityID) + } + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string) params.GithubEntity); ok { + r0 = rf(_a0, entityType, entityID) + } else { + r0 = ret.Get(0).(params.GithubEntity) + } + + if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntityType, string) error); ok { + r1 = rf(_a0, entityType, entityID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetInstanceByName provides a mock function with given fields: ctx, instanceName func (_m *Store) GetInstanceByName(ctx context.Context, instanceName string) (params.Instance, error) { ret := _m.Called(ctx, instanceName) @@ -960,6 +1098,34 @@ func (_m *Store) GetRepositoryByID(ctx context.Context, repoID string) (params.R 
return r0, r1 } +// GetScaleSetByID provides a mock function with given fields: ctx, scaleSet +func (_m *Store) GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) { + ret := _m.Called(ctx, scaleSet) + + if len(ret) == 0 { + panic("no return value specified for GetScaleSetByID") + } + + var r0 params.ScaleSet + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, uint) (params.ScaleSet, error)); ok { + return rf(ctx, scaleSet) + } + if rf, ok := ret.Get(0).(func(context.Context, uint) params.ScaleSet); ok { + r0 = rf(ctx, scaleSet) + } else { + r0 = ret.Get(0).(params.ScaleSet) + } + + if rf, ok := ret.Get(1).(func(context.Context, uint) error); ok { + r1 = rf(ctx, scaleSet) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetUser provides a mock function with given fields: ctx, user func (_m *Store) GetUser(ctx context.Context, user string) (params.User, error) { ret := _m.Called(ctx, user) @@ -1034,7 +1200,7 @@ func (_m *Store) HasAdminUser(ctx context.Context) bool { return r0 } -// InitController provides a mock function with given fields: +// InitController provides a mock function with no fields func (_m *Store) InitController() (params.ControllerInfo, error) { ret := _m.Called() @@ -1152,6 +1318,36 @@ func (_m *Store) ListAllPools(ctx context.Context) ([]params.Pool, error) { return r0, r1 } +// ListAllScaleSets provides a mock function with given fields: ctx +func (_m *Store) ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for ListAllScaleSets") + } + + var r0 []params.ScaleSet + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) ([]params.ScaleSet, error)); ok { + return rf(ctx) + } + if rf, ok := ret.Get(0).(func(context.Context) []params.ScaleSet); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]params.ScaleSet) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // ListEnterprises provides a mock function with given fields: ctx func (_m *Store) ListEnterprises(ctx context.Context) ([]params.Enterprise, error) { ret := _m.Called(ctx) @@ -1272,6 +1468,36 @@ func (_m *Store) ListEntityPools(ctx context.Context, entity params.GithubEntity return r0, r1 } +// ListEntityScaleSets provides a mock function with given fields: _a0, entity +func (_m *Store) ListEntityScaleSets(_a0 context.Context, entity params.GithubEntity) ([]params.ScaleSet, error) { + ret := _m.Called(_a0, entity) + + if len(ret) == 0 { + panic("no return value specified for ListEntityScaleSets") + } + + var r0 []params.ScaleSet + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) ([]params.ScaleSet, error)); ok { + return rf(_a0, entity) + } + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) []params.ScaleSet); ok { + r0 = rf(_a0, entity) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]params.ScaleSet) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity) error); ok { + r1 = rf(_a0, entity) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // ListGithubCredentials provides a mock function with given fields: ctx func (_m *Store) ListGithubCredentials(ctx context.Context) ([]params.GithubCredentials, error) { ret := _m.Called(ctx) @@ -1452,6 +1678,36 @@ func (_m *Store) ListRepositories(ctx context.Context) ([]params.Repository, err return r0, r1 } +// 
ListScaleSetInstances provides a mock function with given fields: _a0, scalesetID +func (_m *Store) ListScaleSetInstances(_a0 context.Context, scalesetID uint) ([]params.Instance, error) { + ret := _m.Called(_a0, scalesetID) + + if len(ret) == 0 { + panic("no return value specified for ListScaleSetInstances") + } + + var r0 []params.Instance + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, uint) ([]params.Instance, error)); ok { + return rf(_a0, scalesetID) + } + if rf, ok := ret.Get(0).(func(context.Context, uint) []params.Instance); ok { + r0 = rf(_a0, scalesetID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]params.Instance) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, uint) error); ok { + r1 = rf(_a0, scalesetID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // LockJob provides a mock function with given fields: ctx, jobID, entityID func (_m *Store) LockJob(ctx context.Context, jobID int64, entityID string) error { ret := _m.Called(ctx, jobID, entityID) @@ -1498,6 +1754,42 @@ func (_m *Store) PoolInstanceCount(ctx context.Context, poolID string) (int64, e return r0, r1 } +// SetScaleSetDesiredRunnerCount provides a mock function with given fields: ctx, scaleSetID, desiredRunnerCount +func (_m *Store) SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int) error { + ret := _m.Called(ctx, scaleSetID, desiredRunnerCount) + + if len(ret) == 0 { + panic("no return value specified for SetScaleSetDesiredRunnerCount") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, uint, int) error); ok { + r0 = rf(ctx, scaleSetID, desiredRunnerCount) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// SetScaleSetLastMessageID provides a mock function with given fields: ctx, scaleSetID, lastMessageID +func (_m *Store) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error { + ret := _m.Called(ctx, scaleSetID, lastMessageID) + + if len(ret) == 0 { + panic("no return value specified for SetScaleSetLastMessageID") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, uint, int64) error); ok { + r0 = rf(ctx, scaleSetID, lastMessageID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // UnlockJob provides a mock function with given fields: ctx, jobID, entityID func (_m *Store) UnlockJob(ctx context.Context, jobID int64, entityID string) error { ret := _m.Called(ctx, jobID, entityID) @@ -1600,6 +1892,34 @@ func (_m *Store) UpdateEntityPool(ctx context.Context, entity params.GithubEntit return r0, r1 } +// UpdateEntityScaleSet provides a mock function with given fields: _a0, entity, scaleSetID, param, callback +func (_m *Store) UpdateEntityScaleSet(_a0 context.Context, entity params.GithubEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(params.ScaleSet, params.ScaleSet) error) (params.ScaleSet, error) { + ret := _m.Called(_a0, entity, scaleSetID, param, callback) + + if len(ret) == 0 { + panic("no return value specified for UpdateEntityScaleSet") + } + + var r0 params.ScaleSet + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) (params.ScaleSet, error)); ok { + return rf(_a0, entity, scaleSetID, param, callback) + } + if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) params.ScaleSet); ok { + r0 = rf(_a0, entity, 
scaleSetID, param, callback) + } else { + r0 = ret.Get(0).(params.ScaleSet) + } + + if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) error); ok { + r1 = rf(_a0, entity, scaleSetID, param, callback) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // UpdateGithubCredentials provides a mock function with given fields: ctx, id, param func (_m *Store) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.GithubCredentials, error) { ret := _m.Called(ctx, id, param) diff --git a/runner/common/mocks/GithubClient.go b/runner/common/mocks/GithubClient.go index 4ca73de3..fb6729fc 100644 --- a/runner/common/mocks/GithubClient.go +++ b/runner/common/mocks/GithubClient.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks @@ -117,7 +117,7 @@ func (_m *GithubClient) DeleteEntityHook(ctx context.Context, id int64) (*github return r0, r1 } -// GetEntity provides a mock function with given fields: +// GetEntity provides a mock function with no fields func (_m *GithubClient) GetEntity() params.GithubEntity { ret := _m.Called() @@ -243,7 +243,7 @@ func (_m *GithubClient) GetWorkflowJobByID(ctx context.Context, owner string, re return r0, r1, r2 } -// GithubBaseURL provides a mock function with given fields: +// GithubBaseURL provides a mock function with no fields func (_m *GithubClient) GithubBaseURL() *url.URL { ret := _m.Called() @@ -411,33 +411,21 @@ func (_m *GithubClient) PingEntityHook(ctx context.Context, id int64) (*github.R } // RemoveEntityRunner provides a mock function with given fields: ctx, runnerID -func (_m *GithubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) { +func (_m *GithubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) error { ret := _m.Called(ctx, runnerID) if len(ret) == 0 { panic("no return value specified for RemoveEntityRunner") } - var r0 *github.Response - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, int64) (*github.Response, error)); ok { - return rf(ctx, runnerID) - } - if rf, ok := ret.Get(0).(func(context.Context, int64) *github.Response); ok { + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok { r0 = rf(ctx, runnerID) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*github.Response) - } + r0 = ret.Error(0) } - if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok { - r1 = rf(ctx, runnerID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 + return r0 } // NewGithubClient creates a new instance of GithubClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. diff --git a/runner/common/mocks/GithubEntityOperations.go b/runner/common/mocks/GithubEntityOperations.go index 599a04a0..cb2a2182 100644 --- a/runner/common/mocks/GithubEntityOperations.go +++ b/runner/common/mocks/GithubEntityOperations.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks @@ -117,7 +117,7 @@ func (_m *GithubEntityOperations) DeleteEntityHook(ctx context.Context, id int64 return r0, r1 } -// GetEntity provides a mock function with given fields: +// GetEntity provides a mock function with no fields func (_m *GithubEntityOperations) GetEntity() params.GithubEntity { ret := _m.Called() @@ -204,7 +204,7 @@ func (_m *GithubEntityOperations) GetEntityJITConfig(ctx context.Context, instan return r0, r1, r2 } -// GithubBaseURL provides a mock function with given fields: +// GithubBaseURL provides a mock function with no fields func (_m *GithubEntityOperations) GithubBaseURL() *url.URL { ret := _m.Called() @@ -372,33 +372,21 @@ func (_m *GithubEntityOperations) PingEntityHook(ctx context.Context, id int64) } // RemoveEntityRunner provides a mock function with given fields: ctx, runnerID -func (_m *GithubEntityOperations) RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) { +func (_m *GithubEntityOperations) RemoveEntityRunner(ctx context.Context, runnerID int64) error { ret := _m.Called(ctx, runnerID) if len(ret) == 0 { panic("no return value specified for RemoveEntityRunner") } - var r0 *github.Response - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, int64) (*github.Response, error)); ok { - return rf(ctx, runnerID) - } - if rf, ok := ret.Get(0).(func(context.Context, int64) *github.Response); ok { + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok { r0 = rf(ctx, runnerID) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*github.Response) - } + r0 = ret.Error(0) } - if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok { - r1 = rf(ctx, runnerID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 + return r0 } // NewGithubEntityOperations creates a new instance of GithubEntityOperations. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. diff --git a/runner/common/mocks/PoolManager.go b/runner/common/mocks/PoolManager.go index bf1af0c0..08cfb975 100644 --- a/runner/common/mocks/PoolManager.go +++ b/runner/common/mocks/PoolManager.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks @@ -14,24 +14,6 @@ type PoolManager struct { mock.Mock } -// DeleteRunner provides a mock function with given fields: runner, forceRemove, bypassGHUnauthorizedError -func (_m *PoolManager) DeleteRunner(runner params.Instance, forceRemove bool, bypassGHUnauthorizedError bool) error { - ret := _m.Called(runner, forceRemove, bypassGHUnauthorizedError) - - if len(ret) == 0 { - panic("no return value specified for DeleteRunner") - } - - var r0 error - if rf, ok := ret.Get(0).(func(params.Instance, bool, bool) error); ok { - r0 = rf(runner, forceRemove, bypassGHUnauthorizedError) - } else { - r0 = ret.Error(0) - } - - return r0 -} - // GetWebhookInfo provides a mock function with given fields: ctx func (_m *PoolManager) GetWebhookInfo(ctx context.Context) (params.HookInfo, error) { ret := _m.Called(ctx) @@ -60,7 +42,7 @@ func (_m *PoolManager) GetWebhookInfo(ctx context.Context) (params.HookInfo, err return r0, r1 } -// GithubRunnerRegistrationToken provides a mock function with given fields: +// GithubRunnerRegistrationToken provides a mock function with no fields func (_m *PoolManager) GithubRunnerRegistrationToken() (string, error) { ret := _m.Called() @@ -106,7 +88,7 @@ func (_m *PoolManager) HandleWorkflowJob(job params.WorkflowJob) error { return r0 } -// ID provides a mock function with given fields: +// ID provides a mock function with no fields func (_m *PoolManager) ID() string { ret := _m.Called() @@ -152,7 +134,7 @@ func (_m *PoolManager) InstallWebhook(ctx context.Context, param params.InstallW return r0, r1 } -// RootCABundle provides a mock function with given fields: +// RootCABundle provides a mock function with no fields func (_m *PoolManager) RootCABundle() (params.CertificateBundle, error) { ret := _m.Called() @@ -180,7 +162,12 @@ func (_m *PoolManager) RootCABundle() (params.CertificateBundle, error) { return r0, r1 } -// Start provides a mock function with given fields: +// SetPoolRunningState provides a mock function with given fields: isRunning, failureReason +func (_m *PoolManager) SetPoolRunningState(isRunning bool, failureReason string) { + _m.Called(isRunning, failureReason) +} + +// Start provides a mock function with no fields func (_m *PoolManager) Start() error { ret := _m.Called() @@ -198,7 +185,7 @@ func (_m *PoolManager) Start() error { return r0 } -// Status provides a mock function with given fields: +// Status provides a mock function with no fields func (_m *PoolManager) Status() params.PoolManagerStatus { ret := _m.Called() @@ -216,7 +203,7 @@ func (_m *PoolManager) Status() params.PoolManagerStatus { return r0 } -// Stop provides a mock function with given fields: +// Stop provides a mock function with no fields func (_m *PoolManager) Stop() error { ret := _m.Called() @@ -252,7 +239,7 @@ func (_m *PoolManager) UninstallWebhook(ctx context.Context) error { return r0 } -// Wait provides a mock function with given fields: +// Wait provides a mock function with no fields func (_m *PoolManager) Wait() error { ret := _m.Called() @@ -270,7 +257,7 @@ func (_m *PoolManager) Wait() error { return r0 } -// WebhookSecret provides a mock function with given fields: +// WebhookSecret provides a mock function with no fields func (_m *PoolManager) WebhookSecret() string { ret := _m.Called() diff --git a/runner/common/mocks/Provider.go b/runner/common/mocks/Provider.go index 92dece39..e7491ac5 100644 --- a/runner/common/mocks/Provider.go +++ b/runner/common/mocks/Provider.go @@ -1,15 +1,17 @@ -// Code generated by mockery v2.42.0. DO NOT EDIT. 
+// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks import ( context "context" + common "github.com/cloudbase/garm/runner/common" + garm_provider_commonparams "github.com/cloudbase/garm-provider-common/params" + mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" - "github.com/cloudbase/garm/runner/common" ) // Provider is an autogenerated mock type for the Provider type @@ -17,7 +19,7 @@ type Provider struct { mock.Mock } -// AsParams provides a mock function with given fields: +// AsParams provides a mock function with no fields func (_m *Provider) AsParams() params.Provider { ret := _m.Called() @@ -35,9 +37,9 @@ func (_m *Provider) AsParams() params.Provider { return r0 } -// CreateInstance provides a mock function with given fields: ctx, bootstrapParams +// CreateInstance provides a mock function with given fields: ctx, bootstrapParams, createInstanceParams func (_m *Provider) CreateInstance(ctx context.Context, bootstrapParams garm_provider_commonparams.BootstrapInstance, createInstanceParams common.CreateInstanceParams) (garm_provider_commonparams.ProviderInstance, error) { - ret := _m.Called(ctx, bootstrapParams) + ret := _m.Called(ctx, bootstrapParams, createInstanceParams) if len(ret) == 0 { panic("no return value specified for CreateInstance") @@ -45,17 +47,17 @@ func (_m *Provider) CreateInstance(ctx context.Context, bootstrapParams garm_pro var r0 garm_provider_commonparams.ProviderInstance var r1 error - if rf, ok := ret.Get(0).(func(context.Context, garm_provider_commonparams.BootstrapInstance) (garm_provider_commonparams.ProviderInstance, error)); ok { - return rf(ctx, bootstrapParams) + if rf, ok := ret.Get(0).(func(context.Context, garm_provider_commonparams.BootstrapInstance, common.CreateInstanceParams) (garm_provider_commonparams.ProviderInstance, error)); ok { + return rf(ctx, bootstrapParams, createInstanceParams) } - if rf, ok := ret.Get(0).(func(context.Context, garm_provider_commonparams.BootstrapInstance) garm_provider_commonparams.ProviderInstance); ok { - r0 = rf(ctx, bootstrapParams) + if rf, ok := ret.Get(0).(func(context.Context, garm_provider_commonparams.BootstrapInstance, common.CreateInstanceParams) garm_provider_commonparams.ProviderInstance); ok { + r0 = rf(ctx, bootstrapParams, createInstanceParams) } else { r0 = ret.Get(0).(garm_provider_commonparams.ProviderInstance) } - if rf, ok := ret.Get(1).(func(context.Context, garm_provider_commonparams.BootstrapInstance) error); ok { - r1 = rf(ctx, bootstrapParams) + if rf, ok := ret.Get(1).(func(context.Context, garm_provider_commonparams.BootstrapInstance, common.CreateInstanceParams) error); ok { + r1 = rf(ctx, bootstrapParams, createInstanceParams) } else { r1 = ret.Error(1) } @@ -63,17 +65,17 @@ func (_m *Provider) CreateInstance(ctx context.Context, bootstrapParams garm_pro return r0, r1 } -// DeleteInstance provides a mock function with given fields: ctx, instance +// DeleteInstance provides a mock function with given fields: ctx, instance, deleteInstanceParams func (_m *Provider) DeleteInstance(ctx context.Context, instance string, deleteInstanceParams common.DeleteInstanceParams) error { - ret := _m.Called(ctx, instance) + ret := _m.Called(ctx, instance, deleteInstanceParams) if len(ret) == 0 { panic("no return value specified for DeleteInstance") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, instance) + if rf, ok := ret.Get(0).(func(context.Context, string, common.DeleteInstanceParams) error); ok { + r0 
= rf(ctx, instance, deleteInstanceParams) } else { r0 = ret.Error(0) } @@ -81,7 +83,7 @@ func (_m *Provider) DeleteInstance(ctx context.Context, instance string, deleteI return r0 } -// DisableJITConfig provides a mock function with given fields: +// DisableJITConfig provides a mock function with no fields func (_m *Provider) DisableJITConfig() bool { ret := _m.Called() @@ -99,9 +101,9 @@ func (_m *Provider) DisableJITConfig() bool { return r0 } -// GetInstance provides a mock function with given fields: ctx, instance +// GetInstance provides a mock function with given fields: ctx, instance, getInstanceParams func (_m *Provider) GetInstance(ctx context.Context, instance string, getInstanceParams common.GetInstanceParams) (garm_provider_commonparams.ProviderInstance, error) { - ret := _m.Called(ctx, instance) + ret := _m.Called(ctx, instance, getInstanceParams) if len(ret) == 0 { panic("no return value specified for GetInstance") @@ -109,17 +111,17 @@ func (_m *Provider) GetInstance(ctx context.Context, instance string, getInstanc var r0 garm_provider_commonparams.ProviderInstance var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (garm_provider_commonparams.ProviderInstance, error)); ok { - return rf(ctx, instance) + if rf, ok := ret.Get(0).(func(context.Context, string, common.GetInstanceParams) (garm_provider_commonparams.ProviderInstance, error)); ok { + return rf(ctx, instance, getInstanceParams) } - if rf, ok := ret.Get(0).(func(context.Context, string) garm_provider_commonparams.ProviderInstance); ok { - r0 = rf(ctx, instance) + if rf, ok := ret.Get(0).(func(context.Context, string, common.GetInstanceParams) garm_provider_commonparams.ProviderInstance); ok { + r0 = rf(ctx, instance, getInstanceParams) } else { r0 = ret.Get(0).(garm_provider_commonparams.ProviderInstance) } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, instance) + if rf, ok := ret.Get(1).(func(context.Context, string, common.GetInstanceParams) error); ok { + r1 = rf(ctx, instance, getInstanceParams) } else { r1 = ret.Error(1) } @@ -127,9 +129,9 @@ func (_m *Provider) GetInstance(ctx context.Context, instance string, getInstanc return r0, r1 } -// ListInstances provides a mock function with given fields: ctx, poolID +// ListInstances provides a mock function with given fields: ctx, poolID, listInstancesParams func (_m *Provider) ListInstances(ctx context.Context, poolID string, listInstancesParams common.ListInstancesParams) ([]garm_provider_commonparams.ProviderInstance, error) { - ret := _m.Called(ctx, poolID) + ret := _m.Called(ctx, poolID, listInstancesParams) if len(ret) == 0 { panic("no return value specified for ListInstances") @@ -137,19 +139,19 @@ func (_m *Provider) ListInstances(ctx context.Context, poolID string, listInstan var r0 []garm_provider_commonparams.ProviderInstance var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) ([]garm_provider_commonparams.ProviderInstance, error)); ok { - return rf(ctx, poolID) + if rf, ok := ret.Get(0).(func(context.Context, string, common.ListInstancesParams) ([]garm_provider_commonparams.ProviderInstance, error)); ok { + return rf(ctx, poolID, listInstancesParams) } - if rf, ok := ret.Get(0).(func(context.Context, string) []garm_provider_commonparams.ProviderInstance); ok { - r0 = rf(ctx, poolID) + if rf, ok := ret.Get(0).(func(context.Context, string, common.ListInstancesParams) []garm_provider_commonparams.ProviderInstance); ok { + r0 = rf(ctx, poolID, listInstancesParams) } else { if 
ret.Get(0) != nil { r0 = ret.Get(0).([]garm_provider_commonparams.ProviderInstance) } } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, poolID) + if rf, ok := ret.Get(1).(func(context.Context, string, common.ListInstancesParams) error); ok { + r1 = rf(ctx, poolID, listInstancesParams) } else { r1 = ret.Error(1) } @@ -157,17 +159,17 @@ func (_m *Provider) ListInstances(ctx context.Context, poolID string, listInstan return r0, r1 } -// RemoveAllInstances provides a mock function with given fields: ctx -func (_m *Provider) RemoveAllInstances(ctx context.Context, removeAllInstances common.RemoveAllInstancesParams) error { - ret := _m.Called(ctx) +// RemoveAllInstances provides a mock function with given fields: ctx, removeAllInstancesParams +func (_m *Provider) RemoveAllInstances(ctx context.Context, removeAllInstancesParams common.RemoveAllInstancesParams) error { + ret := _m.Called(ctx, removeAllInstancesParams) if len(ret) == 0 { panic("no return value specified for RemoveAllInstances") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context) error); ok { - r0 = rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, common.RemoveAllInstancesParams) error); ok { + r0 = rf(ctx, removeAllInstancesParams) } else { r0 = ret.Error(0) } @@ -175,17 +177,17 @@ func (_m *Provider) RemoveAllInstances(ctx context.Context, removeAllInstances c return r0 } -// Start provides a mock function with given fields: ctx, instance +// Start provides a mock function with given fields: ctx, instance, startParams func (_m *Provider) Start(ctx context.Context, instance string, startParams common.StartParams) error { - ret := _m.Called(ctx, instance) + ret := _m.Called(ctx, instance, startParams) if len(ret) == 0 { panic("no return value specified for Start") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, instance) + if rf, ok := ret.Get(0).(func(context.Context, string, common.StartParams) error); ok { + r0 = rf(ctx, instance, startParams) } else { r0 = ret.Error(0) } @@ -193,17 +195,17 @@ func (_m *Provider) Start(ctx context.Context, instance string, startParams comm return r0 } -// Stop provides a mock function with given fields: ctx, instance +// Stop provides a mock function with given fields: ctx, instance, stopParams func (_m *Provider) Stop(ctx context.Context, instance string, stopParams common.StopParams) error { - ret := _m.Called(ctx, instance) + ret := _m.Called(ctx, instance, stopParams) if len(ret) == 0 { panic("no return value specified for Stop") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, instance) + if rf, ok := ret.Get(0).(func(context.Context, string, common.StopParams) error); ok { + r0 = rf(ctx, instance, stopParams) } else { r0 = ret.Error(0) } diff --git a/runner/mocks/PoolManagerController.go b/runner/mocks/PoolManagerController.go index 2e680daa..05720ebe 100644 --- a/runner/mocks/PoolManagerController.go +++ b/runner/mocks/PoolManagerController.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks @@ -193,7 +193,7 @@ func (_m *PoolManagerController) GetEnterprisePoolManager(enterprise params.Ente return r0, r1 } -// GetEnterprisePoolManagers provides a mock function with given fields: +// GetEnterprisePoolManagers provides a mock function with no fields func (_m *PoolManagerController) GetEnterprisePoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() @@ -253,7 +253,7 @@ func (_m *PoolManagerController) GetOrgPoolManager(org params.Organization) (com return r0, r1 } -// GetOrgPoolManagers provides a mock function with given fields: +// GetOrgPoolManagers provides a mock function with no fields func (_m *PoolManagerController) GetOrgPoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() @@ -313,7 +313,7 @@ func (_m *PoolManagerController) GetRepoPoolManager(repo params.Repository) (com return r0, r1 } -// GetRepoPoolManagers provides a mock function with given fields: +// GetRepoPoolManagers provides a mock function with no fields func (_m *PoolManagerController) GetRepoPoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() From 4b1d51f1d0ec57e8bcf150c292eedeb85ebe415a Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 27 Apr 2025 20:42:42 +0000 Subject: [PATCH 029/179] Fix nil pointer deref Signed-off-by: Gabriel Adrian Samfira --- database/sql/instances.go | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/database/sql/instances.go b/database/sql/instances.go index 604682e9..987eb264 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -156,13 +156,20 @@ func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceN if instance.ProviderID != nil { providerID = *instance.ProviderID } - if notifyErr := s.sendNotify(common.InstanceEntityType, common.DeleteOperation, params.Instance{ + instanceNotif := params.Instance{ ID: instance.ID.String(), Name: instance.Name, ProviderID: providerID, AgentID: instance.AgentID, - PoolID: instance.PoolID.String(), - }); notifyErr != nil { + } + switch { + case instance.PoolID != nil: + instanceNotif.PoolID = instance.PoolID.String() + case instance.ScaleSetFkID != nil: + instanceNotif.ScaleSetID = *instance.ScaleSetFkID + } + + if notifyErr := s.sendNotify(common.InstanceEntityType, common.DeleteOperation, instanceNotif); notifyErr != nil { slog.With(slog.Any("error", notifyErr)).Error("failed to send notify") } } @@ -313,7 +320,7 @@ func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]par } var instances []Instance - query := s.conn.Model(&Instance{}).Preload("Job", "Pool").Where("pool_id = ?", u) + query := s.conn.Model(&Instance{}).Preload("Job").Where("pool_id = ?", u) if err := query.Find(&instances); err.Error != nil { return nil, errors.Wrap(err.Error, "fetching instances") From 64d1501b0ed29e39265fd65f9a4d38d482ae8eb6 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 27 Apr 2025 21:03:37 +0000 Subject: [PATCH 030/179] DeleteInstance should noop if error not found Signed-off-by: Gabriel Adrian Samfira --- database/sql/instances.go | 6 ++++++ database/sql/instances_test.go | 17 +++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/database/sql/instances.go b/database/sql/instances.go index 987eb264..39e32211 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -147,6 +147,9 @@ func (s *sqlDatabase) GetInstanceByName(ctx context.Context, instanceName string func (s *sqlDatabase) DeleteInstance(_ context.Context, 
poolID string, instanceName string) (err error) { instance, err := s.getPoolInstanceByName(poolID, instanceName) if err != nil { + if errors.Is(err, runnerErrors.ErrNotFound) { + return nil + } return errors.Wrap(err, "deleting instance") } @@ -187,6 +190,9 @@ func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceN func (s *sqlDatabase) DeleteInstanceByName(ctx context.Context, instanceName string) error { instance, err := s.getInstanceByName(ctx, instanceName) if err != nil { + if errors.Is(err, runnerErrors.ErrNotFound) { + return nil + } return errors.Wrap(err, "deleting instance") } diff --git a/database/sql/instances_test.go b/database/sql/instances_test.go index de37033d..9d000cef 100644 --- a/database/sql/instances_test.go +++ b/database/sql/instances_test.go @@ -277,6 +277,23 @@ func (s *InstancesTestSuite) TestDeleteInstance() { _, err = s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) s.Require().Equal("fetching instance: fetching pool instance by name: not found", err.Error()) + + err = s.Store.DeleteInstance(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) + s.Require().Nil(err) +} + +func (s *InstancesTestSuite) TestDeleteInstanceByName() { + storeInstance := s.Fixtures.Instances[0] + + err := s.Store.DeleteInstanceByName(s.adminCtx, storeInstance.Name) + + s.Require().Nil(err) + + _, err = s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) + s.Require().Equal("fetching instance: fetching pool instance by name: not found", err.Error()) + + err = s.Store.DeleteInstanceByName(s.adminCtx, storeInstance.Name) + s.Require().Nil(err) } func (s *InstancesTestSuite) TestDeleteInstanceInvalidPoolID() { From 22302fdd7aac193ff659892e3c8c3181445b937b Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 28 Apr 2025 12:08:42 +0000 Subject: [PATCH 031/179] Add scaleset watcher to provider Fixes provider not spawning runners for newly added scale set Signed-off-by: Gabriel Adrian Samfira --- params/github.go | 1 + workers/entity/controller.go | 1 - workers/entity/worker.go | 2 -- workers/provider/provider.go | 5 +++++ workers/provider/util.go | 1 + workers/scaleset/controller.go | 1 - workers/scaleset/scaleset.go | 5 ----- workers/scaleset/scaleset_listener.go | 5 ----- 8 files changed, 7 insertions(+), 14 deletions(-) diff --git a/params/github.go b/params/github.go index 7f99750f..4b37b83b 100644 --- a/params/github.go +++ b/params/github.go @@ -431,6 +431,7 @@ type RunnerReference struct { Status interface{} `json:"status"` DisableUpdate bool `json:"disableUpdate"` ProvisioningState string `json:"provisioningState"` + Labels []Label `json:"labels,omitempty"` } type RunnerScaleSetJitRunnerConfig struct { diff --git a/workers/entity/controller.go b/workers/entity/controller.go index b0adcb36..41708ec2 100644 --- a/workers/entity/controller.go +++ b/workers/entity/controller.go @@ -172,7 +172,6 @@ func (c *Controller) Stop() error { c.running = false close(c.quit) - c.quit = nil c.consumer.Close() return nil } diff --git a/workers/entity/worker.go b/workers/entity/worker.go index 95026c73..8aebb747 100644 --- a/workers/entity/worker.go +++ b/workers/entity/worker.go @@ -59,7 +59,6 @@ func (w *Worker) Stop() error { if err := w.scaleSetController.Stop(); err != nil { return fmt.Errorf("stopping scale set controller: %w", err) } - w.scaleSetController = nil w.running = false close(w.quit) @@ -85,7 +84,6 @@ func (w *Worker) Start() (err error) { defer func() { if err != nil { 
w.scaleSetController.Stop() - w.scaleSetController = nil } }() diff --git a/workers/provider/provider.go b/workers/provider/provider.go index 7d648bd7..ba95d733 100644 --- a/workers/provider/provider.go +++ b/workers/provider/provider.go @@ -241,6 +241,11 @@ func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) { return } + if instance.ScaleSetID == 0 { + slog.DebugContext(p.ctx, "skipping instance event for non scale set instance") + return + } + switch event.Operation { case dbCommon.CreateOperation: slog.DebugContext(p.ctx, "got create operation") diff --git a/workers/provider/util.go b/workers/provider/util.go index 1868611e..7e6395ff 100644 --- a/workers/provider/util.go +++ b/workers/provider/util.go @@ -13,5 +13,6 @@ func composeProviderWatcher() dbCommon.PayloadFilterFunc { commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete, ), + watcher.WithEntityTypeFilter(dbCommon.ScaleSetEntityType), ) } diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index a5c198e7..02528656 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -148,7 +148,6 @@ func (c *Controller) Stop() error { c.running = false close(c.quit) - c.quit = nil c.consumer.Close() return nil diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index a2ca2515..a4b690ef 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -84,10 +84,8 @@ func (w *Worker) Stop() error { w.running = false if w.quit != nil { close(w.quit) - w.quit = nil } w.listener.Stop() - w.listener = nil return nil } @@ -214,7 +212,6 @@ func (w *Worker) Start() (err error) { defer func() { if err != nil { consumer.Close() - w.consumer = nil } }() @@ -282,8 +279,6 @@ func (w *Worker) handleScaleSetEvent(event dbCommon.ChangePayload) { slog.ErrorContext(w.ctx, "error stopping listener", "error", err) } } - // nolint:golangci-lint,godox - // TODO: should we kick off auto-scaling if desired runner count changes? w.scaleSet = scaleSet w.mux.Unlock() default: diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index 77f4077b..07b3bf96 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -85,7 +85,6 @@ func (l *scaleSetListener) Stop() error { l.messageSession.Close() l.running = false - l.listenerCtx = nil close(l.quit) l.cancelFunc() return nil @@ -201,10 +200,6 @@ func (l *scaleSetListener) loop() { return default: slog.DebugContext(l.ctx, "getting message", "last_message_id", l.lastMessageID, "max_runners", l.scaleSetHelper.GetScaleSet().MaxRunners) - // nolint:golangci-lint,godox - // TODO(gabriel-samfira): consume initial message on startup and consolidate. - // The scale set may have undergone several messages while GARM was - // down. 
msg, err := l.messageSession.GetMessage( l.listenerCtx, l.lastMessageID, l.scaleSetHelper.GetScaleSet().MaxRunners) if err != nil { From fafe98e62f6f3c6b77b392dbacd5c06aad77bd3a Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 28 Apr 2025 13:19:27 +0000 Subject: [PATCH 032/179] Update go-github Signed-off-by: Gabriel Adrian Samfira --- go.mod | 3 +- go.sum | 2 - params/params.go | 2 +- runner/common/mocks/GithubClient.go | 2 +- runner/common/mocks/GithubEnterpriseClient.go | 2 +- runner/common/mocks/GithubEntityOperations.go | 2 +- runner/common/mocks/OrganizationHooks.go | 2 +- runner/common/mocks/RepositoryHooks.go | 2 +- runner/common/util.go | 4 +- runner/pool/common.go | 26 +- runner/pool/pool.go | 20 +- runner/pool/stub_client.go | 4 +- runner/pool/util.go | 2 +- test/integration/gh_cleanup/main.go | 10 +- test/integration/jobs_test.go | 2 +- test/integration/organizations_test.go | 2 +- test/integration/repositories_test.go | 2 +- util/github/client.go | 4 +- util/github/scalesets/client.go | 2 +- .../github.com/google/go-github/v57/AUTHORS | 487 - .../github.com/google/go-github/v57/LICENSE | 27 - .../google/go-github/v57/github/actions.go | 12 - .../go-github/v57/github/actions_artifacts.go | 167 - .../go-github/v57/github/actions_cache.go | 249 - .../go-github/v57/github/actions_oidc.go | 81 - .../github/actions_permissions_enterprise.go | 207 - .../v57/github/actions_permissions_orgs.go | 220 - .../v57/github/actions_required_workflows.go | 267 - .../v57/github/actions_runner_groups.go | 337 - .../go-github/v57/github/actions_runners.go | 371 - .../go-github/v57/github/actions_secrets.go | 396 - .../go-github/v57/github/actions_variables.go | 331 - .../v57/github/actions_workflow_jobs.go | 139 - .../v57/github/actions_workflow_runs.go | 410 - .../go-github/v57/github/actions_workflows.go | 237 - .../google/go-github/v57/github/activity.go | 77 - .../go-github/v57/github/activity_events.go | 235 - .../v57/github/activity_notifications.go | 241 - .../go-github/v57/github/activity_star.go | 152 - .../go-github/v57/github/activity_watching.go | 158 - .../google/go-github/v57/github/admin.go | 123 - .../google/go-github/v57/github/admin_orgs.go | 95 - .../go-github/v57/github/admin_stats.go | 172 - .../go-github/v57/github/admin_users.go | 141 - .../google/go-github/v57/github/apps.go | 420 - .../google/go-github/v57/github/apps_hooks.go | 52 - .../v57/github/apps_hooks_deliveries.go | 78 - .../go-github/v57/github/apps_installation.go | 138 - .../go-github/v57/github/apps_manifest.go | 51 - .../go-github/v57/github/apps_marketplace.go | 207 - .../go-github/v57/github/authorizations.go | 293 - .../google/go-github/v57/github/billing.go | 215 - .../google/go-github/v57/github/checks.go | 475 - .../go-github/v57/github/code-scanning.go | 652 - .../go-github/v57/github/codesofconduct.go | 87 - .../google/go-github/v57/github/codespaces.go | 266 - .../v57/github/codespaces_secrets.go | 451 - .../google/go-github/v57/github/dependabot.go | 12 - .../go-github/v57/github/dependabot_alerts.go | 176 - .../v57/github/dependabot_secrets.go | 289 - .../go-github/v57/github/dependency_graph.go | 82 - .../google/go-github/v57/github/doc.go | 194 - .../google/go-github/v57/github/emojis.go | 40 - .../google/go-github/v57/github/enterprise.go | 12 - .../enterprise_actions_runner_groups.go | 336 - .../v57/github/enterprise_actions_runners.go | 118 - .../v57/github/enterprise_audit_log.go | 37 - .../enterprise_code_security_and_analysis.go | 84 - 
.../google/go-github/v57/github/event.go | 54 - .../go-github/v57/github/event_types.go | 1795 -- .../google/go-github/v57/github/gists.go | 397 - .../go-github/v57/github/gists_comments.go | 128 - .../google/go-github/v57/github/git.go | 12 - .../google/go-github/v57/github/git_blobs.go | 88 - .../go-github/v57/github/git_commits.go | 225 - .../google/go-github/v57/github/git_refs.go | 185 - .../google/go-github/v57/github/git_tags.go | 88 - .../google/go-github/v57/github/git_trees.go | 166 - .../go-github/v57/github/github-accessors.go | 25375 ---------------- .../google/go-github/v57/github/github.go | 1537 - .../google/go-github/v57/github/gitignore.go | 68 - .../go-github/v57/github/interactions.go | 28 - .../go-github/v57/github/interactions_orgs.go | 86 - .../v57/github/interactions_repos.go | 86 - .../go-github/v57/github/issue_import.go | 154 - .../google/go-github/v57/github/issues.go | 382 - .../go-github/v57/github/issues_assignees.go | 103 - .../go-github/v57/github/issues_comments.go | 165 - .../go-github/v57/github/issues_events.go | 186 - .../go-github/v57/github/issues_labels.go | 253 - .../go-github/v57/github/issues_milestones.go | 157 - .../go-github/v57/github/issues_timeline.go | 195 - .../google/go-github/v57/github/licenses.go | 101 - .../google/go-github/v57/github/markdown.go | 69 - .../google/go-github/v57/github/messages.go | 352 - .../google/go-github/v57/github/meta.go | 160 - .../google/go-github/v57/github/migrations.go | 240 - .../v57/github/migrations_source_import.go | 321 - .../go-github/v57/github/migrations_user.go | 230 - .../google/go-github/v57/github/orgs.go | 316 - .../v57/github/orgs_actions_allowed.go | 34 - .../v57/github/orgs_actions_permissions.go | 34 - .../go-github/v57/github/orgs_audit_log.go | 160 - .../github/orgs_credential_authorizations.go | 99 - .../go-github/v57/github/orgs_custom_roles.go | 128 - .../google/go-github/v57/github/orgs_hooks.go | 142 - .../v57/github/orgs_hooks_configuration.go | 53 - .../v57/github/orgs_hooks_deliveries.go | 79 - .../go-github/v57/github/orgs_members.go | 422 - .../v57/github/orgs_outside_collaborators.go | 87 - .../go-github/v57/github/orgs_packages.go | 165 - .../v57/github/orgs_personal_access_tokens.go | 36 - .../go-github/v57/github/orgs_projects.go | 64 - .../go-github/v57/github/orgs_properties.go | 198 - .../google/go-github/v57/github/orgs_rules.go | 115 - .../v57/github/orgs_security_managers.go | 63 - .../v57/github/orgs_users_blocking.go | 99 - .../google/go-github/v57/github/packages.go | 143 - .../google/go-github/v57/github/projects.go | 634 - .../google/go-github/v57/github/pulls.go | 508 - .../go-github/v57/github/pulls_comments.go | 217 - .../go-github/v57/github/pulls_reviewers.go | 86 - .../go-github/v57/github/pulls_reviews.go | 329 - .../go-github/v57/github/pulls_threads.go | 17 - .../google/go-github/v57/github/rate_limit.go | 113 - .../google/go-github/v57/github/reactions.go | 570 - .../google/go-github/v57/github/repos.go | 2387 -- .../v57/github/repos_actions_access.go | 59 - .../v57/github/repos_actions_allowed.go | 53 - .../v57/github/repos_actions_permissions.go | 66 - .../go-github/v57/github/repos_autolinks.go | 112 - .../go-github/v57/github/repos_codeowners.go | 61 - .../v57/github/repos_collaborators.go | 176 - .../go-github/v57/github/repos_comments.go | 173 - .../go-github/v57/github/repos_commits.go | 325 - .../v57/github/repos_community_health.go | 63 - .../go-github/v57/github/repos_contents.go | 359 - .../repos_deployment_branch_policies.go | 135 - 
.../go-github/v57/github/repos_deployments.go | 264 - .../v57/github/repos_environments.go | 252 - .../go-github/v57/github/repos_forks.go | 97 - .../go-github/v57/github/repos_hooks.go | 271 - .../v57/github/repos_hooks_configuration.go | 53 - .../v57/github/repos_hooks_deliveries.go | 142 - .../go-github/v57/github/repos_invitations.go | 95 - .../google/go-github/v57/github/repos_keys.go | 99 - .../google/go-github/v57/github/repos_lfs.go | 53 - .../go-github/v57/github/repos_merging.go | 76 - .../go-github/v57/github/repos_pages.go | 324 - .../v57/github/repos_prereceive_hooks.go | 118 - .../go-github/v57/github/repos_projects.go | 73 - .../go-github/v57/github/repos_releases.go | 475 - .../go-github/v57/github/repos_rules.go | 511 - .../go-github/v57/github/repos_stats.go | 242 - .../go-github/v57/github/repos_statuses.go | 138 - .../google/go-github/v57/github/repos_tags.go | 82 - .../go-github/v57/github/repos_traffic.go | 149 - .../google/go-github/v57/github/scim.go | 217 - .../google/go-github/v57/github/search.go | 347 - .../go-github/v57/github/secret_scanning.go | 257 - .../v57/github/security_advisories.go | 248 - .../google/go-github/v57/github/strings.go | 94 - .../google/go-github/v57/github/teams.go | 1067 - .../v57/github/teams_discussion_comments.go | 262 - .../go-github/v57/github/teams_discussions.go | 267 - .../go-github/v57/github/teams_members.go | 263 - .../google/go-github/v57/github/timestamp.go | 52 - .../google/go-github/v57/github/users.go | 294 - .../v57/github/users_administration.go | 80 - .../go-github/v57/github/users_blocking.go | 99 - .../go-github/v57/github/users_emails.go | 105 - .../go-github/v57/github/users_followers.go | 135 - .../go-github/v57/github/users_gpg_keys.go | 138 - .../google/go-github/v57/github/users_keys.go | 122 - .../go-github/v57/github/users_packages.go | 235 - .../go-github/v57/github/users_projects.go | 72 - .../v57/github/users_ssh_signing_keys.go | 117 - .../go-github/v57/github/with_appengine.go | 21 - .../go-github/v57/github/without_appengine.go | 20 - vendor/modules.txt | 3 - 180 files changed, 42 insertions(+), 60445 deletions(-) delete mode 100644 vendor/github.com/google/go-github/v57/AUTHORS delete mode 100644 vendor/github.com/google/go-github/v57/LICENSE delete mode 100644 vendor/github.com/google/go-github/v57/github/actions.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_artifacts.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_cache.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_oidc.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_permissions_enterprise.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_permissions_orgs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_required_workflows.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_runner_groups.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_runners.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_secrets.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_variables.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_workflow_jobs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_workflow_runs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_workflows.go delete mode 100644 
vendor/github.com/google/go-github/v57/github/activity.go delete mode 100644 vendor/github.com/google/go-github/v57/github/activity_events.go delete mode 100644 vendor/github.com/google/go-github/v57/github/activity_notifications.go delete mode 100644 vendor/github.com/google/go-github/v57/github/activity_star.go delete mode 100644 vendor/github.com/google/go-github/v57/github/activity_watching.go delete mode 100644 vendor/github.com/google/go-github/v57/github/admin.go delete mode 100644 vendor/github.com/google/go-github/v57/github/admin_orgs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/admin_stats.go delete mode 100644 vendor/github.com/google/go-github/v57/github/admin_users.go delete mode 100644 vendor/github.com/google/go-github/v57/github/apps.go delete mode 100644 vendor/github.com/google/go-github/v57/github/apps_hooks.go delete mode 100644 vendor/github.com/google/go-github/v57/github/apps_hooks_deliveries.go delete mode 100644 vendor/github.com/google/go-github/v57/github/apps_installation.go delete mode 100644 vendor/github.com/google/go-github/v57/github/apps_manifest.go delete mode 100644 vendor/github.com/google/go-github/v57/github/apps_marketplace.go delete mode 100644 vendor/github.com/google/go-github/v57/github/authorizations.go delete mode 100644 vendor/github.com/google/go-github/v57/github/billing.go delete mode 100644 vendor/github.com/google/go-github/v57/github/checks.go delete mode 100644 vendor/github.com/google/go-github/v57/github/code-scanning.go delete mode 100644 vendor/github.com/google/go-github/v57/github/codesofconduct.go delete mode 100644 vendor/github.com/google/go-github/v57/github/codespaces.go delete mode 100644 vendor/github.com/google/go-github/v57/github/codespaces_secrets.go delete mode 100644 vendor/github.com/google/go-github/v57/github/dependabot.go delete mode 100644 vendor/github.com/google/go-github/v57/github/dependabot_alerts.go delete mode 100644 vendor/github.com/google/go-github/v57/github/dependabot_secrets.go delete mode 100644 vendor/github.com/google/go-github/v57/github/dependency_graph.go delete mode 100644 vendor/github.com/google/go-github/v57/github/doc.go delete mode 100644 vendor/github.com/google/go-github/v57/github/emojis.go delete mode 100644 vendor/github.com/google/go-github/v57/github/enterprise.go delete mode 100644 vendor/github.com/google/go-github/v57/github/enterprise_actions_runner_groups.go delete mode 100644 vendor/github.com/google/go-github/v57/github/enterprise_actions_runners.go delete mode 100644 vendor/github.com/google/go-github/v57/github/enterprise_audit_log.go delete mode 100644 vendor/github.com/google/go-github/v57/github/enterprise_code_security_and_analysis.go delete mode 100644 vendor/github.com/google/go-github/v57/github/event.go delete mode 100644 vendor/github.com/google/go-github/v57/github/event_types.go delete mode 100644 vendor/github.com/google/go-github/v57/github/gists.go delete mode 100644 vendor/github.com/google/go-github/v57/github/gists_comments.go delete mode 100644 vendor/github.com/google/go-github/v57/github/git.go delete mode 100644 vendor/github.com/google/go-github/v57/github/git_blobs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/git_commits.go delete mode 100644 vendor/github.com/google/go-github/v57/github/git_refs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/git_tags.go delete mode 100644 vendor/github.com/google/go-github/v57/github/git_trees.go delete mode 100644 
vendor/github.com/google/go-github/v57/github/github-accessors.go delete mode 100644 vendor/github.com/google/go-github/v57/github/github.go delete mode 100644 vendor/github.com/google/go-github/v57/github/gitignore.go delete mode 100644 vendor/github.com/google/go-github/v57/github/interactions.go delete mode 100644 vendor/github.com/google/go-github/v57/github/interactions_orgs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/interactions_repos.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issue_import.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issues.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issues_assignees.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issues_comments.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issues_events.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issues_labels.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issues_milestones.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issues_timeline.go delete mode 100644 vendor/github.com/google/go-github/v57/github/licenses.go delete mode 100644 vendor/github.com/google/go-github/v57/github/markdown.go delete mode 100644 vendor/github.com/google/go-github/v57/github/messages.go delete mode 100644 vendor/github.com/google/go-github/v57/github/meta.go delete mode 100644 vendor/github.com/google/go-github/v57/github/migrations.go delete mode 100644 vendor/github.com/google/go-github/v57/github/migrations_source_import.go delete mode 100644 vendor/github.com/google/go-github/v57/github/migrations_user.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_actions_allowed.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_actions_permissions.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_audit_log.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_credential_authorizations.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_custom_roles.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_hooks.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_hooks_configuration.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_hooks_deliveries.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_members.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_outside_collaborators.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_packages.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_personal_access_tokens.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_projects.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_properties.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_rules.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_security_managers.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_users_blocking.go delete mode 100644 vendor/github.com/google/go-github/v57/github/packages.go delete mode 100644 vendor/github.com/google/go-github/v57/github/projects.go delete mode 100644 vendor/github.com/google/go-github/v57/github/pulls.go delete mode 100644 
vendor/github.com/google/go-github/v57/github/pulls_comments.go delete mode 100644 vendor/github.com/google/go-github/v57/github/pulls_reviewers.go delete mode 100644 vendor/github.com/google/go-github/v57/github/pulls_reviews.go delete mode 100644 vendor/github.com/google/go-github/v57/github/pulls_threads.go delete mode 100644 vendor/github.com/google/go-github/v57/github/rate_limit.go delete mode 100644 vendor/github.com/google/go-github/v57/github/reactions.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_actions_access.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_actions_allowed.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_actions_permissions.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_autolinks.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_codeowners.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_collaborators.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_comments.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_commits.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_community_health.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_contents.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_deployment_branch_policies.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_deployments.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_environments.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_forks.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_hooks.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_hooks_configuration.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_hooks_deliveries.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_invitations.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_keys.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_lfs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_merging.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_pages.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_prereceive_hooks.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_projects.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_releases.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_rules.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_stats.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_statuses.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_tags.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_traffic.go delete mode 100644 vendor/github.com/google/go-github/v57/github/scim.go delete mode 100644 vendor/github.com/google/go-github/v57/github/search.go delete mode 100644 vendor/github.com/google/go-github/v57/github/secret_scanning.go delete mode 100644 vendor/github.com/google/go-github/v57/github/security_advisories.go delete mode 100644 vendor/github.com/google/go-github/v57/github/strings.go delete mode 100644 
vendor/github.com/google/go-github/v57/github/teams.go delete mode 100644 vendor/github.com/google/go-github/v57/github/teams_discussion_comments.go delete mode 100644 vendor/github.com/google/go-github/v57/github/teams_discussions.go delete mode 100644 vendor/github.com/google/go-github/v57/github/teams_members.go delete mode 100644 vendor/github.com/google/go-github/v57/github/timestamp.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_administration.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_blocking.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_emails.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_followers.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_gpg_keys.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_keys.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_packages.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_projects.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_ssh_signing_keys.go delete mode 100644 vendor/github.com/google/go-github/v57/github/with_appengine.go delete mode 100644 vendor/github.com/google/go-github/v57/github/without_appengine.go diff --git a/go.mod b/go.mod index bd2a0c86..afc1af60 100644 --- a/go.mod +++ b/go.mod @@ -14,7 +14,7 @@ require ( github.com/go-openapi/strfmt v0.23.0 github.com/go-openapi/swag v0.23.1 github.com/golang-jwt/jwt/v5 v5.2.2 - github.com/google/go-github/v57 v57.0.0 + github.com/google/go-github/v71 v71.0.0 github.com/google/uuid v1.6.0 github.com/gorilla/handlers v1.5.2 github.com/gorilla/mux v1.8.1 @@ -56,7 +56,6 @@ require ( github.com/go-openapi/validate v0.24.0 // indirect github.com/go-sql-driver/mysql v1.9.2 // indirect github.com/golang-jwt/jwt/v4 v4.5.2 // indirect - github.com/google/go-github/v71 v71.0.0 // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jinzhu/inflection v1.0.0 // indirect diff --git a/go.sum b/go.sum index 1deb1931..14e83505 100644 --- a/go.sum +++ b/go.sum @@ -66,8 +66,6 @@ github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EO github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/google/go-github/v57 v57.0.0 h1:L+Y3UPTY8ALM8x+TV0lg+IEBI+upibemtBD8Q9u7zHs= -github.com/google/go-github/v57 v57.0.0/go.mod h1:s0omdnye0hvK/ecLvpsGfJMiRt85PimQh4oygmLIxHw= github.com/google/go-github/v71 v71.0.0 h1:Zi16OymGKZZMm8ZliffVVJ/Q9YZreDKONCr+WUd0Z30= github.com/google/go-github/v71 v71.0.0/go.mod h1:URZXObp2BLlMjwu0O8g4y6VBneUj2bCHgnI8FfgZ51M= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= diff --git a/params/params.go b/params/params.go index 43d1dd92..69ec179c 100644 --- a/params/params.go +++ b/params/params.go @@ -27,7 +27,7 @@ import ( "time" "github.com/bradleyfalzon/ghinstallation/v2" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" "github.com/google/uuid" "golang.org/x/oauth2" diff --git a/runner/common/mocks/GithubClient.go b/runner/common/mocks/GithubClient.go index fb6729fc..f1009d5a 100644 --- 
a/runner/common/mocks/GithubClient.go +++ b/runner/common/mocks/GithubClient.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v57/github" + github "github.com/google/go-github/v71/github" mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" diff --git a/runner/common/mocks/GithubEnterpriseClient.go b/runner/common/mocks/GithubEnterpriseClient.go index 36b6517d..fa2966f0 100644 --- a/runner/common/mocks/GithubEnterpriseClient.go +++ b/runner/common/mocks/GithubEnterpriseClient.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v57/github" + github "github.com/google/go-github/v71/github" mock "github.com/stretchr/testify/mock" ) diff --git a/runner/common/mocks/GithubEntityOperations.go b/runner/common/mocks/GithubEntityOperations.go index cb2a2182..a482a985 100644 --- a/runner/common/mocks/GithubEntityOperations.go +++ b/runner/common/mocks/GithubEntityOperations.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v57/github" + github "github.com/google/go-github/v71/github" mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" diff --git a/runner/common/mocks/OrganizationHooks.go b/runner/common/mocks/OrganizationHooks.go index 67d17a30..46638f02 100644 --- a/runner/common/mocks/OrganizationHooks.go +++ b/runner/common/mocks/OrganizationHooks.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v57/github" + github "github.com/google/go-github/v71/github" mock "github.com/stretchr/testify/mock" ) diff --git a/runner/common/mocks/RepositoryHooks.go b/runner/common/mocks/RepositoryHooks.go index 72b7831a..76ed0db0 100644 --- a/runner/common/mocks/RepositoryHooks.go +++ b/runner/common/mocks/RepositoryHooks.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v57/github" + github "github.com/google/go-github/v71/github" mock "github.com/stretchr/testify/mock" ) diff --git a/runner/common/util.go b/runner/common/util.go index 7dbec688..71b1849f 100644 --- a/runner/common/util.go +++ b/runner/common/util.go @@ -4,7 +4,7 @@ import ( "context" "net/url" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" "github.com/cloudbase/garm/params" ) @@ -15,7 +15,7 @@ type GithubEntityOperations interface { CreateEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) DeleteEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) PingEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) - ListEntityRunners(ctx context.Context, opts *github.ListOptions) (*github.Runners, *github.Response, error) + ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) RemoveEntityRunner(ctx context.Context, runnerID int64) error CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) diff --git a/runner/pool/common.go b/runner/pool/common.go index fcf4f73f..6820be1a 100644 --- a/runner/pool/common.go +++ b/runner/pool/common.go @@ -6,7 +6,7 @@ import ( "net/url" "strings" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" "github.com/pkg/errors" runnerErrors 
"github.com/cloudbase/garm-provider-common/errors" @@ -21,16 +21,15 @@ func validateHookRequest(controllerID, baseURL string, allHooks []*github.Hook, partialMatches := []string{} for _, hook := range allHooks { - hookURL, ok := hook.Config["url"].(string) - if !ok { + hookURL := strings.ToLower(hook.Config.GetURL()) + if hookURL == "" { continue } - hookURL = strings.ToLower(hookURL) - if hook.Config["url"] == req.Config["url"] { + if hook.Config.GetURL() == req.Config.GetURL() { return runnerErrors.NewConflictError("hook already installed") } else if strings.Contains(hookURL, controllerID) || strings.Contains(hookURL, parsed.Hostname()) { - partialMatches = append(partialMatches, hook.Config["url"].(string)) + partialMatches = append(partialMatches, hook.Config.GetURL()) } } @@ -42,19 +41,10 @@ func validateHookRequest(controllerID, baseURL string, allHooks []*github.Hook, } func hookToParamsHookInfo(hook *github.Hook) params.HookInfo { - var hookURL string - url, ok := hook.Config["url"] - if ok { - hookURL = url.(string) - } + hookURL := hook.Config.GetURL() - var insecureSSL bool - insecureSSLConfig, ok := hook.Config["insecure_ssl"] - if ok { - if insecureSSLConfig.(string) == "1" { - insecureSSL = true - } - } + insecureSSLConfig := hook.Config.GetInsecureSSL() + insecureSSL := insecureSSLConfig == "1" return params.HookInfo{ ID: *hook.ID, diff --git a/runner/pool/pool.go b/runner/pool/pool.go index f17ba15f..0173b3fc 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -27,7 +27,7 @@ import ( "sync" "time" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" "github.com/google/uuid" "github.com/pkg/errors" "golang.org/x/sync/errgroup" @@ -1930,12 +1930,12 @@ func (r *basePoolManager) InstallWebhook(ctx context.Context, param params.Insta insecureSSL = "1" } req := &github.Hook{ - Active: github.Bool(true), - Config: map[string]interface{}{ - "url": r.controllerInfo.ControllerWebhookURL, - "content_type": "json", - "insecure_ssl": insecureSSL, - "secret": r.WebhookSecret(), + Active: github.Ptr(true), + Config: &github.HookConfig{ + ContentType: github.Ptr("json"), + InsecureSSL: github.Ptr(insecureSSL), + URL: github.Ptr(r.controllerInfo.ControllerWebhookURL), + Secret: github.Ptr(r.WebhookSecret()), }, Events: []string{ "workflow_job", @@ -1997,8 +1997,10 @@ func (r *basePoolManager) FetchTools() ([]commonParams.RunnerApplicationDownload } func (r *basePoolManager) GetGithubRunners() ([]*github.Runner, error) { - opts := github.ListOptions{ - PerPage: 100, + opts := github.ListRunnersOptions{ + ListOptions: github.ListOptions{ + PerPage: 100, + }, } var allRunners []*github.Runner diff --git a/runner/pool/stub_client.go b/runner/pool/stub_client.go index 7a82567f..d01c834e 100644 --- a/runner/pool/stub_client.go +++ b/runner/pool/stub_client.go @@ -4,7 +4,7 @@ import ( "context" "net/url" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" "github.com/cloudbase/garm/params" ) @@ -33,7 +33,7 @@ func (s *stubGithubClient) PingEntityHook(_ context.Context, _ int64) (*github.R return nil, s.err } -func (s *stubGithubClient) ListEntityRunners(_ context.Context, _ *github.ListOptions) (*github.Runners, *github.Response, error) { +func (s *stubGithubClient) ListEntityRunners(_ context.Context, _ *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { return nil, nil, s.err } diff --git a/runner/pool/util.go b/runner/pool/util.go index e2308160..9b7b7f14 100644 --- a/runner/pool/util.go +++ 
b/runner/pool/util.go @@ -6,7 +6,7 @@ import ( "sync" "sync/atomic" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" diff --git a/test/integration/gh_cleanup/main.go b/test/integration/gh_cleanup/main.go index 0095dba8..6ec50304 100644 --- a/test/integration/gh_cleanup/main.go +++ b/test/integration/gh_cleanup/main.go @@ -6,7 +6,7 @@ import ( "log/slog" "os" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" "golang.org/x/oauth2" ) @@ -141,8 +141,8 @@ func getGhOrgWebhook(url, ghToken, orgName string) (*github.Hook, error) { } for _, hook := range ghOrgHooks { - hookURL, ok := hook.Config["url"].(string) - if ok && hookURL == url { + hookURL := hook.Config.GetURL() + if hookURL == url { return hook, nil } } @@ -158,8 +158,8 @@ func getGhRepoWebhook(url, ghToken, orgName, repoName string) (*github.Hook, err } for _, hook := range ghRepoHooks { - hookURL, ok := hook.Config["url"].(string) - if ok && hookURL == url { + hookURL := hook.Config.GetURL() + if hookURL == url { return hook, nil } } diff --git a/test/integration/jobs_test.go b/test/integration/jobs_test.go index e9483e17..4d87c077 100644 --- a/test/integration/jobs_test.go +++ b/test/integration/jobs_test.go @@ -8,7 +8,7 @@ import ( "fmt" "time" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/params" diff --git a/test/integration/organizations_test.go b/test/integration/organizations_test.go index 5089725b..0151d2fc 100644 --- a/test/integration/organizations_test.go +++ b/test/integration/organizations_test.go @@ -8,7 +8,7 @@ import ( "fmt" "time" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/params" diff --git a/test/integration/repositories_test.go b/test/integration/repositories_test.go index 0c516a74..bcf948e5 100644 --- a/test/integration/repositories_test.go +++ b/test/integration/repositories_test.go @@ -8,7 +8,7 @@ import ( "fmt" "time" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" "golang.org/x/oauth2" commonParams "github.com/cloudbase/garm-provider-common/params" diff --git a/util/github/client.go b/util/github/client.go index 50f97d39..1b899913 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -23,7 +23,7 @@ import ( "net/http" "net/url" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -162,7 +162,7 @@ func (g *githubClient) PingEntityHook(ctx context.Context, id int64) (ret *githu return ret, err } -func (g *githubClient) ListEntityRunners(ctx context.Context, opts *github.ListOptions) (*github.Runners, *github.Response, error) { +func (g *githubClient) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { var ret *github.Runners var response *github.Response var err error diff --git a/util/github/scalesets/client.go b/util/github/scalesets/client.go index f0b2deac..7a8a53fd 100644 --- a/util/github/scalesets/client.go +++ b/util/github/scalesets/client.go @@ -20,7 +20,7 @@ import ( "net/http" "sync" - 
"github.com/google/go-github/v57/github" + "github.com/google/go-github/v71/github" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/params" diff --git a/vendor/github.com/google/go-github/v57/AUTHORS b/vendor/github.com/google/go-github/v57/AUTHORS deleted file mode 100644 index 74a21dc6..00000000 --- a/vendor/github.com/google/go-github/v57/AUTHORS +++ /dev/null @@ -1,487 +0,0 @@ -# This is the official list of go-github authors for copyright purposes. -# -# This does not necessarily list everyone who has contributed code, since in -# some cases, their employer may be the copyright holder. To see the full list -# of contributors, see the revision history in source control or -# https://github.com/google/go-github/graphs/contributors. -# -# Authors who wish to be recognized in this file should add themselves (or -# their employer, as appropriate). - -178inaba -2BFL -413x -6543 <6543@obermui.de> -Abed Kibbe -Abhinav Gupta -Abhishek Veeramalla -aboy -Adam Kohring -adrienzieba -afdesk -Ahmad Nurus S -Ahmed Hagy -Aidan -Aidan Steele -Ainsley Chong -ajz01 -Akeda Bagus -Akhil Mohan -Alec Thomas -Aleks Clark -Alex Bramley -Alex Ellis -Alex Orr -Alex Su -Alex Unger -Alexander Harkness -Alexis Gauthiez -Ali Farooq -Allan Guwatudde -Allen Sun -Amey Sakhadeo -Anders Janmyr -Andreas Garnæs -Andrew Ryabchun -Andrew Svoboda -Andy Grunwald -Andy Hume -Andy Lindeman -angie pinilla -anjanashenoy -Anshuman Bhartiya -Antoine -Antoine Pelisse -Anton Nguyen -Anubha Kushwaha -appilon -aprp -apurwaj2 -Aravind -Arda Kuyumcu -Arıl Bozoluk -Asier Marruedo -Austin Burdine -Austin Dizzy -Azuka Okuleye -Ben Batha -Benjamen Keroack -Berkay Tacyildiz -Beshr Kayali -Beyang Liu -Billy Keyes -Billy Lynch -Bingtan Lu -Bjorn Neergaard -Björn Häuser -Bo Huang -boljen -Bracken -Brad Harris -Brad Moylan -Bradley Falzon -Bradley McAllister -Brandon Butler -Brandon Cook -Brandon Stubbs -Brett Kuhlman -Brett Logan -Brian Egizi -Bryan Boreham -Bryan Peterson -Cami Diez -Carl Johnson -Carlos Alexandro Becker -Carlos Tadeu Panato Junior -ChandanChainani -chandresh-pancholi -Charles Fenwick Elliott -Charlie Yan -Chmouel Boudjnah -Chris King -Chris Mc -Chris Raborg -Chris Roche -Chris Schaefer -chrisforrette -Christian Bargmann -Christian Muehlhaeuser -Christoph Jerolimov -Christoph Sassenberg -CI Monk -Colin Misare -Craig Gumbley -Craig Peterson -Cristian Maglie -Cyb3r Jak3 -Daehyeok Mun -Dalton Hubble -Daniel Lanner -Daniel Leavitt -Daniel Nilsson -Daoq -Dave Du Cros -Dave Henderson -Dave Perrett -Dave Protasowski -David Deng -David Gamba -David J. M. Karlsen -David Jannotta -David Ji -David Lopez Reyes -Davide Zipeto -Dennis Webb -Derek Jobst -DeviousLab -Dhi Aurrahman -Diego Lapiduz -Diogo Vilela -Dmitri Shuralyov -dmnlk -Don Petersen -Doug Turner -Drew Fradette -Dustin Deus -Dustin Lish -Eivind -Eli Uriegas -Elliott Beach -Emerson Wood -Emil V -Eng Zer Jun -eperm -Erick Fejta -Erik Nobel -erwinvaneyk -Evan Anderson -Evan Elias -Fabian Holler -Fabrice -Fatema-Moaiyadi -Federico Di Pierro -Felix Geisendörfer -Filippo Valsorda -Florian Forster -Florian Wagner -Francesc Gil -Francis -Francisco Guimarães -François de Metz -Fredrik Jönsson -Gabriel -Gal Ofri -Garrett Squire -George Kontridze -Georgy Buranov -Glen Mailer -Gnahz -Google Inc. 
-Grachev Mikhail -griffin_stewie -guangwu -Guillaume Jacquet -Guz Alexander -Guðmundur Bjarni Ólafsson -Hanno Hecker -Hari haran -Harikesh00 -haya14busa -haya14busa -Hiroki Ito -Hubot Jr -Huy Tr -huydx -i2bskn -Iain Steers -Ikko Ashimine -Ilia Choly -Ioannis Georgoulas -Isao Jonas -ishan upadhyay -isqua -Jacob Valdemar -Jake Krammer -Jake White -Jameel Haffejee -James Bowes -James Cockbain -James Loh -James Maguire -James Turley -Jamie West -Jan Kosecki -Jan Švábík -Jason Field -Javier Campanini -Jef LeCompte -Jeff Wenzbauer -Jens Rantil -Jeremy Morris -Jesse Haka -Jesse Newland -Jihoon Chung -Jille Timmermans -Jimmi Dyson -Joan Saum -Joe Tsai -John Barton -John Engelman -John Jones -John Liu -Jordan Brockopp -Jordan Burandt -Jordan Sussman -Jorge Gómez Reus -Joshua Bezaleel Abednego -João Cerqueira -JP Phillips -jpbelanger-mtl -Juan -Juan Basso -Julien Garcia Gonzalez -Julien Rostand -Junya Kono -Justin Abrahms -Justin Toh -Jusung Lee -jzhoucliqr -k0ral -k1rnt -kadern0 -Karthik Sundari -Katrina Owen -Kautilya Tripathi -Keita Urashima -Kevin Burke -Kevin Wang -Kevin Zhao -kgalli -Kirill -Konrad Malawski -Kookheon Kwon -Krishna Indani -Krzysztof Kowalczyk -Kshitij Saraogi -Kumar Saurabh -Kyle Kurz -kyokomi -Lars Lehtonen -Laurent Verdoïa -leopoldwang -Liam Galvin -Lluis Campos -Lovro Mažgon -Loïs Postula -Luca Campese -Lucas Alcantara -Lucas Martin-King -Luis Davim -Luke Evers -Luke Hinds -Luke Kysow -Luke Roberts -Luke Young -lynn [they] -Magnus Kulke -Maksim Zhylinski -Marc Binder -Marcelo Carlos -Mark Tareshawty -Martin Holman -Martin-Louis Bright -Martins Sipenko -Marwan Sulaiman -Masayuki Izumi -Mat Geist -Matija Horvat -Matin Rahmanian -Matt -Matt Brender -Matt Dainty -Matt Gaunt -Matt Landis -Matt Moore -Matt Simons -Maxime Bury -Michael Meng -Michael Spiegel -Michael Tiller -Michał Glapa -Michelangelo Morrillo -Miguel Elias dos Santos -Mike Chen -mohammad ali <2018cs92@student.uet.edu.pk> -Mohammed AlDujaili -Mukundan Senthil -Munia Balayil -Mustafa Abban -Nadav Kaner -Naoki Kanatani -Nathan VanBenschoten -Navaneeth Suresh -Neal Caffery -Neil O'Toole -Nick Miyake -Nick Platt -Nick Spragg -Nicolas Chapurlat -Nikhita Raghunath -Nilesh Singh -Noah Hanjun Lee -Noah Zoschke -ns-cweber -nxya -Ole Orhagen -Oleg Kovalov -Ondřej Kupka -Ori Talmor -Osama Faqhruldin -oslowalk -Pablo Pérez Schröder -Palash Nigam -Panagiotis Moustafellos -Parham Alvani -pari-27 -Parker Moore -parkhyukjun89 -Pat Alwell -Patrick DeVivo -Patrick Marabeas -Patrik Nordlén -Pavel Dvoinos -Pavel Shtanko -Pete Wagner -Petr Shevtsov -Pierce McEntagart -Pierre Carrier -Piotr Zurek -Piyush Chugh -Pratik Mallya -Qais Patankar -Quang Le Hong -Quentin Leffray -Quinn Slack -Rackspace US, Inc. -Radek Simko -Radliński Ignacy -Rafael Aramizu Gomes -Rajat Jindal -Rajendra arora -Rajkumar -Ranbir Singh -Ravi Shekhar Jethani -RaviTeja Pothana -rc1140 -Red Hat, Inc. 
-Reetuparna Mukherjee -reeves122 -Reinier Timmer -Renjith R -Ricco Førgaard -Richard de Vries -Rob Figueiredo -Rohit Upadhyay -Rojan Dinc -Ronak Jain -Ronan Pelliard -Ross Gustafson -Ruben Vereecken -Russell Boley -Ryan Leung -Ryan Lower -Ryo Nakao -Saaarah -Safwan Olaimat -Sahil Dua -Sai Ravi Teja Chintakrindi -saisi -Sam Minnée -Sandeep Sukhani -Sander Knape -Sander van Harmelen -Sanket Payghan -Sarah Funkhouser -Sarasa Kisaragi -Sasha Melentyev -Sean Wang -Sebastian Mandrean -Sebastian Mæland Pedersen -Sergei Popinevskii -Sergey Romanov -Sergio Garcia -Seth Vargo -Sevki -Shagun Khemka -shakeelrao -Shawn Catanzarite -Shawn Smith -Shibasis Patel -Sho Okada -Shrikrishna Singh -Simon Davis -sona-tar -soniachikh -SoundCloud, Ltd. -Sridhar Mocherla -SriVignessh Pss -Stefan Sedich -Steve Teuber -Stian Eikeland -Suhaib Mujahid -sushmita wable -Szymon Kodrebski -Søren Hansen -T.J. Corrigan -Takashi Yoneuchi -Takayuki Watanabe -Taketoshi Fujiwara -Taketoshi Fujiwara -Takuma Kajikawa -Tasya Aditya Rukmana -Theo Henson -Theofilos Petsios -Thomas Aidan Curran -Thomas Bruyelle -Tim Rogers -Timothy O'Brien -Timothée Peignier -Tingluo Huang -tkhandel -Tobias Gesellchen -Tom Payne -Trey Tacon -tsbkw -ttacon -Vaibhav Singh -Varadarajan Aravamudhan -Victor Castell -Victor Vrantchan -Victory Osikwemhe -vikkyomkar -Vivek -Vlad Ungureanu -Wasim Thabraze -Weslei Juan Moser Pereira -Wheeler Law -Will Maier -Will Norris -Willem D'Haeseleer -William Bailey -William Cooke -Xabi -xibz -Yann Malet -Yannick Utard -Yarden Shoham -Yicheng Qin -Yosuke Akatsuka -Yumikiyo Osanai -Yurii Soldak -Yusef Mohamadi -Yusuke Kuoka -Zach Latta -zhouhaibing089 -六开箱 -缘生 -蒋航 diff --git a/vendor/github.com/google/go-github/v57/LICENSE b/vendor/github.com/google/go-github/v57/LICENSE deleted file mode 100644 index 28b6486f..00000000 --- a/vendor/github.com/google/go-github/v57/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2013 The go-github AUTHORS. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
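
Note on the go-github v71 API used in the runner/pool hunks of this patch: the untyped `Config map[string]interface{}` on `github.Hook` is replaced by the typed `*github.HookConfig`, written with `github.Ptr` and read back through getters instead of map lookups and type assertions. The following is a minimal, self-contained sketch of that pattern only (the webhook URL and secret are placeholder values, not anything from this patch):

// Sketch: building and reading a webhook config with go-github v71's typed
// HookConfig, mirroring the InstallWebhook/hookToParamsHookInfo changes above.
package main

import (
	"fmt"

	"github.com/google/go-github/v71/github"
)

func main() {
	hook := &github.Hook{
		Active: github.Ptr(true),
		Config: &github.HookConfig{
			ContentType: github.Ptr("json"),
			InsecureSSL: github.Ptr("0"),
			URL:         github.Ptr("https://garm.example.com/webhooks"), // placeholder URL
			Secret:      github.Ptr("example-secret"),                    // placeholder secret
		},
		Events: []string{"workflow_job"},
	}

	// The v71 getters replace the old map lookups; no type assertions needed.
	fmt.Println(hook.Config.GetURL())
	fmt.Println("insecure SSL enabled:", hook.Config.GetInsecureSSL() == "1")
}
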
diff --git a/vendor/github.com/google/go-github/v57/github/actions.go b/vendor/github.com/google/go-github/v57/github/actions.go deleted file mode 100644 index 4b88a1e1..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// ActionsService handles communication with the actions related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/actions/ -type ActionsService service diff --git a/vendor/github.com/google/go-github/v57/github/actions_artifacts.go b/vendor/github.com/google/go-github/v57/github/actions_artifacts.go deleted file mode 100644 index f804b809..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_artifacts.go +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" - "net/url" -) - -// ArtifactWorkflowRun represents a GitHub artifact's workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts -type ArtifactWorkflowRun struct { - ID *int64 `json:"id,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - HeadRepositoryID *int64 `json:"head_repository_id,omitempty"` - HeadBranch *string `json:"head_branch,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` -} - -// Artifact represents a GitHub artifact. Artifacts allow sharing -// data between jobs in a workflow and provide storage for data -// once a workflow is complete. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts -type Artifact struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - SizeInBytes *int64 `json:"size_in_bytes,omitempty"` - URL *string `json:"url,omitempty"` - ArchiveDownloadURL *string `json:"archive_download_url,omitempty"` - Expired *bool `json:"expired,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ExpiresAt *Timestamp `json:"expires_at,omitempty"` - WorkflowRun *ArtifactWorkflowRun `json:"workflow_run,omitempty"` -} - -// ArtifactList represents a list of GitHub artifacts. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#artifacts -type ArtifactList struct { - TotalCount *int64 `json:"total_count,omitempty"` - Artifacts []*Artifact `json:"artifacts,omitempty"` -} - -// ListArtifacts lists all artifacts that belong to a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#list-artifacts-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/artifacts -func (s *ActionsService) ListArtifacts(ctx context.Context, owner, repo string, opts *ListOptions) (*ArtifactList, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/artifacts", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - artifactList := new(ArtifactList) - resp, err := s.client.Do(ctx, req, artifactList) - if err != nil { - return nil, resp, err - } - - return artifactList, resp, nil -} - -// ListWorkflowRunArtifacts lists all artifacts that belong to a workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#list-workflow-run-artifacts -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts -func (s *ActionsService) ListWorkflowRunArtifacts(ctx context.Context, owner, repo string, runID int64, opts *ListOptions) (*ArtifactList, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/artifacts", owner, repo, runID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - artifactList := new(ArtifactList) - resp, err := s.client.Do(ctx, req, artifactList) - if err != nil { - return nil, resp, err - } - - return artifactList, resp, nil -} - -// GetArtifact gets a specific artifact for a workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#get-an-artifact -// -//meta:operation GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id} -func (s *ActionsService) GetArtifact(ctx context.Context, owner, repo string, artifactID int64) (*Artifact, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/artifacts/%v", owner, repo, artifactID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - artifact := new(Artifact) - resp, err := s.client.Do(ctx, req, artifact) - if err != nil { - return nil, resp, err - } - - return artifact, resp, nil -} - -// DownloadArtifact gets a redirect URL to download an archive for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#download-an-artifact -// -//meta:operation GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format} -func (s *ActionsService) DownloadArtifact(ctx context.Context, owner, repo string, artifactID int64, maxRedirects int) (*url.URL, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/artifacts/%v/zip", owner, repo, artifactID) - - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusFound { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - parsedURL, err := url.Parse(resp.Header.Get("Location")) - if err != nil { - return nil, newResponse(resp), err - } - - return parsedURL, newResponse(resp), nil -} - -// DeleteArtifact deletes a workflow run artifact. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#delete-an-artifact -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id} -func (s *ActionsService) DeleteArtifact(ctx context.Context, owner, repo string, artifactID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/artifacts/%v", owner, repo, artifactID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_cache.go b/vendor/github.com/google/go-github/v57/github/actions_cache.go deleted file mode 100644 index 271d7d82..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_cache.go +++ /dev/null @@ -1,249 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ActionsCache represents a GitHub action cache. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#about-the-cache-api -type ActionsCache struct { - ID *int64 `json:"id,omitempty" url:"-"` - Ref *string `json:"ref,omitempty" url:"ref"` - Key *string `json:"key,omitempty" url:"key"` - Version *string `json:"version,omitempty" url:"-"` - LastAccessedAt *Timestamp `json:"last_accessed_at,omitempty" url:"-"` - CreatedAt *Timestamp `json:"created_at,omitempty" url:"-"` - SizeInBytes *int64 `json:"size_in_bytes,omitempty" url:"-"` -} - -// ActionsCacheList represents a list of GitHub actions Cache. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository -type ActionsCacheList struct { - TotalCount int `json:"total_count"` - ActionsCaches []*ActionsCache `json:"actions_caches,omitempty"` -} - -// ActionsCacheUsage represents a GitHub Actions Cache Usage object. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#get-github-actions-cache-usage-for-a-repository -type ActionsCacheUsage struct { - FullName string `json:"full_name"` - ActiveCachesSizeInBytes int64 `json:"active_caches_size_in_bytes"` - ActiveCachesCount int `json:"active_caches_count"` -} - -// ActionsCacheUsageList represents a list of repositories with GitHub Actions cache usage for an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#get-github-actions-cache-usage-for-a-repository -type ActionsCacheUsageList struct { - TotalCount int `json:"total_count"` - RepoCacheUsage []*ActionsCacheUsage `json:"repository_cache_usages,omitempty"` -} - -// TotalCacheUsage represents total GitHub actions cache usage of an organization or enterprise. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#get-github-actions-cache-usage-for-an-enterprise -type TotalCacheUsage struct { - TotalActiveCachesUsageSizeInBytes int64 `json:"total_active_caches_size_in_bytes"` - TotalActiveCachesCount int `json:"total_active_caches_count"` -} - -// ActionsCacheListOptions represents a list of all possible optional Query parameters for ListCaches method. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository -type ActionsCacheListOptions struct { - ListOptions - // The Git reference for the results you want to list. - // The ref for a branch can be formatted either as refs/heads/ - // or simply . 
To reference a pull request use refs/pull//merge - Ref *string `url:"ref,omitempty"` - Key *string `url:"key,omitempty"` - // Can be one of: "created_at", "last_accessed_at", "size_in_bytes". Default: "last_accessed_at" - Sort *string `url:"sort,omitempty"` - // Can be one of: "asc", "desc" Default: desc - Direction *string `url:"direction,omitempty"` -} - -// ListCaches lists the GitHub Actions caches for a repository. -// You must authenticate using an access token with the repo scope to use this endpoint. -// -// Permissions: must have the actions:read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/caches -func (s *ActionsService) ListCaches(ctx context.Context, owner, repo string, opts *ActionsCacheListOptions) (*ActionsCacheList, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/caches", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionCacheList := new(ActionsCacheList) - resp, err := s.client.Do(ctx, req, actionCacheList) - if err != nil { - return nil, resp, err - } - - return actionCacheList, resp, nil -} - -// DeleteCachesByKey deletes one or more GitHub Actions caches for a repository, using a complete cache key. -// By default, all caches that match the provided key are deleted, but you can optionally provide -// a Git ref to restrict deletions to caches that match both the provided key and the Git ref. -// The ref for a branch can be formatted either as "refs/heads/" or simply "". -// To reference a pull request use "refs/pull//merge". If you don't want to use ref just pass nil in parameter. -// -// Permissions: You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have the actions:write permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#delete-github-actions-caches-for-a-repository-using-a-cache-key -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/caches -func (s *ActionsService) DeleteCachesByKey(ctx context.Context, owner, repo, key string, ref *string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/caches", owner, repo) - u, err := addOptions(u, ActionsCache{Key: &key, Ref: ref}) - if err != nil { - return nil, err - } - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteCachesByID deletes a GitHub Actions cache for a repository, using a cache ID. -// -// Permissions: You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have the actions:write permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#delete-a-github-actions-cache-for-a-repository-using-a-cache-id -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/caches/{cache_id} -func (s *ActionsService) DeleteCachesByID(ctx context.Context, owner, repo string, cacheID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/caches/%v", owner, repo, cacheID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetCacheUsageForRepo gets GitHub Actions cache usage for a repository. 
The data fetched using this API is refreshed approximately every 5 minutes, -// so values returned from this endpoint may take at least 5 minutes to get updated. -// -// Permissions: Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an -// access token with the repo scope. GitHub Apps must have the actions:read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#get-github-actions-cache-usage-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/cache/usage -func (s *ActionsService) GetCacheUsageForRepo(ctx context.Context, owner, repo string) (*ActionsCacheUsage, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/cache/usage", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - cacheUsage := new(ActionsCacheUsage) - res, err := s.client.Do(ctx, req, cacheUsage) - if err != nil { - return nil, res, err - } - - return cacheUsage, res, err -} - -// ListCacheUsageByRepoForOrg lists repositories and their GitHub Actions cache usage for an organization. The data fetched using this API is -// refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. -// -// Permissions: You must authenticate using an access token with the read:org scope to use this endpoint. -// GitHub Apps must have the organization_admistration:read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#list-repositories-with-github-actions-cache-usage-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/cache/usage-by-repository -func (s *ActionsService) ListCacheUsageByRepoForOrg(ctx context.Context, org string, opts *ListOptions) (*ActionsCacheUsageList, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/cache/usage-by-repository", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - cacheUsage := new(ActionsCacheUsageList) - res, err := s.client.Do(ctx, req, cacheUsage) - if err != nil { - return nil, res, err - } - - return cacheUsage, res, err -} - -// GetTotalCacheUsageForOrg gets the total GitHub Actions cache usage for an organization. The data fetched using this API is refreshed approximately every -// 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. -// -// Permissions: You must authenticate using an access token with the read:org scope to use this endpoint. -// GitHub Apps must have the organization_admistration:read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#get-github-actions-cache-usage-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/cache/usage -func (s *ActionsService) GetTotalCacheUsageForOrg(ctx context.Context, org string) (*TotalCacheUsage, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/cache/usage", org) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - cacheUsage := new(TotalCacheUsage) - res, err := s.client.Do(ctx, req, cacheUsage) - if err != nil { - return nil, res, err - } - - return cacheUsage, res, err -} - -// GetTotalCacheUsageForEnterprise gets the total GitHub Actions cache usage for an enterprise. 
The data fetched using this API is refreshed approximately every 5 minutes, -// so values returned from this endpoint may take at least 5 minutes to get updated. -// -// Permissions: You must authenticate using an access token with the "admin:enterprise" scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/cache#get-github-actions-cache-usage-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/cache/usage -func (s *ActionsService) GetTotalCacheUsageForEnterprise(ctx context.Context, enterprise string) (*TotalCacheUsage, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/cache/usage", enterprise) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - cacheUsage := new(TotalCacheUsage) - res, err := s.client.Do(ctx, req, cacheUsage) - if err != nil { - return nil, res, err - } - - return cacheUsage, res, err -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_oidc.go b/vendor/github.com/google/go-github/v57/github/actions_oidc.go deleted file mode 100644 index 596aa9d9..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_oidc.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// OIDCSubjectClaimCustomTemplate represents an OIDC subject claim customization template. -type OIDCSubjectClaimCustomTemplate struct { - UseDefault *bool `json:"use_default,omitempty"` - IncludeClaimKeys []string `json:"include_claim_keys,omitempty"` -} - -// GetOrgOIDCSubjectClaimCustomTemplate gets the subject claim customization template for an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/oidc#get-the-customization-template-for-an-oidc-subject-claim-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/oidc/customization/sub -func (s *ActionsService) GetOrgOIDCSubjectClaimCustomTemplate(ctx context.Context, org string) (*OIDCSubjectClaimCustomTemplate, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/oidc/customization/sub", org) - return s.getOIDCSubjectClaimCustomTemplate(ctx, u) -} - -// GetRepoOIDCSubjectClaimCustomTemplate gets the subject claim customization template for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/oidc#get-the-customization-template-for-an-oidc-subject-claim-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/oidc/customization/sub -func (s *ActionsService) GetRepoOIDCSubjectClaimCustomTemplate(ctx context.Context, owner, repo string) (*OIDCSubjectClaimCustomTemplate, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/oidc/customization/sub", owner, repo) - return s.getOIDCSubjectClaimCustomTemplate(ctx, u) -} - -func (s *ActionsService) getOIDCSubjectClaimCustomTemplate(ctx context.Context, url string) (*OIDCSubjectClaimCustomTemplate, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - tmpl := new(OIDCSubjectClaimCustomTemplate) - resp, err := s.client.Do(ctx, req, tmpl) - if err != nil { - return nil, resp, err - } - - return tmpl, resp, nil -} - -// SetOrgOIDCSubjectClaimCustomTemplate sets the subject claim customization for an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/oidc/customization/sub -func (s *ActionsService) SetOrgOIDCSubjectClaimCustomTemplate(ctx context.Context, org string, template *OIDCSubjectClaimCustomTemplate) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/oidc/customization/sub", org) - return s.setOIDCSubjectClaimCustomTemplate(ctx, u, template) -} - -// SetRepoOIDCSubjectClaimCustomTemplate sets the subject claim customization for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/actions/oidc/customization/sub -func (s *ActionsService) SetRepoOIDCSubjectClaimCustomTemplate(ctx context.Context, owner, repo string, template *OIDCSubjectClaimCustomTemplate) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/oidc/customization/sub", owner, repo) - return s.setOIDCSubjectClaimCustomTemplate(ctx, u, template) -} - -func (s *ActionsService) setOIDCSubjectClaimCustomTemplate(ctx context.Context, url string, template *OIDCSubjectClaimCustomTemplate) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, template) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_permissions_enterprise.go b/vendor/github.com/google/go-github/v57/github/actions_permissions_enterprise.go deleted file mode 100644 index 7e10444a..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_permissions_enterprise.go +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ActionsEnabledOnEnterpriseRepos represents all the repositories in an enterprise for which Actions is enabled. -type ActionsEnabledOnEnterpriseRepos struct { - TotalCount int `json:"total_count"` - Organizations []*Organization `json:"organizations"` -} - -// ActionsPermissionsEnterprise represents a policy for allowed actions in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions -type ActionsPermissionsEnterprise struct { - EnabledOrganizations *string `json:"enabled_organizations,omitempty"` - AllowedActions *string `json:"allowed_actions,omitempty"` - SelectedActionsURL *string `json:"selected_actions_url,omitempty"` -} - -func (a ActionsPermissionsEnterprise) String() string { - return Stringify(a) -} - -// GetActionsPermissionsInEnterprise gets the GitHub Actions permissions policy for an enterprise. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#get-github-actions-permissions-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/permissions -func (s *ActionsService) GetActionsPermissionsInEnterprise(ctx context.Context, enterprise string) (*ActionsPermissionsEnterprise, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions", enterprise) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - permissions := new(ActionsPermissionsEnterprise) - resp, err := s.client.Do(ctx, req, permissions) - if err != nil { - return nil, resp, err - } - - return permissions, resp, nil -} - -// EditActionsPermissionsInEnterprise sets the permissions policy in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#set-github-actions-permissions-for-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/permissions -func (s *ActionsService) EditActionsPermissionsInEnterprise(ctx context.Context, enterprise string, actionsPermissionsEnterprise ActionsPermissionsEnterprise) (*ActionsPermissionsEnterprise, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions", enterprise) - req, err := s.client.NewRequest("PUT", u, actionsPermissionsEnterprise) - if err != nil { - return nil, nil, err - } - - p := new(ActionsPermissionsEnterprise) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// ListEnabledOrgsInEnterprise lists the selected organizations that are enabled for GitHub Actions in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#list-selected-organizations-enabled-for-github-actions-in-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/permissions/organizations -func (s *ActionsService) ListEnabledOrgsInEnterprise(ctx context.Context, owner string, opts *ListOptions) (*ActionsEnabledOnEnterpriseRepos, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations", owner) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - orgs := &ActionsEnabledOnEnterpriseRepos{} - resp, err := s.client.Do(ctx, req, orgs) - if err != nil { - return nil, resp, err - } - - return orgs, resp, nil -} - -// SetEnabledOrgsInEnterprise replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#set-selected-organizations-enabled-for-github-actions-in-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/permissions/organizations -func (s *ActionsService) SetEnabledOrgsInEnterprise(ctx context.Context, owner string, organizationIDs []int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations", owner) - - req, err := s.client.NewRequest("PUT", u, struct { - IDs []int64 `json:"selected_organization_ids"` - }{IDs: organizationIDs}) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// AddEnabledOrgInEnterprise adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#enable-a-selected-organization-for-github-actions-in-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id} -func (s *ActionsService) AddEnabledOrgInEnterprise(ctx context.Context, owner string, organizationID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations/%v", owner, organizationID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// RemoveEnabledOrgInEnterprise removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#disable-a-selected-organization-for-github-actions-in-an-enterprise -// -//meta:operation DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id} -func (s *ActionsService) RemoveEnabledOrgInEnterprise(ctx context.Context, owner string, organizationID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations/%v", owner, organizationID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// GetActionsAllowedInEnterprise gets the actions that are allowed in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/permissions/selected-actions -func (s *ActionsService) GetActionsAllowedInEnterprise(ctx context.Context, enterprise string) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/selected-actions", enterprise) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionsAllowed := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, actionsAllowed) - if err != nil { - return nil, resp, err - } - - return actionsAllowed, resp, nil -} - -// EditActionsAllowedInEnterprise sets the actions that are allowed in an enterprise. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/permissions/selected-actions -func (s *ActionsService) EditActionsAllowedInEnterprise(ctx context.Context, enterprise string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/selected-actions", enterprise) - req, err := s.client.NewRequest("PUT", u, actionsAllowed) - if err != nil { - return nil, nil, err - } - - p := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_permissions_orgs.go b/vendor/github.com/google/go-github/v57/github/actions_permissions_orgs.go deleted file mode 100644 index 1a31e4c6..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_permissions_orgs.go +++ /dev/null @@ -1,220 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ActionsPermissions represents a policy for repositories and allowed actions in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions -type ActionsPermissions struct { - EnabledRepositories *string `json:"enabled_repositories,omitempty"` - AllowedActions *string `json:"allowed_actions,omitempty"` - SelectedActionsURL *string `json:"selected_actions_url,omitempty"` -} - -func (a ActionsPermissions) String() string { - return Stringify(a) -} - -// ActionsEnabledOnOrgRepos represents all the repositories in an organization for which Actions is enabled. -type ActionsEnabledOnOrgRepos struct { - TotalCount int `json:"total_count"` - Repositories []*Repository `json:"repositories"` -} - -// ActionsAllowed represents selected actions that are allowed. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions -type ActionsAllowed struct { - GithubOwnedAllowed *bool `json:"github_owned_allowed,omitempty"` - VerifiedAllowed *bool `json:"verified_allowed,omitempty"` - PatternsAllowed []string `json:"patterns_allowed,omitempty"` -} - -func (a ActionsAllowed) String() string { - return Stringify(a) -} - -// GetActionsPermissions gets the GitHub Actions permissions policy for repositories and allowed actions in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-github-actions-permissions-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/permissions -func (s *ActionsService) GetActionsPermissions(ctx context.Context, org string) (*ActionsPermissions, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - permissions := new(ActionsPermissions) - resp, err := s.client.Do(ctx, req, permissions) - if err != nil { - return nil, resp, err - } - - return permissions, resp, nil -} - -// EditActionsPermissions sets the permissions policy for repositories and allowed actions in an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-github-actions-permissions-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions -func (s *ActionsService) EditActionsPermissions(ctx context.Context, org string, actionsPermissions ActionsPermissions) (*ActionsPermissions, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions", org) - req, err := s.client.NewRequest("PUT", u, actionsPermissions) - if err != nil { - return nil, nil, err - } - - p := new(ActionsPermissions) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// ListEnabledReposInOrg lists the selected repositories that are enabled for GitHub Actions in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#list-selected-repositories-enabled-for-github-actions-in-an-organization -// -//meta:operation GET /orgs/{org}/actions/permissions/repositories -func (s *ActionsService) ListEnabledReposInOrg(ctx context.Context, owner string, opts *ListOptions) (*ActionsEnabledOnOrgRepos, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/repositories", owner) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - repos := &ActionsEnabledOnOrgRepos{} - resp, err := s.client.Do(ctx, req, repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// SetEnabledReposInOrg replaces the list of selected repositories that are enabled for GitHub Actions in an organization.. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-selected-repositories-enabled-for-github-actions-in-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions/repositories -func (s *ActionsService) SetEnabledReposInOrg(ctx context.Context, owner string, repositoryIDs []int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/repositories", owner) - - req, err := s.client.NewRequest("PUT", u, struct { - IDs []int64 `json:"selected_repository_ids"` - }{IDs: repositoryIDs}) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// AddEnabledReposInOrg adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#enable-a-selected-repository-for-github-actions-in-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions/repositories/{repository_id} -func (s *ActionsService) AddEnabledReposInOrg(ctx context.Context, owner string, repositoryID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/repositories/%v", owner, repositoryID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// RemoveEnabledReposInOrg removes a single repository from the list of enabled repos for GitHub Actions in an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#disable-a-selected-repository-for-github-actions-in-an-organization -// -//meta:operation DELETE /orgs/{org}/actions/permissions/repositories/{repository_id} -func (s *ActionsService) RemoveEnabledReposInOrg(ctx context.Context, owner string, repositoryID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/repositories/%v", owner, repositoryID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// GetActionsAllowed gets the actions that are allowed in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/permissions/selected-actions -func (s *ActionsService) GetActionsAllowed(ctx context.Context, org string) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/selected-actions", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionsAllowed := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, actionsAllowed) - if err != nil { - return nil, resp, err - } - - return actionsAllowed, resp, nil -} - -// EditActionsAllowed sets the actions that are allowed in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions/selected-actions -func (s *ActionsService) EditActionsAllowed(ctx context.Context, org string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/selected-actions", org) - req, err := s.client.NewRequest("PUT", u, actionsAllowed) - if err != nil { - return nil, nil, err - } - - p := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_required_workflows.go b/vendor/github.com/google/go-github/v57/github/actions_required_workflows.go deleted file mode 100644 index b89741a8..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_required_workflows.go +++ /dev/null @@ -1,267 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// OrgRequiredWorkflow represents a required workflow object at the org level. -type OrgRequiredWorkflow struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - Scope *string `json:"scope,omitempty"` - Ref *string `json:"ref,omitempty"` - State *string `json:"state,omitempty"` - SelectedRepositoriesURL *string `json:"selected_repositories_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Repository *Repository `json:"repository,omitempty"` -} - -// OrgRequiredWorkflows represents the required workflows for the org. 
-type OrgRequiredWorkflows struct { - TotalCount *int `json:"total_count,omitempty"` - RequiredWorkflows []*OrgRequiredWorkflow `json:"required_workflows,omitempty"` -} - -// CreateUpdateRequiredWorkflowOptions represents the input object used to create or update required workflows. -type CreateUpdateRequiredWorkflowOptions struct { - WorkflowFilePath *string `json:"workflow_file_path,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - Scope *string `json:"scope,omitempty"` - SelectedRepositoryIDs *SelectedRepoIDs `json:"selected_repository_ids,omitempty"` -} - -// RequiredWorkflowSelectedRepos represents the repos that a required workflow is applied to. -type RequiredWorkflowSelectedRepos struct { - TotalCount *int `json:"total_count,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -// RepoRequiredWorkflow represents a required workflow object at the repo level. -type RepoRequiredWorkflow struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - State *string `json:"state,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - BadgeURL *string `json:"badge_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - SourceRepository *Repository `json:"source_repository,omitempty"` -} - -// RepoRequiredWorkflows represents the required workflows for a repo. -type RepoRequiredWorkflows struct { - TotalCount *int `json:"total_count,omitempty"` - RequiredWorkflows []*RepoRequiredWorkflow `json:"required_workflows,omitempty"` -} - -// ListOrgRequiredWorkflows lists the RequiredWorkflows for an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows -func (s *ActionsService) ListOrgRequiredWorkflows(ctx context.Context, org string, opts *ListOptions) (*OrgRequiredWorkflows, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows", org) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflows := new(OrgRequiredWorkflows) - resp, err := s.client.Do(ctx, req, &requiredWorkflows) - if err != nil { - return nil, resp, err - } - - return requiredWorkflows, resp, nil -} - -// CreateRequiredWorkflow creates the required workflow in an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation POST /orgs/{org}/actions/required_workflows -func (s *ActionsService) CreateRequiredWorkflow(ctx context.Context, org string, createRequiredWorkflowOptions *CreateUpdateRequiredWorkflowOptions) (*OrgRequiredWorkflow, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows", org) - req, err := s.client.NewRequest("POST", url, createRequiredWorkflowOptions) - if err != nil { - return nil, nil, err - } - - orgRequiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, orgRequiredWorkflow) - if err != nil { - return nil, resp, err - } - - return orgRequiredWorkflow, resp, nil -} - -// GetRequiredWorkflowByID get the RequiredWorkflows for an org by its ID. 
-// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) GetRequiredWorkflowByID(ctx context.Context, owner string, requiredWorkflowID int64) (*OrgRequiredWorkflow, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", owner, requiredWorkflowID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, &requiredWorkflow) - if err != nil { - return nil, resp, err - } - - return requiredWorkflow, resp, nil -} - -// UpdateRequiredWorkflow updates a required workflow in an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PATCH /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) UpdateRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID int64, updateRequiredWorkflowOptions *CreateUpdateRequiredWorkflowOptions) (*OrgRequiredWorkflow, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", org, requiredWorkflowID) - req, err := s.client.NewRequest("PATCH", url, updateRequiredWorkflowOptions) - if err != nil { - return nil, nil, err - } - - orgRequiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, orgRequiredWorkflow) - if err != nil { - return nil, resp, err - } - - return orgRequiredWorkflow, resp, nil -} - -// DeleteRequiredWorkflow deletes a required workflow in an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation DELETE /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) DeleteRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", org, requiredWorkflowID) - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListRequiredWorkflowSelectedRepos lists the Repositories selected for a workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows/{workflow_id}/repositories -func (s *ActionsService) ListRequiredWorkflowSelectedRepos(ctx context.Context, org string, requiredWorkflowID int64, opts *ListOptions) (*RequiredWorkflowSelectedRepos, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories", org, requiredWorkflowID) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflowRepos := new(RequiredWorkflowSelectedRepos) - resp, err := s.client.Do(ctx, req, &requiredWorkflowRepos) - if err != nil { - return nil, resp, err - } - - return requiredWorkflowRepos, resp, nil -} - -// SetRequiredWorkflowSelectedRepos sets the Repositories selected for a workflow. 
-// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PUT /orgs/{org}/actions/required_workflows/{workflow_id}/repositories -func (s *ActionsService) SetRequiredWorkflowSelectedRepos(ctx context.Context, org string, requiredWorkflowID int64, ids SelectedRepoIDs) (*Response, error) { - type repoIDs struct { - SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` - } - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories", org, requiredWorkflowID) - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddRepoToRequiredWorkflow adds the Repository to a required workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PUT /orgs/{org}/actions/required_workflows/{workflow_id}/repositories/{repository_id} -func (s *ActionsService) AddRepoToRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID, repoID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories/%v", org, requiredWorkflowID, repoID) - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// RemoveRepoFromRequiredWorkflow removes the Repository from a required workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation DELETE /orgs/{org}/actions/required_workflows/{workflow_id}/repositories/{repository_id} -func (s *ActionsService) RemoveRepoFromRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID, repoID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories/%v", org, requiredWorkflowID, repoID) - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListRepoRequiredWorkflows lists the RequiredWorkflows for a repo. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /repos/{owner}/{repo}/actions/required_workflows -func (s *ActionsService) ListRepoRequiredWorkflows(ctx context.Context, owner, repo string, opts *ListOptions) (*RepoRequiredWorkflows, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/required_workflows", owner, repo) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflows := new(RepoRequiredWorkflows) - resp, err := s.client.Do(ctx, req, &requiredWorkflows) - if err != nil { - return nil, resp, err - } - - return requiredWorkflows, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_runner_groups.go b/vendor/github.com/google/go-github/v57/github/actions_runner_groups.go deleted file mode 100644 index a1f453f3..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_runner_groups.go +++ /dev/null @@ -1,337 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RunnerGroup represents a self-hosted runner group configured in an organization. 
-type RunnerGroup struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - Default *bool `json:"default,omitempty"` - SelectedRepositoriesURL *string `json:"selected_repositories_url,omitempty"` - RunnersURL *string `json:"runners_url,omitempty"` - Inherited *bool `json:"inherited,omitempty"` - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - SelectedWorkflows []string `json:"selected_workflows,omitempty"` - WorkflowRestrictionsReadOnly *bool `json:"workflow_restrictions_read_only,omitempty"` -} - -// RunnerGroups represents a collection of self-hosted runner groups configured for an organization. -type RunnerGroups struct { - TotalCount int `json:"total_count"` - RunnerGroups []*RunnerGroup `json:"runner_groups"` -} - -// CreateRunnerGroupRequest represents a request to create a Runner group for an organization. -type CreateRunnerGroupRequest struct { - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - // List of repository IDs that can access the runner group. - SelectedRepositoryIDs []int64 `json:"selected_repository_ids,omitempty"` - // Runners represent a list of runner IDs to add to the runner group. - Runners []int64 `json:"runners,omitempty"` - // If set to True, public repos can use this runner group - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - // If true, the runner group will be restricted to running only the workflows specified in the SelectedWorkflows slice. - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - // List of workflows the runner group should be allowed to run. This setting will be ignored unless RestrictedToWorkflows is set to true. - SelectedWorkflows []string `json:"selected_workflows,omitempty"` -} - -// UpdateRunnerGroupRequest represents a request to update a Runner group for an organization. -type UpdateRunnerGroupRequest struct { - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - SelectedWorkflows []string `json:"selected_workflows,omitempty"` -} - -// SetRepoAccessRunnerGroupRequest represents a request to replace the list of repositories -// that can access a self-hosted runner group configured in an organization. -type SetRepoAccessRunnerGroupRequest struct { - // Updated list of repository IDs that should be given access to the runner group. - SelectedRepositoryIDs []int64 `json:"selected_repository_ids"` -} - -// SetRunnerGroupRunnersRequest represents a request to replace the list of -// self-hosted runners that are part of an organization runner group. -type SetRunnerGroupRunnersRequest struct { - // Updated list of runner IDs that should be given access to the runner group. - Runners []int64 `json:"runners"` -} - -// ListOrgRunnerGroupOptions extend ListOptions to have the optional parameters VisibleToRepository. -type ListOrgRunnerGroupOptions struct { - ListOptions - - // Only return runner groups that are allowed to be used by this repository. - VisibleToRepository string `url:"visible_to_repository,omitempty"` -} - -// ListOrganizationRunnerGroups lists all self-hosted runner groups configured in an organization. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-self-hosted-runner-groups-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runner-groups -func (s *ActionsService) ListOrganizationRunnerGroups(ctx context.Context, org string, opts *ListOrgRunnerGroupOptions) (*RunnerGroups, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - groups := &RunnerGroups{} - resp, err := s.client.Do(ctx, req, &groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// GetOrganizationRunnerGroup gets a specific self-hosted runner group for an organization using its RunnerGroup ID. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#get-a-self-hosted-runner-group-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runner-groups/{runner_group_id} -func (s *ActionsService) GetOrganizationRunnerGroup(ctx context.Context, org string, groupID int64) (*RunnerGroup, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v", org, groupID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(RunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// DeleteOrganizationRunnerGroup deletes a self-hosted runner group from an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#delete-a-self-hosted-runner-group-from-an-organization -// -//meta:operation DELETE /orgs/{org}/actions/runner-groups/{runner_group_id} -func (s *ActionsService) DeleteOrganizationRunnerGroup(ctx context.Context, org string, groupID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v", org, groupID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CreateOrganizationRunnerGroup creates a new self-hosted runner group for an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#create-a-self-hosted-runner-group-for-an-organization -// -//meta:operation POST /orgs/{org}/actions/runner-groups -func (s *ActionsService) CreateOrganizationRunnerGroup(ctx context.Context, org string, createReq CreateRunnerGroupRequest) (*RunnerGroup, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups", org) - req, err := s.client.NewRequest("POST", u, createReq) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(RunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// UpdateOrganizationRunnerGroup updates a self-hosted runner group for an organization. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#update-a-self-hosted-runner-group-for-an-organization -// -//meta:operation PATCH /orgs/{org}/actions/runner-groups/{runner_group_id} -func (s *ActionsService) UpdateOrganizationRunnerGroup(ctx context.Context, org string, groupID int64, updateReq UpdateRunnerGroupRequest) (*RunnerGroup, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v", org, groupID) - req, err := s.client.NewRequest("PATCH", u, updateReq) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(RunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// ListRepositoryAccessRunnerGroup lists the repositories with access to a self-hosted runner group configured in an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-repository-access-to-a-self-hosted-runner-group-in-an-organization -// -//meta:operation GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories -func (s *ActionsService) ListRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID int64, opts *ListOptions) (*ListRepositories, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories", org, groupID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - repos := &ListRepositories{} - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// SetRepositoryAccessRunnerGroup replaces the list of repositories that have access to a self-hosted runner group configured in an organization -// with a new List of repositories. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#set-repository-access-for-a-self-hosted-runner-group-in-an-organization -// -//meta:operation PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories -func (s *ActionsService) SetRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID int64, ids SetRepoAccessRunnerGroupRequest) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories", org, groupID) - - req, err := s.client.NewRequest("PUT", u, ids) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddRepositoryAccessRunnerGroup adds a repository to the list of selected repositories that can access a self-hosted runner group. -// The runner group must have visibility set to 'selected'. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#add-repository-access-to-a-self-hosted-runner-group-in-an-organization -// -//meta:operation PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id} -func (s *ActionsService) AddRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID, repoID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories/%v", org, groupID, repoID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveRepositoryAccessRunnerGroup removes a repository from the list of selected repositories that can access a self-hosted runner group. -// The runner group must have visibility set to 'selected'. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#remove-repository-access-to-a-self-hosted-runner-group-in-an-organization -// -//meta:operation DELETE /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id} -func (s *ActionsService) RemoveRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID, repoID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories/%v", org, groupID, repoID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListRunnerGroupRunners lists self-hosted runners that are in a specific organization group. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-self-hosted-runners-in-a-group-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners -func (s *ActionsService) ListRunnerGroupRunners(ctx context.Context, org string, groupID int64, opts *ListOptions) (*Runners, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners", org, groupID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runners := &Runners{} - resp, err := s.client.Do(ctx, req, &runners) - if err != nil { - return nil, resp, err - } - - return runners, resp, nil -} - -// SetRunnerGroupRunners replaces the list of self-hosted runners that are part of an organization runner group -// with a new list of runners. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#set-self-hosted-runners-in-a-group-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/runners -func (s *ActionsService) SetRunnerGroupRunners(ctx context.Context, org string, groupID int64, ids SetRunnerGroupRunnersRequest) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners", org, groupID) - - req, err := s.client.NewRequest("PUT", u, ids) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddRunnerGroupRunners adds a self-hosted runner to a runner group configured in an organization. 
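The runner group endpoints removed in the hunk above return paginated results and leave pagination to the caller. As an illustrative aside (not part of the patch itself), here is a minimal sketch of draining those pages with ListRunnerGroupRunners; it assumes an already-configured *github.Client and placeholder org/group values.

package example

import (
	"context"

	"github.com/google/go-github/v57/github"
)

// runnersInGroup pages through all self-hosted runners registered in an
// organization runner group. org and groupID are placeholder assumptions.
func runnersInGroup(ctx context.Context, client *github.Client, org string, groupID int64) ([]*github.Runner, error) {
	var all []*github.Runner
	opts := &github.ListOptions{PerPage: 100}
	for {
		runners, resp, err := client.Actions.ListRunnerGroupRunners(ctx, org, groupID, opts)
		if err != nil {
			return nil, err
		}
		all = append(all, runners.Runners...)
		if resp.NextPage == 0 {
			break
		}
		opts.Page = resp.NextPage
	}
	return all, nil
}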
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#add-a-self-hosted-runner-to-a-group-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id} -func (s *ActionsService) AddRunnerGroupRunners(ctx context.Context, org string, groupID, runnerID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners/%v", org, groupID, runnerID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveRunnerGroupRunners removes a self-hosted runner from a group configured in an organization. -// The runner is then returned to the default group. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#remove-a-self-hosted-runner-from-a-group-for-an-organization -// -//meta:operation DELETE /orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id} -func (s *ActionsService) RemoveRunnerGroupRunners(ctx context.Context, org string, groupID, runnerID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners/%v", org, groupID, runnerID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_runners.go b/vendor/github.com/google/go-github/v57/github/actions_runners.go deleted file mode 100644 index 90cf5804..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_runners.go +++ /dev/null @@ -1,371 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RunnerApplicationDownload represents a binary for the self-hosted runner application that can be downloaded. -type RunnerApplicationDownload struct { - OS *string `json:"os,omitempty"` - Architecture *string `json:"architecture,omitempty"` - DownloadURL *string `json:"download_url,omitempty"` - Filename *string `json:"filename,omitempty"` - TempDownloadToken *string `json:"temp_download_token,omitempty"` - SHA256Checksum *string `json:"sha256_checksum,omitempty"` -} - -// ListRunnerApplicationDownloads lists self-hosted runner application binaries that can be downloaded and run. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#list-runner-applications-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/runners/downloads -func (s *ActionsService) ListRunnerApplicationDownloads(ctx context.Context, owner, repo string) ([]*RunnerApplicationDownload, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/downloads", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rads []*RunnerApplicationDownload - resp, err := s.client.Do(ctx, req, &rads) - if err != nil { - return nil, resp, err - } - - return rads, resp, nil -} - -// GenerateJITConfigRequest specifies body parameters to GenerateRepoJITConfig. -type GenerateJITConfigRequest struct { - Name string `json:"name"` - RunnerGroupID int64 `json:"runner_group_id"` - WorkFolder *string `json:"work_folder,omitempty"` - - // Labels represents the names of the custom labels to add to the runner. - // Minimum items: 1. 
Maximum items: 100. - Labels []string `json:"labels"` -} - -// JITRunnerConfig represents encoded JIT configuration that can be used to bootstrap a self-hosted runner. -type JITRunnerConfig struct { - Runner *Runner `json:"runner,omitempty"` - EncodedJITConfig *string `json:"encoded_jit_config,omitempty"` -} - -// GenerateOrgJITConfig generate a just-in-time configuration for an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-configuration-for-a-just-in-time-runner-for-an-organization -// -//meta:operation POST /orgs/{org}/actions/runners/generate-jitconfig -func (s *ActionsService) GenerateOrgJITConfig(ctx context.Context, owner string, request *GenerateJITConfigRequest) (*JITRunnerConfig, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/generate-jitconfig", owner) - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - jitConfig := new(JITRunnerConfig) - resp, err := s.client.Do(ctx, req, jitConfig) - if err != nil { - return nil, resp, err - } - - return jitConfig, resp, nil -} - -// GenerateRepoJITConfig generates a just-in-time configuration for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-configuration-for-a-just-in-time-runner-for-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig -func (s *ActionsService) GenerateRepoJITConfig(ctx context.Context, owner, repo string, request *GenerateJITConfigRequest) (*JITRunnerConfig, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/generate-jitconfig", owner, repo) - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - jitConfig := new(JITRunnerConfig) - resp, err := s.client.Do(ctx, req, jitConfig) - if err != nil { - return nil, resp, err - } - - return jitConfig, resp, nil -} - -// RegistrationToken represents a token that can be used to add a self-hosted runner to a repository. -type RegistrationToken struct { - Token *string `json:"token,omitempty"` - ExpiresAt *Timestamp `json:"expires_at,omitempty"` -} - -// CreateRegistrationToken creates a token that can be used to add a self-hosted runner. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-a-registration-token-for-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/actions/runners/registration-token -func (s *ActionsService) CreateRegistrationToken(ctx context.Context, owner, repo string) (*RegistrationToken, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/registration-token", owner, repo) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - registrationToken := new(RegistrationToken) - resp, err := s.client.Do(ctx, req, registrationToken) - if err != nil { - return nil, resp, err - } - - return registrationToken, resp, nil -} - -// Runner represents a self-hosted runner registered with a repository. -type Runner struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - OS *string `json:"os,omitempty"` - Status *string `json:"status,omitempty"` - Busy *bool `json:"busy,omitempty"` - Labels []*RunnerLabels `json:"labels,omitempty"` -} - -// RunnerLabels represents a collection of labels attached to each runner. 
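GenerateRepoJITConfig (and its org counterpart) returns an encoded blob that a runner consumes directly, so no registration token round-trip is needed. A hedged sketch of requesting a JIT config with the request type shown above; the owner, repo, runner name, group ID and labels are placeholder assumptions, not values taken from this patch.

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v57/github"
)

// bootstrapJITRunner asks GitHub for a just-in-time runner configuration for a
// repository. The concrete names and labels below are illustrative only.
func bootstrapJITRunner(ctx context.Context, client *github.Client, owner, repo string) (string, error) {
	req := &github.GenerateJITConfigRequest{
		Name:          "garm-jit-runner-01",
		RunnerGroupID: 1, // assumed: commonly the ID of the default runner group
		Labels:        []string{"self-hosted", "linux", "x64"},
	}

	cfg, _, err := client.Actions.GenerateRepoJITConfig(ctx, owner, repo, req)
	if err != nil {
		return "", fmt.Errorf("generating JIT config: %w", err)
	}

	// The encoded blob is handed to the runner binary; no registration token is involved.
	return cfg.GetEncodedJITConfig(), nil
}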
-type RunnerLabels struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Type *string `json:"type,omitempty"` -} - -// Runners represents a collection of self-hosted runners for a repository. -type Runners struct { - TotalCount int `json:"total_count"` - Runners []*Runner `json:"runners"` -} - -// ListRunners lists all the self-hosted runners for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#list-self-hosted-runners-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/runners -func (s *ActionsService) ListRunners(ctx context.Context, owner, repo string, opts *ListOptions) (*Runners, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runners := &Runners{} - resp, err := s.client.Do(ctx, req, &runners) - if err != nil { - return nil, resp, err - } - - return runners, resp, nil -} - -// GetRunner gets a specific self-hosted runner for a repository using its runner ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#get-a-self-hosted-runner-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/runners/{runner_id} -func (s *ActionsService) GetRunner(ctx context.Context, owner, repo string, runnerID int64) (*Runner, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/%v", owner, repo, runnerID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runner := new(Runner) - resp, err := s.client.Do(ctx, req, runner) - if err != nil { - return nil, resp, err - } - - return runner, resp, nil -} - -// RemoveToken represents a token that can be used to remove a self-hosted runner from a repository. -type RemoveToken struct { - Token *string `json:"token,omitempty"` - ExpiresAt *Timestamp `json:"expires_at,omitempty"` -} - -// CreateRemoveToken creates a token that can be used to remove a self-hosted runner from a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-a-remove-token-for-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/actions/runners/remove-token -func (s *ActionsService) CreateRemoveToken(ctx context.Context, owner, repo string) (*RemoveToken, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/remove-token", owner, repo) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - removeToken := new(RemoveToken) - resp, err := s.client.Do(ctx, req, removeToken) - if err != nil { - return nil, resp, err - } - - return removeToken, resp, nil -} - -// RemoveRunner forces the removal of a self-hosted runner in a repository using the runner id. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#delete-a-self-hosted-runner-from-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/runners/{runner_id} -func (s *ActionsService) RemoveRunner(ctx context.Context, owner, repo string, runnerID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/%v", owner, repo, runnerID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListOrganizationRunnerApplicationDownloads lists self-hosted runner application binaries that can be downloaded and run. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#list-runner-applications-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runners/downloads -func (s *ActionsService) ListOrganizationRunnerApplicationDownloads(ctx context.Context, owner string) ([]*RunnerApplicationDownload, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/downloads", owner) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rads []*RunnerApplicationDownload - resp, err := s.client.Do(ctx, req, &rads) - if err != nil { - return nil, resp, err - } - - return rads, resp, nil -} - -// CreateOrganizationRegistrationToken creates a token that can be used to add a self-hosted runner to an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-a-registration-token-for-an-organization -// -//meta:operation POST /orgs/{org}/actions/runners/registration-token -func (s *ActionsService) CreateOrganizationRegistrationToken(ctx context.Context, owner string) (*RegistrationToken, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/registration-token", owner) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - registrationToken := new(RegistrationToken) - resp, err := s.client.Do(ctx, req, registrationToken) - if err != nil { - return nil, resp, err - } - - return registrationToken, resp, nil -} - -// ListOrganizationRunners lists all the self-hosted runners for an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#list-self-hosted-runners-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runners -func (s *ActionsService) ListOrganizationRunners(ctx context.Context, owner string, opts *ListOptions) (*Runners, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners", owner) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runners := &Runners{} - resp, err := s.client.Do(ctx, req, &runners) - if err != nil { - return nil, resp, err - } - - return runners, resp, nil -} - -// GetOrganizationRunner gets a specific self-hosted runner for an organization using its runner ID. 
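The registration-token flow shown above is the older way to enroll a runner: mint a short-lived token, pass it to the runner's configuration step, then track the runner through the list endpoints. A small sketch of that flow at the organization level, assuming a configured client and a placeholder org name.

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v57/github"
)

// enrollAndList mints a registration token for an organization and then lists
// the runners currently registered there. org is a placeholder assumption.
func enrollAndList(ctx context.Context, client *github.Client, org string) error {
	token, _, err := client.Actions.CreateOrganizationRegistrationToken(ctx, org)
	if err != nil {
		return fmt.Errorf("creating registration token: %w", err)
	}
	// token.GetToken() would be passed to the runner's config step on the host.
	fmt.Println("registration token expires at:", token.GetExpiresAt())

	runners, _, err := client.Actions.ListOrganizationRunners(ctx, org, &github.ListOptions{PerPage: 100})
	if err != nil {
		return fmt.Errorf("listing runners: %w", err)
	}
	for _, r := range runners.Runners {
		fmt.Printf("%d %s status=%s busy=%v\n", r.GetID(), r.GetName(), r.GetStatus(), r.GetBusy())
	}
	return nil
}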
-// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#get-a-self-hosted-runner-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runners/{runner_id} -func (s *ActionsService) GetOrganizationRunner(ctx context.Context, owner string, runnerID int64) (*Runner, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/%v", owner, runnerID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runner := new(Runner) - resp, err := s.client.Do(ctx, req, runner) - if err != nil { - return nil, resp, err - } - - return runner, resp, nil -} - -// CreateOrganizationRemoveToken creates a token that can be used to remove a self-hosted runner from an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-a-remove-token-for-an-organization -// -//meta:operation POST /orgs/{org}/actions/runners/remove-token -func (s *ActionsService) CreateOrganizationRemoveToken(ctx context.Context, owner string) (*RemoveToken, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/remove-token", owner) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - removeToken := new(RemoveToken) - resp, err := s.client.Do(ctx, req, removeToken) - if err != nil { - return nil, resp, err - } - - return removeToken, resp, nil -} - -// RemoveOrganizationRunner forces the removal of a self-hosted runner from an organization using the runner id. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#delete-a-self-hosted-runner-from-an-organization -// -//meta:operation DELETE /orgs/{org}/actions/runners/{runner_id} -func (s *ActionsService) RemoveOrganizationRunner(ctx context.Context, owner string, runnerID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/%v", owner, runnerID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_secrets.go b/vendor/github.com/google/go-github/v57/github/actions_secrets.go deleted file mode 100644 index 2d4ba98d..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_secrets.go +++ /dev/null @@ -1,396 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" - "strconv" -) - -// PublicKey represents the public key that should be used to encrypt secrets. -type PublicKey struct { - KeyID *string `json:"key_id"` - Key *string `json:"key"` -} - -// UnmarshalJSON implements the json.Unmarshaler interface. -// This ensures GitHub Enterprise versions which return a numeric key id -// do not error out when unmarshaling. 
-func (p *PublicKey) UnmarshalJSON(data []byte) error { - var pk struct { - KeyID interface{} `json:"key_id"` - Key *string `json:"key"` - } - - if err := json.Unmarshal(data, &pk); err != nil { - return err - } - - p.Key = pk.Key - - switch v := pk.KeyID.(type) { - case nil: - return nil - case string: - p.KeyID = &v - case float64: - p.KeyID = String(strconv.FormatFloat(v, 'f', -1, 64)) - default: - return fmt.Errorf("unable to unmarshal %T as a string", v) - } - - return nil -} - -func (s *ActionsService) getPublicKey(ctx context.Context, url string) (*PublicKey, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - pubKey := new(PublicKey) - resp, err := s.client.Do(ctx, req, pubKey) - if err != nil { - return nil, resp, err - } - - return pubKey, resp, nil -} - -// GetRepoPublicKey gets a public key that should be used for secret encryption. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-a-repository-public-key -// -//meta:operation GET /repos/{owner}/{repo}/actions/secrets/public-key -func (s *ActionsService) GetRepoPublicKey(ctx context.Context, owner, repo string) (*PublicKey, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/secrets/public-key", owner, repo) - return s.getPublicKey(ctx, url) -} - -// GetOrgPublicKey gets a public key that should be used for secret encryption. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-an-organization-public-key -// -//meta:operation GET /orgs/{org}/actions/secrets/public-key -func (s *ActionsService) GetOrgPublicKey(ctx context.Context, org string) (*PublicKey, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/public-key", org) - return s.getPublicKey(ctx, url) -} - -// GetEnvPublicKey gets a public key that should be used for secret encryption. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-an-environment-public-key -// -//meta:operation GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key -func (s *ActionsService) GetEnvPublicKey(ctx context.Context, repoID int, env string) (*PublicKey, *Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/secrets/public-key", repoID, env) - return s.getPublicKey(ctx, url) -} - -// Secret represents a repository action secret. -type Secret struct { - Name string `json:"name"` - CreatedAt Timestamp `json:"created_at"` - UpdatedAt Timestamp `json:"updated_at"` - Visibility string `json:"visibility,omitempty"` - SelectedRepositoriesURL string `json:"selected_repositories_url,omitempty"` -} - -// Secrets represents one item from the ListSecrets response. -type Secrets struct { - TotalCount int `json:"total_count"` - Secrets []*Secret `json:"secrets"` -} - -func (s *ActionsService) listSecrets(ctx context.Context, url string, opts *ListOptions) (*Secrets, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - secrets := new(Secrets) - resp, err := s.client.Do(ctx, req, &secrets) - if err != nil { - return nil, resp, err - } - - return secrets, resp, nil -} - -// ListRepoSecrets lists all secrets available in a repository -// without revealing their encrypted values. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#list-repository-secrets -// -//meta:operation GET /repos/{owner}/{repo}/actions/secrets -func (s *ActionsService) ListRepoSecrets(ctx context.Context, owner, repo string, opts *ListOptions) (*Secrets, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/secrets", owner, repo) - return s.listSecrets(ctx, url, opts) -} - -// ListOrgSecrets lists all secrets available in an organization -// without revealing their encrypted values. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#list-organization-secrets -// -//meta:operation GET /orgs/{org}/actions/secrets -func (s *ActionsService) ListOrgSecrets(ctx context.Context, org string, opts *ListOptions) (*Secrets, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets", org) - return s.listSecrets(ctx, url, opts) -} - -// ListEnvSecrets lists all secrets available in an environment. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#list-environment-secrets -// -//meta:operation GET /repositories/{repository_id}/environments/{environment_name}/secrets -func (s *ActionsService) ListEnvSecrets(ctx context.Context, repoID int, env string, opts *ListOptions) (*Secrets, *Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/secrets", repoID, env) - return s.listSecrets(ctx, url, opts) -} - -func (s *ActionsService) getSecret(ctx context.Context, url string) (*Secret, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - secret := new(Secret) - resp, err := s.client.Do(ctx, req, secret) - if err != nil { - return nil, resp, err - } - - return secret, resp, nil -} - -// GetRepoSecret gets a single repository secret without revealing its encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-a-repository-secret -// -//meta:operation GET /repos/{owner}/{repo}/actions/secrets/{secret_name} -func (s *ActionsService) GetRepoSecret(ctx context.Context, owner, repo, name string) (*Secret, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/secrets/%v", owner, repo, name) - return s.getSecret(ctx, url) -} - -// GetOrgSecret gets a single organization secret without revealing its encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-an-organization-secret -// -//meta:operation GET /orgs/{org}/actions/secrets/{secret_name} -func (s *ActionsService) GetOrgSecret(ctx context.Context, org, name string) (*Secret, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v", org, name) - return s.getSecret(ctx, url) -} - -// GetEnvSecret gets a single environment secret without revealing its encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-an-environment-secret -// -//meta:operation GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name} -func (s *ActionsService) GetEnvSecret(ctx context.Context, repoID int, env, secretName string) (*Secret, *Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/secrets/%v", repoID, env, secretName) - return s.getSecret(ctx, url) -} - -// SelectedRepoIDs are the repository IDs that have access to the actions secrets. -type SelectedRepoIDs []int64 - -// EncryptedSecret represents a secret that is encrypted using a public key. 
-// -// The value of EncryptedValue must be your secret, encrypted with -// LibSodium (see documentation here: https://libsodium.gitbook.io/doc/bindings_for_other_languages) -// using the public key retrieved using the GetPublicKey method. -type EncryptedSecret struct { - Name string `json:"-"` - KeyID string `json:"key_id"` - EncryptedValue string `json:"encrypted_value"` - Visibility string `json:"visibility,omitempty"` - SelectedRepositoryIDs SelectedRepoIDs `json:"selected_repository_ids,omitempty"` -} - -func (s *ActionsService) putSecret(ctx context.Context, url string, eSecret *EncryptedSecret) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, eSecret) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CreateOrUpdateRepoSecret creates or updates a repository secret with an encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#create-or-update-a-repository-secret -// -//meta:operation PUT /repos/{owner}/{repo}/actions/secrets/{secret_name} -func (s *ActionsService) CreateOrUpdateRepoSecret(ctx context.Context, owner, repo string, eSecret *EncryptedSecret) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/secrets/%v", owner, repo, eSecret.Name) - return s.putSecret(ctx, url, eSecret) -} - -// CreateOrUpdateOrgSecret creates or updates an organization secret with an encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#create-or-update-an-organization-secret -// -//meta:operation PUT /orgs/{org}/actions/secrets/{secret_name} -func (s *ActionsService) CreateOrUpdateOrgSecret(ctx context.Context, org string, eSecret *EncryptedSecret) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v", org, eSecret.Name) - return s.putSecret(ctx, url, eSecret) -} - -// CreateOrUpdateEnvSecret creates or updates a single environment secret with an encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#create-or-update-an-environment-secret -// -//meta:operation PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name} -func (s *ActionsService) CreateOrUpdateEnvSecret(ctx context.Context, repoID int, env string, eSecret *EncryptedSecret) (*Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/secrets/%v", repoID, env, eSecret.Name) - return s.putSecret(ctx, url, eSecret) -} - -func (s *ActionsService) deleteSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteRepoSecret deletes a secret in a repository using the secret name. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#delete-a-repository-secret -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name} -func (s *ActionsService) DeleteRepoSecret(ctx context.Context, owner, repo, name string) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/secrets/%v", owner, repo, name) - return s.deleteSecret(ctx, url) -} - -// DeleteOrgSecret deletes a secret in an organization using the secret name. 
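CreateOrUpdateOrgSecret expects the value to already be sealed with the org public key (libsodium's sealed box), as the EncryptedSecret comment above notes. A sketch of that flow follows; it assumes golang.org/x/crypto/nacl/box for the sealed-box step and uses placeholder org and secret names with visibility set to "all".

package example

import (
	"context"
	"crypto/rand"
	"encoding/base64"
	"fmt"

	"github.com/google/go-github/v57/github"
	"golang.org/x/crypto/nacl/box"
)

// putOrgSecret encrypts value with the organization's public key and stores it
// under name. org and name are placeholder assumptions.
func putOrgSecret(ctx context.Context, client *github.Client, org, name, value string) error {
	pubKey, _, err := client.Actions.GetOrgPublicKey(ctx, org)
	if err != nil {
		return fmt.Errorf("fetching org public key: %w", err)
	}

	// The API returns the public key base64 encoded.
	decoded, err := base64.StdEncoding.DecodeString(pubKey.GetKey())
	if err != nil {
		return fmt.Errorf("decoding public key: %w", err)
	}
	var peer [32]byte
	copy(peer[:], decoded)

	// Sealed-box encryption, the Go analogue of libsodium's crypto_box_seal.
	sealed, err := box.SealAnonymous(nil, []byte(value), &peer, rand.Reader)
	if err != nil {
		return fmt.Errorf("encrypting secret: %w", err)
	}

	_, err = client.Actions.CreateOrUpdateOrgSecret(ctx, org, &github.EncryptedSecret{
		Name:           name,
		KeyID:          pubKey.GetKeyID(),
		EncryptedValue: base64.StdEncoding.EncodeToString(sealed),
		Visibility:     "all",
	})
	return err
}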
-// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#delete-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/actions/secrets/{secret_name} -func (s *ActionsService) DeleteOrgSecret(ctx context.Context, org, name string) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v", org, name) - return s.deleteSecret(ctx, url) -} - -// DeleteEnvSecret deletes a secret in an environment using the secret name. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#delete-an-environment-secret -// -//meta:operation DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name} -func (s *ActionsService) DeleteEnvSecret(ctx context.Context, repoID int, env, secretName string) (*Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/secrets/%v", repoID, env, secretName) - return s.deleteSecret(ctx, url) -} - -// SelectedReposList represents the list of repositories selected for an organization secret. -type SelectedReposList struct { - TotalCount *int `json:"total_count,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -func (s *ActionsService) listSelectedReposForSecret(ctx context.Context, url string, opts *ListOptions) (*SelectedReposList, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - result := new(SelectedReposList) - resp, err := s.client.Do(ctx, req, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// ListSelectedReposForOrgSecret lists all repositories that have access to a secret. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#list-selected-repositories-for-an-organization-secret -// -//meta:operation GET /orgs/{org}/actions/secrets/{secret_name}/repositories -func (s *ActionsService) ListSelectedReposForOrgSecret(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories", org, name) - return s.listSelectedReposForSecret(ctx, url, opts) -} - -func (s *ActionsService) setSelectedReposForSecret(ctx context.Context, url string, ids SelectedRepoIDs) (*Response, error) { - type repoIDs struct { - SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` - } - - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// SetSelectedReposForOrgSecret sets the repositories that have access to a secret. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#set-selected-repositories-for-an-organization-secret -// -//meta:operation PUT /orgs/{org}/actions/secrets/{secret_name}/repositories -func (s *ActionsService) SetSelectedReposForOrgSecret(ctx context.Context, org, name string, ids SelectedRepoIDs) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories", org, name) - return s.setSelectedReposForSecret(ctx, url, ids) -} - -func (s *ActionsService) addSelectedRepoToSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddSelectedRepoToOrgSecret adds a repository to an organization secret. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#add-selected-repository-to-an-organization-secret -// -//meta:operation PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id} -func (s *ActionsService) AddSelectedRepoToOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories/%v", org, name, *repo.ID) - return s.addSelectedRepoToSecret(ctx, url) -} - -func (s *ActionsService) removeSelectedRepoFromSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveSelectedRepoFromOrgSecret removes a repository from an organization secret. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#remove-selected-repository-from-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id} -func (s *ActionsService) RemoveSelectedRepoFromOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories/%v", org, name, *repo.ID) - return s.removeSelectedRepoFromSecret(ctx, url) -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_variables.go b/vendor/github.com/google/go-github/v57/github/actions_variables.go deleted file mode 100644 index 244d1590..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_variables.go +++ /dev/null @@ -1,331 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ActionsVariable represents a repository action variable. -type ActionsVariable struct { - Name string `json:"name"` - Value string `json:"value"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Visibility *string `json:"visibility,omitempty"` - // Used by ListOrgVariables and GetOrgVariables - SelectedRepositoriesURL *string `json:"selected_repositories_url,omitempty"` - // Used by UpdateOrgVariable and CreateOrgVariable - SelectedRepositoryIDs *SelectedRepoIDs `json:"selected_repository_ids,omitempty"` -} - -// ActionsVariables represents one item from the ListVariables response. -type ActionsVariables struct { - TotalCount int `json:"total_count"` - Variables []*ActionsVariable `json:"variables"` -} - -func (s *ActionsService) listVariables(ctx context.Context, url string, opts *ListOptions) (*ActionsVariables, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - variables := new(ActionsVariables) - resp, err := s.client.Do(ctx, req, &variables) - if err != nil { - return nil, resp, err - } - - return variables, resp, nil -} - -// ListRepoVariables lists all variables available in a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/variables#list-repository-variables -// -//meta:operation GET /repos/{owner}/{repo}/actions/variables -func (s *ActionsService) ListRepoVariables(ctx context.Context, owner, repo string, opts *ListOptions) (*ActionsVariables, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/variables", owner, repo) - return s.listVariables(ctx, url, opts) -} - -// ListOrgVariables lists all variables available in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#list-organization-variables -// -//meta:operation GET /orgs/{org}/actions/variables -func (s *ActionsService) ListOrgVariables(ctx context.Context, org string, opts *ListOptions) (*ActionsVariables, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables", org) - return s.listVariables(ctx, url, opts) -} - -// ListEnvVariables lists all variables available in an environment. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#list-environment-variables -// -//meta:operation GET /repositories/{repository_id}/environments/{environment_name}/variables -func (s *ActionsService) ListEnvVariables(ctx context.Context, repoID int, env string, opts *ListOptions) (*ActionsVariables, *Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/variables", repoID, env) - return s.listVariables(ctx, url, opts) -} - -func (s *ActionsService) getVariable(ctx context.Context, url string) (*ActionsVariable, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - variable := new(ActionsVariable) - resp, err := s.client.Do(ctx, req, variable) - if err != nil { - return nil, resp, err - } - - return variable, resp, nil -} - -// GetRepoVariable gets a single repository variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#get-a-repository-variable -// -//meta:operation GET /repos/{owner}/{repo}/actions/variables/{name} -func (s *ActionsService) GetRepoVariable(ctx context.Context, owner, repo, name string) (*ActionsVariable, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/variables/%v", owner, repo, name) - return s.getVariable(ctx, url) -} - -// GetOrgVariable gets a single organization variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#get-an-organization-variable -// -//meta:operation GET /orgs/{org}/actions/variables/{name} -func (s *ActionsService) GetOrgVariable(ctx context.Context, org, name string) (*ActionsVariable, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v", org, name) - return s.getVariable(ctx, url) -} - -// GetEnvVariable gets a single environment variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#get-an-environment-variable -// -//meta:operation GET /repositories/{repository_id}/environments/{environment_name}/variables/{name} -func (s *ActionsService) GetEnvVariable(ctx context.Context, repoID int, env, variableName string) (*ActionsVariable, *Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/variables/%v", repoID, env, variableName) - return s.getVariable(ctx, url) -} - -func (s *ActionsService) postVariable(ctx context.Context, url string, variable *ActionsVariable) (*Response, error) { - req, err := s.client.NewRequest("POST", url, variable) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// CreateRepoVariable creates a repository variable. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/variables#create-a-repository-variable -// -//meta:operation POST /repos/{owner}/{repo}/actions/variables -func (s *ActionsService) CreateRepoVariable(ctx context.Context, owner, repo string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/variables", owner, repo) - return s.postVariable(ctx, url, variable) -} - -// CreateOrgVariable creates an organization variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#create-an-organization-variable -// -//meta:operation POST /orgs/{org}/actions/variables -func (s *ActionsService) CreateOrgVariable(ctx context.Context, org string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables", org) - return s.postVariable(ctx, url, variable) -} - -// CreateEnvVariable creates an environment variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#create-an-environment-variable -// -//meta:operation POST /repositories/{repository_id}/environments/{environment_name}/variables -func (s *ActionsService) CreateEnvVariable(ctx context.Context, repoID int, env string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/variables", repoID, env) - return s.postVariable(ctx, url, variable) -} - -func (s *ActionsService) patchVariable(ctx context.Context, url string, variable *ActionsVariable) (*Response, error) { - req, err := s.client.NewRequest("PATCH", url, variable) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// UpdateRepoVariable updates a repository variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#update-a-repository-variable -// -//meta:operation PATCH /repos/{owner}/{repo}/actions/variables/{name} -func (s *ActionsService) UpdateRepoVariable(ctx context.Context, owner, repo string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/variables/%v", owner, repo, variable.Name) - return s.patchVariable(ctx, url, variable) -} - -// UpdateOrgVariable updates an organization variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#update-an-organization-variable -// -//meta:operation PATCH /orgs/{org}/actions/variables/{name} -func (s *ActionsService) UpdateOrgVariable(ctx context.Context, org string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v", org, variable.Name) - return s.patchVariable(ctx, url, variable) -} - -// UpdateEnvVariable updates an environment variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#update-an-environment-variable -// -//meta:operation PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name} -func (s *ActionsService) UpdateEnvVariable(ctx context.Context, repoID int, env string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/variables/%v", repoID, env, variable.Name) - return s.patchVariable(ctx, url, variable) -} - -func (s *ActionsService) deleteVariable(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteRepoVariable deletes a variable in a repository. 
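Variables, unlike secrets, are stored in plain text, so create and update are single calls with no key exchange. A small upsert helper as an illustration; falling back to update when create fails (for example because the variable already exists) is an assumption of this sketch, not behavior documented by the deleted code.

package example

import (
	"context"

	"github.com/google/go-github/v57/github"
)

// upsertRepoVariable creates a repository variable and falls back to updating
// it if creation fails. owner, repo, name and value are placeholders.
func upsertRepoVariable(ctx context.Context, client *github.Client, owner, repo, name, value string) error {
	v := &github.ActionsVariable{Name: name, Value: value}
	if _, err := client.Actions.CreateRepoVariable(ctx, owner, repo, v); err != nil {
		// Assumed fallback path for an existing variable.
		_, err = client.Actions.UpdateRepoVariable(ctx, owner, repo, v)
		return err
	}
	return nil
}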
-// -// GitHub API docs: https://docs.github.com/rest/actions/variables#delete-a-repository-variable -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/variables/{name} -func (s *ActionsService) DeleteRepoVariable(ctx context.Context, owner, repo, name string) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/variables/%v", owner, repo, name) - return s.deleteVariable(ctx, url) -} - -// DeleteOrgVariable deletes a variable in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#delete-an-organization-variable -// -//meta:operation DELETE /orgs/{org}/actions/variables/{name} -func (s *ActionsService) DeleteOrgVariable(ctx context.Context, org, name string) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v", org, name) - return s.deleteVariable(ctx, url) -} - -// DeleteEnvVariable deletes a variable in an environment. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#delete-an-environment-variable -// -//meta:operation DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name} -func (s *ActionsService) DeleteEnvVariable(ctx context.Context, repoID int, env, variableName string) (*Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/variables/%v", repoID, env, variableName) - return s.deleteVariable(ctx, url) -} - -func (s *ActionsService) listSelectedReposForVariable(ctx context.Context, url string, opts *ListOptions) (*SelectedReposList, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - result := new(SelectedReposList) - resp, err := s.client.Do(ctx, req, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// ListSelectedReposForOrgVariable lists all repositories that have access to a variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#list-selected-repositories-for-an-organization-variable -// -//meta:operation GET /orgs/{org}/actions/variables/{name}/repositories -func (s *ActionsService) ListSelectedReposForOrgVariable(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories", org, name) - return s.listSelectedReposForVariable(ctx, url, opts) -} - -func (s *ActionsService) setSelectedReposForVariable(ctx context.Context, url string, ids SelectedRepoIDs) (*Response, error) { - type repoIDs struct { - SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` - } - - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// SetSelectedReposForOrgVariable sets the repositories that have access to a variable. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/variables#set-selected-repositories-for-an-organization-variable -// -//meta:operation PUT /orgs/{org}/actions/variables/{name}/repositories -func (s *ActionsService) SetSelectedReposForOrgVariable(ctx context.Context, org, name string, ids SelectedRepoIDs) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories", org, name) - return s.setSelectedReposForVariable(ctx, url, ids) -} - -func (s *ActionsService) addSelectedRepoToVariable(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddSelectedRepoToOrgVariable adds a repository to an organization variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#add-selected-repository-to-an-organization-variable -// -//meta:operation PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id} -func (s *ActionsService) AddSelectedRepoToOrgVariable(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories/%v", org, name, *repo.ID) - return s.addSelectedRepoToVariable(ctx, url) -} - -func (s *ActionsService) removeSelectedRepoFromVariable(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveSelectedRepoFromOrgVariable removes a repository from an organization variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#remove-selected-repository-from-an-organization-variable -// -//meta:operation DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id} -func (s *ActionsService) RemoveSelectedRepoFromOrgVariable(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories/%v", org, name, *repo.ID) - return s.removeSelectedRepoFromVariable(ctx, url) -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_workflow_jobs.go b/vendor/github.com/google/go-github/v57/github/actions_workflow_jobs.go deleted file mode 100644 index 0e0eb6e1..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_workflow_jobs.go +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" - "net/url" -) - -// TaskStep represents a single task step from a sequence of tasks of a job. -type TaskStep struct { - Name *string `json:"name,omitempty"` - Status *string `json:"status,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - Number *int64 `json:"number,omitempty"` - StartedAt *Timestamp `json:"started_at,omitempty"` - CompletedAt *Timestamp `json:"completed_at,omitempty"` -} - -// WorkflowJob represents a repository action workflow job. 
-type WorkflowJob struct { - ID *int64 `json:"id,omitempty"` - RunID *int64 `json:"run_id,omitempty"` - RunURL *string `json:"run_url,omitempty"` - NodeID *string `json:"node_id,omitempty"` - HeadBranch *string `json:"head_branch,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Status *string `json:"status,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - StartedAt *Timestamp `json:"started_at,omitempty"` - CompletedAt *Timestamp `json:"completed_at,omitempty"` - Name *string `json:"name,omitempty"` - Steps []*TaskStep `json:"steps,omitempty"` - CheckRunURL *string `json:"check_run_url,omitempty"` - // Labels represents runner labels from the `runs-on:` key from a GitHub Actions workflow. - Labels []string `json:"labels,omitempty"` - RunnerID *int64 `json:"runner_id,omitempty"` - RunnerName *string `json:"runner_name,omitempty"` - RunnerGroupID *int64 `json:"runner_group_id,omitempty"` - RunnerGroupName *string `json:"runner_group_name,omitempty"` - RunAttempt *int64 `json:"run_attempt,omitempty"` - WorkflowName *string `json:"workflow_name,omitempty"` -} - -// Jobs represents a slice of repository action workflow job. -type Jobs struct { - TotalCount *int `json:"total_count,omitempty"` - Jobs []*WorkflowJob `json:"jobs,omitempty"` -} - -// ListWorkflowJobsOptions specifies optional parameters to ListWorkflowJobs. -type ListWorkflowJobsOptions struct { - // Filter specifies how jobs should be filtered by their completed_at timestamp. - // Possible values are: - // latest - Returns jobs from the most recent execution of the workflow run - // all - Returns all jobs for a workflow run, including from old executions of the workflow run - // - // Default value is "latest". - Filter string `url:"filter,omitempty"` - ListOptions -} - -// ListWorkflowJobs lists all jobs for a workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-jobs#list-jobs-for-a-workflow-run -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs -func (s *ActionsService) ListWorkflowJobs(ctx context.Context, owner, repo string, runID int64, opts *ListWorkflowJobsOptions) (*Jobs, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/actions/runs/%v/jobs", owner, repo, runID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - jobs := new(Jobs) - resp, err := s.client.Do(ctx, req, &jobs) - if err != nil { - return nil, resp, err - } - - return jobs, resp, nil -} - -// GetWorkflowJobByID gets a specific job in a workflow run by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-jobs#get-a-job-for-a-workflow-run -// -//meta:operation GET /repos/{owner}/{repo}/actions/jobs/{job_id} -func (s *ActionsService) GetWorkflowJobByID(ctx context.Context, owner, repo string, jobID int64) (*WorkflowJob, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/jobs/%v", owner, repo, jobID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - job := new(WorkflowJob) - resp, err := s.client.Do(ctx, req, job) - if err != nil { - return nil, resp, err - } - - return job, resp, nil -} - -// GetWorkflowJobLogs gets a redirect URL to download a plain text file of logs for a workflow job. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-jobs#download-job-logs-for-a-workflow-run -// -//meta:operation GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs -func (s *ActionsService) GetWorkflowJobLogs(ctx context.Context, owner, repo string, jobID int64, maxRedirects int) (*url.URL, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/jobs/%v/logs", owner, repo, jobID) - - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusFound { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - parsedURL, err := url.Parse(resp.Header.Get("Location")) - return parsedURL, newResponse(resp), err -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_workflow_runs.go b/vendor/github.com/google/go-github/v57/github/actions_workflow_runs.go deleted file mode 100644 index bc7afe9e..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_workflow_runs.go +++ /dev/null @@ -1,410 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" - "net/url" -) - -// WorkflowRun represents a repository action workflow run. -type WorkflowRun struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - NodeID *string `json:"node_id,omitempty"` - HeadBranch *string `json:"head_branch,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` - RunNumber *int `json:"run_number,omitempty"` - RunAttempt *int `json:"run_attempt,omitempty"` - Event *string `json:"event,omitempty"` - DisplayTitle *string `json:"display_title,omitempty"` - Status *string `json:"status,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - WorkflowID *int64 `json:"workflow_id,omitempty"` - CheckSuiteID *int64 `json:"check_suite_id,omitempty"` - CheckSuiteNodeID *string `json:"check_suite_node_id,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - PullRequests []*PullRequest `json:"pull_requests,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - RunStartedAt *Timestamp `json:"run_started_at,omitempty"` - JobsURL *string `json:"jobs_url,omitempty"` - LogsURL *string `json:"logs_url,omitempty"` - CheckSuiteURL *string `json:"check_suite_url,omitempty"` - ArtifactsURL *string `json:"artifacts_url,omitempty"` - CancelURL *string `json:"cancel_url,omitempty"` - RerunURL *string `json:"rerun_url,omitempty"` - PreviousAttemptURL *string `json:"previous_attempt_url,omitempty"` - HeadCommit *HeadCommit `json:"head_commit,omitempty"` - WorkflowURL *string `json:"workflow_url,omitempty"` - Repository *Repository `json:"repository,omitempty"` - HeadRepository *Repository `json:"head_repository,omitempty"` - Actor *User `json:"actor,omitempty"` - TriggeringActor *User `json:"triggering_actor,omitempty"` - ReferencedWorkflows []*ReferencedWorkflow `json:"referenced_workflows,omitempty"` -} - -// WorkflowRuns represents a slice of repository action workflow run. -type WorkflowRuns struct { - TotalCount *int `json:"total_count,omitempty"` - WorkflowRuns []*WorkflowRun `json:"workflow_runs,omitempty"` -} - -// ListWorkflowRunsOptions specifies optional parameters to ListWorkflowRuns. 
-type ListWorkflowRunsOptions struct { - Actor string `url:"actor,omitempty"` - Branch string `url:"branch,omitempty"` - Event string `url:"event,omitempty"` - Status string `url:"status,omitempty"` - Created string `url:"created,omitempty"` - HeadSHA string `url:"head_sha,omitempty"` - ExcludePullRequests bool `url:"exclude_pull_requests,omitempty"` - CheckSuiteID int64 `url:"check_suite_id,omitempty"` - ListOptions -} - -// WorkflowRunUsage represents a usage of a specific workflow run. -type WorkflowRunUsage struct { - Billable *WorkflowRunBillMap `json:"billable,omitempty"` - RunDurationMS *int64 `json:"run_duration_ms,omitempty"` -} - -// WorkflowRunBillMap represents different runner environments available for a workflow run. -// Its key is the name of its environment, e.g. "UBUNTU", "MACOS", "WINDOWS", etc. -type WorkflowRunBillMap map[string]*WorkflowRunBill - -// WorkflowRunBill specifies billable time for a specific environment in a workflow run. -type WorkflowRunBill struct { - TotalMS *int64 `json:"total_ms,omitempty"` - Jobs *int `json:"jobs,omitempty"` - JobRuns []*WorkflowRunJobRun `json:"job_runs,omitempty"` -} - -// WorkflowRunJobRun represents a usage of individual jobs of a specific workflow run. -type WorkflowRunJobRun struct { - JobID *int `json:"job_id,omitempty"` - DurationMS *int64 `json:"duration_ms,omitempty"` -} - -// WorkflowRunAttemptOptions specifies optional parameters to GetWorkflowRunAttempt. -type WorkflowRunAttemptOptions struct { - ExcludePullRequests *bool `url:"exclude_pull_requests,omitempty"` -} - -// PendingDeploymentsRequest specifies body parameters to PendingDeployments. -type PendingDeploymentsRequest struct { - EnvironmentIDs []int64 `json:"environment_ids"` - // State can be one of: "approved", "rejected". - State string `json:"state"` - Comment string `json:"comment"` -} - -type ReferencedWorkflow struct { - Path *string `json:"path,omitempty"` - SHA *string `json:"sha,omitempty"` - Ref *string `json:"ref,omitempty"` -} - -func (s *ActionsService) listWorkflowRuns(ctx context.Context, endpoint string, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { - u, err := addOptions(endpoint, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runs := new(WorkflowRuns) - resp, err := s.client.Do(ctx, req, &runs) - if err != nil { - return nil, resp, err - } - - return runs, resp, nil -} - -// ListWorkflowRunsByID lists all workflow runs by workflow ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#list-workflow-runs-for-a-workflow -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs -func (s *ActionsService) ListWorkflowRunsByID(ctx context.Context, owner, repo string, workflowID int64, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/actions/workflows/%v/runs", owner, repo, workflowID) - return s.listWorkflowRuns(ctx, u, opts) -} - -// ListWorkflowRunsByFileName lists all workflow runs by workflow file name. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#list-workflow-runs-for-a-workflow -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs -func (s *ActionsService) ListWorkflowRunsByFileName(ctx context.Context, owner, repo, workflowFileName string, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/actions/workflows/%v/runs", owner, repo, workflowFileName) - return s.listWorkflowRuns(ctx, u, opts) -} - -// ListRepositoryWorkflowRuns lists all workflow runs for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#list-workflow-runs-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs -func (s *ActionsService) ListRepositoryWorkflowRuns(ctx context.Context, owner, repo string, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/actions/runs", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runs := new(WorkflowRuns) - resp, err := s.client.Do(ctx, req, &runs) - if err != nil { - return nil, resp, err - } - - return runs, resp, nil -} - -// GetWorkflowRunByID gets a specific workflow run by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#get-a-workflow-run -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id} -func (s *ActionsService) GetWorkflowRunByID(ctx context.Context, owner, repo string, runID int64) (*WorkflowRun, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v", owner, repo, runID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - run := new(WorkflowRun) - resp, err := s.client.Do(ctx, req, run) - if err != nil { - return nil, resp, err - } - - return run, resp, nil -} - -// GetWorkflowRunAttempt gets a specific workflow run attempt. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#get-a-workflow-run-attempt -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number} -func (s *ActionsService) GetWorkflowRunAttempt(ctx context.Context, owner, repo string, runID int64, attemptNumber int, opts *WorkflowRunAttemptOptions) (*WorkflowRun, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/attempts/%v", owner, repo, runID, attemptNumber) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - run := new(WorkflowRun) - resp, err := s.client.Do(ctx, req, run) - if err != nil { - return nil, resp, err - } - - return run, resp, nil -} - -// GetWorkflowRunAttemptLogs gets a redirect URL to download a plain text file of logs for a workflow run for attempt number. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#download-workflow-run-attempt-logs -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs -func (s *ActionsService) GetWorkflowRunAttemptLogs(ctx context.Context, owner, repo string, runID int64, attemptNumber int, maxRedirects int) (*url.URL, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/attempts/%v/logs", owner, repo, runID, attemptNumber) - - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusFound { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - parsedURL, err := url.Parse(resp.Header.Get("Location")) - return parsedURL, newResponse(resp), err -} - -// RerunWorkflowByID re-runs a workflow by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#re-run-a-workflow -// -//meta:operation POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun -func (s *ActionsService) RerunWorkflowByID(ctx context.Context, owner, repo string, runID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/rerun", owner, repo, runID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RerunFailedJobsByID re-runs all of the failed jobs and their dependent jobs in a workflow run by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#re-run-failed-jobs-from-a-workflow-run -// -//meta:operation POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs -func (s *ActionsService) RerunFailedJobsByID(ctx context.Context, owner, repo string, runID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/rerun-failed-jobs", owner, repo, runID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RerunJobByID re-runs a job and its dependent jobs in a workflow run by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#re-run-a-job-from-a-workflow-run -// -//meta:operation POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun -func (s *ActionsService) RerunJobByID(ctx context.Context, owner, repo string, jobID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/jobs/%v/rerun", owner, repo, jobID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CancelWorkflowRunByID cancels a workflow run by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#cancel-a-workflow-run -// -//meta:operation POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel -func (s *ActionsService) CancelWorkflowRunByID(ctx context.Context, owner, repo string, runID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/cancel", owner, repo, runID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetWorkflowRunLogs gets a redirect URL to download a plain text file of logs for a workflow run. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#download-workflow-run-logs -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs -func (s *ActionsService) GetWorkflowRunLogs(ctx context.Context, owner, repo string, runID int64, maxRedirects int) (*url.URL, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/logs", owner, repo, runID) - - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusFound { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - parsedURL, err := url.Parse(resp.Header.Get("Location")) - return parsedURL, newResponse(resp), err -} - -// DeleteWorkflowRun deletes a workflow run by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#delete-a-workflow-run -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/runs/{run_id} -func (s *ActionsService) DeleteWorkflowRun(ctx context.Context, owner, repo string, runID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v", owner, repo, runID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteWorkflowRunLogs deletes all logs for a workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#delete-workflow-run-logs -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs -func (s *ActionsService) DeleteWorkflowRunLogs(ctx context.Context, owner, repo string, runID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/logs", owner, repo, runID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetWorkflowRunUsageByID gets a specific workflow usage run by run ID in the unit of billable milliseconds. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#get-workflow-run-usage -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing -func (s *ActionsService) GetWorkflowRunUsageByID(ctx context.Context, owner, repo string, runID int64) (*WorkflowRunUsage, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/timing", owner, repo, runID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - workflowRunUsage := new(WorkflowRunUsage) - resp, err := s.client.Do(ctx, req, workflowRunUsage) - if err != nil { - return nil, resp, err - } - - return workflowRunUsage, resp, nil -} - -// PendingDeployments approve or reject pending deployments that are waiting on approval by a required reviewer. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#review-pending-deployments-for-a-workflow-run -// -//meta:operation POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments -func (s *ActionsService) PendingDeployments(ctx context.Context, owner, repo string, runID int64, request *PendingDeploymentsRequest) ([]*Deployment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/pending_deployments", owner, repo, runID) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - var deployments []*Deployment - resp, err := s.client.Do(ctx, req, &deployments) - if err != nil { - return nil, resp, err - } - - return deployments, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_workflows.go b/vendor/github.com/google/go-github/v57/github/actions_workflows.go deleted file mode 100644 index 0214e6ab..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_workflows.go +++ /dev/null @@ -1,237 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Workflow represents a repository action workflow. -type Workflow struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - State *string `json:"state,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - BadgeURL *string `json:"badge_url,omitempty"` -} - -// Workflows represents a slice of repository action workflows. -type Workflows struct { - TotalCount *int `json:"total_count,omitempty"` - Workflows []*Workflow `json:"workflows,omitempty"` -} - -// WorkflowUsage represents a usage of a specific workflow. -type WorkflowUsage struct { - Billable *WorkflowBillMap `json:"billable,omitempty"` -} - -// WorkflowBillMap represents different runner environments available for a workflow. -// Its key is the name of its environment, e.g. "UBUNTU", "MACOS", "WINDOWS", etc. -type WorkflowBillMap map[string]*WorkflowBill - -// WorkflowBill specifies billable time for a specific environment in a workflow. -type WorkflowBill struct { - TotalMS *int64 `json:"total_ms,omitempty"` -} - -// CreateWorkflowDispatchEventRequest represents a request to create a workflow dispatch event. -type CreateWorkflowDispatchEventRequest struct { - // Ref represents the reference of the workflow run. - // The reference can be a branch or a tag. - // Ref is required when creating a workflow dispatch event. - Ref string `json:"ref"` - // Inputs represents input keys and values configured in the workflow file. - // The maximum number of properties is 10. - // Default: Any default properties configured in the workflow file will be used when `inputs` are omitted. - Inputs map[string]interface{} `json:"inputs,omitempty"` -} - -// ListWorkflows lists all workflows in a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#list-repository-workflows -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows -func (s *ActionsService) ListWorkflows(ctx context.Context, owner, repo string, opts *ListOptions) (*Workflows, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/actions/workflows", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - workflows := new(Workflows) - resp, err := s.client.Do(ctx, req, &workflows) - if err != nil { - return nil, resp, err - } - - return workflows, resp, nil -} - -// GetWorkflowByID gets a specific workflow by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#get-a-workflow -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id} -func (s *ActionsService) GetWorkflowByID(ctx context.Context, owner, repo string, workflowID int64) (*Workflow, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v", owner, repo, workflowID) - - return s.getWorkflow(ctx, u) -} - -// GetWorkflowByFileName gets a specific workflow by file name. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#get-a-workflow -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id} -func (s *ActionsService) GetWorkflowByFileName(ctx context.Context, owner, repo, workflowFileName string) (*Workflow, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v", owner, repo, workflowFileName) - - return s.getWorkflow(ctx, u) -} - -func (s *ActionsService) getWorkflow(ctx context.Context, url string) (*Workflow, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - workflow := new(Workflow) - resp, err := s.client.Do(ctx, req, workflow) - if err != nil { - return nil, resp, err - } - - return workflow, resp, nil -} - -// GetWorkflowUsageByID gets a specific workflow usage by ID in the unit of billable milliseconds. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#get-workflow-usage -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing -func (s *ActionsService) GetWorkflowUsageByID(ctx context.Context, owner, repo string, workflowID int64) (*WorkflowUsage, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/timing", owner, repo, workflowID) - - return s.getWorkflowUsage(ctx, u) -} - -// GetWorkflowUsageByFileName gets a specific workflow usage by file name in the unit of billable milliseconds. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#get-workflow-usage -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing -func (s *ActionsService) GetWorkflowUsageByFileName(ctx context.Context, owner, repo, workflowFileName string) (*WorkflowUsage, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/timing", owner, repo, workflowFileName) - - return s.getWorkflowUsage(ctx, u) -} - -func (s *ActionsService) getWorkflowUsage(ctx context.Context, url string) (*WorkflowUsage, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - workflowUsage := new(WorkflowUsage) - resp, err := s.client.Do(ctx, req, workflowUsage) - if err != nil { - return nil, resp, err - } - - return workflowUsage, resp, nil -} - -// CreateWorkflowDispatchEventByID manually triggers a GitHub Actions workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#create-a-workflow-dispatch-event -// -//meta:operation POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches -func (s *ActionsService) CreateWorkflowDispatchEventByID(ctx context.Context, owner, repo string, workflowID int64, event CreateWorkflowDispatchEventRequest) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/dispatches", owner, repo, workflowID) - - return s.createWorkflowDispatchEvent(ctx, u, &event) -} - -// CreateWorkflowDispatchEventByFileName manually triggers a GitHub Actions workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#create-a-workflow-dispatch-event -// -//meta:operation POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches -func (s *ActionsService) CreateWorkflowDispatchEventByFileName(ctx context.Context, owner, repo, workflowFileName string, event CreateWorkflowDispatchEventRequest) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/dispatches", owner, repo, workflowFileName) - - return s.createWorkflowDispatchEvent(ctx, u, &event) -} - -func (s *ActionsService) createWorkflowDispatchEvent(ctx context.Context, url string, event *CreateWorkflowDispatchEventRequest) (*Response, error) { - req, err := s.client.NewRequest("POST", url, event) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// EnableWorkflowByID enables a workflow and sets the state of the workflow to "active". -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#enable-a-workflow -// -//meta:operation PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable -func (s *ActionsService) EnableWorkflowByID(ctx context.Context, owner, repo string, workflowID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/enable", owner, repo, workflowID) - return s.doNewPutRequest(ctx, u) -} - -// EnableWorkflowByFileName enables a workflow and sets the state of the workflow to "active". -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#enable-a-workflow -// -//meta:operation PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable -func (s *ActionsService) EnableWorkflowByFileName(ctx context.Context, owner, repo, workflowFileName string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/enable", owner, repo, workflowFileName) - return s.doNewPutRequest(ctx, u) -} - -// DisableWorkflowByID disables a workflow and sets the state of the workflow to "disabled_manually". 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#disable-a-workflow -// -//meta:operation PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable -func (s *ActionsService) DisableWorkflowByID(ctx context.Context, owner, repo string, workflowID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/disable", owner, repo, workflowID) - return s.doNewPutRequest(ctx, u) -} - -// DisableWorkflowByFileName disables a workflow and sets the state of the workflow to "disabled_manually". -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#disable-a-workflow -// -//meta:operation PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable -func (s *ActionsService) DisableWorkflowByFileName(ctx context.Context, owner, repo, workflowFileName string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/disable", owner, repo, workflowFileName) - return s.doNewPutRequest(ctx, u) -} - -func (s *ActionsService) doNewPutRequest(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/activity.go b/vendor/github.com/google/go-github/v57/github/activity.go deleted file mode 100644 index edf8cc43..00000000 --- a/vendor/github.com/google/go-github/v57/github/activity.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import "context" - -// ActivityService handles communication with the activity related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/activity/ -type ActivityService service - -// FeedLink represents a link to a related resource. -type FeedLink struct { - HRef *string `json:"href,omitempty"` - Type *string `json:"type,omitempty"` -} - -// Feeds represents timeline resources in Atom format. -type Feeds struct { - TimelineURL *string `json:"timeline_url,omitempty"` - UserURL *string `json:"user_url,omitempty"` - CurrentUserPublicURL *string `json:"current_user_public_url,omitempty"` - CurrentUserURL *string `json:"current_user_url,omitempty"` - CurrentUserActorURL *string `json:"current_user_actor_url,omitempty"` - CurrentUserOrganizationURL *string `json:"current_user_organization_url,omitempty"` - CurrentUserOrganizationURLs []string `json:"current_user_organization_urls,omitempty"` - Links *FeedLinks `json:"_links,omitempty"` -} - -// FeedLinks represents the links in a Feed. -type FeedLinks struct { - Timeline *FeedLink `json:"timeline,omitempty"` - User *FeedLink `json:"user,omitempty"` - CurrentUserPublic *FeedLink `json:"current_user_public,omitempty"` - CurrentUser *FeedLink `json:"current_user,omitempty"` - CurrentUserActor *FeedLink `json:"current_user_actor,omitempty"` - CurrentUserOrganization *FeedLink `json:"current_user_organization,omitempty"` - CurrentUserOrganizations []*FeedLink `json:"current_user_organizations,omitempty"` -} - -// ListFeeds lists all the feeds available to the authenticated user. 
-// -// GitHub provides several timeline resources in Atom format: -// -// Timeline: The GitHub global public timeline -// User: The public timeline for any user, using URI template -// Current user public: The public timeline for the authenticated user -// Current user: The private timeline for the authenticated user -// Current user actor: The private timeline for activity created by the -// authenticated user -// Current user organizations: The private timeline for the organizations -// the authenticated user is a member of. -// -// Note: Private feeds are only returned when authenticating via Basic Auth -// since current feed URIs use the older, non revocable auth tokens. -// -// GitHub API docs: https://docs.github.com/rest/activity/feeds#get-feeds -// -//meta:operation GET /feeds -func (s *ActivityService) ListFeeds(ctx context.Context) (*Feeds, *Response, error) { - req, err := s.client.NewRequest("GET", "feeds", nil) - if err != nil { - return nil, nil, err - } - - f := &Feeds{} - resp, err := s.client.Do(ctx, req, f) - if err != nil { - return nil, resp, err - } - - return f, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/activity_events.go b/vendor/github.com/google/go-github/v57/github/activity_events.go deleted file mode 100644 index b12baa99..00000000 --- a/vendor/github.com/google/go-github/v57/github/activity_events.go +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListEvents drinks from the firehose of all public events across GitHub. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-public-events -// -//meta:operation GET /events -func (s *ActivityService) ListEvents(ctx context.Context, opts *ListOptions) ([]*Event, *Response, error) { - u, err := addOptions("events", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListRepositoryEvents lists events for a repository. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-repository-events -// -//meta:operation GET /repos/{owner}/{repo}/events -func (s *ActivityService) ListRepositoryEvents(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Event, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/events", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListIssueEventsForRepository lists issue events for a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/events#list-issue-events-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/issues/events -func (s *ActivityService) ListIssueEventsForRepository(ctx context.Context, owner, repo string, opts *ListOptions) ([]*IssueEvent, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/events", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*IssueEvent - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListEventsForRepoNetwork lists public events for a network of repositories. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-public-events-for-a-network-of-repositories -// -//meta:operation GET /networks/{owner}/{repo}/events -func (s *ActivityService) ListEventsForRepoNetwork(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Event, *Response, error) { - u := fmt.Sprintf("networks/%v/%v/events", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListEventsForOrganization lists public events for an organization. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-public-organization-events -// -//meta:operation GET /orgs/{org}/events -func (s *ActivityService) ListEventsForOrganization(ctx context.Context, org string, opts *ListOptions) ([]*Event, *Response, error) { - u := fmt.Sprintf("orgs/%v/events", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListEventsPerformedByUser lists the events performed by a user. If publicOnly is -// true, only public events will be returned. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-events-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/activity/events#list-public-events-for-a-user -// -//meta:operation GET /users/{username}/events -//meta:operation GET /users/{username}/events/public -func (s *ActivityService) ListEventsPerformedByUser(ctx context.Context, user string, publicOnly bool, opts *ListOptions) ([]*Event, *Response, error) { - var u string - if publicOnly { - u = fmt.Sprintf("users/%v/events/public", user) - } else { - u = fmt.Sprintf("users/%v/events", user) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListEventsReceivedByUser lists the events received by a user. If publicOnly is -// true, only public events will be returned. 
-// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-events-received-by-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/activity/events#list-public-events-received-by-a-user -// -//meta:operation GET /users/{username}/received_events -//meta:operation GET /users/{username}/received_events/public -func (s *ActivityService) ListEventsReceivedByUser(ctx context.Context, user string, publicOnly bool, opts *ListOptions) ([]*Event, *Response, error) { - var u string - if publicOnly { - u = fmt.Sprintf("users/%v/received_events/public", user) - } else { - u = fmt.Sprintf("users/%v/received_events", user) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListUserEventsForOrganization provides the user’s organization dashboard. You -// must be authenticated as the user to view this. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-organization-events-for-the-authenticated-user -// -//meta:operation GET /users/{username}/events/orgs/{org} -func (s *ActivityService) ListUserEventsForOrganization(ctx context.Context, org, user string, opts *ListOptions) ([]*Event, *Response, error) { - u := fmt.Sprintf("users/%v/events/orgs/%v", user, org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/activity_notifications.go b/vendor/github.com/google/go-github/v57/github/activity_notifications.go deleted file mode 100644 index 47f22261..00000000 --- a/vendor/github.com/google/go-github/v57/github/activity_notifications.go +++ /dev/null @@ -1,241 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "time" -) - -// Notification identifies a GitHub notification for a user. -type Notification struct { - ID *string `json:"id,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Subject *NotificationSubject `json:"subject,omitempty"` - - // Reason identifies the event that triggered the notification. - // - // GitHub API docs: https://docs.github.com/rest/activity#notification-reasons - Reason *string `json:"reason,omitempty"` - - Unread *bool `json:"unread,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - LastReadAt *Timestamp `json:"last_read_at,omitempty"` - URL *string `json:"url,omitempty"` -} - -// NotificationSubject identifies the subject of a notification. -type NotificationSubject struct { - Title *string `json:"title,omitempty"` - URL *string `json:"url,omitempty"` - LatestCommentURL *string `json:"latest_comment_url,omitempty"` - Type *string `json:"type,omitempty"` -} - -// NotificationListOptions specifies the optional parameters to the -// ActivityService.ListNotifications method. 
-type NotificationListOptions struct { - All bool `url:"all,omitempty"` - Participating bool `url:"participating,omitempty"` - Since time.Time `url:"since,omitempty"` - Before time.Time `url:"before,omitempty"` - - ListOptions -} - -// ListNotifications lists all notifications for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#list-notifications-for-the-authenticated-user -// -//meta:operation GET /notifications -func (s *ActivityService) ListNotifications(ctx context.Context, opts *NotificationListOptions) ([]*Notification, *Response, error) { - u := "notifications" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var notifications []*Notification - resp, err := s.client.Do(ctx, req, ¬ifications) - if err != nil { - return nil, resp, err - } - - return notifications, resp, nil -} - -// ListRepositoryNotifications lists all notifications in a given repository -// for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#list-repository-notifications-for-the-authenticated-user -// -//meta:operation GET /repos/{owner}/{repo}/notifications -func (s *ActivityService) ListRepositoryNotifications(ctx context.Context, owner, repo string, opts *NotificationListOptions) ([]*Notification, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/notifications", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var notifications []*Notification - resp, err := s.client.Do(ctx, req, ¬ifications) - if err != nil { - return nil, resp, err - } - - return notifications, resp, nil -} - -type markReadOptions struct { - LastReadAt Timestamp `json:"last_read_at,omitempty"` -} - -// MarkNotificationsRead marks all notifications up to lastRead as read. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#mark-notifications-as-read -// -//meta:operation PUT /notifications -func (s *ActivityService) MarkNotificationsRead(ctx context.Context, lastRead Timestamp) (*Response, error) { - opts := &markReadOptions{ - LastReadAt: lastRead, - } - req, err := s.client.NewRequest("PUT", "notifications", opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// MarkRepositoryNotificationsRead marks all notifications up to lastRead in -// the specified repository as read. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#mark-repository-notifications-as-read -// -//meta:operation PUT /repos/{owner}/{repo}/notifications -func (s *ActivityService) MarkRepositoryNotificationsRead(ctx context.Context, owner, repo string, lastRead Timestamp) (*Response, error) { - opts := &markReadOptions{ - LastReadAt: lastRead, - } - u := fmt.Sprintf("repos/%v/%v/notifications", owner, repo) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetThread gets the specified notification thread. 
-// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#get-a-thread -// -//meta:operation GET /notifications/threads/{thread_id} -func (s *ActivityService) GetThread(ctx context.Context, id string) (*Notification, *Response, error) { - u := fmt.Sprintf("notifications/threads/%v", id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - notification := new(Notification) - resp, err := s.client.Do(ctx, req, notification) - if err != nil { - return nil, resp, err - } - - return notification, resp, nil -} - -// MarkThreadRead marks the specified thread as read. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#mark-a-thread-as-read -// -//meta:operation PATCH /notifications/threads/{thread_id} -func (s *ActivityService) MarkThreadRead(ctx context.Context, id string) (*Response, error) { - u := fmt.Sprintf("notifications/threads/%v", id) - - req, err := s.client.NewRequest("PATCH", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetThreadSubscription checks to see if the authenticated user is subscribed -// to a thread. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#get-a-thread-subscription-for-the-authenticated-user -// -//meta:operation GET /notifications/threads/{thread_id}/subscription -func (s *ActivityService) GetThreadSubscription(ctx context.Context, id string) (*Subscription, *Response, error) { - u := fmt.Sprintf("notifications/threads/%v/subscription", id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - sub := new(Subscription) - resp, err := s.client.Do(ctx, req, sub) - if err != nil { - return nil, resp, err - } - - return sub, resp, nil -} - -// SetThreadSubscription sets the subscription for the specified thread for the -// authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#set-a-thread-subscription -// -//meta:operation PUT /notifications/threads/{thread_id}/subscription -func (s *ActivityService) SetThreadSubscription(ctx context.Context, id string, subscription *Subscription) (*Subscription, *Response, error) { - u := fmt.Sprintf("notifications/threads/%v/subscription", id) - - req, err := s.client.NewRequest("PUT", u, subscription) - if err != nil { - return nil, nil, err - } - - sub := new(Subscription) - resp, err := s.client.Do(ctx, req, sub) - if err != nil { - return nil, resp, err - } - - return sub, resp, nil -} - -// DeleteThreadSubscription deletes the subscription for the specified thread -// for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#delete-a-thread-subscription -// -//meta:operation DELETE /notifications/threads/{thread_id}/subscription -func (s *ActivityService) DeleteThreadSubscription(ctx context.Context, id string) (*Response, error) { - u := fmt.Sprintf("notifications/threads/%v/subscription", id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/activity_star.go b/vendor/github.com/google/go-github/v57/github/activity_star.go deleted file mode 100644 index cebdacf7..00000000 --- a/vendor/github.com/google/go-github/v57/github/activity_star.go +++ /dev/null @@ -1,152 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strings" -) - -// StarredRepository is returned by ListStarred. -type StarredRepository struct { - StarredAt *Timestamp `json:"starred_at,omitempty"` - Repository *Repository `json:"repo,omitempty"` -} - -// Stargazer represents a user that has starred a repository. -type Stargazer struct { - StarredAt *Timestamp `json:"starred_at,omitempty"` - User *User `json:"user,omitempty"` -} - -// ListStargazers lists people who have starred the specified repo. -// -// GitHub API docs: https://docs.github.com/rest/activity/starring#list-stargazers -// -//meta:operation GET /repos/{owner}/{repo}/stargazers -func (s *ActivityService) ListStargazers(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Stargazer, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/stargazers", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeStarringPreview) - - var stargazers []*Stargazer - resp, err := s.client.Do(ctx, req, &stargazers) - if err != nil { - return nil, resp, err - } - - return stargazers, resp, nil -} - -// ActivityListStarredOptions specifies the optional parameters to the -// ActivityService.ListStarred method. -type ActivityListStarredOptions struct { - // How to sort the repository list. Possible values are: created, updated, - // pushed, full_name. Default is "full_name". - Sort string `url:"sort,omitempty"` - - // Direction in which to sort repositories. Possible values are: asc, desc. - // Default is "asc" when sort is "full_name", otherwise default is "desc". - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListStarred lists all the repos starred by a user. Passing the empty string -// will list the starred repositories for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/starring#list-repositories-starred-by-a-user -// GitHub API docs: https://docs.github.com/rest/activity/starring#list-repositories-starred-by-the-authenticated-user -// -//meta:operation GET /user/starred -//meta:operation GET /users/{username}/starred -func (s *ActivityService) ListStarred(ctx context.Context, user string, opts *ActivityListStarredOptions) ([]*StarredRepository, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/starred", user) - } else { - u = "user/starred" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when APIs fully launch - acceptHeaders := []string{mediaTypeStarringPreview, mediaTypeTopicsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var repos []*StarredRepository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// IsStarred checks if a repository is starred by authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/activity/starring#check-if-a-repository-is-starred-by-the-authenticated-user -// -//meta:operation GET /user/starred/{owner}/{repo} -func (s *ActivityService) IsStarred(ctx context.Context, owner, repo string) (bool, *Response, error) { - u := fmt.Sprintf("user/starred/%v/%v", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - starred, err := parseBoolResponse(err) - return starred, resp, err -} - -// Star a repository as the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/starring#star-a-repository-for-the-authenticated-user -// -//meta:operation PUT /user/starred/{owner}/{repo} -func (s *ActivityService) Star(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("user/starred/%v/%v", owner, repo) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unstar a repository as the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/starring#unstar-a-repository-for-the-authenticated-user -// -//meta:operation DELETE /user/starred/{owner}/{repo} -func (s *ActivityService) Unstar(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("user/starred/%v/%v", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/activity_watching.go b/vendor/github.com/google/go-github/v57/github/activity_watching.go deleted file mode 100644 index 34859005..00000000 --- a/vendor/github.com/google/go-github/v57/github/activity_watching.go +++ /dev/null @@ -1,158 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Subscription identifies a repository or thread subscription. -type Subscription struct { - Subscribed *bool `json:"subscribed,omitempty"` - Ignored *bool `json:"ignored,omitempty"` - Reason *string `json:"reason,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - URL *string `json:"url,omitempty"` - - // only populated for repository subscriptions - RepositoryURL *string `json:"repository_url,omitempty"` - - // only populated for thread subscriptions - ThreadURL *string `json:"thread_url,omitempty"` -} - -// ListWatchers lists watchers of a particular repo. -// -// GitHub API docs: https://docs.github.com/rest/activity/watching#list-watchers -// -//meta:operation GET /repos/{owner}/{repo}/subscribers -func (s *ActivityService) ListWatchers(ctx context.Context, owner, repo string, opts *ListOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/subscribers", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var watchers []*User - resp, err := s.client.Do(ctx, req, &watchers) - if err != nil { - return nil, resp, err - } - - return watchers, resp, nil -} - -// ListWatched lists the repositories the specified user is watching. Passing -// the empty string will fetch watched repos for the authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/activity/watching#list-repositories-watched-by-a-user -// GitHub API docs: https://docs.github.com/rest/activity/watching#list-repositories-watched-by-the-authenticated-user -// -//meta:operation GET /user/subscriptions -//meta:operation GET /users/{username}/subscriptions -func (s *ActivityService) ListWatched(ctx context.Context, user string, opts *ListOptions) ([]*Repository, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/subscriptions", user) - } else { - u = "user/subscriptions" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var watched []*Repository - resp, err := s.client.Do(ctx, req, &watched) - if err != nil { - return nil, resp, err - } - - return watched, resp, nil -} - -// GetRepositorySubscription returns the subscription for the specified -// repository for the authenticated user. If the authenticated user is not -// watching the repository, a nil Subscription is returned. -// -// GitHub API docs: https://docs.github.com/rest/activity/watching#get-a-repository-subscription -// -//meta:operation GET /repos/{owner}/{repo}/subscription -func (s *ActivityService) GetRepositorySubscription(ctx context.Context, owner, repo string) (*Subscription, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/subscription", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - sub := new(Subscription) - resp, err := s.client.Do(ctx, req, sub) - if err != nil { - // if it's just a 404, don't return that as an error - _, err = parseBoolResponse(err) - return nil, resp, err - } - - return sub, resp, nil -} - -// SetRepositorySubscription sets the subscription for the specified repository -// for the authenticated user. -// -// To watch a repository, set subscription.Subscribed to true. -// To ignore notifications made within a repository, set subscription.Ignored to true. -// To stop watching a repository, use DeleteRepositorySubscription. -// -// GitHub API docs: https://docs.github.com/rest/activity/watching#set-a-repository-subscription -// -//meta:operation PUT /repos/{owner}/{repo}/subscription -func (s *ActivityService) SetRepositorySubscription(ctx context.Context, owner, repo string, subscription *Subscription) (*Subscription, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/subscription", owner, repo) - - req, err := s.client.NewRequest("PUT", u, subscription) - if err != nil { - return nil, nil, err - } - - sub := new(Subscription) - resp, err := s.client.Do(ctx, req, sub) - if err != nil { - return nil, resp, err - } - - return sub, resp, nil -} - -// DeleteRepositorySubscription deletes the subscription for the specified -// repository for the authenticated user. -// -// This is used to stop watching a repository. To control whether or not to -// receive notifications from a repository, use SetRepositorySubscription. 
-// -// GitHub API docs: https://docs.github.com/rest/activity/watching#delete-a-repository-subscription -// -//meta:operation DELETE /repos/{owner}/{repo}/subscription -func (s *ActivityService) DeleteRepositorySubscription(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%s/%s/subscription", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/admin.go b/vendor/github.com/google/go-github/v57/github/admin.go deleted file mode 100644 index 8eee9854..00000000 --- a/vendor/github.com/google/go-github/v57/github/admin.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// AdminService handles communication with the admin related methods of the -// GitHub API. These API routes are normally only accessible for GitHub -// Enterprise installations. -// -// GitHub API docs: https://docs.github.com/rest/enterprise-admin -type AdminService service - -// TeamLDAPMapping represents the mapping between a GitHub team and an LDAP group. -type TeamLDAPMapping struct { - ID *int64 `json:"id,omitempty"` - LDAPDN *string `json:"ldap_dn,omitempty"` - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - Slug *string `json:"slug,omitempty"` - Description *string `json:"description,omitempty"` - Privacy *string `json:"privacy,omitempty"` - Permission *string `json:"permission,omitempty"` - - MembersURL *string `json:"members_url,omitempty"` - RepositoriesURL *string `json:"repositories_url,omitempty"` -} - -func (m TeamLDAPMapping) String() string { - return Stringify(m) -} - -// UserLDAPMapping represents the mapping between a GitHub user and an LDAP user. -type UserLDAPMapping struct { - ID *int64 `json:"id,omitempty"` - LDAPDN *string `json:"ldap_dn,omitempty"` - Login *string `json:"login,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - GravatarID *string `json:"gravatar_id,omitempty"` - Type *string `json:"type,omitempty"` - SiteAdmin *bool `json:"site_admin,omitempty"` - - URL *string `json:"url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - FollowingURL *string `json:"following_url,omitempty"` - FollowersURL *string `json:"followers_url,omitempty"` - GistsURL *string `json:"gists_url,omitempty"` - OrganizationsURL *string `json:"organizations_url,omitempty"` - ReceivedEventsURL *string `json:"received_events_url,omitempty"` - ReposURL *string `json:"repos_url,omitempty"` - StarredURL *string `json:"starred_url,omitempty"` - SubscriptionsURL *string `json:"subscriptions_url,omitempty"` -} - -func (m UserLDAPMapping) String() string { - return Stringify(m) -} - -// Enterprise represents the GitHub enterprise profile. 
-type Enterprise struct { - ID *int `json:"id,omitempty"` - Slug *string `json:"slug,omitempty"` - Name *string `json:"name,omitempty"` - NodeID *string `json:"node_id,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - Description *string `json:"description,omitempty"` - WebsiteURL *string `json:"website_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -func (m Enterprise) String() string { - return Stringify(m) -} - -// UpdateUserLDAPMapping updates the mapping between a GitHub user and an LDAP user. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/ldap#update-ldap-mapping-for-a-user -// -//meta:operation PATCH /admin/ldap/users/{username}/mapping -func (s *AdminService) UpdateUserLDAPMapping(ctx context.Context, user string, mapping *UserLDAPMapping) (*UserLDAPMapping, *Response, error) { - u := fmt.Sprintf("admin/ldap/users/%v/mapping", user) - req, err := s.client.NewRequest("PATCH", u, mapping) - if err != nil { - return nil, nil, err - } - - m := new(UserLDAPMapping) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// UpdateTeamLDAPMapping updates the mapping between a GitHub team and an LDAP group. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/ldap#update-ldap-mapping-for-a-team -// -//meta:operation PATCH /admin/ldap/teams/{team_id}/mapping -func (s *AdminService) UpdateTeamLDAPMapping(ctx context.Context, team int64, mapping *TeamLDAPMapping) (*TeamLDAPMapping, *Response, error) { - u := fmt.Sprintf("admin/ldap/teams/%v/mapping", team) - req, err := s.client.NewRequest("PATCH", u, mapping) - if err != nil { - return nil, nil, err - } - - m := new(TeamLDAPMapping) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/admin_orgs.go b/vendor/github.com/google/go-github/v57/github/admin_orgs.go deleted file mode 100644 index c734d4de..00000000 --- a/vendor/github.com/google/go-github/v57/github/admin_orgs.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2019 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// createOrgRequest is a subset of Organization and is used internally -// by CreateOrg to pass only the known fields for the endpoint. -type createOrgRequest struct { - Login *string `json:"login,omitempty"` - Admin *string `json:"admin,omitempty"` -} - -// CreateOrg creates a new organization in GitHub Enterprise. -// -// Note that only a subset of the org fields are used and org must -// not be nil. 
-// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/orgs#create-an-organization -// -//meta:operation POST /admin/organizations -func (s *AdminService) CreateOrg(ctx context.Context, org *Organization, admin string) (*Organization, *Response, error) { - u := "admin/organizations" - - orgReq := &createOrgRequest{ - Login: org.Login, - Admin: &admin, - } - - req, err := s.client.NewRequest("POST", u, orgReq) - if err != nil { - return nil, nil, err - } - - o := new(Organization) - resp, err := s.client.Do(ctx, req, o) - if err != nil { - return nil, resp, err - } - - return o, resp, nil -} - -// renameOrgRequest is a subset of Organization and is used internally -// by RenameOrg and RenameOrgByName to pass only the known fields for the endpoint. -type renameOrgRequest struct { - Login *string `json:"login,omitempty"` -} - -// RenameOrgResponse is the response given when renaming an Organization. -type RenameOrgResponse struct { - Message *string `json:"message,omitempty"` - URL *string `json:"url,omitempty"` -} - -// RenameOrg renames an organization in GitHub Enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/orgs#update-an-organization-name -// -//meta:operation PATCH /admin/organizations/{org} -func (s *AdminService) RenameOrg(ctx context.Context, org *Organization, newName string) (*RenameOrgResponse, *Response, error) { - return s.RenameOrgByName(ctx, *org.Login, newName) -} - -// RenameOrgByName renames an organization in GitHub Enterprise using its current name. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/orgs#update-an-organization-name -// -//meta:operation PATCH /admin/organizations/{org} -func (s *AdminService) RenameOrgByName(ctx context.Context, org, newName string) (*RenameOrgResponse, *Response, error) { - u := fmt.Sprintf("admin/organizations/%v", org) - - orgReq := &renameOrgRequest{ - Login: &newName, - } - - req, err := s.client.NewRequest("PATCH", u, orgReq) - if err != nil { - return nil, nil, err - } - - o := new(RenameOrgResponse) - resp, err := s.client.Do(ctx, req, o) - if err != nil { - return nil, resp, err - } - - return o, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/admin_stats.go b/vendor/github.com/google/go-github/v57/github/admin_stats.go deleted file mode 100644 index aa23f5d1..00000000 --- a/vendor/github.com/google/go-github/v57/github/admin_stats.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" -) - -// AdminStats represents a variety of stats of a GitHub Enterprise -// installation. -type AdminStats struct { - Issues *IssueStats `json:"issues,omitempty"` - Hooks *HookStats `json:"hooks,omitempty"` - Milestones *MilestoneStats `json:"milestones,omitempty"` - Orgs *OrgStats `json:"orgs,omitempty"` - Comments *CommentStats `json:"comments,omitempty"` - Pages *PageStats `json:"pages,omitempty"` - Users *UserStats `json:"users,omitempty"` - Gists *GistStats `json:"gists,omitempty"` - Pulls *PullStats `json:"pulls,omitempty"` - Repos *RepoStats `json:"repos,omitempty"` -} - -func (s AdminStats) String() string { - return Stringify(s) -} - -// IssueStats represents the number of total, open and closed issues. 
-type IssueStats struct { - TotalIssues *int `json:"total_issues,omitempty"` - OpenIssues *int `json:"open_issues,omitempty"` - ClosedIssues *int `json:"closed_issues,omitempty"` -} - -func (s IssueStats) String() string { - return Stringify(s) -} - -// HookStats represents the number of total, active and inactive hooks. -type HookStats struct { - TotalHooks *int `json:"total_hooks,omitempty"` - ActiveHooks *int `json:"active_hooks,omitempty"` - InactiveHooks *int `json:"inactive_hooks,omitempty"` -} - -func (s HookStats) String() string { - return Stringify(s) -} - -// MilestoneStats represents the number of total, open and close milestones. -type MilestoneStats struct { - TotalMilestones *int `json:"total_milestones,omitempty"` - OpenMilestones *int `json:"open_milestones,omitempty"` - ClosedMilestones *int `json:"closed_milestones,omitempty"` -} - -func (s MilestoneStats) String() string { - return Stringify(s) -} - -// OrgStats represents the number of total, disabled organizations and the team -// and team member count. -type OrgStats struct { - TotalOrgs *int `json:"total_orgs,omitempty"` - DisabledOrgs *int `json:"disabled_orgs,omitempty"` - TotalTeams *int `json:"total_teams,omitempty"` - TotalTeamMembers *int `json:"total_team_members,omitempty"` -} - -func (s OrgStats) String() string { - return Stringify(s) -} - -// CommentStats represents the number of total comments on commits, gists, issues -// and pull requests. -type CommentStats struct { - TotalCommitComments *int `json:"total_commit_comments,omitempty"` - TotalGistComments *int `json:"total_gist_comments,omitempty"` - TotalIssueComments *int `json:"total_issue_comments,omitempty"` - TotalPullRequestComments *int `json:"total_pull_request_comments,omitempty"` -} - -func (s CommentStats) String() string { - return Stringify(s) -} - -// PageStats represents the total number of github pages. -type PageStats struct { - TotalPages *int `json:"total_pages,omitempty"` -} - -func (s PageStats) String() string { - return Stringify(s) -} - -// UserStats represents the number of total, admin and suspended users. -type UserStats struct { - TotalUsers *int `json:"total_users,omitempty"` - AdminUsers *int `json:"admin_users,omitempty"` - SuspendedUsers *int `json:"suspended_users,omitempty"` -} - -func (s UserStats) String() string { - return Stringify(s) -} - -// GistStats represents the number of total, private and public gists. -type GistStats struct { - TotalGists *int `json:"total_gists,omitempty"` - PrivateGists *int `json:"private_gists,omitempty"` - PublicGists *int `json:"public_gists,omitempty"` -} - -func (s GistStats) String() string { - return Stringify(s) -} - -// PullStats represents the number of total, merged, mergable and unmergeable -// pull-requests. -type PullStats struct { - TotalPulls *int `json:"total_pulls,omitempty"` - MergedPulls *int `json:"merged_pulls,omitempty"` - MergablePulls *int `json:"mergeable_pulls,omitempty"` - UnmergablePulls *int `json:"unmergeable_pulls,omitempty"` -} - -func (s PullStats) String() string { - return Stringify(s) -} - -// RepoStats represents the number of total, root, fork, organization repositories -// together with the total number of pushes and wikis. 
-type RepoStats struct { - TotalRepos *int `json:"total_repos,omitempty"` - RootRepos *int `json:"root_repos,omitempty"` - ForkRepos *int `json:"fork_repos,omitempty"` - OrgRepos *int `json:"org_repos,omitempty"` - TotalPushes *int `json:"total_pushes,omitempty"` - TotalWikis *int `json:"total_wikis,omitempty"` -} - -func (s RepoStats) String() string { - return Stringify(s) -} - -// GetAdminStats returns a variety of metrics about a GitHub Enterprise -// installation. -// -// Please note that this is only available to site administrators, -// otherwise it will error with a 404 not found (instead of 401 or 403). -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/admin-stats#get-all-statistics -// -//meta:operation GET /enterprise/stats/all -func (s *AdminService) GetAdminStats(ctx context.Context) (*AdminStats, *Response, error) { - u := "enterprise/stats/all" - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - m := new(AdminStats) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/admin_users.go b/vendor/github.com/google/go-github/v57/github/admin_users.go deleted file mode 100644 index 3916a470..00000000 --- a/vendor/github.com/google/go-github/v57/github/admin_users.go +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright 2019 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// createUserRequest is a subset of User and is used internally -// by CreateUser to pass only the known fields for the endpoint. -type createUserRequest struct { - Login *string `json:"login,omitempty"` - Email *string `json:"email,omitempty"` -} - -// CreateUser creates a new user in GitHub Enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#create-a-user -// -//meta:operation POST /admin/users -func (s *AdminService) CreateUser(ctx context.Context, login, email string) (*User, *Response, error) { - u := "admin/users" - - userReq := &createUserRequest{ - Login: &login, - Email: &email, - } - - req, err := s.client.NewRequest("POST", u, userReq) - if err != nil { - return nil, nil, err - } - - var user User - resp, err := s.client.Do(ctx, req, &user) - if err != nil { - return nil, resp, err - } - - return &user, resp, nil -} - -// DeleteUser deletes a user in GitHub Enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#delete-a-user -// -//meta:operation DELETE /admin/users/{username} -func (s *AdminService) DeleteUser(ctx context.Context, username string) (*Response, error) { - u := "admin/users/" + username - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// ImpersonateUserOptions represents the scoping for the OAuth token. -type ImpersonateUserOptions struct { - Scopes []string `json:"scopes,omitempty"` -} - -// OAuthAPP represents the GitHub Site Administrator OAuth app. 
-type OAuthAPP struct { - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - ClientID *string `json:"client_id,omitempty"` -} - -func (s OAuthAPP) String() string { - return Stringify(s) -} - -// UserAuthorization represents the impersonation response. -type UserAuthorization struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Scopes []string `json:"scopes,omitempty"` - Token *string `json:"token,omitempty"` - TokenLastEight *string `json:"token_last_eight,omitempty"` - HashedToken *string `json:"hashed_token,omitempty"` - App *OAuthAPP `json:"app,omitempty"` - Note *string `json:"note,omitempty"` - NoteURL *string `json:"note_url,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - Fingerprint *string `json:"fingerprint,omitempty"` -} - -// CreateUserImpersonation creates an impersonation OAuth token. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#create-an-impersonation-oauth-token -// -//meta:operation POST /admin/users/{username}/authorizations -func (s *AdminService) CreateUserImpersonation(ctx context.Context, username string, opts *ImpersonateUserOptions) (*UserAuthorization, *Response, error) { - u := fmt.Sprintf("admin/users/%s/authorizations", username) - - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - a := new(UserAuthorization) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} - -// DeleteUserImpersonation deletes an impersonation OAuth token. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#delete-an-impersonation-oauth-token -// -//meta:operation DELETE /admin/users/{username}/authorizations -func (s *AdminService) DeleteUserImpersonation(ctx context.Context, username string) (*Response, error) { - u := fmt.Sprintf("admin/users/%s/authorizations", username) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/apps.go b/vendor/github.com/google/go-github/v57/github/apps.go deleted file mode 100644 index f0392f2d..00000000 --- a/vendor/github.com/google/go-github/v57/github/apps.go +++ /dev/null @@ -1,420 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// AppsService provides access to the installation related functions -// in the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/apps/ -type AppsService service - -// App represents a GitHub App. 
-type App struct { - ID *int64 `json:"id,omitempty"` - Slug *string `json:"slug,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Owner *User `json:"owner,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - ExternalURL *string `json:"external_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Permissions *InstallationPermissions `json:"permissions,omitempty"` - Events []string `json:"events,omitempty"` - InstallationsCount *int `json:"installations_count,omitempty"` -} - -// InstallationToken represents an installation token. -type InstallationToken struct { - Token *string `json:"token,omitempty"` - ExpiresAt *Timestamp `json:"expires_at,omitempty"` - Permissions *InstallationPermissions `json:"permissions,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -// InstallationTokenOptions allow restricting a token's access to specific repositories. -type InstallationTokenOptions struct { - // The IDs of the repositories that the installation token can access. - // Providing repository IDs restricts the access of an installation token to specific repositories. - RepositoryIDs []int64 `json:"repository_ids,omitempty"` - - // The names of the repositories that the installation token can access. - // Providing repository names restricts the access of an installation token to specific repositories. - Repositories []string `json:"repositories,omitempty"` - - // The permissions granted to the access token. - // The permissions object includes the permission names and their access type. - Permissions *InstallationPermissions `json:"permissions,omitempty"` -} - -// InstallationPermissions lists the repository and organization permissions for an installation. 
-// -// Permission names taken from: -// -// https://docs.github.com/enterprise-server@3.0/rest/apps#create-an-installation-access-token-for-an-app -// https://docs.github.com/rest/apps#create-an-installation-access-token-for-an-app -type InstallationPermissions struct { - Actions *string `json:"actions,omitempty"` - Administration *string `json:"administration,omitempty"` - Blocking *string `json:"blocking,omitempty"` - Checks *string `json:"checks,omitempty"` - Contents *string `json:"contents,omitempty"` - ContentReferences *string `json:"content_references,omitempty"` - Deployments *string `json:"deployments,omitempty"` - Emails *string `json:"emails,omitempty"` - Environments *string `json:"environments,omitempty"` - Followers *string `json:"followers,omitempty"` - Issues *string `json:"issues,omitempty"` - Metadata *string `json:"metadata,omitempty"` - Members *string `json:"members,omitempty"` - OrganizationAdministration *string `json:"organization_administration,omitempty"` - OrganizationCustomRoles *string `json:"organization_custom_roles,omitempty"` - OrganizationHooks *string `json:"organization_hooks,omitempty"` - OrganizationPackages *string `json:"organization_packages,omitempty"` - OrganizationPlan *string `json:"organization_plan,omitempty"` - OrganizationPreReceiveHooks *string `json:"organization_pre_receive_hooks,omitempty"` - OrganizationProjects *string `json:"organization_projects,omitempty"` - OrganizationSecrets *string `json:"organization_secrets,omitempty"` - OrganizationSelfHostedRunners *string `json:"organization_self_hosted_runners,omitempty"` - OrganizationUserBlocking *string `json:"organization_user_blocking,omitempty"` - Packages *string `json:"packages,omitempty"` - Pages *string `json:"pages,omitempty"` - PullRequests *string `json:"pull_requests,omitempty"` - RepositoryHooks *string `json:"repository_hooks,omitempty"` - RepositoryProjects *string `json:"repository_projects,omitempty"` - RepositoryPreReceiveHooks *string `json:"repository_pre_receive_hooks,omitempty"` - Secrets *string `json:"secrets,omitempty"` - SecretScanningAlerts *string `json:"secret_scanning_alerts,omitempty"` - SecurityEvents *string `json:"security_events,omitempty"` - SingleFile *string `json:"single_file,omitempty"` - Statuses *string `json:"statuses,omitempty"` - TeamDiscussions *string `json:"team_discussions,omitempty"` - VulnerabilityAlerts *string `json:"vulnerability_alerts,omitempty"` - Workflows *string `json:"workflows,omitempty"` -} - -// InstallationRequest represents a pending GitHub App installation request. -type InstallationRequest struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Account *User `json:"account,omitempty"` - Requester *User `json:"requester,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` -} - -// Installation represents a GitHub Apps installation. 
-type Installation struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - AppID *int64 `json:"app_id,omitempty"` - AppSlug *string `json:"app_slug,omitempty"` - TargetID *int64 `json:"target_id,omitempty"` - Account *User `json:"account,omitempty"` - AccessTokensURL *string `json:"access_tokens_url,omitempty"` - RepositoriesURL *string `json:"repositories_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - TargetType *string `json:"target_type,omitempty"` - SingleFileName *string `json:"single_file_name,omitempty"` - RepositorySelection *string `json:"repository_selection,omitempty"` - Events []string `json:"events,omitempty"` - SingleFilePaths []string `json:"single_file_paths,omitempty"` - Permissions *InstallationPermissions `json:"permissions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - HasMultipleSingleFiles *bool `json:"has_multiple_single_files,omitempty"` - SuspendedBy *User `json:"suspended_by,omitempty"` - SuspendedAt *Timestamp `json:"suspended_at,omitempty"` -} - -// Attachment represents a GitHub Apps attachment. -type Attachment struct { - ID *int64 `json:"id,omitempty"` - Title *string `json:"title,omitempty"` - Body *string `json:"body,omitempty"` -} - -// ContentReference represents a reference to a URL in an issue or pull request. -type ContentReference struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Reference *string `json:"reference,omitempty"` -} - -func (i Installation) String() string { - return Stringify(i) -} - -// Get a single GitHub App. Passing the empty string will get -// the authenticated GitHub App. -// -// Note: appSlug is just the URL-friendly name of your GitHub App. -// You can find this on the settings page for your GitHub App -// (e.g., https://github.com/settings/apps/:app_slug). -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-an-app -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-the-authenticated-app -// -//meta:operation GET /app -//meta:operation GET /apps/{app_slug} -func (s *AppsService) Get(ctx context.Context, appSlug string) (*App, *Response, error) { - var u string - if appSlug != "" { - u = fmt.Sprintf("apps/%v", appSlug) - } else { - u = "app" - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - app := new(App) - resp, err := s.client.Do(ctx, req, app) - if err != nil { - return nil, resp, err - } - - return app, resp, nil -} - -// ListInstallationRequests lists the pending installation requests that the current GitHub App has. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#list-installation-requests-for-the-authenticated-app -// -//meta:operation GET /app/installation-requests -func (s *AppsService) ListInstallationRequests(ctx context.Context, opts *ListOptions) ([]*InstallationRequest, *Response, error) { - u, err := addOptions("app/installation-requests", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var i []*InstallationRequest - resp, err := s.client.Do(ctx, req, &i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// ListInstallations lists the installations that the current GitHub App has. 
-// -// GitHub API docs: https://docs.github.com/rest/apps/apps#list-installations-for-the-authenticated-app -// -//meta:operation GET /app/installations -func (s *AppsService) ListInstallations(ctx context.Context, opts *ListOptions) ([]*Installation, *Response, error) { - u, err := addOptions("app/installations", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var i []*Installation - resp, err := s.client.Do(ctx, req, &i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// GetInstallation returns the specified installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-an-installation-for-the-authenticated-app -// -//meta:operation GET /app/installations/{installation_id} -func (s *AppsService) GetInstallation(ctx context.Context, id int64) (*Installation, *Response, error) { - return s.getInstallation(ctx, fmt.Sprintf("app/installations/%v", id)) -} - -// ListUserInstallations lists installations that are accessible to the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#list-app-installations-accessible-to-the-user-access-token -// -//meta:operation GET /user/installations -func (s *AppsService) ListUserInstallations(ctx context.Context, opts *ListOptions) ([]*Installation, *Response, error) { - u, err := addOptions("user/installations", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var i struct { - Installations []*Installation `json:"installations"` - } - resp, err := s.client.Do(ctx, req, &i) - if err != nil { - return nil, resp, err - } - - return i.Installations, resp, nil -} - -// SuspendInstallation suspends the specified installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#suspend-an-app-installation -// -//meta:operation PUT /app/installations/{installation_id}/suspended -func (s *AppsService) SuspendInstallation(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("app/installations/%v/suspended", id) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// UnsuspendInstallation unsuspends the specified installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#unsuspend-an-app-installation -// -//meta:operation DELETE /app/installations/{installation_id}/suspended -func (s *AppsService) UnsuspendInstallation(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("app/installations/%v/suspended", id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteInstallation deletes the specified installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#delete-an-installation-for-the-authenticated-app -// -//meta:operation DELETE /app/installations/{installation_id} -func (s *AppsService) DeleteInstallation(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("app/installations/%v", id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CreateInstallationToken creates a new installation token. 
-// -// GitHub API docs: https://docs.github.com/rest/apps/apps#create-an-installation-access-token-for-an-app -// -//meta:operation POST /app/installations/{installation_id}/access_tokens -func (s *AppsService) CreateInstallationToken(ctx context.Context, id int64, opts *InstallationTokenOptions) (*InstallationToken, *Response, error) { - u := fmt.Sprintf("app/installations/%v/access_tokens", id) - - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - t := new(InstallationToken) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// CreateAttachment creates a new attachment on user comment containing a url. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.3/rest/reference/apps#create-a-content-attachment -// -//meta:operation POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments -func (s *AppsService) CreateAttachment(ctx context.Context, contentReferenceID int64, title, body string) (*Attachment, *Response, error) { - u := fmt.Sprintf("content_references/%v/attachments", contentReferenceID) - payload := &Attachment{Title: String(title), Body: String(body)} - req, err := s.client.NewRequest("POST", u, payload) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeContentAttachmentsPreview) - - m := &Attachment{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// FindOrganizationInstallation finds the organization's installation information. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-an-organization-installation-for-the-authenticated-app -// -//meta:operation GET /orgs/{org}/installation -func (s *AppsService) FindOrganizationInstallation(ctx context.Context, org string) (*Installation, *Response, error) { - return s.getInstallation(ctx, fmt.Sprintf("orgs/%v/installation", org)) -} - -// FindRepositoryInstallation finds the repository's installation information. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-a-repository-installation-for-the-authenticated-app -// -//meta:operation GET /repos/{owner}/{repo}/installation -func (s *AppsService) FindRepositoryInstallation(ctx context.Context, owner, repo string) (*Installation, *Response, error) { - return s.getInstallation(ctx, fmt.Sprintf("repos/%v/%v/installation", owner, repo)) -} - -// FindRepositoryInstallationByID finds the repository's installation information. -// -// Note: FindRepositoryInstallationByID uses the undocumented GitHub API endpoint "GET /repositories/{repository_id}/installation". -// -//meta:operation GET /repositories/{repository_id}/installation -func (s *AppsService) FindRepositoryInstallationByID(ctx context.Context, id int64) (*Installation, *Response, error) { - return s.getInstallation(ctx, fmt.Sprintf("repositories/%d/installation", id)) -} - -// FindUserInstallation finds the user's installation information. 
-// -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-a-user-installation-for-the-authenticated-app -// -//meta:operation GET /users/{username}/installation -func (s *AppsService) FindUserInstallation(ctx context.Context, user string) (*Installation, *Response, error) { - return s.getInstallation(ctx, fmt.Sprintf("users/%v/installation", user)) -} - -func (s *AppsService) getInstallation(ctx context.Context, url string) (*Installation, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - i := new(Installation) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/apps_hooks.go b/vendor/github.com/google/go-github/v57/github/apps_hooks.go deleted file mode 100644 index 6046827e..00000000 --- a/vendor/github.com/google/go-github/v57/github/apps_hooks.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" -) - -// GetHookConfig returns the webhook configuration for a GitHub App. -// The underlying transport must be authenticated as an app. -// -// GitHub API docs: https://docs.github.com/rest/apps/webhooks#get-a-webhook-configuration-for-an-app -// -//meta:operation GET /app/hook/config -func (s *AppsService) GetHookConfig(ctx context.Context) (*HookConfig, *Response, error) { - req, err := s.client.NewRequest("GET", "app/hook/config", nil) - if err != nil { - return nil, nil, err - } - - config := new(HookConfig) - resp, err := s.client.Do(ctx, req, &config) - if err != nil { - return nil, resp, err - } - - return config, resp, nil -} - -// UpdateHookConfig updates the webhook configuration for a GitHub App. -// The underlying transport must be authenticated as an app. -// -// GitHub API docs: https://docs.github.com/rest/apps/webhooks#update-a-webhook-configuration-for-an-app -// -//meta:operation PATCH /app/hook/config -func (s *AppsService) UpdateHookConfig(ctx context.Context, config *HookConfig) (*HookConfig, *Response, error) { - req, err := s.client.NewRequest("PATCH", "app/hook/config", config) - if err != nil { - return nil, nil, err - } - - c := new(HookConfig) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/apps_hooks_deliveries.go b/vendor/github.com/google/go-github/v57/github/apps_hooks_deliveries.go deleted file mode 100644 index 59800a0a..00000000 --- a/vendor/github.com/google/go-github/v57/github/apps_hooks_deliveries.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListHookDeliveries lists deliveries of an App webhook. 
-// -// GitHub API docs: https://docs.github.com/rest/apps/webhooks#list-deliveries-for-an-app-webhook -// -//meta:operation GET /app/hook/deliveries -func (s *AppsService) ListHookDeliveries(ctx context.Context, opts *ListCursorOptions) ([]*HookDelivery, *Response, error) { - u, err := addOptions("app/hook/deliveries", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - deliveries := []*HookDelivery{} - resp, err := s.client.Do(ctx, req, &deliveries) - if err != nil { - return nil, resp, err - } - - return deliveries, resp, nil -} - -// GetHookDelivery returns the App webhook delivery with the specified ID. -// -// GitHub API docs: https://docs.github.com/rest/apps/webhooks#get-a-delivery-for-an-app-webhook -// -//meta:operation GET /app/hook/deliveries/{delivery_id} -func (s *AppsService) GetHookDelivery(ctx context.Context, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("app/hook/deliveries/%v", deliveryID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// RedeliverHookDelivery redelivers a delivery for an App webhook. -// -// GitHub API docs: https://docs.github.com/rest/apps/webhooks#redeliver-a-delivery-for-an-app-webhook -// -//meta:operation POST /app/hook/deliveries/{delivery_id}/attempts -func (s *AppsService) RedeliverHookDelivery(ctx context.Context, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("app/hook/deliveries/%v/attempts", deliveryID) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/apps_installation.go b/vendor/github.com/google/go-github/v57/github/apps_installation.go deleted file mode 100644 index d430511d..00000000 --- a/vendor/github.com/google/go-github/v57/github/apps_installation.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strings" -) - -// ListRepositories represents the response from the list repos endpoints. -type ListRepositories struct { - TotalCount *int `json:"total_count,omitempty"` - Repositories []*Repository `json:"repositories"` -} - -// ListRepos lists the repositories that are accessible to the authenticated installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#list-repositories-accessible-to-the-app-installation -// -//meta:operation GET /installation/repositories -func (s *AppsService) ListRepos(ctx context.Context, opts *ListOptions) (*ListRepositories, *Response, error) { - u, err := addOptions("installation/repositories", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. 
- acceptHeaders := []string{ - mediaTypeTopicsPreview, - mediaTypeRepositoryVisibilityPreview, - mediaTypeRepositoryTemplatePreview, - } - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var r *ListRepositories - - resp, err := s.client.Do(ctx, req, &r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// ListUserRepos lists repositories that are accessible -// to the authenticated user for an installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#list-repositories-accessible-to-the-user-access-token -// -//meta:operation GET /user/installations/{installation_id}/repositories -func (s *AppsService) ListUserRepos(ctx context.Context, id int64, opts *ListOptions) (*ListRepositories, *Response, error) { - u := fmt.Sprintf("user/installations/%v/repositories", id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{ - mediaTypeTopicsPreview, - mediaTypeRepositoryVisibilityPreview, - mediaTypeRepositoryTemplatePreview, - } - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var r *ListRepositories - resp, err := s.client.Do(ctx, req, &r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// AddRepository adds a single repository to an installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#add-a-repository-to-an-app-installation -// -//meta:operation PUT /user/installations/{installation_id}/repositories/{repository_id} -func (s *AppsService) AddRepository(ctx context.Context, instID, repoID int64) (*Repository, *Response, error) { - u := fmt.Sprintf("user/installations/%v/repositories/%v", instID, repoID) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, nil, err - } - - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// RemoveRepository removes a single repository from an installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#remove-a-repository-from-an-app-installation -// -//meta:operation DELETE /user/installations/{installation_id}/repositories/{repository_id} -func (s *AppsService) RemoveRepository(ctx context.Context, instID, repoID int64) (*Response, error) { - u := fmt.Sprintf("user/installations/%v/repositories/%v", instID, repoID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RevokeInstallationToken revokes an installation token. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#revoke-an-installation-access-token -// -//meta:operation DELETE /installation/token -func (s *AppsService) RevokeInstallationToken(ctx context.Context) (*Response, error) { - u := "installation/token" - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/apps_manifest.go b/vendor/github.com/google/go-github/v57/github/apps_manifest.go deleted file mode 100644 index 5b6ff9af..00000000 --- a/vendor/github.com/google/go-github/v57/github/apps_manifest.go +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2019 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// AppConfig describes the configuration of a GitHub App. -type AppConfig struct { - ID *int64 `json:"id,omitempty"` - Slug *string `json:"slug,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Owner *User `json:"owner,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - ExternalURL *string `json:"external_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ClientID *string `json:"client_id,omitempty"` - ClientSecret *string `json:"client_secret,omitempty"` - WebhookSecret *string `json:"webhook_secret,omitempty"` - PEM *string `json:"pem,omitempty"` -} - -// CompleteAppManifest completes the App manifest handshake flow for the given -// code. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#create-a-github-app-from-a-manifest -// -//meta:operation POST /app-manifests/{code}/conversions -func (s *AppsService) CompleteAppManifest(ctx context.Context, code string) (*AppConfig, *Response, error) { - u := fmt.Sprintf("app-manifests/%s/conversions", code) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - cfg := new(AppConfig) - resp, err := s.client.Do(ctx, req, cfg) - if err != nil { - return nil, resp, err - } - - return cfg, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/apps_marketplace.go b/vendor/github.com/google/go-github/v57/github/apps_marketplace.go deleted file mode 100644 index 976775a7..00000000 --- a/vendor/github.com/google/go-github/v57/github/apps_marketplace.go +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// MarketplaceService handles communication with the marketplace related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/apps#marketplace -type MarketplaceService struct { - client *Client - // Stubbed controls whether endpoints that return stubbed data are used - // instead of production endpoints. Stubbed data is fake data that's useful - // for testing your GitHub Apps. Stubbed data is hard-coded and will not - // change based on actual subscriptions. - // - // GitHub API docs: https://docs.github.com/rest/apps#testing-with-stubbed-endpoints - Stubbed bool -} - -// MarketplacePlan represents a GitHub Apps Marketplace Listing Plan. -type MarketplacePlan struct { - URL *string `json:"url,omitempty"` - AccountsURL *string `json:"accounts_url,omitempty"` - ID *int64 `json:"id,omitempty"` - Number *int `json:"number,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - MonthlyPriceInCents *int `json:"monthly_price_in_cents,omitempty"` - YearlyPriceInCents *int `json:"yearly_price_in_cents,omitempty"` - // The pricing model for this listing. Can be one of "flat-rate", "per-unit", or "free". - PriceModel *string `json:"price_model,omitempty"` - UnitName *string `json:"unit_name,omitempty"` - Bullets *[]string `json:"bullets,omitempty"` - // State can be one of the values "draft" or "published". 
- State *string `json:"state,omitempty"` - HasFreeTrial *bool `json:"has_free_trial,omitempty"` -} - -// MarketplacePurchase represents a GitHub Apps Marketplace Purchase. -type MarketplacePurchase struct { - Account *MarketplacePurchaseAccount `json:"account,omitempty"` - // BillingCycle can be one of the values "yearly", "monthly" or nil. - BillingCycle *string `json:"billing_cycle,omitempty"` - NextBillingDate *Timestamp `json:"next_billing_date,omitempty"` - UnitCount *int `json:"unit_count,omitempty"` - Plan *MarketplacePlan `json:"plan,omitempty"` - OnFreeTrial *bool `json:"on_free_trial,omitempty"` - FreeTrialEndsOn *Timestamp `json:"free_trial_ends_on,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -// MarketplacePendingChange represents a pending change to a GitHub Apps Marketplace Plan. -type MarketplacePendingChange struct { - EffectiveDate *Timestamp `json:"effective_date,omitempty"` - UnitCount *int `json:"unit_count,omitempty"` - ID *int64 `json:"id,omitempty"` - Plan *MarketplacePlan `json:"plan,omitempty"` -} - -// MarketplacePlanAccount represents a GitHub Account (user or organization) on a specific plan. -type MarketplacePlanAccount struct { - URL *string `json:"url,omitempty"` - Type *string `json:"type,omitempty"` - ID *int64 `json:"id,omitempty"` - Login *string `json:"login,omitempty"` - OrganizationBillingEmail *string `json:"organization_billing_email,omitempty"` - MarketplacePurchase *MarketplacePurchase `json:"marketplace_purchase,omitempty"` - MarketplacePendingChange *MarketplacePendingChange `json:"marketplace_pending_change,omitempty"` -} - -// MarketplacePurchaseAccount represents a GitHub Account (user or organization) for a Purchase. -type MarketplacePurchaseAccount struct { - URL *string `json:"url,omitempty"` - Type *string `json:"type,omitempty"` - ID *int64 `json:"id,omitempty"` - Login *string `json:"login,omitempty"` - OrganizationBillingEmail *string `json:"organization_billing_email,omitempty"` - Email *string `json:"email,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -// ListPlans lists all plans for your Marketplace listing. -// -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-plans -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-plans-stubbed -// -//meta:operation GET /marketplace_listing/plans -//meta:operation GET /marketplace_listing/stubbed/plans -func (s *MarketplaceService) ListPlans(ctx context.Context, opts *ListOptions) ([]*MarketplacePlan, *Response, error) { - uri := s.marketplaceURI("plans") - u, err := addOptions(uri, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var plans []*MarketplacePlan - resp, err := s.client.Do(ctx, req, &plans) - if err != nil { - return nil, resp, err - } - - return plans, resp, nil -} - -// ListPlanAccountsForPlan lists all GitHub accounts (user or organization) on a specific plan. 
-// -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-accounts-for-a-plan -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-accounts-for-a-plan-stubbed -// -//meta:operation GET /marketplace_listing/plans/{plan_id}/accounts -//meta:operation GET /marketplace_listing/stubbed/plans/{plan_id}/accounts -func (s *MarketplaceService) ListPlanAccountsForPlan(ctx context.Context, planID int64, opts *ListOptions) ([]*MarketplacePlanAccount, *Response, error) { - uri := s.marketplaceURI(fmt.Sprintf("plans/%v/accounts", planID)) - u, err := addOptions(uri, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var accounts []*MarketplacePlanAccount - resp, err := s.client.Do(ctx, req, &accounts) - if err != nil { - return nil, resp, err - } - - return accounts, resp, nil -} - -// GetPlanAccountForAccount get GitHub account (user or organization) associated with an account. -// -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#get-a-subscription-plan-for-an-account -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#get-a-subscription-plan-for-an-account-stubbed -// -//meta:operation GET /marketplace_listing/accounts/{account_id} -//meta:operation GET /marketplace_listing/stubbed/accounts/{account_id} -func (s *MarketplaceService) GetPlanAccountForAccount(ctx context.Context, accountID int64) (*MarketplacePlanAccount, *Response, error) { - uri := s.marketplaceURI(fmt.Sprintf("accounts/%v", accountID)) - - req, err := s.client.NewRequest("GET", uri, nil) - if err != nil { - return nil, nil, err - } - - var account *MarketplacePlanAccount - resp, err := s.client.Do(ctx, req, &account) - if err != nil { - return nil, resp, err - } - - return account, resp, nil -} - -// ListMarketplacePurchasesForUser lists all GitHub marketplace purchases made by a user. -// -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-subscriptions-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-subscriptions-for-the-authenticated-user-stubbed -// -//meta:operation GET /user/marketplace_purchases -//meta:operation GET /user/marketplace_purchases/stubbed -func (s *MarketplaceService) ListMarketplacePurchasesForUser(ctx context.Context, opts *ListOptions) ([]*MarketplacePurchase, *Response, error) { - uri := "user/marketplace_purchases" - if s.Stubbed { - uri = "user/marketplace_purchases/stubbed" - } - - u, err := addOptions(uri, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var purchases []*MarketplacePurchase - resp, err := s.client.Do(ctx, req, &purchases) - if err != nil { - return nil, resp, err - } - return purchases, resp, nil -} - -func (s *MarketplaceService) marketplaceURI(endpoint string) string { - url := "marketplace_listing" - if s.Stubbed { - url = "marketplace_listing/stubbed" - } - return url + "/" + endpoint -} diff --git a/vendor/github.com/google/go-github/v57/github/authorizations.go b/vendor/github.com/google/go-github/v57/github/authorizations.go deleted file mode 100644 index 7adc5323..00000000 --- a/vendor/github.com/google/go-github/v57/github/authorizations.go +++ /dev/null @@ -1,293 +0,0 @@ -// Copyright 2015 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// Scope models a GitHub authorization scope. -// -// GitHub API docs: https://docs.github.com/rest/oauth/#scopes -type Scope string - -// This is the set of scopes for GitHub API V3 -const ( - ScopeNone Scope = "(no scope)" // REVISIT: is this actually returned, or just a documentation artifact? - ScopeUser Scope = "user" - ScopeUserEmail Scope = "user:email" - ScopeUserFollow Scope = "user:follow" - ScopePublicRepo Scope = "public_repo" - ScopeRepo Scope = "repo" - ScopeRepoDeployment Scope = "repo_deployment" - ScopeRepoStatus Scope = "repo:status" - ScopeDeleteRepo Scope = "delete_repo" - ScopeNotifications Scope = "notifications" - ScopeGist Scope = "gist" - ScopeReadRepoHook Scope = "read:repo_hook" - ScopeWriteRepoHook Scope = "write:repo_hook" - ScopeAdminRepoHook Scope = "admin:repo_hook" - ScopeAdminOrgHook Scope = "admin:org_hook" - ScopeReadOrg Scope = "read:org" - ScopeWriteOrg Scope = "write:org" - ScopeAdminOrg Scope = "admin:org" - ScopeReadPublicKey Scope = "read:public_key" - ScopeWritePublicKey Scope = "write:public_key" - ScopeAdminPublicKey Scope = "admin:public_key" - ScopeReadGPGKey Scope = "read:gpg_key" - ScopeWriteGPGKey Scope = "write:gpg_key" - ScopeAdminGPGKey Scope = "admin:gpg_key" - ScopeSecurityEvents Scope = "security_events" -) - -// AuthorizationsService handles communication with the authorization related -// methods of the GitHub API. -// -// This service requires HTTP Basic Authentication; it cannot be accessed using -// an OAuth token. -// -// GitHub API docs: https://docs.github.com/rest/oauth-authorizations -type AuthorizationsService service - -// Authorization represents an individual GitHub authorization. -type Authorization struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Scopes []Scope `json:"scopes,omitempty"` - Token *string `json:"token,omitempty"` - TokenLastEight *string `json:"token_last_eight,omitempty"` - HashedToken *string `json:"hashed_token,omitempty"` - App *AuthorizationApp `json:"app,omitempty"` - Note *string `json:"note,omitempty"` - NoteURL *string `json:"note_url,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - Fingerprint *string `json:"fingerprint,omitempty"` - - // User is only populated by the Check and Reset methods. - User *User `json:"user,omitempty"` -} - -func (a Authorization) String() string { - return Stringify(a) -} - -// AuthorizationApp represents an individual GitHub app (in the context of authorization). -type AuthorizationApp struct { - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - ClientID *string `json:"client_id,omitempty"` -} - -func (a AuthorizationApp) String() string { - return Stringify(a) -} - -// Grant represents an OAuth application that has been granted access to an account. -type Grant struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - App *AuthorizationApp `json:"app,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Scopes []string `json:"scopes,omitempty"` -} - -func (g Grant) String() string { - return Stringify(g) -} - -// AuthorizationRequest represents a request to create an authorization. 
-type AuthorizationRequest struct { - Scopes []Scope `json:"scopes,omitempty"` - Note *string `json:"note,omitempty"` - NoteURL *string `json:"note_url,omitempty"` - ClientID *string `json:"client_id,omitempty"` - ClientSecret *string `json:"client_secret,omitempty"` - Fingerprint *string `json:"fingerprint,omitempty"` -} - -func (a AuthorizationRequest) String() string { - return Stringify(a) -} - -// AuthorizationUpdateRequest represents a request to update an authorization. -// -// Note that for any one update, you must only provide one of the "scopes" -// fields. That is, you may provide only one of "Scopes", or "AddScopes", or -// "RemoveScopes". -// -// GitHub API docs: https://docs.github.com/rest/oauth-authorizations#update-an-existing-authorization -type AuthorizationUpdateRequest struct { - Scopes []string `json:"scopes,omitempty"` - AddScopes []string `json:"add_scopes,omitempty"` - RemoveScopes []string `json:"remove_scopes,omitempty"` - Note *string `json:"note,omitempty"` - NoteURL *string `json:"note_url,omitempty"` - Fingerprint *string `json:"fingerprint,omitempty"` -} - -func (a AuthorizationUpdateRequest) String() string { - return Stringify(a) -} - -// Check if an OAuth token is valid for a specific app. -// -// Note that this operation requires the use of BasicAuth, but where the -// username is the OAuth application clientID, and the password is its -// clientSecret. Invalid tokens will return a 404 Not Found. -// -// The returned Authorization.User field will be populated. -// -// GitHub API docs: https://docs.github.com/rest/apps/oauth-applications#check-a-token -// -//meta:operation POST /applications/{client_id}/token -func (s *AuthorizationsService) Check(ctx context.Context, clientID, accessToken string) (*Authorization, *Response, error) { - u := fmt.Sprintf("applications/%v/token", clientID) - - reqBody := &struct { - AccessToken string `json:"access_token"` - }{AccessToken: accessToken} - - req, err := s.client.NewRequest("POST", u, reqBody) - if err != nil { - return nil, nil, err - } - req.Header.Set("Accept", mediaTypeOAuthAppPreview) - - a := new(Authorization) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} - -// Reset is used to reset a valid OAuth token without end user involvement. -// Applications must save the "token" property in the response, because changes -// take effect immediately. -// -// Note that this operation requires the use of BasicAuth, but where the -// username is the OAuth application clientID, and the password is its -// clientSecret. Invalid tokens will return a 404 Not Found. -// -// The returned Authorization.User field will be populated. -// -// GitHub API docs: https://docs.github.com/rest/apps/oauth-applications#reset-a-token -// -//meta:operation PATCH /applications/{client_id}/token -func (s *AuthorizationsService) Reset(ctx context.Context, clientID, accessToken string) (*Authorization, *Response, error) { - u := fmt.Sprintf("applications/%v/token", clientID) - - reqBody := &struct { - AccessToken string `json:"access_token"` - }{AccessToken: accessToken} - - req, err := s.client.NewRequest("PATCH", u, reqBody) - if err != nil { - return nil, nil, err - } - req.Header.Set("Accept", mediaTypeOAuthAppPreview) - - a := new(Authorization) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} - -// Revoke an authorization for an application. 
-// -// Note that this operation requires the use of BasicAuth, but where the -// username is the OAuth application clientID, and the password is its -// clientSecret. Invalid tokens will return a 404 Not Found. -// -// GitHub API docs: https://docs.github.com/rest/apps/oauth-applications#delete-an-app-token -// -//meta:operation DELETE /applications/{client_id}/token -func (s *AuthorizationsService) Revoke(ctx context.Context, clientID, accessToken string) (*Response, error) { - u := fmt.Sprintf("applications/%v/token", clientID) - - reqBody := &struct { - AccessToken string `json:"access_token"` - }{AccessToken: accessToken} - - req, err := s.client.NewRequest("DELETE", u, reqBody) - if err != nil { - return nil, err - } - req.Header.Set("Accept", mediaTypeOAuthAppPreview) - - return s.client.Do(ctx, req, nil) -} - -// DeleteGrant deletes an OAuth application grant. Deleting an application's -// grant will also delete all OAuth tokens associated with the application for -// the user. -// -// GitHub API docs: https://docs.github.com/rest/apps/oauth-applications#delete-an-app-authorization -// -//meta:operation DELETE /applications/{client_id}/grant -func (s *AuthorizationsService) DeleteGrant(ctx context.Context, clientID, accessToken string) (*Response, error) { - u := fmt.Sprintf("applications/%v/grant", clientID) - - reqBody := &struct { - AccessToken string `json:"access_token"` - }{AccessToken: accessToken} - - req, err := s.client.NewRequest("DELETE", u, reqBody) - if err != nil { - return nil, err - } - req.Header.Set("Accept", mediaTypeOAuthAppPreview) - - return s.client.Do(ctx, req, nil) -} - -// CreateImpersonation creates an impersonation OAuth token. -// -// This requires admin permissions. With the returned Authorization.Token -// you can e.g. create or delete a user's public SSH key. NOTE: creating a -// new token automatically revokes an existing one. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#create-an-impersonation-oauth-token -// -//meta:operation POST /admin/users/{username}/authorizations -func (s *AuthorizationsService) CreateImpersonation(ctx context.Context, username string, authReq *AuthorizationRequest) (*Authorization, *Response, error) { - u := fmt.Sprintf("admin/users/%v/authorizations", username) - req, err := s.client.NewRequest("POST", u, authReq) - if err != nil { - return nil, nil, err - } - - a := new(Authorization) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - return a, resp, nil -} - -// DeleteImpersonation deletes an impersonation OAuth token. -// -// NOTE: there can be only one at a time. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#delete-an-impersonation-oauth-token -// -//meta:operation DELETE /admin/users/{username}/authorizations -func (s *AuthorizationsService) DeleteImpersonation(ctx context.Context, username string) (*Response, error) { - u := fmt.Sprintf("admin/users/%v/authorizations", username) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/billing.go b/vendor/github.com/google/go-github/v57/github/billing.go deleted file mode 100644 index 6d7579b8..00000000 --- a/vendor/github.com/google/go-github/v57/github/billing.go +++ /dev/null @@ -1,215 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// BillingService provides access to the billing related functions -// in the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/billing -type BillingService service - -// ActionBilling represents a GitHub Action billing. -type ActionBilling struct { - TotalMinutesUsed float64 `json:"total_minutes_used"` - TotalPaidMinutesUsed float64 `json:"total_paid_minutes_used"` - IncludedMinutes float64 `json:"included_minutes"` - MinutesUsedBreakdown MinutesUsedBreakdown `json:"minutes_used_breakdown"` -} - -// MinutesUsedBreakdown counts the actions minutes used by machine type (e.g. UBUNTU, WINDOWS, MACOS). -type MinutesUsedBreakdown = map[string]int - -// PackageBilling represents a GitHub Package billing. -type PackageBilling struct { - TotalGigabytesBandwidthUsed int `json:"total_gigabytes_bandwidth_used"` - TotalPaidGigabytesBandwidthUsed int `json:"total_paid_gigabytes_bandwidth_used"` - IncludedGigabytesBandwidth float64 `json:"included_gigabytes_bandwidth"` -} - -// StorageBilling represents a GitHub Storage billing. -type StorageBilling struct { - DaysLeftInBillingCycle int `json:"days_left_in_billing_cycle"` - EstimatedPaidStorageForMonth float64 `json:"estimated_paid_storage_for_month"` - EstimatedStorageForMonth float64 `json:"estimated_storage_for_month"` -} - -// ActiveCommitters represents the total active committers across all repositories in an Organization. -type ActiveCommitters struct { - TotalAdvancedSecurityCommitters int `json:"total_advanced_security_committers"` - Repositories []*RepositoryActiveCommitters `json:"repositories,omitempty"` -} - -// RepositoryActiveCommitters represents active committers on each repository. -type RepositoryActiveCommitters struct { - Name *string `json:"name,omitempty"` - AdvancedSecurityCommitters *int `json:"advanced_security_committers,omitempty"` - AdvancedSecurityCommittersBreakdown []*AdvancedSecurityCommittersBreakdown `json:"advanced_security_committers_breakdown,omitempty"` -} - -// AdvancedSecurityCommittersBreakdown represents the user activity breakdown for ActiveCommitters. -type AdvancedSecurityCommittersBreakdown struct { - UserLogin *string `json:"user_login,omitempty"` - LastPushedDate *string `json:"last_pushed_date,omitempty"` -} - -// GetActionsBillingOrg returns the summary of the free and paid GitHub Actions minutes used for an Org. -// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-github-actions-billing-for-an-organization -// -//meta:operation GET /orgs/{org}/settings/billing/actions -func (s *BillingService) GetActionsBillingOrg(ctx context.Context, org string) (*ActionBilling, *Response, error) { - u := fmt.Sprintf("orgs/%v/settings/billing/actions", org) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionsOrgBilling := new(ActionBilling) - resp, err := s.client.Do(ctx, req, actionsOrgBilling) - if err != nil { - return nil, resp, err - } - - return actionsOrgBilling, resp, nil -} - -// GetPackagesBillingOrg returns the free and paid storage used for GitHub Packages in gigabytes for an Org. 
-// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-github-packages-billing-for-an-organization -// -//meta:operation GET /orgs/{org}/settings/billing/packages -func (s *BillingService) GetPackagesBillingOrg(ctx context.Context, org string) (*PackageBilling, *Response, error) { - u := fmt.Sprintf("orgs/%v/settings/billing/packages", org) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - packagesOrgBilling := new(PackageBilling) - resp, err := s.client.Do(ctx, req, packagesOrgBilling) - if err != nil { - return nil, resp, err - } - - return packagesOrgBilling, resp, nil -} - -// GetStorageBillingOrg returns the estimated paid and estimated total storage used for GitHub Actions -// and GitHub Packages in gigabytes for an Org. -// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-shared-storage-billing-for-an-organization -// -//meta:operation GET /orgs/{org}/settings/billing/shared-storage -func (s *BillingService) GetStorageBillingOrg(ctx context.Context, org string) (*StorageBilling, *Response, error) { - u := fmt.Sprintf("orgs/%v/settings/billing/shared-storage", org) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - storageOrgBilling := new(StorageBilling) - resp, err := s.client.Do(ctx, req, storageOrgBilling) - if err != nil { - return nil, resp, err - } - - return storageOrgBilling, resp, nil -} - -// GetAdvancedSecurityActiveCommittersOrg returns the GitHub Advanced Security active committers for an organization per repository. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/billing/billing#get-github-advanced-security-active-committers-for-an-organization -// -//meta:operation GET /orgs/{org}/settings/billing/advanced-security -func (s *BillingService) GetAdvancedSecurityActiveCommittersOrg(ctx context.Context, org string, opts *ListOptions) (*ActiveCommitters, *Response, error) { - u := fmt.Sprintf("orgs/%v/settings/billing/advanced-security", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - activeOrgCommitters := new(ActiveCommitters) - resp, err := s.client.Do(ctx, req, activeOrgCommitters) - if err != nil { - return nil, resp, err - } - - return activeOrgCommitters, resp, nil -} - -// GetActionsBillingUser returns the summary of the free and paid GitHub Actions minutes used for a user. -// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-github-actions-billing-for-a-user -// -//meta:operation GET /users/{username}/settings/billing/actions -func (s *BillingService) GetActionsBillingUser(ctx context.Context, user string) (*ActionBilling, *Response, error) { - u := fmt.Sprintf("users/%v/settings/billing/actions", user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionsUserBilling := new(ActionBilling) - resp, err := s.client.Do(ctx, req, actionsUserBilling) - if err != nil { - return nil, resp, err - } - - return actionsUserBilling, resp, nil -} - -// GetPackagesBillingUser returns the free and paid storage used for GitHub Packages in gigabytes for a user. 
-// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-github-packages-billing-for-a-user -// -//meta:operation GET /users/{username}/settings/billing/packages -func (s *BillingService) GetPackagesBillingUser(ctx context.Context, user string) (*PackageBilling, *Response, error) { - u := fmt.Sprintf("users/%v/settings/billing/packages", user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - packagesUserBilling := new(PackageBilling) - resp, err := s.client.Do(ctx, req, packagesUserBilling) - if err != nil { - return nil, resp, err - } - - return packagesUserBilling, resp, nil -} - -// GetStorageBillingUser returns the estimated paid and estimated total storage used for GitHub Actions -// and GitHub Packages in gigabytes for a user. -// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-shared-storage-billing-for-a-user -// -//meta:operation GET /users/{username}/settings/billing/shared-storage -func (s *BillingService) GetStorageBillingUser(ctx context.Context, user string) (*StorageBilling, *Response, error) { - u := fmt.Sprintf("users/%v/settings/billing/shared-storage", user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - storageUserBilling := new(StorageBilling) - resp, err := s.client.Do(ctx, req, storageUserBilling) - if err != nil { - return nil, resp, err - } - - return storageUserBilling, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/checks.go b/vendor/github.com/google/go-github/v57/github/checks.go deleted file mode 100644 index a8618944..00000000 --- a/vendor/github.com/google/go-github/v57/github/checks.go +++ /dev/null @@ -1,475 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ChecksService provides access to the Checks API in the -// GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/checks/ -type ChecksService service - -// CheckRun represents a GitHub check run on a repository associated with a GitHub app. -type CheckRun struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` - ExternalID *string `json:"external_id,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - DetailsURL *string `json:"details_url,omitempty"` - Status *string `json:"status,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - StartedAt *Timestamp `json:"started_at,omitempty"` - CompletedAt *Timestamp `json:"completed_at,omitempty"` - Output *CheckRunOutput `json:"output,omitempty"` - Name *string `json:"name,omitempty"` - CheckSuite *CheckSuite `json:"check_suite,omitempty"` - App *App `json:"app,omitempty"` - PullRequests []*PullRequest `json:"pull_requests,omitempty"` -} - -// CheckRunOutput represents the output of a CheckRun. -type CheckRunOutput struct { - Title *string `json:"title,omitempty"` - Summary *string `json:"summary,omitempty"` - Text *string `json:"text,omitempty"` - AnnotationsCount *int `json:"annotations_count,omitempty"` - AnnotationsURL *string `json:"annotations_url,omitempty"` - Annotations []*CheckRunAnnotation `json:"annotations,omitempty"` - Images []*CheckRunImage `json:"images,omitempty"` -} - -// CheckRunAnnotation represents an annotation object for a CheckRun output. 
-type CheckRunAnnotation struct { - Path *string `json:"path,omitempty"` - StartLine *int `json:"start_line,omitempty"` - EndLine *int `json:"end_line,omitempty"` - StartColumn *int `json:"start_column,omitempty"` - EndColumn *int `json:"end_column,omitempty"` - AnnotationLevel *string `json:"annotation_level,omitempty"` - Message *string `json:"message,omitempty"` - Title *string `json:"title,omitempty"` - RawDetails *string `json:"raw_details,omitempty"` -} - -// CheckRunImage represents an image object for a CheckRun output. -type CheckRunImage struct { - Alt *string `json:"alt,omitempty"` - ImageURL *string `json:"image_url,omitempty"` - Caption *string `json:"caption,omitempty"` -} - -// CheckSuite represents a suite of check runs. -type CheckSuite struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - HeadBranch *string `json:"head_branch,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` - URL *string `json:"url,omitempty"` - BeforeSHA *string `json:"before,omitempty"` - AfterSHA *string `json:"after,omitempty"` - Status *string `json:"status,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - App *App `json:"app,omitempty"` - Repository *Repository `json:"repository,omitempty"` - PullRequests []*PullRequest `json:"pull_requests,omitempty"` - - // The following fields are only populated by Webhook events. - HeadCommit *Commit `json:"head_commit,omitempty"` -} - -func (c CheckRun) String() string { - return Stringify(c) -} - -func (c CheckSuite) String() string { - return Stringify(c) -} - -// GetCheckRun gets a check-run for a repository. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#get-a-check-run -// -//meta:operation GET /repos/{owner}/{repo}/check-runs/{check_run_id} -func (s *ChecksService) GetCheckRun(ctx context.Context, owner, repo string, checkRunID int64) (*CheckRun, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-runs/%v", owner, repo, checkRunID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - checkRun := new(CheckRun) - resp, err := s.client.Do(ctx, req, checkRun) - if err != nil { - return nil, resp, err - } - - return checkRun, resp, nil -} - -// GetCheckSuite gets a single check suite. -// -// GitHub API docs: https://docs.github.com/rest/checks/suites#get-a-check-suite -// -//meta:operation GET /repos/{owner}/{repo}/check-suites/{check_suite_id} -func (s *ChecksService) GetCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64) (*CheckSuite, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-suites/%v", owner, repo, checkSuiteID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - checkSuite := new(CheckSuite) - resp, err := s.client.Do(ctx, req, checkSuite) - if err != nil { - return nil, resp, err - } - - return checkSuite, resp, nil -} - -// CreateCheckRunOptions sets up parameters needed to create a CheckRun. -type CreateCheckRunOptions struct { - Name string `json:"name"` // The name of the check (e.g., "code-coverage"). (Required.) - HeadSHA string `json:"head_sha"` // The SHA of the commit. (Required.) - DetailsURL *string `json:"details_url,omitempty"` // The URL of the integrator's site that has the full details of the check. (Optional.) 
- ExternalID *string `json:"external_id,omitempty"` // A reference for the run on the integrator's system. (Optional.) - Status *string `json:"status,omitempty"` // The current status. Can be one of "queued", "in_progress", or "completed". Default: "queued". (Optional.) - Conclusion *string `json:"conclusion,omitempty"` // Can be one of "success", "failure", "neutral", "cancelled", "skipped", "timed_out", or "action_required". (Optional. Required if you provide a status of "completed".) - StartedAt *Timestamp `json:"started_at,omitempty"` // The time that the check run began. (Optional.) - CompletedAt *Timestamp `json:"completed_at,omitempty"` // The time the check completed. (Optional. Required if you provide conclusion.) - Output *CheckRunOutput `json:"output,omitempty"` // Provide descriptive details about the run. (Optional) - Actions []*CheckRunAction `json:"actions,omitempty"` // Possible further actions the integrator can perform, which a user may trigger. (Optional.) -} - -// CheckRunAction exposes further actions the integrator can perform, which a user may trigger. -type CheckRunAction struct { - Label string `json:"label"` // The text to be displayed on a button in the web UI. The maximum size is 20 characters. (Required.) - Description string `json:"description"` // A short explanation of what this action would do. The maximum size is 40 characters. (Required.) - Identifier string `json:"identifier"` // A reference for the action on the integrator's system. The maximum size is 20 characters. (Required.) -} - -// CreateCheckRun creates a check run for repository. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#create-a-check-run -// -//meta:operation POST /repos/{owner}/{repo}/check-runs -func (s *ChecksService) CreateCheckRun(ctx context.Context, owner, repo string, opts CreateCheckRunOptions) (*CheckRun, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-runs", owner, repo) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - checkRun := new(CheckRun) - resp, err := s.client.Do(ctx, req, checkRun) - if err != nil { - return nil, resp, err - } - - return checkRun, resp, nil -} - -// UpdateCheckRunOptions sets up parameters needed to update a CheckRun. -type UpdateCheckRunOptions struct { - Name string `json:"name"` // The name of the check (e.g., "code-coverage"). (Required.) - DetailsURL *string `json:"details_url,omitempty"` // The URL of the integrator's site that has the full details of the check. (Optional.) - ExternalID *string `json:"external_id,omitempty"` // A reference for the run on the integrator's system. (Optional.) - Status *string `json:"status,omitempty"` // The current status. Can be one of "queued", "in_progress", or "completed". Default: "queued". (Optional.) - Conclusion *string `json:"conclusion,omitempty"` // Can be one of "success", "failure", "neutral", "cancelled", "skipped", "timed_out", or "action_required". (Optional. Required if you provide a status of "completed".) - CompletedAt *Timestamp `json:"completed_at,omitempty"` // The time the check completed. (Optional. Required if you provide conclusion.) - Output *CheckRunOutput `json:"output,omitempty"` // Provide descriptive details about the run. (Optional) - Actions []*CheckRunAction `json:"actions,omitempty"` // Possible further actions the integrator can perform, which a user may trigger. (Optional.) 
-} - -// UpdateCheckRun updates a check run for a specific commit in a repository. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#update-a-check-run -// -//meta:operation PATCH /repos/{owner}/{repo}/check-runs/{check_run_id} -func (s *ChecksService) UpdateCheckRun(ctx context.Context, owner, repo string, checkRunID int64, opts UpdateCheckRunOptions) (*CheckRun, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-runs/%v", owner, repo, checkRunID) - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - checkRun := new(CheckRun) - resp, err := s.client.Do(ctx, req, checkRun) - if err != nil { - return nil, resp, err - } - - return checkRun, resp, nil -} - -// ListCheckRunAnnotations lists the annotations for a check run. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#list-check-run-annotations -// -//meta:operation GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations -func (s *ChecksService) ListCheckRunAnnotations(ctx context.Context, owner, repo string, checkRunID int64, opts *ListOptions) ([]*CheckRunAnnotation, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-runs/%v/annotations", owner, repo, checkRunID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - var checkRunAnnotations []*CheckRunAnnotation - resp, err := s.client.Do(ctx, req, &checkRunAnnotations) - if err != nil { - return nil, resp, err - } - - return checkRunAnnotations, resp, nil -} - -// ListCheckRunsOptions represents parameters to list check runs. -type ListCheckRunsOptions struct { - CheckName *string `url:"check_name,omitempty"` // Returns check runs with the specified name. - Status *string `url:"status,omitempty"` // Returns check runs with the specified status. Can be one of "queued", "in_progress", or "completed". - Filter *string `url:"filter,omitempty"` // Filters check runs by their completed_at timestamp. Can be one of "latest" (returning the most recent check runs) or "all". Default: "latest" - AppID *int64 `url:"app_id,omitempty"` // Filters check runs by GitHub App ID. - - ListOptions -} - -// ListCheckRunsResults represents the result of a check run list. -type ListCheckRunsResults struct { - Total *int `json:"total_count,omitempty"` - CheckRuns []*CheckRun `json:"check_runs,omitempty"` -} - -// ListCheckRunsForRef lists check runs for a specific ref. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#list-check-runs-for-a-git-reference -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref}/check-runs -func (s *ChecksService) ListCheckRunsForRef(ctx context.Context, owner, repo, ref string, opts *ListCheckRunsOptions) (*ListCheckRunsResults, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/check-runs", owner, repo, refURLEscape(ref)) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - var checkRunResults *ListCheckRunsResults - resp, err := s.client.Do(ctx, req, &checkRunResults) - if err != nil { - return nil, resp, err - } - - return checkRunResults, resp, nil -} - -// ListCheckRunsCheckSuite lists check runs for a check suite. 
-// -// GitHub API docs: https://docs.github.com/rest/checks/runs#list-check-runs-in-a-check-suite -// -//meta:operation GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs -func (s *ChecksService) ListCheckRunsCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64, opts *ListCheckRunsOptions) (*ListCheckRunsResults, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-suites/%v/check-runs", owner, repo, checkSuiteID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - var checkRunResults *ListCheckRunsResults - resp, err := s.client.Do(ctx, req, &checkRunResults) - if err != nil { - return nil, resp, err - } - - return checkRunResults, resp, nil -} - -// ReRequestCheckRun triggers GitHub to rerequest an existing check run. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#rerequest-a-check-run -// -//meta:operation POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest -func (s *ChecksService) ReRequestCheckRun(ctx context.Context, owner, repo string, checkRunID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-runs/%v/rerequest", owner, repo, checkRunID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - return s.client.Do(ctx, req, nil) -} - -// ListCheckSuiteOptions represents parameters to list check suites. -type ListCheckSuiteOptions struct { - CheckName *string `url:"check_name,omitempty"` // Filters checks suites by the name of the check run. - AppID *int `url:"app_id,omitempty"` // Filters check suites by GitHub App id. - - ListOptions -} - -// ListCheckSuiteResults represents the result of a check run list. -type ListCheckSuiteResults struct { - Total *int `json:"total_count,omitempty"` - CheckSuites []*CheckSuite `json:"check_suites,omitempty"` -} - -// ListCheckSuitesForRef lists check suite for a specific ref. -// -// GitHub API docs: https://docs.github.com/rest/checks/suites#list-check-suites-for-a-git-reference -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref}/check-suites -func (s *ChecksService) ListCheckSuitesForRef(ctx context.Context, owner, repo, ref string, opts *ListCheckSuiteOptions) (*ListCheckSuiteResults, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/check-suites", owner, repo, refURLEscape(ref)) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - var checkSuiteResults *ListCheckSuiteResults - resp, err := s.client.Do(ctx, req, &checkSuiteResults) - if err != nil { - return nil, resp, err - } - - return checkSuiteResults, resp, nil -} - -// AutoTriggerCheck enables or disables automatic creation of CheckSuite events upon pushes to the repository. -type AutoTriggerCheck struct { - AppID *int64 `json:"app_id,omitempty"` // The id of the GitHub App. (Required.) - Setting *bool `json:"setting,omitempty"` // Set to "true" to enable automatic creation of CheckSuite events upon pushes to the repository, or "false" to disable them. Default: "true" (Required.) -} - -// CheckSuitePreferenceOptions set options for check suite preferences for a repository. 
-type CheckSuitePreferenceOptions struct { - AutoTriggerChecks []*AutoTriggerCheck `json:"auto_trigger_checks,omitempty"` // A slice of auto trigger checks that can be set for a check suite in a repository. -} - -// CheckSuitePreferenceResults represents the results of the preference set operation. -type CheckSuitePreferenceResults struct { - Preferences *PreferenceList `json:"preferences,omitempty"` - Repository *Repository `json:"repository,omitempty"` -} - -// PreferenceList represents a list of auto trigger checks for repository -type PreferenceList struct { - AutoTriggerChecks []*AutoTriggerCheck `json:"auto_trigger_checks,omitempty"` // A slice of auto trigger checks that can be set for a check suite in a repository. -} - -// SetCheckSuitePreferences changes the default automatic flow when creating check suites. -// -// GitHub API docs: https://docs.github.com/rest/checks/suites#update-repository-preferences-for-check-suites -// -//meta:operation PATCH /repos/{owner}/{repo}/check-suites/preferences -func (s *ChecksService) SetCheckSuitePreferences(ctx context.Context, owner, repo string, opts CheckSuitePreferenceOptions) (*CheckSuitePreferenceResults, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-suites/preferences", owner, repo) - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - var checkSuitePrefResults *CheckSuitePreferenceResults - resp, err := s.client.Do(ctx, req, &checkSuitePrefResults) - if err != nil { - return nil, resp, err - } - - return checkSuitePrefResults, resp, nil -} - -// CreateCheckSuiteOptions sets up parameters to manually create a check suites -type CreateCheckSuiteOptions struct { - HeadSHA string `json:"head_sha"` // The sha of the head commit. (Required.) - HeadBranch *string `json:"head_branch,omitempty"` // The name of the head branch where the code changes are implemented. -} - -// CreateCheckSuite manually creates a check suite for a repository. -// -// GitHub API docs: https://docs.github.com/rest/checks/suites#create-a-check-suite -// -//meta:operation POST /repos/{owner}/{repo}/check-suites -func (s *ChecksService) CreateCheckSuite(ctx context.Context, owner, repo string, opts CreateCheckSuiteOptions) (*CheckSuite, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-suites", owner, repo) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - checkSuite := new(CheckSuite) - resp, err := s.client.Do(ctx, req, checkSuite) - if err != nil { - return nil, resp, err - } - - return checkSuite, resp, nil -} - -// ReRequestCheckSuite triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/checks/suites#rerequest-a-check-suite -// -//meta:operation POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest -func (s *ChecksService) ReRequestCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-suites/%v/rerequest", owner, repo, checkSuiteID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - resp, err := s.client.Do(ctx, req, nil) - return resp, err -} diff --git a/vendor/github.com/google/go-github/v57/github/code-scanning.go b/vendor/github.com/google/go-github/v57/github/code-scanning.go deleted file mode 100644 index 74a7b6c9..00000000 --- a/vendor/github.com/google/go-github/v57/github/code-scanning.go +++ /dev/null @@ -1,652 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strconv" - "strings" -) - -// CodeScanningService handles communication with the code scanning related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type CodeScanningService service - -// Rule represents the complete details of GitHub Code Scanning alert type. -type Rule struct { - ID *string `json:"id,omitempty"` - Severity *string `json:"severity,omitempty"` - Description *string `json:"description,omitempty"` - Name *string `json:"name,omitempty"` - SecuritySeverityLevel *string `json:"security_severity_level,omitempty"` - FullDescription *string `json:"full_description,omitempty"` - Tags []string `json:"tags,omitempty"` - Help *string `json:"help,omitempty"` -} - -// Location represents the exact location of the GitHub Code Scanning Alert in the scanned project. -type Location struct { - Path *string `json:"path,omitempty"` - StartLine *int `json:"start_line,omitempty"` - EndLine *int `json:"end_line,omitempty"` - StartColumn *int `json:"start_column,omitempty"` - EndColumn *int `json:"end_column,omitempty"` -} - -// Message is a part of MostRecentInstance struct which provides the appropriate message when any action is performed on the analysis object. -type Message struct { - Text *string `json:"text,omitempty"` -} - -// MostRecentInstance provides details of the most recent instance of this alert for the default branch or for the specified Git reference. -type MostRecentInstance struct { - Ref *string `json:"ref,omitempty"` - AnalysisKey *string `json:"analysis_key,omitempty"` - Category *string `json:"category,omitempty"` - Environment *string `json:"environment,omitempty"` - State *string `json:"state,omitempty"` - CommitSHA *string `json:"commit_sha,omitempty"` - Message *Message `json:"message,omitempty"` - Location *Location `json:"location,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Classifications []string `json:"classifications,omitempty"` -} - -// Tool represents the tool used to generate a GitHub Code Scanning Alert. -type Tool struct { - Name *string `json:"name,omitempty"` - GUID *string `json:"guid,omitempty"` - Version *string `json:"version,omitempty"` -} - -// Alert represents an individual GitHub Code Scanning Alert on a single repository. 
-// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type Alert struct { - Number *int `json:"number,omitempty"` - Repository *Repository `json:"repository,omitempty"` - RuleID *string `json:"rule_id,omitempty"` - RuleSeverity *string `json:"rule_severity,omitempty"` - RuleDescription *string `json:"rule_description,omitempty"` - Rule *Rule `json:"rule,omitempty"` - Tool *Tool `json:"tool,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - FixedAt *Timestamp `json:"fixed_at,omitempty"` - State *string `json:"state,omitempty"` - ClosedBy *User `json:"closed_by,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - MostRecentInstance *MostRecentInstance `json:"most_recent_instance,omitempty"` - Instances []*MostRecentInstance `json:"instances,omitempty"` - DismissedBy *User `json:"dismissed_by,omitempty"` - DismissedAt *Timestamp `json:"dismissed_at,omitempty"` - DismissedReason *string `json:"dismissed_reason,omitempty"` - DismissedComment *string `json:"dismissed_comment,omitempty"` - InstancesURL *string `json:"instances_url,omitempty"` -} - -// ID returns the ID associated with an alert. It is the number at the end of the security alert's URL. -func (a *Alert) ID() int64 { - if a == nil { - return 0 - } - - s := a.GetHTMLURL() - - // Check for an ID to parse at the end of the url - if i := strings.LastIndex(s, "/"); i >= 0 { - s = s[i+1:] - } - - // Return the alert ID as a 64-bit integer. Unable to convert or out of range returns 0. - id, err := strconv.ParseInt(s, 10, 64) - if err != nil { - return 0 - } - - return id -} - -// AlertInstancesListOptions specifies optional parameters to the CodeScanningService.ListAlertInstances method. -type AlertInstancesListOptions struct { - // Return code scanning alert instances for a specific branch reference. - // The ref can be formatted as refs/heads/ or simply . To reference a pull request use refs/pull//merge - Ref string `url:"ref,omitempty"` - - ListOptions -} - -// AlertListOptions specifies optional parameters to the CodeScanningService.ListAlerts method. -type AlertListOptions struct { - // State of the code scanning alerts to list. Set to closed to list only closed code scanning alerts. Default: open - State string `url:"state,omitempty"` - - // Return code scanning alerts for a specific branch reference. - // The ref can be formatted as refs/heads/ or simply . To reference a pull request use refs/pull//merge - Ref string `url:"ref,omitempty"` - - // If specified, only code scanning alerts with this severity will be returned. Possible values are: critical, high, medium, low, warning, note, error. - Severity string `url:"severity,omitempty"` - - // The name of a code scanning tool. Only results by this tool will be listed. - ToolName string `url:"tool_name,omitempty"` - - ListCursorOptions - - // Add ListOptions so offset pagination with integer type "page" query parameter is accepted - // since ListCursorOptions accepts "page" as string only. - ListOptions -} - -// AnalysesListOptions specifies optional parameters to the CodeScanningService.ListAnalysesForRepo method. -type AnalysesListOptions struct { - // Return code scanning analyses belonging to the same SARIF upload. - SarifID *string `url:"sarif_id,omitempty"` - - // Return code scanning analyses for a specific branch reference. - // The ref can be formatted as refs/heads/ or simply . 
To reference a pull request use refs/pull//merge - Ref *string `url:"ref,omitempty"` - - ListOptions -} - -// CodeQLDatabase represents a metadata about the CodeQL database. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type CodeQLDatabase struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Language *string `json:"language,omitempty"` - Uploader *User `json:"uploader,omitempty"` - ContentType *string `json:"content_type,omitempty"` - Size *int64 `json:"size,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` -} - -// ScanningAnalysis represents an individual GitHub Code Scanning ScanningAnalysis on a single repository. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type ScanningAnalysis struct { - ID *int64 `json:"id,omitempty"` - Ref *string `json:"ref,omitempty"` - CommitSHA *string `json:"commit_sha,omitempty"` - AnalysisKey *string `json:"analysis_key,omitempty"` - Environment *string `json:"environment,omitempty"` - Error *string `json:"error,omitempty"` - Category *string `json:"category,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - ResultsCount *int `json:"results_count,omitempty"` - RulesCount *int `json:"rules_count,omitempty"` - URL *string `json:"url,omitempty"` - SarifID *string `json:"sarif_id,omitempty"` - Tool *Tool `json:"tool,omitempty"` - Deletable *bool `json:"deletable,omitempty"` - Warning *string `json:"warning,omitempty"` -} - -// SarifAnalysis specifies the results of a code scanning job. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type SarifAnalysis struct { - CommitSHA *string `json:"commit_sha,omitempty"` - Ref *string `json:"ref,omitempty"` - Sarif *string `json:"sarif,omitempty"` - CheckoutURI *string `json:"checkout_uri,omitempty"` - StartedAt *Timestamp `json:"started_at,omitempty"` - ToolName *string `json:"tool_name,omitempty"` -} - -// CodeScanningAlertState specifies the state of a code scanning alert. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type CodeScanningAlertState struct { - // State sets the state of the code scanning alert and is a required field. - // You must also provide DismissedReason when you set the state to "dismissed". - // State can be one of: "open", "dismissed". - State string `json:"state"` - // DismissedReason represents the reason for dismissing or closing the alert. - // It is required when the state is "dismissed". - // It can be one of: "false positive", "won't fix", "used in tests". - DismissedReason *string `json:"dismissed_reason,omitempty"` - // DismissedComment is associated with the dismissal of the alert. - DismissedComment *string `json:"dismissed_comment,omitempty"` -} - -// SarifID identifies a sarif analysis upload. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type SarifID struct { - ID *string `json:"id,omitempty"` - URL *string `json:"url,omitempty"` -} - -// ListAlertsForOrg lists code scanning alerts for an org. -// -// You must use an access token with the security_events scope to use this endpoint. GitHub Apps must have the security_events -// read permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#list-code-scanning-alerts-for-an-organization -// -//meta:operation GET /orgs/{org}/code-scanning/alerts -func (s *CodeScanningService) ListAlertsForOrg(ctx context.Context, org string, opts *AlertListOptions) ([]*Alert, *Response, error) { - u := fmt.Sprintf("orgs/%v/code-scanning/alerts", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*Alert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// ListAlertsForRepo lists code scanning alerts for a repository. -// -// Lists all open code scanning alerts for the default branch (usually master) and protected branches in a repository. -// You must use an access token with the security_events scope to use this endpoint. GitHub Apps must have the security_events -// read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#list-code-scanning-alerts-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/alerts -func (s *CodeScanningService) ListAlertsForRepo(ctx context.Context, owner, repo string, opts *AlertListOptions) ([]*Alert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*Alert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// GetAlert gets a single code scanning alert for a repository. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. -// -// The security alert_id is the number at the end of the security alert's URL. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#get-a-code-scanning-alert -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number} -func (s *CodeScanningService) GetAlert(ctx context.Context, owner, repo string, id int64) (*Alert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts/%v", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - a := new(Alert) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} - -// UpdateAlert updates the state of a single code scanning alert for a repository. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. -// -// The security alert_id is the number at the end of the security alert's URL. 
-// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#update-a-code-scanning-alert -// -//meta:operation PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number} -func (s *CodeScanningService) UpdateAlert(ctx context.Context, owner, repo string, id int64, stateInfo *CodeScanningAlertState) (*Alert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts/%v", owner, repo, id) - - req, err := s.client.NewRequest("PATCH", u, stateInfo) - if err != nil { - return nil, nil, err - } - - a := new(Alert) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} - -// ListAlertInstances lists instances of a code scanning alert. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#list-instances-of-a-code-scanning-alert -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances -func (s *CodeScanningService) ListAlertInstances(ctx context.Context, owner, repo string, id int64, opts *AlertInstancesListOptions) ([]*MostRecentInstance, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts/%v/instances", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alertInstances []*MostRecentInstance - resp, err := s.client.Do(ctx, req, &alertInstances) - if err != nil { - return nil, resp, err - } - - return alertInstances, resp, nil -} - -// UploadSarif uploads the result of code scanning job to GitHub. -// -// For the parameter sarif, you must first compress your SARIF file using gzip and then translate the contents of the file into a Base64 encoding string. -// You must use an access token with the security_events scope to use this endpoint. GitHub Apps must have the security_events -// write permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#upload-an-analysis-as-sarif-data -// -//meta:operation POST /repos/{owner}/{repo}/code-scanning/sarifs -func (s *CodeScanningService) UploadSarif(ctx context.Context, owner, repo string, sarif *SarifAnalysis) (*SarifID, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/sarifs", owner, repo) - - req, err := s.client.NewRequest("POST", u, sarif) - if err != nil { - return nil, nil, err - } - - sarifID := new(SarifID) - resp, err := s.client.Do(ctx, req, sarifID) - if err != nil { - return nil, resp, err - } - - return sarifID, resp, nil -} - -// SARIFUpload represents information about a SARIF upload. -type SARIFUpload struct { - // `pending` files have not yet been processed, while `complete` means results from the SARIF have been stored. - // `failed` files have either not been processed at all, or could only be partially processed. - ProcessingStatus *string `json:"processing_status,omitempty"` - // The REST API URL for getting the analyses associated with the upload. - AnalysesURL *string `json:"analyses_url,omitempty"` -} - -// GetSARIF gets information about a SARIF upload. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#get-information-about-a-sarif-upload -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id} -func (s *CodeScanningService) GetSARIF(ctx context.Context, owner, repo, sarifID string) (*SARIFUpload, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/sarifs/%v", owner, repo, sarifID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - sarifUpload := new(SARIFUpload) - resp, err := s.client.Do(ctx, req, sarifUpload) - if err != nil { - return nil, resp, err - } - - return sarifUpload, resp, nil -} - -// ListAnalysesForRepo lists code scanning analyses for a repository. -// -// Lists the details of all code scanning analyses for a repository, starting with the most recent. -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#list-code-scanning-analyses-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/analyses -func (s *CodeScanningService) ListAnalysesForRepo(ctx context.Context, owner, repo string, opts *AnalysesListOptions) ([]*ScanningAnalysis, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var analyses []*ScanningAnalysis - resp, err := s.client.Do(ctx, req, &analyses) - if err != nil { - return nil, resp, err - } - - return analyses, resp, nil -} - -// GetAnalysis gets a single code scanning analysis for a repository. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. -// -// The security analysis_id is the ID of the analysis, as returned from the ListAnalysesForRepo operation. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#get-a-code-scanning-analysis-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id} -func (s *CodeScanningService) GetAnalysis(ctx context.Context, owner, repo string, id int64) (*ScanningAnalysis, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses/%v", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - analysis := new(ScanningAnalysis) - resp, err := s.client.Do(ctx, req, analysis) - if err != nil { - return nil, resp, err - } - - return analysis, resp, nil -} - -// DeleteAnalysis represents a successful deletion of a code scanning analysis. -type DeleteAnalysis struct { - // Next deletable analysis in chain, without last analysis deletion confirmation - NextAnalysisURL *string `json:"next_analysis_url,omitempty"` - // Next deletable analysis in chain, with last analysis deletion confirmation - ConfirmDeleteURL *string `json:"confirm_delete_url,omitempty"` -} - -// DeleteAnalysis deletes a single code scanning analysis from a repository. -// -// You must use an access token with the repo scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. 
-// -// The security analysis_id is the ID of the analysis, as returned from the ListAnalysesForRepo operation. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#delete-a-code-scanning-analysis-from-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id} -func (s *CodeScanningService) DeleteAnalysis(ctx context.Context, owner, repo string, id int64) (*DeleteAnalysis, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses/%v", owner, repo, id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, nil, err - } - - deleteAnalysis := new(DeleteAnalysis) - resp, err := s.client.Do(ctx, req, deleteAnalysis) - if err != nil { - return nil, resp, err - } - - return deleteAnalysis, resp, nil -} - -// ListCodeQLDatabases lists the CodeQL databases that are available in a repository. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the contents read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#list-codeql-databases-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/codeql/databases -func (s *CodeScanningService) ListCodeQLDatabases(ctx context.Context, owner, repo string) ([]*CodeQLDatabase, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/codeql/databases", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var codeqlDatabases []*CodeQLDatabase - resp, err := s.client.Do(ctx, req, &codeqlDatabases) - if err != nil { - return nil, resp, err - } - - return codeqlDatabases, resp, nil -} - -// GetCodeQLDatabase gets a CodeQL database for a language in a repository. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the contents read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#get-a-codeql-database-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language} -func (s *CodeScanningService) GetCodeQLDatabase(ctx context.Context, owner, repo, language string) (*CodeQLDatabase, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/codeql/databases/%v", owner, repo, language) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - codeqlDatabase := new(CodeQLDatabase) - resp, err := s.client.Do(ctx, req, codeqlDatabase) - if err != nil { - return nil, resp, err - } - - return codeqlDatabase, resp, nil -} - -// DefaultSetupConfiguration represents a code scanning default setup configuration. -type DefaultSetupConfiguration struct { - State *string `json:"state,omitempty"` - Languages []string `json:"languages,omitempty"` - QuerySuite *string `json:"query_suite,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -// GetDefaultSetupConfiguration gets a code scanning default setup configuration. -// -// You must use an access token with the repo scope to use this -// endpoint with private repos or the public_repo scope for public repos. GitHub Apps must have the repo write -// permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#get-a-code-scanning-default-setup-configuration -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/default-setup -func (s *CodeScanningService) GetDefaultSetupConfiguration(ctx context.Context, owner, repo string) (*DefaultSetupConfiguration, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/code-scanning/default-setup", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - cfg := new(DefaultSetupConfiguration) - resp, err := s.client.Do(ctx, req, cfg) - if err != nil { - return nil, resp, err - } - - return cfg, resp, nil -} - -// UpdateDefaultSetupConfigurationOptions specifies parameters to the CodeScanningService.UpdateDefaultSetupConfiguration -// method. -type UpdateDefaultSetupConfigurationOptions struct { - State string `json:"state"` - QuerySuite *string `json:"query_suite,omitempty"` - Languages []string `json:"languages,omitempty"` -} - -// UpdateDefaultSetupConfigurationResponse represents a response from updating a code scanning default setup configuration. -type UpdateDefaultSetupConfigurationResponse struct { - RunID *int64 `json:"run_id,omitempty"` - RunURL *string `json:"run_url,omitempty"` -} - -// UpdateDefaultSetupConfiguration updates a code scanning default setup configuration. -// -// You must use an access token with the repo scope to use this -// endpoint with private repos or the public_repo scope for public repos. GitHub Apps must have the repo write -// permission to use this endpoint. -// -// This method might return an AcceptedError and a status code of 202. This is because this is the status that GitHub -// returns to signify that it has now scheduled the update of the pull request branch in a background task. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#update-a-code-scanning-default-setup-configuration -// -//meta:operation PATCH /repos/{owner}/{repo}/code-scanning/default-setup -func (s *CodeScanningService) UpdateDefaultSetupConfiguration(ctx context.Context, owner, repo string, options *UpdateDefaultSetupConfigurationOptions) (*UpdateDefaultSetupConfigurationResponse, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/code-scanning/default-setup", owner, repo) - - req, err := s.client.NewRequest("PATCH", u, options) - if err != nil { - return nil, nil, err - } - - a := new(UpdateDefaultSetupConfigurationResponse) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/codesofconduct.go b/vendor/github.com/google/go-github/v57/github/codesofconduct.go deleted file mode 100644 index 7d7f9ef8..00000000 --- a/vendor/github.com/google/go-github/v57/github/codesofconduct.go +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// CodesOfConductService provides access to code-of-conduct-related functions in the GitHub API. -type CodesOfConductService service - -// CodeOfConduct represents a code of conduct. 
-type CodeOfConduct struct { - Name *string `json:"name,omitempty"` - Key *string `json:"key,omitempty"` - URL *string `json:"url,omitempty"` - Body *string `json:"body,omitempty"` -} - -func (c *CodeOfConduct) String() string { - return Stringify(c) -} - -// List returns all codes of conduct. -// -// GitHub API docs: https://docs.github.com/rest/codes-of-conduct/codes-of-conduct#get-all-codes-of-conduct -// -//meta:operation GET /codes_of_conduct -func (s *CodesOfConductService) List(ctx context.Context) ([]*CodeOfConduct, *Response, error) { - req, err := s.client.NewRequest("GET", "codes_of_conduct", nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeCodesOfConductPreview) - - var cs []*CodeOfConduct - resp, err := s.client.Do(ctx, req, &cs) - if err != nil { - return nil, resp, err - } - - return cs, resp, nil -} - -// ListCodesOfConduct returns all codes of conduct. -// -// Deprecated: Use CodesOfConductService.List instead -func (c *Client) ListCodesOfConduct(ctx context.Context) ([]*CodeOfConduct, *Response, error) { - return c.CodesOfConduct.List(ctx) -} - -// Get returns an individual code of conduct. -// -// GitHub API docs: https://docs.github.com/rest/codes-of-conduct/codes-of-conduct#get-a-code-of-conduct -// -//meta:operation GET /codes_of_conduct/{key} -func (s *CodesOfConductService) Get(ctx context.Context, key string) (*CodeOfConduct, *Response, error) { - u := fmt.Sprintf("codes_of_conduct/%s", key) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeCodesOfConductPreview) - - coc := new(CodeOfConduct) - resp, err := s.client.Do(ctx, req, coc) - if err != nil { - return nil, resp, err - } - - return coc, resp, nil -} - -// GetCodeOfConduct returns an individual code of conduct. -// -// Deprecated: Use CodesOfConductService.Get instead -func (c *Client) GetCodeOfConduct(ctx context.Context, key string) (*CodeOfConduct, *Response, error) { - return c.CodesOfConduct.Get(ctx, key) -} diff --git a/vendor/github.com/google/go-github/v57/github/codespaces.go b/vendor/github.com/google/go-github/v57/github/codespaces.go deleted file mode 100644 index 60837050..00000000 --- a/vendor/github.com/google/go-github/v57/github/codespaces.go +++ /dev/null @@ -1,266 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// CodespacesService handles communication with the Codespaces related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/ -type CodespacesService service - -// Codespace represents a codespace. 
-// -// GitHub API docs: https://docs.github.com/rest/codespaces -type Codespace struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - DisplayName *string `json:"display_name,omitempty"` - EnvironmentID *string `json:"environment_id,omitempty"` - Owner *User `json:"owner,omitempty"` - BillableOwner *User `json:"billable_owner,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Machine *CodespacesMachine `json:"machine,omitempty"` - DevcontainerPath *string `json:"devcontainer_path,omitempty"` - Prebuild *bool `json:"prebuild,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - LastUsedAt *Timestamp `json:"last_used_at,omitempty"` - State *string `json:"state,omitempty"` - URL *string `json:"url,omitempty"` - GitStatus *CodespacesGitStatus `json:"git_status,omitempty"` - Location *string `json:"location,omitempty"` - IdleTimeoutMinutes *int `json:"idle_timeout_minutes,omitempty"` - WebURL *string `json:"web_url,omitempty"` - MachinesURL *string `json:"machines_url,omitempty"` - StartURL *string `json:"start_url,omitempty"` - StopURL *string `json:"stop_url,omitempty"` - PullsURL *string `json:"pulls_url,omitempty"` - RecentFolders []string `json:"recent_folders,omitempty"` - RuntimeConstraints *CodespacesRuntimeConstraints `json:"runtime_constraints,omitempty"` - PendingOperation *bool `json:"pending_operation,omitempty"` - PendingOperationDisabledReason *string `json:"pending_operation_disabled_reason,omitempty"` - IdleTimeoutNotice *string `json:"idle_timeout_notice,omitempty"` - RetentionPeriodMinutes *int `json:"retention_period_minutes,omitempty"` - RetentionExpiresAt *Timestamp `json:"retention_expires_at,omitempty"` - LastKnownStopNotice *string `json:"last_known_stop_notice,omitempty"` -} - -// CodespacesGitStatus represents the git status of a codespace. -type CodespacesGitStatus struct { - Ahead *int `json:"ahead,omitempty"` - Behind *int `json:"behind,omitempty"` - HasUnpushedChanges *bool `json:"has_unpushed_changes,omitempty"` - HasUncommittedChanges *bool `json:"has_uncommitted_changes,omitempty"` - Ref *string `json:"ref,omitempty"` -} - -// CodespacesMachine represents the machine type of a codespace. -type CodespacesMachine struct { - Name *string `json:"name,omitempty"` - DisplayName *string `json:"display_name,omitempty"` - OperatingSystem *string `json:"operating_system,omitempty"` - StorageInBytes *int64 `json:"storage_in_bytes,omitempty"` - MemoryInBytes *int64 `json:"memory_in_bytes,omitempty"` - CPUs *int `json:"cpus,omitempty"` - PrebuildAvailability *string `json:"prebuild_availability,omitempty"` -} - -// CodespacesRuntimeConstraints represents the runtime constraints of a codespace. -type CodespacesRuntimeConstraints struct { - AllowedPortPrivacySettings []string `json:"allowed_port_privacy_settings,omitempty"` -} - -// ListCodespaces represents the response from the list codespaces endpoints. -type ListCodespaces struct { - TotalCount *int `json:"total_count,omitempty"` - Codespaces []*Codespace `json:"codespaces"` -} - -// ListInRepo lists codespaces for a user in a repository. -// -// Lists the codespaces associated with a specified repository and the authenticated user. -// You must authenticate using an access token with the codespace scope to use this endpoint. -// GitHub Apps must have read access to the codespaces repository permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#list-codespaces-in-a-repository-for-the-authenticated-user -// -//meta:operation GET /repos/{owner}/{repo}/codespaces -func (s *CodespacesService) ListInRepo(ctx context.Context, owner, repo string, opts *ListOptions) (*ListCodespaces, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var codespaces *ListCodespaces - resp, err := s.client.Do(ctx, req, &codespaces) - if err != nil { - return nil, resp, err - } - - return codespaces, resp, nil -} - -// ListCodespacesOptions represents the options for listing codespaces for a user. -type ListCodespacesOptions struct { - ListOptions - RepositoryID int64 `url:"repository_id,omitempty"` -} - -// List lists codespaces for an authenticated user. -// -// Lists the authenticated user's codespaces. -// You must authenticate using an access token with the codespace scope to use this endpoint. -// GitHub Apps must have read access to the codespaces repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#list-codespaces-for-the-authenticated-user -// -//meta:operation GET /user/codespaces -func (s *CodespacesService) List(ctx context.Context, opts *ListCodespacesOptions) (*ListCodespaces, *Response, error) { - u := "user/codespaces" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var codespaces *ListCodespaces - resp, err := s.client.Do(ctx, req, &codespaces) - if err != nil { - return nil, resp, err - } - - return codespaces, resp, nil -} - -// CreateCodespaceOptions represents options for the creation of a codespace in a repository. -type CreateCodespaceOptions struct { - Ref *string `json:"ref,omitempty"` - // Geo represents the geographic area for this codespace. - // If not specified, the value is assigned by IP. - // This property replaces location, which is being deprecated. - // Geo can be one of: `EuropeWest`, `SoutheastAsia`, `UsEast`, `UsWest`. - Geo *string `json:"geo,omitempty"` - ClientIP *string `json:"client_ip,omitempty"` - Machine *string `json:"machine,omitempty"` - DevcontainerPath *string `json:"devcontainer_path,omitempty"` - MultiRepoPermissionsOptOut *bool `json:"multi_repo_permissions_opt_out,omitempty"` - WorkingDirectory *string `json:"working_directory,omitempty"` - IdleTimeoutMinutes *int `json:"idle_timeout_minutes,omitempty"` - DisplayName *string `json:"display_name,omitempty"` - // RetentionPeriodMinutes represents the duration in minutes after codespace has gone idle in which it will be deleted. - // Must be integer minutes between 0 and 43200 (30 days). - RetentionPeriodMinutes *int `json:"retention_period_minutes,omitempty"` -} - -// CreateInRepo creates a codespace in a repository. -// -// Creates a codespace owned by the authenticated user in the specified repository. -// You must authenticate using an access token with the codespace scope to use this endpoint. -// GitHub Apps must have write access to the codespaces repository permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#create-a-codespace-in-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/codespaces -func (s *CodespacesService) CreateInRepo(ctx context.Context, owner, repo string, request *CreateCodespaceOptions) (*Codespace, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces", owner, repo) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - var codespace *Codespace - resp, err := s.client.Do(ctx, req, &codespace) - if err != nil { - return nil, resp, err - } - - return codespace, resp, nil -} - -// Start starts a codespace. -// -// You must authenticate using an access token with the codespace scope to use this endpoint. -// GitHub Apps must have write access to the codespaces_lifecycle_admin repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#start-a-codespace-for-the-authenticated-user -// -//meta:operation POST /user/codespaces/{codespace_name}/start -func (s *CodespacesService) Start(ctx context.Context, codespaceName string) (*Codespace, *Response, error) { - u := fmt.Sprintf("user/codespaces/%v/start", codespaceName) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - var codespace *Codespace - resp, err := s.client.Do(ctx, req, &codespace) - if err != nil { - return nil, resp, err - } - - return codespace, resp, nil -} - -// Stop stops a codespace. -// -// You must authenticate using an access token with the codespace scope to use this endpoint. -// GitHub Apps must have write access to the codespaces_lifecycle_admin repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#stop-a-codespace-for-the-authenticated-user -// -//meta:operation POST /user/codespaces/{codespace_name}/stop -func (s *CodespacesService) Stop(ctx context.Context, codespaceName string) (*Codespace, *Response, error) { - u := fmt.Sprintf("user/codespaces/%v/stop", codespaceName) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - var codespace *Codespace - resp, err := s.client.Do(ctx, req, &codespace) - if err != nil { - return nil, resp, err - } - - return codespace, resp, nil -} - -// Delete deletes a codespace. -// -// You must authenticate using an access token with the codespace scope to use this endpoint. -// GitHub Apps must have write access to the codespaces repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#delete-a-codespace-for-the-authenticated-user -// -//meta:operation DELETE /user/codespaces/{codespace_name} -func (s *CodespacesService) Delete(ctx context.Context, codespaceName string) (*Response, error) { - u := fmt.Sprintf("user/codespaces/%v", codespaceName) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/codespaces_secrets.go b/vendor/github.com/google/go-github/v57/github/codespaces_secrets.go deleted file mode 100644 index 438c27f8..00000000 --- a/vendor/github.com/google/go-github/v57/github/codespaces_secrets.go +++ /dev/null @@ -1,451 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// ListUserSecrets list all secrets available for a users codespace -// -// Lists all secrets available for a user's Codespaces without revealing their encrypted values -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint -// GitHub Apps must have read access to the codespaces_user_secrets user permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#list-secrets-for-the-authenticated-user -// -//meta:operation GET /user/codespaces/secrets -func (s *CodespacesService) ListUserSecrets(ctx context.Context, opts *ListOptions) (*Secrets, *Response, error) { - u, err := addOptions("user/codespaces/secrets", opts) - if err != nil { - return nil, nil, err - } - return s.listSecrets(ctx, u) -} - -// ListOrgSecrets list all secrets available to an org -// -// Lists all Codespaces secrets available at the organization-level without revealing their encrypted values. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#list-organization-secrets -// -//meta:operation GET /orgs/{org}/codespaces/secrets -func (s *CodespacesService) ListOrgSecrets(ctx context.Context, org string, opts *ListOptions) (*Secrets, *Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - return s.listSecrets(ctx, u) -} - -// ListRepoSecrets list all secrets available to a repo -// -// Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/repository-secrets#list-repository-secrets -// -//meta:operation GET /repos/{owner}/{repo}/codespaces/secrets -func (s *CodespacesService) ListRepoSecrets(ctx context.Context, owner, repo string, opts *ListOptions) (*Secrets, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces/secrets", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - return s.listSecrets(ctx, u) -} - -func (s *CodespacesService) listSecrets(ctx context.Context, url string) (*Secrets, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var secrets *Secrets - resp, err := s.client.Do(ctx, req, &secrets) - if err != nil { - return nil, resp, err - } - - return secrets, resp, nil -} - -// GetUserPublicKey gets the users public key for encrypting codespace secrets -// -// Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. -// GitHub Apps must have read access to the codespaces_user_secrets user permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#get-public-key-for-the-authenticated-user -// -//meta:operation GET /user/codespaces/secrets/public-key -func (s *CodespacesService) GetUserPublicKey(ctx context.Context) (*PublicKey, *Response, error) { - return s.getPublicKey(ctx, "user/codespaces/secrets/public-key") -} - -// GetOrgPublicKey gets the org public key for encrypting codespace secrets -// -// Gets a public key for an organization, which is required in order to encrypt secrets. You need to encrypt the value of a secret before you can create or update secrets. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#get-an-organization-public-key -// -//meta:operation GET /orgs/{org}/codespaces/secrets/public-key -func (s *CodespacesService) GetOrgPublicKey(ctx context.Context, org string) (*PublicKey, *Response, error) { - return s.getPublicKey(ctx, fmt.Sprintf("orgs/%v/codespaces/secrets/public-key", org)) -} - -// GetRepoPublicKey gets the repo public key for encrypting codespace secrets -// -// Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the repo scope. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/repository-secrets#get-a-repository-public-key -// -//meta:operation GET /repos/{owner}/{repo}/codespaces/secrets/public-key -func (s *CodespacesService) GetRepoPublicKey(ctx context.Context, owner, repo string) (*PublicKey, *Response, error) { - return s.getPublicKey(ctx, fmt.Sprintf("repos/%v/%v/codespaces/secrets/public-key", owner, repo)) -} - -func (s *CodespacesService) getPublicKey(ctx context.Context, url string) (*PublicKey, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var publicKey *PublicKey - resp, err := s.client.Do(ctx, req, &publicKey) - if err != nil { - return nil, resp, err - } - - return publicKey, resp, nil -} - -// GetUserSecret gets a users codespace secret -// -// Gets a secret available to a user's codespaces without revealing its encrypted value. -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. -// GitHub Apps must have read access to the codespaces_user_secrets user permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#get-a-secret-for-the-authenticated-user -// -//meta:operation GET /user/codespaces/secrets/{secret_name} -func (s *CodespacesService) GetUserSecret(ctx context.Context, name string) (*Secret, *Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v", name) - return s.getSecret(ctx, u) -} - -// GetOrgSecret gets an org codespace secret -// -// Gets an organization secret without revealing its encrypted value. You must authenticate using an access token with the admin:org scope to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#get-an-organization-secret -// -//meta:operation GET /orgs/{org}/codespaces/secrets/{secret_name} -func (s *CodespacesService) GetOrgSecret(ctx context.Context, org, name string) (*Secret, *Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v", org, name) - return s.getSecret(ctx, u) -} - -// GetRepoSecret gets a repo codespace secret -// -// Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/repository-secrets#get-a-repository-secret -// -//meta:operation GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name} -func (s *CodespacesService) GetRepoSecret(ctx context.Context, owner, repo, name string) (*Secret, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces/secrets/%v", owner, repo, name) - return s.getSecret(ctx, u) -} - -func (s *CodespacesService) getSecret(ctx context.Context, url string) (*Secret, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var secret *Secret - resp, err := s.client.Do(ctx, req, &secret) - if err != nil { - return nil, resp, err - } - - return secret, resp, nil -} - -// CreateOrUpdateUserSecret creates or updates a users codespace secret -// -// Creates or updates a secret for a user's codespace with an encrypted value. Encrypt your secret using LibSodium. -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must also have Codespaces access to use this endpoint. -// GitHub Apps must have write access to the codespaces_user_secrets user permission and codespaces_secrets repository permission on all referenced repositories to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#create-or-update-a-secret-for-the-authenticated-user -// -//meta:operation PUT /user/codespaces/secrets/{secret_name} -func (s *CodespacesService) CreateOrUpdateUserSecret(ctx context.Context, eSecret *EncryptedSecret) (*Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v", eSecret.Name) - return s.createOrUpdateSecret(ctx, u, eSecret) -} - -// CreateOrUpdateOrgSecret creates or updates an orgs codespace secret -// -// Creates or updates an organization secret with an encrypted value. Encrypt your secret using LibSodium. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#create-or-update-an-organization-secret -// -//meta:operation PUT /orgs/{org}/codespaces/secrets/{secret_name} -func (s *CodespacesService) CreateOrUpdateOrgSecret(ctx context.Context, org string, eSecret *EncryptedSecret) (*Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v", org, eSecret.Name) - return s.createOrUpdateSecret(ctx, u, eSecret) -} - -// CreateOrUpdateRepoSecret creates or updates a repos codespace secret -// -// Creates or updates a repository secret with an encrypted value. Encrypt your secret using LibSodium. You must authenticate using an access token with the repo scope to use this endpoint. 
GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/repository-secrets#create-or-update-a-repository-secret -// -//meta:operation PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name} -func (s *CodespacesService) CreateOrUpdateRepoSecret(ctx context.Context, owner, repo string, eSecret *EncryptedSecret) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces/secrets/%v", owner, repo, eSecret.Name) - return s.createOrUpdateSecret(ctx, u, eSecret) -} - -func (s *CodespacesService) createOrUpdateSecret(ctx context.Context, url string, eSecret *EncryptedSecret) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, eSecret) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// DeleteUserSecret deletes a users codespace secret -// -// Deletes a secret from a user's codespaces using the secret name. Deleting the secret will remove access from all codespaces that were allowed to access the secret. -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. -// GitHub Apps must have write access to the codespaces_user_secrets user permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#delete-a-secret-for-the-authenticated-user -// -//meta:operation DELETE /user/codespaces/secrets/{secret_name} -func (s *CodespacesService) DeleteUserSecret(ctx context.Context, name string) (*Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v", name) - return s.deleteSecret(ctx, u) -} - -// DeleteOrgSecret deletes an orgs codespace secret -// -// Deletes an organization secret using the secret name. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#delete-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/codespaces/secrets/{secret_name} -func (s *CodespacesService) DeleteOrgSecret(ctx context.Context, org, name string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v", org, name) - return s.deleteSecret(ctx, u) -} - -// DeleteRepoSecret deletes a repos codespace secret -// -// Deletes a secret in a repository using the secret name. You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/codespaces/repository-secrets#delete-a-repository-secret -// -//meta:operation DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name} -func (s *CodespacesService) DeleteRepoSecret(ctx context.Context, owner, repo, name string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces/secrets/%v", owner, repo, name) - return s.deleteSecret(ctx, u) -} - -func (s *CodespacesService) deleteSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// ListSelectedReposForUserSecret lists the repositories that have been granted the ability to use a user's codespace secret. -// -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. -// GitHub Apps must have read access to the codespaces_user_secrets user permission and write access to the codespaces_secrets repository permission on all referenced repositories to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#list-selected-repositories-for-a-user-secret -// -//meta:operation GET /user/codespaces/secrets/{secret_name}/repositories -func (s *CodespacesService) ListSelectedReposForUserSecret(ctx context.Context, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v/repositories", name) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - return s.listSelectedReposForSecret(ctx, u) -} - -// ListSelectedReposForOrgSecret lists the repositories that have been granted the ability to use an organization's codespace secret. -// -// Lists all repositories that have been selected when the visibility for repository access to a secret is set to selected. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#list-selected-repositories-for-an-organization-secret -// -//meta:operation GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories -func (s *CodespacesService) ListSelectedReposForOrgSecret(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories", org, name) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - return s.listSelectedReposForSecret(ctx, u) -} - -func (s *CodespacesService) listSelectedReposForSecret(ctx context.Context, url string) (*SelectedReposList, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var repositories *SelectedReposList - resp, err := s.client.Do(ctx, req, &repositories) - if err != nil { - return nil, resp, err - } - - return repositories, resp, nil -} - -// SetSelectedReposForUserSecret sets the repositories that have been granted the ability to use a user's codespace secret. -// -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. 
-// GitHub Apps must have write access to the codespaces_user_secrets user permission and write access to the codespaces_secrets repository permission on all referenced repositories to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#set-selected-repositories-for-a-user-secret -// -//meta:operation PUT /user/codespaces/secrets/{secret_name}/repositories -func (s *CodespacesService) SetSelectedReposForUserSecret(ctx context.Context, name string, ids SelectedRepoIDs) (*Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v/repositories", name) - return s.setSelectedRepoForSecret(ctx, u, ids) -} - -// SetSelectedReposForOrgSecret sets the repositories that have been granted the ability to use a user's codespace secret. -// -// Replaces all repositories for an organization secret when the visibility for repository access is set to selected. The visibility is set when you Create or update an organization secret. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#set-selected-repositories-for-an-organization-secret -// -//meta:operation PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories -func (s *CodespacesService) SetSelectedReposForOrgSecret(ctx context.Context, org, name string, ids SelectedRepoIDs) (*Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories", org, name) - return s.setSelectedRepoForSecret(ctx, u, ids) -} - -func (s *CodespacesService) setSelectedRepoForSecret(ctx context.Context, url string, ids SelectedRepoIDs) (*Response, error) { - type repoIDs struct { - SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` - } - - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// AddSelectedRepoToUserSecret adds a repository to the list of repositories that have been granted the ability to use a user's codespace secret. -// -// Adds a repository to the selected repositories for a user's codespace secret. You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. GitHub Apps must have write access to the codespaces_user_secrets user permission and write access to the codespaces_secrets repository permission on the referenced repository to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#add-a-selected-repository-to-a-user-secret -// -//meta:operation PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id} -func (s *CodespacesService) AddSelectedRepoToUserSecret(ctx context.Context, name string, repo *Repository) (*Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v/repositories/%v", name, *repo.ID) - return s.addSelectedRepoToSecret(ctx, u) -} - -// AddSelectedRepoToOrgSecret adds a repository to the list of repositories that have been granted the ability to use an organization's codespace secret. -// -// Adds a repository to an organization secret when the visibility for repository access is set to selected. The visibility is set when you Create or update an organization secret. You must authenticate using an access token with the admin:org scope to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#add-selected-repository-to-an-organization-secret -// -//meta:operation PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id} -func (s *CodespacesService) AddSelectedRepoToOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories/%v", org, name, *repo.ID) - return s.addSelectedRepoToSecret(ctx, u) -} - -func (s *CodespacesService) addSelectedRepoToSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// RemoveSelectedRepoFromUserSecret removes a repository from the list of repositories that have been granted the ability to use a user's codespace secret. -// -// Removes a repository from the selected repositories for a user's codespace secret. You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. GitHub Apps must have write access to the codespaces_user_secrets user permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#remove-a-selected-repository-from-a-user-secret -// -//meta:operation DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id} -func (s *CodespacesService) RemoveSelectedRepoFromUserSecret(ctx context.Context, name string, repo *Repository) (*Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v/repositories/%v", name, *repo.ID) - return s.removeSelectedRepoFromSecret(ctx, u) -} - -// RemoveSelectedRepoFromOrgSecret removes a repository from the list of repositories that have been granted the ability to use an organization's codespace secret. -// -// Removes a repository from an organization secret when the visibility for repository access is set to selected. The visibility is set when you Create or update an organization secret. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#remove-selected-repository-from-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id} -func (s *CodespacesService) RemoveSelectedRepoFromOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories/%v", org, name, *repo.ID) - return s.removeSelectedRepoFromSecret(ctx, u) -} - -func (s *CodespacesService) removeSelectedRepoFromSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/dependabot.go b/vendor/github.com/google/go-github/v57/github/dependabot.go deleted file mode 100644 index 2a11a9c9..00000000 --- a/vendor/github.com/google/go-github/v57/github/dependabot.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// DependabotService handles communication with the Dependabot related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/ -type DependabotService service diff --git a/vendor/github.com/google/go-github/v57/github/dependabot_alerts.go b/vendor/github.com/google/go-github/v57/github/dependabot_alerts.go deleted file mode 100644 index f1ed126c..00000000 --- a/vendor/github.com/google/go-github/v57/github/dependabot_alerts.go +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Dependency reprensents the vulnerable dependency. -type Dependency struct { - Package *VulnerabilityPackage `json:"package,omitempty"` - ManifestPath *string `json:"manifest_path,omitempty"` - Scope *string `json:"scope,omitempty"` -} - -// AdvisoryCVSS represents the advisory pertaining to the Common Vulnerability Scoring System. -type AdvisoryCVSS struct { - Score *float64 `json:"score,omitempty"` - VectorString *string `json:"vector_string,omitempty"` -} - -// AdvisoryCWEs reprensent the advisory pertaining to Common Weakness Enumeration. -type AdvisoryCWEs struct { - CWEID *string `json:"cwe_id,omitempty"` - Name *string `json:"name,omitempty"` -} - -// DependabotSecurityAdvisory represents the GitHub Security Advisory. -type DependabotSecurityAdvisory struct { - GHSAID *string `json:"ghsa_id,omitempty"` - CVEID *string `json:"cve_id,omitempty"` - Summary *string `json:"summary,omitempty"` - Description *string `json:"description,omitempty"` - Vulnerabilities []*AdvisoryVulnerability `json:"vulnerabilities,omitempty"` - Severity *string `json:"severity,omitempty"` - CVSS *AdvisoryCVSS `json:"cvss,omitempty"` - CWEs []*AdvisoryCWEs `json:"cwes,omitempty"` - Identifiers []*AdvisoryIdentifier `json:"identifiers,omitempty"` - References []*AdvisoryReference `json:"references,omitempty"` - PublishedAt *Timestamp `json:"published_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - WithdrawnAt *Timestamp `json:"withdrawn_at,omitempty"` -} - -// DependabotAlert represents a Dependabot alert. -type DependabotAlert struct { - Number *int `json:"number,omitempty"` - State *string `json:"state,omitempty"` - Dependency *Dependency `json:"dependency,omitempty"` - SecurityAdvisory *DependabotSecurityAdvisory `json:"security_advisory,omitempty"` - SecurityVulnerability *AdvisoryVulnerability `json:"security_vulnerability,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - DismissedAt *Timestamp `json:"dismissed_at,omitempty"` - DismissedBy *User `json:"dismissed_by,omitempty"` - DismissedReason *string `json:"dismissed_reason,omitempty"` - DismissedComment *string `json:"dismissed_comment,omitempty"` - FixedAt *Timestamp `json:"fixed_at,omitempty"` - AutoDismissedAt *Timestamp `json:"auto_dismissed_at,omitempty"` - // The repository is always empty for events - Repository *Repository `json:"repository,omitempty"` -} - -// DependabotAlertState represents the state of a Dependabot alert to update. -type DependabotAlertState struct { - // The state of the Dependabot alert. 
A dismissed_reason must be provided when setting the state to dismissed. - State string `json:"state"` - // Required when state is dismissed. A reason for dismissing the alert. - // Can be one of: fix_started, inaccurate, no_bandwidth, not_used, tolerable_risk - DismissedReason *string `json:"dismissed_reason,omitempty"` - // An optional comment associated with dismissing the alert. - DismissedComment *string `json:"dismissed_comment,omitempty"` -} - -// ListAlertsOptions specifies the optional parameters to the DependabotService.ListRepoAlerts -// and DependabotService.ListOrgAlerts methods. -type ListAlertsOptions struct { - State *string `url:"state,omitempty"` - Severity *string `url:"severity,omitempty"` - Ecosystem *string `url:"ecosystem,omitempty"` - Package *string `url:"package,omitempty"` - Scope *string `url:"scope,omitempty"` - Sort *string `url:"sort,omitempty"` - Direction *string `url:"direction,omitempty"` - - ListOptions - ListCursorOptions -} - -func (s *DependabotService) listAlerts(ctx context.Context, url string, opts *ListAlertsOptions) ([]*DependabotAlert, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*DependabotAlert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// ListRepoAlerts lists all Dependabot alerts of a repository. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/alerts#list-dependabot-alerts-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/dependabot/alerts -func (s *DependabotService) ListRepoAlerts(ctx context.Context, owner, repo string, opts *ListAlertsOptions) ([]*DependabotAlert, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/alerts", owner, repo) - return s.listAlerts(ctx, url, opts) -} - -// ListOrgAlerts lists all Dependabot alerts of an organization. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/alerts#list-dependabot-alerts-for-an-organization -// -//meta:operation GET /orgs/{org}/dependabot/alerts -func (s *DependabotService) ListOrgAlerts(ctx context.Context, org string, opts *ListAlertsOptions) ([]*DependabotAlert, *Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/alerts", org) - return s.listAlerts(ctx, url, opts) -} - -// GetRepoAlert gets a single repository Dependabot alert. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/alerts#get-a-dependabot-alert -// -//meta:operation GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number} -func (s *DependabotService) GetRepoAlert(ctx context.Context, owner, repo string, number int) (*DependabotAlert, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/alerts/%v", owner, repo, number) - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - alert := new(DependabotAlert) - resp, err := s.client.Do(ctx, req, alert) - if err != nil { - return nil, resp, err - } - - return alert, resp, nil -} - -// UpdateAlert updates a Dependabot alert. 
-// -// GitHub API docs: https://docs.github.com/rest/dependabot/alerts#update-a-dependabot-alert -// -//meta:operation PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number} -func (s *DependabotService) UpdateAlert(ctx context.Context, owner, repo string, number int, stateInfo *DependabotAlertState) (*DependabotAlert, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/alerts/%v", owner, repo, number) - req, err := s.client.NewRequest("PATCH", url, stateInfo) - if err != nil { - return nil, nil, err - } - - alert := new(DependabotAlert) - resp, err := s.client.Do(ctx, req, alert) - if err != nil { - return nil, resp, err - } - - return alert, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/dependabot_secrets.go b/vendor/github.com/google/go-github/v57/github/dependabot_secrets.go deleted file mode 100644 index e85c805a..00000000 --- a/vendor/github.com/google/go-github/v57/github/dependabot_secrets.go +++ /dev/null @@ -1,289 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -func (s *DependabotService) getPublicKey(ctx context.Context, url string) (*PublicKey, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - pubKey := new(PublicKey) - resp, err := s.client.Do(ctx, req, pubKey) - if err != nil { - return nil, resp, err - } - - return pubKey, resp, nil -} - -// GetRepoPublicKey gets a public key that should be used for Dependabot secret encryption. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#get-a-repository-public-key -// -//meta:operation GET /repos/{owner}/{repo}/dependabot/secrets/public-key -func (s *DependabotService) GetRepoPublicKey(ctx context.Context, owner, repo string) (*PublicKey, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/public-key", owner, repo) - return s.getPublicKey(ctx, url) -} - -// GetOrgPublicKey gets a public key that should be used for Dependabot secret encryption. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#get-an-organization-public-key -// -//meta:operation GET /orgs/{org}/dependabot/secrets/public-key -func (s *DependabotService) GetOrgPublicKey(ctx context.Context, org string) (*PublicKey, *Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/public-key", org) - return s.getPublicKey(ctx, url) -} - -func (s *DependabotService) listSecrets(ctx context.Context, url string, opts *ListOptions) (*Secrets, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - secrets := new(Secrets) - resp, err := s.client.Do(ctx, req, &secrets) - if err != nil { - return nil, resp, err - } - - return secrets, resp, nil -} - -// ListRepoSecrets lists all Dependabot secrets available in a repository -// without revealing their encrypted values. 
-// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#list-repository-secrets -// -//meta:operation GET /repos/{owner}/{repo}/dependabot/secrets -func (s *DependabotService) ListRepoSecrets(ctx context.Context, owner, repo string, opts *ListOptions) (*Secrets, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/secrets", owner, repo) - return s.listSecrets(ctx, url, opts) -} - -// ListOrgSecrets lists all Dependabot secrets available in an organization -// without revealing their encrypted values. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#list-organization-secrets -// -//meta:operation GET /orgs/{org}/dependabot/secrets -func (s *DependabotService) ListOrgSecrets(ctx context.Context, org string, opts *ListOptions) (*Secrets, *Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets", org) - return s.listSecrets(ctx, url, opts) -} - -func (s *DependabotService) getSecret(ctx context.Context, url string) (*Secret, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - secret := new(Secret) - resp, err := s.client.Do(ctx, req, secret) - if err != nil { - return nil, resp, err - } - - return secret, resp, nil -} - -// GetRepoSecret gets a single repository Dependabot secret without revealing its encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#get-a-repository-secret -// -//meta:operation GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name} -func (s *DependabotService) GetRepoSecret(ctx context.Context, owner, repo, name string) (*Secret, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/%v", owner, repo, name) - return s.getSecret(ctx, url) -} - -// GetOrgSecret gets a single organization Dependabot secret without revealing its encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#get-an-organization-secret -// -//meta:operation GET /orgs/{org}/dependabot/secrets/{secret_name} -func (s *DependabotService) GetOrgSecret(ctx context.Context, org, name string) (*Secret, *Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v", org, name) - return s.getSecret(ctx, url) -} - -// DependabotEncryptedSecret represents a secret that is encrypted using a public key for Dependabot. -// -// The value of EncryptedValue must be your secret, encrypted with -// LibSodium (see documentation here: https://libsodium.gitbook.io/doc/bindings_for_other_languages) -// using the public key retrieved using the GetPublicKey method. -type DependabotEncryptedSecret struct { - Name string `json:"-"` - KeyID string `json:"key_id"` - EncryptedValue string `json:"encrypted_value"` - Visibility string `json:"visibility,omitempty"` - SelectedRepositoryIDs DependabotSecretsSelectedRepoIDs `json:"selected_repository_ids,omitempty"` -} - -func (s *DependabotService) putSecret(ctx context.Context, url string, eSecret *DependabotEncryptedSecret) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, eSecret) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CreateOrUpdateRepoSecret creates or updates a repository Dependabot secret with an encrypted value. 
-// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#create-or-update-a-repository-secret -// -//meta:operation PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name} -func (s *DependabotService) CreateOrUpdateRepoSecret(ctx context.Context, owner, repo string, eSecret *DependabotEncryptedSecret) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/%v", owner, repo, eSecret.Name) - return s.putSecret(ctx, url, eSecret) -} - -// CreateOrUpdateOrgSecret creates or updates an organization Dependabot secret with an encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#create-or-update-an-organization-secret -// -//meta:operation PUT /orgs/{org}/dependabot/secrets/{secret_name} -func (s *DependabotService) CreateOrUpdateOrgSecret(ctx context.Context, org string, eSecret *DependabotEncryptedSecret) (*Response, error) { - repoIDs := make([]string, len(eSecret.SelectedRepositoryIDs)) - for i, secret := range eSecret.SelectedRepositoryIDs { - repoIDs[i] = fmt.Sprintf("%v", secret) - } - params := struct { - *DependabotEncryptedSecret - SelectedRepositoryIDs []string `json:"selected_repository_ids,omitempty"` - }{ - DependabotEncryptedSecret: eSecret, - SelectedRepositoryIDs: repoIDs, - } - - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v", org, eSecret.Name) - req, err := s.client.NewRequest("PUT", url, params) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -func (s *DependabotService) deleteSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteRepoSecret deletes a Dependabot secret in a repository using the secret name. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#delete-a-repository-secret -// -//meta:operation DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name} -func (s *DependabotService) DeleteRepoSecret(ctx context.Context, owner, repo, name string) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/%v", owner, repo, name) - return s.deleteSecret(ctx, url) -} - -// DeleteOrgSecret deletes a Dependabot secret in an organization using the secret name. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#delete-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/dependabot/secrets/{secret_name} -func (s *DependabotService) DeleteOrgSecret(ctx context.Context, org, name string) (*Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v", org, name) - return s.deleteSecret(ctx, url) -} - -// ListSelectedReposForOrgSecret lists all repositories that have access to a Dependabot secret. 
-// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#list-selected-repositories-for-an-organization-secret -// -//meta:operation GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories -func (s *DependabotService) ListSelectedReposForOrgSecret(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories", org, name) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - result := new(SelectedReposList) - resp, err := s.client.Do(ctx, req, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// DependabotSecretsSelectedRepoIDs are the repository IDs that have access to the dependabot secrets. -type DependabotSecretsSelectedRepoIDs []int64 - -// SetSelectedReposForOrgSecret sets the repositories that have access to a Dependabot secret. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#set-selected-repositories-for-an-organization-secret -// -//meta:operation PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories -func (s *DependabotService) SetSelectedReposForOrgSecret(ctx context.Context, org, name string, ids DependabotSecretsSelectedRepoIDs) (*Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories", org, name) - type repoIDs struct { - SelectedIDs DependabotSecretsSelectedRepoIDs `json:"selected_repository_ids"` - } - - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddSelectedRepoToOrgSecret adds a repository to an organization Dependabot secret. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#add-selected-repository-to-an-organization-secret -// -//meta:operation PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id} -func (s *DependabotService) AddSelectedRepoToOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories/%v", org, name, *repo.ID) - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveSelectedRepoFromOrgSecret removes a repository from an organization Dependabot secret. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#remove-selected-repository-from-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id} -func (s *DependabotService) RemoveSelectedRepoFromOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories/%v", org, name, *repo.ID) - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/dependency_graph.go b/vendor/github.com/google/go-github/v57/github/dependency_graph.go deleted file mode 100644 index 86a1fe48..00000000 --- a/vendor/github.com/google/go-github/v57/github/dependency_graph.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -type DependencyGraphService service - -// SBOM represents a software bill of materials, which describes the -// packages/libraries that a repository depends on. -type SBOM struct { - SBOM *SBOMInfo `json:"sbom,omitempty"` -} - -// CreationInfo represents when the SBOM was created and who created it. -type CreationInfo struct { - Created *Timestamp `json:"created,omitempty"` - Creators []string `json:"creators,omitempty"` -} - -// RepoDependencies represents the dependencies of a repo. -type RepoDependencies struct { - SPDXID *string `json:"SPDXID,omitempty"` - // Package name - Name *string `json:"name,omitempty"` - VersionInfo *string `json:"versionInfo,omitempty"` - DownloadLocation *string `json:"downloadLocation,omitempty"` - FilesAnalyzed *bool `json:"filesAnalyzed,omitempty"` - LicenseConcluded *string `json:"licenseConcluded,omitempty"` - LicenseDeclared *string `json:"licenseDeclared,omitempty"` -} - -// SBOMInfo represents a software bill of materials (SBOM) using SPDX. -// SPDX is an open standard for SBOMs that -// identifies and catalogs components, licenses, copyrights, security -// references, and other metadata relating to software. -type SBOMInfo struct { - SPDXID *string `json:"SPDXID,omitempty"` - SPDXVersion *string `json:"spdxVersion,omitempty"` - CreationInfo *CreationInfo `json:"creationInfo,omitempty"` - - // Repo name - Name *string `json:"name,omitempty"` - DataLicense *string `json:"dataLicense,omitempty"` - DocumentDescribes []string `json:"documentDescribes,omitempty"` - DocumentNamespace *string `json:"documentNamespace,omitempty"` - - // List of packages dependencies - Packages []*RepoDependencies `json:"packages,omitempty"` -} - -func (s SBOM) String() string { - return Stringify(s) -} - -// GetSBOM fetches the software bill of materials for a repository. -// -// GitHub API docs: https://docs.github.com/rest/dependency-graph/sboms#export-a-software-bill-of-materials-sbom-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/dependency-graph/sbom -func (s *DependencyGraphService) GetSBOM(ctx context.Context, owner, repo string) (*SBOM, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/dependency-graph/sbom", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var sbom *SBOM - resp, err := s.client.Do(ctx, req, &sbom) - if err != nil { - return nil, resp, err - } - - return sbom, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/doc.go b/vendor/github.com/google/go-github/v57/github/doc.go deleted file mode 100644 index ca00a4bd..00000000 --- a/vendor/github.com/google/go-github/v57/github/doc.go +++ /dev/null @@ -1,194 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package github provides a client for using the GitHub API. - -Usage: - - import "github.com/google/go-github/v57/github" // with go modules enabled (GO111MODULE=on or outside GOPATH) - import "github.com/google/go-github/github" // with go modules disabled - -Construct a new GitHub client, then use the various services on the client to -access different parts of the GitHub API. 
For example: - - client := github.NewClient(nil) - - // list all organizations for user "willnorris" - orgs, _, err := client.Organizations.List(ctx, "willnorris", nil) - -Some API methods have optional parameters that can be passed. For example: - - client := github.NewClient(nil) - - // list public repositories for org "github" - opt := &github.RepositoryListByOrgOptions{Type: "public"} - repos, _, err := client.Repositories.ListByOrg(ctx, "github", opt) - -The services of a client divide the API into logical chunks and correspond to -the structure of the GitHub API documentation at -https://docs.github.com/rest . - -NOTE: Using the https://godoc.org/context package, one can easily -pass cancelation signals and deadlines to various services of the client for -handling a request. In case there is no context available, then context.Background() -can be used as a starting point. - -For more sample code snippets, head over to the https://github.com/google/go-github/tree/master/example directory. - -# Authentication - -Use Client.WithAuthToken to configure your client to authenticate using an Oauth token -(for example, a personal access token). This is what is needed for a majority of use cases -aside from GitHub Apps. - - client := github.NewClient(nil).WithAuthToken("... your access token ...") - -Note that when using an authenticated Client, all calls made by the client will -include the specified OAuth token. Therefore, authenticated clients should -almost never be shared between different users. - -For API methods that require HTTP Basic Authentication, use the -BasicAuthTransport. - -GitHub Apps authentication can be provided by the -https://github.com/bradleyfalzon/ghinstallation package. -It supports both authentication as an installation, using an installation access token, -and as an app, using a JWT. - -To authenticate as an installation: - - import "github.com/bradleyfalzon/ghinstallation" - - func main() { - // Wrap the shared transport for use with the integration ID 1 authenticating with installation ID 99. - itr, err := ghinstallation.NewKeyFromFile(http.DefaultTransport, 1, 99, "2016-10-19.private-key.pem") - if err != nil { - // Handle error. - } - - // Use installation transport with client - client := github.NewClient(&http.Client{Transport: itr}) - - // Use client... - } - -To authenticate as an app, using a JWT: - - import "github.com/bradleyfalzon/ghinstallation" - - func main() { - // Wrap the shared transport for use with the application ID 1. - atr, err := ghinstallation.NewAppsTransportKeyFromFile(http.DefaultTransport, 1, "2016-10-19.private-key.pem") - if err != nil { - // Handle error. - } - - // Use app transport with client - client := github.NewClient(&http.Client{Transport: atr}) - - // Use client... - } - -# Rate Limiting - -GitHub imposes a rate limit on all API clients. Unauthenticated clients are -limited to 60 requests per hour, while authenticated clients can make up to -5,000 requests per hour. The Search API has a custom rate limit. Unauthenticated -clients are limited to 10 requests per minute, while authenticated clients -can make up to 30 requests per minute. To receive the higher rate limit when -making calls that are not issued on behalf of a user, -use UnauthenticatedRateLimitedTransport. - -The returned Response.Rate value contains the rate limit information -from the most recent API call. If a recent enough response isn't -available, you can use RateLimits to fetch the most up-to-date rate -limit data for the client. 
- -To detect an API rate limit error, you can check if its type is *github.RateLimitError. -For secondary rate limits, you can check if its type is *github.AbuseRateLimitError: - - repos, _, err := client.Repositories.List(ctx, "", nil) - if _, ok := err.(*github.RateLimitError); ok { - log.Println("hit rate limit") - } - if _, ok := err.(*github.AbuseRateLimitError); ok { - log.Println("hit secondary rate limit") - } - -Learn more about GitHub rate limiting at -https://docs.github.com/rest/rate-limit . - -# Accepted Status - -Some endpoints may return a 202 Accepted status code, meaning that the -information required is not yet ready and was scheduled to be gathered on -the GitHub side. Methods known to behave like this are documented specifying -this behavior. - -To detect this condition of error, you can check if its type is -*github.AcceptedError: - - stats, _, err := client.Repositories.ListContributorsStats(ctx, org, repo) - if _, ok := err.(*github.AcceptedError); ok { - log.Println("scheduled on GitHub side") - } - -# Conditional Requests - -The GitHub API has good support for conditional requests which will help -prevent you from burning through your rate limit, as well as help speed up your -application. go-github does not handle conditional requests directly, but is -instead designed to work with a caching http.Transport. We recommend using -https://github.com/gregjones/httpcache for that. - -Learn more about GitHub conditional requests at -https://docs.github.com/rest/overview/resources-in-the-rest-api#conditional-requests. - -# Creating and Updating Resources - -All structs for GitHub resources use pointer values for all non-repeated fields. -This allows distinguishing between unset fields and those set to a zero-value. -Helper functions have been provided to easily create these pointers for string, -bool, and int values. For example: - - // create a new private repository named "foo" - repo := &github.Repository{ - Name: github.String("foo"), - Private: github.Bool(true), - } - client.Repositories.Create(ctx, "", repo) - -Users who have worked with protocol buffers should find this pattern familiar. - -# Pagination - -All requests for resource collections (repos, pull requests, issues, etc.) -support pagination. Pagination options are described in the -github.ListOptions struct and passed to the list methods directly or as an -embedded type of a more specific list options struct (for example -github.PullRequestListOptions). Pages information is available via the -github.Response struct. - - client := github.NewClient(nil) - - opt := &github.RepositoryListByOrgOptions{ - ListOptions: github.ListOptions{PerPage: 10}, - } - // get all pages of results - var allRepos []*github.Repository - for { - repos, resp, err := client.Repositories.ListByOrg(ctx, "github", opt) - if err != nil { - return err - } - allRepos = append(allRepos, repos...) - if resp.NextPage == 0 { - break - } - opt.Page = resp.NextPage - } -*/ -package github diff --git a/vendor/github.com/google/go-github/v57/github/emojis.go b/vendor/github.com/google/go-github/v57/github/emojis.go deleted file mode 100644 index 93ef232f..00000000 --- a/vendor/github.com/google/go-github/v57/github/emojis.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" -) - -// EmojisService provides access to emoji-related functions in the GitHub API. -type EmojisService service - -// List returns the emojis available to use on GitHub. -// -// GitHub API docs: https://docs.github.com/rest/emojis/emojis#get-emojis -// -//meta:operation GET /emojis -func (s *EmojisService) List(ctx context.Context) (map[string]string, *Response, error) { - req, err := s.client.NewRequest("GET", "emojis", nil) - if err != nil { - return nil, nil, err - } - - var emoji map[string]string - resp, err := s.client.Do(ctx, req, &emoji) - if err != nil { - return nil, resp, err - } - - return emoji, resp, nil -} - -// ListEmojis returns the emojis available to use on GitHub. -// -// Deprecated: Use EmojisService.List instead -func (c *Client) ListEmojis(ctx context.Context) (map[string]string, *Response, error) { - return c.Emojis.List(ctx) -} diff --git a/vendor/github.com/google/go-github/v57/github/enterprise.go b/vendor/github.com/google/go-github/v57/github/enterprise.go deleted file mode 100644 index 2036f8bc..00000000 --- a/vendor/github.com/google/go-github/v57/github/enterprise.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// EnterpriseService provides access to the enterprise related functions -// in the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/enterprise-admin/ -type EnterpriseService service diff --git a/vendor/github.com/google/go-github/v57/github/enterprise_actions_runner_groups.go b/vendor/github.com/google/go-github/v57/github/enterprise_actions_runner_groups.go deleted file mode 100644 index f171df75..00000000 --- a/vendor/github.com/google/go-github/v57/github/enterprise_actions_runner_groups.go +++ /dev/null @@ -1,336 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListOrganizations represents the response from the list orgs endpoints. -type ListOrganizations struct { - TotalCount *int `json:"total_count,omitempty"` - Organizations []*Organization `json:"organizations"` -} - -// EnterpriseRunnerGroup represents a self-hosted runner group configured in an enterprise. -type EnterpriseRunnerGroup struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - Default *bool `json:"default,omitempty"` - SelectedOrganizationsURL *string `json:"selected_organizations_url,omitempty"` - RunnersURL *string `json:"runners_url,omitempty"` - Inherited *bool `json:"inherited,omitempty"` - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - SelectedWorkflows []string `json:"selected_workflows,omitempty"` - WorkflowRestrictionsReadOnly *bool `json:"workflow_restrictions_read_only,omitempty"` -} - -// EnterpriseRunnerGroups represents a collection of self-hosted runner groups configured for an enterprise. -type EnterpriseRunnerGroups struct { - TotalCount *int `json:"total_count,omitempty"` - RunnerGroups []*EnterpriseRunnerGroup `json:"runner_groups"` -} - -// CreateEnterpriseRunnerGroupRequest represents a request to create a Runner group for an enterprise. 
-type CreateEnterpriseRunnerGroupRequest struct { - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - // List of organization IDs that can access the runner group. - SelectedOrganizationIDs []int64 `json:"selected_organization_ids,omitempty"` - // Runners represent a list of runner IDs to add to the runner group. - Runners []int64 `json:"runners,omitempty"` - // If set to True, public repos can use this runner group - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - // If true, the runner group will be restricted to running only the workflows specified in the SelectedWorkflows slice. - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - // List of workflows the runner group should be allowed to run. This setting will be ignored unless RestrictedToWorkflows is set to true. - SelectedWorkflows []string `json:"selected_workflows,omitempty"` -} - -// UpdateEnterpriseRunnerGroupRequest represents a request to update a Runner group for an enterprise. -type UpdateEnterpriseRunnerGroupRequest struct { - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - SelectedWorkflows []string `json:"selected_workflows,omitempty"` -} - -// SetOrgAccessRunnerGroupRequest represents a request to replace the list of organizations -// that can access a self-hosted runner group configured in an enterprise. -type SetOrgAccessRunnerGroupRequest struct { - // Updated list of organization IDs that should be given access to the runner group. - SelectedOrganizationIDs []int64 `json:"selected_organization_ids"` -} - -// ListEnterpriseRunnerGroupOptions extend ListOptions to have the optional parameters VisibleToOrganization. -type ListEnterpriseRunnerGroupOptions struct { - ListOptions - - // Only return runner groups that are allowed to be used by this organization. - VisibleToOrganization string `url:"visible_to_organization,omitempty"` -} - -// ListRunnerGroups lists all self-hosted runner groups configured in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-self-hosted-runner-groups-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runner-groups -func (s *EnterpriseService) ListRunnerGroups(ctx context.Context, enterprise string, opts *ListEnterpriseRunnerGroupOptions) (*EnterpriseRunnerGroups, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups", enterprise) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - groups := &EnterpriseRunnerGroups{} - resp, err := s.client.Do(ctx, req, &groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// GetEnterpriseRunnerGroup gets a specific self-hosted runner group for an enterprise using its RunnerGroup ID. 
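The runner-group methods above are thin wrappers around NewRequest/Do. A minimal sketch of paging
through an enterprise's runner groups, assuming an authenticated client, a ctx, and a hypothetical
enterprise slug "my-enterprise":

	opts := &github.ListEnterpriseRunnerGroupOptions{
		ListOptions: github.ListOptions{PerPage: 50},
	}
	for {
		groups, resp, err := client.Enterprise.ListRunnerGroups(ctx, "my-enterprise", opts)
		if err != nil {
			log.Fatal(err)
		}
		for _, g := range groups.RunnerGroups {
			log.Printf("runner group %d: %s (visibility: %s)", g.GetID(), g.GetName(), g.GetVisibility())
		}
		if resp.NextPage == 0 {
			break
		}
		opts.Page = resp.NextPage
	}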
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#get-a-self-hosted-runner-group-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id} -func (s *EnterpriseService) GetEnterpriseRunnerGroup(ctx context.Context, enterprise string, groupID int64) (*EnterpriseRunnerGroup, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v", enterprise, groupID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(EnterpriseRunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// DeleteEnterpriseRunnerGroup deletes a self-hosted runner group from an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#delete-a-self-hosted-runner-group-from-an-enterprise -// -//meta:operation DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id} -func (s *EnterpriseService) DeleteEnterpriseRunnerGroup(ctx context.Context, enterprise string, groupID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v", enterprise, groupID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CreateEnterpriseRunnerGroup creates a new self-hosted runner group for an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#create-a-self-hosted-runner-group-for-an-enterprise -// -//meta:operation POST /enterprises/{enterprise}/actions/runner-groups -func (s *EnterpriseService) CreateEnterpriseRunnerGroup(ctx context.Context, enterprise string, createReq CreateEnterpriseRunnerGroupRequest) (*EnterpriseRunnerGroup, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups", enterprise) - req, err := s.client.NewRequest("POST", u, createReq) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(EnterpriseRunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// UpdateEnterpriseRunnerGroup updates a self-hosted runner group for an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#update-a-self-hosted-runner-group-for-an-enterprise -// -//meta:operation PATCH /enterprises/{enterprise}/actions/runner-groups/{runner_group_id} -func (s *EnterpriseService) UpdateEnterpriseRunnerGroup(ctx context.Context, enterprise string, groupID int64, updateReq UpdateEnterpriseRunnerGroupRequest) (*EnterpriseRunnerGroup, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v", enterprise, groupID) - req, err := s.client.NewRequest("PATCH", u, updateReq) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(EnterpriseRunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// ListOrganizationAccessRunnerGroup lists the organizations with access to a self-hosted runner group configured in an enterprise. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-organization-access-to-a-self-hosted-runner-group-in-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations -func (s *EnterpriseService) ListOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID int64, opts *ListOptions) (*ListOrganizations, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations", enterprise, groupID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - orgs := &ListOrganizations{} - resp, err := s.client.Do(ctx, req, &orgs) - if err != nil { - return nil, resp, err - } - - return orgs, resp, nil -} - -// SetOrganizationAccessRunnerGroup replaces the list of organizations that have access to a self-hosted runner group configured in an enterprise -// with a new List of organizations. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#set-organization-access-for-a-self-hosted-runner-group-in-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations -func (s *EnterpriseService) SetOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID int64, ids SetOrgAccessRunnerGroupRequest) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations", enterprise, groupID) - - req, err := s.client.NewRequest("PUT", u, ids) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddOrganizationAccessRunnerGroup adds an organization to the list of selected organizations that can access a self-hosted runner group. -// The runner group must have visibility set to 'selected'. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#add-organization-access-to-a-self-hosted-runner-group-in-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id} -func (s *EnterpriseService) AddOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID, orgID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations/%v", enterprise, groupID, orgID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveOrganizationAccessRunnerGroup removes an organization from the list of selected organizations that can access a self-hosted runner group. -// The runner group must have visibility set to 'selected'. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#remove-organization-access-to-a-self-hosted-runner-group-in-an-enterprise -// -//meta:operation DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id} -func (s *EnterpriseService) RemoveOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID, orgID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations/%v", enterprise, groupID, orgID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListRunnerGroupRunners lists self-hosted runners that are in a specific enterprise group. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-self-hosted-runners-in-a-group-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners -func (s *EnterpriseService) ListRunnerGroupRunners(ctx context.Context, enterprise string, groupID int64, opts *ListOptions) (*Runners, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners", enterprise, groupID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runners := &Runners{} - resp, err := s.client.Do(ctx, req, &runners) - if err != nil { - return nil, resp, err - } - - return runners, resp, nil -} - -// SetRunnerGroupRunners replaces the list of self-hosted runners that are part of an enterprise runner group -// with a new list of runners. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#set-self-hosted-runners-in-a-group-for-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners -func (s *EnterpriseService) SetRunnerGroupRunners(ctx context.Context, enterprise string, groupID int64, ids SetRunnerGroupRunnersRequest) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners", enterprise, groupID) - - req, err := s.client.NewRequest("PUT", u, ids) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddRunnerGroupRunners adds a self-hosted runner to a runner group configured in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#add-a-self-hosted-runner-to-a-group-for-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id} -func (s *EnterpriseService) AddRunnerGroupRunners(ctx context.Context, enterprise string, groupID, runnerID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners/%v", enterprise, groupID, runnerID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveRunnerGroupRunners removes a self-hosted runner from a group configured in an enterprise. -// The runner is then returned to the default group. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#remove-a-self-hosted-runner-from-a-group-for-an-enterprise -// -//meta:operation DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id} -func (s *EnterpriseService) RemoveRunnerGroupRunners(ctx context.Context, enterprise string, groupID, runnerID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners/%v", enterprise, groupID, runnerID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/enterprise_actions_runners.go b/vendor/github.com/google/go-github/v57/github/enterprise_actions_runners.go deleted file mode 100644 index 4a6e6b52..00000000 --- a/vendor/github.com/google/go-github/v57/github/enterprise_actions_runners.go +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListRunnerApplicationDownloads lists self-hosted runner application binaries that can be downloaded and run. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runners#list-runner-applications-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runners/downloads -func (s *EnterpriseService) ListRunnerApplicationDownloads(ctx context.Context, enterprise string) ([]*RunnerApplicationDownload, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runners/downloads", enterprise) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rads []*RunnerApplicationDownload - resp, err := s.client.Do(ctx, req, &rads) - if err != nil { - return nil, resp, err - } - - return rads, resp, nil -} - -// GenerateEnterpriseJITConfig generates a just-in-time configuration for an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runners#create-configuration-for-a-just-in-time-runner-for-an-enterprise -// -//meta:operation POST /enterprises/{enterprise}/actions/runners/generate-jitconfig -func (s *EnterpriseService) GenerateEnterpriseJITConfig(ctx context.Context, enterprise string, request *GenerateJITConfigRequest) (*JITRunnerConfig, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runners/generate-jitconfig", enterprise) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - jitConfig := new(JITRunnerConfig) - resp, err := s.client.Do(ctx, req, jitConfig) - if err != nil { - return nil, resp, err - } - - return jitConfig, resp, nil -} - -// CreateRegistrationToken creates a token that can be used to add a self-hosted runner. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runners#create-a-registration-token-for-an-enterprise -// -//meta:operation POST /enterprises/{enterprise}/actions/runners/registration-token -func (s *EnterpriseService) CreateRegistrationToken(ctx context.Context, enterprise string) (*RegistrationToken, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runners/registration-token", enterprise) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - registrationToken := new(RegistrationToken) - resp, err := s.client.Do(ctx, req, registrationToken) - if err != nil { - return nil, resp, err - } - - return registrationToken, resp, nil -} - -// ListRunners lists all the self-hosted runners for a enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runners#list-self-hosted-runners-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runners -func (s *EnterpriseService) ListRunners(ctx context.Context, enterprise string, opts *ListOptions) (*Runners, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runners", enterprise) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runners := &Runners{} - resp, err := s.client.Do(ctx, req, &runners) - if err != nil { - return nil, resp, err - } - - return runners, resp, nil -} - -// RemoveRunner forces the removal of a self-hosted runner from an enterprise using the runner id. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runners#delete-a-self-hosted-runner-from-an-enterprise -// -//meta:operation DELETE /enterprises/{enterprise}/actions/runners/{runner_id} -func (s *EnterpriseService) RemoveRunner(ctx context.Context, enterprise string, runnerID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runners/%v", enterprise, runnerID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/enterprise_audit_log.go b/vendor/github.com/google/go-github/v57/github/enterprise_audit_log.go deleted file mode 100644 index 058a7d17..00000000 --- a/vendor/github.com/google/go-github/v57/github/enterprise_audit_log.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetAuditLog gets the audit-log entries for an organization. 
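A minimal sketch tying these calls together, minting a registration token and listing the
enterprise's runners, again using the hypothetical "my-enterprise" slug:

	token, _, err := client.Enterprise.CreateRegistrationToken(ctx, "my-enterprise")
	if err != nil {
		log.Fatal(err)
	}
	// The token is short-lived; it is what a runner passes to "config.sh --token ..." at registration time.
	log.Printf("registration token %s expires at %v", token.GetToken(), token.GetExpiresAt())

	runners, _, err := client.Enterprise.ListRunners(ctx, "my-enterprise", nil)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("%d self-hosted runners registered", len(runners.Runners))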
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/enterprise-admin/audit-log#get-the-audit-log-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/audit-log -func (s *EnterpriseService) GetAuditLog(ctx context.Context, enterprise string, opts *GetAuditLogOptions) ([]*AuditEntry, *Response, error) { - u := fmt.Sprintf("enterprises/%v/audit-log", enterprise) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var auditEntries []*AuditEntry - resp, err := s.client.Do(ctx, req, &auditEntries) - if err != nil { - return nil, resp, err - } - - return auditEntries, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/enterprise_code_security_and_analysis.go b/vendor/github.com/google/go-github/v57/github/enterprise_code_security_and_analysis.go deleted file mode 100644 index af8eb0ff..00000000 --- a/vendor/github.com/google/go-github/v57/github/enterprise_code_security_and_analysis.go +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// EnterpriseSecurityAnalysisSettings represents security analysis settings for an enterprise. -type EnterpriseSecurityAnalysisSettings struct { - AdvancedSecurityEnabledForNewRepositories *bool `json:"advanced_security_enabled_for_new_repositories,omitempty"` - SecretScanningEnabledForNewRepositories *bool `json:"secret_scanning_enabled_for_new_repositories,omitempty"` - SecretScanningPushProtectionEnabledForNewRepositories *bool `json:"secret_scanning_push_protection_enabled_for_new_repositories,omitempty"` - SecretScanningPushProtectionCustomLink *string `json:"secret_scanning_push_protection_custom_link,omitempty"` -} - -// GetCodeSecurityAndAnalysis gets code security and analysis features for an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/enterprise-admin/code-security-and-analysis#get-code-security-and-analysis-features-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/code_security_and_analysis -func (s *EnterpriseService) GetCodeSecurityAndAnalysis(ctx context.Context, enterprise string) (*EnterpriseSecurityAnalysisSettings, *Response, error) { - u := fmt.Sprintf("enterprises/%v/code_security_and_analysis", enterprise) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - settings := new(EnterpriseSecurityAnalysisSettings) - resp, err := s.client.Do(ctx, req, settings) - if err != nil { - return nil, resp, err - } - - return settings, resp, nil -} - -// UpdateCodeSecurityAndAnalysis updates code security and analysis features for new repositories in an enterprise. 
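A minimal sketch of pulling audit-log entries for the same hypothetical enterprise; the phrase
filter here is just an example query:

	entries, _, err := client.Enterprise.GetAuditLog(ctx, "my-enterprise", &github.GetAuditLogOptions{
		Phrase: github.String("action:org.update_member"),
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, e := range entries {
		log.Printf("%s by %s", e.GetAction(), e.GetActor())
	}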
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/enterprise-admin/code-security-and-analysis#update-code-security-and-analysis-features-for-an-enterprise -// -//meta:operation PATCH /enterprises/{enterprise}/code_security_and_analysis -func (s *EnterpriseService) UpdateCodeSecurityAndAnalysis(ctx context.Context, enterprise string, settings *EnterpriseSecurityAnalysisSettings) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/code_security_and_analysis", enterprise) - req, err := s.client.NewRequest("PATCH", u, settings) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// EnableDisableSecurityFeature enables or disables a security feature for all repositories in an enterprise. -// -// Valid values for securityProduct: "advanced_security", "secret_scanning", "secret_scanning_push_protection". -// Valid values for enablement: "enable_all", "disable_all". -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/enterprise-admin/code-security-and-analysis#enable-or-disable-a-security-feature -// -//meta:operation POST /enterprises/{enterprise}/{security_product}/{enablement} -func (s *EnterpriseService) EnableDisableSecurityFeature(ctx context.Context, enterprise, securityProduct, enablement string) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/%v/%v", enterprise, securityProduct, enablement) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/event.go b/vendor/github.com/google/go-github/v57/github/event.go deleted file mode 100644 index e98606bc..00000000 --- a/vendor/github.com/google/go-github/v57/github/event.go +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "encoding/json" -) - -// Event represents a GitHub event. -type Event struct { - Type *string `json:"type,omitempty"` - Public *bool `json:"public,omitempty"` - RawPayload *json.RawMessage `json:"payload,omitempty"` - Repo *Repository `json:"repo,omitempty"` - Actor *User `json:"actor,omitempty"` - Org *Organization `json:"org,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - ID *string `json:"id,omitempty"` -} - -func (e Event) String() string { - return Stringify(e) -} - -// ParsePayload parses the event payload. For recognized event types, -// a value of the corresponding struct type will be returned. -func (e *Event) ParsePayload() (interface{}, error) { - // It would be nice if e.Type were the snake_case name of the event, - // but the existing interface uses the struct name instead. - payload := EventForType(typeToMessageMapping[e.GetType()]) - - if err := json.Unmarshal(e.GetRawPayload(), &payload); err != nil { - return nil, err - } - - return payload, nil -} - -// Payload returns the parsed event payload. For recognized event types, -// a value of the corresponding struct type will be returned. -// -// Deprecated: Use ParsePayload instead, which returns an error -// rather than panics if JSON unmarshaling raw payload fails. 
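ParsePayload is typically applied to events returned by the activity APIs. A minimal sketch,
assuming ctx and an authenticated client:

	events, _, err := client.Activity.ListEvents(ctx, nil)
	if err != nil {
		log.Fatal(err)
	}
	for _, event := range events {
		payload, err := event.ParsePayload()
		if err != nil {
			continue // unrecognized or malformed payload
		}
		if push, ok := payload.(*github.PushEvent); ok {
			log.Printf("push to %s in %s", push.GetRef(), push.GetRepo().GetName())
		}
	}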
-func (e *Event) Payload() (payload interface{}) { - var err error - payload, err = e.ParsePayload() - if err != nil { - panic(err) - } - return payload -} diff --git a/vendor/github.com/google/go-github/v57/github/event_types.go b/vendor/github.com/google/go-github/v57/github/event_types.go deleted file mode 100644 index badd29b2..00000000 --- a/vendor/github.com/google/go-github/v57/github/event_types.go +++ /dev/null @@ -1,1795 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// These event types are shared between the Events API and used as Webhook payloads. - -package github - -import "encoding/json" - -// RequestedAction is included in a CheckRunEvent when a user has invoked an action, -// i.e. when the CheckRunEvent's Action field is "requested_action". -type RequestedAction struct { - Identifier string `json:"identifier"` // The integrator reference of the action requested by the user. -} - -// BranchProtectionRuleEvent triggered when a check suite is "created", "edited", or "deleted". -// The Webhook event name is "branch_protection_rule". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#branch_protection_rule -type BranchProtectionRuleEvent struct { - Action *string `json:"action,omitempty"` - Rule *BranchProtectionRule `json:"rule,omitempty"` - Changes *ProtectionChanges `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// CheckRunEvent is triggered when a check run is "created", "completed", or "rerequested". -// The Webhook event name is "check_run". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#check_run -type CheckRunEvent struct { - CheckRun *CheckRun `json:"check_run,omitempty"` - // The action performed. Possible values are: "created", "completed", "rerequested" or "requested_action". - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The action requested by the user. Populated when the Action is "requested_action". - RequestedAction *RequestedAction `json:"requested_action,omitempty"` // -} - -// CheckSuiteEvent is triggered when a check suite is "completed", "requested", or "rerequested". -// The Webhook event name is "check_suite". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#check_suite -type CheckSuiteEvent struct { - CheckSuite *CheckSuite `json:"check_suite,omitempty"` - // The action performed. Possible values are: "completed", "requested" or "rerequested". - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// CommitCommentEvent is triggered when a commit comment is created. -// The Webhook event name is "commit_comment". 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#commit_comment -type CommitCommentEvent struct { - Comment *RepositoryComment `json:"comment,omitempty"` - - // The following fields are only populated by Webhook events. - Action *string `json:"action,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// ContentReferenceEvent is triggered when the body or comment of an issue or -// pull request includes a URL that matches a configured content reference -// domain. -// The Webhook event name is "content_reference". -// -// GitHub API docs: https://developer.github.com/webhooks/event-payloads/#content_reference -type ContentReferenceEvent struct { - Action *string `json:"action,omitempty"` - ContentReference *ContentReference `json:"content_reference,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// CreateEvent represents a created repository, branch, or tag. -// The Webhook event name is "create". -// -// Note: webhooks will not receive this event for created repositories. -// Additionally, webhooks will not receive this event for tags if more -// than three tags are pushed at once. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/github-event-types#createevent -type CreateEvent struct { - Ref *string `json:"ref,omitempty"` - // RefType is the object that was created. Possible values are: "repository", "branch", "tag". - RefType *string `json:"ref_type,omitempty"` - MasterBranch *string `json:"master_branch,omitempty"` - Description *string `json:"description,omitempty"` - PusherType *string `json:"pusher_type,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// DeleteEvent represents a deleted branch or tag. -// The Webhook event name is "delete". -// -// Note: webhooks will not receive this event for tags if more than three tags -// are deleted at once. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/github-event-types#deleteevent -type DeleteEvent struct { - Ref *string `json:"ref,omitempty"` - // RefType is the object that was deleted. Possible values are: "branch", "tag". - RefType *string `json:"ref_type,omitempty"` - - // The following fields are only populated by Webhook events. - PusherType *string `json:"pusher_type,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// DependabotAlertEvent is triggered when there is activity relating to Dependabot alerts. -// The Webhook event name is "dependabot_alert". 
-// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#dependabot_alert -type DependabotAlertEvent struct { - Action *string `json:"action,omitempty"` - Alert *DependabotAlert `json:"alert,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` - Enterprise *Enterprise `json:"enterprise,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` -} - -// DeployKeyEvent is triggered when a deploy key is added or removed from a repository. -// The Webhook event name is "deploy_key". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#deploy_key -type DeployKeyEvent struct { - // Action is the action that was performed. Possible values are: - // "created" or "deleted". - Action *string `json:"action,omitempty"` - - // The deploy key resource. - Key *Key `json:"key,omitempty"` - - // The Repository where the event occurred - Repo *Repository `json:"repository,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` - - // The following fields are only populated by Webhook events. - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// DeploymentEvent represents a deployment. -// The Webhook event name is "deployment". -// -// Events of this type are not visible in timelines, they are only used to trigger hooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#deployment -type DeploymentEvent struct { - Deployment *Deployment `json:"deployment,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Workflow *Workflow `json:"workflow,omitempty"` - WorkflowRun *WorkflowRun `json:"workflow_run,omitempty"` - - // The following fields are only populated by Webhook events. - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// DeploymentProtectionRuleEvent represents a deployment protection rule event. -// The Webhook event name is "deployment_protection_rule". 
-// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#deployment_protection_rule -type DeploymentProtectionRuleEvent struct { - Action *string `json:"action,omitempty"` - Environment *string `json:"environment,omitempty"` - Event *string `json:"event,omitempty"` - - // The URL Github provides for a third-party to use in order to pass/fail a deployment gate - DeploymentCallbackURL *string `json:"deployment_callback_url,omitempty"` - Deployment *Deployment `json:"deployment,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Organization *Organization `json:"organization,omitempty"` - PullRequests []*PullRequest `json:"pull_requests,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// DeploymentStatusEvent represents a deployment status. -// The Webhook event name is "deployment_status". -// -// Events of this type are not visible in timelines, they are only used to trigger hooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#deployment_status -type DeploymentStatusEvent struct { - Deployment *Deployment `json:"deployment,omitempty"` - DeploymentStatus *DeploymentStatus `json:"deployment_status,omitempty"` - Repo *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// DiscussionCommentEvent represents a webhook event for a comment on discussion. -// The Webhook event name is "discussion_comment". -// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#discussion_comment -type DiscussionCommentEvent struct { - // Action is the action that was performed on the comment. - // Possible values are: "created", "edited", "deleted". ** check what all can be added - Action *string `json:"action,omitempty"` - Discussion *Discussion `json:"discussion,omitempty"` - Comment *CommentDiscussion `json:"comment,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// CommentDiscussion represents a comment in a GitHub DiscussionCommentEvent. -type CommentDiscussion struct { - AuthorAssociation *string `json:"author_association,omitempty"` - Body *string `json:"body,omitempty"` - ChildCommentCount *int `json:"child_comment_count,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - DiscussionID *int64 `json:"discussion_id,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - ParentID *int64 `json:"parent_id,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - User *User `json:"user,omitempty"` -} - -// DiscussionEvent represents a webhook event for a discussion. -// The Webhook event name is "discussion". 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#discussion -type DiscussionEvent struct { - // Action is the action that was performed. Possible values are: - // created, edited, deleted, pinned, unpinned, locked, unlocked, - // transferred, category_changed, answered, or unanswered. - Action *string `json:"action,omitempty"` - Discussion *Discussion `json:"discussion,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// Discussion represents a discussion in a GitHub DiscussionEvent. -type Discussion struct { - RepositoryURL *string `json:"repository_url,omitempty"` - DiscussionCategory *DiscussionCategory `json:"category,omitempty"` - AnswerHTMLURL *string `json:"answer_html_url,omitempty"` - AnswerChosenAt *Timestamp `json:"answer_chosen_at,omitempty"` - AnswerChosenBy *string `json:"answer_chosen_by,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Number *int `json:"number,omitempty"` - Title *string `json:"title,omitempty"` - User *User `json:"user,omitempty"` - State *string `json:"state,omitempty"` - Locked *bool `json:"locked,omitempty"` - Comments *int `json:"comments,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - AuthorAssociation *string `json:"author_association,omitempty"` - ActiveLockReason *string `json:"active_lock_reason,omitempty"` - Body *string `json:"body,omitempty"` -} - -// DiscussionCategory represents a discussion category in a GitHub DiscussionEvent. -type DiscussionCategory struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - Emoji *string `json:"emoji,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Slug *string `json:"slug,omitempty"` - IsAnswerable *bool `json:"is_answerable,omitempty"` -} - -// ForkEvent is triggered when a user forks a repository. -// The Webhook event name is "fork". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#fork -type ForkEvent struct { - // Forkee is the created repository. - Forkee *Repository `json:"forkee,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// GitHubAppAuthorizationEvent is triggered when a user's authorization for a -// GitHub Application is revoked. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#github_app_authorization -type GitHubAppAuthorizationEvent struct { - // The action performed. Possible value is: "revoked". - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// Page represents a single Wiki page. 
-type Page struct { - PageName *string `json:"page_name,omitempty"` - Title *string `json:"title,omitempty"` - Summary *string `json:"summary,omitempty"` - Action *string `json:"action,omitempty"` - SHA *string `json:"sha,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` -} - -// GollumEvent is triggered when a Wiki page is created or updated. -// The Webhook event name is "gollum". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#gollum -type GollumEvent struct { - Pages []*Page `json:"pages,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// EditChange represents the changes when an issue, pull request, comment, -// or repository has been edited. -type EditChange struct { - Title *EditTitle `json:"title,omitempty"` - Body *EditBody `json:"body,omitempty"` - Base *EditBase `json:"base,omitempty"` - Repo *EditRepo `json:"repository,omitempty"` - Owner *EditOwner `json:"owner,omitempty"` - DefaultBranch *EditDefaultBranch `json:"default_branch,omitempty"` -} - -// EditTitle represents a pull-request title change. -type EditTitle struct { - From *string `json:"from,omitempty"` -} - -// EditBody represents a change of pull-request body. -type EditBody struct { - From *string `json:"from,omitempty"` -} - -// EditBase represents the change of a pull-request base branch. -type EditBase struct { - Ref *EditRef `json:"ref,omitempty"` - SHA *EditSHA `json:"sha,omitempty"` -} - -// EditRef represents a ref change of a pull-request. -type EditRef struct { - From *string `json:"from,omitempty"` -} - -// EditRepo represents a change of repository name. -type EditRepo struct { - Name *RepoName `json:"name,omitempty"` -} - -// EditOwner represents a change of repository ownership. -type EditOwner struct { - OwnerInfo *OwnerInfo `json:"from,omitempty"` -} - -// OwnerInfo represents the account info of the owner of the repo (could be User or Organization but both are User structs). -type OwnerInfo struct { - User *User `json:"user,omitempty"` - Org *User `json:"organization,omitempty"` -} - -// RepoName represents a change of repository name. -type RepoName struct { - From *string `json:"from,omitempty"` -} - -// EditSHA represents a sha change of a pull-request. -type EditSHA struct { - From *string `json:"from,omitempty"` -} - -// EditDefaultBranch represents a change of repository's default branch name. -type EditDefaultBranch struct { - From *string `json:"from,omitempty"` -} - -// ProjectChange represents the changes when a project has been edited. -type ProjectChange struct { - Name *ProjectName `json:"name,omitempty"` - Body *ProjectBody `json:"body,omitempty"` -} - -// ProjectName represents a project name change. -type ProjectName struct { - From *string `json:"from,omitempty"` -} - -// ProjectBody represents a project body change. -type ProjectBody struct { - From *string `json:"from,omitempty"` -} - -// ProjectCardChange represents the changes when a project card has been edited. -type ProjectCardChange struct { - Note *ProjectCardNote `json:"note,omitempty"` -} - -// ProjectCardNote represents a change of a note of a project card. 
-type ProjectCardNote struct { - From *string `json:"from,omitempty"` -} - -// ProjectColumnChange represents the changes when a project column has been edited. -type ProjectColumnChange struct { - Name *ProjectColumnName `json:"name,omitempty"` -} - -// ProjectColumnName represents a project column name change. -type ProjectColumnName struct { - From *string `json:"from,omitempty"` -} - -// TeamChange represents the changes when a team has been edited. -type TeamChange struct { - Description *TeamDescription `json:"description,omitempty"` - Name *TeamName `json:"name,omitempty"` - Privacy *TeamPrivacy `json:"privacy,omitempty"` - Repository *TeamRepository `json:"repository,omitempty"` -} - -// TeamDescription represents a team description change. -type TeamDescription struct { - From *string `json:"from,omitempty"` -} - -// TeamName represents a team name change. -type TeamName struct { - From *string `json:"from,omitempty"` -} - -// TeamPrivacy represents a team privacy change. -type TeamPrivacy struct { - From *string `json:"from,omitempty"` -} - -// TeamRepository represents a team repository permission change. -type TeamRepository struct { - Permissions *TeamPermissions `json:"permissions,omitempty"` -} - -// TeamPermissions represents a team permission change. -type TeamPermissions struct { - From *TeamPermissionsFrom `json:"from,omitempty"` -} - -// TeamPermissionsFrom represents a team permission change. -type TeamPermissionsFrom struct { - Admin *bool `json:"admin,omitempty"` - Pull *bool `json:"pull,omitempty"` - Push *bool `json:"push,omitempty"` -} - -// InstallationEvent is triggered when a GitHub App has been installed, uninstalled, suspend, unsuspended -// or new permissions have been accepted. -// The Webhook event name is "installation". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#installation -type InstallationEvent struct { - // The action that was performed. Can be either "created", "deleted", "suspend", "unsuspend" or "new_permissions_accepted". - Action *string `json:"action,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Requester *User `json:"requester,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// InstallationRepositoriesEvent is triggered when a repository is added or -// removed from an installation. The Webhook event name is "installation_repositories". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#installation_repositories -type InstallationRepositoriesEvent struct { - // The action that was performed. Can be either "added" or "removed". - Action *string `json:"action,omitempty"` - RepositoriesAdded []*Repository `json:"repositories_added,omitempty"` - RepositoriesRemoved []*Repository `json:"repositories_removed,omitempty"` - RepositorySelection *string `json:"repository_selection,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// InstallationLoginChange represents a change in login on an installation. 
-type InstallationLoginChange struct { - From *string `json:"from,omitempty"` -} - -// InstallationSlugChange represents a change in slug on an installation. -type InstallationSlugChange struct { - From *string `json:"from,omitempty"` -} - -// InstallationChanges represents a change in slug or login on an installation. -type InstallationChanges struct { - Login *InstallationLoginChange `json:"login,omitempty"` - Slug *InstallationSlugChange `json:"slug,omitempty"` -} - -// InstallationTargetEvent is triggered when there is activity on an installation from a user or organization account. -// The Webhook event name is "installation_target". -// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#installation_target -type InstallationTargetEvent struct { - Account *User `json:"account,omitempty"` - Action *string `json:"action,omitempty"` - Changes *InstallationChanges `json:"changes,omitempty"` - Enterprise *Enterprise `json:"enterprise,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - TargetType *string `json:"target_type,omitempty"` -} - -// IssueCommentEvent is triggered when an issue comment is created on an issue -// or pull request. -// The Webhook event name is "issue_comment". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#issue_comment -type IssueCommentEvent struct { - // Action is the action that was performed on the comment. - // Possible values are: "created", "edited", "deleted". - Action *string `json:"action,omitempty"` - Issue *Issue `json:"issue,omitempty"` - Comment *IssueComment `json:"comment,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` -} - -// IssuesEvent is triggered when an issue is opened, edited, deleted, transferred, -// pinned, unpinned, closed, reopened, assigned, unassigned, labeled, unlabeled, -// locked, unlocked, milestoned, or demilestoned. -// The Webhook event name is "issues". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#issues -type IssuesEvent struct { - // Action is the action that was performed. Possible values are: "opened", - // "edited", "deleted", "transferred", "pinned", "unpinned", "closed", "reopened", - // "assigned", "unassigned", "labeled", "unlabeled", "locked", "unlocked", - // "milestoned", or "demilestoned". - Action *string `json:"action,omitempty"` - Issue *Issue `json:"issue,omitempty"` - Assignee *User `json:"assignee,omitempty"` - Label *Label `json:"label,omitempty"` - - // The following fields are only populated by Webhook events. 
- Changes *EditChange `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// LabelEvent is triggered when a repository's label is created, edited, or deleted. -// The Webhook event name is "label" -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#label -type LabelEvent struct { - // Action is the action that was performed. Possible values are: - // "created", "edited", "deleted" - Action *string `json:"action,omitempty"` - Label *Label `json:"label,omitempty"` - Changes *EditChange `json:"changes,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// MarketplacePurchaseEvent is triggered when a user purchases, cancels, or changes -// their GitHub Marketplace plan. -// Webhook event name "marketplace_purchase". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#marketplace_purchase -type MarketplacePurchaseEvent struct { - // Action is the action that was performed. Possible values are: - // "purchased", "cancelled", "pending_change", "pending_change_cancelled", "changed". - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. - EffectiveDate *Timestamp `json:"effective_date,omitempty"` - MarketplacePurchase *MarketplacePurchase `json:"marketplace_purchase,omitempty"` - PreviousMarketplacePurchase *MarketplacePurchase `json:"previous_marketplace_purchase,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// MemberEvent is triggered when a user is added as a collaborator to a repository. -// The Webhook event name is "member". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#member -type MemberEvent struct { - // Action is the action that was performed. Possible value is: "added". - Action *string `json:"action,omitempty"` - Member *User `json:"member,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// MembershipEvent is triggered when a user is added or removed from a team. -// The Webhook event name is "membership". -// -// Events of this type are not visible in timelines, they are only used to -// trigger organization webhooks. 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#membership -type MembershipEvent struct { - // Action is the action that was performed. Possible values are: "added", "removed". - Action *string `json:"action,omitempty"` - // Scope is the scope of the membership. Possible value is: "team". - Scope *string `json:"scope,omitempty"` - Member *User `json:"member,omitempty"` - Team *Team `json:"team,omitempty"` - - // The following fields are only populated by Webhook events. - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// MergeGroup represents the merge group in a merge queue. -type MergeGroup struct { - // The SHA of the merge group. - HeadSHA *string `json:"head_sha,omitempty"` - // The full ref of the merge group. - HeadRef *string `json:"head_ref,omitempty"` - // The SHA of the merge group's parent commit. - BaseSHA *string `json:"base_sha,omitempty"` - // The full ref of the branch the merge group will be merged into. - BaseRef *string `json:"base_ref,omitempty"` - // An expanded representation of the head_sha commit. - HeadCommit *Commit `json:"head_commit,omitempty"` -} - -// MergeGroupEvent represents activity related to merge groups in a merge queue. The type of activity is specified -// in the action property of the payload object. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#merge_group -type MergeGroupEvent struct { - // The action that was performed. Currently, can only be checks_requested. - Action *string `json:"action,omitempty"` - // The merge group. - MergeGroup *MergeGroup `json:"merge_group,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// MetaEvent is triggered when the webhook that this event is configured on is deleted. -// This event will only listen for changes to the particular hook the event is installed on. -// Therefore, it must be selected for each hook that you'd like to receive meta events for. -// The Webhook event name is "meta". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#meta -type MetaEvent struct { - // Action is the action that was performed. Possible value is: "deleted". - Action *string `json:"action,omitempty"` - // The ID of the modified webhook. - HookID *int64 `json:"hook_id,omitempty"` - // The modified webhook. - // This will contain different keys based on the type of webhook it is: repository, - // organization, business, app, or GitHub Marketplace. - Hook *Hook `json:"hook,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// MilestoneEvent is triggered when a milestone is created, closed, opened, edited, or deleted. -// The Webhook event name is "milestone". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#milestone -type MilestoneEvent struct { - // Action is the action that was performed. 
Possible values are: - // "created", "closed", "opened", "edited", "deleted" - Action *string `json:"action,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Org *Organization `json:"organization,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// OrganizationEvent is triggered when an organization is deleted and renamed, and when a user is added, -// removed, or invited to an organization. -// Events of this type are not visible in timelines. These events are only used to trigger organization hooks. -// Webhook event name is "organization". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#organization -type OrganizationEvent struct { - // Action is the action that was performed. - // Possible values are: "deleted", "renamed", "member_added", "member_removed", or "member_invited". - Action *string `json:"action,omitempty"` - - // Invitation is the invitation for the user or email if the action is "member_invited". - Invitation *Invitation `json:"invitation,omitempty"` - - // Membership is the membership between the user and the organization. - // Not present when the action is "member_invited". - Membership *Membership `json:"membership,omitempty"` - - Organization *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// OrgBlockEvent is triggered when an organization blocks or unblocks a user. -// The Webhook event name is "org_block". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#org_block -type OrgBlockEvent struct { - // Action is the action that was performed. - // Can be "blocked" or "unblocked". - Action *string `json:"action,omitempty"` - BlockedUser *User `json:"blocked_user,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` -} - -// PackageEvent represents activity related to GitHub Packages. -// The Webhook event name is "package". -// -// This event is triggered when a GitHub Package is published or updated. -// -// GitHub API docs: https://developer.github.com/webhooks/event-payloads/#package -type PackageEvent struct { - // Action is the action that was performed. - // Can be "published" or "updated". - Action *string `json:"action,omitempty"` - Package *Package `json:"package,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` -} - -// PageBuildEvent represents an attempted build of a GitHub Pages site, whether -// successful or not. -// The Webhook event name is "page_build". -// -// This event is triggered on push to a GitHub Pages enabled branch (gh-pages -// for project pages, master for user and organization pages). -// -// Events of this type are not visible in timelines, they are only used to trigger hooks. 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#page_build -type PageBuildEvent struct { - Build *PagesBuild `json:"build,omitempty"` - - // The following fields are only populated by Webhook events. - ID *int64 `json:"id,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// PersonalAccessTokenRequestEvent occurs when there is activity relating to a -// request for a fine-grained personal access token to access resources that -// belong to a resource owner that requires approval for token access. -// The webhook event name is "personal_access_token_request". -// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#personal_access_token_request -type PersonalAccessTokenRequestEvent struct { - // Action is the action that was performed. Possible values are: - // "approved", "cancelled", "created" or "denied" - Action *string `json:"action,omitempty"` - PersonalAccessTokenRequest *PersonalAccessTokenRequest `json:"personal_access_token_request,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// PersonalAccessTokenRequest contains the details of a PersonalAccessTokenRequestEvent. -type PersonalAccessTokenRequest struct { - // Unique identifier of the request for access via fine-grained personal - // access token. Used as the pat_request_id parameter in the list and review - // API calls. - ID *int64 `json:"id,omitempty"` - Owner *User `json:"owner,omitempty"` - - // New requested permissions, categorized by type of permission. - PermissionsAdded *PersonalAccessTokenPermissions `json:"permissions_added,omitempty"` - - // Requested permissions that elevate access for a previously approved - // request for access, categorized by type of permission. - PermissionsUpgraded *PersonalAccessTokenPermissions `json:"permissions_upgraded,omitempty"` - - // Permissions requested, categorized by type of permission. - // This field incorporates permissions_added and permissions_upgraded. - PermissionsResult *PersonalAccessTokenPermissions `json:"permissions_result,omitempty"` - - // Type of repository selection requested. Possible values are: - // "none", "all" or "subset" - RepositorySelection *string `json:"repository_selection,omitempty"` - - // The number of repositories the token is requesting access to. - // This field is only populated when repository_selection is subset. - RepositoryCount *int64 `json:"repository_count,omitempty"` - - // An array of repository objects the token is requesting access to. - // This field is only populated when repository_selection is subset. - Repositories []*Repository `json:"repositories,omitempty"` - - // Date and time when the request for access was created. - CreatedAt *Timestamp `json:"created_at,omitempty"` - - // Whether the associated fine-grained personal access token has expired. - TokenExpired *bool `json:"token_expired,omitempty"` - - // Date and time when the associated fine-grained personal access token expires. 
- TokenExpiresAt *Timestamp `json:"token_expires_at,omitempty"` - - // Date and time when the associated fine-grained personal access token was last used for authentication. - TokenLastUsedAt *Timestamp `json:"token_last_used_at,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// PersonalAccessTokenPermissions represents the original or newly requested -// scope of permissions for a fine-grained personal access token within a PersonalAccessTokenRequest. -type PersonalAccessTokenPermissions struct { - Org map[string]string `json:"organization,omitempty"` - Repo map[string]string `json:"repository,omitempty"` - Other map[string]string `json:"other,omitempty"` -} - -// PingEvent is triggered when a Webhook is added to GitHub. -// -// GitHub API docs: https://developer.github.com/webhooks/#ping-event -type PingEvent struct { - // Random string of GitHub zen. - Zen *string `json:"zen,omitempty"` - // The ID of the webhook that triggered the ping. - HookID *int64 `json:"hook_id,omitempty"` - // The webhook configuration. - Hook *Hook `json:"hook,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// ProjectEvent is triggered when project is created, modified or deleted. -// The webhook event name is "project". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#project -type ProjectEvent struct { - Action *string `json:"action,omitempty"` - Changes *ProjectChange `json:"changes,omitempty"` - Project *Project `json:"project,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// ProjectCardEvent is triggered when a project card is created, updated, moved, converted to an issue, or deleted. -// The webhook event name is "project_card". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#project_card -type ProjectCardEvent struct { - Action *string `json:"action,omitempty"` - Changes *ProjectCardChange `json:"changes,omitempty"` - AfterID *int64 `json:"after_id,omitempty"` - ProjectCard *ProjectCard `json:"project_card,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// ProjectColumnEvent is triggered when a project column is created, updated, moved, or deleted. -// The webhook event name is "project_column". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#project_column -type ProjectColumnEvent struct { - Action *string `json:"action,omitempty"` - Changes *ProjectColumnChange `json:"changes,omitempty"` - AfterID *int64 `json:"after_id,omitempty"` - ProjectColumn *ProjectColumn `json:"project_column,omitempty"` - - // The following fields are only populated by Webhook events. 
- Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// ProjectV2Event is triggered when there is activity relating to an organization-level project. -// The Webhook event name is "projects_v2". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#projects_v2 -type ProjectV2Event struct { - Action *string `json:"action,omitempty"` - ProjectsV2 *ProjectsV2 `json:"projects_v2,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// ProjectsV2 represents a projects v2 project. -type ProjectsV2 struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Owner *User `json:"owner,omitempty"` - Creator *User `json:"creator,omitempty"` - Title *string `json:"title,omitempty"` - Description *string `json:"description,omitempty"` - Public *bool `json:"public,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - DeletedAt *Timestamp `json:"deleted_at,omitempty"` - Number *int `json:"number,omitempty"` - ShortDescription *string `json:"short_description,omitempty"` - DeletedBy *User `json:"deleted_by,omitempty"` -} - -// ProjectV2ItemEvent is triggered when there is activity relating to an item on an organization-level project. -// The Webhook event name is "projects_v2_item". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#projects_v2_item -type ProjectV2ItemEvent struct { - Action *string `json:"action,omitempty"` - Changes *ProjectV2ItemChange `json:"changes,omitempty"` - ProjectV2Item *ProjectV2Item `json:"projects_v2_item,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// ProjectV2ItemChange represents a project v2 item change. -type ProjectV2ItemChange struct { - ArchivedAt *ArchivedAt `json:"archived_at,omitempty"` -} - -// ArchivedAt represents an archiving date change. -type ArchivedAt struct { - From *Timestamp `json:"from,omitempty"` - To *Timestamp `json:"to,omitempty"` -} - -// ProjectV2Item represents an item belonging to a project. -type ProjectV2Item struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - ProjectNodeID *string `json:"project_node_id,omitempty"` - ContentNodeID *string `json:"content_node_id,omitempty"` - ContentType *string `json:"content_type,omitempty"` - Creator *User `json:"creator,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ArchivedAt *Timestamp `json:"archived_at,omitempty"` -} - -// PublicEvent is triggered when a private repository is open sourced. -// According to GitHub: "Without a doubt: the best GitHub event." -// The Webhook event name is "public". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#public -type PublicEvent struct { - // The following fields are only populated by Webhook events. 
- Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// PullRequestEvent is triggered when a pull request is assigned, unassigned, labeled, -// unlabeled, opened, edited, closed, reopened, synchronize, ready_for_review, -// locked, unlocked, a pull request review is requested, or a review request is removed. -// The Webhook event name is "pull_request". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/github-event-types#pullrequestevent -type PullRequestEvent struct { - // Action is the action that was performed. Possible values are: - // "assigned", "unassigned", "review_requested", "review_request_removed", "labeled", "unlabeled", - // "opened", "edited", "closed", "ready_for_review", "locked", "unlocked", or "reopened". - // If the action is "closed" and the "merged" key is "false", the pull request was closed with unmerged commits. - // If the action is "closed" and the "merged" key is "true", the pull request was merged. - // While webhooks are also triggered when a pull request is synchronized, Events API timelines - // don't include pull request events with the "synchronize" action. - Action *string `json:"action,omitempty"` - Assignee *User `json:"assignee,omitempty"` - Number *int `json:"number,omitempty"` - PullRequest *PullRequest `json:"pull_request,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - // RequestedReviewer is populated in "review_requested", "review_request_removed" event deliveries. - // A request affecting multiple reviewers at once is split into multiple - // such event deliveries, each with a single, different RequestedReviewer. - RequestedReviewer *User `json:"requested_reviewer,omitempty"` - // In the event that a team is requested instead of a user, "requested_team" gets sent in place of - // "requested_user" with the same delivery behavior. - RequestedTeam *Team `json:"requested_team,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Label *Label `json:"label,omitempty"` // Populated in "labeled" event deliveries. - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` - - // The following fields are only populated when the Action is "synchronize". - Before *string `json:"before,omitempty"` - After *string `json:"after,omitempty"` - - // The following will be populated if the event was performed by an App - PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` -} - -// PullRequestReviewEvent is triggered when a review is submitted on a pull -// request. -// The Webhook event name is "pull_request_review". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#pull_request_review -type PullRequestReviewEvent struct { - // Action is always "submitted". - Action *string `json:"action,omitempty"` - Review *PullRequestReview `json:"review,omitempty"` - PullRequest *PullRequest `json:"pull_request,omitempty"` - - // The following fields are only populated by Webhook events. 
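The PullRequestEvent comment above spells out how a closed pull request is distinguished from a merged one. A short sketch of that check, using the package's generated accessors (generated elsewhere in go-github and assumed here); the package and function names are illustrative.

package events

import (
	"log"

	"github.com/google/go-github/v57/github"
)

// logPullRequestOutcome applies the closed/merged semantics documented on
// PullRequestEvent: action "closed" with merged=false means the PR was closed
// with unmerged commits, action "closed" with merged=true means it was merged.
func logPullRequestOutcome(e *github.PullRequestEvent) {
	if e.GetAction() != "closed" {
		return
	}
	if e.GetPullRequest().GetMerged() {
		log.Printf("PR #%d was merged", e.GetNumber())
		return
	}
	log.Printf("PR #%d was closed without merging", e.GetNumber())
}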
- Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` -} - -// PullRequestReviewCommentEvent is triggered when a comment is created on a -// portion of the unified diff of a pull request. -// The Webhook event name is "pull_request_review_comment". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#pull_request_review_comment -type PullRequestReviewCommentEvent struct { - // Action is the action that was performed on the comment. - // Possible values are: "created", "edited", "deleted". - Action *string `json:"action,omitempty"` - PullRequest *PullRequest `json:"pull_request,omitempty"` - Comment *PullRequestComment `json:"comment,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// PullRequestReviewThreadEvent is triggered when a comment made as part of a -// review of a pull request is marked resolved or unresolved. -// The Webhook event name is "pull_request_review_thread". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#pull_request_review_thread -type PullRequestReviewThreadEvent struct { - // Action is the action that was performed on the comment. - // Possible values are: "resolved", "unresolved". - Action *string `json:"action,omitempty"` - Thread *PullRequestThread `json:"thread,omitempty"` - PullRequest *PullRequest `json:"pull_request,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// PullRequestTargetEvent is triggered when a pull request is assigned, unassigned, labeled, -// unlabeled, opened, edited, closed, reopened, synchronize, ready_for_review, -// locked, unlocked, a pull request review is requested, or a review request is removed. -// The Webhook event name is "pull_request_target". -// -// GitHub API docs: https://docs.github.com/actions/events-that-trigger-workflows#pull_request_target -type PullRequestTargetEvent struct { - // Action is the action that was performed. Possible values are: - // "assigned", "unassigned", "labeled", "unlabeled", "opened", "edited", "closed", "reopened", - // "ready_for_review", "locked", "unlocked", "review_requested" or "review_request_removed". - // If the action is "closed" and the "merged" key is "false", the pull request was closed with unmerged commits. - // If the action is "closed" and the "merged" key is "true", the pull request was merged. 
- // While webhooks are also triggered when a pull request is synchronized, Events API timelines - // don't include pull request events with the "synchronize" action. - Action *string `json:"action,omitempty"` - Assignee *User `json:"assignee,omitempty"` - Number *int `json:"number,omitempty"` - PullRequest *PullRequest `json:"pull_request,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - // RequestedReviewer is populated in "review_requested", "review_request_removed" event deliveries. - // A request affecting multiple reviewers at once is split into multiple - // such event deliveries, each with a single, different RequestedReviewer. - RequestedReviewer *User `json:"requested_reviewer,omitempty"` - // In the event that a team is requested instead of a user, "requested_team" gets sent in place of - // "requested_user" with the same delivery behavior. - RequestedTeam *Team `json:"requested_team,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Label *Label `json:"label,omitempty"` // Populated in "labeled" event deliveries. - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` - - // The following fields are only populated when the Action is "synchronize". - Before *string `json:"before,omitempty"` - After *string `json:"after,omitempty"` - - // The following will be populated if the event was performed by an App - PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` -} - -// PushEvent represents a git push to a GitHub repository. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#push -type PushEvent struct { - PushID *int64 `json:"push_id,omitempty"` - Head *string `json:"head,omitempty"` - Ref *string `json:"ref,omitempty"` - Size *int `json:"size,omitempty"` - Commits []*HeadCommit `json:"commits,omitempty"` - Before *string `json:"before,omitempty"` - DistinctSize *int `json:"distinct_size,omitempty"` - - // The following fields are only populated by Webhook events. - Action *string `json:"action,omitempty"` - After *string `json:"after,omitempty"` - Created *bool `json:"created,omitempty"` - Deleted *bool `json:"deleted,omitempty"` - Forced *bool `json:"forced,omitempty"` - BaseRef *string `json:"base_ref,omitempty"` - Compare *string `json:"compare,omitempty"` - Repo *PushEventRepository `json:"repository,omitempty"` - HeadCommit *HeadCommit `json:"head_commit,omitempty"` - Pusher *CommitAuthor `json:"pusher,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` -} - -func (p PushEvent) String() string { - return Stringify(p) -} - -// HeadCommit represents a git commit in a GitHub PushEvent. -type HeadCommit struct { - Message *string `json:"message,omitempty"` - Author *CommitAuthor `json:"author,omitempty"` - URL *string `json:"url,omitempty"` - Distinct *bool `json:"distinct,omitempty"` - - // The following fields are only populated by Events API. - SHA *string `json:"sha,omitempty"` - - // The following fields are only populated by Webhook events. 
- ID *string `json:"id,omitempty"` - TreeID *string `json:"tree_id,omitempty"` - Timestamp *Timestamp `json:"timestamp,omitempty"` - Committer *CommitAuthor `json:"committer,omitempty"` - Added []string `json:"added,omitempty"` - Removed []string `json:"removed,omitempty"` - Modified []string `json:"modified,omitempty"` -} - -func (h HeadCommit) String() string { - return Stringify(h) -} - -// PushEventRepository represents the repo object in a PushEvent payload. -type PushEventRepository struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - FullName *string `json:"full_name,omitempty"` - Owner *User `json:"owner,omitempty"` - Private *bool `json:"private,omitempty"` - Description *string `json:"description,omitempty"` - Fork *bool `json:"fork,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - PushedAt *Timestamp `json:"pushed_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Homepage *string `json:"homepage,omitempty"` - PullsURL *string `json:"pulls_url,omitempty"` - Size *int `json:"size,omitempty"` - StargazersCount *int `json:"stargazers_count,omitempty"` - WatchersCount *int `json:"watchers_count,omitempty"` - Language *string `json:"language,omitempty"` - HasIssues *bool `json:"has_issues,omitempty"` - HasDownloads *bool `json:"has_downloads,omitempty"` - HasWiki *bool `json:"has_wiki,omitempty"` - HasPages *bool `json:"has_pages,omitempty"` - ForksCount *int `json:"forks_count,omitempty"` - Archived *bool `json:"archived,omitempty"` - Disabled *bool `json:"disabled,omitempty"` - OpenIssuesCount *int `json:"open_issues_count,omitempty"` - DefaultBranch *string `json:"default_branch,omitempty"` - MasterBranch *string `json:"master_branch,omitempty"` - Organization *string `json:"organization,omitempty"` - URL *string `json:"url,omitempty"` - ArchiveURL *string `json:"archive_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - StatusesURL *string `json:"statuses_url,omitempty"` - GitURL *string `json:"git_url,omitempty"` - SSHURL *string `json:"ssh_url,omitempty"` - CloneURL *string `json:"clone_url,omitempty"` - SVNURL *string `json:"svn_url,omitempty"` - Topics []string `json:"topics,omitempty"` -} - -// PushEventRepoOwner is a basic representation of user/org in a PushEvent payload. -type PushEventRepoOwner struct { - Name *string `json:"name,omitempty"` - Email *string `json:"email,omitempty"` -} - -// ReleaseEvent is triggered when a release is published, unpublished, created, -// edited, deleted, or prereleased. -// The Webhook event name is "release". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#release -type ReleaseEvent struct { - // Action is the action that was performed. Possible values are: "published", "unpublished", - // "created", "edited", "deleted", or "prereleased". - Action *string `json:"action,omitempty"` - Release *RepositoryRelease `json:"release,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. 
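PushEvent and HeadCommit above carry everything needed to summarize a push. A brief sketch, again with illustrative package and function names, that walks the commit list using only fields defined in this file plus the generated accessors.

package events

import (
	"log"

	"github.com/google/go-github/v57/github"
)

// summarizePush logs the pushed ref and, per commit, how many files were
// added, removed and modified.
func summarizePush(e *github.PushEvent) {
	log.Printf("push to %s with %d commits", e.GetRef(), len(e.Commits))
	for _, c := range e.Commits {
		log.Printf("  %s by %s: +%d -%d ~%d files",
			c.GetID(), c.GetAuthor().GetName(), len(c.Added), len(c.Removed), len(c.Modified))
	}
}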
- Org *Organization `json:"organization,omitempty"` -} - -// RepositoryEvent is triggered when a repository is created, archived, unarchived, -// renamed, edited, transferred, made public, or made private. Organization hooks are -// also trigerred when a repository is deleted. -// The Webhook event name is "repository". -// -// Events of this type are not visible in timelines, they are only used to -// trigger organization webhooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#repository -type RepositoryEvent struct { - // Action is the action that was performed. Possible values are: "created", - // "deleted" (organization hooks only), "archived", "unarchived", "edited", "renamed", - // "transferred", "publicized", or "privatized". - Action *string `json:"action,omitempty"` - Repo *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// RepositoryDispatchEvent is triggered when a client sends a POST request to the repository dispatch event endpoint. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#repository_dispatch -type RepositoryDispatchEvent struct { - // Action is the event_type that submitted with the repository dispatch payload. Value can be any string. - Action *string `json:"action,omitempty"` - Branch *string `json:"branch,omitempty"` - ClientPayload json.RawMessage `json:"client_payload,omitempty"` - Repo *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// RepositoryImportEvent represents the activity related to a repository being imported to GitHub. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#repository_import -type RepositoryImportEvent struct { - // Status represents the final state of the import. This can be one of "success", "cancelled", or "failure". - Status *string `json:"status,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// RepositoryVulnerabilityAlertEvent is triggered when a security alert is created, dismissed, or resolved. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#repository_vulnerability_alert -type RepositoryVulnerabilityAlertEvent struct { - // Action is the action that was performed. Possible values are: "create", "dismiss", "resolve". - Action *string `json:"action,omitempty"` - - // The security alert of the vulnerable dependency. - Alert *RepositoryVulnerabilityAlert `json:"alert,omitempty"` - - // The repository of the vulnerable dependency. - Repository *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` - - // The user that triggered the event. 
- Sender *User `json:"sender,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// RepositoryVulnerabilityAlert represents a repository security alert. -type RepositoryVulnerabilityAlert struct { - ID *int64 `json:"id,omitempty"` - AffectedRange *string `json:"affected_range,omitempty"` - AffectedPackageName *string `json:"affected_package_name,omitempty"` - ExternalReference *string `json:"external_reference,omitempty"` - ExternalIdentifier *string `json:"external_identifier,omitempty"` - GitHubSecurityAdvisoryID *string `json:"ghsa_id,omitempty"` - Severity *string `json:"severity,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - FixedIn *string `json:"fixed_in,omitempty"` - Dismisser *User `json:"dismisser,omitempty"` - DismissReason *string `json:"dismiss_reason,omitempty"` - DismissedAt *Timestamp `json:"dismissed_at,omitempty"` -} - -// SecretScanningAlertEvent is triggered when a secret scanning alert occurs in a repository. -// The Webhook name is secret_scanning_alert. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#secret_scanning_alert -type SecretScanningAlertEvent struct { - // Action is the action that was performed. Possible values are: "created", "resolved", or "reopened". - Action *string `json:"action,omitempty"` - - // Alert is the secret scanning alert involved in the event. - Alert *SecretScanningAlert `json:"alert,omitempty"` - - // Only populated by the "resolved" and "reopen" actions - Sender *User `json:"sender,omitempty"` - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Enterprise *Enterprise `json:"enterprise,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// SecurityAndAnalysisEvent is triggered when code security and analysis features -// are enabled or disabled for a repository. -// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#security_and_analysis -type SecurityAndAnalysisEvent struct { - Changes *SecurityAndAnalysisChange `json:"changes,omitempty"` - Enterprise *Enterprise `json:"enterprise,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// SecurityAndAnalysisChange represents the changes when security and analysis -// features are enabled or disabled for a repository. -type SecurityAndAnalysisChange struct { - From *SecurityAndAnalysisChangeFrom `json:"from,omitempty"` -} - -// SecurityAndAnalysisChangeFrom represents which change was made when security -// and analysis features are enabled or disabled for a repository. -type SecurityAndAnalysisChangeFrom struct { - SecurityAndAnalysis *SecurityAndAnalysis `json:"security_and_analysis,omitempty"` -} - -// StarEvent is triggered when a star is added or removed from a repository. -// The Webhook event name is "star". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#star -type StarEvent struct { - // Action is the action that was performed. Possible values are: "created" or "deleted". 
- Action *string `json:"action,omitempty"` - - // StarredAt is the time the star was created. It will be null for the "deleted" action. - StarredAt *Timestamp `json:"starred_at,omitempty"` - - // The following fields are only populated by Webhook events. - Org *Organization `json:"organization,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// StatusEvent is triggered when the status of a Git commit changes. -// The Webhook event name is "status". -// -// Events of this type are not visible in timelines, they are only used to -// trigger hooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#status -type StatusEvent struct { - SHA *string `json:"sha,omitempty"` - // State is the new state. Possible values are: "pending", "success", "failure", "error". - State *string `json:"state,omitempty"` - Description *string `json:"description,omitempty"` - TargetURL *string `json:"target_url,omitempty"` - Branches []*Branch `json:"branches,omitempty"` - - // The following fields are only populated by Webhook events. - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Context *string `json:"context,omitempty"` - Commit *RepositoryCommit `json:"commit,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// TeamEvent is triggered when an organization's team is created, modified or deleted. -// The Webhook event name is "team". -// -// Events of this type are not visible in timelines. These events are only used -// to trigger hooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#team -type TeamEvent struct { - Action *string `json:"action,omitempty"` - Team *Team `json:"team,omitempty"` - Changes *TeamChange `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// TeamAddEvent is triggered when a repository is added to a team. -// The Webhook event name is "team_add". -// -// Events of this type are not visible in timelines. These events are only used -// to trigger hooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#team_add -type TeamAddEvent struct { - Team *Team `json:"team,omitempty"` - Repo *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// UserEvent is triggered when a user is created or deleted. -// The Webhook event name is "user". -// -// Only global webhooks can subscribe to this event type. 
-// -// GitHub API docs: https://developer.github.com/enterprise/v3/activity/events/types/#userevent-enterprise -type UserEvent struct { - User *User `json:"user,omitempty"` - // The action performed. Possible values are: "created" or "deleted". - Action *string `json:"action,omitempty"` - Enterprise *Enterprise `json:"enterprise,omitempty"` - Sender *User `json:"sender,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` -} - -// WatchEvent is related to starring a repository, not watching. See this API -// blog post for an explanation: https://developer.github.com/changes/2012-09-05-watcher-api/ -// -// The event’s actor is the user who starred a repository, and the event’s -// repository is the repository that was starred. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#watch -type WatchEvent struct { - // Action is the action that was performed. Possible value is: "started". - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// WorkflowDispatchEvent is triggered when someone triggers a workflow run on GitHub or -// sends a POST request to the create a workflow dispatch event endpoint. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch -type WorkflowDispatchEvent struct { - Inputs json.RawMessage `json:"inputs,omitempty"` - Ref *string `json:"ref,omitempty"` - Workflow *string `json:"workflow,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// WorkflowJobEvent is triggered when a job is queued, started or completed. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_job -type WorkflowJobEvent struct { - WorkflowJob *WorkflowJob `json:"workflow_job,omitempty"` - - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. - - // Org is not nil when the webhook is configured for an organization or the event - // occurs from activity in a repository owned by an organization. - Org *Organization `json:"organization,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// WorkflowRunEvent is triggered when a GitHub Actions workflow run is requested or completed. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#workflow_run -type WorkflowRunEvent struct { - Action *string `json:"action,omitempty"` - Workflow *Workflow `json:"workflow,omitempty"` - WorkflowRun *WorkflowRun `json:"workflow_run,omitempty"` - - // The following fields are only populated by Webhook events. 
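WorkflowJobEvent above is the payload type most relevant to runner management: a job moves through queued, started and completed states. A hedged sketch of dispatching on it; the action strings ("queued", "in_progress", "completed") and the WorkflowJob accessors used below come from the wider go-github package and are assumed rather than shown in this file.

package events

import (
	"log"

	"github.com/google/go-github/v57/github"
)

// handleWorkflowJob reacts to the lifecycle of a single workflow job.
func handleWorkflowJob(e *github.WorkflowJobEvent) {
	job := e.GetWorkflowJob()
	switch e.GetAction() {
	case "queued":
		// A runner could be provisioned for the job at this point.
		log.Printf("job %d (%s) queued", job.GetID(), job.GetName())
	case "in_progress":
		log.Printf("job %d picked up by runner %q", job.GetID(), job.GetRunnerName())
	case "completed":
		log.Printf("job %d finished with conclusion %q", job.GetID(), job.GetConclusion())
	}
}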
- Org *Organization `json:"organization,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// SecurityAdvisory represents the advisory object in SecurityAdvisoryEvent payload. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#security_advisory -type SecurityAdvisory struct { - CVSS *AdvisoryCVSS `json:"cvss,omitempty"` - CWEs []*AdvisoryCWEs `json:"cwes,omitempty"` - GHSAID *string `json:"ghsa_id,omitempty"` - Summary *string `json:"summary,omitempty"` - Description *string `json:"description,omitempty"` - Severity *string `json:"severity,omitempty"` - Identifiers []*AdvisoryIdentifier `json:"identifiers,omitempty"` - References []*AdvisoryReference `json:"references,omitempty"` - PublishedAt *Timestamp `json:"published_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - WithdrawnAt *Timestamp `json:"withdrawn_at,omitempty"` - Vulnerabilities []*AdvisoryVulnerability `json:"vulnerabilities,omitempty"` - CVEID *string `json:"cve_id,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Author *User `json:"author,omitempty"` - Publisher *User `json:"publisher,omitempty"` - State *string `json:"state,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - Submission *SecurityAdvisorySubmission `json:"submission,omitempty"` - CWEIDs []string `json:"cwe_ids,omitempty"` - Credits []*RepoAdvisoryCredit `json:"credits,omitempty"` - CreditsDetailed []*RepoAdvisoryCreditDetailed `json:"credits_detailed,omitempty"` - CollaboratingUsers []*User `json:"collaborating_users,omitempty"` - CollaboratingTeams []*Team `json:"collaborating_teams,omitempty"` - PrivateFork *Repository `json:"private_fork,omitempty"` -} - -// AdvisoryIdentifier represents the identifier for a Security Advisory. -type AdvisoryIdentifier struct { - Value *string `json:"value,omitempty"` - Type *string `json:"type,omitempty"` -} - -// AdvisoryReference represents the reference url for the security advisory. -type AdvisoryReference struct { - URL *string `json:"url,omitempty"` -} - -// AdvisoryVulnerability represents the vulnerability object for a Security Advisory. -type AdvisoryVulnerability struct { - Package *VulnerabilityPackage `json:"package,omitempty"` - Severity *string `json:"severity,omitempty"` - VulnerableVersionRange *string `json:"vulnerable_version_range,omitempty"` - FirstPatchedVersion *FirstPatchedVersion `json:"first_patched_version,omitempty"` - - // PatchedVersions and VulnerableFunctions are used in the following APIs: - // - https://docs.github.com/rest/security-advisories/repository-advisories#list-repository-security-advisories-for-an-organization - // - https://docs.github.com/rest/security-advisories/repository-advisories#list-repository-security-advisories - PatchedVersions *string `json:"patched_versions,omitempty"` - VulnerableFunctions []string `json:"vulnerable_functions,omitempty"` -} - -// VulnerabilityPackage represents the package object for an Advisory Vulnerability. -type VulnerabilityPackage struct { - Ecosystem *string `json:"ecosystem,omitempty"` - Name *string `json:"name,omitempty"` -} - -// FirstPatchedVersion represents the identifier for the first patched version of that vulnerability. 
-type FirstPatchedVersion struct { - Identifier *string `json:"identifier,omitempty"` -} - -// SecurityAdvisoryEvent is triggered when a security-related vulnerability is found in software on GitHub. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#security_advisory -type SecurityAdvisoryEvent struct { - Action *string `json:"action,omitempty"` - SecurityAdvisory *SecurityAdvisory `json:"security_advisory,omitempty"` - - // The following fields are only populated by Webhook events. - Enterprise *Enterprise `json:"enterprise,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// CodeScanningAlertEvent is triggered when a code scanning finds a potential vulnerability or error in your code. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#code_scanning_alert -type CodeScanningAlertEvent struct { - Action *string `json:"action,omitempty"` - Alert *Alert `json:"alert,omitempty"` - Ref *string `json:"ref,omitempty"` - // CommitOID is the commit SHA of the code scanning alert - CommitOID *string `json:"commit_oid,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - - Installation *Installation `json:"installation,omitempty"` -} diff --git a/vendor/github.com/google/go-github/v57/github/gists.go b/vendor/github.com/google/go-github/v57/github/gists.go deleted file mode 100644 index 08180c6d..00000000 --- a/vendor/github.com/google/go-github/v57/github/gists.go +++ /dev/null @@ -1,397 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "time" -) - -// GistsService handles communication with the Gist related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/gists -type GistsService service - -// Gist represents a GitHub's gist. -type Gist struct { - ID *string `json:"id,omitempty"` - Description *string `json:"description,omitempty"` - Public *bool `json:"public,omitempty"` - Owner *User `json:"owner,omitempty"` - Files map[GistFilename]GistFile `json:"files,omitempty"` - Comments *int `json:"comments,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - GitPullURL *string `json:"git_pull_url,omitempty"` - GitPushURL *string `json:"git_push_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (g Gist) String() string { - return Stringify(g) -} - -// GistFilename represents filename on a gist. -type GistFilename string - -// GistFile represents a file on a gist. -type GistFile struct { - Size *int `json:"size,omitempty"` - Filename *string `json:"filename,omitempty"` - Language *string `json:"language,omitempty"` - Type *string `json:"type,omitempty"` - RawURL *string `json:"raw_url,omitempty"` - Content *string `json:"content,omitempty"` -} - -func (g GistFile) String() string { - return Stringify(g) -} - -// GistCommit represents a commit on a gist. 
-type GistCommit struct { - URL *string `json:"url,omitempty"` - Version *string `json:"version,omitempty"` - User *User `json:"user,omitempty"` - ChangeStatus *CommitStats `json:"change_status,omitempty"` - CommittedAt *Timestamp `json:"committed_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (gc GistCommit) String() string { - return Stringify(gc) -} - -// GistFork represents a fork of a gist. -type GistFork struct { - URL *string `json:"url,omitempty"` - User *User `json:"user,omitempty"` - ID *string `json:"id,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (gf GistFork) String() string { - return Stringify(gf) -} - -// GistListOptions specifies the optional parameters to the -// GistsService.List, GistsService.ListAll, and GistsService.ListStarred methods. -type GistListOptions struct { - // Since filters Gists by time. - Since time.Time `url:"since,omitempty"` - - ListOptions -} - -// List gists for a user. Passing the empty string will list -// all public gists if called anonymously. However, if the call -// is authenticated, it will returns all gists for the authenticated -// user. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-gists-for-a-user -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-gists-for-the-authenticated-user -// -//meta:operation GET /gists -//meta:operation GET /users/{username}/gists -func (s *GistsService) List(ctx context.Context, user string, opts *GistListOptions) ([]*Gist, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/gists", user) - } else { - u = "gists" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var gists []*Gist - resp, err := s.client.Do(ctx, req, &gists) - if err != nil { - return nil, resp, err - } - - return gists, resp, nil -} - -// ListAll lists all public gists. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-public-gists -// -//meta:operation GET /gists/public -func (s *GistsService) ListAll(ctx context.Context, opts *GistListOptions) ([]*Gist, *Response, error) { - u, err := addOptions("gists/public", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var gists []*Gist - resp, err := s.client.Do(ctx, req, &gists) - if err != nil { - return nil, resp, err - } - - return gists, resp, nil -} - -// ListStarred lists starred gists of authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-starred-gists -// -//meta:operation GET /gists/starred -func (s *GistsService) ListStarred(ctx context.Context, opts *GistListOptions) ([]*Gist, *Response, error) { - u, err := addOptions("gists/starred", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var gists []*Gist - resp, err := s.client.Do(ctx, req, &gists) - if err != nil { - return nil, resp, err - } - - return gists, resp, nil -} - -// Get a single gist. 
-// -// GitHub API docs: https://docs.github.com/rest/gists/gists#get-a-gist -// -//meta:operation GET /gists/{gist_id} -func (s *GistsService) Get(ctx context.Context, id string) (*Gist, *Response, error) { - u := fmt.Sprintf("gists/%v", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - gist := new(Gist) - resp, err := s.client.Do(ctx, req, gist) - if err != nil { - return nil, resp, err - } - - return gist, resp, nil -} - -// GetRevision gets a specific revision of a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#get-a-gist-revision -// -//meta:operation GET /gists/{gist_id}/{sha} -func (s *GistsService) GetRevision(ctx context.Context, id, sha string) (*Gist, *Response, error) { - u := fmt.Sprintf("gists/%v/%v", id, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - gist := new(Gist) - resp, err := s.client.Do(ctx, req, gist) - if err != nil { - return nil, resp, err - } - - return gist, resp, nil -} - -// Create a gist for authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#create-a-gist -// -//meta:operation POST /gists -func (s *GistsService) Create(ctx context.Context, gist *Gist) (*Gist, *Response, error) { - u := "gists" - req, err := s.client.NewRequest("POST", u, gist) - if err != nil { - return nil, nil, err - } - - g := new(Gist) - resp, err := s.client.Do(ctx, req, g) - if err != nil { - return nil, resp, err - } - - return g, resp, nil -} - -// Edit a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#update-a-gist -// -//meta:operation PATCH /gists/{gist_id} -func (s *GistsService) Edit(ctx context.Context, id string, gist *Gist) (*Gist, *Response, error) { - u := fmt.Sprintf("gists/%v", id) - req, err := s.client.NewRequest("PATCH", u, gist) - if err != nil { - return nil, nil, err - } - - g := new(Gist) - resp, err := s.client.Do(ctx, req, g) - if err != nil { - return nil, resp, err - } - - return g, resp, nil -} - -// ListCommits lists commits of a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-gist-commits -// -//meta:operation GET /gists/{gist_id}/commits -func (s *GistsService) ListCommits(ctx context.Context, id string, opts *ListOptions) ([]*GistCommit, *Response, error) { - u := fmt.Sprintf("gists/%v/commits", id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var gistCommits []*GistCommit - resp, err := s.client.Do(ctx, req, &gistCommits) - if err != nil { - return nil, resp, err - } - - return gistCommits, resp, nil -} - -// Delete a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#delete-a-gist -// -//meta:operation DELETE /gists/{gist_id} -func (s *GistsService) Delete(ctx context.Context, id string) (*Response, error) { - u := fmt.Sprintf("gists/%v", id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Star a gist on behalf of authenticated user. 
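The Gists service methods removed above follow the usual go-github call shape: build options, issue the request, range over the result. A small self-contained sketch that lists a few public gists anonymously; it relies only on types and methods defined in this file plus the standard ListOptions and generated accessors.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // anonymous client; public gists only

	opts := &github.GistListOptions{ListOptions: github.ListOptions{PerPage: 5}}
	gists, _, err := client.Gists.ListAll(ctx, opts)
	if err != nil {
		log.Fatalf("listing public gists: %v", err)
	}
	for _, g := range gists {
		fmt.Printf("%s: %s\n", g.GetID(), g.GetDescription())
	}
}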
-// -// GitHub API docs: https://docs.github.com/rest/gists/gists#star-a-gist -// -//meta:operation PUT /gists/{gist_id}/star -func (s *GistsService) Star(ctx context.Context, id string) (*Response, error) { - u := fmt.Sprintf("gists/%v/star", id) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unstar a gist on a behalf of authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#unstar-a-gist -// -//meta:operation DELETE /gists/{gist_id}/star -func (s *GistsService) Unstar(ctx context.Context, id string) (*Response, error) { - u := fmt.Sprintf("gists/%v/star", id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// IsStarred checks if a gist is starred by authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#check-if-a-gist-is-starred -// -//meta:operation GET /gists/{gist_id}/star -func (s *GistsService) IsStarred(ctx context.Context, id string) (bool, *Response, error) { - u := fmt.Sprintf("gists/%v/star", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - starred, err := parseBoolResponse(err) - return starred, resp, err -} - -// Fork a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#fork-a-gist -// -//meta:operation POST /gists/{gist_id}/forks -func (s *GistsService) Fork(ctx context.Context, id string) (*Gist, *Response, error) { - u := fmt.Sprintf("gists/%v/forks", id) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - g := new(Gist) - resp, err := s.client.Do(ctx, req, g) - if err != nil { - return nil, resp, err - } - - return g, resp, nil -} - -// ListForks lists forks of a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-gist-forks -// -//meta:operation GET /gists/{gist_id}/forks -func (s *GistsService) ListForks(ctx context.Context, id string, opts *ListOptions) ([]*GistFork, *Response, error) { - u := fmt.Sprintf("gists/%v/forks", id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var gistForks []*GistFork - resp, err := s.client.Do(ctx, req, &gistForks) - if err != nil { - return nil, resp, err - } - - return gistForks, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/gists_comments.go b/vendor/github.com/google/go-github/v57/github/gists_comments.go deleted file mode 100644 index 5e061423..00000000 --- a/vendor/github.com/google/go-github/v57/github/gists_comments.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GistComment represents a Gist comment. -type GistComment struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Body *string `json:"body,omitempty"` - User *User `json:"user,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` -} - -func (g GistComment) String() string { - return Stringify(g) -} - -// ListComments lists all comments for a gist. 
-// -// GitHub API docs: https://docs.github.com/rest/gists/comments#list-gist-comments -// -//meta:operation GET /gists/{gist_id}/comments -func (s *GistsService) ListComments(ctx context.Context, gistID string, opts *ListOptions) ([]*GistComment, *Response, error) { - u := fmt.Sprintf("gists/%v/comments", gistID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var comments []*GistComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// GetComment retrieves a single comment from a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/comments#get-a-gist-comment -// -//meta:operation GET /gists/{gist_id}/comments/{comment_id} -func (s *GistsService) GetComment(ctx context.Context, gistID string, commentID int64) (*GistComment, *Response, error) { - u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - c := new(GistComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// CreateComment creates a comment for a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/comments#create-a-gist-comment -// -//meta:operation POST /gists/{gist_id}/comments -func (s *GistsService) CreateComment(ctx context.Context, gistID string, comment *GistComment) (*GistComment, *Response, error) { - u := fmt.Sprintf("gists/%v/comments", gistID) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(GistComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// EditComment edits an existing gist comment. -// -// GitHub API docs: https://docs.github.com/rest/gists/comments#update-a-gist-comment -// -//meta:operation PATCH /gists/{gist_id}/comments/{comment_id} -func (s *GistsService) EditComment(ctx context.Context, gistID string, commentID int64, comment *GistComment) (*GistComment, *Response, error) { - u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(GistComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// DeleteComment deletes a gist comment. -// -// GitHub API docs: https://docs.github.com/rest/gists/comments#delete-a-gist-comment -// -//meta:operation DELETE /gists/{gist_id}/comments/{comment_id} -func (s *GistsService) DeleteComment(ctx context.Context, gistID string, commentID int64) (*Response, error) { - u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/git.go b/vendor/github.com/google/go-github/v57/github/git.go deleted file mode 100644 index 2ca835e1..00000000 --- a/vendor/github.com/google/go-github/v57/github/git.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -// GitService handles communication with the git data related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/git/ -type GitService service diff --git a/vendor/github.com/google/go-github/v57/github/git_blobs.go b/vendor/github.com/google/go-github/v57/github/git_blobs.go deleted file mode 100644 index d8904288..00000000 --- a/vendor/github.com/google/go-github/v57/github/git_blobs.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" - "fmt" -) - -// Blob represents a blob object. -type Blob struct { - Content *string `json:"content,omitempty"` - Encoding *string `json:"encoding,omitempty"` - SHA *string `json:"sha,omitempty"` - Size *int `json:"size,omitempty"` - URL *string `json:"url,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -// GetBlob fetches a blob from a repo given a SHA. -// -// GitHub API docs: https://docs.github.com/rest/git/blobs#get-a-blob -// -//meta:operation GET /repos/{owner}/{repo}/git/blobs/{file_sha} -func (s *GitService) GetBlob(ctx context.Context, owner string, repo string, sha string) (*Blob, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/blobs/%v", owner, repo, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - blob := new(Blob) - resp, err := s.client.Do(ctx, req, blob) - if err != nil { - return nil, resp, err - } - - return blob, resp, nil -} - -// GetBlobRaw fetches a blob's contents from a repo. -// Unlike GetBlob, it returns the raw bytes rather than the base64-encoded data. -// -// GitHub API docs: https://docs.github.com/rest/git/blobs#get-a-blob -// -//meta:operation GET /repos/{owner}/{repo}/git/blobs/{file_sha} -func (s *GitService) GetBlobRaw(ctx context.Context, owner, repo, sha string) ([]byte, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/blobs/%v", owner, repo, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", "application/vnd.github.v3.raw") - - var buf bytes.Buffer - resp, err := s.client.Do(ctx, req, &buf) - if err != nil { - return nil, resp, err - } - - return buf.Bytes(), resp, nil -} - -// CreateBlob creates a blob object. -// -// GitHub API docs: https://docs.github.com/rest/git/blobs#create-a-blob -// -//meta:operation POST /repos/{owner}/{repo}/git/blobs -func (s *GitService) CreateBlob(ctx context.Context, owner string, repo string, blob *Blob) (*Blob, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/blobs", owner, repo) - req, err := s.client.NewRequest("POST", u, blob) - if err != nil { - return nil, nil, err - } - - t := new(Blob) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/git_commits.go b/vendor/github.com/google/go-github/v57/github/git_commits.go deleted file mode 100644 index 573d38be..00000000 --- a/vendor/github.com/google/go-github/v57/github/git_commits.go +++ /dev/null @@ -1,225 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "bytes" - "context" - "errors" - "fmt" - "io" - "strings" -) - -// SignatureVerification represents GPG signature verification. -type SignatureVerification struct { - Verified *bool `json:"verified,omitempty"` - Reason *string `json:"reason,omitempty"` - Signature *string `json:"signature,omitempty"` - Payload *string `json:"payload,omitempty"` -} - -// MessageSigner is used by GitService.CreateCommit to sign a commit. -// -// To create a MessageSigner that signs a commit with a [golang.org/x/crypto/openpgp.Entity], -// or [github.com/ProtonMail/go-crypto/openpgp.Entity], use: -// -// commit.Signer = github.MessageSignerFunc(func(w io.Writer, r io.Reader) error { -// return openpgp.ArmoredDetachSign(w, openpgpEntity, r, nil) -// }) -type MessageSigner interface { - Sign(w io.Writer, r io.Reader) error -} - -// MessageSignerFunc is a single function implementation of MessageSigner. -type MessageSignerFunc func(w io.Writer, r io.Reader) error - -func (f MessageSignerFunc) Sign(w io.Writer, r io.Reader) error { - return f(w, r) -} - -// Commit represents a GitHub commit. -type Commit struct { - SHA *string `json:"sha,omitempty"` - Author *CommitAuthor `json:"author,omitempty"` - Committer *CommitAuthor `json:"committer,omitempty"` - Message *string `json:"message,omitempty"` - Tree *Tree `json:"tree,omitempty"` - Parents []*Commit `json:"parents,omitempty"` - Stats *CommitStats `json:"stats,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - URL *string `json:"url,omitempty"` - Verification *SignatureVerification `json:"verification,omitempty"` - NodeID *string `json:"node_id,omitempty"` - - // CommentCount is the number of GitHub comments on the commit. This - // is only populated for requests that fetch GitHub data like - // Pulls.ListCommits, Repositories.ListCommits, etc. - CommentCount *int `json:"comment_count,omitempty"` -} - -func (c Commit) String() string { - return Stringify(c) -} - -// CommitAuthor represents the author or committer of a commit. The commit -// author may not correspond to a GitHub User. -type CommitAuthor struct { - Date *Timestamp `json:"date,omitempty"` - Name *string `json:"name,omitempty"` - Email *string `json:"email,omitempty"` - - // The following fields are only populated by Webhook events. - Login *string `json:"username,omitempty"` // Renamed for go-github consistency. -} - -func (c CommitAuthor) String() string { - return Stringify(c) -} - -// GetCommit fetches the Commit object for a given SHA. -// -// GitHub API docs: https://docs.github.com/rest/git/commits#get-a-commit-object -// -//meta:operation GET /repos/{owner}/{repo}/git/commits/{commit_sha} -func (s *GitService) GetCommit(ctx context.Context, owner string, repo string, sha string) (*Commit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/commits/%v", owner, repo, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - c := new(Commit) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// createCommit represents the body of a CreateCommit request. 
-type createCommit struct { - Author *CommitAuthor `json:"author,omitempty"` - Committer *CommitAuthor `json:"committer,omitempty"` - Message *string `json:"message,omitempty"` - Tree *string `json:"tree,omitempty"` - Parents []string `json:"parents,omitempty"` - Signature *string `json:"signature,omitempty"` -} - -type CreateCommitOptions struct { - // CreateCommit will sign the commit with this signer. See MessageSigner doc for more details. - // Ignored on commits where Verification.Signature is defined. - Signer MessageSigner -} - -// CreateCommit creates a new commit in a repository. -// commit must not be nil. -// -// The commit.Committer is optional and will be filled with the commit.Author -// data if omitted. If the commit.Author is omitted, it will be filled in with -// the authenticated user’s information and the current date. -// -// GitHub API docs: https://docs.github.com/rest/git/commits#create-a-commit -// -//meta:operation POST /repos/{owner}/{repo}/git/commits -func (s *GitService) CreateCommit(ctx context.Context, owner string, repo string, commit *Commit, opts *CreateCommitOptions) (*Commit, *Response, error) { - if commit == nil { - return nil, nil, fmt.Errorf("commit must be provided") - } - if opts == nil { - opts = &CreateCommitOptions{} - } - - u := fmt.Sprintf("repos/%v/%v/git/commits", owner, repo) - - parents := make([]string, len(commit.Parents)) - for i, parent := range commit.Parents { - parents[i] = *parent.SHA - } - - body := &createCommit{ - Author: commit.Author, - Committer: commit.Committer, - Message: commit.Message, - Parents: parents, - } - if commit.Tree != nil { - body.Tree = commit.Tree.SHA - } - switch { - case commit.Verification != nil: - body.Signature = commit.Verification.Signature - case opts.Signer != nil: - signature, err := createSignature(opts.Signer, body) - if err != nil { - return nil, nil, err - } - body.Signature = &signature - } - - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - c := new(Commit) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -func createSignature(signer MessageSigner, commit *createCommit) (string, error) { - if signer == nil { - return "", errors.New("createSignature: invalid parameters") - } - - message, err := createSignatureMessage(commit) - if err != nil { - return "", err - } - - var writer bytes.Buffer - err = signer.Sign(&writer, strings.NewReader(message)) - if err != nil { - return "", err - } - - return writer.String(), nil -} - -func createSignatureMessage(commit *createCommit) (string, error) { - if commit == nil || commit.Message == nil || *commit.Message == "" || commit.Author == nil { - return "", errors.New("createSignatureMessage: invalid parameters") - } - - var message []string - - if commit.Tree != nil { - message = append(message, fmt.Sprintf("tree %s", *commit.Tree)) - } - - for _, parent := range commit.Parents { - message = append(message, fmt.Sprintf("parent %s", parent)) - } - - message = append(message, fmt.Sprintf("author %s <%s> %d %s", commit.Author.GetName(), commit.Author.GetEmail(), commit.Author.GetDate().Unix(), commit.Author.GetDate().Format("-0700"))) - - committer := commit.Committer - if committer == nil { - committer = commit.Author - } - - // There needs to be a double newline after committer - message = append(message, fmt.Sprintf("committer %s <%s> %d %s\n", committer.GetName(), committer.GetEmail(), committer.GetDate().Unix(), 
committer.GetDate().Format("-0700"))) - message = append(message, *commit.Message) - - return strings.Join(message, "\n"), nil -} diff --git a/vendor/github.com/google/go-github/v57/github/git_refs.go b/vendor/github.com/google/go-github/v57/github/git_refs.go deleted file mode 100644 index ad7b10d7..00000000 --- a/vendor/github.com/google/go-github/v57/github/git_refs.go +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/url" - "strings" -) - -// Reference represents a GitHub reference. -type Reference struct { - Ref *string `json:"ref"` - URL *string `json:"url"` - Object *GitObject `json:"object"` - NodeID *string `json:"node_id,omitempty"` -} - -func (r Reference) String() string { - return Stringify(r) -} - -// GitObject represents a Git object. -type GitObject struct { - Type *string `json:"type"` - SHA *string `json:"sha"` - URL *string `json:"url"` -} - -func (o GitObject) String() string { - return Stringify(o) -} - -// createRefRequest represents the payload for creating a reference. -type createRefRequest struct { - Ref *string `json:"ref"` - SHA *string `json:"sha"` -} - -// updateRefRequest represents the payload for updating a reference. -type updateRefRequest struct { - SHA *string `json:"sha"` - Force *bool `json:"force"` -} - -// GetRef fetches a single reference in a repository. -// -// GitHub API docs: https://docs.github.com/rest/git/refs#get-a-reference -// -//meta:operation GET /repos/{owner}/{repo}/git/ref/{ref} -func (s *GitService) GetRef(ctx context.Context, owner string, repo string, ref string) (*Reference, *Response, error) { - ref = strings.TrimPrefix(ref, "refs/") - u := fmt.Sprintf("repos/%v/%v/git/ref/%v", owner, repo, refURLEscape(ref)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - r := new(Reference) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// refURLEscape escapes every path segment of the given ref. Those must -// not contain escaped "/" - as "%2F" - or github will not recognize it. -func refURLEscape(ref string) string { - parts := strings.Split(ref, "/") - for i, s := range parts { - parts[i] = url.PathEscape(s) - } - return strings.Join(parts, "/") -} - -// ReferenceListOptions specifies optional parameters to the -// GitService.ListMatchingRefs method. -type ReferenceListOptions struct { - Ref string `url:"-"` - - ListOptions -} - -// ListMatchingRefs lists references in a repository that match a supplied ref. -// Use an empty ref to list all references. 
-// -// GitHub API docs: https://docs.github.com/rest/git/refs#list-matching-references -// -//meta:operation GET /repos/{owner}/{repo}/git/matching-refs/{ref} -func (s *GitService) ListMatchingRefs(ctx context.Context, owner, repo string, opts *ReferenceListOptions) ([]*Reference, *Response, error) { - var ref string - if opts != nil { - ref = strings.TrimPrefix(opts.Ref, "refs/") - } - u := fmt.Sprintf("repos/%v/%v/git/matching-refs/%v", owner, repo, refURLEscape(ref)) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rs []*Reference - resp, err := s.client.Do(ctx, req, &rs) - if err != nil { - return nil, resp, err - } - - return rs, resp, nil -} - -// CreateRef creates a new ref in a repository. -// -// GitHub API docs: https://docs.github.com/rest/git/refs#create-a-reference -// -//meta:operation POST /repos/{owner}/{repo}/git/refs -func (s *GitService) CreateRef(ctx context.Context, owner string, repo string, ref *Reference) (*Reference, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/refs", owner, repo) - req, err := s.client.NewRequest("POST", u, &createRefRequest{ - // back-compat with previous behavior that didn't require 'refs/' prefix - Ref: String("refs/" + strings.TrimPrefix(*ref.Ref, "refs/")), - SHA: ref.Object.SHA, - }) - if err != nil { - return nil, nil, err - } - - r := new(Reference) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// UpdateRef updates an existing ref in a repository. -// -// GitHub API docs: https://docs.github.com/rest/git/refs#update-a-reference -// -//meta:operation PATCH /repos/{owner}/{repo}/git/refs/{ref} -func (s *GitService) UpdateRef(ctx context.Context, owner string, repo string, ref *Reference, force bool) (*Reference, *Response, error) { - refPath := strings.TrimPrefix(*ref.Ref, "refs/") - u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, refURLEscape(refPath)) - req, err := s.client.NewRequest("PATCH", u, &updateRefRequest{ - SHA: ref.Object.SHA, - Force: &force, - }) - if err != nil { - return nil, nil, err - } - - r := new(Reference) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// DeleteRef deletes a ref from a repository. -// -// GitHub API docs: https://docs.github.com/rest/git/refs#delete-a-reference -// -//meta:operation DELETE /repos/{owner}/{repo}/git/refs/{ref} -func (s *GitService) DeleteRef(ctx context.Context, owner string, repo string, ref string) (*Response, error) { - ref = strings.TrimPrefix(ref, "refs/") - u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, refURLEscape(ref)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/git_tags.go b/vendor/github.com/google/go-github/v57/github/git_tags.go deleted file mode 100644 index 67321566..00000000 --- a/vendor/github.com/google/go-github/v57/github/git_tags.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Tag represents a tag object. 
-type Tag struct { - Tag *string `json:"tag,omitempty"` - SHA *string `json:"sha,omitempty"` - URL *string `json:"url,omitempty"` - Message *string `json:"message,omitempty"` - Tagger *CommitAuthor `json:"tagger,omitempty"` - Object *GitObject `json:"object,omitempty"` - Verification *SignatureVerification `json:"verification,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -// createTagRequest represents the body of a CreateTag request. This is mostly -// identical to Tag with the exception that the object SHA and Type are -// top-level fields, rather than being nested inside a JSON object. -type createTagRequest struct { - Tag *string `json:"tag,omitempty"` - Message *string `json:"message,omitempty"` - Object *string `json:"object,omitempty"` - Type *string `json:"type,omitempty"` - Tagger *CommitAuthor `json:"tagger,omitempty"` -} - -// GetTag fetches a tag from a repo given a SHA. -// -// GitHub API docs: https://docs.github.com/rest/git/tags#get-a-tag -// -//meta:operation GET /repos/{owner}/{repo}/git/tags/{tag_sha} -func (s *GitService) GetTag(ctx context.Context, owner string, repo string, sha string) (*Tag, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/tags/%v", owner, repo, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - tag := new(Tag) - resp, err := s.client.Do(ctx, req, tag) - if err != nil { - return nil, resp, err - } - - return tag, resp, nil -} - -// CreateTag creates a tag object. -// -// GitHub API docs: https://docs.github.com/rest/git/tags#create-a-tag-object -// -//meta:operation POST /repos/{owner}/{repo}/git/tags -func (s *GitService) CreateTag(ctx context.Context, owner string, repo string, tag *Tag) (*Tag, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/tags", owner, repo) - - // convert Tag into a createTagRequest - tagRequest := &createTagRequest{ - Tag: tag.Tag, - Message: tag.Message, - Tagger: tag.Tagger, - } - if tag.Object != nil { - tagRequest.Object = tag.Object.SHA - tagRequest.Type = tag.Object.Type - } - - req, err := s.client.NewRequest("POST", u, tagRequest) - if err != nil { - return nil, nil, err - } - - t := new(Tag) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/git_trees.go b/vendor/github.com/google/go-github/v57/github/git_trees.go deleted file mode 100644 index b8eed58e..00000000 --- a/vendor/github.com/google/go-github/v57/github/git_trees.go +++ /dev/null @@ -1,166 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" -) - -// Tree represents a GitHub tree. -type Tree struct { - SHA *string `json:"sha,omitempty"` - Entries []*TreeEntry `json:"tree,omitempty"` - - // Truncated is true if the number of items in the tree - // exceeded GitHub's maximum limit and the Entries were truncated - // in the response. Only populated for requests that fetch - // trees like Git.GetTree. - Truncated *bool `json:"truncated,omitempty"` -} - -func (t Tree) String() string { - return Stringify(t) -} - -// TreeEntry represents the contents of a tree structure. TreeEntry can -// represent either a blob, a commit (in the case of a submodule), or another -// tree. 
-type TreeEntry struct { - SHA *string `json:"sha,omitempty"` - Path *string `json:"path,omitempty"` - Mode *string `json:"mode,omitempty"` - Type *string `json:"type,omitempty"` - Size *int `json:"size,omitempty"` - Content *string `json:"content,omitempty"` - URL *string `json:"url,omitempty"` -} - -func (t TreeEntry) String() string { - return Stringify(t) -} - -// treeEntryWithFileDelete is used internally to delete a file whose -// Content and SHA fields are empty. It does this by removing the "omitempty" -// tag modifier on the SHA field which causes the GitHub API to receive -// {"sha":null} and thereby delete the file. -type treeEntryWithFileDelete struct { - SHA *string `json:"sha"` - Path *string `json:"path,omitempty"` - Mode *string `json:"mode,omitempty"` - Type *string `json:"type,omitempty"` - Size *int `json:"size,omitempty"` - Content *string `json:"content,omitempty"` - URL *string `json:"url,omitempty"` -} - -func (t *TreeEntry) MarshalJSON() ([]byte, error) { - if t.SHA == nil && t.Content == nil { - return json.Marshal(struct { - SHA *string `json:"sha"` - Path *string `json:"path,omitempty"` - Mode *string `json:"mode,omitempty"` - Type *string `json:"type,omitempty"` - }{ - nil, - t.Path, - t.Mode, - t.Type, - }) - } - return json.Marshal(struct { - SHA *string `json:"sha,omitempty"` - Path *string `json:"path,omitempty"` - Mode *string `json:"mode,omitempty"` - Type *string `json:"type,omitempty"` - Size *int `json:"size,omitempty"` - Content *string `json:"content,omitempty"` - URL *string `json:"url,omitempty"` - }{ - SHA: t.SHA, - Path: t.Path, - Mode: t.Mode, - Type: t.Type, - Size: t.Size, - Content: t.Content, - URL: t.URL, - }) -} - -// GetTree fetches the Tree object for a given sha hash from a repository. -// -// GitHub API docs: https://docs.github.com/rest/git/trees#get-a-tree -// -//meta:operation GET /repos/{owner}/{repo}/git/trees/{tree_sha} -func (s *GitService) GetTree(ctx context.Context, owner string, repo string, sha string, recursive bool) (*Tree, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/trees/%v", owner, repo, sha) - if recursive { - u += "?recursive=1" - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Tree) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// createTree represents the body of a CreateTree request. -type createTree struct { - BaseTree string `json:"base_tree,omitempty"` - Entries []interface{} `json:"tree"` -} - -// CreateTree creates a new tree in a repository. If both a tree and a nested -// path modifying that tree are specified, it will overwrite the contents of -// that tree with the new path contents and write a new tree out. 
-// -// GitHub API docs: https://docs.github.com/rest/git/trees#create-a-tree -// -//meta:operation POST /repos/{owner}/{repo}/git/trees -func (s *GitService) CreateTree(ctx context.Context, owner string, repo string, baseTree string, entries []*TreeEntry) (*Tree, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/trees", owner, repo) - - newEntries := make([]interface{}, 0, len(entries)) - for _, entry := range entries { - if entry.Content == nil && entry.SHA == nil { - newEntries = append(newEntries, treeEntryWithFileDelete{ - Path: entry.Path, - Mode: entry.Mode, - Type: entry.Type, - Size: entry.Size, - URL: entry.URL, - }) - continue - } - newEntries = append(newEntries, entry) - } - - body := &createTree{ - BaseTree: baseTree, - Entries: newEntries, - } - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - t := new(Tree) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/github-accessors.go b/vendor/github.com/google/go-github/v57/github/github-accessors.go deleted file mode 100644 index e15eb102..00000000 --- a/vendor/github.com/google/go-github/v57/github/github-accessors.go +++ /dev/null @@ -1,25375 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by gen-accessors; DO NOT EDIT. -// Instead, please run "go generate ./..." as described here: -// https://github.com/google/go-github/blob/master/CONTRIBUTING.md#submitting-a-patch - -package github - -import ( - "encoding/json" - "time" -) - -// GetRetryAfter returns the RetryAfter field if it's non-nil, zero value otherwise. -func (a *AbuseRateLimitError) GetRetryAfter() time.Duration { - if a == nil || a.RetryAfter == nil { - return 0 - } - return *a.RetryAfter -} - -// GetGithubOwnedAllowed returns the GithubOwnedAllowed field if it's non-nil, zero value otherwise. -func (a *ActionsAllowed) GetGithubOwnedAllowed() bool { - if a == nil || a.GithubOwnedAllowed == nil { - return false - } - return *a.GithubOwnedAllowed -} - -// GetVerifiedAllowed returns the VerifiedAllowed field if it's non-nil, zero value otherwise. -func (a *ActionsAllowed) GetVerifiedAllowed() bool { - if a == nil || a.VerifiedAllowed == nil { - return false - } - return *a.VerifiedAllowed -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetKey() string { - if a == nil || a.Key == nil { - return "" - } - return *a.Key -} - -// GetLastAccessedAt returns the LastAccessedAt field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetLastAccessedAt() Timestamp { - if a == nil || a.LastAccessedAt == nil { - return Timestamp{} - } - return *a.LastAccessedAt -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. 
-func (a *ActionsCache) GetRef() string { - if a == nil || a.Ref == nil { - return "" - } - return *a.Ref -} - -// GetSizeInBytes returns the SizeInBytes field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetSizeInBytes() int64 { - if a == nil || a.SizeInBytes == nil { - return 0 - } - return *a.SizeInBytes -} - -// GetVersion returns the Version field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetVersion() string { - if a == nil || a.Version == nil { - return "" - } - return *a.Version -} - -// GetDirection returns the Direction field if it's non-nil, zero value otherwise. -func (a *ActionsCacheListOptions) GetDirection() string { - if a == nil || a.Direction == nil { - return "" - } - return *a.Direction -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (a *ActionsCacheListOptions) GetKey() string { - if a == nil || a.Key == nil { - return "" - } - return *a.Key -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (a *ActionsCacheListOptions) GetRef() string { - if a == nil || a.Ref == nil { - return "" - } - return *a.Ref -} - -// GetSort returns the Sort field if it's non-nil, zero value otherwise. -func (a *ActionsCacheListOptions) GetSort() string { - if a == nil || a.Sort == nil { - return "" - } - return *a.Sort -} - -// GetAllowedActions returns the AllowedActions field if it's non-nil, zero value otherwise. -func (a *ActionsPermissions) GetAllowedActions() string { - if a == nil || a.AllowedActions == nil { - return "" - } - return *a.AllowedActions -} - -// GetEnabledRepositories returns the EnabledRepositories field if it's non-nil, zero value otherwise. -func (a *ActionsPermissions) GetEnabledRepositories() string { - if a == nil || a.EnabledRepositories == nil { - return "" - } - return *a.EnabledRepositories -} - -// GetSelectedActionsURL returns the SelectedActionsURL field if it's non-nil, zero value otherwise. -func (a *ActionsPermissions) GetSelectedActionsURL() string { - if a == nil || a.SelectedActionsURL == nil { - return "" - } - return *a.SelectedActionsURL -} - -// GetAllowedActions returns the AllowedActions field if it's non-nil, zero value otherwise. -func (a *ActionsPermissionsEnterprise) GetAllowedActions() string { - if a == nil || a.AllowedActions == nil { - return "" - } - return *a.AllowedActions -} - -// GetEnabledOrganizations returns the EnabledOrganizations field if it's non-nil, zero value otherwise. -func (a *ActionsPermissionsEnterprise) GetEnabledOrganizations() string { - if a == nil || a.EnabledOrganizations == nil { - return "" - } - return *a.EnabledOrganizations -} - -// GetSelectedActionsURL returns the SelectedActionsURL field if it's non-nil, zero value otherwise. -func (a *ActionsPermissionsEnterprise) GetSelectedActionsURL() string { - if a == nil || a.SelectedActionsURL == nil { - return "" - } - return *a.SelectedActionsURL -} - -// GetAllowedActions returns the AllowedActions field if it's non-nil, zero value otherwise. -func (a *ActionsPermissionsRepository) GetAllowedActions() string { - if a == nil || a.AllowedActions == nil { - return "" - } - return *a.AllowedActions -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (a *ActionsPermissionsRepository) GetEnabled() bool { - if a == nil || a.Enabled == nil { - return false - } - return *a.Enabled -} - -// GetSelectedActionsURL returns the SelectedActionsURL field if it's non-nil, zero value otherwise. 
-func (a *ActionsPermissionsRepository) GetSelectedActionsURL() string { - if a == nil || a.SelectedActionsURL == nil { - return "" - } - return *a.SelectedActionsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *ActionsVariable) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetSelectedRepositoriesURL returns the SelectedRepositoriesURL field if it's non-nil, zero value otherwise. -func (a *ActionsVariable) GetSelectedRepositoriesURL() string { - if a == nil || a.SelectedRepositoriesURL == nil { - return "" - } - return *a.SelectedRepositoriesURL -} - -// GetSelectedRepositoryIDs returns the SelectedRepositoryIDs field. -func (a *ActionsVariable) GetSelectedRepositoryIDs() *SelectedRepoIDs { - if a == nil { - return nil - } - return a.SelectedRepositoryIDs -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (a *ActionsVariable) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (a *ActionsVariable) GetVisibility() string { - if a == nil || a.Visibility == nil { - return "" - } - return *a.Visibility -} - -// GetCountryCode returns the CountryCode field if it's non-nil, zero value otherwise. -func (a *ActorLocation) GetCountryCode() string { - if a == nil || a.CountryCode == nil { - return "" - } - return *a.CountryCode -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (a *AdminEnforcedChanges) GetFrom() bool { - if a == nil || a.From == nil { - return false - } - return *a.From -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (a *AdminEnforcement) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetComments returns the Comments field. -func (a *AdminStats) GetComments() *CommentStats { - if a == nil { - return nil - } - return a.Comments -} - -// GetGists returns the Gists field. -func (a *AdminStats) GetGists() *GistStats { - if a == nil { - return nil - } - return a.Gists -} - -// GetHooks returns the Hooks field. -func (a *AdminStats) GetHooks() *HookStats { - if a == nil { - return nil - } - return a.Hooks -} - -// GetIssues returns the Issues field. -func (a *AdminStats) GetIssues() *IssueStats { - if a == nil { - return nil - } - return a.Issues -} - -// GetMilestones returns the Milestones field. -func (a *AdminStats) GetMilestones() *MilestoneStats { - if a == nil { - return nil - } - return a.Milestones -} - -// GetOrgs returns the Orgs field. -func (a *AdminStats) GetOrgs() *OrgStats { - if a == nil { - return nil - } - return a.Orgs -} - -// GetPages returns the Pages field. -func (a *AdminStats) GetPages() *PageStats { - if a == nil { - return nil - } - return a.Pages -} - -// GetPulls returns the Pulls field. -func (a *AdminStats) GetPulls() *PullStats { - if a == nil { - return nil - } - return a.Pulls -} - -// GetRepos returns the Repos field. -func (a *AdminStats) GetRepos() *RepoStats { - if a == nil { - return nil - } - return a.Repos -} - -// GetUsers returns the Users field. -func (a *AdminStats) GetUsers() *UserStats { - if a == nil { - return nil - } - return a.Users -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. 
-func (a *AdvancedSecurity) GetStatus() string { - if a == nil || a.Status == nil { - return "" - } - return *a.Status -} - -// GetLastPushedDate returns the LastPushedDate field if it's non-nil, zero value otherwise. -func (a *AdvancedSecurityCommittersBreakdown) GetLastPushedDate() string { - if a == nil || a.LastPushedDate == nil { - return "" - } - return *a.LastPushedDate -} - -// GetUserLogin returns the UserLogin field if it's non-nil, zero value otherwise. -func (a *AdvancedSecurityCommittersBreakdown) GetUserLogin() string { - if a == nil || a.UserLogin == nil { - return "" - } - return *a.UserLogin -} - -// GetScore returns the Score field. -func (a *AdvisoryCVSS) GetScore() *float64 { - if a == nil { - return nil - } - return a.Score -} - -// GetVectorString returns the VectorString field if it's non-nil, zero value otherwise. -func (a *AdvisoryCVSS) GetVectorString() string { - if a == nil || a.VectorString == nil { - return "" - } - return *a.VectorString -} - -// GetCWEID returns the CWEID field if it's non-nil, zero value otherwise. -func (a *AdvisoryCWEs) GetCWEID() string { - if a == nil || a.CWEID == nil { - return "" - } - return *a.CWEID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *AdvisoryCWEs) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (a *AdvisoryIdentifier) GetType() string { - if a == nil || a.Type == nil { - return "" - } - return *a.Type -} - -// GetValue returns the Value field if it's non-nil, zero value otherwise. -func (a *AdvisoryIdentifier) GetValue() string { - if a == nil || a.Value == nil { - return "" - } - return *a.Value -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (a *AdvisoryReference) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetFirstPatchedVersion returns the FirstPatchedVersion field. -func (a *AdvisoryVulnerability) GetFirstPatchedVersion() *FirstPatchedVersion { - if a == nil { - return nil - } - return a.FirstPatchedVersion -} - -// GetPackage returns the Package field. -func (a *AdvisoryVulnerability) GetPackage() *VulnerabilityPackage { - if a == nil { - return nil - } - return a.Package -} - -// GetPatchedVersions returns the PatchedVersions field if it's non-nil, zero value otherwise. -func (a *AdvisoryVulnerability) GetPatchedVersions() string { - if a == nil || a.PatchedVersions == nil { - return "" - } - return *a.PatchedVersions -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (a *AdvisoryVulnerability) GetSeverity() string { - if a == nil || a.Severity == nil { - return "" - } - return *a.Severity -} - -// GetVulnerableVersionRange returns the VulnerableVersionRange field if it's non-nil, zero value otherwise. -func (a *AdvisoryVulnerability) GetVulnerableVersionRange() string { - if a == nil || a.VulnerableVersionRange == nil { - return "" - } - return *a.VulnerableVersionRange -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (a *Alert) GetClosedAt() Timestamp { - if a == nil || a.ClosedAt == nil { - return Timestamp{} - } - return *a.ClosedAt -} - -// GetClosedBy returns the ClosedBy field. -func (a *Alert) GetClosedBy() *User { - if a == nil { - return nil - } - return a.ClosedBy -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (a *Alert) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetDismissedAt returns the DismissedAt field if it's non-nil, zero value otherwise. -func (a *Alert) GetDismissedAt() Timestamp { - if a == nil || a.DismissedAt == nil { - return Timestamp{} - } - return *a.DismissedAt -} - -// GetDismissedBy returns the DismissedBy field. -func (a *Alert) GetDismissedBy() *User { - if a == nil { - return nil - } - return a.DismissedBy -} - -// GetDismissedComment returns the DismissedComment field if it's non-nil, zero value otherwise. -func (a *Alert) GetDismissedComment() string { - if a == nil || a.DismissedComment == nil { - return "" - } - return *a.DismissedComment -} - -// GetDismissedReason returns the DismissedReason field if it's non-nil, zero value otherwise. -func (a *Alert) GetDismissedReason() string { - if a == nil || a.DismissedReason == nil { - return "" - } - return *a.DismissedReason -} - -// GetFixedAt returns the FixedAt field if it's non-nil, zero value otherwise. -func (a *Alert) GetFixedAt() Timestamp { - if a == nil || a.FixedAt == nil { - return Timestamp{} - } - return *a.FixedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (a *Alert) GetHTMLURL() string { - if a == nil || a.HTMLURL == nil { - return "" - } - return *a.HTMLURL -} - -// GetInstancesURL returns the InstancesURL field if it's non-nil, zero value otherwise. -func (a *Alert) GetInstancesURL() string { - if a == nil || a.InstancesURL == nil { - return "" - } - return *a.InstancesURL -} - -// GetMostRecentInstance returns the MostRecentInstance field. -func (a *Alert) GetMostRecentInstance() *MostRecentInstance { - if a == nil { - return nil - } - return a.MostRecentInstance -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (a *Alert) GetNumber() int { - if a == nil || a.Number == nil { - return 0 - } - return *a.Number -} - -// GetRepository returns the Repository field. -func (a *Alert) GetRepository() *Repository { - if a == nil { - return nil - } - return a.Repository -} - -// GetRule returns the Rule field. -func (a *Alert) GetRule() *Rule { - if a == nil { - return nil - } - return a.Rule -} - -// GetRuleDescription returns the RuleDescription field if it's non-nil, zero value otherwise. -func (a *Alert) GetRuleDescription() string { - if a == nil || a.RuleDescription == nil { - return "" - } - return *a.RuleDescription -} - -// GetRuleID returns the RuleID field if it's non-nil, zero value otherwise. -func (a *Alert) GetRuleID() string { - if a == nil || a.RuleID == nil { - return "" - } - return *a.RuleID -} - -// GetRuleSeverity returns the RuleSeverity field if it's non-nil, zero value otherwise. -func (a *Alert) GetRuleSeverity() string { - if a == nil || a.RuleSeverity == nil { - return "" - } - return *a.RuleSeverity -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (a *Alert) GetState() string { - if a == nil || a.State == nil { - return "" - } - return *a.State -} - -// GetTool returns the Tool field. -func (a *Alert) GetTool() *Tool { - if a == nil { - return nil - } - return a.Tool -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (a *Alert) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (a *Alert) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (a *AllowDeletionsEnforcementLevelChanges) GetFrom() string { - if a == nil || a.From == nil { - return "" - } - return *a.From -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (a *AllowForkSyncing) GetEnabled() bool { - if a == nil || a.Enabled == nil { - return false - } - return *a.Enabled -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (a *AnalysesListOptions) GetRef() string { - if a == nil || a.Ref == nil { - return "" - } - return *a.Ref -} - -// GetSarifID returns the SarifID field if it's non-nil, zero value otherwise. -func (a *AnalysesListOptions) GetSarifID() string { - if a == nil || a.SarifID == nil { - return "" - } - return *a.SarifID -} - -// GetSSHKeyFingerprints returns the SSHKeyFingerprints map if it's non-nil, an empty map otherwise. -func (a *APIMeta) GetSSHKeyFingerprints() map[string]string { - if a == nil || a.SSHKeyFingerprints == nil { - return map[string]string{} - } - return a.SSHKeyFingerprints -} - -// GetVerifiablePasswordAuthentication returns the VerifiablePasswordAuthentication field if it's non-nil, zero value otherwise. -func (a *APIMeta) GetVerifiablePasswordAuthentication() bool { - if a == nil || a.VerifiablePasswordAuthentication == nil { - return false - } - return *a.VerifiablePasswordAuthentication -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *App) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (a *App) GetDescription() string { - if a == nil || a.Description == nil { - return "" - } - return *a.Description -} - -// GetExternalURL returns the ExternalURL field if it's non-nil, zero value otherwise. -func (a *App) GetExternalURL() string { - if a == nil || a.ExternalURL == nil { - return "" - } - return *a.ExternalURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (a *App) GetHTMLURL() string { - if a == nil || a.HTMLURL == nil { - return "" - } - return *a.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *App) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetInstallationsCount returns the InstallationsCount field if it's non-nil, zero value otherwise. -func (a *App) GetInstallationsCount() int { - if a == nil || a.InstallationsCount == nil { - return 0 - } - return *a.InstallationsCount -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *App) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (a *App) GetNodeID() string { - if a == nil || a.NodeID == nil { - return "" - } - return *a.NodeID -} - -// GetOwner returns the Owner field. -func (a *App) GetOwner() *User { - if a == nil { - return nil - } - return a.Owner -} - -// GetPermissions returns the Permissions field. -func (a *App) GetPermissions() *InstallationPermissions { - if a == nil { - return nil - } - return a.Permissions -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. 
-func (a *App) GetSlug() string { - if a == nil || a.Slug == nil { - return "" - } - return *a.Slug -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (a *App) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetClientID() string { - if a == nil || a.ClientID == nil { - return "" - } - return *a.ClientID -} - -// GetClientSecret returns the ClientSecret field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetClientSecret() string { - if a == nil || a.ClientSecret == nil { - return "" - } - return *a.ClientSecret -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetDescription() string { - if a == nil || a.Description == nil { - return "" - } - return *a.Description -} - -// GetExternalURL returns the ExternalURL field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetExternalURL() string { - if a == nil || a.ExternalURL == nil { - return "" - } - return *a.ExternalURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetHTMLURL() string { - if a == nil || a.HTMLURL == nil { - return "" - } - return *a.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetNodeID() string { - if a == nil || a.NodeID == nil { - return "" - } - return *a.NodeID -} - -// GetOwner returns the Owner field. -func (a *AppConfig) GetOwner() *User { - if a == nil { - return nil - } - return a.Owner -} - -// GetPEM returns the PEM field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetPEM() string { - if a == nil || a.PEM == nil { - return "" - } - return *a.PEM -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetSlug() string { - if a == nil || a.Slug == nil { - return "" - } - return *a.Slug -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetWebhookSecret returns the WebhookSecret field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetWebhookSecret() string { - if a == nil || a.WebhookSecret == nil { - return "" - } - return *a.WebhookSecret -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (a *ArchivedAt) GetFrom() Timestamp { - if a == nil || a.From == nil { - return Timestamp{} - } - return *a.From -} - -// GetTo returns the To field if it's non-nil, zero value otherwise. 
-func (a *ArchivedAt) GetTo() Timestamp { - if a == nil || a.To == nil { - return Timestamp{} - } - return *a.To -} - -// GetArchiveDownloadURL returns the ArchiveDownloadURL field if it's non-nil, zero value otherwise. -func (a *Artifact) GetArchiveDownloadURL() string { - if a == nil || a.ArchiveDownloadURL == nil { - return "" - } - return *a.ArchiveDownloadURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *Artifact) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetExpired returns the Expired field if it's non-nil, zero value otherwise. -func (a *Artifact) GetExpired() bool { - if a == nil || a.Expired == nil { - return false - } - return *a.Expired -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (a *Artifact) GetExpiresAt() Timestamp { - if a == nil || a.ExpiresAt == nil { - return Timestamp{} - } - return *a.ExpiresAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *Artifact) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *Artifact) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (a *Artifact) GetNodeID() string { - if a == nil || a.NodeID == nil { - return "" - } - return *a.NodeID -} - -// GetSizeInBytes returns the SizeInBytes field if it's non-nil, zero value otherwise. -func (a *Artifact) GetSizeInBytes() int64 { - if a == nil || a.SizeInBytes == nil { - return 0 - } - return *a.SizeInBytes -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (a *Artifact) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (a *Artifact) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetWorkflowRun returns the WorkflowRun field. -func (a *Artifact) GetWorkflowRun() *ArtifactWorkflowRun { - if a == nil { - return nil - } - return a.WorkflowRun -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (a *ArtifactList) GetTotalCount() int64 { - if a == nil || a.TotalCount == nil { - return 0 - } - return *a.TotalCount -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (a *ArtifactWorkflowRun) GetHeadBranch() string { - if a == nil || a.HeadBranch == nil { - return "" - } - return *a.HeadBranch -} - -// GetHeadRepositoryID returns the HeadRepositoryID field if it's non-nil, zero value otherwise. -func (a *ArtifactWorkflowRun) GetHeadRepositoryID() int64 { - if a == nil || a.HeadRepositoryID == nil { - return 0 - } - return *a.HeadRepositoryID -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (a *ArtifactWorkflowRun) GetHeadSHA() string { - if a == nil || a.HeadSHA == nil { - return "" - } - return *a.HeadSHA -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *ArtifactWorkflowRun) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. 
-func (a *ArtifactWorkflowRun) GetRepositoryID() int64 { - if a == nil || a.RepositoryID == nil { - return 0 - } - return *a.RepositoryID -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (a *Attachment) GetBody() string { - if a == nil || a.Body == nil { - return "" - } - return *a.Body -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *Attachment) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (a *Attachment) GetTitle() string { - if a == nil || a.Title == nil { - return "" - } - return *a.Title -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetAction() string { - if a == nil || a.Action == nil { - return "" - } - return *a.Action -} - -// GetActive returns the Active field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetActive() bool { - if a == nil || a.Active == nil { - return false - } - return *a.Active -} - -// GetActiveWas returns the ActiveWas field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetActiveWas() bool { - if a == nil || a.ActiveWas == nil { - return false - } - return *a.ActiveWas -} - -// GetActor returns the Actor field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetActor() string { - if a == nil || a.Actor == nil { - return "" - } - return *a.Actor -} - -// GetActorIP returns the ActorIP field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetActorIP() string { - if a == nil || a.ActorIP == nil { - return "" - } - return *a.ActorIP -} - -// GetActorLocation returns the ActorLocation field. -func (a *AuditEntry) GetActorLocation() *ActorLocation { - if a == nil { - return nil - } - return a.ActorLocation -} - -// GetBlockedUser returns the BlockedUser field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetBlockedUser() string { - if a == nil || a.BlockedUser == nil { - return "" - } - return *a.BlockedUser -} - -// GetBusiness returns the Business field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetBusiness() string { - if a == nil || a.Business == nil { - return "" - } - return *a.Business -} - -// GetCancelledAt returns the CancelledAt field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetCancelledAt() Timestamp { - if a == nil || a.CancelledAt == nil { - return Timestamp{} - } - return *a.CancelledAt -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetCompletedAt() Timestamp { - if a == nil || a.CompletedAt == nil { - return Timestamp{} - } - return *a.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetConclusion() string { - if a == nil || a.Conclusion == nil { - return "" - } - return *a.Conclusion -} - -// GetConfig returns the Config field. -func (a *AuditEntry) GetConfig() *HookConfig { - if a == nil { - return nil - } - return a.Config -} - -// GetConfigWas returns the ConfigWas field. -func (a *AuditEntry) GetConfigWas() *HookConfig { - if a == nil { - return nil - } - return a.ConfigWas -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. 
-func (a *AuditEntry) GetContentType() string { - if a == nil || a.ContentType == nil { - return "" - } - return *a.ContentType -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetData returns the Data field. -func (a *AuditEntry) GetData() *AuditEntryData { - if a == nil { - return nil - } - return a.Data -} - -// GetDeployKeyFingerprint returns the DeployKeyFingerprint field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetDeployKeyFingerprint() string { - if a == nil || a.DeployKeyFingerprint == nil { - return "" - } - return *a.DeployKeyFingerprint -} - -// GetDocumentID returns the DocumentID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetDocumentID() string { - if a == nil || a.DocumentID == nil { - return "" - } - return *a.DocumentID -} - -// GetEmoji returns the Emoji field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetEmoji() string { - if a == nil || a.Emoji == nil { - return "" - } - return *a.Emoji -} - -// GetEnvironmentName returns the EnvironmentName field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetEnvironmentName() string { - if a == nil || a.EnvironmentName == nil { - return "" - } - return *a.EnvironmentName -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetEvent() string { - if a == nil || a.Event == nil { - return "" - } - return *a.Event -} - -// GetExplanation returns the Explanation field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetExplanation() string { - if a == nil || a.Explanation == nil { - return "" - } - return *a.Explanation -} - -// GetExternalIdentityNameID returns the ExternalIdentityNameID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetExternalIdentityNameID() string { - if a == nil || a.ExternalIdentityNameID == nil { - return "" - } - return *a.ExternalIdentityNameID -} - -// GetExternalIdentityUsername returns the ExternalIdentityUsername field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetExternalIdentityUsername() string { - if a == nil || a.ExternalIdentityUsername == nil { - return "" - } - return *a.ExternalIdentityUsername -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetFingerprint() string { - if a == nil || a.Fingerprint == nil { - return "" - } - return *a.Fingerprint -} - -// GetHashedToken returns the HashedToken field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetHashedToken() string { - if a == nil || a.HashedToken == nil { - return "" - } - return *a.HashedToken -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetHeadBranch() string { - if a == nil || a.HeadBranch == nil { - return "" - } - return *a.HeadBranch -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetHeadSHA() string { - if a == nil || a.HeadSHA == nil { - return "" - } - return *a.HeadSHA -} - -// GetHookID returns the HookID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetHookID() int64 { - if a == nil || a.HookID == nil { - return 0 - } - return *a.HookID -} - -// GetIsHostedRunner returns the IsHostedRunner field if it's non-nil, zero value otherwise. 
-func (a *AuditEntry) GetIsHostedRunner() bool { - if a == nil || a.IsHostedRunner == nil { - return false - } - return *a.IsHostedRunner -} - -// GetJobName returns the JobName field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetJobName() string { - if a == nil || a.JobName == nil { - return "" - } - return *a.JobName -} - -// GetJobWorkflowRef returns the JobWorkflowRef field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetJobWorkflowRef() string { - if a == nil || a.JobWorkflowRef == nil { - return "" - } - return *a.JobWorkflowRef -} - -// GetLimitedAvailability returns the LimitedAvailability field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetLimitedAvailability() bool { - if a == nil || a.LimitedAvailability == nil { - return false - } - return *a.LimitedAvailability -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetMessage() string { - if a == nil || a.Message == nil { - return "" - } - return *a.Message -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetOAuthApplicationID returns the OAuthApplicationID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOAuthApplicationID() int64 { - if a == nil || a.OAuthApplicationID == nil { - return 0 - } - return *a.OAuthApplicationID -} - -// GetOldPermission returns the OldPermission field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOldPermission() string { - if a == nil || a.OldPermission == nil { - return "" - } - return *a.OldPermission -} - -// GetOldUser returns the OldUser field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOldUser() string { - if a == nil || a.OldUser == nil { - return "" - } - return *a.OldUser -} - -// GetOpenSSHPublicKey returns the OpenSSHPublicKey field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOpenSSHPublicKey() string { - if a == nil || a.OpenSSHPublicKey == nil { - return "" - } - return *a.OpenSSHPublicKey -} - -// GetOperationType returns the OperationType field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOperationType() string { - if a == nil || a.OperationType == nil { - return "" - } - return *a.OperationType -} - -// GetOrg returns the Org field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOrg() string { - if a == nil || a.Org == nil { - return "" - } - return *a.Org -} - -// GetOrgID returns the OrgID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOrgID() int64 { - if a == nil || a.OrgID == nil { - return 0 - } - return *a.OrgID -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetPermission() string { - if a == nil || a.Permission == nil { - return "" - } - return *a.Permission -} - -// GetPreviousVisibility returns the PreviousVisibility field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetPreviousVisibility() string { - if a == nil || a.PreviousVisibility == nil { - return "" - } - return *a.PreviousVisibility -} - -// GetProgrammaticAccessType returns the ProgrammaticAccessType field if it's non-nil, zero value otherwise. 
-func (a *AuditEntry) GetProgrammaticAccessType() string { - if a == nil || a.ProgrammaticAccessType == nil { - return "" - } - return *a.ProgrammaticAccessType -} - -// GetPullRequestID returns the PullRequestID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetPullRequestID() int64 { - if a == nil || a.PullRequestID == nil { - return 0 - } - return *a.PullRequestID -} - -// GetPullRequestTitle returns the PullRequestTitle field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetPullRequestTitle() string { - if a == nil || a.PullRequestTitle == nil { - return "" - } - return *a.PullRequestTitle -} - -// GetPullRequestURL returns the PullRequestURL field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetPullRequestURL() string { - if a == nil || a.PullRequestURL == nil { - return "" - } - return *a.PullRequestURL -} - -// GetReadOnly returns the ReadOnly field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetReadOnly() string { - if a == nil || a.ReadOnly == nil { - return "" - } - return *a.ReadOnly -} - -// GetRepo returns the Repo field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRepo() string { - if a == nil || a.Repo == nil { - return "" - } - return *a.Repo -} - -// GetRepository returns the Repository field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRepository() string { - if a == nil || a.Repository == nil { - return "" - } - return *a.Repository -} - -// GetRepositoryPublic returns the RepositoryPublic field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRepositoryPublic() bool { - if a == nil || a.RepositoryPublic == nil { - return false - } - return *a.RepositoryPublic -} - -// GetRunAttempt returns the RunAttempt field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRunAttempt() int64 { - if a == nil || a.RunAttempt == nil { - return 0 - } - return *a.RunAttempt -} - -// GetRunnerGroupID returns the RunnerGroupID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRunnerGroupID() int64 { - if a == nil || a.RunnerGroupID == nil { - return 0 - } - return *a.RunnerGroupID -} - -// GetRunnerGroupName returns the RunnerGroupName field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRunnerGroupName() string { - if a == nil || a.RunnerGroupName == nil { - return "" - } - return *a.RunnerGroupName -} - -// GetRunnerID returns the RunnerID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRunnerID() int64 { - if a == nil || a.RunnerID == nil { - return 0 - } - return *a.RunnerID -} - -// GetRunnerName returns the RunnerName field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRunnerName() string { - if a == nil || a.RunnerName == nil { - return "" - } - return *a.RunnerName -} - -// GetRunNumber returns the RunNumber field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRunNumber() int64 { - if a == nil || a.RunNumber == nil { - return 0 - } - return *a.RunNumber -} - -// GetSourceVersion returns the SourceVersion field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetSourceVersion() string { - if a == nil || a.SourceVersion == nil { - return "" - } - return *a.SourceVersion -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. 
-func (a *AuditEntry) GetStartedAt() Timestamp { - if a == nil || a.StartedAt == nil { - return Timestamp{} - } - return *a.StartedAt -} - -// GetTargetLogin returns the TargetLogin field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTargetLogin() string { - if a == nil || a.TargetLogin == nil { - return "" - } - return *a.TargetLogin -} - -// GetTargetVersion returns the TargetVersion field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTargetVersion() string { - if a == nil || a.TargetVersion == nil { - return "" - } - return *a.TargetVersion -} - -// GetTeam returns the Team field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTeam() string { - if a == nil || a.Team == nil { - return "" - } - return *a.Team -} - -// GetTimestamp returns the Timestamp field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTimestamp() Timestamp { - if a == nil || a.Timestamp == nil { - return Timestamp{} - } - return *a.Timestamp -} - -// GetTokenID returns the TokenID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTokenID() int64 { - if a == nil || a.TokenID == nil { - return 0 - } - return *a.TokenID -} - -// GetTokenScopes returns the TokenScopes field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTokenScopes() string { - if a == nil || a.TokenScopes == nil { - return "" - } - return *a.TokenScopes -} - -// GetTopic returns the Topic field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTopic() string { - if a == nil || a.Topic == nil { - return "" - } - return *a.Topic -} - -// GetTransportProtocol returns the TransportProtocol field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTransportProtocol() int { - if a == nil || a.TransportProtocol == nil { - return 0 - } - return *a.TransportProtocol -} - -// GetTransportProtocolName returns the TransportProtocolName field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTransportProtocolName() string { - if a == nil || a.TransportProtocolName == nil { - return "" - } - return *a.TransportProtocolName -} - -// GetTriggerID returns the TriggerID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTriggerID() int64 { - if a == nil || a.TriggerID == nil { - return 0 - } - return *a.TriggerID -} - -// GetUser returns the User field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetUser() string { - if a == nil || a.User == nil { - return "" - } - return *a.User -} - -// GetUserAgent returns the UserAgent field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetUserAgent() string { - if a == nil || a.UserAgent == nil { - return "" - } - return *a.UserAgent -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetVisibility() string { - if a == nil || a.Visibility == nil { - return "" - } - return *a.Visibility -} - -// GetWorkflowID returns the WorkflowID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetWorkflowID() int64 { - if a == nil || a.WorkflowID == nil { - return 0 - } - return *a.WorkflowID -} - -// GetWorkflowRunID returns the WorkflowRunID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetWorkflowRunID() int64 { - if a == nil || a.WorkflowRunID == nil { - return 0 - } - return *a.WorkflowRunID -} - -// GetOldLogin returns the OldLogin field if it's non-nil, zero value otherwise. 
-func (a *AuditEntryData) GetOldLogin() string { - if a == nil || a.OldLogin == nil { - return "" - } - return *a.OldLogin -} - -// GetOldName returns the OldName field if it's non-nil, zero value otherwise. -func (a *AuditEntryData) GetOldName() string { - if a == nil || a.OldName == nil { - return "" - } - return *a.OldName -} - -// GetApp returns the App field. -func (a *Authorization) GetApp() *AuthorizationApp { - if a == nil { - return nil - } - return a.App -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *Authorization) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (a *Authorization) GetFingerprint() string { - if a == nil || a.Fingerprint == nil { - return "" - } - return *a.Fingerprint -} - -// GetHashedToken returns the HashedToken field if it's non-nil, zero value otherwise. -func (a *Authorization) GetHashedToken() string { - if a == nil || a.HashedToken == nil { - return "" - } - return *a.HashedToken -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *Authorization) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetNote returns the Note field if it's non-nil, zero value otherwise. -func (a *Authorization) GetNote() string { - if a == nil || a.Note == nil { - return "" - } - return *a.Note -} - -// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. -func (a *Authorization) GetNoteURL() string { - if a == nil || a.NoteURL == nil { - return "" - } - return *a.NoteURL -} - -// GetToken returns the Token field if it's non-nil, zero value otherwise. -func (a *Authorization) GetToken() string { - if a == nil || a.Token == nil { - return "" - } - return *a.Token -} - -// GetTokenLastEight returns the TokenLastEight field if it's non-nil, zero value otherwise. -func (a *Authorization) GetTokenLastEight() string { - if a == nil || a.TokenLastEight == nil { - return "" - } - return *a.TokenLastEight -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (a *Authorization) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (a *Authorization) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetUser returns the User field. -func (a *Authorization) GetUser() *User { - if a == nil { - return nil - } - return a.User -} - -// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. -func (a *AuthorizationApp) GetClientID() string { - if a == nil || a.ClientID == nil { - return "" - } - return *a.ClientID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *AuthorizationApp) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (a *AuthorizationApp) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. 
-func (a *AuthorizationRequest) GetClientID() string { - if a == nil || a.ClientID == nil { - return "" - } - return *a.ClientID -} - -// GetClientSecret returns the ClientSecret field if it's non-nil, zero value otherwise. -func (a *AuthorizationRequest) GetClientSecret() string { - if a == nil || a.ClientSecret == nil { - return "" - } - return *a.ClientSecret -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (a *AuthorizationRequest) GetFingerprint() string { - if a == nil || a.Fingerprint == nil { - return "" - } - return *a.Fingerprint -} - -// GetNote returns the Note field if it's non-nil, zero value otherwise. -func (a *AuthorizationRequest) GetNote() string { - if a == nil || a.Note == nil { - return "" - } - return *a.Note -} - -// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. -func (a *AuthorizationRequest) GetNoteURL() string { - if a == nil || a.NoteURL == nil { - return "" - } - return *a.NoteURL -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (a *AuthorizationUpdateRequest) GetFingerprint() string { - if a == nil || a.Fingerprint == nil { - return "" - } - return *a.Fingerprint -} - -// GetNote returns the Note field if it's non-nil, zero value otherwise. -func (a *AuthorizationUpdateRequest) GetNote() string { - if a == nil || a.Note == nil { - return "" - } - return *a.Note -} - -// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. -func (a *AuthorizationUpdateRequest) GetNoteURL() string { - if a == nil || a.NoteURL == nil { - return "" - } - return *a.NoteURL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (a *AuthorizedActorsOnly) GetFrom() bool { - if a == nil || a.From == nil { - return false - } - return *a.From -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (a *AuthorizedDismissalActorsOnlyChanges) GetFrom() bool { - if a == nil || a.From == nil { - return false - } - return *a.From -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *Autolink) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetIsAlphanumeric returns the IsAlphanumeric field if it's non-nil, zero value otherwise. -func (a *Autolink) GetIsAlphanumeric() bool { - if a == nil || a.IsAlphanumeric == nil { - return false - } - return *a.IsAlphanumeric -} - -// GetKeyPrefix returns the KeyPrefix field if it's non-nil, zero value otherwise. -func (a *Autolink) GetKeyPrefix() string { - if a == nil || a.KeyPrefix == nil { - return "" - } - return *a.KeyPrefix -} - -// GetURLTemplate returns the URLTemplate field if it's non-nil, zero value otherwise. -func (a *Autolink) GetURLTemplate() string { - if a == nil || a.URLTemplate == nil { - return "" - } - return *a.URLTemplate -} - -// GetIsAlphanumeric returns the IsAlphanumeric field if it's non-nil, zero value otherwise. -func (a *AutolinkOptions) GetIsAlphanumeric() bool { - if a == nil || a.IsAlphanumeric == nil { - return false - } - return *a.IsAlphanumeric -} - -// GetKeyPrefix returns the KeyPrefix field if it's non-nil, zero value otherwise. -func (a *AutolinkOptions) GetKeyPrefix() string { - if a == nil || a.KeyPrefix == nil { - return "" - } - return *a.KeyPrefix -} - -// GetURLTemplate returns the URLTemplate field if it's non-nil, zero value otherwise. 
-func (a *AutolinkOptions) GetURLTemplate() string { - if a == nil || a.URLTemplate == nil { - return "" - } - return *a.URLTemplate -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (a *AutomatedSecurityFixes) GetEnabled() bool { - if a == nil || a.Enabled == nil { - return false - } - return *a.Enabled -} - -// GetPaused returns the Paused field if it's non-nil, zero value otherwise. -func (a *AutomatedSecurityFixes) GetPaused() bool { - if a == nil || a.Paused == nil { - return false - } - return *a.Paused -} - -// GetAppID returns the AppID field if it's non-nil, zero value otherwise. -func (a *AutoTriggerCheck) GetAppID() int64 { - if a == nil || a.AppID == nil { - return 0 - } - return *a.AppID -} - -// GetSetting returns the Setting field if it's non-nil, zero value otherwise. -func (a *AutoTriggerCheck) GetSetting() bool { - if a == nil || a.Setting == nil { - return false - } - return *a.Setting -} - -// GetContent returns the Content field if it's non-nil, zero value otherwise. -func (b *Blob) GetContent() string { - if b == nil || b.Content == nil { - return "" - } - return *b.Content -} - -// GetEncoding returns the Encoding field if it's non-nil, zero value otherwise. -func (b *Blob) GetEncoding() string { - if b == nil || b.Encoding == nil { - return "" - } - return *b.Encoding -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (b *Blob) GetNodeID() string { - if b == nil || b.NodeID == nil { - return "" - } - return *b.NodeID -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (b *Blob) GetSHA() string { - if b == nil || b.SHA == nil { - return "" - } - return *b.SHA -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (b *Blob) GetSize() int { - if b == nil || b.Size == nil { - return 0 - } - return *b.Size -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (b *Blob) GetURL() string { - if b == nil || b.URL == nil { - return "" - } - return *b.URL -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (b *BlockCreations) GetEnabled() bool { - if b == nil || b.Enabled == nil { - return false - } - return *b.Enabled -} - -// GetCommit returns the Commit field. -func (b *Branch) GetCommit() *RepositoryCommit { - if b == nil { - return nil - } - return b.Commit -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (b *Branch) GetName() string { - if b == nil || b.Name == nil { - return "" - } - return *b.Name -} - -// GetProtected returns the Protected field if it's non-nil, zero value otherwise. -func (b *Branch) GetProtected() bool { - if b == nil || b.Protected == nil { - return false - } - return *b.Protected -} - -// GetCommit returns the Commit field. -func (b *BranchCommit) GetCommit() *Commit { - if b == nil { - return nil - } - return b.Commit -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (b *BranchCommit) GetName() string { - if b == nil || b.Name == nil { - return "" - } - return *b.Name -} - -// GetProtected returns the Protected field if it's non-nil, zero value otherwise. -func (b *BranchCommit) GetProtected() bool { - if b == nil || b.Protected == nil { - return false - } - return *b.Protected -} - -// GetProtected returns the Protected field if it's non-nil, zero value otherwise. 
-func (b *BranchListOptions) GetProtected() bool { - if b == nil || b.Protected == nil { - return false - } - return *b.Protected -} - -// GetCustomBranchPolicies returns the CustomBranchPolicies field if it's non-nil, zero value otherwise. -func (b *BranchPolicy) GetCustomBranchPolicies() bool { - if b == nil || b.CustomBranchPolicies == nil { - return false - } - return *b.CustomBranchPolicies -} - -// GetProtectedBranches returns the ProtectedBranches field if it's non-nil, zero value otherwise. -func (b *BranchPolicy) GetProtectedBranches() bool { - if b == nil || b.ProtectedBranches == nil { - return false - } - return *b.ProtectedBranches -} - -// GetAdminEnforced returns the AdminEnforced field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetAdminEnforced() bool { - if b == nil || b.AdminEnforced == nil { - return false - } - return *b.AdminEnforced -} - -// GetAllowDeletionsEnforcementLevel returns the AllowDeletionsEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetAllowDeletionsEnforcementLevel() string { - if b == nil || b.AllowDeletionsEnforcementLevel == nil { - return "" - } - return *b.AllowDeletionsEnforcementLevel -} - -// GetAllowForcePushesEnforcementLevel returns the AllowForcePushesEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetAllowForcePushesEnforcementLevel() string { - if b == nil || b.AllowForcePushesEnforcementLevel == nil { - return "" - } - return *b.AllowForcePushesEnforcementLevel -} - -// GetAuthorizedActorsOnly returns the AuthorizedActorsOnly field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetAuthorizedActorsOnly() bool { - if b == nil || b.AuthorizedActorsOnly == nil { - return false - } - return *b.AuthorizedActorsOnly -} - -// GetAuthorizedDismissalActorsOnly returns the AuthorizedDismissalActorsOnly field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetAuthorizedDismissalActorsOnly() bool { - if b == nil || b.AuthorizedDismissalActorsOnly == nil { - return false - } - return *b.AuthorizedDismissalActorsOnly -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetCreatedAt() Timestamp { - if b == nil || b.CreatedAt == nil { - return Timestamp{} - } - return *b.CreatedAt -} - -// GetDismissStaleReviewsOnPush returns the DismissStaleReviewsOnPush field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetDismissStaleReviewsOnPush() bool { - if b == nil || b.DismissStaleReviewsOnPush == nil { - return false - } - return *b.DismissStaleReviewsOnPush -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetID() int64 { - if b == nil || b.ID == nil { - return 0 - } - return *b.ID -} - -// GetIgnoreApprovalsFromContributors returns the IgnoreApprovalsFromContributors field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetIgnoreApprovalsFromContributors() bool { - if b == nil || b.IgnoreApprovalsFromContributors == nil { - return false - } - return *b.IgnoreApprovalsFromContributors -} - -// GetLinearHistoryRequirementEnforcementLevel returns the LinearHistoryRequirementEnforcementLevel field if it's non-nil, zero value otherwise. 
-func (b *BranchProtectionRule) GetLinearHistoryRequirementEnforcementLevel() string { - if b == nil || b.LinearHistoryRequirementEnforcementLevel == nil { - return "" - } - return *b.LinearHistoryRequirementEnforcementLevel -} - -// GetMergeQueueEnforcementLevel returns the MergeQueueEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetMergeQueueEnforcementLevel() string { - if b == nil || b.MergeQueueEnforcementLevel == nil { - return "" - } - return *b.MergeQueueEnforcementLevel -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetName() string { - if b == nil || b.Name == nil { - return "" - } - return *b.Name -} - -// GetPullRequestReviewsEnforcementLevel returns the PullRequestReviewsEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetPullRequestReviewsEnforcementLevel() string { - if b == nil || b.PullRequestReviewsEnforcementLevel == nil { - return "" - } - return *b.PullRequestReviewsEnforcementLevel -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetRepositoryID() int64 { - if b == nil || b.RepositoryID == nil { - return 0 - } - return *b.RepositoryID -} - -// GetRequireCodeOwnerReview returns the RequireCodeOwnerReview field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetRequireCodeOwnerReview() bool { - if b == nil || b.RequireCodeOwnerReview == nil { - return false - } - return *b.RequireCodeOwnerReview -} - -// GetRequiredApprovingReviewCount returns the RequiredApprovingReviewCount field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetRequiredApprovingReviewCount() int { - if b == nil || b.RequiredApprovingReviewCount == nil { - return 0 - } - return *b.RequiredApprovingReviewCount -} - -// GetRequiredConversationResolutionLevel returns the RequiredConversationResolutionLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetRequiredConversationResolutionLevel() string { - if b == nil || b.RequiredConversationResolutionLevel == nil { - return "" - } - return *b.RequiredConversationResolutionLevel -} - -// GetRequiredDeploymentsEnforcementLevel returns the RequiredDeploymentsEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetRequiredDeploymentsEnforcementLevel() string { - if b == nil || b.RequiredDeploymentsEnforcementLevel == nil { - return "" - } - return *b.RequiredDeploymentsEnforcementLevel -} - -// GetRequiredStatusChecksEnforcementLevel returns the RequiredStatusChecksEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetRequiredStatusChecksEnforcementLevel() string { - if b == nil || b.RequiredStatusChecksEnforcementLevel == nil { - return "" - } - return *b.RequiredStatusChecksEnforcementLevel -} - -// GetSignatureRequirementEnforcementLevel returns the SignatureRequirementEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetSignatureRequirementEnforcementLevel() string { - if b == nil || b.SignatureRequirementEnforcementLevel == nil { - return "" - } - return *b.SignatureRequirementEnforcementLevel -} - -// GetStrictRequiredStatusChecksPolicy returns the StrictRequiredStatusChecksPolicy field if it's non-nil, zero value otherwise. 
-func (b *BranchProtectionRule) GetStrictRequiredStatusChecksPolicy() bool { - if b == nil || b.StrictRequiredStatusChecksPolicy == nil { - return false - } - return *b.StrictRequiredStatusChecksPolicy -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetUpdatedAt() Timestamp { - if b == nil || b.UpdatedAt == nil { - return Timestamp{} - } - return *b.UpdatedAt -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRuleEvent) GetAction() string { - if b == nil || b.Action == nil { - return "" - } - return *b.Action -} - -// GetChanges returns the Changes field. -func (b *BranchProtectionRuleEvent) GetChanges() *ProtectionChanges { - if b == nil { - return nil - } - return b.Changes -} - -// GetInstallation returns the Installation field. -func (b *BranchProtectionRuleEvent) GetInstallation() *Installation { - if b == nil { - return nil - } - return b.Installation -} - -// GetOrg returns the Org field. -func (b *BranchProtectionRuleEvent) GetOrg() *Organization { - if b == nil { - return nil - } - return b.Org -} - -// GetRepo returns the Repo field. -func (b *BranchProtectionRuleEvent) GetRepo() *Repository { - if b == nil { - return nil - } - return b.Repo -} - -// GetRule returns the Rule field. -func (b *BranchProtectionRuleEvent) GetRule() *BranchProtectionRule { - if b == nil { - return nil - } - return b.Rule -} - -// GetSender returns the Sender field. -func (b *BranchProtectionRuleEvent) GetSender() *User { - if b == nil { - return nil - } - return b.Sender -} - -// GetActorID returns the ActorID field if it's non-nil, zero value otherwise. -func (b *BypassActor) GetActorID() int64 { - if b == nil || b.ActorID == nil { - return 0 - } - return *b.ActorID -} - -// GetActorType returns the ActorType field if it's non-nil, zero value otherwise. -func (b *BypassActor) GetActorType() string { - if b == nil || b.ActorType == nil { - return "" - } - return *b.ActorType -} - -// GetBypassMode returns the BypassMode field if it's non-nil, zero value otherwise. -func (b *BypassActor) GetBypassMode() string { - if b == nil || b.BypassMode == nil { - return "" - } - return *b.BypassMode -} - -// GetApp returns the App field. -func (c *CheckRun) GetApp() *App { - if c == nil { - return nil - } - return c.App -} - -// GetCheckSuite returns the CheckSuite field. -func (c *CheckRun) GetCheckSuite() *CheckSuite { - if c == nil { - return nil - } - return c.CheckSuite -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetCompletedAt() Timestamp { - if c == nil || c.CompletedAt == nil { - return Timestamp{} - } - return *c.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetConclusion() string { - if c == nil || c.Conclusion == nil { - return "" - } - return *c.Conclusion -} - -// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetDetailsURL() string { - if c == nil || c.DetailsURL == nil { - return "" - } - return *c.DetailsURL -} - -// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetExternalID() string { - if c == nil || c.ExternalID == nil { - return "" - } - return *c.ExternalID -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. 
-func (c *CheckRun) GetHeadSHA() string { - if c == nil || c.HeadSHA == nil { - return "" - } - return *c.HeadSHA -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetOutput returns the Output field. -func (c *CheckRun) GetOutput() *CheckRunOutput { - if c == nil { - return nil - } - return c.Output -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetStartedAt() Timestamp { - if c == nil || c.StartedAt == nil { - return Timestamp{} - } - return *c.StartedAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetStatus() string { - if c == nil || c.Status == nil { - return "" - } - return *c.Status -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetAnnotationLevel returns the AnnotationLevel field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetAnnotationLevel() string { - if c == nil || c.AnnotationLevel == nil { - return "" - } - return *c.AnnotationLevel -} - -// GetEndColumn returns the EndColumn field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetEndColumn() int { - if c == nil || c.EndColumn == nil { - return 0 - } - return *c.EndColumn -} - -// GetEndLine returns the EndLine field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetEndLine() int { - if c == nil || c.EndLine == nil { - return 0 - } - return *c.EndLine -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetMessage() string { - if c == nil || c.Message == nil { - return "" - } - return *c.Message -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetPath() string { - if c == nil || c.Path == nil { - return "" - } - return *c.Path -} - -// GetRawDetails returns the RawDetails field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetRawDetails() string { - if c == nil || c.RawDetails == nil { - return "" - } - return *c.RawDetails -} - -// GetStartColumn returns the StartColumn field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetStartColumn() int { - if c == nil || c.StartColumn == nil { - return 0 - } - return *c.StartColumn -} - -// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetStartLine() int { - if c == nil || c.StartLine == nil { - return 0 - } - return *c.StartLine -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. 
-func (c *CheckRunAnnotation) GetTitle() string { - if c == nil || c.Title == nil { - return "" - } - return *c.Title -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (c *CheckRunEvent) GetAction() string { - if c == nil || c.Action == nil { - return "" - } - return *c.Action -} - -// GetCheckRun returns the CheckRun field. -func (c *CheckRunEvent) GetCheckRun() *CheckRun { - if c == nil { - return nil - } - return c.CheckRun -} - -// GetInstallation returns the Installation field. -func (c *CheckRunEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetOrg returns the Org field. -func (c *CheckRunEvent) GetOrg() *Organization { - if c == nil { - return nil - } - return c.Org -} - -// GetRepo returns the Repo field. -func (c *CheckRunEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetRequestedAction returns the RequestedAction field. -func (c *CheckRunEvent) GetRequestedAction() *RequestedAction { - if c == nil { - return nil - } - return c.RequestedAction -} - -// GetSender returns the Sender field. -func (c *CheckRunEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetAlt returns the Alt field if it's non-nil, zero value otherwise. -func (c *CheckRunImage) GetAlt() string { - if c == nil || c.Alt == nil { - return "" - } - return *c.Alt -} - -// GetCaption returns the Caption field if it's non-nil, zero value otherwise. -func (c *CheckRunImage) GetCaption() string { - if c == nil || c.Caption == nil { - return "" - } - return *c.Caption -} - -// GetImageURL returns the ImageURL field if it's non-nil, zero value otherwise. -func (c *CheckRunImage) GetImageURL() string { - if c == nil || c.ImageURL == nil { - return "" - } - return *c.ImageURL -} - -// GetAnnotationsCount returns the AnnotationsCount field if it's non-nil, zero value otherwise. -func (c *CheckRunOutput) GetAnnotationsCount() int { - if c == nil || c.AnnotationsCount == nil { - return 0 - } - return *c.AnnotationsCount -} - -// GetAnnotationsURL returns the AnnotationsURL field if it's non-nil, zero value otherwise. -func (c *CheckRunOutput) GetAnnotationsURL() string { - if c == nil || c.AnnotationsURL == nil { - return "" - } - return *c.AnnotationsURL -} - -// GetSummary returns the Summary field if it's non-nil, zero value otherwise. -func (c *CheckRunOutput) GetSummary() string { - if c == nil || c.Summary == nil { - return "" - } - return *c.Summary -} - -// GetText returns the Text field if it's non-nil, zero value otherwise. -func (c *CheckRunOutput) GetText() string { - if c == nil || c.Text == nil { - return "" - } - return *c.Text -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (c *CheckRunOutput) GetTitle() string { - if c == nil || c.Title == nil { - return "" - } - return *c.Title -} - -// GetAfterSHA returns the AfterSHA field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetAfterSHA() string { - if c == nil || c.AfterSHA == nil { - return "" - } - return *c.AfterSHA -} - -// GetApp returns the App field. -func (c *CheckSuite) GetApp() *App { - if c == nil { - return nil - } - return c.App -} - -// GetBeforeSHA returns the BeforeSHA field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetBeforeSHA() string { - if c == nil || c.BeforeSHA == nil { - return "" - } - return *c.BeforeSHA -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. 
-func (c *CheckSuite) GetConclusion() string { - if c == nil || c.Conclusion == nil { - return "" - } - return *c.Conclusion -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetHeadBranch() string { - if c == nil || c.HeadBranch == nil { - return "" - } - return *c.HeadBranch -} - -// GetHeadCommit returns the HeadCommit field. -func (c *CheckSuite) GetHeadCommit() *Commit { - if c == nil { - return nil - } - return c.HeadCommit -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetHeadSHA() string { - if c == nil || c.HeadSHA == nil { - return "" - } - return *c.HeadSHA -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetRepository returns the Repository field. -func (c *CheckSuite) GetRepository() *Repository { - if c == nil { - return nil - } - return c.Repository -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetStatus() string { - if c == nil || c.Status == nil { - return "" - } - return *c.Status -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetUpdatedAt() Timestamp { - if c == nil || c.UpdatedAt == nil { - return Timestamp{} - } - return *c.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (c *CheckSuiteEvent) GetAction() string { - if c == nil || c.Action == nil { - return "" - } - return *c.Action -} - -// GetCheckSuite returns the CheckSuite field. -func (c *CheckSuiteEvent) GetCheckSuite() *CheckSuite { - if c == nil { - return nil - } - return c.CheckSuite -} - -// GetInstallation returns the Installation field. -func (c *CheckSuiteEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetOrg returns the Org field. -func (c *CheckSuiteEvent) GetOrg() *Organization { - if c == nil { - return nil - } - return c.Org -} - -// GetRepo returns the Repo field. -func (c *CheckSuiteEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetSender returns the Sender field. -func (c *CheckSuiteEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetPreferences returns the Preferences field. -func (c *CheckSuitePreferenceResults) GetPreferences() *PreferenceList { - if c == nil { - return nil - } - return c.Preferences -} - -// GetRepository returns the Repository field. -func (c *CheckSuitePreferenceResults) GetRepository() *Repository { - if c == nil { - return nil - } - return c.Repository -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. 
-func (c *CodeOfConduct) GetBody() string { - if c == nil || c.Body == nil { - return "" - } - return *c.Body -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (c *CodeOfConduct) GetKey() string { - if c == nil || c.Key == nil { - return "" - } - return *c.Key -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CodeOfConduct) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CodeOfConduct) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetSuggestion returns the Suggestion field if it's non-nil, zero value otherwise. -func (c *CodeownersError) GetSuggestion() string { - if c == nil || c.Suggestion == nil { - return "" - } - return *c.Suggestion -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetContentType() string { - if c == nil || c.ContentType == nil { - return "" - } - return *c.ContentType -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetLanguage returns the Language field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetLanguage() string { - if c == nil || c.Language == nil { - return "" - } - return *c.Language -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetSize() int64 { - if c == nil || c.Size == nil { - return 0 - } - return *c.Size -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetUpdatedAt() Timestamp { - if c == nil || c.UpdatedAt == nil { - return Timestamp{} - } - return *c.UpdatedAt -} - -// GetUploader returns the Uploader field. -func (c *CodeQLDatabase) GetUploader() *User { - if c == nil { - return nil - } - return c.Uploader -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CodeResult) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CodeResult) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (c *CodeResult) GetPath() string { - if c == nil || c.Path == nil { - return "" - } - return *c.Path -} - -// GetRepository returns the Repository field. -func (c *CodeResult) GetRepository() *Repository { - if c == nil { - return nil - } - return c.Repository -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. 
-func (c *CodeResult) GetSHA() string { - if c == nil || c.SHA == nil { - return "" - } - return *c.SHA -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (c *CodeScanningAlertEvent) GetAction() string { - if c == nil || c.Action == nil { - return "" - } - return *c.Action -} - -// GetAlert returns the Alert field. -func (c *CodeScanningAlertEvent) GetAlert() *Alert { - if c == nil { - return nil - } - return c.Alert -} - -// GetCommitOID returns the CommitOID field if it's non-nil, zero value otherwise. -func (c *CodeScanningAlertEvent) GetCommitOID() string { - if c == nil || c.CommitOID == nil { - return "" - } - return *c.CommitOID -} - -// GetInstallation returns the Installation field. -func (c *CodeScanningAlertEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetOrg returns the Org field. -func (c *CodeScanningAlertEvent) GetOrg() *Organization { - if c == nil { - return nil - } - return c.Org -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (c *CodeScanningAlertEvent) GetRef() string { - if c == nil || c.Ref == nil { - return "" - } - return *c.Ref -} - -// GetRepo returns the Repo field. -func (c *CodeScanningAlertEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetSender returns the Sender field. -func (c *CodeScanningAlertEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetDismissedComment returns the DismissedComment field if it's non-nil, zero value otherwise. -func (c *CodeScanningAlertState) GetDismissedComment() string { - if c == nil || c.DismissedComment == nil { - return "" - } - return *c.DismissedComment -} - -// GetDismissedReason returns the DismissedReason field if it's non-nil, zero value otherwise. -func (c *CodeScanningAlertState) GetDismissedReason() string { - if c == nil || c.DismissedReason == nil { - return "" - } - return *c.DismissedReason -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (c *CodeSearchResult) GetIncompleteResults() bool { - if c == nil || c.IncompleteResults == nil { - return false - } - return *c.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (c *CodeSearchResult) GetTotal() int { - if c == nil || c.Total == nil { - return 0 - } - return *c.Total -} - -// GetBillableOwner returns the BillableOwner field. -func (c *Codespace) GetBillableOwner() *User { - if c == nil { - return nil - } - return c.BillableOwner -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *Codespace) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetDevcontainerPath returns the DevcontainerPath field if it's non-nil, zero value otherwise. -func (c *Codespace) GetDevcontainerPath() string { - if c == nil || c.DevcontainerPath == nil { - return "" - } - return *c.DevcontainerPath -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. -func (c *Codespace) GetDisplayName() string { - if c == nil || c.DisplayName == nil { - return "" - } - return *c.DisplayName -} - -// GetEnvironmentID returns the EnvironmentID field if it's non-nil, zero value otherwise. 
-func (c *Codespace) GetEnvironmentID() string { - if c == nil || c.EnvironmentID == nil { - return "" - } - return *c.EnvironmentID -} - -// GetGitStatus returns the GitStatus field. -func (c *Codespace) GetGitStatus() *CodespacesGitStatus { - if c == nil { - return nil - } - return c.GitStatus -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *Codespace) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetIdleTimeoutMinutes returns the IdleTimeoutMinutes field if it's non-nil, zero value otherwise. -func (c *Codespace) GetIdleTimeoutMinutes() int { - if c == nil || c.IdleTimeoutMinutes == nil { - return 0 - } - return *c.IdleTimeoutMinutes -} - -// GetIdleTimeoutNotice returns the IdleTimeoutNotice field if it's non-nil, zero value otherwise. -func (c *Codespace) GetIdleTimeoutNotice() string { - if c == nil || c.IdleTimeoutNotice == nil { - return "" - } - return *c.IdleTimeoutNotice -} - -// GetLastKnownStopNotice returns the LastKnownStopNotice field if it's non-nil, zero value otherwise. -func (c *Codespace) GetLastKnownStopNotice() string { - if c == nil || c.LastKnownStopNotice == nil { - return "" - } - return *c.LastKnownStopNotice -} - -// GetLastUsedAt returns the LastUsedAt field if it's non-nil, zero value otherwise. -func (c *Codespace) GetLastUsedAt() Timestamp { - if c == nil || c.LastUsedAt == nil { - return Timestamp{} - } - return *c.LastUsedAt -} - -// GetLocation returns the Location field if it's non-nil, zero value otherwise. -func (c *Codespace) GetLocation() string { - if c == nil || c.Location == nil { - return "" - } - return *c.Location -} - -// GetMachine returns the Machine field. -func (c *Codespace) GetMachine() *CodespacesMachine { - if c == nil { - return nil - } - return c.Machine -} - -// GetMachinesURL returns the MachinesURL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetMachinesURL() string { - if c == nil || c.MachinesURL == nil { - return "" - } - return *c.MachinesURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *Codespace) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetOwner returns the Owner field. -func (c *Codespace) GetOwner() *User { - if c == nil { - return nil - } - return c.Owner -} - -// GetPendingOperation returns the PendingOperation field if it's non-nil, zero value otherwise. -func (c *Codespace) GetPendingOperation() bool { - if c == nil || c.PendingOperation == nil { - return false - } - return *c.PendingOperation -} - -// GetPendingOperationDisabledReason returns the PendingOperationDisabledReason field if it's non-nil, zero value otherwise. -func (c *Codespace) GetPendingOperationDisabledReason() string { - if c == nil || c.PendingOperationDisabledReason == nil { - return "" - } - return *c.PendingOperationDisabledReason -} - -// GetPrebuild returns the Prebuild field if it's non-nil, zero value otherwise. -func (c *Codespace) GetPrebuild() bool { - if c == nil || c.Prebuild == nil { - return false - } - return *c.Prebuild -} - -// GetPullsURL returns the PullsURL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetPullsURL() string { - if c == nil || c.PullsURL == nil { - return "" - } - return *c.PullsURL -} - -// GetRepository returns the Repository field. 
-func (c *Codespace) GetRepository() *Repository { - if c == nil { - return nil - } - return c.Repository -} - -// GetRetentionExpiresAt returns the RetentionExpiresAt field if it's non-nil, zero value otherwise. -func (c *Codespace) GetRetentionExpiresAt() Timestamp { - if c == nil || c.RetentionExpiresAt == nil { - return Timestamp{} - } - return *c.RetentionExpiresAt -} - -// GetRetentionPeriodMinutes returns the RetentionPeriodMinutes field if it's non-nil, zero value otherwise. -func (c *Codespace) GetRetentionPeriodMinutes() int { - if c == nil || c.RetentionPeriodMinutes == nil { - return 0 - } - return *c.RetentionPeriodMinutes -} - -// GetRuntimeConstraints returns the RuntimeConstraints field. -func (c *Codespace) GetRuntimeConstraints() *CodespacesRuntimeConstraints { - if c == nil { - return nil - } - return c.RuntimeConstraints -} - -// GetStartURL returns the StartURL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetStartURL() string { - if c == nil || c.StartURL == nil { - return "" - } - return *c.StartURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (c *Codespace) GetState() string { - if c == nil || c.State == nil { - return "" - } - return *c.State -} - -// GetStopURL returns the StopURL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetStopURL() string { - if c == nil || c.StopURL == nil { - return "" - } - return *c.StopURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (c *Codespace) GetUpdatedAt() Timestamp { - if c == nil || c.UpdatedAt == nil { - return Timestamp{} - } - return *c.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetWebURL returns the WebURL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetWebURL() string { - if c == nil || c.WebURL == nil { - return "" - } - return *c.WebURL -} - -// GetAhead returns the Ahead field if it's non-nil, zero value otherwise. -func (c *CodespacesGitStatus) GetAhead() int { - if c == nil || c.Ahead == nil { - return 0 - } - return *c.Ahead -} - -// GetBehind returns the Behind field if it's non-nil, zero value otherwise. -func (c *CodespacesGitStatus) GetBehind() int { - if c == nil || c.Behind == nil { - return 0 - } - return *c.Behind -} - -// GetHasUncommittedChanges returns the HasUncommittedChanges field if it's non-nil, zero value otherwise. -func (c *CodespacesGitStatus) GetHasUncommittedChanges() bool { - if c == nil || c.HasUncommittedChanges == nil { - return false - } - return *c.HasUncommittedChanges -} - -// GetHasUnpushedChanges returns the HasUnpushedChanges field if it's non-nil, zero value otherwise. -func (c *CodespacesGitStatus) GetHasUnpushedChanges() bool { - if c == nil || c.HasUnpushedChanges == nil { - return false - } - return *c.HasUnpushedChanges -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (c *CodespacesGitStatus) GetRef() string { - if c == nil || c.Ref == nil { - return "" - } - return *c.Ref -} - -// GetCPUs returns the CPUs field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetCPUs() int { - if c == nil || c.CPUs == nil { - return 0 - } - return *c.CPUs -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. 
-func (c *CodespacesMachine) GetDisplayName() string { - if c == nil || c.DisplayName == nil { - return "" - } - return *c.DisplayName -} - -// GetMemoryInBytes returns the MemoryInBytes field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetMemoryInBytes() int64 { - if c == nil || c.MemoryInBytes == nil { - return 0 - } - return *c.MemoryInBytes -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetOperatingSystem returns the OperatingSystem field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetOperatingSystem() string { - if c == nil || c.OperatingSystem == nil { - return "" - } - return *c.OperatingSystem -} - -// GetPrebuildAvailability returns the PrebuildAvailability field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetPrebuildAvailability() string { - if c == nil || c.PrebuildAvailability == nil { - return "" - } - return *c.PrebuildAvailability -} - -// GetStorageInBytes returns the StorageInBytes field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetStorageInBytes() int64 { - if c == nil || c.StorageInBytes == nil { - return 0 - } - return *c.StorageInBytes -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *CollaboratorInvitation) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CollaboratorInvitation) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CollaboratorInvitation) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetInvitee returns the Invitee field. -func (c *CollaboratorInvitation) GetInvitee() *User { - if c == nil { - return nil - } - return c.Invitee -} - -// GetInviter returns the Inviter field. -func (c *CollaboratorInvitation) GetInviter() *User { - if c == nil { - return nil - } - return c.Inviter -} - -// GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. -func (c *CollaboratorInvitation) GetPermissions() string { - if c == nil || c.Permissions == nil { - return "" - } - return *c.Permissions -} - -// GetRepo returns the Repo field. -func (c *CollaboratorInvitation) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CollaboratorInvitation) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetCommitURL returns the CommitURL field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetCommitURL() string { - if c == nil || c.CommitURL == nil { - return "" - } - return *c.CommitURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. 
-func (c *CombinedStatus) GetRepositoryURL() string { - if c == nil || c.RepositoryURL == nil { - return "" - } - return *c.RepositoryURL -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetSHA() string { - if c == nil || c.SHA == nil { - return "" - } - return *c.SHA -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetState() string { - if c == nil || c.State == nil { - return "" - } - return *c.State -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetTotalCount() int { - if c == nil || c.TotalCount == nil { - return 0 - } - return *c.TotalCount -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *Comment) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetAuthorAssociation() string { - if c == nil || c.AuthorAssociation == nil { - return "" - } - return *c.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetBody() string { - if c == nil || c.Body == nil { - return "" - } - return *c.Body -} - -// GetChildCommentCount returns the ChildCommentCount field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetChildCommentCount() int { - if c == nil || c.ChildCommentCount == nil { - return 0 - } - return *c.ChildCommentCount -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetDiscussionID returns the DiscussionID field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetDiscussionID() int64 { - if c == nil || c.DiscussionID == nil { - return 0 - } - return *c.DiscussionID -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetParentID returns the ParentID field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetParentID() int64 { - if c == nil || c.ParentID == nil { - return 0 - } - return *c.ParentID -} - -// GetReactions returns the Reactions field. -func (c *CommentDiscussion) GetReactions() *Reactions { - if c == nil { - return nil - } - return c.Reactions -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetRepositoryURL() string { - if c == nil || c.RepositoryURL == nil { - return "" - } - return *c.RepositoryURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (c *CommentDiscussion) GetUpdatedAt() Timestamp { - if c == nil || c.UpdatedAt == nil { - return Timestamp{} - } - return *c.UpdatedAt -} - -// GetUser returns the User field. -func (c *CommentDiscussion) GetUser() *User { - if c == nil { - return nil - } - return c.User -} - -// GetTotalCommitComments returns the TotalCommitComments field if it's non-nil, zero value otherwise. -func (c *CommentStats) GetTotalCommitComments() int { - if c == nil || c.TotalCommitComments == nil { - return 0 - } - return *c.TotalCommitComments -} - -// GetTotalGistComments returns the TotalGistComments field if it's non-nil, zero value otherwise. -func (c *CommentStats) GetTotalGistComments() int { - if c == nil || c.TotalGistComments == nil { - return 0 - } - return *c.TotalGistComments -} - -// GetTotalIssueComments returns the TotalIssueComments field if it's non-nil, zero value otherwise. -func (c *CommentStats) GetTotalIssueComments() int { - if c == nil || c.TotalIssueComments == nil { - return 0 - } - return *c.TotalIssueComments -} - -// GetTotalPullRequestComments returns the TotalPullRequestComments field if it's non-nil, zero value otherwise. -func (c *CommentStats) GetTotalPullRequestComments() int { - if c == nil || c.TotalPullRequestComments == nil { - return 0 - } - return *c.TotalPullRequestComments -} - -// GetAuthor returns the Author field. -func (c *Commit) GetAuthor() *CommitAuthor { - if c == nil { - return nil - } - return c.Author -} - -// GetCommentCount returns the CommentCount field if it's non-nil, zero value otherwise. -func (c *Commit) GetCommentCount() int { - if c == nil || c.CommentCount == nil { - return 0 - } - return *c.CommentCount -} - -// GetCommitter returns the Committer field. -func (c *Commit) GetCommitter() *CommitAuthor { - if c == nil { - return nil - } - return c.Committer -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *Commit) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (c *Commit) GetMessage() string { - if c == nil || c.Message == nil { - return "" - } - return *c.Message -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (c *Commit) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *Commit) GetSHA() string { - if c == nil || c.SHA == nil { - return "" - } - return *c.SHA -} - -// GetStats returns the Stats field. -func (c *Commit) GetStats() *CommitStats { - if c == nil { - return nil - } - return c.Stats -} - -// GetTree returns the Tree field. -func (c *Commit) GetTree() *Tree { - if c == nil { - return nil - } - return c.Tree -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *Commit) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetVerification returns the Verification field. -func (c *Commit) GetVerification() *SignatureVerification { - if c == nil { - return nil - } - return c.Verification -} - -// GetDate returns the Date field if it's non-nil, zero value otherwise. -func (c *CommitAuthor) GetDate() Timestamp { - if c == nil || c.Date == nil { - return Timestamp{} - } - return *c.Date -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. 
-func (c *CommitAuthor) GetEmail() string { - if c == nil || c.Email == nil { - return "" - } - return *c.Email -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (c *CommitAuthor) GetLogin() string { - if c == nil || c.Login == nil { - return "" - } - return *c.Login -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CommitAuthor) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (c *CommitCommentEvent) GetAction() string { - if c == nil || c.Action == nil { - return "" - } - return *c.Action -} - -// GetComment returns the Comment field. -func (c *CommitCommentEvent) GetComment() *RepositoryComment { - if c == nil { - return nil - } - return c.Comment -} - -// GetInstallation returns the Installation field. -func (c *CommitCommentEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetOrg returns the Org field. -func (c *CommitCommentEvent) GetOrg() *Organization { - if c == nil { - return nil - } - return c.Org -} - -// GetRepo returns the Repo field. -func (c *CommitCommentEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetSender returns the Sender field. -func (c *CommitCommentEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetAdditions() int { - if c == nil || c.Additions == nil { - return 0 - } - return *c.Additions -} - -// GetBlobURL returns the BlobURL field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetBlobURL() string { - if c == nil || c.BlobURL == nil { - return "" - } - return *c.BlobURL -} - -// GetChanges returns the Changes field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetChanges() int { - if c == nil || c.Changes == nil { - return 0 - } - return *c.Changes -} - -// GetContentsURL returns the ContentsURL field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetContentsURL() string { - if c == nil || c.ContentsURL == nil { - return "" - } - return *c.ContentsURL -} - -// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetDeletions() int { - if c == nil || c.Deletions == nil { - return 0 - } - return *c.Deletions -} - -// GetFilename returns the Filename field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetFilename() string { - if c == nil || c.Filename == nil { - return "" - } - return *c.Filename -} - -// GetPatch returns the Patch field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetPatch() string { - if c == nil || c.Patch == nil { - return "" - } - return *c.Patch -} - -// GetPreviousFilename returns the PreviousFilename field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetPreviousFilename() string { - if c == nil || c.PreviousFilename == nil { - return "" - } - return *c.PreviousFilename -} - -// GetRawURL returns the RawURL field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetRawURL() string { - if c == nil || c.RawURL == nil { - return "" - } - return *c.RawURL -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. 
-func (c *CommitFile) GetSHA() string { - if c == nil || c.SHA == nil { - return "" - } - return *c.SHA -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetStatus() string { - if c == nil || c.Status == nil { - return "" - } - return *c.Status -} - -// GetAuthor returns the Author field. -func (c *CommitResult) GetAuthor() *User { - if c == nil { - return nil - } - return c.Author -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetCommentsURL() string { - if c == nil || c.CommentsURL == nil { - return "" - } - return *c.CommentsURL -} - -// GetCommit returns the Commit field. -func (c *CommitResult) GetCommit() *Commit { - if c == nil { - return nil - } - return c.Commit -} - -// GetCommitter returns the Committer field. -func (c *CommitResult) GetCommitter() *User { - if c == nil { - return nil - } - return c.Committer -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetRepository returns the Repository field. -func (c *CommitResult) GetRepository() *Repository { - if c == nil { - return nil - } - return c.Repository -} - -// GetScore returns the Score field. -func (c *CommitResult) GetScore() *float64 { - if c == nil { - return nil - } - return c.Score -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetSHA() string { - if c == nil || c.SHA == nil { - return "" - } - return *c.SHA -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetAheadBy returns the AheadBy field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetAheadBy() int { - if c == nil || c.AheadBy == nil { - return 0 - } - return *c.AheadBy -} - -// GetBaseCommit returns the BaseCommit field. -func (c *CommitsComparison) GetBaseCommit() *RepositoryCommit { - if c == nil { - return nil - } - return c.BaseCommit -} - -// GetBehindBy returns the BehindBy field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetBehindBy() int { - if c == nil || c.BehindBy == nil { - return 0 - } - return *c.BehindBy -} - -// GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetDiffURL() string { - if c == nil || c.DiffURL == nil { - return "" - } - return *c.DiffURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetMergeBaseCommit returns the MergeBaseCommit field. -func (c *CommitsComparison) GetMergeBaseCommit() *RepositoryCommit { - if c == nil { - return nil - } - return c.MergeBaseCommit -} - -// GetPatchURL returns the PatchURL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetPatchURL() string { - if c == nil || c.PatchURL == nil { - return "" - } - return *c.PatchURL -} - -// GetPermalinkURL returns the PermalinkURL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetPermalinkURL() string { - if c == nil || c.PermalinkURL == nil { - return "" - } - return *c.PermalinkURL -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. 
-func (c *CommitsComparison) GetStatus() string { - if c == nil || c.Status == nil { - return "" - } - return *c.Status -} - -// GetTotalCommits returns the TotalCommits field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetTotalCommits() int { - if c == nil || c.TotalCommits == nil { - return 0 - } - return *c.TotalCommits -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (c *CommitsSearchResult) GetIncompleteResults() bool { - if c == nil || c.IncompleteResults == nil { - return false - } - return *c.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (c *CommitsSearchResult) GetTotal() int { - if c == nil || c.Total == nil { - return 0 - } - return *c.Total -} - -// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. -func (c *CommitStats) GetAdditions() int { - if c == nil || c.Additions == nil { - return 0 - } - return *c.Additions -} - -// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. -func (c *CommitStats) GetDeletions() int { - if c == nil || c.Deletions == nil { - return 0 - } - return *c.Deletions -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (c *CommitStats) GetTotal() int { - if c == nil || c.Total == nil { - return 0 - } - return *c.Total -} - -// GetCodeOfConduct returns the CodeOfConduct field. -func (c *CommunityHealthFiles) GetCodeOfConduct() *Metric { - if c == nil { - return nil - } - return c.CodeOfConduct -} - -// GetCodeOfConductFile returns the CodeOfConductFile field. -func (c *CommunityHealthFiles) GetCodeOfConductFile() *Metric { - if c == nil { - return nil - } - return c.CodeOfConductFile -} - -// GetContributing returns the Contributing field. -func (c *CommunityHealthFiles) GetContributing() *Metric { - if c == nil { - return nil - } - return c.Contributing -} - -// GetIssueTemplate returns the IssueTemplate field. -func (c *CommunityHealthFiles) GetIssueTemplate() *Metric { - if c == nil { - return nil - } - return c.IssueTemplate -} - -// GetLicense returns the License field. -func (c *CommunityHealthFiles) GetLicense() *Metric { - if c == nil { - return nil - } - return c.License -} - -// GetPullRequestTemplate returns the PullRequestTemplate field. -func (c *CommunityHealthFiles) GetPullRequestTemplate() *Metric { - if c == nil { - return nil - } - return c.PullRequestTemplate -} - -// GetReadme returns the Readme field. -func (c *CommunityHealthFiles) GetReadme() *Metric { - if c == nil { - return nil - } - return c.Readme -} - -// GetContentReportsEnabled returns the ContentReportsEnabled field if it's non-nil, zero value otherwise. -func (c *CommunityHealthMetrics) GetContentReportsEnabled() bool { - if c == nil || c.ContentReportsEnabled == nil { - return false - } - return *c.ContentReportsEnabled -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (c *CommunityHealthMetrics) GetDescription() string { - if c == nil || c.Description == nil { - return "" - } - return *c.Description -} - -// GetDocumentation returns the Documentation field if it's non-nil, zero value otherwise. 
-func (c *CommunityHealthMetrics) GetDocumentation() string { - if c == nil || c.Documentation == nil { - return "" - } - return *c.Documentation -} - -// GetFiles returns the Files field. -func (c *CommunityHealthMetrics) GetFiles() *CommunityHealthFiles { - if c == nil { - return nil - } - return c.Files -} - -// GetHealthPercentage returns the HealthPercentage field if it's non-nil, zero value otherwise. -func (c *CommunityHealthMetrics) GetHealthPercentage() int { - if c == nil || c.HealthPercentage == nil { - return 0 - } - return *c.HealthPercentage -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (c *CommunityHealthMetrics) GetUpdatedAt() Timestamp { - if c == nil || c.UpdatedAt == nil { - return Timestamp{} - } - return *c.UpdatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *ContentReference) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (c *ContentReference) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetReference returns the Reference field if it's non-nil, zero value otherwise. -func (c *ContentReference) GetReference() string { - if c == nil || c.Reference == nil { - return "" - } - return *c.Reference -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (c *ContentReferenceEvent) GetAction() string { - if c == nil || c.Action == nil { - return "" - } - return *c.Action -} - -// GetContentReference returns the ContentReference field. -func (c *ContentReferenceEvent) GetContentReference() *ContentReference { - if c == nil { - return nil - } - return c.ContentReference -} - -// GetInstallation returns the Installation field. -func (c *ContentReferenceEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetRepo returns the Repo field. -func (c *ContentReferenceEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetSender returns the Sender field. -func (c *ContentReferenceEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetAvatarURL() string { - if c == nil || c.AvatarURL == nil { - return "" - } - return *c.AvatarURL -} - -// GetContributions returns the Contributions field if it's non-nil, zero value otherwise. -func (c *Contributor) GetContributions() int { - if c == nil || c.Contributions == nil { - return 0 - } - return *c.Contributions -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (c *Contributor) GetEmail() string { - if c == nil || c.Email == nil { - return "" - } - return *c.Email -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetEventsURL() string { - if c == nil || c.EventsURL == nil { - return "" - } - return *c.EventsURL -} - -// GetFollowersURL returns the FollowersURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetFollowersURL() string { - if c == nil || c.FollowersURL == nil { - return "" - } - return *c.FollowersURL -} - -// GetFollowingURL returns the FollowingURL field if it's non-nil, zero value otherwise. 
-func (c *Contributor) GetFollowingURL() string { - if c == nil || c.FollowingURL == nil { - return "" - } - return *c.FollowingURL -} - -// GetGistsURL returns the GistsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetGistsURL() string { - if c == nil || c.GistsURL == nil { - return "" - } - return *c.GistsURL -} - -// GetGravatarID returns the GravatarID field if it's non-nil, zero value otherwise. -func (c *Contributor) GetGravatarID() string { - if c == nil || c.GravatarID == nil { - return "" - } - return *c.GravatarID -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *Contributor) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (c *Contributor) GetLogin() string { - if c == nil || c.Login == nil { - return "" - } - return *c.Login -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *Contributor) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (c *Contributor) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetOrganizationsURL() string { - if c == nil || c.OrganizationsURL == nil { - return "" - } - return *c.OrganizationsURL -} - -// GetReceivedEventsURL returns the ReceivedEventsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetReceivedEventsURL() string { - if c == nil || c.ReceivedEventsURL == nil { - return "" - } - return *c.ReceivedEventsURL -} - -// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetReposURL() string { - if c == nil || c.ReposURL == nil { - return "" - } - return *c.ReposURL -} - -// GetSiteAdmin returns the SiteAdmin field if it's non-nil, zero value otherwise. -func (c *Contributor) GetSiteAdmin() bool { - if c == nil || c.SiteAdmin == nil { - return false - } - return *c.SiteAdmin -} - -// GetStarredURL returns the StarredURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetStarredURL() string { - if c == nil || c.StarredURL == nil { - return "" - } - return *c.StarredURL -} - -// GetSubscriptionsURL returns the SubscriptionsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetSubscriptionsURL() string { - if c == nil || c.SubscriptionsURL == nil { - return "" - } - return *c.SubscriptionsURL -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (c *Contributor) GetType() string { - if c == nil || c.Type == nil { - return "" - } - return *c.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetAuthor returns the Author field. -func (c *ContributorStats) GetAuthor() *Contributor { - if c == nil { - return nil - } - return c.Author -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. 
-func (c *ContributorStats) GetTotal() int { - if c == nil || c.Total == nil { - return 0 - } - return *c.Total -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetCompletedAt() Timestamp { - if c == nil || c.CompletedAt == nil { - return Timestamp{} - } - return *c.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetConclusion() string { - if c == nil || c.Conclusion == nil { - return "" - } - return *c.Conclusion -} - -// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetDetailsURL() string { - if c == nil || c.DetailsURL == nil { - return "" - } - return *c.DetailsURL -} - -// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetExternalID() string { - if c == nil || c.ExternalID == nil { - return "" - } - return *c.ExternalID -} - -// GetOutput returns the Output field. -func (c *CreateCheckRunOptions) GetOutput() *CheckRunOutput { - if c == nil { - return nil - } - return c.Output -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetStartedAt() Timestamp { - if c == nil || c.StartedAt == nil { - return Timestamp{} - } - return *c.StartedAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetStatus() string { - if c == nil || c.Status == nil { - return "" - } - return *c.Status -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (c *CreateCheckSuiteOptions) GetHeadBranch() string { - if c == nil || c.HeadBranch == nil { - return "" - } - return *c.HeadBranch -} - -// GetClientIP returns the ClientIP field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetClientIP() string { - if c == nil || c.ClientIP == nil { - return "" - } - return *c.ClientIP -} - -// GetDevcontainerPath returns the DevcontainerPath field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetDevcontainerPath() string { - if c == nil || c.DevcontainerPath == nil { - return "" - } - return *c.DevcontainerPath -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetDisplayName() string { - if c == nil || c.DisplayName == nil { - return "" - } - return *c.DisplayName -} - -// GetGeo returns the Geo field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetGeo() string { - if c == nil || c.Geo == nil { - return "" - } - return *c.Geo -} - -// GetIdleTimeoutMinutes returns the IdleTimeoutMinutes field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetIdleTimeoutMinutes() int { - if c == nil || c.IdleTimeoutMinutes == nil { - return 0 - } - return *c.IdleTimeoutMinutes -} - -// GetMachine returns the Machine field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetMachine() string { - if c == nil || c.Machine == nil { - return "" - } - return *c.Machine -} - -// GetMultiRepoPermissionsOptOut returns the MultiRepoPermissionsOptOut field if it's non-nil, zero value otherwise. 
-func (c *CreateCodespaceOptions) GetMultiRepoPermissionsOptOut() bool { - if c == nil || c.MultiRepoPermissionsOptOut == nil { - return false - } - return *c.MultiRepoPermissionsOptOut -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetRef() string { - if c == nil || c.Ref == nil { - return "" - } - return *c.Ref -} - -// GetRetentionPeriodMinutes returns the RetentionPeriodMinutes field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetRetentionPeriodMinutes() int { - if c == nil || c.RetentionPeriodMinutes == nil { - return 0 - } - return *c.RetentionPeriodMinutes -} - -// GetWorkingDirectory returns the WorkingDirectory field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetWorkingDirectory() string { - if c == nil || c.WorkingDirectory == nil { - return "" - } - return *c.WorkingDirectory -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. -func (c *CreateEnterpriseRunnerGroupRequest) GetAllowsPublicRepositories() bool { - if c == nil || c.AllowsPublicRepositories == nil { - return false - } - return *c.AllowsPublicRepositories -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CreateEnterpriseRunnerGroupRequest) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (c *CreateEnterpriseRunnerGroupRequest) GetRestrictedToWorkflows() bool { - if c == nil || c.RestrictedToWorkflows == nil { - return false - } - return *c.RestrictedToWorkflows -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (c *CreateEnterpriseRunnerGroupRequest) GetVisibility() string { - if c == nil || c.Visibility == nil { - return "" - } - return *c.Visibility -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetDescription() string { - if c == nil || c.Description == nil { - return "" - } - return *c.Description -} - -// GetInstallation returns the Installation field. -func (c *CreateEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetMasterBranch returns the MasterBranch field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetMasterBranch() string { - if c == nil || c.MasterBranch == nil { - return "" - } - return *c.MasterBranch -} - -// GetOrg returns the Org field. -func (c *CreateEvent) GetOrg() *Organization { - if c == nil { - return nil - } - return c.Org -} - -// GetPusherType returns the PusherType field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetPusherType() string { - if c == nil || c.PusherType == nil { - return "" - } - return *c.PusherType -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetRef() string { - if c == nil || c.Ref == nil { - return "" - } - return *c.Ref -} - -// GetRefType returns the RefType field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetRefType() string { - if c == nil || c.RefType == nil { - return "" - } - return *c.RefType -} - -// GetRepo returns the Repo field. -func (c *CreateEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetSender returns the Sender field. 
-func (c *CreateEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (c *CreateOrgInvitationOptions) GetEmail() string { - if c == nil || c.Email == nil { - return "" - } - return *c.Email -} - -// GetInviteeID returns the InviteeID field if it's non-nil, zero value otherwise. -func (c *CreateOrgInvitationOptions) GetInviteeID() int64 { - if c == nil || c.InviteeID == nil { - return 0 - } - return *c.InviteeID -} - -// GetRole returns the Role field if it's non-nil, zero value otherwise. -func (c *CreateOrgInvitationOptions) GetRole() string { - if c == nil || c.Role == nil { - return "" - } - return *c.Role -} - -// GetBaseRole returns the BaseRole field if it's non-nil, zero value otherwise. -func (c *CreateOrUpdateCustomRoleOptions) GetBaseRole() string { - if c == nil || c.BaseRole == nil { - return "" - } - return *c.BaseRole -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (c *CreateOrUpdateCustomRoleOptions) GetDescription() string { - if c == nil || c.Description == nil { - return "" - } - return *c.Description -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CreateOrUpdateCustomRoleOptions) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (c *CreateProtectedChanges) GetFrom() bool { - if c == nil || c.From == nil { - return false - } - return *c.From -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. -func (c *CreateRunnerGroupRequest) GetAllowsPublicRepositories() bool { - if c == nil || c.AllowsPublicRepositories == nil { - return false - } - return *c.AllowsPublicRepositories -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CreateRunnerGroupRequest) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (c *CreateRunnerGroupRequest) GetRestrictedToWorkflows() bool { - if c == nil || c.RestrictedToWorkflows == nil { - return false - } - return *c.RestrictedToWorkflows -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (c *CreateRunnerGroupRequest) GetVisibility() string { - if c == nil || c.Visibility == nil { - return "" - } - return *c.Visibility -} - -// GetCanAdminsBypass returns the CanAdminsBypass field if it's non-nil, zero value otherwise. -func (c *CreateUpdateEnvironment) GetCanAdminsBypass() bool { - if c == nil || c.CanAdminsBypass == nil { - return false - } - return *c.CanAdminsBypass -} - -// GetDeploymentBranchPolicy returns the DeploymentBranchPolicy field. -func (c *CreateUpdateEnvironment) GetDeploymentBranchPolicy() *BranchPolicy { - if c == nil { - return nil - } - return c.DeploymentBranchPolicy -} - -// GetPreventSelfReview returns the PreventSelfReview field if it's non-nil, zero value otherwise. -func (c *CreateUpdateEnvironment) GetPreventSelfReview() bool { - if c == nil || c.PreventSelfReview == nil { - return false - } - return *c.PreventSelfReview -} - -// GetWaitTimer returns the WaitTimer field if it's non-nil, zero value otherwise. 
-func (c *CreateUpdateEnvironment) GetWaitTimer() int { - if c == nil || c.WaitTimer == nil { - return 0 - } - return *c.WaitTimer -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (c *CreateUpdateRequiredWorkflowOptions) GetRepositoryID() int64 { - if c == nil || c.RepositoryID == nil { - return 0 - } - return *c.RepositoryID -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (c *CreateUpdateRequiredWorkflowOptions) GetScope() string { - if c == nil || c.Scope == nil { - return "" - } - return *c.Scope -} - -// GetSelectedRepositoryIDs returns the SelectedRepositoryIDs field. -func (c *CreateUpdateRequiredWorkflowOptions) GetSelectedRepositoryIDs() *SelectedRepoIDs { - if c == nil { - return nil - } - return c.SelectedRepositoryIDs -} - -// GetWorkflowFilePath returns the WorkflowFilePath field if it's non-nil, zero value otherwise. -func (c *CreateUpdateRequiredWorkflowOptions) GetWorkflowFilePath() string { - if c == nil || c.WorkflowFilePath == nil { - return "" - } - return *c.WorkflowFilePath -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (c *CreateUserProjectOptions) GetBody() string { - if c == nil || c.Body == nil { - return "" - } - return *c.Body -} - -// GetCreated returns the Created field if it's non-nil, zero value otherwise. -func (c *CreationInfo) GetCreated() Timestamp { - if c == nil || c.Created == nil { - return Timestamp{} - } - return *c.Created -} - -// GetAuthorizedCredentialExpiresAt returns the AuthorizedCredentialExpiresAt field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetAuthorizedCredentialExpiresAt() Timestamp { - if c == nil || c.AuthorizedCredentialExpiresAt == nil { - return Timestamp{} - } - return *c.AuthorizedCredentialExpiresAt -} - -// GetAuthorizedCredentialID returns the AuthorizedCredentialID field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetAuthorizedCredentialID() int64 { - if c == nil || c.AuthorizedCredentialID == nil { - return 0 - } - return *c.AuthorizedCredentialID -} - -// GetAuthorizedCredentialNote returns the AuthorizedCredentialNote field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetAuthorizedCredentialNote() string { - if c == nil || c.AuthorizedCredentialNote == nil { - return "" - } - return *c.AuthorizedCredentialNote -} - -// GetAuthorizedCredentialTitle returns the AuthorizedCredentialTitle field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetAuthorizedCredentialTitle() string { - if c == nil || c.AuthorizedCredentialTitle == nil { - return "" - } - return *c.AuthorizedCredentialTitle -} - -// GetCredentialAccessedAt returns the CredentialAccessedAt field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetCredentialAccessedAt() Timestamp { - if c == nil || c.CredentialAccessedAt == nil { - return Timestamp{} - } - return *c.CredentialAccessedAt -} - -// GetCredentialAuthorizedAt returns the CredentialAuthorizedAt field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetCredentialAuthorizedAt() Timestamp { - if c == nil || c.CredentialAuthorizedAt == nil { - return Timestamp{} - } - return *c.CredentialAuthorizedAt -} - -// GetCredentialID returns the CredentialID field if it's non-nil, zero value otherwise. 
-func (c *CredentialAuthorization) GetCredentialID() int64 { - if c == nil || c.CredentialID == nil { - return 0 - } - return *c.CredentialID -} - -// GetCredentialType returns the CredentialType field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetCredentialType() string { - if c == nil || c.CredentialType == nil { - return "" - } - return *c.CredentialType -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetFingerprint() string { - if c == nil || c.Fingerprint == nil { - return "" - } - return *c.Fingerprint -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetLogin() string { - if c == nil || c.Login == nil { - return "" - } - return *c.Login -} - -// GetTokenLastEight returns the TokenLastEight field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetTokenLastEight() string { - if c == nil || c.TokenLastEight == nil { - return "" - } - return *c.TokenLastEight -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (c *Credit) GetType() string { - if c == nil || c.Type == nil { - return "" - } - return *c.Type -} - -// GetUser returns the User field. -func (c *Credit) GetUser() *User { - if c == nil { - return nil - } - return c.User -} - -// GetDefaultValue returns the DefaultValue field if it's non-nil, zero value otherwise. -func (c *CustomProperty) GetDefaultValue() string { - if c == nil || c.DefaultValue == nil { - return "" - } - return *c.DefaultValue -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (c *CustomProperty) GetDescription() string { - if c == nil || c.Description == nil { - return "" - } - return *c.Description -} - -// GetPropertyName returns the PropertyName field if it's non-nil, zero value otherwise. -func (c *CustomProperty) GetPropertyName() string { - if c == nil || c.PropertyName == nil { - return "" - } - return *c.PropertyName -} - -// GetRequired returns the Required field if it's non-nil, zero value otherwise. -func (c *CustomProperty) GetRequired() bool { - if c == nil || c.Required == nil { - return false - } - return *c.Required -} - -// GetValue returns the Value field if it's non-nil, zero value otherwise. -func (c *CustomPropertyValue) GetValue() string { - if c == nil || c.Value == nil { - return "" - } - return *c.Value -} - -// GetBaseRole returns the BaseRole field if it's non-nil, zero value otherwise. -func (c *CustomRepoRoles) GetBaseRole() string { - if c == nil || c.BaseRole == nil { - return "" - } - return *c.BaseRole -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (c *CustomRepoRoles) GetDescription() string { - if c == nil || c.Description == nil { - return "" - } - return *c.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CustomRepoRoles) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CustomRepoRoles) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetQuerySuite returns the QuerySuite field if it's non-nil, zero value otherwise. 
-func (d *DefaultSetupConfiguration) GetQuerySuite() string { - if d == nil || d.QuerySuite == nil { - return "" - } - return *d.QuerySuite -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DefaultSetupConfiguration) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DefaultSetupConfiguration) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetConfirmDeleteURL returns the ConfirmDeleteURL field if it's non-nil, zero value otherwise. -func (d *DeleteAnalysis) GetConfirmDeleteURL() string { - if d == nil || d.ConfirmDeleteURL == nil { - return "" - } - return *d.ConfirmDeleteURL -} - -// GetNextAnalysisURL returns the NextAnalysisURL field if it's non-nil, zero value otherwise. -func (d *DeleteAnalysis) GetNextAnalysisURL() string { - if d == nil || d.NextAnalysisURL == nil { - return "" - } - return *d.NextAnalysisURL -} - -// GetInstallation returns the Installation field. -func (d *DeleteEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrg returns the Org field. -func (d *DeleteEvent) GetOrg() *Organization { - if d == nil { - return nil - } - return d.Org -} - -// GetPusherType returns the PusherType field if it's non-nil, zero value otherwise. -func (d *DeleteEvent) GetPusherType() string { - if d == nil || d.PusherType == nil { - return "" - } - return *d.PusherType -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (d *DeleteEvent) GetRef() string { - if d == nil || d.Ref == nil { - return "" - } - return *d.Ref -} - -// GetRefType returns the RefType field if it's non-nil, zero value otherwise. -func (d *DeleteEvent) GetRefType() string { - if d == nil || d.RefType == nil { - return "" - } - return *d.RefType -} - -// GetRepo returns the Repo field. -func (d *DeleteEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DeleteEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetAutoDismissedAt returns the AutoDismissedAt field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetAutoDismissedAt() Timestamp { - if d == nil || d.AutoDismissedAt == nil { - return Timestamp{} - } - return *d.AutoDismissedAt -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetDependency returns the Dependency field. -func (d *DependabotAlert) GetDependency() *Dependency { - if d == nil { - return nil - } - return d.Dependency -} - -// GetDismissedAt returns the DismissedAt field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetDismissedAt() Timestamp { - if d == nil || d.DismissedAt == nil { - return Timestamp{} - } - return *d.DismissedAt -} - -// GetDismissedBy returns the DismissedBy field. -func (d *DependabotAlert) GetDismissedBy() *User { - if d == nil { - return nil - } - return d.DismissedBy -} - -// GetDismissedComment returns the DismissedComment field if it's non-nil, zero value otherwise. 
-func (d *DependabotAlert) GetDismissedComment() string { - if d == nil || d.DismissedComment == nil { - return "" - } - return *d.DismissedComment -} - -// GetDismissedReason returns the DismissedReason field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetDismissedReason() string { - if d == nil || d.DismissedReason == nil { - return "" - } - return *d.DismissedReason -} - -// GetFixedAt returns the FixedAt field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetFixedAt() Timestamp { - if d == nil || d.FixedAt == nil { - return Timestamp{} - } - return *d.FixedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetHTMLURL() string { - if d == nil || d.HTMLURL == nil { - return "" - } - return *d.HTMLURL -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetNumber() int { - if d == nil || d.Number == nil { - return 0 - } - return *d.Number -} - -// GetRepository returns the Repository field. -func (d *DependabotAlert) GetRepository() *Repository { - if d == nil { - return nil - } - return d.Repository -} - -// GetSecurityAdvisory returns the SecurityAdvisory field. -func (d *DependabotAlert) GetSecurityAdvisory() *DependabotSecurityAdvisory { - if d == nil { - return nil - } - return d.SecurityAdvisory -} - -// GetSecurityVulnerability returns the SecurityVulnerability field. -func (d *DependabotAlert) GetSecurityVulnerability() *AdvisoryVulnerability { - if d == nil { - return nil - } - return d.SecurityVulnerability -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetURL() string { - if d == nil || d.URL == nil { - return "" - } - return *d.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (d *DependabotAlertEvent) GetAction() string { - if d == nil || d.Action == nil { - return "" - } - return *d.Action -} - -// GetAlert returns the Alert field. -func (d *DependabotAlertEvent) GetAlert() *DependabotAlert { - if d == nil { - return nil - } - return d.Alert -} - -// GetEnterprise returns the Enterprise field. -func (d *DependabotAlertEvent) GetEnterprise() *Enterprise { - if d == nil { - return nil - } - return d.Enterprise -} - -// GetInstallation returns the Installation field. -func (d *DependabotAlertEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrganization returns the Organization field. -func (d *DependabotAlertEvent) GetOrganization() *Organization { - if d == nil { - return nil - } - return d.Organization -} - -// GetRepo returns the Repo field. -func (d *DependabotAlertEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DependabotAlertEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetDismissedComment returns the DismissedComment field if it's non-nil, zero value otherwise. 
-func (d *DependabotAlertState) GetDismissedComment() string { - if d == nil || d.DismissedComment == nil { - return "" - } - return *d.DismissedComment -} - -// GetDismissedReason returns the DismissedReason field if it's non-nil, zero value otherwise. -func (d *DependabotAlertState) GetDismissedReason() string { - if d == nil || d.DismissedReason == nil { - return "" - } - return *d.DismissedReason -} - -// GetCVEID returns the CVEID field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetCVEID() string { - if d == nil || d.CVEID == nil { - return "" - } - return *d.CVEID -} - -// GetCVSS returns the CVSS field. -func (d *DependabotSecurityAdvisory) GetCVSS() *AdvisoryCVSS { - if d == nil { - return nil - } - return d.CVSS -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetGHSAID returns the GHSAID field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetGHSAID() string { - if d == nil || d.GHSAID == nil { - return "" - } - return *d.GHSAID -} - -// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetPublishedAt() Timestamp { - if d == nil || d.PublishedAt == nil { - return Timestamp{} - } - return *d.PublishedAt -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetSeverity() string { - if d == nil || d.Severity == nil { - return "" - } - return *d.Severity -} - -// GetSummary returns the Summary field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetSummary() string { - if d == nil || d.Summary == nil { - return "" - } - return *d.Summary -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetWithdrawnAt returns the WithdrawnAt field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetWithdrawnAt() Timestamp { - if d == nil || d.WithdrawnAt == nil { - return Timestamp{} - } - return *d.WithdrawnAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityUpdates) GetStatus() string { - if d == nil || d.Status == nil { - return "" - } - return *d.Status -} - -// GetManifestPath returns the ManifestPath field if it's non-nil, zero value otherwise. -func (d *Dependency) GetManifestPath() string { - if d == nil || d.ManifestPath == nil { - return "" - } - return *d.ManifestPath -} - -// GetPackage returns the Package field. -func (d *Dependency) GetPackage() *VulnerabilityPackage { - if d == nil { - return nil - } - return d.Package -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (d *Dependency) GetScope() string { - if d == nil || d.Scope == nil { - return "" - } - return *d.Scope -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (d *DeployKeyEvent) GetAction() string { - if d == nil || d.Action == nil { - return "" - } - return *d.Action -} - -// GetInstallation returns the Installation field. 
-func (d *DeployKeyEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetKey returns the Key field. -func (d *DeployKeyEvent) GetKey() *Key { - if d == nil { - return nil - } - return d.Key -} - -// GetOrganization returns the Organization field. -func (d *DeployKeyEvent) GetOrganization() *Organization { - if d == nil { - return nil - } - return d.Organization -} - -// GetRepo returns the Repo field. -func (d *DeployKeyEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DeployKeyEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (d *Deployment) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetCreator returns the Creator field. -func (d *Deployment) GetCreator() *User { - if d == nil { - return nil - } - return d.Creator -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *Deployment) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (d *Deployment) GetEnvironment() string { - if d == nil || d.Environment == nil { - return "" - } - return *d.Environment -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *Deployment) GetID() int64 { - if d == nil || d.ID == nil { - return 0 - } - return *d.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *Deployment) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (d *Deployment) GetRef() string { - if d == nil || d.Ref == nil { - return "" - } - return *d.Ref -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (d *Deployment) GetRepositoryURL() string { - if d == nil || d.RepositoryURL == nil { - return "" - } - return *d.RepositoryURL -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (d *Deployment) GetSHA() string { - if d == nil || d.SHA == nil { - return "" - } - return *d.SHA -} - -// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. -func (d *Deployment) GetStatusesURL() string { - if d == nil || d.StatusesURL == nil { - return "" - } - return *d.StatusesURL -} - -// GetTask returns the Task field if it's non-nil, zero value otherwise. -func (d *Deployment) GetTask() string { - if d == nil || d.Task == nil { - return "" - } - return *d.Task -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *Deployment) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (d *Deployment) GetURL() string { - if d == nil || d.URL == nil { - return "" - } - return *d.URL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicy) GetID() int64 { - if d == nil || d.ID == nil { - return 0 - } - return *d.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (d *DeploymentBranchPolicy) GetName() string { - if d == nil || d.Name == nil { - return "" - } - return *d.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicy) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicy) GetType() string { - if d == nil || d.Type == nil { - return "" - } - return *d.Type -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicyRequest) GetName() string { - if d == nil || d.Name == nil { - return "" - } - return *d.Name -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicyRequest) GetType() string { - if d == nil || d.Type == nil { - return "" - } - return *d.Type -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicyResponse) GetTotalCount() int { - if d == nil || d.TotalCount == nil { - return 0 - } - return *d.TotalCount -} - -// GetDeployment returns the Deployment field. -func (d *DeploymentEvent) GetDeployment() *Deployment { - if d == nil { - return nil - } - return d.Deployment -} - -// GetInstallation returns the Installation field. -func (d *DeploymentEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrg returns the Org field. -func (d *DeploymentEvent) GetOrg() *Organization { - if d == nil { - return nil - } - return d.Org -} - -// GetRepo returns the Repo field. -func (d *DeploymentEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DeploymentEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetWorkflow returns the Workflow field. -func (d *DeploymentEvent) GetWorkflow() *Workflow { - if d == nil { - return nil - } - return d.Workflow -} - -// GetWorkflowRun returns the WorkflowRun field. -func (d *DeploymentEvent) GetWorkflowRun() *WorkflowRun { - if d == nil { - return nil - } - return d.WorkflowRun -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (d *DeploymentProtectionRuleEvent) GetAction() string { - if d == nil || d.Action == nil { - return "" - } - return *d.Action -} - -// GetDeployment returns the Deployment field. -func (d *DeploymentProtectionRuleEvent) GetDeployment() *Deployment { - if d == nil { - return nil - } - return d.Deployment -} - -// GetDeploymentCallbackURL returns the DeploymentCallbackURL field if it's non-nil, zero value otherwise. -func (d *DeploymentProtectionRuleEvent) GetDeploymentCallbackURL() string { - if d == nil || d.DeploymentCallbackURL == nil { - return "" - } - return *d.DeploymentCallbackURL -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (d *DeploymentProtectionRuleEvent) GetEnvironment() string { - if d == nil || d.Environment == nil { - return "" - } - return *d.Environment -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (d *DeploymentProtectionRuleEvent) GetEvent() string { - if d == nil || d.Event == nil { - return "" - } - return *d.Event -} - -// GetInstallation returns the Installation field. 
-func (d *DeploymentProtectionRuleEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrganization returns the Organization field. -func (d *DeploymentProtectionRuleEvent) GetOrganization() *Organization { - if d == nil { - return nil - } - return d.Organization -} - -// GetRepo returns the Repo field. -func (d *DeploymentProtectionRuleEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DeploymentProtectionRuleEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetAutoMerge returns the AutoMerge field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetAutoMerge() bool { - if d == nil || d.AutoMerge == nil { - return false - } - return *d.AutoMerge -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetEnvironment() string { - if d == nil || d.Environment == nil { - return "" - } - return *d.Environment -} - -// GetProductionEnvironment returns the ProductionEnvironment field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetProductionEnvironment() bool { - if d == nil || d.ProductionEnvironment == nil { - return false - } - return *d.ProductionEnvironment -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetRef() string { - if d == nil || d.Ref == nil { - return "" - } - return *d.Ref -} - -// GetRequiredContexts returns the RequiredContexts field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetRequiredContexts() []string { - if d == nil || d.RequiredContexts == nil { - return nil - } - return *d.RequiredContexts -} - -// GetTask returns the Task field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetTask() string { - if d == nil || d.Task == nil { - return "" - } - return *d.Task -} - -// GetTransientEnvironment returns the TransientEnvironment field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetTransientEnvironment() bool { - if d == nil || d.TransientEnvironment == nil { - return false - } - return *d.TransientEnvironment -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetCreator returns the Creator field. -func (d *DeploymentStatus) GetCreator() *User { - if d == nil { - return nil - } - return d.Creator -} - -// GetDeploymentURL returns the DeploymentURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetDeploymentURL() string { - if d == nil || d.DeploymentURL == nil { - return "" - } - return *d.DeploymentURL -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. 
-func (d *DeploymentStatus) GetEnvironment() string { - if d == nil || d.Environment == nil { - return "" - } - return *d.Environment -} - -// GetEnvironmentURL returns the EnvironmentURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetEnvironmentURL() string { - if d == nil || d.EnvironmentURL == nil { - return "" - } - return *d.EnvironmentURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetID() int64 { - if d == nil || d.ID == nil { - return 0 - } - return *d.ID -} - -// GetLogURL returns the LogURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetLogURL() string { - if d == nil || d.LogURL == nil { - return "" - } - return *d.LogURL -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetRepositoryURL() string { - if d == nil || d.RepositoryURL == nil { - return "" - } - return *d.RepositoryURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetTargetURL returns the TargetURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetTargetURL() string { - if d == nil || d.TargetURL == nil { - return "" - } - return *d.TargetURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetURL() string { - if d == nil || d.URL == nil { - return "" - } - return *d.URL -} - -// GetDeployment returns the Deployment field. -func (d *DeploymentStatusEvent) GetDeployment() *Deployment { - if d == nil { - return nil - } - return d.Deployment -} - -// GetDeploymentStatus returns the DeploymentStatus field. -func (d *DeploymentStatusEvent) GetDeploymentStatus() *DeploymentStatus { - if d == nil { - return nil - } - return d.DeploymentStatus -} - -// GetInstallation returns the Installation field. -func (d *DeploymentStatusEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrg returns the Org field. -func (d *DeploymentStatusEvent) GetOrg() *Organization { - if d == nil { - return nil - } - return d.Org -} - -// GetRepo returns the Repo field. -func (d *DeploymentStatusEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DeploymentStatusEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetAutoInactive returns the AutoInactive field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetAutoInactive() bool { - if d == nil || d.AutoInactive == nil { - return false - } - return *d.AutoInactive -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. 
-func (d *DeploymentStatusRequest) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetEnvironment() string { - if d == nil || d.Environment == nil { - return "" - } - return *d.Environment -} - -// GetEnvironmentURL returns the EnvironmentURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetEnvironmentURL() string { - if d == nil || d.EnvironmentURL == nil { - return "" - } - return *d.EnvironmentURL -} - -// GetLogURL returns the LogURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetLogURL() string { - if d == nil || d.LogURL == nil { - return "" - } - return *d.LogURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. -func (d *Discussion) GetActiveLockReason() string { - if d == nil || d.ActiveLockReason == nil { - return "" - } - return *d.ActiveLockReason -} - -// GetAnswerChosenAt returns the AnswerChosenAt field if it's non-nil, zero value otherwise. -func (d *Discussion) GetAnswerChosenAt() Timestamp { - if d == nil || d.AnswerChosenAt == nil { - return Timestamp{} - } - return *d.AnswerChosenAt -} - -// GetAnswerChosenBy returns the AnswerChosenBy field if it's non-nil, zero value otherwise. -func (d *Discussion) GetAnswerChosenBy() string { - if d == nil || d.AnswerChosenBy == nil { - return "" - } - return *d.AnswerChosenBy -} - -// GetAnswerHTMLURL returns the AnswerHTMLURL field if it's non-nil, zero value otherwise. -func (d *Discussion) GetAnswerHTMLURL() string { - if d == nil || d.AnswerHTMLURL == nil { - return "" - } - return *d.AnswerHTMLURL -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (d *Discussion) GetAuthorAssociation() string { - if d == nil || d.AuthorAssociation == nil { - return "" - } - return *d.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (d *Discussion) GetBody() string { - if d == nil || d.Body == nil { - return "" - } - return *d.Body -} - -// GetComments returns the Comments field if it's non-nil, zero value otherwise. -func (d *Discussion) GetComments() int { - if d == nil || d.Comments == nil { - return 0 - } - return *d.Comments -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (d *Discussion) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetDiscussionCategory returns the DiscussionCategory field. -func (d *Discussion) GetDiscussionCategory() *DiscussionCategory { - if d == nil { - return nil - } - return d.DiscussionCategory -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (d *Discussion) GetHTMLURL() string { - if d == nil || d.HTMLURL == nil { - return "" - } - return *d.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *Discussion) GetID() int64 { - if d == nil || d.ID == nil { - return 0 - } - return *d.ID -} - -// GetLocked returns the Locked field if it's non-nil, zero value otherwise. 
-func (d *Discussion) GetLocked() bool { - if d == nil || d.Locked == nil { - return false - } - return *d.Locked -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *Discussion) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (d *Discussion) GetNumber() int { - if d == nil || d.Number == nil { - return 0 - } - return *d.Number -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (d *Discussion) GetRepositoryURL() string { - if d == nil || d.RepositoryURL == nil { - return "" - } - return *d.RepositoryURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *Discussion) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (d *Discussion) GetTitle() string { - if d == nil || d.Title == nil { - return "" - } - return *d.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *Discussion) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetUser returns the User field. -func (d *Discussion) GetUser() *User { - if d == nil { - return nil - } - return d.User -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetEmoji returns the Emoji field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetEmoji() string { - if d == nil || d.Emoji == nil { - return "" - } - return *d.Emoji -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetID() int64 { - if d == nil || d.ID == nil { - return 0 - } - return *d.ID -} - -// GetIsAnswerable returns the IsAnswerable field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetIsAnswerable() bool { - if d == nil || d.IsAnswerable == nil { - return false - } - return *d.IsAnswerable -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetName() string { - if d == nil || d.Name == nil { - return "" - } - return *d.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetRepositoryID() int64 { - if d == nil || d.RepositoryID == nil { - return 0 - } - return *d.RepositoryID -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetSlug() string { - if d == nil || d.Slug == nil { - return "" - } - return *d.Slug -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (d *DiscussionCategory) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetAuthor returns the Author field. -func (d *DiscussionComment) GetAuthor() *User { - if d == nil { - return nil - } - return d.Author -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetBody() string { - if d == nil || d.Body == nil { - return "" - } - return *d.Body -} - -// GetBodyHTML returns the BodyHTML field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetBodyHTML() string { - if d == nil || d.BodyHTML == nil { - return "" - } - return *d.BodyHTML -} - -// GetBodyVersion returns the BodyVersion field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetBodyVersion() string { - if d == nil || d.BodyVersion == nil { - return "" - } - return *d.BodyVersion -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetDiscussionURL returns the DiscussionURL field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetDiscussionURL() string { - if d == nil || d.DiscussionURL == nil { - return "" - } - return *d.DiscussionURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetHTMLURL() string { - if d == nil || d.HTMLURL == nil { - return "" - } - return *d.HTMLURL -} - -// GetLastEditedAt returns the LastEditedAt field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetLastEditedAt() Timestamp { - if d == nil || d.LastEditedAt == nil { - return Timestamp{} - } - return *d.LastEditedAt -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetNumber() int { - if d == nil || d.Number == nil { - return 0 - } - return *d.Number -} - -// GetReactions returns the Reactions field. -func (d *DiscussionComment) GetReactions() *Reactions { - if d == nil { - return nil - } - return d.Reactions -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetURL() string { - if d == nil || d.URL == nil { - return "" - } - return *d.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (d *DiscussionCommentEvent) GetAction() string { - if d == nil || d.Action == nil { - return "" - } - return *d.Action -} - -// GetComment returns the Comment field. -func (d *DiscussionCommentEvent) GetComment() *CommentDiscussion { - if d == nil { - return nil - } - return d.Comment -} - -// GetDiscussion returns the Discussion field. -func (d *DiscussionCommentEvent) GetDiscussion() *Discussion { - if d == nil { - return nil - } - return d.Discussion -} - -// GetInstallation returns the Installation field. 
-func (d *DiscussionCommentEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrg returns the Org field. -func (d *DiscussionCommentEvent) GetOrg() *Organization { - if d == nil { - return nil - } - return d.Org -} - -// GetRepo returns the Repo field. -func (d *DiscussionCommentEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DiscussionCommentEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (d *DiscussionEvent) GetAction() string { - if d == nil || d.Action == nil { - return "" - } - return *d.Action -} - -// GetDiscussion returns the Discussion field. -func (d *DiscussionEvent) GetDiscussion() *Discussion { - if d == nil { - return nil - } - return d.Discussion -} - -// GetInstallation returns the Installation field. -func (d *DiscussionEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrg returns the Org field. -func (d *DiscussionEvent) GetOrg() *Organization { - if d == nil { - return nil - } - return d.Org -} - -// GetRepo returns the Repo field. -func (d *DiscussionEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DiscussionEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetApps returns the Apps field if it's non-nil, zero value otherwise. -func (d *DismissalRestrictionsRequest) GetApps() []string { - if d == nil || d.Apps == nil { - return nil - } - return *d.Apps -} - -// GetTeams returns the Teams field if it's non-nil, zero value otherwise. -func (d *DismissalRestrictionsRequest) GetTeams() []string { - if d == nil || d.Teams == nil { - return nil - } - return *d.Teams -} - -// GetUsers returns the Users field if it's non-nil, zero value otherwise. -func (d *DismissalRestrictionsRequest) GetUsers() []string { - if d == nil || d.Users == nil { - return nil - } - return *d.Users -} - -// GetDismissalCommitID returns the DismissalCommitID field if it's non-nil, zero value otherwise. -func (d *DismissedReview) GetDismissalCommitID() string { - if d == nil || d.DismissalCommitID == nil { - return "" - } - return *d.DismissalCommitID -} - -// GetDismissalMessage returns the DismissalMessage field if it's non-nil, zero value otherwise. -func (d *DismissedReview) GetDismissalMessage() string { - if d == nil || d.DismissalMessage == nil { - return "" - } - return *d.DismissalMessage -} - -// GetReviewID returns the ReviewID field if it's non-nil, zero value otherwise. -func (d *DismissedReview) GetReviewID() int64 { - if d == nil || d.ReviewID == nil { - return 0 - } - return *d.ReviewID -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DismissedReview) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (d *DismissStaleReviewsOnPushChanges) GetFrom() bool { - if d == nil || d.From == nil { - return false - } - return *d.From -} - -// GetClientPayload returns the ClientPayload field if it's non-nil, zero value otherwise. 
-func (d *DispatchRequestOptions) GetClientPayload() json.RawMessage { - if d == nil || d.ClientPayload == nil { - return json.RawMessage{} - } - return *d.ClientPayload -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetBody() string { - if d == nil || d.Body == nil { - return "" - } - return *d.Body -} - -// GetLine returns the Line field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetLine() int { - if d == nil || d.Line == nil { - return 0 - } - return *d.Line -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetPath() string { - if d == nil || d.Path == nil { - return "" - } - return *d.Path -} - -// GetPosition returns the Position field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetPosition() int { - if d == nil || d.Position == nil { - return 0 - } - return *d.Position -} - -// GetSide returns the Side field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetSide() string { - if d == nil || d.Side == nil { - return "" - } - return *d.Side -} - -// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetStartLine() int { - if d == nil || d.StartLine == nil { - return 0 - } - return *d.StartLine -} - -// GetStartSide returns the StartSide field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetStartSide() string { - if d == nil || d.StartSide == nil { - return "" - } - return *d.StartSide -} - -// GetRef returns the Ref field. -func (e *EditBase) GetRef() *EditRef { - if e == nil { - return nil - } - return e.Ref -} - -// GetSHA returns the SHA field. -func (e *EditBase) GetSHA() *EditSHA { - if e == nil { - return nil - } - return e.SHA -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (e *EditBody) GetFrom() string { - if e == nil || e.From == nil { - return "" - } - return *e.From -} - -// GetBase returns the Base field. -func (e *EditChange) GetBase() *EditBase { - if e == nil { - return nil - } - return e.Base -} - -// GetBody returns the Body field. -func (e *EditChange) GetBody() *EditBody { - if e == nil { - return nil - } - return e.Body -} - -// GetDefaultBranch returns the DefaultBranch field. -func (e *EditChange) GetDefaultBranch() *EditDefaultBranch { - if e == nil { - return nil - } - return e.DefaultBranch -} - -// GetOwner returns the Owner field. -func (e *EditChange) GetOwner() *EditOwner { - if e == nil { - return nil - } - return e.Owner -} - -// GetRepo returns the Repo field. -func (e *EditChange) GetRepo() *EditRepo { - if e == nil { - return nil - } - return e.Repo -} - -// GetTitle returns the Title field. -func (e *EditChange) GetTitle() *EditTitle { - if e == nil { - return nil - } - return e.Title -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (e *EditDefaultBranch) GetFrom() string { - if e == nil || e.From == nil { - return "" - } - return *e.From -} - -// GetOwnerInfo returns the OwnerInfo field. -func (e *EditOwner) GetOwnerInfo() *OwnerInfo { - if e == nil { - return nil - } - return e.OwnerInfo -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (e *EditRef) GetFrom() string { - if e == nil || e.From == nil { - return "" - } - return *e.From -} - -// GetName returns the Name field. 
-func (e *EditRepo) GetName() *RepoName { - if e == nil { - return nil - } - return e.Name -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (e *EditSHA) GetFrom() string { - if e == nil || e.From == nil { - return "" - } - return *e.From -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (e *EditTitle) GetFrom() string { - if e == nil || e.From == nil { - return "" - } - return *e.From -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetAvatarURL() string { - if e == nil || e.AvatarURL == nil { - return "" - } - return *e.AvatarURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetCreatedAt() Timestamp { - if e == nil || e.CreatedAt == nil { - return Timestamp{} - } - return *e.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetDescription() string { - if e == nil || e.Description == nil { - return "" - } - return *e.Description -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetHTMLURL() string { - if e == nil || e.HTMLURL == nil { - return "" - } - return *e.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetID() int { - if e == nil || e.ID == nil { - return 0 - } - return *e.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetName() string { - if e == nil || e.Name == nil { - return "" - } - return *e.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetNodeID() string { - if e == nil || e.NodeID == nil { - return "" - } - return *e.NodeID -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetSlug() string { - if e == nil || e.Slug == nil { - return "" - } - return *e.Slug -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetUpdatedAt() Timestamp { - if e == nil || e.UpdatedAt == nil { - return Timestamp{} - } - return *e.UpdatedAt -} - -// GetWebsiteURL returns the WebsiteURL field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetWebsiteURL() string { - if e == nil || e.WebsiteURL == nil { - return "" - } - return *e.WebsiteURL -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetAllowsPublicRepositories() bool { - if e == nil || e.AllowsPublicRepositories == nil { - return false - } - return *e.AllowsPublicRepositories -} - -// GetDefault returns the Default field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetDefault() bool { - if e == nil || e.Default == nil { - return false - } - return *e.Default -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetID() int64 { - if e == nil || e.ID == nil { - return 0 - } - return *e.ID -} - -// GetInherited returns the Inherited field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetInherited() bool { - if e == nil || e.Inherited == nil { - return false - } - return *e.Inherited -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (e *EnterpriseRunnerGroup) GetName() string { - if e == nil || e.Name == nil { - return "" - } - return *e.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetRestrictedToWorkflows() bool { - if e == nil || e.RestrictedToWorkflows == nil { - return false - } - return *e.RestrictedToWorkflows -} - -// GetRunnersURL returns the RunnersURL field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetRunnersURL() string { - if e == nil || e.RunnersURL == nil { - return "" - } - return *e.RunnersURL -} - -// GetSelectedOrganizationsURL returns the SelectedOrganizationsURL field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetSelectedOrganizationsURL() string { - if e == nil || e.SelectedOrganizationsURL == nil { - return "" - } - return *e.SelectedOrganizationsURL -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetVisibility() string { - if e == nil || e.Visibility == nil { - return "" - } - return *e.Visibility -} - -// GetWorkflowRestrictionsReadOnly returns the WorkflowRestrictionsReadOnly field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetWorkflowRestrictionsReadOnly() bool { - if e == nil || e.WorkflowRestrictionsReadOnly == nil { - return false - } - return *e.WorkflowRestrictionsReadOnly -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroups) GetTotalCount() int { - if e == nil || e.TotalCount == nil { - return 0 - } - return *e.TotalCount -} - -// GetAdvancedSecurityEnabledForNewRepositories returns the AdvancedSecurityEnabledForNewRepositories field if it's non-nil, zero value otherwise. -func (e *EnterpriseSecurityAnalysisSettings) GetAdvancedSecurityEnabledForNewRepositories() bool { - if e == nil || e.AdvancedSecurityEnabledForNewRepositories == nil { - return false - } - return *e.AdvancedSecurityEnabledForNewRepositories -} - -// GetSecretScanningEnabledForNewRepositories returns the SecretScanningEnabledForNewRepositories field if it's non-nil, zero value otherwise. -func (e *EnterpriseSecurityAnalysisSettings) GetSecretScanningEnabledForNewRepositories() bool { - if e == nil || e.SecretScanningEnabledForNewRepositories == nil { - return false - } - return *e.SecretScanningEnabledForNewRepositories -} - -// GetSecretScanningPushProtectionCustomLink returns the SecretScanningPushProtectionCustomLink field if it's non-nil, zero value otherwise. -func (e *EnterpriseSecurityAnalysisSettings) GetSecretScanningPushProtectionCustomLink() string { - if e == nil || e.SecretScanningPushProtectionCustomLink == nil { - return "" - } - return *e.SecretScanningPushProtectionCustomLink -} - -// GetSecretScanningPushProtectionEnabledForNewRepositories returns the SecretScanningPushProtectionEnabledForNewRepositories field if it's non-nil, zero value otherwise. -func (e *EnterpriseSecurityAnalysisSettings) GetSecretScanningPushProtectionEnabledForNewRepositories() bool { - if e == nil || e.SecretScanningPushProtectionEnabledForNewRepositories == nil { - return false - } - return *e.SecretScanningPushProtectionEnabledForNewRepositories -} - -// GetCanAdminsBypass returns the CanAdminsBypass field if it's non-nil, zero value otherwise. 
-func (e *Environment) GetCanAdminsBypass() bool { - if e == nil || e.CanAdminsBypass == nil { - return false - } - return *e.CanAdminsBypass -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (e *Environment) GetCreatedAt() Timestamp { - if e == nil || e.CreatedAt == nil { - return Timestamp{} - } - return *e.CreatedAt -} - -// GetDeploymentBranchPolicy returns the DeploymentBranchPolicy field. -func (e *Environment) GetDeploymentBranchPolicy() *BranchPolicy { - if e == nil { - return nil - } - return e.DeploymentBranchPolicy -} - -// GetEnvironmentName returns the EnvironmentName field if it's non-nil, zero value otherwise. -func (e *Environment) GetEnvironmentName() string { - if e == nil || e.EnvironmentName == nil { - return "" - } - return *e.EnvironmentName -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (e *Environment) GetHTMLURL() string { - if e == nil || e.HTMLURL == nil { - return "" - } - return *e.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (e *Environment) GetID() int64 { - if e == nil || e.ID == nil { - return 0 - } - return *e.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (e *Environment) GetName() string { - if e == nil || e.Name == nil { - return "" - } - return *e.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (e *Environment) GetNodeID() string { - if e == nil || e.NodeID == nil { - return "" - } - return *e.NodeID -} - -// GetOwner returns the Owner field if it's non-nil, zero value otherwise. -func (e *Environment) GetOwner() string { - if e == nil || e.Owner == nil { - return "" - } - return *e.Owner -} - -// GetRepo returns the Repo field if it's non-nil, zero value otherwise. -func (e *Environment) GetRepo() string { - if e == nil || e.Repo == nil { - return "" - } - return *e.Repo -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (e *Environment) GetUpdatedAt() Timestamp { - if e == nil || e.UpdatedAt == nil { - return Timestamp{} - } - return *e.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (e *Environment) GetURL() string { - if e == nil || e.URL == nil { - return "" - } - return *e.URL -} - -// GetWaitTimer returns the WaitTimer field if it's non-nil, zero value otherwise. -func (e *Environment) GetWaitTimer() int { - if e == nil || e.WaitTimer == nil { - return 0 - } - return *e.WaitTimer -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (e *EnvResponse) GetTotalCount() int { - if e == nil || e.TotalCount == nil { - return 0 - } - return *e.TotalCount -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (e *EnvReviewers) GetID() int64 { - if e == nil || e.ID == nil { - return 0 - } - return *e.ID -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (e *EnvReviewers) GetType() string { - if e == nil || e.Type == nil { - return "" - } - return *e.Type -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (e *ErrorBlock) GetCreatedAt() Timestamp { - if e == nil || e.CreatedAt == nil { - return Timestamp{} - } - return *e.CreatedAt -} - -// GetBlock returns the Block field. 
-func (e *ErrorResponse) GetBlock() *ErrorBlock { - if e == nil { - return nil - } - return e.Block -} - -// GetActor returns the Actor field. -func (e *Event) GetActor() *User { - if e == nil { - return nil - } - return e.Actor -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (e *Event) GetCreatedAt() Timestamp { - if e == nil || e.CreatedAt == nil { - return Timestamp{} - } - return *e.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (e *Event) GetID() string { - if e == nil || e.ID == nil { - return "" - } - return *e.ID -} - -// GetOrg returns the Org field. -func (e *Event) GetOrg() *Organization { - if e == nil { - return nil - } - return e.Org -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. -func (e *Event) GetPublic() bool { - if e == nil || e.Public == nil { - return false - } - return *e.Public -} - -// GetRawPayload returns the RawPayload field if it's non-nil, zero value otherwise. -func (e *Event) GetRawPayload() json.RawMessage { - if e == nil || e.RawPayload == nil { - return json.RawMessage{} - } - return *e.RawPayload -} - -// GetRepo returns the Repo field. -func (e *Event) GetRepo() *Repository { - if e == nil { - return nil - } - return e.Repo -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (e *Event) GetType() string { - if e == nil || e.Type == nil { - return "" - } - return *e.Type -} - -// GetGroupID returns the GroupID field if it's non-nil, zero value otherwise. -func (e *ExternalGroup) GetGroupID() int64 { - if e == nil || e.GroupID == nil { - return 0 - } - return *e.GroupID -} - -// GetGroupName returns the GroupName field if it's non-nil, zero value otherwise. -func (e *ExternalGroup) GetGroupName() string { - if e == nil || e.GroupName == nil { - return "" - } - return *e.GroupName -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (e *ExternalGroup) GetUpdatedAt() Timestamp { - if e == nil || e.UpdatedAt == nil { - return Timestamp{} - } - return *e.UpdatedAt -} - -// GetMemberEmail returns the MemberEmail field if it's non-nil, zero value otherwise. -func (e *ExternalGroupMember) GetMemberEmail() string { - if e == nil || e.MemberEmail == nil { - return "" - } - return *e.MemberEmail -} - -// GetMemberID returns the MemberID field if it's non-nil, zero value otherwise. -func (e *ExternalGroupMember) GetMemberID() int64 { - if e == nil || e.MemberID == nil { - return 0 - } - return *e.MemberID -} - -// GetMemberLogin returns the MemberLogin field if it's non-nil, zero value otherwise. -func (e *ExternalGroupMember) GetMemberLogin() string { - if e == nil || e.MemberLogin == nil { - return "" - } - return *e.MemberLogin -} - -// GetMemberName returns the MemberName field if it's non-nil, zero value otherwise. -func (e *ExternalGroupMember) GetMemberName() string { - if e == nil || e.MemberName == nil { - return "" - } - return *e.MemberName -} - -// GetTeamID returns the TeamID field if it's non-nil, zero value otherwise. -func (e *ExternalGroupTeam) GetTeamID() int64 { - if e == nil || e.TeamID == nil { - return 0 - } - return *e.TeamID -} - -// GetTeamName returns the TeamName field if it's non-nil, zero value otherwise. -func (e *ExternalGroupTeam) GetTeamName() string { - if e == nil || e.TeamName == nil { - return "" - } - return *e.TeamName -} - -// GetHRef returns the HRef field if it's non-nil, zero value otherwise. 
-func (f *FeedLink) GetHRef() string { - if f == nil || f.HRef == nil { - return "" - } - return *f.HRef -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (f *FeedLink) GetType() string { - if f == nil || f.Type == nil { - return "" - } - return *f.Type -} - -// GetCurrentUser returns the CurrentUser field. -func (f *FeedLinks) GetCurrentUser() *FeedLink { - if f == nil { - return nil - } - return f.CurrentUser -} - -// GetCurrentUserActor returns the CurrentUserActor field. -func (f *FeedLinks) GetCurrentUserActor() *FeedLink { - if f == nil { - return nil - } - return f.CurrentUserActor -} - -// GetCurrentUserOrganization returns the CurrentUserOrganization field. -func (f *FeedLinks) GetCurrentUserOrganization() *FeedLink { - if f == nil { - return nil - } - return f.CurrentUserOrganization -} - -// GetCurrentUserPublic returns the CurrentUserPublic field. -func (f *FeedLinks) GetCurrentUserPublic() *FeedLink { - if f == nil { - return nil - } - return f.CurrentUserPublic -} - -// GetTimeline returns the Timeline field. -func (f *FeedLinks) GetTimeline() *FeedLink { - if f == nil { - return nil - } - return f.Timeline -} - -// GetUser returns the User field. -func (f *FeedLinks) GetUser() *FeedLink { - if f == nil { - return nil - } - return f.User -} - -// GetCurrentUserActorURL returns the CurrentUserActorURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserActorURL() string { - if f == nil || f.CurrentUserActorURL == nil { - return "" - } - return *f.CurrentUserActorURL -} - -// GetCurrentUserOrganizationURL returns the CurrentUserOrganizationURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserOrganizationURL() string { - if f == nil || f.CurrentUserOrganizationURL == nil { - return "" - } - return *f.CurrentUserOrganizationURL -} - -// GetCurrentUserPublicURL returns the CurrentUserPublicURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserPublicURL() string { - if f == nil || f.CurrentUserPublicURL == nil { - return "" - } - return *f.CurrentUserPublicURL -} - -// GetCurrentUserURL returns the CurrentUserURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserURL() string { - if f == nil || f.CurrentUserURL == nil { - return "" - } - return *f.CurrentUserURL -} - -// GetLinks returns the Links field. -func (f *Feeds) GetLinks() *FeedLinks { - if f == nil { - return nil - } - return f.Links -} - -// GetTimelineURL returns the TimelineURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetTimelineURL() string { - if f == nil || f.TimelineURL == nil { - return "" - } - return *f.TimelineURL -} - -// GetUserURL returns the UserURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetUserURL() string { - if f == nil || f.UserURL == nil { - return "" - } - return *f.UserURL -} - -// GetIdentifier returns the Identifier field if it's non-nil, zero value otherwise. -func (f *FirstPatchedVersion) GetIdentifier() string { - if f == nil || f.Identifier == nil { - return "" - } - return *f.Identifier -} - -// GetForkee returns the Forkee field. -func (f *ForkEvent) GetForkee() *Repository { - if f == nil { - return nil - } - return f.Forkee -} - -// GetInstallation returns the Installation field. -func (f *ForkEvent) GetInstallation() *Installation { - if f == nil { - return nil - } - return f.Installation -} - -// GetRepo returns the Repo field. 
-func (f *ForkEvent) GetRepo() *Repository { - if f == nil { - return nil - } - return f.Repo -} - -// GetSender returns the Sender field. -func (f *ForkEvent) GetSender() *User { - if f == nil { - return nil - } - return f.Sender -} - -// GetWorkFolder returns the WorkFolder field if it's non-nil, zero value otherwise. -func (g *GenerateJITConfigRequest) GetWorkFolder() string { - if g == nil || g.WorkFolder == nil { - return "" - } - return *g.WorkFolder -} - -// GetPreviousTagName returns the PreviousTagName field if it's non-nil, zero value otherwise. -func (g *GenerateNotesOptions) GetPreviousTagName() string { - if g == nil || g.PreviousTagName == nil { - return "" - } - return *g.PreviousTagName -} - -// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. -func (g *GenerateNotesOptions) GetTargetCommitish() string { - if g == nil || g.TargetCommitish == nil { - return "" - } - return *g.TargetCommitish -} - -// GetInclude returns the Include field if it's non-nil, zero value otherwise. -func (g *GetAuditLogOptions) GetInclude() string { - if g == nil || g.Include == nil { - return "" - } - return *g.Include -} - -// GetOrder returns the Order field if it's non-nil, zero value otherwise. -func (g *GetAuditLogOptions) GetOrder() string { - if g == nil || g.Order == nil { - return "" - } - return *g.Order -} - -// GetPhrase returns the Phrase field if it's non-nil, zero value otherwise. -func (g *GetAuditLogOptions) GetPhrase() string { - if g == nil || g.Phrase == nil { - return "" - } - return *g.Phrase -} - -// GetComments returns the Comments field if it's non-nil, zero value otherwise. -func (g *Gist) GetComments() int { - if g == nil || g.Comments == nil { - return 0 - } - return *g.Comments -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *Gist) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} - } - return *g.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (g *Gist) GetDescription() string { - if g == nil || g.Description == nil { - return "" - } - return *g.Description -} - -// GetFiles returns the Files map if it's non-nil, an empty map otherwise. -func (g *Gist) GetFiles() map[GistFilename]GistFile { - if g == nil || g.Files == nil { - return map[GistFilename]GistFile{} - } - return g.Files -} - -// GetGitPullURL returns the GitPullURL field if it's non-nil, zero value otherwise. -func (g *Gist) GetGitPullURL() string { - if g == nil || g.GitPullURL == nil { - return "" - } - return *g.GitPullURL -} - -// GetGitPushURL returns the GitPushURL field if it's non-nil, zero value otherwise. -func (g *Gist) GetGitPushURL() string { - if g == nil || g.GitPushURL == nil { - return "" - } - return *g.GitPushURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (g *Gist) GetHTMLURL() string { - if g == nil || g.HTMLURL == nil { - return "" - } - return *g.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *Gist) GetID() string { - if g == nil || g.ID == nil { - return "" - } - return *g.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (g *Gist) GetNodeID() string { - if g == nil || g.NodeID == nil { - return "" - } - return *g.NodeID -} - -// GetOwner returns the Owner field. 
-func (g *Gist) GetOwner() *User { - if g == nil { - return nil - } - return g.Owner -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. -func (g *Gist) GetPublic() bool { - if g == nil || g.Public == nil { - return false - } - return *g.Public -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (g *Gist) GetUpdatedAt() Timestamp { - if g == nil || g.UpdatedAt == nil { - return Timestamp{} - } - return *g.UpdatedAt -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (g *GistComment) GetBody() string { - if g == nil || g.Body == nil { - return "" - } - return *g.Body -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *GistComment) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} - } - return *g.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *GistComment) GetID() int64 { - if g == nil || g.ID == nil { - return 0 - } - return *g.ID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *GistComment) GetURL() string { - if g == nil || g.URL == nil { - return "" - } - return *g.URL -} - -// GetUser returns the User field. -func (g *GistComment) GetUser() *User { - if g == nil { - return nil - } - return g.User -} - -// GetChangeStatus returns the ChangeStatus field. -func (g *GistCommit) GetChangeStatus() *CommitStats { - if g == nil { - return nil - } - return g.ChangeStatus -} - -// GetCommittedAt returns the CommittedAt field if it's non-nil, zero value otherwise. -func (g *GistCommit) GetCommittedAt() Timestamp { - if g == nil || g.CommittedAt == nil { - return Timestamp{} - } - return *g.CommittedAt -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (g *GistCommit) GetNodeID() string { - if g == nil || g.NodeID == nil { - return "" - } - return *g.NodeID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *GistCommit) GetURL() string { - if g == nil || g.URL == nil { - return "" - } - return *g.URL -} - -// GetUser returns the User field. -func (g *GistCommit) GetUser() *User { - if g == nil { - return nil - } - return g.User -} - -// GetVersion returns the Version field if it's non-nil, zero value otherwise. -func (g *GistCommit) GetVersion() string { - if g == nil || g.Version == nil { - return "" - } - return *g.Version -} - -// GetContent returns the Content field if it's non-nil, zero value otherwise. -func (g *GistFile) GetContent() string { - if g == nil || g.Content == nil { - return "" - } - return *g.Content -} - -// GetFilename returns the Filename field if it's non-nil, zero value otherwise. -func (g *GistFile) GetFilename() string { - if g == nil || g.Filename == nil { - return "" - } - return *g.Filename -} - -// GetLanguage returns the Language field if it's non-nil, zero value otherwise. -func (g *GistFile) GetLanguage() string { - if g == nil || g.Language == nil { - return "" - } - return *g.Language -} - -// GetRawURL returns the RawURL field if it's non-nil, zero value otherwise. -func (g *GistFile) GetRawURL() string { - if g == nil || g.RawURL == nil { - return "" - } - return *g.RawURL -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. 
-func (g *GistFile) GetSize() int { - if g == nil || g.Size == nil { - return 0 - } - return *g.Size -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (g *GistFile) GetType() string { - if g == nil || g.Type == nil { - return "" - } - return *g.Type -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *GistFork) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} - } - return *g.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *GistFork) GetID() string { - if g == nil || g.ID == nil { - return "" - } - return *g.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (g *GistFork) GetNodeID() string { - if g == nil || g.NodeID == nil { - return "" - } - return *g.NodeID -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (g *GistFork) GetUpdatedAt() Timestamp { - if g == nil || g.UpdatedAt == nil { - return Timestamp{} - } - return *g.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *GistFork) GetURL() string { - if g == nil || g.URL == nil { - return "" - } - return *g.URL -} - -// GetUser returns the User field. -func (g *GistFork) GetUser() *User { - if g == nil { - return nil - } - return g.User -} - -// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. -func (g *GistStats) GetPrivateGists() int { - if g == nil || g.PrivateGists == nil { - return 0 - } - return *g.PrivateGists -} - -// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. -func (g *GistStats) GetPublicGists() int { - if g == nil || g.PublicGists == nil { - return 0 - } - return *g.PublicGists -} - -// GetTotalGists returns the TotalGists field if it's non-nil, zero value otherwise. -func (g *GistStats) GetTotalGists() int { - if g == nil || g.TotalGists == nil { - return 0 - } - return *g.TotalGists -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (g *GitHubAppAuthorizationEvent) GetAction() string { - if g == nil || g.Action == nil { - return "" - } - return *g.Action -} - -// GetInstallation returns the Installation field. -func (g *GitHubAppAuthorizationEvent) GetInstallation() *Installation { - if g == nil { - return nil - } - return g.Installation -} - -// GetSender returns the Sender field. -func (g *GitHubAppAuthorizationEvent) GetSender() *User { - if g == nil { - return nil - } - return g.Sender -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (g *Gitignore) GetName() string { - if g == nil || g.Name == nil { - return "" - } - return *g.Name -} - -// GetSource returns the Source field if it's non-nil, zero value otherwise. -func (g *Gitignore) GetSource() string { - if g == nil || g.Source == nil { - return "" - } - return *g.Source -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (g *GitObject) GetSHA() string { - if g == nil || g.SHA == nil { - return "" - } - return *g.SHA -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (g *GitObject) GetType() string { - if g == nil || g.Type == nil { - return "" - } - return *g.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (g *GitObject) GetURL() string { - if g == nil || g.URL == nil { - return "" - } - return *g.URL -} - -// GetGithubReviewedAt returns the GithubReviewedAt field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetGithubReviewedAt() Timestamp { - if g == nil || g.GithubReviewedAt == nil { - return Timestamp{} - } - return *g.GithubReviewedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetID() int64 { - if g == nil || g.ID == nil { - return 0 - } - return *g.ID -} - -// GetNVDPublishedAt returns the NVDPublishedAt field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetNVDPublishedAt() Timestamp { - if g == nil || g.NVDPublishedAt == nil { - return Timestamp{} - } - return *g.NVDPublishedAt -} - -// GetRepositoryAdvisoryURL returns the RepositoryAdvisoryURL field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetRepositoryAdvisoryURL() string { - if g == nil || g.RepositoryAdvisoryURL == nil { - return "" - } - return *g.RepositoryAdvisoryURL -} - -// GetSourceCodeLocation returns the SourceCodeLocation field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetSourceCodeLocation() string { - if g == nil || g.SourceCodeLocation == nil { - return "" - } - return *g.SourceCodeLocation -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetType() string { - if g == nil || g.Type == nil { - return "" - } - return *g.Type -} - -// GetFirstPatchedVersion returns the FirstPatchedVersion field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityVulnerability) GetFirstPatchedVersion() string { - if g == nil || g.FirstPatchedVersion == nil { - return "" - } - return *g.FirstPatchedVersion -} - -// GetPackage returns the Package field. -func (g *GlobalSecurityVulnerability) GetPackage() *VulnerabilityPackage { - if g == nil { - return nil - } - return g.Package -} - -// GetVulnerableVersionRange returns the VulnerableVersionRange field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityVulnerability) GetVulnerableVersionRange() string { - if g == nil || g.VulnerableVersionRange == nil { - return "" - } - return *g.VulnerableVersionRange -} - -// GetInstallation returns the Installation field. -func (g *GollumEvent) GetInstallation() *Installation { - if g == nil { - return nil - } - return g.Installation -} - -// GetOrg returns the Org field. -func (g *GollumEvent) GetOrg() *Organization { - if g == nil { - return nil - } - return g.Org -} - -// GetRepo returns the Repo field. -func (g *GollumEvent) GetRepo() *Repository { - if g == nil { - return nil - } - return g.Repo -} - -// GetSender returns the Sender field. -func (g *GollumEvent) GetSender() *User { - if g == nil { - return nil - } - return g.Sender -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (g *GPGEmail) GetEmail() string { - if g == nil || g.Email == nil { - return "" - } - return *g.Email -} - -// GetVerified returns the Verified field if it's non-nil, zero value otherwise. -func (g *GPGEmail) GetVerified() bool { - if g == nil || g.Verified == nil { - return false - } - return *g.Verified -} - -// GetCanCertify returns the CanCertify field if it's non-nil, zero value otherwise. 
-func (g *GPGKey) GetCanCertify() bool { - if g == nil || g.CanCertify == nil { - return false - } - return *g.CanCertify -} - -// GetCanEncryptComms returns the CanEncryptComms field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCanEncryptComms() bool { - if g == nil || g.CanEncryptComms == nil { - return false - } - return *g.CanEncryptComms -} - -// GetCanEncryptStorage returns the CanEncryptStorage field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCanEncryptStorage() bool { - if g == nil || g.CanEncryptStorage == nil { - return false - } - return *g.CanEncryptStorage -} - -// GetCanSign returns the CanSign field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCanSign() bool { - if g == nil || g.CanSign == nil { - return false - } - return *g.CanSign -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} - } - return *g.CreatedAt -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetExpiresAt() Timestamp { - if g == nil || g.ExpiresAt == nil { - return Timestamp{} - } - return *g.ExpiresAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetID() int64 { - if g == nil || g.ID == nil { - return 0 - } - return *g.ID -} - -// GetKeyID returns the KeyID field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetKeyID() string { - if g == nil || g.KeyID == nil { - return "" - } - return *g.KeyID -} - -// GetPrimaryKeyID returns the PrimaryKeyID field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetPrimaryKeyID() int64 { - if g == nil || g.PrimaryKeyID == nil { - return 0 - } - return *g.PrimaryKeyID -} - -// GetPublicKey returns the PublicKey field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetPublicKey() string { - if g == nil || g.PublicKey == nil { - return "" - } - return *g.PublicKey -} - -// GetRawKey returns the RawKey field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetRawKey() string { - if g == nil || g.RawKey == nil { - return "" - } - return *g.RawKey -} - -// GetApp returns the App field. -func (g *Grant) GetApp() *AuthorizationApp { - if g == nil { - return nil - } - return g.App -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *Grant) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} - } - return *g.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *Grant) GetID() int64 { - if g == nil || g.ID == nil { - return 0 - } - return *g.ID -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (g *Grant) GetUpdatedAt() Timestamp { - if g == nil || g.UpdatedAt == nil { - return Timestamp{} - } - return *g.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *Grant) GetURL() string { - if g == nil || g.URL == nil { - return "" - } - return *g.URL -} - -// GetAuthor returns the Author field. -func (h *HeadCommit) GetAuthor() *CommitAuthor { - if h == nil { - return nil - } - return h.Author -} - -// GetCommitter returns the Committer field. -func (h *HeadCommit) GetCommitter() *CommitAuthor { - if h == nil { - return nil - } - return h.Committer -} - -// GetDistinct returns the Distinct field if it's non-nil, zero value otherwise. 
-func (h *HeadCommit) GetDistinct() bool { - if h == nil || h.Distinct == nil { - return false - } - return *h.Distinct -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetID() string { - if h == nil || h.ID == nil { - return "" - } - return *h.ID -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetMessage() string { - if h == nil || h.Message == nil { - return "" - } - return *h.Message -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetSHA() string { - if h == nil || h.SHA == nil { - return "" - } - return *h.SHA -} - -// GetTimestamp returns the Timestamp field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetTimestamp() Timestamp { - if h == nil || h.Timestamp == nil { - return Timestamp{} - } - return *h.Timestamp -} - -// GetTreeID returns the TreeID field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetTreeID() string { - if h == nil || h.TreeID == nil { - return "" - } - return *h.TreeID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetURL() string { - if h == nil || h.URL == nil { - return "" - } - return *h.URL -} - -// GetActive returns the Active field if it's non-nil, zero value otherwise. -func (h *Hook) GetActive() bool { - if h == nil || h.Active == nil { - return false - } - return *h.Active -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (h *Hook) GetCreatedAt() Timestamp { - if h == nil || h.CreatedAt == nil { - return Timestamp{} - } - return *h.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (h *Hook) GetID() int64 { - if h == nil || h.ID == nil { - return 0 - } - return *h.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (h *Hook) GetName() string { - if h == nil || h.Name == nil { - return "" - } - return *h.Name -} - -// GetPingURL returns the PingURL field if it's non-nil, zero value otherwise. -func (h *Hook) GetPingURL() string { - if h == nil || h.PingURL == nil { - return "" - } - return *h.PingURL -} - -// GetTestURL returns the TestURL field if it's non-nil, zero value otherwise. -func (h *Hook) GetTestURL() string { - if h == nil || h.TestURL == nil { - return "" - } - return *h.TestURL -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (h *Hook) GetType() string { - if h == nil || h.Type == nil { - return "" - } - return *h.Type -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (h *Hook) GetUpdatedAt() Timestamp { - if h == nil || h.UpdatedAt == nil { - return Timestamp{} - } - return *h.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (h *Hook) GetURL() string { - if h == nil || h.URL == nil { - return "" - } - return *h.URL -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. -func (h *HookConfig) GetContentType() string { - if h == nil || h.ContentType == nil { - return "" - } - return *h.ContentType -} - -// GetInsecureSSL returns the InsecureSSL field if it's non-nil, zero value otherwise. -func (h *HookConfig) GetInsecureSSL() string { - if h == nil || h.InsecureSSL == nil { - return "" - } - return *h.InsecureSSL -} - -// GetSecret returns the Secret field if it's non-nil, zero value otherwise. 
-func (h *HookConfig) GetSecret() string { - if h == nil || h.Secret == nil { - return "" - } - return *h.Secret -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (h *HookConfig) GetURL() string { - if h == nil || h.URL == nil { - return "" - } - return *h.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetAction() string { - if h == nil || h.Action == nil { - return "" - } - return *h.Action -} - -// GetDeliveredAt returns the DeliveredAt field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetDeliveredAt() Timestamp { - if h == nil || h.DeliveredAt == nil { - return Timestamp{} - } - return *h.DeliveredAt -} - -// GetDuration returns the Duration field. -func (h *HookDelivery) GetDuration() *float64 { - if h == nil { - return nil - } - return h.Duration -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetEvent() string { - if h == nil || h.Event == nil { - return "" - } - return *h.Event -} - -// GetGUID returns the GUID field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetGUID() string { - if h == nil || h.GUID == nil { - return "" - } - return *h.GUID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetID() int64 { - if h == nil || h.ID == nil { - return 0 - } - return *h.ID -} - -// GetInstallationID returns the InstallationID field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetInstallationID() int64 { - if h == nil || h.InstallationID == nil { - return 0 - } - return *h.InstallationID -} - -// GetRedelivery returns the Redelivery field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetRedelivery() bool { - if h == nil || h.Redelivery == nil { - return false - } - return *h.Redelivery -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetRepositoryID() int64 { - if h == nil || h.RepositoryID == nil { - return 0 - } - return *h.RepositoryID -} - -// GetRequest returns the Request field. -func (h *HookDelivery) GetRequest() *HookRequest { - if h == nil { - return nil - } - return h.Request -} - -// GetResponse returns the Response field. -func (h *HookDelivery) GetResponse() *HookResponse { - if h == nil { - return nil - } - return h.Response -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetStatus() string { - if h == nil || h.Status == nil { - return "" - } - return *h.Status -} - -// GetStatusCode returns the StatusCode field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetStatusCode() int { - if h == nil || h.StatusCode == nil { - return 0 - } - return *h.StatusCode -} - -// GetHeaders returns the Headers map if it's non-nil, an empty map otherwise. -func (h *HookRequest) GetHeaders() map[string]string { - if h == nil || h.Headers == nil { - return map[string]string{} - } - return h.Headers -} - -// GetRawPayload returns the RawPayload field if it's non-nil, zero value otherwise. -func (h *HookRequest) GetRawPayload() json.RawMessage { - if h == nil || h.RawPayload == nil { - return json.RawMessage{} - } - return *h.RawPayload -} - -// GetHeaders returns the Headers map if it's non-nil, an empty map otherwise. 
-func (h *HookResponse) GetHeaders() map[string]string { - if h == nil || h.Headers == nil { - return map[string]string{} - } - return h.Headers -} - -// GetRawPayload returns the RawPayload field if it's non-nil, zero value otherwise. -func (h *HookResponse) GetRawPayload() json.RawMessage { - if h == nil || h.RawPayload == nil { - return json.RawMessage{} - } - return *h.RawPayload -} - -// GetActiveHooks returns the ActiveHooks field if it's non-nil, zero value otherwise. -func (h *HookStats) GetActiveHooks() int { - if h == nil || h.ActiveHooks == nil { - return 0 - } - return *h.ActiveHooks -} - -// GetInactiveHooks returns the InactiveHooks field if it's non-nil, zero value otherwise. -func (h *HookStats) GetInactiveHooks() int { - if h == nil || h.InactiveHooks == nil { - return 0 - } - return *h.InactiveHooks -} - -// GetTotalHooks returns the TotalHooks field if it's non-nil, zero value otherwise. -func (h *HookStats) GetTotalHooks() int { - if h == nil || h.TotalHooks == nil { - return 0 - } - return *h.TotalHooks -} - -// GetGroupDescription returns the GroupDescription field if it's non-nil, zero value otherwise. -func (i *IDPGroup) GetGroupDescription() string { - if i == nil || i.GroupDescription == nil { - return "" - } - return *i.GroupDescription -} - -// GetGroupID returns the GroupID field if it's non-nil, zero value otherwise. -func (i *IDPGroup) GetGroupID() string { - if i == nil || i.GroupID == nil { - return "" - } - return *i.GroupID -} - -// GetGroupName returns the GroupName field if it's non-nil, zero value otherwise. -func (i *IDPGroup) GetGroupName() string { - if i == nil || i.GroupName == nil { - return "" - } - return *i.GroupName -} - -// GetAuthorsCount returns the AuthorsCount field if it's non-nil, zero value otherwise. -func (i *Import) GetAuthorsCount() int { - if i == nil || i.AuthorsCount == nil { - return 0 - } - return *i.AuthorsCount -} - -// GetAuthorsURL returns the AuthorsURL field if it's non-nil, zero value otherwise. -func (i *Import) GetAuthorsURL() string { - if i == nil || i.AuthorsURL == nil { - return "" - } - return *i.AuthorsURL -} - -// GetCommitCount returns the CommitCount field if it's non-nil, zero value otherwise. -func (i *Import) GetCommitCount() int { - if i == nil || i.CommitCount == nil { - return 0 - } - return *i.CommitCount -} - -// GetFailedStep returns the FailedStep field if it's non-nil, zero value otherwise. -func (i *Import) GetFailedStep() string { - if i == nil || i.FailedStep == nil { - return "" - } - return *i.FailedStep -} - -// GetHasLargeFiles returns the HasLargeFiles field if it's non-nil, zero value otherwise. -func (i *Import) GetHasLargeFiles() bool { - if i == nil || i.HasLargeFiles == nil { - return false - } - return *i.HasLargeFiles -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (i *Import) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { - return "" - } - return *i.HTMLURL -} - -// GetHumanName returns the HumanName field if it's non-nil, zero value otherwise. -func (i *Import) GetHumanName() string { - if i == nil || i.HumanName == nil { - return "" - } - return *i.HumanName -} - -// GetLargeFilesCount returns the LargeFilesCount field if it's non-nil, zero value otherwise. -func (i *Import) GetLargeFilesCount() int { - if i == nil || i.LargeFilesCount == nil { - return 0 - } - return *i.LargeFilesCount -} - -// GetLargeFilesSize returns the LargeFilesSize field if it's non-nil, zero value otherwise. 
-func (i *Import) GetLargeFilesSize() int { - if i == nil || i.LargeFilesSize == nil { - return 0 - } - return *i.LargeFilesSize -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (i *Import) GetMessage() string { - if i == nil || i.Message == nil { - return "" - } - return *i.Message -} - -// GetPercent returns the Percent field if it's non-nil, zero value otherwise. -func (i *Import) GetPercent() int { - if i == nil || i.Percent == nil { - return 0 - } - return *i.Percent -} - -// GetPushPercent returns the PushPercent field if it's non-nil, zero value otherwise. -func (i *Import) GetPushPercent() int { - if i == nil || i.PushPercent == nil { - return 0 - } - return *i.PushPercent -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (i *Import) GetRepositoryURL() string { - if i == nil || i.RepositoryURL == nil { - return "" - } - return *i.RepositoryURL -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (i *Import) GetStatus() string { - if i == nil || i.Status == nil { - return "" - } - return *i.Status -} - -// GetStatusText returns the StatusText field if it's non-nil, zero value otherwise. -func (i *Import) GetStatusText() string { - if i == nil || i.StatusText == nil { - return "" - } - return *i.StatusText -} - -// GetTFVCProject returns the TFVCProject field if it's non-nil, zero value otherwise. -func (i *Import) GetTFVCProject() string { - if i == nil || i.TFVCProject == nil { - return "" - } - return *i.TFVCProject -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *Import) GetURL() string { - if i == nil || i.URL == nil { - return "" - } - return *i.URL -} - -// GetUseLFS returns the UseLFS field if it's non-nil, zero value otherwise. -func (i *Import) GetUseLFS() string { - if i == nil || i.UseLFS == nil { - return "" - } - return *i.UseLFS -} - -// GetVCS returns the VCS field if it's non-nil, zero value otherwise. -func (i *Import) GetVCS() string { - if i == nil || i.VCS == nil { - return "" - } - return *i.VCS -} - -// GetVCSPassword returns the VCSPassword field if it's non-nil, zero value otherwise. -func (i *Import) GetVCSPassword() string { - if i == nil || i.VCSPassword == nil { - return "" - } - return *i.VCSPassword -} - -// GetVCSURL returns the VCSURL field if it's non-nil, zero value otherwise. -func (i *Import) GetVCSURL() string { - if i == nil || i.VCSURL == nil { - return "" - } - return *i.VCSURL -} - -// GetVCSUsername returns the VCSUsername field if it's non-nil, zero value otherwise. -func (i *Import) GetVCSUsername() string { - if i == nil || i.VCSUsername == nil { - return "" - } - return *i.VCSUsername -} - -// GetAccessTokensURL returns the AccessTokensURL field if it's non-nil, zero value otherwise. -func (i *Installation) GetAccessTokensURL() string { - if i == nil || i.AccessTokensURL == nil { - return "" - } - return *i.AccessTokensURL -} - -// GetAccount returns the Account field. -func (i *Installation) GetAccount() *User { - if i == nil { - return nil - } - return i.Account -} - -// GetAppID returns the AppID field if it's non-nil, zero value otherwise. -func (i *Installation) GetAppID() int64 { - if i == nil || i.AppID == nil { - return 0 - } - return *i.AppID -} - -// GetAppSlug returns the AppSlug field if it's non-nil, zero value otherwise. 
-func (i *Installation) GetAppSlug() string { - if i == nil || i.AppSlug == nil { - return "" - } - return *i.AppSlug -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *Installation) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetHasMultipleSingleFiles returns the HasMultipleSingleFiles field if it's non-nil, zero value otherwise. -func (i *Installation) GetHasMultipleSingleFiles() bool { - if i == nil || i.HasMultipleSingleFiles == nil { - return false - } - return *i.HasMultipleSingleFiles -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (i *Installation) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { - return "" - } - return *i.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *Installation) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (i *Installation) GetNodeID() string { - if i == nil || i.NodeID == nil { - return "" - } - return *i.NodeID -} - -// GetPermissions returns the Permissions field. -func (i *Installation) GetPermissions() *InstallationPermissions { - if i == nil { - return nil - } - return i.Permissions -} - -// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. -func (i *Installation) GetRepositoriesURL() string { - if i == nil || i.RepositoriesURL == nil { - return "" - } - return *i.RepositoriesURL -} - -// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. -func (i *Installation) GetRepositorySelection() string { - if i == nil || i.RepositorySelection == nil { - return "" - } - return *i.RepositorySelection -} - -// GetSingleFileName returns the SingleFileName field if it's non-nil, zero value otherwise. -func (i *Installation) GetSingleFileName() string { - if i == nil || i.SingleFileName == nil { - return "" - } - return *i.SingleFileName -} - -// GetSuspendedAt returns the SuspendedAt field if it's non-nil, zero value otherwise. -func (i *Installation) GetSuspendedAt() Timestamp { - if i == nil || i.SuspendedAt == nil { - return Timestamp{} - } - return *i.SuspendedAt -} - -// GetSuspendedBy returns the SuspendedBy field. -func (i *Installation) GetSuspendedBy() *User { - if i == nil { - return nil - } - return i.SuspendedBy -} - -// GetTargetID returns the TargetID field if it's non-nil, zero value otherwise. -func (i *Installation) GetTargetID() int64 { - if i == nil || i.TargetID == nil { - return 0 - } - return *i.TargetID -} - -// GetTargetType returns the TargetType field if it's non-nil, zero value otherwise. -func (i *Installation) GetTargetType() string { - if i == nil || i.TargetType == nil { - return "" - } - return *i.TargetType -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *Installation) GetUpdatedAt() Timestamp { - if i == nil || i.UpdatedAt == nil { - return Timestamp{} - } - return *i.UpdatedAt -} - -// GetLogin returns the Login field. -func (i *InstallationChanges) GetLogin() *InstallationLoginChange { - if i == nil { - return nil - } - return i.Login -} - -// GetSlug returns the Slug field. 
-func (i *InstallationChanges) GetSlug() *InstallationSlugChange { - if i == nil { - return nil - } - return i.Slug -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *InstallationEvent) GetAction() string { - if i == nil || i.Action == nil { - return "" - } - return *i.Action -} - -// GetInstallation returns the Installation field. -func (i *InstallationEvent) GetInstallation() *Installation { - if i == nil { - return nil - } - return i.Installation -} - -// GetOrg returns the Org field. -func (i *InstallationEvent) GetOrg() *Organization { - if i == nil { - return nil - } - return i.Org -} - -// GetRequester returns the Requester field. -func (i *InstallationEvent) GetRequester() *User { - if i == nil { - return nil - } - return i.Requester -} - -// GetSender returns the Sender field. -func (i *InstallationEvent) GetSender() *User { - if i == nil { - return nil - } - return i.Sender -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (i *InstallationLoginChange) GetFrom() string { - if i == nil || i.From == nil { - return "" - } - return *i.From -} - -// GetActions returns the Actions field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetActions() string { - if i == nil || i.Actions == nil { - return "" - } - return *i.Actions -} - -// GetAdministration returns the Administration field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetAdministration() string { - if i == nil || i.Administration == nil { - return "" - } - return *i.Administration -} - -// GetBlocking returns the Blocking field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetBlocking() string { - if i == nil || i.Blocking == nil { - return "" - } - return *i.Blocking -} - -// GetChecks returns the Checks field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetChecks() string { - if i == nil || i.Checks == nil { - return "" - } - return *i.Checks -} - -// GetContentReferences returns the ContentReferences field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetContentReferences() string { - if i == nil || i.ContentReferences == nil { - return "" - } - return *i.ContentReferences -} - -// GetContents returns the Contents field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetContents() string { - if i == nil || i.Contents == nil { - return "" - } - return *i.Contents -} - -// GetDeployments returns the Deployments field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetDeployments() string { - if i == nil || i.Deployments == nil { - return "" - } - return *i.Deployments -} - -// GetEmails returns the Emails field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetEmails() string { - if i == nil || i.Emails == nil { - return "" - } - return *i.Emails -} - -// GetEnvironments returns the Environments field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetEnvironments() string { - if i == nil || i.Environments == nil { - return "" - } - return *i.Environments -} - -// GetFollowers returns the Followers field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetFollowers() string { - if i == nil || i.Followers == nil { - return "" - } - return *i.Followers -} - -// GetIssues returns the Issues field if it's non-nil, zero value otherwise. 
-func (i *InstallationPermissions) GetIssues() string { - if i == nil || i.Issues == nil { - return "" - } - return *i.Issues -} - -// GetMembers returns the Members field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetMembers() string { - if i == nil || i.Members == nil { - return "" - } - return *i.Members -} - -// GetMetadata returns the Metadata field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetMetadata() string { - if i == nil || i.Metadata == nil { - return "" - } - return *i.Metadata -} - -// GetOrganizationAdministration returns the OrganizationAdministration field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationAdministration() string { - if i == nil || i.OrganizationAdministration == nil { - return "" - } - return *i.OrganizationAdministration -} - -// GetOrganizationCustomRoles returns the OrganizationCustomRoles field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationCustomRoles() string { - if i == nil || i.OrganizationCustomRoles == nil { - return "" - } - return *i.OrganizationCustomRoles -} - -// GetOrganizationHooks returns the OrganizationHooks field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationHooks() string { - if i == nil || i.OrganizationHooks == nil { - return "" - } - return *i.OrganizationHooks -} - -// GetOrganizationPackages returns the OrganizationPackages field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationPackages() string { - if i == nil || i.OrganizationPackages == nil { - return "" - } - return *i.OrganizationPackages -} - -// GetOrganizationPlan returns the OrganizationPlan field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationPlan() string { - if i == nil || i.OrganizationPlan == nil { - return "" - } - return *i.OrganizationPlan -} - -// GetOrganizationPreReceiveHooks returns the OrganizationPreReceiveHooks field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationPreReceiveHooks() string { - if i == nil || i.OrganizationPreReceiveHooks == nil { - return "" - } - return *i.OrganizationPreReceiveHooks -} - -// GetOrganizationProjects returns the OrganizationProjects field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationProjects() string { - if i == nil || i.OrganizationProjects == nil { - return "" - } - return *i.OrganizationProjects -} - -// GetOrganizationSecrets returns the OrganizationSecrets field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationSecrets() string { - if i == nil || i.OrganizationSecrets == nil { - return "" - } - return *i.OrganizationSecrets -} - -// GetOrganizationSelfHostedRunners returns the OrganizationSelfHostedRunners field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationSelfHostedRunners() string { - if i == nil || i.OrganizationSelfHostedRunners == nil { - return "" - } - return *i.OrganizationSelfHostedRunners -} - -// GetOrganizationUserBlocking returns the OrganizationUserBlocking field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationUserBlocking() string { - if i == nil || i.OrganizationUserBlocking == nil { - return "" - } - return *i.OrganizationUserBlocking -} - -// GetPackages returns the Packages field if it's non-nil, zero value otherwise. 
-func (i *InstallationPermissions) GetPackages() string { - if i == nil || i.Packages == nil { - return "" - } - return *i.Packages -} - -// GetPages returns the Pages field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetPages() string { - if i == nil || i.Pages == nil { - return "" - } - return *i.Pages -} - -// GetPullRequests returns the PullRequests field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetPullRequests() string { - if i == nil || i.PullRequests == nil { - return "" - } - return *i.PullRequests -} - -// GetRepositoryHooks returns the RepositoryHooks field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetRepositoryHooks() string { - if i == nil || i.RepositoryHooks == nil { - return "" - } - return *i.RepositoryHooks -} - -// GetRepositoryPreReceiveHooks returns the RepositoryPreReceiveHooks field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetRepositoryPreReceiveHooks() string { - if i == nil || i.RepositoryPreReceiveHooks == nil { - return "" - } - return *i.RepositoryPreReceiveHooks -} - -// GetRepositoryProjects returns the RepositoryProjects field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetRepositoryProjects() string { - if i == nil || i.RepositoryProjects == nil { - return "" - } - return *i.RepositoryProjects -} - -// GetSecrets returns the Secrets field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetSecrets() string { - if i == nil || i.Secrets == nil { - return "" - } - return *i.Secrets -} - -// GetSecretScanningAlerts returns the SecretScanningAlerts field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetSecretScanningAlerts() string { - if i == nil || i.SecretScanningAlerts == nil { - return "" - } - return *i.SecretScanningAlerts -} - -// GetSecurityEvents returns the SecurityEvents field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetSecurityEvents() string { - if i == nil || i.SecurityEvents == nil { - return "" - } - return *i.SecurityEvents -} - -// GetSingleFile returns the SingleFile field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetSingleFile() string { - if i == nil || i.SingleFile == nil { - return "" - } - return *i.SingleFile -} - -// GetStatuses returns the Statuses field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetStatuses() string { - if i == nil || i.Statuses == nil { - return "" - } - return *i.Statuses -} - -// GetTeamDiscussions returns the TeamDiscussions field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetTeamDiscussions() string { - if i == nil || i.TeamDiscussions == nil { - return "" - } - return *i.TeamDiscussions -} - -// GetVulnerabilityAlerts returns the VulnerabilityAlerts field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetVulnerabilityAlerts() string { - if i == nil || i.VulnerabilityAlerts == nil { - return "" - } - return *i.VulnerabilityAlerts -} - -// GetWorkflows returns the Workflows field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetWorkflows() string { - if i == nil || i.Workflows == nil { - return "" - } - return *i.Workflows -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. 
-func (i *InstallationRepositoriesEvent) GetAction() string { - if i == nil || i.Action == nil { - return "" - } - return *i.Action -} - -// GetInstallation returns the Installation field. -func (i *InstallationRepositoriesEvent) GetInstallation() *Installation { - if i == nil { - return nil - } - return i.Installation -} - -// GetOrg returns the Org field. -func (i *InstallationRepositoriesEvent) GetOrg() *Organization { - if i == nil { - return nil - } - return i.Org -} - -// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. -func (i *InstallationRepositoriesEvent) GetRepositorySelection() string { - if i == nil || i.RepositorySelection == nil { - return "" - } - return *i.RepositorySelection -} - -// GetSender returns the Sender field. -func (i *InstallationRepositoriesEvent) GetSender() *User { - if i == nil { - return nil - } - return i.Sender -} - -// GetAccount returns the Account field. -func (i *InstallationRequest) GetAccount() *User { - if i == nil { - return nil - } - return i.Account -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *InstallationRequest) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *InstallationRequest) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (i *InstallationRequest) GetNodeID() string { - if i == nil || i.NodeID == nil { - return "" - } - return *i.NodeID -} - -// GetRequester returns the Requester field. -func (i *InstallationRequest) GetRequester() *User { - if i == nil { - return nil - } - return i.Requester -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (i *InstallationSlugChange) GetFrom() string { - if i == nil || i.From == nil { - return "" - } - return *i.From -} - -// GetAccount returns the Account field. -func (i *InstallationTargetEvent) GetAccount() *User { - if i == nil { - return nil - } - return i.Account -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *InstallationTargetEvent) GetAction() string { - if i == nil || i.Action == nil { - return "" - } - return *i.Action -} - -// GetChanges returns the Changes field. -func (i *InstallationTargetEvent) GetChanges() *InstallationChanges { - if i == nil { - return nil - } - return i.Changes -} - -// GetEnterprise returns the Enterprise field. -func (i *InstallationTargetEvent) GetEnterprise() *Enterprise { - if i == nil { - return nil - } - return i.Enterprise -} - -// GetInstallation returns the Installation field. -func (i *InstallationTargetEvent) GetInstallation() *Installation { - if i == nil { - return nil - } - return i.Installation -} - -// GetOrganization returns the Organization field. -func (i *InstallationTargetEvent) GetOrganization() *Organization { - if i == nil { - return nil - } - return i.Organization -} - -// GetRepository returns the Repository field. -func (i *InstallationTargetEvent) GetRepository() *Repository { - if i == nil { - return nil - } - return i.Repository -} - -// GetSender returns the Sender field. -func (i *InstallationTargetEvent) GetSender() *User { - if i == nil { - return nil - } - return i.Sender -} - -// GetTargetType returns the TargetType field if it's non-nil, zero value otherwise. 
-func (i *InstallationTargetEvent) GetTargetType() string { - if i == nil || i.TargetType == nil { - return "" - } - return *i.TargetType -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (i *InstallationToken) GetExpiresAt() Timestamp { - if i == nil || i.ExpiresAt == nil { - return Timestamp{} - } - return *i.ExpiresAt -} - -// GetPermissions returns the Permissions field. -func (i *InstallationToken) GetPermissions() *InstallationPermissions { - if i == nil { - return nil - } - return i.Permissions -} - -// GetToken returns the Token field if it's non-nil, zero value otherwise. -func (i *InstallationToken) GetToken() string { - if i == nil || i.Token == nil { - return "" - } - return *i.Token -} - -// GetPermissions returns the Permissions field. -func (i *InstallationTokenOptions) GetPermissions() *InstallationPermissions { - if i == nil { - return nil - } - return i.Permissions -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (i *InteractionRestriction) GetExpiresAt() Timestamp { - if i == nil || i.ExpiresAt == nil { - return Timestamp{} - } - return *i.ExpiresAt -} - -// GetLimit returns the Limit field if it's non-nil, zero value otherwise. -func (i *InteractionRestriction) GetLimit() string { - if i == nil || i.Limit == nil { - return "" - } - return *i.Limit -} - -// GetOrigin returns the Origin field if it's non-nil, zero value otherwise. -func (i *InteractionRestriction) GetOrigin() string { - if i == nil || i.Origin == nil { - return "" - } - return *i.Origin -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *Invitation) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (i *Invitation) GetEmail() string { - if i == nil || i.Email == nil { - return "" - } - return *i.Email -} - -// GetFailedAt returns the FailedAt field if it's non-nil, zero value otherwise. -func (i *Invitation) GetFailedAt() Timestamp { - if i == nil || i.FailedAt == nil { - return Timestamp{} - } - return *i.FailedAt -} - -// GetFailedReason returns the FailedReason field if it's non-nil, zero value otherwise. -func (i *Invitation) GetFailedReason() string { - if i == nil || i.FailedReason == nil { - return "" - } - return *i.FailedReason -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *Invitation) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetInvitationTeamURL returns the InvitationTeamURL field if it's non-nil, zero value otherwise. -func (i *Invitation) GetInvitationTeamURL() string { - if i == nil || i.InvitationTeamURL == nil { - return "" - } - return *i.InvitationTeamURL -} - -// GetInviter returns the Inviter field. -func (i *Invitation) GetInviter() *User { - if i == nil { - return nil - } - return i.Inviter -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (i *Invitation) GetLogin() string { - if i == nil || i.Login == nil { - return "" - } - return *i.Login -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (i *Invitation) GetNodeID() string { - if i == nil || i.NodeID == nil { - return "" - } - return *i.NodeID -} - -// GetRole returns the Role field if it's non-nil, zero value otherwise. 
-func (i *Invitation) GetRole() string { - if i == nil || i.Role == nil { - return "" - } - return *i.Role -} - -// GetTeamCount returns the TeamCount field if it's non-nil, zero value otherwise. -func (i *Invitation) GetTeamCount() int { - if i == nil || i.TeamCount == nil { - return 0 - } - return *i.TeamCount -} - -// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. -func (i *Issue) GetActiveLockReason() string { - if i == nil || i.ActiveLockReason == nil { - return "" - } - return *i.ActiveLockReason -} - -// GetAssignee returns the Assignee field. -func (i *Issue) GetAssignee() *User { - if i == nil { - return nil - } - return i.Assignee -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (i *Issue) GetAuthorAssociation() string { - if i == nil || i.AuthorAssociation == nil { - return "" - } - return *i.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (i *Issue) GetBody() string { - if i == nil || i.Body == nil { - return "" - } - return *i.Body -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (i *Issue) GetClosedAt() Timestamp { - if i == nil || i.ClosedAt == nil { - return Timestamp{} - } - return *i.ClosedAt -} - -// GetClosedBy returns the ClosedBy field. -func (i *Issue) GetClosedBy() *User { - if i == nil { - return nil - } - return i.ClosedBy -} - -// GetComments returns the Comments field if it's non-nil, zero value otherwise. -func (i *Issue) GetComments() int { - if i == nil || i.Comments == nil { - return 0 - } - return *i.Comments -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. -func (i *Issue) GetCommentsURL() string { - if i == nil || i.CommentsURL == nil { - return "" - } - return *i.CommentsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *Issue) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (i *Issue) GetDraft() bool { - if i == nil || i.Draft == nil { - return false - } - return *i.Draft -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (i *Issue) GetEventsURL() string { - if i == nil || i.EventsURL == nil { - return "" - } - return *i.EventsURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (i *Issue) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { - return "" - } - return *i.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *Issue) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. -func (i *Issue) GetLabelsURL() string { - if i == nil || i.LabelsURL == nil { - return "" - } - return *i.LabelsURL -} - -// GetLocked returns the Locked field if it's non-nil, zero value otherwise. -func (i *Issue) GetLocked() bool { - if i == nil || i.Locked == nil { - return false - } - return *i.Locked -} - -// GetMilestone returns the Milestone field. -func (i *Issue) GetMilestone() *Milestone { - if i == nil { - return nil - } - return i.Milestone -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
-func (i *Issue) GetNodeID() string { - if i == nil || i.NodeID == nil { - return "" - } - return *i.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (i *Issue) GetNumber() int { - if i == nil || i.Number == nil { - return 0 - } - return *i.Number -} - -// GetPullRequestLinks returns the PullRequestLinks field. -func (i *Issue) GetPullRequestLinks() *PullRequestLinks { - if i == nil { - return nil - } - return i.PullRequestLinks -} - -// GetReactions returns the Reactions field. -func (i *Issue) GetReactions() *Reactions { - if i == nil { - return nil - } - return i.Reactions -} - -// GetRepository returns the Repository field. -func (i *Issue) GetRepository() *Repository { - if i == nil { - return nil - } - return i.Repository -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (i *Issue) GetRepositoryURL() string { - if i == nil || i.RepositoryURL == nil { - return "" - } - return *i.RepositoryURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (i *Issue) GetState() string { - if i == nil || i.State == nil { - return "" - } - return *i.State -} - -// GetStateReason returns the StateReason field if it's non-nil, zero value otherwise. -func (i *Issue) GetStateReason() string { - if i == nil || i.StateReason == nil { - return "" - } - return *i.StateReason -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (i *Issue) GetTitle() string { - if i == nil || i.Title == nil { - return "" - } - return *i.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *Issue) GetUpdatedAt() Timestamp { - if i == nil || i.UpdatedAt == nil { - return Timestamp{} - } - return *i.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *Issue) GetURL() string { - if i == nil || i.URL == nil { - return "" - } - return *i.URL -} - -// GetUser returns the User field. -func (i *Issue) GetUser() *User { - if i == nil { - return nil - } - return i.User -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetAuthorAssociation() string { - if i == nil || i.AuthorAssociation == nil { - return "" - } - return *i.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetBody() string { - if i == nil || i.Body == nil { - return "" - } - return *i.Body -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { - return "" - } - return *i.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetIssueURL returns the IssueURL field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetIssueURL() string { - if i == nil || i.IssueURL == nil { - return "" - } - return *i.IssueURL -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
-func (i *IssueComment) GetNodeID() string { - if i == nil || i.NodeID == nil { - return "" - } - return *i.NodeID -} - -// GetReactions returns the Reactions field. -func (i *IssueComment) GetReactions() *Reactions { - if i == nil { - return nil - } - return i.Reactions -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetUpdatedAt() Timestamp { - if i == nil || i.UpdatedAt == nil { - return Timestamp{} - } - return *i.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetURL() string { - if i == nil || i.URL == nil { - return "" - } - return *i.URL -} - -// GetUser returns the User field. -func (i *IssueComment) GetUser() *User { - if i == nil { - return nil - } - return i.User -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *IssueCommentEvent) GetAction() string { - if i == nil || i.Action == nil { - return "" - } - return *i.Action -} - -// GetChanges returns the Changes field. -func (i *IssueCommentEvent) GetChanges() *EditChange { - if i == nil { - return nil - } - return i.Changes -} - -// GetComment returns the Comment field. -func (i *IssueCommentEvent) GetComment() *IssueComment { - if i == nil { - return nil - } - return i.Comment -} - -// GetInstallation returns the Installation field. -func (i *IssueCommentEvent) GetInstallation() *Installation { - if i == nil { - return nil - } - return i.Installation -} - -// GetIssue returns the Issue field. -func (i *IssueCommentEvent) GetIssue() *Issue { - if i == nil { - return nil - } - return i.Issue -} - -// GetOrganization returns the Organization field. -func (i *IssueCommentEvent) GetOrganization() *Organization { - if i == nil { - return nil - } - return i.Organization -} - -// GetRepo returns the Repo field. -func (i *IssueCommentEvent) GetRepo() *Repository { - if i == nil { - return nil - } - return i.Repo -} - -// GetSender returns the Sender field. -func (i *IssueCommentEvent) GetSender() *User { - if i == nil { - return nil - } - return i.Sender -} - -// GetActor returns the Actor field. -func (i *IssueEvent) GetActor() *User { - if i == nil { - return nil - } - return i.Actor -} - -// GetAssignee returns the Assignee field. -func (i *IssueEvent) GetAssignee() *User { - if i == nil { - return nil - } - return i.Assignee -} - -// GetAssigner returns the Assigner field. -func (i *IssueEvent) GetAssigner() *User { - if i == nil { - return nil - } - return i.Assigner -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetCommitID() string { - if i == nil || i.CommitID == nil { - return "" - } - return *i.CommitID -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetDismissedReview returns the DismissedReview field. -func (i *IssueEvent) GetDismissedReview() *DismissedReview { - if i == nil { - return nil - } - return i.DismissedReview -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetEvent() string { - if i == nil || i.Event == nil { - return "" - } - return *i.Event -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. 
-func (i *IssueEvent) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetIssue returns the Issue field. -func (i *IssueEvent) GetIssue() *Issue { - if i == nil { - return nil - } - return i.Issue -} - -// GetLabel returns the Label field. -func (i *IssueEvent) GetLabel() *Label { - if i == nil { - return nil - } - return i.Label -} - -// GetLockReason returns the LockReason field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetLockReason() string { - if i == nil || i.LockReason == nil { - return "" - } - return *i.LockReason -} - -// GetMilestone returns the Milestone field. -func (i *IssueEvent) GetMilestone() *Milestone { - if i == nil { - return nil - } - return i.Milestone -} - -// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. -func (i *IssueEvent) GetPerformedViaGithubApp() *App { - if i == nil { - return nil - } - return i.PerformedViaGithubApp -} - -// GetProjectCard returns the ProjectCard field. -func (i *IssueEvent) GetProjectCard() *ProjectCard { - if i == nil { - return nil - } - return i.ProjectCard -} - -// GetRename returns the Rename field. -func (i *IssueEvent) GetRename() *Rename { - if i == nil { - return nil - } - return i.Rename -} - -// GetRequestedReviewer returns the RequestedReviewer field. -func (i *IssueEvent) GetRequestedReviewer() *User { - if i == nil { - return nil - } - return i.RequestedReviewer -} - -// GetRequestedTeam returns the RequestedTeam field. -func (i *IssueEvent) GetRequestedTeam() *Team { - if i == nil { - return nil - } - return i.RequestedTeam -} - -// GetReviewRequester returns the ReviewRequester field. -func (i *IssueEvent) GetReviewRequester() *User { - if i == nil { - return nil - } - return i.ReviewRequester -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetURL() string { - if i == nil || i.URL == nil { - return "" - } - return *i.URL -} - -// GetAssignee returns the Assignee field if it's non-nil, zero value otherwise. -func (i *IssueImport) GetAssignee() string { - if i == nil || i.Assignee == nil { - return "" - } - return *i.Assignee -} - -// GetClosed returns the Closed field if it's non-nil, zero value otherwise. -func (i *IssueImport) GetClosed() bool { - if i == nil || i.Closed == nil { - return false - } - return *i.Closed -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (i *IssueImport) GetClosedAt() Timestamp { - if i == nil || i.ClosedAt == nil { - return Timestamp{} - } - return *i.ClosedAt -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *IssueImport) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetMilestone returns the Milestone field if it's non-nil, zero value otherwise. -func (i *IssueImport) GetMilestone() int { - if i == nil || i.Milestone == nil { - return 0 - } - return *i.Milestone -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *IssueImport) GetUpdatedAt() Timestamp { - if i == nil || i.UpdatedAt == nil { - return Timestamp{} - } - return *i.UpdatedAt -} - -// GetCode returns the Code field if it's non-nil, zero value otherwise. -func (i *IssueImportError) GetCode() string { - if i == nil || i.Code == nil { - return "" - } - return *i.Code -} - -// GetField returns the Field field if it's non-nil, zero value otherwise. 
-func (i *IssueImportError) GetField() string { - if i == nil || i.Field == nil { - return "" - } - return *i.Field -} - -// GetLocation returns the Location field if it's non-nil, zero value otherwise. -func (i *IssueImportError) GetLocation() string { - if i == nil || i.Location == nil { - return "" - } - return *i.Location -} - -// GetResource returns the Resource field if it's non-nil, zero value otherwise. -func (i *IssueImportError) GetResource() string { - if i == nil || i.Resource == nil { - return "" - } - return *i.Resource -} - -// GetValue returns the Value field if it's non-nil, zero value otherwise. -func (i *IssueImportError) GetValue() string { - if i == nil || i.Value == nil { - return "" - } - return *i.Value -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetDocumentationURL returns the DocumentationURL field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetDocumentationURL() string { - if i == nil || i.DocumentationURL == nil { - return "" - } - return *i.DocumentationURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetID() int { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetImportIssuesURL returns the ImportIssuesURL field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetImportIssuesURL() string { - if i == nil || i.ImportIssuesURL == nil { - return "" - } - return *i.ImportIssuesURL -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetMessage() string { - if i == nil || i.Message == nil { - return "" - } - return *i.Message -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetRepositoryURL() string { - if i == nil || i.RepositoryURL == nil { - return "" - } - return *i.RepositoryURL -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetStatus() string { - if i == nil || i.Status == nil { - return "" - } - return *i.Status -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetUpdatedAt() Timestamp { - if i == nil || i.UpdatedAt == nil { - return Timestamp{} - } - return *i.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetURL() string { - if i == nil || i.URL == nil { - return "" - } - return *i.URL -} - -// GetDirection returns the Direction field if it's non-nil, zero value otherwise. -func (i *IssueListCommentsOptions) GetDirection() string { - if i == nil || i.Direction == nil { - return "" - } - return *i.Direction -} - -// GetSince returns the Since field if it's non-nil, zero value otherwise. -func (i *IssueListCommentsOptions) GetSince() time.Time { - if i == nil || i.Since == nil { - return time.Time{} - } - return *i.Since -} - -// GetSort returns the Sort field if it's non-nil, zero value otherwise. -func (i *IssueListCommentsOptions) GetSort() string { - if i == nil || i.Sort == nil { - return "" - } - return *i.Sort -} - -// GetAssignee returns the Assignee field if it's non-nil, zero value otherwise. 
-func (i *IssueRequest) GetAssignee() string { - if i == nil || i.Assignee == nil { - return "" - } - return *i.Assignee -} - -// GetAssignees returns the Assignees field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetAssignees() []string { - if i == nil || i.Assignees == nil { - return nil - } - return *i.Assignees -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetBody() string { - if i == nil || i.Body == nil { - return "" - } - return *i.Body -} - -// GetLabels returns the Labels field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetLabels() []string { - if i == nil || i.Labels == nil { - return nil - } - return *i.Labels -} - -// GetMilestone returns the Milestone field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetMilestone() int { - if i == nil || i.Milestone == nil { - return 0 - } - return *i.Milestone -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetState() string { - if i == nil || i.State == nil { - return "" - } - return *i.State -} - -// GetStateReason returns the StateReason field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetStateReason() string { - if i == nil || i.StateReason == nil { - return "" - } - return *i.StateReason -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetTitle() string { - if i == nil || i.Title == nil { - return "" - } - return *i.Title -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *IssuesEvent) GetAction() string { - if i == nil || i.Action == nil { - return "" - } - return *i.Action -} - -// GetAssignee returns the Assignee field. -func (i *IssuesEvent) GetAssignee() *User { - if i == nil { - return nil - } - return i.Assignee -} - -// GetChanges returns the Changes field. -func (i *IssuesEvent) GetChanges() *EditChange { - if i == nil { - return nil - } - return i.Changes -} - -// GetInstallation returns the Installation field. -func (i *IssuesEvent) GetInstallation() *Installation { - if i == nil { - return nil - } - return i.Installation -} - -// GetIssue returns the Issue field. -func (i *IssuesEvent) GetIssue() *Issue { - if i == nil { - return nil - } - return i.Issue -} - -// GetLabel returns the Label field. -func (i *IssuesEvent) GetLabel() *Label { - if i == nil { - return nil - } - return i.Label -} - -// GetMilestone returns the Milestone field. -func (i *IssuesEvent) GetMilestone() *Milestone { - if i == nil { - return nil - } - return i.Milestone -} - -// GetOrg returns the Org field. -func (i *IssuesEvent) GetOrg() *Organization { - if i == nil { - return nil - } - return i.Org -} - -// GetRepo returns the Repo field. -func (i *IssuesEvent) GetRepo() *Repository { - if i == nil { - return nil - } - return i.Repo -} - -// GetSender returns the Sender field. -func (i *IssuesEvent) GetSender() *User { - if i == nil { - return nil - } - return i.Sender -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (i *IssuesSearchResult) GetIncompleteResults() bool { - if i == nil || i.IncompleteResults == nil { - return false - } - return *i.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. 
-func (i *IssuesSearchResult) GetTotal() int { - if i == nil || i.Total == nil { - return 0 - } - return *i.Total -} - -// GetClosedIssues returns the ClosedIssues field if it's non-nil, zero value otherwise. -func (i *IssueStats) GetClosedIssues() int { - if i == nil || i.ClosedIssues == nil { - return 0 - } - return *i.ClosedIssues -} - -// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. -func (i *IssueStats) GetOpenIssues() int { - if i == nil || i.OpenIssues == nil { - return 0 - } - return *i.OpenIssues -} - -// GetTotalIssues returns the TotalIssues field if it's non-nil, zero value otherwise. -func (i *IssueStats) GetTotalIssues() int { - if i == nil || i.TotalIssues == nil { - return 0 - } - return *i.TotalIssues -} - -// GetEncodedJITConfig returns the EncodedJITConfig field if it's non-nil, zero value otherwise. -func (j *JITRunnerConfig) GetEncodedJITConfig() string { - if j == nil || j.EncodedJITConfig == nil { - return "" - } - return *j.EncodedJITConfig -} - -// GetRunner returns the Runner field. -func (j *JITRunnerConfig) GetRunner() *Runner { - if j == nil { - return nil - } - return j.Runner -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (j *Jobs) GetTotalCount() int { - if j == nil || j.TotalCount == nil { - return 0 - } - return *j.TotalCount -} - -// GetAddedBy returns the AddedBy field if it's non-nil, zero value otherwise. -func (k *Key) GetAddedBy() string { - if k == nil || k.AddedBy == nil { - return "" - } - return *k.AddedBy -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (k *Key) GetCreatedAt() Timestamp { - if k == nil || k.CreatedAt == nil { - return Timestamp{} - } - return *k.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (k *Key) GetID() int64 { - if k == nil || k.ID == nil { - return 0 - } - return *k.ID -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (k *Key) GetKey() string { - if k == nil || k.Key == nil { - return "" - } - return *k.Key -} - -// GetLastUsed returns the LastUsed field if it's non-nil, zero value otherwise. -func (k *Key) GetLastUsed() Timestamp { - if k == nil || k.LastUsed == nil { - return Timestamp{} - } - return *k.LastUsed -} - -// GetReadOnly returns the ReadOnly field if it's non-nil, zero value otherwise. -func (k *Key) GetReadOnly() bool { - if k == nil || k.ReadOnly == nil { - return false - } - return *k.ReadOnly -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (k *Key) GetTitle() string { - if k == nil || k.Title == nil { - return "" - } - return *k.Title -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (k *Key) GetURL() string { - if k == nil || k.URL == nil { - return "" - } - return *k.URL -} - -// GetVerified returns the Verified field if it's non-nil, zero value otherwise. -func (k *Key) GetVerified() bool { - if k == nil || k.Verified == nil { - return false - } - return *k.Verified -} - -// GetColor returns the Color field if it's non-nil, zero value otherwise. -func (l *Label) GetColor() string { - if l == nil || l.Color == nil { - return "" - } - return *l.Color -} - -// GetDefault returns the Default field if it's non-nil, zero value otherwise. 
-func (l *Label) GetDefault() bool { - if l == nil || l.Default == nil { - return false - } - return *l.Default -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (l *Label) GetDescription() string { - if l == nil || l.Description == nil { - return "" - } - return *l.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (l *Label) GetID() int64 { - if l == nil || l.ID == nil { - return 0 - } - return *l.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (l *Label) GetName() string { - if l == nil || l.Name == nil { - return "" - } - return *l.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (l *Label) GetNodeID() string { - if l == nil || l.NodeID == nil { - return "" - } - return *l.NodeID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (l *Label) GetURL() string { - if l == nil || l.URL == nil { - return "" - } - return *l.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (l *LabelEvent) GetAction() string { - if l == nil || l.Action == nil { - return "" - } - return *l.Action -} - -// GetChanges returns the Changes field. -func (l *LabelEvent) GetChanges() *EditChange { - if l == nil { - return nil - } - return l.Changes -} - -// GetInstallation returns the Installation field. -func (l *LabelEvent) GetInstallation() *Installation { - if l == nil { - return nil - } - return l.Installation -} - -// GetLabel returns the Label field. -func (l *LabelEvent) GetLabel() *Label { - if l == nil { - return nil - } - return l.Label -} - -// GetOrg returns the Org field. -func (l *LabelEvent) GetOrg() *Organization { - if l == nil { - return nil - } - return l.Org -} - -// GetRepo returns the Repo field. -func (l *LabelEvent) GetRepo() *Repository { - if l == nil { - return nil - } - return l.Repo -} - -// GetSender returns the Sender field. -func (l *LabelEvent) GetSender() *User { - if l == nil { - return nil - } - return l.Sender -} - -// GetColor returns the Color field if it's non-nil, zero value otherwise. -func (l *LabelResult) GetColor() string { - if l == nil || l.Color == nil { - return "" - } - return *l.Color -} - -// GetDefault returns the Default field if it's non-nil, zero value otherwise. -func (l *LabelResult) GetDefault() bool { - if l == nil || l.Default == nil { - return false - } - return *l.Default -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (l *LabelResult) GetDescription() string { - if l == nil || l.Description == nil { - return "" - } - return *l.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (l *LabelResult) GetID() int64 { - if l == nil || l.ID == nil { - return 0 - } - return *l.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (l *LabelResult) GetName() string { - if l == nil || l.Name == nil { - return "" - } - return *l.Name -} - -// GetScore returns the Score field. -func (l *LabelResult) GetScore() *float64 { - if l == nil { - return nil - } - return l.Score -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (l *LabelResult) GetURL() string { - if l == nil || l.URL == nil { - return "" - } - return *l.URL -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. 
-func (l *LabelsSearchResult) GetIncompleteResults() bool { - if l == nil || l.IncompleteResults == nil { - return false - } - return *l.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (l *LabelsSearchResult) GetTotal() int { - if l == nil || l.Total == nil { - return 0 - } - return *l.Total -} - -// GetOID returns the OID field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetOID() string { - if l == nil || l.OID == nil { - return "" - } - return *l.OID -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetPath() string { - if l == nil || l.Path == nil { - return "" - } - return *l.Path -} - -// GetRefName returns the RefName field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetRefName() string { - if l == nil || l.RefName == nil { - return "" - } - return *l.RefName -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetSize() int { - if l == nil || l.Size == nil { - return 0 - } - return *l.Size -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (l *License) GetBody() string { - if l == nil || l.Body == nil { - return "" - } - return *l.Body -} - -// GetConditions returns the Conditions field if it's non-nil, zero value otherwise. -func (l *License) GetConditions() []string { - if l == nil || l.Conditions == nil { - return nil - } - return *l.Conditions -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (l *License) GetDescription() string { - if l == nil || l.Description == nil { - return "" - } - return *l.Description -} - -// GetFeatured returns the Featured field if it's non-nil, zero value otherwise. -func (l *License) GetFeatured() bool { - if l == nil || l.Featured == nil { - return false - } - return *l.Featured -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (l *License) GetHTMLURL() string { - if l == nil || l.HTMLURL == nil { - return "" - } - return *l.HTMLURL -} - -// GetImplementation returns the Implementation field if it's non-nil, zero value otherwise. -func (l *License) GetImplementation() string { - if l == nil || l.Implementation == nil { - return "" - } - return *l.Implementation -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (l *License) GetKey() string { - if l == nil || l.Key == nil { - return "" - } - return *l.Key -} - -// GetLimitations returns the Limitations field if it's non-nil, zero value otherwise. -func (l *License) GetLimitations() []string { - if l == nil || l.Limitations == nil { - return nil - } - return *l.Limitations -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (l *License) GetName() string { - if l == nil || l.Name == nil { - return "" - } - return *l.Name -} - -// GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. -func (l *License) GetPermissions() []string { - if l == nil || l.Permissions == nil { - return nil - } - return *l.Permissions -} - -// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. -func (l *License) GetSPDXID() string { - if l == nil || l.SPDXID == nil { - return "" - } - return *l.SPDXID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (l *License) GetURL() string { - if l == nil || l.URL == nil { - return "" - } - return *l.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (l *LinearHistoryRequirementEnforcementLevelChanges) GetFrom() string { - if l == nil || l.From == nil { - return "" - } - return *l.From -} - -// GetDirection returns the Direction field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetDirection() string { - if l == nil || l.Direction == nil { - return "" - } - return *l.Direction -} - -// GetEcosystem returns the Ecosystem field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetEcosystem() string { - if l == nil || l.Ecosystem == nil { - return "" - } - return *l.Ecosystem -} - -// GetPackage returns the Package field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetPackage() string { - if l == nil || l.Package == nil { - return "" - } - return *l.Package -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetScope() string { - if l == nil || l.Scope == nil { - return "" - } - return *l.Scope -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetSeverity() string { - if l == nil || l.Severity == nil { - return "" - } - return *l.Severity -} - -// GetSort returns the Sort field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetSort() string { - if l == nil || l.Sort == nil { - return "" - } - return *l.Sort -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetState() string { - if l == nil || l.State == nil { - return "" - } - return *l.State -} - -// GetAppID returns the AppID field if it's non-nil, zero value otherwise. -func (l *ListCheckRunsOptions) GetAppID() int64 { - if l == nil || l.AppID == nil { - return 0 - } - return *l.AppID -} - -// GetCheckName returns the CheckName field if it's non-nil, zero value otherwise. -func (l *ListCheckRunsOptions) GetCheckName() string { - if l == nil || l.CheckName == nil { - return "" - } - return *l.CheckName -} - -// GetFilter returns the Filter field if it's non-nil, zero value otherwise. -func (l *ListCheckRunsOptions) GetFilter() string { - if l == nil || l.Filter == nil { - return "" - } - return *l.Filter -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (l *ListCheckRunsOptions) GetStatus() string { - if l == nil || l.Status == nil { - return "" - } - return *l.Status -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (l *ListCheckRunsResults) GetTotal() int { - if l == nil || l.Total == nil { - return 0 - } - return *l.Total -} - -// GetAppID returns the AppID field if it's non-nil, zero value otherwise. -func (l *ListCheckSuiteOptions) GetAppID() int { - if l == nil || l.AppID == nil { - return 0 - } - return *l.AppID -} - -// GetCheckName returns the CheckName field if it's non-nil, zero value otherwise. -func (l *ListCheckSuiteOptions) GetCheckName() string { - if l == nil || l.CheckName == nil { - return "" - } - return *l.CheckName -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (l *ListCheckSuiteResults) GetTotal() int { - if l == nil || l.Total == nil { - return 0 - } - return *l.Total -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. 
-func (l *ListCodespaces) GetTotalCount() int { - if l == nil || l.TotalCount == nil { - return 0 - } - return *l.TotalCount -} - -// GetAffiliation returns the Affiliation field if it's non-nil, zero value otherwise. -func (l *ListCollaboratorOptions) GetAffiliation() string { - if l == nil || l.Affiliation == nil { - return "" - } - return *l.Affiliation -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. -func (l *ListExternalGroupsOptions) GetDisplayName() string { - if l == nil || l.DisplayName == nil { - return "" - } - return *l.DisplayName -} - -// GetAffects returns the Affects field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetAffects() string { - if l == nil || l.Affects == nil { - return "" - } - return *l.Affects -} - -// GetCVEID returns the CVEID field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetCVEID() string { - if l == nil || l.CVEID == nil { - return "" - } - return *l.CVEID -} - -// GetEcosystem returns the Ecosystem field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetEcosystem() string { - if l == nil || l.Ecosystem == nil { - return "" - } - return *l.Ecosystem -} - -// GetGHSAID returns the GHSAID field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetGHSAID() string { - if l == nil || l.GHSAID == nil { - return "" - } - return *l.GHSAID -} - -// GetIsWithdrawn returns the IsWithdrawn field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetIsWithdrawn() bool { - if l == nil || l.IsWithdrawn == nil { - return false - } - return *l.IsWithdrawn -} - -// GetModified returns the Modified field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetModified() string { - if l == nil || l.Modified == nil { - return "" - } - return *l.Modified -} - -// GetPublished returns the Published field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetPublished() string { - if l == nil || l.Published == nil { - return "" - } - return *l.Published -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetSeverity() string { - if l == nil || l.Severity == nil { - return "" - } - return *l.Severity -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetType() string { - if l == nil || l.Type == nil { - return "" - } - return *l.Type -} - -// GetUpdated returns the Updated field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetUpdated() string { - if l == nil || l.Updated == nil { - return "" - } - return *l.Updated -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (l *ListOrganizations) GetTotalCount() int { - if l == nil || l.TotalCount == nil { - return 0 - } - return *l.TotalCount -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (l *ListRepositories) GetTotalCount() int { - if l == nil || l.TotalCount == nil { - return 0 - } - return *l.TotalCount -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. 
-func (l *ListSCIMProvisionedIdentitiesOptions) GetCount() int { - if l == nil || l.Count == nil { - return 0 - } - return *l.Count -} - -// GetFilter returns the Filter field if it's non-nil, zero value otherwise. -func (l *ListSCIMProvisionedIdentitiesOptions) GetFilter() string { - if l == nil || l.Filter == nil { - return "" - } - return *l.Filter -} - -// GetStartIndex returns the StartIndex field if it's non-nil, zero value otherwise. -func (l *ListSCIMProvisionedIdentitiesOptions) GetStartIndex() int { - if l == nil || l.StartIndex == nil { - return 0 - } - return *l.StartIndex -} - -// GetEndColumn returns the EndColumn field if it's non-nil, zero value otherwise. -func (l *Location) GetEndColumn() int { - if l == nil || l.EndColumn == nil { - return 0 - } - return *l.EndColumn -} - -// GetEndLine returns the EndLine field if it's non-nil, zero value otherwise. -func (l *Location) GetEndLine() int { - if l == nil || l.EndLine == nil { - return 0 - } - return *l.EndLine -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (l *Location) GetPath() string { - if l == nil || l.Path == nil { - return "" - } - return *l.Path -} - -// GetStartColumn returns the StartColumn field if it's non-nil, zero value otherwise. -func (l *Location) GetStartColumn() int { - if l == nil || l.StartColumn == nil { - return 0 - } - return *l.StartColumn -} - -// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. -func (l *Location) GetStartLine() int { - if l == nil || l.StartLine == nil { - return 0 - } - return *l.StartLine -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (l *LockBranch) GetEnabled() bool { - if l == nil || l.Enabled == nil { - return false - } - return *l.Enabled -} - -// GetEffectiveDate returns the EffectiveDate field if it's non-nil, zero value otherwise. -func (m *MarketplacePendingChange) GetEffectiveDate() Timestamp { - if m == nil || m.EffectiveDate == nil { - return Timestamp{} - } - return *m.EffectiveDate -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *MarketplacePendingChange) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetPlan returns the Plan field. -func (m *MarketplacePendingChange) GetPlan() *MarketplacePlan { - if m == nil { - return nil - } - return m.Plan -} - -// GetUnitCount returns the UnitCount field if it's non-nil, zero value otherwise. -func (m *MarketplacePendingChange) GetUnitCount() int { - if m == nil || m.UnitCount == nil { - return 0 - } - return *m.UnitCount -} - -// GetAccountsURL returns the AccountsURL field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetAccountsURL() string { - if m == nil || m.AccountsURL == nil { - return "" - } - return *m.AccountsURL -} - -// GetBullets returns the Bullets field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetBullets() []string { - if m == nil || m.Bullets == nil { - return nil - } - return *m.Bullets -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetDescription() string { - if m == nil || m.Description == nil { - return "" - } - return *m.Description -} - -// GetHasFreeTrial returns the HasFreeTrial field if it's non-nil, zero value otherwise. 
-func (m *MarketplacePlan) GetHasFreeTrial() bool { - if m == nil || m.HasFreeTrial == nil { - return false - } - return *m.HasFreeTrial -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetMonthlyPriceInCents returns the MonthlyPriceInCents field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetMonthlyPriceInCents() int { - if m == nil || m.MonthlyPriceInCents == nil { - return 0 - } - return *m.MonthlyPriceInCents -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetName() string { - if m == nil || m.Name == nil { - return "" - } - return *m.Name -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetNumber() int { - if m == nil || m.Number == nil { - return 0 - } - return *m.Number -} - -// GetPriceModel returns the PriceModel field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetPriceModel() string { - if m == nil || m.PriceModel == nil { - return "" - } - return *m.PriceModel -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetState() string { - if m == nil || m.State == nil { - return "" - } - return *m.State -} - -// GetUnitName returns the UnitName field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetUnitName() string { - if m == nil || m.UnitName == nil { - return "" - } - return *m.UnitName -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetYearlyPriceInCents returns the YearlyPriceInCents field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetYearlyPriceInCents() int { - if m == nil || m.YearlyPriceInCents == nil { - return 0 - } - return *m.YearlyPriceInCents -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *MarketplacePlanAccount) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (m *MarketplacePlanAccount) GetLogin() string { - if m == nil || m.Login == nil { - return "" - } - return *m.Login -} - -// GetMarketplacePendingChange returns the MarketplacePendingChange field. -func (m *MarketplacePlanAccount) GetMarketplacePendingChange() *MarketplacePendingChange { - if m == nil { - return nil - } - return m.MarketplacePendingChange -} - -// GetMarketplacePurchase returns the MarketplacePurchase field. -func (m *MarketplacePlanAccount) GetMarketplacePurchase() *MarketplacePurchase { - if m == nil { - return nil - } - return m.MarketplacePurchase -} - -// GetOrganizationBillingEmail returns the OrganizationBillingEmail field if it's non-nil, zero value otherwise. -func (m *MarketplacePlanAccount) GetOrganizationBillingEmail() string { - if m == nil || m.OrganizationBillingEmail == nil { - return "" - } - return *m.OrganizationBillingEmail -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (m *MarketplacePlanAccount) GetType() string { - if m == nil || m.Type == nil { - return "" - } - return *m.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (m *MarketplacePlanAccount) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetAccount returns the Account field. -func (m *MarketplacePurchase) GetAccount() *MarketplacePurchaseAccount { - if m == nil { - return nil - } - return m.Account -} - -// GetBillingCycle returns the BillingCycle field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetBillingCycle() string { - if m == nil || m.BillingCycle == nil { - return "" - } - return *m.BillingCycle -} - -// GetFreeTrialEndsOn returns the FreeTrialEndsOn field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetFreeTrialEndsOn() Timestamp { - if m == nil || m.FreeTrialEndsOn == nil { - return Timestamp{} - } - return *m.FreeTrialEndsOn -} - -// GetNextBillingDate returns the NextBillingDate field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetNextBillingDate() Timestamp { - if m == nil || m.NextBillingDate == nil { - return Timestamp{} - } - return *m.NextBillingDate -} - -// GetOnFreeTrial returns the OnFreeTrial field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetOnFreeTrial() bool { - if m == nil || m.OnFreeTrial == nil { - return false - } - return *m.OnFreeTrial -} - -// GetPlan returns the Plan field. -func (m *MarketplacePurchase) GetPlan() *MarketplacePlan { - if m == nil { - return nil - } - return m.Plan -} - -// GetUnitCount returns the UnitCount field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetUnitCount() int { - if m == nil || m.UnitCount == nil { - return 0 - } - return *m.UnitCount -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetUpdatedAt() Timestamp { - if m == nil || m.UpdatedAt == nil { - return Timestamp{} - } - return *m.UpdatedAt -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetEmail() string { - if m == nil || m.Email == nil { - return "" - } - return *m.Email -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetLogin() string { - if m == nil || m.Login == nil { - return "" - } - return *m.Login -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetNodeID() string { - if m == nil || m.NodeID == nil { - return "" - } - return *m.NodeID -} - -// GetOrganizationBillingEmail returns the OrganizationBillingEmail field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetOrganizationBillingEmail() string { - if m == nil || m.OrganizationBillingEmail == nil { - return "" - } - return *m.OrganizationBillingEmail -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetType() string { - if m == nil || m.Type == nil { - return "" - } - return *m.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. 
-func (m *MarketplacePurchaseEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetEffectiveDate returns the EffectiveDate field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseEvent) GetEffectiveDate() Timestamp { - if m == nil || m.EffectiveDate == nil { - return Timestamp{} - } - return *m.EffectiveDate -} - -// GetInstallation returns the Installation field. -func (m *MarketplacePurchaseEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetMarketplacePurchase returns the MarketplacePurchase field. -func (m *MarketplacePurchaseEvent) GetMarketplacePurchase() *MarketplacePurchase { - if m == nil { - return nil - } - return m.MarketplacePurchase -} - -// GetOrg returns the Org field. -func (m *MarketplacePurchaseEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetPreviousMarketplacePurchase returns the PreviousMarketplacePurchase field. -func (m *MarketplacePurchaseEvent) GetPreviousMarketplacePurchase() *MarketplacePurchase { - if m == nil { - return nil - } - return m.PreviousMarketplacePurchase -} - -// GetSender returns the Sender field. -func (m *MarketplacePurchaseEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetText returns the Text field if it's non-nil, zero value otherwise. -func (m *Match) GetText() string { - if m == nil || m.Text == nil { - return "" - } - return *m.Text -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MemberEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetInstallation returns the Installation field. -func (m *MemberEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetMember returns the Member field. -func (m *MemberEvent) GetMember() *User { - if m == nil { - return nil - } - return m.Member -} - -// GetOrg returns the Org field. -func (m *MemberEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetRepo returns the Repo field. -func (m *MemberEvent) GetRepo() *Repository { - if m == nil { - return nil - } - return m.Repo -} - -// GetSender returns the Sender field. -func (m *MemberEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetOrganization returns the Organization field. -func (m *Membership) GetOrganization() *Organization { - if m == nil { - return nil - } - return m.Organization -} - -// GetOrganizationURL returns the OrganizationURL field if it's non-nil, zero value otherwise. -func (m *Membership) GetOrganizationURL() string { - if m == nil || m.OrganizationURL == nil { - return "" - } - return *m.OrganizationURL -} - -// GetRole returns the Role field if it's non-nil, zero value otherwise. -func (m *Membership) GetRole() string { - if m == nil || m.Role == nil { - return "" - } - return *m.Role -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *Membership) GetState() string { - if m == nil || m.State == nil { - return "" - } - return *m.State -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *Membership) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetUser returns the User field. 
-func (m *Membership) GetUser() *User { - if m == nil { - return nil - } - return m.User -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MembershipEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetInstallation returns the Installation field. -func (m *MembershipEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetMember returns the Member field. -func (m *MembershipEvent) GetMember() *User { - if m == nil { - return nil - } - return m.Member -} - -// GetOrg returns the Org field. -func (m *MembershipEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (m *MembershipEvent) GetScope() string { - if m == nil || m.Scope == nil { - return "" - } - return *m.Scope -} - -// GetSender returns the Sender field. -func (m *MembershipEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetTeam returns the Team field. -func (m *MembershipEvent) GetTeam() *Team { - if m == nil { - return nil - } - return m.Team -} - -// GetBaseRef returns the BaseRef field if it's non-nil, zero value otherwise. -func (m *MergeGroup) GetBaseRef() string { - if m == nil || m.BaseRef == nil { - return "" - } - return *m.BaseRef -} - -// GetBaseSHA returns the BaseSHA field if it's non-nil, zero value otherwise. -func (m *MergeGroup) GetBaseSHA() string { - if m == nil || m.BaseSHA == nil { - return "" - } - return *m.BaseSHA -} - -// GetHeadCommit returns the HeadCommit field. -func (m *MergeGroup) GetHeadCommit() *Commit { - if m == nil { - return nil - } - return m.HeadCommit -} - -// GetHeadRef returns the HeadRef field if it's non-nil, zero value otherwise. -func (m *MergeGroup) GetHeadRef() string { - if m == nil || m.HeadRef == nil { - return "" - } - return *m.HeadRef -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (m *MergeGroup) GetHeadSHA() string { - if m == nil || m.HeadSHA == nil { - return "" - } - return *m.HeadSHA -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MergeGroupEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetInstallation returns the Installation field. -func (m *MergeGroupEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetMergeGroup returns the MergeGroup field. -func (m *MergeGroupEvent) GetMergeGroup() *MergeGroup { - if m == nil { - return nil - } - return m.MergeGroup -} - -// GetOrg returns the Org field. -func (m *MergeGroupEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetRepo returns the Repo field. -func (m *MergeGroupEvent) GetRepo() *Repository { - if m == nil { - return nil - } - return m.Repo -} - -// GetSender returns the Sender field. -func (m *MergeGroupEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetText returns the Text field if it's non-nil, zero value otherwise. -func (m *Message) GetText() string { - if m == nil || m.Text == nil { - return "" - } - return *m.Text -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. 
-func (m *MetaEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetHook returns the Hook field. -func (m *MetaEvent) GetHook() *Hook { - if m == nil { - return nil - } - return m.Hook -} - -// GetHookID returns the HookID field if it's non-nil, zero value otherwise. -func (m *MetaEvent) GetHookID() int64 { - if m == nil || m.HookID == nil { - return 0 - } - return *m.HookID -} - -// GetInstallation returns the Installation field. -func (m *MetaEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetOrg returns the Org field. -func (m *MetaEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetRepo returns the Repo field. -func (m *MetaEvent) GetRepo() *Repository { - if m == nil { - return nil - } - return m.Repo -} - -// GetSender returns the Sender field. -func (m *MetaEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (m *Metric) GetHTMLURL() string { - if m == nil || m.HTMLURL == nil { - return "" - } - return *m.HTMLURL -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (m *Metric) GetKey() string { - if m == nil || m.Key == nil { - return "" - } - return *m.Key -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (m *Metric) GetName() string { - if m == nil || m.Name == nil { - return "" - } - return *m.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (m *Metric) GetNodeID() string { - if m == nil || m.NodeID == nil { - return "" - } - return *m.NodeID -} - -// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. -func (m *Metric) GetSPDXID() string { - if m == nil || m.SPDXID == nil { - return "" - } - return *m.SPDXID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *Metric) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (m *Migration) GetCreatedAt() string { - if m == nil || m.CreatedAt == nil { - return "" - } - return *m.CreatedAt -} - -// GetExcludeAttachments returns the ExcludeAttachments field if it's non-nil, zero value otherwise. -func (m *Migration) GetExcludeAttachments() bool { - if m == nil || m.ExcludeAttachments == nil { - return false - } - return *m.ExcludeAttachments -} - -// GetGUID returns the GUID field if it's non-nil, zero value otherwise. -func (m *Migration) GetGUID() string { - if m == nil || m.GUID == nil { - return "" - } - return *m.GUID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *Migration) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetLockRepositories returns the LockRepositories field if it's non-nil, zero value otherwise. -func (m *Migration) GetLockRepositories() bool { - if m == nil || m.LockRepositories == nil { - return false - } - return *m.LockRepositories -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *Migration) GetState() string { - if m == nil || m.State == nil { - return "" - } - return *m.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (m *Migration) GetUpdatedAt() string { - if m == nil || m.UpdatedAt == nil { - return "" - } - return *m.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *Migration) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (m *Milestone) GetClosedAt() Timestamp { - if m == nil || m.ClosedAt == nil { - return Timestamp{} - } - return *m.ClosedAt -} - -// GetClosedIssues returns the ClosedIssues field if it's non-nil, zero value otherwise. -func (m *Milestone) GetClosedIssues() int { - if m == nil || m.ClosedIssues == nil { - return 0 - } - return *m.ClosedIssues -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (m *Milestone) GetCreatedAt() Timestamp { - if m == nil || m.CreatedAt == nil { - return Timestamp{} - } - return *m.CreatedAt -} - -// GetCreator returns the Creator field. -func (m *Milestone) GetCreator() *User { - if m == nil { - return nil - } - return m.Creator -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (m *Milestone) GetDescription() string { - if m == nil || m.Description == nil { - return "" - } - return *m.Description -} - -// GetDueOn returns the DueOn field if it's non-nil, zero value otherwise. -func (m *Milestone) GetDueOn() Timestamp { - if m == nil || m.DueOn == nil { - return Timestamp{} - } - return *m.DueOn -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (m *Milestone) GetHTMLURL() string { - if m == nil || m.HTMLURL == nil { - return "" - } - return *m.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *Milestone) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. -func (m *Milestone) GetLabelsURL() string { - if m == nil || m.LabelsURL == nil { - return "" - } - return *m.LabelsURL -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (m *Milestone) GetNodeID() string { - if m == nil || m.NodeID == nil { - return "" - } - return *m.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (m *Milestone) GetNumber() int { - if m == nil || m.Number == nil { - return 0 - } - return *m.Number -} - -// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. -func (m *Milestone) GetOpenIssues() int { - if m == nil || m.OpenIssues == nil { - return 0 - } - return *m.OpenIssues -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *Milestone) GetState() string { - if m == nil || m.State == nil { - return "" - } - return *m.State -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (m *Milestone) GetTitle() string { - if m == nil || m.Title == nil { - return "" - } - return *m.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (m *Milestone) GetUpdatedAt() Timestamp { - if m == nil || m.UpdatedAt == nil { - return Timestamp{} - } - return *m.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (m *Milestone) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MilestoneEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetChanges returns the Changes field. -func (m *MilestoneEvent) GetChanges() *EditChange { - if m == nil { - return nil - } - return m.Changes -} - -// GetInstallation returns the Installation field. -func (m *MilestoneEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetMilestone returns the Milestone field. -func (m *MilestoneEvent) GetMilestone() *Milestone { - if m == nil { - return nil - } - return m.Milestone -} - -// GetOrg returns the Org field. -func (m *MilestoneEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetRepo returns the Repo field. -func (m *MilestoneEvent) GetRepo() *Repository { - if m == nil { - return nil - } - return m.Repo -} - -// GetSender returns the Sender field. -func (m *MilestoneEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetClosedMilestones returns the ClosedMilestones field if it's non-nil, zero value otherwise. -func (m *MilestoneStats) GetClosedMilestones() int { - if m == nil || m.ClosedMilestones == nil { - return 0 - } - return *m.ClosedMilestones -} - -// GetOpenMilestones returns the OpenMilestones field if it's non-nil, zero value otherwise. -func (m *MilestoneStats) GetOpenMilestones() int { - if m == nil || m.OpenMilestones == nil { - return 0 - } - return *m.OpenMilestones -} - -// GetTotalMilestones returns the TotalMilestones field if it's non-nil, zero value otherwise. -func (m *MilestoneStats) GetTotalMilestones() int { - if m == nil || m.TotalMilestones == nil { - return 0 - } - return *m.TotalMilestones -} - -// GetAnalysisKey returns the AnalysisKey field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetAnalysisKey() string { - if m == nil || m.AnalysisKey == nil { - return "" - } - return *m.AnalysisKey -} - -// GetCategory returns the Category field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetCategory() string { - if m == nil || m.Category == nil { - return "" - } - return *m.Category -} - -// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetCommitSHA() string { - if m == nil || m.CommitSHA == nil { - return "" - } - return *m.CommitSHA -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetEnvironment() string { - if m == nil || m.Environment == nil { - return "" - } - return *m.Environment -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetHTMLURL() string { - if m == nil || m.HTMLURL == nil { - return "" - } - return *m.HTMLURL -} - -// GetLocation returns the Location field. -func (m *MostRecentInstance) GetLocation() *Location { - if m == nil { - return nil - } - return m.Location -} - -// GetMessage returns the Message field. -func (m *MostRecentInstance) GetMessage() *Message { - if m == nil { - return nil - } - return m.Message -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. 
-func (m *MostRecentInstance) GetRef() string { - if m == nil || m.Ref == nil { - return "" - } - return *m.Ref -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetState() string { - if m == nil || m.State == nil { - return "" - } - return *m.State -} - -// GetBase returns the Base field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetBase() string { - if n == nil || n.Base == nil { - return "" - } - return *n.Base -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetBody() string { - if n == nil || n.Body == nil { - return "" - } - return *n.Body -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetDraft() bool { - if n == nil || n.Draft == nil { - return false - } - return *n.Draft -} - -// GetHead returns the Head field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetHead() string { - if n == nil || n.Head == nil { - return "" - } - return *n.Head -} - -// GetHeadRepo returns the HeadRepo field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetHeadRepo() string { - if n == nil || n.HeadRepo == nil { - return "" - } - return *n.HeadRepo -} - -// GetIssue returns the Issue field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetIssue() int { - if n == nil || n.Issue == nil { - return 0 - } - return *n.Issue -} - -// GetMaintainerCanModify returns the MaintainerCanModify field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetMaintainerCanModify() bool { - if n == nil || n.MaintainerCanModify == nil { - return false - } - return *n.MaintainerCanModify -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetTitle() string { - if n == nil || n.Title == nil { - return "" - } - return *n.Title -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (n *NewTeam) GetDescription() string { - if n == nil || n.Description == nil { - return "" - } - return *n.Description -} - -// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. -func (n *NewTeam) GetLDAPDN() string { - if n == nil || n.LDAPDN == nil { - return "" - } - return *n.LDAPDN -} - -// GetParentTeamID returns the ParentTeamID field if it's non-nil, zero value otherwise. -func (n *NewTeam) GetParentTeamID() int64 { - if n == nil || n.ParentTeamID == nil { - return 0 - } - return *n.ParentTeamID -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (n *NewTeam) GetPermission() string { - if n == nil || n.Permission == nil { - return "" - } - return *n.Permission -} - -// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. -func (n *NewTeam) GetPrivacy() string { - if n == nil || n.Privacy == nil { - return "" - } - return *n.Privacy -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (n *Notification) GetID() string { - if n == nil || n.ID == nil { - return "" - } - return *n.ID -} - -// GetLastReadAt returns the LastReadAt field if it's non-nil, zero value otherwise. -func (n *Notification) GetLastReadAt() Timestamp { - if n == nil || n.LastReadAt == nil { - return Timestamp{} - } - return *n.LastReadAt -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. 
-func (n *Notification) GetReason() string { - if n == nil || n.Reason == nil { - return "" - } - return *n.Reason -} - -// GetRepository returns the Repository field. -func (n *Notification) GetRepository() *Repository { - if n == nil { - return nil - } - return n.Repository -} - -// GetSubject returns the Subject field. -func (n *Notification) GetSubject() *NotificationSubject { - if n == nil { - return nil - } - return n.Subject -} - -// GetUnread returns the Unread field if it's non-nil, zero value otherwise. -func (n *Notification) GetUnread() bool { - if n == nil || n.Unread == nil { - return false - } - return *n.Unread -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (n *Notification) GetUpdatedAt() Timestamp { - if n == nil || n.UpdatedAt == nil { - return Timestamp{} - } - return *n.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (n *Notification) GetURL() string { - if n == nil || n.URL == nil { - return "" - } - return *n.URL -} - -// GetLatestCommentURL returns the LatestCommentURL field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetLatestCommentURL() string { - if n == nil || n.LatestCommentURL == nil { - return "" - } - return *n.LatestCommentURL -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetTitle() string { - if n == nil || n.Title == nil { - return "" - } - return *n.Title -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetType() string { - if n == nil || n.Type == nil { - return "" - } - return *n.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetURL() string { - if n == nil || n.URL == nil { - return "" - } - return *n.URL -} - -// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. -func (o *OAuthAPP) GetClientID() string { - if o == nil || o.ClientID == nil { - return "" - } - return *o.ClientID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (o *OAuthAPP) GetName() string { - if o == nil || o.Name == nil { - return "" - } - return *o.Name -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (o *OAuthAPP) GetURL() string { - if o == nil || o.URL == nil { - return "" - } - return *o.URL -} - -// GetUseDefault returns the UseDefault field if it's non-nil, zero value otherwise. -func (o *OIDCSubjectClaimCustomTemplate) GetUseDefault() bool { - if o == nil || o.UseDefault == nil { - return false - } - return *o.UseDefault -} - -// GetAdvancedSecurityEnabledForNewRepos returns the AdvancedSecurityEnabledForNewRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetAdvancedSecurityEnabledForNewRepos() bool { - if o == nil || o.AdvancedSecurityEnabledForNewRepos == nil { - return false - } - return *o.AdvancedSecurityEnabledForNewRepos -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetAvatarURL() string { - if o == nil || o.AvatarURL == nil { - return "" - } - return *o.AvatarURL -} - -// GetBillingEmail returns the BillingEmail field if it's non-nil, zero value otherwise. -func (o *Organization) GetBillingEmail() string { - if o == nil || o.BillingEmail == nil { - return "" - } - return *o.BillingEmail -} - -// GetBlog returns the Blog field if it's non-nil, zero value otherwise. 
-func (o *Organization) GetBlog() string { - if o == nil || o.Blog == nil { - return "" - } - return *o.Blog -} - -// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. -func (o *Organization) GetCollaborators() int { - if o == nil || o.Collaborators == nil { - return 0 - } - return *o.Collaborators -} - -// GetCompany returns the Company field if it's non-nil, zero value otherwise. -func (o *Organization) GetCompany() string { - if o == nil || o.Company == nil { - return "" - } - return *o.Company -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (o *Organization) GetCreatedAt() Timestamp { - if o == nil || o.CreatedAt == nil { - return Timestamp{} - } - return *o.CreatedAt -} - -// GetDefaultRepoPermission returns the DefaultRepoPermission field if it's non-nil, zero value otherwise. -func (o *Organization) GetDefaultRepoPermission() string { - if o == nil || o.DefaultRepoPermission == nil { - return "" - } - return *o.DefaultRepoPermission -} - -// GetDefaultRepoSettings returns the DefaultRepoSettings field if it's non-nil, zero value otherwise. -func (o *Organization) GetDefaultRepoSettings() string { - if o == nil || o.DefaultRepoSettings == nil { - return "" - } - return *o.DefaultRepoSettings -} - -// GetDependabotAlertsEnabledForNewRepos returns the DependabotAlertsEnabledForNewRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetDependabotAlertsEnabledForNewRepos() bool { - if o == nil || o.DependabotAlertsEnabledForNewRepos == nil { - return false - } - return *o.DependabotAlertsEnabledForNewRepos -} - -// GetDependabotSecurityUpdatesEnabledForNewRepos returns the DependabotSecurityUpdatesEnabledForNewRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetDependabotSecurityUpdatesEnabledForNewRepos() bool { - if o == nil || o.DependabotSecurityUpdatesEnabledForNewRepos == nil { - return false - } - return *o.DependabotSecurityUpdatesEnabledForNewRepos -} - -// GetDependencyGraphEnabledForNewRepos returns the DependencyGraphEnabledForNewRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetDependencyGraphEnabledForNewRepos() bool { - if o == nil || o.DependencyGraphEnabledForNewRepos == nil { - return false - } - return *o.DependencyGraphEnabledForNewRepos -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (o *Organization) GetDescription() string { - if o == nil || o.Description == nil { - return "" - } - return *o.Description -} - -// GetDiskUsage returns the DiskUsage field if it's non-nil, zero value otherwise. -func (o *Organization) GetDiskUsage() int { - if o == nil || o.DiskUsage == nil { - return 0 - } - return *o.DiskUsage -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (o *Organization) GetEmail() string { - if o == nil || o.Email == nil { - return "" - } - return *o.Email -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetEventsURL() string { - if o == nil || o.EventsURL == nil { - return "" - } - return *o.EventsURL -} - -// GetFollowers returns the Followers field if it's non-nil, zero value otherwise. -func (o *Organization) GetFollowers() int { - if o == nil || o.Followers == nil { - return 0 - } - return *o.Followers -} - -// GetFollowing returns the Following field if it's non-nil, zero value otherwise. 
-func (o *Organization) GetFollowing() int { - if o == nil || o.Following == nil { - return 0 - } - return *o.Following -} - -// GetHasOrganizationProjects returns the HasOrganizationProjects field if it's non-nil, zero value otherwise. -func (o *Organization) GetHasOrganizationProjects() bool { - if o == nil || o.HasOrganizationProjects == nil { - return false - } - return *o.HasOrganizationProjects -} - -// GetHasRepositoryProjects returns the HasRepositoryProjects field if it's non-nil, zero value otherwise. -func (o *Organization) GetHasRepositoryProjects() bool { - if o == nil || o.HasRepositoryProjects == nil { - return false - } - return *o.HasRepositoryProjects -} - -// GetHooksURL returns the HooksURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetHooksURL() string { - if o == nil || o.HooksURL == nil { - return "" - } - return *o.HooksURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetHTMLURL() string { - if o == nil || o.HTMLURL == nil { - return "" - } - return *o.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (o *Organization) GetID() int64 { - if o == nil || o.ID == nil { - return 0 - } - return *o.ID -} - -// GetIssuesURL returns the IssuesURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetIssuesURL() string { - if o == nil || o.IssuesURL == nil { - return "" - } - return *o.IssuesURL -} - -// GetIsVerified returns the IsVerified field if it's non-nil, zero value otherwise. -func (o *Organization) GetIsVerified() bool { - if o == nil || o.IsVerified == nil { - return false - } - return *o.IsVerified -} - -// GetLocation returns the Location field if it's non-nil, zero value otherwise. -func (o *Organization) GetLocation() string { - if o == nil || o.Location == nil { - return "" - } - return *o.Location -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (o *Organization) GetLogin() string { - if o == nil || o.Login == nil { - return "" - } - return *o.Login -} - -// GetMembersAllowedRepositoryCreationType returns the MembersAllowedRepositoryCreationType field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersAllowedRepositoryCreationType() string { - if o == nil || o.MembersAllowedRepositoryCreationType == nil { - return "" - } - return *o.MembersAllowedRepositoryCreationType -} - -// GetMembersCanCreateInternalRepos returns the MembersCanCreateInternalRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreateInternalRepos() bool { - if o == nil || o.MembersCanCreateInternalRepos == nil { - return false - } - return *o.MembersCanCreateInternalRepos -} - -// GetMembersCanCreatePages returns the MembersCanCreatePages field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreatePages() bool { - if o == nil || o.MembersCanCreatePages == nil { - return false - } - return *o.MembersCanCreatePages -} - -// GetMembersCanCreatePrivatePages returns the MembersCanCreatePrivatePages field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreatePrivatePages() bool { - if o == nil || o.MembersCanCreatePrivatePages == nil { - return false - } - return *o.MembersCanCreatePrivatePages -} - -// GetMembersCanCreatePrivateRepos returns the MembersCanCreatePrivateRepos field if it's non-nil, zero value otherwise. 
-func (o *Organization) GetMembersCanCreatePrivateRepos() bool { - if o == nil || o.MembersCanCreatePrivateRepos == nil { - return false - } - return *o.MembersCanCreatePrivateRepos -} - -// GetMembersCanCreatePublicPages returns the MembersCanCreatePublicPages field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreatePublicPages() bool { - if o == nil || o.MembersCanCreatePublicPages == nil { - return false - } - return *o.MembersCanCreatePublicPages -} - -// GetMembersCanCreatePublicRepos returns the MembersCanCreatePublicRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreatePublicRepos() bool { - if o == nil || o.MembersCanCreatePublicRepos == nil { - return false - } - return *o.MembersCanCreatePublicRepos -} - -// GetMembersCanCreateRepos returns the MembersCanCreateRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreateRepos() bool { - if o == nil || o.MembersCanCreateRepos == nil { - return false - } - return *o.MembersCanCreateRepos -} - -// GetMembersCanForkPrivateRepos returns the MembersCanForkPrivateRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanForkPrivateRepos() bool { - if o == nil || o.MembersCanForkPrivateRepos == nil { - return false - } - return *o.MembersCanForkPrivateRepos -} - -// GetMembersURL returns the MembersURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersURL() string { - if o == nil || o.MembersURL == nil { - return "" - } - return *o.MembersURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (o *Organization) GetName() string { - if o == nil || o.Name == nil { - return "" - } - return *o.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (o *Organization) GetNodeID() string { - if o == nil || o.NodeID == nil { - return "" - } - return *o.NodeID -} - -// GetOwnedPrivateRepos returns the OwnedPrivateRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetOwnedPrivateRepos() int64 { - if o == nil || o.OwnedPrivateRepos == nil { - return 0 - } - return *o.OwnedPrivateRepos -} - -// GetPlan returns the Plan field. -func (o *Organization) GetPlan() *Plan { - if o == nil { - return nil - } - return o.Plan -} - -// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. -func (o *Organization) GetPrivateGists() int { - if o == nil || o.PrivateGists == nil { - return 0 - } - return *o.PrivateGists -} - -// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. -func (o *Organization) GetPublicGists() int { - if o == nil || o.PublicGists == nil { - return 0 - } - return *o.PublicGists -} - -// GetPublicMembersURL returns the PublicMembersURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetPublicMembersURL() string { - if o == nil || o.PublicMembersURL == nil { - return "" - } - return *o.PublicMembersURL -} - -// GetPublicRepos returns the PublicRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetPublicRepos() int { - if o == nil || o.PublicRepos == nil { - return 0 - } - return *o.PublicRepos -} - -// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. 
-func (o *Organization) GetReposURL() string { - if o == nil || o.ReposURL == nil { - return "" - } - return *o.ReposURL -} - -// GetSecretScanningEnabledForNewRepos returns the SecretScanningEnabledForNewRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetSecretScanningEnabledForNewRepos() bool { - if o == nil || o.SecretScanningEnabledForNewRepos == nil { - return false - } - return *o.SecretScanningEnabledForNewRepos -} - -// GetSecretScanningPushProtectionEnabledForNewRepos returns the SecretScanningPushProtectionEnabledForNewRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetSecretScanningPushProtectionEnabledForNewRepos() bool { - if o == nil || o.SecretScanningPushProtectionEnabledForNewRepos == nil { - return false - } - return *o.SecretScanningPushProtectionEnabledForNewRepos -} - -// GetTotalPrivateRepos returns the TotalPrivateRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetTotalPrivateRepos() int64 { - if o == nil || o.TotalPrivateRepos == nil { - return 0 - } - return *o.TotalPrivateRepos -} - -// GetTwitterUsername returns the TwitterUsername field if it's non-nil, zero value otherwise. -func (o *Organization) GetTwitterUsername() string { - if o == nil || o.TwitterUsername == nil { - return "" - } - return *o.TwitterUsername -} - -// GetTwoFactorRequirementEnabled returns the TwoFactorRequirementEnabled field if it's non-nil, zero value otherwise. -func (o *Organization) GetTwoFactorRequirementEnabled() bool { - if o == nil || o.TwoFactorRequirementEnabled == nil { - return false - } - return *o.TwoFactorRequirementEnabled -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (o *Organization) GetType() string { - if o == nil || o.Type == nil { - return "" - } - return *o.Type -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (o *Organization) GetUpdatedAt() Timestamp { - if o == nil || o.UpdatedAt == nil { - return Timestamp{} - } - return *o.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (o *Organization) GetURL() string { - if o == nil || o.URL == nil { - return "" - } - return *o.URL -} - -// GetWebCommitSignoffRequired returns the WebCommitSignoffRequired field if it's non-nil, zero value otherwise. -func (o *Organization) GetWebCommitSignoffRequired() bool { - if o == nil || o.WebCommitSignoffRequired == nil { - return false - } - return *o.WebCommitSignoffRequired -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (o *OrganizationCustomRepoRoles) GetTotalCount() int { - if o == nil || o.TotalCount == nil { - return 0 - } - return *o.TotalCount -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (o *OrganizationEvent) GetAction() string { - if o == nil || o.Action == nil { - return "" - } - return *o.Action -} - -// GetInstallation returns the Installation field. -func (o *OrganizationEvent) GetInstallation() *Installation { - if o == nil { - return nil - } - return o.Installation -} - -// GetInvitation returns the Invitation field. -func (o *OrganizationEvent) GetInvitation() *Invitation { - if o == nil { - return nil - } - return o.Invitation -} - -// GetMembership returns the Membership field. -func (o *OrganizationEvent) GetMembership() *Membership { - if o == nil { - return nil - } - return o.Membership -} - -// GetOrganization returns the Organization field. 
-func (o *OrganizationEvent) GetOrganization() *Organization { - if o == nil { - return nil - } - return o.Organization -} - -// GetSender returns the Sender field. -func (o *OrganizationEvent) GetSender() *User { - if o == nil { - return nil - } - return o.Sender -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (o *OrganizationInstallations) GetTotalCount() int { - if o == nil || o.TotalCount == nil { - return 0 - } - return *o.TotalCount -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (o *OrgBlockEvent) GetAction() string { - if o == nil || o.Action == nil { - return "" - } - return *o.Action -} - -// GetBlockedUser returns the BlockedUser field. -func (o *OrgBlockEvent) GetBlockedUser() *User { - if o == nil { - return nil - } - return o.BlockedUser -} - -// GetInstallation returns the Installation field. -func (o *OrgBlockEvent) GetInstallation() *Installation { - if o == nil { - return nil - } - return o.Installation -} - -// GetOrganization returns the Organization field. -func (o *OrgBlockEvent) GetOrganization() *Organization { - if o == nil { - return nil - } - return o.Organization -} - -// GetSender returns the Sender field. -func (o *OrgBlockEvent) GetSender() *User { - if o == nil { - return nil - } - return o.Sender -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetCreatedAt() Timestamp { - if o == nil || o.CreatedAt == nil { - return Timestamp{} - } - return *o.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetID() int64 { - if o == nil || o.ID == nil { - return 0 - } - return *o.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetName() string { - if o == nil || o.Name == nil { - return "" - } - return *o.Name -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetPath() string { - if o == nil || o.Path == nil { - return "" - } - return *o.Path -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetRef() string { - if o == nil || o.Ref == nil { - return "" - } - return *o.Ref -} - -// GetRepository returns the Repository field. -func (o *OrgRequiredWorkflow) GetRepository() *Repository { - if o == nil { - return nil - } - return o.Repository -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetScope() string { - if o == nil || o.Scope == nil { - return "" - } - return *o.Scope -} - -// GetSelectedRepositoriesURL returns the SelectedRepositoriesURL field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetSelectedRepositoriesURL() string { - if o == nil || o.SelectedRepositoriesURL == nil { - return "" - } - return *o.SelectedRepositoriesURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetState() string { - if o == nil || o.State == nil { - return "" - } - return *o.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetUpdatedAt() Timestamp { - if o == nil || o.UpdatedAt == nil { - return Timestamp{} - } - return *o.UpdatedAt -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. 
-func (o *OrgRequiredWorkflows) GetTotalCount() int { - if o == nil || o.TotalCount == nil { - return 0 - } - return *o.TotalCount -} - -// GetDisabledOrgs returns the DisabledOrgs field if it's non-nil, zero value otherwise. -func (o *OrgStats) GetDisabledOrgs() int { - if o == nil || o.DisabledOrgs == nil { - return 0 - } - return *o.DisabledOrgs -} - -// GetTotalOrgs returns the TotalOrgs field if it's non-nil, zero value otherwise. -func (o *OrgStats) GetTotalOrgs() int { - if o == nil || o.TotalOrgs == nil { - return 0 - } - return *o.TotalOrgs -} - -// GetTotalTeamMembers returns the TotalTeamMembers field if it's non-nil, zero value otherwise. -func (o *OrgStats) GetTotalTeamMembers() int { - if o == nil || o.TotalTeamMembers == nil { - return 0 - } - return *o.TotalTeamMembers -} - -// GetTotalTeams returns the TotalTeams field if it's non-nil, zero value otherwise. -func (o *OrgStats) GetTotalTeams() int { - if o == nil || o.TotalTeams == nil { - return 0 - } - return *o.TotalTeams -} - -// GetOrg returns the Org field. -func (o *OwnerInfo) GetOrg() *User { - if o == nil { - return nil - } - return o.Org -} - -// GetUser returns the User field. -func (o *OwnerInfo) GetUser() *User { - if o == nil { - return nil - } - return o.User -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *Package) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *Package) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *Package) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *Package) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetOwner returns the Owner field. -func (p *Package) GetOwner() *User { - if p == nil { - return nil - } - return p.Owner -} - -// GetPackageType returns the PackageType field if it's non-nil, zero value otherwise. -func (p *Package) GetPackageType() string { - if p == nil || p.PackageType == nil { - return "" - } - return *p.PackageType -} - -// GetPackageVersion returns the PackageVersion field. -func (p *Package) GetPackageVersion() *PackageVersion { - if p == nil { - return nil - } - return p.PackageVersion -} - -// GetRegistry returns the Registry field. -func (p *Package) GetRegistry() *PackageRegistry { - if p == nil { - return nil - } - return p.Registry -} - -// GetRepository returns the Repository field. -func (p *Package) GetRepository() *Repository { - if p == nil { - return nil - } - return p.Repository -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *Package) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *Package) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetVersionCount returns the VersionCount field if it's non-nil, zero value otherwise. 
-func (p *Package) GetVersionCount() int64 { - if p == nil || p.VersionCount == nil { - return 0 - } - return *p.VersionCount -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (p *Package) GetVisibility() string { - if p == nil || p.Visibility == nil { - return "" - } - return *p.Visibility -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PackageEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetInstallation returns the Installation field. -func (p *PackageEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PackageEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetPackage returns the Package field. -func (p *PackageEvent) GetPackage() *Package { - if p == nil { - return nil - } - return p.Package -} - -// GetRepo returns the Repo field. -func (p *PackageEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PackageEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetAuthor returns the Author field. -func (p *PackageFile) GetAuthor() *User { - if p == nil { - return nil - } - return p.Author -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetContentType() string { - if p == nil || p.ContentType == nil { - return "" - } - return *p.ContentType -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetDownloadURL() string { - if p == nil || p.DownloadURL == nil { - return "" - } - return *p.DownloadURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetMD5 returns the MD5 field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetMD5() string { - if p == nil || p.MD5 == nil { - return "" - } - return *p.MD5 -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetSHA1 returns the SHA1 field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetSHA1() string { - if p == nil || p.SHA1 == nil { - return "" - } - return *p.SHA1 -} - -// GetSHA256 returns the SHA256 field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetSHA256() string { - if p == nil || p.SHA256 == nil { - return "" - } - return *p.SHA256 -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetSize() int64 { - if p == nil || p.Size == nil { - return 0 - } - return *p.Size -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (p *PackageFile) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetPackageType returns the PackageType field if it's non-nil, zero value otherwise. -func (p *PackageListOptions) GetPackageType() string { - if p == nil || p.PackageType == nil { - return "" - } - return *p.PackageType -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *PackageListOptions) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (p *PackageListOptions) GetVisibility() string { - if p == nil || p.Visibility == nil { - return "" - } - return *p.Visibility -} - -// GetContainer returns the Container field. -func (p *PackageMetadata) GetContainer() *PackageContainerMetadata { - if p == nil { - return nil - } - return p.Container -} - -// GetPackageType returns the PackageType field if it's non-nil, zero value otherwise. -func (p *PackageMetadata) GetPackageType() string { - if p == nil || p.PackageType == nil { - return "" - } - return *p.PackageType -} - -// GetAboutURL returns the AboutURL field if it's non-nil, zero value otherwise. -func (p *PackageRegistry) GetAboutURL() string { - if p == nil || p.AboutURL == nil { - return "" - } - return *p.AboutURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PackageRegistry) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (p *PackageRegistry) GetType() string { - if p == nil || p.Type == nil { - return "" - } - return *p.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PackageRegistry) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetVendor returns the Vendor field if it's non-nil, zero value otherwise. -func (p *PackageRegistry) GetVendor() string { - if p == nil || p.Vendor == nil { - return "" - } - return *p.Vendor -} - -// GetAuthor returns the Author field. -func (p *PackageRelease) GetAuthor() *User { - if p == nil { - return nil - } - return p.Author -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetDraft() bool { - if p == nil || p.Draft == nil { - return false - } - return *p.Draft -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetPrerelease returns the Prerelease field if it's non-nil, zero value otherwise. 
-func (p *PackageRelease) GetPrerelease() bool { - if p == nil || p.Prerelease == nil { - return false - } - return *p.Prerelease -} - -// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetPublishedAt() Timestamp { - if p == nil || p.PublishedAt == nil { - return Timestamp{} - } - return *p.PublishedAt -} - -// GetTagName returns the TagName field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetTagName() string { - if p == nil || p.TagName == nil { - return "" - } - return *p.TagName -} - -// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetTargetCommitish() string { - if p == nil || p.TargetCommitish == nil { - return "" - } - return *p.TargetCommitish -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetAuthor returns the Author field. -func (p *PackageVersion) GetAuthor() *User { - if p == nil { - return nil - } - return p.Author -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetBodyHTML returns the BodyHTML field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetBodyHTML() string { - if p == nil || p.BodyHTML == nil { - return "" - } - return *p.BodyHTML -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetDraft() bool { - if p == nil || p.Draft == nil { - return false - } - return *p.Draft -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetInstallationCommand returns the InstallationCommand field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetInstallationCommand() string { - if p == nil || p.InstallationCommand == nil { - return "" - } - return *p.InstallationCommand -} - -// GetManifest returns the Manifest field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetManifest() string { - if p == nil || p.Manifest == nil { - return "" - } - return *p.Manifest -} - -// GetMetadata returns the Metadata field. -func (p *PackageVersion) GetMetadata() *PackageMetadata { - if p == nil { - return nil - } - return p.Metadata -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetPackageHTMLURL returns the PackageHTMLURL field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetPackageHTMLURL() string { - if p == nil || p.PackageHTMLURL == nil { - return "" - } - return *p.PackageHTMLURL -} - -// GetPrerelease returns the Prerelease field if it's non-nil, zero value otherwise. 
-func (p *PackageVersion) GetPrerelease() bool { - if p == nil || p.Prerelease == nil { - return false - } - return *p.Prerelease -} - -// GetRelease returns the Release field. -func (p *PackageVersion) GetRelease() *PackageRelease { - if p == nil { - return nil - } - return p.Release -} - -// GetSummary returns the Summary field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetSummary() string { - if p == nil || p.Summary == nil { - return "" - } - return *p.Summary -} - -// GetTagName returns the TagName field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetTagName() string { - if p == nil || p.TagName == nil { - return "" - } - return *p.TagName -} - -// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetTargetCommitish() string { - if p == nil || p.TargetCommitish == nil { - return "" - } - return *p.TargetCommitish -} - -// GetTargetOID returns the TargetOID field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetTargetOID() string { - if p == nil || p.TargetOID == nil { - return "" - } - return *p.TargetOID -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetVersion returns the Version field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetVersion() string { - if p == nil || p.Version == nil { - return "" - } - return *p.Version -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *Page) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *Page) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetPageName returns the PageName field if it's non-nil, zero value otherwise. -func (p *Page) GetPageName() string { - if p == nil || p.PageName == nil { - return "" - } - return *p.PageName -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (p *Page) GetSHA() string { - if p == nil || p.SHA == nil { - return "" - } - return *p.SHA -} - -// GetSummary returns the Summary field if it's non-nil, zero value otherwise. -func (p *Page) GetSummary() string { - if p == nil || p.Summary == nil { - return "" - } - return *p.Summary -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (p *Page) GetTitle() string { - if p == nil || p.Title == nil { - return "" - } - return *p.Title -} - -// GetBuild returns the Build field. -func (p *PageBuildEvent) GetBuild() *PagesBuild { - if p == nil { - return nil - } - return p.Build -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PageBuildEvent) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetInstallation returns the Installation field. -func (p *PageBuildEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. 
-func (p *PageBuildEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetRepo returns the Repo field. -func (p *PageBuildEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PageBuildEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetBuildType returns the BuildType field if it's non-nil, zero value otherwise. -func (p *Pages) GetBuildType() string { - if p == nil || p.BuildType == nil { - return "" - } - return *p.BuildType -} - -// GetCNAME returns the CNAME field if it's non-nil, zero value otherwise. -func (p *Pages) GetCNAME() string { - if p == nil || p.CNAME == nil { - return "" - } - return *p.CNAME -} - -// GetCustom404 returns the Custom404 field if it's non-nil, zero value otherwise. -func (p *Pages) GetCustom404() bool { - if p == nil || p.Custom404 == nil { - return false - } - return *p.Custom404 -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *Pages) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetHTTPSCertificate returns the HTTPSCertificate field. -func (p *Pages) GetHTTPSCertificate() *PagesHTTPSCertificate { - if p == nil { - return nil - } - return p.HTTPSCertificate -} - -// GetHTTPSEnforced returns the HTTPSEnforced field if it's non-nil, zero value otherwise. -func (p *Pages) GetHTTPSEnforced() bool { - if p == nil || p.HTTPSEnforced == nil { - return false - } - return *p.HTTPSEnforced -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. -func (p *Pages) GetPublic() bool { - if p == nil || p.Public == nil { - return false - } - return *p.Public -} - -// GetSource returns the Source field. -func (p *Pages) GetSource() *PagesSource { - if p == nil { - return nil - } - return p.Source -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (p *Pages) GetStatus() string { - if p == nil || p.Status == nil { - return "" - } - return *p.Status -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *Pages) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetCommit returns the Commit field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetCommit() string { - if p == nil || p.Commit == nil { - return "" - } - return *p.Commit -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDuration returns the Duration field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetDuration() int { - if p == nil || p.Duration == nil { - return 0 - } - return *p.Duration -} - -// GetError returns the Error field. -func (p *PagesBuild) GetError() *PagesError { - if p == nil { - return nil - } - return p.Error -} - -// GetPusher returns the Pusher field. -func (p *PagesBuild) GetPusher() *User { - if p == nil { - return nil - } - return p.Pusher -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetStatus() string { - if p == nil || p.Status == nil { - return "" - } - return *p.Status -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (p *PagesBuild) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetCAAError returns the CAAError field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetCAAError() string { - if p == nil || p.CAAError == nil { - return "" - } - return *p.CAAError -} - -// GetDNSResolves returns the DNSResolves field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetDNSResolves() bool { - if p == nil || p.DNSResolves == nil { - return false - } - return *p.DNSResolves -} - -// GetEnforcesHTTPS returns the EnforcesHTTPS field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetEnforcesHTTPS() bool { - if p == nil || p.EnforcesHTTPS == nil { - return false - } - return *p.EnforcesHTTPS -} - -// GetHasCNAMERecord returns the HasCNAMERecord field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetHasCNAMERecord() bool { - if p == nil || p.HasCNAMERecord == nil { - return false - } - return *p.HasCNAMERecord -} - -// GetHasMXRecordsPresent returns the HasMXRecordsPresent field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetHasMXRecordsPresent() bool { - if p == nil || p.HasMXRecordsPresent == nil { - return false - } - return *p.HasMXRecordsPresent -} - -// GetHost returns the Host field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetHost() string { - if p == nil || p.Host == nil { - return "" - } - return *p.Host -} - -// GetHTTPSError returns the HTTPSError field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetHTTPSError() string { - if p == nil || p.HTTPSError == nil { - return "" - } - return *p.HTTPSError -} - -// GetIsApexDomain returns the IsApexDomain field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsApexDomain() bool { - if p == nil || p.IsApexDomain == nil { - return false - } - return *p.IsApexDomain -} - -// GetIsARecord returns the IsARecord field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsARecord() bool { - if p == nil || p.IsARecord == nil { - return false - } - return *p.IsARecord -} - -// GetIsCloudflareIP returns the IsCloudflareIP field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsCloudflareIP() bool { - if p == nil || p.IsCloudflareIP == nil { - return false - } - return *p.IsCloudflareIP -} - -// GetIsCNAMEToFastly returns the IsCNAMEToFastly field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsCNAMEToFastly() bool { - if p == nil || p.IsCNAMEToFastly == nil { - return false - } - return *p.IsCNAMEToFastly -} - -// GetIsCNAMEToGithubUserDomain returns the IsCNAMEToGithubUserDomain field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsCNAMEToGithubUserDomain() bool { - if p == nil || p.IsCNAMEToGithubUserDomain == nil { - return false - } - return *p.IsCNAMEToGithubUserDomain -} - -// GetIsCNAMEToPagesDotGithubDotCom returns the IsCNAMEToPagesDotGithubDotCom field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsCNAMEToPagesDotGithubDotCom() bool { - if p == nil || p.IsCNAMEToPagesDotGithubDotCom == nil { - return false - } - return *p.IsCNAMEToPagesDotGithubDotCom -} - -// GetIsFastlyIP returns the IsFastlyIP field if it's non-nil, zero value otherwise. 
-func (p *PagesDomain) GetIsFastlyIP() bool { - if p == nil || p.IsFastlyIP == nil { - return false - } - return *p.IsFastlyIP -} - -// GetIsHTTPSEligible returns the IsHTTPSEligible field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsHTTPSEligible() bool { - if p == nil || p.IsHTTPSEligible == nil { - return false - } - return *p.IsHTTPSEligible -} - -// GetIsNonGithubPagesIPPresent returns the IsNonGithubPagesIPPresent field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsNonGithubPagesIPPresent() bool { - if p == nil || p.IsNonGithubPagesIPPresent == nil { - return false - } - return *p.IsNonGithubPagesIPPresent -} - -// GetIsOldIPAddress returns the IsOldIPAddress field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsOldIPAddress() bool { - if p == nil || p.IsOldIPAddress == nil { - return false - } - return *p.IsOldIPAddress -} - -// GetIsPagesDomain returns the IsPagesDomain field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsPagesDomain() bool { - if p == nil || p.IsPagesDomain == nil { - return false - } - return *p.IsPagesDomain -} - -// GetIsPointedToGithubPagesIP returns the IsPointedToGithubPagesIP field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsPointedToGithubPagesIP() bool { - if p == nil || p.IsPointedToGithubPagesIP == nil { - return false - } - return *p.IsPointedToGithubPagesIP -} - -// GetIsProxied returns the IsProxied field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsProxied() bool { - if p == nil || p.IsProxied == nil { - return false - } - return *p.IsProxied -} - -// GetIsServedByPages returns the IsServedByPages field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsServedByPages() bool { - if p == nil || p.IsServedByPages == nil { - return false - } - return *p.IsServedByPages -} - -// GetIsValid returns the IsValid field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsValid() bool { - if p == nil || p.IsValid == nil { - return false - } - return *p.IsValid -} - -// GetIsValidDomain returns the IsValidDomain field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsValidDomain() bool { - if p == nil || p.IsValidDomain == nil { - return false - } - return *p.IsValidDomain -} - -// GetNameservers returns the Nameservers field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetNameservers() string { - if p == nil || p.Nameservers == nil { - return "" - } - return *p.Nameservers -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetReason() string { - if p == nil || p.Reason == nil { - return "" - } - return *p.Reason -} - -// GetRespondsToHTTPS returns the RespondsToHTTPS field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetRespondsToHTTPS() bool { - if p == nil || p.RespondsToHTTPS == nil { - return false - } - return *p.RespondsToHTTPS -} - -// GetShouldBeARecord returns the ShouldBeARecord field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetShouldBeARecord() bool { - if p == nil || p.ShouldBeARecord == nil { - return false - } - return *p.ShouldBeARecord -} - -// GetURI returns the URI field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetURI() string { - if p == nil || p.URI == nil { - return "" - } - return *p.URI -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. 
-func (p *PagesError) GetMessage() string { - if p == nil || p.Message == nil { - return "" - } - return *p.Message -} - -// GetAltDomain returns the AltDomain field. -func (p *PagesHealthCheckResponse) GetAltDomain() *PagesDomain { - if p == nil { - return nil - } - return p.AltDomain -} - -// GetDomain returns the Domain field. -func (p *PagesHealthCheckResponse) GetDomain() *PagesDomain { - if p == nil { - return nil - } - return p.Domain -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (p *PagesHTTPSCertificate) GetDescription() string { - if p == nil || p.Description == nil { - return "" - } - return *p.Description -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (p *PagesHTTPSCertificate) GetExpiresAt() string { - if p == nil || p.ExpiresAt == nil { - return "" - } - return *p.ExpiresAt -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *PagesHTTPSCertificate) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetBranch returns the Branch field if it's non-nil, zero value otherwise. -func (p *PagesSource) GetBranch() string { - if p == nil || p.Branch == nil { - return "" - } - return *p.Branch -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (p *PagesSource) GetPath() string { - if p == nil || p.Path == nil { - return "" - } - return *p.Path -} - -// GetTotalPages returns the TotalPages field if it's non-nil, zero value otherwise. -func (p *PageStats) GetTotalPages() int { - if p == nil || p.TotalPages == nil { - return 0 - } - return *p.TotalPages -} - -// GetBuildType returns the BuildType field if it's non-nil, zero value otherwise. -func (p *PagesUpdate) GetBuildType() string { - if p == nil || p.BuildType == nil { - return "" - } - return *p.BuildType -} - -// GetCNAME returns the CNAME field if it's non-nil, zero value otherwise. -func (p *PagesUpdate) GetCNAME() string { - if p == nil || p.CNAME == nil { - return "" - } - return *p.CNAME -} - -// GetHTTPSEnforced returns the HTTPSEnforced field if it's non-nil, zero value otherwise. -func (p *PagesUpdate) GetHTTPSEnforced() bool { - if p == nil || p.HTTPSEnforced == nil { - return false - } - return *p.HTTPSEnforced -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. -func (p *PagesUpdate) GetPublic() bool { - if p == nil || p.Public == nil { - return false - } - return *p.Public -} - -// GetSource returns the Source field. -func (p *PagesUpdate) GetSource() *PagesSource { - if p == nil { - return nil - } - return p.Source -} - -// GetOrg returns the Org map if it's non-nil, an empty map otherwise. -func (p *PersonalAccessTokenPermissions) GetOrg() map[string]string { - if p == nil || p.Org == nil { - return map[string]string{} - } - return p.Org -} - -// GetOther returns the Other map if it's non-nil, an empty map otherwise. -func (p *PersonalAccessTokenPermissions) GetOther() map[string]string { - if p == nil || p.Other == nil { - return map[string]string{} - } - return p.Other -} - -// GetRepo returns the Repo map if it's non-nil, an empty map otherwise. -func (p *PersonalAccessTokenPermissions) GetRepo() map[string]string { - if p == nil || p.Repo == nil { - return map[string]string{} - } - return p.Repo -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (p *PersonalAccessTokenRequest) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetOrg returns the Org field. -func (p *PersonalAccessTokenRequest) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetOwner returns the Owner field. -func (p *PersonalAccessTokenRequest) GetOwner() *User { - if p == nil { - return nil - } - return p.Owner -} - -// GetPermissionsAdded returns the PermissionsAdded field. -func (p *PersonalAccessTokenRequest) GetPermissionsAdded() *PersonalAccessTokenPermissions { - if p == nil { - return nil - } - return p.PermissionsAdded -} - -// GetPermissionsResult returns the PermissionsResult field. -func (p *PersonalAccessTokenRequest) GetPermissionsResult() *PersonalAccessTokenPermissions { - if p == nil { - return nil - } - return p.PermissionsResult -} - -// GetPermissionsUpgraded returns the PermissionsUpgraded field. -func (p *PersonalAccessTokenRequest) GetPermissionsUpgraded() *PersonalAccessTokenPermissions { - if p == nil { - return nil - } - return p.PermissionsUpgraded -} - -// GetRepositoryCount returns the RepositoryCount field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetRepositoryCount() int64 { - if p == nil || p.RepositoryCount == nil { - return 0 - } - return *p.RepositoryCount -} - -// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetRepositorySelection() string { - if p == nil || p.RepositorySelection == nil { - return "" - } - return *p.RepositorySelection -} - -// GetTokenExpired returns the TokenExpired field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetTokenExpired() bool { - if p == nil || p.TokenExpired == nil { - return false - } - return *p.TokenExpired -} - -// GetTokenExpiresAt returns the TokenExpiresAt field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetTokenExpiresAt() Timestamp { - if p == nil || p.TokenExpiresAt == nil { - return Timestamp{} - } - return *p.TokenExpiresAt -} - -// GetTokenLastUsedAt returns the TokenLastUsedAt field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetTokenLastUsedAt() Timestamp { - if p == nil || p.TokenLastUsedAt == nil { - return Timestamp{} - } - return *p.TokenLastUsedAt -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequestEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetInstallation returns the Installation field. -func (p *PersonalAccessTokenRequestEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PersonalAccessTokenRequestEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetPersonalAccessTokenRequest returns the PersonalAccessTokenRequest field. -func (p *PersonalAccessTokenRequestEvent) GetPersonalAccessTokenRequest() *PersonalAccessTokenRequest { - if p == nil { - return nil - } - return p.PersonalAccessTokenRequest -} - -// GetSender returns the Sender field. 
-func (p *PersonalAccessTokenRequestEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetHook returns the Hook field. -func (p *PingEvent) GetHook() *Hook { - if p == nil { - return nil - } - return p.Hook -} - -// GetHookID returns the HookID field if it's non-nil, zero value otherwise. -func (p *PingEvent) GetHookID() int64 { - if p == nil || p.HookID == nil { - return 0 - } - return *p.HookID -} - -// GetInstallation returns the Installation field. -func (p *PingEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PingEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetRepo returns the Repo field. -func (p *PingEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PingEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetZen returns the Zen field if it's non-nil, zero value otherwise. -func (p *PingEvent) GetZen() string { - if p == nil || p.Zen == nil { - return "" - } - return *p.Zen -} - -// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. -func (p *Plan) GetCollaborators() int { - if p == nil || p.Collaborators == nil { - return 0 - } - return *p.Collaborators -} - -// GetFilledSeats returns the FilledSeats field if it's non-nil, zero value otherwise. -func (p *Plan) GetFilledSeats() int { - if p == nil || p.FilledSeats == nil { - return 0 - } - return *p.FilledSeats -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *Plan) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetPrivateRepos returns the PrivateRepos field if it's non-nil, zero value otherwise. -func (p *Plan) GetPrivateRepos() int64 { - if p == nil || p.PrivateRepos == nil { - return 0 - } - return *p.PrivateRepos -} - -// GetSeats returns the Seats field if it's non-nil, zero value otherwise. -func (p *Plan) GetSeats() int { - if p == nil || p.Seats == nil { - return 0 - } - return *p.Seats -} - -// GetSpace returns the Space field if it's non-nil, zero value otherwise. -func (p *Plan) GetSpace() int { - if p == nil || p.Space == nil { - return 0 - } - return *p.Space -} - -// GetCode returns the Code field if it's non-nil, zero value otherwise. -func (p *PolicyOverrideReason) GetCode() string { - if p == nil || p.Code == nil { - return "" - } - return *p.Code -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (p *PolicyOverrideReason) GetMessage() string { - if p == nil || p.Message == nil { - return "" - } - return *p.Message -} - -// GetConfigURL returns the ConfigURL field if it's non-nil, zero value otherwise. -func (p *PreReceiveHook) GetConfigURL() string { - if p == nil || p.ConfigURL == nil { - return "" - } - return *p.ConfigURL -} - -// GetEnforcement returns the Enforcement field if it's non-nil, zero value otherwise. -func (p *PreReceiveHook) GetEnforcement() string { - if p == nil || p.Enforcement == nil { - return "" - } - return *p.Enforcement -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PreReceiveHook) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (p *PreReceiveHook) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetHRef returns the HRef field if it's non-nil, zero value otherwise. -func (p *PRLink) GetHRef() string { - if p == nil || p.HRef == nil { - return "" - } - return *p.HRef -} - -// GetComments returns the Comments field. -func (p *PRLinks) GetComments() *PRLink { - if p == nil { - return nil - } - return p.Comments -} - -// GetCommits returns the Commits field. -func (p *PRLinks) GetCommits() *PRLink { - if p == nil { - return nil - } - return p.Commits -} - -// GetHTML returns the HTML field. -func (p *PRLinks) GetHTML() *PRLink { - if p == nil { - return nil - } - return p.HTML -} - -// GetIssue returns the Issue field. -func (p *PRLinks) GetIssue() *PRLink { - if p == nil { - return nil - } - return p.Issue -} - -// GetReviewComment returns the ReviewComment field. -func (p *PRLinks) GetReviewComment() *PRLink { - if p == nil { - return nil - } - return p.ReviewComment -} - -// GetReviewComments returns the ReviewComments field. -func (p *PRLinks) GetReviewComments() *PRLink { - if p == nil { - return nil - } - return p.ReviewComments -} - -// GetSelf returns the Self field. -func (p *PRLinks) GetSelf() *PRLink { - if p == nil { - return nil - } - return p.Self -} - -// GetStatuses returns the Statuses field. -func (p *PRLinks) GetStatuses() *PRLink { - if p == nil { - return nil - } - return p.Statuses -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *Project) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetColumnsURL returns the ColumnsURL field if it's non-nil, zero value otherwise. -func (p *Project) GetColumnsURL() string { - if p == nil || p.ColumnsURL == nil { - return "" - } - return *p.ColumnsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *Project) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetCreator returns the Creator field. -func (p *Project) GetCreator() *User { - if p == nil { - return nil - } - return p.Creator -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *Project) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *Project) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *Project) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *Project) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (p *Project) GetNumber() int { - if p == nil || p.Number == nil { - return 0 - } - return *p.Number -} - -// GetOrganizationPermission returns the OrganizationPermission field if it's non-nil, zero value otherwise. -func (p *Project) GetOrganizationPermission() string { - if p == nil || p.OrganizationPermission == nil { - return "" - } - return *p.OrganizationPermission -} - -// GetOwnerURL returns the OwnerURL field if it's non-nil, zero value otherwise. 
-func (p *Project) GetOwnerURL() string { - if p == nil || p.OwnerURL == nil { - return "" - } - return *p.OwnerURL -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (p *Project) GetPrivate() bool { - if p == nil || p.Private == nil { - return false - } - return *p.Private -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *Project) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *Project) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *Project) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (p *ProjectBody) GetFrom() string { - if p == nil || p.From == nil { - return "" - } - return *p.From -} - -// GetArchived returns the Archived field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetArchived() bool { - if p == nil || p.Archived == nil { - return false - } - return *p.Archived -} - -// GetColumnID returns the ColumnID field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetColumnID() int64 { - if p == nil || p.ColumnID == nil { - return 0 - } - return *p.ColumnID -} - -// GetColumnName returns the ColumnName field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetColumnName() string { - if p == nil || p.ColumnName == nil { - return "" - } - return *p.ColumnName -} - -// GetColumnURL returns the ColumnURL field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetColumnURL() string { - if p == nil || p.ColumnURL == nil { - return "" - } - return *p.ColumnURL -} - -// GetContentURL returns the ContentURL field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetContentURL() string { - if p == nil || p.ContentURL == nil { - return "" - } - return *p.ContentURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetCreator returns the Creator field. -func (p *ProjectCard) GetCreator() *User { - if p == nil { - return nil - } - return p.Creator -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetNote returns the Note field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetNote() string { - if p == nil || p.Note == nil { - return "" - } - return *p.Note -} - -// GetPreviousColumnName returns the PreviousColumnName field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetPreviousColumnName() string { - if p == nil || p.PreviousColumnName == nil { - return "" - } - return *p.PreviousColumnName -} - -// GetProjectID returns the ProjectID field if it's non-nil, zero value otherwise. 
-func (p *ProjectCard) GetProjectID() int64 { - if p == nil || p.ProjectID == nil { - return 0 - } - return *p.ProjectID -} - -// GetProjectURL returns the ProjectURL field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetProjectURL() string { - if p == nil || p.ProjectURL == nil { - return "" - } - return *p.ProjectURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetNote returns the Note field. -func (p *ProjectCardChange) GetNote() *ProjectCardNote { - if p == nil { - return nil - } - return p.Note -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectCardEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetAfterID returns the AfterID field if it's non-nil, zero value otherwise. -func (p *ProjectCardEvent) GetAfterID() int64 { - if p == nil || p.AfterID == nil { - return 0 - } - return *p.AfterID -} - -// GetChanges returns the Changes field. -func (p *ProjectCardEvent) GetChanges() *ProjectCardChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. -func (p *ProjectCardEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *ProjectCardEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetProjectCard returns the ProjectCard field. -func (p *ProjectCardEvent) GetProjectCard() *ProjectCard { - if p == nil { - return nil - } - return p.ProjectCard -} - -// GetRepo returns the Repo field. -func (p *ProjectCardEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *ProjectCardEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetArchivedState returns the ArchivedState field if it's non-nil, zero value otherwise. -func (p *ProjectCardListOptions) GetArchivedState() string { - if p == nil || p.ArchivedState == nil { - return "" - } - return *p.ArchivedState -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (p *ProjectCardNote) GetFrom() string { - if p == nil || p.From == nil { - return "" - } - return *p.From -} - -// GetArchived returns the Archived field if it's non-nil, zero value otherwise. -func (p *ProjectCardOptions) GetArchived() bool { - if p == nil || p.Archived == nil { - return false - } - return *p.Archived -} - -// GetBody returns the Body field. -func (p *ProjectChange) GetBody() *ProjectBody { - if p == nil { - return nil - } - return p.Body -} - -// GetName returns the Name field. -func (p *ProjectChange) GetName() *ProjectName { - if p == nil { - return nil - } - return p.Name -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (p *ProjectCollaboratorOptions) GetPermission() string { - if p == nil || p.Permission == nil { - return "" - } - return *p.Permission -} - -// GetCardsURL returns the CardsURL field if it's non-nil, zero value otherwise. 
-func (p *ProjectColumn) GetCardsURL() string { - if p == nil || p.CardsURL == nil { - return "" - } - return *p.CardsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetProjectURL returns the ProjectURL field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetProjectURL() string { - if p == nil || p.ProjectURL == nil { - return "" - } - return *p.ProjectURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetName returns the Name field. -func (p *ProjectColumnChange) GetName() *ProjectColumnName { - if p == nil { - return nil - } - return p.Name -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectColumnEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetAfterID returns the AfterID field if it's non-nil, zero value otherwise. -func (p *ProjectColumnEvent) GetAfterID() int64 { - if p == nil || p.AfterID == nil { - return 0 - } - return *p.AfterID -} - -// GetChanges returns the Changes field. -func (p *ProjectColumnEvent) GetChanges() *ProjectColumnChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. -func (p *ProjectColumnEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *ProjectColumnEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetProjectColumn returns the ProjectColumn field. -func (p *ProjectColumnEvent) GetProjectColumn() *ProjectColumn { - if p == nil { - return nil - } - return p.ProjectColumn -} - -// GetRepo returns the Repo field. -func (p *ProjectColumnEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *ProjectColumnEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (p *ProjectColumnName) GetFrom() string { - if p == nil || p.From == nil { - return "" - } - return *p.From -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetChanges returns the Changes field. 
-func (p *ProjectEvent) GetChanges() *ProjectChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. -func (p *ProjectEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *ProjectEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetProject returns the Project field. -func (p *ProjectEvent) GetProject() *Project { - if p == nil { - return nil - } - return p.Project -} - -// GetRepo returns the Repo field. -func (p *ProjectEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *ProjectEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (p *ProjectName) GetFrom() string { - if p == nil || p.From == nil { - return "" - } - return *p.From -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *ProjectOptions) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *ProjectOptions) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetOrganizationPermission returns the OrganizationPermission field if it's non-nil, zero value otherwise. -func (p *ProjectOptions) GetOrganizationPermission() string { - if p == nil || p.OrganizationPermission == nil { - return "" - } - return *p.OrganizationPermission -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (p *ProjectOptions) GetPrivate() bool { - if p == nil || p.Private == nil { - return false - } - return *p.Private -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *ProjectOptions) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (p *ProjectPermissionLevel) GetPermission() string { - if p == nil || p.Permission == nil { - return "" - } - return *p.Permission -} - -// GetUser returns the User field. -func (p *ProjectPermissionLevel) GetUser() *User { - if p == nil { - return nil - } - return p.User -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetClosedAt() Timestamp { - if p == nil || p.ClosedAt == nil { - return Timestamp{} - } - return *p.ClosedAt -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetCreator returns the Creator field. -func (p *ProjectsV2) GetCreator() *User { - if p == nil { - return nil - } - return p.Creator -} - -// GetDeletedAt returns the DeletedAt field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetDeletedAt() Timestamp { - if p == nil || p.DeletedAt == nil { - return Timestamp{} - } - return *p.DeletedAt -} - -// GetDeletedBy returns the DeletedBy field. -func (p *ProjectsV2) GetDeletedBy() *User { - if p == nil { - return nil - } - return p.DeletedBy -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. 
-func (p *ProjectsV2) GetDescription() string { - if p == nil || p.Description == nil { - return "" - } - return *p.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetNumber() int { - if p == nil || p.Number == nil { - return 0 - } - return *p.Number -} - -// GetOwner returns the Owner field. -func (p *ProjectsV2) GetOwner() *User { - if p == nil { - return nil - } - return p.Owner -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetPublic() bool { - if p == nil || p.Public == nil { - return false - } - return *p.Public -} - -// GetShortDescription returns the ShortDescription field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetShortDescription() string { - if p == nil || p.ShortDescription == nil { - return "" - } - return *p.ShortDescription -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetTitle() string { - if p == nil || p.Title == nil { - return "" - } - return *p.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectV2Event) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetInstallation returns the Installation field. -func (p *ProjectV2Event) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *ProjectV2Event) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetProjectsV2 returns the ProjectsV2 field. -func (p *ProjectV2Event) GetProjectsV2() *ProjectsV2 { - if p == nil { - return nil - } - return p.ProjectsV2 -} - -// GetSender returns the Sender field. -func (p *ProjectV2Event) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetArchivedAt returns the ArchivedAt field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetArchivedAt() Timestamp { - if p == nil || p.ArchivedAt == nil { - return Timestamp{} - } - return *p.ArchivedAt -} - -// GetContentNodeID returns the ContentNodeID field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetContentNodeID() string { - if p == nil || p.ContentNodeID == nil { - return "" - } - return *p.ContentNodeID -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetContentType() string { - if p == nil || p.ContentType == nil { - return "" - } - return *p.ContentType -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetCreator returns the Creator field. 
-func (p *ProjectV2Item) GetCreator() *User { - if p == nil { - return nil - } - return p.Creator -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetProjectNodeID returns the ProjectNodeID field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetProjectNodeID() string { - if p == nil || p.ProjectNodeID == nil { - return "" - } - return *p.ProjectNodeID -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetArchivedAt returns the ArchivedAt field. -func (p *ProjectV2ItemChange) GetArchivedAt() *ArchivedAt { - if p == nil { - return nil - } - return p.ArchivedAt -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectV2ItemEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetChanges returns the Changes field. -func (p *ProjectV2ItemEvent) GetChanges() *ProjectV2ItemChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. -func (p *ProjectV2ItemEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *ProjectV2ItemEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetProjectV2Item returns the ProjectV2Item field. -func (p *ProjectV2ItemEvent) GetProjectV2Item() *ProjectV2Item { - if p == nil { - return nil - } - return p.ProjectV2Item -} - -// GetSender returns the Sender field. -func (p *ProjectV2ItemEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetAllowDeletions returns the AllowDeletions field. -func (p *Protection) GetAllowDeletions() *AllowDeletions { - if p == nil { - return nil - } - return p.AllowDeletions -} - -// GetAllowForcePushes returns the AllowForcePushes field. -func (p *Protection) GetAllowForcePushes() *AllowForcePushes { - if p == nil { - return nil - } - return p.AllowForcePushes -} - -// GetAllowForkSyncing returns the AllowForkSyncing field. -func (p *Protection) GetAllowForkSyncing() *AllowForkSyncing { - if p == nil { - return nil - } - return p.AllowForkSyncing -} - -// GetBlockCreations returns the BlockCreations field. -func (p *Protection) GetBlockCreations() *BlockCreations { - if p == nil { - return nil - } - return p.BlockCreations -} - -// GetEnforceAdmins returns the EnforceAdmins field. -func (p *Protection) GetEnforceAdmins() *AdminEnforcement { - if p == nil { - return nil - } - return p.EnforceAdmins -} - -// GetLockBranch returns the LockBranch field. -func (p *Protection) GetLockBranch() *LockBranch { - if p == nil { - return nil - } - return p.LockBranch -} - -// GetRequiredConversationResolution returns the RequiredConversationResolution field. -func (p *Protection) GetRequiredConversationResolution() *RequiredConversationResolution { - if p == nil { - return nil - } - return p.RequiredConversationResolution -} - -// GetRequiredPullRequestReviews returns the RequiredPullRequestReviews field. 
-func (p *Protection) GetRequiredPullRequestReviews() *PullRequestReviewsEnforcement { - if p == nil { - return nil - } - return p.RequiredPullRequestReviews -} - -// GetRequiredSignatures returns the RequiredSignatures field. -func (p *Protection) GetRequiredSignatures() *SignaturesProtectedBranch { - if p == nil { - return nil - } - return p.RequiredSignatures -} - -// GetRequiredStatusChecks returns the RequiredStatusChecks field. -func (p *Protection) GetRequiredStatusChecks() *RequiredStatusChecks { - if p == nil { - return nil - } - return p.RequiredStatusChecks -} - -// GetRequireLinearHistory returns the RequireLinearHistory field. -func (p *Protection) GetRequireLinearHistory() *RequireLinearHistory { - if p == nil { - return nil - } - return p.RequireLinearHistory -} - -// GetRestrictions returns the Restrictions field. -func (p *Protection) GetRestrictions() *BranchRestrictions { - if p == nil { - return nil - } - return p.Restrictions -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *Protection) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetAdminEnforced returns the AdminEnforced field. -func (p *ProtectionChanges) GetAdminEnforced() *AdminEnforcedChanges { - if p == nil { - return nil - } - return p.AdminEnforced -} - -// GetAllowDeletionsEnforcementLevel returns the AllowDeletionsEnforcementLevel field. -func (p *ProtectionChanges) GetAllowDeletionsEnforcementLevel() *AllowDeletionsEnforcementLevelChanges { - if p == nil { - return nil - } - return p.AllowDeletionsEnforcementLevel -} - -// GetAuthorizedActorNames returns the AuthorizedActorNames field. -func (p *ProtectionChanges) GetAuthorizedActorNames() *AuthorizedActorNames { - if p == nil { - return nil - } - return p.AuthorizedActorNames -} - -// GetAuthorizedActorsOnly returns the AuthorizedActorsOnly field. -func (p *ProtectionChanges) GetAuthorizedActorsOnly() *AuthorizedActorsOnly { - if p == nil { - return nil - } - return p.AuthorizedActorsOnly -} - -// GetAuthorizedDismissalActorsOnly returns the AuthorizedDismissalActorsOnly field. -func (p *ProtectionChanges) GetAuthorizedDismissalActorsOnly() *AuthorizedDismissalActorsOnlyChanges { - if p == nil { - return nil - } - return p.AuthorizedDismissalActorsOnly -} - -// GetCreateProtected returns the CreateProtected field. -func (p *ProtectionChanges) GetCreateProtected() *CreateProtectedChanges { - if p == nil { - return nil - } - return p.CreateProtected -} - -// GetDismissStaleReviewsOnPush returns the DismissStaleReviewsOnPush field. -func (p *ProtectionChanges) GetDismissStaleReviewsOnPush() *DismissStaleReviewsOnPushChanges { - if p == nil { - return nil - } - return p.DismissStaleReviewsOnPush -} - -// GetLinearHistoryRequirementEnforcementLevel returns the LinearHistoryRequirementEnforcementLevel field. -func (p *ProtectionChanges) GetLinearHistoryRequirementEnforcementLevel() *LinearHistoryRequirementEnforcementLevelChanges { - if p == nil { - return nil - } - return p.LinearHistoryRequirementEnforcementLevel -} - -// GetPullRequestReviewsEnforcementLevel returns the PullRequestReviewsEnforcementLevel field. -func (p *ProtectionChanges) GetPullRequestReviewsEnforcementLevel() *PullRequestReviewsEnforcementLevelChanges { - if p == nil { - return nil - } - return p.PullRequestReviewsEnforcementLevel -} - -// GetRequireCodeOwnerReview returns the RequireCodeOwnerReview field. 
-func (p *ProtectionChanges) GetRequireCodeOwnerReview() *RequireCodeOwnerReviewChanges { - if p == nil { - return nil - } - return p.RequireCodeOwnerReview -} - -// GetRequiredConversationResolutionLevel returns the RequiredConversationResolutionLevel field. -func (p *ProtectionChanges) GetRequiredConversationResolutionLevel() *RequiredConversationResolutionLevelChanges { - if p == nil { - return nil - } - return p.RequiredConversationResolutionLevel -} - -// GetRequiredDeploymentsEnforcementLevel returns the RequiredDeploymentsEnforcementLevel field. -func (p *ProtectionChanges) GetRequiredDeploymentsEnforcementLevel() *RequiredDeploymentsEnforcementLevelChanges { - if p == nil { - return nil - } - return p.RequiredDeploymentsEnforcementLevel -} - -// GetRequiredStatusChecks returns the RequiredStatusChecks field. -func (p *ProtectionChanges) GetRequiredStatusChecks() *RequiredStatusChecksChanges { - if p == nil { - return nil - } - return p.RequiredStatusChecks -} - -// GetRequiredStatusChecksEnforcementLevel returns the RequiredStatusChecksEnforcementLevel field. -func (p *ProtectionChanges) GetRequiredStatusChecksEnforcementLevel() *RequiredStatusChecksEnforcementLevelChanges { - if p == nil { - return nil - } - return p.RequiredStatusChecksEnforcementLevel -} - -// GetSignatureRequirementEnforcementLevel returns the SignatureRequirementEnforcementLevel field. -func (p *ProtectionChanges) GetSignatureRequirementEnforcementLevel() *SignatureRequirementEnforcementLevelChanges { - if p == nil { - return nil - } - return p.SignatureRequirementEnforcementLevel -} - -// GetAllowDeletions returns the AllowDeletions field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetAllowDeletions() bool { - if p == nil || p.AllowDeletions == nil { - return false - } - return *p.AllowDeletions -} - -// GetAllowForcePushes returns the AllowForcePushes field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetAllowForcePushes() bool { - if p == nil || p.AllowForcePushes == nil { - return false - } - return *p.AllowForcePushes -} - -// GetAllowForkSyncing returns the AllowForkSyncing field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetAllowForkSyncing() bool { - if p == nil || p.AllowForkSyncing == nil { - return false - } - return *p.AllowForkSyncing -} - -// GetBlockCreations returns the BlockCreations field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetBlockCreations() bool { - if p == nil || p.BlockCreations == nil { - return false - } - return *p.BlockCreations -} - -// GetLockBranch returns the LockBranch field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetLockBranch() bool { - if p == nil || p.LockBranch == nil { - return false - } - return *p.LockBranch -} - -// GetRequiredConversationResolution returns the RequiredConversationResolution field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetRequiredConversationResolution() bool { - if p == nil || p.RequiredConversationResolution == nil { - return false - } - return *p.RequiredConversationResolution -} - -// GetRequiredPullRequestReviews returns the RequiredPullRequestReviews field. -func (p *ProtectionRequest) GetRequiredPullRequestReviews() *PullRequestReviewsEnforcementRequest { - if p == nil { - return nil - } - return p.RequiredPullRequestReviews -} - -// GetRequiredStatusChecks returns the RequiredStatusChecks field. 
-func (p *ProtectionRequest) GetRequiredStatusChecks() *RequiredStatusChecks { - if p == nil { - return nil - } - return p.RequiredStatusChecks -} - -// GetRequireLinearHistory returns the RequireLinearHistory field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetRequireLinearHistory() bool { - if p == nil || p.RequireLinearHistory == nil { - return false - } - return *p.RequireLinearHistory -} - -// GetRestrictions returns the Restrictions field. -func (p *ProtectionRequest) GetRestrictions() *BranchRestrictionsRequest { - if p == nil { - return nil - } - return p.Restrictions -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProtectionRule) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *ProtectionRule) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetPreventSelfReview returns the PreventSelfReview field if it's non-nil, zero value otherwise. -func (p *ProtectionRule) GetPreventSelfReview() bool { - if p == nil || p.PreventSelfReview == nil { - return false - } - return *p.PreventSelfReview -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (p *ProtectionRule) GetType() string { - if p == nil || p.Type == nil { - return "" - } - return *p.Type -} - -// GetWaitTimer returns the WaitTimer field if it's non-nil, zero value otherwise. -func (p *ProtectionRule) GetWaitTimer() int { - if p == nil || p.WaitTimer == nil { - return 0 - } - return *p.WaitTimer -} - -// GetInstallation returns the Installation field. -func (p *PublicEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PublicEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetRepo returns the Repo field. -func (p *PublicEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PublicEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (p *PublicKey) GetKey() string { - if p == nil || p.Key == nil { - return "" - } - return *p.Key -} - -// GetKeyID returns the KeyID field if it's non-nil, zero value otherwise. -func (p *PublicKey) GetKeyID() string { - if p == nil || p.KeyID == nil { - return "" - } - return *p.KeyID -} - -// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetActiveLockReason() string { - if p == nil || p.ActiveLockReason == nil { - return "" - } - return *p.ActiveLockReason -} - -// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetAdditions() int { - if p == nil || p.Additions == nil { - return 0 - } - return *p.Additions -} - -// GetAssignee returns the Assignee field. -func (p *PullRequest) GetAssignee() *User { - if p == nil { - return nil - } - return p.Assignee -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetAuthorAssociation() string { - if p == nil || p.AuthorAssociation == nil { - return "" - } - return *p.AuthorAssociation -} - -// GetAutoMerge returns the AutoMerge field. 
-func (p *PullRequest) GetAutoMerge() *PullRequestAutoMerge { - if p == nil { - return nil - } - return p.AutoMerge -} - -// GetBase returns the Base field. -func (p *PullRequest) GetBase() *PullRequestBranch { - if p == nil { - return nil - } - return p.Base -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetChangedFiles returns the ChangedFiles field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetChangedFiles() int { - if p == nil || p.ChangedFiles == nil { - return 0 - } - return *p.ChangedFiles -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetClosedAt() Timestamp { - if p == nil || p.ClosedAt == nil { - return Timestamp{} - } - return *p.ClosedAt -} - -// GetComments returns the Comments field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetComments() int { - if p == nil || p.Comments == nil { - return 0 - } - return *p.Comments -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetCommentsURL() string { - if p == nil || p.CommentsURL == nil { - return "" - } - return *p.CommentsURL -} - -// GetCommits returns the Commits field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetCommits() int { - if p == nil || p.Commits == nil { - return 0 - } - return *p.Commits -} - -// GetCommitsURL returns the CommitsURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetCommitsURL() string { - if p == nil || p.CommitsURL == nil { - return "" - } - return *p.CommitsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetDeletions() int { - if p == nil || p.Deletions == nil { - return 0 - } - return *p.Deletions -} - -// GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetDiffURL() string { - if p == nil || p.DiffURL == nil { - return "" - } - return *p.DiffURL -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetDraft() bool { - if p == nil || p.Draft == nil { - return false - } - return *p.Draft -} - -// GetHead returns the Head field. -func (p *PullRequest) GetHead() *PullRequestBranch { - if p == nil { - return nil - } - return p.Head -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetIssueURL returns the IssueURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetIssueURL() string { - if p == nil || p.IssueURL == nil { - return "" - } - return *p.IssueURL -} - -// GetLinks returns the Links field. -func (p *PullRequest) GetLinks() *PRLinks { - if p == nil { - return nil - } - return p.Links -} - -// GetLocked returns the Locked field if it's non-nil, zero value otherwise. 
-func (p *PullRequest) GetLocked() bool { - if p == nil || p.Locked == nil { - return false - } - return *p.Locked -} - -// GetMaintainerCanModify returns the MaintainerCanModify field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetMaintainerCanModify() bool { - if p == nil || p.MaintainerCanModify == nil { - return false - } - return *p.MaintainerCanModify -} - -// GetMergeable returns the Mergeable field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetMergeable() bool { - if p == nil || p.Mergeable == nil { - return false - } - return *p.Mergeable -} - -// GetMergeableState returns the MergeableState field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetMergeableState() string { - if p == nil || p.MergeableState == nil { - return "" - } - return *p.MergeableState -} - -// GetMergeCommitSHA returns the MergeCommitSHA field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetMergeCommitSHA() string { - if p == nil || p.MergeCommitSHA == nil { - return "" - } - return *p.MergeCommitSHA -} - -// GetMerged returns the Merged field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetMerged() bool { - if p == nil || p.Merged == nil { - return false - } - return *p.Merged -} - -// GetMergedAt returns the MergedAt field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetMergedAt() Timestamp { - if p == nil || p.MergedAt == nil { - return Timestamp{} - } - return *p.MergedAt -} - -// GetMergedBy returns the MergedBy field. -func (p *PullRequest) GetMergedBy() *User { - if p == nil { - return nil - } - return p.MergedBy -} - -// GetMilestone returns the Milestone field. -func (p *PullRequest) GetMilestone() *Milestone { - if p == nil { - return nil - } - return p.Milestone -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetNumber() int { - if p == nil || p.Number == nil { - return 0 - } - return *p.Number -} - -// GetPatchURL returns the PatchURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetPatchURL() string { - if p == nil || p.PatchURL == nil { - return "" - } - return *p.PatchURL -} - -// GetRebaseable returns the Rebaseable field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetRebaseable() bool { - if p == nil || p.Rebaseable == nil { - return false - } - return *p.Rebaseable -} - -// GetReviewComments returns the ReviewComments field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetReviewComments() int { - if p == nil || p.ReviewComments == nil { - return 0 - } - return *p.ReviewComments -} - -// GetReviewCommentsURL returns the ReviewCommentsURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetReviewCommentsURL() string { - if p == nil || p.ReviewCommentsURL == nil { - return "" - } - return *p.ReviewCommentsURL -} - -// GetReviewCommentURL returns the ReviewCommentURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetReviewCommentURL() string { - if p == nil || p.ReviewCommentURL == nil { - return "" - } - return *p.ReviewCommentURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. 
-func (p *PullRequest) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetStatusesURL() string { - if p == nil || p.StatusesURL == nil { - return "" - } - return *p.StatusesURL -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetTitle() string { - if p == nil || p.Title == nil { - return "" - } - return *p.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetUser returns the User field. -func (p *PullRequest) GetUser() *User { - if p == nil { - return nil - } - return p.User -} - -// GetCommitMessage returns the CommitMessage field if it's non-nil, zero value otherwise. -func (p *PullRequestAutoMerge) GetCommitMessage() string { - if p == nil || p.CommitMessage == nil { - return "" - } - return *p.CommitMessage -} - -// GetCommitTitle returns the CommitTitle field if it's non-nil, zero value otherwise. -func (p *PullRequestAutoMerge) GetCommitTitle() string { - if p == nil || p.CommitTitle == nil { - return "" - } - return *p.CommitTitle -} - -// GetEnabledBy returns the EnabledBy field. -func (p *PullRequestAutoMerge) GetEnabledBy() *User { - if p == nil { - return nil - } - return p.EnabledBy -} - -// GetMergeMethod returns the MergeMethod field if it's non-nil, zero value otherwise. -func (p *PullRequestAutoMerge) GetMergeMethod() string { - if p == nil || p.MergeMethod == nil { - return "" - } - return *p.MergeMethod -} - -// GetLabel returns the Label field if it's non-nil, zero value otherwise. -func (p *PullRequestBranch) GetLabel() string { - if p == nil || p.Label == nil { - return "" - } - return *p.Label -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (p *PullRequestBranch) GetRef() string { - if p == nil || p.Ref == nil { - return "" - } - return *p.Ref -} - -// GetRepo returns the Repo field. -func (p *PullRequestBranch) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (p *PullRequestBranch) GetSHA() string { - if p == nil || p.SHA == nil { - return "" - } - return *p.SHA -} - -// GetUser returns the User field. -func (p *PullRequestBranch) GetUser() *User { - if p == nil { - return nil - } - return p.User -} - -// GetExpectedHeadSHA returns the ExpectedHeadSHA field if it's non-nil, zero value otherwise. -func (p *PullRequestBranchUpdateOptions) GetExpectedHeadSHA() string { - if p == nil || p.ExpectedHeadSHA == nil { - return "" - } - return *p.ExpectedHeadSHA -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (p *PullRequestBranchUpdateResponse) GetMessage() string { - if p == nil || p.Message == nil { - return "" - } - return *p.Message -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (p *PullRequestBranchUpdateResponse) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetAuthorAssociation() string { - if p == nil || p.AuthorAssociation == nil { - return "" - } - return *p.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetCommitID() string { - if p == nil || p.CommitID == nil { - return "" - } - return *p.CommitID -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDiffHunk returns the DiffHunk field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetDiffHunk() string { - if p == nil || p.DiffHunk == nil { - return "" - } - return *p.DiffHunk -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetInReplyTo returns the InReplyTo field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetInReplyTo() int64 { - if p == nil || p.InReplyTo == nil { - return 0 - } - return *p.InReplyTo -} - -// GetLine returns the Line field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetLine() int { - if p == nil || p.Line == nil { - return 0 - } - return *p.Line -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetOriginalCommitID returns the OriginalCommitID field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetOriginalCommitID() string { - if p == nil || p.OriginalCommitID == nil { - return "" - } - return *p.OriginalCommitID -} - -// GetOriginalLine returns the OriginalLine field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetOriginalLine() int { - if p == nil || p.OriginalLine == nil { - return 0 - } - return *p.OriginalLine -} - -// GetOriginalPosition returns the OriginalPosition field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetOriginalPosition() int { - if p == nil || p.OriginalPosition == nil { - return 0 - } - return *p.OriginalPosition -} - -// GetOriginalStartLine returns the OriginalStartLine field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetOriginalStartLine() int { - if p == nil || p.OriginalStartLine == nil { - return 0 - } - return *p.OriginalStartLine -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetPath() string { - if p == nil || p.Path == nil { - return "" - } - return *p.Path -} - -// GetPosition returns the Position field if it's non-nil, zero value otherwise. 
-func (p *PullRequestComment) GetPosition() int { - if p == nil || p.Position == nil { - return 0 - } - return *p.Position -} - -// GetPullRequestReviewID returns the PullRequestReviewID field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetPullRequestReviewID() int64 { - if p == nil || p.PullRequestReviewID == nil { - return 0 - } - return *p.PullRequestReviewID -} - -// GetPullRequestURL returns the PullRequestURL field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetPullRequestURL() string { - if p == nil || p.PullRequestURL == nil { - return "" - } - return *p.PullRequestURL -} - -// GetReactions returns the Reactions field. -func (p *PullRequestComment) GetReactions() *Reactions { - if p == nil { - return nil - } - return p.Reactions -} - -// GetSide returns the Side field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetSide() string { - if p == nil || p.Side == nil { - return "" - } - return *p.Side -} - -// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetStartLine() int { - if p == nil || p.StartLine == nil { - return 0 - } - return *p.StartLine -} - -// GetStartSide returns the StartSide field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetStartSide() string { - if p == nil || p.StartSide == nil { - return "" - } - return *p.StartSide -} - -// GetSubjectType returns the SubjectType field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetSubjectType() string { - if p == nil || p.SubjectType == nil { - return "" - } - return *p.SubjectType -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetUser returns the User field. -func (p *PullRequestComment) GetUser() *User { - if p == nil { - return nil - } - return p.User -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PullRequestEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetAfter returns the After field if it's non-nil, zero value otherwise. -func (p *PullRequestEvent) GetAfter() string { - if p == nil || p.After == nil { - return "" - } - return *p.After -} - -// GetAssignee returns the Assignee field. -func (p *PullRequestEvent) GetAssignee() *User { - if p == nil { - return nil - } - return p.Assignee -} - -// GetBefore returns the Before field if it's non-nil, zero value otherwise. -func (p *PullRequestEvent) GetBefore() string { - if p == nil || p.Before == nil { - return "" - } - return *p.Before -} - -// GetChanges returns the Changes field. -func (p *PullRequestEvent) GetChanges() *EditChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. -func (p *PullRequestEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetLabel returns the Label field. -func (p *PullRequestEvent) GetLabel() *Label { - if p == nil { - return nil - } - return p.Label -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. 
-func (p *PullRequestEvent) GetNumber() int { - if p == nil || p.Number == nil { - return 0 - } - return *p.Number -} - -// GetOrganization returns the Organization field. -func (p *PullRequestEvent) GetOrganization() *Organization { - if p == nil { - return nil - } - return p.Organization -} - -// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. -func (p *PullRequestEvent) GetPerformedViaGithubApp() *App { - if p == nil { - return nil - } - return p.PerformedViaGithubApp -} - -// GetPullRequest returns the PullRequest field. -func (p *PullRequestEvent) GetPullRequest() *PullRequest { - if p == nil { - return nil - } - return p.PullRequest -} - -// GetRepo returns the Repo field. -func (p *PullRequestEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetRequestedReviewer returns the RequestedReviewer field. -func (p *PullRequestEvent) GetRequestedReviewer() *User { - if p == nil { - return nil - } - return p.RequestedReviewer -} - -// GetRequestedTeam returns the RequestedTeam field. -func (p *PullRequestEvent) GetRequestedTeam() *Team { - if p == nil { - return nil - } - return p.RequestedTeam -} - -// GetSender returns the Sender field. -func (p *PullRequestEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. -func (p *PullRequestLinks) GetDiffURL() string { - if p == nil || p.DiffURL == nil { - return "" - } - return *p.DiffURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PullRequestLinks) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetPatchURL returns the PatchURL field if it's non-nil, zero value otherwise. -func (p *PullRequestLinks) GetPatchURL() string { - if p == nil || p.PatchURL == nil { - return "" - } - return *p.PatchURL -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PullRequestLinks) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetMerged returns the Merged field if it's non-nil, zero value otherwise. -func (p *PullRequestMergeResult) GetMerged() bool { - if p == nil || p.Merged == nil { - return false - } - return *p.Merged -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (p *PullRequestMergeResult) GetMessage() string { - if p == nil || p.Message == nil { - return "" - } - return *p.Message -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (p *PullRequestMergeResult) GetSHA() string { - if p == nil || p.SHA == nil { - return "" - } - return *p.SHA -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetAuthorAssociation() string { - if p == nil || p.AuthorAssociation == nil { - return "" - } - return *p.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetCommitID() string { - if p == nil || p.CommitID == nil { - return "" - } - return *p.CommitID -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. 
-func (p *PullRequestReview) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetPullRequestURL returns the PullRequestURL field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetPullRequestURL() string { - if p == nil || p.PullRequestURL == nil { - return "" - } - return *p.PullRequestURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetSubmittedAt returns the SubmittedAt field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetSubmittedAt() Timestamp { - if p == nil || p.SubmittedAt == nil { - return Timestamp{} - } - return *p.SubmittedAt -} - -// GetUser returns the User field. -func (p *PullRequestReview) GetUser() *User { - if p == nil { - return nil - } - return p.User -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewCommentEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetChanges returns the Changes field. -func (p *PullRequestReviewCommentEvent) GetChanges() *EditChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetComment returns the Comment field. -func (p *PullRequestReviewCommentEvent) GetComment() *PullRequestComment { - if p == nil { - return nil - } - return p.Comment -} - -// GetInstallation returns the Installation field. -func (p *PullRequestReviewCommentEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PullRequestReviewCommentEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetPullRequest returns the PullRequest field. -func (p *PullRequestReviewCommentEvent) GetPullRequest() *PullRequest { - if p == nil { - return nil - } - return p.PullRequest -} - -// GetRepo returns the Repo field. -func (p *PullRequestReviewCommentEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PullRequestReviewCommentEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewDismissalRequest) GetMessage() string { - if p == nil || p.Message == nil { - return "" - } - return *p.Message -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetInstallation returns the Installation field. -func (p *PullRequestReviewEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrganization returns the Organization field. 
-func (p *PullRequestReviewEvent) GetOrganization() *Organization { - if p == nil { - return nil - } - return p.Organization -} - -// GetPullRequest returns the PullRequest field. -func (p *PullRequestReviewEvent) GetPullRequest() *PullRequest { - if p == nil { - return nil - } - return p.PullRequest -} - -// GetRepo returns the Repo field. -func (p *PullRequestReviewEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetReview returns the Review field. -func (p *PullRequestReviewEvent) GetReview() *PullRequestReview { - if p == nil { - return nil - } - return p.Review -} - -// GetSender returns the Sender field. -func (p *PullRequestReviewEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewRequest) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewRequest) GetCommitID() string { - if p == nil || p.CommitID == nil { - return "" - } - return *p.CommitID -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewRequest) GetEvent() string { - if p == nil || p.Event == nil { - return "" - } - return *p.Event -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewRequest) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetBypassPullRequestAllowances returns the BypassPullRequestAllowances field. -func (p *PullRequestReviewsEnforcement) GetBypassPullRequestAllowances() *BypassPullRequestAllowances { - if p == nil { - return nil - } - return p.BypassPullRequestAllowances -} - -// GetDismissalRestrictions returns the DismissalRestrictions field. -func (p *PullRequestReviewsEnforcement) GetDismissalRestrictions() *DismissalRestrictions { - if p == nil { - return nil - } - return p.DismissalRestrictions -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewsEnforcementLevelChanges) GetFrom() string { - if p == nil || p.From == nil { - return "" - } - return *p.From -} - -// GetBypassPullRequestAllowancesRequest returns the BypassPullRequestAllowancesRequest field. -func (p *PullRequestReviewsEnforcementRequest) GetBypassPullRequestAllowancesRequest() *BypassPullRequestAllowancesRequest { - if p == nil { - return nil - } - return p.BypassPullRequestAllowancesRequest -} - -// GetDismissalRestrictionsRequest returns the DismissalRestrictionsRequest field. -func (p *PullRequestReviewsEnforcementRequest) GetDismissalRestrictionsRequest() *DismissalRestrictionsRequest { - if p == nil { - return nil - } - return p.DismissalRestrictionsRequest -} - -// GetRequireLastPushApproval returns the RequireLastPushApproval field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewsEnforcementRequest) GetRequireLastPushApproval() bool { - if p == nil || p.RequireLastPushApproval == nil { - return false - } - return *p.RequireLastPushApproval -} - -// GetBypassPullRequestAllowancesRequest returns the BypassPullRequestAllowancesRequest field. 
-func (p *PullRequestReviewsEnforcementUpdate) GetBypassPullRequestAllowancesRequest() *BypassPullRequestAllowancesRequest { - if p == nil { - return nil - } - return p.BypassPullRequestAllowancesRequest -} - -// GetDismissalRestrictionsRequest returns the DismissalRestrictionsRequest field. -func (p *PullRequestReviewsEnforcementUpdate) GetDismissalRestrictionsRequest() *DismissalRestrictionsRequest { - if p == nil { - return nil - } - return p.DismissalRestrictionsRequest -} - -// GetDismissStaleReviews returns the DismissStaleReviews field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewsEnforcementUpdate) GetDismissStaleReviews() bool { - if p == nil || p.DismissStaleReviews == nil { - return false - } - return *p.DismissStaleReviews -} - -// GetRequireCodeOwnerReviews returns the RequireCodeOwnerReviews field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewsEnforcementUpdate) GetRequireCodeOwnerReviews() bool { - if p == nil || p.RequireCodeOwnerReviews == nil { - return false - } - return *p.RequireCodeOwnerReviews -} - -// GetRequireLastPushApproval returns the RequireLastPushApproval field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewsEnforcementUpdate) GetRequireLastPushApproval() bool { - if p == nil || p.RequireLastPushApproval == nil { - return false - } - return *p.RequireLastPushApproval -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewThreadEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetInstallation returns the Installation field. -func (p *PullRequestReviewThreadEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PullRequestReviewThreadEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetPullRequest returns the PullRequest field. -func (p *PullRequestReviewThreadEvent) GetPullRequest() *PullRequest { - if p == nil { - return nil - } - return p.PullRequest -} - -// GetRepo returns the Repo field. -func (p *PullRequestReviewThreadEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PullRequestReviewThreadEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetThread returns the Thread field. -func (p *PullRequestReviewThreadEvent) GetThread() *PullRequestThread { - if p == nil { - return nil - } - return p.Thread -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PullRequestTargetEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetAfter returns the After field if it's non-nil, zero value otherwise. -func (p *PullRequestTargetEvent) GetAfter() string { - if p == nil || p.After == nil { - return "" - } - return *p.After -} - -// GetAssignee returns the Assignee field. -func (p *PullRequestTargetEvent) GetAssignee() *User { - if p == nil { - return nil - } - return p.Assignee -} - -// GetBefore returns the Before field if it's non-nil, zero value otherwise. -func (p *PullRequestTargetEvent) GetBefore() string { - if p == nil || p.Before == nil { - return "" - } - return *p.Before -} - -// GetChanges returns the Changes field. 
-func (p *PullRequestTargetEvent) GetChanges() *EditChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. -func (p *PullRequestTargetEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetLabel returns the Label field. -func (p *PullRequestTargetEvent) GetLabel() *Label { - if p == nil { - return nil - } - return p.Label -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (p *PullRequestTargetEvent) GetNumber() int { - if p == nil || p.Number == nil { - return 0 - } - return *p.Number -} - -// GetOrganization returns the Organization field. -func (p *PullRequestTargetEvent) GetOrganization() *Organization { - if p == nil { - return nil - } - return p.Organization -} - -// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. -func (p *PullRequestTargetEvent) GetPerformedViaGithubApp() *App { - if p == nil { - return nil - } - return p.PerformedViaGithubApp -} - -// GetPullRequest returns the PullRequest field. -func (p *PullRequestTargetEvent) GetPullRequest() *PullRequest { - if p == nil { - return nil - } - return p.PullRequest -} - -// GetRepo returns the Repo field. -func (p *PullRequestTargetEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetRequestedReviewer returns the RequestedReviewer field. -func (p *PullRequestTargetEvent) GetRequestedReviewer() *User { - if p == nil { - return nil - } - return p.RequestedReviewer -} - -// GetRequestedTeam returns the RequestedTeam field. -func (p *PullRequestTargetEvent) GetRequestedTeam() *Team { - if p == nil { - return nil - } - return p.RequestedTeam -} - -// GetSender returns the Sender field. -func (p *PullRequestTargetEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PullRequestThread) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PullRequestThread) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetMergablePulls returns the MergablePulls field if it's non-nil, zero value otherwise. -func (p *PullStats) GetMergablePulls() int { - if p == nil || p.MergablePulls == nil { - return 0 - } - return *p.MergablePulls -} - -// GetMergedPulls returns the MergedPulls field if it's non-nil, zero value otherwise. -func (p *PullStats) GetMergedPulls() int { - if p == nil || p.MergedPulls == nil { - return 0 - } - return *p.MergedPulls -} - -// GetTotalPulls returns the TotalPulls field if it's non-nil, zero value otherwise. -func (p *PullStats) GetTotalPulls() int { - if p == nil || p.TotalPulls == nil { - return 0 - } - return *p.TotalPulls -} - -// GetUnmergablePulls returns the UnmergablePulls field if it's non-nil, zero value otherwise. -func (p *PullStats) GetUnmergablePulls() int { - if p == nil || p.UnmergablePulls == nil { - return 0 - } - return *p.UnmergablePulls -} - -// GetCommits returns the Commits field if it's non-nil, zero value otherwise. -func (p *PunchCard) GetCommits() int { - if p == nil || p.Commits == nil { - return 0 - } - return *p.Commits -} - -// GetDay returns the Day field if it's non-nil, zero value otherwise. 
-func (p *PunchCard) GetDay() int { - if p == nil || p.Day == nil { - return 0 - } - return *p.Day -} - -// GetHour returns the Hour field if it's non-nil, zero value otherwise. -func (p *PunchCard) GetHour() int { - if p == nil || p.Hour == nil { - return 0 - } - return *p.Hour -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetAfter returns the After field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetAfter() string { - if p == nil || p.After == nil { - return "" - } - return *p.After -} - -// GetBaseRef returns the BaseRef field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetBaseRef() string { - if p == nil || p.BaseRef == nil { - return "" - } - return *p.BaseRef -} - -// GetBefore returns the Before field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetBefore() string { - if p == nil || p.Before == nil { - return "" - } - return *p.Before -} - -// GetCommits returns the Commits slice if it's non-nil, nil otherwise. -func (p *PushEvent) GetCommits() []*HeadCommit { - if p == nil || p.Commits == nil { - return nil - } - return p.Commits -} - -// GetCompare returns the Compare field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetCompare() string { - if p == nil || p.Compare == nil { - return "" - } - return *p.Compare -} - -// GetCreated returns the Created field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetCreated() bool { - if p == nil || p.Created == nil { - return false - } - return *p.Created -} - -// GetDeleted returns the Deleted field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetDeleted() bool { - if p == nil || p.Deleted == nil { - return false - } - return *p.Deleted -} - -// GetDistinctSize returns the DistinctSize field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetDistinctSize() int { - if p == nil || p.DistinctSize == nil { - return 0 - } - return *p.DistinctSize -} - -// GetForced returns the Forced field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetForced() bool { - if p == nil || p.Forced == nil { - return false - } - return *p.Forced -} - -// GetHead returns the Head field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetHead() string { - if p == nil || p.Head == nil { - return "" - } - return *p.Head -} - -// GetHeadCommit returns the HeadCommit field. -func (p *PushEvent) GetHeadCommit() *HeadCommit { - if p == nil { - return nil - } - return p.HeadCommit -} - -// GetInstallation returns the Installation field. -func (p *PushEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrganization returns the Organization field. -func (p *PushEvent) GetOrganization() *Organization { - if p == nil { - return nil - } - return p.Organization -} - -// GetPusher returns the Pusher field. -func (p *PushEvent) GetPusher() *CommitAuthor { - if p == nil { - return nil - } - return p.Pusher -} - -// GetPushID returns the PushID field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetPushID() int64 { - if p == nil || p.PushID == nil { - return 0 - } - return *p.PushID -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetRef() string { - if p == nil || p.Ref == nil { - return "" - } - return *p.Ref -} - -// GetRepo returns the Repo field. 
-func (p *PushEvent) GetRepo() *PushEventRepository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PushEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetSize() int { - if p == nil || p.Size == nil { - return 0 - } - return *p.Size -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (p *PushEventRepoOwner) GetEmail() string { - if p == nil || p.Email == nil { - return "" - } - return *p.Email -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PushEventRepoOwner) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetArchived returns the Archived field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetArchived() bool { - if p == nil || p.Archived == nil { - return false - } - return *p.Archived -} - -// GetArchiveURL returns the ArchiveURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetArchiveURL() string { - if p == nil || p.ArchiveURL == nil { - return "" - } - return *p.ArchiveURL -} - -// GetCloneURL returns the CloneURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetCloneURL() string { - if p == nil || p.CloneURL == nil { - return "" - } - return *p.CloneURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDefaultBranch returns the DefaultBranch field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetDefaultBranch() string { - if p == nil || p.DefaultBranch == nil { - return "" - } - return *p.DefaultBranch -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetDescription() string { - if p == nil || p.Description == nil { - return "" - } - return *p.Description -} - -// GetDisabled returns the Disabled field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetDisabled() bool { - if p == nil || p.Disabled == nil { - return false - } - return *p.Disabled -} - -// GetFork returns the Fork field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetFork() bool { - if p == nil || p.Fork == nil { - return false - } - return *p.Fork -} - -// GetForksCount returns the ForksCount field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetForksCount() int { - if p == nil || p.ForksCount == nil { - return 0 - } - return *p.ForksCount -} - -// GetFullName returns the FullName field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetFullName() string { - if p == nil || p.FullName == nil { - return "" - } - return *p.FullName -} - -// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetGitURL() string { - if p == nil || p.GitURL == nil { - return "" - } - return *p.GitURL -} - -// GetHasDownloads returns the HasDownloads field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetHasDownloads() bool { - if p == nil || p.HasDownloads == nil { - return false - } - return *p.HasDownloads -} - -// GetHasIssues returns the HasIssues field if it's non-nil, zero value otherwise. 
-func (p *PushEventRepository) GetHasIssues() bool { - if p == nil || p.HasIssues == nil { - return false - } - return *p.HasIssues -} - -// GetHasPages returns the HasPages field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetHasPages() bool { - if p == nil || p.HasPages == nil { - return false - } - return *p.HasPages -} - -// GetHasWiki returns the HasWiki field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetHasWiki() bool { - if p == nil || p.HasWiki == nil { - return false - } - return *p.HasWiki -} - -// GetHomepage returns the Homepage field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetHomepage() string { - if p == nil || p.Homepage == nil { - return "" - } - return *p.Homepage -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetLanguage returns the Language field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetLanguage() string { - if p == nil || p.Language == nil { - return "" - } - return *p.Language -} - -// GetMasterBranch returns the MasterBranch field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetMasterBranch() string { - if p == nil || p.MasterBranch == nil { - return "" - } - return *p.MasterBranch -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetOpenIssuesCount returns the OpenIssuesCount field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetOpenIssuesCount() int { - if p == nil || p.OpenIssuesCount == nil { - return 0 - } - return *p.OpenIssuesCount -} - -// GetOrganization returns the Organization field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetOrganization() string { - if p == nil || p.Organization == nil { - return "" - } - return *p.Organization -} - -// GetOwner returns the Owner field. -func (p *PushEventRepository) GetOwner() *User { - if p == nil { - return nil - } - return p.Owner -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetPrivate() bool { - if p == nil || p.Private == nil { - return false - } - return *p.Private -} - -// GetPullsURL returns the PullsURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetPullsURL() string { - if p == nil || p.PullsURL == nil { - return "" - } - return *p.PullsURL -} - -// GetPushedAt returns the PushedAt field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetPushedAt() Timestamp { - if p == nil || p.PushedAt == nil { - return Timestamp{} - } - return *p.PushedAt -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. 
-func (p *PushEventRepository) GetSize() int { - if p == nil || p.Size == nil { - return 0 - } - return *p.Size -} - -// GetSSHURL returns the SSHURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetSSHURL() string { - if p == nil || p.SSHURL == nil { - return "" - } - return *p.SSHURL -} - -// GetStargazersCount returns the StargazersCount field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetStargazersCount() int { - if p == nil || p.StargazersCount == nil { - return 0 - } - return *p.StargazersCount -} - -// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetStatusesURL() string { - if p == nil || p.StatusesURL == nil { - return "" - } - return *p.StatusesURL -} - -// GetSVNURL returns the SVNURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetSVNURL() string { - if p == nil || p.SVNURL == nil { - return "" - } - return *p.SVNURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetWatchersCount returns the WatchersCount field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetWatchersCount() int { - if p == nil || p.WatchersCount == nil { - return 0 - } - return *p.WatchersCount -} - -// GetActionsRunnerRegistration returns the ActionsRunnerRegistration field. -func (r *RateLimits) GetActionsRunnerRegistration() *Rate { - if r == nil { - return nil - } - return r.ActionsRunnerRegistration -} - -// GetCodeScanningUpload returns the CodeScanningUpload field. -func (r *RateLimits) GetCodeScanningUpload() *Rate { - if r == nil { - return nil - } - return r.CodeScanningUpload -} - -// GetCore returns the Core field. -func (r *RateLimits) GetCore() *Rate { - if r == nil { - return nil - } - return r.Core -} - -// GetGraphQL returns the GraphQL field. -func (r *RateLimits) GetGraphQL() *Rate { - if r == nil { - return nil - } - return r.GraphQL -} - -// GetIntegrationManifest returns the IntegrationManifest field. -func (r *RateLimits) GetIntegrationManifest() *Rate { - if r == nil { - return nil - } - return r.IntegrationManifest -} - -// GetSCIM returns the SCIM field. -func (r *RateLimits) GetSCIM() *Rate { - if r == nil { - return nil - } - return r.SCIM -} - -// GetSearch returns the Search field. -func (r *RateLimits) GetSearch() *Rate { - if r == nil { - return nil - } - return r.Search -} - -// GetSourceImport returns the SourceImport field. -func (r *RateLimits) GetSourceImport() *Rate { - if r == nil { - return nil - } - return r.SourceImport -} - -// GetContent returns the Content field if it's non-nil, zero value otherwise. -func (r *Reaction) GetContent() string { - if r == nil || r.Content == nil { - return "" - } - return *r.Content -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *Reaction) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *Reaction) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetUser returns the User field. 
-func (r *Reaction) GetUser() *User { - if r == nil { - return nil - } - return r.User -} - -// GetConfused returns the Confused field if it's non-nil, zero value otherwise. -func (r *Reactions) GetConfused() int { - if r == nil || r.Confused == nil { - return 0 - } - return *r.Confused -} - -// GetEyes returns the Eyes field if it's non-nil, zero value otherwise. -func (r *Reactions) GetEyes() int { - if r == nil || r.Eyes == nil { - return 0 - } - return *r.Eyes -} - -// GetHeart returns the Heart field if it's non-nil, zero value otherwise. -func (r *Reactions) GetHeart() int { - if r == nil || r.Heart == nil { - return 0 - } - return *r.Heart -} - -// GetHooray returns the Hooray field if it's non-nil, zero value otherwise. -func (r *Reactions) GetHooray() int { - if r == nil || r.Hooray == nil { - return 0 - } - return *r.Hooray -} - -// GetLaugh returns the Laugh field if it's non-nil, zero value otherwise. -func (r *Reactions) GetLaugh() int { - if r == nil || r.Laugh == nil { - return 0 - } - return *r.Laugh -} - -// GetMinusOne returns the MinusOne field if it's non-nil, zero value otherwise. -func (r *Reactions) GetMinusOne() int { - if r == nil || r.MinusOne == nil { - return 0 - } - return *r.MinusOne -} - -// GetPlusOne returns the PlusOne field if it's non-nil, zero value otherwise. -func (r *Reactions) GetPlusOne() int { - if r == nil || r.PlusOne == nil { - return 0 - } - return *r.PlusOne -} - -// GetRocket returns the Rocket field if it's non-nil, zero value otherwise. -func (r *Reactions) GetRocket() int { - if r == nil || r.Rocket == nil { - return 0 - } - return *r.Rocket -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (r *Reactions) GetTotalCount() int { - if r == nil || r.TotalCount == nil { - return 0 - } - return *r.TotalCount -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *Reactions) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *Reference) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetObject returns the Object field. -func (r *Reference) GetObject() *GitObject { - if r == nil { - return nil - } - return r.Object -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (r *Reference) GetRef() string { - if r == nil || r.Ref == nil { - return "" - } - return *r.Ref -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *Reference) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *ReferencedWorkflow) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (r *ReferencedWorkflow) GetRef() string { - if r == nil || r.Ref == nil { - return "" - } - return *r.Ref -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (r *ReferencedWorkflow) GetSHA() string { - if r == nil || r.SHA == nil { - return "" - } - return *r.SHA -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. 
-func (r *RegistrationToken) GetExpiresAt() Timestamp { - if r == nil || r.ExpiresAt == nil { - return Timestamp{} - } - return *r.ExpiresAt -} - -// GetToken returns the Token field if it's non-nil, zero value otherwise. -func (r *RegistrationToken) GetToken() string { - if r == nil || r.Token == nil { - return "" - } - return *r.Token -} - -// GetBrowserDownloadURL returns the BrowserDownloadURL field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetBrowserDownloadURL() string { - if r == nil || r.BrowserDownloadURL == nil { - return "" - } - return *r.BrowserDownloadURL -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetContentType() string { - if r == nil || r.ContentType == nil { - return "" - } - return *r.ContentType -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetDownloadCount returns the DownloadCount field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetDownloadCount() int { - if r == nil || r.DownloadCount == nil { - return 0 - } - return *r.DownloadCount -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetLabel returns the Label field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetLabel() string { - if r == nil || r.Label == nil { - return "" - } - return *r.Label -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetSize() int { - if r == nil || r.Size == nil { - return 0 - } - return *r.Size -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetState() string { - if r == nil || r.State == nil { - return "" - } - return *r.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetUploader returns the Uploader field. -func (r *ReleaseAsset) GetUploader() *User { - if r == nil { - return nil - } - return r.Uploader -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (r *ReleaseEvent) GetAction() string { - if r == nil || r.Action == nil { - return "" - } - return *r.Action -} - -// GetInstallation returns the Installation field. -func (r *ReleaseEvent) GetInstallation() *Installation { - if r == nil { - return nil - } - return r.Installation -} - -// GetOrg returns the Org field. -func (r *ReleaseEvent) GetOrg() *Organization { - if r == nil { - return nil - } - return r.Org -} - -// GetRelease returns the Release field. 
-func (r *ReleaseEvent) GetRelease() *RepositoryRelease { - if r == nil { - return nil - } - return r.Release -} - -// GetRepo returns the Repo field. -func (r *ReleaseEvent) GetRepo() *Repository { - if r == nil { - return nil - } - return r.Repo -} - -// GetSender returns the Sender field. -func (r *ReleaseEvent) GetSender() *User { - if r == nil { - return nil - } - return r.Sender -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (r *RemoveToken) GetExpiresAt() Timestamp { - if r == nil || r.ExpiresAt == nil { - return Timestamp{} - } - return *r.ExpiresAt -} - -// GetToken returns the Token field if it's non-nil, zero value otherwise. -func (r *RemoveToken) GetToken() string { - if r == nil || r.Token == nil { - return "" - } - return *r.Token -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *Rename) GetFrom() string { - if r == nil || r.From == nil { - return "" - } - return *r.From -} - -// GetTo returns the To field if it's non-nil, zero value otherwise. -func (r *Rename) GetTo() string { - if r == nil || r.To == nil { - return "" - } - return *r.To -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (r *RenameOrgResponse) GetMessage() string { - if r == nil || r.Message == nil { - return "" - } - return *r.Message -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RenameOrgResponse) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (r *RepoAdvisoryCredit) GetLogin() string { - if r == nil || r.Login == nil { - return "" - } - return *r.Login -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (r *RepoAdvisoryCredit) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (r *RepoAdvisoryCreditDetailed) GetState() string { - if r == nil || r.State == nil { - return "" - } - return *r.State -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (r *RepoAdvisoryCreditDetailed) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetUser returns the User field. -func (r *RepoAdvisoryCreditDetailed) GetUser() *User { - if r == nil { - return nil - } - return r.User -} - -// GetDownloadLocation returns the DownloadLocation field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetDownloadLocation() string { - if r == nil || r.DownloadLocation == nil { - return "" - } - return *r.DownloadLocation -} - -// GetFilesAnalyzed returns the FilesAnalyzed field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetFilesAnalyzed() bool { - if r == nil || r.FilesAnalyzed == nil { - return false - } - return *r.FilesAnalyzed -} - -// GetLicenseConcluded returns the LicenseConcluded field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetLicenseConcluded() string { - if r == nil || r.LicenseConcluded == nil { - return "" - } - return *r.LicenseConcluded -} - -// GetLicenseDeclared returns the LicenseDeclared field if it's non-nil, zero value otherwise. 
-func (r *RepoDependencies) GetLicenseDeclared() string { - if r == nil || r.LicenseDeclared == nil { - return "" - } - return *r.LicenseDeclared -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetSPDXID() string { - if r == nil || r.SPDXID == nil { - return "" - } - return *r.SPDXID -} - -// GetVersionInfo returns the VersionInfo field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetVersionInfo() string { - if r == nil || r.VersionInfo == nil { - return "" - } - return *r.VersionInfo -} - -// GetBranch returns the Branch field if it's non-nil, zero value otherwise. -func (r *RepoMergeUpstreamRequest) GetBranch() string { - if r == nil || r.Branch == nil { - return "" - } - return *r.Branch -} - -// GetBaseBranch returns the BaseBranch field if it's non-nil, zero value otherwise. -func (r *RepoMergeUpstreamResult) GetBaseBranch() string { - if r == nil || r.BaseBranch == nil { - return "" - } - return *r.BaseBranch -} - -// GetMergeType returns the MergeType field if it's non-nil, zero value otherwise. -func (r *RepoMergeUpstreamResult) GetMergeType() string { - if r == nil || r.MergeType == nil { - return "" - } - return *r.MergeType -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (r *RepoMergeUpstreamResult) GetMessage() string { - if r == nil || r.Message == nil { - return "" - } - return *r.Message -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *RepoName) GetFrom() string { - if r == nil || r.From == nil { - return "" - } - return *r.From -} - -// GetBadgeURL returns the BadgeURL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetBadgeURL() string { - if r == nil || r.BadgeURL == nil { - return "" - } - return *r.BadgeURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetSourceRepository returns the SourceRepository field. -func (r *RepoRequiredWorkflow) GetSourceRepository() *Repository { - if r == nil { - return nil - } - return r.SourceRepository -} - -// GetState returns the State field if it's non-nil, zero value otherwise. 
-func (r *RepoRequiredWorkflow) GetState() string { - if r == nil || r.State == nil { - return "" - } - return *r.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflows) GetTotalCount() int { - if r == nil || r.TotalCount == nil { - return 0 - } - return *r.TotalCount -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (r *RepositoriesSearchResult) GetIncompleteResults() bool { - if r == nil || r.IncompleteResults == nil { - return false - } - return *r.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (r *RepositoriesSearchResult) GetTotal() int { - if r == nil || r.Total == nil { - return 0 - } - return *r.Total -} - -// GetAllowAutoMerge returns the AllowAutoMerge field if it's non-nil, zero value otherwise. -func (r *Repository) GetAllowAutoMerge() bool { - if r == nil || r.AllowAutoMerge == nil { - return false - } - return *r.AllowAutoMerge -} - -// GetAllowForking returns the AllowForking field if it's non-nil, zero value otherwise. -func (r *Repository) GetAllowForking() bool { - if r == nil || r.AllowForking == nil { - return false - } - return *r.AllowForking -} - -// GetAllowMergeCommit returns the AllowMergeCommit field if it's non-nil, zero value otherwise. -func (r *Repository) GetAllowMergeCommit() bool { - if r == nil || r.AllowMergeCommit == nil { - return false - } - return *r.AllowMergeCommit -} - -// GetAllowRebaseMerge returns the AllowRebaseMerge field if it's non-nil, zero value otherwise. -func (r *Repository) GetAllowRebaseMerge() bool { - if r == nil || r.AllowRebaseMerge == nil { - return false - } - return *r.AllowRebaseMerge -} - -// GetAllowSquashMerge returns the AllowSquashMerge field if it's non-nil, zero value otherwise. -func (r *Repository) GetAllowSquashMerge() bool { - if r == nil || r.AllowSquashMerge == nil { - return false - } - return *r.AllowSquashMerge -} - -// GetAllowUpdateBranch returns the AllowUpdateBranch field if it's non-nil, zero value otherwise. -func (r *Repository) GetAllowUpdateBranch() bool { - if r == nil || r.AllowUpdateBranch == nil { - return false - } - return *r.AllowUpdateBranch -} - -// GetArchived returns the Archived field if it's non-nil, zero value otherwise. -func (r *Repository) GetArchived() bool { - if r == nil || r.Archived == nil { - return false - } - return *r.Archived -} - -// GetArchiveURL returns the ArchiveURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetArchiveURL() string { - if r == nil || r.ArchiveURL == nil { - return "" - } - return *r.ArchiveURL -} - -// GetAssigneesURL returns the AssigneesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetAssigneesURL() string { - if r == nil || r.AssigneesURL == nil { - return "" - } - return *r.AssigneesURL -} - -// GetAutoInit returns the AutoInit field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetAutoInit() bool { - if r == nil || r.AutoInit == nil { - return false - } - return *r.AutoInit -} - -// GetBlobsURL returns the BlobsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetBlobsURL() string { - if r == nil || r.BlobsURL == nil { - return "" - } - return *r.BlobsURL -} - -// GetBranchesURL returns the BranchesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetBranchesURL() string { - if r == nil || r.BranchesURL == nil { - return "" - } - return *r.BranchesURL -} - -// GetCloneURL returns the CloneURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetCloneURL() string { - if r == nil || r.CloneURL == nil { - return "" - } - return *r.CloneURL -} - -// GetCodeOfConduct returns the CodeOfConduct field. -func (r *Repository) GetCodeOfConduct() *CodeOfConduct { - if r == nil { - return nil - } - return r.CodeOfConduct -} - -// GetCollaboratorsURL returns the CollaboratorsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetCollaboratorsURL() string { - if r == nil || r.CollaboratorsURL == nil { - return "" - } - return *r.CollaboratorsURL -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetCommentsURL() string { - if r == nil || r.CommentsURL == nil { - return "" - } - return *r.CommentsURL -} - -// GetCommitsURL returns the CommitsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetCommitsURL() string { - if r == nil || r.CommitsURL == nil { - return "" - } - return *r.CommitsURL -} - -// GetCompareURL returns the CompareURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetCompareURL() string { - if r == nil || r.CompareURL == nil { - return "" - } - return *r.CompareURL -} - -// GetContentsURL returns the ContentsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetContentsURL() string { - if r == nil || r.ContentsURL == nil { - return "" - } - return *r.ContentsURL -} - -// GetContributorsURL returns the ContributorsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetContributorsURL() string { - if r == nil || r.ContributorsURL == nil { - return "" - } - return *r.ContributorsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *Repository) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetDefaultBranch returns the DefaultBranch field if it's non-nil, zero value otherwise. -func (r *Repository) GetDefaultBranch() string { - if r == nil || r.DefaultBranch == nil { - return "" - } - return *r.DefaultBranch -} - -// GetDeleteBranchOnMerge returns the DeleteBranchOnMerge field if it's non-nil, zero value otherwise. -func (r *Repository) GetDeleteBranchOnMerge() bool { - if r == nil || r.DeleteBranchOnMerge == nil { - return false - } - return *r.DeleteBranchOnMerge -} - -// GetDeploymentsURL returns the DeploymentsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetDeploymentsURL() string { - if r == nil || r.DeploymentsURL == nil { - return "" - } - return *r.DeploymentsURL -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (r *Repository) GetDescription() string { - if r == nil || r.Description == nil { - return "" - } - return *r.Description -} - -// GetDisabled returns the Disabled field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetDisabled() bool { - if r == nil || r.Disabled == nil { - return false - } - return *r.Disabled -} - -// GetDownloadsURL returns the DownloadsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetDownloadsURL() string { - if r == nil || r.DownloadsURL == nil { - return "" - } - return *r.DownloadsURL -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetEventsURL() string { - if r == nil || r.EventsURL == nil { - return "" - } - return *r.EventsURL -} - -// GetFork returns the Fork field if it's non-nil, zero value otherwise. -func (r *Repository) GetFork() bool { - if r == nil || r.Fork == nil { - return false - } - return *r.Fork -} - -// GetForksCount returns the ForksCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetForksCount() int { - if r == nil || r.ForksCount == nil { - return 0 - } - return *r.ForksCount -} - -// GetForksURL returns the ForksURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetForksURL() string { - if r == nil || r.ForksURL == nil { - return "" - } - return *r.ForksURL -} - -// GetFullName returns the FullName field if it's non-nil, zero value otherwise. -func (r *Repository) GetFullName() string { - if r == nil || r.FullName == nil { - return "" - } - return *r.FullName -} - -// GetGitCommitsURL returns the GitCommitsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetGitCommitsURL() string { - if r == nil || r.GitCommitsURL == nil { - return "" - } - return *r.GitCommitsURL -} - -// GetGitignoreTemplate returns the GitignoreTemplate field if it's non-nil, zero value otherwise. -func (r *Repository) GetGitignoreTemplate() string { - if r == nil || r.GitignoreTemplate == nil { - return "" - } - return *r.GitignoreTemplate -} - -// GetGitRefsURL returns the GitRefsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetGitRefsURL() string { - if r == nil || r.GitRefsURL == nil { - return "" - } - return *r.GitRefsURL -} - -// GetGitTagsURL returns the GitTagsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetGitTagsURL() string { - if r == nil || r.GitTagsURL == nil { - return "" - } - return *r.GitTagsURL -} - -// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetGitURL() string { - if r == nil || r.GitURL == nil { - return "" - } - return *r.GitURL -} - -// GetHasDiscussions returns the HasDiscussions field if it's non-nil, zero value otherwise. -func (r *Repository) GetHasDiscussions() bool { - if r == nil || r.HasDiscussions == nil { - return false - } - return *r.HasDiscussions -} - -// GetHasDownloads returns the HasDownloads field if it's non-nil, zero value otherwise. -func (r *Repository) GetHasDownloads() bool { - if r == nil || r.HasDownloads == nil { - return false - } - return *r.HasDownloads -} - -// GetHasIssues returns the HasIssues field if it's non-nil, zero value otherwise. -func (r *Repository) GetHasIssues() bool { - if r == nil || r.HasIssues == nil { - return false - } - return *r.HasIssues -} - -// GetHasPages returns the HasPages field if it's non-nil, zero value otherwise. -func (r *Repository) GetHasPages() bool { - if r == nil || r.HasPages == nil { - return false - } - return *r.HasPages -} - -// GetHasProjects returns the HasProjects field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetHasProjects() bool { - if r == nil || r.HasProjects == nil { - return false - } - return *r.HasProjects -} - -// GetHasWiki returns the HasWiki field if it's non-nil, zero value otherwise. -func (r *Repository) GetHasWiki() bool { - if r == nil || r.HasWiki == nil { - return false - } - return *r.HasWiki -} - -// GetHomepage returns the Homepage field if it's non-nil, zero value otherwise. -func (r *Repository) GetHomepage() string { - if r == nil || r.Homepage == nil { - return "" - } - return *r.Homepage -} - -// GetHooksURL returns the HooksURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetHooksURL() string { - if r == nil || r.HooksURL == nil { - return "" - } - return *r.HooksURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *Repository) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetIssueCommentURL returns the IssueCommentURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetIssueCommentURL() string { - if r == nil || r.IssueCommentURL == nil { - return "" - } - return *r.IssueCommentURL -} - -// GetIssueEventsURL returns the IssueEventsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetIssueEventsURL() string { - if r == nil || r.IssueEventsURL == nil { - return "" - } - return *r.IssueEventsURL -} - -// GetIssuesURL returns the IssuesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetIssuesURL() string { - if r == nil || r.IssuesURL == nil { - return "" - } - return *r.IssuesURL -} - -// GetIsTemplate returns the IsTemplate field if it's non-nil, zero value otherwise. -func (r *Repository) GetIsTemplate() bool { - if r == nil || r.IsTemplate == nil { - return false - } - return *r.IsTemplate -} - -// GetKeysURL returns the KeysURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetKeysURL() string { - if r == nil || r.KeysURL == nil { - return "" - } - return *r.KeysURL -} - -// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetLabelsURL() string { - if r == nil || r.LabelsURL == nil { - return "" - } - return *r.LabelsURL -} - -// GetLanguage returns the Language field if it's non-nil, zero value otherwise. -func (r *Repository) GetLanguage() string { - if r == nil || r.Language == nil { - return "" - } - return *r.Language -} - -// GetLanguagesURL returns the LanguagesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetLanguagesURL() string { - if r == nil || r.LanguagesURL == nil { - return "" - } - return *r.LanguagesURL -} - -// GetLicense returns the License field. -func (r *Repository) GetLicense() *License { - if r == nil { - return nil - } - return r.License -} - -// GetLicenseTemplate returns the LicenseTemplate field if it's non-nil, zero value otherwise. -func (r *Repository) GetLicenseTemplate() string { - if r == nil || r.LicenseTemplate == nil { - return "" - } - return *r.LicenseTemplate -} - -// GetMasterBranch returns the MasterBranch field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetMasterBranch() string { - if r == nil || r.MasterBranch == nil { - return "" - } - return *r.MasterBranch -} - -// GetMergeCommitMessage returns the MergeCommitMessage field if it's non-nil, zero value otherwise. -func (r *Repository) GetMergeCommitMessage() string { - if r == nil || r.MergeCommitMessage == nil { - return "" - } - return *r.MergeCommitMessage -} - -// GetMergeCommitTitle returns the MergeCommitTitle field if it's non-nil, zero value otherwise. -func (r *Repository) GetMergeCommitTitle() string { - if r == nil || r.MergeCommitTitle == nil { - return "" - } - return *r.MergeCommitTitle -} - -// GetMergesURL returns the MergesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetMergesURL() string { - if r == nil || r.MergesURL == nil { - return "" - } - return *r.MergesURL -} - -// GetMilestonesURL returns the MilestonesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetMilestonesURL() string { - if r == nil || r.MilestonesURL == nil { - return "" - } - return *r.MilestonesURL -} - -// GetMirrorURL returns the MirrorURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetMirrorURL() string { - if r == nil || r.MirrorURL == nil { - return "" - } - return *r.MirrorURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *Repository) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNetworkCount returns the NetworkCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetNetworkCount() int { - if r == nil || r.NetworkCount == nil { - return 0 - } - return *r.NetworkCount -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *Repository) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetNotificationsURL returns the NotificationsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetNotificationsURL() string { - if r == nil || r.NotificationsURL == nil { - return "" - } - return *r.NotificationsURL -} - -// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. -func (r *Repository) GetOpenIssues() int { - if r == nil || r.OpenIssues == nil { - return 0 - } - return *r.OpenIssues -} - -// GetOpenIssuesCount returns the OpenIssuesCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetOpenIssuesCount() int { - if r == nil || r.OpenIssuesCount == nil { - return 0 - } - return *r.OpenIssuesCount -} - -// GetOrganization returns the Organization field. -func (r *Repository) GetOrganization() *Organization { - if r == nil { - return nil - } - return r.Organization -} - -// GetOwner returns the Owner field. -func (r *Repository) GetOwner() *User { - if r == nil { - return nil - } - return r.Owner -} - -// GetParent returns the Parent field. -func (r *Repository) GetParent() *Repository { - if r == nil { - return nil - } - return r.Parent -} - -// GetPermissions returns the Permissions map if it's non-nil, an empty map otherwise. -func (r *Repository) GetPermissions() map[string]bool { - if r == nil || r.Permissions == nil { - return map[string]bool{} - } - return r.Permissions -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetPrivate() bool { - if r == nil || r.Private == nil { - return false - } - return *r.Private -} - -// GetPullsURL returns the PullsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetPullsURL() string { - if r == nil || r.PullsURL == nil { - return "" - } - return *r.PullsURL -} - -// GetPushedAt returns the PushedAt field if it's non-nil, zero value otherwise. -func (r *Repository) GetPushedAt() Timestamp { - if r == nil || r.PushedAt == nil { - return Timestamp{} - } - return *r.PushedAt -} - -// GetReleasesURL returns the ReleasesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetReleasesURL() string { - if r == nil || r.ReleasesURL == nil { - return "" - } - return *r.ReleasesURL -} - -// GetRoleName returns the RoleName field if it's non-nil, zero value otherwise. -func (r *Repository) GetRoleName() string { - if r == nil || r.RoleName == nil { - return "" - } - return *r.RoleName -} - -// GetSecurityAndAnalysis returns the SecurityAndAnalysis field. -func (r *Repository) GetSecurityAndAnalysis() *SecurityAndAnalysis { - if r == nil { - return nil - } - return r.SecurityAndAnalysis -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (r *Repository) GetSize() int { - if r == nil || r.Size == nil { - return 0 - } - return *r.Size -} - -// GetSource returns the Source field. -func (r *Repository) GetSource() *Repository { - if r == nil { - return nil - } - return r.Source -} - -// GetSquashMergeCommitMessage returns the SquashMergeCommitMessage field if it's non-nil, zero value otherwise. -func (r *Repository) GetSquashMergeCommitMessage() string { - if r == nil || r.SquashMergeCommitMessage == nil { - return "" - } - return *r.SquashMergeCommitMessage -} - -// GetSquashMergeCommitTitle returns the SquashMergeCommitTitle field if it's non-nil, zero value otherwise. -func (r *Repository) GetSquashMergeCommitTitle() string { - if r == nil || r.SquashMergeCommitTitle == nil { - return "" - } - return *r.SquashMergeCommitTitle -} - -// GetSSHURL returns the SSHURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetSSHURL() string { - if r == nil || r.SSHURL == nil { - return "" - } - return *r.SSHURL -} - -// GetStargazersCount returns the StargazersCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetStargazersCount() int { - if r == nil || r.StargazersCount == nil { - return 0 - } - return *r.StargazersCount -} - -// GetStargazersURL returns the StargazersURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetStargazersURL() string { - if r == nil || r.StargazersURL == nil { - return "" - } - return *r.StargazersURL -} - -// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetStatusesURL() string { - if r == nil || r.StatusesURL == nil { - return "" - } - return *r.StatusesURL -} - -// GetSubscribersCount returns the SubscribersCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetSubscribersCount() int { - if r == nil || r.SubscribersCount == nil { - return 0 - } - return *r.SubscribersCount -} - -// GetSubscribersURL returns the SubscribersURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetSubscribersURL() string { - if r == nil || r.SubscribersURL == nil { - return "" - } - return *r.SubscribersURL -} - -// GetSubscriptionURL returns the SubscriptionURL field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetSubscriptionURL() string { - if r == nil || r.SubscriptionURL == nil { - return "" - } - return *r.SubscriptionURL -} - -// GetSVNURL returns the SVNURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetSVNURL() string { - if r == nil || r.SVNURL == nil { - return "" - } - return *r.SVNURL -} - -// GetTagsURL returns the TagsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetTagsURL() string { - if r == nil || r.TagsURL == nil { - return "" - } - return *r.TagsURL -} - -// GetTeamID returns the TeamID field if it's non-nil, zero value otherwise. -func (r *Repository) GetTeamID() int64 { - if r == nil || r.TeamID == nil { - return 0 - } - return *r.TeamID -} - -// GetTeamsURL returns the TeamsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetTeamsURL() string { - if r == nil || r.TeamsURL == nil { - return "" - } - return *r.TeamsURL -} - -// GetTemplateRepository returns the TemplateRepository field. -func (r *Repository) GetTemplateRepository() *Repository { - if r == nil { - return nil - } - return r.TemplateRepository -} - -// GetTreesURL returns the TreesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetTreesURL() string { - if r == nil || r.TreesURL == nil { - return "" - } - return *r.TreesURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (r *Repository) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *Repository) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetUseSquashPRTitleAsDefault returns the UseSquashPRTitleAsDefault field if it's non-nil, zero value otherwise. -func (r *Repository) GetUseSquashPRTitleAsDefault() bool { - if r == nil || r.UseSquashPRTitleAsDefault == nil { - return false - } - return *r.UseSquashPRTitleAsDefault -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (r *Repository) GetVisibility() string { - if r == nil || r.Visibility == nil { - return "" - } - return *r.Visibility -} - -// GetWatchers returns the Watchers field if it's non-nil, zero value otherwise. -func (r *Repository) GetWatchers() int { - if r == nil || r.Watchers == nil { - return 0 - } - return *r.Watchers -} - -// GetWatchersCount returns the WatchersCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetWatchersCount() int { - if r == nil || r.WatchersCount == nil { - return 0 - } - return *r.WatchersCount -} - -// GetWebCommitSignoffRequired returns the WebCommitSignoffRequired field if it's non-nil, zero value otherwise. -func (r *Repository) GetWebCommitSignoffRequired() bool { - if r == nil || r.WebCommitSignoffRequired == nil { - return false - } - return *r.WebCommitSignoffRequired -} - -// GetAccessLevel returns the AccessLevel field if it's non-nil, zero value otherwise. -func (r *RepositoryActionsAccessLevel) GetAccessLevel() string { - if r == nil || r.AccessLevel == nil { - return "" - } - return *r.AccessLevel -} - -// GetAdvancedSecurityCommitters returns the AdvancedSecurityCommitters field if it's non-nil, zero value otherwise. 
-func (r *RepositoryActiveCommitters) GetAdvancedSecurityCommitters() int { - if r == nil || r.AdvancedSecurityCommitters == nil { - return 0 - } - return *r.AdvancedSecurityCommitters -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepositoryActiveCommitters) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetBody() string { - if r == nil || r.Body == nil { - return "" - } - return *r.Body -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetCommitID() string { - if r == nil || r.CommitID == nil { - return "" - } - return *r.CommitID -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetPosition returns the Position field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetPosition() int { - if r == nil || r.Position == nil { - return 0 - } - return *r.Position -} - -// GetReactions returns the Reactions field. -func (r *RepositoryComment) GetReactions() *Reactions { - if r == nil { - return nil - } - return r.Reactions -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetUser returns the User field. -func (r *RepositoryComment) GetUser() *User { - if r == nil { - return nil - } - return r.User -} - -// GetAuthor returns the Author field. -func (r *RepositoryCommit) GetAuthor() *User { - if r == nil { - return nil - } - return r.Author -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. -func (r *RepositoryCommit) GetCommentsURL() string { - if r == nil || r.CommentsURL == nil { - return "" - } - return *r.CommentsURL -} - -// GetCommit returns the Commit field. -func (r *RepositoryCommit) GetCommit() *Commit { - if r == nil { - return nil - } - return r.Commit -} - -// GetCommitter returns the Committer field. -func (r *RepositoryCommit) GetCommitter() *User { - if r == nil { - return nil - } - return r.Committer -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. 
-func (r *RepositoryCommit) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *RepositoryCommit) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (r *RepositoryCommit) GetSHA() string { - if r == nil || r.SHA == nil { - return "" - } - return *r.SHA -} - -// GetStats returns the Stats field. -func (r *RepositoryCommit) GetStats() *CommitStats { - if r == nil { - return nil - } - return r.Stats -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryCommit) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetDownloadURL() string { - if r == nil || r.DownloadURL == nil { - return "" - } - return *r.DownloadURL -} - -// GetEncoding returns the Encoding field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetEncoding() string { - if r == nil || r.Encoding == nil { - return "" - } - return *r.Encoding -} - -// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetGitURL() string { - if r == nil || r.GitURL == nil { - return "" - } - return *r.GitURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetSHA() string { - if r == nil || r.SHA == nil { - return "" - } - return *r.SHA -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetSize() int { - if r == nil || r.Size == nil { - return 0 - } - return *r.Size -} - -// GetSubmoduleGitURL returns the SubmoduleGitURL field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetSubmoduleGitURL() string { - if r == nil || r.SubmoduleGitURL == nil { - return "" - } - return *r.SubmoduleGitURL -} - -// GetTarget returns the Target field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetTarget() string { - if r == nil || r.Target == nil { - return "" - } - return *r.Target -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetAuthor returns the Author field. -func (r *RepositoryContentFileOptions) GetAuthor() *CommitAuthor { - if r == nil { - return nil - } - return r.Author -} - -// GetBranch returns the Branch field if it's non-nil, zero value otherwise. 
-func (r *RepositoryContentFileOptions) GetBranch() string { - if r == nil || r.Branch == nil { - return "" - } - return *r.Branch -} - -// GetCommitter returns the Committer field. -func (r *RepositoryContentFileOptions) GetCommitter() *CommitAuthor { - if r == nil { - return nil - } - return r.Committer -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (r *RepositoryContentFileOptions) GetMessage() string { - if r == nil || r.Message == nil { - return "" - } - return *r.Message -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (r *RepositoryContentFileOptions) GetSHA() string { - if r == nil || r.SHA == nil { - return "" - } - return *r.SHA -} - -// GetContent returns the Content field. -func (r *RepositoryContentResponse) GetContent() *RepositoryContent { - if r == nil { - return nil - } - return r.Content -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (r *RepositoryDispatchEvent) GetAction() string { - if r == nil || r.Action == nil { - return "" - } - return *r.Action -} - -// GetBranch returns the Branch field if it's non-nil, zero value otherwise. -func (r *RepositoryDispatchEvent) GetBranch() string { - if r == nil || r.Branch == nil { - return "" - } - return *r.Branch -} - -// GetInstallation returns the Installation field. -func (r *RepositoryDispatchEvent) GetInstallation() *Installation { - if r == nil { - return nil - } - return r.Installation -} - -// GetOrg returns the Org field. -func (r *RepositoryDispatchEvent) GetOrg() *Organization { - if r == nil { - return nil - } - return r.Org -} - -// GetRepo returns the Repo field. -func (r *RepositoryDispatchEvent) GetRepo() *Repository { - if r == nil { - return nil - } - return r.Repo -} - -// GetSender returns the Sender field. -func (r *RepositoryDispatchEvent) GetSender() *User { - if r == nil { - return nil - } - return r.Sender -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (r *RepositoryEvent) GetAction() string { - if r == nil || r.Action == nil { - return "" - } - return *r.Action -} - -// GetChanges returns the Changes field. -func (r *RepositoryEvent) GetChanges() *EditChange { - if r == nil { - return nil - } - return r.Changes -} - -// GetInstallation returns the Installation field. -func (r *RepositoryEvent) GetInstallation() *Installation { - if r == nil { - return nil - } - return r.Installation -} - -// GetOrg returns the Org field. -func (r *RepositoryEvent) GetOrg() *Organization { - if r == nil { - return nil - } - return r.Org -} - -// GetRepo returns the Repo field. -func (r *RepositoryEvent) GetRepo() *Repository { - if r == nil { - return nil - } - return r.Repo -} - -// GetSender returns the Sender field. -func (r *RepositoryEvent) GetSender() *User { - if r == nil { - return nil - } - return r.Sender -} - -// GetOrg returns the Org field. -func (r *RepositoryImportEvent) GetOrg() *Organization { - if r == nil { - return nil - } - return r.Org -} - -// GetRepo returns the Repo field. -func (r *RepositoryImportEvent) GetRepo() *Repository { - if r == nil { - return nil - } - return r.Repo -} - -// GetSender returns the Sender field. -func (r *RepositoryImportEvent) GetSender() *User { - if r == nil { - return nil - } - return r.Sender -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. 
-func (r *RepositoryImportEvent) GetStatus() string { - if r == nil || r.Status == nil { - return "" - } - return *r.Status -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryInvitation) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepositoryInvitation) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepositoryInvitation) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetInvitee returns the Invitee field. -func (r *RepositoryInvitation) GetInvitee() *User { - if r == nil { - return nil - } - return r.Invitee -} - -// GetInviter returns the Inviter field. -func (r *RepositoryInvitation) GetInviter() *User { - if r == nil { - return nil - } - return r.Inviter -} - -// GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. -func (r *RepositoryInvitation) GetPermissions() string { - if r == nil || r.Permissions == nil { - return "" - } - return *r.Permissions -} - -// GetRepo returns the Repo field. -func (r *RepositoryInvitation) GetRepo() *Repository { - if r == nil { - return nil - } - return r.Repo -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryInvitation) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetContent returns the Content field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetContent() string { - if r == nil || r.Content == nil { - return "" - } - return *r.Content -} - -// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetDownloadURL() string { - if r == nil || r.DownloadURL == nil { - return "" - } - return *r.DownloadURL -} - -// GetEncoding returns the Encoding field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetEncoding() string { - if r == nil || r.Encoding == nil { - return "" - } - return *r.Encoding -} - -// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetGitURL() string { - if r == nil || r.GitURL == nil { - return "" - } - return *r.GitURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetLicense returns the License field. -func (r *RepositoryLicense) GetLicense() *License { - if r == nil { - return nil - } - return r.License -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetSHA() string { - if r == nil || r.SHA == nil { - return "" - } - return *r.SHA -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. 
-func (r *RepositoryLicense) GetSize() int { - if r == nil || r.Size == nil { - return 0 - } - return *r.Size -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetBase returns the Base field if it's non-nil, zero value otherwise. -func (r *RepositoryMergeRequest) GetBase() string { - if r == nil || r.Base == nil { - return "" - } - return *r.Base -} - -// GetCommitMessage returns the CommitMessage field if it's non-nil, zero value otherwise. -func (r *RepositoryMergeRequest) GetCommitMessage() string { - if r == nil || r.CommitMessage == nil { - return "" - } - return *r.CommitMessage -} - -// GetHead returns the Head field if it's non-nil, zero value otherwise. -func (r *RepositoryMergeRequest) GetHead() string { - if r == nil || r.Head == nil { - return "" - } - return *r.Head -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (r *RepositoryPermissionLevel) GetPermission() string { - if r == nil || r.Permission == nil { - return "" - } - return *r.Permission -} - -// GetUser returns the User field. -func (r *RepositoryPermissionLevel) GetUser() *User { - if r == nil { - return nil - } - return r.User -} - -// GetAssetsURL returns the AssetsURL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetAssetsURL() string { - if r == nil || r.AssetsURL == nil { - return "" - } - return *r.AssetsURL -} - -// GetAuthor returns the Author field. -func (r *RepositoryRelease) GetAuthor() *User { - if r == nil { - return nil - } - return r.Author -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetBody() string { - if r == nil || r.Body == nil { - return "" - } - return *r.Body -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetDiscussionCategoryName returns the DiscussionCategoryName field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetDiscussionCategoryName() string { - if r == nil || r.DiscussionCategoryName == nil { - return "" - } - return *r.DiscussionCategoryName -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetDraft() bool { - if r == nil || r.Draft == nil { - return false - } - return *r.Draft -} - -// GetGenerateReleaseNotes returns the GenerateReleaseNotes field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetGenerateReleaseNotes() bool { - if r == nil || r.GenerateReleaseNotes == nil { - return false - } - return *r.GenerateReleaseNotes -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetMakeLatest returns the MakeLatest field if it's non-nil, zero value otherwise. 
-func (r *RepositoryRelease) GetMakeLatest() string { - if r == nil || r.MakeLatest == nil { - return "" - } - return *r.MakeLatest -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetPrerelease returns the Prerelease field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetPrerelease() bool { - if r == nil || r.Prerelease == nil { - return false - } - return *r.Prerelease -} - -// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetPublishedAt() Timestamp { - if r == nil || r.PublishedAt == nil { - return Timestamp{} - } - return *r.PublishedAt -} - -// GetTagName returns the TagName field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetTagName() string { - if r == nil || r.TagName == nil { - return "" - } - return *r.TagName -} - -// GetTarballURL returns the TarballURL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetTarballURL() string { - if r == nil || r.TarballURL == nil { - return "" - } - return *r.TarballURL -} - -// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetTargetCommitish() string { - if r == nil || r.TargetCommitish == nil { - return "" - } - return *r.TargetCommitish -} - -// GetUploadURL returns the UploadURL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetUploadURL() string { - if r == nil || r.UploadURL == nil { - return "" - } - return *r.UploadURL -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetZipballURL returns the ZipballURL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetZipballURL() string { - if r == nil || r.ZipballURL == nil { - return "" - } - return *r.ZipballURL -} - -// GetParameters returns the Parameters field if it's non-nil, zero value otherwise. -func (r *RepositoryRule) GetParameters() json.RawMessage { - if r == nil || r.Parameters == nil { - return json.RawMessage{} - } - return *r.Parameters -} - -// GetCommit returns the Commit field. -func (r *RepositoryTag) GetCommit() *Commit { - if r == nil { - return nil - } - return r.Commit -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepositoryTag) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetTarballURL returns the TarballURL field if it's non-nil, zero value otherwise. -func (r *RepositoryTag) GetTarballURL() string { - if r == nil || r.TarballURL == nil { - return "" - } - return *r.TarballURL -} - -// GetZipballURL returns the ZipballURL field if it's non-nil, zero value otherwise. -func (r *RepositoryTag) GetZipballURL() string { - if r == nil || r.ZipballURL == nil { - return "" - } - return *r.ZipballURL -} - -// GetAffectedPackageName returns the AffectedPackageName field if it's non-nil, zero value otherwise. 
-func (r *RepositoryVulnerabilityAlert) GetAffectedPackageName() string { - if r == nil || r.AffectedPackageName == nil { - return "" - } - return *r.AffectedPackageName -} - -// GetAffectedRange returns the AffectedRange field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetAffectedRange() string { - if r == nil || r.AffectedRange == nil { - return "" - } - return *r.AffectedRange -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetDismissedAt returns the DismissedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetDismissedAt() Timestamp { - if r == nil || r.DismissedAt == nil { - return Timestamp{} - } - return *r.DismissedAt -} - -// GetDismisser returns the Dismisser field. -func (r *RepositoryVulnerabilityAlert) GetDismisser() *User { - if r == nil { - return nil - } - return r.Dismisser -} - -// GetDismissReason returns the DismissReason field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetDismissReason() string { - if r == nil || r.DismissReason == nil { - return "" - } - return *r.DismissReason -} - -// GetExternalIdentifier returns the ExternalIdentifier field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetExternalIdentifier() string { - if r == nil || r.ExternalIdentifier == nil { - return "" - } - return *r.ExternalIdentifier -} - -// GetExternalReference returns the ExternalReference field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetExternalReference() string { - if r == nil || r.ExternalReference == nil { - return "" - } - return *r.ExternalReference -} - -// GetFixedIn returns the FixedIn field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetFixedIn() string { - if r == nil || r.FixedIn == nil { - return "" - } - return *r.FixedIn -} - -// GetGitHubSecurityAdvisoryID returns the GitHubSecurityAdvisoryID field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetGitHubSecurityAdvisoryID() string { - if r == nil || r.GitHubSecurityAdvisoryID == nil { - return "" - } - return *r.GitHubSecurityAdvisoryID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetSeverity() string { - if r == nil || r.Severity == nil { - return "" - } - return *r.Severity -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlertEvent) GetAction() string { - if r == nil || r.Action == nil { - return "" - } - return *r.Action -} - -// GetAlert returns the Alert field. -func (r *RepositoryVulnerabilityAlertEvent) GetAlert() *RepositoryVulnerabilityAlert { - if r == nil { - return nil - } - return r.Alert -} - -// GetInstallation returns the Installation field. -func (r *RepositoryVulnerabilityAlertEvent) GetInstallation() *Installation { - if r == nil { - return nil - } - return r.Installation -} - -// GetOrg returns the Org field. 
-func (r *RepositoryVulnerabilityAlertEvent) GetOrg() *Organization { - if r == nil { - return nil - } - return r.Org -} - -// GetRepository returns the Repository field. -func (r *RepositoryVulnerabilityAlertEvent) GetRepository() *Repository { - if r == nil { - return nil - } - return r.Repository -} - -// GetSender returns the Sender field. -func (r *RepositoryVulnerabilityAlertEvent) GetSender() *User { - if r == nil { - return nil - } - return r.Sender -} - -// GetForkRepos returns the ForkRepos field if it's non-nil, zero value otherwise. -func (r *RepoStats) GetForkRepos() int { - if r == nil || r.ForkRepos == nil { - return 0 - } - return *r.ForkRepos -} - -// GetOrgRepos returns the OrgRepos field if it's non-nil, zero value otherwise. -func (r *RepoStats) GetOrgRepos() int { - if r == nil || r.OrgRepos == nil { - return 0 - } - return *r.OrgRepos -} - -// GetRootRepos returns the RootRepos field if it's non-nil, zero value otherwise. -func (r *RepoStats) GetRootRepos() int { - if r == nil || r.RootRepos == nil { - return 0 - } - return *r.RootRepos -} - -// GetTotalPushes returns the TotalPushes field if it's non-nil, zero value otherwise. -func (r *RepoStats) GetTotalPushes() int { - if r == nil || r.TotalPushes == nil { - return 0 - } - return *r.TotalPushes -} - -// GetTotalRepos returns the TotalRepos field if it's non-nil, zero value otherwise. -func (r *RepoStats) GetTotalRepos() int { - if r == nil || r.TotalRepos == nil { - return 0 - } - return *r.TotalRepos -} - -// GetTotalWikis returns the TotalWikis field if it's non-nil, zero value otherwise. -func (r *RepoStats) GetTotalWikis() int { - if r == nil || r.TotalWikis == nil { - return 0 - } - return *r.TotalWikis -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetAvatarURL() string { - if r == nil || r.AvatarURL == nil { - return "" - } - return *r.AvatarURL -} - -// GetContext returns the Context field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetContext() string { - if r == nil || r.Context == nil { - return "" - } - return *r.Context -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetCreator returns the Creator field. -func (r *RepoStatus) GetCreator() *User { - if r == nil { - return nil - } - return r.Creator -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetDescription() string { - if r == nil || r.Description == nil { - return "" - } - return *r.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetState() string { - if r == nil || r.State == nil { - return "" - } - return *r.State -} - -// GetTargetURL returns the TargetURL field if it's non-nil, zero value otherwise. 
-func (r *RepoStatus) GetTargetURL() string { - if r == nil || r.TargetURL == nil { - return "" - } - return *r.TargetURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *RequireCodeOwnerReviewChanges) GetFrom() bool { - if r == nil || r.From == nil { - return false - } - return *r.From -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *RequiredConversationResolutionLevelChanges) GetFrom() string { - if r == nil || r.From == nil { - return "" - } - return *r.From -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *RequiredDeploymentsEnforcementLevelChanges) GetFrom() string { - if r == nil || r.From == nil { - return "" - } - return *r.From -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (r *RequiredReviewer) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetAppID returns the AppID field if it's non-nil, zero value otherwise. -func (r *RequiredStatusCheck) GetAppID() int64 { - if r == nil || r.AppID == nil { - return 0 - } - return *r.AppID -} - -// GetContextsURL returns the ContextsURL field if it's non-nil, zero value otherwise. -func (r *RequiredStatusChecks) GetContextsURL() string { - if r == nil || r.ContextsURL == nil { - return "" - } - return *r.ContextsURL -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RequiredStatusChecks) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *RequiredStatusChecksEnforcementLevelChanges) GetFrom() string { - if r == nil || r.From == nil { - return "" - } - return *r.From -} - -// GetStrict returns the Strict field if it's non-nil, zero value otherwise. -func (r *RequiredStatusChecksRequest) GetStrict() bool { - if r == nil || r.Strict == nil { - return false - } - return *r.Strict -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (r *RequiredWorkflowSelectedRepos) GetTotalCount() int { - if r == nil || r.TotalCount == nil { - return 0 - } - return *r.TotalCount -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *ReviewersRequest) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (r *ReviewPersonalAccessTokenRequestOptions) GetReason() string { - if r == nil || r.Reason == nil { - return "" - } - return *r.Reason -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (r *Rule) GetDescription() string { - if r == nil || r.Description == nil { - return "" - } - return *r.Description -} - -// GetFullDescription returns the FullDescription field if it's non-nil, zero value otherwise. 
-func (r *Rule) GetFullDescription() string { - if r == nil || r.FullDescription == nil { - return "" - } - return *r.FullDescription -} - -// GetHelp returns the Help field if it's non-nil, zero value otherwise. -func (r *Rule) GetHelp() string { - if r == nil || r.Help == nil { - return "" - } - return *r.Help -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *Rule) GetID() string { - if r == nil || r.ID == nil { - return "" - } - return *r.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *Rule) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetSecuritySeverityLevel returns the SecuritySeverityLevel field if it's non-nil, zero value otherwise. -func (r *Rule) GetSecuritySeverityLevel() string { - if r == nil || r.SecuritySeverityLevel == nil { - return "" - } - return *r.SecuritySeverityLevel -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (r *Rule) GetSeverity() string { - if r == nil || r.Severity == nil { - return "" - } - return *r.Severity -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RulePatternParameters) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNegate returns the Negate field if it's non-nil, zero value otherwise. -func (r *RulePatternParameters) GetNegate() bool { - if r == nil || r.Negate == nil { - return false - } - return *r.Negate -} - -// GetIntegrationID returns the IntegrationID field if it's non-nil, zero value otherwise. -func (r *RuleRequiredStatusChecks) GetIntegrationID() int64 { - if r == nil || r.IntegrationID == nil { - return 0 - } - return *r.IntegrationID -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (r *RuleRequiredWorkflow) GetRef() string { - if r == nil || r.Ref == nil { - return "" - } - return *r.Ref -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (r *RuleRequiredWorkflow) GetRepositoryID() int64 { - if r == nil || r.RepositoryID == nil { - return 0 - } - return *r.RepositoryID -} - -// GetSha returns the Sha field if it's non-nil, zero value otherwise. -func (r *RuleRequiredWorkflow) GetSha() string { - if r == nil || r.Sha == nil { - return "" - } - return *r.Sha -} - -// GetConditions returns the Conditions field. -func (r *Ruleset) GetConditions() *RulesetConditions { - if r == nil { - return nil - } - return r.Conditions -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *Ruleset) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetLinks returns the Links field. -func (r *Ruleset) GetLinks() *RulesetLinks { - if r == nil { - return nil - } - return r.Links -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *Ruleset) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetSourceType returns the SourceType field if it's non-nil, zero value otherwise. -func (r *Ruleset) GetSourceType() string { - if r == nil || r.SourceType == nil { - return "" - } - return *r.SourceType -} - -// GetTarget returns the Target field if it's non-nil, zero value otherwise. -func (r *Ruleset) GetTarget() string { - if r == nil || r.Target == nil { - return "" - } - return *r.Target -} - -// GetRefName returns the RefName field. 
-func (r *RulesetConditions) GetRefName() *RulesetRefConditionParameters { - if r == nil { - return nil - } - return r.RefName -} - -// GetRepositoryID returns the RepositoryID field. -func (r *RulesetConditions) GetRepositoryID() *RulesetRepositoryIDsConditionParameters { - if r == nil { - return nil - } - return r.RepositoryID -} - -// GetRepositoryName returns the RepositoryName field. -func (r *RulesetConditions) GetRepositoryName() *RulesetRepositoryNamesConditionParameters { - if r == nil { - return nil - } - return r.RepositoryName -} - -// GetHRef returns the HRef field if it's non-nil, zero value otherwise. -func (r *RulesetLink) GetHRef() string { - if r == nil || r.HRef == nil { - return "" - } - return *r.HRef -} - -// GetSelf returns the Self field. -func (r *RulesetLinks) GetSelf() *RulesetLink { - if r == nil { - return nil - } - return r.Self -} - -// GetProtected returns the Protected field if it's non-nil, zero value otherwise. -func (r *RulesetRepositoryNamesConditionParameters) GetProtected() bool { - if r == nil || r.Protected == nil { - return false - } - return *r.Protected -} - -// GetBusy returns the Busy field if it's non-nil, zero value otherwise. -func (r *Runner) GetBusy() bool { - if r == nil || r.Busy == nil { - return false - } - return *r.Busy -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *Runner) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *Runner) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetOS returns the OS field if it's non-nil, zero value otherwise. -func (r *Runner) GetOS() string { - if r == nil || r.OS == nil { - return "" - } - return *r.OS -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (r *Runner) GetStatus() string { - if r == nil || r.Status == nil { - return "" - } - return *r.Status -} - -// GetArchitecture returns the Architecture field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetArchitecture() string { - if r == nil || r.Architecture == nil { - return "" - } - return *r.Architecture -} - -// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetDownloadURL() string { - if r == nil || r.DownloadURL == nil { - return "" - } - return *r.DownloadURL -} - -// GetFilename returns the Filename field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetFilename() string { - if r == nil || r.Filename == nil { - return "" - } - return *r.Filename -} - -// GetOS returns the OS field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetOS() string { - if r == nil || r.OS == nil { - return "" - } - return *r.OS -} - -// GetSHA256Checksum returns the SHA256Checksum field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetSHA256Checksum() string { - if r == nil || r.SHA256Checksum == nil { - return "" - } - return *r.SHA256Checksum -} - -// GetTempDownloadToken returns the TempDownloadToken field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetTempDownloadToken() string { - if r == nil || r.TempDownloadToken == nil { - return "" - } - return *r.TempDownloadToken -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. 
-func (r *RunnerGroup) GetAllowsPublicRepositories() bool { - if r == nil || r.AllowsPublicRepositories == nil { - return false - } - return *r.AllowsPublicRepositories -} - -// GetDefault returns the Default field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetDefault() bool { - if r == nil || r.Default == nil { - return false - } - return *r.Default -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetInherited returns the Inherited field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetInherited() bool { - if r == nil || r.Inherited == nil { - return false - } - return *r.Inherited -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetRestrictedToWorkflows() bool { - if r == nil || r.RestrictedToWorkflows == nil { - return false - } - return *r.RestrictedToWorkflows -} - -// GetRunnersURL returns the RunnersURL field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetRunnersURL() string { - if r == nil || r.RunnersURL == nil { - return "" - } - return *r.RunnersURL -} - -// GetSelectedRepositoriesURL returns the SelectedRepositoriesURL field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetSelectedRepositoriesURL() string { - if r == nil || r.SelectedRepositoriesURL == nil { - return "" - } - return *r.SelectedRepositoriesURL -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetVisibility() string { - if r == nil || r.Visibility == nil { - return "" - } - return *r.Visibility -} - -// GetWorkflowRestrictionsReadOnly returns the WorkflowRestrictionsReadOnly field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetWorkflowRestrictionsReadOnly() bool { - if r == nil || r.WorkflowRestrictionsReadOnly == nil { - return false - } - return *r.WorkflowRestrictionsReadOnly -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RunnerLabels) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RunnerLabels) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (r *RunnerLabels) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetCheckoutURI returns the CheckoutURI field if it's non-nil, zero value otherwise. -func (s *SarifAnalysis) GetCheckoutURI() string { - if s == nil || s.CheckoutURI == nil { - return "" - } - return *s.CheckoutURI -} - -// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. -func (s *SarifAnalysis) GetCommitSHA() string { - if s == nil || s.CommitSHA == nil { - return "" - } - return *s.CommitSHA -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (s *SarifAnalysis) GetRef() string { - if s == nil || s.Ref == nil { - return "" - } - return *s.Ref -} - -// GetSarif returns the Sarif field if it's non-nil, zero value otherwise. 
-func (s *SarifAnalysis) GetSarif() string { - if s == nil || s.Sarif == nil { - return "" - } - return *s.Sarif -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. -func (s *SarifAnalysis) GetStartedAt() Timestamp { - if s == nil || s.StartedAt == nil { - return Timestamp{} - } - return *s.StartedAt -} - -// GetToolName returns the ToolName field if it's non-nil, zero value otherwise. -func (s *SarifAnalysis) GetToolName() string { - if s == nil || s.ToolName == nil { - return "" - } - return *s.ToolName -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *SarifID) GetID() string { - if s == nil || s.ID == nil { - return "" - } - return *s.ID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *SarifID) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetAnalysesURL returns the AnalysesURL field if it's non-nil, zero value otherwise. -func (s *SARIFUpload) GetAnalysesURL() string { - if s == nil || s.AnalysesURL == nil { - return "" - } - return *s.AnalysesURL -} - -// GetProcessingStatus returns the ProcessingStatus field if it's non-nil, zero value otherwise. -func (s *SARIFUpload) GetProcessingStatus() string { - if s == nil || s.ProcessingStatus == nil { - return "" - } - return *s.ProcessingStatus -} - -// GetSBOM returns the SBOM field. -func (s *SBOM) GetSBOM() *SBOMInfo { - if s == nil { - return nil - } - return s.SBOM -} - -// GetCreationInfo returns the CreationInfo field. -func (s *SBOMInfo) GetCreationInfo() *CreationInfo { - if s == nil { - return nil - } - return s.CreationInfo -} - -// GetDataLicense returns the DataLicense field if it's non-nil, zero value otherwise. -func (s *SBOMInfo) GetDataLicense() string { - if s == nil || s.DataLicense == nil { - return "" - } - return *s.DataLicense -} - -// GetDocumentNamespace returns the DocumentNamespace field if it's non-nil, zero value otherwise. -func (s *SBOMInfo) GetDocumentNamespace() string { - if s == nil || s.DocumentNamespace == nil { - return "" - } - return *s.DocumentNamespace -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (s *SBOMInfo) GetName() string { - if s == nil || s.Name == nil { - return "" - } - return *s.Name -} - -// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. -func (s *SBOMInfo) GetSPDXID() string { - if s == nil || s.SPDXID == nil { - return "" - } - return *s.SPDXID -} - -// GetSPDXVersion returns the SPDXVersion field if it's non-nil, zero value otherwise. -func (s *SBOMInfo) GetSPDXVersion() string { - if s == nil || s.SPDXVersion == nil { - return "" - } - return *s.SPDXVersion -} - -// GetAnalysisKey returns the AnalysisKey field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetAnalysisKey() string { - if s == nil || s.AnalysisKey == nil { - return "" - } - return *s.AnalysisKey -} - -// GetCategory returns the Category field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetCategory() string { - if s == nil || s.Category == nil { - return "" - } - return *s.Category -} - -// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetCommitSHA() string { - if s == nil || s.CommitSHA == nil { - return "" - } - return *s.CommitSHA -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (s *ScanningAnalysis) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetDeletable returns the Deletable field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetDeletable() bool { - if s == nil || s.Deletable == nil { - return false - } - return *s.Deletable -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetEnvironment() string { - if s == nil || s.Environment == nil { - return "" - } - return *s.Environment -} - -// GetError returns the Error field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetError() string { - if s == nil || s.Error == nil { - return "" - } - return *s.Error -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetID() int64 { - if s == nil || s.ID == nil { - return 0 - } - return *s.ID -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetRef() string { - if s == nil || s.Ref == nil { - return "" - } - return *s.Ref -} - -// GetResultsCount returns the ResultsCount field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetResultsCount() int { - if s == nil || s.ResultsCount == nil { - return 0 - } - return *s.ResultsCount -} - -// GetRulesCount returns the RulesCount field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetRulesCount() int { - if s == nil || s.RulesCount == nil { - return 0 - } - return *s.RulesCount -} - -// GetSarifID returns the SarifID field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetSarifID() string { - if s == nil || s.SarifID == nil { - return "" - } - return *s.SarifID -} - -// GetTool returns the Tool field. -func (s *ScanningAnalysis) GetTool() *Tool { - if s == nil { - return nil - } - return s.Tool -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetWarning returns the Warning field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetWarning() string { - if s == nil || s.Warning == nil { - return "" - } - return *s.Warning -} - -// GetCreated returns the Created field if it's non-nil, zero value otherwise. -func (s *SCIMMeta) GetCreated() Timestamp { - if s == nil || s.Created == nil { - return Timestamp{} - } - return *s.Created -} - -// GetLastModified returns the LastModified field if it's non-nil, zero value otherwise. -func (s *SCIMMeta) GetLastModified() Timestamp { - if s == nil || s.LastModified == nil { - return Timestamp{} - } - return *s.LastModified -} - -// GetLocation returns the Location field if it's non-nil, zero value otherwise. -func (s *SCIMMeta) GetLocation() string { - if s == nil || s.Location == nil { - return "" - } - return *s.Location -} - -// GetResourceType returns the ResourceType field if it's non-nil, zero value otherwise. -func (s *SCIMMeta) GetResourceType() string { - if s == nil || s.ResourceType == nil { - return "" - } - return *s.ResourceType -} - -// GetItemsPerPage returns the ItemsPerPage field if it's non-nil, zero value otherwise. -func (s *SCIMProvisionedIdentities) GetItemsPerPage() int { - if s == nil || s.ItemsPerPage == nil { - return 0 - } - return *s.ItemsPerPage -} - -// GetStartIndex returns the StartIndex field if it's non-nil, zero value otherwise. 
-func (s *SCIMProvisionedIdentities) GetStartIndex() int { - if s == nil || s.StartIndex == nil { - return 0 - } - return *s.StartIndex -} - -// GetTotalResults returns the TotalResults field if it's non-nil, zero value otherwise. -func (s *SCIMProvisionedIdentities) GetTotalResults() int { - if s == nil || s.TotalResults == nil { - return 0 - } - return *s.TotalResults -} - -// GetActive returns the Active field if it's non-nil, zero value otherwise. -func (s *SCIMUserAttributes) GetActive() bool { - if s == nil || s.Active == nil { - return false - } - return *s.Active -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. -func (s *SCIMUserAttributes) GetDisplayName() string { - if s == nil || s.DisplayName == nil { - return "" - } - return *s.DisplayName -} - -// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. -func (s *SCIMUserAttributes) GetExternalID() string { - if s == nil || s.ExternalID == nil { - return "" - } - return *s.ExternalID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *SCIMUserAttributes) GetID() string { - if s == nil || s.ID == nil { - return "" - } - return *s.ID -} - -// GetMeta returns the Meta field. -func (s *SCIMUserAttributes) GetMeta() *SCIMMeta { - if s == nil { - return nil - } - return s.Meta -} - -// GetPrimary returns the Primary field if it's non-nil, zero value otherwise. -func (s *SCIMUserEmail) GetPrimary() bool { - if s == nil || s.Primary == nil { - return false - } - return *s.Primary -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (s *SCIMUserEmail) GetType() string { - if s == nil || s.Type == nil { - return "" - } - return *s.Type -} - -// GetFormatted returns the Formatted field if it's non-nil, zero value otherwise. -func (s *SCIMUserName) GetFormatted() string { - if s == nil || s.Formatted == nil { - return "" - } - return *s.Formatted -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (s *SecretScanning) GetStatus() string { - if s == nil || s.Status == nil { - return "" - } - return *s.Status -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetHTMLURL() string { - if s == nil || s.HTMLURL == nil { - return "" - } - return *s.HTMLURL -} - -// GetLocationsURL returns the LocationsURL field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetLocationsURL() string { - if s == nil || s.LocationsURL == nil { - return "" - } - return *s.LocationsURL -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetNumber() int { - if s == nil || s.Number == nil { - return 0 - } - return *s.Number -} - -// GetPushProtectionBypassed returns the PushProtectionBypassed field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetPushProtectionBypassed() bool { - if s == nil || s.PushProtectionBypassed == nil { - return false - } - return *s.PushProtectionBypassed -} - -// GetPushProtectionBypassedAt returns the PushProtectionBypassedAt field if it's non-nil, zero value otherwise. 
-func (s *SecretScanningAlert) GetPushProtectionBypassedAt() Timestamp { - if s == nil || s.PushProtectionBypassedAt == nil { - return Timestamp{} - } - return *s.PushProtectionBypassedAt -} - -// GetPushProtectionBypassedBy returns the PushProtectionBypassedBy field. -func (s *SecretScanningAlert) GetPushProtectionBypassedBy() *User { - if s == nil { - return nil - } - return s.PushProtectionBypassedBy -} - -// GetRepository returns the Repository field. -func (s *SecretScanningAlert) GetRepository() *Repository { - if s == nil { - return nil - } - return s.Repository -} - -// GetResolution returns the Resolution field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetResolution() string { - if s == nil || s.Resolution == nil { - return "" - } - return *s.Resolution -} - -// GetResolutionComment returns the ResolutionComment field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetResolutionComment() string { - if s == nil || s.ResolutionComment == nil { - return "" - } - return *s.ResolutionComment -} - -// GetResolvedAt returns the ResolvedAt field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetResolvedAt() Timestamp { - if s == nil || s.ResolvedAt == nil { - return Timestamp{} - } - return *s.ResolvedAt -} - -// GetResolvedBy returns the ResolvedBy field. -func (s *SecretScanningAlert) GetResolvedBy() *User { - if s == nil { - return nil - } - return s.ResolvedBy -} - -// GetSecret returns the Secret field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetSecret() string { - if s == nil || s.Secret == nil { - return "" - } - return *s.Secret -} - -// GetSecretType returns the SecretType field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetSecretType() string { - if s == nil || s.SecretType == nil { - return "" - } - return *s.SecretType -} - -// GetSecretTypeDisplayName returns the SecretTypeDisplayName field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetSecretTypeDisplayName() string { - if s == nil || s.SecretTypeDisplayName == nil { - return "" - } - return *s.SecretTypeDisplayName -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetState() string { - if s == nil || s.State == nil { - return "" - } - return *s.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetUpdatedAt() Timestamp { - if s == nil || s.UpdatedAt == nil { - return Timestamp{} - } - return *s.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertEvent) GetAction() string { - if s == nil || s.Action == nil { - return "" - } - return *s.Action -} - -// GetAlert returns the Alert field. -func (s *SecretScanningAlertEvent) GetAlert() *SecretScanningAlert { - if s == nil { - return nil - } - return s.Alert -} - -// GetEnterprise returns the Enterprise field. -func (s *SecretScanningAlertEvent) GetEnterprise() *Enterprise { - if s == nil { - return nil - } - return s.Enterprise -} - -// GetInstallation returns the Installation field. 
-func (s *SecretScanningAlertEvent) GetInstallation() *Installation { - if s == nil { - return nil - } - return s.Installation -} - -// GetOrganization returns the Organization field. -func (s *SecretScanningAlertEvent) GetOrganization() *Organization { - if s == nil { - return nil - } - return s.Organization -} - -// GetRepo returns the Repo field. -func (s *SecretScanningAlertEvent) GetRepo() *Repository { - if s == nil { - return nil - } - return s.Repo -} - -// GetSender returns the Sender field. -func (s *SecretScanningAlertEvent) GetSender() *User { - if s == nil { - return nil - } - return s.Sender -} - -// GetDetails returns the Details field. -func (s *SecretScanningAlertLocation) GetDetails() *SecretScanningAlertLocationDetails { - if s == nil { - return nil - } - return s.Details -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocation) GetType() string { - if s == nil || s.Type == nil { - return "" - } - return *s.Type -} - -// GetBlobSHA returns the BlobSHA field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetBlobSHA() string { - if s == nil || s.BlobSHA == nil { - return "" - } - return *s.BlobSHA -} - -// GetBlobURL returns the BlobURL field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetBlobURL() string { - if s == nil || s.BlobURL == nil { - return "" - } - return *s.BlobURL -} - -// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetCommitSHA() string { - if s == nil || s.CommitSHA == nil { - return "" - } - return *s.CommitSHA -} - -// GetCommitURL returns the CommitURL field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetCommitURL() string { - if s == nil || s.CommitURL == nil { - return "" - } - return *s.CommitURL -} - -// GetEndColumn returns the EndColumn field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetEndColumn() int { - if s == nil || s.EndColumn == nil { - return 0 - } - return *s.EndColumn -} - -// GetEndLine returns the EndLine field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetEndLine() int { - if s == nil || s.EndLine == nil { - return 0 - } - return *s.EndLine -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetPath() string { - if s == nil || s.Path == nil { - return "" - } - return *s.Path -} - -// GetStartColumn returns the StartColumn field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetStartColumn() int { - if s == nil || s.StartColumn == nil { - return 0 - } - return *s.StartColumn -} - -// GetStartline returns the Startline field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetStartline() int { - if s == nil || s.Startline == nil { - return 0 - } - return *s.Startline -} - -// GetResolution returns the Resolution field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertUpdateOptions) GetResolution() string { - if s == nil || s.Resolution == nil { - return "" - } - return *s.Resolution -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. 
-func (s *SecretScanningPushProtection) GetStatus() string { - if s == nil || s.Status == nil { - return "" - } - return *s.Status -} - -// GetAuthor returns the Author field. -func (s *SecurityAdvisory) GetAuthor() *User { - if s == nil { - return nil - } - return s.Author -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetClosedAt() Timestamp { - if s == nil || s.ClosedAt == nil { - return Timestamp{} - } - return *s.ClosedAt -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetCVEID returns the CVEID field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetCVEID() string { - if s == nil || s.CVEID == nil { - return "" - } - return *s.CVEID -} - -// GetCVSS returns the CVSS field. -func (s *SecurityAdvisory) GetCVSS() *AdvisoryCVSS { - if s == nil { - return nil - } - return s.CVSS -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetDescription() string { - if s == nil || s.Description == nil { - return "" - } - return *s.Description -} - -// GetGHSAID returns the GHSAID field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetGHSAID() string { - if s == nil || s.GHSAID == nil { - return "" - } - return *s.GHSAID -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetHTMLURL() string { - if s == nil || s.HTMLURL == nil { - return "" - } - return *s.HTMLURL -} - -// GetPrivateFork returns the PrivateFork field. -func (s *SecurityAdvisory) GetPrivateFork() *Repository { - if s == nil { - return nil - } - return s.PrivateFork -} - -// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetPublishedAt() Timestamp { - if s == nil || s.PublishedAt == nil { - return Timestamp{} - } - return *s.PublishedAt -} - -// GetPublisher returns the Publisher field. -func (s *SecurityAdvisory) GetPublisher() *User { - if s == nil { - return nil - } - return s.Publisher -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetSeverity() string { - if s == nil || s.Severity == nil { - return "" - } - return *s.Severity -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetState() string { - if s == nil || s.State == nil { - return "" - } - return *s.State -} - -// GetSubmission returns the Submission field. -func (s *SecurityAdvisory) GetSubmission() *SecurityAdvisorySubmission { - if s == nil { - return nil - } - return s.Submission -} - -// GetSummary returns the Summary field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetSummary() string { - if s == nil || s.Summary == nil { - return "" - } - return *s.Summary -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetUpdatedAt() Timestamp { - if s == nil || s.UpdatedAt == nil { - return Timestamp{} - } - return *s.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (s *SecurityAdvisory) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetWithdrawnAt returns the WithdrawnAt field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetWithdrawnAt() Timestamp { - if s == nil || s.WithdrawnAt == nil { - return Timestamp{} - } - return *s.WithdrawnAt -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisoryEvent) GetAction() string { - if s == nil || s.Action == nil { - return "" - } - return *s.Action -} - -// GetEnterprise returns the Enterprise field. -func (s *SecurityAdvisoryEvent) GetEnterprise() *Enterprise { - if s == nil { - return nil - } - return s.Enterprise -} - -// GetInstallation returns the Installation field. -func (s *SecurityAdvisoryEvent) GetInstallation() *Installation { - if s == nil { - return nil - } - return s.Installation -} - -// GetOrganization returns the Organization field. -func (s *SecurityAdvisoryEvent) GetOrganization() *Organization { - if s == nil { - return nil - } - return s.Organization -} - -// GetRepository returns the Repository field. -func (s *SecurityAdvisoryEvent) GetRepository() *Repository { - if s == nil { - return nil - } - return s.Repository -} - -// GetSecurityAdvisory returns the SecurityAdvisory field. -func (s *SecurityAdvisoryEvent) GetSecurityAdvisory() *SecurityAdvisory { - if s == nil { - return nil - } - return s.SecurityAdvisory -} - -// GetSender returns the Sender field. -func (s *SecurityAdvisoryEvent) GetSender() *User { - if s == nil { - return nil - } - return s.Sender -} - -// GetAccepted returns the Accepted field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisorySubmission) GetAccepted() bool { - if s == nil || s.Accepted == nil { - return false - } - return *s.Accepted -} - -// GetAdvancedSecurity returns the AdvancedSecurity field. -func (s *SecurityAndAnalysis) GetAdvancedSecurity() *AdvancedSecurity { - if s == nil { - return nil - } - return s.AdvancedSecurity -} - -// GetDependabotSecurityUpdates returns the DependabotSecurityUpdates field. -func (s *SecurityAndAnalysis) GetDependabotSecurityUpdates() *DependabotSecurityUpdates { - if s == nil { - return nil - } - return s.DependabotSecurityUpdates -} - -// GetSecretScanning returns the SecretScanning field. -func (s *SecurityAndAnalysis) GetSecretScanning() *SecretScanning { - if s == nil { - return nil - } - return s.SecretScanning -} - -// GetSecretScanningPushProtection returns the SecretScanningPushProtection field. -func (s *SecurityAndAnalysis) GetSecretScanningPushProtection() *SecretScanningPushProtection { - if s == nil { - return nil - } - return s.SecretScanningPushProtection -} - -// GetFrom returns the From field. -func (s *SecurityAndAnalysisChange) GetFrom() *SecurityAndAnalysisChangeFrom { - if s == nil { - return nil - } - return s.From -} - -// GetSecurityAndAnalysis returns the SecurityAndAnalysis field. -func (s *SecurityAndAnalysisChangeFrom) GetSecurityAndAnalysis() *SecurityAndAnalysis { - if s == nil { - return nil - } - return s.SecurityAndAnalysis -} - -// GetChanges returns the Changes field. -func (s *SecurityAndAnalysisEvent) GetChanges() *SecurityAndAnalysisChange { - if s == nil { - return nil - } - return s.Changes -} - -// GetEnterprise returns the Enterprise field. -func (s *SecurityAndAnalysisEvent) GetEnterprise() *Enterprise { - if s == nil { - return nil - } - return s.Enterprise -} - -// GetInstallation returns the Installation field. 
-func (s *SecurityAndAnalysisEvent) GetInstallation() *Installation { - if s == nil { - return nil - } - return s.Installation -} - -// GetOrganization returns the Organization field. -func (s *SecurityAndAnalysisEvent) GetOrganization() *Organization { - if s == nil { - return nil - } - return s.Organization -} - -// GetRepository returns the Repository field. -func (s *SecurityAndAnalysisEvent) GetRepository() *Repository { - if s == nil { - return nil - } - return s.Repository -} - -// GetSender returns the Sender field. -func (s *SecurityAndAnalysisEvent) GetSender() *User { - if s == nil { - return nil - } - return s.Sender -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (s *SelectedReposList) GetTotalCount() int { - if s == nil || s.TotalCount == nil { - return 0 - } - return *s.TotalCount -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (s *SignatureRequirementEnforcementLevelChanges) GetFrom() string { - if s == nil || s.From == nil { - return "" - } - return *s.From -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (s *SignaturesProtectedBranch) GetEnabled() bool { - if s == nil || s.Enabled == nil { - return false - } - return *s.Enabled -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *SignaturesProtectedBranch) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetPayload returns the Payload field if it's non-nil, zero value otherwise. -func (s *SignatureVerification) GetPayload() string { - if s == nil || s.Payload == nil { - return "" - } - return *s.Payload -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (s *SignatureVerification) GetReason() string { - if s == nil || s.Reason == nil { - return "" - } - return *s.Reason -} - -// GetSignature returns the Signature field if it's non-nil, zero value otherwise. -func (s *SignatureVerification) GetSignature() string { - if s == nil || s.Signature == nil { - return "" - } - return *s.Signature -} - -// GetVerified returns the Verified field if it's non-nil, zero value otherwise. -func (s *SignatureVerification) GetVerified() bool { - if s == nil || s.Verified == nil { - return false - } - return *s.Verified -} - -// GetActor returns the Actor field. -func (s *Source) GetActor() *User { - if s == nil { - return nil - } - return s.Actor -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *Source) GetID() int64 { - if s == nil || s.ID == nil { - return 0 - } - return *s.ID -} - -// GetIssue returns the Issue field. -func (s *Source) GetIssue() *Issue { - if s == nil { - return nil - } - return s.Issue -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (s *Source) GetType() string { - if s == nil || s.Type == nil { - return "" - } - return *s.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *Source) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetEmail() string { - if s == nil || s.Email == nil { - return "" - } - return *s.Email -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. 
-func (s *SourceImportAuthor) GetID() int64 { - if s == nil || s.ID == nil { - return 0 - } - return *s.ID -} - -// GetImportURL returns the ImportURL field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetImportURL() string { - if s == nil || s.ImportURL == nil { - return "" - } - return *s.ImportURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetName() string { - if s == nil || s.Name == nil { - return "" - } - return *s.Name -} - -// GetRemoteID returns the RemoteID field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetRemoteID() string { - if s == nil || s.RemoteID == nil { - return "" - } - return *s.RemoteID -} - -// GetRemoteName returns the RemoteName field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetRemoteName() string { - if s == nil || s.RemoteName == nil { - return "" - } - return *s.RemoteName -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (s *SSHSigningKey) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *SSHSigningKey) GetID() int64 { - if s == nil || s.ID == nil { - return 0 - } - return *s.ID -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (s *SSHSigningKey) GetKey() string { - if s == nil || s.Key == nil { - return "" - } - return *s.Key -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (s *SSHSigningKey) GetTitle() string { - if s == nil || s.Title == nil { - return "" - } - return *s.Title -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (s *StarEvent) GetAction() string { - if s == nil || s.Action == nil { - return "" - } - return *s.Action -} - -// GetInstallation returns the Installation field. -func (s *StarEvent) GetInstallation() *Installation { - if s == nil { - return nil - } - return s.Installation -} - -// GetOrg returns the Org field. -func (s *StarEvent) GetOrg() *Organization { - if s == nil { - return nil - } - return s.Org -} - -// GetRepo returns the Repo field. -func (s *StarEvent) GetRepo() *Repository { - if s == nil { - return nil - } - return s.Repo -} - -// GetSender returns the Sender field. -func (s *StarEvent) GetSender() *User { - if s == nil { - return nil - } - return s.Sender -} - -// GetStarredAt returns the StarredAt field if it's non-nil, zero value otherwise. -func (s *StarEvent) GetStarredAt() Timestamp { - if s == nil || s.StarredAt == nil { - return Timestamp{} - } - return *s.StarredAt -} - -// GetStarredAt returns the StarredAt field if it's non-nil, zero value otherwise. -func (s *Stargazer) GetStarredAt() Timestamp { - if s == nil || s.StarredAt == nil { - return Timestamp{} - } - return *s.StarredAt -} - -// GetUser returns the User field. -func (s *Stargazer) GetUser() *User { - if s == nil { - return nil - } - return s.User -} - -// GetRepository returns the Repository field. -func (s *StarredRepository) GetRepository() *Repository { - if s == nil { - return nil - } - return s.Repository -} - -// GetStarredAt returns the StarredAt field if it's non-nil, zero value otherwise. 
-func (s *StarredRepository) GetStarredAt() Timestamp { - if s == nil || s.StarredAt == nil { - return Timestamp{} - } - return *s.StarredAt -} - -// GetCommit returns the Commit field. -func (s *StatusEvent) GetCommit() *RepositoryCommit { - if s == nil { - return nil - } - return s.Commit -} - -// GetContext returns the Context field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetContext() string { - if s == nil || s.Context == nil { - return "" - } - return *s.Context -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetDescription() string { - if s == nil || s.Description == nil { - return "" - } - return *s.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetID() int64 { - if s == nil || s.ID == nil { - return 0 - } - return *s.ID -} - -// GetInstallation returns the Installation field. -func (s *StatusEvent) GetInstallation() *Installation { - if s == nil { - return nil - } - return s.Installation -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetName() string { - if s == nil || s.Name == nil { - return "" - } - return *s.Name -} - -// GetOrg returns the Org field. -func (s *StatusEvent) GetOrg() *Organization { - if s == nil { - return nil - } - return s.Org -} - -// GetRepo returns the Repo field. -func (s *StatusEvent) GetRepo() *Repository { - if s == nil { - return nil - } - return s.Repo -} - -// GetSender returns the Sender field. -func (s *StatusEvent) GetSender() *User { - if s == nil { - return nil - } - return s.Sender -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetSHA() string { - if s == nil || s.SHA == nil { - return "" - } - return *s.SHA -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetState() string { - if s == nil || s.State == nil { - return "" - } - return *s.State -} - -// GetTargetURL returns the TargetURL field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetTargetURL() string { - if s == nil || s.TargetURL == nil { - return "" - } - return *s.TargetURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetUpdatedAt() Timestamp { - if s == nil || s.UpdatedAt == nil { - return Timestamp{} - } - return *s.UpdatedAt -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (s *Subscription) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetIgnored returns the Ignored field if it's non-nil, zero value otherwise. -func (s *Subscription) GetIgnored() bool { - if s == nil || s.Ignored == nil { - return false - } - return *s.Ignored -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (s *Subscription) GetReason() string { - if s == nil || s.Reason == nil { - return "" - } - return *s.Reason -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. 
-func (s *Subscription) GetRepositoryURL() string { - if s == nil || s.RepositoryURL == nil { - return "" - } - return *s.RepositoryURL -} - -// GetSubscribed returns the Subscribed field if it's non-nil, zero value otherwise. -func (s *Subscription) GetSubscribed() bool { - if s == nil || s.Subscribed == nil { - return false - } - return *s.Subscribed -} - -// GetThreadURL returns the ThreadURL field if it's non-nil, zero value otherwise. -func (s *Subscription) GetThreadURL() string { - if s == nil || s.ThreadURL == nil { - return "" - } - return *s.ThreadURL -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *Subscription) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (t *Tag) GetMessage() string { - if t == nil || t.Message == nil { - return "" - } - return *t.Message -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (t *Tag) GetNodeID() string { - if t == nil || t.NodeID == nil { - return "" - } - return *t.NodeID -} - -// GetObject returns the Object field. -func (t *Tag) GetObject() *GitObject { - if t == nil { - return nil - } - return t.Object -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (t *Tag) GetSHA() string { - if t == nil || t.SHA == nil { - return "" - } - return *t.SHA -} - -// GetTag returns the Tag field if it's non-nil, zero value otherwise. -func (t *Tag) GetTag() string { - if t == nil || t.Tag == nil { - return "" - } - return *t.Tag -} - -// GetTagger returns the Tagger field. -func (t *Tag) GetTagger() *CommitAuthor { - if t == nil { - return nil - } - return t.Tagger -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (t *Tag) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetVerification returns the Verification field. -func (t *Tag) GetVerification() *SignatureVerification { - if t == nil { - return nil - } - return t.Verification -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (t *TagProtection) GetID() int64 { - if t == nil || t.ID == nil { - return 0 - } - return *t.ID -} - -// GetPattern returns the Pattern field if it's non-nil, zero value otherwise. -func (t *TagProtection) GetPattern() string { - if t == nil || t.Pattern == nil { - return "" - } - return *t.Pattern -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. -func (t *TaskStep) GetCompletedAt() Timestamp { - if t == nil || t.CompletedAt == nil { - return Timestamp{} - } - return *t.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (t *TaskStep) GetConclusion() string { - if t == nil || t.Conclusion == nil { - return "" - } - return *t.Conclusion -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *TaskStep) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (t *TaskStep) GetNumber() int64 { - if t == nil || t.Number == nil { - return 0 - } - return *t.Number -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. 
-func (t *TaskStep) GetStartedAt() Timestamp { - if t == nil || t.StartedAt == nil { - return Timestamp{} - } - return *t.StartedAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (t *TaskStep) GetStatus() string { - if t == nil || t.Status == nil { - return "" - } - return *t.Status -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (t *Team) GetDescription() string { - if t == nil || t.Description == nil { - return "" - } - return *t.Description -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (t *Team) GetHTMLURL() string { - if t == nil || t.HTMLURL == nil { - return "" - } - return *t.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (t *Team) GetID() int64 { - if t == nil || t.ID == nil { - return 0 - } - return *t.ID -} - -// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. -func (t *Team) GetLDAPDN() string { - if t == nil || t.LDAPDN == nil { - return "" - } - return *t.LDAPDN -} - -// GetMembersCount returns the MembersCount field if it's non-nil, zero value otherwise. -func (t *Team) GetMembersCount() int { - if t == nil || t.MembersCount == nil { - return 0 - } - return *t.MembersCount -} - -// GetMembersURL returns the MembersURL field if it's non-nil, zero value otherwise. -func (t *Team) GetMembersURL() string { - if t == nil || t.MembersURL == nil { - return "" - } - return *t.MembersURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *Team) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (t *Team) GetNodeID() string { - if t == nil || t.NodeID == nil { - return "" - } - return *t.NodeID -} - -// GetOrganization returns the Organization field. -func (t *Team) GetOrganization() *Organization { - if t == nil { - return nil - } - return t.Organization -} - -// GetParent returns the Parent field. -func (t *Team) GetParent() *Team { - if t == nil { - return nil - } - return t.Parent -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (t *Team) GetPermission() string { - if t == nil || t.Permission == nil { - return "" - } - return *t.Permission -} - -// GetPermissions returns the Permissions map if it's non-nil, an empty map otherwise. -func (t *Team) GetPermissions() map[string]bool { - if t == nil || t.Permissions == nil { - return map[string]bool{} - } - return t.Permissions -} - -// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. -func (t *Team) GetPrivacy() string { - if t == nil || t.Privacy == nil { - return "" - } - return *t.Privacy -} - -// GetReposCount returns the ReposCount field if it's non-nil, zero value otherwise. -func (t *Team) GetReposCount() int { - if t == nil || t.ReposCount == nil { - return 0 - } - return *t.ReposCount -} - -// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. -func (t *Team) GetRepositoriesURL() string { - if t == nil || t.RepositoriesURL == nil { - return "" - } - return *t.RepositoriesURL -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (t *Team) GetSlug() string { - if t == nil || t.Slug == nil { - return "" - } - return *t.Slug -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (t *Team) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetInstallation returns the Installation field. -func (t *TeamAddEvent) GetInstallation() *Installation { - if t == nil { - return nil - } - return t.Installation -} - -// GetOrg returns the Org field. -func (t *TeamAddEvent) GetOrg() *Organization { - if t == nil { - return nil - } - return t.Org -} - -// GetRepo returns the Repo field. -func (t *TeamAddEvent) GetRepo() *Repository { - if t == nil { - return nil - } - return t.Repo -} - -// GetSender returns the Sender field. -func (t *TeamAddEvent) GetSender() *User { - if t == nil { - return nil - } - return t.Sender -} - -// GetTeam returns the Team field. -func (t *TeamAddEvent) GetTeam() *Team { - if t == nil { - return nil - } - return t.Team -} - -// GetDescription returns the Description field. -func (t *TeamChange) GetDescription() *TeamDescription { - if t == nil { - return nil - } - return t.Description -} - -// GetName returns the Name field. -func (t *TeamChange) GetName() *TeamName { - if t == nil { - return nil - } - return t.Name -} - -// GetPrivacy returns the Privacy field. -func (t *TeamChange) GetPrivacy() *TeamPrivacy { - if t == nil { - return nil - } - return t.Privacy -} - -// GetRepository returns the Repository field. -func (t *TeamChange) GetRepository() *TeamRepository { - if t == nil { - return nil - } - return t.Repository -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (t *TeamDescription) GetFrom() string { - if t == nil || t.From == nil { - return "" - } - return *t.From -} - -// GetAuthor returns the Author field. -func (t *TeamDiscussion) GetAuthor() *User { - if t == nil { - return nil - } - return t.Author -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetBody() string { - if t == nil || t.Body == nil { - return "" - } - return *t.Body -} - -// GetBodyHTML returns the BodyHTML field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetBodyHTML() string { - if t == nil || t.BodyHTML == nil { - return "" - } - return *t.BodyHTML -} - -// GetBodyVersion returns the BodyVersion field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetBodyVersion() string { - if t == nil || t.BodyVersion == nil { - return "" - } - return *t.BodyVersion -} - -// GetCommentsCount returns the CommentsCount field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetCommentsCount() int { - if t == nil || t.CommentsCount == nil { - return 0 - } - return *t.CommentsCount -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetCommentsURL() string { - if t == nil || t.CommentsURL == nil { - return "" - } - return *t.CommentsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetCreatedAt() Timestamp { - if t == nil || t.CreatedAt == nil { - return Timestamp{} - } - return *t.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetHTMLURL() string { - if t == nil || t.HTMLURL == nil { - return "" - } - return *t.HTMLURL -} - -// GetLastEditedAt returns the LastEditedAt field if it's non-nil, zero value otherwise. 
-func (t *TeamDiscussion) GetLastEditedAt() Timestamp { - if t == nil || t.LastEditedAt == nil { - return Timestamp{} - } - return *t.LastEditedAt -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetNodeID() string { - if t == nil || t.NodeID == nil { - return "" - } - return *t.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetNumber() int { - if t == nil || t.Number == nil { - return 0 - } - return *t.Number -} - -// GetPinned returns the Pinned field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetPinned() bool { - if t == nil || t.Pinned == nil { - return false - } - return *t.Pinned -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetPrivate() bool { - if t == nil || t.Private == nil { - return false - } - return *t.Private -} - -// GetReactions returns the Reactions field. -func (t *TeamDiscussion) GetReactions() *Reactions { - if t == nil { - return nil - } - return t.Reactions -} - -// GetTeamURL returns the TeamURL field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetTeamURL() string { - if t == nil || t.TeamURL == nil { - return "" - } - return *t.TeamURL -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetTitle() string { - if t == nil || t.Title == nil { - return "" - } - return *t.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetUpdatedAt() Timestamp { - if t == nil || t.UpdatedAt == nil { - return Timestamp{} - } - return *t.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (t *TeamEvent) GetAction() string { - if t == nil || t.Action == nil { - return "" - } - return *t.Action -} - -// GetChanges returns the Changes field. -func (t *TeamEvent) GetChanges() *TeamChange { - if t == nil { - return nil - } - return t.Changes -} - -// GetInstallation returns the Installation field. -func (t *TeamEvent) GetInstallation() *Installation { - if t == nil { - return nil - } - return t.Installation -} - -// GetOrg returns the Org field. -func (t *TeamEvent) GetOrg() *Organization { - if t == nil { - return nil - } - return t.Org -} - -// GetRepo returns the Repo field. -func (t *TeamEvent) GetRepo() *Repository { - if t == nil { - return nil - } - return t.Repo -} - -// GetSender returns the Sender field. -func (t *TeamEvent) GetSender() *User { - if t == nil { - return nil - } - return t.Sender -} - -// GetTeam returns the Team field. -func (t *TeamEvent) GetTeam() *Team { - if t == nil { - return nil - } - return t.Team -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetDescription() string { - if t == nil || t.Description == nil { - return "" - } - return *t.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetID() int64 { - if t == nil || t.ID == nil { - return 0 - } - return *t.ID -} - -// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. 
-func (t *TeamLDAPMapping) GetLDAPDN() string { - if t == nil || t.LDAPDN == nil { - return "" - } - return *t.LDAPDN -} - -// GetMembersURL returns the MembersURL field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetMembersURL() string { - if t == nil || t.MembersURL == nil { - return "" - } - return *t.MembersURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetPermission() string { - if t == nil || t.Permission == nil { - return "" - } - return *t.Permission -} - -// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetPrivacy() string { - if t == nil || t.Privacy == nil { - return "" - } - return *t.Privacy -} - -// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetRepositoriesURL() string { - if t == nil || t.RepositoriesURL == nil { - return "" - } - return *t.RepositoriesURL -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetSlug() string { - if t == nil || t.Slug == nil { - return "" - } - return *t.Slug -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (t *TeamName) GetFrom() string { - if t == nil || t.From == nil { - return "" - } - return *t.From -} - -// GetFrom returns the From field. -func (t *TeamPermissions) GetFrom() *TeamPermissionsFrom { - if t == nil { - return nil - } - return t.From -} - -// GetAdmin returns the Admin field if it's non-nil, zero value otherwise. -func (t *TeamPermissionsFrom) GetAdmin() bool { - if t == nil || t.Admin == nil { - return false - } - return *t.Admin -} - -// GetPull returns the Pull field if it's non-nil, zero value otherwise. -func (t *TeamPermissionsFrom) GetPull() bool { - if t == nil || t.Pull == nil { - return false - } - return *t.Pull -} - -// GetPush returns the Push field if it's non-nil, zero value otherwise. -func (t *TeamPermissionsFrom) GetPush() bool { - if t == nil || t.Push == nil { - return false - } - return *t.Push -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (t *TeamPrivacy) GetFrom() string { - if t == nil || t.From == nil { - return "" - } - return *t.From -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (t *TeamProjectOptions) GetPermission() string { - if t == nil || t.Permission == nil { - return "" - } - return *t.Permission -} - -// GetPermissions returns the Permissions field. -func (t *TeamRepository) GetPermissions() *TeamPermissions { - if t == nil { - return nil - } - return t.Permissions -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (t *TemplateRepoRequest) GetDescription() string { - if t == nil || t.Description == nil { - return "" - } - return *t.Description -} - -// GetIncludeAllBranches returns the IncludeAllBranches field if it's non-nil, zero value otherwise. 
-func (t *TemplateRepoRequest) GetIncludeAllBranches() bool { - if t == nil || t.IncludeAllBranches == nil { - return false - } - return *t.IncludeAllBranches -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *TemplateRepoRequest) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetOwner returns the Owner field if it's non-nil, zero value otherwise. -func (t *TemplateRepoRequest) GetOwner() string { - if t == nil || t.Owner == nil { - return "" - } - return *t.Owner -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (t *TemplateRepoRequest) GetPrivate() bool { - if t == nil || t.Private == nil { - return false - } - return *t.Private -} - -// GetFragment returns the Fragment field if it's non-nil, zero value otherwise. -func (t *TextMatch) GetFragment() string { - if t == nil || t.Fragment == nil { - return "" - } - return *t.Fragment -} - -// GetObjectType returns the ObjectType field if it's non-nil, zero value otherwise. -func (t *TextMatch) GetObjectType() string { - if t == nil || t.ObjectType == nil { - return "" - } - return *t.ObjectType -} - -// GetObjectURL returns the ObjectURL field if it's non-nil, zero value otherwise. -func (t *TextMatch) GetObjectURL() string { - if t == nil || t.ObjectURL == nil { - return "" - } - return *t.ObjectURL -} - -// GetProperty returns the Property field if it's non-nil, zero value otherwise. -func (t *TextMatch) GetProperty() string { - if t == nil || t.Property == nil { - return "" - } - return *t.Property -} - -// GetActor returns the Actor field. -func (t *Timeline) GetActor() *User { - if t == nil { - return nil - } - return t.Actor -} - -// GetAssignee returns the Assignee field. -func (t *Timeline) GetAssignee() *User { - if t == nil { - return nil - } - return t.Assignee -} - -// GetAssigner returns the Assigner field. -func (t *Timeline) GetAssigner() *User { - if t == nil { - return nil - } - return t.Assigner -} - -// GetAuthor returns the Author field. -func (t *Timeline) GetAuthor() *CommitAuthor { - if t == nil { - return nil - } - return t.Author -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (t *Timeline) GetBody() string { - if t == nil || t.Body == nil { - return "" - } - return *t.Body -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (t *Timeline) GetCommitID() string { - if t == nil || t.CommitID == nil { - return "" - } - return *t.CommitID -} - -// GetCommitter returns the Committer field. -func (t *Timeline) GetCommitter() *CommitAuthor { - if t == nil { - return nil - } - return t.Committer -} - -// GetCommitURL returns the CommitURL field if it's non-nil, zero value otherwise. -func (t *Timeline) GetCommitURL() string { - if t == nil || t.CommitURL == nil { - return "" - } - return *t.CommitURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (t *Timeline) GetCreatedAt() Timestamp { - if t == nil || t.CreatedAt == nil { - return Timestamp{} - } - return *t.CreatedAt -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (t *Timeline) GetEvent() string { - if t == nil || t.Event == nil { - return "" - } - return *t.Event -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (t *Timeline) GetID() int64 { - if t == nil || t.ID == nil { - return 0 - } - return *t.ID -} - -// GetLabel returns the Label field. 
-func (t *Timeline) GetLabel() *Label { - if t == nil { - return nil - } - return t.Label -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (t *Timeline) GetMessage() string { - if t == nil || t.Message == nil { - return "" - } - return *t.Message -} - -// GetMilestone returns the Milestone field. -func (t *Timeline) GetMilestone() *Milestone { - if t == nil { - return nil - } - return t.Milestone -} - -// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. -func (t *Timeline) GetPerformedViaGithubApp() *App { - if t == nil { - return nil - } - return t.PerformedViaGithubApp -} - -// GetProjectCard returns the ProjectCard field. -func (t *Timeline) GetProjectCard() *ProjectCard { - if t == nil { - return nil - } - return t.ProjectCard -} - -// GetRename returns the Rename field. -func (t *Timeline) GetRename() *Rename { - if t == nil { - return nil - } - return t.Rename -} - -// GetRequestedTeam returns the RequestedTeam field. -func (t *Timeline) GetRequestedTeam() *Team { - if t == nil { - return nil - } - return t.RequestedTeam -} - -// GetRequester returns the Requester field. -func (t *Timeline) GetRequester() *User { - if t == nil { - return nil - } - return t.Requester -} - -// GetReviewer returns the Reviewer field. -func (t *Timeline) GetReviewer() *User { - if t == nil { - return nil - } - return t.Reviewer -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (t *Timeline) GetSHA() string { - if t == nil || t.SHA == nil { - return "" - } - return *t.SHA -} - -// GetSource returns the Source field. -func (t *Timeline) GetSource() *Source { - if t == nil { - return nil - } - return t.Source -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (t *Timeline) GetState() string { - if t == nil || t.State == nil { - return "" - } - return *t.State -} - -// GetSubmittedAt returns the SubmittedAt field if it's non-nil, zero value otherwise. -func (t *Timeline) GetSubmittedAt() Timestamp { - if t == nil || t.SubmittedAt == nil { - return Timestamp{} - } - return *t.SubmittedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (t *Timeline) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetUser returns the User field. -func (t *Timeline) GetUser() *User { - if t == nil { - return nil - } - return t.User -} - -// GetGUID returns the GUID field if it's non-nil, zero value otherwise. -func (t *Tool) GetGUID() string { - if t == nil || t.GUID == nil { - return "" - } - return *t.GUID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *Tool) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetVersion returns the Version field if it's non-nil, zero value otherwise. -func (t *Tool) GetVersion() string { - if t == nil || t.Version == nil { - return "" - } - return *t.Version -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetCreatedAt() Timestamp { - if t == nil || t.CreatedAt == nil { - return Timestamp{} - } - return *t.CreatedAt -} - -// GetCreatedBy returns the CreatedBy field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetCreatedBy() string { - if t == nil || t.CreatedBy == nil { - return "" - } - return *t.CreatedBy -} - -// GetCurated returns the Curated field if it's non-nil, zero value otherwise. 
-func (t *TopicResult) GetCurated() bool { - if t == nil || t.Curated == nil { - return false - } - return *t.Curated -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetDescription() string { - if t == nil || t.Description == nil { - return "" - } - return *t.Description -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetDisplayName() string { - if t == nil || t.DisplayName == nil { - return "" - } - return *t.DisplayName -} - -// GetFeatured returns the Featured field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetFeatured() bool { - if t == nil || t.Featured == nil { - return false - } - return *t.Featured -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetScore returns the Score field. -func (t *TopicResult) GetScore() *float64 { - if t == nil { - return nil - } - return t.Score -} - -// GetShortDescription returns the ShortDescription field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetShortDescription() string { - if t == nil || t.ShortDescription == nil { - return "" - } - return *t.ShortDescription -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetUpdatedAt() string { - if t == nil || t.UpdatedAt == nil { - return "" - } - return *t.UpdatedAt -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (t *TopicsSearchResult) GetIncompleteResults() bool { - if t == nil || t.IncompleteResults == nil { - return false - } - return *t.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (t *TopicsSearchResult) GetTotal() int { - if t == nil || t.Total == nil { - return 0 - } - return *t.Total -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. -func (t *TrafficClones) GetCount() int { - if t == nil || t.Count == nil { - return 0 - } - return *t.Count -} - -// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. -func (t *TrafficClones) GetUniques() int { - if t == nil || t.Uniques == nil { - return 0 - } - return *t.Uniques -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. -func (t *TrafficData) GetCount() int { - if t == nil || t.Count == nil { - return 0 - } - return *t.Count -} - -// GetTimestamp returns the Timestamp field if it's non-nil, zero value otherwise. -func (t *TrafficData) GetTimestamp() Timestamp { - if t == nil || t.Timestamp == nil { - return Timestamp{} - } - return *t.Timestamp -} - -// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. -func (t *TrafficData) GetUniques() int { - if t == nil || t.Uniques == nil { - return 0 - } - return *t.Uniques -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. -func (t *TrafficPath) GetCount() int { - if t == nil || t.Count == nil { - return 0 - } - return *t.Count -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (t *TrafficPath) GetPath() string { - if t == nil || t.Path == nil { - return "" - } - return *t.Path -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. 
-func (t *TrafficPath) GetTitle() string { - if t == nil || t.Title == nil { - return "" - } - return *t.Title -} - -// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. -func (t *TrafficPath) GetUniques() int { - if t == nil || t.Uniques == nil { - return 0 - } - return *t.Uniques -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. -func (t *TrafficReferrer) GetCount() int { - if t == nil || t.Count == nil { - return 0 - } - return *t.Count -} - -// GetReferrer returns the Referrer field if it's non-nil, zero value otherwise. -func (t *TrafficReferrer) GetReferrer() string { - if t == nil || t.Referrer == nil { - return "" - } - return *t.Referrer -} - -// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. -func (t *TrafficReferrer) GetUniques() int { - if t == nil || t.Uniques == nil { - return 0 - } - return *t.Uniques -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. -func (t *TrafficViews) GetCount() int { - if t == nil || t.Count == nil { - return 0 - } - return *t.Count -} - -// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. -func (t *TrafficViews) GetUniques() int { - if t == nil || t.Uniques == nil { - return 0 - } - return *t.Uniques -} - -// GetNewName returns the NewName field if it's non-nil, zero value otherwise. -func (t *TransferRequest) GetNewName() string { - if t == nil || t.NewName == nil { - return "" - } - return *t.NewName -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (t *Tree) GetSHA() string { - if t == nil || t.SHA == nil { - return "" - } - return *t.SHA -} - -// GetTruncated returns the Truncated field if it's non-nil, zero value otherwise. -func (t *Tree) GetTruncated() bool { - if t == nil || t.Truncated == nil { - return false - } - return *t.Truncated -} - -// GetContent returns the Content field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetContent() string { - if t == nil || t.Content == nil { - return "" - } - return *t.Content -} - -// GetMode returns the Mode field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetMode() string { - if t == nil || t.Mode == nil { - return "" - } - return *t.Mode -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetPath() string { - if t == nil || t.Path == nil { - return "" - } - return *t.Path -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetSHA() string { - if t == nil || t.SHA == nil { - return "" - } - return *t.SHA -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetSize() int { - if t == nil || t.Size == nil { - return 0 - } - return *t.Size -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetType() string { - if t == nil || t.Type == nil { - return "" - } - return *t.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (u *UpdateAttributeForSCIMUserOperations) GetPath() string { - if u == nil || u.Path == nil { - return "" - } - return *u.Path -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. 
-func (u *UpdateCheckRunOptions) GetCompletedAt() Timestamp { - if u == nil || u.CompletedAt == nil { - return Timestamp{} - } - return *u.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (u *UpdateCheckRunOptions) GetConclusion() string { - if u == nil || u.Conclusion == nil { - return "" - } - return *u.Conclusion -} - -// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. -func (u *UpdateCheckRunOptions) GetDetailsURL() string { - if u == nil || u.DetailsURL == nil { - return "" - } - return *u.DetailsURL -} - -// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. -func (u *UpdateCheckRunOptions) GetExternalID() string { - if u == nil || u.ExternalID == nil { - return "" - } - return *u.ExternalID -} - -// GetOutput returns the Output field. -func (u *UpdateCheckRunOptions) GetOutput() *CheckRunOutput { - if u == nil { - return nil - } - return u.Output -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (u *UpdateCheckRunOptions) GetStatus() string { - if u == nil || u.Status == nil { - return "" - } - return *u.Status -} - -// GetQuerySuite returns the QuerySuite field if it's non-nil, zero value otherwise. -func (u *UpdateDefaultSetupConfigurationOptions) GetQuerySuite() string { - if u == nil || u.QuerySuite == nil { - return "" - } - return *u.QuerySuite -} - -// GetRunID returns the RunID field if it's non-nil, zero value otherwise. -func (u *UpdateDefaultSetupConfigurationResponse) GetRunID() int64 { - if u == nil || u.RunID == nil { - return 0 - } - return *u.RunID -} - -// GetRunURL returns the RunURL field if it's non-nil, zero value otherwise. -func (u *UpdateDefaultSetupConfigurationResponse) GetRunURL() string { - if u == nil || u.RunURL == nil { - return "" - } - return *u.RunURL -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. -func (u *UpdateEnterpriseRunnerGroupRequest) GetAllowsPublicRepositories() bool { - if u == nil || u.AllowsPublicRepositories == nil { - return false - } - return *u.AllowsPublicRepositories -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (u *UpdateEnterpriseRunnerGroupRequest) GetName() string { - if u == nil || u.Name == nil { - return "" - } - return *u.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (u *UpdateEnterpriseRunnerGroupRequest) GetRestrictedToWorkflows() bool { - if u == nil || u.RestrictedToWorkflows == nil { - return false - } - return *u.RestrictedToWorkflows -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (u *UpdateEnterpriseRunnerGroupRequest) GetVisibility() string { - if u == nil || u.Visibility == nil { - return "" - } - return *u.Visibility -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. -func (u *UpdateRunnerGroupRequest) GetAllowsPublicRepositories() bool { - if u == nil || u.AllowsPublicRepositories == nil { - return false - } - return *u.AllowsPublicRepositories -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (u *UpdateRunnerGroupRequest) GetName() string { - if u == nil || u.Name == nil { - return "" - } - return *u.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (u *UpdateRunnerGroupRequest) GetRestrictedToWorkflows() bool { - if u == nil || u.RestrictedToWorkflows == nil { - return false - } - return *u.RestrictedToWorkflows -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (u *UpdateRunnerGroupRequest) GetVisibility() string { - if u == nil || u.Visibility == nil { - return "" - } - return *u.Visibility -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (u *User) GetAvatarURL() string { - if u == nil || u.AvatarURL == nil { - return "" - } - return *u.AvatarURL -} - -// GetBio returns the Bio field if it's non-nil, zero value otherwise. -func (u *User) GetBio() string { - if u == nil || u.Bio == nil { - return "" - } - return *u.Bio -} - -// GetBlog returns the Blog field if it's non-nil, zero value otherwise. -func (u *User) GetBlog() string { - if u == nil || u.Blog == nil { - return "" - } - return *u.Blog -} - -// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. -func (u *User) GetCollaborators() int { - if u == nil || u.Collaborators == nil { - return 0 - } - return *u.Collaborators -} - -// GetCompany returns the Company field if it's non-nil, zero value otherwise. -func (u *User) GetCompany() string { - if u == nil || u.Company == nil { - return "" - } - return *u.Company -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (u *User) GetCreatedAt() Timestamp { - if u == nil || u.CreatedAt == nil { - return Timestamp{} - } - return *u.CreatedAt -} - -// GetDiskUsage returns the DiskUsage field if it's non-nil, zero value otherwise. -func (u *User) GetDiskUsage() int { - if u == nil || u.DiskUsage == nil { - return 0 - } - return *u.DiskUsage -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (u *User) GetEmail() string { - if u == nil || u.Email == nil { - return "" - } - return *u.Email -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (u *User) GetEventsURL() string { - if u == nil || u.EventsURL == nil { - return "" - } - return *u.EventsURL -} - -// GetFollowers returns the Followers field if it's non-nil, zero value otherwise. -func (u *User) GetFollowers() int { - if u == nil || u.Followers == nil { - return 0 - } - return *u.Followers -} - -// GetFollowersURL returns the FollowersURL field if it's non-nil, zero value otherwise. -func (u *User) GetFollowersURL() string { - if u == nil || u.FollowersURL == nil { - return "" - } - return *u.FollowersURL -} - -// GetFollowing returns the Following field if it's non-nil, zero value otherwise. -func (u *User) GetFollowing() int { - if u == nil || u.Following == nil { - return 0 - } - return *u.Following -} - -// GetFollowingURL returns the FollowingURL field if it's non-nil, zero value otherwise. -func (u *User) GetFollowingURL() string { - if u == nil || u.FollowingURL == nil { - return "" - } - return *u.FollowingURL -} - -// GetGistsURL returns the GistsURL field if it's non-nil, zero value otherwise. -func (u *User) GetGistsURL() string { - if u == nil || u.GistsURL == nil { - return "" - } - return *u.GistsURL -} - -// GetGravatarID returns the GravatarID field if it's non-nil, zero value otherwise. 
-func (u *User) GetGravatarID() string { - if u == nil || u.GravatarID == nil { - return "" - } - return *u.GravatarID -} - -// GetHireable returns the Hireable field if it's non-nil, zero value otherwise. -func (u *User) GetHireable() bool { - if u == nil || u.Hireable == nil { - return false - } - return *u.Hireable -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (u *User) GetHTMLURL() string { - if u == nil || u.HTMLURL == nil { - return "" - } - return *u.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (u *User) GetID() int64 { - if u == nil || u.ID == nil { - return 0 - } - return *u.ID -} - -// GetLdapDn returns the LdapDn field if it's non-nil, zero value otherwise. -func (u *User) GetLdapDn() string { - if u == nil || u.LdapDn == nil { - return "" - } - return *u.LdapDn -} - -// GetLocation returns the Location field if it's non-nil, zero value otherwise. -func (u *User) GetLocation() string { - if u == nil || u.Location == nil { - return "" - } - return *u.Location -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (u *User) GetLogin() string { - if u == nil || u.Login == nil { - return "" - } - return *u.Login -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (u *User) GetName() string { - if u == nil || u.Name == nil { - return "" - } - return *u.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (u *User) GetNodeID() string { - if u == nil || u.NodeID == nil { - return "" - } - return *u.NodeID -} - -// GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. -func (u *User) GetOrganizationsURL() string { - if u == nil || u.OrganizationsURL == nil { - return "" - } - return *u.OrganizationsURL -} - -// GetOwnedPrivateRepos returns the OwnedPrivateRepos field if it's non-nil, zero value otherwise. -func (u *User) GetOwnedPrivateRepos() int64 { - if u == nil || u.OwnedPrivateRepos == nil { - return 0 - } - return *u.OwnedPrivateRepos -} - -// GetPermissions returns the Permissions map if it's non-nil, an empty map otherwise. -func (u *User) GetPermissions() map[string]bool { - if u == nil || u.Permissions == nil { - return map[string]bool{} - } - return u.Permissions -} - -// GetPlan returns the Plan field. -func (u *User) GetPlan() *Plan { - if u == nil { - return nil - } - return u.Plan -} - -// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. -func (u *User) GetPrivateGists() int { - if u == nil || u.PrivateGists == nil { - return 0 - } - return *u.PrivateGists -} - -// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. -func (u *User) GetPublicGists() int { - if u == nil || u.PublicGists == nil { - return 0 - } - return *u.PublicGists -} - -// GetPublicRepos returns the PublicRepos field if it's non-nil, zero value otherwise. -func (u *User) GetPublicRepos() int { - if u == nil || u.PublicRepos == nil { - return 0 - } - return *u.PublicRepos -} - -// GetReceivedEventsURL returns the ReceivedEventsURL field if it's non-nil, zero value otherwise. -func (u *User) GetReceivedEventsURL() string { - if u == nil || u.ReceivedEventsURL == nil { - return "" - } - return *u.ReceivedEventsURL -} - -// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. 
-func (u *User) GetReposURL() string { - if u == nil || u.ReposURL == nil { - return "" - } - return *u.ReposURL -} - -// GetRoleName returns the RoleName field if it's non-nil, zero value otherwise. -func (u *User) GetRoleName() string { - if u == nil || u.RoleName == nil { - return "" - } - return *u.RoleName -} - -// GetSiteAdmin returns the SiteAdmin field if it's non-nil, zero value otherwise. -func (u *User) GetSiteAdmin() bool { - if u == nil || u.SiteAdmin == nil { - return false - } - return *u.SiteAdmin -} - -// GetStarredURL returns the StarredURL field if it's non-nil, zero value otherwise. -func (u *User) GetStarredURL() string { - if u == nil || u.StarredURL == nil { - return "" - } - return *u.StarredURL -} - -// GetSubscriptionsURL returns the SubscriptionsURL field if it's non-nil, zero value otherwise. -func (u *User) GetSubscriptionsURL() string { - if u == nil || u.SubscriptionsURL == nil { - return "" - } - return *u.SubscriptionsURL -} - -// GetSuspendedAt returns the SuspendedAt field if it's non-nil, zero value otherwise. -func (u *User) GetSuspendedAt() Timestamp { - if u == nil || u.SuspendedAt == nil { - return Timestamp{} - } - return *u.SuspendedAt -} - -// GetTotalPrivateRepos returns the TotalPrivateRepos field if it's non-nil, zero value otherwise. -func (u *User) GetTotalPrivateRepos() int64 { - if u == nil || u.TotalPrivateRepos == nil { - return 0 - } - return *u.TotalPrivateRepos -} - -// GetTwitterUsername returns the TwitterUsername field if it's non-nil, zero value otherwise. -func (u *User) GetTwitterUsername() string { - if u == nil || u.TwitterUsername == nil { - return "" - } - return *u.TwitterUsername -} - -// GetTwoFactorAuthentication returns the TwoFactorAuthentication field if it's non-nil, zero value otherwise. -func (u *User) GetTwoFactorAuthentication() bool { - if u == nil || u.TwoFactorAuthentication == nil { - return false - } - return *u.TwoFactorAuthentication -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (u *User) GetType() string { - if u == nil || u.Type == nil { - return "" - } - return *u.Type -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (u *User) GetUpdatedAt() Timestamp { - if u == nil || u.UpdatedAt == nil { - return Timestamp{} - } - return *u.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (u *User) GetURL() string { - if u == nil || u.URL == nil { - return "" - } - return *u.URL -} - -// GetApp returns the App field. -func (u *UserAuthorization) GetApp() *OAuthAPP { - if u == nil { - return nil - } - return u.App -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetCreatedAt() Timestamp { - if u == nil || u.CreatedAt == nil { - return Timestamp{} - } - return *u.CreatedAt -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetFingerprint() string { - if u == nil || u.Fingerprint == nil { - return "" - } - return *u.Fingerprint -} - -// GetHashedToken returns the HashedToken field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetHashedToken() string { - if u == nil || u.HashedToken == nil { - return "" - } - return *u.HashedToken -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. 
-func (u *UserAuthorization) GetID() int64 { - if u == nil || u.ID == nil { - return 0 - } - return *u.ID -} - -// GetNote returns the Note field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetNote() string { - if u == nil || u.Note == nil { - return "" - } - return *u.Note -} - -// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetNoteURL() string { - if u == nil || u.NoteURL == nil { - return "" - } - return *u.NoteURL -} - -// GetToken returns the Token field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetToken() string { - if u == nil || u.Token == nil { - return "" - } - return *u.Token -} - -// GetTokenLastEight returns the TokenLastEight field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetTokenLastEight() string { - if u == nil || u.TokenLastEight == nil { - return "" - } - return *u.TokenLastEight -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetUpdatedAt() Timestamp { - if u == nil || u.UpdatedAt == nil { - return Timestamp{} - } - return *u.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetURL() string { - if u == nil || u.URL == nil { - return "" - } - return *u.URL -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (u *UserContext) GetMessage() string { - if u == nil || u.Message == nil { - return "" - } - return *u.Message -} - -// GetOcticon returns the Octicon field if it's non-nil, zero value otherwise. -func (u *UserContext) GetOcticon() string { - if u == nil || u.Octicon == nil { - return "" - } - return *u.Octicon -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (u *UserEmail) GetEmail() string { - if u == nil || u.Email == nil { - return "" - } - return *u.Email -} - -// GetPrimary returns the Primary field if it's non-nil, zero value otherwise. -func (u *UserEmail) GetPrimary() bool { - if u == nil || u.Primary == nil { - return false - } - return *u.Primary -} - -// GetVerified returns the Verified field if it's non-nil, zero value otherwise. -func (u *UserEmail) GetVerified() bool { - if u == nil || u.Verified == nil { - return false - } - return *u.Verified -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (u *UserEmail) GetVisibility() string { - if u == nil || u.Visibility == nil { - return "" - } - return *u.Visibility -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (u *UserEvent) GetAction() string { - if u == nil || u.Action == nil { - return "" - } - return *u.Action -} - -// GetEnterprise returns the Enterprise field. -func (u *UserEvent) GetEnterprise() *Enterprise { - if u == nil { - return nil - } - return u.Enterprise -} - -// GetInstallation returns the Installation field. -func (u *UserEvent) GetInstallation() *Installation { - if u == nil { - return nil - } - return u.Installation -} - -// GetSender returns the Sender field. -func (u *UserEvent) GetSender() *User { - if u == nil { - return nil - } - return u.Sender -} - -// GetUser returns the User field. -func (u *UserEvent) GetUser() *User { - if u == nil { - return nil - } - return u.User -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. 
-func (u *UserLDAPMapping) GetAvatarURL() string { - if u == nil || u.AvatarURL == nil { - return "" - } - return *u.AvatarURL -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetEventsURL() string { - if u == nil || u.EventsURL == nil { - return "" - } - return *u.EventsURL -} - -// GetFollowersURL returns the FollowersURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetFollowersURL() string { - if u == nil || u.FollowersURL == nil { - return "" - } - return *u.FollowersURL -} - -// GetFollowingURL returns the FollowingURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetFollowingURL() string { - if u == nil || u.FollowingURL == nil { - return "" - } - return *u.FollowingURL -} - -// GetGistsURL returns the GistsURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetGistsURL() string { - if u == nil || u.GistsURL == nil { - return "" - } - return *u.GistsURL -} - -// GetGravatarID returns the GravatarID field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetGravatarID() string { - if u == nil || u.GravatarID == nil { - return "" - } - return *u.GravatarID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetID() int64 { - if u == nil || u.ID == nil { - return 0 - } - return *u.ID -} - -// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetLDAPDN() string { - if u == nil || u.LDAPDN == nil { - return "" - } - return *u.LDAPDN -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetLogin() string { - if u == nil || u.Login == nil { - return "" - } - return *u.Login -} - -// GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetOrganizationsURL() string { - if u == nil || u.OrganizationsURL == nil { - return "" - } - return *u.OrganizationsURL -} - -// GetReceivedEventsURL returns the ReceivedEventsURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetReceivedEventsURL() string { - if u == nil || u.ReceivedEventsURL == nil { - return "" - } - return *u.ReceivedEventsURL -} - -// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetReposURL() string { - if u == nil || u.ReposURL == nil { - return "" - } - return *u.ReposURL -} - -// GetSiteAdmin returns the SiteAdmin field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetSiteAdmin() bool { - if u == nil || u.SiteAdmin == nil { - return false - } - return *u.SiteAdmin -} - -// GetStarredURL returns the StarredURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetStarredURL() string { - if u == nil || u.StarredURL == nil { - return "" - } - return *u.StarredURL -} - -// GetSubscriptionsURL returns the SubscriptionsURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetSubscriptionsURL() string { - if u == nil || u.SubscriptionsURL == nil { - return "" - } - return *u.SubscriptionsURL -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetType() string { - if u == nil || u.Type == nil { - return "" - } - return *u.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (u *UserLDAPMapping) GetURL() string { - if u == nil || u.URL == nil { - return "" - } - return *u.URL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetCreatedAt() string { - if u == nil || u.CreatedAt == nil { - return "" - } - return *u.CreatedAt -} - -// GetExcludeAttachments returns the ExcludeAttachments field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetExcludeAttachments() bool { - if u == nil || u.ExcludeAttachments == nil { - return false - } - return *u.ExcludeAttachments -} - -// GetGUID returns the GUID field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetGUID() string { - if u == nil || u.GUID == nil { - return "" - } - return *u.GUID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetID() int64 { - if u == nil || u.ID == nil { - return 0 - } - return *u.ID -} - -// GetLockRepositories returns the LockRepositories field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetLockRepositories() bool { - if u == nil || u.LockRepositories == nil { - return false - } - return *u.LockRepositories -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetState() string { - if u == nil || u.State == nil { - return "" - } - return *u.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetUpdatedAt() string { - if u == nil || u.UpdatedAt == nil { - return "" - } - return *u.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetURL() string { - if u == nil || u.URL == nil { - return "" - } - return *u.URL -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (u *UsersSearchResult) GetIncompleteResults() bool { - if u == nil || u.IncompleteResults == nil { - return false - } - return *u.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (u *UsersSearchResult) GetTotal() int { - if u == nil || u.Total == nil { - return 0 - } - return *u.Total -} - -// GetAdminUsers returns the AdminUsers field if it's non-nil, zero value otherwise. -func (u *UserStats) GetAdminUsers() int { - if u == nil || u.AdminUsers == nil { - return 0 - } - return *u.AdminUsers -} - -// GetSuspendedUsers returns the SuspendedUsers field if it's non-nil, zero value otherwise. -func (u *UserStats) GetSuspendedUsers() int { - if u == nil || u.SuspendedUsers == nil { - return 0 - } - return *u.SuspendedUsers -} - -// GetTotalUsers returns the TotalUsers field if it's non-nil, zero value otherwise. -func (u *UserStats) GetTotalUsers() int { - if u == nil || u.TotalUsers == nil { - return 0 - } - return *u.TotalUsers -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (u *UserSuspendOptions) GetReason() string { - if u == nil || u.Reason == nil { - return "" - } - return *u.Reason -} - -// GetEcosystem returns the Ecosystem field if it's non-nil, zero value otherwise. -func (v *VulnerabilityPackage) GetEcosystem() string { - if v == nil || v.Ecosystem == nil { - return "" - } - return *v.Ecosystem -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (v *VulnerabilityPackage) GetName() string { - if v == nil || v.Name == nil { - return "" - } - return *v.Name -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (w *WatchEvent) GetAction() string { - if w == nil || w.Action == nil { - return "" - } - return *w.Action -} - -// GetInstallation returns the Installation field. -func (w *WatchEvent) GetInstallation() *Installation { - if w == nil { - return nil - } - return w.Installation -} - -// GetOrg returns the Org field. -func (w *WatchEvent) GetOrg() *Organization { - if w == nil { - return nil - } - return w.Org -} - -// GetRepo returns the Repo field. -func (w *WatchEvent) GetRepo() *Repository { - if w == nil { - return nil - } - return w.Repo -} - -// GetSender returns the Sender field. -func (w *WatchEvent) GetSender() *User { - if w == nil { - return nil - } - return w.Sender -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (w *WeeklyCommitActivity) GetTotal() int { - if w == nil || w.Total == nil { - return 0 - } - return *w.Total -} - -// GetWeek returns the Week field if it's non-nil, zero value otherwise. -func (w *WeeklyCommitActivity) GetWeek() Timestamp { - if w == nil || w.Week == nil { - return Timestamp{} - } - return *w.Week -} - -// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. -func (w *WeeklyStats) GetAdditions() int { - if w == nil || w.Additions == nil { - return 0 - } - return *w.Additions -} - -// GetCommits returns the Commits field if it's non-nil, zero value otherwise. -func (w *WeeklyStats) GetCommits() int { - if w == nil || w.Commits == nil { - return 0 - } - return *w.Commits -} - -// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. -func (w *WeeklyStats) GetDeletions() int { - if w == nil || w.Deletions == nil { - return 0 - } - return *w.Deletions -} - -// GetWeek returns the Week field if it's non-nil, zero value otherwise. -func (w *WeeklyStats) GetWeek() Timestamp { - if w == nil || w.Week == nil { - return Timestamp{} - } - return *w.Week -} - -// GetBadgeURL returns the BadgeURL field if it's non-nil, zero value otherwise. -func (w *Workflow) GetBadgeURL() string { - if w == nil || w.BadgeURL == nil { - return "" - } - return *w.BadgeURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (w *Workflow) GetCreatedAt() Timestamp { - if w == nil || w.CreatedAt == nil { - return Timestamp{} - } - return *w.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (w *Workflow) GetHTMLURL() string { - if w == nil || w.HTMLURL == nil { - return "" - } - return *w.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (w *Workflow) GetID() int64 { - if w == nil || w.ID == nil { - return 0 - } - return *w.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (w *Workflow) GetName() string { - if w == nil || w.Name == nil { - return "" - } - return *w.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (w *Workflow) GetNodeID() string { - if w == nil || w.NodeID == nil { - return "" - } - return *w.NodeID -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (w *Workflow) GetPath() string { - if w == nil || w.Path == nil { - return "" - } - return *w.Path -} - -// GetState returns the State field if it's non-nil, zero value otherwise. 
-func (w *Workflow) GetState() string { - if w == nil || w.State == nil { - return "" - } - return *w.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (w *Workflow) GetUpdatedAt() Timestamp { - if w == nil || w.UpdatedAt == nil { - return Timestamp{} - } - return *w.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (w *Workflow) GetURL() string { - if w == nil || w.URL == nil { - return "" - } - return *w.URL -} - -// GetTotalMS returns the TotalMS field if it's non-nil, zero value otherwise. -func (w *WorkflowBill) GetTotalMS() int64 { - if w == nil || w.TotalMS == nil { - return 0 - } - return *w.TotalMS -} - -// GetInstallation returns the Installation field. -func (w *WorkflowDispatchEvent) GetInstallation() *Installation { - if w == nil { - return nil - } - return w.Installation -} - -// GetOrg returns the Org field. -func (w *WorkflowDispatchEvent) GetOrg() *Organization { - if w == nil { - return nil - } - return w.Org -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (w *WorkflowDispatchEvent) GetRef() string { - if w == nil || w.Ref == nil { - return "" - } - return *w.Ref -} - -// GetRepo returns the Repo field. -func (w *WorkflowDispatchEvent) GetRepo() *Repository { - if w == nil { - return nil - } - return w.Repo -} - -// GetSender returns the Sender field. -func (w *WorkflowDispatchEvent) GetSender() *User { - if w == nil { - return nil - } - return w.Sender -} - -// GetWorkflow returns the Workflow field if it's non-nil, zero value otherwise. -func (w *WorkflowDispatchEvent) GetWorkflow() string { - if w == nil || w.Workflow == nil { - return "" - } - return *w.Workflow -} - -// GetCheckRunURL returns the CheckRunURL field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetCheckRunURL() string { - if w == nil || w.CheckRunURL == nil { - return "" - } - return *w.CheckRunURL -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetCompletedAt() Timestamp { - if w == nil || w.CompletedAt == nil { - return Timestamp{} - } - return *w.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetConclusion() string { - if w == nil || w.Conclusion == nil { - return "" - } - return *w.Conclusion -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetCreatedAt() Timestamp { - if w == nil || w.CreatedAt == nil { - return Timestamp{} - } - return *w.CreatedAt -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetHeadBranch() string { - if w == nil || w.HeadBranch == nil { - return "" - } - return *w.HeadBranch -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetHeadSHA() string { - if w == nil || w.HeadSHA == nil { - return "" - } - return *w.HeadSHA -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetHTMLURL() string { - if w == nil || w.HTMLURL == nil { - return "" - } - return *w.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetID() int64 { - if w == nil || w.ID == nil { - return 0 - } - return *w.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (w *WorkflowJob) GetName() string { - if w == nil || w.Name == nil { - return "" - } - return *w.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetNodeID() string { - if w == nil || w.NodeID == nil { - return "" - } - return *w.NodeID -} - -// GetRunAttempt returns the RunAttempt field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunAttempt() int64 { - if w == nil || w.RunAttempt == nil { - return 0 - } - return *w.RunAttempt -} - -// GetRunID returns the RunID field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunID() int64 { - if w == nil || w.RunID == nil { - return 0 - } - return *w.RunID -} - -// GetRunnerGroupID returns the RunnerGroupID field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunnerGroupID() int64 { - if w == nil || w.RunnerGroupID == nil { - return 0 - } - return *w.RunnerGroupID -} - -// GetRunnerGroupName returns the RunnerGroupName field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunnerGroupName() string { - if w == nil || w.RunnerGroupName == nil { - return "" - } - return *w.RunnerGroupName -} - -// GetRunnerID returns the RunnerID field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunnerID() int64 { - if w == nil || w.RunnerID == nil { - return 0 - } - return *w.RunnerID -} - -// GetRunnerName returns the RunnerName field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunnerName() string { - if w == nil || w.RunnerName == nil { - return "" - } - return *w.RunnerName -} - -// GetRunURL returns the RunURL field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunURL() string { - if w == nil || w.RunURL == nil { - return "" - } - return *w.RunURL -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetStartedAt() Timestamp { - if w == nil || w.StartedAt == nil { - return Timestamp{} - } - return *w.StartedAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetStatus() string { - if w == nil || w.Status == nil { - return "" - } - return *w.Status -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetURL() string { - if w == nil || w.URL == nil { - return "" - } - return *w.URL -} - -// GetWorkflowName returns the WorkflowName field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetWorkflowName() string { - if w == nil || w.WorkflowName == nil { - return "" - } - return *w.WorkflowName -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (w *WorkflowJobEvent) GetAction() string { - if w == nil || w.Action == nil { - return "" - } - return *w.Action -} - -// GetInstallation returns the Installation field. -func (w *WorkflowJobEvent) GetInstallation() *Installation { - if w == nil { - return nil - } - return w.Installation -} - -// GetOrg returns the Org field. -func (w *WorkflowJobEvent) GetOrg() *Organization { - if w == nil { - return nil - } - return w.Org -} - -// GetRepo returns the Repo field. -func (w *WorkflowJobEvent) GetRepo() *Repository { - if w == nil { - return nil - } - return w.Repo -} - -// GetSender returns the Sender field. -func (w *WorkflowJobEvent) GetSender() *User { - if w == nil { - return nil - } - return w.Sender -} - -// GetWorkflowJob returns the WorkflowJob field. 
-func (w *WorkflowJobEvent) GetWorkflowJob() *WorkflowJob { - if w == nil { - return nil - } - return w.WorkflowJob -} - -// GetActor returns the Actor field. -func (w *WorkflowRun) GetActor() *User { - if w == nil { - return nil - } - return w.Actor -} - -// GetArtifactsURL returns the ArtifactsURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetArtifactsURL() string { - if w == nil || w.ArtifactsURL == nil { - return "" - } - return *w.ArtifactsURL -} - -// GetCancelURL returns the CancelURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetCancelURL() string { - if w == nil || w.CancelURL == nil { - return "" - } - return *w.CancelURL -} - -// GetCheckSuiteID returns the CheckSuiteID field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetCheckSuiteID() int64 { - if w == nil || w.CheckSuiteID == nil { - return 0 - } - return *w.CheckSuiteID -} - -// GetCheckSuiteNodeID returns the CheckSuiteNodeID field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetCheckSuiteNodeID() string { - if w == nil || w.CheckSuiteNodeID == nil { - return "" - } - return *w.CheckSuiteNodeID -} - -// GetCheckSuiteURL returns the CheckSuiteURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetCheckSuiteURL() string { - if w == nil || w.CheckSuiteURL == nil { - return "" - } - return *w.CheckSuiteURL -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetConclusion() string { - if w == nil || w.Conclusion == nil { - return "" - } - return *w.Conclusion -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetCreatedAt() Timestamp { - if w == nil || w.CreatedAt == nil { - return Timestamp{} - } - return *w.CreatedAt -} - -// GetDisplayTitle returns the DisplayTitle field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetDisplayTitle() string { - if w == nil || w.DisplayTitle == nil { - return "" - } - return *w.DisplayTitle -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetEvent() string { - if w == nil || w.Event == nil { - return "" - } - return *w.Event -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetHeadBranch() string { - if w == nil || w.HeadBranch == nil { - return "" - } - return *w.HeadBranch -} - -// GetHeadCommit returns the HeadCommit field. -func (w *WorkflowRun) GetHeadCommit() *HeadCommit { - if w == nil { - return nil - } - return w.HeadCommit -} - -// GetHeadRepository returns the HeadRepository field. -func (w *WorkflowRun) GetHeadRepository() *Repository { - if w == nil { - return nil - } - return w.HeadRepository -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetHeadSHA() string { - if w == nil || w.HeadSHA == nil { - return "" - } - return *w.HeadSHA -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetHTMLURL() string { - if w == nil || w.HTMLURL == nil { - return "" - } - return *w.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetID() int64 { - if w == nil || w.ID == nil { - return 0 - } - return *w.ID -} - -// GetJobsURL returns the JobsURL field if it's non-nil, zero value otherwise. 
-func (w *WorkflowRun) GetJobsURL() string { - if w == nil || w.JobsURL == nil { - return "" - } - return *w.JobsURL -} - -// GetLogsURL returns the LogsURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetLogsURL() string { - if w == nil || w.LogsURL == nil { - return "" - } - return *w.LogsURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetName() string { - if w == nil || w.Name == nil { - return "" - } - return *w.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetNodeID() string { - if w == nil || w.NodeID == nil { - return "" - } - return *w.NodeID -} - -// GetPreviousAttemptURL returns the PreviousAttemptURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetPreviousAttemptURL() string { - if w == nil || w.PreviousAttemptURL == nil { - return "" - } - return *w.PreviousAttemptURL -} - -// GetRepository returns the Repository field. -func (w *WorkflowRun) GetRepository() *Repository { - if w == nil { - return nil - } - return w.Repository -} - -// GetRerunURL returns the RerunURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetRerunURL() string { - if w == nil || w.RerunURL == nil { - return "" - } - return *w.RerunURL -} - -// GetRunAttempt returns the RunAttempt field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetRunAttempt() int { - if w == nil || w.RunAttempt == nil { - return 0 - } - return *w.RunAttempt -} - -// GetRunNumber returns the RunNumber field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetRunNumber() int { - if w == nil || w.RunNumber == nil { - return 0 - } - return *w.RunNumber -} - -// GetRunStartedAt returns the RunStartedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetRunStartedAt() Timestamp { - if w == nil || w.RunStartedAt == nil { - return Timestamp{} - } - return *w.RunStartedAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetStatus() string { - if w == nil || w.Status == nil { - return "" - } - return *w.Status -} - -// GetTriggeringActor returns the TriggeringActor field. -func (w *WorkflowRun) GetTriggeringActor() *User { - if w == nil { - return nil - } - return w.TriggeringActor -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetUpdatedAt() Timestamp { - if w == nil || w.UpdatedAt == nil { - return Timestamp{} - } - return *w.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetURL() string { - if w == nil || w.URL == nil { - return "" - } - return *w.URL -} - -// GetWorkflowID returns the WorkflowID field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetWorkflowID() int64 { - if w == nil || w.WorkflowID == nil { - return 0 - } - return *w.WorkflowID -} - -// GetWorkflowURL returns the WorkflowURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetWorkflowURL() string { - if w == nil || w.WorkflowURL == nil { - return "" - } - return *w.WorkflowURL -} - -// GetExcludePullRequests returns the ExcludePullRequests field if it's non-nil, zero value otherwise. 
-func (w *WorkflowRunAttemptOptions) GetExcludePullRequests() bool { - if w == nil || w.ExcludePullRequests == nil { - return false - } - return *w.ExcludePullRequests -} - -// GetJobs returns the Jobs field if it's non-nil, zero value otherwise. -func (w *WorkflowRunBill) GetJobs() int { - if w == nil || w.Jobs == nil { - return 0 - } - return *w.Jobs -} - -// GetTotalMS returns the TotalMS field if it's non-nil, zero value otherwise. -func (w *WorkflowRunBill) GetTotalMS() int64 { - if w == nil || w.TotalMS == nil { - return 0 - } - return *w.TotalMS -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (w *WorkflowRunEvent) GetAction() string { - if w == nil || w.Action == nil { - return "" - } - return *w.Action -} - -// GetInstallation returns the Installation field. -func (w *WorkflowRunEvent) GetInstallation() *Installation { - if w == nil { - return nil - } - return w.Installation -} - -// GetOrg returns the Org field. -func (w *WorkflowRunEvent) GetOrg() *Organization { - if w == nil { - return nil - } - return w.Org -} - -// GetRepo returns the Repo field. -func (w *WorkflowRunEvent) GetRepo() *Repository { - if w == nil { - return nil - } - return w.Repo -} - -// GetSender returns the Sender field. -func (w *WorkflowRunEvent) GetSender() *User { - if w == nil { - return nil - } - return w.Sender -} - -// GetWorkflow returns the Workflow field. -func (w *WorkflowRunEvent) GetWorkflow() *Workflow { - if w == nil { - return nil - } - return w.Workflow -} - -// GetWorkflowRun returns the WorkflowRun field. -func (w *WorkflowRunEvent) GetWorkflowRun() *WorkflowRun { - if w == nil { - return nil - } - return w.WorkflowRun -} - -// GetDurationMS returns the DurationMS field if it's non-nil, zero value otherwise. -func (w *WorkflowRunJobRun) GetDurationMS() int64 { - if w == nil || w.DurationMS == nil { - return 0 - } - return *w.DurationMS -} - -// GetJobID returns the JobID field if it's non-nil, zero value otherwise. -func (w *WorkflowRunJobRun) GetJobID() int { - if w == nil || w.JobID == nil { - return 0 - } - return *w.JobID -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (w *WorkflowRuns) GetTotalCount() int { - if w == nil || w.TotalCount == nil { - return 0 - } - return *w.TotalCount -} - -// GetBillable returns the Billable field. -func (w *WorkflowRunUsage) GetBillable() *WorkflowRunBillMap { - if w == nil { - return nil - } - return w.Billable -} - -// GetRunDurationMS returns the RunDurationMS field if it's non-nil, zero value otherwise. -func (w *WorkflowRunUsage) GetRunDurationMS() int64 { - if w == nil || w.RunDurationMS == nil { - return 0 - } - return *w.RunDurationMS -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (w *Workflows) GetTotalCount() int { - if w == nil || w.TotalCount == nil { - return 0 - } - return *w.TotalCount -} - -// GetBillable returns the Billable field. -func (w *WorkflowUsage) GetBillable() *WorkflowBillMap { - if w == nil { - return nil - } - return w.Billable -} diff --git a/vendor/github.com/google/go-github/v57/github/github.go b/vendor/github.com/google/go-github/v57/github/github.go deleted file mode 100644 index c248b256..00000000 --- a/vendor/github.com/google/go-github/v57/github/github.go +++ /dev/null @@ -1,1537 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:generate go run gen-accessors.go -//go:generate go run gen-stringify-test.go -//go:generate ../script/metadata.sh update-go - -package github - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "net/url" - "reflect" - "strconv" - "strings" - "sync" - "time" - - "github.com/google/go-querystring/query" -) - -const ( - Version = "v57.0.0" - - defaultAPIVersion = "2022-11-28" - defaultBaseURL = "https://api.github.com/" - defaultUserAgent = "go-github" + "/" + Version - uploadBaseURL = "https://uploads.github.com/" - - headerAPIVersion = "X-GitHub-Api-Version" - headerRateLimit = "X-RateLimit-Limit" - headerRateRemaining = "X-RateLimit-Remaining" - headerRateReset = "X-RateLimit-Reset" - headerOTP = "X-GitHub-OTP" - headerRetryAfter = "Retry-After" - - headerTokenExpiration = "GitHub-Authentication-Token-Expiration" - - mediaTypeV3 = "application/vnd.github.v3+json" - defaultMediaType = "application/octet-stream" - mediaTypeV3SHA = "application/vnd.github.v3.sha" - mediaTypeV3Diff = "application/vnd.github.v3.diff" - mediaTypeV3Patch = "application/vnd.github.v3.patch" - mediaTypeOrgPermissionRepo = "application/vnd.github.v3.repository+json" - mediaTypeIssueImportAPI = "application/vnd.github.golden-comet-preview+json" - - // Media Type values to access preview APIs - // These media types will be added to the API request as headers - // and used to enable particular features on GitHub API that are still in preview. - // After some time, specific media types will be promoted (to a "stable" state). - // From then on, the preview headers are not required anymore to activate the additional - // feature on GitHub.com's API. However, this API header might still be needed for users - // to run a GitHub Enterprise Server on-premise. - // It's not uncommon for GitHub Enterprise Server customers to run older versions which - // would probably rely on the preview headers for some time. - // While the header promotion is going out for GitHub.com, it may be some time before it - // even arrives in GitHub Enterprise Server. - // We keep those preview headers around to avoid breaking older GitHub Enterprise Server - // versions. Additionally, non-functional (preview) headers don't create any side effects - // on GitHub Cloud version. - // - // See https://github.com/google/go-github/pull/2125 for full context. 
- - // https://developer.github.com/changes/2014-12-09-new-attributes-for-stars-api/ - mediaTypeStarringPreview = "application/vnd.github.v3.star+json" - - // https://help.github.com/enterprise/2.4/admin/guides/migrations/exporting-the-github-com-organization-s-repositories/ - mediaTypeMigrationsPreview = "application/vnd.github.wyandotte-preview+json" - - // https://developer.github.com/changes/2016-04-06-deployment-and-deployment-status-enhancements/ - mediaTypeDeploymentStatusPreview = "application/vnd.github.ant-man-preview+json" - - // https://developer.github.com/changes/2018-10-16-deployments-environments-states-and-auto-inactive-updates/ - mediaTypeExpandDeploymentStatusPreview = "application/vnd.github.flash-preview+json" - - // https://developer.github.com/changes/2016-05-12-reactions-api-preview/ - mediaTypeReactionsPreview = "application/vnd.github.squirrel-girl-preview" - - // https://developer.github.com/changes/2016-05-23-timeline-preview-api/ - mediaTypeTimelinePreview = "application/vnd.github.mockingbird-preview+json" - - // https://developer.github.com/changes/2016-09-14-projects-api/ - mediaTypeProjectsPreview = "application/vnd.github.inertia-preview+json" - - // https://developer.github.com/changes/2017-01-05-commit-search-api/ - mediaTypeCommitSearchPreview = "application/vnd.github.cloak-preview+json" - - // https://developer.github.com/changes/2017-02-28-user-blocking-apis-and-webhook/ - mediaTypeBlockUsersPreview = "application/vnd.github.giant-sentry-fist-preview+json" - - // https://developer.github.com/changes/2017-05-23-coc-api/ - mediaTypeCodesOfConductPreview = "application/vnd.github.scarlet-witch-preview+json" - - // https://developer.github.com/changes/2017-07-17-update-topics-on-repositories/ - mediaTypeTopicsPreview = "application/vnd.github.mercy-preview+json" - - // https://developer.github.com/changes/2018-03-16-protected-branches-required-approving-reviews/ - mediaTypeRequiredApprovingReviewsPreview = "application/vnd.github.luke-cage-preview+json" - - // https://developer.github.com/changes/2018-05-07-new-checks-api-public-beta/ - mediaTypeCheckRunsPreview = "application/vnd.github.antiope-preview+json" - - // https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/ - mediaTypePreReceiveHooksPreview = "application/vnd.github.eye-scream-preview" - - // https://developer.github.com/changes/2018-02-22-protected-branches-required-signatures/ - mediaTypeSignaturePreview = "application/vnd.github.zzzax-preview+json" - - // https://developer.github.com/changes/2018-09-05-project-card-events/ - mediaTypeProjectCardDetailsPreview = "application/vnd.github.starfox-preview+json" - - // https://developer.github.com/changes/2018-12-18-interactions-preview/ - mediaTypeInteractionRestrictionsPreview = "application/vnd.github.sombra-preview+json" - - // https://developer.github.com/changes/2019-03-14-enabling-disabling-pages/ - mediaTypeEnablePagesAPIPreview = "application/vnd.github.switcheroo-preview+json" - - // https://developer.github.com/changes/2019-04-24-vulnerability-alerts/ - mediaTypeRequiredVulnerabilityAlertsPreview = "application/vnd.github.dorian-preview+json" - - // https://developer.github.com/changes/2019-05-29-update-branch-api/ - mediaTypeUpdatePullRequestBranchPreview = "application/vnd.github.lydian-preview+json" - - // https://developer.github.com/changes/2019-04-11-pulls-branches-for-commit/ - mediaTypeListPullsOrBranchesForCommitPreview = "application/vnd.github.groot-preview+json" - - // 
https://docs.github.com/rest/previews/#repository-creation-permissions - mediaTypeMemberAllowedRepoCreationTypePreview = "application/vnd.github.surtur-preview+json" - - // https://docs.github.com/rest/previews/#create-and-use-repository-templates - mediaTypeRepositoryTemplatePreview = "application/vnd.github.baptiste-preview+json" - - // https://developer.github.com/changes/2019-10-03-multi-line-comments/ - mediaTypeMultiLineCommentsPreview = "application/vnd.github.comfort-fade-preview+json" - - // https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api/ - mediaTypeOAuthAppPreview = "application/vnd.github.doctor-strange-preview+json" - - // https://developer.github.com/changes/2019-12-03-internal-visibility-changes/ - mediaTypeRepositoryVisibilityPreview = "application/vnd.github.nebula-preview+json" - - // https://developer.github.com/changes/2018-12-10-content-attachments-api/ - mediaTypeContentAttachmentsPreview = "application/vnd.github.corsair-preview+json" -) - -var errNonNilContext = errors.New("context must be non-nil") - -// A Client manages communication with the GitHub API. -type Client struct { - clientMu sync.Mutex // clientMu protects the client during calls that modify the CheckRedirect func. - client *http.Client // HTTP client used to communicate with the API. - - // Base URL for API requests. Defaults to the public GitHub API, but can be - // set to a domain endpoint to use with GitHub Enterprise. BaseURL should - // always be specified with a trailing slash. - BaseURL *url.URL - - // Base URL for uploading files. - UploadURL *url.URL - - // User agent used when communicating with the GitHub API. - UserAgent string - - rateMu sync.Mutex - rateLimits [categories]Rate // Rate limits for the client as determined by the most recent API calls. - secondaryRateLimitReset time.Time // Secondary rate limit reset for the client as determined by the most recent API calls. - - common service // Reuse a single struct instead of allocating one for each service on the heap. - - // Services used for talking to different parts of the GitHub API. - Actions *ActionsService - Activity *ActivityService - Admin *AdminService - Apps *AppsService - Authorizations *AuthorizationsService - Billing *BillingService - Checks *ChecksService - CodeScanning *CodeScanningService - CodesOfConduct *CodesOfConductService - Codespaces *CodespacesService - Dependabot *DependabotService - DependencyGraph *DependencyGraphService - Emojis *EmojisService - Enterprise *EnterpriseService - Gists *GistsService - Git *GitService - Gitignores *GitignoresService - Interactions *InteractionsService - IssueImport *IssueImportService - Issues *IssuesService - Licenses *LicensesService - Markdown *MarkdownService - Marketplace *MarketplaceService - Meta *MetaService - Migrations *MigrationService - Organizations *OrganizationsService - Projects *ProjectsService - PullRequests *PullRequestsService - RateLimit *RateLimitService - Reactions *ReactionsService - Repositories *RepositoriesService - SCIM *SCIMService - Search *SearchService - SecretScanning *SecretScanningService - SecurityAdvisories *SecurityAdvisoriesService - Teams *TeamsService - Users *UsersService -} - -type service struct { - client *Client -} - -// Client returns the http.Client used by this GitHub client. 
-func (c *Client) Client() *http.Client { - c.clientMu.Lock() - defer c.clientMu.Unlock() - clientCopy := *c.client - return &clientCopy -} - -// ListOptions specifies the optional parameters to various List methods that -// support offset pagination. -type ListOptions struct { - // For paginated result sets, page of results to retrieve. - Page int `url:"page,omitempty"` - - // For paginated result sets, the number of results to include per page. - PerPage int `url:"per_page,omitempty"` -} - -// ListCursorOptions specifies the optional parameters to various List methods that -// support cursor pagination. -type ListCursorOptions struct { - // For paginated result sets, page of results to retrieve. - Page string `url:"page,omitempty"` - - // For paginated result sets, the number of results to include per page. - PerPage int `url:"per_page,omitempty"` - - // For paginated result sets, the number of results per page (max 100), starting from the first matching result. - // This parameter must not be used in combination with last. - First int `url:"first,omitempty"` - - // For paginated result sets, the number of results per page (max 100), starting from the last matching result. - // This parameter must not be used in combination with first. - Last int `url:"last,omitempty"` - - // A cursor, as given in the Link header. If specified, the query only searches for events after this cursor. - After string `url:"after,omitempty"` - - // A cursor, as given in the Link header. If specified, the query only searches for events before this cursor. - Before string `url:"before,omitempty"` - - // A cursor, as given in the Link header. If specified, the query continues the search using this cursor. - Cursor string `url:"cursor,omitempty"` -} - -// UploadOptions specifies the parameters to methods that support uploads. -type UploadOptions struct { - Name string `url:"name,omitempty"` - Label string `url:"label,omitempty"` - MediaType string `url:"-"` -} - -// RawType represents type of raw format of a request instead of JSON. -type RawType uint8 - -const ( - // Diff format. - Diff RawType = 1 + iota - // Patch format. - Patch -) - -// RawOptions specifies parameters when user wants to get raw format of -// a response instead of JSON. -type RawOptions struct { - Type RawType -} - -// addOptions adds the parameters in opts as URL query parameters to s. opts -// must be a struct whose fields may contain "url" tags. -func addOptions(s string, opts interface{}) (string, error) { - v := reflect.ValueOf(opts) - if v.Kind() == reflect.Ptr && v.IsNil() { - return s, nil - } - - u, err := url.Parse(s) - if err != nil { - return s, err - } - - qs, err := query.Values(opts) - if err != nil { - return s, err - } - - u.RawQuery = qs.Encode() - return u.String(), nil -} - -// NewClient returns a new GitHub API client. If a nil httpClient is -// provided, a new http.Client will be used. To use API methods which require -// authentication, either use Client.WithAuthToken or provide NewClient with -// an http.Client that will perform the authentication for you (such as that -// provided by the golang.org/x/oauth2 library). -func NewClient(httpClient *http.Client) *Client { - c := &Client{client: httpClient} - c.initialize() - return c -} - -// WithAuthToken returns a copy of the client configured to use the provided token for the Authorization header. 
-func (c *Client) WithAuthToken(token string) *Client { - c2 := c.copy() - defer c2.initialize() - transport := c2.client.Transport - if transport == nil { - transport = http.DefaultTransport - } - c2.client.Transport = roundTripperFunc( - func(req *http.Request) (*http.Response, error) { - req = req.Clone(req.Context()) - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token)) - return transport.RoundTrip(req) - }, - ) - return c2 -} - -// WithEnterpriseURLs returns a copy of the client configured to use the provided base and -// upload URLs. If the base URL does not have the suffix "/api/v3/", it will be added -// automatically. If the upload URL does not have the suffix "/api/uploads", it will be -// added automatically. -// -// Note that WithEnterpriseURLs is a convenience helper only; -// its behavior is equivalent to setting the BaseURL and UploadURL fields. -// -// Another important thing is that by default, the GitHub Enterprise URL format -// should be http(s)://[hostname]/api/v3/ or you will always receive the 406 status code. -// The upload URL format should be http(s)://[hostname]/api/uploads/. -func (c *Client) WithEnterpriseURLs(baseURL, uploadURL string) (*Client, error) { - c2 := c.copy() - defer c2.initialize() - var err error - c2.BaseURL, err = url.Parse(baseURL) - if err != nil { - return nil, err - } - - if !strings.HasSuffix(c2.BaseURL.Path, "/") { - c2.BaseURL.Path += "/" - } - if !strings.HasSuffix(c2.BaseURL.Path, "/api/v3/") && - !strings.HasPrefix(c2.BaseURL.Host, "api.") && - !strings.Contains(c2.BaseURL.Host, ".api.") { - c2.BaseURL.Path += "api/v3/" - } - - c2.UploadURL, err = url.Parse(uploadURL) - if err != nil { - return nil, err - } - - if !strings.HasSuffix(c2.UploadURL.Path, "/") { - c2.UploadURL.Path += "/" - } - if !strings.HasSuffix(c2.UploadURL.Path, "/api/uploads/") && - !strings.HasPrefix(c2.UploadURL.Host, "api.") && - !strings.Contains(c2.UploadURL.Host, ".api.") { - c2.UploadURL.Path += "api/uploads/" - } - return c2, nil -} - -// initialize sets default values and initializes services. 
-func (c *Client) initialize() { - if c.client == nil { - c.client = &http.Client{} - } - if c.BaseURL == nil { - c.BaseURL, _ = url.Parse(defaultBaseURL) - } - if c.UploadURL == nil { - c.UploadURL, _ = url.Parse(uploadBaseURL) - } - if c.UserAgent == "" { - c.UserAgent = defaultUserAgent - } - c.common.client = c - c.Actions = (*ActionsService)(&c.common) - c.Activity = (*ActivityService)(&c.common) - c.Admin = (*AdminService)(&c.common) - c.Apps = (*AppsService)(&c.common) - c.Authorizations = (*AuthorizationsService)(&c.common) - c.Billing = (*BillingService)(&c.common) - c.Checks = (*ChecksService)(&c.common) - c.CodeScanning = (*CodeScanningService)(&c.common) - c.Codespaces = (*CodespacesService)(&c.common) - c.CodesOfConduct = (*CodesOfConductService)(&c.common) - c.Dependabot = (*DependabotService)(&c.common) - c.DependencyGraph = (*DependencyGraphService)(&c.common) - c.Emojis = (*EmojisService)(&c.common) - c.Enterprise = (*EnterpriseService)(&c.common) - c.Gists = (*GistsService)(&c.common) - c.Git = (*GitService)(&c.common) - c.Gitignores = (*GitignoresService)(&c.common) - c.Interactions = (*InteractionsService)(&c.common) - c.IssueImport = (*IssueImportService)(&c.common) - c.Issues = (*IssuesService)(&c.common) - c.Licenses = (*LicensesService)(&c.common) - c.Markdown = (*MarkdownService)(&c.common) - c.Marketplace = &MarketplaceService{client: c} - c.Meta = (*MetaService)(&c.common) - c.Migrations = (*MigrationService)(&c.common) - c.Organizations = (*OrganizationsService)(&c.common) - c.Projects = (*ProjectsService)(&c.common) - c.PullRequests = (*PullRequestsService)(&c.common) - c.RateLimit = (*RateLimitService)(&c.common) - c.Reactions = (*ReactionsService)(&c.common) - c.Repositories = (*RepositoriesService)(&c.common) - c.SCIM = (*SCIMService)(&c.common) - c.Search = (*SearchService)(&c.common) - c.SecretScanning = (*SecretScanningService)(&c.common) - c.SecurityAdvisories = (*SecurityAdvisoriesService)(&c.common) - c.Teams = (*TeamsService)(&c.common) - c.Users = (*UsersService)(&c.common) -} - -// copy returns a copy of the current client. It must be initialized before use. -func (c *Client) copy() *Client { - c.clientMu.Lock() - // can't use *c here because that would copy mutexes by value. - clone := Client{ - client: c.client, - UserAgent: c.UserAgent, - BaseURL: c.BaseURL, - UploadURL: c.UploadURL, - secondaryRateLimitReset: c.secondaryRateLimitReset, - } - c.clientMu.Unlock() - if clone.client == nil { - clone.client = &http.Client{} - } - c.rateMu.Lock() - copy(clone.rateLimits[:], c.rateLimits[:]) - c.rateMu.Unlock() - return &clone -} - -// NewClientWithEnvProxy enhances NewClient with the HttpProxy env. -func NewClientWithEnvProxy() *Client { - return NewClient(&http.Client{Transport: &http.Transport{Proxy: http.ProxyFromEnvironment}}) -} - -// NewTokenClient returns a new GitHub API client authenticated with the provided token. -// Deprecated: Use NewClient(nil).WithAuthToken(token) instead. -func NewTokenClient(_ context.Context, token string) *Client { - // This always returns a nil error. - return NewClient(nil).WithAuthToken(token) -} - -// NewEnterpriseClient returns a new GitHub API client with provided -// base URL and upload URL (often is your GitHub Enterprise hostname). -// -// Deprecated: Use NewClient(httpClient).WithEnterpriseURLs(baseURL, uploadURL) instead. 
-func NewEnterpriseClient(baseURL, uploadURL string, httpClient *http.Client) (*Client, error) { - return NewClient(httpClient).WithEnterpriseURLs(baseURL, uploadURL) -} - -// RequestOption represents an option that can modify an http.Request. -type RequestOption func(req *http.Request) - -// WithVersion overrides the GitHub v3 API version for this individual request. -// For more information, see: -// https://github.blog/2022-11-28-to-infinity-and-beyond-enabling-the-future-of-githubs-rest-api-with-api-versioning/ -func WithVersion(version string) RequestOption { - return func(req *http.Request) { - req.Header.Set(headerAPIVersion, version) - } -} - -// NewRequest creates an API request. A relative URL can be provided in urlStr, -// in which case it is resolved relative to the BaseURL of the Client. -// Relative URLs should always be specified without a preceding slash. If -// specified, the value pointed to by body is JSON encoded and included as the -// request body. -func (c *Client) NewRequest(method, urlStr string, body interface{}, opts ...RequestOption) (*http.Request, error) { - if !strings.HasSuffix(c.BaseURL.Path, "/") { - return nil, fmt.Errorf("BaseURL must have a trailing slash, but %q does not", c.BaseURL) - } - - u, err := c.BaseURL.Parse(urlStr) - if err != nil { - return nil, err - } - - var buf io.ReadWriter - if body != nil { - buf = &bytes.Buffer{} - enc := json.NewEncoder(buf) - enc.SetEscapeHTML(false) - err := enc.Encode(body) - if err != nil { - return nil, err - } - } - - req, err := http.NewRequest(method, u.String(), buf) - if err != nil { - return nil, err - } - - if body != nil { - req.Header.Set("Content-Type", "application/json") - } - req.Header.Set("Accept", mediaTypeV3) - if c.UserAgent != "" { - req.Header.Set("User-Agent", c.UserAgent) - } - req.Header.Set(headerAPIVersion, defaultAPIVersion) - - for _, opt := range opts { - opt(req) - } - - return req, nil -} - -// NewFormRequest creates an API request. A relative URL can be provided in urlStr, -// in which case it is resolved relative to the BaseURL of the Client. -// Relative URLs should always be specified without a preceding slash. -// Body is sent with Content-Type: application/x-www-form-urlencoded. -func (c *Client) NewFormRequest(urlStr string, body io.Reader, opts ...RequestOption) (*http.Request, error) { - if !strings.HasSuffix(c.BaseURL.Path, "/") { - return nil, fmt.Errorf("BaseURL must have a trailing slash, but %q does not", c.BaseURL) - } - - u, err := c.BaseURL.Parse(urlStr) - if err != nil { - return nil, err - } - - req, err := http.NewRequest(http.MethodPost, u.String(), body) - if err != nil { - return nil, err - } - - req.Header.Set("Content-Type", "application/x-www-form-urlencoded") - req.Header.Set("Accept", mediaTypeV3) - if c.UserAgent != "" { - req.Header.Set("User-Agent", c.UserAgent) - } - req.Header.Set(headerAPIVersion, defaultAPIVersion) - - for _, opt := range opts { - opt(req) - } - - return req, nil -} - -// NewUploadRequest creates an upload request. A relative URL can be provided in -// urlStr, in which case it is resolved relative to the UploadURL of the Client. -// Relative URLs should always be specified without a preceding slash. 
-func (c *Client) NewUploadRequest(urlStr string, reader io.Reader, size int64, mediaType string, opts ...RequestOption) (*http.Request, error) { - if !strings.HasSuffix(c.UploadURL.Path, "/") { - return nil, fmt.Errorf("UploadURL must have a trailing slash, but %q does not", c.UploadURL) - } - u, err := c.UploadURL.Parse(urlStr) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("POST", u.String(), reader) - if err != nil { - return nil, err - } - - req.ContentLength = size - - if mediaType == "" { - mediaType = defaultMediaType - } - req.Header.Set("Content-Type", mediaType) - req.Header.Set("Accept", mediaTypeV3) - req.Header.Set("User-Agent", c.UserAgent) - req.Header.Set(headerAPIVersion, defaultAPIVersion) - - for _, opt := range opts { - opt(req) - } - - return req, nil -} - -// Response is a GitHub API response. This wraps the standard http.Response -// returned from GitHub and provides convenient access to things like -// pagination links. -type Response struct { - *http.Response - - // These fields provide the page values for paginating through a set of - // results. Any or all of these may be set to the zero value for - // responses that are not part of a paginated set, or for which there - // are no additional pages. - // - // These fields support what is called "offset pagination" and should - // be used with the ListOptions struct. - NextPage int - PrevPage int - FirstPage int - LastPage int - - // Additionally, some APIs support "cursor pagination" instead of offset. - // This means that a token points directly to the next record which - // can lead to O(1) performance compared to O(n) performance provided - // by offset pagination. - // - // For APIs that support cursor pagination (such as - // TeamsService.ListIDPGroupsInOrganization), the following field - // will be populated to point to the next page. - // - // To use this token, set ListCursorOptions.Page to this value before - // calling the endpoint again. - NextPageToken string - - // For APIs that support cursor pagination, such as RepositoriesService.ListHookDeliveries, - // the following field will be populated to point to the next page. - // Set ListCursorOptions.Cursor to this value when calling the endpoint again. - Cursor string - - // For APIs that support before/after pagination, such as OrganizationsService.AuditLog. - Before string - After string - - // Explicitly specify the Rate type so Rate's String() receiver doesn't - // propagate to Response. - Rate Rate - - // token's expiration date. Timestamp is 0001-01-01 when token doesn't expire. - // So it is valid for TokenExpiration.Equal(Timestamp{}) or TokenExpiration.Time.After(time.Now()) - TokenExpiration Timestamp -} - -// newResponse creates a new Response for the provided http.Response. -// r must not be nil. -func newResponse(r *http.Response) *Response { - response := &Response{Response: r} - response.populatePageValues() - response.Rate = parseRate(r) - response.TokenExpiration = parseTokenExpiration(r) - return response -} - -// populatePageValues parses the HTTP Link response headers and populates the -// various pagination link values in the Response. 
-func (r *Response) populatePageValues() { - if links, ok := r.Response.Header["Link"]; ok && len(links) > 0 { - for _, link := range strings.Split(links[0], ",") { - segments := strings.Split(strings.TrimSpace(link), ";") - - // link must at least have href and rel - if len(segments) < 2 { - continue - } - - // ensure href is properly formatted - if !strings.HasPrefix(segments[0], "<") || !strings.HasSuffix(segments[0], ">") { - continue - } - - // try to pull out page parameter - url, err := url.Parse(segments[0][1 : len(segments[0])-1]) - if err != nil { - continue - } - - q := url.Query() - - if cursor := q.Get("cursor"); cursor != "" { - for _, segment := range segments[1:] { - switch strings.TrimSpace(segment) { - case `rel="next"`: - r.Cursor = cursor - } - } - - continue - } - - page := q.Get("page") - since := q.Get("since") - before := q.Get("before") - after := q.Get("after") - - if page == "" && before == "" && after == "" && since == "" { - continue - } - - if since != "" && page == "" { - page = since - } - - for _, segment := range segments[1:] { - switch strings.TrimSpace(segment) { - case `rel="next"`: - if r.NextPage, err = strconv.Atoi(page); err != nil { - r.NextPageToken = page - } - r.After = after - case `rel="prev"`: - r.PrevPage, _ = strconv.Atoi(page) - r.Before = before - case `rel="first"`: - r.FirstPage, _ = strconv.Atoi(page) - case `rel="last"`: - r.LastPage, _ = strconv.Atoi(page) - } - } - } - } -} - -// parseRate parses the rate related headers. -func parseRate(r *http.Response) Rate { - var rate Rate - if limit := r.Header.Get(headerRateLimit); limit != "" { - rate.Limit, _ = strconv.Atoi(limit) - } - if remaining := r.Header.Get(headerRateRemaining); remaining != "" { - rate.Remaining, _ = strconv.Atoi(remaining) - } - if reset := r.Header.Get(headerRateReset); reset != "" { - if v, _ := strconv.ParseInt(reset, 10, 64); v != 0 { - rate.Reset = Timestamp{time.Unix(v, 0)} - } - } - return rate -} - -// parseSecondaryRate parses the secondary rate related headers, -// and returns the time to retry after. -func parseSecondaryRate(r *http.Response) *time.Duration { - // According to GitHub support, the "Retry-After" header value will be - // an integer which represents the number of seconds that one should - // wait before resuming making requests. - if v := r.Header.Get(headerRetryAfter); v != "" { - retryAfterSeconds, _ := strconv.ParseInt(v, 10, 64) // Error handling is noop. - retryAfter := time.Duration(retryAfterSeconds) * time.Second - return &retryAfter - } - - // According to GitHub support, endpoints might return x-ratelimit-reset instead, - // as an integer which represents the number of seconds since epoch UTC, - // represting the time to resume making requests. - if v := r.Header.Get(headerRateReset); v != "" { - secondsSinceEpoch, _ := strconv.ParseInt(v, 10, 64) // Error handling is noop. - retryAfter := time.Until(time.Unix(secondsSinceEpoch, 0)) - return &retryAfter - } - - return nil -} - -// parseTokenExpiration parses the TokenExpiration related headers. -// Returns 0001-01-01 if the header is not defined or could not be parsed. -func parseTokenExpiration(r *http.Response) Timestamp { - if v := r.Header.Get(headerTokenExpiration); v != "" { - if t, err := time.Parse("2006-01-02 15:04:05 MST", v); err == nil { - return Timestamp{t.Local()} - } - // Some tokens include the timezone offset instead of the timezone. 
- // https://github.com/google/go-github/issues/2649 - if t, err := time.Parse("2006-01-02 15:04:05 -0700", v); err == nil { - return Timestamp{t.Local()} - } - } - return Timestamp{} // 0001-01-01 00:00:00 -} - -type requestContext uint8 - -const ( - bypassRateLimitCheck requestContext = iota -) - -// BareDo sends an API request and lets you handle the api response. If an error -// or API Error occurs, the error will contain more information. Otherwise you -// are supposed to read and close the response's Body. If rate limit is exceeded -// and reset time is in the future, BareDo returns *RateLimitError immediately -// without making a network API call. -// -// The provided ctx must be non-nil, if it is nil an error is returned. If it is -// canceled or times out, ctx.Err() will be returned. -func (c *Client) BareDo(ctx context.Context, req *http.Request) (*Response, error) { - if ctx == nil { - return nil, errNonNilContext - } - - req = withContext(ctx, req) - - rateLimitCategory := category(req.Method, req.URL.Path) - - if bypass := ctx.Value(bypassRateLimitCheck); bypass == nil { - // If we've hit rate limit, don't make further requests before Reset time. - if err := c.checkRateLimitBeforeDo(req, rateLimitCategory); err != nil { - return &Response{ - Response: err.Response, - Rate: err.Rate, - }, err - } - // If we've hit a secondary rate limit, don't make further requests before Retry After. - if err := c.checkSecondaryRateLimitBeforeDo(req); err != nil { - return &Response{ - Response: err.Response, - }, err - } - } - - resp, err := c.client.Do(req) - if err != nil { - // If we got an error, and the context has been canceled, - // the context's error is probably more useful. - select { - case <-ctx.Done(): - return nil, ctx.Err() - default: - } - - // If the error type is *url.Error, sanitize its URL before returning. - if e, ok := err.(*url.Error); ok { - if url, err := url.Parse(e.URL); err == nil { - e.URL = sanitizeURL(url).String() - return nil, e - } - } - - return nil, err - } - - response := newResponse(resp) - - // Don't update the rate limits if this was a cached response. - // X-From-Cache is set by https://github.com/gregjones/httpcache - if response.Header.Get("X-From-Cache") == "" { - c.rateMu.Lock() - c.rateLimits[rateLimitCategory] = response.Rate - c.rateMu.Unlock() - } - - err = CheckResponse(resp) - if err != nil { - defer resp.Body.Close() - // Special case for AcceptedErrors. If an AcceptedError - // has been encountered, the response's payload will be - // added to the AcceptedError and returned. - // - // Issue #1022 - aerr, ok := err.(*AcceptedError) - if ok { - b, readErr := io.ReadAll(resp.Body) - if readErr != nil { - return response, readErr - } - - aerr.Raw = b - err = aerr - } - - // Update the secondary rate limit if we hit it. - rerr, ok := err.(*AbuseRateLimitError) - if ok && rerr.RetryAfter != nil { - c.rateMu.Lock() - c.secondaryRateLimitReset = time.Now().Add(*rerr.RetryAfter) - c.rateMu.Unlock() - } - } - return response, err -} - -// Do sends an API request and returns the API response. The API response is -// JSON decoded and stored in the value pointed to by v, or returned as an -// error if an API error has occurred. If v implements the io.Writer interface, -// the raw response body will be written to v, without attempting to first -// decode it. If v is nil, and no error hapens, the response is returned as is. 
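A small, hedged sketch of the NewRequest/Do pairing described above (again, not part of the vendored file): a relative endpoint is turned into a request, and Do JSON-decodes the body into whatever v points at. The rate_limit endpoint and the anonymous out struct are assumptions chosen only for illustration.

package example

import (
	"context"

	"github.com/google/go-github/v57/github"
)

// coreRate fetches the raw /rate_limit document and decodes only the part we
// care about, relying on Do to JSON-decode into the value v points at.
func coreRate(ctx context.Context, client *github.Client) (github.Rate, error) {
	req, err := client.NewRequest("GET", "rate_limit", nil)
	if err != nil {
		return github.Rate{}, err
	}
	var out struct {
		Resources map[string]github.Rate `json:"resources"`
	}
	if _, err := client.Do(ctx, req, &out); err != nil {
		return github.Rate{}, err
	}
	return out.Resources["core"], nil
}

When v is an io.Writer instead (an *os.File, for example), the same Do call copies the raw body without decoding it.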
-// If rate limit is exceeded and reset time is in the future, Do returns -// *RateLimitError immediately without making a network API call. -// -// The provided ctx must be non-nil, if it is nil an error is returned. If it -// is canceled or times out, ctx.Err() will be returned. -func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) (*Response, error) { - resp, err := c.BareDo(ctx, req) - if err != nil { - return resp, err - } - defer resp.Body.Close() - - switch v := v.(type) { - case nil: - case io.Writer: - _, err = io.Copy(v, resp.Body) - default: - decErr := json.NewDecoder(resp.Body).Decode(v) - if decErr == io.EOF { - decErr = nil // ignore EOF errors caused by empty response body - } - if decErr != nil { - err = decErr - } - } - return resp, err -} - -// checkRateLimitBeforeDo does not make any network calls, but uses existing knowledge from -// current client state in order to quickly check if *RateLimitError can be immediately returned -// from Client.Do, and if so, returns it so that Client.Do can skip making a network API call unnecessarily. -// Otherwise it returns nil, and Client.Do should proceed normally. -func (c *Client) checkRateLimitBeforeDo(req *http.Request, rateLimitCategory rateLimitCategory) *RateLimitError { - c.rateMu.Lock() - rate := c.rateLimits[rateLimitCategory] - c.rateMu.Unlock() - if !rate.Reset.Time.IsZero() && rate.Remaining == 0 && time.Now().Before(rate.Reset.Time) { - // Create a fake response. - resp := &http.Response{ - Status: http.StatusText(http.StatusForbidden), - StatusCode: http.StatusForbidden, - Request: req, - Header: make(http.Header), - Body: io.NopCloser(strings.NewReader("")), - } - return &RateLimitError{ - Rate: rate, - Response: resp, - Message: fmt.Sprintf("API rate limit of %v still exceeded until %v, not making remote request.", rate.Limit, rate.Reset.Time), - } - } - - return nil -} - -// checkSecondaryRateLimitBeforeDo does not make any network calls, but uses existing knowledge from -// current client state in order to quickly check if *AbuseRateLimitError can be immediately returned -// from Client.Do, and if so, returns it so that Client.Do can skip making a network API call unnecessarily. -// Otherwise it returns nil, and Client.Do should proceed normally. -func (c *Client) checkSecondaryRateLimitBeforeDo(req *http.Request) *AbuseRateLimitError { - c.rateMu.Lock() - secondary := c.secondaryRateLimitReset - c.rateMu.Unlock() - if !secondary.IsZero() && time.Now().Before(secondary) { - // Create a fake response. - resp := &http.Response{ - Status: http.StatusText(http.StatusForbidden), - StatusCode: http.StatusForbidden, - Request: req, - Header: make(http.Header), - Body: io.NopCloser(strings.NewReader("")), - } - - retryAfter := time.Until(secondary) - return &AbuseRateLimitError{ - Response: resp, - Message: fmt.Sprintf("API secondary rate limit exceeded until %v, not making remote request.", secondary), - RetryAfter: &retryAfter, - } - } - - return nil -} - -// compareHTTPResponse returns whether two http.Response objects are equal or not. -// Currently, only StatusCode is checked. This function is used when implementing the -// Is(error) bool interface for the custom error types in this package. -func compareHTTPResponse(r1, r2 *http.Response) bool { - if r1 == nil && r2 == nil { - return true - } - - if r1 != nil && r2 != nil { - return r1.StatusCode == r2.StatusCode - } - return false -} - -/* -An ErrorResponse reports one or more errors caused by an API request. 
- -GitHub API docs: https://docs.github.com/rest/#client-errors -*/ -type ErrorResponse struct { - Response *http.Response `json:"-"` // HTTP response that caused this error - Message string `json:"message"` // error message - Errors []Error `json:"errors"` // more detail on individual errors - // Block is only populated on certain types of errors such as code 451. - Block *ErrorBlock `json:"block,omitempty"` - // Most errors will also include a documentation_url field pointing - // to some content that might help you resolve the error, see - // https://docs.github.com/rest/#client-errors - DocumentationURL string `json:"documentation_url,omitempty"` -} - -// ErrorBlock contains a further explanation for the reason of an error. -// See https://developer.github.com/changes/2016-03-17-the-451-status-code-is-now-supported/ -// for more information. -type ErrorBlock struct { - Reason string `json:"reason,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` -} - -func (r *ErrorResponse) Error() string { - if r.Response != nil && r.Response.Request != nil { - return fmt.Sprintf("%v %v: %d %v %+v", - r.Response.Request.Method, sanitizeURL(r.Response.Request.URL), - r.Response.StatusCode, r.Message, r.Errors) - } - - if r.Response != nil { - return fmt.Sprintf("%d %v %+v", r.Response.StatusCode, r.Message, r.Errors) - } - - return fmt.Sprintf("%v %+v", r.Message, r.Errors) -} - -// Is returns whether the provided error equals this error. -func (r *ErrorResponse) Is(target error) bool { - v, ok := target.(*ErrorResponse) - if !ok { - return false - } - - if r.Message != v.Message || (r.DocumentationURL != v.DocumentationURL) || - !compareHTTPResponse(r.Response, v.Response) { - return false - } - - // Compare Errors. - if len(r.Errors) != len(v.Errors) { - return false - } - for idx := range r.Errors { - if r.Errors[idx] != v.Errors[idx] { - return false - } - } - - // Compare Block. - if (r.Block != nil && v.Block == nil) || (r.Block == nil && v.Block != nil) { - return false - } - if r.Block != nil && v.Block != nil { - if r.Block.Reason != v.Block.Reason { - return false - } - if (r.Block.CreatedAt != nil && v.Block.CreatedAt == nil) || (r.Block.CreatedAt == - nil && v.Block.CreatedAt != nil) { - return false - } - if r.Block.CreatedAt != nil && v.Block.CreatedAt != nil { - if *(r.Block.CreatedAt) != *(v.Block.CreatedAt) { - return false - } - } - } - - return true -} - -// TwoFactorAuthError occurs when using HTTP Basic Authentication for a user -// that has two-factor authentication enabled. The request can be reattempted -// by providing a one-time password in the request. -type TwoFactorAuthError ErrorResponse - -func (r *TwoFactorAuthError) Error() string { return (*ErrorResponse)(r).Error() } - -// RateLimitError occurs when GitHub returns 403 Forbidden response with a rate limit -// remaining value of 0. -type RateLimitError struct { - Rate Rate // Rate specifies last known rate limit for the client - Response *http.Response // HTTP response that caused this error - Message string `json:"message"` // error message -} - -func (r *RateLimitError) Error() string { - return fmt.Sprintf("%v %v: %d %v %v", - r.Response.Request.Method, sanitizeURL(r.Response.Request.URL), - r.Response.StatusCode, r.Message, formatRateReset(time.Until(r.Rate.Reset.Time))) -} - -// Is returns whether the provided error equals this error. 
-func (r *RateLimitError) Is(target error) bool { - v, ok := target.(*RateLimitError) - if !ok { - return false - } - - return r.Rate == v.Rate && - r.Message == v.Message && - compareHTTPResponse(r.Response, v.Response) -} - -// AcceptedError occurs when GitHub returns 202 Accepted response with an -// empty body, which means a job was scheduled on the GitHub side to process -// the information needed and cache it. -// Technically, 202 Accepted is not a real error, it's just used to -// indicate that results are not ready yet, but should be available soon. -// The request can be repeated after some time. -type AcceptedError struct { - // Raw contains the response body. - Raw []byte -} - -func (*AcceptedError) Error() string { - return "job scheduled on GitHub side; try again later" -} - -// Is returns whether the provided error equals this error. -func (ae *AcceptedError) Is(target error) bool { - v, ok := target.(*AcceptedError) - if !ok { - return false - } - return bytes.Equal(ae.Raw, v.Raw) -} - -// AbuseRateLimitError occurs when GitHub returns 403 Forbidden response with the -// "documentation_url" field value equal to "https://docs.github.com/rest/overview/rate-limits-for-the-rest-api#about-secondary-rate-limits". -type AbuseRateLimitError struct { - Response *http.Response // HTTP response that caused this error - Message string `json:"message"` // error message - - // RetryAfter is provided with some abuse rate limit errors. If present, - // it is the amount of time that the client should wait before retrying. - // Otherwise, the client should try again later (after an unspecified amount of time). - RetryAfter *time.Duration -} - -func (r *AbuseRateLimitError) Error() string { - return fmt.Sprintf("%v %v: %d %v", - r.Response.Request.Method, sanitizeURL(r.Response.Request.URL), - r.Response.StatusCode, r.Message) -} - -// Is returns whether the provided error equals this error. -func (r *AbuseRateLimitError) Is(target error) bool { - v, ok := target.(*AbuseRateLimitError) - if !ok { - return false - } - - return r.Message == v.Message && - r.RetryAfter == v.RetryAfter && - compareHTTPResponse(r.Response, v.Response) -} - -// sanitizeURL redacts the client_secret parameter from the URL which may be -// exposed to the user. -func sanitizeURL(uri *url.URL) *url.URL { - if uri == nil { - return nil - } - params := uri.Query() - if len(params.Get("client_secret")) > 0 { - params.Set("client_secret", "REDACTED") - uri.RawQuery = params.Encode() - } - return uri -} - -/* -An Error reports more details on an individual error in an ErrorResponse. -These are the possible validation error codes: - - missing: - resource does not exist - missing_field: - a required field on a resource has not been set - invalid: - the formatting of a field is invalid - already_exists: - another resource has the same valid as this field - custom: - some resources return this (e.g. github.User.CreateKey()), additional - information is set in the Message field of the Error - -GitHub error responses structure are often undocumented and inconsistent. -Sometimes error is just a simple string (Issue #540). -In such cases, Message represents an error message as a workaround. 
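The error types above are usually told apart with errors.As after any client call. The following sketch is illustrative only; the Issues.Get call and the simple wait-and-retry policy are assumptions, not behavior this file prescribes.

package example

import (
	"context"
	"errors"
	"time"

	"github.com/google/go-github/v57/github"
)

// getWithBackoff shows the usual branching on go-github's error types: wait for
// the primary limit to reset, honor Retry-After on secondary limits, and treat
// 202 Accepted as "not ready yet" before retrying once.
func getWithBackoff(ctx context.Context, client *github.Client, owner, repo string, number int) (*github.Issue, error) {
	issue, _, err := client.Issues.Get(ctx, owner, repo, number)
	if err == nil {
		return issue, nil
	}

	var rateErr *github.RateLimitError
	var abuseErr *github.AbuseRateLimitError
	var accepted *github.AcceptedError
	switch {
	case errors.As(err, &rateErr):
		time.Sleep(time.Until(rateErr.Rate.Reset.Time))
	case errors.As(err, &abuseErr) && abuseErr.RetryAfter != nil:
		time.Sleep(*abuseErr.RetryAfter)
	case errors.As(err, &accepted):
		time.Sleep(5 * time.Second) // result still being computed on GitHub's side
	default:
		return nil, err
	}

	issue, _, err = client.Issues.Get(ctx, owner, repo, number)
	return issue, err
}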
- -GitHub API docs: https://docs.github.com/rest/#client-errors -*/ -type Error struct { - Resource string `json:"resource"` // resource on which the error occurred - Field string `json:"field"` // field on which the error occurred - Code string `json:"code"` // validation error code - Message string `json:"message"` // Message describing the error. Errors with Code == "custom" will always have this set. -} - -func (e *Error) Error() string { - return fmt.Sprintf("%v error caused by %v field on %v resource", - e.Code, e.Field, e.Resource) -} - -func (e *Error) UnmarshalJSON(data []byte) error { - type aliasError Error // avoid infinite recursion by using type alias. - if err := json.Unmarshal(data, (*aliasError)(e)); err != nil { - return json.Unmarshal(data, &e.Message) // data can be json string. - } - return nil -} - -// CheckResponse checks the API response for errors, and returns them if -// present. A response is considered an error if it has a status code outside -// the 200 range or equal to 202 Accepted. -// API error responses are expected to have response -// body, and a JSON response body that maps to ErrorResponse. -// -// The error type will be *RateLimitError for rate limit exceeded errors, -// *AcceptedError for 202 Accepted status codes, -// and *TwoFactorAuthError for two-factor authentication errors. -func CheckResponse(r *http.Response) error { - if r.StatusCode == http.StatusAccepted { - return &AcceptedError{} - } - if c := r.StatusCode; 200 <= c && c <= 299 { - return nil - } - - errorResponse := &ErrorResponse{Response: r} - data, err := io.ReadAll(r.Body) - if err == nil && data != nil { - err = json.Unmarshal(data, errorResponse) - if err != nil { - // reset the response as if this never happened - errorResponse = &ErrorResponse{Response: r} - } - } - // Re-populate error response body because GitHub error responses are often - // undocumented and inconsistent. - // Issue #1136, #540. - r.Body = io.NopCloser(bytes.NewBuffer(data)) - switch { - case r.StatusCode == http.StatusUnauthorized && strings.HasPrefix(r.Header.Get(headerOTP), "required"): - return (*TwoFactorAuthError)(errorResponse) - case r.StatusCode == http.StatusForbidden && r.Header.Get(headerRateRemaining) == "0": - return &RateLimitError{ - Rate: parseRate(r), - Response: errorResponse.Response, - Message: errorResponse.Message, - } - case r.StatusCode == http.StatusForbidden && - (strings.HasSuffix(errorResponse.DocumentationURL, "#abuse-rate-limits") || - strings.HasSuffix(errorResponse.DocumentationURL, "secondary-rate-limits")): - abuseRateLimitError := &AbuseRateLimitError{ - Response: errorResponse.Response, - Message: errorResponse.Message, - } - if retryAfter := parseSecondaryRate(r); retryAfter != nil { - abuseRateLimitError.RetryAfter = retryAfter - } - return abuseRateLimitError - default: - return errorResponse - } -} - -// parseBoolResponse determines the boolean result from a GitHub API response. -// Several GitHub API methods return boolean responses indicated by the HTTP -// status code in the response (true indicated by a 204, false indicated by a -// 404). This helper function will determine that result and hide the 404 -// error if present. Any other error will be returned through as-is. -func parseBoolResponse(err error) (bool, error) { - if err == nil { - return true, nil - } - - if err, ok := err.(*ErrorResponse); ok && err.Response.StatusCode == http.StatusNotFound { - // Simply false. In this one case, we do not pass the error through. 
- return false, nil - } - - // some other real error occurred - return false, err -} - -type rateLimitCategory uint8 - -const ( - coreCategory rateLimitCategory = iota - searchCategory - graphqlCategory - integrationManifestCategory - sourceImportCategory - codeScanningUploadCategory - actionsRunnerRegistrationCategory - scimCategory - - categories // An array of this length will be able to contain all rate limit categories. -) - -// category returns the rate limit category of the endpoint, determined by HTTP method and Request.URL.Path. -func category(method, path string) rateLimitCategory { - switch { - // https://docs.github.com/rest/rate-limit#about-rate-limits - default: - // NOTE: coreCategory is returned for actionsRunnerRegistrationCategory too, - // because no API found for this category. - return coreCategory - case strings.HasPrefix(path, "/search/"): - return searchCategory - case path == "/graphql": - return graphqlCategory - case strings.HasPrefix(path, "/app-manifests/") && - strings.HasSuffix(path, "/conversions") && - method == http.MethodPost: - return integrationManifestCategory - - // https://docs.github.com/rest/migrations/source-imports#start-an-import - case strings.HasPrefix(path, "/repos/") && - strings.HasSuffix(path, "/import") && - method == http.MethodPut: - return sourceImportCategory - - // https://docs.github.com/rest/code-scanning#upload-an-analysis-as-sarif-data - case strings.HasSuffix(path, "/code-scanning/sarifs"): - return codeScanningUploadCategory - - // https://docs.github.com/enterprise-cloud@latest/rest/scim - case strings.HasPrefix(path, "/scim/"): - return scimCategory - } -} - -// RateLimits returns the rate limits for the current client. -// -// Deprecated: Use RateLimitService.Get instead. -func (c *Client) RateLimits(ctx context.Context) (*RateLimits, *Response, error) { - return c.RateLimit.Get(ctx) -} - -func setCredentialsAsHeaders(req *http.Request, id, secret string) *http.Request { - // To set extra headers, we must make a copy of the Request so - // that we don't modify the Request we were given. This is required by the - // specification of http.RoundTripper. - // - // Since we are going to modify only req.Header here, we only need a deep copy - // of req.Header. - convertedRequest := new(http.Request) - *convertedRequest = *req - convertedRequest.Header = make(http.Header, len(req.Header)) - - for k, s := range req.Header { - convertedRequest.Header[k] = append([]string(nil), s...) - } - convertedRequest.SetBasicAuth(id, secret) - return convertedRequest -} - -/* -UnauthenticatedRateLimitedTransport allows you to make unauthenticated calls -that need to use a higher rate limit associated with your OAuth application. - - t := &github.UnauthenticatedRateLimitedTransport{ - ClientID: "your app's client ID", - ClientSecret: "your app's client secret", - } - client := github.NewClient(t.Client()) - -This will add the client id and secret as a base64-encoded string in the format -ClientID:ClientSecret and apply it as an "Authorization": "Basic" header. - -See https://docs.github.com/rest/#unauthenticated-rate-limited-requests for -more information. -*/ -type UnauthenticatedRateLimitedTransport struct { - // ClientID is the GitHub OAuth client ID of the current application, which - // can be found by selecting its entry in the list at - // https://github.com/settings/applications. - ClientID string - - // ClientSecret is the GitHub OAuth client secret of the current - // application. 
- ClientSecret string - - // Transport is the underlying HTTP transport to use when making requests. - // It will default to http.DefaultTransport if nil. - Transport http.RoundTripper -} - -// RoundTrip implements the RoundTripper interface. -func (t *UnauthenticatedRateLimitedTransport) RoundTrip(req *http.Request) (*http.Response, error) { - if t.ClientID == "" { - return nil, errors.New("t.ClientID is empty") - } - if t.ClientSecret == "" { - return nil, errors.New("t.ClientSecret is empty") - } - - req2 := setCredentialsAsHeaders(req, t.ClientID, t.ClientSecret) - // Make the HTTP request. - return t.transport().RoundTrip(req2) -} - -// Client returns an *http.Client that makes requests which are subject to the -// rate limit of your OAuth application. -func (t *UnauthenticatedRateLimitedTransport) Client() *http.Client { - return &http.Client{Transport: t} -} - -func (t *UnauthenticatedRateLimitedTransport) transport() http.RoundTripper { - if t.Transport != nil { - return t.Transport - } - return http.DefaultTransport -} - -// BasicAuthTransport is an http.RoundTripper that authenticates all requests -// using HTTP Basic Authentication with the provided username and password. It -// additionally supports users who have two-factor authentication enabled on -// their GitHub account. -type BasicAuthTransport struct { - Username string // GitHub username - Password string // GitHub password - OTP string // one-time password for users with two-factor auth enabled - - // Transport is the underlying HTTP transport to use when making requests. - // It will default to http.DefaultTransport if nil. - Transport http.RoundTripper -} - -// RoundTrip implements the RoundTripper interface. -func (t *BasicAuthTransport) RoundTrip(req *http.Request) (*http.Response, error) { - req2 := setCredentialsAsHeaders(req, t.Username, t.Password) - if t.OTP != "" { - req2.Header.Set(headerOTP, t.OTP) - } - return t.transport().RoundTrip(req2) -} - -// Client returns an *http.Client that makes requests that are authenticated -// using HTTP Basic Authentication. -func (t *BasicAuthTransport) Client() *http.Client { - return &http.Client{Transport: t} -} - -func (t *BasicAuthTransport) transport() http.RoundTripper { - if t.Transport != nil { - return t.Transport - } - return http.DefaultTransport -} - -// formatRateReset formats d to look like "[rate reset in 2s]" or -// "[rate reset in 87m02s]" for the positive durations. And like "[rate limit was reset 87m02s ago]" -// for the negative cases. -func formatRateReset(d time.Duration) string { - isNegative := d < 0 - if isNegative { - d *= -1 - } - secondsTotal := int(0.5 + d.Seconds()) - minutes := secondsTotal / 60 - seconds := secondsTotal - minutes*60 - - var timeString string - if minutes > 0 { - timeString = fmt.Sprintf("%dm%02ds", minutes, seconds) - } else { - timeString = fmt.Sprintf("%ds", seconds) - } - - if isNegative { - return fmt.Sprintf("[rate limit was reset %v ago]", timeString) - } - return fmt.Sprintf("[rate reset in %v]", timeString) -} - -// When using roundTripWithOptionalFollowRedirect, note that it -// is the responsibility of the caller to close the response body. -func (c *Client) roundTripWithOptionalFollowRedirect(ctx context.Context, u string, maxRedirects int, opts ...RequestOption) (*http.Response, error) { - req, err := c.NewRequest("GET", u, nil, opts...) 
- if err != nil { - return nil, err - } - - var resp *http.Response - // Use http.DefaultTransport if no custom Transport is configured - req = withContext(ctx, req) - if c.client.Transport == nil { - resp, err = http.DefaultTransport.RoundTrip(req) - } else { - resp, err = c.client.Transport.RoundTrip(req) - } - if err != nil { - return nil, err - } - - // If redirect response is returned, follow it - if maxRedirects > 0 && resp.StatusCode == http.StatusMovedPermanently { - _ = resp.Body.Close() - u = resp.Header.Get("Location") - resp, err = c.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects-1, opts...) - } - return resp, err -} - -// Bool is a helper routine that allocates a new bool value -// to store v and returns a pointer to it. -func Bool(v bool) *bool { return &v } - -// Int is a helper routine that allocates a new int value -// to store v and returns a pointer to it. -func Int(v int) *int { return &v } - -// Int64 is a helper routine that allocates a new int64 value -// to store v and returns a pointer to it. -func Int64(v int64) *int64 { return &v } - -// String is a helper routine that allocates a new string value -// to store v and returns a pointer to it. -func String(v string) *string { return &v } - -// roundTripperFunc creates a RoundTripper (transport) -type roundTripperFunc func(*http.Request) (*http.Response, error) - -func (fn roundTripperFunc) RoundTrip(r *http.Request) (*http.Response, error) { - return fn(r) -} diff --git a/vendor/github.com/google/go-github/v57/github/gitignore.go b/vendor/github.com/google/go-github/v57/github/gitignore.go deleted file mode 100644 index 34cf285e..00000000 --- a/vendor/github.com/google/go-github/v57/github/gitignore.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GitignoresService provides access to the gitignore related functions in the -// GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/gitignore/ -type GitignoresService service - -// Gitignore represents a .gitignore file as returned by the GitHub API. -type Gitignore struct { - Name *string `json:"name,omitempty"` - Source *string `json:"source,omitempty"` -} - -func (g Gitignore) String() string { - return Stringify(g) -} - -// List all available Gitignore templates. -// -// GitHub API docs: https://docs.github.com/rest/gitignore/gitignore#get-all-gitignore-templates -// -//meta:operation GET /gitignore/templates -func (s *GitignoresService) List(ctx context.Context) ([]string, *Response, error) { - req, err := s.client.NewRequest("GET", "gitignore/templates", nil) - if err != nil { - return nil, nil, err - } - - var availableTemplates []string - resp, err := s.client.Do(ctx, req, &availableTemplates) - if err != nil { - return nil, resp, err - } - - return availableTemplates, resp, nil -} - -// Get a Gitignore by name. 
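Purely as an illustrative aside, the two GitignoresService methods (List above, Get below) compose as follows; "Go" is an assumed template name and client/ctx are placeholders.

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v57/github"
)

// printGoTemplate lists the available .gitignore template names and then fetches
// the body of one of them.
func printGoTemplate(ctx context.Context, client *github.Client) error {
	names, _, err := client.Gitignores.List(ctx)
	if err != nil {
		return err
	}
	fmt.Println("available templates:", len(names))

	tmpl, _, err := client.Gitignores.Get(ctx, "Go")
	if err != nil {
		return err
	}
	fmt.Println(tmpl.GetSource())
	return nil
}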
-// -// GitHub API docs: https://docs.github.com/rest/gitignore/gitignore#get-a-gitignore-template -// -//meta:operation GET /gitignore/templates/{name} -func (s *GitignoresService) Get(ctx context.Context, name string) (*Gitignore, *Response, error) { - u := fmt.Sprintf("gitignore/templates/%v", name) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - gitignore := new(Gitignore) - resp, err := s.client.Do(ctx, req, gitignore) - if err != nil { - return nil, resp, err - } - - return gitignore, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/interactions.go b/vendor/github.com/google/go-github/v57/github/interactions.go deleted file mode 100644 index 2268273d..00000000 --- a/vendor/github.com/google/go-github/v57/github/interactions.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// InteractionsService handles communication with the repository and organization related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/interactions/ -type InteractionsService service - -// InteractionRestriction represents the interaction restrictions for repository and organization. -type InteractionRestriction struct { - // Specifies the group of GitHub users who can - // comment, open issues, or create pull requests for the given repository. - // Possible values are: "existing_users", "contributors_only" and "collaborators_only". - Limit *string `json:"limit,omitempty"` - - // Origin specifies the type of the resource to interact with. - // Possible values are: "repository" and "organization". - Origin *string `json:"origin,omitempty"` - - // ExpiresAt specifies the time after which the interaction restrictions expire. - // The default expiry time is 24 hours from the time restriction is created. - ExpiresAt *Timestamp `json:"expires_at,omitempty"` -} diff --git a/vendor/github.com/google/go-github/v57/github/interactions_orgs.go b/vendor/github.com/google/go-github/v57/github/interactions_orgs.go deleted file mode 100644 index f0ba0b15..00000000 --- a/vendor/github.com/google/go-github/v57/github/interactions_orgs.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2019 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetRestrictionsForOrg fetches the interaction restrictions for an organization. -// -// GitHub API docs: https://docs.github.com/rest/interactions/orgs#get-interaction-restrictions-for-an-organization -// -//meta:operation GET /orgs/{org}/interaction-limits -func (s *InteractionsService) GetRestrictionsForOrg(ctx context.Context, organization string) (*InteractionRestriction, *Response, error) { - u := fmt.Sprintf("orgs/%v/interaction-limits", organization) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - organizationInteractions := new(InteractionRestriction) - - resp, err := s.client.Do(ctx, req, organizationInteractions) - if err != nil { - return nil, resp, err - } - - return organizationInteractions, resp, nil -} - -// UpdateRestrictionsForOrg adds or updates the interaction restrictions for an organization. -// -// limit specifies the group of GitHub users who can comment, open issues, or create pull requests -// in public repositories for the given organization. -// Possible values are: "existing_users", "contributors_only", "collaborators_only". -// -// GitHub API docs: https://docs.github.com/rest/interactions/orgs#set-interaction-restrictions-for-an-organization -// -//meta:operation PUT /orgs/{org}/interaction-limits -func (s *InteractionsService) UpdateRestrictionsForOrg(ctx context.Context, organization, limit string) (*InteractionRestriction, *Response, error) { - u := fmt.Sprintf("orgs/%v/interaction-limits", organization) - - interaction := &InteractionRestriction{Limit: String(limit)} - - req, err := s.client.NewRequest("PUT", u, interaction) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - organizationInteractions := new(InteractionRestriction) - - resp, err := s.client.Do(ctx, req, organizationInteractions) - if err != nil { - return nil, resp, err - } - - return organizationInteractions, resp, nil -} - -// RemoveRestrictionsFromOrg removes the interaction restrictions for an organization. -// -// GitHub API docs: https://docs.github.com/rest/interactions/orgs#remove-interaction-restrictions-for-an-organization -// -//meta:operation DELETE /orgs/{org}/interaction-limits -func (s *InteractionsService) RemoveRestrictionsFromOrg(ctx context.Context, organization string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/interaction-limits", organization) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/interactions_repos.go b/vendor/github.com/google/go-github/v57/github/interactions_repos.go deleted file mode 100644 index 9c044bad..00000000 --- a/vendor/github.com/google/go-github/v57/github/interactions_repos.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetRestrictionsForRepo fetches the interaction restrictions for a repository. -// -// GitHub API docs: https://docs.github.com/rest/interactions/repos#get-interaction-restrictions-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/interaction-limits -func (s *InteractionsService) GetRestrictionsForRepo(ctx context.Context, owner, repo string) (*InteractionRestriction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - repositoryInteractions := new(InteractionRestriction) - - resp, err := s.client.Do(ctx, req, repositoryInteractions) - if err != nil { - return nil, resp, err - } - - return repositoryInteractions, resp, nil -} - -// UpdateRestrictionsForRepo adds or updates the interaction restrictions for a repository. -// -// limit specifies the group of GitHub users who can comment, open issues, or create pull requests -// for the given repository. -// Possible values are: "existing_users", "contributors_only", "collaborators_only". -// -// GitHub API docs: https://docs.github.com/rest/interactions/repos#set-interaction-restrictions-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/interaction-limits -func (s *InteractionsService) UpdateRestrictionsForRepo(ctx context.Context, owner, repo, limit string) (*InteractionRestriction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) - - interaction := &InteractionRestriction{Limit: String(limit)} - - req, err := s.client.NewRequest("PUT", u, interaction) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - repositoryInteractions := new(InteractionRestriction) - - resp, err := s.client.Do(ctx, req, repositoryInteractions) - if err != nil { - return nil, resp, err - } - - return repositoryInteractions, resp, nil -} - -// RemoveRestrictionsFromRepo removes the interaction restrictions for a repository. -// -// GitHub API docs: https://docs.github.com/rest/interactions/repos#remove-interaction-restrictions-for-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/interaction-limits -func (s *InteractionsService) RemoveRestrictionsFromRepo(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/issue_import.go b/vendor/github.com/google/go-github/v57/github/issue_import.go deleted file mode 100644 index 4f063710..00000000 --- a/vendor/github.com/google/go-github/v57/github/issue_import.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" - "encoding/json" - "fmt" -) - -// IssueImportService handles communication with the issue import related -// methods of the Issue Import GitHub API. -type IssueImportService service - -// IssueImportRequest represents a request to create an issue. -// -// https://gist.github.com/jonmagic/5282384165e0f86ef105#supported-issue-and-comment-fields -type IssueImportRequest struct { - IssueImport IssueImport `json:"issue"` - Comments []*Comment `json:"comments,omitempty"` -} - -// IssueImport represents body of issue to import. 
-type IssueImport struct { - Title string `json:"title"` - Body string `json:"body"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Assignee *string `json:"assignee,omitempty"` - Milestone *int `json:"milestone,omitempty"` - Closed *bool `json:"closed,omitempty"` - Labels []string `json:"labels,omitempty"` -} - -// Comment represents comments of issue to import. -type Comment struct { - CreatedAt *Timestamp `json:"created_at,omitempty"` - Body string `json:"body"` -} - -// IssueImportResponse represents the response of an issue import create request. -// -// https://gist.github.com/jonmagic/5282384165e0f86ef105#import-issue-response -type IssueImportResponse struct { - ID *int `json:"id,omitempty"` - Status *string `json:"status,omitempty"` - URL *string `json:"url,omitempty"` - ImportIssuesURL *string `json:"import_issues_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Message *string `json:"message,omitempty"` - DocumentationURL *string `json:"documentation_url,omitempty"` - Errors []*IssueImportError `json:"errors,omitempty"` -} - -// IssueImportError represents errors of an issue import create request. -type IssueImportError struct { - Location *string `json:"location,omitempty"` - Resource *string `json:"resource,omitempty"` - Field *string `json:"field,omitempty"` - Value *string `json:"value,omitempty"` - Code *string `json:"code,omitempty"` -} - -// Create a new imported issue on the specified repository. -// -// GitHub API docs: https://gist.github.com/jonmagic/5282384165e0f86ef105#start-an-issue-import -// -//meta:operation POST /repos/{owner}/{repo}/import/issues -func (s *IssueImportService) Create(ctx context.Context, owner, repo string, issue *IssueImportRequest) (*IssueImportResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/issues", owner, repo) - req, err := s.client.NewRequest("POST", u, issue) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeIssueImportAPI) - - i := new(IssueImportResponse) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - aerr, ok := err.(*AcceptedError) - if ok { - if err := json.Unmarshal(aerr.Raw, i); err != nil { - return i, resp, err - } - return i, resp, err - } - return nil, resp, err - } - - return i, resp, nil -} - -// CheckStatus checks the status of an imported issue. -// -// GitHub API docs: https://gist.github.com/jonmagic/5282384165e0f86ef105#import-status-request -// -//meta:operation GET /repos/{owner}/{repo}/import/issues/{issue_number} -func (s *IssueImportService) CheckStatus(ctx context.Context, owner, repo string, issueID int64) (*IssueImportResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/issues/%v", owner, repo, issueID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeIssueImportAPI) - - i := new(IssueImportResponse) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// CheckStatusSince checks the status of multiple imported issues since a given date. 
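A hedged sketch of the import flow built from the types above, calling Create and then CheckStatus; the owner, repo and issue contents are placeholders rather than values taken from this patch.

package example

import (
	"context"

	"github.com/google/go-github/v57/github"
)

// importOneIssue starts an issue import and then polls its status once.
func importOneIssue(ctx context.Context, client *github.Client, owner, repo string) (*github.IssueImportResponse, error) {
	req := &github.IssueImportRequest{
		IssueImport: github.IssueImport{
			Title:  "Imported issue",
			Body:   "Migrated from the previous tracker.",
			Labels: []string{"migration"},
		},
		Comments: []*github.Comment{{Body: "Original discussion preserved below."}},
	}

	created, _, err := client.IssueImport.Create(ctx, owner, repo, req)
	if err != nil {
		return nil, err
	}

	status, _, err := client.IssueImport.CheckStatus(ctx, owner, repo, int64(created.GetID()))
	if err != nil {
		return nil, err
	}
	return status, nil
}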
-// -// GitHub API docs: https://gist.github.com/jonmagic/5282384165e0f86ef105#check-status-of-multiple-issues -// -//meta:operation GET /repos/{owner}/{repo}/import/issues -func (s *IssueImportService) CheckStatusSince(ctx context.Context, owner, repo string, since Timestamp) ([]*IssueImportResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/issues?since=%v", owner, repo, since.Format("2006-01-02")) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeIssueImportAPI) - - var b bytes.Buffer - resp, err := s.client.Do(ctx, req, &b) - if err != nil { - return nil, resp, err - } - - var i []*IssueImportResponse - err = json.Unmarshal(b.Bytes(), &i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/issues.go b/vendor/github.com/google/go-github/v57/github/issues.go deleted file mode 100644 index 1c07fef8..00000000 --- a/vendor/github.com/google/go-github/v57/github/issues.go +++ /dev/null @@ -1,382 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "time" -) - -// IssuesService handles communication with the issue related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/issues/ -type IssuesService service - -// Issue represents a GitHub issue on a repository. -// -// Note: As far as the GitHub API is concerned, every pull request is an issue, -// but not every issue is a pull request. Some endpoints, events, and webhooks -// may also return pull requests via this struct. If PullRequestLinks is nil, -// this is an issue, and if PullRequestLinks is not nil, this is a pull request. -// The IsPullRequest helper method can be used to check that. -type Issue struct { - ID *int64 `json:"id,omitempty"` - Number *int `json:"number,omitempty"` - State *string `json:"state,omitempty"` - // StateReason can be one of: "completed", "not_planned", "reopened". 
- StateReason *string `json:"state_reason,omitempty"` - Locked *bool `json:"locked,omitempty"` - Title *string `json:"title,omitempty"` - Body *string `json:"body,omitempty"` - AuthorAssociation *string `json:"author_association,omitempty"` - User *User `json:"user,omitempty"` - Labels []*Label `json:"labels,omitempty"` - Assignee *User `json:"assignee,omitempty"` - Comments *int `json:"comments,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ClosedBy *User `json:"closed_by,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - LabelsURL *string `json:"labels_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - PullRequestLinks *PullRequestLinks `json:"pull_request,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` - Assignees []*User `json:"assignees,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Draft *bool `json:"draft,omitempty"` - - // TextMatches is only populated from search results that request text matches - // See: search.go and https://docs.github.com/rest/search/#text-match-metadata - TextMatches []*TextMatch `json:"text_matches,omitempty"` - - // ActiveLockReason is populated only when LockReason is provided while locking the issue. - // Possible values are: "off-topic", "too heated", "resolved", and "spam". - ActiveLockReason *string `json:"active_lock_reason,omitempty"` -} - -func (i Issue) String() string { - return Stringify(i) -} - -// IsPullRequest reports whether the issue is also a pull request. It uses the -// method recommended by GitHub's API documentation, which is to check whether -// PullRequestLinks is non-nil. -func (i Issue) IsPullRequest() bool { - return i.PullRequestLinks != nil -} - -// IssueRequest represents a request to create/edit an issue. -// It is separate from Issue above because otherwise Labels -// and Assignee fail to serialize to the correct JSON. -type IssueRequest struct { - Title *string `json:"title,omitempty"` - Body *string `json:"body,omitempty"` - Labels *[]string `json:"labels,omitempty"` - Assignee *string `json:"assignee,omitempty"` - State *string `json:"state,omitempty"` - // StateReason can be 'completed' or 'not_planned'. - StateReason *string `json:"state_reason,omitempty"` - Milestone *int `json:"milestone,omitempty"` - Assignees *[]string `json:"assignees,omitempty"` -} - -// IssueListOptions specifies the optional parameters to the IssuesService.List -// and IssuesService.ListByOrg methods. -type IssueListOptions struct { - // Filter specifies which issues to list. Possible values are: assigned, - // created, mentioned, subscribed, all. Default is "assigned". - Filter string `url:"filter,omitempty"` - - // State filters issues based on their state. Possible values are: open, - // closed, all. Default is "open". - State string `url:"state,omitempty"` - - // Labels filters issues based on their label. - Labels []string `url:"labels,comma,omitempty"` - - // Sort specifies how to sort issues. Possible values are: created, updated, - // and comments. Default value is "created". - Sort string `url:"sort,omitempty"` - - // Direction in which to sort issues. Possible values are: asc, desc. - // Default is "desc". 
- Direction string `url:"direction,omitempty"` - - // Since filters issues by time. - Since time.Time `url:"since,omitempty"` - - ListOptions -} - -// PullRequestLinks object is added to the Issue object when it's an issue included -// in the IssueCommentEvent webhook payload, if the webhook is fired by a comment on a PR. -type PullRequestLinks struct { - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - DiffURL *string `json:"diff_url,omitempty"` - PatchURL *string `json:"patch_url,omitempty"` -} - -// List the issues for the authenticated user. If all is true, list issues -// across all the user's visible repositories including owned, member, and -// organization repositories; if false, list only owned and member -// repositories. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#list-issues-assigned-to-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/issues/issues#list-user-account-issues-assigned-to-the-authenticated-user -// -//meta:operation GET /issues -//meta:operation GET /user/issues -func (s *IssuesService) List(ctx context.Context, all bool, opts *IssueListOptions) ([]*Issue, *Response, error) { - var u string - if all { - u = "issues" - } else { - u = "user/issues" - } - return s.listIssues(ctx, u, opts) -} - -// ListByOrg fetches the issues in the specified organization for the -// authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#list-organization-issues-assigned-to-the-authenticated-user -// -//meta:operation GET /orgs/{org}/issues -func (s *IssuesService) ListByOrg(ctx context.Context, org string, opts *IssueListOptions) ([]*Issue, *Response, error) { - u := fmt.Sprintf("orgs/%v/issues", org) - return s.listIssues(ctx, u, opts) -} - -func (s *IssuesService) listIssues(ctx context.Context, u string, opts *IssueListOptions) ([]*Issue, *Response, error) { - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var issues []*Issue - resp, err := s.client.Do(ctx, req, &issues) - if err != nil { - return nil, resp, err - } - - return issues, resp, nil -} - -// IssueListByRepoOptions specifies the optional parameters to the -// IssuesService.ListByRepo method. -type IssueListByRepoOptions struct { - // Milestone limits issues for the specified milestone. Possible values are - // a milestone number, "none" for issues with no milestone, "*" for issues - // with any milestone. - Milestone string `url:"milestone,omitempty"` - - // State filters issues based on their state. Possible values are: open, - // closed, all. Default is "open". - State string `url:"state,omitempty"` - - // Assignee filters issues based on their assignee. Possible values are a - // user name, "none" for issues that are not assigned, "*" for issues with - // any assigned user. - Assignee string `url:"assignee,omitempty"` - - // Creator filters issues based on their creator. - Creator string `url:"creator,omitempty"` - - // Mentioned filters issues to those mentioned a specific user. - Mentioned string `url:"mentioned,omitempty"` - - // Labels filters issues based on their label. - Labels []string `url:"labels,omitempty,comma"` - - // Sort specifies how to sort issues. Possible values are: created, updated, - // and comments. Default value is "created". 
- Sort string `url:"sort,omitempty"` - - // Direction in which to sort issues. Possible values are: asc, desc. - // Default is "desc". - Direction string `url:"direction,omitempty"` - - // Since filters issues by time. - Since time.Time `url:"since,omitempty"` - - ListOptions -} - -// ListByRepo lists the issues for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#list-repository-issues -// -//meta:operation GET /repos/{owner}/{repo}/issues -func (s *IssuesService) ListByRepo(ctx context.Context, owner string, repo string, opts *IssueListByRepoOptions) ([]*Issue, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var issues []*Issue - resp, err := s.client.Do(ctx, req, &issues) - if err != nil { - return nil, resp, err - } - - return issues, resp, nil -} - -// Get a single issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#get-an-issue -// -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number} -func (s *IssuesService) Get(ctx context.Context, owner string, repo string, number int) (*Issue, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d", owner, repo, number) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - issue := new(Issue) - resp, err := s.client.Do(ctx, req, issue) - if err != nil { - return nil, resp, err - } - - return issue, resp, nil -} - -// Create a new issue on the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#create-an-issue -// -//meta:operation POST /repos/{owner}/{repo}/issues -func (s *IssuesService) Create(ctx context.Context, owner string, repo string, issue *IssueRequest) (*Issue, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues", owner, repo) - req, err := s.client.NewRequest("POST", u, issue) - if err != nil { - return nil, nil, err - } - - i := new(Issue) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// Edit (update) an issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#update-an-issue -// -//meta:operation PATCH /repos/{owner}/{repo}/issues/{issue_number} -func (s *IssuesService) Edit(ctx context.Context, owner string, repo string, number int, issue *IssueRequest) (*Issue, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d", owner, repo, number) - req, err := s.client.NewRequest("PATCH", u, issue) - if err != nil { - return nil, nil, err - } - - i := new(Issue) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// RemoveMilestone removes a milestone from an issue. -// -// This is a helper method to explicitly update an issue with a `null` milestone, thereby removing it. 
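As a hedged usage sketch of Create and Edit above, with github.String filling the pointer fields of IssueRequest; the title, body, labels and owner/repo are placeholders.

package example

import (
	"context"

	"github.com/google/go-github/v57/github"
)

// fileAndClose creates an issue and later closes it as completed.
func fileAndClose(ctx context.Context, client *github.Client, owner, repo string) error {
	issue, _, err := client.Issues.Create(ctx, owner, repo, &github.IssueRequest{
		Title:  github.String("Runner never registers"),
		Body:   github.String("Steps to reproduce go here."),
		Labels: &[]string{"bug"},
	})
	if err != nil {
		return err
	}

	_, _, err = client.Issues.Edit(ctx, owner, repo, issue.GetNumber(), &github.IssueRequest{
		State:       github.String("closed"),
		StateReason: github.String("completed"),
	})
	return err
}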
-// -// GitHub API docs: https://docs.github.com/rest/issues/issues#update-an-issue -// -//meta:operation PATCH /repos/{owner}/{repo}/issues/{issue_number} -func (s *IssuesService) RemoveMilestone(ctx context.Context, owner, repo string, issueNumber int) (*Issue, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%v", owner, repo, issueNumber) - req, err := s.client.NewRequest("PATCH", u, &struct { - Milestone *Milestone `json:"milestone"` - }{}) - if err != nil { - return nil, nil, err - } - - i := new(Issue) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// LockIssueOptions specifies the optional parameters to the -// IssuesService.Lock method. -type LockIssueOptions struct { - // LockReason specifies the reason to lock this issue. - // Providing a lock reason can help make it clearer to contributors why an issue - // was locked. Possible values are: "off-topic", "too heated", "resolved", and "spam". - LockReason string `json:"lock_reason,omitempty"` -} - -// Lock an issue's conversation. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#lock-an-issue -// -//meta:operation PUT /repos/{owner}/{repo}/issues/{issue_number}/lock -func (s *IssuesService) Lock(ctx context.Context, owner string, repo string, number int, opts *LockIssueOptions) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/lock", owner, repo, number) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unlock an issue's conversation. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#unlock-an-issue -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock -func (s *IssuesService) Unlock(ctx context.Context, owner string, repo string, number int) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/lock", owner, repo, number) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/issues_assignees.go b/vendor/github.com/google/go-github/v57/github/issues_assignees.go deleted file mode 100644 index fd065771..00000000 --- a/vendor/github.com/google/go-github/v57/github/issues_assignees.go +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListAssignees fetches all available assignees (owners and collaborators) to -// which issues may be assigned. -// -// GitHub API docs: https://docs.github.com/rest/issues/assignees#list-assignees -// -//meta:operation GET /repos/{owner}/{repo}/assignees -func (s *IssuesService) ListAssignees(ctx context.Context, owner, repo string, opts *ListOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/assignees", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var assignees []*User - resp, err := s.client.Do(ctx, req, &assignees) - if err != nil { - return nil, resp, err - } - - return assignees, resp, nil -} - -// IsAssignee checks if a user is an assignee for the specified repository. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/assignees#check-if-a-user-can-be-assigned -// -//meta:operation GET /repos/{owner}/{repo}/assignees/{assignee} -func (s *IssuesService) IsAssignee(ctx context.Context, owner, repo, user string) (bool, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/assignees/%v", owner, repo, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - assignee, err := parseBoolResponse(err) - return assignee, resp, err -} - -// AddAssignees adds the provided GitHub users as assignees to the issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/assignees#add-assignees-to-an-issue -// -//meta:operation POST /repos/{owner}/{repo}/issues/{issue_number}/assignees -func (s *IssuesService) AddAssignees(ctx context.Context, owner, repo string, number int, assignees []string) (*Issue, *Response, error) { - users := &struct { - Assignees []string `json:"assignees,omitempty"` - }{Assignees: assignees} - u := fmt.Sprintf("repos/%v/%v/issues/%v/assignees", owner, repo, number) - req, err := s.client.NewRequest("POST", u, users) - if err != nil { - return nil, nil, err - } - - issue := &Issue{} - resp, err := s.client.Do(ctx, req, issue) - if err != nil { - return nil, resp, err - } - - return issue, resp, nil -} - -// RemoveAssignees removes the provided GitHub users as assignees from the issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/assignees#remove-assignees-from-an-issue -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees -func (s *IssuesService) RemoveAssignees(ctx context.Context, owner, repo string, number int, assignees []string) (*Issue, *Response, error) { - users := &struct { - Assignees []string `json:"assignees,omitempty"` - }{Assignees: assignees} - u := fmt.Sprintf("repos/%v/%v/issues/%v/assignees", owner, repo, number) - req, err := s.client.NewRequest("DELETE", u, users) - if err != nil { - return nil, nil, err - } - - issue := &Issue{} - resp, err := s.client.Do(ctx, req, issue) - if err != nil { - return nil, resp, err - } - - return issue, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/issues_comments.go b/vendor/github.com/google/go-github/v57/github/issues_comments.go deleted file mode 100644 index 74a4e60f..00000000 --- a/vendor/github.com/google/go-github/v57/github/issues_comments.go +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "time" -) - -// IssueComment represents a comment left on an issue. -type IssueComment struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Body *string `json:"body,omitempty"` - User *User `json:"user,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - // AuthorAssociation is the comment author's relationship to the issue's repository. - // Possible values are "COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MEMBER", "OWNER", or "NONE". 
- AuthorAssociation *string `json:"author_association,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - IssueURL *string `json:"issue_url,omitempty"` -} - -func (i IssueComment) String() string { - return Stringify(i) -} - -// IssueListCommentsOptions specifies the optional parameters to the -// IssuesService.ListComments method. -type IssueListCommentsOptions struct { - // Sort specifies how to sort comments. Possible values are: created, updated. - Sort *string `url:"sort,omitempty"` - - // Direction in which to sort comments. Possible values are: asc, desc. - Direction *string `url:"direction,omitempty"` - - // Since filters comments by time. - Since *time.Time `url:"since,omitempty"` - - ListOptions -} - -// ListComments lists all comments on the specified issue. Specifying an issue -// number of 0 will return all comments on all issues for the repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/comments#list-issue-comments -// GitHub API docs: https://docs.github.com/rest/issues/comments#list-issue-comments-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/issues/comments -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number}/comments -func (s *IssuesService) ListComments(ctx context.Context, owner string, repo string, number int, opts *IssueListCommentsOptions) ([]*IssueComment, *Response, error) { - var u string - if number == 0 { - u = fmt.Sprintf("repos/%v/%v/issues/comments", owner, repo) - } else { - u = fmt.Sprintf("repos/%v/%v/issues/%d/comments", owner, repo, number) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var comments []*IssueComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// GetComment fetches the specified issue comment. -// -// GitHub API docs: https://docs.github.com/rest/issues/comments#get-an-issue-comment -// -//meta:operation GET /repos/{owner}/{repo}/issues/comments/{comment_id} -func (s *IssuesService) GetComment(ctx context.Context, owner string, repo string, commentID int64) (*IssueComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - comment := new(IssueComment) - resp, err := s.client.Do(ctx, req, comment) - if err != nil { - return nil, resp, err - } - - return comment, resp, nil -} - -// CreateComment creates a new comment on the specified issue. 
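A minimal sketch of paging through issue comments with IssueListCommentsOptions, assuming an authenticated *github.Client and the usual ListOptions/NextPage pagination pattern; repository coordinates are placeholders.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v57/github"
)

// listAllComments walks every page of comments on a single issue.
func listAllComments(ctx context.Context, client *github.Client) {
	opts := &github.IssueListCommentsOptions{
		Sort:        github.String("updated"),
		Direction:   github.String("desc"),
		ListOptions: github.ListOptions{PerPage: 100},
	}
	for {
		comments, resp, err := client.Issues.ListComments(ctx, "example-org", "example-repo", 42, opts)
		if err != nil {
			log.Fatalf("list comments: %v", err)
		}
		for _, c := range comments {
			fmt.Println(c.GetUser().GetLogin(), c.GetCreatedAt())
		}
		if resp.NextPage == 0 {
			break
		}
		opts.Page = resp.NextPage
	}
}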
-// -// GitHub API docs: https://docs.github.com/rest/issues/comments#create-an-issue-comment -// -//meta:operation POST /repos/{owner}/{repo}/issues/{issue_number}/comments -func (s *IssuesService) CreateComment(ctx context.Context, owner string, repo string, number int, comment *IssueComment) (*IssueComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/comments", owner, repo, number) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - c := new(IssueComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// EditComment updates an issue comment. -// A non-nil comment.Body must be provided. Other comment fields should be left nil. -// -// GitHub API docs: https://docs.github.com/rest/issues/comments#update-an-issue-comment -// -//meta:operation PATCH /repos/{owner}/{repo}/issues/comments/{comment_id} -func (s *IssuesService) EditComment(ctx context.Context, owner string, repo string, commentID int64, comment *IssueComment) (*IssueComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - c := new(IssueComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// DeleteComment deletes an issue comment. -// -// GitHub API docs: https://docs.github.com/rest/issues/comments#delete-an-issue-comment -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/comments/{comment_id} -func (s *IssuesService) DeleteComment(ctx context.Context, owner string, repo string, commentID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/issues_events.go b/vendor/github.com/google/go-github/v57/github/issues_events.go deleted file mode 100644 index 23a16bcd..00000000 --- a/vendor/github.com/google/go-github/v57/github/issues_events.go +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// IssueEvent represents an event that occurred around an Issue or Pull Request. -type IssueEvent struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - - // The User that generated this event. - Actor *User `json:"actor,omitempty"` - - // Event identifies the actual type of Event that occurred. Possible - // values are: - // - // closed - // The Actor closed the issue. - // If the issue was closed by commit message, CommitID holds the SHA1 hash of the commit. - // - // merged - // The Actor merged into master a branch containing a commit mentioning the issue. - // CommitID holds the SHA1 of the merge commit. - // - // referenced - // The Actor committed to master a commit mentioning the issue in its commit message. - // CommitID holds the SHA1 of the commit. - // - // reopened, unlocked - // The Actor did that to the issue. - // - // locked - // The Actor locked the issue. - // LockReason holds the reason of locking the issue (if provided while locking). 
- // - // renamed - // The Actor changed the issue title from Rename.From to Rename.To. - // - // mentioned - // Someone unspecified @mentioned the Actor [sic] in an issue comment body. - // - // assigned, unassigned - // The Assigner assigned the issue to or removed the assignment from the Assignee. - // - // labeled, unlabeled - // The Actor added or removed the Label from the issue. - // - // milestoned, demilestoned - // The Actor added or removed the issue from the Milestone. - // - // subscribed, unsubscribed - // The Actor subscribed to or unsubscribed from notifications for an issue. - // - // head_ref_deleted, head_ref_restored - // The pull request’s branch was deleted or restored. - // - // review_dismissed - // The review was dismissed and `DismissedReview` will be populated below. - // - // review_requested, review_request_removed - // The Actor requested or removed the request for a review. - // RequestedReviewer or RequestedTeam, and ReviewRequester will be populated below. - // - Event *string `json:"event,omitempty"` - - CreatedAt *Timestamp `json:"created_at,omitempty"` - Issue *Issue `json:"issue,omitempty"` - - // Only present on certain events; see above. - Assignee *User `json:"assignee,omitempty"` - Assigner *User `json:"assigner,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - Label *Label `json:"label,omitempty"` - Rename *Rename `json:"rename,omitempty"` - LockReason *string `json:"lock_reason,omitempty"` - ProjectCard *ProjectCard `json:"project_card,omitempty"` - DismissedReview *DismissedReview `json:"dismissed_review,omitempty"` - RequestedReviewer *User `json:"requested_reviewer,omitempty"` - RequestedTeam *Team `json:"requested_team,omitempty"` - ReviewRequester *User `json:"review_requester,omitempty"` - PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` -} - -// DismissedReview represents details for 'dismissed_review' events. -type DismissedReview struct { - // State represents the state of the dismissed review. - // Possible values are: "commented", "approved", and "changes_requested". - State *string `json:"state,omitempty"` - ReviewID *int64 `json:"review_id,omitempty"` - DismissalMessage *string `json:"dismissal_message,omitempty"` - DismissalCommitID *string `json:"dismissal_commit_id,omitempty"` -} - -// ListIssueEvents lists events for the specified issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/events#list-issue-events -// -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number}/events -func (s *IssuesService) ListIssueEvents(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*IssueEvent, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%v/events", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeProjectCardDetailsPreview) - - var events []*IssueEvent - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListRepositoryEvents lists events for the specified repository. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/events#list-issue-events-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/issues/events -func (s *IssuesService) ListRepositoryEvents(ctx context.Context, owner, repo string, opts *ListOptions) ([]*IssueEvent, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/events", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*IssueEvent - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// GetEvent returns the specified issue event. -// -// GitHub API docs: https://docs.github.com/rest/issues/events#get-an-issue-event -// -//meta:operation GET /repos/{owner}/{repo}/issues/events/{event_id} -func (s *IssuesService) GetEvent(ctx context.Context, owner, repo string, id int64) (*IssueEvent, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/events/%v", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - event := new(IssueEvent) - resp, err := s.client.Do(ctx, req, event) - if err != nil { - return nil, resp, err - } - - return event, resp, nil -} - -// Rename contains details for 'renamed' events. -type Rename struct { - From *string `json:"from,omitempty"` - To *string `json:"to,omitempty"` -} - -func (r Rename) String() string { - return Stringify(r) -} diff --git a/vendor/github.com/google/go-github/v57/github/issues_labels.go b/vendor/github.com/google/go-github/v57/github/issues_labels.go deleted file mode 100644 index 51e7fe6a..00000000 --- a/vendor/github.com/google/go-github/v57/github/issues_labels.go +++ /dev/null @@ -1,253 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Label represents a GitHub label on an Issue -type Label struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - Color *string `json:"color,omitempty"` - Description *string `json:"description,omitempty"` - Default *bool `json:"default,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (l Label) String() string { - return Stringify(l) -} - -// ListLabels lists all labels for a repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#list-labels-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/labels -func (s *IssuesService) ListLabels(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/labels", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var labels []*Label - resp, err := s.client.Do(ctx, req, &labels) - if err != nil { - return nil, resp, err - } - - return labels, resp, nil -} - -// GetLabel gets a single label. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/labels#get-a-label -// -//meta:operation GET /repos/{owner}/{repo}/labels/{name} -func (s *IssuesService) GetLabel(ctx context.Context, owner string, repo string, name string) (*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/labels/%v", owner, repo, name) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - label := new(Label) - resp, err := s.client.Do(ctx, req, label) - if err != nil { - return nil, resp, err - } - - return label, resp, nil -} - -// CreateLabel creates a new label on the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#create-a-label -// -//meta:operation POST /repos/{owner}/{repo}/labels -func (s *IssuesService) CreateLabel(ctx context.Context, owner string, repo string, label *Label) (*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/labels", owner, repo) - req, err := s.client.NewRequest("POST", u, label) - if err != nil { - return nil, nil, err - } - - l := new(Label) - resp, err := s.client.Do(ctx, req, l) - if err != nil { - return nil, resp, err - } - - return l, resp, nil -} - -// EditLabel edits a label. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#update-a-label -// -//meta:operation PATCH /repos/{owner}/{repo}/labels/{name} -func (s *IssuesService) EditLabel(ctx context.Context, owner string, repo string, name string, label *Label) (*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/labels/%v", owner, repo, name) - req, err := s.client.NewRequest("PATCH", u, label) - if err != nil { - return nil, nil, err - } - - l := new(Label) - resp, err := s.client.Do(ctx, req, l) - if err != nil { - return nil, resp, err - } - - return l, resp, nil -} - -// DeleteLabel deletes a label. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#delete-a-label -// -//meta:operation DELETE /repos/{owner}/{repo}/labels/{name} -func (s *IssuesService) DeleteLabel(ctx context.Context, owner string, repo string, name string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/labels/%v", owner, repo, name) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListLabelsByIssue lists all labels for an issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#list-labels-for-an-issue -// -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number}/labels -func (s *IssuesService) ListLabelsByIssue(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var labels []*Label - resp, err := s.client.Do(ctx, req, &labels) - if err != nil { - return nil, resp, err - } - - return labels, resp, nil -} - -// AddLabelsToIssue adds labels to an issue. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/labels#add-labels-to-an-issue -// -//meta:operation POST /repos/{owner}/{repo}/issues/{issue_number}/labels -func (s *IssuesService) AddLabelsToIssue(ctx context.Context, owner string, repo string, number int, labels []string) ([]*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) - req, err := s.client.NewRequest("POST", u, labels) - if err != nil { - return nil, nil, err - } - - var l []*Label - resp, err := s.client.Do(ctx, req, &l) - if err != nil { - return nil, resp, err - } - - return l, resp, nil -} - -// RemoveLabelForIssue removes a label for an issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#remove-a-label-from-an-issue -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name} -func (s *IssuesService) RemoveLabelForIssue(ctx context.Context, owner string, repo string, number int, label string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/labels/%v", owner, repo, number, label) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ReplaceLabelsForIssue replaces all labels for an issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#set-labels-for-an-issue -// -//meta:operation PUT /repos/{owner}/{repo}/issues/{issue_number}/labels -func (s *IssuesService) ReplaceLabelsForIssue(ctx context.Context, owner string, repo string, number int, labels []string) ([]*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) - req, err := s.client.NewRequest("PUT", u, labels) - if err != nil { - return nil, nil, err - } - - var l []*Label - resp, err := s.client.Do(ctx, req, &l) - if err != nil { - return nil, resp, err - } - - return l, resp, nil -} - -// RemoveLabelsForIssue removes all labels for an issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#remove-all-labels-from-an-issue -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels -func (s *IssuesService) RemoveLabelsForIssue(ctx context.Context, owner string, repo string, number int) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListLabelsForMilestone lists labels for every issue in a milestone. 
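A minimal sketch of the issue-label endpoints above (add, replace, remove), assuming an authenticated *github.Client; repository coordinates and label names are placeholders.

package main

import (
	"context"
	"log"

	"github.com/google/go-github/v57/github"
)

// labelExample appends labels, then replaces the whole set, then drops one label.
func labelExample(ctx context.Context, client *github.Client) {
	// POST: appends without touching existing labels.
	if _, _, err := client.Issues.AddLabelsToIssue(ctx, "example-org", "example-repo", 42, []string{"bug", "triage"}); err != nil {
		log.Fatalf("add labels: %v", err)
	}
	// PUT: replaces the label set in one call.
	if _, _, err := client.Issues.ReplaceLabelsForIssue(ctx, "example-org", "example-repo", 42, []string{"bug", "confirmed"}); err != nil {
		log.Fatalf("replace labels: %v", err)
	}
	// DELETE: removes a single label by name.
	if _, err := client.Issues.RemoveLabelForIssue(ctx, "example-org", "example-repo", 42, "triage"); err != nil {
		log.Fatalf("remove label: %v", err)
	}
}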
-// -// GitHub API docs: https://docs.github.com/rest/issues/labels#list-labels-for-issues-in-a-milestone -// -//meta:operation GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels -func (s *IssuesService) ListLabelsForMilestone(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones/%d/labels", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var labels []*Label - resp, err := s.client.Do(ctx, req, &labels) - if err != nil { - return nil, resp, err - } - - return labels, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/issues_milestones.go b/vendor/github.com/google/go-github/v57/github/issues_milestones.go deleted file mode 100644 index 6c31bcd0..00000000 --- a/vendor/github.com/google/go-github/v57/github/issues_milestones.go +++ /dev/null @@ -1,157 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Milestone represents a GitHub repository milestone. -type Milestone struct { - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - LabelsURL *string `json:"labels_url,omitempty"` - ID *int64 `json:"id,omitempty"` - Number *int `json:"number,omitempty"` - State *string `json:"state,omitempty"` - Title *string `json:"title,omitempty"` - Description *string `json:"description,omitempty"` - Creator *User `json:"creator,omitempty"` - OpenIssues *int `json:"open_issues,omitempty"` - ClosedIssues *int `json:"closed_issues,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - DueOn *Timestamp `json:"due_on,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (m Milestone) String() string { - return Stringify(m) -} - -// MilestoneListOptions specifies the optional parameters to the -// IssuesService.ListMilestones method. -type MilestoneListOptions struct { - // State filters milestones based on their state. Possible values are: - // open, closed, all. Default is "open". - State string `url:"state,omitempty"` - - // Sort specifies how to sort milestones. Possible values are: due_on, completeness. - // Default value is "due_on". - Sort string `url:"sort,omitempty"` - - // Direction in which to sort milestones. Possible values are: asc, desc. - // Default is "asc". - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListMilestones lists all milestones for a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/milestones#list-milestones -// -//meta:operation GET /repos/{owner}/{repo}/milestones -func (s *IssuesService) ListMilestones(ctx context.Context, owner string, repo string, opts *MilestoneListOptions) ([]*Milestone, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var milestones []*Milestone - resp, err := s.client.Do(ctx, req, &milestones) - if err != nil { - return nil, resp, err - } - - return milestones, resp, nil -} - -// GetMilestone gets a single milestone. -// -// GitHub API docs: https://docs.github.com/rest/issues/milestones#get-a-milestone -// -//meta:operation GET /repos/{owner}/{repo}/milestones/{milestone_number} -func (s *IssuesService) GetMilestone(ctx context.Context, owner string, repo string, number int) (*Milestone, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones/%d", owner, repo, number) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - milestone := new(Milestone) - resp, err := s.client.Do(ctx, req, milestone) - if err != nil { - return nil, resp, err - } - - return milestone, resp, nil -} - -// CreateMilestone creates a new milestone on the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/milestones#create-a-milestone -// -//meta:operation POST /repos/{owner}/{repo}/milestones -func (s *IssuesService) CreateMilestone(ctx context.Context, owner string, repo string, milestone *Milestone) (*Milestone, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones", owner, repo) - req, err := s.client.NewRequest("POST", u, milestone) - if err != nil { - return nil, nil, err - } - - m := new(Milestone) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// EditMilestone edits a milestone. -// -// GitHub API docs: https://docs.github.com/rest/issues/milestones#update-a-milestone -// -//meta:operation PATCH /repos/{owner}/{repo}/milestones/{milestone_number} -func (s *IssuesService) EditMilestone(ctx context.Context, owner string, repo string, number int, milestone *Milestone) (*Milestone, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones/%d", owner, repo, number) - req, err := s.client.NewRequest("PATCH", u, milestone) - if err != nil { - return nil, nil, err - } - - m := new(Milestone) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteMilestone deletes a milestone. 
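A minimal sketch of the milestone endpoints above, assuming an authenticated *github.Client; repository coordinates and the milestone title are placeholders.

package main

import (
	"context"
	"log"

	"github.com/google/go-github/v57/github"
)

// milestoneExample creates a milestone and later closes it via EditMilestone.
func milestoneExample(ctx context.Context, client *github.Client) {
	m, _, err := client.Issues.CreateMilestone(ctx, "example-org", "example-repo", &github.Milestone{
		Title:       github.String("v1.0.0"),
		Description: github.String("First stable release"),
	})
	if err != nil {
		log.Fatalf("create milestone: %v", err)
	}
	// Only the fields being changed need to be set on the PATCH body.
	if _, _, err := client.Issues.EditMilestone(ctx, "example-org", "example-repo", m.GetNumber(), &github.Milestone{
		State: github.String("closed"),
	}); err != nil {
		log.Fatalf("close milestone: %v", err)
	}
}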
-// -// GitHub API docs: https://docs.github.com/rest/issues/milestones#delete-a-milestone -// -//meta:operation DELETE /repos/{owner}/{repo}/milestones/{milestone_number} -func (s *IssuesService) DeleteMilestone(ctx context.Context, owner string, repo string, number int) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones/%d", owner, repo, number) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/issues_timeline.go b/vendor/github.com/google/go-github/v57/github/issues_timeline.go deleted file mode 100644 index 0aa589af..00000000 --- a/vendor/github.com/google/go-github/v57/github/issues_timeline.go +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strings" -) - -// Timeline represents an event that occurred around an Issue or Pull Request. -// -// It is similar to an IssueEvent but may contain more information. -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/events/issue-event-types -type Timeline struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - CommitURL *string `json:"commit_url,omitempty"` - - // The User object that generated the event. - Actor *User `json:"actor,omitempty"` - - // The person who commented on the issue. - User *User `json:"user,omitempty"` - - // The person who authored the commit. - Author *CommitAuthor `json:"author,omitempty"` - // The person who committed the commit on behalf of the author. - Committer *CommitAuthor `json:"committer,omitempty"` - // The SHA of the commit in the pull request. - SHA *string `json:"sha,omitempty"` - // The commit message. - Message *string `json:"message,omitempty"` - // A list of parent commits. - Parents []*Commit `json:"parents,omitempty"` - - // Event identifies the actual type of Event that occurred. Possible values - // are: - // - // assigned - // The issue was assigned to the assignee. - // - // closed - // The issue was closed by the actor. When the commit_id is present, it - // identifies the commit that closed the issue using "closes / fixes #NN" - // syntax. - // - // commented - // A comment was added to the issue. - // - // committed - // A commit was added to the pull request's 'HEAD' branch. Only provided - // for pull requests. - // - // cross-referenced - // The issue was referenced from another issue. The 'source' attribute - // contains the 'id', 'actor', and 'url' of the reference's source. - // - // demilestoned - // The issue was removed from a milestone. - // - // head_ref_deleted - // The pull request's branch was deleted. - // - // head_ref_restored - // The pull request's branch was restored. - // - // labeled - // A label was added to the issue. - // - // locked - // The issue was locked by the actor. - // - // mentioned - // The actor was @mentioned in an issue body. - // - // merged - // The issue was merged by the actor. The 'commit_id' attribute is the - // SHA1 of the HEAD commit that was merged. - // - // milestoned - // The issue was added to a milestone. - // - // referenced - // The issue was referenced from a commit message. The 'commit_id' - // attribute is the commit SHA1 of where that happened. - // - // renamed - // The issue title was changed. 
- // - // reopened - // The issue was reopened by the actor. - // - // reviewed - // The pull request was reviewed. - // - // subscribed - // The actor subscribed to receive notifications for an issue. - // - // unassigned - // The assignee was unassigned from the issue. - // - // unlabeled - // A label was removed from the issue. - // - // unlocked - // The issue was unlocked by the actor. - // - // unsubscribed - // The actor unsubscribed to stop receiving notifications for an issue. - // - Event *string `json:"event,omitempty"` - - // The string SHA of a commit that referenced this Issue or Pull Request. - CommitID *string `json:"commit_id,omitempty"` - // The timestamp indicating when the event occurred. - CreatedAt *Timestamp `json:"created_at,omitempty"` - // The Label object including `name` and `color` attributes. Only provided for - // 'labeled' and 'unlabeled' events. - Label *Label `json:"label,omitempty"` - // The User object which was assigned to (or unassigned from) this Issue or - // Pull Request. Only provided for 'assigned' and 'unassigned' events. - Assignee *User `json:"assignee,omitempty"` - Assigner *User `json:"assigner,omitempty"` - - // The Milestone object including a 'title' attribute. - // Only provided for 'milestoned' and 'demilestoned' events. - Milestone *Milestone `json:"milestone,omitempty"` - // The 'id', 'actor', and 'url' for the source of a reference from another issue. - // Only provided for 'cross-referenced' events. - Source *Source `json:"source,omitempty"` - // An object containing rename details including 'from' and 'to' attributes. - // Only provided for 'renamed' events. - Rename *Rename `json:"rename,omitempty"` - ProjectCard *ProjectCard `json:"project_card,omitempty"` - // The state of a submitted review. Can be one of: 'commented', - // 'changes_requested' or 'approved'. - // Only provided for 'reviewed' events. - State *string `json:"state,omitempty"` - - // The person requested to review the pull request. - Reviewer *User `json:"requested_reviewer,omitempty"` - // RequestedTeam contains the team requested to review the pull request. - RequestedTeam *Team `json:"requested_team,omitempty"` - // The person who requested a review. - Requester *User `json:"review_requester,omitempty"` - - // The review summary text. - Body *string `json:"body,omitempty"` - SubmittedAt *Timestamp `json:"submitted_at,omitempty"` - - PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` -} - -// Source represents a reference's source. -type Source struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Actor *User `json:"actor,omitempty"` - Type *string `json:"type,omitempty"` - Issue *Issue `json:"issue,omitempty"` -} - -// ListIssueTimeline lists events for the specified issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/timeline#list-timeline-events-for-an-issue -// -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number}/timeline -func (s *IssuesService) ListIssueTimeline(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*Timeline, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%v/timeline", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- acceptHeaders := []string{mediaTypeTimelinePreview, mediaTypeProjectCardDetailsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var events []*Timeline - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/licenses.go b/vendor/github.com/google/go-github/v57/github/licenses.go deleted file mode 100644 index 34b8a3d8..00000000 --- a/vendor/github.com/google/go-github/v57/github/licenses.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// LicensesService handles communication with the license related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/licenses/ -type LicensesService service - -// RepositoryLicense represents the license for a repository. -type RepositoryLicense struct { - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - - SHA *string `json:"sha,omitempty"` - Size *int `json:"size,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - GitURL *string `json:"git_url,omitempty"` - DownloadURL *string `json:"download_url,omitempty"` - Type *string `json:"type,omitempty"` - Content *string `json:"content,omitempty"` - Encoding *string `json:"encoding,omitempty"` - License *License `json:"license,omitempty"` -} - -func (l RepositoryLicense) String() string { - return Stringify(l) -} - -// License represents an open source license. -type License struct { - Key *string `json:"key,omitempty"` - Name *string `json:"name,omitempty"` - URL *string `json:"url,omitempty"` - - SPDXID *string `json:"spdx_id,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Featured *bool `json:"featured,omitempty"` - Description *string `json:"description,omitempty"` - Implementation *string `json:"implementation,omitempty"` - Permissions *[]string `json:"permissions,omitempty"` - Conditions *[]string `json:"conditions,omitempty"` - Limitations *[]string `json:"limitations,omitempty"` - Body *string `json:"body,omitempty"` -} - -func (l License) String() string { - return Stringify(l) -} - -// List popular open source licenses. -// -// GitHub API docs: https://docs.github.com/rest/licenses/licenses#get-all-commonly-used-licenses -// -//meta:operation GET /licenses -func (s *LicensesService) List(ctx context.Context) ([]*License, *Response, error) { - req, err := s.client.NewRequest("GET", "licenses", nil) - if err != nil { - return nil, nil, err - } - - var licenses []*License - resp, err := s.client.Do(ctx, req, &licenses) - if err != nil { - return nil, resp, err - } - - return licenses, resp, nil -} - -// Get extended metadata for one license. 
-// -// GitHub API docs: https://docs.github.com/rest/licenses/licenses#get-a-license -// -//meta:operation GET /licenses/{license} -func (s *LicensesService) Get(ctx context.Context, licenseName string) (*License, *Response, error) { - u := fmt.Sprintf("licenses/%s", licenseName) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - license := new(License) - resp, err := s.client.Do(ctx, req, license) - if err != nil { - return nil, resp, err - } - - return license, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/markdown.go b/vendor/github.com/google/go-github/v57/github/markdown.go deleted file mode 100644 index fe3b3112..00000000 --- a/vendor/github.com/google/go-github/v57/github/markdown.go +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" -) - -// MarkdownService provides access to markdown-related functions in the GitHub API. -type MarkdownService service - -// MarkdownOptions specifies optional parameters to the Render method. -type MarkdownOptions struct { - // Mode identifies the rendering mode. Possible values are: - // markdown - render a document as plain Render, just like - // README files are rendered. - // - // gfm - to render a document as user-content, e.g. like user - // comments or issues are rendered. In GFM mode, hard line breaks are - // always taken into account, and issue and user mentions are linked - // accordingly. - // - // Default is "markdown". - Mode string - - // Context identifies the repository context. Only taken into account - // when rendering as "gfm". - Context string -} - -type markdownRenderRequest struct { - Text *string `json:"text,omitempty"` - Mode *string `json:"mode,omitempty"` - Context *string `json:"context,omitempty"` -} - -// Render renders an arbitrary Render document. -// -// GitHub API docs: https://docs.github.com/rest/markdown/markdown#render-a-markdown-document -// -//meta:operation POST /markdown -func (s *MarkdownService) Render(ctx context.Context, text string, opts *MarkdownOptions) (string, *Response, error) { - request := &markdownRenderRequest{Text: String(text)} - if opts != nil { - if opts.Mode != "" { - request.Mode = String(opts.Mode) - } - if opts.Context != "" { - request.Context = String(opts.Context) - } - } - - req, err := s.client.NewRequest("POST", "markdown", request) - if err != nil { - return "", nil, err - } - - buf := new(bytes.Buffer) - resp, err := s.client.Do(ctx, req, buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/messages.go b/vendor/github.com/google/go-github/v57/github/messages.go deleted file mode 100644 index 72edbd9f..00000000 --- a/vendor/github.com/google/go-github/v57/github/messages.go +++ /dev/null @@ -1,352 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file provides functions for validating payloads from GitHub Webhooks. 
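A minimal sketch of MarkdownService.Render in "gfm" mode, assuming an authenticated *github.Client; the repository used as the rendering context is a placeholder.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v57/github"
)

// renderExample renders user-content markdown so issue references become links.
func renderExample(ctx context.Context, client *github.Client) {
	html, _, err := client.Markdown.Render(ctx, "Fixes #42 :tada:", &github.MarkdownOptions{
		Mode:    "gfm",
		Context: "example-org/example-repo",
	})
	if err != nil {
		log.Fatalf("render markdown: %v", err)
	}
	fmt.Println(html)
}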
-// GitHub API docs: https://developer.github.com/webhooks/securing/#validating-payloads-from-github - -package github - -import ( - "crypto/hmac" - "crypto/sha1" - "crypto/sha256" - "crypto/sha512" - "encoding/hex" - "encoding/json" - "errors" - "fmt" - "hash" - "io" - "mime" - "net/http" - "net/url" - "reflect" - "sort" - "strings" -) - -const ( - // sha1Prefix is the prefix used by GitHub before the HMAC hexdigest. - sha1Prefix = "sha1" - // sha256Prefix and sha512Prefix are provided for future compatibility. - sha256Prefix = "sha256" - sha512Prefix = "sha512" - // SHA1SignatureHeader is the GitHub header key used to pass the HMAC-SHA1 hexdigest. - SHA1SignatureHeader = "X-Hub-Signature" - // SHA256SignatureHeader is the GitHub header key used to pass the HMAC-SHA256 hexdigest. - SHA256SignatureHeader = "X-Hub-Signature-256" - // EventTypeHeader is the GitHub header key used to pass the event type. - EventTypeHeader = "X-Github-Event" - // DeliveryIDHeader is the GitHub header key used to pass the unique ID for the webhook event. - DeliveryIDHeader = "X-Github-Delivery" -) - -var ( - // eventTypeMapping maps webhooks types to their corresponding go-github struct types. - eventTypeMapping = map[string]interface{}{ - "branch_protection_rule": &BranchProtectionRuleEvent{}, - "check_run": &CheckRunEvent{}, - "check_suite": &CheckSuiteEvent{}, - "code_scanning_alert": &CodeScanningAlertEvent{}, - "commit_comment": &CommitCommentEvent{}, - "content_reference": &ContentReferenceEvent{}, - "create": &CreateEvent{}, - "delete": &DeleteEvent{}, - "dependabot_alert": &DependabotAlertEvent{}, - "deploy_key": &DeployKeyEvent{}, - "deployment": &DeploymentEvent{}, - "deployment_status": &DeploymentStatusEvent{}, - "deployment_protection_rule": &DeploymentProtectionRuleEvent{}, - "discussion": &DiscussionEvent{}, - "discussion_comment": &DiscussionCommentEvent{}, - "fork": &ForkEvent{}, - "github_app_authorization": &GitHubAppAuthorizationEvent{}, - "gollum": &GollumEvent{}, - "installation": &InstallationEvent{}, - "installation_repositories": &InstallationRepositoriesEvent{}, - "installation_target": &InstallationTargetEvent{}, - "issue_comment": &IssueCommentEvent{}, - "issues": &IssuesEvent{}, - "label": &LabelEvent{}, - "marketplace_purchase": &MarketplacePurchaseEvent{}, - "member": &MemberEvent{}, - "membership": &MembershipEvent{}, - "merge_group": &MergeGroupEvent{}, - "meta": &MetaEvent{}, - "milestone": &MilestoneEvent{}, - "organization": &OrganizationEvent{}, - "org_block": &OrgBlockEvent{}, - "package": &PackageEvent{}, - "page_build": &PageBuildEvent{}, - "personal_access_token_request": &PersonalAccessTokenRequestEvent{}, - "ping": &PingEvent{}, - "project": &ProjectEvent{}, - "project_card": &ProjectCardEvent{}, - "project_column": &ProjectColumnEvent{}, - "projects_v2": &ProjectV2Event{}, - "projects_v2_item": &ProjectV2ItemEvent{}, - "public": &PublicEvent{}, - "pull_request": &PullRequestEvent{}, - "pull_request_review": &PullRequestReviewEvent{}, - "pull_request_review_comment": &PullRequestReviewCommentEvent{}, - "pull_request_review_thread": &PullRequestReviewThreadEvent{}, - "pull_request_target": &PullRequestTargetEvent{}, - "push": &PushEvent{}, - "repository": &RepositoryEvent{}, - "repository_dispatch": &RepositoryDispatchEvent{}, - "repository_import": &RepositoryImportEvent{}, - "repository_vulnerability_alert": &RepositoryVulnerabilityAlertEvent{}, - "release": &ReleaseEvent{}, - "secret_scanning_alert": &SecretScanningAlertEvent{}, - "security_advisory": 
&SecurityAdvisoryEvent{}, - "security_and_analysis": &SecurityAndAnalysisEvent{}, - "star": &StarEvent{}, - "status": &StatusEvent{}, - "team": &TeamEvent{}, - "team_add": &TeamAddEvent{}, - "user": &UserEvent{}, - "watch": &WatchEvent{}, - "workflow_dispatch": &WorkflowDispatchEvent{}, - "workflow_job": &WorkflowJobEvent{}, - "workflow_run": &WorkflowRunEvent{}, - } - // forward mapping of event types to the string names of the structs - messageToTypeName = make(map[string]string, len(eventTypeMapping)) - // Inverse map of the above - typeToMessageMapping = make(map[string]string, len(eventTypeMapping)) -) - -func init() { - for k, v := range eventTypeMapping { - typename := reflect.TypeOf(v).Elem().Name() - messageToTypeName[k] = typename - typeToMessageMapping[typename] = k - } -} - -// genMAC generates the HMAC signature for a message provided the secret key -// and hashFunc. -func genMAC(message, key []byte, hashFunc func() hash.Hash) []byte { - mac := hmac.New(hashFunc, key) - mac.Write(message) - return mac.Sum(nil) -} - -// checkMAC reports whether messageMAC is a valid HMAC tag for message. -func checkMAC(message, messageMAC, key []byte, hashFunc func() hash.Hash) bool { - expectedMAC := genMAC(message, key, hashFunc) - return hmac.Equal(messageMAC, expectedMAC) -} - -// messageMAC returns the hex-decoded HMAC tag from the signature and its -// corresponding hash function. -func messageMAC(signature string) ([]byte, func() hash.Hash, error) { - if signature == "" { - return nil, nil, errors.New("missing signature") - } - sigParts := strings.SplitN(signature, "=", 2) - if len(sigParts) != 2 { - return nil, nil, fmt.Errorf("error parsing signature %q", signature) - } - - var hashFunc func() hash.Hash - switch sigParts[0] { - case sha1Prefix: - hashFunc = sha1.New - case sha256Prefix: - hashFunc = sha256.New - case sha512Prefix: - hashFunc = sha512.New - default: - return nil, nil, fmt.Errorf("unknown hash type prefix: %q", sigParts[0]) - } - - buf, err := hex.DecodeString(sigParts[1]) - if err != nil { - return nil, nil, fmt.Errorf("error decoding signature %q: %v", signature, err) - } - return buf, hashFunc, nil -} - -// ValidatePayloadFromBody validates an incoming GitHub Webhook event request body -// and returns the (JSON) payload. -// The Content-Type header of the payload can be "application/json" or "application/x-www-form-urlencoded". -// If the Content-Type is neither then an error is returned. -// secretToken is the GitHub Webhook secret token. -// If your webhook does not contain a secret token, you can pass an empty secretToken. -// Webhooks without a secret token are not secure and should be avoided. -// -// Example usage: -// -// func (s *GitHubEventMonitor) ServeHTTP(w http.ResponseWriter, r *http.Request) { -// // read signature from request -// signature := "" -// payload, err := github.ValidatePayloadFromBody(r.Header.Get("Content-Type"), r.Body, signature, s.webhookSecretKey) -// if err != nil { ... } -// // Process payload... -// } -func ValidatePayloadFromBody(contentType string, readable io.Reader, signature string, secretToken []byte) (payload []byte, err error) { - var body []byte // Raw body that GitHub uses to calculate the signature. - - switch contentType { - case "application/json": - var err error - if body, err = io.ReadAll(readable); err != nil { - return nil, err - } - - // If the content type is application/json, - // the JSON payload is just the original body. 
- payload = body - - case "application/x-www-form-urlencoded": - // payloadFormParam is the name of the form parameter that the JSON payload - // will be in if a webhook has its content type set to application/x-www-form-urlencoded. - const payloadFormParam = "payload" - - var err error - if body, err = io.ReadAll(readable); err != nil { - return nil, err - } - - // If the content type is application/x-www-form-urlencoded, - // the JSON payload will be under the "payload" form param. - form, err := url.ParseQuery(string(body)) - if err != nil { - return nil, err - } - payload = []byte(form.Get(payloadFormParam)) - - default: - return nil, fmt.Errorf("webhook request has unsupported Content-Type %q", contentType) - } - - // Validate the signature if present or if one is expected (secretToken is non-empty). - if len(secretToken) > 0 || len(signature) > 0 { - if err := ValidateSignature(signature, body, secretToken); err != nil { - return nil, err - } - } - - return payload, nil -} - -// ValidatePayload validates an incoming GitHub Webhook event request -// and returns the (JSON) payload. -// The Content-Type header of the payload can be "application/json" or "application/x-www-form-urlencoded". -// If the Content-Type is neither then an error is returned. -// secretToken is the GitHub Webhook secret token. -// If your webhook does not contain a secret token, you can pass nil or an empty slice. -// This is intended for local development purposes only and all webhooks should ideally set up a secret token. -// -// Example usage: -// -// func (s *GitHubEventMonitor) ServeHTTP(w http.ResponseWriter, r *http.Request) { -// payload, err := github.ValidatePayload(r, s.webhookSecretKey) -// if err != nil { ... } -// // Process payload... -// } -func ValidatePayload(r *http.Request, secretToken []byte) (payload []byte, err error) { - signature := r.Header.Get(SHA256SignatureHeader) - if signature == "" { - signature = r.Header.Get(SHA1SignatureHeader) - } - - contentType, _, err := mime.ParseMediaType(r.Header.Get("Content-Type")) - if err != nil { - return nil, err - } - - return ValidatePayloadFromBody(contentType, r.Body, signature, secretToken) -} - -// ValidateSignature validates the signature for the given payload. -// signature is the GitHub hash signature delivered in the X-Hub-Signature header. -// payload is the JSON payload sent by GitHub Webhooks. -// secretToken is the GitHub Webhook secret token. -// -// GitHub API docs: https://developer.github.com/webhooks/securing/#validating-payloads-from-github -func ValidateSignature(signature string, payload, secretToken []byte) error { - messageMAC, hashFunc, err := messageMAC(signature) - if err != nil { - return err - } - if !checkMAC(payload, messageMAC, secretToken, hashFunc) { - return errors.New("payload signature check failed") - } - return nil -} - -// WebHookType returns the event type of webhook request r. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/events/github-event-types -func WebHookType(r *http.Request) string { - return r.Header.Get(EventTypeHeader) -} - -// DeliveryID returns the unique delivery ID of webhook request r. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/events/github-event-types -func DeliveryID(r *http.Request) string { - return r.Header.Get(DeliveryIDHeader) -} - -// ParseWebHook parses the event payload. For recognized event types, a -// value of the corresponding struct type will be returned (as returned -// by Event.ParsePayload()). 
An error will be returned for unrecognized event -// types. -// -// Example usage: -// -// func (s *GitHubEventMonitor) ServeHTTP(w http.ResponseWriter, r *http.Request) { -// payload, err := github.ValidatePayload(r, s.webhookSecretKey) -// if err != nil { ... } -// event, err := github.ParseWebHook(github.WebHookType(r), payload) -// if err != nil { ... } -// switch event := event.(type) { -// case *github.CommitCommentEvent: -// processCommitCommentEvent(event) -// case *github.CreateEvent: -// processCreateEvent(event) -// ... -// } -// } -func ParseWebHook(messageType string, payload []byte) (interface{}, error) { - eventType, ok := messageToTypeName[messageType] - if !ok { - return nil, fmt.Errorf("unknown X-Github-Event in message: %v", messageType) - } - - event := Event{ - Type: &eventType, - RawPayload: (*json.RawMessage)(&payload), - } - return event.ParsePayload() -} - -// MessageTypes returns a sorted list of all the known GitHub event type strings -// supported by go-github. -func MessageTypes() []string { - types := make([]string, 0, len(eventTypeMapping)) - for t := range eventTypeMapping { - types = append(types, t) - } - sort.Strings(types) - return types -} - -// EventForType returns an empty struct matching the specified GitHub event type. -// If messageType does not match any known event types, it returns nil. -func EventForType(messageType string) interface{} { - prototype := eventTypeMapping[messageType] - if prototype == nil { - return nil - } - // return a _copy_ of the pointed-to-object. Unfortunately, for this we - // need to use reflection. If we store the actual objects in the map, - // we still need to use reflection to convert from `any` to the actual - // type, so this was deemed the lesser of two evils. (#2865) - return reflect.New(reflect.TypeOf(prototype).Elem()).Interface() -} diff --git a/vendor/github.com/google/go-github/v57/github/meta.go b/vendor/github.com/google/go-github/v57/github/meta.go deleted file mode 100644 index 1da8fcf1..00000000 --- a/vendor/github.com/google/go-github/v57/github/meta.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" - "fmt" - "net/url" -) - -// MetaService provides access to functions in the GitHub API that GitHub categorizes as "meta". -type MetaService service - -// APIMeta represents metadata about the GitHub API. -type APIMeta struct { - // An Array of IP addresses in CIDR format specifying the addresses - // that incoming service hooks will originate from on GitHub.com. - Hooks []string `json:"hooks,omitempty"` - - // An Array of IP addresses in CIDR format specifying the Git servers - // for GitHub.com. - Git []string `json:"git,omitempty"` - - // Whether authentication with username and password is supported. - // (GitHub Enterprise instances using CAS or OAuth for authentication - // will return false. Features like Basic Authentication with a - // username and password, sudo mode, and two-factor authentication are - // not supported on these servers.) - VerifiablePasswordAuthentication *bool `json:"verifiable_password_authentication,omitempty"` - - // An array of IP addresses in CIDR format specifying the addresses - // which serve GitHub Packages. 
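A minimal webhook handler following the usage shown in the ValidatePayload and ParseWebHook doc comments above; the secret is a placeholder and must match the secret configured on the GitHub webhook.

package main

import (
	"log"
	"net/http"

	"github.com/google/go-github/v57/github"
)

var webhookSecret = []byte("placeholder-secret")

// handleWebhook verifies the HMAC signature, then dispatches on the event type.
func handleWebhook(w http.ResponseWriter, r *http.Request) {
	payload, err := github.ValidatePayload(r, webhookSecret)
	if err != nil {
		http.Error(w, "invalid signature", http.StatusForbidden)
		return
	}
	event, err := github.ParseWebHook(github.WebHookType(r), payload)
	if err != nil {
		http.Error(w, "unsupported event", http.StatusBadRequest)
		return
	}
	switch e := event.(type) {
	case *github.WorkflowJobEvent:
		log.Printf("workflow job %s (delivery %s)", e.GetAction(), github.DeliveryID(r))
	case *github.PingEvent:
		log.Printf("ping from hook %d", e.GetHookID())
	default:
		log.Printf("ignoring event %T", e)
	}
	w.WriteHeader(http.StatusOK)
}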
- Packages []string `json:"packages,omitempty"` - - // An array of IP addresses in CIDR format specifying the addresses - // which serve GitHub Pages websites. - Pages []string `json:"pages,omitempty"` - - // An Array of IP addresses specifying the addresses that source imports - // will originate from on GitHub.com. - Importer []string `json:"importer,omitempty"` - - // An array of IP addresses in CIDR format specifying the IP addresses - // GitHub Actions will originate from. - Actions []string `json:"actions,omitempty"` - - // An array of IP addresses in CIDR format specifying the IP addresses - // Dependabot will originate from. - Dependabot []string `json:"dependabot,omitempty"` - - // A map of algorithms to SSH key fingerprints. - SSHKeyFingerprints map[string]string `json:"ssh_key_fingerprints,omitempty"` - - // An array of SSH keys. - SSHKeys []string `json:"ssh_keys,omitempty"` - - // An array of IP addresses in CIDR format specifying the addresses - // which serve GitHub websites. - Web []string `json:"web,omitempty"` - - // An array of IP addresses in CIDR format specifying the addresses - // which serve GitHub APIs. - API []string `json:"api,omitempty"` -} - -// Get returns information about GitHub.com, the service. Or, if you access -// this endpoint on your organization’s GitHub Enterprise installation, this -// endpoint provides information about that installation. -// -// GitHub API docs: https://docs.github.com/rest/meta/meta#get-github-meta-information -// -//meta:operation GET /meta -func (s *MetaService) Get(ctx context.Context) (*APIMeta, *Response, error) { - req, err := s.client.NewRequest("GET", "meta", nil) - if err != nil { - return nil, nil, err - } - - meta := new(APIMeta) - resp, err := s.client.Do(ctx, req, meta) - if err != nil { - return nil, resp, err - } - - return meta, resp, nil -} - -// APIMeta returns information about GitHub.com. -// -// Deprecated: Use MetaService.Get instead. -func (c *Client) APIMeta(ctx context.Context) (*APIMeta, *Response, error) { - return c.Meta.Get(ctx) -} - -// Octocat returns an ASCII art octocat with the specified message in a speech -// bubble. If message is empty, a random zen phrase is used. -// -// GitHub API docs: https://docs.github.com/rest/meta/meta#get-octocat -// -//meta:operation GET /octocat -func (s *MetaService) Octocat(ctx context.Context, message string) (string, *Response, error) { - u := "octocat" - if message != "" { - u = fmt.Sprintf("%s?s=%s", u, url.QueryEscape(message)) - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", nil, err - } - - buf := new(bytes.Buffer) - resp, err := s.client.Do(ctx, req, buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// Octocat returns an ASCII art octocat with the specified message in a speech -// bubble. If message is empty, a random zen phrase is used. -// -// Deprecated: Use MetaService.Octocat instead. -func (c *Client) Octocat(ctx context.Context, message string) (string, *Response, error) { - return c.Meta.Octocat(ctx, message) -} - -// Zen returns a random line from The Zen of GitHub. 
-// -// See also: http://warpspire.com/posts/taste/ -// -// GitHub API docs: https://docs.github.com/rest/meta/meta#get-the-zen-of-github -// -//meta:operation GET /zen -func (s *MetaService) Zen(ctx context.Context) (string, *Response, error) { - req, err := s.client.NewRequest("GET", "zen", nil) - if err != nil { - return "", nil, err - } - - buf := new(bytes.Buffer) - resp, err := s.client.Do(ctx, req, buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// Zen returns a random line from The Zen of GitHub. -// -// Deprecated: Use MetaService.Zen instead. -func (c *Client) Zen(ctx context.Context) (string, *Response, error) { - return c.Meta.Zen(ctx) -} diff --git a/vendor/github.com/google/go-github/v57/github/migrations.go b/vendor/github.com/google/go-github/v57/github/migrations.go deleted file mode 100644 index 5af88170..00000000 --- a/vendor/github.com/google/go-github/v57/github/migrations.go +++ /dev/null @@ -1,240 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "errors" - "fmt" - "net/http" - "strings" -) - -// MigrationService provides access to the migration related functions -// in the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/migration/ -type MigrationService service - -// Migration represents a GitHub migration (archival). -type Migration struct { - ID *int64 `json:"id,omitempty"` - GUID *string `json:"guid,omitempty"` - // State is the current state of a migration. - // Possible values are: - // "pending" which means the migration hasn't started yet, - // "exporting" which means the migration is in progress, - // "exported" which means the migration finished successfully, or - // "failed" which means the migration failed. - State *string `json:"state,omitempty"` - // LockRepositories indicates whether repositories are locked (to prevent - // manipulation) while migrating data. - LockRepositories *bool `json:"lock_repositories,omitempty"` - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` - URL *string `json:"url,omitempty"` - CreatedAt *string `json:"created_at,omitempty"` - UpdatedAt *string `json:"updated_at,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -func (m Migration) String() string { - return Stringify(m) -} - -// MigrationOptions specifies the optional parameters to Migration methods. -type MigrationOptions struct { - // LockRepositories indicates whether repositories should be locked (to prevent - // manipulation) while migrating data. - LockRepositories bool - - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments bool -} - -// startMigration represents the body of a StartMigration request. -type startMigration struct { - // Repositories is a slice of repository names to migrate. - Repositories []string `json:"repositories,omitempty"` - - // LockRepositories indicates whether repositories should be locked (to prevent - // manipulation) while migrating data. 
- LockRepositories *bool `json:"lock_repositories,omitempty"` - - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` -} - -// StartMigration starts the generation of a migration archive. -// repos is a slice of repository names to migrate. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#start-an-organization-migration -// -//meta:operation POST /orgs/{org}/migrations -func (s *MigrationService) StartMigration(ctx context.Context, org string, repos []string, opts *MigrationOptions) (*Migration, *Response, error) { - u := fmt.Sprintf("orgs/%v/migrations", org) - - body := &startMigration{Repositories: repos} - if opts != nil { - body.LockRepositories = Bool(opts.LockRepositories) - body.ExcludeAttachments = Bool(opts.ExcludeAttachments) - } - - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - m := &Migration{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// ListMigrations lists the most recent migrations. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#list-organization-migrations -// -//meta:operation GET /orgs/{org}/migrations -func (s *MigrationService) ListMigrations(ctx context.Context, org string, opts *ListOptions) ([]*Migration, *Response, error) { - u := fmt.Sprintf("orgs/%v/migrations", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - var m []*Migration - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// MigrationStatus gets the status of a specific migration archive. -// id is the migration ID. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#get-an-organization-migration-status -// -//meta:operation GET /orgs/{org}/migrations/{migration_id} -func (s *MigrationService) MigrationStatus(ctx context.Context, org string, id int64) (*Migration, *Response, error) { - u := fmt.Sprintf("orgs/%v/migrations/%v", org, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - m := &Migration{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// MigrationArchiveURL fetches a migration archive URL. -// id is the migration ID. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#download-an-organization-migration-archive -// -//meta:operation GET /orgs/{org}/migrations/{migration_id}/archive -func (s *MigrationService) MigrationArchiveURL(ctx context.Context, org string, id int64) (url string, err error) { - u := fmt.Sprintf("orgs/%v/migrations/%v/archive", org, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", err - } - - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeMigrationsPreview) - - s.client.clientMu.Lock() - defer s.client.clientMu.Unlock() - - // Disable the redirect mechanism because AWS fails if the GitHub auth token is provided. - var loc string - saveRedirect := s.client.client.CheckRedirect - s.client.client.CheckRedirect = func(req *http.Request, via []*http.Request) error { - loc = req.URL.String() - return errors.New("disable redirect") - } - defer func() { s.client.client.CheckRedirect = saveRedirect }() - - _, err = s.client.Do(ctx, req, nil) // expect error from disable redirect - if err == nil { - return "", errors.New("expected redirect, none provided") - } - if !strings.Contains(err.Error(), "disable redirect") { - return "", err - } - return loc, nil -} - -// DeleteMigration deletes a previous migration archive. -// id is the migration ID. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#delete-an-organization-migration-archive -// -//meta:operation DELETE /orgs/{org}/migrations/{migration_id}/archive -func (s *MigrationService) DeleteMigration(ctx context.Context, org string, id int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/migrations/%v/archive", org, id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - return s.client.Do(ctx, req, nil) -} - -// UnlockRepo unlocks a repository that was locked for migration. -// id is the migration ID. -// You should unlock each migrated repository and delete them when the migration -// is complete and you no longer need the source data. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#unlock-an-organization-repository -// -//meta:operation DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock -func (s *MigrationService) UnlockRepo(ctx context.Context, org string, id int64, repo string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/migrations/%v/repos/%v/lock", org, id, repo) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/migrations_source_import.go b/vendor/github.com/google/go-github/v57/github/migrations_source_import.go deleted file mode 100644 index 3b161232..00000000 --- a/vendor/github.com/google/go-github/v57/github/migrations_source_import.go +++ /dev/null @@ -1,321 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Import represents a repository import request. -type Import struct { - // The URL of the originating repository. - VCSURL *string `json:"vcs_url,omitempty"` - // The originating VCS type. Can be one of 'subversion', 'git', - // 'mercurial', or 'tfvc'. Without this parameter, the import job will - // take additional time to detect the VCS type before beginning the - // import. This detection step will be reflected in the response. - VCS *string `json:"vcs,omitempty"` - // VCSUsername and VCSPassword are only used for StartImport calls that - // are importing a password-protected repository. 
- VCSUsername *string `json:"vcs_username,omitempty"` - VCSPassword *string `json:"vcs_password,omitempty"` - // For a tfvc import, the name of the project that is being imported. - TFVCProject *string `json:"tfvc_project,omitempty"` - - // LFS related fields that may be preset in the Import Progress response - - // Describes whether the import has been opted in or out of using Git - // LFS. The value can be 'opt_in', 'opt_out', or 'undecided' if no - // action has been taken. - UseLFS *string `json:"use_lfs,omitempty"` - // Describes whether files larger than 100MB were found during the - // importing step. - HasLargeFiles *bool `json:"has_large_files,omitempty"` - // The total size in gigabytes of files larger than 100MB found in the - // originating repository. - LargeFilesSize *int `json:"large_files_size,omitempty"` - // The total number of files larger than 100MB found in the originating - // repository. To see a list of these files, call LargeFiles. - LargeFilesCount *int `json:"large_files_count,omitempty"` - - // Identifies the current status of an import. An import that does not - // have errors will progress through these steps: - // - // detecting - the "detection" step of the import is in progress - // because the request did not include a VCS parameter. The - // import is identifying the type of source control present at - // the URL. - // importing - the "raw" step of the import is in progress. This is - // where commit data is fetched from the original repository. - // The import progress response will include CommitCount (the - // total number of raw commits that will be imported) and - // Percent (0 - 100, the current progress through the import). - // mapping - the "rewrite" step of the import is in progress. This - // is where SVN branches are converted to Git branches, and - // where author updates are applied. The import progress - // response does not include progress information. - // pushing - the "push" step of the import is in progress. This is - // where the importer updates the repository on GitHub. The - // import progress response will include PushPercent, which is - // the percent value reported by git push when it is "Writing - // objects". - // complete - the import is complete, and the repository is ready - // on GitHub. - // - // If there are problems, you will see one of these in the status field: - // - // auth_failed - the import requires authentication in order to - // connect to the original repository. Make an UpdateImport - // request, and include VCSUsername and VCSPassword. - // error - the import encountered an error. The import progress - // response will include the FailedStep and an error message. - // Contact GitHub support for more information. - // detection_needs_auth - the importer requires authentication for - // the originating repository to continue detection. Make an - // UpdatImport request, and include VCSUsername and - // VCSPassword. - // detection_found_nothing - the importer didn't recognize any - // source control at the URL. - // detection_found_multiple - the importer found several projects - // or repositories at the provided URL. When this is the case, - // the Import Progress response will also include a - // ProjectChoices field with the possible project choices as - // values. Make an UpdateImport request, and include VCS and - // (if applicable) TFVCProject. 
- Status *string `json:"status,omitempty"` - CommitCount *int `json:"commit_count,omitempty"` - StatusText *string `json:"status_text,omitempty"` - AuthorsCount *int `json:"authors_count,omitempty"` - Percent *int `json:"percent,omitempty"` - PushPercent *int `json:"push_percent,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - AuthorsURL *string `json:"authors_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - Message *string `json:"message,omitempty"` - FailedStep *string `json:"failed_step,omitempty"` - - // Human readable display name, provided when the Import appears as - // part of ProjectChoices. - HumanName *string `json:"human_name,omitempty"` - - // When the importer finds several projects or repositories at the - // provided URLs, this will identify the available choices. Call - // UpdateImport with the selected Import value. - ProjectChoices []*Import `json:"project_choices,omitempty"` -} - -func (i Import) String() string { - return Stringify(i) -} - -// SourceImportAuthor identifies an author imported from a source repository. -// -// GitHub API docs: https://docs.github.com/rest/migration/source_imports/#get-commit-authors -type SourceImportAuthor struct { - ID *int64 `json:"id,omitempty"` - RemoteID *string `json:"remote_id,omitempty"` - RemoteName *string `json:"remote_name,omitempty"` - Email *string `json:"email,omitempty"` - Name *string `json:"name,omitempty"` - URL *string `json:"url,omitempty"` - ImportURL *string `json:"import_url,omitempty"` -} - -func (a SourceImportAuthor) String() string { - return Stringify(a) -} - -// LargeFile identifies a file larger than 100MB found during a repository import. -// -// GitHub API docs: https://docs.github.com/rest/migration/source_imports/#get-large-files -type LargeFile struct { - RefName *string `json:"ref_name,omitempty"` - Path *string `json:"path,omitempty"` - OID *string `json:"oid,omitempty"` - Size *int `json:"size,omitempty"` -} - -func (f LargeFile) String() string { - return Stringify(f) -} - -// StartImport initiates a repository import. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#start-an-import -// -//meta:operation PUT /repos/{owner}/{repo}/import -func (s *MigrationService) StartImport(ctx context.Context, owner, repo string, in *Import) (*Import, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import", owner, repo) - req, err := s.client.NewRequest("PUT", u, in) - if err != nil { - return nil, nil, err - } - - out := new(Import) - resp, err := s.client.Do(ctx, req, out) - if err != nil { - return nil, resp, err - } - - return out, resp, nil -} - -// ImportProgress queries for the status and progress of an ongoing repository import. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#get-an-import-status -// -//meta:operation GET /repos/{owner}/{repo}/import -func (s *MigrationService) ImportProgress(ctx context.Context, owner, repo string) (*Import, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - out := new(Import) - resp, err := s.client.Do(ctx, req, out) - if err != nil { - return nil, resp, err - } - - return out, resp, nil -} - -// UpdateImport initiates a repository import. 
-// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#update-an-import -// -//meta:operation PATCH /repos/{owner}/{repo}/import -func (s *MigrationService) UpdateImport(ctx context.Context, owner, repo string, in *Import) (*Import, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import", owner, repo) - req, err := s.client.NewRequest("PATCH", u, in) - if err != nil { - return nil, nil, err - } - - out := new(Import) - resp, err := s.client.Do(ctx, req, out) - if err != nil { - return nil, resp, err - } - - return out, resp, nil -} - -// CommitAuthors gets the authors mapped from the original repository. -// -// Each type of source control system represents authors in a different way. -// For example, a Git commit author has a display name and an email address, -// but a Subversion commit author just has a username. The GitHub Importer will -// make the author information valid, but the author might not be correct. For -// example, it will change the bare Subversion username "hubot" into something -// like "hubot ". -// -// This method and MapCommitAuthor allow you to provide correct Git author -// information. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#get-commit-authors -// -//meta:operation GET /repos/{owner}/{repo}/import/authors -func (s *MigrationService) CommitAuthors(ctx context.Context, owner, repo string) ([]*SourceImportAuthor, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/authors", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var authors []*SourceImportAuthor - resp, err := s.client.Do(ctx, req, &authors) - if err != nil { - return nil, resp, err - } - - return authors, resp, nil -} - -// MapCommitAuthor updates an author's identity for the import. Your -// application can continue updating authors any time before you push new -// commits to the repository. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#map-a-commit-author -// -//meta:operation PATCH /repos/{owner}/{repo}/import/authors/{author_id} -func (s *MigrationService) MapCommitAuthor(ctx context.Context, owner, repo string, id int64, author *SourceImportAuthor) (*SourceImportAuthor, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/authors/%v", owner, repo, id) - req, err := s.client.NewRequest("PATCH", u, author) - if err != nil { - return nil, nil, err - } - - out := new(SourceImportAuthor) - resp, err := s.client.Do(ctx, req, out) - if err != nil { - return nil, resp, err - } - - return out, resp, nil -} - -// SetLFSPreference sets whether imported repositories should use Git LFS for -// files larger than 100MB. Only the UseLFS field on the provided Import is -// used. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference -// -//meta:operation PATCH /repos/{owner}/{repo}/import/lfs -func (s *MigrationService) SetLFSPreference(ctx context.Context, owner, repo string, in *Import) (*Import, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/lfs", owner, repo) - req, err := s.client.NewRequest("PATCH", u, in) - if err != nil { - return nil, nil, err - } - - out := new(Import) - resp, err := s.client.Do(ctx, req, out) - if err != nil { - return nil, resp, err - } - - return out, resp, nil -} - -// LargeFiles lists files larger than 100MB found during the import. 
-// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#get-large-files -// -//meta:operation GET /repos/{owner}/{repo}/import/large_files -func (s *MigrationService) LargeFiles(ctx context.Context, owner, repo string) ([]*LargeFile, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/large_files", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var files []*LargeFile - resp, err := s.client.Do(ctx, req, &files) - if err != nil { - return nil, resp, err - } - - return files, resp, nil -} - -// CancelImport stops an import for a repository. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#cancel-an-import -// -//meta:operation DELETE /repos/{owner}/{repo}/import -func (s *MigrationService) CancelImport(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/import", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/migrations_user.go b/vendor/github.com/google/go-github/v57/github/migrations_user.go deleted file mode 100644 index 1f907cd4..00000000 --- a/vendor/github.com/google/go-github/v57/github/migrations_user.go +++ /dev/null @@ -1,230 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "errors" - "fmt" - "net/http" -) - -// UserMigration represents a GitHub migration (archival). -type UserMigration struct { - ID *int64 `json:"id,omitempty"` - GUID *string `json:"guid,omitempty"` - // State is the current state of a migration. - // Possible values are: - // "pending" which means the migration hasn't started yet, - // "exporting" which means the migration is in progress, - // "exported" which means the migration finished successfully, or - // "failed" which means the migration failed. - State *string `json:"state,omitempty"` - // LockRepositories indicates whether repositories are locked (to prevent - // manipulation) while migrating data. - LockRepositories *bool `json:"lock_repositories,omitempty"` - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` - URL *string `json:"url,omitempty"` - CreatedAt *string `json:"created_at,omitempty"` - UpdatedAt *string `json:"updated_at,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -func (m UserMigration) String() string { - return Stringify(m) -} - -// UserMigrationOptions specifies the optional parameters to Migration methods. -type UserMigrationOptions struct { - // LockRepositories indicates whether repositories should be locked (to prevent - // manipulation) while migrating data. - LockRepositories bool - - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments bool -} - -// startUserMigration represents the body of a StartMigration request. -type startUserMigration struct { - // Repositories is a slice of repository names to migrate. 
- Repositories []string `json:"repositories,omitempty"` - - // LockRepositories indicates whether repositories should be locked (to prevent - // manipulation) while migrating data. - LockRepositories *bool `json:"lock_repositories,omitempty"` - - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` -} - -// StartUserMigration starts the generation of a migration archive. -// repos is a slice of repository names to migrate. -// -// GitHub API docs: https://docs.github.com/rest/migrations/users#start-a-user-migration -// -//meta:operation POST /user/migrations -func (s *MigrationService) StartUserMigration(ctx context.Context, repos []string, opts *UserMigrationOptions) (*UserMigration, *Response, error) { - u := "user/migrations" - - body := &startUserMigration{Repositories: repos} - if opts != nil { - body.LockRepositories = Bool(opts.LockRepositories) - body.ExcludeAttachments = Bool(opts.ExcludeAttachments) - } - - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - m := &UserMigration{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// ListUserMigrations lists the most recent migrations. -// -// GitHub API docs: https://docs.github.com/rest/migrations/users#list-user-migrations -// -//meta:operation GET /user/migrations -func (s *MigrationService) ListUserMigrations(ctx context.Context, opts *ListOptions) ([]*UserMigration, *Response, error) { - u := "user/migrations" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - var m []*UserMigration - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// UserMigrationStatus gets the status of a specific migration archive. -// id is the migration ID. -// -// GitHub API docs: https://docs.github.com/rest/migrations/users#get-a-user-migration-status -// -//meta:operation GET /user/migrations/{migration_id} -func (s *MigrationService) UserMigrationStatus(ctx context.Context, id int64) (*UserMigration, *Response, error) { - u := fmt.Sprintf("user/migrations/%v", id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - m := &UserMigration{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// UserMigrationArchiveURL gets the URL for a specific migration archive. -// id is the migration ID. 
-// -// GitHub API docs: https://docs.github.com/rest/migrations/users#download-a-user-migration-archive -// -//meta:operation GET /user/migrations/{migration_id}/archive -func (s *MigrationService) UserMigrationArchiveURL(ctx context.Context, id int64) (string, error) { - url := fmt.Sprintf("user/migrations/%v/archive", id) - - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return "", err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - m := &UserMigration{} - - var loc string - originalRedirect := s.client.client.CheckRedirect - s.client.client.CheckRedirect = func(req *http.Request, via []*http.Request) error { - loc = req.URL.String() - return http.ErrUseLastResponse - } - defer func() { - s.client.client.CheckRedirect = originalRedirect - }() - resp, err := s.client.Do(ctx, req, m) - if err == nil { - return "", errors.New("expected redirect, none provided") - } - loc = resp.Header.Get("Location") - return loc, nil -} - -// DeleteUserMigration will delete a previous migration archive. -// id is the migration ID. -// -// GitHub API docs: https://docs.github.com/rest/migrations/users#delete-a-user-migration-archive -// -//meta:operation DELETE /user/migrations/{migration_id}/archive -func (s *MigrationService) DeleteUserMigration(ctx context.Context, id int64) (*Response, error) { - url := fmt.Sprintf("user/migrations/%v/archive", id) - - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - return s.client.Do(ctx, req, nil) -} - -// UnlockUserRepo will unlock a repo that was locked for migration. -// id is migration ID. -// You should unlock each migrated repository and delete them when the migration -// is complete and you no longer need the source data. -// -// GitHub API docs: https://docs.github.com/rest/migrations/users#unlock-a-user-repository -// -//meta:operation DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock -func (s *MigrationService) UnlockUserRepo(ctx context.Context, id int64, repo string) (*Response, error) { - url := fmt.Sprintf("user/migrations/%v/repos/%v/lock", id, repo) - - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs.go b/vendor/github.com/google/go-github/v57/github/orgs.go deleted file mode 100644 index 4d346527..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs.go +++ /dev/null @@ -1,316 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// OrganizationsService provides access to the organization related functions -// in the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/orgs/ -type OrganizationsService service - -// Organization represents a GitHub organization account. 
-type Organization struct { - Login *string `json:"login,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Name *string `json:"name,omitempty"` - Company *string `json:"company,omitempty"` - Blog *string `json:"blog,omitempty"` - Location *string `json:"location,omitempty"` - Email *string `json:"email,omitempty"` - TwitterUsername *string `json:"twitter_username,omitempty"` - Description *string `json:"description,omitempty"` - PublicRepos *int `json:"public_repos,omitempty"` - PublicGists *int `json:"public_gists,omitempty"` - Followers *int `json:"followers,omitempty"` - Following *int `json:"following,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - TotalPrivateRepos *int64 `json:"total_private_repos,omitempty"` - OwnedPrivateRepos *int64 `json:"owned_private_repos,omitempty"` - PrivateGists *int `json:"private_gists,omitempty"` - DiskUsage *int `json:"disk_usage,omitempty"` - Collaborators *int `json:"collaborators,omitempty"` - BillingEmail *string `json:"billing_email,omitempty"` - Type *string `json:"type,omitempty"` - Plan *Plan `json:"plan,omitempty"` - TwoFactorRequirementEnabled *bool `json:"two_factor_requirement_enabled,omitempty"` - IsVerified *bool `json:"is_verified,omitempty"` - HasOrganizationProjects *bool `json:"has_organization_projects,omitempty"` - HasRepositoryProjects *bool `json:"has_repository_projects,omitempty"` - - // DefaultRepoPermission can be one of: "read", "write", "admin", or "none". (Default: "read"). - // It is only used in OrganizationsService.Edit. - DefaultRepoPermission *string `json:"default_repository_permission,omitempty"` - // DefaultRepoSettings can be one of: "read", "write", "admin", or "none". (Default: "read"). - // It is only used in OrganizationsService.Get. - DefaultRepoSettings *string `json:"default_repository_settings,omitempty"` - - // MembersCanCreateRepos default value is true and is only used in Organizations.Edit. - MembersCanCreateRepos *bool `json:"members_can_create_repositories,omitempty"` - - // https://developer.github.com/changes/2019-12-03-internal-visibility-changes/#rest-v3-api - MembersCanCreatePublicRepos *bool `json:"members_can_create_public_repositories,omitempty"` - MembersCanCreatePrivateRepos *bool `json:"members_can_create_private_repositories,omitempty"` - MembersCanCreateInternalRepos *bool `json:"members_can_create_internal_repositories,omitempty"` - - // MembersCanForkPrivateRepos toggles whether organization members can fork private organization repositories. - MembersCanForkPrivateRepos *bool `json:"members_can_fork_private_repositories,omitempty"` - - // MembersAllowedRepositoryCreationType denotes if organization members can create repositories - // and the type of repositories they can create. Possible values are: "all", "private", or "none". - // - // Deprecated: Use MembersCanCreatePublicRepos, MembersCanCreatePrivateRepos, MembersCanCreateInternalRepos - // instead. The new fields overrides the existing MembersAllowedRepositoryCreationType during 'edit' - // operation and does not consider 'internal' repositories during 'get' operation - MembersAllowedRepositoryCreationType *string `json:"members_allowed_repository_creation_type,omitempty"` - - // MembersCanCreatePages toggles whether organization members can create GitHub Pages sites. 
- MembersCanCreatePages *bool `json:"members_can_create_pages,omitempty"` - // MembersCanCreatePublicPages toggles whether organization members can create public GitHub Pages sites. - MembersCanCreatePublicPages *bool `json:"members_can_create_public_pages,omitempty"` - // MembersCanCreatePrivatePages toggles whether organization members can create private GitHub Pages sites. - MembersCanCreatePrivatePages *bool `json:"members_can_create_private_pages,omitempty"` - // WebCommitSignoffRequire toggles - WebCommitSignoffRequired *bool `json:"web_commit_signoff_required,omitempty"` - // AdvancedSecurityAuditLogEnabled toggles whether the advanced security audit log is enabled. - AdvancedSecurityEnabledForNewRepos *bool `json:"advanced_security_enabled_for_new_repositories,omitempty"` - // DependabotAlertsEnabled toggles whether dependabot alerts are enabled. - DependabotAlertsEnabledForNewRepos *bool `json:"dependabot_alerts_enabled_for_new_repositories,omitempty"` - // DependabotSecurityUpdatesEnabled toggles whether dependabot security updates are enabled. - DependabotSecurityUpdatesEnabledForNewRepos *bool `json:"dependabot_security_updates_enabled_for_new_repositories,omitempty"` - // DependabotGraphEnabledForNewRepos toggles whether dependabot graph is enabled on new repositories. - DependencyGraphEnabledForNewRepos *bool `json:"dependency_graph_enabled_for_new_repositories,omitempty"` - // SecretScanningEnabled toggles whether secret scanning is enabled on new repositories. - SecretScanningEnabledForNewRepos *bool `json:"secret_scanning_enabled_for_new_repositories,omitempty"` - // SecretScanningPushProtectionEnabledForNewRepos toggles whether secret scanning push protection is enabled on new repositories. - SecretScanningPushProtectionEnabledForNewRepos *bool `json:"secret_scanning_push_protection_enabled_for_new_repositories,omitempty"` - - // API URLs - URL *string `json:"url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - HooksURL *string `json:"hooks_url,omitempty"` - IssuesURL *string `json:"issues_url,omitempty"` - MembersURL *string `json:"members_url,omitempty"` - PublicMembersURL *string `json:"public_members_url,omitempty"` - ReposURL *string `json:"repos_url,omitempty"` -} - -// OrganizationInstallations represents GitHub app installations for an organization. -type OrganizationInstallations struct { - TotalCount *int `json:"total_count,omitempty"` - Installations []*Installation `json:"installations,omitempty"` -} - -func (o Organization) String() string { - return Stringify(o) -} - -// Plan represents the payment plan for an account. See plans at https://github.com/plans. -type Plan struct { - Name *string `json:"name,omitempty"` - Space *int `json:"space,omitempty"` - Collaborators *int `json:"collaborators,omitempty"` - PrivateRepos *int64 `json:"private_repos,omitempty"` - FilledSeats *int `json:"filled_seats,omitempty"` - Seats *int `json:"seats,omitempty"` -} - -func (p Plan) String() string { - return Stringify(p) -} - -// OrganizationsListOptions specifies the optional parameters to the -// OrganizationsService.ListAll method. -type OrganizationsListOptions struct { - // Since filters Organizations by ID. - Since int64 `url:"since,omitempty"` - - // Note: Pagination is powered exclusively by the Since parameter, - // ListOptions.Page has no effect. - // ListOptions.PerPage controls an undocumented GitHub API parameter. - ListOptions -} - -// ListAll lists all organizations, in the order that they were created on GitHub. 
-// -// Note: Pagination is powered exclusively by the since parameter. To continue -// listing the next set of organizations, use the ID of the last-returned organization -// as the opts.Since parameter for the next call. -// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#list-organizations -// -//meta:operation GET /organizations -func (s *OrganizationsService) ListAll(ctx context.Context, opts *OrganizationsListOptions) ([]*Organization, *Response, error) { - u, err := addOptions("organizations", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - orgs := []*Organization{} - resp, err := s.client.Do(ctx, req, &orgs) - if err != nil { - return nil, resp, err - } - return orgs, resp, nil -} - -// List the organizations for a user. Passing the empty string will list -// organizations for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#list-organizations-for-a-user -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#list-organizations-for-the-authenticated-user -// -//meta:operation GET /user/orgs -//meta:operation GET /users/{username}/orgs -func (s *OrganizationsService) List(ctx context.Context, user string, opts *ListOptions) ([]*Organization, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/orgs", user) - } else { - u = "user/orgs" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var orgs []*Organization - resp, err := s.client.Do(ctx, req, &orgs) - if err != nil { - return nil, resp, err - } - - return orgs, resp, nil -} - -// Get fetches an organization by name. -// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#get-an-organization -// -//meta:operation GET /orgs/{org} -func (s *OrganizationsService) Get(ctx context.Context, org string) (*Organization, *Response, error) { - u := fmt.Sprintf("orgs/%v", org) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMemberAllowedRepoCreationTypePreview) - - organization := new(Organization) - resp, err := s.client.Do(ctx, req, organization) - if err != nil { - return nil, resp, err - } - - return organization, resp, nil -} - -// GetByID fetches an organization. -// -// Note: GetByID uses the undocumented GitHub API endpoint "GET /organizations/{organization_id}". -// -//meta:operation GET /organizations/{organization_id} -func (s *OrganizationsService) GetByID(ctx context.Context, id int64) (*Organization, *Response, error) { - u := fmt.Sprintf("organizations/%d", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - organization := new(Organization) - resp, err := s.client.Do(ctx, req, organization) - if err != nil { - return nil, resp, err - } - - return organization, resp, nil -} - -// Edit an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#update-an-organization -// -//meta:operation PATCH /orgs/{org} -func (s *OrganizationsService) Edit(ctx context.Context, name string, org *Organization) (*Organization, *Response, error) { - u := fmt.Sprintf("orgs/%v", name) - req, err := s.client.NewRequest("PATCH", u, org) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMemberAllowedRepoCreationTypePreview) - - o := new(Organization) - resp, err := s.client.Do(ctx, req, o) - if err != nil { - return nil, resp, err - } - - return o, resp, nil -} - -// Delete an organization by name. -// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#delete-an-organization -// -//meta:operation DELETE /orgs/{org} -func (s *OrganizationsService) Delete(ctx context.Context, org string) (*Response, error) { - u := fmt.Sprintf("orgs/%v", org) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListInstallations lists installations for an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#list-app-installations-for-an-organization -// -//meta:operation GET /orgs/{org}/installations -func (s *OrganizationsService) ListInstallations(ctx context.Context, org string, opts *ListOptions) (*OrganizationInstallations, *Response, error) { - u := fmt.Sprintf("orgs/%v/installations", org) - - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - result := new(OrganizationInstallations) - resp, err := s.client.Do(ctx, req, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_actions_allowed.go b/vendor/github.com/google/go-github/v57/github/orgs_actions_allowed.go deleted file mode 100644 index b115e094..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_actions_allowed.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" -) - -// GetActionsAllowed gets the actions that are allowed in an organization. -// -// Deprecated: please use `client.Actions.GetActionsAllowed` instead. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/permissions/selected-actions -func (s *OrganizationsService) GetActionsAllowed(ctx context.Context, org string) (*ActionsAllowed, *Response, error) { - s2 := (*ActionsService)(s) - return s2.GetActionsAllowed(ctx, org) -} - -// EditActionsAllowed sets the actions that are allowed in an organization. -// -// Deprecated: please use `client.Actions.EditActionsAllowed` instead. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions/selected-actions -func (s *OrganizationsService) EditActionsAllowed(ctx context.Context, org string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { - s2 := (*ActionsService)(s) - return s2.EditActionsAllowed(ctx, org, actionsAllowed) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_actions_permissions.go b/vendor/github.com/google/go-github/v57/github/orgs_actions_permissions.go deleted file mode 100644 index 97df1c96..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_actions_permissions.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" -) - -// GetActionsPermissions gets the GitHub Actions permissions policy for repositories and allowed actions in an organization. -// -// Deprecated: please use `client.Actions.GetActionsPermissions` instead. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-github-actions-permissions-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/permissions -func (s *OrganizationsService) GetActionsPermissions(ctx context.Context, org string) (*ActionsPermissions, *Response, error) { - s2 := (*ActionsService)(s) - return s2.GetActionsPermissions(ctx, org) -} - -// EditActionsPermissions sets the permissions policy for repositories and allowed actions in an organization. -// -// Deprecated: please use `client.Actions.EditActionsPermissions` instead. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-github-actions-permissions-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions -func (s *OrganizationsService) EditActionsPermissions(ctx context.Context, org string, actionsPermissions ActionsPermissions) (*ActionsPermissions, *Response, error) { - s2 := (*ActionsService)(s) - return s2.EditActionsPermissions(ctx, org, actionsPermissions) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_audit_log.go b/vendor/github.com/google/go-github/v57/github/orgs_audit_log.go deleted file mode 100644 index e3afd311..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_audit_log.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetAuditLogOptions sets up optional parameters to query audit-log endpoint. -type GetAuditLogOptions struct { - Phrase *string `url:"phrase,omitempty"` // A search phrase. (Optional.) - Include *string `url:"include,omitempty"` // Event type includes. Can be one of "web", "git", "all". Default: "web". (Optional.) - Order *string `url:"order,omitempty"` // The order of audit log events. Can be one of "asc" or "desc". Default: "desc". (Optional.) - - ListCursorOptions -} - -// HookConfig describes metadata about a webhook configuration. 
-type HookConfig struct { - ContentType *string `json:"content_type,omitempty"` - InsecureSSL *string `json:"insecure_ssl,omitempty"` - URL *string `json:"url,omitempty"` - - // Secret is returned obfuscated by GitHub, but it can be set for outgoing requests. - Secret *string `json:"secret,omitempty"` -} - -// ActorLocation contains information about reported location for an actor. -type ActorLocation struct { - CountryCode *string `json:"country_code,omitempty"` -} - -// PolicyOverrideReason contains user-supplied information about why a policy was overridden. -type PolicyOverrideReason struct { - Code *string `json:"code,omitempty"` - Message *string `json:"message,omitempty"` -} - -// AuditEntry describes the fields that may be represented by various audit-log "action" entries. -// For a list of actions see - https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization#audit-log-actions -type AuditEntry struct { - ActorIP *string `json:"actor_ip,omitempty"` - Action *string `json:"action,omitempty"` // The name of the action that was performed, for example `user.login` or `repo.create`. - Active *bool `json:"active,omitempty"` - ActiveWas *bool `json:"active_was,omitempty"` - Actor *string `json:"actor,omitempty"` // The actor who performed the action. - ActorLocation *ActorLocation `json:"actor_location,omitempty"` - BlockedUser *string `json:"blocked_user,omitempty"` - Business *string `json:"business,omitempty"` - CancelledAt *Timestamp `json:"cancelled_at,omitempty"` - CompletedAt *Timestamp `json:"completed_at,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - Config *HookConfig `json:"config,omitempty"` - ConfigWas *HookConfig `json:"config_was,omitempty"` - ContentType *string `json:"content_type,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - DeployKeyFingerprint *string `json:"deploy_key_fingerprint,omitempty"` - DocumentID *string `json:"_document_id,omitempty"` - Emoji *string `json:"emoji,omitempty"` - EnvironmentName *string `json:"environment_name,omitempty"` - Event *string `json:"event,omitempty"` - Events []string `json:"events,omitempty"` - EventsWere []string `json:"events_were,omitempty"` - Explanation *string `json:"explanation,omitempty"` - ExternalIdentityNameID *string `json:"external_identity_nameid,omitempty"` - ExternalIdentityUsername *string `json:"external_identity_username,omitempty"` - Fingerprint *string `json:"fingerprint,omitempty"` - HashedToken *string `json:"hashed_token,omitempty"` - HeadBranch *string `json:"head_branch,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` - HookID *int64 `json:"hook_id,omitempty"` - IsHostedRunner *bool `json:"is_hosted_runner,omitempty"` - JobName *string `json:"job_name,omitempty"` - JobWorkflowRef *string `json:"job_workflow_ref,omitempty"` - LimitedAvailability *bool `json:"limited_availability,omitempty"` - Message *string `json:"message,omitempty"` - Name *string `json:"name,omitempty"` - OAuthApplicationID *int64 `json:"oauth_application_id,omitempty"` - OldUser *string `json:"old_user,omitempty"` - OldPermission *string `json:"old_permission,omitempty"` // The permission level for membership changes, for example `admin` or `read`. 
- OpenSSHPublicKey *string `json:"openssh_public_key,omitempty"` - OperationType *string `json:"operation_type,omitempty"` - Org *string `json:"org,omitempty"` - OrgID *int64 `json:"org_id,omitempty"` - OverriddenCodes []string `json:"overridden_codes,omitempty"` - Permission *string `json:"permission,omitempty"` // The permission level for membership changes, for example `admin` or `read`. - PreviousVisibility *string `json:"previous_visibility,omitempty"` - ProgrammaticAccessType *string `json:"programmatic_access_type,omitempty"` - PullRequestID *int64 `json:"pull_request_id,omitempty"` - PullRequestTitle *string `json:"pull_request_title,omitempty"` - PullRequestURL *string `json:"pull_request_url,omitempty"` - ReadOnly *string `json:"read_only,omitempty"` - Reasons []*PolicyOverrideReason `json:"reasons,omitempty"` - Repo *string `json:"repo,omitempty"` - Repository *string `json:"repository,omitempty"` - RepositoryPublic *bool `json:"repository_public,omitempty"` - RunAttempt *int64 `json:"run_attempt,omitempty"` - RunnerGroupID *int64 `json:"runner_group_id,omitempty"` - RunnerGroupName *string `json:"runner_group_name,omitempty"` - RunnerID *int64 `json:"runner_id,omitempty"` - RunnerLabels []string `json:"runner_labels,omitempty"` - RunnerName *string `json:"runner_name,omitempty"` - RunNumber *int64 `json:"run_number,omitempty"` - SecretsPassed []string `json:"secrets_passed,omitempty"` - SourceVersion *string `json:"source_version,omitempty"` - StartedAt *Timestamp `json:"started_at,omitempty"` - TargetLogin *string `json:"target_login,omitempty"` - TargetVersion *string `json:"target_version,omitempty"` - Team *string `json:"team,omitempty"` - Timestamp *Timestamp `json:"@timestamp,omitempty"` // The time the audit log event occurred, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). - TokenID *int64 `json:"token_id,omitempty"` - TokenScopes *string `json:"token_scopes,omitempty"` - Topic *string `json:"topic,omitempty"` - TransportProtocolName *string `json:"transport_protocol_name,omitempty"` // A human readable name for the protocol (for example, HTTP or SSH) used to transfer Git data. - TransportProtocol *int `json:"transport_protocol,omitempty"` // The type of protocol (for example, HTTP=1 or SSH=2) used to transfer Git data. - TriggerID *int64 `json:"trigger_id,omitempty"` - User *string `json:"user,omitempty"` // The user that was affected by the action performed (if available). - UserAgent *string `json:"user_agent,omitempty"` - Visibility *string `json:"visibility,omitempty"` // The repository visibility, for example `public` or `private`. - WorkflowID *int64 `json:"workflow_id,omitempty"` - WorkflowRunID *int64 `json:"workflow_run_id,omitempty"` - - Data *AuditEntryData `json:"data,omitempty"` -} - -// AuditEntryData represents additional information stuffed into a `data` field. -type AuditEntryData struct { - OldName *string `json:"old_name,omitempty"` // The previous name of the repository, for a name change - OldLogin *string `json:"old_login,omitempty"` // The previous name of the organization, for a name change -} - -// GetAuditLog gets the audit-log entries for an organization. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/orgs#get-the-audit-log-for-an-organization -// -//meta:operation GET /orgs/{org}/audit-log -func (s *OrganizationsService) GetAuditLog(ctx context.Context, org string, opts *GetAuditLogOptions) ([]*AuditEntry, *Response, error) { - u := fmt.Sprintf("orgs/%v/audit-log", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var auditEntries []*AuditEntry - resp, err := s.client.Do(ctx, req, &auditEntries) - if err != nil { - return nil, resp, err - } - - return auditEntries, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_credential_authorizations.go b/vendor/github.com/google/go-github/v57/github/orgs_credential_authorizations.go deleted file mode 100644 index eed0f0c6..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_credential_authorizations.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" -) - -// CredentialAuthorization represents a credential authorized through SAML SSO. -type CredentialAuthorization struct { - // User login that owns the underlying credential. - Login *string `json:"login,omitempty"` - - // Unique identifier for the credential. - CredentialID *int64 `json:"credential_id,omitempty"` - - // Human-readable description of the credential type. - CredentialType *string `json:"credential_type,omitempty"` - - // Last eight characters of the credential. - // Only included in responses with credential_type of personal access token. - TokenLastEight *string `json:"token_last_eight,omitempty"` - - // Date when the credential was authorized for use. - CredentialAuthorizedAt *Timestamp `json:"credential_authorized_at,omitempty"` - - // Date when the credential was last accessed. - // May be null if it was never accessed. - CredentialAccessedAt *Timestamp `json:"credential_accessed_at,omitempty"` - - // List of oauth scopes the token has been granted. - Scopes []string `json:"scopes,omitempty"` - - // Unique string to distinguish the credential. - // Only included in responses with credential_type of SSH Key. - Fingerprint *string `json:"fingerprint,omitempty"` - - AuthorizedCredentialID *int64 `json:"authorized_credential_id,omitempty"` - - // The title given to the ssh key. - // This will only be present when the credential is an ssh key. - AuthorizedCredentialTitle *string `json:"authorized_credential_title,omitempty"` - - // The note given to the token. - // This will only be present when the credential is a token. - AuthorizedCredentialNote *string `json:"authorized_credential_note,omitempty"` - - // The expiry for the token. - // This will only be present when the credential is a token. - AuthorizedCredentialExpiresAt *Timestamp `json:"authorized_credential_expires_at,omitempty"` -} - -// ListCredentialAuthorizations lists credentials authorized through SAML SSO -// for a given organization. Only available with GitHub Enterprise Cloud. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/orgs#list-saml-sso-authorizations-for-an-organization -// -//meta:operation GET /orgs/{org}/credential-authorizations -func (s *OrganizationsService) ListCredentialAuthorizations(ctx context.Context, org string, opts *ListOptions) ([]*CredentialAuthorization, *Response, error) { - u := fmt.Sprintf("orgs/%v/credential-authorizations", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest(http.MethodGet, u, nil) - if err != nil { - return nil, nil, err - } - - var creds []*CredentialAuthorization - resp, err := s.client.Do(ctx, req, &creds) - if err != nil { - return nil, resp, err - } - - return creds, resp, nil -} - -// RemoveCredentialAuthorization revokes the SAML SSO authorization for a given -// credential within an organization. Only available with GitHub Enterprise Cloud. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/orgs#remove-a-saml-sso-authorization-for-an-organization -// -//meta:operation DELETE /orgs/{org}/credential-authorizations/{credential_id} -func (s *OrganizationsService) RemoveCredentialAuthorization(ctx context.Context, org string, credentialID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/credential-authorizations/%v", org, credentialID) - req, err := s.client.NewRequest(http.MethodDelete, u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_custom_roles.go b/vendor/github.com/google/go-github/v57/github/orgs_custom_roles.go deleted file mode 100644 index 45de896a..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_custom_roles.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// OrganizationCustomRepoRoles represents custom repository roles available in specified organization. -type OrganizationCustomRepoRoles struct { - TotalCount *int `json:"total_count,omitempty"` - CustomRepoRoles []*CustomRepoRoles `json:"custom_roles,omitempty"` -} - -// CustomRepoRoles represents custom repository roles for an organization. -// See https://docs.github.com/enterprise-cloud@latest/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization -// for more information. -type CustomRepoRoles struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - BaseRole *string `json:"base_role,omitempty"` - Permissions []string `json:"permissions,omitempty"` -} - -// ListCustomRepoRoles lists the custom repository roles available in this organization. -// In order to see custom repository roles in an organization, the authenticated user must be an organization owner. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/custom-roles#list-custom-repository-roles-in-an-organization -// -//meta:operation GET /orgs/{org}/custom-repository-roles -func (s *OrganizationsService) ListCustomRepoRoles(ctx context.Context, org string) (*OrganizationCustomRepoRoles, *Response, error) { - u := fmt.Sprintf("orgs/%v/custom-repository-roles", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - customRepoRoles := new(OrganizationCustomRepoRoles) - resp, err := s.client.Do(ctx, req, customRepoRoles) - if err != nil { - return nil, resp, err - } - - return customRepoRoles, resp, nil -} - -// CreateOrUpdateCustomRoleOptions represents options required to create or update a custom repository role. -type CreateOrUpdateCustomRoleOptions struct { - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - BaseRole *string `json:"base_role,omitempty"` - Permissions []string `json:"permissions,omitempty"` -} - -// CreateCustomRepoRole creates a custom repository role in this organization. -// In order to create custom repository roles in an organization, the authenticated user must be an organization owner. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/custom-roles#create-a-custom-repository-role -// -//meta:operation POST /orgs/{org}/custom-repository-roles -func (s *OrganizationsService) CreateCustomRepoRole(ctx context.Context, org string, opts *CreateOrUpdateCustomRoleOptions) (*CustomRepoRoles, *Response, error) { - u := fmt.Sprintf("orgs/%v/custom-repository-roles", org) - - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - resultingRole := new(CustomRepoRoles) - resp, err := s.client.Do(ctx, req, resultingRole) - if err != nil { - return nil, resp, err - } - - return resultingRole, resp, err -} - -// UpdateCustomRepoRole updates a custom repository role in this organization. -// In order to update custom repository roles in an organization, the authenticated user must be an organization owner. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/custom-roles#update-a-custom-repository-role -// -//meta:operation PATCH /orgs/{org}/custom-repository-roles/{role_id} -func (s *OrganizationsService) UpdateCustomRepoRole(ctx context.Context, org, roleID string, opts *CreateOrUpdateCustomRoleOptions) (*CustomRepoRoles, *Response, error) { - u := fmt.Sprintf("orgs/%v/custom-repository-roles/%v", org, roleID) - - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - resultingRole := new(CustomRepoRoles) - resp, err := s.client.Do(ctx, req, resultingRole) - if err != nil { - return nil, resp, err - } - - return resultingRole, resp, err -} - -// DeleteCustomRepoRole deletes an existing custom repository role in this organization. -// In order to delete custom repository roles in an organization, the authenticated user must be an organization owner. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/custom-roles#delete-a-custom-repository-role -// -//meta:operation DELETE /orgs/{org}/custom-repository-roles/{role_id} -func (s *OrganizationsService) DeleteCustomRepoRole(ctx context.Context, org, roleID string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/custom-repository-roles/%v", org, roleID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resultingRole := new(CustomRepoRoles) - resp, err := s.client.Do(ctx, req, resultingRole) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_hooks.go b/vendor/github.com/google/go-github/v57/github/orgs_hooks.go deleted file mode 100644 index c2eef77c..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_hooks.go +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2015 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListHooks lists all Hooks for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#list-organization-webhooks -// -//meta:operation GET /orgs/{org}/hooks -func (s *OrganizationsService) ListHooks(ctx context.Context, org string, opts *ListOptions) ([]*Hook, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var hooks []*Hook - resp, err := s.client.Do(ctx, req, &hooks) - if err != nil { - return nil, resp, err - } - - return hooks, resp, nil -} - -// GetHook returns a single specified Hook. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#get-an-organization-webhook -// -//meta:operation GET /orgs/{org}/hooks/{hook_id} -func (s *OrganizationsService) GetHook(ctx context.Context, org string, id int64) (*Hook, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - hook := new(Hook) - resp, err := s.client.Do(ctx, req, hook) - if err != nil { - return nil, resp, err - } - - return hook, resp, nil -} - -// CreateHook creates a Hook for the specified org. -// Config is a required field. -// -// Note that only a subset of the hook fields are used and hook must -// not be nil. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#create-an-organization-webhook -// -//meta:operation POST /orgs/{org}/hooks -func (s *OrganizationsService) CreateHook(ctx context.Context, org string, hook *Hook) (*Hook, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks", org) - - hookReq := &createHookRequest{ - Name: "web", - Events: hook.Events, - Active: hook.Active, - Config: hook.Config, - } - - req, err := s.client.NewRequest("POST", u, hookReq) - if err != nil { - return nil, nil, err - } - - h := new(Hook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// EditHook updates a specified Hook. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#update-an-organization-webhook -// -//meta:operation PATCH /orgs/{org}/hooks/{hook_id} -func (s *OrganizationsService) EditHook(ctx context.Context, org string, id int64, hook *Hook) (*Hook, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) - req, err := s.client.NewRequest("PATCH", u, hook) - if err != nil { - return nil, nil, err - } - - h := new(Hook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// PingHook triggers a 'ping' event to be sent to the Hook. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#ping-an-organization-webhook -// -//meta:operation POST /orgs/{org}/hooks/{hook_id}/pings -func (s *OrganizationsService) PingHook(ctx context.Context, org string, id int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%d/pings", org, id) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteHook deletes a specified Hook. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#delete-an-organization-webhook -// -//meta:operation DELETE /orgs/{org}/hooks/{hook_id} -func (s *OrganizationsService) DeleteHook(ctx context.Context, org string, id int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_hooks_configuration.go b/vendor/github.com/google/go-github/v57/github/orgs_hooks_configuration.go deleted file mode 100644 index aeb616fc..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_hooks_configuration.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetHookConfiguration returns the configuration for the specified organization webhook. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#get-a-webhook-configuration-for-an-organization -// -//meta:operation GET /orgs/{org}/hooks/{hook_id}/config -func (s *OrganizationsService) GetHookConfiguration(ctx context.Context, org string, id int64) (*HookConfig, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%v/config", org, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - config := new(HookConfig) - resp, err := s.client.Do(ctx, req, config) - if err != nil { - return nil, resp, err - } - - return config, resp, nil -} - -// EditHookConfiguration updates the configuration for the specified organization webhook. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#update-a-webhook-configuration-for-an-organization -// -//meta:operation PATCH /orgs/{org}/hooks/{hook_id}/config -func (s *OrganizationsService) EditHookConfiguration(ctx context.Context, org string, id int64, config *HookConfig) (*HookConfig, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%v/config", org, id) - req, err := s.client.NewRequest("PATCH", u, config) - if err != nil { - return nil, nil, err - } - - c := new(HookConfig) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_hooks_deliveries.go b/vendor/github.com/google/go-github/v57/github/orgs_hooks_deliveries.go deleted file mode 100644 index c1c30124..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_hooks_deliveries.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListHookDeliveries lists webhook deliveries for a webhook configured in an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#list-deliveries-for-an-organization-webhook -// -//meta:operation GET /orgs/{org}/hooks/{hook_id}/deliveries -func (s *OrganizationsService) ListHookDeliveries(ctx context.Context, org string, id int64, opts *ListCursorOptions) ([]*HookDelivery, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%v/deliveries", org, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - deliveries := []*HookDelivery{} - resp, err := s.client.Do(ctx, req, &deliveries) - if err != nil { - return nil, resp, err - } - - return deliveries, resp, nil -} - -// GetHookDelivery returns a delivery for a webhook configured in an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#get-a-webhook-delivery-for-an-organization-webhook -// -//meta:operation GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id} -func (s *OrganizationsService) GetHookDelivery(ctx context.Context, owner string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%v/deliveries/%v", owner, hookID, deliveryID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// RedeliverHookDelivery redelivers a delivery for a webhook configured in an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#redeliver-a-delivery-for-an-organization-webhook -// -//meta:operation POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts -func (s *OrganizationsService) RedeliverHookDelivery(ctx context.Context, owner string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%v/deliveries/%v/attempts", owner, hookID, deliveryID) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_members.go b/vendor/github.com/google/go-github/v57/github/orgs_members.go deleted file mode 100644 index 5bc23657..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_members.go +++ /dev/null @@ -1,422 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Membership represents the status of a user's membership in an organization or team. -type Membership struct { - URL *string `json:"url,omitempty"` - - // State is the user's status within the organization or team. - // Possible values are: "active", "pending" - State *string `json:"state,omitempty"` - - // Role identifies the user's role within the organization or team. - // Possible values for organization membership: - // member - non-owner organization member - // admin - organization owner - // - // Possible values for team membership are: - // member - a normal member of the team - // maintainer - a team maintainer. Able to add/remove other team - // members, promote other team members to team - // maintainer, and edit the team’s name and description - Role *string `json:"role,omitempty"` - - // For organization membership, the API URL of the organization. - OrganizationURL *string `json:"organization_url,omitempty"` - - // For organization membership, the organization the membership is for. - Organization *Organization `json:"organization,omitempty"` - - // For organization membership, the user the membership is for. - User *User `json:"user,omitempty"` -} - -func (m Membership) String() string { - return Stringify(m) -} - -// ListMembersOptions specifies optional parameters to the -// OrganizationsService.ListMembers method. -type ListMembersOptions struct { - // If true (or if the authenticated user is not an owner of the - // organization), list only publicly visible members. - PublicOnly bool `url:"-"` - - // Filter members returned in the list. Possible values are: - // 2fa_disabled, all. Default is "all". - Filter string `url:"filter,omitempty"` - - // Role filters members returned by their role in the organization. - // Possible values are: - // all - all members of the organization, regardless of role - // admin - organization owners - // member - non-owner organization members - // - // Default is "all". - Role string `url:"role,omitempty"` - - ListOptions -} - -// ListMembers lists the members for an organization. If the authenticated -// user is an owner of the organization, this will return both concealed and -// public members, otherwise it will only return public members. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-organization-members -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-public-organization-members -// -//meta:operation GET /orgs/{org}/members -//meta:operation GET /orgs/{org}/public_members -func (s *OrganizationsService) ListMembers(ctx context.Context, org string, opts *ListMembersOptions) ([]*User, *Response, error) { - var u string - if opts != nil && opts.PublicOnly { - u = fmt.Sprintf("orgs/%v/public_members", org) - } else { - u = fmt.Sprintf("orgs/%v/members", org) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var members []*User - resp, err := s.client.Do(ctx, req, &members) - if err != nil { - return nil, resp, err - } - - return members, resp, nil -} - -// IsMember checks if a user is a member of an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#check-organization-membership-for-a-user -// -//meta:operation GET /orgs/{org}/members/{username} -func (s *OrganizationsService) IsMember(ctx context.Context, org, user string) (bool, *Response, error) { - u := fmt.Sprintf("orgs/%v/members/%v", org, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - member, err := parseBoolResponse(err) - return member, resp, err -} - -// IsPublicMember checks if a user is a public member of an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#check-public-organization-membership-for-a-user -// -//meta:operation GET /orgs/{org}/public_members/{username} -func (s *OrganizationsService) IsPublicMember(ctx context.Context, org, user string) (bool, *Response, error) { - u := fmt.Sprintf("orgs/%v/public_members/%v", org, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - member, err := parseBoolResponse(err) - return member, resp, err -} - -// RemoveMember removes a user from all teams of an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#remove-an-organization-member -// -//meta:operation DELETE /orgs/{org}/members/{username} -func (s *OrganizationsService) RemoveMember(ctx context.Context, org, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/members/%v", org, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// PublicizeMembership publicizes a user's membership in an organization. (A -// user cannot publicize the membership for another user.) -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#set-public-organization-membership-for-the-authenticated-user -// -//meta:operation PUT /orgs/{org}/public_members/{username} -func (s *OrganizationsService) PublicizeMembership(ctx context.Context, org, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/public_members/%v", org, user) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ConcealMembership conceals a user's membership in an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/members#remove-public-organization-membership-for-the-authenticated-user -// -//meta:operation DELETE /orgs/{org}/public_members/{username} -func (s *OrganizationsService) ConcealMembership(ctx context.Context, org, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/public_members/%v", org, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListOrgMembershipsOptions specifies optional parameters to the -// OrganizationsService.ListOrgMemberships method. -type ListOrgMembershipsOptions struct { - // Filter memberships to include only those with the specified state. - // Possible values are: "active", "pending". - State string `url:"state,omitempty"` - - ListOptions -} - -// ListOrgMemberships lists the organization memberships for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-organization-memberships-for-the-authenticated-user -// -//meta:operation GET /user/memberships/orgs -func (s *OrganizationsService) ListOrgMemberships(ctx context.Context, opts *ListOrgMembershipsOptions) ([]*Membership, *Response, error) { - u := "user/memberships/orgs" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var memberships []*Membership - resp, err := s.client.Do(ctx, req, &memberships) - if err != nil { - return nil, resp, err - } - - return memberships, resp, nil -} - -// GetOrgMembership gets the membership for a user in a specified organization. -// Passing an empty string for user will get the membership for the -// authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#get-an-organization-membership-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/orgs/members#get-organization-membership-for-a-user -// -//meta:operation GET /orgs/{org}/memberships/{username} -//meta:operation GET /user/memberships/orgs/{org} -func (s *OrganizationsService) GetOrgMembership(ctx context.Context, user, org string) (*Membership, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("orgs/%v/memberships/%v", org, user) - } else { - u = fmt.Sprintf("user/memberships/orgs/%v", org) - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - membership := new(Membership) - resp, err := s.client.Do(ctx, req, membership) - if err != nil { - return nil, resp, err - } - - return membership, resp, nil -} - -// EditOrgMembership edits the membership for user in specified organization. -// Passing an empty string for user will edit the membership for the -// authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/members#set-organization-membership-for-a-user -// GitHub API docs: https://docs.github.com/rest/orgs/members#update-an-organization-membership-for-the-authenticated-user -// -//meta:operation PUT /orgs/{org}/memberships/{username} -//meta:operation PATCH /user/memberships/orgs/{org} -func (s *OrganizationsService) EditOrgMembership(ctx context.Context, user, org string, membership *Membership) (*Membership, *Response, error) { - var u, method string - if user != "" { - u = fmt.Sprintf("orgs/%v/memberships/%v", org, user) - method = "PUT" - } else { - u = fmt.Sprintf("user/memberships/orgs/%v", org) - method = "PATCH" - } - - req, err := s.client.NewRequest(method, u, membership) - if err != nil { - return nil, nil, err - } - - m := new(Membership) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// RemoveOrgMembership removes user from the specified organization. If the -// user has been invited to the organization, this will cancel their invitation. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#remove-organization-membership-for-a-user -// -//meta:operation DELETE /orgs/{org}/memberships/{username} -func (s *OrganizationsService) RemoveOrgMembership(ctx context.Context, user, org string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/memberships/%v", org, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListPendingOrgInvitations returns a list of pending invitations. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-pending-organization-invitations -// -//meta:operation GET /orgs/{org}/invitations -func (s *OrganizationsService) ListPendingOrgInvitations(ctx context.Context, org string, opts *ListOptions) ([]*Invitation, *Response, error) { - u := fmt.Sprintf("orgs/%v/invitations", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pendingInvitations []*Invitation - resp, err := s.client.Do(ctx, req, &pendingInvitations) - if err != nil { - return nil, resp, err - } - - return pendingInvitations, resp, nil -} - -// CreateOrgInvitationOptions specifies the parameters to the OrganizationService.Invite -// method. -type CreateOrgInvitationOptions struct { - // GitHub user ID for the person you are inviting. Not required if you provide Email. - InviteeID *int64 `json:"invitee_id,omitempty"` - // Email address of the person you are inviting, which can be an existing GitHub user. - // Not required if you provide InviteeID - Email *string `json:"email,omitempty"` - // Specify role for new member. Can be one of: - // * admin - Organization owners with full administrative rights to the - // organization and complete access to all repositories and teams. - // * direct_member - Non-owner organization members with ability to see - // other members and join teams by invitation. - // * billing_manager - Non-owner organization members with ability to - // manage the billing settings of your organization. - // Default is "direct_member". - Role *string `json:"role,omitempty"` - TeamID []int64 `json:"team_ids,omitempty"` -} - -// CreateOrgInvitation invites people to an organization by using their GitHub user ID or their email address. 
-// In order to create invitations in an organization, -// the authenticated user must be an organization owner. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#create-an-organization-invitation -// -//meta:operation POST /orgs/{org}/invitations -func (s *OrganizationsService) CreateOrgInvitation(ctx context.Context, org string, opts *CreateOrgInvitationOptions) (*Invitation, *Response, error) { - u := fmt.Sprintf("orgs/%v/invitations", org) - - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - var invitation *Invitation - resp, err := s.client.Do(ctx, req, &invitation) - if err != nil { - return nil, resp, err - } - - return invitation, resp, nil -} - -// ListOrgInvitationTeams lists all teams associated with an invitation. In order to see invitations in an organization, -// the authenticated user must be an organization owner. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-organization-invitation-teams -// -//meta:operation GET /orgs/{org}/invitations/{invitation_id}/teams -func (s *OrganizationsService) ListOrgInvitationTeams(ctx context.Context, org, invitationID string, opts *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/invitations/%v/teams", org, invitationID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var orgInvitationTeams []*Team - resp, err := s.client.Do(ctx, req, &orgInvitationTeams) - if err != nil { - return nil, resp, err - } - - return orgInvitationTeams, resp, nil -} - -// ListFailedOrgInvitations returns a list of failed inviatations. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-failed-organization-invitations -// -//meta:operation GET /orgs/{org}/failed_invitations -func (s *OrganizationsService) ListFailedOrgInvitations(ctx context.Context, org string, opts *ListOptions) ([]*Invitation, *Response, error) { - u := fmt.Sprintf("orgs/%v/failed_invitations", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var failedInvitations []*Invitation - resp, err := s.client.Do(ctx, req, &failedInvitations) - if err != nil { - return nil, resp, err - } - - return failedInvitations, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_outside_collaborators.go b/vendor/github.com/google/go-github/v57/github/orgs_outside_collaborators.go deleted file mode 100644 index 56034d72..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_outside_collaborators.go +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListOutsideCollaboratorsOptions specifies optional parameters to the -// OrganizationsService.ListOutsideCollaborators method. -type ListOutsideCollaboratorsOptions struct { - // Filter outside collaborators returned in the list. Possible values are: - // 2fa_disabled, all. Default is "all". - Filter string `url:"filter,omitempty"` - - ListOptions -} - -// ListOutsideCollaborators lists outside collaborators of organization's repositories. -// This will only work if the authenticated -// user is an owner of the organization. 
-// -// Warning: The API may change without advance notice during the preview period. -// Preview features are not supported for production use. -// -// GitHub API docs: https://docs.github.com/rest/orgs/outside-collaborators#list-outside-collaborators-for-an-organization -// -//meta:operation GET /orgs/{org}/outside_collaborators -func (s *OrganizationsService) ListOutsideCollaborators(ctx context.Context, org string, opts *ListOutsideCollaboratorsOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("orgs/%v/outside_collaborators", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var members []*User - resp, err := s.client.Do(ctx, req, &members) - if err != nil { - return nil, resp, err - } - - return members, resp, nil -} - -// RemoveOutsideCollaborator removes a user from the list of outside collaborators; -// consequently, removing them from all the organization's repositories. -// -// GitHub API docs: https://docs.github.com/rest/orgs/outside-collaborators#remove-outside-collaborator-from-an-organization -// -//meta:operation DELETE /orgs/{org}/outside_collaborators/{username} -func (s *OrganizationsService) RemoveOutsideCollaborator(ctx context.Context, org string, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/outside_collaborators/%v", org, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ConvertMemberToOutsideCollaborator reduces the permission level of a member of the -// organization to that of an outside collaborator. Therefore, they will only -// have access to the repositories that their current team membership allows. -// Responses for converting a non-member or the last owner to an outside collaborator -// are listed in GitHub API docs. -// -// GitHub API docs: https://docs.github.com/rest/orgs/outside-collaborators#convert-an-organization-member-to-outside-collaborator -// -//meta:operation PUT /orgs/{org}/outside_collaborators/{username} -func (s *OrganizationsService) ConvertMemberToOutsideCollaborator(ctx context.Context, org string, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/outside_collaborators/%v", org, user) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_packages.go b/vendor/github.com/google/go-github/v57/github/orgs_packages.go deleted file mode 100644 index 4fb9a63b..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_packages.go +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListPackages lists the packages for an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-packages-for-an-organization -// -//meta:operation GET /orgs/{org}/packages -func (s *OrganizationsService) ListPackages(ctx context.Context, org string, opts *PackageListOptions) ([]*Package, *Response, error) { - u := fmt.Sprintf("orgs/%v/packages", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var packages []*Package - resp, err := s.client.Do(ctx, req, &packages) - if err != nil { - return nil, resp, err - } - - return packages, resp, nil -} - -// GetPackage gets a package by name from an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-for-an-organization -// -//meta:operation GET /orgs/{org}/packages/{package_type}/{package_name} -func (s *OrganizationsService) GetPackage(ctx context.Context, org, packageType, packageName string) (*Package, *Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v", org, packageType, packageName) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pack *Package - resp, err := s.client.Do(ctx, req, &pack) - if err != nil { - return nil, resp, err - } - - return pack, resp, nil -} - -// DeletePackage deletes a package from an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-a-package-for-an-organization -// -//meta:operation DELETE /orgs/{org}/packages/{package_type}/{package_name} -func (s *OrganizationsService) DeletePackage(ctx context.Context, org, packageType, packageName string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v", org, packageType, packageName) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RestorePackage restores a package to an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-a-package-for-an-organization -// -//meta:operation POST /orgs/{org}/packages/{package_type}/{package_name}/restore -func (s *OrganizationsService) RestorePackage(ctx context.Context, org, packageType, packageName string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v/restore", org, packageType, packageName) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// PackageGetAllVersions gets all versions of a package in an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-package-versions-for-a-package-owned-by-an-organization -// -//meta:operation GET /orgs/{org}/packages/{package_type}/{package_name}/versions -func (s *OrganizationsService) PackageGetAllVersions(ctx context.Context, org, packageType, packageName string, opts *PackageListOptions) ([]*PackageVersion, *Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions", org, packageType, packageName) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var versions []*PackageVersion - resp, err := s.client.Do(ctx, req, &versions) - if err != nil { - return nil, resp, err - } - - return versions, resp, nil -} - -// PackageGetVersion gets a specific version of a package in an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-version-for-an-organization -// -//meta:operation GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id} -func (s *OrganizationsService) PackageGetVersion(ctx context.Context, org, packageType, packageName string, packageVersionID int64) (*PackageVersion, *Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions/%v", org, packageType, packageName, packageVersionID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var version *PackageVersion - resp, err := s.client.Do(ctx, req, &version) - if err != nil { - return nil, resp, err - } - - return version, resp, nil -} - -// PackageDeleteVersion deletes a package version from an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-package-version-for-an-organization -// -//meta:operation DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id} -func (s *OrganizationsService) PackageDeleteVersion(ctx context.Context, org, packageType, packageName string, packageVersionID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions/%v", org, packageType, packageName, packageVersionID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// PackageRestoreVersion restores a package version to an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-package-version-for-an-organization -// -//meta:operation POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore -func (s *OrganizationsService) PackageRestoreVersion(ctx context.Context, org, packageType, packageName string, packageVersionID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions/%v/restore", org, packageType, packageName, packageVersionID) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_personal_access_tokens.go b/vendor/github.com/google/go-github/v57/github/orgs_personal_access_tokens.go deleted file mode 100644 index 0d786114..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_personal_access_tokens.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" -) - -// ReviewPersonalAccessTokenRequestOptions specifies the parameters to the ReviewPersonalAccessTokenRequest method. -type ReviewPersonalAccessTokenRequestOptions struct { - Action string `json:"action"` - Reason *string `json:"reason,omitempty"` -} - -// ReviewPersonalAccessTokenRequest approves or denies a pending request to access organization resources via a fine-grained personal access token. -// Only GitHub Apps can call this API, using the `organization_personal_access_token_requests: write` permission. -// `action` can be one of `approve` or `deny`. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/personal-access-tokens#review-a-request-to-access-organization-resources-with-a-fine-grained-personal-access-token -// -//meta:operation POST /orgs/{org}/personal-access-token-requests/{pat_request_id} -func (s *OrganizationsService) ReviewPersonalAccessTokenRequest(ctx context.Context, org string, requestID int64, opts ReviewPersonalAccessTokenRequestOptions) (*Response, error) { - u := fmt.Sprintf("orgs/%v/personal-access-token-requests/%v", org, requestID) - - req, err := s.client.NewRequest(http.MethodPost, u, &opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_projects.go b/vendor/github.com/google/go-github/v57/github/orgs_projects.go deleted file mode 100644 index 454d8cf1..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_projects.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListProjects lists the projects for an organization. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#list-organization-projects -// -//meta:operation GET /orgs/{org}/projects -func (s *OrganizationsService) ListProjects(ctx context.Context, org string, opts *ProjectListOptions) ([]*Project, *Response, error) { - u := fmt.Sprintf("orgs/%v/projects", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - var projects []*Project - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// CreateProject creates a GitHub Project for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#create-an-organization-project -// -//meta:operation POST /orgs/{org}/projects -func (s *OrganizationsService) CreateProject(ctx context.Context, org string, opts *ProjectOptions) (*Project, *Response, error) { - u := fmt.Sprintf("orgs/%v/projects", org) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - project := &Project{} - resp, err := s.client.Do(ctx, req, project) - if err != nil { - return nil, resp, err - } - - return project, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_properties.go b/vendor/github.com/google/go-github/v57/github/orgs_properties.go deleted file mode 100644 index 1daac811..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_properties.go +++ /dev/null @@ -1,198 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// CustomProperty represents an organization custom property object. 
-type CustomProperty struct { - // PropertyName is required for most endpoints except when calling CreateOrUpdateCustomProperty; - // where this is sent in the path and thus can be omitted. - PropertyName *string `json:"property_name,omitempty"` - // Possible values for ValueType are: string, single_select - ValueType string `json:"value_type"` - Required *bool `json:"required,omitempty"` - DefaultValue *string `json:"default_value,omitempty"` - Description *string `json:"description,omitempty"` - AllowedValues []string `json:"allowed_values,omitempty"` -} - -// RepoCustomPropertyValue represents a repository custom property value. -type RepoCustomPropertyValue struct { - RepositoryID int64 `json:"repository_id"` - RepositoryName string `json:"repository_name"` - RepositoryFullName string `json:"repository_full_name"` - Properties []*CustomPropertyValue `json:"properties"` -} - -// CustomPropertyValue represents a custom property value. -type CustomPropertyValue struct { - PropertyName string `json:"property_name"` - Value *string `json:"value,omitempty"` -} - -// GetAllCustomProperties gets all custom properties that are defined for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#get-all-custom-properties-for-an-organization -// -//meta:operation GET /orgs/{org}/properties/schema -func (s *OrganizationsService) GetAllCustomProperties(ctx context.Context, org string) ([]*CustomProperty, *Response, error) { - u := fmt.Sprintf("orgs/%v/properties/schema", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var customProperties []*CustomProperty - resp, err := s.client.Do(ctx, req, &customProperties) - if err != nil { - return nil, resp, err - } - - return customProperties, resp, nil -} - -// CreateOrUpdateCustomProperties creates new or updates existing custom properties that are defined for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#create-or-update-custom-properties-for-an-organization -// -//meta:operation PATCH /orgs/{org}/properties/schema -func (s *OrganizationsService) CreateOrUpdateCustomProperties(ctx context.Context, org string, properties []*CustomProperty) ([]*CustomProperty, *Response, error) { - u := fmt.Sprintf("orgs/%v/properties/schema", org) - - params := struct { - Properties []*CustomProperty `json:"properties"` - }{ - Properties: properties, - } - - req, err := s.client.NewRequest("PATCH", u, params) - if err != nil { - return nil, nil, err - } - - var customProperties []*CustomProperty - resp, err := s.client.Do(ctx, req, &customProperties) - if err != nil { - return nil, resp, err - } - - return customProperties, resp, nil -} - -// GetCustomProperty gets a custom property that is defined for the specified organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#get-a-custom-property-for-an-organization -// -//meta:operation GET /orgs/{org}/properties/schema/{custom_property_name} -func (s *OrganizationsService) GetCustomProperty(ctx context.Context, org, name string) (*CustomProperty, *Response, error) { - u := fmt.Sprintf("orgs/%v/properties/schema/%v", org, name) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var customProperty *CustomProperty - resp, err := s.client.Do(ctx, req, &customProperty) - if err != nil { - return nil, resp, err - } - - return customProperty, resp, nil -} - -// CreateOrUpdateCustomProperty creates a new or updates an existing custom property that is defined for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#create-or-update-a-custom-property-for-an-organization -// -//meta:operation PUT /orgs/{org}/properties/schema/{custom_property_name} -func (s *OrganizationsService) CreateOrUpdateCustomProperty(ctx context.Context, org, customPropertyName string, property *CustomProperty) (*CustomProperty, *Response, error) { - u := fmt.Sprintf("orgs/%v/properties/schema/%v", org, customPropertyName) - - req, err := s.client.NewRequest("PUT", u, property) - if err != nil { - return nil, nil, err - } - - var customProperty *CustomProperty - resp, err := s.client.Do(ctx, req, &customProperty) - if err != nil { - return nil, resp, err - } - - return customProperty, resp, nil -} - -// RemoveCustomProperty removes a custom property that is defined for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#remove-a-custom-property-for-an-organization -// -//meta:operation DELETE /orgs/{org}/properties/schema/{custom_property_name} -func (s *OrganizationsService) RemoveCustomProperty(ctx context.Context, org, customPropertyName string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/properties/schema/%v", org, customPropertyName) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListCustomPropertyValues lists all custom property values for repositories in the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#list-custom-property-values-for-organization-repositories -// -//meta:operation GET /orgs/{org}/properties/values -func (s *OrganizationsService) ListCustomPropertyValues(ctx context.Context, org string, opts *ListOptions) ([]*RepoCustomPropertyValue, *Response, error) { - u := fmt.Sprintf("orgs/%v/properties/values", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var repoCustomPropertyValues []*RepoCustomPropertyValue - resp, err := s.client.Do(ctx, req, &repoCustomPropertyValues) - if err != nil { - return nil, resp, err - } - - return repoCustomPropertyValues, resp, nil -} - -// CreateOrUpdateRepoCustomPropertyValues creates new or updates existing custom property values across multiple repositories for the specified organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#create-or-update-custom-property-values-for-organization-repositories -// -//meta:operation PATCH /orgs/{org}/properties/values -func (s *OrganizationsService) CreateOrUpdateRepoCustomPropertyValues(ctx context.Context, org string, repoNames []string, properties []*CustomProperty) (*Response, error) { - u := fmt.Sprintf("orgs/%v/properties/values", org) - - params := struct { - RepositoryNames []string `json:"repository_names"` - Properties []*CustomProperty `json:"properties"` - }{ - RepositoryNames: repoNames, - Properties: properties, - } - - req, err := s.client.NewRequest("PATCH", u, params) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_rules.go b/vendor/github.com/google/go-github/v57/github/orgs_rules.go deleted file mode 100644 index 37c06a73..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_rules.go +++ /dev/null @@ -1,115 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetAllOrganizationRulesets gets all the rulesets for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/rules#get-all-organization-repository-rulesets -// -//meta:operation GET /orgs/{org}/rulesets -func (s *OrganizationsService) GetAllOrganizationRulesets(ctx context.Context, org string) ([]*Ruleset, *Response, error) { - u := fmt.Sprintf("orgs/%v/rulesets", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rulesets []*Ruleset - resp, err := s.client.Do(ctx, req, &rulesets) - if err != nil { - return nil, resp, err - } - - return rulesets, resp, nil -} - -// CreateOrganizationRuleset creates a ruleset for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/rules#create-an-organization-repository-ruleset -// -//meta:operation POST /orgs/{org}/rulesets -func (s *OrganizationsService) CreateOrganizationRuleset(ctx context.Context, org string, rs *Ruleset) (*Ruleset, *Response, error) { - u := fmt.Sprintf("orgs/%v/rulesets", org) - - req, err := s.client.NewRequest("POST", u, rs) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// GetOrganizationRuleset gets a ruleset from the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/rules#get-an-organization-repository-ruleset -// -//meta:operation GET /orgs/{org}/rulesets/{ruleset_id} -func (s *OrganizationsService) GetOrganizationRuleset(ctx context.Context, org string, rulesetID int64) (*Ruleset, *Response, error) { - u := fmt.Sprintf("orgs/%v/rulesets/%v", org, rulesetID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// UpdateOrganizationRuleset updates a ruleset from the specified organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/rules#update-an-organization-repository-ruleset -// -//meta:operation PUT /orgs/{org}/rulesets/{ruleset_id} -func (s *OrganizationsService) UpdateOrganizationRuleset(ctx context.Context, org string, rulesetID int64, rs *Ruleset) (*Ruleset, *Response, error) { - u := fmt.Sprintf("orgs/%v/rulesets/%v", org, rulesetID) - - req, err := s.client.NewRequest("PUT", u, rs) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// DeleteOrganizationRuleset deletes a ruleset from the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/rules#delete-an-organization-repository-ruleset -// -//meta:operation DELETE /orgs/{org}/rulesets/{ruleset_id} -func (s *OrganizationsService) DeleteOrganizationRuleset(ctx context.Context, org string, rulesetID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/rulesets/%v", org, rulesetID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_security_managers.go b/vendor/github.com/google/go-github/v57/github/orgs_security_managers.go deleted file mode 100644 index 08037727..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_security_managers.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListSecurityManagerTeams lists all security manager teams for an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/security-managers#list-security-manager-teams -// -//meta:operation GET /orgs/{org}/security-managers -func (s *OrganizationsService) ListSecurityManagerTeams(ctx context.Context, org string) ([]*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/security-managers", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// AddSecurityManagerTeam adds a team to the list of security managers for an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/security-managers#add-a-security-manager-team -// -//meta:operation PUT /orgs/{org}/security-managers/teams/{team_slug} -func (s *OrganizationsService) AddSecurityManagerTeam(ctx context.Context, org, team string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/security-managers/teams/%v", org, team) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveSecurityManagerTeam removes a team from the list of security managers for an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/security-managers#remove-a-security-manager-team -// -//meta:operation DELETE /orgs/{org}/security-managers/teams/{team_slug} -func (s *OrganizationsService) RemoveSecurityManagerTeam(ctx context.Context, org, team string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/security-managers/teams/%v", org, team) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_users_blocking.go b/vendor/github.com/google/go-github/v57/github/orgs_users_blocking.go deleted file mode 100644 index 62bd9116..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_users_blocking.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListBlockedUsers lists all the users blocked by an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/blocking#list-users-blocked-by-an-organization -// -//meta:operation GET /orgs/{org}/blocks -func (s *OrganizationsService) ListBlockedUsers(ctx context.Context, org string, opts *ListOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("orgs/%v/blocks", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - var blockedUsers []*User - resp, err := s.client.Do(ctx, req, &blockedUsers) - if err != nil { - return nil, resp, err - } - - return blockedUsers, resp, nil -} - -// IsBlocked reports whether specified user is blocked from an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/blocking#check-if-a-user-is-blocked-by-an-organization -// -//meta:operation GET /orgs/{org}/blocks/{username} -func (s *OrganizationsService) IsBlocked(ctx context.Context, org string, user string) (bool, *Response, error) { - u := fmt.Sprintf("orgs/%v/blocks/%v", org, user) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - resp, err := s.client.Do(ctx, req, nil) - isBlocked, err := parseBoolResponse(err) - return isBlocked, resp, err -} - -// BlockUser blocks specified user from an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/blocking#block-a-user-from-an-organization -// -//meta:operation PUT /orgs/{org}/blocks/{username} -func (s *OrganizationsService) BlockUser(ctx context.Context, org string, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/blocks/%v", org, user) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - return s.client.Do(ctx, req, nil) -} - -// UnblockUser unblocks specified user from an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/blocking#unblock-a-user-from-an-organization -// -//meta:operation DELETE /orgs/{org}/blocks/{username} -func (s *OrganizationsService) UnblockUser(ctx context.Context, org string, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/blocks/%v", org, user) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/packages.go b/vendor/github.com/google/go-github/v57/github/packages.go deleted file mode 100644 index ef7df074..00000000 --- a/vendor/github.com/google/go-github/v57/github/packages.go +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// Package represents a GitHub package. -type Package struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - PackageType *string `json:"package_type,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Owner *User `json:"owner,omitempty"` - PackageVersion *PackageVersion `json:"package_version,omitempty"` - Registry *PackageRegistry `json:"registry,omitempty"` - URL *string `json:"url,omitempty"` - VersionCount *int64 `json:"version_count,omitempty"` - Visibility *string `json:"visibility,omitempty"` - Repository *Repository `json:"repository,omitempty"` -} - -func (p Package) String() string { - return Stringify(p) -} - -// PackageVersion represents a GitHub package version. -type PackageVersion struct { - ID *int64 `json:"id,omitempty"` - Version *string `json:"version,omitempty"` - Summary *string `json:"summary,omitempty"` - Body *string `json:"body,omitempty"` - BodyHTML *string `json:"body_html,omitempty"` - Release *PackageRelease `json:"release,omitempty"` - Manifest *string `json:"manifest,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - TagName *string `json:"tag_name,omitempty"` - TargetCommitish *string `json:"target_commitish,omitempty"` - TargetOID *string `json:"target_oid,omitempty"` - Draft *bool `json:"draft,omitempty"` - Prerelease *bool `json:"prerelease,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - PackageFiles []*PackageFile `json:"package_files,omitempty"` - Author *User `json:"author,omitempty"` - InstallationCommand *string `json:"installation_command,omitempty"` - Metadata *PackageMetadata `json:"metadata,omitempty"` - PackageHTMLURL *string `json:"package_html_url,omitempty"` - Name *string `json:"name,omitempty"` - URL *string `json:"url,omitempty"` -} - -func (pv PackageVersion) String() string { - return Stringify(pv) -} - -// PackageRelease represents a GitHub package version release. 
-type PackageRelease struct { - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - ID *int64 `json:"id,omitempty"` - TagName *string `json:"tag_name,omitempty"` - TargetCommitish *string `json:"target_commitish,omitempty"` - Name *string `json:"name,omitempty"` - Draft *bool `json:"draft,omitempty"` - Author *User `json:"author,omitempty"` - Prerelease *bool `json:"prerelease,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - PublishedAt *Timestamp `json:"published_at,omitempty"` -} - -func (r PackageRelease) String() string { - return Stringify(r) -} - -// PackageFile represents a GitHub package version release file. -type PackageFile struct { - DownloadURL *string `json:"download_url,omitempty"` - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - SHA256 *string `json:"sha256,omitempty"` - SHA1 *string `json:"sha1,omitempty"` - MD5 *string `json:"md5,omitempty"` - ContentType *string `json:"content_type,omitempty"` - State *string `json:"state,omitempty"` - Author *User `json:"author,omitempty"` - Size *int64 `json:"size,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -func (pf PackageFile) String() string { - return Stringify(pf) -} - -// PackageRegistry represents a GitHub package registry. -type PackageRegistry struct { - AboutURL *string `json:"about_url,omitempty"` - Name *string `json:"name,omitempty"` - Type *string `json:"type,omitempty"` - URL *string `json:"url,omitempty"` - Vendor *string `json:"vendor,omitempty"` -} - -func (r PackageRegistry) String() string { - return Stringify(r) -} - -// PackageListOptions represents the optional list options for a package. -type PackageListOptions struct { - // Visibility of packages "public", "internal" or "private". - Visibility *string `url:"visibility,omitempty"` - - // PackageType represents the type of package. - // It can be one of "npm", "maven", "rubygems", "nuget", "docker", or "container". - PackageType *string `url:"package_type,omitempty"` - - // State of package either "active" or "deleted". - State *string `url:"state,omitempty"` - - ListOptions -} - -// PackageMetadata represents metadata from a package. -type PackageMetadata struct { - PackageType *string `json:"package_type,omitempty"` - Container *PackageContainerMetadata `json:"container,omitempty"` -} - -func (r PackageMetadata) String() string { - return Stringify(r) -} - -// PackageContainerMetadata represents container metadata for docker container packages. -type PackageContainerMetadata struct { - Tags []string `json:"tags,omitempty"` -} - -func (r PackageContainerMetadata) String() string { - return Stringify(r) -} diff --git a/vendor/github.com/google/go-github/v57/github/projects.go b/vendor/github.com/google/go-github/v57/github/projects.go deleted file mode 100644 index c5c42f89..00000000 --- a/vendor/github.com/google/go-github/v57/github/projects.go +++ /dev/null @@ -1,634 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ProjectsService provides access to the projects functions in the -// GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/projects -type ProjectsService service - -// Project represents a GitHub Project. 
-type Project struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - ColumnsURL *string `json:"columns_url,omitempty"` - OwnerURL *string `json:"owner_url,omitempty"` - Name *string `json:"name,omitempty"` - Body *string `json:"body,omitempty"` - Number *int `json:"number,omitempty"` - State *string `json:"state,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` - OrganizationPermission *string `json:"organization_permission,omitempty"` - Private *bool `json:"private,omitempty"` - - // The User object that generated the project. - Creator *User `json:"creator,omitempty"` -} - -func (p Project) String() string { - return Stringify(p) -} - -// GetProject gets a GitHub Project for a repo. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#get-a-project -// -//meta:operation GET /projects/{project_id} -func (s *ProjectsService) GetProject(ctx context.Context, id int64) (*Project, *Response, error) { - u := fmt.Sprintf("projects/%v", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - project := &Project{} - resp, err := s.client.Do(ctx, req, project) - if err != nil { - return nil, resp, err - } - - return project, resp, nil -} - -// ProjectOptions specifies the parameters to the -// RepositoriesService.CreateProject and -// ProjectsService.UpdateProject methods. -type ProjectOptions struct { - // The name of the project. (Required for creation; optional for update.) - Name *string `json:"name,omitempty"` - // The body of the project. (Optional.) - Body *string `json:"body,omitempty"` - - // The following field(s) are only applicable for update. - // They should be left with zero values for creation. - - // State of the project. Either "open" or "closed". (Optional.) - State *string `json:"state,omitempty"` - // The permission level that all members of the project's organization - // will have on this project. - // Setting the organization permission is only available - // for organization projects. (Optional.) - OrganizationPermission *string `json:"organization_permission,omitempty"` - // Sets visibility of the project within the organization. - // Setting visibility is only available - // for organization projects.(Optional.) - Private *bool `json:"private,omitempty"` -} - -// UpdateProject updates a repository project. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#update-a-project -// -//meta:operation PATCH /projects/{project_id} -func (s *ProjectsService) UpdateProject(ctx context.Context, id int64, opts *ProjectOptions) (*Project, *Response, error) { - u := fmt.Sprintf("projects/%v", id) - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - project := &Project{} - resp, err := s.client.Do(ctx, req, project) - if err != nil { - return nil, resp, err - } - - return project, resp, nil -} - -// DeleteProject deletes a GitHub Project from a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/projects/projects#delete-a-project -// -//meta:operation DELETE /projects/{project_id} -func (s *ProjectsService) DeleteProject(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("projects/%v", id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - return s.client.Do(ctx, req, nil) -} - -// ProjectColumn represents a column of a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/repos/projects/ -type ProjectColumn struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - URL *string `json:"url,omitempty"` - ProjectURL *string `json:"project_url,omitempty"` - CardsURL *string `json:"cards_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -// ListProjectColumns lists the columns of a GitHub Project for a repo. -// -// GitHub API docs: https://docs.github.com/rest/projects/columns#list-project-columns -// -//meta:operation GET /projects/{project_id}/columns -func (s *ProjectsService) ListProjectColumns(ctx context.Context, projectID int64, opts *ListOptions) ([]*ProjectColumn, *Response, error) { - u := fmt.Sprintf("projects/%v/columns", projectID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - columns := []*ProjectColumn{} - resp, err := s.client.Do(ctx, req, &columns) - if err != nil { - return nil, resp, err - } - - return columns, resp, nil -} - -// GetProjectColumn gets a column of a GitHub Project for a repo. -// -// GitHub API docs: https://docs.github.com/rest/projects/columns#get-a-project-column -// -//meta:operation GET /projects/columns/{column_id} -func (s *ProjectsService) GetProjectColumn(ctx context.Context, id int64) (*ProjectColumn, *Response, error) { - u := fmt.Sprintf("projects/columns/%v", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - column := &ProjectColumn{} - resp, err := s.client.Do(ctx, req, column) - if err != nil { - return nil, resp, err - } - - return column, resp, nil -} - -// ProjectColumnOptions specifies the parameters to the -// ProjectsService.CreateProjectColumn and -// ProjectsService.UpdateProjectColumn methods. -type ProjectColumnOptions struct { - // The name of the project column. (Required for creation and update.) - Name string `json:"name"` -} - -// CreateProjectColumn creates a column for the specified (by number) project. 
-//
-// GitHub API docs: https://docs.github.com/rest/projects/columns#create-a-project-column
-//
-//meta:operation POST /projects/{project_id}/columns
-func (s *ProjectsService) CreateProjectColumn(ctx context.Context, projectID int64, opts *ProjectColumnOptions) (*ProjectColumn, *Response, error) {
-	u := fmt.Sprintf("projects/%v/columns", projectID)
-	req, err := s.client.NewRequest("POST", u, opts)
-	if err != nil {
-		return nil, nil, err
-	}
-
-	// TODO: remove custom Accept headers when APIs fully launch.
-	req.Header.Set("Accept", mediaTypeProjectsPreview)
-
-	column := &ProjectColumn{}
-	resp, err := s.client.Do(ctx, req, column)
-	if err != nil {
-		return nil, resp, err
-	}
-
-	return column, resp, nil
-}
-
-// UpdateProjectColumn updates a column of a GitHub Project.
-//
-// GitHub API docs: https://docs.github.com/rest/projects/columns#update-an-existing-project-column
-//
-//meta:operation PATCH /projects/columns/{column_id}
-func (s *ProjectsService) UpdateProjectColumn(ctx context.Context, columnID int64, opts *ProjectColumnOptions) (*ProjectColumn, *Response, error) {
-	u := fmt.Sprintf("projects/columns/%v", columnID)
-	req, err := s.client.NewRequest("PATCH", u, opts)
-	if err != nil {
-		return nil, nil, err
-	}
-
-	// TODO: remove custom Accept headers when APIs fully launch.
-	req.Header.Set("Accept", mediaTypeProjectsPreview)
-
-	column := &ProjectColumn{}
-	resp, err := s.client.Do(ctx, req, column)
-	if err != nil {
-		return nil, resp, err
-	}
-
-	return column, resp, nil
-}
-
-// DeleteProjectColumn deletes a column from a GitHub Project.
-//
-// GitHub API docs: https://docs.github.com/rest/projects/columns#delete-a-project-column
-//
-//meta:operation DELETE /projects/columns/{column_id}
-func (s *ProjectsService) DeleteProjectColumn(ctx context.Context, columnID int64) (*Response, error) {
-	u := fmt.Sprintf("projects/columns/%v", columnID)
-	req, err := s.client.NewRequest("DELETE", u, nil)
-	if err != nil {
-		return nil, err
-	}
-
-	// TODO: remove custom Accept header when this API fully launches.
-	req.Header.Set("Accept", mediaTypeProjectsPreview)
-
-	return s.client.Do(ctx, req, nil)
-}
-
-// ProjectColumnMoveOptions specifies the parameters to the
-// ProjectsService.MoveProjectColumn method.
-type ProjectColumnMoveOptions struct {
-	// Position can be one of "first", "last", or "after:<column-id>", where
-	// <column-id> is the ID of a column in the same project. (Required.)
-	Position string `json:"position"`
-}
-
-// MoveProjectColumn moves a column within a GitHub Project.
-//
-// GitHub API docs: https://docs.github.com/rest/projects/columns#move-a-project-column
-//
-//meta:operation POST /projects/columns/{column_id}/moves
-func (s *ProjectsService) MoveProjectColumn(ctx context.Context, columnID int64, opts *ProjectColumnMoveOptions) (*Response, error) {
-	u := fmt.Sprintf("projects/columns/%v/moves", columnID)
-	req, err := s.client.NewRequest("POST", u, opts)
-	if err != nil {
-		return nil, err
-	}
-
-	// TODO: remove custom Accept header when this API fully launches.
-	req.Header.Set("Accept", mediaTypeProjectsPreview)
-
-	return s.client.Do(ctx, req, nil)
-}
-
-// ProjectCard represents a card in a column of a GitHub Project.
-// -// GitHub API docs: https://docs.github.com/rest/projects/cards/#get-a-project-card -type ProjectCard struct { - URL *string `json:"url,omitempty"` - ColumnURL *string `json:"column_url,omitempty"` - ContentURL *string `json:"content_url,omitempty"` - ID *int64 `json:"id,omitempty"` - Note *string `json:"note,omitempty"` - Creator *User `json:"creator,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Archived *bool `json:"archived,omitempty"` - - // The following fields are only populated by Webhook events. - ColumnID *int64 `json:"column_id,omitempty"` - - // The following fields are only populated by Events API. - ProjectID *int64 `json:"project_id,omitempty"` - ProjectURL *string `json:"project_url,omitempty"` - ColumnName *string `json:"column_name,omitempty"` - PreviousColumnName *string `json:"previous_column_name,omitempty"` // Populated in "moved_columns_in_project" event deliveries. -} - -// ProjectCardListOptions specifies the optional parameters to the -// ProjectsService.ListProjectCards method. -type ProjectCardListOptions struct { - // ArchivedState is used to list all, archived, or not_archived project cards. - // Defaults to not_archived when you omit this parameter. - ArchivedState *string `url:"archived_state,omitempty"` - - ListOptions -} - -// ListProjectCards lists the cards in a column of a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/projects/cards#list-project-cards -// -//meta:operation GET /projects/columns/{column_id}/cards -func (s *ProjectsService) ListProjectCards(ctx context.Context, columnID int64, opts *ProjectCardListOptions) ([]*ProjectCard, *Response, error) { - u := fmt.Sprintf("projects/columns/%v/cards", columnID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - cards := []*ProjectCard{} - resp, err := s.client.Do(ctx, req, &cards) - if err != nil { - return nil, resp, err - } - - return cards, resp, nil -} - -// GetProjectCard gets a card in a column of a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/projects/cards#get-a-project-card -// -//meta:operation GET /projects/columns/cards/{card_id} -func (s *ProjectsService) GetProjectCard(ctx context.Context, cardID int64) (*ProjectCard, *Response, error) { - u := fmt.Sprintf("projects/columns/cards/%v", cardID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - card := &ProjectCard{} - resp, err := s.client.Do(ctx, req, card) - if err != nil { - return nil, resp, err - } - - return card, resp, nil -} - -// ProjectCardOptions specifies the parameters to the -// ProjectsService.CreateProjectCard and -// ProjectsService.UpdateProjectCard methods. -type ProjectCardOptions struct { - // The note of the card. Note and ContentID are mutually exclusive. - Note string `json:"note,omitempty"` - // The ID (not Number) of the Issue to associate with this card. - // Note and ContentID are mutually exclusive. - ContentID int64 `json:"content_id,omitempty"` - // The type of content to associate with this card. 
Possible values are: "Issue" and "PullRequest".
-	ContentType string `json:"content_type,omitempty"`
-	// Use true to archive a project card.
-	// Specify false if you need to restore a previously archived project card.
-	Archived *bool `json:"archived,omitempty"`
-}
-
-// CreateProjectCard creates a card in the specified column of a GitHub Project.
-//
-// GitHub API docs: https://docs.github.com/rest/projects/cards#create-a-project-card
-//
-//meta:operation POST /projects/columns/{column_id}/cards
-func (s *ProjectsService) CreateProjectCard(ctx context.Context, columnID int64, opts *ProjectCardOptions) (*ProjectCard, *Response, error) {
-	u := fmt.Sprintf("projects/columns/%v/cards", columnID)
-	req, err := s.client.NewRequest("POST", u, opts)
-	if err != nil {
-		return nil, nil, err
-	}
-
-	// TODO: remove custom Accept headers when APIs fully launch.
-	req.Header.Set("Accept", mediaTypeProjectsPreview)
-
-	card := &ProjectCard{}
-	resp, err := s.client.Do(ctx, req, card)
-	if err != nil {
-		return nil, resp, err
-	}
-
-	return card, resp, nil
-}
-
-// UpdateProjectCard updates a card of a GitHub Project.
-//
-// GitHub API docs: https://docs.github.com/rest/projects/cards#update-an-existing-project-card
-//
-//meta:operation PATCH /projects/columns/cards/{card_id}
-func (s *ProjectsService) UpdateProjectCard(ctx context.Context, cardID int64, opts *ProjectCardOptions) (*ProjectCard, *Response, error) {
-	u := fmt.Sprintf("projects/columns/cards/%v", cardID)
-	req, err := s.client.NewRequest("PATCH", u, opts)
-	if err != nil {
-		return nil, nil, err
-	}
-
-	// TODO: remove custom Accept headers when APIs fully launch.
-	req.Header.Set("Accept", mediaTypeProjectsPreview)
-
-	card := &ProjectCard{}
-	resp, err := s.client.Do(ctx, req, card)
-	if err != nil {
-		return nil, resp, err
-	}
-
-	return card, resp, nil
-}
-
-// DeleteProjectCard deletes a card from a GitHub Project.
-//
-// GitHub API docs: https://docs.github.com/rest/projects/cards#delete-a-project-card
-//
-//meta:operation DELETE /projects/columns/cards/{card_id}
-func (s *ProjectsService) DeleteProjectCard(ctx context.Context, cardID int64) (*Response, error) {
-	u := fmt.Sprintf("projects/columns/cards/%v", cardID)
-	req, err := s.client.NewRequest("DELETE", u, nil)
-	if err != nil {
-		return nil, err
-	}
-
-	// TODO: remove custom Accept header when this API fully launches.
-	req.Header.Set("Accept", mediaTypeProjectsPreview)
-
-	return s.client.Do(ctx, req, nil)
-}
-
-// ProjectCardMoveOptions specifies the parameters to the
-// ProjectsService.MoveProjectCard method.
-type ProjectCardMoveOptions struct {
-	// Position can be one of "top", "bottom", or "after:<card-id>", where
-	// <card-id> is the ID of a card in the same project.
-	Position string `json:"position"`
-	// ColumnID is the ID of a column in the same project. Note that ColumnID
-	// is required when using Position "after:<card-id>" when that card is in
-	// another column; otherwise it is optional.
-	ColumnID int64 `json:"column_id,omitempty"`
-}
-
-// MoveProjectCard moves a card within a GitHub Project.
-// -// GitHub API docs: https://docs.github.com/rest/projects/cards#move-a-project-card -// -//meta:operation POST /projects/columns/cards/{card_id}/moves -func (s *ProjectsService) MoveProjectCard(ctx context.Context, cardID int64, opts *ProjectCardMoveOptions) (*Response, error) { - u := fmt.Sprintf("projects/columns/cards/%v/moves", cardID) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - return s.client.Do(ctx, req, nil) -} - -// ProjectCollaboratorOptions specifies the optional parameters to the -// ProjectsService.AddProjectCollaborator method. -type ProjectCollaboratorOptions struct { - // Permission specifies the permission to grant to the collaborator. - // Possible values are: - // "read" - can read, but not write to or administer this project. - // "write" - can read and write, but not administer this project. - // "admin" - can read, write and administer this project. - // - // Default value is "write" - Permission *string `json:"permission,omitempty"` -} - -// AddProjectCollaborator adds a collaborator to an organization project and sets -// their permission level. You must be an organization owner or a project admin to add a collaborator. -// -// GitHub API docs: https://docs.github.com/rest/projects/collaborators#add-project-collaborator -// -//meta:operation PUT /projects/{project_id}/collaborators/{username} -func (s *ProjectsService) AddProjectCollaborator(ctx context.Context, id int64, username string, opts *ProjectCollaboratorOptions) (*Response, error) { - u := fmt.Sprintf("projects/%v/collaborators/%v", id, username) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - return s.client.Do(ctx, req, nil) -} - -// RemoveProjectCollaborator removes a collaborator from an organization project. -// You must be an organization owner or a project admin to remove a collaborator. -// -// GitHub API docs: https://docs.github.com/rest/projects/collaborators#remove-user-as-a-collaborator -// -//meta:operation DELETE /projects/{project_id}/collaborators/{username} -func (s *ProjectsService) RemoveProjectCollaborator(ctx context.Context, id int64, username string) (*Response, error) { - u := fmt.Sprintf("projects/%v/collaborators/%v", id, username) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - return s.client.Do(ctx, req, nil) -} - -// ListCollaboratorOptions specifies the optional parameters to the -// ProjectsService.ListProjectCollaborators method. -type ListCollaboratorOptions struct { - // Affiliation specifies how collaborators should be filtered by their affiliation. - // Possible values are: - // "outside" - All outside collaborators of an organization-owned repository - // "direct" - All collaborators with permissions to an organization-owned repository, - // regardless of organization membership status - // "all" - All collaborators the authenticated user can see - // - // Default value is "all". - Affiliation *string `url:"affiliation,omitempty"` - - ListOptions -} - -// ListProjectCollaborators lists the collaborators for an organization project. 
For a project, -// the list of collaborators includes outside collaborators, organization members that are direct -// collaborators, organization members with access through team memberships, organization members -// with access through default organization permissions, and organization owners. You must be an -// organization owner or a project admin to list collaborators. -// -// GitHub API docs: https://docs.github.com/rest/projects/collaborators#list-project-collaborators -// -//meta:operation GET /projects/{project_id}/collaborators -func (s *ProjectsService) ListProjectCollaborators(ctx context.Context, id int64, opts *ListCollaboratorOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("projects/%v/collaborators", id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// ProjectPermissionLevel represents the permission level an organization -// member has for a given project. -type ProjectPermissionLevel struct { - // Possible values: "admin", "write", "read", "none" - Permission *string `json:"permission,omitempty"` - - User *User `json:"user,omitempty"` -} - -// ReviewProjectCollaboratorPermission returns the collaborator's permission level for an organization -// project. Possible values for the permission key: "admin", "write", "read", "none". -// You must be an organization owner or a project admin to review a user's permission level. -// -// GitHub API docs: https://docs.github.com/rest/projects/collaborators#get-project-permission-for-a-user -// -//meta:operation GET /projects/{project_id}/collaborators/{username}/permission -func (s *ProjectsService) ReviewProjectCollaboratorPermission(ctx context.Context, id int64, username string) (*ProjectPermissionLevel, *Response, error) { - u := fmt.Sprintf("projects/%v/collaborators/%v/permission", id, username) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - ppl := new(ProjectPermissionLevel) - resp, err := s.client.Do(ctx, req, ppl) - if err != nil { - return nil, resp, err - } - return ppl, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/pulls.go b/vendor/github.com/google/go-github/v57/github/pulls.go deleted file mode 100644 index 80df9fa6..00000000 --- a/vendor/github.com/google/go-github/v57/github/pulls.go +++ /dev/null @@ -1,508 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" - "fmt" -) - -// PullRequestsService handles communication with the pull request related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/pulls/ -type PullRequestsService service - -// PullRequestAutoMerge represents the "auto_merge" response for a PullRequest. 
-type PullRequestAutoMerge struct { - EnabledBy *User `json:"enabled_by,omitempty"` - MergeMethod *string `json:"merge_method,omitempty"` - CommitTitle *string `json:"commit_title,omitempty"` - CommitMessage *string `json:"commit_message,omitempty"` -} - -// PullRequest represents a GitHub pull request on a repository. -type PullRequest struct { - ID *int64 `json:"id,omitempty"` - Number *int `json:"number,omitempty"` - State *string `json:"state,omitempty"` - Locked *bool `json:"locked,omitempty"` - Title *string `json:"title,omitempty"` - Body *string `json:"body,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - MergedAt *Timestamp `json:"merged_at,omitempty"` - Labels []*Label `json:"labels,omitempty"` - User *User `json:"user,omitempty"` - Draft *bool `json:"draft,omitempty"` - Merged *bool `json:"merged,omitempty"` - Mergeable *bool `json:"mergeable,omitempty"` - MergeableState *string `json:"mergeable_state,omitempty"` - MergedBy *User `json:"merged_by,omitempty"` - MergeCommitSHA *string `json:"merge_commit_sha,omitempty"` - Rebaseable *bool `json:"rebaseable,omitempty"` - Comments *int `json:"comments,omitempty"` - Commits *int `json:"commits,omitempty"` - Additions *int `json:"additions,omitempty"` - Deletions *int `json:"deletions,omitempty"` - ChangedFiles *int `json:"changed_files,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - IssueURL *string `json:"issue_url,omitempty"` - StatusesURL *string `json:"statuses_url,omitempty"` - DiffURL *string `json:"diff_url,omitempty"` - PatchURL *string `json:"patch_url,omitempty"` - CommitsURL *string `json:"commits_url,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - ReviewCommentsURL *string `json:"review_comments_url,omitempty"` - ReviewCommentURL *string `json:"review_comment_url,omitempty"` - ReviewComments *int `json:"review_comments,omitempty"` - Assignee *User `json:"assignee,omitempty"` - Assignees []*User `json:"assignees,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - MaintainerCanModify *bool `json:"maintainer_can_modify,omitempty"` - AuthorAssociation *string `json:"author_association,omitempty"` - NodeID *string `json:"node_id,omitempty"` - RequestedReviewers []*User `json:"requested_reviewers,omitempty"` - AutoMerge *PullRequestAutoMerge `json:"auto_merge,omitempty"` - - // RequestedTeams is populated as part of the PullRequestEvent. - // See, https://docs.github.com/developers/webhooks-and-events/github-event-types#pullrequestevent for an example. - RequestedTeams []*Team `json:"requested_teams,omitempty"` - - Links *PRLinks `json:"_links,omitempty"` - Head *PullRequestBranch `json:"head,omitempty"` - Base *PullRequestBranch `json:"base,omitempty"` - - // ActiveLockReason is populated only when LockReason is provided while locking the pull request. - // Possible values are: "off-topic", "too heated", "resolved", and "spam". - ActiveLockReason *string `json:"active_lock_reason,omitempty"` -} - -func (p PullRequest) String() string { - return Stringify(p) -} - -// PRLink represents a single link object from GitHub pull request _links. -type PRLink struct { - HRef *string `json:"href,omitempty"` -} - -// PRLinks represents the "_links" object in a GitHub pull request. 
-type PRLinks struct { - Self *PRLink `json:"self,omitempty"` - HTML *PRLink `json:"html,omitempty"` - Issue *PRLink `json:"issue,omitempty"` - Comments *PRLink `json:"comments,omitempty"` - ReviewComments *PRLink `json:"review_comments,omitempty"` - ReviewComment *PRLink `json:"review_comment,omitempty"` - Commits *PRLink `json:"commits,omitempty"` - Statuses *PRLink `json:"statuses,omitempty"` -} - -// PullRequestBranch represents a base or head branch in a GitHub pull request. -type PullRequestBranch struct { - Label *string `json:"label,omitempty"` - Ref *string `json:"ref,omitempty"` - SHA *string `json:"sha,omitempty"` - Repo *Repository `json:"repo,omitempty"` - User *User `json:"user,omitempty"` -} - -// PullRequestListOptions specifies the optional parameters to the -// PullRequestsService.List method. -type PullRequestListOptions struct { - // State filters pull requests based on their state. Possible values are: - // open, closed, all. Default is "open". - State string `url:"state,omitempty"` - - // Head filters pull requests by head user and branch name in the format of: - // "user:ref-name". - Head string `url:"head,omitempty"` - - // Base filters pull requests by base branch name. - Base string `url:"base,omitempty"` - - // Sort specifies how to sort pull requests. Possible values are: created, - // updated, popularity, long-running. Default is "created". - Sort string `url:"sort,omitempty"` - - // Direction in which to sort pull requests. Possible values are: asc, desc. - // If Sort is "created" or not specified, Default is "desc", otherwise Default - // is "asc" - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// List the pull requests for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#list-pull-requests -// -//meta:operation GET /repos/{owner}/{repo}/pulls -func (s *PullRequestsService) List(ctx context.Context, owner string, repo string, opts *PullRequestListOptions) ([]*PullRequest, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pulls []*PullRequest - resp, err := s.client.Do(ctx, req, &pulls) - if err != nil { - return nil, resp, err - } - - return pulls, resp, nil -} - -// ListPullRequestsWithCommit returns pull requests associated with a commit SHA. -// -// The results may include open and closed pull requests. -// By default, the PullRequestListOptions State filters for "open". -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#list-pull-requests-associated-with-a-commit -// -//meta:operation GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls -func (s *PullRequestsService) ListPullRequestsWithCommit(ctx context.Context, owner, repo, sha string, opts *ListOptions) ([]*PullRequest, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/pulls", owner, repo, sha) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeListPullsOrBranchesForCommitPreview) - var pulls []*PullRequest - resp, err := s.client.Do(ctx, req, &pulls) - if err != nil { - return nil, resp, err - } - - return pulls, resp, nil -} - -// Get a single pull request. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#get-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number} -func (s *PullRequestsService) Get(ctx context.Context, owner string, repo string, number int) (*PullRequest, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d", owner, repo, number) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - pull := new(PullRequest) - resp, err := s.client.Do(ctx, req, pull) - if err != nil { - return nil, resp, err - } - - return pull, resp, nil -} - -// GetRaw gets a single pull request in raw (diff or patch) format. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#get-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number} -func (s *PullRequestsService) GetRaw(ctx context.Context, owner string, repo string, number int, opts RawOptions) (string, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d", owner, repo, number) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", nil, err - } - - switch opts.Type { - case Diff: - req.Header.Set("Accept", mediaTypeV3Diff) - case Patch: - req.Header.Set("Accept", mediaTypeV3Patch) - default: - return "", nil, fmt.Errorf("unsupported raw type %d", opts.Type) - } - - var buf bytes.Buffer - resp, err := s.client.Do(ctx, req, &buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// NewPullRequest represents a new pull request to be created. -type NewPullRequest struct { - Title *string `json:"title,omitempty"` - Head *string `json:"head,omitempty"` - HeadRepo *string `json:"head_repo,omitempty"` - Base *string `json:"base,omitempty"` - Body *string `json:"body,omitempty"` - Issue *int `json:"issue,omitempty"` - MaintainerCanModify *bool `json:"maintainer_can_modify,omitempty"` - Draft *bool `json:"draft,omitempty"` -} - -// Create a new pull request on the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#create-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls -func (s *PullRequestsService) Create(ctx context.Context, owner string, repo string, pull *NewPullRequest) (*PullRequest, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls", owner, repo) - req, err := s.client.NewRequest("POST", u, pull) - if err != nil { - return nil, nil, err - } - - p := new(PullRequest) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// PullRequestBranchUpdateOptions specifies the optional parameters to the -// PullRequestsService.UpdateBranch method. -type PullRequestBranchUpdateOptions struct { - // ExpectedHeadSHA specifies the most recent commit on the pull request's branch. - // Default value is the SHA of the pull request's current HEAD ref. - ExpectedHeadSHA *string `json:"expected_head_sha,omitempty"` -} - -// PullRequestBranchUpdateResponse specifies the response of pull request branch update. -type PullRequestBranchUpdateResponse struct { - Message *string `json:"message,omitempty"` - URL *string `json:"url,omitempty"` -} - -// UpdateBranch updates the pull request branch with latest upstream changes. -// -// This method might return an AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it has now scheduled the update of the pull request branch in a background task. 
-// A follow up request, after a delay of a second or so, should result -// in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#update-a-pull-request-branch -// -//meta:operation PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch -func (s *PullRequestsService) UpdateBranch(ctx context.Context, owner, repo string, number int, opts *PullRequestBranchUpdateOptions) (*PullRequestBranchUpdateResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/update-branch", owner, repo, number) - - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeUpdatePullRequestBranchPreview) - - p := new(PullRequestBranchUpdateResponse) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -type pullRequestUpdate struct { - Title *string `json:"title,omitempty"` - Body *string `json:"body,omitempty"` - State *string `json:"state,omitempty"` - Base *string `json:"base,omitempty"` - MaintainerCanModify *bool `json:"maintainer_can_modify,omitempty"` -} - -// Edit a pull request. -// pull must not be nil. -// -// The following fields are editable: Title, Body, State, Base.Ref and MaintainerCanModify. -// Base.Ref updates the base branch of the pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#update-a-pull-request -// -//meta:operation PATCH /repos/{owner}/{repo}/pulls/{pull_number} -func (s *PullRequestsService) Edit(ctx context.Context, owner string, repo string, number int, pull *PullRequest) (*PullRequest, *Response, error) { - if pull == nil { - return nil, nil, fmt.Errorf("pull must be provided") - } - - u := fmt.Sprintf("repos/%v/%v/pulls/%d", owner, repo, number) - - update := &pullRequestUpdate{ - Title: pull.Title, - Body: pull.Body, - State: pull.State, - MaintainerCanModify: pull.MaintainerCanModify, - } - // avoid updating the base branch when closing the Pull Request - // - otherwise the GitHub API server returns a "Validation Failed" error: - // "Cannot change base branch of closed pull request". - if pull.Base != nil && pull.GetState() != "closed" { - update.Base = pull.Base.Ref - } - - req, err := s.client.NewRequest("PATCH", u, update) - if err != nil { - return nil, nil, err - } - - p := new(PullRequest) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// ListCommits lists the commits in a pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#list-commits-on-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/commits -func (s *PullRequestsService) ListCommits(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*RepositoryCommit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/commits", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var commits []*RepositoryCommit - resp, err := s.client.Do(ctx, req, &commits) - if err != nil { - return nil, resp, err - } - - return commits, resp, nil -} - -// ListFiles lists the files in a pull request. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#list-pull-requests-files -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/files -func (s *PullRequestsService) ListFiles(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*CommitFile, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/files", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var commitFiles []*CommitFile - resp, err := s.client.Do(ctx, req, &commitFiles) - if err != nil { - return nil, resp, err - } - - return commitFiles, resp, nil -} - -// IsMerged checks if a pull request has been merged. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#check-if-a-pull-request-has-been-merged -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/merge -func (s *PullRequestsService) IsMerged(ctx context.Context, owner string, repo string, number int) (bool, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/merge", owner, repo, number) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - merged, err := parseBoolResponse(err) - return merged, resp, err -} - -// PullRequestMergeResult represents the result of merging a pull request. -type PullRequestMergeResult struct { - SHA *string `json:"sha,omitempty"` - Merged *bool `json:"merged,omitempty"` - Message *string `json:"message,omitempty"` -} - -// PullRequestOptions lets you define how a pull request will be merged. -type PullRequestOptions struct { - CommitTitle string // Title for the automatic commit message. (Optional.) - SHA string // SHA that pull request head must match to allow merge. (Optional.) - - // The merge method to use. Possible values include: "merge", "squash", and "rebase" with the default being merge. (Optional.) - MergeMethod string - - // If false, an empty string commit message will use the default commit message. If true, an empty string commit message will be used. - DontDefaultIfBlank bool -} - -type pullRequestMergeRequest struct { - CommitMessage *string `json:"commit_message,omitempty"` - CommitTitle string `json:"commit_title,omitempty"` - MergeMethod string `json:"merge_method,omitempty"` - SHA string `json:"sha,omitempty"` -} - -// Merge a pull request. -// commitMessage is an extra detail to append to automatic commit message. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#merge-a-pull-request -// -//meta:operation PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge -func (s *PullRequestsService) Merge(ctx context.Context, owner string, repo string, number int, commitMessage string, options *PullRequestOptions) (*PullRequestMergeResult, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/merge", owner, repo, number) - - pullRequestBody := &pullRequestMergeRequest{} - if commitMessage != "" { - pullRequestBody.CommitMessage = &commitMessage - } - if options != nil { - pullRequestBody.CommitTitle = options.CommitTitle - pullRequestBody.MergeMethod = options.MergeMethod - pullRequestBody.SHA = options.SHA - if options.DontDefaultIfBlank && commitMessage == "" { - pullRequestBody.CommitMessage = &commitMessage - } - } - req, err := s.client.NewRequest("PUT", u, pullRequestBody) - if err != nil { - return nil, nil, err - } - - mergeResult := new(PullRequestMergeResult) - resp, err := s.client.Do(ctx, req, mergeResult) - if err != nil { - return nil, resp, err - } - - return mergeResult, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/pulls_comments.go b/vendor/github.com/google/go-github/v57/github/pulls_comments.go deleted file mode 100644 index a9ffe8d7..00000000 --- a/vendor/github.com/google/go-github/v57/github/pulls_comments.go +++ /dev/null @@ -1,217 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strings" - "time" -) - -// PullRequestComment represents a comment left on a pull request. -type PullRequestComment struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - InReplyTo *int64 `json:"in_reply_to_id,omitempty"` - Body *string `json:"body,omitempty"` - Path *string `json:"path,omitempty"` - DiffHunk *string `json:"diff_hunk,omitempty"` - PullRequestReviewID *int64 `json:"pull_request_review_id,omitempty"` - Position *int `json:"position,omitempty"` - OriginalPosition *int `json:"original_position,omitempty"` - StartLine *int `json:"start_line,omitempty"` - Line *int `json:"line,omitempty"` - OriginalLine *int `json:"original_line,omitempty"` - OriginalStartLine *int `json:"original_start_line,omitempty"` - Side *string `json:"side,omitempty"` - StartSide *string `json:"start_side,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - OriginalCommitID *string `json:"original_commit_id,omitempty"` - User *User `json:"user,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - // AuthorAssociation is the comment author's relationship to the pull request's repository. - // Possible values are "COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MEMBER", "OWNER", or "NONE". 
- AuthorAssociation *string `json:"author_association,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - PullRequestURL *string `json:"pull_request_url,omitempty"` - // Can be one of: LINE, FILE from https://docs.github.com/rest/pulls/comments#create-a-review-comment-for-a-pull-request - SubjectType *string `json:"subject_type,omitempty"` -} - -func (p PullRequestComment) String() string { - return Stringify(p) -} - -// PullRequestListCommentsOptions specifies the optional parameters to the -// PullRequestsService.ListComments method. -type PullRequestListCommentsOptions struct { - // Sort specifies how to sort comments. Possible values are: created, updated. - Sort string `url:"sort,omitempty"` - - // Direction in which to sort comments. Possible values are: asc, desc. - Direction string `url:"direction,omitempty"` - - // Since filters comments by time. - Since time.Time `url:"since,omitempty"` - - ListOptions -} - -// ListComments lists all comments on the specified pull request. Specifying a -// pull request number of 0 will return all comments on all pull requests for -// the repository. -// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#list-review-comments-in-a-repository -// GitHub API docs: https://docs.github.com/rest/pulls/comments#list-review-comments-on-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/comments -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/comments -func (s *PullRequestsService) ListComments(ctx context.Context, owner, repo string, number int, opts *PullRequestListCommentsOptions) ([]*PullRequestComment, *Response, error) { - var u string - if number == 0 { - u = fmt.Sprintf("repos/%v/%v/pulls/comments", owner, repo) - } else { - u = fmt.Sprintf("repos/%v/%v/pulls/%d/comments", owner, repo, number) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeMultiLineCommentsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var comments []*PullRequestComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// GetComment fetches the specified pull request comment. -// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#get-a-review-comment-for-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/comments/{comment_id} -func (s *PullRequestsService) GetComment(ctx context.Context, owner, repo string, commentID int64) (*PullRequestComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeMultiLineCommentsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - comment := new(PullRequestComment) - resp, err := s.client.Do(ctx, req, comment) - if err != nil { - return nil, resp, err - } - - return comment, resp, nil -} - -// CreateComment creates a new comment on the specified pull request. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#create-a-review-comment-for-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls/{pull_number}/comments -func (s *PullRequestsService) CreateComment(ctx context.Context, owner, repo string, number int, comment *PullRequestComment) (*PullRequestComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/comments", owner, repo, number) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - // TODO: remove custom Accept headers when their respective API fully launches. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeMultiLineCommentsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - c := new(PullRequestComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// CreateCommentInReplyTo creates a new comment as a reply to an existing pull request comment. -// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#create-a-review-comment-for-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls/{pull_number}/comments -func (s *PullRequestsService) CreateCommentInReplyTo(ctx context.Context, owner, repo string, number int, body string, commentID int64) (*PullRequestComment, *Response, error) { - comment := &struct { - Body string `json:"body,omitempty"` - InReplyTo int64 `json:"in_reply_to,omitempty"` - }{ - Body: body, - InReplyTo: commentID, - } - u := fmt.Sprintf("repos/%v/%v/pulls/%d/comments", owner, repo, number) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(PullRequestComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// EditComment updates a pull request comment. -// A non-nil comment.Body must be provided. Other comment fields should be left nil. -// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#update-a-review-comment-for-a-pull-request -// -//meta:operation PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id} -func (s *PullRequestsService) EditComment(ctx context.Context, owner, repo string, commentID int64, comment *PullRequestComment) (*PullRequestComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(PullRequestComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// DeleteComment deletes a pull request comment. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#delete-a-review-comment-for-a-pull-request -// -//meta:operation DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id} -func (s *PullRequestsService) DeleteComment(ctx context.Context, owner, repo string, commentID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/pulls_reviewers.go b/vendor/github.com/google/go-github/v57/github/pulls_reviewers.go deleted file mode 100644 index 3f0c50b7..00000000 --- a/vendor/github.com/google/go-github/v57/github/pulls_reviewers.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ReviewersRequest specifies users and teams for a pull request review request. -type ReviewersRequest struct { - NodeID *string `json:"node_id,omitempty"` - Reviewers []string `json:"reviewers,omitempty"` - TeamReviewers []string `json:"team_reviewers,omitempty"` -} - -// Reviewers represents reviewers of a pull request. -type Reviewers struct { - Users []*User `json:"users,omitempty"` - Teams []*Team `json:"teams,omitempty"` -} - -// RequestReviewers creates a review request for the provided reviewers for the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/review-requests#request-reviewers-for-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers -func (s *PullRequestsService) RequestReviewers(ctx context.Context, owner, repo string, number int, reviewers ReviewersRequest) (*PullRequest, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, number) - req, err := s.client.NewRequest("POST", u, &reviewers) - if err != nil { - return nil, nil, err - } - - r := new(PullRequest) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// ListReviewers lists reviewers whose reviews have been requested on the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/review-requests#get-all-requested-reviewers-for-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers -func (s *PullRequestsService) ListReviewers(ctx context.Context, owner, repo string, number int, opts *ListOptions) (*Reviewers, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/requested_reviewers", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - reviewers := new(Reviewers) - resp, err := s.client.Do(ctx, req, reviewers) - if err != nil { - return nil, resp, err - } - - return reviewers, resp, nil -} - -// RemoveReviewers removes the review request for the provided reviewers for the specified pull request. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/review-requests#remove-requested-reviewers-from-a-pull-request -// -//meta:operation DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers -func (s *PullRequestsService) RemoveReviewers(ctx context.Context, owner, repo string, number int, reviewers ReviewersRequest) (*Response, error) { - u := fmt.Sprintf("repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, number) - req, err := s.client.NewRequest("DELETE", u, &reviewers) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/pulls_reviews.go b/vendor/github.com/google/go-github/v57/github/pulls_reviews.go deleted file mode 100644 index 27b8dc37..00000000 --- a/vendor/github.com/google/go-github/v57/github/pulls_reviews.go +++ /dev/null @@ -1,329 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "errors" - "fmt" -) - -var ErrMixedCommentStyles = errors.New("cannot use both position and side/line form comments") - -// PullRequestReview represents a review of a pull request. -type PullRequestReview struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - User *User `json:"user,omitempty"` - Body *string `json:"body,omitempty"` - SubmittedAt *Timestamp `json:"submitted_at,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - PullRequestURL *string `json:"pull_request_url,omitempty"` - State *string `json:"state,omitempty"` - // AuthorAssociation is the comment author's relationship to the issue's repository. - // Possible values are "COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MEMBER", "OWNER", or "NONE". - AuthorAssociation *string `json:"author_association,omitempty"` -} - -func (p PullRequestReview) String() string { - return Stringify(p) -} - -// DraftReviewComment represents a comment part of the review. -type DraftReviewComment struct { - Path *string `json:"path,omitempty"` - Position *int `json:"position,omitempty"` - Body *string `json:"body,omitempty"` - - // The new comfort-fade-preview fields - StartSide *string `json:"start_side,omitempty"` - Side *string `json:"side,omitempty"` - StartLine *int `json:"start_line,omitempty"` - Line *int `json:"line,omitempty"` -} - -func (c DraftReviewComment) String() string { - return Stringify(c) -} - -// PullRequestReviewRequest represents a request to create a review. 
-type PullRequestReviewRequest struct { - NodeID *string `json:"node_id,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - Body *string `json:"body,omitempty"` - Event *string `json:"event,omitempty"` - Comments []*DraftReviewComment `json:"comments,omitempty"` -} - -func (r PullRequestReviewRequest) String() string { - return Stringify(r) -} - -func (r *PullRequestReviewRequest) isComfortFadePreview() (bool, error) { - var isCF *bool - for _, comment := range r.Comments { - if comment == nil { - continue - } - hasPos := comment.Position != nil - hasComfortFade := (comment.StartSide != nil) || (comment.Side != nil) || - (comment.StartLine != nil) || (comment.Line != nil) - - switch { - case hasPos && hasComfortFade: - return false, ErrMixedCommentStyles - case hasPos && isCF != nil && *isCF: - return false, ErrMixedCommentStyles - case hasComfortFade && isCF != nil && !*isCF: - return false, ErrMixedCommentStyles - } - isCF = &hasComfortFade - } - if isCF != nil { - return *isCF, nil - } - return false, nil -} - -// PullRequestReviewDismissalRequest represents a request to dismiss a review. -type PullRequestReviewDismissalRequest struct { - Message *string `json:"message,omitempty"` -} - -func (r PullRequestReviewDismissalRequest) String() string { - return Stringify(r) -} - -// ListReviews lists all reviews on the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#list-reviews-for-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews -func (s *PullRequestsService) ListReviews(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var reviews []*PullRequestReview - resp, err := s.client.Do(ctx, req, &reviews) - if err != nil { - return nil, resp, err - } - - return reviews, resp, nil -} - -// GetReview fetches the specified pull request review. -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#get-a-review-for-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id} -func (s *PullRequestsService) GetReview(ctx context.Context, owner, repo string, number int, reviewID int64) (*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d", owner, repo, number, reviewID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - review := new(PullRequestReview) - resp, err := s.client.Do(ctx, req, review) - if err != nil { - return nil, resp, err - } - - return review, resp, nil -} - -// DeletePendingReview deletes the specified pull request pending review. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#delete-a-pending-review-for-a-pull-request -// -//meta:operation DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id} -func (s *PullRequestsService) DeletePendingReview(ctx context.Context, owner, repo string, number int, reviewID int64) (*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d", owner, repo, number, reviewID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, nil, err - } - - review := new(PullRequestReview) - resp, err := s.client.Do(ctx, req, review) - if err != nil { - return nil, resp, err - } - - return review, resp, nil -} - -// ListReviewComments lists all the comments for the specified review. -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#list-comments-for-a-pull-request-review -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments -func (s *PullRequestsService) ListReviewComments(ctx context.Context, owner, repo string, number int, reviewID int64, opts *ListOptions) ([]*PullRequestComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/comments", owner, repo, number, reviewID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var comments []*PullRequestComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// CreateReview creates a new review on the specified pull request. -// -// In order to use multi-line comments, you must use the "comfort fade" preview. -// This replaces the use of the "Position" field in comments with 4 new fields: -// -// [Start]Side, and [Start]Line. -// -// These new fields must be used for ALL comments (including single-line), -// with the following restrictions (empirically observed, so subject to change). -// -// For single-line "comfort fade" comments, you must use: -// -// Path: &path, // as before -// Body: &body, // as before -// Side: &"RIGHT" (or "LEFT") -// Line: &123, // NOT THE SAME AS POSITION, this is an actual line number. -// -// If StartSide or StartLine is used with single-line comments, a 422 is returned. -// -// For multi-line "comfort fade" comments, you must use: -// -// Path: &path, // as before -// Body: &body, // as before -// StartSide: &"RIGHT" (or "LEFT") -// Side: &"RIGHT" (or "LEFT") -// StartLine: &120, -// Line: &125, -// -// Suggested edits are made by commenting on the lines to replace, and including the -// suggested edit in a block like this (it may be surrounded in non-suggestion markdown): -// -// ```suggestion -// Use this instead. -// It is waaaaaay better. -// ``` -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#create-a-review-for-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews -func (s *PullRequestsService) CreateReview(ctx context.Context, owner, repo string, number int, review *PullRequestReviewRequest) (*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews", owner, repo, number) - - req, err := s.client.NewRequest("POST", u, review) - if err != nil { - return nil, nil, err - } - - // Detect which style of review comment is being used. 
- if isCF, err := review.isComfortFadePreview(); err != nil { - return nil, nil, err - } else if isCF { - // If the review comments are using the comfort fade preview fields, - // then pass the comfort fade header. - req.Header.Set("Accept", mediaTypeMultiLineCommentsPreview) - } - - r := new(PullRequestReview) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// UpdateReview updates the review summary on the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#update-a-review-for-a-pull-request -// -//meta:operation PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id} -func (s *PullRequestsService) UpdateReview(ctx context.Context, owner, repo string, number int, reviewID int64, body string) (*PullRequestReview, *Response, error) { - opts := &struct { - Body string `json:"body"` - }{Body: body} - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d", owner, repo, number, reviewID) - - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - review := &PullRequestReview{} - resp, err := s.client.Do(ctx, req, review) - if err != nil { - return nil, resp, err - } - - return review, resp, nil -} - -// SubmitReview submits a specified review on the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#submit-a-review-for-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events -func (s *PullRequestsService) SubmitReview(ctx context.Context, owner, repo string, number int, reviewID int64, review *PullRequestReviewRequest) (*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/events", owner, repo, number, reviewID) - - req, err := s.client.NewRequest("POST", u, review) - if err != nil { - return nil, nil, err - } - - r := new(PullRequestReview) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// DismissReview dismisses a specified review on the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#dismiss-a-review-for-a-pull-request -// -//meta:operation PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals -func (s *PullRequestsService) DismissReview(ctx context.Context, owner, repo string, number int, reviewID int64, review *PullRequestReviewDismissalRequest) (*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/dismissals", owner, repo, number, reviewID) - - req, err := s.client.NewRequest("PUT", u, review) - if err != nil { - return nil, nil, err - } - - r := new(PullRequestReview) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/pulls_threads.go b/vendor/github.com/google/go-github/v57/github/pulls_threads.go deleted file mode 100644 index 23e924d8..00000000 --- a/vendor/github.com/google/go-github/v57/github/pulls_threads.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// PullRequestThread represents a thread of comments on a pull request. 
-type PullRequestThread struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Comments []*PullRequestComment `json:"comments,omitempty"` -} - -func (p PullRequestThread) String() string { - return Stringify(p) -} diff --git a/vendor/github.com/google/go-github/v57/github/rate_limit.go b/vendor/github.com/google/go-github/v57/github/rate_limit.go deleted file mode 100644 index 0fc15f81..00000000 --- a/vendor/github.com/google/go-github/v57/github/rate_limit.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import "context" - -// RateLimitService provides access to rate limit functions in the GitHub API. -type RateLimitService service - -// Rate represents the rate limit for the current client. -type Rate struct { - // The number of requests per hour the client is currently limited to. - Limit int `json:"limit"` - - // The number of remaining requests the client can make this hour. - Remaining int `json:"remaining"` - - // The time at which the current rate limit will reset. - Reset Timestamp `json:"reset"` -} - -func (r Rate) String() string { - return Stringify(r) -} - -// RateLimits represents the rate limits for the current client. -type RateLimits struct { - // The rate limit for non-search API requests. Unauthenticated - // requests are limited to 60 per hour. Authenticated requests are - // limited to 5,000 per hour. - // - // GitHub API docs: https://docs.github.com/en/rest/overview/resources-in-the-rest-api#rate-limiting - Core *Rate `json:"core"` - - // The rate limit for search API requests. Unauthenticated requests - // are limited to 10 requests per minutes. Authenticated requests are - // limited to 30 per minute. - // - // GitHub API docs: https://docs.github.com/en/rest/search#rate-limit - Search *Rate `json:"search"` - - // GitHub API docs: https://docs.github.com/en/graphql/overview/resource-limitations#rate-limit - GraphQL *Rate `json:"graphql"` - - // GitHub API dos: https://docs.github.com/en/rest/rate-limit - IntegrationManifest *Rate `json:"integration_manifest"` - - SourceImport *Rate `json:"source_import"` - CodeScanningUpload *Rate `json:"code_scanning_upload"` - ActionsRunnerRegistration *Rate `json:"actions_runner_registration"` - SCIM *Rate `json:"scim"` -} - -func (r RateLimits) String() string { - return Stringify(r) -} - -// Get returns the rate limits for the current client. -// -// GitHub API docs: https://docs.github.com/rest/rate-limit/rate-limit#get-rate-limit-status-for-the-authenticated-user -// -//meta:operation GET /rate_limit -func (s *RateLimitService) Get(ctx context.Context) (*RateLimits, *Response, error) { - req, err := s.client.NewRequest("GET", "rate_limit", nil) - if err != nil { - return nil, nil, err - } - - response := new(struct { - Resources *RateLimits `json:"resources"` - }) - - // This resource is not subject to rate limits. 
- ctx = context.WithValue(ctx, bypassRateLimitCheck, true) - resp, err := s.client.Do(ctx, req, response) - if err != nil { - return nil, resp, err - } - - if response.Resources != nil { - s.client.rateMu.Lock() - if response.Resources.Core != nil { - s.client.rateLimits[coreCategory] = *response.Resources.Core - } - if response.Resources.Search != nil { - s.client.rateLimits[searchCategory] = *response.Resources.Search - } - if response.Resources.GraphQL != nil { - s.client.rateLimits[graphqlCategory] = *response.Resources.GraphQL - } - if response.Resources.IntegrationManifest != nil { - s.client.rateLimits[integrationManifestCategory] = *response.Resources.IntegrationManifest - } - if response.Resources.SourceImport != nil { - s.client.rateLimits[sourceImportCategory] = *response.Resources.SourceImport - } - if response.Resources.CodeScanningUpload != nil { - s.client.rateLimits[codeScanningUploadCategory] = *response.Resources.CodeScanningUpload - } - if response.Resources.ActionsRunnerRegistration != nil { - s.client.rateLimits[actionsRunnerRegistrationCategory] = *response.Resources.ActionsRunnerRegistration - } - if response.Resources.SCIM != nil { - s.client.rateLimits[scimCategory] = *response.Resources.SCIM - } - s.client.rateMu.Unlock() - } - - return response.Resources, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/reactions.go b/vendor/github.com/google/go-github/v57/github/reactions.go deleted file mode 100644 index 1aa7ac38..00000000 --- a/vendor/github.com/google/go-github/v57/github/reactions.go +++ /dev/null @@ -1,570 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" -) - -// ReactionsService provides access to the reactions-related functions in the -// GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/reactions -type ReactionsService service - -// Reaction represents a GitHub reaction. -type Reaction struct { - // ID is the Reaction ID. - ID *int64 `json:"id,omitempty"` - User *User `json:"user,omitempty"` - NodeID *string `json:"node_id,omitempty"` - // Content is the type of reaction. - // Possible values are: - // "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". - Content *string `json:"content,omitempty"` -} - -// Reactions represents a summary of GitHub reactions. -type Reactions struct { - TotalCount *int `json:"total_count,omitempty"` - PlusOne *int `json:"+1,omitempty"` - MinusOne *int `json:"-1,omitempty"` - Laugh *int `json:"laugh,omitempty"` - Confused *int `json:"confused,omitempty"` - Heart *int `json:"heart,omitempty"` - Hooray *int `json:"hooray,omitempty"` - Rocket *int `json:"rocket,omitempty"` - Eyes *int `json:"eyes,omitempty"` - URL *string `json:"url,omitempty"` -} - -func (r Reaction) String() string { - return Stringify(r) -} - -// ListCommentReactionOptions specifies the optional parameters to the -// ReactionsService.ListCommentReactions method. -type ListCommentReactionOptions struct { - // Content restricts the returned comment reactions to only those with the given type. - // Omit this parameter to list all reactions to a commit comment. - // Possible values are: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". - Content string `url:"content,omitempty"` - - ListOptions -} - -// ListCommentReactions lists the reactions for a commit comment. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-a-commit-comment -// -//meta:operation GET /repos/{owner}/{repo}/comments/{comment_id}/reactions -func (s *ReactionsService) ListCommentReactions(ctx context.Context, owner, repo string, id int64, opts *ListCommentReactionOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v/reactions", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// CreateCommentReaction creates a reaction for a commit comment. -// Note that if you have already created a reaction of type content, the -// previously created reaction will be returned with Status: 200 OK. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-commit-comment -// -//meta:operation POST /repos/{owner}/{repo}/comments/{comment_id}/reactions -func (s *ReactionsService) CreateCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v/reactions", owner, repo, id) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteCommentReaction deletes the reaction for a commit comment. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-a-commit-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeleteCommentReaction(ctx context.Context, owner, repo string, commentID, reactionID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v/reactions/%v", owner, repo, commentID, reactionID) - - return s.deleteReaction(ctx, u) -} - -// DeleteCommentReactionByID deletes the reaction for a commit comment by repository ID. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-a-commit-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeleteCommentReactionByID(ctx context.Context, repoID, commentID, reactionID int64) (*Response, error) { - u := fmt.Sprintf("repositories/%v/comments/%v/reactions/%v", repoID, commentID, reactionID) - - return s.deleteReaction(ctx, u) -} - -// ListIssueReactions lists the reactions for an issue. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-an-issue -// -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number}/reactions -func (s *ReactionsService) ListIssueReactions(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%v/reactions", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// CreateIssueReaction creates a reaction for an issue. -// Note that if you have already created a reaction of type content, the -// previously created reaction will be returned with Status: 200 OK. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-an-issue -// -//meta:operation POST /repos/{owner}/{repo}/issues/{issue_number}/reactions -func (s *ReactionsService) CreateIssueReaction(ctx context.Context, owner, repo string, number int, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%v/reactions", owner, repo, number) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteIssueReaction deletes the reaction to an issue. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-an-issue-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id} -func (s *ReactionsService) DeleteIssueReaction(ctx context.Context, owner, repo string, issueNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/issues/%v/reactions/%v", owner, repo, issueNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -// DeleteIssueReactionByID deletes the reaction to an issue by repository ID. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-an-issue-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id} -func (s *ReactionsService) DeleteIssueReactionByID(ctx context.Context, repoID, issueNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repositories/%v/issues/%v/reactions/%v", repoID, issueNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -// ListIssueCommentReactions lists the reactions for an issue comment. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-an-issue-comment -// -//meta:operation GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions -func (s *ReactionsService) ListIssueCommentReactions(ctx context.Context, owner, repo string, id int64, opts *ListOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%v/reactions", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// CreateIssueCommentReaction creates a reaction for an issue comment. -// Note that if you have already created a reaction of type content, the -// previously created reaction will be returned with Status: 200 OK. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-an-issue-comment -// -//meta:operation POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions -func (s *ReactionsService) CreateIssueCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%v/reactions", owner, repo, id) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteIssueCommentReaction deletes the reaction to an issue comment. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-an-issue-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeleteIssueCommentReaction(ctx context.Context, owner, repo string, commentID, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/issues/comments/%v/reactions/%v", owner, repo, commentID, reactionID) - - return s.deleteReaction(ctx, url) -} - -// DeleteIssueCommentReactionByID deletes the reaction to an issue comment by repository ID. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-an-issue-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeleteIssueCommentReactionByID(ctx context.Context, repoID, commentID, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repositories/%v/issues/comments/%v/reactions/%v", repoID, commentID, reactionID) - - return s.deleteReaction(ctx, url) -} - -// ListPullRequestCommentReactions lists the reactions for a pull request review comment. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-a-pull-request-review-comment -// -//meta:operation GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions -func (s *ReactionsService) ListPullRequestCommentReactions(ctx context.Context, owner, repo string, id int64, opts *ListOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%v/reactions", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// CreatePullRequestCommentReaction creates a reaction for a pull request review comment. -// Note that if you have already created a reaction of type content, the -// previously created reaction will be returned with Status: 200 OK. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-pull-request-review-comment -// -//meta:operation POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions -func (s *ReactionsService) CreatePullRequestCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%v/reactions", owner, repo, id) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeletePullRequestCommentReaction deletes the reaction to a pull request review comment. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-a-pull-request-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeletePullRequestCommentReaction(ctx context.Context, owner, repo string, commentID, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/pulls/comments/%v/reactions/%v", owner, repo, commentID, reactionID) - - return s.deleteReaction(ctx, url) -} - -// DeletePullRequestCommentReactionByID deletes the reaction to a pull request review comment by repository ID. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-a-pull-request-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeletePullRequestCommentReactionByID(ctx context.Context, repoID, commentID, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repositories/%v/pulls/comments/%v/reactions/%v", repoID, commentID, reactionID) - - return s.deleteReaction(ctx, url) -} - -// ListTeamDiscussionReactions lists the reactions for a team discussion. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-a-team-discussion-legacy -// -//meta:operation GET /teams/{team_id}/discussions/{discussion_number}/reactions -func (s *ReactionsService) ListTeamDiscussionReactions(ctx context.Context, teamID int64, discussionNumber int, opts *ListOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("teams/%v/discussions/%v/reactions", teamID, discussionNumber) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// CreateTeamDiscussionReaction creates a reaction for a team discussion. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-team-discussion-legacy -// -//meta:operation POST /teams/{team_id}/discussions/{discussion_number}/reactions -func (s *ReactionsService) CreateTeamDiscussionReaction(ctx context.Context, teamID int64, discussionNumber int, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("teams/%v/discussions/%v/reactions", teamID, discussionNumber) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteTeamDiscussionReaction deletes the reaction to a team discussion. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-team-discussion-reaction -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id} -func (s *ReactionsService) DeleteTeamDiscussionReaction(ctx context.Context, org, teamSlug string, discussionNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/reactions/%v", org, teamSlug, discussionNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -// DeleteTeamDiscussionReactionByOrgIDAndTeamID deletes the reaction to a team discussion by organization ID and team ID. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-team-discussion -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions -func (s *ReactionsService) DeleteTeamDiscussionReactionByOrgIDAndTeamID(ctx context.Context, orgID, teamID, discussionNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/reactions/%v", orgID, teamID, discussionNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -// ListTeamDiscussionCommentReactions lists the reactions for a team discussion comment. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-a-team-discussion-comment-legacy -// -//meta:operation GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions -func (s *ReactionsService) ListTeamDiscussionCommentReactions(ctx context.Context, teamID int64, discussionNumber, commentNumber int, opts *ListOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("teams/%v/discussions/%v/comments/%v/reactions", teamID, discussionNumber, commentNumber) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - return m, resp, nil -} - -// CreateTeamDiscussionCommentReaction creates a reaction for a team discussion comment. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-team-discussion-comment-legacy -// -//meta:operation POST /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions -func (s *ReactionsService) CreateTeamDiscussionCommentReaction(ctx context.Context, teamID int64, discussionNumber, commentNumber int, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("teams/%v/discussions/%v/comments/%v/reactions", teamID, discussionNumber, commentNumber) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteTeamDiscussionCommentReaction deletes the reaction to a team discussion comment. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-team-discussion-comment-reaction -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id} -func (s *ReactionsService) DeleteTeamDiscussionCommentReaction(ctx context.Context, org, teamSlug string, discussionNumber, commentNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v/reactions/%v", org, teamSlug, discussionNumber, commentNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -// DeleteTeamDiscussionCommentReactionByOrgIDAndTeamID deletes the reaction to a team discussion comment by organization ID and team ID. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-team-discussion-comment -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions -func (s *ReactionsService) DeleteTeamDiscussionCommentReactionByOrgIDAndTeamID(ctx context.Context, orgID, teamID, discussionNumber, commentNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v/reactions/%v", orgID, teamID, discussionNumber, commentNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -func (s *ReactionsService) deleteReaction(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest(http.MethodDelete, url, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - return s.client.Do(ctx, req, nil) -} - -// CreateReleaseReaction creates a reaction to a release. -// Note that a response with a Status: 200 OK means that you already -// added the reaction type to this release. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-release -// -//meta:operation POST /repos/{owner}/{repo}/releases/{release_id}/reactions -func (s *ReactionsService) CreateReleaseReaction(ctx context.Context, owner, repo string, releaseID int64, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/releases/%v/reactions", owner, repo, releaseID) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos.go b/vendor/github.com/google/go-github/v57/github/repos.go deleted file mode 100644 index 5fcf219b..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos.go +++ /dev/null @@ -1,2387 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "net/http" - "net/url" - "strings" -) - -const githubBranchNotProtected string = "Branch not protected" - -var ErrBranchNotProtected = errors.New("branch is not protected") - -// RepositoriesService handles communication with the repository related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/repos/ -type RepositoriesService service - -// Repository represents a GitHub repository. 
-type Repository struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Owner *User `json:"owner,omitempty"` - Name *string `json:"name,omitempty"` - FullName *string `json:"full_name,omitempty"` - Description *string `json:"description,omitempty"` - Homepage *string `json:"homepage,omitempty"` - CodeOfConduct *CodeOfConduct `json:"code_of_conduct,omitempty"` - DefaultBranch *string `json:"default_branch,omitempty"` - MasterBranch *string `json:"master_branch,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - PushedAt *Timestamp `json:"pushed_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CloneURL *string `json:"clone_url,omitempty"` - GitURL *string `json:"git_url,omitempty"` - MirrorURL *string `json:"mirror_url,omitempty"` - SSHURL *string `json:"ssh_url,omitempty"` - SVNURL *string `json:"svn_url,omitempty"` - Language *string `json:"language,omitempty"` - Fork *bool `json:"fork,omitempty"` - ForksCount *int `json:"forks_count,omitempty"` - NetworkCount *int `json:"network_count,omitempty"` - OpenIssuesCount *int `json:"open_issues_count,omitempty"` - OpenIssues *int `json:"open_issues,omitempty"` // Deprecated: Replaced by OpenIssuesCount. For backward compatibility OpenIssues is still populated. - StargazersCount *int `json:"stargazers_count,omitempty"` - SubscribersCount *int `json:"subscribers_count,omitempty"` - WatchersCount *int `json:"watchers_count,omitempty"` // Deprecated: Replaced by StargazersCount. For backward compatibility WatchersCount is still populated. - Watchers *int `json:"watchers,omitempty"` // Deprecated: Replaced by StargazersCount. For backward compatibility Watchers is still populated. - Size *int `json:"size,omitempty"` - AutoInit *bool `json:"auto_init,omitempty"` - Parent *Repository `json:"parent,omitempty"` - Source *Repository `json:"source,omitempty"` - TemplateRepository *Repository `json:"template_repository,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Permissions map[string]bool `json:"permissions,omitempty"` - AllowRebaseMerge *bool `json:"allow_rebase_merge,omitempty"` - AllowUpdateBranch *bool `json:"allow_update_branch,omitempty"` - AllowSquashMerge *bool `json:"allow_squash_merge,omitempty"` - AllowMergeCommit *bool `json:"allow_merge_commit,omitempty"` - AllowAutoMerge *bool `json:"allow_auto_merge,omitempty"` - AllowForking *bool `json:"allow_forking,omitempty"` - WebCommitSignoffRequired *bool `json:"web_commit_signoff_required,omitempty"` - DeleteBranchOnMerge *bool `json:"delete_branch_on_merge,omitempty"` - UseSquashPRTitleAsDefault *bool `json:"use_squash_pr_title_as_default,omitempty"` - SquashMergeCommitTitle *string `json:"squash_merge_commit_title,omitempty"` // Can be one of: "PR_TITLE", "COMMIT_OR_PR_TITLE" - SquashMergeCommitMessage *string `json:"squash_merge_commit_message,omitempty"` // Can be one of: "PR_BODY", "COMMIT_MESSAGES", "BLANK" - MergeCommitTitle *string `json:"merge_commit_title,omitempty"` // Can be one of: "PR_TITLE", "MERGE_MESSAGE" - MergeCommitMessage *string `json:"merge_commit_message,omitempty"` // Can be one of: "PR_BODY", "PR_TITLE", "BLANK" - Topics []string `json:"topics,omitempty"` - Archived *bool `json:"archived,omitempty"` - Disabled *bool `json:"disabled,omitempty"` - - // Only provided when using RepositoriesService.Get while in preview - License *License `json:"license,omitempty"` - - // Additional mutable fields when creating and editing a 
repository - Private *bool `json:"private,omitempty"` - HasIssues *bool `json:"has_issues,omitempty"` - HasWiki *bool `json:"has_wiki,omitempty"` - HasPages *bool `json:"has_pages,omitempty"` - HasProjects *bool `json:"has_projects,omitempty"` - HasDownloads *bool `json:"has_downloads,omitempty"` - HasDiscussions *bool `json:"has_discussions,omitempty"` - IsTemplate *bool `json:"is_template,omitempty"` - LicenseTemplate *string `json:"license_template,omitempty"` - GitignoreTemplate *string `json:"gitignore_template,omitempty"` - - // Options for configuring Advanced Security and Secret Scanning - SecurityAndAnalysis *SecurityAndAnalysis `json:"security_and_analysis,omitempty"` - - // Creating an organization repository. Required for non-owners. - TeamID *int64 `json:"team_id,omitempty"` - - // API URLs - URL *string `json:"url,omitempty"` - ArchiveURL *string `json:"archive_url,omitempty"` - AssigneesURL *string `json:"assignees_url,omitempty"` - BlobsURL *string `json:"blobs_url,omitempty"` - BranchesURL *string `json:"branches_url,omitempty"` - CollaboratorsURL *string `json:"collaborators_url,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - CommitsURL *string `json:"commits_url,omitempty"` - CompareURL *string `json:"compare_url,omitempty"` - ContentsURL *string `json:"contents_url,omitempty"` - ContributorsURL *string `json:"contributors_url,omitempty"` - DeploymentsURL *string `json:"deployments_url,omitempty"` - DownloadsURL *string `json:"downloads_url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - ForksURL *string `json:"forks_url,omitempty"` - GitCommitsURL *string `json:"git_commits_url,omitempty"` - GitRefsURL *string `json:"git_refs_url,omitempty"` - GitTagsURL *string `json:"git_tags_url,omitempty"` - HooksURL *string `json:"hooks_url,omitempty"` - IssueCommentURL *string `json:"issue_comment_url,omitempty"` - IssueEventsURL *string `json:"issue_events_url,omitempty"` - IssuesURL *string `json:"issues_url,omitempty"` - KeysURL *string `json:"keys_url,omitempty"` - LabelsURL *string `json:"labels_url,omitempty"` - LanguagesURL *string `json:"languages_url,omitempty"` - MergesURL *string `json:"merges_url,omitempty"` - MilestonesURL *string `json:"milestones_url,omitempty"` - NotificationsURL *string `json:"notifications_url,omitempty"` - PullsURL *string `json:"pulls_url,omitempty"` - ReleasesURL *string `json:"releases_url,omitempty"` - StargazersURL *string `json:"stargazers_url,omitempty"` - StatusesURL *string `json:"statuses_url,omitempty"` - SubscribersURL *string `json:"subscribers_url,omitempty"` - SubscriptionURL *string `json:"subscription_url,omitempty"` - TagsURL *string `json:"tags_url,omitempty"` - TreesURL *string `json:"trees_url,omitempty"` - TeamsURL *string `json:"teams_url,omitempty"` - - // TextMatches is only populated from search results that request text matches - // See: search.go and https://docs.github.com/rest/search/#text-match-metadata - TextMatches []*TextMatch `json:"text_matches,omitempty"` - - // Visibility is only used for Create and Edit endpoints. The visibility field - // overrides the field parameter when both are used. - // Can be one of public, private or internal. - Visibility *string `json:"visibility,omitempty"` - - // RoleName is only returned by the API 'check team permissions for a repository'. 
- // See: teams.go (IsTeamRepoByID) https://docs.github.com/rest/teams/teams#check-team-permissions-for-a-repository - RoleName *string `json:"role_name,omitempty"` -} - -func (r Repository) String() string { - return Stringify(r) -} - -// BranchListOptions specifies the optional parameters to the -// RepositoriesService.ListBranches method. -type BranchListOptions struct { - // Setting to true returns only protected branches. - // When set to false, only unprotected branches are returned. - // Omitting this parameter returns all branches. - // Default: nil - Protected *bool `url:"protected,omitempty"` - - ListOptions -} - -// RepositoryListOptions specifies the optional parameters to the -// RepositoriesService.List method. -type RepositoryListOptions struct { - // See RepositoryListByAuthenticatedUserOptions.Visibility - Visibility string `url:"visibility,omitempty"` - - // See RepositoryListByAuthenticatedUserOptions.Affiliation - Affiliation string `url:"affiliation,omitempty"` - - // See RepositoryListByUserOptions.Type or RepositoryListByAuthenticatedUserOptions.Type - Type string `url:"type,omitempty"` - - // See RepositoryListByUserOptions.Sort or RepositoryListByAuthenticatedUserOptions.Sort - Sort string `url:"sort,omitempty"` - - // See RepositoryListByUserOptions.Direction or RepositoryListByAuthenticatedUserOptions.Direction - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// SecurityAndAnalysis specifies the optional advanced security features -// that are enabled on a given repository. -type SecurityAndAnalysis struct { - AdvancedSecurity *AdvancedSecurity `json:"advanced_security,omitempty"` - SecretScanning *SecretScanning `json:"secret_scanning,omitempty"` - SecretScanningPushProtection *SecretScanningPushProtection `json:"secret_scanning_push_protection,omitempty"` - DependabotSecurityUpdates *DependabotSecurityUpdates `json:"dependabot_security_updates,omitempty"` -} - -func (s SecurityAndAnalysis) String() string { - return Stringify(s) -} - -// AdvancedSecurity specifies the state of advanced security on a repository. -// -// GitHub API docs: https://docs.github.com/github/getting-started-with-github/learning-about-github/about-github-advanced-security -type AdvancedSecurity struct { - Status *string `json:"status,omitempty"` -} - -func (a AdvancedSecurity) String() string { - return Stringify(a) -} - -// SecretScanning specifies the state of secret scanning on a repository. -// -// GitHub API docs: https://docs.github.com/code-security/secret-security/about-secret-scanning -type SecretScanning struct { - Status *string `json:"status,omitempty"` -} - -func (s SecretScanning) String() string { - return Stringify(s) -} - -// SecretScanningPushProtection specifies the state of secret scanning push protection on a repository. -// -// GitHub API docs: https://docs.github.com/code-security/secret-scanning/about-secret-scanning#about-secret-scanning-for-partner-patterns -type SecretScanningPushProtection struct { - Status *string `json:"status,omitempty"` -} - -func (s SecretScanningPushProtection) String() string { - return Stringify(s) -} - -// DependabotSecurityUpdates specifies the state of Dependabot security updates on a repository. 
-// -// GitHub API docs: https://docs.github.com/code-security/dependabot/dependabot-security-updates/about-dependabot-security-updates -type DependabotSecurityUpdates struct { - Status *string `json:"status,omitempty"` -} - -func (d DependabotSecurityUpdates) String() string { - return Stringify(d) -} - -// List calls either RepositoriesService.ListByUser or RepositoriesService.ListByAuthenticatedUser -// depending on whether user is empty. -// -// Deprecated: Use RepositoriesService.ListByUser or RepositoriesService.ListByAuthenticatedUser instead. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repositories-for-a-user -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repositories-for-the-authenticated-user -// -//meta:operation GET /user/repos -//meta:operation GET /users/{username}/repos -func (s *RepositoriesService) List(ctx context.Context, user string, opts *RepositoryListOptions) ([]*Repository, *Response, error) { - if opts == nil { - opts = &RepositoryListOptions{} - } - if user != "" { - return s.ListByUser(ctx, user, &RepositoryListByUserOptions{ - Type: opts.Type, - Sort: opts.Sort, - Direction: opts.Direction, - ListOptions: opts.ListOptions, - }) - } - return s.ListByAuthenticatedUser(ctx, &RepositoryListByAuthenticatedUserOptions{ - Visibility: opts.Visibility, - Affiliation: opts.Affiliation, - Type: opts.Type, - Sort: opts.Sort, - Direction: opts.Direction, - ListOptions: opts.ListOptions, - }) -} - -// RepositoryListByUserOptions specifies the optional parameters to the -// RepositoriesService.ListByUser method. -type RepositoryListByUserOptions struct { - // Limit results to repositories of the specified type. - // Default: owner - // Can be one of: all, owner, member - Type string `url:"type,omitempty"` - - // The property to sort the results by. - // Default: full_name - // Can be one of: created, updated, pushed, full_name - Sort string `url:"sort,omitempty"` - - // The order to sort by. - // Default: asc when using full_name, otherwise desc. - // Can be one of: asc, desc - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListByUser lists public repositories for the specified user. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repositories-for-a-user -// -//meta:operation GET /users/{username}/repos -func (s *RepositoriesService) ListByUser(ctx context.Context, user string, opts *RepositoryListByUserOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("users/%v/repos", user) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// RepositoryListByAuthenticatedUserOptions specifies the optional parameters to the -// RepositoriesService.ListByAuthenticatedUser method. -type RepositoryListByAuthenticatedUserOptions struct { - // Limit results to repositories with the specified visibility. - // Default: all - // Can be one of: all, public, private - Visibility string `url:"visibility,omitempty"` - - // List repos of given affiliation[s]. - // Comma-separated list of values. Can include: - // * owner: Repositories that are owned by the authenticated user. - // * collaborator: Repositories that the user has been added to as a - // collaborator. 
- // * organization_member: Repositories that the user has access to through - // being a member of an organization. This includes every repository on - // every team that the user is on. - // Default: owner,collaborator,organization_member - Affiliation string `url:"affiliation,omitempty"` - - // Limit results to repositories of the specified type. Will cause a 422 error if - // used in the same request as visibility or affiliation. - // Default: all - // Can be one of: all, owner, public, private, member - Type string `url:"type,omitempty"` - - // The property to sort the results by. - // Default: full_name - // Can be one of: created, updated, pushed, full_name - Sort string `url:"sort,omitempty"` - - // Direction in which to sort repositories. Can be one of asc or desc. - // Default: when using full_name: asc; otherwise desc - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListByAuthenticatedUser lists repositories for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repositories-for-the-authenticated-user -// -//meta:operation GET /user/repos -func (s *RepositoriesService) ListByAuthenticatedUser(ctx context.Context, opts *RepositoryListByAuthenticatedUserOptions) ([]*Repository, *Response, error) { - u := "user/repos" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// RepositoryListByOrgOptions specifies the optional parameters to the -// RepositoriesService.ListByOrg method. -type RepositoryListByOrgOptions struct { - // Type of repositories to list. Possible values are: all, public, private, - // forks, sources, member. Default is "all". - Type string `url:"type,omitempty"` - - // How to sort the repository list. Can be one of created, updated, pushed, - // full_name. Default is "created". - Sort string `url:"sort,omitempty"` - - // Direction in which to sort repositories. Can be one of asc or desc. - // Default when using full_name: asc; otherwise desc. - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListByOrg lists the repositories for an organization. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-organization-repositories -// -//meta:operation GET /orgs/{org}/repos -func (s *RepositoriesService) ListByOrg(ctx context.Context, org string, opts *RepositoryListByOrgOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("orgs/%v/repos", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeTopicsPreview, mediaTypeRepositoryVisibilityPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// RepositoryListAllOptions specifies the optional parameters to the -// RepositoriesService.ListAll method. -type RepositoryListAllOptions struct { - // ID of the last repository seen - Since int64 `url:"since,omitempty"` -} - -// ListAll lists all GitHub repositories in the order that they were created. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-public-repositories -// -//meta:operation GET /repositories -func (s *RepositoriesService) ListAll(ctx context.Context, opts *RepositoryListAllOptions) ([]*Repository, *Response, error) { - u, err := addOptions("repositories", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// createRepoRequest is a subset of Repository and is used internally -// by Create to pass only the known fields for the endpoint. -// -// See https://github.com/google/go-github/issues/1014 for more -// information. -type createRepoRequest struct { - // Name is required when creating a repo. - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - Homepage *string `json:"homepage,omitempty"` - - Private *bool `json:"private,omitempty"` - Visibility *string `json:"visibility,omitempty"` - HasIssues *bool `json:"has_issues,omitempty"` - HasProjects *bool `json:"has_projects,omitempty"` - HasWiki *bool `json:"has_wiki,omitempty"` - HasDiscussions *bool `json:"has_discussions,omitempty"` - IsTemplate *bool `json:"is_template,omitempty"` - - // Creating an organization repository. Required for non-owners. - TeamID *int64 `json:"team_id,omitempty"` - - AutoInit *bool `json:"auto_init,omitempty"` - GitignoreTemplate *string `json:"gitignore_template,omitempty"` - LicenseTemplate *string `json:"license_template,omitempty"` - AllowSquashMerge *bool `json:"allow_squash_merge,omitempty"` - AllowMergeCommit *bool `json:"allow_merge_commit,omitempty"` - AllowRebaseMerge *bool `json:"allow_rebase_merge,omitempty"` - AllowUpdateBranch *bool `json:"allow_update_branch,omitempty"` - AllowAutoMerge *bool `json:"allow_auto_merge,omitempty"` - AllowForking *bool `json:"allow_forking,omitempty"` - DeleteBranchOnMerge *bool `json:"delete_branch_on_merge,omitempty"` - UseSquashPRTitleAsDefault *bool `json:"use_squash_pr_title_as_default,omitempty"` - SquashMergeCommitTitle *string `json:"squash_merge_commit_title,omitempty"` - SquashMergeCommitMessage *string `json:"squash_merge_commit_message,omitempty"` - MergeCommitTitle *string `json:"merge_commit_title,omitempty"` - MergeCommitMessage *string `json:"merge_commit_message,omitempty"` -} - -// Create a new repository. If an organization is specified, the new -// repository will be created under that org. If the empty string is -// specified, it will be created for the authenticated user. -// -// Note that only a subset of the repo fields are used and repo must -// not be nil. -// -// Also note that this method will return the response without actually -// waiting for GitHub to finish creating the repository and letting the -// changes propagate throughout its servers. You may set up a loop with -// exponential back-off to verify repository's creation. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#create-a-repository-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/repos/repos#create-an-organization-repository -// -//meta:operation POST /orgs/{org}/repos -//meta:operation POST /user/repos -func (s *RepositoriesService) Create(ctx context.Context, org string, repo *Repository) (*Repository, *Response, error) { - var u string - if org != "" { - u = fmt.Sprintf("orgs/%v/repos", org) - } else { - u = "user/repos" - } - - repoReq := &createRepoRequest{ - Name: repo.Name, - Description: repo.Description, - Homepage: repo.Homepage, - Private: repo.Private, - Visibility: repo.Visibility, - HasIssues: repo.HasIssues, - HasProjects: repo.HasProjects, - HasWiki: repo.HasWiki, - HasDiscussions: repo.HasDiscussions, - IsTemplate: repo.IsTemplate, - TeamID: repo.TeamID, - AutoInit: repo.AutoInit, - GitignoreTemplate: repo.GitignoreTemplate, - LicenseTemplate: repo.LicenseTemplate, - AllowSquashMerge: repo.AllowSquashMerge, - AllowMergeCommit: repo.AllowMergeCommit, - AllowRebaseMerge: repo.AllowRebaseMerge, - AllowUpdateBranch: repo.AllowUpdateBranch, - AllowAutoMerge: repo.AllowAutoMerge, - AllowForking: repo.AllowForking, - DeleteBranchOnMerge: repo.DeleteBranchOnMerge, - UseSquashPRTitleAsDefault: repo.UseSquashPRTitleAsDefault, - SquashMergeCommitTitle: repo.SquashMergeCommitTitle, - SquashMergeCommitMessage: repo.SquashMergeCommitMessage, - MergeCommitTitle: repo.MergeCommitTitle, - MergeCommitMessage: repo.MergeCommitMessage, - } - - req, err := s.client.NewRequest("POST", u, repoReq) - if err != nil { - return nil, nil, err - } - - acceptHeaders := []string{mediaTypeRepositoryTemplatePreview, mediaTypeRepositoryVisibilityPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// TemplateRepoRequest represents a request to create a repository from a template. -type TemplateRepoRequest struct { - // Name is required when creating a repo. - Name *string `json:"name,omitempty"` - Owner *string `json:"owner,omitempty"` - Description *string `json:"description,omitempty"` - - IncludeAllBranches *bool `json:"include_all_branches,omitempty"` - Private *bool `json:"private,omitempty"` -} - -// CreateFromTemplate generates a repository from a template. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#create-a-repository-using-a-template -// -//meta:operation POST /repos/{template_owner}/{template_repo}/generate -func (s *RepositoriesService) CreateFromTemplate(ctx context.Context, templateOwner, templateRepo string, templateRepoReq *TemplateRepoRequest) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/generate", templateOwner, templateRepo) - - req, err := s.client.NewRequest("POST", u, templateRepoReq) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeRepositoryTemplatePreview) - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// Get fetches a repository. 
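
Because Create forwards only the createRepoRequest subset shown above, a minimal call needs little more than a name. A sketch, reusing the import and client assumptions from the ListByOrg example earlier; the repository and organization names are placeholders.

// createExampleRepo is an illustrative sketch; it assumes the imports and
// client setup from the ListByOrg example earlier. All names are placeholders.
func createExampleRepo(ctx context.Context, client *github.Client) (*github.Repository, error) {
	repo := &github.Repository{
		Name:        github.String("example-repo"),
		Description: github.String("created via go-github"),
		Private:     github.Bool(true),
		AutoInit:    github.Bool(true),
	}
	// Pass "" instead of an organization name to create under the authenticated user.
	created, _, err := client.Repositories.Create(ctx, "example-org", repo)
	return created, err
}
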
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#get-a-repository -// -//meta:operation GET /repos/{owner}/{repo} -func (s *RepositoriesService) Get(ctx context.Context, owner, repo string) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when the license support fully launches - // https://docs.github.com/rest/licenses/#get-a-repositorys-license - acceptHeaders := []string{ - mediaTypeCodesOfConductPreview, - mediaTypeTopicsPreview, - mediaTypeRepositoryTemplatePreview, - mediaTypeRepositoryVisibilityPreview, - } - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - repository := new(Repository) - resp, err := s.client.Do(ctx, req, repository) - if err != nil { - return nil, resp, err - } - - return repository, resp, nil -} - -// GetCodeOfConduct gets the contents of a repository's code of conduct. -// Note that https://docs.github.com/rest/codes-of-conduct#about-the-codes-of-conduct-api -// says to use the GET /repos/{owner}/{repo} endpoint. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#get-a-repository -// -//meta:operation GET /repos/{owner}/{repo} -func (s *RepositoriesService) GetCodeOfConduct(ctx context.Context, owner, repo string) (*CodeOfConduct, *Response, error) { - u := fmt.Sprintf("repos/%v/%v", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeCodesOfConductPreview) - - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r.GetCodeOfConduct(), resp, nil -} - -// GetByID fetches a repository. -// -// Note: GetByID uses the undocumented GitHub API endpoint "GET /repositories/{repository_id}". -// -//meta:operation GET /repositories/{repository_id} -func (s *RepositoriesService) GetByID(ctx context.Context, id int64) (*Repository, *Response, error) { - u := fmt.Sprintf("repositories/%d", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - repository := new(Repository) - resp, err := s.client.Do(ctx, req, repository) - if err != nil { - return nil, resp, err - } - - return repository, resp, nil -} - -// Edit updates a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#update-a-repository -// -//meta:operation PATCH /repos/{owner}/{repo} -func (s *RepositoriesService) Edit(ctx context.Context, owner, repo string, repository *Repository) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v", owner, repo) - req, err := s.client.NewRequest("PATCH", u, repository) - if err != nil { - return nil, nil, err - } - - acceptHeaders := []string{mediaTypeRepositoryTemplatePreview, mediaTypeRepositoryVisibilityPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// Delete a repository. 
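
Edit sends the given Repository as the PATCH body, so fields left nil are not touched. A sketch of reading a repository and then flipping a single setting, under the same assumptions as the earlier examples.

// disableWiki is an illustrative sketch; it assumes the imports and client
// setup from the ListByOrg example earlier.
func disableWiki(ctx context.Context, client *github.Client, owner, repo string) error {
	current, _, err := client.Repositories.Get(ctx, owner, repo)
	if err != nil {
		return err
	}
	fmt.Println("default branch:", current.GetDefaultBranch())

	// Only the fields set here end up in the PATCH body.
	_, _, err = client.Repositories.Edit(ctx, owner, repo, &github.Repository{
		HasWiki: github.Bool(false),
	})
	return err
}
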
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#delete-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo} -func (s *RepositoriesService) Delete(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Contributor represents a repository contributor -type Contributor struct { - Login *string `json:"login,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - GravatarID *string `json:"gravatar_id,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - FollowersURL *string `json:"followers_url,omitempty"` - FollowingURL *string `json:"following_url,omitempty"` - GistsURL *string `json:"gists_url,omitempty"` - StarredURL *string `json:"starred_url,omitempty"` - SubscriptionsURL *string `json:"subscriptions_url,omitempty"` - OrganizationsURL *string `json:"organizations_url,omitempty"` - ReposURL *string `json:"repos_url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - ReceivedEventsURL *string `json:"received_events_url,omitempty"` - Type *string `json:"type,omitempty"` - SiteAdmin *bool `json:"site_admin,omitempty"` - Contributions *int `json:"contributions,omitempty"` - Name *string `json:"name,omitempty"` - Email *string `json:"email,omitempty"` -} - -// ListContributorsOptions specifies the optional parameters to the -// RepositoriesService.ListContributors method. -type ListContributorsOptions struct { - // Include anonymous contributors in results or not - Anon string `url:"anon,omitempty"` - - ListOptions -} - -// GetVulnerabilityAlerts checks if vulnerability alerts are enabled for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#check-if-vulnerability-alerts-are-enabled-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/vulnerability-alerts -func (s *RepositoriesService) GetVulnerabilityAlerts(ctx context.Context, owner, repository string) (bool, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/vulnerability-alerts", owner, repository) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredVulnerabilityAlertsPreview) - - resp, err := s.client.Do(ctx, req, nil) - vulnerabilityAlertsEnabled, err := parseBoolResponse(err) - return vulnerabilityAlertsEnabled, resp, err -} - -// EnableVulnerabilityAlerts enables vulnerability alerts and the dependency graph for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#enable-vulnerability-alerts -// -//meta:operation PUT /repos/{owner}/{repo}/vulnerability-alerts -func (s *RepositoriesService) EnableVulnerabilityAlerts(ctx context.Context, owner, repository string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/vulnerability-alerts", owner, repository) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredVulnerabilityAlertsPreview) - - return s.client.Do(ctx, req, nil) -} - -// DisableVulnerabilityAlerts disables vulnerability alerts and the dependency graph for a repository. 
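
GetVulnerabilityAlerts collapses the check into a plain bool via parseBoolResponse, so enabling alerts only when they are off is a two-step call. A sketch under the same assumptions as the earlier examples.

// ensureVulnerabilityAlerts is an illustrative sketch; same import and client
// assumptions as the earlier examples.
func ensureVulnerabilityAlerts(ctx context.Context, client *github.Client, owner, repo string) error {
	enabled, _, err := client.Repositories.GetVulnerabilityAlerts(ctx, owner, repo)
	if err != nil {
		return err
	}
	if enabled {
		return nil
	}
	_, err = client.Repositories.EnableVulnerabilityAlerts(ctx, owner, repo)
	return err
}
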
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#disable-vulnerability-alerts -// -//meta:operation DELETE /repos/{owner}/{repo}/vulnerability-alerts -func (s *RepositoriesService) DisableVulnerabilityAlerts(ctx context.Context, owner, repository string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/vulnerability-alerts", owner, repository) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredVulnerabilityAlertsPreview) - - return s.client.Do(ctx, req, nil) -} - -// GetAutomatedSecurityFixes checks if the automated security fixes for a repository are enabled. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#check-if-automated-security-fixes-are-enabled-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/automated-security-fixes -func (s *RepositoriesService) GetAutomatedSecurityFixes(ctx context.Context, owner, repository string) (*AutomatedSecurityFixes, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/automated-security-fixes", owner, repository) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - p := new(AutomatedSecurityFixes) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - return p, resp, nil -} - -// EnableAutomatedSecurityFixes enables the automated security fixes for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#enable-automated-security-fixes -// -//meta:operation PUT /repos/{owner}/{repo}/automated-security-fixes -func (s *RepositoriesService) EnableAutomatedSecurityFixes(ctx context.Context, owner, repository string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/automated-security-fixes", owner, repository) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DisableAutomatedSecurityFixes disables vulnerability alerts and the dependency graph for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#disable-automated-security-fixes -// -//meta:operation DELETE /repos/{owner}/{repo}/automated-security-fixes -func (s *RepositoriesService) DisableAutomatedSecurityFixes(ctx context.Context, owner, repository string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/automated-security-fixes", owner, repository) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListContributors lists contributors for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repository-contributors -// -//meta:operation GET /repos/{owner}/{repo}/contributors -func (s *RepositoriesService) ListContributors(ctx context.Context, owner string, repository string, opts *ListContributorsOptions) ([]*Contributor, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/contributors", owner, repository) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var contributor []*Contributor - resp, err := s.client.Do(ctx, req, &contributor) - if err != nil { - return nil, resp, err - } - - return contributor, resp, nil -} - -// ListLanguages lists languages for the specified repository. 
The returned map -// specifies the languages and the number of bytes of code written in that -// language. For example: -// -// { -// "C": 78769, -// "Python": 7769 -// } -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repository-languages -// -//meta:operation GET /repos/{owner}/{repo}/languages -func (s *RepositoriesService) ListLanguages(ctx context.Context, owner string, repo string) (map[string]int, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/languages", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - languages := make(map[string]int) - resp, err := s.client.Do(ctx, req, &languages) - if err != nil { - return nil, resp, err - } - - return languages, resp, nil -} - -// ListTeams lists the teams for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repository-teams -// -//meta:operation GET /repos/{owner}/{repo}/teams -func (s *RepositoriesService) ListTeams(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/teams", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// RepositoryTag represents a repository tag. -type RepositoryTag struct { - Name *string `json:"name,omitempty"` - Commit *Commit `json:"commit,omitempty"` - ZipballURL *string `json:"zipball_url,omitempty"` - TarballURL *string `json:"tarball_url,omitempty"` -} - -// ListTags lists tags for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repository-tags -// -//meta:operation GET /repos/{owner}/{repo}/tags -func (s *RepositoriesService) ListTags(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*RepositoryTag, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/tags", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var tags []*RepositoryTag - resp, err := s.client.Do(ctx, req, &tags) - if err != nil { - return nil, resp, err - } - - return tags, resp, nil -} - -// Branch represents a repository branch -type Branch struct { - Name *string `json:"name,omitempty"` - Commit *RepositoryCommit `json:"commit,omitempty"` - Protected *bool `json:"protected,omitempty"` -} - -// Protection represents a repository branch's protection. 
-type Protection struct { - RequiredStatusChecks *RequiredStatusChecks `json:"required_status_checks"` - RequiredPullRequestReviews *PullRequestReviewsEnforcement `json:"required_pull_request_reviews"` - EnforceAdmins *AdminEnforcement `json:"enforce_admins"` - Restrictions *BranchRestrictions `json:"restrictions"` - RequireLinearHistory *RequireLinearHistory `json:"required_linear_history"` - AllowForcePushes *AllowForcePushes `json:"allow_force_pushes"` - AllowDeletions *AllowDeletions `json:"allow_deletions"` - RequiredConversationResolution *RequiredConversationResolution `json:"required_conversation_resolution"` - BlockCreations *BlockCreations `json:"block_creations,omitempty"` - LockBranch *LockBranch `json:"lock_branch,omitempty"` - AllowForkSyncing *AllowForkSyncing `json:"allow_fork_syncing,omitempty"` - RequiredSignatures *SignaturesProtectedBranch `json:"required_signatures,omitempty"` - URL *string `json:"url,omitempty"` -} - -// BlockCreations represents whether users can push changes that create branches. If this is true, this -// setting blocks pushes that create new branches, unless the push is initiated by a user, team, or app -// which has the ability to push. -type BlockCreations struct { - Enabled *bool `json:"enabled,omitempty"` -} - -// LockBranch represents if the branch is marked as read-only. If this is true, users will not be able to push to the branch. -type LockBranch struct { - Enabled *bool `json:"enabled,omitempty"` -} - -// AllowForkSyncing represents whether users can pull changes from upstream when the branch is locked. -type AllowForkSyncing struct { - Enabled *bool `json:"enabled,omitempty"` -} - -// BranchProtectionRule represents the rule applied to a repositories branch. -type BranchProtectionRule struct { - ID *int64 `json:"id,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - Name *string `json:"name,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - PullRequestReviewsEnforcementLevel *string `json:"pull_request_reviews_enforcement_level,omitempty"` - RequiredApprovingReviewCount *int `json:"required_approving_review_count,omitempty"` - DismissStaleReviewsOnPush *bool `json:"dismiss_stale_reviews_on_push,omitempty"` - AuthorizedDismissalActorsOnly *bool `json:"authorized_dismissal_actors_only,omitempty"` - IgnoreApprovalsFromContributors *bool `json:"ignore_approvals_from_contributors,omitempty"` - RequireCodeOwnerReview *bool `json:"require_code_owner_review,omitempty"` - RequiredStatusChecks []string `json:"required_status_checks,omitempty"` - RequiredStatusChecksEnforcementLevel *string `json:"required_status_checks_enforcement_level,omitempty"` - StrictRequiredStatusChecksPolicy *bool `json:"strict_required_status_checks_policy,omitempty"` - SignatureRequirementEnforcementLevel *string `json:"signature_requirement_enforcement_level,omitempty"` - LinearHistoryRequirementEnforcementLevel *string `json:"linear_history_requirement_enforcement_level,omitempty"` - AdminEnforced *bool `json:"admin_enforced,omitempty"` - AllowForcePushesEnforcementLevel *string `json:"allow_force_pushes_enforcement_level,omitempty"` - AllowDeletionsEnforcementLevel *string `json:"allow_deletions_enforcement_level,omitempty"` - MergeQueueEnforcementLevel *string `json:"merge_queue_enforcement_level,omitempty"` - RequiredDeploymentsEnforcementLevel *string `json:"required_deployments_enforcement_level,omitempty"` - RequiredConversationResolutionLevel *string 
`json:"required_conversation_resolution_level,omitempty"` - AuthorizedActorsOnly *bool `json:"authorized_actors_only,omitempty"` - AuthorizedActorNames []string `json:"authorized_actor_names,omitempty"` -} - -// ProtectionChanges represents the changes to the rule if the BranchProtection was edited. -type ProtectionChanges struct { - AdminEnforced *AdminEnforcedChanges `json:"admin_enforced,omitempty"` - AllowDeletionsEnforcementLevel *AllowDeletionsEnforcementLevelChanges `json:"allow_deletions_enforcement_level,omitempty"` - AuthorizedActorNames *AuthorizedActorNames `json:"authorized_actor_names,omitempty"` - AuthorizedActorsOnly *AuthorizedActorsOnly `json:"authorized_actors_only,omitempty"` - AuthorizedDismissalActorsOnly *AuthorizedDismissalActorsOnlyChanges `json:"authorized_dismissal_actors_only,omitempty"` - CreateProtected *CreateProtectedChanges `json:"create_protected,omitempty"` - DismissStaleReviewsOnPush *DismissStaleReviewsOnPushChanges `json:"dismiss_stale_reviews_on_push,omitempty"` - LinearHistoryRequirementEnforcementLevel *LinearHistoryRequirementEnforcementLevelChanges `json:"linear_history_requirement_enforcement_level,omitempty"` - PullRequestReviewsEnforcementLevel *PullRequestReviewsEnforcementLevelChanges `json:"pull_request_reviews_enforcement_level,omitempty"` - RequireCodeOwnerReview *RequireCodeOwnerReviewChanges `json:"require_code_owner_review,omitempty"` - RequiredConversationResolutionLevel *RequiredConversationResolutionLevelChanges `json:"required_conversation_resolution_level,omitempty"` - RequiredDeploymentsEnforcementLevel *RequiredDeploymentsEnforcementLevelChanges `json:"required_deployments_enforcement_level,omitempty"` - RequiredStatusChecks *RequiredStatusChecksChanges `json:"required_status_checks,omitempty"` - RequiredStatusChecksEnforcementLevel *RequiredStatusChecksEnforcementLevelChanges `json:"required_status_checks_enforcement_level,omitempty"` - SignatureRequirementEnforcementLevel *SignatureRequirementEnforcementLevelChanges `json:"signature_requirement_enforcement_level,omitempty"` -} - -// AdminEnforcedChanges represents the changes made to the AdminEnforced policy. -type AdminEnforcedChanges struct { - From *bool `json:"from,omitempty"` -} - -// AllowDeletionsEnforcementLevelChanges represents the changes made to the AllowDeletionsEnforcementLevel policy. -type AllowDeletionsEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// AuthorizedActorNames represents who are authorized to edit the branch protection rules. -type AuthorizedActorNames struct { - From []string `json:"from,omitempty"` -} - -// AuthorizedActorsOnly represents if the branch rule can be edited by authorized actors only. -type AuthorizedActorsOnly struct { - From *bool `json:"from,omitempty"` -} - -// AuthorizedDismissalActorsOnlyChanges represents the changes made to the AuthorizedDismissalActorsOnly policy. -type AuthorizedDismissalActorsOnlyChanges struct { - From *bool `json:"from,omitempty"` -} - -// CreateProtectedChanges represents the changes made to the CreateProtected policy. -type CreateProtectedChanges struct { - From *bool `json:"from,omitempty"` -} - -// DismissStaleReviewsOnPushChanges represents the changes made to the DismissStaleReviewsOnPushChanges policy. -type DismissStaleReviewsOnPushChanges struct { - From *bool `json:"from,omitempty"` -} - -// LinearHistoryRequirementEnforcementLevelChanges represents the changes made to the LinearHistoryRequirementEnforcementLevel policy. 
-type LinearHistoryRequirementEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// PullRequestReviewsEnforcementLevelChanges represents the changes made to the PullRequestReviewsEnforcementLevel policy. -type PullRequestReviewsEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// RequireCodeOwnerReviewChanges represents the changes made to the RequireCodeOwnerReview policy. -type RequireCodeOwnerReviewChanges struct { - From *bool `json:"from,omitempty"` -} - -// RequiredConversationResolutionLevelChanges represents the changes made to the RequiredConversationResolutionLevel policy. -type RequiredConversationResolutionLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// RequiredDeploymentsEnforcementLevelChanges represents the changes made to the RequiredDeploymentsEnforcementLevel policy. -type RequiredDeploymentsEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// RequiredStatusChecksChanges represents the changes made to the RequiredStatusChecks policy. -type RequiredStatusChecksChanges struct { - From []string `json:"from,omitempty"` -} - -// RequiredStatusChecksEnforcementLevelChanges represents the changes made to the RequiredStatusChecksEnforcementLevel policy. -type RequiredStatusChecksEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// SignatureRequirementEnforcementLevelChanges represents the changes made to the SignatureRequirementEnforcementLevel policy. -type SignatureRequirementEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// ProtectionRequest represents a request to create/edit a branch's protection. -type ProtectionRequest struct { - RequiredStatusChecks *RequiredStatusChecks `json:"required_status_checks"` - RequiredPullRequestReviews *PullRequestReviewsEnforcementRequest `json:"required_pull_request_reviews"` - EnforceAdmins bool `json:"enforce_admins"` - Restrictions *BranchRestrictionsRequest `json:"restrictions"` - // Enforces a linear commit Git history, which prevents anyone from pushing merge commits to a branch. - RequireLinearHistory *bool `json:"required_linear_history,omitempty"` - // Permits force pushes to the protected branch by anyone with write access to the repository. - AllowForcePushes *bool `json:"allow_force_pushes,omitempty"` - // Allows deletion of the protected branch by anyone with write access to the repository. - AllowDeletions *bool `json:"allow_deletions,omitempty"` - // RequiredConversationResolution, if set to true, requires all comments - // on the pull request to be resolved before it can be merged to a protected branch. - RequiredConversationResolution *bool `json:"required_conversation_resolution,omitempty"` - // BlockCreations, if set to true, will cause the restrictions setting to also block pushes - // which create new branches, unless initiated by a user, team, app with the ability to push. - BlockCreations *bool `json:"block_creations,omitempty"` - // LockBranch, if set to true, will prevent users from pushing to the branch. - LockBranch *bool `json:"lock_branch,omitempty"` - // AllowForkSyncing, if set to true, will allow users to pull changes from upstream - // when the branch is locked. - AllowForkSyncing *bool `json:"allow_fork_syncing,omitempty"` -} - -// RequiredStatusChecks represents the protection status of a individual branch. -type RequiredStatusChecks struct { - // Require branches to be up to date before merging. (Required.) 
- Strict bool `json:"strict"` - // The list of status checks to require in order to merge into this - // branch. (Deprecated. Note: only one of Contexts/Checks can be populated, - // but at least one must be populated). - Contexts []string `json:"contexts,omitempty"` - // The list of status checks to require in order to merge into this - // branch. - Checks []*RequiredStatusCheck `json:"checks,omitempty"` - ContextsURL *string `json:"contexts_url,omitempty"` - URL *string `json:"url,omitempty"` -} - -// RequiredStatusChecksRequest represents a request to edit a protected branch's status checks. -type RequiredStatusChecksRequest struct { - Strict *bool `json:"strict,omitempty"` - // Note: if both Contexts and Checks are populated, - // the GitHub API will only use Checks. - Contexts []string `json:"contexts,omitempty"` - Checks []*RequiredStatusCheck `json:"checks,omitempty"` -} - -// RequiredStatusCheck represents a status check of a protected branch. -type RequiredStatusCheck struct { - // The name of the required check. - Context string `json:"context"` - // The ID of the GitHub App that must provide this check. - // Omit this field to automatically select the GitHub App - // that has recently provided this check, - // or any app if it was not set by a GitHub App. - // Pass -1 to explicitly allow any app to set the status. - AppID *int64 `json:"app_id,omitempty"` -} - -// PullRequestReviewsEnforcement represents the pull request reviews enforcement of a protected branch. -type PullRequestReviewsEnforcement struct { - // Allow specific users, teams, or apps to bypass pull request requirements. - BypassPullRequestAllowances *BypassPullRequestAllowances `json:"bypass_pull_request_allowances,omitempty"` - // Specifies which users, teams and apps can dismiss pull request reviews. - DismissalRestrictions *DismissalRestrictions `json:"dismissal_restrictions,omitempty"` - // Specifies if approved reviews are dismissed automatically, when a new commit is pushed. - DismissStaleReviews bool `json:"dismiss_stale_reviews"` - // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. - RequireCodeOwnerReviews bool `json:"require_code_owner_reviews"` - // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. - // Valid values are 1-6. - RequiredApprovingReviewCount int `json:"required_approving_review_count"` - // RequireLastPushApproval specifies whether the last pusher to a pull request branch can approve it. - RequireLastPushApproval bool `json:"require_last_push_approval"` -} - -// PullRequestReviewsEnforcementRequest represents request to set the pull request review -// enforcement of a protected branch. It is separate from PullRequestReviewsEnforcement above -// because the request structure is different from the response structure. -type PullRequestReviewsEnforcementRequest struct { - // Allow specific users, teams, or apps to bypass pull request requirements. - BypassPullRequestAllowancesRequest *BypassPullRequestAllowancesRequest `json:"bypass_pull_request_allowances,omitempty"` - // Specifies which users, teams and apps should be allowed to dismiss pull request reviews. - // User, team and app dismissal restrictions are only available for - // organization-owned repositories. Must be nil for personal repositories. 
- DismissalRestrictionsRequest *DismissalRestrictionsRequest `json:"dismissal_restrictions,omitempty"` - // Specifies if approved reviews can be dismissed automatically, when a new commit is pushed. (Required) - DismissStaleReviews bool `json:"dismiss_stale_reviews"` - // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. - RequireCodeOwnerReviews bool `json:"require_code_owner_reviews"` - // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. - // Valid values are 1-6. - RequiredApprovingReviewCount int `json:"required_approving_review_count"` - // RequireLastPushApproval specifies whether the last pusher to a pull request branch can approve it. - RequireLastPushApproval *bool `json:"require_last_push_approval,omitempty"` -} - -// PullRequestReviewsEnforcementUpdate represents request to patch the pull request review -// enforcement of a protected branch. It is separate from PullRequestReviewsEnforcementRequest above -// because the patch request does not require all fields to be initialized. -type PullRequestReviewsEnforcementUpdate struct { - // Allow specific users, teams, or apps to bypass pull request requirements. - BypassPullRequestAllowancesRequest *BypassPullRequestAllowancesRequest `json:"bypass_pull_request_allowances,omitempty"` - // Specifies which users, teams and apps can dismiss pull request reviews. Can be omitted. - DismissalRestrictionsRequest *DismissalRestrictionsRequest `json:"dismissal_restrictions,omitempty"` - // Specifies if approved reviews can be dismissed automatically, when a new commit is pushed. Can be omitted. - DismissStaleReviews *bool `json:"dismiss_stale_reviews,omitempty"` - // RequireCodeOwnerReviews specifies if merging pull requests is blocked until code owners have reviewed. - RequireCodeOwnerReviews *bool `json:"require_code_owner_reviews,omitempty"` - // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. - // Valid values are 1 - 6 or 0 to not require reviewers. - RequiredApprovingReviewCount int `json:"required_approving_review_count"` - // RequireLastPushApproval specifies whether the last pusher to a pull request branch can approve it. - RequireLastPushApproval *bool `json:"require_last_push_approval,omitempty"` -} - -// RequireLinearHistory represents the configuration to enforce branches with no merge commit. -type RequireLinearHistory struct { - Enabled bool `json:"enabled"` -} - -// AllowDeletions represents the configuration to accept deletion of protected branches. -type AllowDeletions struct { - Enabled bool `json:"enabled"` -} - -// AllowForcePushes represents the configuration to accept forced pushes on protected branches. -type AllowForcePushes struct { - Enabled bool `json:"enabled"` -} - -// RequiredConversationResolution requires all comments on the pull request to be resolved before it can be -// merged to a protected branch when enabled. -type RequiredConversationResolution struct { - Enabled bool `json:"enabled"` -} - -// AdminEnforcement represents the configuration to enforce required status checks for repository administrators. -type AdminEnforcement struct { - URL *string `json:"url,omitempty"` - Enabled bool `json:"enabled"` -} - -// BranchRestrictions represents the restriction that only certain users or -// teams may push to a branch. -type BranchRestrictions struct { - // The list of user logins with push access. 
- Users []*User `json:"users"` - // The list of team slugs with push access. - Teams []*Team `json:"teams"` - // The list of app slugs with push access. - Apps []*App `json:"apps"` -} - -// BranchRestrictionsRequest represents the request to create/edit the -// restriction that only certain users or teams may push to a branch. It is -// separate from BranchRestrictions above because the request structure is -// different from the response structure. -type BranchRestrictionsRequest struct { - // The list of user logins with push access. (Required; use []string{} instead of nil for empty list.) - Users []string `json:"users"` - // The list of team slugs with push access. (Required; use []string{} instead of nil for empty list.) - Teams []string `json:"teams"` - // The list of app slugs with push access. - Apps []string `json:"apps"` -} - -// BypassPullRequestAllowances represents the people, teams, or apps who are allowed to bypass required pull requests. -type BypassPullRequestAllowances struct { - // The list of users allowed to bypass pull request requirements. - Users []*User `json:"users"` - // The list of teams allowed to bypass pull request requirements. - Teams []*Team `json:"teams"` - // The list of apps allowed to bypass pull request requirements. - Apps []*App `json:"apps"` -} - -// BypassPullRequestAllowancesRequest represents the people, teams, or apps who are -// allowed to bypass required pull requests. -// It is separate from BypassPullRequestAllowances above because the request structure is -// different from the response structure. -type BypassPullRequestAllowancesRequest struct { - // The list of user logins allowed to bypass pull request requirements. - Users []string `json:"users"` - // The list of team slugs allowed to bypass pull request requirements. - Teams []string `json:"teams"` - // The list of app slugs allowed to bypass pull request requirements. - Apps []string `json:"apps"` -} - -// DismissalRestrictions specifies which users and teams can dismiss pull request reviews. -type DismissalRestrictions struct { - // The list of users who can dimiss pull request reviews. - Users []*User `json:"users"` - // The list of teams which can dismiss pull request reviews. - Teams []*Team `json:"teams"` - // The list of apps which can dismiss pull request reviews. - Apps []*App `json:"apps"` -} - -// DismissalRestrictionsRequest represents the request to create/edit the -// restriction to allows only specific users, teams or apps to dimiss pull request reviews. It is -// separate from DismissalRestrictions above because the request structure is -// different from the response structure. -// Note: Both Users and Teams must be nil, or both must be non-nil. -type DismissalRestrictionsRequest struct { - // The list of user logins who can dismiss pull request reviews. (Required; use nil to disable dismissal_restrictions or &[]string{} otherwise.) - Users *[]string `json:"users,omitempty"` - // The list of team slugs which can dismiss pull request reviews. (Required; use nil to disable dismissal_restrictions or &[]string{} otherwise.) - Teams *[]string `json:"teams,omitempty"` - // The list of app slugs which can dismiss pull request reviews. (Required; use nil to disable dismissal_restrictions or &[]string{} otherwise.) - Apps *[]string `json:"apps,omitempty"` -} - -// SignaturesProtectedBranch represents the protection status of an individual branch. 
-type SignaturesProtectedBranch struct { - URL *string `json:"url,omitempty"` - // Commits pushed to matching branches must have verified signatures. - Enabled *bool `json:"enabled,omitempty"` -} - -// AutomatedSecurityFixes represents their status. -type AutomatedSecurityFixes struct { - Enabled *bool `json:"enabled"` - Paused *bool `json:"paused"` -} - -// ListBranches lists branches for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/branches/branches#list-branches -// -//meta:operation GET /repos/{owner}/{repo}/branches -func (s *RepositoriesService) ListBranches(ctx context.Context, owner string, repo string, opts *BranchListOptions) ([]*Branch, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var branches []*Branch - resp, err := s.client.Do(ctx, req, &branches) - if err != nil { - return nil, resp, err - } - - return branches, resp, nil -} - -// GetBranch gets the specified branch for a repository. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branches#get-a-branch -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch} -func (s *RepositoriesService) GetBranch(ctx context.Context, owner, repo, branch string, maxRedirects int) (*Branch, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v", owner, repo, url.PathEscape(branch)) - - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - b := new(Branch) - err = json.NewDecoder(resp.Body).Decode(b) - return b, newResponse(resp), err -} - -// renameBranchRequest represents a request to rename a branch. -type renameBranchRequest struct { - NewName string `json:"new_name"` -} - -// RenameBranch renames a branch in a repository. -// -// To rename a non-default branch: Users must have push access. GitHub Apps must have the `contents:write` repository permission. -// To rename the default branch: Users must have admin or owner permissions. GitHub Apps must have the `administration:write` repository permission. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branches#rename-a-branch -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/rename -func (s *RepositoriesService) RenameBranch(ctx context.Context, owner, repo, branch, newName string) (*Branch, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/rename", owner, repo, url.PathEscape(branch)) - r := &renameBranchRequest{NewName: newName} - req, err := s.client.NewRequest("POST", u, r) - if err != nil { - return nil, nil, err - } - - b := new(Branch) - resp, err := s.client.Do(ctx, req, b) - if err != nil { - return nil, resp, err - } - - return b, resp, nil -} - -// GetBranchProtection gets the protection of a given branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
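
ListBranches and GetBranch above are thin wrappers; note that GetBranch takes an explicit maxRedirects count rather than an options struct. A sketch, with the branch name as a placeholder and the same assumptions as the earlier examples.

// printBranches is an illustrative sketch; same import and client assumptions
// as the earlier examples. "main" is a placeholder branch name.
func printBranches(ctx context.Context, client *github.Client, owner, repo string) error {
	branches, _, err := client.Repositories.ListBranches(ctx, owner, repo, nil)
	if err != nil {
		return err
	}
	for _, b := range branches {
		fmt.Printf("%s protected=%v\n", b.GetName(), b.GetProtected())
	}

	// Follow at most one redirect, e.g. when the branch was recently renamed.
	mainBranch, _, err := client.Repositories.GetBranch(ctx, owner, repo, "main", 1)
	if err != nil {
		return err
	}
	fmt.Println("HEAD of main:", mainBranch.GetCommit().GetSHA())
	return nil
}
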
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-branch-protection -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection -func (s *RepositoriesService) GetBranchProtection(ctx context.Context, owner, repo, branch string) (*Protection, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) - - p := new(Protection) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - if isBranchNotProtected(err) { - err = ErrBranchNotProtected - } - return nil, resp, err - } - - return p, resp, nil -} - -// GetRequiredStatusChecks gets the required status checks for a given protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-status-checks-protection -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks -func (s *RepositoriesService) GetRequiredStatusChecks(ctx context.Context, owner, repo, branch string) (*RequiredStatusChecks, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - p := new(RequiredStatusChecks) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - if isBranchNotProtected(err) { - err = ErrBranchNotProtected - } - return nil, resp, err - } - - return p, resp, nil -} - -// ListRequiredStatusChecksContexts lists the required status checks contexts for a given protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-all-status-check-contexts -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts -func (s *RepositoriesService) ListRequiredStatusChecksContexts(ctx context.Context, owner, repo, branch string) (contexts []string, resp *Response, err error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks/contexts", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - resp, err = s.client.Do(ctx, req, &contexts) - if err != nil { - if isBranchNotProtected(err) { - err = ErrBranchNotProtected - } - return nil, resp, err - } - - return contexts, resp, nil -} - -// UpdateBranchProtection updates the protection of a given branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
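
The protection getters above translate the API's branch-not-protected response into the package-level ErrBranchNotProtected, which callers can test for directly. A sketch under the same assumptions, plus the standard errors package.

// branchProtection is an illustrative sketch; same import and client
// assumptions as the earlier examples, plus "errors" from the standard library.
func branchProtection(ctx context.Context, client *github.Client, owner, repo, branch string) (*github.Protection, error) {
	protection, _, err := client.Repositories.GetBranchProtection(ctx, owner, repo, branch)
	if errors.Is(err, github.ErrBranchNotProtected) {
		// The branch simply has no protection configured.
		return nil, nil
	}
	return protection, err
}
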
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#update-branch-protection -// -//meta:operation PUT /repos/{owner}/{repo}/branches/{branch}/protection -func (s *RepositoriesService) UpdateBranchProtection(ctx context.Context, owner, repo, branch string, preq *ProtectionRequest) (*Protection, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PUT", u, preq) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) - - p := new(Protection) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// RemoveBranchProtection removes the protection of a given branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#delete-branch-protection -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection -func (s *RepositoriesService) RemoveBranchProtection(ctx context.Context, owner, repo, branch string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetSignaturesProtectedBranch gets required signatures of protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-commit-signature-protection -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures -func (s *RepositoriesService) GetSignaturesProtectedBranch(ctx context.Context, owner, repo, branch string) (*SignaturesProtectedBranch, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeSignaturePreview) - - p := new(SignaturesProtectedBranch) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// RequireSignaturesOnProtectedBranch makes signed commits required on a protected branch. -// It requires admin access and branch protection to be enabled. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
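
ProtectionRequest (defined earlier in this file) is the write-side shape for UpdateBranchProtection above. A sketch that requires one passing check and one approving review, under the same assumptions; the check name is a placeholder.

// protectBranch is an illustrative sketch; same import and client assumptions
// as the earlier examples. "ci/build" is a placeholder check name.
func protectBranch(ctx context.Context, client *github.Client, owner, repo, branch string) error {
	preq := &github.ProtectionRequest{
		RequiredStatusChecks: &github.RequiredStatusChecks{
			Strict: true,
			Checks: []*github.RequiredStatusCheck{{Context: "ci/build"}},
		},
		RequiredPullRequestReviews: &github.PullRequestReviewsEnforcementRequest{
			DismissStaleReviews:          true,
			RequiredApprovingReviewCount: 1,
		},
		EnforceAdmins: true,
		Restrictions:  nil, // no user/team/app push restrictions
	}
	protection, _, err := client.Repositories.UpdateBranchProtection(ctx, owner, repo, branch, preq)
	if err != nil {
		return err
	}
	fmt.Println("protection configured:", protection.GetURL())
	return nil
}
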
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#create-commit-signature-protection -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures -func (s *RepositoriesService) RequireSignaturesOnProtectedBranch(ctx context.Context, owner, repo, branch string) (*SignaturesProtectedBranch, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeSignaturePreview) - - r := new(SignaturesProtectedBranch) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// OptionalSignaturesOnProtectedBranch removes required signed commits on a given branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#delete-commit-signature-protection -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures -func (s *RepositoriesService) OptionalSignaturesOnProtectedBranch(ctx context.Context, owner, repo, branch string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeSignaturePreview) - - return s.client.Do(ctx, req, nil) -} - -// UpdateRequiredStatusChecks updates the required status checks for a given protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#update-status-check-protection -// -//meta:operation PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks -func (s *RepositoriesService) UpdateRequiredStatusChecks(ctx context.Context, owner, repo, branch string, sreq *RequiredStatusChecksRequest) (*RequiredStatusChecks, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PATCH", u, sreq) - if err != nil { - return nil, nil, err - } - - sc := new(RequiredStatusChecks) - resp, err := s.client.Do(ctx, req, sc) - if err != nil { - return nil, resp, err - } - - return sc, resp, nil -} - -// RemoveRequiredStatusChecks removes the required status checks for a given protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
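
Unlike the full ProtectionRequest, RequiredStatusChecksRequest uses pointer and omitempty fields, so a PATCH can tighten just the strict flag without resending the check list in the request body. A sketch under the same assumptions as the earlier examples.

// requireUpToDateBranches is an illustrative sketch; same import and client
// assumptions as the earlier examples.
func requireUpToDateBranches(ctx context.Context, client *github.Client, owner, repo, branch string) error {
	sreq := &github.RequiredStatusChecksRequest{
		Strict: github.Bool(true), // Contexts/Checks omitted from the PATCH body
	}
	checks, _, err := client.Repositories.UpdateRequiredStatusChecks(ctx, owner, repo, branch, sreq)
	if err != nil {
		return err
	}
	fmt.Println("strict:", checks.Strict)
	return nil
}
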
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#remove-status-check-protection -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks -func (s *RepositoriesService) RemoveRequiredStatusChecks(ctx context.Context, owner, repo, branch string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// License gets the contents of a repository's license if one is detected. -// -// GitHub API docs: https://docs.github.com/rest/licenses/licenses#get-the-license-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/license -func (s *RepositoriesService) License(ctx context.Context, owner, repo string) (*RepositoryLicense, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/license", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - r := &RepositoryLicense{} - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// GetPullRequestReviewEnforcement gets pull request review enforcement of a protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-pull-request-review-protection -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews -func (s *RepositoriesService) GetPullRequestReviewEnforcement(ctx context.Context, owner, repo, branch string) (*PullRequestReviewsEnforcement, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) - - r := new(PullRequestReviewsEnforcement) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// UpdatePullRequestReviewEnforcement patches pull request review enforcement of a protected branch. -// It requires admin access and branch protection to be enabled. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#update-pull-request-review-protection -// -//meta:operation PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews -func (s *RepositoriesService) UpdatePullRequestReviewEnforcement(ctx context.Context, owner, repo, branch string, patch *PullRequestReviewsEnforcementUpdate) (*PullRequestReviewsEnforcement, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PATCH", u, patch) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) - - r := new(PullRequestReviewsEnforcement) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// DisableDismissalRestrictions disables dismissal restrictions of a protected branch. -// It requires admin access and branch protection to be enabled. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#update-pull-request-review-protection -// -//meta:operation PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews -func (s *RepositoriesService) DisableDismissalRestrictions(ctx context.Context, owner, repo, branch string) (*PullRequestReviewsEnforcement, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) - - data := new(struct { - DismissalRestrictionsRequest `json:"dismissal_restrictions"` - }) - - req, err := s.client.NewRequest("PATCH", u, data) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) - - r := new(PullRequestReviewsEnforcement) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// RemovePullRequestReviewEnforcement removes pull request enforcement of a protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#delete-pull-request-review-protection -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews -func (s *RepositoriesService) RemovePullRequestReviewEnforcement(ctx context.Context, owner, repo, branch string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetAdminEnforcement gets admin enforcement information of a protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
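
PullRequestReviewsEnforcementUpdate above is the PATCH shape: RequiredApprovingReviewCount is always sent, the pointer fields only when set. A sketch under the same assumptions as the earlier examples.

// requireTwoApprovals is an illustrative sketch; same import and client
// assumptions as the earlier examples.
func requireTwoApprovals(ctx context.Context, client *github.Client, owner, repo, branch string) error {
	patch := &github.PullRequestReviewsEnforcementUpdate{
		RequiredApprovingReviewCount: 2,
		RequireCodeOwnerReviews:      github.Bool(true),
	}
	enforcement, _, err := client.Repositories.UpdatePullRequestReviewEnforcement(ctx, owner, repo, branch, patch)
	if err != nil {
		return err
	}
	fmt.Println("approvals required:", enforcement.RequiredApprovingReviewCount)
	return nil
}
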
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-admin-branch-protection -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins -func (s *RepositoriesService) GetAdminEnforcement(ctx context.Context, owner, repo, branch string) (*AdminEnforcement, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/enforce_admins", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - r := new(AdminEnforcement) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// AddAdminEnforcement adds admin enforcement to a protected branch. -// It requires admin access and branch protection to be enabled. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#set-admin-branch-protection -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins -func (s *RepositoriesService) AddAdminEnforcement(ctx context.Context, owner, repo, branch string) (*AdminEnforcement, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/enforce_admins", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - r := new(AdminEnforcement) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// RemoveAdminEnforcement removes admin enforcement from a protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#delete-admin-branch-protection -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins -func (s *RepositoriesService) RemoveAdminEnforcement(ctx context.Context, owner, repo, branch string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/enforce_admins", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// repositoryTopics represents a collection of repository topics. -type repositoryTopics struct { - Names []string `json:"names"` -} - -// ListAllTopics lists topics for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#get-all-repository-topics -// -//meta:operation GET /repos/{owner}/{repo}/topics -func (s *RepositoriesService) ListAllTopics(ctx context.Context, owner, repo string) ([]string, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/topics", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeTopicsPreview) - - topics := new(repositoryTopics) - resp, err := s.client.Do(ctx, req, topics) - if err != nil { - return nil, resp, err - } - - return topics.Names, resp, nil -} - -// ReplaceAllTopics replaces all repository topics. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#replace-all-repository-topics -// -//meta:operation PUT /repos/{owner}/{repo}/topics -func (s *RepositoriesService) ReplaceAllTopics(ctx context.Context, owner, repo string, topics []string) ([]string, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/topics", owner, repo) - t := &repositoryTopics{ - Names: topics, - } - if t.Names == nil { - t.Names = []string{} - } - req, err := s.client.NewRequest("PUT", u, t) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeTopicsPreview) - - t = new(repositoryTopics) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t.Names, resp, nil -} - -// ListApps lists the GitHub apps that have push access to a given protected branch. -// It requires the GitHub apps to have `write` access to the `content` permission. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// Deprecated: Please use ListAppRestrictions instead. -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-apps-with-access-to-the-protected-branch -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps -func (s *RepositoriesService) ListApps(ctx context.Context, owner, repo, branch string) ([]*App, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var apps []*App - resp, err := s.client.Do(ctx, req, &apps) - if err != nil { - return nil, resp, err - } - - return apps, resp, nil -} - -// ListAppRestrictions lists the GitHub apps that have push access to a given protected branch. -// It requires the GitHub apps to have `write` access to the `content` permission. -// -// Note: This is a wrapper around ListApps so a naming convention with ListUserRestrictions and ListTeamRestrictions is preserved. -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-apps-with-access-to-the-protected-branch -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps -func (s *RepositoriesService) ListAppRestrictions(ctx context.Context, owner, repo, branch string) ([]*App, *Response, error) { - return s.ListApps(ctx, owner, repo, branch) -} - -// ReplaceAppRestrictions replaces the apps that have push access to a given protected branch. -// It removes all apps that previously had push access and grants push access to the new list of apps. -// It requires the GitHub apps to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#set-app-access-restrictions -// -//meta:operation PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps -func (s *RepositoriesService) ReplaceAppRestrictions(ctx context.Context, owner, repo, branch string, apps []string) ([]*App, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PUT", u, apps) - if err != nil { - return nil, nil, err - } - - var newApps []*App - resp, err := s.client.Do(ctx, req, &newApps) - if err != nil { - return nil, resp, err - } - - return newApps, resp, nil -} - -// AddAppRestrictions grants the specified apps push access to a given protected branch. -// It requires the GitHub apps to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#add-app-access-restrictions -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps -func (s *RepositoriesService) AddAppRestrictions(ctx context.Context, owner, repo, branch string, apps []string) ([]*App, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("POST", u, apps) - if err != nil { - return nil, nil, err - } - - var newApps []*App - resp, err := s.client.Do(ctx, req, &newApps) - if err != nil { - return nil, resp, err - } - - return newApps, resp, nil -} - -// RemoveAppRestrictions removes the restrictions of an app from pushing to this branch. -// It requires the GitHub apps to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#remove-app-access-restrictions -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps -func (s *RepositoriesService) RemoveAppRestrictions(ctx context.Context, owner, repo, branch string, apps []string) ([]*App, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, apps) - if err != nil { - return nil, nil, err - } - - var newApps []*App - resp, err := s.client.Do(ctx, req, &newApps) - if err != nil { - return nil, resp, err - } - - return newApps, resp, nil -} - -// ListTeamRestrictions lists the GitHub teams that have push access to a given protected branch. -// It requires the GitHub teams to have `write` access to the `content` permission. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-teams-with-access-to-the-protected-branch -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams -func (s *RepositoriesService) ListTeamRestrictions(ctx context.Context, owner, repo, branch string) ([]*Team, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// ReplaceTeamRestrictions replaces the team that have push access to a given protected branch. -// This removes all teams that previously had push access and grants push access to the new list of teams. -// It requires the GitHub teams to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#set-team-access-restrictions -// -//meta:operation PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams -func (s *RepositoriesService) ReplaceTeamRestrictions(ctx context.Context, owner, repo, branch string, teams []string) ([]*Team, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PUT", u, teams) - if err != nil { - return nil, nil, err - } - - var newTeams []*Team - resp, err := s.client.Do(ctx, req, &newTeams) - if err != nil { - return nil, resp, err - } - - return newTeams, resp, nil -} - -// AddTeamRestrictions grants the specified teams push access to a given protected branch. -// It requires the GitHub teams to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#add-team-access-restrictions -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams -func (s *RepositoriesService) AddTeamRestrictions(ctx context.Context, owner, repo, branch string, teams []string) ([]*Team, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("POST", u, teams) - if err != nil { - return nil, nil, err - } - - var newTeams []*Team - resp, err := s.client.Do(ctx, req, &newTeams) - if err != nil { - return nil, resp, err - } - - return newTeams, resp, nil -} - -// RemoveTeamRestrictions removes the restrictions of a team from pushing to this branch. -// It requires the GitHub teams to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#remove-team-access-restrictions -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams -func (s *RepositoriesService) RemoveTeamRestrictions(ctx context.Context, owner, repo, branch string, teams []string) ([]*Team, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, teams) - if err != nil { - return nil, nil, err - } - - var newTeams []*Team - resp, err := s.client.Do(ctx, req, &newTeams) - if err != nil { - return nil, resp, err - } - - return newTeams, resp, nil -} - -// ListUserRestrictions lists the GitHub users that have push access to a given protected branch. -// It requires the GitHub users to have `write` access to the `content` permission. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-users-with-access-to-the-protected-branch -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users -func (s *RepositoriesService) ListUserRestrictions(ctx context.Context, owner, repo, branch string) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// ReplaceUserRestrictions replaces the user that have push access to a given protected branch. -// It removes all users that previously had push access and grants push access to the new list of users. -// It requires the GitHub users to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#set-user-access-restrictions -// -//meta:operation PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users -func (s *RepositoriesService) ReplaceUserRestrictions(ctx context.Context, owner, repo, branch string, users []string) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PUT", u, users) - if err != nil { - return nil, nil, err - } - - var newUsers []*User - resp, err := s.client.Do(ctx, req, &newUsers) - if err != nil { - return nil, resp, err - } - - return newUsers, resp, nil -} - -// AddUserRestrictions grants the specified users push access to a given protected branch. -// It requires the GitHub users to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#add-user-access-restrictions -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users -func (s *RepositoriesService) AddUserRestrictions(ctx context.Context, owner, repo, branch string, users []string) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("POST", u, users) - if err != nil { - return nil, nil, err - } - - var newUsers []*User - resp, err := s.client.Do(ctx, req, &newUsers) - if err != nil { - return nil, resp, err - } - - return newUsers, resp, nil -} - -// RemoveUserRestrictions removes the restrictions of a user from pushing to this branch. -// It requires the GitHub users to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#remove-user-access-restrictions -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users -func (s *RepositoriesService) RemoveUserRestrictions(ctx context.Context, owner, repo, branch string, users []string) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, users) - if err != nil { - return nil, nil, err - } - - var newUsers []*User - resp, err := s.client.Do(ctx, req, &newUsers) - if err != nil { - return nil, resp, err - } - - return newUsers, resp, nil -} - -// TransferRequest represents a request to transfer a repository. -type TransferRequest struct { - NewOwner string `json:"new_owner"` - NewName *string `json:"new_name,omitempty"` - TeamID []int64 `json:"team_ids,omitempty"` -} - -// Transfer transfers a repository from one account or organization to another. -// -// This method might return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it has now scheduled the transfer of the repository in a background task. -// A follow up request, after a delay of a second or so, should result -// in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#transfer-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/transfer -func (s *RepositoriesService) Transfer(ctx context.Context, owner, repo string, transfer TransferRequest) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/transfer", owner, repo) - - req, err := s.client.NewRequest("POST", u, &transfer) - if err != nil { - return nil, nil, err - } - - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// DispatchRequestOptions represents a request to trigger a repository_dispatch event. -type DispatchRequestOptions struct { - // EventType is a custom webhook event name. (Required.) - EventType string `json:"event_type"` - // ClientPayload is a custom JSON payload with extra information about the webhook event. - // Defaults to an empty JSON object. - ClientPayload *json.RawMessage `json:"client_payload,omitempty"` -} - -// Dispatch triggers a repository_dispatch event in a GitHub Actions workflow. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#create-a-repository-dispatch-event -// -//meta:operation POST /repos/{owner}/{repo}/dispatches -func (s *RepositoriesService) Dispatch(ctx context.Context, owner, repo string, opts DispatchRequestOptions) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/dispatches", owner, repo) - - req, err := s.client.NewRequest("POST", u, &opts) - if err != nil { - return nil, nil, err - } - - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// isBranchNotProtected determines whether a branch is not protected -// based on the error message returned by GitHub API. -func isBranchNotProtected(err error) bool { - errorResponse, ok := err.(*ErrorResponse) - return ok && errorResponse.Message == githubBranchNotProtected -} - -// EnablePrivateReporting enables private reporting of vulnerabilities for a -// repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#enable-private-vulnerability-reporting-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/private-vulnerability-reporting -func (s *RepositoriesService) EnablePrivateReporting(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/private-vulnerability-reporting", owner, repo) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// DisablePrivateReporting disables private reporting of vulnerabilities for a -// repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#disable-private-vulnerability-reporting-for-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/private-vulnerability-reporting -func (s *RepositoriesService) DisablePrivateReporting(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/private-vulnerability-reporting", owner, repo) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_actions_access.go b/vendor/github.com/google/go-github/v57/github/repos_actions_access.go deleted file mode 100644 index 2da1f01c..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_actions_access.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RepositoryActionsAccessLevel represents the repository actions access level. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-the-level-of-access-for-workflows-outside-of-the-repository -type RepositoryActionsAccessLevel struct { - // AccessLevel specifies the level of access that workflows outside of the repository have - // to actions and reusable workflows within the repository. - // Possible values are: "none", "organization" "enterprise". - AccessLevel *string `json:"access_level,omitempty"` -} - -// GetActionsAccessLevel gets the level of access that workflows outside of the repository have -// to actions and reusable workflows in the repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-the-level-of-access-for-workflows-outside-of-the-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/permissions/access -func (s *RepositoriesService) GetActionsAccessLevel(ctx context.Context, owner, repo string) (*RepositoryActionsAccessLevel, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions/access", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - raal := new(RepositoryActionsAccessLevel) - resp, err := s.client.Do(ctx, req, raal) - if err != nil { - return nil, resp, err - } - - return raal, resp, nil -} - -// EditActionsAccessLevel sets the level of access that workflows outside of the repository have -// to actions and reusable workflows in the repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-the-level-of-access-for-workflows-outside-of-the-repository -// -//meta:operation PUT /repos/{owner}/{repo}/actions/permissions/access -func (s *RepositoriesService) EditActionsAccessLevel(ctx context.Context, owner, repo string, repositoryActionsAccessLevel RepositoryActionsAccessLevel) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions/access", owner, repo) - req, err := s.client.NewRequest("PUT", u, repositoryActionsAccessLevel) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_actions_allowed.go b/vendor/github.com/google/go-github/v57/github/repos_actions_allowed.go deleted file mode 100644 index e9ebff1d..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_actions_allowed.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetActionsAllowed gets the allowed actions and reusable workflows for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/permissions/selected-actions -func (s *RepositoriesService) GetActionsAllowed(ctx context.Context, org, repo string) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions/selected-actions", org, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionsAllowed := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, actionsAllowed) - if err != nil { - return nil, resp, err - } - - return actionsAllowed, resp, nil -} - -// EditActionsAllowed sets the allowed actions and reusable workflows for a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/actions/permissions/selected-actions -func (s *RepositoriesService) EditActionsAllowed(ctx context.Context, org, repo string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions/selected-actions", org, repo) - req, err := s.client.NewRequest("PUT", u, actionsAllowed) - if err != nil { - return nil, nil, err - } - - p := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_actions_permissions.go b/vendor/github.com/google/go-github/v57/github/repos_actions_permissions.go deleted file mode 100644 index 2dcc367d..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_actions_permissions.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ActionsPermissionsRepository represents a policy for repositories and allowed actions in a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions -type ActionsPermissionsRepository struct { - Enabled *bool `json:"enabled,omitempty"` - AllowedActions *string `json:"allowed_actions,omitempty"` - SelectedActionsURL *string `json:"selected_actions_url,omitempty"` -} - -func (a ActionsPermissionsRepository) String() string { - return Stringify(a) -} - -// GetActionsPermissions gets the GitHub Actions permissions policy for repositories and allowed actions in a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-github-actions-permissions-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/permissions -func (s *RepositoriesService) GetActionsPermissions(ctx context.Context, owner, repo string) (*ActionsPermissionsRepository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - permissions := new(ActionsPermissionsRepository) - resp, err := s.client.Do(ctx, req, permissions) - if err != nil { - return nil, resp, err - } - - return permissions, resp, nil -} - -// EditActionsPermissions sets the permissions policy for repositories and allowed actions in a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-github-actions-permissions-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/actions/permissions -func (s *RepositoriesService) EditActionsPermissions(ctx context.Context, owner, repo string, actionsPermissionsRepository ActionsPermissionsRepository) (*ActionsPermissionsRepository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions", owner, repo) - req, err := s.client.NewRequest("PUT", u, actionsPermissionsRepository) - if err != nil { - return nil, nil, err - } - - permissions := new(ActionsPermissionsRepository) - resp, err := s.client.Do(ctx, req, permissions) - if err != nil { - return nil, resp, err - } - - return permissions, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_autolinks.go b/vendor/github.com/google/go-github/v57/github/repos_autolinks.go deleted file mode 100644 index 200605aa..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_autolinks.go +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// AutolinkOptions specifies parameters for RepositoriesService.AddAutolink method. -type AutolinkOptions struct { - KeyPrefix *string `json:"key_prefix,omitempty"` - URLTemplate *string `json:"url_template,omitempty"` - IsAlphanumeric *bool `json:"is_alphanumeric,omitempty"` -} - -// Autolink represents autolinks to external resources like JIRA issues and Zendesk tickets. -type Autolink struct { - ID *int64 `json:"id,omitempty"` - KeyPrefix *string `json:"key_prefix,omitempty"` - URLTemplate *string `json:"url_template,omitempty"` - IsAlphanumeric *bool `json:"is_alphanumeric,omitempty"` -} - -// ListAutolinks returns a list of autolinks configured for the given repository. -// Information about autolinks are only available to repository administrators. -// -// GitHub API docs: https://docs.github.com/rest/repos/autolinks#list-all-autolinks-of-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/autolinks -func (s *RepositoriesService) ListAutolinks(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Autolink, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/autolinks", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var autolinks []*Autolink - resp, err := s.client.Do(ctx, req, &autolinks) - if err != nil { - return nil, resp, err - } - - return autolinks, resp, nil -} - -// AddAutolink creates an autolink reference for a repository. -// Users with admin access to the repository can create an autolink. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/autolinks#create-an-autolink-reference-for-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/autolinks -func (s *RepositoriesService) AddAutolink(ctx context.Context, owner, repo string, opts *AutolinkOptions) (*Autolink, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/autolinks", owner, repo) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - al := new(Autolink) - resp, err := s.client.Do(ctx, req, al) - if err != nil { - return nil, resp, err - } - return al, resp, nil -} - -// GetAutolink returns a single autolink reference by ID that was configured for the given repository. -// Information about autolinks are only available to repository administrators. -// -// GitHub API docs: https://docs.github.com/rest/repos/autolinks#get-an-autolink-reference-of-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/autolinks/{autolink_id} -func (s *RepositoriesService) GetAutolink(ctx context.Context, owner, repo string, id int64) (*Autolink, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/autolinks/%v", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var autolink *Autolink - resp, err := s.client.Do(ctx, req, &autolink) - if err != nil { - return nil, resp, err - } - - return autolink, resp, nil -} - -// DeleteAutolink deletes a single autolink reference by ID that was configured for the given repository. -// Information about autolinks are only available to repository administrators. -// -// GitHub API docs: https://docs.github.com/rest/repos/autolinks#delete-an-autolink-reference-from-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/autolinks/{autolink_id} -func (s *RepositoriesService) DeleteAutolink(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/autolinks/%v", owner, repo, id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_codeowners.go b/vendor/github.com/google/go-github/v57/github/repos_codeowners.go deleted file mode 100644 index 93eeae09..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_codeowners.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetCodeownersErrorsOptions specifies the optional parameters to the -// RepositoriesService.GetCodeownersErrors method. -type GetCodeownersErrorsOptions struct { - // A branch, tag or commit name used to determine which version of the CODEOWNERS file to use. - // Default: the repository's default branch (e.g. main). - Ref string `url:"ref,omitempty"` -} - -// CodeownersErrors represents a list of syntax errors detected in the CODEOWNERS file. -type CodeownersErrors struct { - Errors []*CodeownersError `json:"errors"` -} - -// CodeownersError represents a syntax error detected in the CODEOWNERS file. 
-type CodeownersError struct { - Line int `json:"line"` - Column int `json:"column"` - Kind string `json:"kind"` - Source string `json:"source"` - Suggestion *string `json:"suggestion,omitempty"` - Message string `json:"message"` - Path string `json:"path"` -} - -// GetCodeownersErrors lists any syntax errors that are detected in the CODEOWNERS file. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-codeowners-errors -// -//meta:operation GET /repos/{owner}/{repo}/codeowners/errors -func (s *RepositoriesService) GetCodeownersErrors(ctx context.Context, owner, repo string, opts *GetCodeownersErrorsOptions) (*CodeownersErrors, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/codeowners/errors", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - codeownersErrors := &CodeownersErrors{} - resp, err := s.client.Do(ctx, req, codeownersErrors) - if err != nil { - return nil, resp, err - } - - return codeownersErrors, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_collaborators.go b/vendor/github.com/google/go-github/v57/github/repos_collaborators.go deleted file mode 100644 index 15a4e77a..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_collaborators.go +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListCollaboratorsOptions specifies the optional parameters to the -// RepositoriesService.ListCollaborators method. -type ListCollaboratorsOptions struct { - // Affiliation specifies how collaborators should be filtered by their affiliation. - // Possible values are: - // outside - All outside collaborators of an organization-owned repository - // direct - All collaborators with permissions to an organization-owned repository, - // regardless of organization membership status - // all - All collaborators the authenticated user can see - // - // Default value is "all". - Affiliation string `url:"affiliation,omitempty"` - - // Permission specifies how collaborators should be filtered by the permissions they have on the repository. - // Possible values are: - // "pull", "triage", "push", "maintain", "admin" - // - // If not specified, all collaborators will be returned. - Permission string `url:"permission,omitempty"` - - ListOptions -} - -// CollaboratorInvitation represents an invitation created when adding a collaborator. -// GitHub API docs: https://docs.github.com/rest/repos/collaborators/#response-when-a-new-invitation-is-created -type CollaboratorInvitation struct { - ID *int64 `json:"id,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Invitee *User `json:"invitee,omitempty"` - Inviter *User `json:"inviter,omitempty"` - Permissions *string `json:"permissions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` -} - -// ListCollaborators lists the GitHub users that have access to the repository. 
-// -// GitHub API docs: https://docs.github.com/rest/collaborators/collaborators#list-repository-collaborators -// -//meta:operation GET /repos/{owner}/{repo}/collaborators -func (s *RepositoriesService) ListCollaborators(ctx context.Context, owner, repo string, opts *ListCollaboratorsOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/collaborators", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// IsCollaborator checks whether the specified GitHub user has collaborator -// access to the given repo. -// Note: This will return false if the user is not a collaborator OR the user -// is not a GitHub user. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/collaborators#check-if-a-user-is-a-repository-collaborator -// -//meta:operation GET /repos/{owner}/{repo}/collaborators/{username} -func (s *RepositoriesService) IsCollaborator(ctx context.Context, owner, repo, user string) (bool, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/collaborators/%v", owner, repo, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - isCollab, err := parseBoolResponse(err) - return isCollab, resp, err -} - -// RepositoryPermissionLevel represents the permission level an organization -// member has for a given repository. -type RepositoryPermissionLevel struct { - // Possible values: "admin", "write", "read", "none" - Permission *string `json:"permission,omitempty"` - - User *User `json:"user,omitempty"` -} - -// GetPermissionLevel retrieves the specific permission level a collaborator has for a given repository. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/collaborators#get-repository-permissions-for-a-user -// -//meta:operation GET /repos/{owner}/{repo}/collaborators/{username}/permission -func (s *RepositoriesService) GetPermissionLevel(ctx context.Context, owner, repo, user string) (*RepositoryPermissionLevel, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/collaborators/%v/permission", owner, repo, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - rpl := new(RepositoryPermissionLevel) - resp, err := s.client.Do(ctx, req, rpl) - if err != nil { - return nil, resp, err - } - - return rpl, resp, nil -} - -// RepositoryAddCollaboratorOptions specifies the optional parameters to the -// RepositoriesService.AddCollaborator method. -type RepositoryAddCollaboratorOptions struct { - // Permission specifies the permission to grant the user on this repository. - // Possible values are: - // pull - team members can pull, but not push to or administer this repository - // push - team members can pull and push, but not administer this repository - // admin - team members can pull, push and administer this repository - // maintain - team members can manage the repository without access to sensitive or destructive actions. - // triage - team members can proactively manage issues and pull requests without write access. - // - // Default value is "push". This option is only valid for organization-owned repositories. 
- Permission string `json:"permission,omitempty"` -} - -// AddCollaborator sends an invitation to the specified GitHub user -// to become a collaborator to the given repo. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/collaborators#add-a-repository-collaborator -// -//meta:operation PUT /repos/{owner}/{repo}/collaborators/{username} -func (s *RepositoriesService) AddCollaborator(ctx context.Context, owner, repo, user string, opts *RepositoryAddCollaboratorOptions) (*CollaboratorInvitation, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/collaborators/%v", owner, repo, user) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - acr := new(CollaboratorInvitation) - resp, err := s.client.Do(ctx, req, acr) - if err != nil { - return nil, resp, err - } - - return acr, resp, nil -} - -// RemoveCollaborator removes the specified GitHub user as collaborator from the given repo. -// Note: Does not return error if a valid user that is not a collaborator is removed. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/collaborators#remove-a-repository-collaborator -// -//meta:operation DELETE /repos/{owner}/{repo}/collaborators/{username} -func (s *RepositoriesService) RemoveCollaborator(ctx context.Context, owner, repo, user string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/collaborators/%v", owner, repo, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_comments.go b/vendor/github.com/google/go-github/v57/github/repos_comments.go deleted file mode 100644 index 766a614c..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_comments.go +++ /dev/null @@ -1,173 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RepositoryComment represents a comment for a commit, file, or line in a repository. -type RepositoryComment struct { - HTMLURL *string `json:"html_url,omitempty"` - URL *string `json:"url,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - User *User `json:"user,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - - // User-mutable fields - Body *string `json:"body"` - // User-initialized fields - Path *string `json:"path,omitempty"` - Position *int `json:"position,omitempty"` -} - -func (r RepositoryComment) String() string { - return Stringify(r) -} - -// ListComments lists all the comments for the repository. -// -// GitHub API docs: https://docs.github.com/rest/commits/comments#list-commit-comments-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/comments -func (s *RepositoriesService) ListComments(ctx context.Context, owner, repo string, opts *ListOptions) ([]*RepositoryComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeReactionsPreview) - - var comments []*RepositoryComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// ListCommitComments lists all the comments for a given commit SHA. -// -// GitHub API docs: https://docs.github.com/rest/commits/comments#list-commit-comments -// -//meta:operation GET /repos/{owner}/{repo}/commits/{commit_sha}/comments -func (s *RepositoriesService) ListCommitComments(ctx context.Context, owner, repo, sha string, opts *ListOptions) ([]*RepositoryComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/comments", owner, repo, sha) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var comments []*RepositoryComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// CreateComment creates a comment for the given commit. -// Note: GitHub allows for comments to be created for non-existing files and positions. -// -// GitHub API docs: https://docs.github.com/rest/commits/comments#create-a-commit-comment -// -//meta:operation POST /repos/{owner}/{repo}/commits/{commit_sha}/comments -func (s *RepositoriesService) CreateComment(ctx context.Context, owner, repo, sha string, comment *RepositoryComment) (*RepositoryComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/comments", owner, repo, sha) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(RepositoryComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// GetComment gets a single comment from a repository. -// -// GitHub API docs: https://docs.github.com/rest/commits/comments#get-a-commit-comment -// -//meta:operation GET /repos/{owner}/{repo}/comments/{comment_id} -func (s *RepositoriesService) GetComment(ctx context.Context, owner, repo string, id int64) (*RepositoryComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - c := new(RepositoryComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// UpdateComment updates the body of a single comment. -// -// GitHub API docs: https://docs.github.com/rest/commits/comments#update-a-commit-comment -// -//meta:operation PATCH /repos/{owner}/{repo}/comments/{comment_id} -func (s *RepositoriesService) UpdateComment(ctx context.Context, owner, repo string, id int64, comment *RepositoryComment) (*RepositoryComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(RepositoryComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// DeleteComment deletes a single comment from a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/commits/comments#delete-a-commit-comment -// -//meta:operation DELETE /repos/{owner}/{repo}/comments/{comment_id} -func (s *RepositoriesService) DeleteComment(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_commits.go b/vendor/github.com/google/go-github/v57/github/repos_commits.go deleted file mode 100644 index cca7430c..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_commits.go +++ /dev/null @@ -1,325 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" - "fmt" - "net/url" - "time" -) - -// RepositoryCommit represents a commit in a repo. -// Note that it's wrapping a Commit, so author/committer information is in two places, -// but contain different details about them: in RepositoryCommit "github details", in Commit - "git details". -type RepositoryCommit struct { - NodeID *string `json:"node_id,omitempty"` - SHA *string `json:"sha,omitempty"` - Commit *Commit `json:"commit,omitempty"` - Author *User `json:"author,omitempty"` - Committer *User `json:"committer,omitempty"` - Parents []*Commit `json:"parents,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - URL *string `json:"url,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - - // Details about how many changes were made in this commit. Only filled in during GetCommit! - Stats *CommitStats `json:"stats,omitempty"` - // Details about which files, and how this commit touched. Only filled in during GetCommit! - Files []*CommitFile `json:"files,omitempty"` -} - -func (r RepositoryCommit) String() string { - return Stringify(r) -} - -// CommitStats represents the number of additions / deletions from a file in a given RepositoryCommit or GistCommit. -type CommitStats struct { - Additions *int `json:"additions,omitempty"` - Deletions *int `json:"deletions,omitempty"` - Total *int `json:"total,omitempty"` -} - -func (c CommitStats) String() string { - return Stringify(c) -} - -// CommitFile represents a file modified in a commit. -type CommitFile struct { - SHA *string `json:"sha,omitempty"` - Filename *string `json:"filename,omitempty"` - Additions *int `json:"additions,omitempty"` - Deletions *int `json:"deletions,omitempty"` - Changes *int `json:"changes,omitempty"` - Status *string `json:"status,omitempty"` - Patch *string `json:"patch,omitempty"` - BlobURL *string `json:"blob_url,omitempty"` - RawURL *string `json:"raw_url,omitempty"` - ContentsURL *string `json:"contents_url,omitempty"` - PreviousFilename *string `json:"previous_filename,omitempty"` -} - -func (c CommitFile) String() string { - return Stringify(c) -} - -// CommitsComparison is the result of comparing two commits. -// See CompareCommits() for details. 
-type CommitsComparison struct { - BaseCommit *RepositoryCommit `json:"base_commit,omitempty"` - MergeBaseCommit *RepositoryCommit `json:"merge_base_commit,omitempty"` - - // Head can be 'behind' or 'ahead' - Status *string `json:"status,omitempty"` - AheadBy *int `json:"ahead_by,omitempty"` - BehindBy *int `json:"behind_by,omitempty"` - TotalCommits *int `json:"total_commits,omitempty"` - - Commits []*RepositoryCommit `json:"commits,omitempty"` - - Files []*CommitFile `json:"files,omitempty"` - - HTMLURL *string `json:"html_url,omitempty"` - PermalinkURL *string `json:"permalink_url,omitempty"` - DiffURL *string `json:"diff_url,omitempty"` - PatchURL *string `json:"patch_url,omitempty"` - URL *string `json:"url,omitempty"` // API URL. -} - -func (c CommitsComparison) String() string { - return Stringify(c) -} - -// CommitsListOptions specifies the optional parameters to the -// RepositoriesService.ListCommits method. -type CommitsListOptions struct { - // SHA or branch to start listing Commits from. - SHA string `url:"sha,omitempty"` - - // Path that should be touched by the returned Commits. - Path string `url:"path,omitempty"` - - // Author of by which to filter Commits. - Author string `url:"author,omitempty"` - - // Since when should Commits be included in the response. - Since time.Time `url:"since,omitempty"` - - // Until when should Commits be included in the response. - Until time.Time `url:"until,omitempty"` - - ListOptions -} - -// BranchCommit is the result of listing branches with commit SHA. -type BranchCommit struct { - Name *string `json:"name,omitempty"` - Commit *Commit `json:"commit,omitempty"` - Protected *bool `json:"protected,omitempty"` -} - -// ListCommits lists the commits of a repository. -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#list-commits -// -//meta:operation GET /repos/{owner}/{repo}/commits -func (s *RepositoriesService) ListCommits(ctx context.Context, owner, repo string, opts *CommitsListOptions) ([]*RepositoryCommit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var commits []*RepositoryCommit - resp, err := s.client.Do(ctx, req, &commits) - if err != nil { - return nil, resp, err - } - - return commits, resp, nil -} - -// GetCommit fetches the specified commit, including all details about it. -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#get-a-commit -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref} -func (s *RepositoriesService) GetCommit(ctx context.Context, owner, repo, sha string, opts *ListOptions) (*RepositoryCommit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, sha) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - commit := new(RepositoryCommit) - resp, err := s.client.Do(ctx, req, commit) - if err != nil { - return nil, resp, err - } - - return commit, resp, nil -} - -// GetCommitRaw fetches the specified commit in raw (diff or patch) format. 
-// -// GitHub API docs: https://docs.github.com/rest/commits/commits#get-a-commit -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref} -func (s *RepositoriesService) GetCommitRaw(ctx context.Context, owner string, repo string, sha string, opts RawOptions) (string, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", nil, err - } - - switch opts.Type { - case Diff: - req.Header.Set("Accept", mediaTypeV3Diff) - case Patch: - req.Header.Set("Accept", mediaTypeV3Patch) - default: - return "", nil, fmt.Errorf("unsupported raw type %d", opts.Type) - } - - var buf bytes.Buffer - resp, err := s.client.Do(ctx, req, &buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// GetCommitSHA1 gets the SHA-1 of a commit reference. If a last-known SHA1 is -// supplied and no new commits have occurred, a 304 Unmodified response is returned. -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#get-a-commit -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref} -func (s *RepositoriesService) GetCommitSHA1(ctx context.Context, owner, repo, ref, lastSHA string) (string, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, refURLEscape(ref)) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", nil, err - } - if lastSHA != "" { - req.Header.Set("If-None-Match", `"`+lastSHA+`"`) - } - - req.Header.Set("Accept", mediaTypeV3SHA) - - var buf bytes.Buffer - resp, err := s.client.Do(ctx, req, &buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// CompareCommits compares a range of commits with each other. -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#compare-two-commits -// -//meta:operation GET /repos/{owner}/{repo}/compare/{basehead} -func (s *RepositoriesService) CompareCommits(ctx context.Context, owner, repo string, base, head string, opts *ListOptions) (*CommitsComparison, *Response, error) { - escapedBase := url.QueryEscape(base) - escapedHead := url.QueryEscape(head) - - u := fmt.Sprintf("repos/%v/%v/compare/%v...%v", owner, repo, escapedBase, escapedHead) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - comp := new(CommitsComparison) - resp, err := s.client.Do(ctx, req, comp) - if err != nil { - return nil, resp, err - } - - return comp, resp, nil -} - -// CompareCommitsRaw compares a range of commits with each other in raw (diff or patch) format. -// -// Both "base" and "head" must be branch names in "repo". -// To compare branches across other repositories in the same network as "repo", -// use the format ":branch". 
-// -// GitHub API docs: https://docs.github.com/rest/commits/commits#compare-two-commits -// -//meta:operation GET /repos/{owner}/{repo}/compare/{basehead} -func (s *RepositoriesService) CompareCommitsRaw(ctx context.Context, owner, repo, base, head string, opts RawOptions) (string, *Response, error) { - escapedBase := url.QueryEscape(base) - escapedHead := url.QueryEscape(head) - - u := fmt.Sprintf("repos/%v/%v/compare/%v...%v", owner, repo, escapedBase, escapedHead) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", nil, err - } - - switch opts.Type { - case Diff: - req.Header.Set("Accept", mediaTypeV3Diff) - case Patch: - req.Header.Set("Accept", mediaTypeV3Patch) - default: - return "", nil, fmt.Errorf("unsupported raw type %d", opts.Type) - } - - var buf bytes.Buffer - resp, err := s.client.Do(ctx, req, &buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// ListBranchesHeadCommit gets all branches where the given commit SHA is the HEAD, -// or latest commit for the branch. -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#list-branches-for-head-commit -// -//meta:operation GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head -func (s *RepositoriesService) ListBranchesHeadCommit(ctx context.Context, owner, repo, sha string) ([]*BranchCommit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/branches-where-head", owner, repo, sha) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeListPullsOrBranchesForCommitPreview) - var branchCommits []*BranchCommit - resp, err := s.client.Do(ctx, req, &branchCommits) - if err != nil { - return nil, resp, err - } - - return branchCommits, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_community_health.go b/vendor/github.com/google/go-github/v57/github/repos_community_health.go deleted file mode 100644 index 54d1b414..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_community_health.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Metric represents the different fields for one file in community health files. -type Metric struct { - Name *string `json:"name"` - Key *string `json:"key"` - SPDXID *string `json:"spdx_id"` - URL *string `json:"url"` - HTMLURL *string `json:"html_url"` - NodeID *string `json:"node_id"` -} - -// CommunityHealthFiles represents the different files in the community health metrics response. -type CommunityHealthFiles struct { - CodeOfConduct *Metric `json:"code_of_conduct"` - CodeOfConductFile *Metric `json:"code_of_conduct_file"` - Contributing *Metric `json:"contributing"` - IssueTemplate *Metric `json:"issue_template"` - PullRequestTemplate *Metric `json:"pull_request_template"` - License *Metric `json:"license"` - Readme *Metric `json:"readme"` -} - -// CommunityHealthMetrics represents a response containing the community metrics of a repository. 
-type CommunityHealthMetrics struct { - HealthPercentage *int `json:"health_percentage"` - Description *string `json:"description"` - Documentation *string `json:"documentation"` - Files *CommunityHealthFiles `json:"files"` - UpdatedAt *Timestamp `json:"updated_at"` - ContentReportsEnabled *bool `json:"content_reports_enabled"` -} - -// GetCommunityHealthMetrics retrieves all the community health metrics for a repository. -// -// GitHub API docs: https://docs.github.com/rest/metrics/community#get-community-profile-metrics -// -//meta:operation GET /repos/{owner}/{repo}/community/profile -func (s *RepositoriesService) GetCommunityHealthMetrics(ctx context.Context, owner, repo string) (*CommunityHealthMetrics, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/community/profile", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - metrics := &CommunityHealthMetrics{} - resp, err := s.client.Do(ctx, req, metrics) - if err != nil { - return nil, resp, err - } - - return metrics, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_contents.go b/vendor/github.com/google/go-github/v57/github/repos_contents.go deleted file mode 100644 index 9539a5c4..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_contents.go +++ /dev/null @@ -1,359 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Repository contents API methods. -// GitHub API docs: https://docs.github.com/rest/repos/contents/ - -package github - -import ( - "context" - "encoding/base64" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "net/url" - "path" - "strings" -) - -var ErrPathForbidden = errors.New("path must not contain '..' due to auth vulnerability issue") - -// RepositoryContent represents a file or directory in a github repository. -type RepositoryContent struct { - Type *string `json:"type,omitempty"` - // Target is only set if the type is "symlink" and the target is not a normal file. - // If Target is set, Path will be the symlink path. - Target *string `json:"target,omitempty"` - Encoding *string `json:"encoding,omitempty"` - Size *int `json:"size,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - // Content contains the actual file content, which may be encoded. - // Callers should call GetContent which will decode the content if - // necessary. - Content *string `json:"content,omitempty"` - SHA *string `json:"sha,omitempty"` - URL *string `json:"url,omitempty"` - GitURL *string `json:"git_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - DownloadURL *string `json:"download_url,omitempty"` - SubmoduleGitURL *string `json:"submodule_git_url,omitempty"` -} - -// RepositoryContentResponse holds the parsed response from CreateFile, UpdateFile, and DeleteFile. -type RepositoryContentResponse struct { - Content *RepositoryContent `json:"content,omitempty"` - Commit `json:"commit,omitempty"` -} - -// RepositoryContentFileOptions specifies optional parameters for CreateFile, UpdateFile, and DeleteFile. 
-type RepositoryContentFileOptions struct { - Message *string `json:"message,omitempty"` - Content []byte `json:"content"` // unencoded - SHA *string `json:"sha,omitempty"` - Branch *string `json:"branch,omitempty"` - Author *CommitAuthor `json:"author,omitempty"` - Committer *CommitAuthor `json:"committer,omitempty"` -} - -// RepositoryContentGetOptions represents an optional ref parameter, which can be a SHA, -// branch, or tag -type RepositoryContentGetOptions struct { - Ref string `url:"ref,omitempty"` -} - -// String converts RepositoryContent to a string. It's primarily for testing. -func (r RepositoryContent) String() string { - return Stringify(r) -} - -// GetContent returns the content of r, decoding it if necessary. -func (r *RepositoryContent) GetContent() (string, error) { - var encoding string - if r.Encoding != nil { - encoding = *r.Encoding - } - - switch encoding { - case "base64": - if r.Content == nil { - return "", errors.New("malformed response: base64 encoding of null content") - } - c, err := base64.StdEncoding.DecodeString(*r.Content) - return string(c), err - case "": - if r.Content == nil { - return "", nil - } - return *r.Content, nil - case "none": - return "", errors.New("unsupported content encoding: none, this may occur when file size > 1 MB, if that is the case consider using DownloadContents") - default: - return "", fmt.Errorf("unsupported content encoding: %v", encoding) - } -} - -// GetReadme gets the Readme file for the repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/contents#get-a-repository-readme -// -//meta:operation GET /repos/{owner}/{repo}/readme -func (s *RepositoriesService) GetReadme(ctx context.Context, owner, repo string, opts *RepositoryContentGetOptions) (*RepositoryContent, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/readme", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - readme := new(RepositoryContent) - resp, err := s.client.Do(ctx, req, readme) - if err != nil { - return nil, resp, err - } - - return readme, resp, nil -} - -// DownloadContents returns an io.ReadCloser that reads the contents of the -// specified file. This function will work with files of any size, as opposed -// to GetContents which is limited to 1 Mb files. It is the caller's -// responsibility to close the ReadCloser. -// -// It is possible for the download to result in a failed response when the -// returned error is nil. Callers should check the returned Response status -// code to verify the content is from a successful response. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/contents#get-repository-content -// -//meta:operation GET /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) DownloadContents(ctx context.Context, owner, repo, filepath string, opts *RepositoryContentGetOptions) (io.ReadCloser, *Response, error) { - dir := path.Dir(filepath) - filename := path.Base(filepath) - _, dirContents, resp, err := s.GetContents(ctx, owner, repo, dir, opts) - if err != nil { - return nil, resp, err - } - - for _, contents := range dirContents { - if *contents.Name == filename { - if contents.DownloadURL == nil || *contents.DownloadURL == "" { - return nil, resp, fmt.Errorf("no download link found for %s", filepath) - } - - dlResp, err := s.client.client.Get(*contents.DownloadURL) - if err != nil { - return nil, &Response{Response: dlResp}, err - } - - return dlResp.Body, &Response{Response: dlResp}, nil - } - } - - return nil, resp, fmt.Errorf("no file named %s found in %s", filename, dir) -} - -// DownloadContentsWithMeta is identical to DownloadContents but additionally -// returns the RepositoryContent of the requested file. This additional data -// is useful for future operations involving the requested file. For merely -// reading the content of a file, DownloadContents is perfectly adequate. -// -// It is possible for the download to result in a failed response when the -// returned error is nil. Callers should check the returned Response status -// code to verify the content is from a successful response. -// -// GitHub API docs: https://docs.github.com/rest/repos/contents#get-repository-content -// -//meta:operation GET /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) DownloadContentsWithMeta(ctx context.Context, owner, repo, filepath string, opts *RepositoryContentGetOptions) (io.ReadCloser, *RepositoryContent, *Response, error) { - dir := path.Dir(filepath) - filename := path.Base(filepath) - _, dirContents, resp, err := s.GetContents(ctx, owner, repo, dir, opts) - if err != nil { - return nil, nil, resp, err - } - - for _, contents := range dirContents { - if *contents.Name == filename { - if contents.DownloadURL == nil || *contents.DownloadURL == "" { - return nil, contents, resp, fmt.Errorf("no download link found for %s", filepath) - } - - dlResp, err := s.client.client.Get(*contents.DownloadURL) - if err != nil { - return nil, contents, &Response{Response: dlResp}, err - } - - return dlResp.Body, contents, &Response{Response: dlResp}, nil - } - } - - return nil, nil, resp, fmt.Errorf("no file named %s found in %s", filename, dir) -} - -// GetContents can return either the metadata and content of a single file -// (when path references a file) or the metadata of all the files and/or -// subdirectories of a directory (when path references a directory). To make it -// easy to distinguish between both result types and to mimic the API as much -// as possible, both result types will be returned but only one will contain a -// value and the other will be nil. -// -// Due to an auth vulnerability issue in the GitHub v3 API, ".." is not allowed -// to appear anywhere in the "path" or this method will return an error. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/contents#get-repository-content -// -//meta:operation GET /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) GetContents(ctx context.Context, owner, repo, path string, opts *RepositoryContentGetOptions) (fileContent *RepositoryContent, directoryContent []*RepositoryContent, resp *Response, err error) { - if strings.Contains(path, "..") { - return nil, nil, nil, ErrPathForbidden - } - - escapedPath := (&url.URL{Path: strings.TrimSuffix(path, "/")}).String() - u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, escapedPath) - u, err = addOptions(u, opts) - if err != nil { - return nil, nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, nil, err - } - - var rawJSON json.RawMessage - resp, err = s.client.Do(ctx, req, &rawJSON) - if err != nil { - return nil, nil, resp, err - } - - fileUnmarshalError := json.Unmarshal(rawJSON, &fileContent) - if fileUnmarshalError == nil { - return fileContent, nil, resp, nil - } - - directoryUnmarshalError := json.Unmarshal(rawJSON, &directoryContent) - if directoryUnmarshalError == nil { - return nil, directoryContent, resp, nil - } - - return nil, nil, resp, fmt.Errorf("unmarshalling failed for both file and directory content: %s and %s", fileUnmarshalError, directoryUnmarshalError) -} - -// CreateFile creates a new file in a repository at the given path and returns -// the commit and file metadata. -// -// GitHub API docs: https://docs.github.com/rest/repos/contents#create-or-update-file-contents -// -//meta:operation PUT /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) CreateFile(ctx context.Context, owner, repo, path string, opts *RepositoryContentFileOptions) (*RepositoryContentResponse, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, path) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - createResponse := new(RepositoryContentResponse) - resp, err := s.client.Do(ctx, req, createResponse) - if err != nil { - return nil, resp, err - } - - return createResponse, resp, nil -} - -// UpdateFile updates a file in a repository at the given path and returns the -// commit and file metadata. Requires the blob SHA of the file being updated. -// -// GitHub API docs: https://docs.github.com/rest/repos/contents#create-or-update-file-contents -// -//meta:operation PUT /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) UpdateFile(ctx context.Context, owner, repo, path string, opts *RepositoryContentFileOptions) (*RepositoryContentResponse, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, path) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - updateResponse := new(RepositoryContentResponse) - resp, err := s.client.Do(ctx, req, updateResponse) - if err != nil { - return nil, resp, err - } - - return updateResponse, resp, nil -} - -// DeleteFile deletes a file from a repository and returns the commit. -// Requires the blob SHA of the file to be deleted. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/contents#delete-a-file -// -//meta:operation DELETE /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) DeleteFile(ctx context.Context, owner, repo, path string, opts *RepositoryContentFileOptions) (*RepositoryContentResponse, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, path) - req, err := s.client.NewRequest("DELETE", u, opts) - if err != nil { - return nil, nil, err - } - - deleteResponse := new(RepositoryContentResponse) - resp, err := s.client.Do(ctx, req, deleteResponse) - if err != nil { - return nil, resp, err - } - - return deleteResponse, resp, nil -} - -// ArchiveFormat is used to define the archive type when calling GetArchiveLink. -type ArchiveFormat string - -const ( - // Tarball specifies an archive in gzipped tar format. - Tarball ArchiveFormat = "tarball" - - // Zipball specifies an archive in zip format. - Zipball ArchiveFormat = "zipball" -) - -// GetArchiveLink returns an URL to download a tarball or zipball archive for a -// repository. The archiveFormat can be specified by either the github.Tarball -// or github.Zipball constant. -// -// GitHub API docs: https://docs.github.com/rest/repos/contents#download-a-repository-archive-tar -// GitHub API docs: https://docs.github.com/rest/repos/contents#download-a-repository-archive-zip -// -//meta:operation GET /repos/{owner}/{repo}/tarball/{ref} -//meta:operation GET /repos/{owner}/{repo}/zipball/{ref} -func (s *RepositoriesService) GetArchiveLink(ctx context.Context, owner, repo string, archiveformat ArchiveFormat, opts *RepositoryContentGetOptions, maxRedirects int) (*url.URL, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/%s", owner, repo, archiveformat) - if opts != nil && opts.Ref != "" { - u += fmt.Sprintf("/%s", opts.Ref) - } - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusFound { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - parsedURL, err := url.Parse(resp.Header.Get("Location")) - if err != nil { - return nil, newResponse(resp), err - } - - return parsedURL, newResponse(resp), nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_deployment_branch_policies.go b/vendor/github.com/google/go-github/v57/github/repos_deployment_branch_policies.go deleted file mode 100644 index 77ac73e4..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_deployment_branch_policies.go +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// DeploymentBranchPolicy represents a single deployment branch policy for an environment. -type DeploymentBranchPolicy struct { - Name *string `json:"name,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Type *string `json:"type,omitempty"` -} - -// DeploymentBranchPolicyResponse represents the slightly different format of response that comes back when you list deployment branch policies. 
-type DeploymentBranchPolicyResponse struct { - TotalCount *int `json:"total_count,omitempty"` - BranchPolicies []*DeploymentBranchPolicy `json:"branch_policies,omitempty"` -} - -// DeploymentBranchPolicyRequest represents a deployment branch policy request. -type DeploymentBranchPolicyRequest struct { - Name *string `json:"name,omitempty"` - Type *string `json:"type,omitempty"` -} - -// ListDeploymentBranchPolicies lists the deployment branch policies for an environment. -// -// GitHub API docs: https://docs.github.com/rest/deployments/branch-policies#list-deployment-branch-policies -// -//meta:operation GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies -func (s *RepositoriesService) ListDeploymentBranchPolicies(ctx context.Context, owner, repo, environment string) (*DeploymentBranchPolicyResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies", owner, repo, environment) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var list *DeploymentBranchPolicyResponse - resp, err := s.client.Do(ctx, req, &list) - if err != nil { - return nil, resp, err - } - - return list, resp, nil -} - -// GetDeploymentBranchPolicy gets a deployment branch policy for an environment. -// -// GitHub API docs: https://docs.github.com/rest/deployments/branch-policies#get-a-deployment-branch-policy -// -//meta:operation GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id} -func (s *RepositoriesService) GetDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, branchPolicyID int64) (*DeploymentBranchPolicy, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies/%v", owner, repo, environment, branchPolicyID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var policy *DeploymentBranchPolicy - resp, err := s.client.Do(ctx, req, &policy) - if err != nil { - return nil, resp, err - } - - return policy, resp, nil -} - -// CreateDeploymentBranchPolicy creates a deployment branch policy for an environment. -// -// GitHub API docs: https://docs.github.com/rest/deployments/branch-policies#create-a-deployment-branch-policy -// -//meta:operation POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies -func (s *RepositoriesService) CreateDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, request *DeploymentBranchPolicyRequest) (*DeploymentBranchPolicy, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies", owner, repo, environment) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - var policy *DeploymentBranchPolicy - resp, err := s.client.Do(ctx, req, &policy) - if err != nil { - return nil, resp, err - } - - return policy, resp, nil -} - -// UpdateDeploymentBranchPolicy updates a deployment branch policy for an environment. 
-// -// GitHub API docs: https://docs.github.com/rest/deployments/branch-policies#update-a-deployment-branch-policy -// -//meta:operation PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id} -func (s *RepositoriesService) UpdateDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, branchPolicyID int64, request *DeploymentBranchPolicyRequest) (*DeploymentBranchPolicy, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies/%v", owner, repo, environment, branchPolicyID) - - req, err := s.client.NewRequest("PUT", u, request) - if err != nil { - return nil, nil, err - } - - var policy *DeploymentBranchPolicy - resp, err := s.client.Do(ctx, req, &policy) - if err != nil { - return nil, resp, err - } - - return policy, resp, nil -} - -// DeleteDeploymentBranchPolicy deletes a deployment branch policy for an environment. -// -// GitHub API docs: https://docs.github.com/rest/deployments/branch-policies#delete-a-deployment-branch-policy -// -//meta:operation DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id} -func (s *RepositoriesService) DeleteDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, branchPolicyID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies/%v", owner, repo, environment, branchPolicyID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_deployments.go b/vendor/github.com/google/go-github/v57/github/repos_deployments.go deleted file mode 100644 index d8c0b632..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_deployments.go +++ /dev/null @@ -1,264 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "encoding/json" - "fmt" - "strings" -) - -// Deployment represents a deployment in a repo -type Deployment struct { - URL *string `json:"url,omitempty"` - ID *int64 `json:"id,omitempty"` - SHA *string `json:"sha,omitempty"` - Ref *string `json:"ref,omitempty"` - Task *string `json:"task,omitempty"` - Payload json.RawMessage `json:"payload,omitempty"` - Environment *string `json:"environment,omitempty"` - Description *string `json:"description,omitempty"` - Creator *User `json:"creator,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - StatusesURL *string `json:"statuses_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -// DeploymentRequest represents a deployment request -type DeploymentRequest struct { - Ref *string `json:"ref,omitempty"` - Task *string `json:"task,omitempty"` - AutoMerge *bool `json:"auto_merge,omitempty"` - RequiredContexts *[]string `json:"required_contexts,omitempty"` - Payload interface{} `json:"payload,omitempty"` - Environment *string `json:"environment,omitempty"` - Description *string `json:"description,omitempty"` - TransientEnvironment *bool `json:"transient_environment,omitempty"` - ProductionEnvironment *bool `json:"production_environment,omitempty"` -} - -// DeploymentsListOptions specifies the optional parameters to the -// RepositoriesService.ListDeployments method. -type DeploymentsListOptions struct { - // SHA of the Deployment. - SHA string `url:"sha,omitempty"` - - // List deployments for a given ref. - Ref string `url:"ref,omitempty"` - - // List deployments for a given task. - Task string `url:"task,omitempty"` - - // List deployments for a given environment. - Environment string `url:"environment,omitempty"` - - ListOptions -} - -// ListDeployments lists the deployments of a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/deployments#list-deployments -// -//meta:operation GET /repos/{owner}/{repo}/deployments -func (s *RepositoriesService) ListDeployments(ctx context.Context, owner, repo string, opts *DeploymentsListOptions) ([]*Deployment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var deployments []*Deployment - resp, err := s.client.Do(ctx, req, &deployments) - if err != nil { - return nil, resp, err - } - - return deployments, resp, nil -} - -// GetDeployment returns a single deployment of a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/deployments#get-a-deployment -// -//meta:operation GET /repos/{owner}/{repo}/deployments/{deployment_id} -func (s *RepositoriesService) GetDeployment(ctx context.Context, owner, repo string, deploymentID int64) (*Deployment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments/%v", owner, repo, deploymentID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - deployment := new(Deployment) - resp, err := s.client.Do(ctx, req, deployment) - if err != nil { - return nil, resp, err - } - - return deployment, resp, nil -} - -// CreateDeployment creates a new deployment for a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/deployments/deployments#create-a-deployment -// -//meta:operation POST /repos/{owner}/{repo}/deployments -func (s *RepositoriesService) CreateDeployment(ctx context.Context, owner, repo string, request *DeploymentRequest) (*Deployment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments", owner, repo) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - d := new(Deployment) - resp, err := s.client.Do(ctx, req, d) - if err != nil { - return nil, resp, err - } - - return d, resp, nil -} - -// DeleteDeployment deletes an existing deployment for a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/deployments#delete-a-deployment -// -//meta:operation DELETE /repos/{owner}/{repo}/deployments/{deployment_id} -func (s *RepositoriesService) DeleteDeployment(ctx context.Context, owner, repo string, deploymentID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments/%v", owner, repo, deploymentID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// DeploymentStatus represents the status of a -// particular deployment. -type DeploymentStatus struct { - ID *int64 `json:"id,omitempty"` - // State is the deployment state. - // Possible values are: "pending", "success", "failure", "error", - // "inactive", "in_progress", "queued". - State *string `json:"state,omitempty"` - Creator *User `json:"creator,omitempty"` - Description *string `json:"description,omitempty"` - Environment *string `json:"environment,omitempty"` - NodeID *string `json:"node_id,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - TargetURL *string `json:"target_url,omitempty"` - DeploymentURL *string `json:"deployment_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - EnvironmentURL *string `json:"environment_url,omitempty"` - LogURL *string `json:"log_url,omitempty"` - URL *string `json:"url,omitempty"` -} - -// DeploymentStatusRequest represents a deployment request -type DeploymentStatusRequest struct { - State *string `json:"state,omitempty"` - LogURL *string `json:"log_url,omitempty"` - Description *string `json:"description,omitempty"` - Environment *string `json:"environment,omitempty"` - EnvironmentURL *string `json:"environment_url,omitempty"` - AutoInactive *bool `json:"auto_inactive,omitempty"` -} - -// ListDeploymentStatuses lists the statuses of a given deployment of a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/statuses#list-deployment-statuses -// -//meta:operation GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses -func (s *RepositoriesService) ListDeploymentStatuses(ctx context.Context, owner, repo string, deployment int64, opts *ListOptions) ([]*DeploymentStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses", owner, repo, deployment) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. 
- acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var statuses []*DeploymentStatus - resp, err := s.client.Do(ctx, req, &statuses) - if err != nil { - return nil, resp, err - } - - return statuses, resp, nil -} - -// GetDeploymentStatus returns a single deployment status of a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/statuses#get-a-deployment-status -// -//meta:operation GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id} -func (s *RepositoriesService) GetDeploymentStatus(ctx context.Context, owner, repo string, deploymentID, deploymentStatusID int64) (*DeploymentStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses/%v", owner, repo, deploymentID, deploymentStatusID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - d := new(DeploymentStatus) - resp, err := s.client.Do(ctx, req, d) - if err != nil { - return nil, resp, err - } - - return d, resp, nil -} - -// CreateDeploymentStatus creates a new status for a deployment. -// -// GitHub API docs: https://docs.github.com/rest/deployments/statuses#create-a-deployment-status -// -//meta:operation POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses -func (s *RepositoriesService) CreateDeploymentStatus(ctx context.Context, owner, repo string, deployment int64, request *DeploymentStatusRequest) (*DeploymentStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses", owner, repo, deployment) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - d := new(DeploymentStatus) - resp, err := s.client.Do(ctx, req, d) - if err != nil { - return nil, resp, err - } - - return d, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_environments.go b/vendor/github.com/google/go-github/v57/github/repos_environments.go deleted file mode 100644 index ed81e3a1..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_environments.go +++ /dev/null @@ -1,252 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" - "net/http" -) - -// Environment represents a single environment in a repository. 
-type Environment struct { - Owner *string `json:"owner,omitempty"` - Repo *string `json:"repo,omitempty"` - EnvironmentName *string `json:"environment_name,omitempty"` - WaitTimer *int `json:"wait_timer,omitempty"` - Reviewers []*EnvReviewers `json:"reviewers,omitempty"` - DeploymentBranchPolicy *BranchPolicy `json:"deployment_branch_policy,omitempty"` - // Return/response only values - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - CanAdminsBypass *bool `json:"can_admins_bypass,omitempty"` - ProtectionRules []*ProtectionRule `json:"protection_rules,omitempty"` -} - -// EnvReviewers represents a single environment reviewer entry. -type EnvReviewers struct { - Type *string `json:"type,omitempty"` - ID *int64 `json:"id,omitempty"` -} - -// BranchPolicy represents the options for whether a branch deployment policy is applied to this environment. -type BranchPolicy struct { - ProtectedBranches *bool `json:"protected_branches,omitempty"` - CustomBranchPolicies *bool `json:"custom_branch_policies,omitempty"` -} - -// EnvResponse represents the slightly different format of response that comes back when you list an environment. -type EnvResponse struct { - TotalCount *int `json:"total_count,omitempty"` - Environments []*Environment `json:"environments,omitempty"` -} - -// ProtectionRule represents a single protection rule applied to the environment. -type ProtectionRule struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - PreventSelfReview *bool `json:"prevent_self_review,omitempty"` - Type *string `json:"type,omitempty"` - WaitTimer *int `json:"wait_timer,omitempty"` - Reviewers []*RequiredReviewer `json:"reviewers,omitempty"` -} - -// RequiredReviewer represents a required reviewer. -type RequiredReviewer struct { - Type *string `json:"type,omitempty"` - Reviewer interface{} `json:"reviewer,omitempty"` -} - -// EnvironmentListOptions specifies the optional parameters to the -// RepositoriesService.ListEnvironments method. -type EnvironmentListOptions struct { - ListOptions -} - -// UnmarshalJSON implements the json.Unmarshaler interface. -// This helps us handle the fact that RequiredReviewer can have either a User or Team type reviewer field. -func (r *RequiredReviewer) UnmarshalJSON(data []byte) error { - type aliasReviewer RequiredReviewer - var reviewer aliasReviewer - if err := json.Unmarshal(data, &reviewer); err != nil { - return err - } - - r.Type = reviewer.Type - - switch *reviewer.Type { - case "User": - reviewer.Reviewer = &User{} - if err := json.Unmarshal(data, &reviewer); err != nil { - return err - } - r.Reviewer = reviewer.Reviewer - case "Team": - reviewer.Reviewer = &Team{} - if err := json.Unmarshal(data, &reviewer); err != nil { - return err - } - r.Reviewer = reviewer.Reviewer - default: - r.Type = nil - r.Reviewer = nil - return fmt.Errorf("reviewer.Type is %T, not a string of 'User' or 'Team', unable to unmarshal", reviewer.Type) - } - - return nil -} - -// ListEnvironments lists all environments for a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/deployments/environments#list-environments -// -//meta:operation GET /repos/{owner}/{repo}/environments -func (s *RepositoriesService) ListEnvironments(ctx context.Context, owner, repo string, opts *EnvironmentListOptions) (*EnvResponse, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/environments", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var list *EnvResponse - resp, err := s.client.Do(ctx, req, &list) - if err != nil { - return nil, resp, err - } - return list, resp, nil -} - -// GetEnvironment get a single environment for a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/environments#get-an-environment -// -//meta:operation GET /repos/{owner}/{repo}/environments/{environment_name} -func (s *RepositoriesService) GetEnvironment(ctx context.Context, owner, repo, name string) (*Environment, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/environments/%s", owner, repo, name) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var env *Environment - resp, err := s.client.Do(ctx, req, &env) - if err != nil { - return nil, resp, err - } - return env, resp, nil -} - -// MarshalJSON implements the json.Marshaler interface. -// As the only way to clear a WaitTimer is to set it to 0, a missing WaitTimer object should default to 0, not null. -// As the default value for CanAdminsBypass is true, a nil value here marshals to true. -func (c *CreateUpdateEnvironment) MarshalJSON() ([]byte, error) { - type Alias CreateUpdateEnvironment - if c.WaitTimer == nil { - c.WaitTimer = Int(0) - } - if c.CanAdminsBypass == nil { - c.CanAdminsBypass = Bool(true) - } - return json.Marshal(&struct { - *Alias - }{ - Alias: (*Alias)(c), - }) -} - -// CreateUpdateEnvironment represents the fields required for the create/update operation -// following the Create/Update release example. -// See https://github.com/google/go-github/issues/992 for more information. -// Removed omitempty here as the API expects null values for reviewers and deployment_branch_policy to clear them. -type CreateUpdateEnvironment struct { - WaitTimer *int `json:"wait_timer"` - Reviewers []*EnvReviewers `json:"reviewers"` - CanAdminsBypass *bool `json:"can_admins_bypass"` - DeploymentBranchPolicy *BranchPolicy `json:"deployment_branch_policy"` - PreventSelfReview *bool `json:"prevent_self_review,omitempty"` -} - -// createUpdateEnvironmentNoEnterprise represents the fields accepted for Pro/Teams private repos. -// Ref: https://docs.github.com/actions/deployment/targeting-different-environments/using-environments-for-deployment -// See https://github.com/google/go-github/issues/2602 for more information. -type createUpdateEnvironmentNoEnterprise struct { - DeploymentBranchPolicy *BranchPolicy `json:"deployment_branch_policy"` -} - -// CreateUpdateEnvironment create or update a new environment for a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/deployments/environments#create-or-update-an-environment -// -//meta:operation PUT /repos/{owner}/{repo}/environments/{environment_name} -func (s *RepositoriesService) CreateUpdateEnvironment(ctx context.Context, owner, repo, name string, environment *CreateUpdateEnvironment) (*Environment, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/environments/%s", owner, repo, name) - - req, err := s.client.NewRequest("PUT", u, environment) - if err != nil { - return nil, nil, err - } - - e := new(Environment) - resp, err := s.client.Do(ctx, req, e) - if err != nil { - // The API returns 422 when the pricing plan doesn't support all the fields sent. - // This path will be executed for Pro/Teams private repos. - // For public repos, regardless of the pricing plan, all fields supported. - // For Free plan private repos the returned error code is 404. - // We are checking that the user didn't try to send a value for unsupported fields, - // and return an error if they did. - if resp != nil && resp.StatusCode == http.StatusUnprocessableEntity && environment != nil && len(environment.Reviewers) == 0 && environment.GetWaitTimer() == 0 { - return s.createNewEnvNoEnterprise(ctx, u, environment) - } - return nil, resp, err - } - return e, resp, nil -} - -// createNewEnvNoEnterprise is an internal function for cases where the original call returned 422. -// Currently only the `deployment_branch_policy` parameter is supported for Pro/Team private repos. -func (s *RepositoriesService) createNewEnvNoEnterprise(ctx context.Context, u string, environment *CreateUpdateEnvironment) (*Environment, *Response, error) { - req, err := s.client.NewRequest("PUT", u, &createUpdateEnvironmentNoEnterprise{ - DeploymentBranchPolicy: environment.DeploymentBranchPolicy, - }) - if err != nil { - return nil, nil, err - } - - e := new(Environment) - resp, err := s.client.Do(ctx, req, e) - if err != nil { - return nil, resp, err - } - return e, resp, nil -} - -// DeleteEnvironment delete an environment from a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/environments#delete-an-environment -// -//meta:operation DELETE /repos/{owner}/{repo}/environments/{environment_name} -func (s *RepositoriesService) DeleteEnvironment(ctx context.Context, owner, repo, name string) (*Response, error) { - u := fmt.Sprintf("repos/%s/%s/environments/%s", owner, repo, name) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_forks.go b/vendor/github.com/google/go-github/v57/github/repos_forks.go deleted file mode 100644 index 60fb49da..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_forks.go +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" -) - -// RepositoryListForksOptions specifies the optional parameters to the -// RepositoriesService.ListForks method. -type RepositoryListForksOptions struct { - // How to sort the forks list. Possible values are: newest, oldest, - // watchers. Default is "newest". - Sort string `url:"sort,omitempty"` - - ListOptions -} - -// ListForks lists the forks of the specified repository. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/forks#list-forks -// -//meta:operation GET /repos/{owner}/{repo}/forks -func (s *RepositoriesService) ListForks(ctx context.Context, owner, repo string, opts *RepositoryListForksOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/forks", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when topics API fully launches. - req.Header.Set("Accept", mediaTypeTopicsPreview) - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// RepositoryCreateForkOptions specifies the optional parameters to the -// RepositoriesService.CreateFork method. -type RepositoryCreateForkOptions struct { - // The organization to fork the repository into. - Organization string `json:"organization,omitempty"` - Name string `json:"name,omitempty"` - DefaultBranchOnly bool `json:"default_branch_only,omitempty"` -} - -// CreateFork creates a fork of the specified repository. -// -// This method might return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it is now computing creating the fork in a background task. In this event, -// the Repository value will be returned, which includes the details about the pending fork. -// A follow up request, after a delay of a second or so, should result -// in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/repos/forks#create-a-fork -// -//meta:operation POST /repos/{owner}/{repo}/forks -func (s *RepositoriesService) CreateFork(ctx context.Context, owner, repo string, opts *RepositoryCreateForkOptions) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/forks", owner, repo) - - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - fork := new(Repository) - resp, err := s.client.Do(ctx, req, fork) - if err != nil { - // Persist AcceptedError's metadata to the Repository object. - if aerr, ok := err.(*AcceptedError); ok { - if err := json.Unmarshal(aerr.Raw, fork); err != nil { - return fork, resp, err - } - - return fork, resp, err - } - return nil, resp, err - } - - return fork, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_hooks.go b/vendor/github.com/google/go-github/v57/github/repos_hooks.go deleted file mode 100644 index 8768d603..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_hooks.go +++ /dev/null @@ -1,271 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" - "net/url" - "strings" -) - -// WebHookPayload represents the data that is received from GitHub when a push -// event hook is triggered. The format of these payloads pre-date most of the -// GitHub v3 API, so there are lots of minor incompatibilities with the types -// defined in the rest of the API. Therefore, several types are duplicated -// here to account for these differences. -// -// GitHub API docs: https://help.github.com/articles/post-receive-hooks -// -// Deprecated: Please use PushEvent instead. 
-type WebHookPayload = PushEvent - -// WebHookCommit represents the commit variant we receive from GitHub in a -// WebHookPayload. -// -// Deprecated: Please use HeadCommit instead. -type WebHookCommit = HeadCommit - -// WebHookAuthor represents the author or committer of a commit, as specified -// in a WebHookCommit. The commit author may not correspond to a GitHub User. -// -// Deprecated: Please use CommitAuthor instead. -// NOTE Breaking API change: the `Username` field is now called `Login`. -type WebHookAuthor = CommitAuthor - -// Hook represents a GitHub (web and service) hook for a repository. -type Hook struct { - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` - ID *int64 `json:"id,omitempty"` - Type *string `json:"type,omitempty"` - Name *string `json:"name,omitempty"` - TestURL *string `json:"test_url,omitempty"` - PingURL *string `json:"ping_url,omitempty"` - LastResponse map[string]interface{} `json:"last_response,omitempty"` - - // Only the following fields are used when creating a hook. - // Config is required. - Config map[string]interface{} `json:"config,omitempty"` - Events []string `json:"events,omitempty"` - Active *bool `json:"active,omitempty"` -} - -func (h Hook) String() string { - return Stringify(h) -} - -// createHookRequest is a subset of Hook and is used internally -// by CreateHook to pass only the known fields for the endpoint. -// -// See https://github.com/google/go-github/issues/1015 for more -// information. -type createHookRequest struct { - // Config is required. - Name string `json:"name"` - Config map[string]interface{} `json:"config,omitempty"` - Events []string `json:"events,omitempty"` - Active *bool `json:"active,omitempty"` -} - -// CreateHook creates a Hook for the specified repository. -// Config is a required field. -// -// Note that only a subset of the hook fields are used and hook must -// not be nil. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#create-a-repository-webhook -// -//meta:operation POST /repos/{owner}/{repo}/hooks -func (s *RepositoriesService) CreateHook(ctx context.Context, owner, repo string, hook *Hook) (*Hook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks", owner, repo) - - hookReq := &createHookRequest{ - Name: "web", - Events: hook.Events, - Active: hook.Active, - Config: hook.Config, - } - - req, err := s.client.NewRequest("POST", u, hookReq) - if err != nil { - return nil, nil, err - } - - h := new(Hook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// ListHooks lists all Hooks for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#list-repository-webhooks -// -//meta:operation GET /repos/{owner}/{repo}/hooks -func (s *RepositoriesService) ListHooks(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Hook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var hooks []*Hook - resp, err := s.client.Do(ctx, req, &hooks) - if err != nil { - return nil, resp, err - } - - return hooks, resp, nil -} - -// GetHook returns a single specified Hook. 
-// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#get-a-repository-webhook -// -//meta:operation GET /repos/{owner}/{repo}/hooks/{hook_id} -func (s *RepositoriesService) GetHook(ctx context.Context, owner, repo string, id int64) (*Hook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - h := new(Hook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// EditHook updates a specified Hook. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#update-a-repository-webhook -// -//meta:operation PATCH /repos/{owner}/{repo}/hooks/{hook_id} -func (s *RepositoriesService) EditHook(ctx context.Context, owner, repo string, id int64, hook *Hook) (*Hook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("PATCH", u, hook) - if err != nil { - return nil, nil, err - } - h := new(Hook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// DeleteHook deletes a specified Hook. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#delete-a-repository-webhook -// -//meta:operation DELETE /repos/{owner}/{repo}/hooks/{hook_id} -func (s *RepositoriesService) DeleteHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// PingHook triggers a 'ping' event to be sent to the Hook. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#ping-a-repository-webhook -// -//meta:operation POST /repos/{owner}/{repo}/hooks/{hook_id}/pings -func (s *RepositoriesService) PingHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%d/pings", owner, repo, id) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// TestHook triggers a test Hook by github. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#test-the-push-repository-webhook -// -//meta:operation POST /repos/{owner}/{repo}/hooks/{hook_id}/tests -func (s *RepositoriesService) TestHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%d/tests", owner, repo, id) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// Subscribe lets servers register to receive updates when a topic is updated. -// -// GitHub API docs: https://docs.github.com/webhooks/about-webhooks-for-repositories#pubsubhubbub -// -//meta:operation POST /hub -func (s *RepositoriesService) Subscribe(ctx context.Context, owner, repo, event, callback string, secret []byte) (*Response, error) { - req, err := s.createWebSubRequest("subscribe", owner, repo, event, callback, secret) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unsubscribe lets servers unregister to no longer receive updates when a topic is updated. 
-// -// GitHub API docs: https://docs.github.com/webhooks/about-webhooks-for-repositories#pubsubhubbub -// -//meta:operation POST /hub -func (s *RepositoriesService) Unsubscribe(ctx context.Context, owner, repo, event, callback string, secret []byte) (*Response, error) { - req, err := s.createWebSubRequest("unsubscribe", owner, repo, event, callback, secret) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// createWebSubRequest returns a subscribe/unsubscribe request that implements -// the WebSub (formerly PubSubHubbub) protocol. -// -// See: https://www.w3.org/TR/websub/#subscriber-sends-subscription-request -func (s *RepositoriesService) createWebSubRequest(hubMode, owner, repo, event, callback string, secret []byte) (*http.Request, error) { - topic := fmt.Sprintf( - "https://github.com/%s/%s/events/%s", - owner, - repo, - event, - ) - form := url.Values{} - form.Add("hub.mode", hubMode) - form.Add("hub.topic", topic) - form.Add("hub.callback", callback) - if secret != nil { - form.Add("hub.secret", string(secret)) - } - body := strings.NewReader(form.Encode()) - - req, err := s.client.NewFormRequest("hub", body) - if err != nil { - return nil, err - } - - return req, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_hooks_configuration.go b/vendor/github.com/google/go-github/v57/github/repos_hooks_configuration.go deleted file mode 100644 index 2203d761..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_hooks_configuration.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetHookConfiguration returns the configuration for the specified repository webhook. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repo-config#get-a-webhook-configuration-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/hooks/{hook_id}/config -func (s *RepositoriesService) GetHookConfiguration(ctx context.Context, owner, repo string, id int64) (*HookConfig, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%v/config", owner, repo, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - config := new(HookConfig) - resp, err := s.client.Do(ctx, req, config) - if err != nil { - return nil, resp, err - } - - return config, resp, nil -} - -// EditHookConfiguration updates the configuration for the specified repository webhook. 
-// -// GitHub API docs: https://docs.github.com/rest/webhooks/repo-config#update-a-webhook-configuration-for-a-repository -// -//meta:operation PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config -func (s *RepositoriesService) EditHookConfiguration(ctx context.Context, owner, repo string, id int64, config *HookConfig) (*HookConfig, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%v/config", owner, repo, id) - req, err := s.client.NewRequest("PATCH", u, config) - if err != nil { - return nil, nil, err - } - - c := new(HookConfig) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_hooks_deliveries.go b/vendor/github.com/google/go-github/v57/github/repos_hooks_deliveries.go deleted file mode 100644 index 6e1fd86f..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_hooks_deliveries.go +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" -) - -// HookDelivery represents the data that is received from GitHub's Webhook Delivery API -// -// GitHub API docs: -// - https://docs.github.com/rest/webhooks/repo-deliveries#list-deliveries-for-a-repository-webhook -// - https://docs.github.com/rest/webhooks/repo-deliveries#get-a-delivery-for-a-repository-webhook -type HookDelivery struct { - ID *int64 `json:"id,omitempty"` - GUID *string `json:"guid,omitempty"` - DeliveredAt *Timestamp `json:"delivered_at,omitempty"` - Redelivery *bool `json:"redelivery,omitempty"` - Duration *float64 `json:"duration,omitempty"` - Status *string `json:"status,omitempty"` - StatusCode *int `json:"status_code,omitempty"` - Event *string `json:"event,omitempty"` - Action *string `json:"action,omitempty"` - InstallationID *int64 `json:"installation_id,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - - // Request is populated by GetHookDelivery. - Request *HookRequest `json:"request,omitempty"` - // Response is populated by GetHookDelivery. - Response *HookResponse `json:"response,omitempty"` -} - -func (d HookDelivery) String() string { - return Stringify(d) -} - -// HookRequest is a part of HookDelivery that contains -// the HTTP headers and the JSON payload of the webhook request. -type HookRequest struct { - Headers map[string]string `json:"headers,omitempty"` - RawPayload *json.RawMessage `json:"payload,omitempty"` -} - -func (r HookRequest) String() string { - return Stringify(r) -} - -// HookResponse is a part of HookDelivery that contains -// the HTTP headers and the response body served by the webhook endpoint. -type HookResponse struct { - Headers map[string]string `json:"headers,omitempty"` - RawPayload *json.RawMessage `json:"payload,omitempty"` -} - -func (r HookResponse) String() string { - return Stringify(r) -} - -// ListHookDeliveries lists webhook deliveries for a webhook configured in a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/webhooks/repo-deliveries#list-deliveries-for-a-repository-webhook -// -//meta:operation GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries -func (s *RepositoriesService) ListHookDeliveries(ctx context.Context, owner, repo string, id int64, opts *ListCursorOptions) ([]*HookDelivery, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%v/deliveries", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - deliveries := []*HookDelivery{} - resp, err := s.client.Do(ctx, req, &deliveries) - if err != nil { - return nil, resp, err - } - - return deliveries, resp, nil -} - -// GetHookDelivery returns a delivery for a webhook configured in a repository. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repo-deliveries#get-a-delivery-for-a-repository-webhook -// -//meta:operation GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id} -func (s *RepositoriesService) GetHookDelivery(ctx context.Context, owner, repo string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%v/deliveries/%v", owner, repo, hookID, deliveryID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// RedeliverHookDelivery redelivers a delivery for a webhook configured in a repository. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repo-deliveries#redeliver-a-delivery-for-a-repository-webhook -// -//meta:operation POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts -func (s *RepositoriesService) RedeliverHookDelivery(ctx context.Context, owner, repo string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%v/deliveries/%v/attempts", owner, repo, hookID, deliveryID) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// ParseRequestPayload parses the request payload. For recognized event types, -// a value of the corresponding struct type will be returned. -func (d *HookDelivery) ParseRequestPayload() (interface{}, error) { - eType, ok := messageToTypeName[d.GetEvent()] - if !ok { - return nil, fmt.Errorf("unsupported event type %q", d.GetEvent()) - } - - e := &Event{Type: &eType, RawPayload: d.Request.RawPayload} - return e.ParsePayload() -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_invitations.go b/vendor/github.com/google/go-github/v57/github/repos_invitations.go deleted file mode 100644 index 4922e0b2..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_invitations.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RepositoryInvitation represents an invitation to collaborate on a repo. 
-type RepositoryInvitation struct { - ID *int64 `json:"id,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Invitee *User `json:"invitee,omitempty"` - Inviter *User `json:"inviter,omitempty"` - - // Permissions represents the permissions that the associated user will have - // on the repository. Possible values are: "read", "write", "admin". - Permissions *string `json:"permissions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` -} - -// ListInvitations lists all currently-open repository invitations. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#list-repository-invitations -// -//meta:operation GET /repos/{owner}/{repo}/invitations -func (s *RepositoriesService) ListInvitations(ctx context.Context, owner, repo string, opts *ListOptions) ([]*RepositoryInvitation, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/invitations", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - invites := []*RepositoryInvitation{} - resp, err := s.client.Do(ctx, req, &invites) - if err != nil { - return nil, resp, err - } - - return invites, resp, nil -} - -// DeleteInvitation deletes a repository invitation. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#delete-a-repository-invitation -// -//meta:operation DELETE /repos/{owner}/{repo}/invitations/{invitation_id} -func (s *RepositoriesService) DeleteInvitation(ctx context.Context, owner, repo string, invitationID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/invitations/%v", owner, repo, invitationID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// UpdateInvitation updates the permissions associated with a repository -// invitation. -// -// permissions represents the permissions that the associated user will have -// on the repository. Possible values are: "read", "write", "admin". -// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#update-a-repository-invitation -// -//meta:operation PATCH /repos/{owner}/{repo}/invitations/{invitation_id} -func (s *RepositoriesService) UpdateInvitation(ctx context.Context, owner, repo string, invitationID int64, permissions string) (*RepositoryInvitation, *Response, error) { - opts := &struct { - Permissions string `json:"permissions"` - }{Permissions: permissions} - u := fmt.Sprintf("repos/%v/%v/invitations/%v", owner, repo, invitationID) - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - invite := &RepositoryInvitation{} - resp, err := s.client.Do(ctx, req, invite) - if err != nil { - return nil, resp, err - } - - return invite, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_keys.go b/vendor/github.com/google/go-github/v57/github/repos_keys.go deleted file mode 100644 index cc86f8bb..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_keys.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// The Key type is defined in users_keys.go - -// ListKeys lists the deploy keys for a repository. -// -// GitHub API docs: https://docs.github.com/rest/deploy-keys/deploy-keys#list-deploy-keys -// -//meta:operation GET /repos/{owner}/{repo}/keys -func (s *RepositoriesService) ListKeys(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*Key, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/keys", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var keys []*Key - resp, err := s.client.Do(ctx, req, &keys) - if err != nil { - return nil, resp, err - } - - return keys, resp, nil -} - -// GetKey fetches a single deploy key. -// -// GitHub API docs: https://docs.github.com/rest/deploy-keys/deploy-keys#get-a-deploy-key -// -//meta:operation GET /repos/{owner}/{repo}/keys/{key_id} -func (s *RepositoriesService) GetKey(ctx context.Context, owner string, repo string, id int64) (*Key, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/keys/%v", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - key := new(Key) - resp, err := s.client.Do(ctx, req, key) - if err != nil { - return nil, resp, err - } - - return key, resp, nil -} - -// CreateKey adds a deploy key for a repository. -// -// GitHub API docs: https://docs.github.com/rest/deploy-keys/deploy-keys#create-a-deploy-key -// -//meta:operation POST /repos/{owner}/{repo}/keys -func (s *RepositoriesService) CreateKey(ctx context.Context, owner string, repo string, key *Key) (*Key, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/keys", owner, repo) - - req, err := s.client.NewRequest("POST", u, key) - if err != nil { - return nil, nil, err - } - - k := new(Key) - resp, err := s.client.Do(ctx, req, k) - if err != nil { - return nil, resp, err - } - - return k, resp, nil -} - -// DeleteKey deletes a deploy key. -// -// GitHub API docs: https://docs.github.com/rest/deploy-keys/deploy-keys#delete-a-deploy-key -// -//meta:operation DELETE /repos/{owner}/{repo}/keys/{key_id} -func (s *RepositoriesService) DeleteKey(ctx context.Context, owner string, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/keys/%v", owner, repo, id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_lfs.go b/vendor/github.com/google/go-github/v57/github/repos_lfs.go deleted file mode 100644 index f0153c08..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_lfs.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// EnableLFS turns the LFS (Large File Storage) feature ON for the selected repo. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/repos/lfs#enable-git-lfs-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/lfs -func (s *RepositoriesService) EnableLFS(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/lfs", owner, repo) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// DisableLFS turns the LFS (Large File Storage) feature OFF for the selected repo. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/repos/lfs#disable-git-lfs-for-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/lfs -func (s *RepositoriesService) DisableLFS(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/lfs", owner, repo) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_merging.go b/vendor/github.com/google/go-github/v57/github/repos_merging.go deleted file mode 100644 index b26e5da1..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_merging.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RepositoryMergeRequest represents a request to merge a branch in a -// repository. -type RepositoryMergeRequest struct { - Base *string `json:"base,omitempty"` - Head *string `json:"head,omitempty"` - CommitMessage *string `json:"commit_message,omitempty"` -} - -// RepoMergeUpstreamRequest represents a request to sync a branch of -// a forked repository to keep it up-to-date with the upstream repository. -type RepoMergeUpstreamRequest struct { - Branch *string `json:"branch,omitempty"` -} - -// RepoMergeUpstreamResult represents the result of syncing a branch of -// a forked repository with the upstream repository. -type RepoMergeUpstreamResult struct { - Message *string `json:"message,omitempty"` - MergeType *string `json:"merge_type,omitempty"` - BaseBranch *string `json:"base_branch,omitempty"` -} - -// Merge a branch in the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/branches/branches#merge-a-branch -// -//meta:operation POST /repos/{owner}/{repo}/merges -func (s *RepositoriesService) Merge(ctx context.Context, owner, repo string, request *RepositoryMergeRequest) (*RepositoryCommit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/merges", owner, repo) - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - commit := new(RepositoryCommit) - resp, err := s.client.Do(ctx, req, commit) - if err != nil { - return nil, resp, err - } - - return commit, resp, nil -} - -// MergeUpstream syncs a branch of a forked repository to keep it up-to-date -// with the upstream repository. 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branches#sync-a-fork-branch-with-the-upstream-repository -// -//meta:operation POST /repos/{owner}/{repo}/merge-upstream -func (s *RepositoriesService) MergeUpstream(ctx context.Context, owner, repo string, request *RepoMergeUpstreamRequest) (*RepoMergeUpstreamResult, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/merge-upstream", owner, repo) - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - result := new(RepoMergeUpstreamResult) - resp, err := s.client.Do(ctx, req, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_pages.go b/vendor/github.com/google/go-github/v57/github/repos_pages.go deleted file mode 100644 index 6b9ba76e..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_pages.go +++ /dev/null @@ -1,324 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Pages represents a GitHub Pages site configuration. -type Pages struct { - URL *string `json:"url,omitempty"` - Status *string `json:"status,omitempty"` - CNAME *string `json:"cname,omitempty"` - Custom404 *bool `json:"custom_404,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - BuildType *string `json:"build_type,omitempty"` - Source *PagesSource `json:"source,omitempty"` - Public *bool `json:"public,omitempty"` - HTTPSCertificate *PagesHTTPSCertificate `json:"https_certificate,omitempty"` - HTTPSEnforced *bool `json:"https_enforced,omitempty"` -} - -// PagesSource represents a GitHub page's source. -type PagesSource struct { - Branch *string `json:"branch,omitempty"` - Path *string `json:"path,omitempty"` -} - -// PagesError represents a build error for a GitHub Pages site. -type PagesError struct { - Message *string `json:"message,omitempty"` -} - -// PagesBuild represents the build information for a GitHub Pages site. -type PagesBuild struct { - URL *string `json:"url,omitempty"` - Status *string `json:"status,omitempty"` - Error *PagesError `json:"error,omitempty"` - Pusher *User `json:"pusher,omitempty"` - Commit *string `json:"commit,omitempty"` - Duration *int `json:"duration,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -// PagesDomain represents a domain associated with a GitHub Pages site. 
-type PagesDomain struct { - Host *string `json:"host,omitempty"` - URI *string `json:"uri,omitempty"` - Nameservers *string `json:"nameservers,omitempty"` - DNSResolves *bool `json:"dns_resolves,omitempty"` - IsProxied *bool `json:"is_proxied,omitempty"` - IsCloudflareIP *bool `json:"is_cloudflare_ip,omitempty"` - IsFastlyIP *bool `json:"is_fastly_ip,omitempty"` - IsOldIPAddress *bool `json:"is_old_ip_address,omitempty"` - IsARecord *bool `json:"is_a_record,omitempty"` - HasCNAMERecord *bool `json:"has_cname_record,omitempty"` - HasMXRecordsPresent *bool `json:"has_mx_records_present,omitempty"` - IsValidDomain *bool `json:"is_valid_domain,omitempty"` - IsApexDomain *bool `json:"is_apex_domain,omitempty"` - ShouldBeARecord *bool `json:"should_be_a_record,omitempty"` - IsCNAMEToGithubUserDomain *bool `json:"is_cname_to_github_user_domain,omitempty"` - IsCNAMEToPagesDotGithubDotCom *bool `json:"is_cname_to_pages_dot_github_dot_com,omitempty"` - IsCNAMEToFastly *bool `json:"is_cname_to_fastly,omitempty"` - IsPointedToGithubPagesIP *bool `json:"is_pointed_to_github_pages_ip,omitempty"` - IsNonGithubPagesIPPresent *bool `json:"is_non_github_pages_ip_present,omitempty"` - IsPagesDomain *bool `json:"is_pages_domain,omitempty"` - IsServedByPages *bool `json:"is_served_by_pages,omitempty"` - IsValid *bool `json:"is_valid,omitempty"` - Reason *string `json:"reason,omitempty"` - RespondsToHTTPS *bool `json:"responds_to_https,omitempty"` - EnforcesHTTPS *bool `json:"enforces_https,omitempty"` - HTTPSError *string `json:"https_error,omitempty"` - IsHTTPSEligible *bool `json:"is_https_eligible,omitempty"` - CAAError *string `json:"caa_error,omitempty"` -} - -// PagesHealthCheckResponse represents the response given for the health check of a GitHub Pages site. -type PagesHealthCheckResponse struct { - Domain *PagesDomain `json:"domain,omitempty"` - AltDomain *PagesDomain `json:"alt_domain,omitempty"` -} - -// PagesHTTPSCertificate represents the HTTPS Certificate information for a GitHub Pages site. -type PagesHTTPSCertificate struct { - State *string `json:"state,omitempty"` - Description *string `json:"description,omitempty"` - Domains []string `json:"domains,omitempty"` - // GitHub's API doesn't return a standard Timestamp, rather it returns a YYYY-MM-DD string. - ExpiresAt *string `json:"expires_at,omitempty"` -} - -// createPagesRequest is a subset of Pages and is used internally -// by EnablePages to pass only the known fields for the endpoint. -type createPagesRequest struct { - BuildType *string `json:"build_type,omitempty"` - Source *PagesSource `json:"source,omitempty"` -} - -// EnablePages enables GitHub Pages for the named repo. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#create-a-github-pages-site -// -//meta:operation POST /repos/{owner}/{repo}/pages -func (s *RepositoriesService) EnablePages(ctx context.Context, owner, repo string, pages *Pages) (*Pages, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) - - pagesReq := &createPagesRequest{ - BuildType: pages.BuildType, - Source: pages.Source, - } - - req, err := s.client.NewRequest("POST", u, pagesReq) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeEnablePagesAPIPreview) - - enable := new(Pages) - resp, err := s.client.Do(ctx, req, enable) - if err != nil { - return nil, resp, err - } - - return enable, resp, nil -} - -// PagesUpdate sets up parameters needed to update a GitHub Pages site. 
-type PagesUpdate struct { - // CNAME represents a custom domain for the repository. - // Leaving CNAME empty will remove the custom domain. - CNAME *string `json:"cname"` - // BuildType is optional and can either be "legacy" or "workflow". - // "workflow" - You are using a github workflow to build your pages. - // "legacy" - You are deploying from a branch. - BuildType *string `json:"build_type,omitempty"` - // Source must include the branch name, and may optionally specify the subdirectory "/docs". - // Possible values for Source.Branch are usually "gh-pages", "main", and "master", - // or any other existing branch name. - // Possible values for Source.Path are: "/", and "/docs". - Source *PagesSource `json:"source,omitempty"` - // Public configures access controls for the site. - // If "true", the site will be accessible to anyone on the internet. If "false", - // the site will be accessible to anyone with read access to the repository that - // published the site. - Public *bool `json:"public,omitempty"` - // HTTPSEnforced specifies whether HTTPS should be enforced for the repository. - HTTPSEnforced *bool `json:"https_enforced,omitempty"` -} - -// UpdatePages updates GitHub Pages for the named repo. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#update-information-about-a-github-pages-site -// -//meta:operation PUT /repos/{owner}/{repo}/pages -func (s *RepositoriesService) UpdatePages(ctx context.Context, owner, repo string, opts *PagesUpdate) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) - - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// DisablePages disables GitHub Pages for the named repo. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#delete-a-github-pages-site -// -//meta:operation DELETE /repos/{owner}/{repo}/pages -func (s *RepositoriesService) DisablePages(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeEnablePagesAPIPreview) - - return s.client.Do(ctx, req, nil) -} - -// GetPagesInfo fetches information about a GitHub Pages site. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#get-a-github-pages-site -// -//meta:operation GET /repos/{owner}/{repo}/pages -func (s *RepositoriesService) GetPagesInfo(ctx context.Context, owner, repo string) (*Pages, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - site := new(Pages) - resp, err := s.client.Do(ctx, req, site) - if err != nil { - return nil, resp, err - } - - return site, resp, nil -} - -// ListPagesBuilds lists the builds for a GitHub Pages site. 
-// -// GitHub API docs: https://docs.github.com/rest/pages/pages#list-github-pages-builds -// -//meta:operation GET /repos/{owner}/{repo}/pages/builds -func (s *RepositoriesService) ListPagesBuilds(ctx context.Context, owner, repo string, opts *ListOptions) ([]*PagesBuild, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages/builds", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pages []*PagesBuild - resp, err := s.client.Do(ctx, req, &pages) - if err != nil { - return nil, resp, err - } - - return pages, resp, nil -} - -// GetLatestPagesBuild fetches the latest build information for a GitHub pages site. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#get-latest-pages-build -// -//meta:operation GET /repos/{owner}/{repo}/pages/builds/latest -func (s *RepositoriesService) GetLatestPagesBuild(ctx context.Context, owner, repo string) (*PagesBuild, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages/builds/latest", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - build := new(PagesBuild) - resp, err := s.client.Do(ctx, req, build) - if err != nil { - return nil, resp, err - } - - return build, resp, nil -} - -// GetPageBuild fetches the specific build information for a GitHub pages site. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#get-github-pages-build -// -//meta:operation GET /repos/{owner}/{repo}/pages/builds/{build_id} -func (s *RepositoriesService) GetPageBuild(ctx context.Context, owner, repo string, id int64) (*PagesBuild, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages/builds/%v", owner, repo, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - build := new(PagesBuild) - resp, err := s.client.Do(ctx, req, build) - if err != nil { - return nil, resp, err - } - - return build, resp, nil -} - -// RequestPageBuild requests a build of a GitHub Pages site without needing to push new commit. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#request-a-github-pages-build -// -//meta:operation POST /repos/{owner}/{repo}/pages/builds -func (s *RepositoriesService) RequestPageBuild(ctx context.Context, owner, repo string) (*PagesBuild, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages/builds", owner, repo) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - build := new(PagesBuild) - resp, err := s.client.Do(ctx, req, build) - if err != nil { - return nil, resp, err - } - - return build, resp, nil -} - -// GetPageHealthCheck gets a DNS health check for the CNAME record configured for a repository's GitHub Pages. 
-// -// GitHub API docs: https://docs.github.com/rest/pages/pages#get-a-dns-health-check-for-github-pages -// -//meta:operation GET /repos/{owner}/{repo}/pages/health -func (s *RepositoriesService) GetPageHealthCheck(ctx context.Context, owner, repo string) (*PagesHealthCheckResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages/health", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - healthCheckResponse := new(PagesHealthCheckResponse) - resp, err := s.client.Do(ctx, req, healthCheckResponse) - if err != nil { - return nil, resp, err - } - - return healthCheckResponse, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_prereceive_hooks.go b/vendor/github.com/google/go-github/v57/github/repos_prereceive_hooks.go deleted file mode 100644 index e8361383..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_prereceive_hooks.go +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// PreReceiveHook represents a GitHub pre-receive hook for a repository. -type PreReceiveHook struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Enforcement *string `json:"enforcement,omitempty"` - ConfigURL *string `json:"configuration_url,omitempty"` -} - -func (p PreReceiveHook) String() string { - return Stringify(p) -} - -// ListPreReceiveHooks lists all pre-receive hooks for the specified repository. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/repo-pre-receive-hooks#list-pre-receive-hooks-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/pre-receive-hooks -func (s *RepositoriesService) ListPreReceiveHooks(ctx context.Context, owner, repo string, opts *ListOptions) ([]*PreReceiveHook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) - - var hooks []*PreReceiveHook - resp, err := s.client.Do(ctx, req, &hooks) - if err != nil { - return nil, resp, err - } - - return hooks, resp, nil -} - -// GetPreReceiveHook returns a single specified pre-receive hook. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/repo-pre-receive-hooks#get-a-pre-receive-hook-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/pre-receive-hooks/{pre_receive_hook_id} -func (s *RepositoriesService) GetPreReceiveHook(ctx context.Context, owner, repo string, id int64) (*PreReceiveHook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) - - h := new(PreReceiveHook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// UpdatePreReceiveHook updates a specified pre-receive hook. 
-// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/repo-pre-receive-hooks#update-pre-receive-hook-enforcement-for-a-repository -// -//meta:operation PATCH /repos/{owner}/{repo}/pre-receive-hooks/{pre_receive_hook_id} -func (s *RepositoriesService) UpdatePreReceiveHook(ctx context.Context, owner, repo string, id int64, hook *PreReceiveHook) (*PreReceiveHook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("PATCH", u, hook) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) - - h := new(PreReceiveHook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// DeletePreReceiveHook deletes a specified pre-receive hook. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/repo-pre-receive-hooks#remove-pre-receive-hook-enforcement-for-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/pre-receive-hooks/{pre_receive_hook_id} -func (s *RepositoriesService) DeletePreReceiveHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_projects.go b/vendor/github.com/google/go-github/v57/github/repos_projects.go deleted file mode 100644 index 9269d4e9..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_projects.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ProjectListOptions specifies the optional parameters to the -// OrganizationsService.ListProjects and RepositoriesService.ListProjects methods. -type ProjectListOptions struct { - // Indicates the state of the projects to return. Can be either open, closed, or all. Default: open - State string `url:"state,omitempty"` - - ListOptions -} - -// ListProjects lists the projects for a repo. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#list-repository-projects -// -//meta:operation GET /repos/{owner}/{repo}/projects -func (s *RepositoriesService) ListProjects(ctx context.Context, owner, repo string, opts *ProjectListOptions) ([]*Project, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/projects", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - var projects []*Project - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// CreateProject creates a GitHub Project for the specified repository. 
-// -// GitHub API docs: https://docs.github.com/rest/projects/projects#create-a-repository-project -// -//meta:operation POST /repos/{owner}/{repo}/projects -func (s *RepositoriesService) CreateProject(ctx context.Context, owner, repo string, opts *ProjectOptions) (*Project, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/projects", owner, repo) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - project := &Project{} - resp, err := s.client.Do(ctx, req, project) - if err != nil { - return nil, resp, err - } - - return project, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_releases.go b/vendor/github.com/google/go-github/v57/github/repos_releases.go deleted file mode 100644 index 7231db6d..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_releases.go +++ /dev/null @@ -1,475 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "errors" - "fmt" - "io" - "mime" - "net/http" - "os" - "path/filepath" - "strings" -) - -// RepositoryRelease represents a GitHub release in a repository. -type RepositoryRelease struct { - TagName *string `json:"tag_name,omitempty"` - TargetCommitish *string `json:"target_commitish,omitempty"` - Name *string `json:"name,omitempty"` - Body *string `json:"body,omitempty"` - Draft *bool `json:"draft,omitempty"` - Prerelease *bool `json:"prerelease,omitempty"` - // MakeLatest can be one of: "true", "false", or "legacy". - MakeLatest *string `json:"make_latest,omitempty"` - DiscussionCategoryName *string `json:"discussion_category_name,omitempty"` - - // The following fields are not used in EditRelease: - GenerateReleaseNotes *bool `json:"generate_release_notes,omitempty"` - - // The following fields are not used in CreateRelease or EditRelease: - ID *int64 `json:"id,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - PublishedAt *Timestamp `json:"published_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - AssetsURL *string `json:"assets_url,omitempty"` - Assets []*ReleaseAsset `json:"assets,omitempty"` - UploadURL *string `json:"upload_url,omitempty"` - ZipballURL *string `json:"zipball_url,omitempty"` - TarballURL *string `json:"tarball_url,omitempty"` - Author *User `json:"author,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (r RepositoryRelease) String() string { - return Stringify(r) -} - -// RepositoryReleaseNotes represents a GitHub-generated release notes. -type RepositoryReleaseNotes struct { - Name string `json:"name"` - Body string `json:"body"` -} - -// GenerateNotesOptions represents the options to generate release notes. -type GenerateNotesOptions struct { - TagName string `json:"tag_name"` - PreviousTagName *string `json:"previous_tag_name,omitempty"` - TargetCommitish *string `json:"target_commitish,omitempty"` -} - -// ReleaseAsset represents a GitHub release asset in a repository. 
-type ReleaseAsset struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - Label *string `json:"label,omitempty"` - State *string `json:"state,omitempty"` - ContentType *string `json:"content_type,omitempty"` - Size *int `json:"size,omitempty"` - DownloadCount *int `json:"download_count,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - BrowserDownloadURL *string `json:"browser_download_url,omitempty"` - Uploader *User `json:"uploader,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (r ReleaseAsset) String() string { - return Stringify(r) -} - -// ListReleases lists the releases for a repository. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#list-releases -// -//meta:operation GET /repos/{owner}/{repo}/releases -func (s *RepositoriesService) ListReleases(ctx context.Context, owner, repo string, opts *ListOptions) ([]*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var releases []*RepositoryRelease - resp, err := s.client.Do(ctx, req, &releases) - if err != nil { - return nil, resp, err - } - return releases, resp, nil -} - -// GetRelease fetches a single release. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#get-a-release -// -//meta:operation GET /repos/{owner}/{repo}/releases/{release_id} -func (s *RepositoriesService) GetRelease(ctx context.Context, owner, repo string, id int64) (*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) - return s.getSingleRelease(ctx, u) -} - -// GetLatestRelease fetches the latest published release for the repository. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#get-the-latest-release -// -//meta:operation GET /repos/{owner}/{repo}/releases/latest -func (s *RepositoriesService) GetLatestRelease(ctx context.Context, owner, repo string) (*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/latest", owner, repo) - return s.getSingleRelease(ctx, u) -} - -// GetReleaseByTag fetches a release with the specified tag. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#get-a-release-by-tag-name -// -//meta:operation GET /repos/{owner}/{repo}/releases/tags/{tag} -func (s *RepositoriesService) GetReleaseByTag(ctx context.Context, owner, repo, tag string) (*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/tags/%s", owner, repo, tag) - return s.getSingleRelease(ctx, u) -} - -// GenerateReleaseNotes generates the release notes for the given tag. 
-// -// GitHub API docs: https://docs.github.com/rest/releases/releases#generate-release-notes-content-for-a-release -// -//meta:operation POST /repos/{owner}/{repo}/releases/generate-notes -func (s *RepositoriesService) GenerateReleaseNotes(ctx context.Context, owner, repo string, opts *GenerateNotesOptions) (*RepositoryReleaseNotes, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/generate-notes", owner, repo) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - r := new(RepositoryReleaseNotes) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -func (s *RepositoriesService) getSingleRelease(ctx context.Context, url string) (*RepositoryRelease, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - release := new(RepositoryRelease) - resp, err := s.client.Do(ctx, req, release) - if err != nil { - return nil, resp, err - } - return release, resp, nil -} - -// repositoryReleaseRequest is a subset of RepositoryRelease and -// is used internally by CreateRelease and EditRelease to pass -// only the known fields for these endpoints. -// -// See https://github.com/google/go-github/issues/992 for more -// information. -type repositoryReleaseRequest struct { - TagName *string `json:"tag_name,omitempty"` - TargetCommitish *string `json:"target_commitish,omitempty"` - Name *string `json:"name,omitempty"` - Body *string `json:"body,omitempty"` - Draft *bool `json:"draft,omitempty"` - Prerelease *bool `json:"prerelease,omitempty"` - MakeLatest *string `json:"make_latest,omitempty"` - GenerateReleaseNotes *bool `json:"generate_release_notes,omitempty"` - DiscussionCategoryName *string `json:"discussion_category_name,omitempty"` -} - -// CreateRelease adds a new release for a repository. -// -// Note that only a subset of the release fields are used. -// See RepositoryRelease for more information. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#create-a-release -// -//meta:operation POST /repos/{owner}/{repo}/releases -func (s *RepositoriesService) CreateRelease(ctx context.Context, owner, repo string, release *RepositoryRelease) (*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases", owner, repo) - - releaseReq := &repositoryReleaseRequest{ - TagName: release.TagName, - TargetCommitish: release.TargetCommitish, - Name: release.Name, - Body: release.Body, - Draft: release.Draft, - Prerelease: release.Prerelease, - MakeLatest: release.MakeLatest, - DiscussionCategoryName: release.DiscussionCategoryName, - GenerateReleaseNotes: release.GenerateReleaseNotes, - } - - req, err := s.client.NewRequest("POST", u, releaseReq) - if err != nil { - return nil, nil, err - } - - r := new(RepositoryRelease) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - return r, resp, nil -} - -// EditRelease edits a repository release. -// -// Note that only a subset of the release fields are used. -// See RepositoryRelease for more information. 
-// -// GitHub API docs: https://docs.github.com/rest/releases/releases#update-a-release -// -//meta:operation PATCH /repos/{owner}/{repo}/releases/{release_id} -func (s *RepositoriesService) EditRelease(ctx context.Context, owner, repo string, id int64, release *RepositoryRelease) (*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) - - releaseReq := &repositoryReleaseRequest{ - TagName: release.TagName, - TargetCommitish: release.TargetCommitish, - Name: release.Name, - Body: release.Body, - Draft: release.Draft, - Prerelease: release.Prerelease, - MakeLatest: release.MakeLatest, - DiscussionCategoryName: release.DiscussionCategoryName, - } - - req, err := s.client.NewRequest("PATCH", u, releaseReq) - if err != nil { - return nil, nil, err - } - - r := new(RepositoryRelease) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - return r, resp, nil -} - -// DeleteRelease delete a single release from a repository. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#delete-a-release -// -//meta:operation DELETE /repos/{owner}/{repo}/releases/{release_id} -func (s *RepositoriesService) DeleteRelease(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListReleaseAssets lists the release's assets. -// -// GitHub API docs: https://docs.github.com/rest/releases/assets#list-release-assets -// -//meta:operation GET /repos/{owner}/{repo}/releases/{release_id}/assets -func (s *RepositoriesService) ListReleaseAssets(ctx context.Context, owner, repo string, id int64, opts *ListOptions) ([]*ReleaseAsset, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/%d/assets", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var assets []*ReleaseAsset - resp, err := s.client.Do(ctx, req, &assets) - if err != nil { - return nil, resp, err - } - return assets, resp, nil -} - -// GetReleaseAsset fetches a single release asset. -// -// GitHub API docs: https://docs.github.com/rest/releases/assets#get-a-release-asset -// -//meta:operation GET /repos/{owner}/{repo}/releases/assets/{asset_id} -func (s *RepositoriesService) GetReleaseAsset(ctx context.Context, owner, repo string, id int64) (*ReleaseAsset, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - asset := new(ReleaseAsset) - resp, err := s.client.Do(ctx, req, asset) - if err != nil { - return nil, resp, err - } - return asset, resp, nil -} - -// DownloadReleaseAsset downloads a release asset or returns a redirect URL. -// -// DownloadReleaseAsset returns an io.ReadCloser that reads the contents of the -// specified release asset. It is the caller's responsibility to close the ReadCloser. -// If a redirect is returned, the redirect URL will be returned as a string instead -// of the io.ReadCloser. Exactly one of rc and redirectURL will be zero. -// -// followRedirectsClient can be passed to download the asset from a redirected -// location. 
Passing http.DefaultClient is recommended unless special circumstances -// exist, but it's possible to pass any http.Client. If nil is passed the -// redirectURL will be returned instead. -// -// GitHub API docs: https://docs.github.com/rest/releases/assets#get-a-release-asset -// -//meta:operation GET /repos/{owner}/{repo}/releases/assets/{asset_id} -func (s *RepositoriesService) DownloadReleaseAsset(ctx context.Context, owner, repo string, id int64, followRedirectsClient *http.Client) (rc io.ReadCloser, redirectURL string, err error) { - u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, "", err - } - req.Header.Set("Accept", defaultMediaType) - - s.client.clientMu.Lock() - defer s.client.clientMu.Unlock() - - var loc string - saveRedirect := s.client.client.CheckRedirect - s.client.client.CheckRedirect = func(req *http.Request, via []*http.Request) error { - loc = req.URL.String() - return errors.New("disable redirect") - } - defer func() { s.client.client.CheckRedirect = saveRedirect }() - - req = withContext(ctx, req) - resp, err := s.client.client.Do(req) - if err != nil { - if !strings.Contains(err.Error(), "disable redirect") { - return nil, "", err - } - if followRedirectsClient != nil { - rc, err := s.downloadReleaseAssetFromURL(ctx, followRedirectsClient, loc) - return rc, "", err - } - return nil, loc, nil // Intentionally return no error with valid redirect URL. - } - - if err := CheckResponse(resp); err != nil { - _ = resp.Body.Close() - return nil, "", err - } - - return resp.Body, "", nil -} - -func (s *RepositoriesService) downloadReleaseAssetFromURL(ctx context.Context, followRedirectsClient *http.Client, url string) (rc io.ReadCloser, err error) { - req, err := http.NewRequest("GET", url, nil) - if err != nil { - return nil, err - } - req = withContext(ctx, req) - req.Header.Set("Accept", "*/*") - resp, err := followRedirectsClient.Do(req) - if err != nil { - return nil, err - } - if err := CheckResponse(resp); err != nil { - _ = resp.Body.Close() - return nil, err - } - return resp.Body, nil -} - -// EditReleaseAsset edits a repository release asset. -// -// GitHub API docs: https://docs.github.com/rest/releases/assets#update-a-release-asset -// -//meta:operation PATCH /repos/{owner}/{repo}/releases/assets/{asset_id} -func (s *RepositoriesService) EditReleaseAsset(ctx context.Context, owner, repo string, id int64, release *ReleaseAsset) (*ReleaseAsset, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) - - req, err := s.client.NewRequest("PATCH", u, release) - if err != nil { - return nil, nil, err - } - - asset := new(ReleaseAsset) - resp, err := s.client.Do(ctx, req, asset) - if err != nil { - return nil, resp, err - } - return asset, resp, nil -} - -// DeleteReleaseAsset delete a single release asset from a repository. -// -// GitHub API docs: https://docs.github.com/rest/releases/assets#delete-a-release-asset -// -//meta:operation DELETE /repos/{owner}/{repo}/releases/assets/{asset_id} -func (s *RepositoriesService) DeleteReleaseAsset(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// UploadReleaseAsset creates an asset by uploading a file into a release repository. 
-// To upload assets that cannot be represented by an os.File, call NewUploadRequest directly. -// -// GitHub API docs: https://docs.github.com/rest/releases/assets#upload-a-release-asset -// -//meta:operation POST /repos/{owner}/{repo}/releases/{release_id}/assets -func (s *RepositoriesService) UploadReleaseAsset(ctx context.Context, owner, repo string, id int64, opts *UploadOptions, file *os.File) (*ReleaseAsset, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/%d/assets", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - stat, err := file.Stat() - if err != nil { - return nil, nil, err - } - if stat.IsDir() { - return nil, nil, errors.New("the asset to upload can't be a directory") - } - - mediaType := mime.TypeByExtension(filepath.Ext(file.Name())) - if opts.MediaType != "" { - mediaType = opts.MediaType - } - - req, err := s.client.NewUploadRequest(u, file, stat.Size(), mediaType) - if err != nil { - return nil, nil, err - } - - asset := new(ReleaseAsset) - resp, err := s.client.Do(ctx, req, asset) - if err != nil { - return nil, resp, err - } - return asset, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_rules.go b/vendor/github.com/google/go-github/v57/github/repos_rules.go deleted file mode 100644 index 479806c2..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_rules.go +++ /dev/null @@ -1,511 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" -) - -// BypassActor represents the bypass actors from a ruleset. -type BypassActor struct { - ActorID *int64 `json:"actor_id,omitempty"` - // Possible values for ActorType are: RepositoryRole, Team, Integration, OrganizationAdmin - ActorType *string `json:"actor_type,omitempty"` - // Possible values for BypassMode are: always, pull_request - BypassMode *string `json:"bypass_mode,omitempty"` -} - -// RulesetLink represents a single link object from GitHub ruleset request _links. -type RulesetLink struct { - HRef *string `json:"href,omitempty"` -} - -// RulesetLinks represents the "_links" object in a Ruleset. -type RulesetLinks struct { - Self *RulesetLink `json:"self,omitempty"` -} - -// RulesetRefConditionParameters represents the conditions object for ref_names. -type RulesetRefConditionParameters struct { - Include []string `json:"include"` - Exclude []string `json:"exclude"` -} - -// RulesetRepositoryNamesConditionParameters represents the conditions object for repository_names. -type RulesetRepositoryNamesConditionParameters struct { - Include []string `json:"include"` - Exclude []string `json:"exclude"` - Protected *bool `json:"protected,omitempty"` -} - -// RulesetRepositoryIDsConditionParameters represents the conditions object for repository_ids. -type RulesetRepositoryIDsConditionParameters struct { - RepositoryIDs []int64 `json:"repository_ids,omitempty"` -} - -// RulesetConditions represents the conditions object in a ruleset. -// Set either RepositoryName or RepositoryID, not both. 
-type RulesetConditions struct { - RefName *RulesetRefConditionParameters `json:"ref_name,omitempty"` - RepositoryName *RulesetRepositoryNamesConditionParameters `json:"repository_name,omitempty"` - RepositoryID *RulesetRepositoryIDsConditionParameters `json:"repository_id,omitempty"` -} - -// RulePatternParameters represents the rule pattern parameters. -type RulePatternParameters struct { - Name *string `json:"name,omitempty"` - // If Negate is true, the rule will fail if the pattern matches. - Negate *bool `json:"negate,omitempty"` - // Possible values for Operator are: starts_with, ends_with, contains, regex - Operator string `json:"operator"` - Pattern string `json:"pattern"` -} - -// UpdateAllowsFetchAndMergeRuleParameters represents the update rule parameters. -type UpdateAllowsFetchAndMergeRuleParameters struct { - UpdateAllowsFetchAndMerge bool `json:"update_allows_fetch_and_merge"` -} - -// RequiredDeploymentEnvironmentsRuleParameters represents the required_deployments rule parameters. -type RequiredDeploymentEnvironmentsRuleParameters struct { - RequiredDeploymentEnvironments []string `json:"required_deployment_environments"` -} - -// PullRequestRuleParameters represents the pull_request rule parameters. -type PullRequestRuleParameters struct { - DismissStaleReviewsOnPush bool `json:"dismiss_stale_reviews_on_push"` - RequireCodeOwnerReview bool `json:"require_code_owner_review"` - RequireLastPushApproval bool `json:"require_last_push_approval"` - RequiredApprovingReviewCount int `json:"required_approving_review_count"` - RequiredReviewThreadResolution bool `json:"required_review_thread_resolution"` -} - -// RuleRequiredStatusChecks represents the RequiredStatusChecks for the RequiredStatusChecksRuleParameters object. -type RuleRequiredStatusChecks struct { - Context string `json:"context"` - IntegrationID *int64 `json:"integration_id,omitempty"` -} - -// RequiredStatusChecksRuleParameters represents the required_status_checks rule parameters. -type RequiredStatusChecksRuleParameters struct { - RequiredStatusChecks []RuleRequiredStatusChecks `json:"required_status_checks"` - StrictRequiredStatusChecksPolicy bool `json:"strict_required_status_checks_policy"` -} - -// RuleRequiredWorkflow represents the Workflow for the RequireWorkflowsRuleParameters object. -type RuleRequiredWorkflow struct { - Path string `json:"path"` - Ref *string `json:"ref,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - Sha *string `json:"sha,omitempty"` -} - -// RequiredWorkflowsRuleParameters represents the workflows rule parameters. -type RequiredWorkflowsRuleParameters struct { - RequiredWorkflows []*RuleRequiredWorkflow `json:"workflows"` -} - -// RepositoryRule represents a GitHub Rule. -type RepositoryRule struct { - Type string `json:"type"` - Parameters *json.RawMessage `json:"parameters,omitempty"` -} - -// UnmarshalJSON implements the json.Unmarshaler interface. -// This helps us handle the fact that RepositoryRule parameter field can be of numerous types. 
-func (r *RepositoryRule) UnmarshalJSON(data []byte) error { - type rule RepositoryRule - var RepositoryRule rule - if err := json.Unmarshal(data, &RepositoryRule); err != nil { - return err - } - - r.Type = RepositoryRule.Type - - switch RepositoryRule.Type { - case "creation", "deletion", "required_linear_history", "required_signatures", "non_fast_forward": - r.Parameters = nil - case "update": - if RepositoryRule.Parameters == nil { - r.Parameters = nil - return nil - } - params := UpdateAllowsFetchAndMergeRuleParameters{} - if err := json.Unmarshal(*RepositoryRule.Parameters, ¶ms); err != nil { - return err - } - - bytes, _ := json.Marshal(params) - rawParams := json.RawMessage(bytes) - - r.Parameters = &rawParams - - case "required_deployments": - params := RequiredDeploymentEnvironmentsRuleParameters{} - if err := json.Unmarshal(*RepositoryRule.Parameters, ¶ms); err != nil { - return err - } - - bytes, _ := json.Marshal(params) - rawParams := json.RawMessage(bytes) - - r.Parameters = &rawParams - case "commit_message_pattern", "commit_author_email_pattern", "committer_email_pattern", "branch_name_pattern", "tag_name_pattern": - params := RulePatternParameters{} - if err := json.Unmarshal(*RepositoryRule.Parameters, ¶ms); err != nil { - return err - } - - bytes, _ := json.Marshal(params) - rawParams := json.RawMessage(bytes) - - r.Parameters = &rawParams - case "pull_request": - params := PullRequestRuleParameters{} - if err := json.Unmarshal(*RepositoryRule.Parameters, ¶ms); err != nil { - return err - } - - bytes, _ := json.Marshal(params) - rawParams := json.RawMessage(bytes) - - r.Parameters = &rawParams - case "required_status_checks": - params := RequiredStatusChecksRuleParameters{} - if err := json.Unmarshal(*RepositoryRule.Parameters, ¶ms); err != nil { - return err - } - - bytes, _ := json.Marshal(params) - rawParams := json.RawMessage(bytes) - - r.Parameters = &rawParams - case "workflows": - params := RequiredWorkflowsRuleParameters{} - if err := json.Unmarshal(*RepositoryRule.Parameters, ¶ms); err != nil { - return err - } - - bytes, _ := json.Marshal(params) - rawParams := json.RawMessage(bytes) - - r.Parameters = &rawParams - default: - r.Type = "" - r.Parameters = nil - return fmt.Errorf("RepositoryRule.Type %T is not yet implemented, unable to unmarshal", RepositoryRule.Type) - } - - return nil -} - -// NewCreationRule creates a rule to only allow users with bypass permission to create matching refs. -func NewCreationRule() (rule *RepositoryRule) { - return &RepositoryRule{ - Type: "creation", - } -} - -// NewUpdateRule creates a rule to only allow users with bypass permission to update matching refs. -func NewUpdateRule(params *UpdateAllowsFetchAndMergeRuleParameters) (rule *RepositoryRule) { - if params != nil { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "update", - Parameters: &rawParams, - } - } - return &RepositoryRule{ - Type: "update", - } -} - -// NewDeletionRule creates a rule to only allow users with bypass permissions to delete matching refs. -func NewDeletionRule() (rule *RepositoryRule) { - return &RepositoryRule{ - Type: "deletion", - } -} - -// NewRequiredLinearHistoryRule creates a rule to prevent merge commits from being pushed to matching branches. 
-func NewRequiredLinearHistoryRule() (rule *RepositoryRule) { - return &RepositoryRule{ - Type: "required_linear_history", - } -} - -// NewRequiredDeploymentsRule creates a rule to require environments to be successfully deployed before they can be merged into the matching branches. -func NewRequiredDeploymentsRule(params *RequiredDeploymentEnvironmentsRuleParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "required_deployments", - Parameters: &rawParams, - } -} - -// NewRequiredSignaturesRule creates a rule a to require commits pushed to matching branches to have verified signatures. -func NewRequiredSignaturesRule() (rule *RepositoryRule) { - return &RepositoryRule{ - Type: "required_signatures", - } -} - -// NewPullRequestRule creates a rule to require all commits be made to a non-target branch and submitted via a pull request before they can be merged. -func NewPullRequestRule(params *PullRequestRuleParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "pull_request", - Parameters: &rawParams, - } -} - -// NewRequiredStatusChecksRule creates a rule to require which status checks must pass before branches can be merged into a branch rule. -func NewRequiredStatusChecksRule(params *RequiredStatusChecksRuleParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "required_status_checks", - Parameters: &rawParams, - } -} - -// NewNonFastForwardRule creates a rule as part to prevent users with push access from force pushing to matching branches. -func NewNonFastForwardRule() (rule *RepositoryRule) { - return &RepositoryRule{ - Type: "non_fast_forward", - } -} - -// NewCommitMessagePatternRule creates a rule to restrict commit message patterns being pushed to matching branches. -func NewCommitMessagePatternRule(params *RulePatternParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "commit_message_pattern", - Parameters: &rawParams, - } -} - -// NewCommitAuthorEmailPatternRule creates a rule to restrict commits with author email patterns being merged into matching branches. -func NewCommitAuthorEmailPatternRule(params *RulePatternParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "commit_author_email_pattern", - Parameters: &rawParams, - } -} - -// NewCommitterEmailPatternRule creates a rule to restrict commits with committer email patterns being merged into matching branches. -func NewCommitterEmailPatternRule(params *RulePatternParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "committer_email_pattern", - Parameters: &rawParams, - } -} - -// NewBranchNamePatternRule creates a rule to restrict branch patterns from being merged into matching branches. 
-func NewBranchNamePatternRule(params *RulePatternParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "branch_name_pattern", - Parameters: &rawParams, - } -} - -// NewTagNamePatternRule creates a rule to restrict tag patterns contained in non-target branches from being merged into matching branches. -func NewTagNamePatternRule(params *RulePatternParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "tag_name_pattern", - Parameters: &rawParams, - } -} - -// NewRequiredWorkflowsRule creates a rule to require which status checks must pass before branches can be merged into a branch rule. -func NewRequiredWorkflowsRule(params *RequiredWorkflowsRuleParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "workflows", - Parameters: &rawParams, - } -} - -// Ruleset represents a GitHub ruleset object. -type Ruleset struct { - ID *int64 `json:"id,omitempty"` - Name string `json:"name"` - // Possible values for Target are branch, tag - Target *string `json:"target,omitempty"` - // Possible values for SourceType are: Repository, Organization - SourceType *string `json:"source_type,omitempty"` - Source string `json:"source"` - // Possible values for Enforcement are: disabled, active, evaluate - Enforcement string `json:"enforcement"` - BypassActors []*BypassActor `json:"bypass_actors,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Links *RulesetLinks `json:"_links,omitempty"` - Conditions *RulesetConditions `json:"conditions,omitempty"` - Rules []*RepositoryRule `json:"rules,omitempty"` -} - -// GetRulesForBranch gets all the rules that apply to the specified branch. -// -// GitHub API docs: https://docs.github.com/rest/repos/rules#get-rules-for-a-branch -// -//meta:operation GET /repos/{owner}/{repo}/rules/branches/{branch} -func (s *RepositoriesService) GetRulesForBranch(ctx context.Context, owner, repo, branch string) ([]*RepositoryRule, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rules/branches/%v", owner, repo, branch) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rules []*RepositoryRule - resp, err := s.client.Do(ctx, req, &rules) - if err != nil { - return nil, resp, err - } - - return rules, resp, nil -} - -// GetAllRulesets gets all the rules that apply to the specified repository. -// If includesParents is true, rulesets configured at the organization level that apply to the repository will be returned. -// -// GitHub API docs: https://docs.github.com/rest/repos/rules#get-all-repository-rulesets -// -//meta:operation GET /repos/{owner}/{repo}/rulesets -func (s *RepositoriesService) GetAllRulesets(ctx context.Context, owner, repo string, includesParents bool) ([]*Ruleset, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets?includes_parents=%v", owner, repo, includesParents) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var ruleset []*Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// CreateRuleset creates a ruleset for the specified repository. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/rules#create-a-repository-ruleset -// -//meta:operation POST /repos/{owner}/{repo}/rulesets -func (s *RepositoriesService) CreateRuleset(ctx context.Context, owner, repo string, rs *Ruleset) (*Ruleset, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets", owner, repo) - - req, err := s.client.NewRequest("POST", u, rs) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// GetRuleset gets a ruleset for the specified repository. -// If includesParents is true, rulesets configured at the organization level that apply to the repository will be returned. -// -// GitHub API docs: https://docs.github.com/rest/repos/rules#get-a-repository-ruleset -// -//meta:operation GET /repos/{owner}/{repo}/rulesets/{ruleset_id} -func (s *RepositoriesService) GetRuleset(ctx context.Context, owner, repo string, rulesetID int64, includesParents bool) (*Ruleset, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets/%v?includes_parents=%v", owner, repo, rulesetID, includesParents) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// UpdateRuleset updates a ruleset for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/rules#update-a-repository-ruleset -// -//meta:operation PUT /repos/{owner}/{repo}/rulesets/{ruleset_id} -func (s *RepositoriesService) UpdateRuleset(ctx context.Context, owner, repo string, rulesetID int64, rs *Ruleset) (*Ruleset, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets/%v", owner, repo, rulesetID) - - req, err := s.client.NewRequest("PUT", u, rs) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// DeleteRuleset deletes a ruleset for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/rules#delete-a-repository-ruleset -// -//meta:operation DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id} -func (s *RepositoriesService) DeleteRuleset(ctx context.Context, owner, repo string, rulesetID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets/%v", owner, repo, rulesetID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_stats.go b/vendor/github.com/google/go-github/v57/github/repos_stats.go deleted file mode 100644 index 898693f7..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_stats.go +++ /dev/null @@ -1,242 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "time" -) - -// ContributorStats represents a contributor to a repository and their -// weekly contributions to a given repo. 
-type ContributorStats struct { - Author *Contributor `json:"author,omitempty"` - Total *int `json:"total,omitempty"` - Weeks []*WeeklyStats `json:"weeks,omitempty"` -} - -func (c ContributorStats) String() string { - return Stringify(c) -} - -// WeeklyStats represents the number of additions, deletions and commits -// a Contributor made in a given week. -type WeeklyStats struct { - Week *Timestamp `json:"w,omitempty"` - Additions *int `json:"a,omitempty"` - Deletions *int `json:"d,omitempty"` - Commits *int `json:"c,omitempty"` -} - -func (w WeeklyStats) String() string { - return Stringify(w) -} - -// ListContributorsStats gets a repo's contributor list with additions, -// deletions and commit counts. -// -// If this is the first time these statistics are requested for the given -// repository, this method will return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it is now computing the requested statistics. A follow up request, after a -// delay of a second or so, should result in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/metrics/statistics#get-all-contributor-commit-activity -// -//meta:operation GET /repos/{owner}/{repo}/stats/contributors -func (s *RepositoriesService) ListContributorsStats(ctx context.Context, owner, repo string) ([]*ContributorStats, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/stats/contributors", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var contributorStats []*ContributorStats - resp, err := s.client.Do(ctx, req, &contributorStats) - if err != nil { - return nil, resp, err - } - - return contributorStats, resp, nil -} - -// WeeklyCommitActivity represents the weekly commit activity for a repository. -// The days array is a group of commits per day, starting on Sunday. -type WeeklyCommitActivity struct { - Days []int `json:"days,omitempty"` - Total *int `json:"total,omitempty"` - Week *Timestamp `json:"week,omitempty"` -} - -func (w WeeklyCommitActivity) String() string { - return Stringify(w) -} - -// ListCommitActivity returns the last year of commit activity -// grouped by week. The days array is a group of commits per day, -// starting on Sunday. -// -// If this is the first time these statistics are requested for the given -// repository, this method will return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it is now computing the requested statistics. A follow up request, after a -// delay of a second or so, should result in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/metrics/statistics#get-the-last-year-of-commit-activity -// -//meta:operation GET /repos/{owner}/{repo}/stats/commit_activity -func (s *RepositoriesService) ListCommitActivity(ctx context.Context, owner, repo string) ([]*WeeklyCommitActivity, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/stats/commit_activity", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var weeklyCommitActivity []*WeeklyCommitActivity - resp, err := s.client.Do(ctx, req, &weeklyCommitActivity) - if err != nil { - return nil, resp, err - } - - return weeklyCommitActivity, resp, nil -} - -// ListCodeFrequency returns a weekly aggregate of the number of additions and -// deletions pushed to a repository. 
Returned WeeklyStats will contain -// additions and deletions, but not total commits. -// -// If this is the first time these statistics are requested for the given -// repository, this method will return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it is now computing the requested statistics. A follow up request, after a -// delay of a second or so, should result in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/metrics/statistics#get-the-weekly-commit-activity -// -//meta:operation GET /repos/{owner}/{repo}/stats/code_frequency -func (s *RepositoriesService) ListCodeFrequency(ctx context.Context, owner, repo string) ([]*WeeklyStats, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/stats/code_frequency", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var weeks [][]int - resp, err := s.client.Do(ctx, req, &weeks) - if err != nil { - return nil, resp, err - } - - // convert int slices into WeeklyStats - var stats []*WeeklyStats - for _, week := range weeks { - if len(week) != 3 { - continue - } - stat := &WeeklyStats{ - Week: &Timestamp{time.Unix(int64(week[0]), 0)}, - Additions: Int(week[1]), - Deletions: Int(week[2]), - } - stats = append(stats, stat) - } - - return stats, resp, nil -} - -// RepositoryParticipation is the number of commits by everyone -// who has contributed to the repository (including the owner) -// as well as the number of commits by the owner themself. -type RepositoryParticipation struct { - All []int `json:"all,omitempty"` - Owner []int `json:"owner,omitempty"` -} - -func (r RepositoryParticipation) String() string { - return Stringify(r) -} - -// ListParticipation returns the total commit counts for the 'owner' -// and total commit counts in 'all'. 'all' is everyone combined, -// including the 'owner' in the last 52 weeks. If you’d like to get -// the commit counts for non-owners, you can subtract 'all' from 'owner'. -// -// The array order is oldest week (index 0) to most recent week. -// -// If this is the first time these statistics are requested for the given -// repository, this method will return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it is now computing the requested statistics. A follow up request, after a -// delay of a second or so, should result in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/metrics/statistics#get-the-weekly-commit-count -// -//meta:operation GET /repos/{owner}/{repo}/stats/participation -func (s *RepositoriesService) ListParticipation(ctx context.Context, owner, repo string) (*RepositoryParticipation, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/stats/participation", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - participation := new(RepositoryParticipation) - resp, err := s.client.Do(ctx, req, participation) - if err != nil { - return nil, resp, err - } - - return participation, resp, nil -} - -// PunchCard represents the number of commits made during a given hour of a -// day of the week. -type PunchCard struct { - Day *int // Day of the week (0-6: =Sunday - Saturday). - Hour *int // Hour of day (0-23). - Commits *int // Number of commits. -} - -// ListPunchCard returns the number of commits per hour in each day. 
-// -// If this is the first time these statistics are requested for the given -// repository, this method will return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it is now computing the requested statistics. A follow up request, after a -// delay of a second or so, should result in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/metrics/statistics#get-the-hourly-commit-count-for-each-day -// -//meta:operation GET /repos/{owner}/{repo}/stats/punch_card -func (s *RepositoriesService) ListPunchCard(ctx context.Context, owner, repo string) ([]*PunchCard, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/stats/punch_card", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var results [][]int - resp, err := s.client.Do(ctx, req, &results) - if err != nil { - return nil, resp, err - } - - // convert int slices into Punchcards - var cards []*PunchCard - for _, result := range results { - if len(result) != 3 { - continue - } - card := &PunchCard{ - Day: Int(result[0]), - Hour: Int(result[1]), - Commits: Int(result[2]), - } - cards = append(cards, card) - } - - return cards, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_statuses.go b/vendor/github.com/google/go-github/v57/github/repos_statuses.go deleted file mode 100644 index e7b03047..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_statuses.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RepoStatus represents the status of a repository at a particular reference. -type RepoStatus struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - URL *string `json:"url,omitempty"` - - // State is the current state of the repository. Possible values are: - // pending, success, error, or failure. - State *string `json:"state,omitempty"` - - // TargetURL is the URL of the page representing this status. It will be - // linked from the GitHub UI to allow users to see the source of the status. - TargetURL *string `json:"target_url,omitempty"` - - // Description is a short high level summary of the status. - Description *string `json:"description,omitempty"` - - // A string label to differentiate this status from the statuses of other systems. - Context *string `json:"context,omitempty"` - - // AvatarURL is the URL of the avatar of this status. - AvatarURL *string `json:"avatar_url,omitempty"` - - Creator *User `json:"creator,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -func (r RepoStatus) String() string { - return Stringify(r) -} - -// ListStatuses lists the statuses of a repository at the specified -// reference. ref can be a SHA, a branch name, or a tag name. 
-// -// GitHub API docs: https://docs.github.com/rest/commits/statuses#list-commit-statuses-for-a-reference -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref}/statuses -func (s *RepositoriesService) ListStatuses(ctx context.Context, owner, repo, ref string, opts *ListOptions) ([]*RepoStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/statuses", owner, repo, refURLEscape(ref)) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var statuses []*RepoStatus - resp, err := s.client.Do(ctx, req, &statuses) - if err != nil { - return nil, resp, err - } - - return statuses, resp, nil -} - -// CreateStatus creates a new status for a repository at the specified -// reference. Ref can be a SHA, a branch name, or a tag name. -// -// GitHub API docs: https://docs.github.com/rest/commits/statuses#create-a-commit-status -// -//meta:operation POST /repos/{owner}/{repo}/statuses/{sha} -func (s *RepositoriesService) CreateStatus(ctx context.Context, owner, repo, ref string, status *RepoStatus) (*RepoStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/statuses/%v", owner, repo, refURLEscape(ref)) - req, err := s.client.NewRequest("POST", u, status) - if err != nil { - return nil, nil, err - } - - repoStatus := new(RepoStatus) - resp, err := s.client.Do(ctx, req, repoStatus) - if err != nil { - return nil, resp, err - } - - return repoStatus, resp, nil -} - -// CombinedStatus represents the combined status of a repository at a particular reference. -type CombinedStatus struct { - // State is the combined state of the repository. Possible values are: - // failure, pending, or success. - State *string `json:"state,omitempty"` - - Name *string `json:"name,omitempty"` - SHA *string `json:"sha,omitempty"` - TotalCount *int `json:"total_count,omitempty"` - Statuses []*RepoStatus `json:"statuses,omitempty"` - - CommitURL *string `json:"commit_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` -} - -func (s CombinedStatus) String() string { - return Stringify(s) -} - -// GetCombinedStatus returns the combined status of a repository at the specified -// reference. ref can be a SHA, a branch name, or a tag name. -// -// GitHub API docs: https://docs.github.com/rest/commits/statuses#get-the-combined-status-for-a-specific-reference -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref}/status -func (s *RepositoriesService) GetCombinedStatus(ctx context.Context, owner, repo, ref string, opts *ListOptions) (*CombinedStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/status", owner, repo, refURLEscape(ref)) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - status := new(CombinedStatus) - resp, err := s.client.Do(ctx, req, status) - if err != nil { - return nil, resp, err - } - - return status, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_tags.go b/vendor/github.com/google/go-github/v57/github/repos_tags.go deleted file mode 100644 index 93164dd1..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_tags.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// TagProtection represents a repository tag protection. -type TagProtection struct { - ID *int64 `json:"id"` - Pattern *string `json:"pattern"` -} - -// tagProtectionRequest represents a request to create tag protection. -type tagProtectionRequest struct { - // An optional glob pattern to match against when enforcing tag protection. - Pattern string `json:"pattern"` -} - -// ListTagProtection lists tag protection of the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/tags#list-tag-protection-states-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/tags/protection -func (s *RepositoriesService) ListTagProtection(ctx context.Context, owner, repo string) ([]*TagProtection, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/tags/protection", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var tagProtections []*TagProtection - resp, err := s.client.Do(ctx, req, &tagProtections) - if err != nil { - return nil, resp, err - } - - return tagProtections, resp, nil -} - -// CreateTagProtection creates the tag protection of the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/tags#create-a-tag-protection-state-for-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/tags/protection -func (s *RepositoriesService) CreateTagProtection(ctx context.Context, owner, repo, pattern string) (*TagProtection, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/tags/protection", owner, repo) - r := &tagProtectionRequest{Pattern: pattern} - req, err := s.client.NewRequest("POST", u, r) - if err != nil { - return nil, nil, err - } - - tagProtection := new(TagProtection) - resp, err := s.client.Do(ctx, req, tagProtection) - if err != nil { - return nil, resp, err - } - - return tagProtection, resp, nil -} - -// DeleteTagProtection deletes a tag protection from the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/tags#delete-a-tag-protection-state-for-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id} -func (s *RepositoriesService) DeleteTagProtection(ctx context.Context, owner, repo string, tagProtectionID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/tags/protection/%v", owner, repo, tagProtectionID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_traffic.go b/vendor/github.com/google/go-github/v57/github/repos_traffic.go deleted file mode 100644 index ae2f1a86..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_traffic.go +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// TrafficReferrer represent information about traffic from a referrer . -type TrafficReferrer struct { - Referrer *string `json:"referrer,omitempty"` - Count *int `json:"count,omitempty"` - Uniques *int `json:"uniques,omitempty"` -} - -// TrafficPath represent information about the traffic on a path of the repo. 
-type TrafficPath struct { - Path *string `json:"path,omitempty"` - Title *string `json:"title,omitempty"` - Count *int `json:"count,omitempty"` - Uniques *int `json:"uniques,omitempty"` -} - -// TrafficData represent information about a specific timestamp in views or clones list. -type TrafficData struct { - Timestamp *Timestamp `json:"timestamp,omitempty"` - Count *int `json:"count,omitempty"` - Uniques *int `json:"uniques,omitempty"` -} - -// TrafficViews represent information about the number of views in the last 14 days. -type TrafficViews struct { - Views []*TrafficData `json:"views,omitempty"` - Count *int `json:"count,omitempty"` - Uniques *int `json:"uniques,omitempty"` -} - -// TrafficClones represent information about the number of clones in the last 14 days. -type TrafficClones struct { - Clones []*TrafficData `json:"clones,omitempty"` - Count *int `json:"count,omitempty"` - Uniques *int `json:"uniques,omitempty"` -} - -// TrafficBreakdownOptions specifies the parameters to methods that support breakdown per day or week. -// Can be one of: day, week. Default: day. -type TrafficBreakdownOptions struct { - Per string `url:"per,omitempty"` -} - -// ListTrafficReferrers list the top 10 referrers over the last 14 days. -// -// GitHub API docs: https://docs.github.com/rest/metrics/traffic#get-top-referral-sources -// -//meta:operation GET /repos/{owner}/{repo}/traffic/popular/referrers -func (s *RepositoriesService) ListTrafficReferrers(ctx context.Context, owner, repo string) ([]*TrafficReferrer, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/traffic/popular/referrers", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var trafficReferrers []*TrafficReferrer - resp, err := s.client.Do(ctx, req, &trafficReferrers) - if err != nil { - return nil, resp, err - } - - return trafficReferrers, resp, nil -} - -// ListTrafficPaths list the top 10 popular content over the last 14 days. -// -// GitHub API docs: https://docs.github.com/rest/metrics/traffic#get-top-referral-paths -// -//meta:operation GET /repos/{owner}/{repo}/traffic/popular/paths -func (s *RepositoriesService) ListTrafficPaths(ctx context.Context, owner, repo string) ([]*TrafficPath, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/traffic/popular/paths", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var paths []*TrafficPath - resp, err := s.client.Do(ctx, req, &paths) - if err != nil { - return nil, resp, err - } - - return paths, resp, nil -} - -// ListTrafficViews get total number of views for the last 14 days and breaks it down either per day or week. -// -// GitHub API docs: https://docs.github.com/rest/metrics/traffic#get-page-views -// -//meta:operation GET /repos/{owner}/{repo}/traffic/views -func (s *RepositoriesService) ListTrafficViews(ctx context.Context, owner, repo string, opts *TrafficBreakdownOptions) (*TrafficViews, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/traffic/views", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - trafficViews := new(TrafficViews) - resp, err := s.client.Do(ctx, req, &trafficViews) - if err != nil { - return nil, resp, err - } - - return trafficViews, resp, nil -} - -// ListTrafficClones get total number of clones for the last 14 days and breaks it down either per day or week for the last 14 days. 
-// -// GitHub API docs: https://docs.github.com/rest/metrics/traffic#get-repository-clones -// -//meta:operation GET /repos/{owner}/{repo}/traffic/clones -func (s *RepositoriesService) ListTrafficClones(ctx context.Context, owner, repo string, opts *TrafficBreakdownOptions) (*TrafficClones, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/traffic/clones", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - trafficClones := new(TrafficClones) - resp, err := s.client.Do(ctx, req, &trafficClones) - if err != nil { - return nil, resp, err - } - - return trafficClones, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/scim.go b/vendor/github.com/google/go-github/v57/github/scim.go deleted file mode 100644 index 02136d7e..00000000 --- a/vendor/github.com/google/go-github/v57/github/scim.go +++ /dev/null @@ -1,217 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" -) - -// SCIMService provides access to SCIM related functions in the -// GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/scim -type SCIMService service - -// SCIMUserAttributes represents supported SCIM User attributes. -// -// GitHub API docs: https://docs.github.com/rest/scim#supported-scim-user-attributes -type SCIMUserAttributes struct { - UserName string `json:"userName"` // Configured by the admin. Could be an email, login, or username. (Required.) - Name SCIMUserName `json:"name"` // (Required.) - DisplayName *string `json:"displayName,omitempty"` // The name of the user, suitable for display to end-users. (Optional.) - Emails []*SCIMUserEmail `json:"emails"` // User emails. (Required.) - Schemas []string `json:"schemas,omitempty"` // (Optional.) - ExternalID *string `json:"externalId,omitempty"` // (Optional.) - Groups []string `json:"groups,omitempty"` // (Optional.) - Active *bool `json:"active,omitempty"` // (Optional.) - // Only populated as a result of calling ListSCIMProvisionedIdentitiesOptions or GetSCIMProvisioningInfoForUser: - ID *string `json:"id,omitempty"` - Meta *SCIMMeta `json:"meta,omitempty"` -} - -// SCIMUserName represents SCIM user information. -type SCIMUserName struct { - GivenName string `json:"givenName"` // The first name of the user. (Required.) - FamilyName string `json:"familyName"` // The family name of the user. (Required.) - Formatted *string `json:"formatted,omitempty"` // (Optional.) -} - -// SCIMUserEmail represents SCIM user email. -type SCIMUserEmail struct { - Value string `json:"value"` // (Required.) - Primary *bool `json:"primary,omitempty"` // (Optional.) - Type *string `json:"type,omitempty"` // (Optional.) -} - -// SCIMMeta represents metadata about the SCIM resource. -type SCIMMeta struct { - ResourceType *string `json:"resourceType,omitempty"` - Created *Timestamp `json:"created,omitempty"` - LastModified *Timestamp `json:"lastModified,omitempty"` - Location *string `json:"location,omitempty"` -} - -// SCIMProvisionedIdentities represents the result of calling ListSCIMProvisionedIdentities. 
-type SCIMProvisionedIdentities struct { - Schemas []string `json:"schemas,omitempty"` - TotalResults *int `json:"totalResults,omitempty"` - ItemsPerPage *int `json:"itemsPerPage,omitempty"` - StartIndex *int `json:"startIndex,omitempty"` - Resources []*SCIMUserAttributes `json:"Resources,omitempty"` -} - -// ListSCIMProvisionedIdentitiesOptions represents options for ListSCIMProvisionedIdentities. -// -// GitHub API docs: https://docs.github.com/rest/scim#list-scim-provisioned-identities--parameters -type ListSCIMProvisionedIdentitiesOptions struct { - StartIndex *int `url:"startIndex,omitempty"` // Used for pagination: the index of the first result to return. (Optional.) - Count *int `url:"count,omitempty"` // Used for pagination: the number of results to return. (Optional.) - // Filter results using the equals query parameter operator (eq). - // You can filter results that are equal to id, userName, emails, and external_id. - // For example, to search for an identity with the userName Octocat, you would use this query: ?filter=userName%20eq%20\"Octocat\". - // To filter results for the identity with the email octocat@github.com, you would use this query: ?filter=emails%20eq%20\"octocat@github.com\". - // (Optional.) - Filter *string `url:"filter,omitempty"` -} - -// ListSCIMProvisionedIdentities lists SCIM provisioned identities. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#list-scim-provisioned-identities -// -//meta:operation GET /scim/v2/organizations/{org}/Users -func (s *SCIMService) ListSCIMProvisionedIdentities(ctx context.Context, org string, opts *ListSCIMProvisionedIdentitiesOptions) (*SCIMProvisionedIdentities, *Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - identities := new(SCIMProvisionedIdentities) - resp, err := s.client.Do(ctx, req, identities) - if err != nil { - return nil, resp, err - } - - return identities, resp, nil -} - -// ProvisionAndInviteSCIMUser provisions organization membership for a user, and sends an activation email to the email address. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#provision-and-invite-a-scim-user -// -//meta:operation POST /scim/v2/organizations/{org}/Users -func (s *SCIMService) ProvisionAndInviteSCIMUser(ctx context.Context, org string, opts *SCIMUserAttributes) (*Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, err - } - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetSCIMProvisioningInfoForUser returns SCIM provisioning information for a user. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#get-scim-provisioning-information-for-a-user -// -//meta:operation GET /scim/v2/organizations/{org}/Users/{scim_user_id} -func (s *SCIMService) GetSCIMProvisioningInfoForUser(ctx context.Context, org, scimUserID string) (*SCIMUserAttributes, *Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - user := new(SCIMUserAttributes) - resp, err := s.client.Do(ctx, req, &user) - if err != nil { - return nil, resp, err - } - - return user, resp, nil -} - -// UpdateProvisionedOrgMembership updates a provisioned organization membership. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#update-a-provisioned-organization-membership -// -//meta:operation PUT /scim/v2/organizations/{org}/Users/{scim_user_id} -func (s *SCIMService) UpdateProvisionedOrgMembership(ctx context.Context, org, scimUserID string, opts *SCIMUserAttributes) (*Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) - u, err := addOptions(u, opts) - if err != nil { - return nil, err - } - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// UpdateAttributeForSCIMUserOptions represents options for UpdateAttributeForSCIMUser. -// -// GitHub API docs: https://docs.github.com/rest/scim#update-an-attribute-for-a-scim-user--parameters -type UpdateAttributeForSCIMUserOptions struct { - Schemas []string `json:"schemas,omitempty"` // (Optional.) - Operations UpdateAttributeForSCIMUserOperations `json:"operations"` // Set of operations to be performed. (Required.) -} - -// UpdateAttributeForSCIMUserOperations represents operations for UpdateAttributeForSCIMUser. -type UpdateAttributeForSCIMUserOperations struct { - Op string `json:"op"` // (Required.) - Path *string `json:"path,omitempty"` // (Optional.) - Value json.RawMessage `json:"value,omitempty"` // (Optional.) -} - -// UpdateAttributeForSCIMUser updates an attribute for an SCIM user. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#update-an-attribute-for-a-scim-user -// -//meta:operation PATCH /scim/v2/organizations/{org}/Users/{scim_user_id} -func (s *SCIMService) UpdateAttributeForSCIMUser(ctx context.Context, org, scimUserID string, opts *UpdateAttributeForSCIMUserOptions) (*Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) - u, err := addOptions(u, opts) - if err != nil { - return nil, err - } - - req, err := s.client.NewRequest("PATCH", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteSCIMUserFromOrg deletes SCIM user from an organization. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#delete-a-scim-user-from-an-organization -// -//meta:operation DELETE /scim/v2/organizations/{org}/Users/{scim_user_id} -func (s *SCIMService) DeleteSCIMUserFromOrg(ctx context.Context, org, scimUserID string) (*Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/search.go b/vendor/github.com/google/go-github/v57/github/search.go deleted file mode 100644 index 71efe87a..00000000 --- a/vendor/github.com/google/go-github/v57/github/search.go +++ /dev/null @@ -1,347 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strconv" - "strings" - - qs "github.com/google/go-querystring/query" -) - -// SearchService provides access to the search related functions -// in the GitHub API. -// -// Each method takes a query string defining the search keywords and any search qualifiers. -// For example, when searching issues, the query "gopher is:issue language:go" will search -// for issues containing the word "gopher" in Go repositories. The method call -// -// opts := &github.SearchOptions{Sort: "created", Order: "asc"} -// cl.Search.Issues(ctx, "gopher is:issue language:go", opts) -// -// will search for such issues, sorting by creation date in ascending order -// (i.e., oldest first). -// -// If query includes multiple conditions, it MUST NOT include "+" as the condition separator. -// You have to use " " as the separator instead. -// For example, querying with "language:c++" and "leveldb", then query should be -// "language:c++ leveldb" but not "language:c+++leveldb". -// -// GitHub API docs: https://docs.github.com/rest/search/ -type SearchService service - -// SearchOptions specifies optional parameters to the SearchService methods. -type SearchOptions struct { - // How to sort the search results. Possible values are: - // - for repositories: stars, fork, updated - // - for commits: author-date, committer-date - // - for code: indexed - // - for issues: comments, created, updated - // - for users: followers, repositories, joined - // - // Default is to sort by best match. - Sort string `url:"sort,omitempty"` - - // Sort order if sort parameter is provided. Possible values are: asc, - // desc. Default is desc. - Order string `url:"order,omitempty"` - - // Whether to retrieve text match metadata with a query - TextMatch bool `url:"-"` - - ListOptions -} - -// Common search parameters. -type searchParameters struct { - Query string - RepositoryID *int64 // Sent if non-nil. -} - -// RepositoriesSearchResult represents the result of a repositories search. -type RepositoriesSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Repositories []*Repository `json:"items,omitempty"` -} - -// Repositories searches repositories via various criteria. 
-// -// GitHub API docs: https://docs.github.com/rest/search/search#search-repositories -// -//meta:operation GET /search/repositories -func (s *SearchService) Repositories(ctx context.Context, query string, opts *SearchOptions) (*RepositoriesSearchResult, *Response, error) { - result := new(RepositoriesSearchResult) - resp, err := s.search(ctx, "repositories", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// TopicsSearchResult represents the result of a topics search. -type TopicsSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Topics []*TopicResult `json:"items,omitempty"` -} - -type TopicResult struct { - Name *string `json:"name,omitempty"` - DisplayName *string `json:"display_name,omitempty"` - ShortDescription *string `json:"short_description,omitempty"` - Description *string `json:"description,omitempty"` - CreatedBy *string `json:"created_by,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *string `json:"updated_at,omitempty"` - Featured *bool `json:"featured,omitempty"` - Curated *bool `json:"curated,omitempty"` - Score *float64 `json:"score,omitempty"` -} - -// Topics finds topics via various criteria. Results are sorted by best match. -// Please see https://help.github.com/articles/searching-topics for more -// information about search qualifiers. -// -// GitHub API docs: https://docs.github.com/rest/search/search#search-topics -// -//meta:operation GET /search/topics -func (s *SearchService) Topics(ctx context.Context, query string, opts *SearchOptions) (*TopicsSearchResult, *Response, error) { - result := new(TopicsSearchResult) - resp, err := s.search(ctx, "topics", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// CommitsSearchResult represents the result of a commits search. -type CommitsSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Commits []*CommitResult `json:"items,omitempty"` -} - -// CommitResult represents a commit object as returned in commit search endpoint response. -type CommitResult struct { - SHA *string `json:"sha,omitempty"` - Commit *Commit `json:"commit,omitempty"` - Author *User `json:"author,omitempty"` - Committer *User `json:"committer,omitempty"` - Parents []*Commit `json:"parents,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - URL *string `json:"url,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - - Repository *Repository `json:"repository,omitempty"` - Score *float64 `json:"score,omitempty"` -} - -// Commits searches commits via various criteria. -// -// GitHub API docs: https://docs.github.com/rest/search/search#search-commits -// -//meta:operation GET /search/commits -func (s *SearchService) Commits(ctx context.Context, query string, opts *SearchOptions) (*CommitsSearchResult, *Response, error) { - result := new(CommitsSearchResult) - resp, err := s.search(ctx, "commits", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// IssuesSearchResult represents the result of an issues search. 
-type IssuesSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Issues []*Issue `json:"items,omitempty"` -} - -// Issues searches issues via various criteria. -// -// GitHub API docs: https://docs.github.com/rest/search/search#search-issues-and-pull-requests -// -//meta:operation GET /search/issues -func (s *SearchService) Issues(ctx context.Context, query string, opts *SearchOptions) (*IssuesSearchResult, *Response, error) { - result := new(IssuesSearchResult) - resp, err := s.search(ctx, "issues", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// UsersSearchResult represents the result of a users search. -type UsersSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Users []*User `json:"items,omitempty"` -} - -// Users searches users via various criteria. -// -// GitHub API docs: https://docs.github.com/rest/search/search#search-users -// -//meta:operation GET /search/users -func (s *SearchService) Users(ctx context.Context, query string, opts *SearchOptions) (*UsersSearchResult, *Response, error) { - result := new(UsersSearchResult) - resp, err := s.search(ctx, "users", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// Match represents a single text match. -type Match struct { - Text *string `json:"text,omitempty"` - Indices []int `json:"indices,omitempty"` -} - -// TextMatch represents a text match for a SearchResult -type TextMatch struct { - ObjectURL *string `json:"object_url,omitempty"` - ObjectType *string `json:"object_type,omitempty"` - Property *string `json:"property,omitempty"` - Fragment *string `json:"fragment,omitempty"` - Matches []*Match `json:"matches,omitempty"` -} - -func (tm TextMatch) String() string { - return Stringify(tm) -} - -// CodeSearchResult represents the result of a code search. -type CodeSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - CodeResults []*CodeResult `json:"items,omitempty"` -} - -// CodeResult represents a single search result. -type CodeResult struct { - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - SHA *string `json:"sha,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Repository *Repository `json:"repository,omitempty"` - TextMatches []*TextMatch `json:"text_matches,omitempty"` -} - -func (c CodeResult) String() string { - return Stringify(c) -} - -// Code searches code via various criteria. -// -// GitHub API docs: https://docs.github.com/rest/search/search#search-code -// -//meta:operation GET /search/code -func (s *SearchService) Code(ctx context.Context, query string, opts *SearchOptions) (*CodeSearchResult, *Response, error) { - result := new(CodeSearchResult) - resp, err := s.search(ctx, "code", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// LabelsSearchResult represents the result of a code search. -type LabelsSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Labels []*LabelResult `json:"items,omitempty"` -} - -// LabelResult represents a single search result. 
-type LabelResult struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - Color *string `json:"color,omitempty"` - Default *bool `json:"default,omitempty"` - Description *string `json:"description,omitempty"` - Score *float64 `json:"score,omitempty"` -} - -func (l LabelResult) String() string { - return Stringify(l) -} - -// Labels searches labels in the repository with ID repoID via various criteria. -// -// GitHub API docs: https://docs.github.com/rest/search/search#search-labels -// -//meta:operation GET /search/labels -func (s *SearchService) Labels(ctx context.Context, repoID int64, query string, opts *SearchOptions) (*LabelsSearchResult, *Response, error) { - result := new(LabelsSearchResult) - resp, err := s.search(ctx, "labels", &searchParameters{RepositoryID: &repoID, Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// Helper function that executes search queries against different -// GitHub search types (repositories, commits, code, issues, users, labels) -// -// If searchParameters.Query includes multiple condition, it MUST NOT include "+" as condition separator. -// For example, querying with "language:c++" and "leveldb", then searchParameters.Query should be "language:c++ leveldb" but not "language:c+++leveldb". -func (s *SearchService) search(ctx context.Context, searchType string, parameters *searchParameters, opts *SearchOptions, result interface{}) (*Response, error) { - params, err := qs.Values(opts) - if err != nil { - return nil, err - } - - if parameters.RepositoryID != nil { - params.Set("repository_id", strconv.FormatInt(*parameters.RepositoryID, 10)) - } - params.Set("q", parameters.Query) - u := fmt.Sprintf("search/%s?%s", searchType, params.Encode()) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, err - } - var acceptHeaders []string - switch { - case searchType == "commits": - // Accept header for search commits preview endpoint - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders = append(acceptHeaders, mediaTypeCommitSearchPreview) - case searchType == "topics": - // Accept header for search repositories based on topics preview endpoint - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders = append(acceptHeaders, mediaTypeTopicsPreview) - case searchType == "repositories": - // Accept header for search repositories based on topics preview endpoint - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders = append(acceptHeaders, mediaTypeTopicsPreview) - case searchType == "issues": - // Accept header for search issues based on reactions preview endpoint - // TODO: remove custom Accept header when this API fully launches. 
- acceptHeaders = append(acceptHeaders, mediaTypeReactionsPreview) - } - // https://docs.github.com/rest/search#search-repositories - // Accept header defaults to "application/vnd.github.v3+json" - // We change it here to fetch back text-match metadata - if opts != nil && opts.TextMatch { - acceptHeaders = append(acceptHeaders, "application/vnd.github.v3.text-match+json") - } - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - return s.client.Do(ctx, req, result) -} diff --git a/vendor/github.com/google/go-github/v57/github/secret_scanning.go b/vendor/github.com/google/go-github/v57/github/secret_scanning.go deleted file mode 100644 index 9b2ad8cd..00000000 --- a/vendor/github.com/google/go-github/v57/github/secret_scanning.go +++ /dev/null @@ -1,257 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// SecretScanningService handles communication with the secret scanning related -// methods of the GitHub API. -type SecretScanningService service - -// SecretScanningAlert represents a GitHub secret scanning alert. -type SecretScanningAlert struct { - Number *int `json:"number,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - LocationsURL *string `json:"locations_url,omitempty"` - State *string `json:"state,omitempty"` - Resolution *string `json:"resolution,omitempty"` - ResolvedAt *Timestamp `json:"resolved_at,omitempty"` - ResolvedBy *User `json:"resolved_by,omitempty"` - SecretType *string `json:"secret_type,omitempty"` - SecretTypeDisplayName *string `json:"secret_type_display_name,omitempty"` - Secret *string `json:"secret,omitempty"` - Repository *Repository `json:"repository,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - PushProtectionBypassed *bool `json:"push_protection_bypassed,omitempty"` - PushProtectionBypassedBy *User `json:"push_protection_bypassed_by,omitempty"` - PushProtectionBypassedAt *Timestamp `json:"push_protection_bypassed_at,omitempty"` - ResolutionComment *string `json:"resolution_comment,omitempty"` -} - -// SecretScanningAlertLocation represents the location for a secret scanning alert. -type SecretScanningAlertLocation struct { - Type *string `json:"type,omitempty"` - Details *SecretScanningAlertLocationDetails `json:"details,omitempty"` -} - -// SecretScanningAlertLocationDetails represents the location details for a secret scanning alert. -type SecretScanningAlertLocationDetails struct { - Path *string `json:"path,omitempty"` - Startline *int `json:"start_line,omitempty"` - EndLine *int `json:"end_line,omitempty"` - StartColumn *int `json:"start_column,omitempty"` - EndColumn *int `json:"end_column,omitempty"` - BlobSHA *string `json:"blob_sha,omitempty"` - BlobURL *string `json:"blob_url,omitempty"` - CommitSHA *string `json:"commit_sha,omitempty"` - CommitURL *string `json:"commit_url,omitempty"` -} - -// SecretScanningAlertListOptions specifies optional parameters to the SecretScanningService.ListAlertsForEnterprise method. -type SecretScanningAlertListOptions struct { - // State of the secret scanning alerts to list. Set to open or resolved to only list secret scanning alerts in a specific state. - State string `url:"state,omitempty"` - - // A comma-separated list of secret types to return. By default all secret types are returned. 
- SecretType string `url:"secret_type,omitempty"` - - // A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. - // Valid resolutions are false_positive, wont_fix, revoked, pattern_edited, pattern_deleted or used_in_tests. - Resolution string `url:"resolution,omitempty"` - - ListCursorOptions - - // List options can vary on the Enterprise type. - // On Enterprise Cloud, Secret Scan alerts support requesting by page number - // along with providing a cursor for an "after" param. - // See: https://docs.github.com/enterprise-cloud@latest/rest/secret-scanning#list-secret-scanning-alerts-for-an-organization - // Whereas on Enterprise Server, pagination is by index. - // See: https://docs.github.com/enterprise-server@3.6/rest/secret-scanning#list-secret-scanning-alerts-for-an-organization - ListOptions -} - -// SecretScanningAlertUpdateOptions specifies optional parameters to the SecretScanningService.UpdateAlert method. -type SecretScanningAlertUpdateOptions struct { - // State is required and sets the state of the secret scanning alert. - // Can be either "open" or "resolved". - // You must provide resolution when you set the state to "resolved". - State string `json:"state"` - - // Required when the state is "resolved" and represents the reason for resolving the alert. - // Can be one of: "false_positive", "wont_fix", "revoked", or "used_in_tests". - Resolution *string `json:"resolution,omitempty"` -} - -// ListAlertsForEnterprise lists secret scanning alerts for eligible repositories in an enterprise, from newest to oldest. -// -// To use this endpoint, you must be a member of the enterprise, and you must use an access token with the repo scope or -// security_events scope. Alerts are only returned for organizations in the enterprise for which you are an organization owner or a security manager. -// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#list-secret-scanning-alerts-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/secret-scanning/alerts -func (s *SecretScanningService) ListAlertsForEnterprise(ctx context.Context, enterprise string, opts *SecretScanningAlertListOptions) ([]*SecretScanningAlert, *Response, error) { - u := fmt.Sprintf("enterprises/%v/secret-scanning/alerts", enterprise) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*SecretScanningAlert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// ListAlertsForOrg lists secret scanning alerts for eligible repositories in an organization, from newest to oldest. -// -// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with -// the repo scope or security_events scope. 
-// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#list-secret-scanning-alerts-for-an-organization -// -//meta:operation GET /orgs/{org}/secret-scanning/alerts -func (s *SecretScanningService) ListAlertsForOrg(ctx context.Context, org string, opts *SecretScanningAlertListOptions) ([]*SecretScanningAlert, *Response, error) { - u := fmt.Sprintf("orgs/%v/secret-scanning/alerts", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*SecretScanningAlert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// ListAlertsForRepo lists secret scanning alerts for a private repository, from newest to oldest. -// -// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with -// the repo scope or security_events scope. -// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#list-secret-scanning-alerts-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/secret-scanning/alerts -func (s *SecretScanningService) ListAlertsForRepo(ctx context.Context, owner, repo string, opts *SecretScanningAlertListOptions) ([]*SecretScanningAlert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*SecretScanningAlert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// GetAlert gets a single secret scanning alert detected in a private repository. -// -// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with -// the repo scope or security_events scope. -// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#get-a-secret-scanning-alert -// -//meta:operation GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number} -func (s *SecretScanningService) GetAlert(ctx context.Context, owner, repo string, number int64) (*SecretScanningAlert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts/%v", owner, repo, number) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alert *SecretScanningAlert - resp, err := s.client.Do(ctx, req, &alert) - if err != nil { - return nil, resp, err - } - - return alert, resp, nil -} - -// UpdateAlert updates the status of a secret scanning alert in a private repository. -// -// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with -// the repo scope or security_events scope. 
-// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#update-a-secret-scanning-alert -// -//meta:operation PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number} -func (s *SecretScanningService) UpdateAlert(ctx context.Context, owner, repo string, number int64, opts *SecretScanningAlertUpdateOptions) (*SecretScanningAlert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts/%v", owner, repo, number) - - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - var alert *SecretScanningAlert - resp, err := s.client.Do(ctx, req, &alert) - if err != nil { - return nil, resp, err - } - - return alert, resp, nil -} - -// ListLocationsForAlert lists all locations for a given secret scanning alert for a private repository. -// -// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with -// the repo scope or security_events scope. -// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#list-locations-for-a-secret-scanning-alert -// -//meta:operation GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations -func (s *SecretScanningService) ListLocationsForAlert(ctx context.Context, owner, repo string, number int64, opts *ListOptions) ([]*SecretScanningAlertLocation, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts/%v/locations", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var locations []*SecretScanningAlertLocation - resp, err := s.client.Do(ctx, req, &locations) - if err != nil { - return nil, resp, err - } - - return locations, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/security_advisories.go b/vendor/github.com/google/go-github/v57/github/security_advisories.go deleted file mode 100644 index 63526374..00000000 --- a/vendor/github.com/google/go-github/v57/github/security_advisories.go +++ /dev/null @@ -1,248 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -type SecurityAdvisoriesService service - -// SecurityAdvisorySubmission represents the Security Advisory Submission. -type SecurityAdvisorySubmission struct { - // Accepted represents whether a private vulnerability report was accepted by the repository's administrators. - Accepted *bool `json:"accepted,omitempty"` -} - -// RepoAdvisoryCredit represents the credit object for a repository Security Advisory. -type RepoAdvisoryCredit struct { - Login *string `json:"login,omitempty"` - Type *string `json:"type,omitempty"` -} - -// RepoAdvisoryCreditDetailed represents a credit given to a user for a repository Security Advisory. -type RepoAdvisoryCreditDetailed struct { - User *User `json:"user,omitempty"` - Type *string `json:"type,omitempty"` - State *string `json:"state,omitempty"` -} - -// ListRepositorySecurityAdvisoriesOptions specifies the optional parameters to list the repository security advisories. -type ListRepositorySecurityAdvisoriesOptions struct { - ListCursorOptions - - // Direction in which to sort advisories. Possible values are: asc, desc. - // Default is "asc". 
- Direction string `url:"direction,omitempty"` - - // Sort specifies how to sort advisories. Possible values are: created, updated, - // and published. Default value is "created". - Sort string `url:"sort,omitempty"` - - // State filters advisories based on their state. Possible values are: triage, draft, published, closed. - State string `url:"state,omitempty"` -} - -// ListGlobalSecurityAdvisoriesOptions specifies the optional parameters to list the global security advisories. -type ListGlobalSecurityAdvisoriesOptions struct { - ListCursorOptions - - // If specified, only advisories with this GHSA (GitHub Security Advisory) identifier will be returned. - GHSAID *string `url:"ghsa_id,omitempty"` - - // If specified, only advisories of this type will be returned. - // By default, a request with no other parameters defined will only return reviewed advisories that are not malware. - // Default: reviewed - // Can be one of: reviewed, malware, unreviewed - Type *string `url:"type,omitempty"` - - // If specified, only advisories with this CVE (Common Vulnerabilities and Exposures) identifier will be returned. - CVEID *string `url:"cve_id,omitempty"` - - // If specified, only advisories for these ecosystems will be returned. - // Can be one of: actions, composer, erlang, go, maven, npm, nuget, other, pip, pub, rubygems, rust - Ecosystem *string `url:"ecosystem,omitempty"` - - // If specified, only advisories with these severities will be returned. - // Can be one of: unknown, low, medium, high, critical - Severity *string `url:"severity,omitempty"` - - // If specified, only advisories with these Common Weakness Enumerations (CWEs) will be returned. - // Example: cwes=79,284,22 or cwes[]=79&cwes[]=284&cwes[]=22 - CWEs []string `url:"cwes,omitempty"` - - // Whether to only return advisories that have been withdrawn. - IsWithdrawn *bool `url:"is_withdrawn,omitempty"` - - // If specified, only return advisories that affect any of package or package@version. - // A maximum of 1000 packages can be specified. If the query parameter causes - // the URL to exceed the maximum URL length supported by your client, you must specify fewer packages. - // Example: affects=package1,package2@1.0.0,package3@^2.0.0 or affects[]=package1&affects[]=package2@1.0.0 - Affects *string `url:"affects,omitempty"` - - // If specified, only return advisories that were published on a date or date range. - Published *string `url:"published,omitempty"` - - // If specified, only return advisories that were updated on a date or date range. - Updated *string `url:"updated,omitempty"` - - // If specified, only show advisories that were updated or published on a date or date range. - Modified *string `url:"modified,omitempty"` -} - -// GlobalSecurityAdvisory represents the global security advisory object response. -type GlobalSecurityAdvisory struct { - SecurityAdvisory - ID *int64 `json:"id,omitempty"` - RepositoryAdvisoryURL *string `json:"repository_advisory_url,omitempty"` - Type *string `json:"type,omitempty"` - SourceCodeLocation *string `json:"source_code_location,omitempty"` - References []string `json:"references,omitempty"` - Vulnerabilities []*GlobalSecurityVulnerability `json:"vulnerabilities,omitempty"` - GithubReviewedAt *Timestamp `json:"github_reviewed_at,omitempty"` - NVDPublishedAt *Timestamp `json:"nvd_published_at,omitempty"` - Credits []*Credit `json:"credits,omitempty"` -} - -// GlobalSecurityVulnerability represents a vulnerability for a global security advisory. 
-type GlobalSecurityVulnerability struct { - Package *VulnerabilityPackage `json:"package,omitempty"` - FirstPatchedVersion *string `json:"first_patched_version,omitempty"` - VulnerableVersionRange *string `json:"vulnerable_version_range,omitempty"` - VulnerableFunctions []string `json:"vulnerable_functions,omitempty"` -} - -// Credit represents the credit object for a global security advisory. -type Credit struct { - User *User `json:"user,omitempty"` - Type *string `json:"type,omitempty"` -} - -// RequestCVE requests a Common Vulnerabilities and Exposures (CVE) for a repository security advisory. -// The ghsaID is the GitHub Security Advisory identifier of the advisory. -// -// GitHub API docs: https://docs.github.com/rest/security-advisories/repository-advisories#request-a-cve-for-a-repository-security-advisory -// -//meta:operation POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve -func (s *SecurityAdvisoriesService) RequestCVE(ctx context.Context, owner, repo, ghsaID string) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/security-advisories/%v/cve", owner, repo, ghsaID) - - req, err := s.client.NewRequest("POST", url, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - if _, ok := err.(*AcceptedError); ok { - return resp, nil - } - - return resp, err - } - - return resp, nil -} - -// ListRepositorySecurityAdvisoriesForOrg lists the repository security advisories for an organization. -// -// GitHub API docs: https://docs.github.com/rest/security-advisories/repository-advisories#list-repository-security-advisories-for-an-organization -// -//meta:operation GET /orgs/{org}/security-advisories -func (s *SecurityAdvisoriesService) ListRepositorySecurityAdvisoriesForOrg(ctx context.Context, org string, opt *ListRepositorySecurityAdvisoriesOptions) ([]*SecurityAdvisory, *Response, error) { - url := fmt.Sprintf("orgs/%v/security-advisories", org) - url, err := addOptions(url, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var advisories []*SecurityAdvisory - resp, err := s.client.Do(ctx, req, &advisories) - if err != nil { - return nil, resp, err - } - - return advisories, resp, nil -} - -// ListRepositorySecurityAdvisories lists the security advisories in a repository. -// -// GitHub API docs: https://docs.github.com/rest/security-advisories/repository-advisories#list-repository-security-advisories -// -//meta:operation GET /repos/{owner}/{repo}/security-advisories -func (s *SecurityAdvisoriesService) ListRepositorySecurityAdvisories(ctx context.Context, owner, repo string, opt *ListRepositorySecurityAdvisoriesOptions) ([]*SecurityAdvisory, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/security-advisories", owner, repo) - url, err := addOptions(url, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var advisories []*SecurityAdvisory - resp, err := s.client.Do(ctx, req, &advisories) - if err != nil { - return nil, resp, err - } - - return advisories, resp, nil -} - -// ListGlobalSecurityAdvisories lists all global security advisories. 
-// -// GitHub API docs: https://docs.github.com/rest/security-advisories/global-advisories#list-global-security-advisories -// -//meta:operation GET /advisories -func (s *SecurityAdvisoriesService) ListGlobalSecurityAdvisories(ctx context.Context, opts *ListGlobalSecurityAdvisoriesOptions) ([]*GlobalSecurityAdvisory, *Response, error) { - url := "advisories" - url, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var advisories []*GlobalSecurityAdvisory - resp, err := s.client.Do(ctx, req, &advisories) - if err != nil { - return nil, resp, err - } - - return advisories, resp, nil -} - -// GetGlobalSecurityAdvisories gets a global security advisory using its GitHub Security Advisory (GHSA) identifier. -// -// GitHub API docs: https://docs.github.com/rest/security-advisories/global-advisories#get-a-global-security-advisory -// -//meta:operation GET /advisories/{ghsa_id} -func (s *SecurityAdvisoriesService) GetGlobalSecurityAdvisories(ctx context.Context, ghsaID string) (*GlobalSecurityAdvisory, *Response, error) { - url := fmt.Sprintf("advisories/%s", ghsaID) - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var advisory *GlobalSecurityAdvisory - resp, err := s.client.Do(ctx, req, &advisory) - if err != nil { - return nil, resp, err - } - - return advisory, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/strings.go b/vendor/github.com/google/go-github/v57/github/strings.go deleted file mode 100644 index 147c515e..00000000 --- a/vendor/github.com/google/go-github/v57/github/strings.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "fmt" - "reflect" -) - -var timestampType = reflect.TypeOf(Timestamp{}) - -// Stringify attempts to create a reasonable string representation of types in -// the GitHub library. It does things like resolve pointers to their values -// and omits struct fields with nil values. -func Stringify(message interface{}) string { - var buf bytes.Buffer - v := reflect.ValueOf(message) - stringifyValue(&buf, v) - return buf.String() -} - -// stringifyValue was heavily inspired by the goprotobuf library. 
- 
-func stringifyValue(w *bytes.Buffer, val reflect.Value) {
- if val.Kind() == reflect.Ptr && val.IsNil() {
- w.Write([]byte("<nil>"))
- return
- }
- 
- v := reflect.Indirect(val)
- 
- switch v.Kind() {
- case reflect.String:
- fmt.Fprintf(w, `"%s"`, v)
- case reflect.Slice:
- w.Write([]byte{'['})
- for i := 0; i < v.Len(); i++ {
- if i > 0 {
- w.Write([]byte{' '})
- }
- 
- stringifyValue(w, v.Index(i))
- }
- 
- w.Write([]byte{']'})
- return
- case reflect.Struct:
- if v.Type().Name() != "" {
- w.Write([]byte(v.Type().String()))
- }
- 
- // special handling of Timestamp values
- if v.Type() == timestampType {
- fmt.Fprintf(w, "{%s}", v.Interface())
- return
- }
- 
- w.Write([]byte{'{'})
- 
- var sep bool
- for i := 0; i < v.NumField(); i++ {
- fv := v.Field(i)
- if fv.Kind() == reflect.Ptr && fv.IsNil() {
- continue
- }
- if fv.Kind() == reflect.Slice && fv.IsNil() {
- continue
- }
- if fv.Kind() == reflect.Map && fv.IsNil() {
- continue
- }
- 
- if sep {
- w.Write([]byte(", "))
- } else {
- sep = true
- }
- 
- w.Write([]byte(v.Type().Field(i).Name))
- w.Write([]byte{':'})
- stringifyValue(w, fv)
- }
- 
- w.Write([]byte{'}'})
- default:
- if v.CanInterface() {
- fmt.Fprint(w, v.Interface())
- }
- }
-}
diff --git a/vendor/github.com/google/go-github/v57/github/teams.go b/vendor/github.com/google/go-github/v57/github/teams.go
deleted file mode 100644
index fd22b792..00000000
--- a/vendor/github.com/google/go-github/v57/github/teams.go
+++ /dev/null
@@ -1,1067 +0,0 @@
-// Copyright 2018 The go-github AUTHORS. All rights reserved.
-//
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package github
-
-import (
- "context"
- "fmt"
- "net/http"
- "strings"
-)
-
-// TeamsService provides access to the team-related functions
-// in the GitHub API.
-//
-// GitHub API docs: https://docs.github.com/rest/teams/
-type TeamsService service
-
-// Team represents a team within a GitHub organization. Teams are used to
-// manage access to an organization's repositories.
-type Team struct {
- ID *int64 `json:"id,omitempty"`
- NodeID *string `json:"node_id,omitempty"`
- Name *string `json:"name,omitempty"`
- Description *string `json:"description,omitempty"`
- URL *string `json:"url,omitempty"`
- Slug *string `json:"slug,omitempty"`
-
- // Permission specifies the default permission for repositories owned by the team.
- Permission *string `json:"permission,omitempty"`
-
- // Permissions identifies the permissions that a team has on a given
- // repository. This is only populated when calling Repositories.ListTeams.
- Permissions map[string]bool `json:"permissions,omitempty"`
-
- // Privacy identifies the level of privacy this team should have.
- // Possible values are:
- // secret - only visible to organization owners and members of this team
- // closed - visible to all members of this organization
- // Default is "secret".
- Privacy *string `json:"privacy,omitempty"`
-
- MembersCount *int `json:"members_count,omitempty"`
- ReposCount *int `json:"repos_count,omitempty"`
- Organization *Organization `json:"organization,omitempty"`
- HTMLURL *string `json:"html_url,omitempty"`
- MembersURL *string `json:"members_url,omitempty"`
- RepositoriesURL *string `json:"repositories_url,omitempty"`
- Parent *Team `json:"parent,omitempty"`
-
- // LDAPDN is only available in GitHub Enterprise and when the team
- // membership is synchronized with LDAP. 
- LDAPDN *string `json:"ldap_dn,omitempty"` -} - -func (t Team) String() string { - return Stringify(t) -} - -// Invitation represents a team member's invitation status. -type Invitation struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Login *string `json:"login,omitempty"` - Email *string `json:"email,omitempty"` - // Role can be one of the values - 'direct_member', 'admin', 'billing_manager', 'hiring_manager', or 'reinstate'. - Role *string `json:"role,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - Inviter *User `json:"inviter,omitempty"` - TeamCount *int `json:"team_count,omitempty"` - InvitationTeamURL *string `json:"invitation_team_url,omitempty"` - FailedAt *Timestamp `json:"failed_at,omitempty"` - FailedReason *string `json:"failed_reason,omitempty"` -} - -func (i Invitation) String() string { - return Stringify(i) -} - -// ListTeams lists all of the teams for an organization. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-teams -// -//meta:operation GET /orgs/{org}/teams -func (s *TeamsService) ListTeams(ctx context.Context, org string, opts *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// GetTeamByID fetches a team, given a specified organization ID, by ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#get-a-team-by-name -// -//meta:operation GET /orgs/{org}/teams/{team_slug} -func (s *TeamsService) GetTeamByID(ctx context.Context, orgID, teamID int64) (*Team, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v", orgID, teamID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// GetTeamBySlug fetches a team, given a specified organization name, by slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#get-a-team-by-name -// -//meta:operation GET /orgs/{org}/teams/{team_slug} -func (s *TeamsService) GetTeamBySlug(ctx context.Context, org, slug string) (*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v", org, slug) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// NewTeam represents a team to be created or modified. -type NewTeam struct { - Name string `json:"name"` // Name of the team. (Required.) - Description *string `json:"description,omitempty"` - Maintainers []string `json:"maintainers,omitempty"` - RepoNames []string `json:"repo_names,omitempty"` - ParentTeamID *int64 `json:"parent_team_id,omitempty"` - - // Deprecated: Permission is deprecated when creating or editing a team in an org - // using the new GitHub permission model. It no longer identifies the - // permission a team has on its repos, but only specifies the default - // permission a repo is initially added with. Avoid confusion by - // specifying a permission value when calling AddTeamRepo. 
- Permission *string `json:"permission,omitempty"` - - // Privacy identifies the level of privacy this team should have. - // Possible values are: - // secret - only visible to organization owners and members of this team - // closed - visible to all members of this organization - // Default is "secret". - Privacy *string `json:"privacy,omitempty"` - - // LDAPDN may be used in GitHub Enterprise when the team membership - // is synchronized with LDAP. - LDAPDN *string `json:"ldap_dn,omitempty"` -} - -func (s NewTeam) String() string { - return Stringify(s) -} - -// CreateTeam creates a new team within an organization. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#create-a-team -// -//meta:operation POST /orgs/{org}/teams -func (s *TeamsService) CreateTeam(ctx context.Context, org string, team NewTeam) (*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams", org) - req, err := s.client.NewRequest("POST", u, team) - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// newTeamNoParent is the same as NewTeam but ensures that the -// "parent_team_id" field will be null. It is for internal use -// only and should not be exported. -type newTeamNoParent struct { - Name string `json:"name"` - Description *string `json:"description,omitempty"` - Maintainers []string `json:"maintainers,omitempty"` - RepoNames []string `json:"repo_names,omitempty"` - ParentTeamID *int64 `json:"parent_team_id"` // This will be "null" - Privacy *string `json:"privacy,omitempty"` - LDAPDN *string `json:"ldap_dn,omitempty"` -} - -// copyNewTeamWithoutParent is used to set the "parent_team_id" -// field to "null" after copying the other fields from a NewTeam. -// It is for internal use only and should not be exported. -func copyNewTeamWithoutParent(team *NewTeam) *newTeamNoParent { - return &newTeamNoParent{ - Name: team.Name, - Description: team.Description, - Maintainers: team.Maintainers, - RepoNames: team.RepoNames, - Privacy: team.Privacy, - LDAPDN: team.LDAPDN, - } -} - -// EditTeamByID edits a team, given an organization ID, selected by ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#update-a-team -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug} -func (s *TeamsService) EditTeamByID(ctx context.Context, orgID, teamID int64, team NewTeam, removeParent bool) (*Team, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v", orgID, teamID) - - var req *http.Request - var err error - if removeParent { - teamRemoveParent := copyNewTeamWithoutParent(&team) - req, err = s.client.NewRequest("PATCH", u, teamRemoveParent) - } else { - req, err = s.client.NewRequest("PATCH", u, team) - } - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// EditTeamBySlug edits a team, given an organization name, by slug. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/teams#update-a-team -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug} -func (s *TeamsService) EditTeamBySlug(ctx context.Context, org, slug string, team NewTeam, removeParent bool) (*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v", org, slug) - - var req *http.Request - var err error - if removeParent { - teamRemoveParent := copyNewTeamWithoutParent(&team) - req, err = s.client.NewRequest("PATCH", u, teamRemoveParent) - } else { - req, err = s.client.NewRequest("PATCH", u, team) - } - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// DeleteTeamByID deletes a team referenced by ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#delete-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug} -func (s *TeamsService) DeleteTeamByID(ctx context.Context, orgID, teamID int64) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v", orgID, teamID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteTeamBySlug deletes a team reference by slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#delete-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug} -func (s *TeamsService) DeleteTeamBySlug(ctx context.Context, org, slug string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v", org, slug) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListChildTeamsByParentID lists child teams for a parent team given parent ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-child-teams -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/teams -func (s *TeamsService) ListChildTeamsByParentID(ctx context.Context, orgID, teamID int64, opts *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/teams", orgID, teamID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// ListChildTeamsByParentSlug lists child teams for a parent team given parent slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-child-teams -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/teams -func (s *TeamsService) ListChildTeamsByParentSlug(ctx context.Context, org, slug string, opts *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/teams", org, slug) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// ListTeamReposByID lists the repositories given a team ID that the specified team has access to. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-team-repositories -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/repos -func (s *TeamsService) ListTeamReposByID(ctx context.Context, orgID, teamID int64, opts *ListOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/repos", orgID, teamID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when topics API fully launches. - headers := []string{mediaTypeTopicsPreview} - req.Header.Set("Accept", strings.Join(headers, ", ")) - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// ListTeamReposBySlug lists the repositories given a team slug that the specified team has access to. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-team-repositories -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/repos -func (s *TeamsService) ListTeamReposBySlug(ctx context.Context, org, slug string, opts *ListOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/repos", org, slug) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when topics API fully launches. - headers := []string{mediaTypeTopicsPreview} - req.Header.Set("Accept", strings.Join(headers, ", ")) - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// IsTeamRepoByID checks if a team, given its ID, manages the specified repository. If the -// repository is managed by team, a Repository is returned which includes the -// permissions team has for that repo. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#check-team-permissions-for-a-repository -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) IsTeamRepoByID(ctx context.Context, orgID, teamID int64, owner, repo string) (*Repository, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/repos/%v/%v", orgID, teamID, owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - headers := []string{mediaTypeOrgPermissionRepo} - req.Header.Set("Accept", strings.Join(headers, ", ")) - - repository := new(Repository) - resp, err := s.client.Do(ctx, req, repository) - if err != nil { - return nil, resp, err - } - - return repository, resp, nil -} - -// IsTeamRepoBySlug checks if a team, given its slug, manages the specified repository. If the -// repository is managed by team, a Repository is returned which includes the -// permissions team has for that repo. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/teams#check-team-permissions-for-a-repository -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) IsTeamRepoBySlug(ctx context.Context, org, slug, owner, repo string) (*Repository, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/repos/%v/%v", org, slug, owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - headers := []string{mediaTypeOrgPermissionRepo} - req.Header.Set("Accept", strings.Join(headers, ", ")) - - repository := new(Repository) - resp, err := s.client.Do(ctx, req, repository) - if err != nil { - return nil, resp, err - } - - return repository, resp, nil -} - -// TeamAddTeamRepoOptions specifies the optional parameters to the -// TeamsService.AddTeamRepoByID and TeamsService.AddTeamRepoBySlug methods. -type TeamAddTeamRepoOptions struct { - // Permission specifies the permission to grant the team on this repository. - // Possible values are: - // pull - team members can pull, but not push to or administer this repository - // push - team members can pull and push, but not administer this repository - // admin - team members can pull, push and administer this repository - // maintain - team members can manage the repository without access to sensitive or destructive actions. - // triage - team members can proactively manage issues and pull requests without write access. - // - // If not specified, the team's permission attribute will be used. - Permission string `json:"permission,omitempty"` -} - -// AddTeamRepoByID adds a repository to be managed by the specified team given the team ID. -// The specified repository must be owned by the organization to which the team -// belongs, or a direct fork of a repository owned by the organization. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#add-or-update-team-repository-permissions -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) AddTeamRepoByID(ctx context.Context, orgID, teamID int64, owner, repo string, opts *TeamAddTeamRepoOptions) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/repos/%v/%v", orgID, teamID, owner, repo) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddTeamRepoBySlug adds a repository to be managed by the specified team given the team slug. -// The specified repository must be owned by the organization to which the team -// belongs, or a direct fork of a repository owned by the organization. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#add-or-update-team-repository-permissions -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) AddTeamRepoBySlug(ctx context.Context, org, slug, owner, repo string, opts *TeamAddTeamRepoOptions) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/repos/%v/%v", org, slug, owner, repo) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamRepoByID removes a repository from being managed by the specified -// team given the team ID. Note that this does not delete the repository, it -// just removes it from the team. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/teams#remove-a-repository-from-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) RemoveTeamRepoByID(ctx context.Context, orgID, teamID int64, owner, repo string) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/repos/%v/%v", orgID, teamID, owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamRepoBySlug removes a repository from being managed by the specified -// team given the team slug. Note that this does not delete the repository, it -// just removes it from the team. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#remove-a-repository-from-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) RemoveTeamRepoBySlug(ctx context.Context, org, slug, owner, repo string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/repos/%v/%v", org, slug, owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListUserTeams lists a user's teams -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-teams-for-the-authenticated-user -// -//meta:operation GET /user/teams -func (s *TeamsService) ListUserTeams(ctx context.Context, opts *ListOptions) ([]*Team, *Response, error) { - u := "user/teams" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// ListTeamProjectsByID lists the organization projects for a team given the team ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-team-projects -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/projects -func (s *TeamsService) ListTeamProjectsByID(ctx context.Context, orgID, teamID int64) ([]*Project, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/projects", orgID, teamID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var projects []*Project - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// ListTeamProjectsBySlug lists the organization projects for a team given the team slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-team-projects -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/projects -func (s *TeamsService) ListTeamProjectsBySlug(ctx context.Context, org, slug string) ([]*Project, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/projects", org, slug) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var projects []*Project - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// ReviewTeamProjectsByID checks whether a team, given its ID, has read, write, or admin -// permissions for an organization project. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#check-team-permissions-for-a-project -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) ReviewTeamProjectsByID(ctx context.Context, orgID, teamID, projectID int64) (*Project, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/projects/%v", orgID, teamID, projectID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - projects := &Project{} - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// ReviewTeamProjectsBySlug checks whether a team, given its slug, has read, write, or admin -// permissions for an organization project. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#check-team-permissions-for-a-project -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) ReviewTeamProjectsBySlug(ctx context.Context, org, slug string, projectID int64) (*Project, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/projects/%v", org, slug, projectID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - projects := &Project{} - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// TeamProjectOptions specifies the optional parameters to the -// TeamsService.AddTeamProject method. -type TeamProjectOptions struct { - // Permission specifies the permission to grant to the team for this project. - // Possible values are: - // "read" - team members can read, but not write to or administer this project. - // "write" - team members can read and write, but not administer this project. - // "admin" - team members can read, write and administer this project. - // - Permission *string `json:"permission,omitempty"` -} - -// AddTeamProjectByID adds an organization project to a team given the team ID. -// To add a project to a team or update the team's permission on a project, the -// authenticated user must have admin permissions for the project. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#add-or-update-team-project-permissions -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) AddTeamProjectByID(ctx context.Context, orgID, teamID, projectID int64, opts *TeamProjectOptions) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/projects/%v", orgID, teamID, projectID) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - return s.client.Do(ctx, req, nil) -} - -// AddTeamProjectBySlug adds an organization project to a team given the team slug. -// To add a project to a team or update the team's permission on a project, the -// authenticated user must have admin permissions for the project. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#add-or-update-team-project-permissions -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) AddTeamProjectBySlug(ctx context.Context, org, slug string, projectID int64, opts *TeamProjectOptions) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/projects/%v", org, slug, projectID) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamProjectByID removes an organization project from a team given team ID. -// An organization owner or a team maintainer can remove any project from the team. -// To remove a project from a team as an organization member, the authenticated user -// must have "read" access to both the team and project, or "admin" access to the team -// or project. -// Note: This endpoint removes the project from the team, but does not delete it. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#remove-a-project-from-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) RemoveTeamProjectByID(ctx context.Context, orgID, teamID, projectID int64) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/projects/%v", orgID, teamID, projectID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamProjectBySlug removes an organization project from a team given team slug. -// An organization owner or a team maintainer can remove any project from the team. -// To remove a project from a team as an organization member, the authenticated user -// must have "read" access to both the team and project, or "admin" access to the team -// or project. -// Note: This endpoint removes the project from the team, but does not delete it. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#remove-a-project-from-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) RemoveTeamProjectBySlug(ctx context.Context, org, slug string, projectID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/projects/%v", org, slug, projectID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - return s.client.Do(ctx, req, nil) -} - -// IDPGroupList represents a list of external identity provider (IDP) groups. 
-type IDPGroupList struct { - Groups []*IDPGroup `json:"groups"` -} - -// IDPGroup represents an external identity provider (IDP) group. -type IDPGroup struct { - GroupID *string `json:"group_id,omitempty"` - GroupName *string `json:"group_name,omitempty"` - GroupDescription *string `json:"group_description,omitempty"` -} - -// ListIDPGroupsInOrganization lists IDP groups available in an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/team-sync#list-idp-groups-for-an-organization -// -//meta:operation GET /orgs/{org}/team-sync/groups -func (s *TeamsService) ListIDPGroupsInOrganization(ctx context.Context, org string, opts *ListCursorOptions) (*IDPGroupList, *Response, error) { - u := fmt.Sprintf("orgs/%v/team-sync/groups", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - groups := new(IDPGroupList) - resp, err := s.client.Do(ctx, req, groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// ListIDPGroupsForTeamByID lists IDP groups connected to a team on GitHub -// given organization and team IDs. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/team-sync#list-idp-groups-for-a-team -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings -func (s *TeamsService) ListIDPGroupsForTeamByID(ctx context.Context, orgID, teamID int64) (*IDPGroupList, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/team-sync/group-mappings", orgID, teamID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - groups := new(IDPGroupList) - resp, err := s.client.Do(ctx, req, groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// ListIDPGroupsForTeamBySlug lists IDP groups connected to a team on GitHub -// given organization name and team slug. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/team-sync#list-idp-groups-for-a-team -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings -func (s *TeamsService) ListIDPGroupsForTeamBySlug(ctx context.Context, org, slug string) (*IDPGroupList, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/team-sync/group-mappings", org, slug) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - groups := new(IDPGroupList) - resp, err := s.client.Do(ctx, req, groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// CreateOrUpdateIDPGroupConnectionsByID creates, updates, or removes a connection -// between a team and an IDP group given organization and team IDs. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/team-sync#create-or-update-idp-group-connections -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/team-sync/group-mappings -func (s *TeamsService) CreateOrUpdateIDPGroupConnectionsByID(ctx context.Context, orgID, teamID int64, opts IDPGroupList) (*IDPGroupList, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/team-sync/group-mappings", orgID, teamID) - - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - groups := new(IDPGroupList) - resp, err := s.client.Do(ctx, req, groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// CreateOrUpdateIDPGroupConnectionsBySlug creates, updates, or removes a connection -// between a team and an IDP group given organization name and team slug. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/team-sync#create-or-update-idp-group-connections -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/team-sync/group-mappings -func (s *TeamsService) CreateOrUpdateIDPGroupConnectionsBySlug(ctx context.Context, org, slug string, opts IDPGroupList) (*IDPGroupList, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/team-sync/group-mappings", org, slug) - - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - groups := new(IDPGroupList) - resp, err := s.client.Do(ctx, req, groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// ExternalGroupMember represents a member of an external group. -type ExternalGroupMember struct { - MemberID *int64 `json:"member_id,omitempty"` - MemberLogin *string `json:"member_login,omitempty"` - MemberName *string `json:"member_name,omitempty"` - MemberEmail *string `json:"member_email,omitempty"` -} - -// ExternalGroupTeam represents a team connected to an external group. -type ExternalGroupTeam struct { - TeamID *int64 `json:"team_id,omitempty"` - TeamName *string `json:"team_name,omitempty"` -} - -// ExternalGroup represents an external group. -type ExternalGroup struct { - GroupID *int64 `json:"group_id,omitempty"` - GroupName *string `json:"group_name,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Teams []*ExternalGroupTeam `json:"teams,omitempty"` - Members []*ExternalGroupMember `json:"members,omitempty"` -} - -// ExternalGroupList represents a list of external groups. -type ExternalGroupList struct { - Groups []*ExternalGroup `json:"groups"` -} - -// GetExternalGroup fetches an external group. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/external-groups#get-an-external-group -// -//meta:operation GET /orgs/{org}/external-group/{group_id} -func (s *TeamsService) GetExternalGroup(ctx context.Context, org string, groupID int64) (*ExternalGroup, *Response, error) { - u := fmt.Sprintf("orgs/%v/external-group/%v", org, groupID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - externalGroup := new(ExternalGroup) - resp, err := s.client.Do(ctx, req, externalGroup) - if err != nil { - return nil, resp, err - } - - return externalGroup, resp, nil -} - -// ListExternalGroupsOptions specifies the optional parameters to the -// TeamsService.ListExternalGroups method. 
-type ListExternalGroupsOptions struct { - DisplayName *string `url:"display_name,omitempty"` - - ListOptions -} - -// ListExternalGroups lists external groups in an organization on GitHub. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/external-groups#list-external-groups-in-an-organization -// -//meta:operation GET /orgs/{org}/external-groups -func (s *TeamsService) ListExternalGroups(ctx context.Context, org string, opts *ListExternalGroupsOptions) (*ExternalGroupList, *Response, error) { - u := fmt.Sprintf("orgs/%v/external-groups", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - externalGroups := new(ExternalGroupList) - resp, err := s.client.Do(ctx, req, externalGroups) - if err != nil { - return nil, resp, err - } - - return externalGroups, resp, nil -} - -// ListExternalGroupsForTeamBySlug lists external groups connected to a team on GitHub. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/external-groups#list-a-connection-between-an-external-group-and-a-team -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/external-groups -func (s *TeamsService) ListExternalGroupsForTeamBySlug(ctx context.Context, org, slug string) (*ExternalGroupList, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/external-groups", org, slug) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - externalGroups := new(ExternalGroupList) - resp, err := s.client.Do(ctx, req, externalGroups) - if err != nil { - return nil, resp, err - } - - return externalGroups, resp, nil -} - -// UpdateConnectedExternalGroup updates the connection between an external group and a team. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/external-groups#update-the-connection-between-an-external-group-and-a-team -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/external-groups -func (s *TeamsService) UpdateConnectedExternalGroup(ctx context.Context, org, slug string, eg *ExternalGroup) (*ExternalGroup, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/external-groups", org, slug) - - req, err := s.client.NewRequest("PATCH", u, eg) - if err != nil { - return nil, nil, err - } - - externalGroup := new(ExternalGroup) - resp, err := s.client.Do(ctx, req, externalGroup) - if err != nil { - return nil, resp, err - } - - return externalGroup, resp, nil -} - -// RemoveConnectedExternalGroup removes the connection between an external group and a team. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/external-groups#remove-the-connection-between-an-external-group-and-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/external-groups -func (s *TeamsService) RemoveConnectedExternalGroup(ctx context.Context, org, slug string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/external-groups", org, slug) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/teams_discussion_comments.go b/vendor/github.com/google/go-github/v57/github/teams_discussion_comments.go deleted file mode 100644 index ad3818c1..00000000 --- a/vendor/github.com/google/go-github/v57/github/teams_discussion_comments.go +++ /dev/null @@ -1,262 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// DiscussionComment represents a GitHub dicussion in a team. -type DiscussionComment struct { - Author *User `json:"author,omitempty"` - Body *string `json:"body,omitempty"` - BodyHTML *string `json:"body_html,omitempty"` - BodyVersion *string `json:"body_version,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - LastEditedAt *Timestamp `json:"last_edited_at,omitempty"` - DiscussionURL *string `json:"discussion_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Number *int `json:"number,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` -} - -func (c DiscussionComment) String() string { - return Stringify(c) -} - -// DiscussionCommentListOptions specifies optional parameters to the -// TeamServices.ListComments method. -type DiscussionCommentListOptions struct { - // Sorts the discussion comments by the date they were created. - // Accepted values are asc and desc. Default is desc. - Direction string `url:"direction,omitempty"` - ListOptions -} - -// ListCommentsByID lists all comments on a team discussion by team ID. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#list-discussion-comments -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments -func (s *TeamsService) ListCommentsByID(ctx context.Context, orgID, teamID int64, discussionNumber int, options *DiscussionCommentListOptions) ([]*DiscussionComment, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments", orgID, teamID, discussionNumber) - u, err := addOptions(u, options) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var comments []*DiscussionComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// ListCommentsBySlug lists all comments on a team discussion by team slug. -// Authenticated user must grant read:discussion scope. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#list-discussion-comments -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments -func (s *TeamsService) ListCommentsBySlug(ctx context.Context, org, slug string, discussionNumber int, options *DiscussionCommentListOptions) ([]*DiscussionComment, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments", org, slug, discussionNumber) - u, err := addOptions(u, options) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var comments []*DiscussionComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// GetCommentByID gets a specific comment on a team discussion by team ID. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#get-a-discussion-comment -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) GetCommentByID(ctx context.Context, orgID, teamID int64, discussionNumber, commentNumber int) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v", orgID, teamID, discussionNumber, commentNumber) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// GetCommentBySlug gets a specific comment on a team discussion by team slug. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#get-a-discussion-comment -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) GetCommentBySlug(ctx context.Context, org, slug string, discussionNumber, commentNumber int) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v", org, slug, discussionNumber, commentNumber) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// CreateCommentByID creates a new comment on a team discussion by team ID. -// Authenticated user must grant write:discussion scope. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#create-a-discussion-comment -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments -func (s *TeamsService) CreateCommentByID(ctx context.Context, orgID, teamID int64, discsusionNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments", orgID, teamID, discsusionNumber) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// CreateCommentBySlug creates a new comment on a team discussion by team slug. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#create-a-discussion-comment -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments -func (s *TeamsService) CreateCommentBySlug(ctx context.Context, org, slug string, discsusionNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments", org, slug, discsusionNumber) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// EditCommentByID edits the body text of a discussion comment by team ID. -// Authenticated user must grant write:discussion scope. -// User is allowed to edit body of a comment only. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#update-a-discussion-comment -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) EditCommentByID(ctx context.Context, orgID, teamID int64, discussionNumber, commentNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v", orgID, teamID, discussionNumber, commentNumber) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// EditCommentBySlug edits the body text of a discussion comment by team slug. -// Authenticated user must grant write:discussion scope. -// User is allowed to edit body of a comment only. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#update-a-discussion-comment -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) EditCommentBySlug(ctx context.Context, org, slug string, discussionNumber, commentNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v", org, slug, discussionNumber, commentNumber) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// DeleteCommentByID deletes a comment on a team discussion by team ID. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#delete-a-discussion-comment -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) DeleteCommentByID(ctx context.Context, orgID, teamID int64, discussionNumber, commentNumber int) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v", orgID, teamID, discussionNumber, commentNumber) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteCommentBySlug deletes a comment on a team discussion by team slug. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#delete-a-discussion-comment -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) DeleteCommentBySlug(ctx context.Context, org, slug string, discussionNumber, commentNumber int) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v", org, slug, discussionNumber, commentNumber) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/teams_discussions.go b/vendor/github.com/google/go-github/v57/github/teams_discussions.go deleted file mode 100644 index ee78c032..00000000 --- a/vendor/github.com/google/go-github/v57/github/teams_discussions.go +++ /dev/null @@ -1,267 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// TeamDiscussion represents a GitHub dicussion in a team. 
-type TeamDiscussion struct { - Author *User `json:"author,omitempty"` - Body *string `json:"body,omitempty"` - BodyHTML *string `json:"body_html,omitempty"` - BodyVersion *string `json:"body_version,omitempty"` - CommentsCount *int `json:"comments_count,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - LastEditedAt *Timestamp `json:"last_edited_at,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Number *int `json:"number,omitempty"` - Pinned *bool `json:"pinned,omitempty"` - Private *bool `json:"private,omitempty"` - TeamURL *string `json:"team_url,omitempty"` - Title *string `json:"title,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` -} - -func (d TeamDiscussion) String() string { - return Stringify(d) -} - -// DiscussionListOptions specifies optional parameters to the -// TeamServices.ListDiscussions method. -type DiscussionListOptions struct { - // Sorts the discussion by the date they were created. - // Accepted values are asc and desc. Default is desc. - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListDiscussionsByID lists all discussions on team's page given Organization and Team ID. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#list-discussions -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions -func (s *TeamsService) ListDiscussionsByID(ctx context.Context, orgID, teamID int64, opts *DiscussionListOptions) ([]*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions", orgID, teamID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teamDiscussions []*TeamDiscussion - resp, err := s.client.Do(ctx, req, &teamDiscussions) - if err != nil { - return nil, resp, err - } - - return teamDiscussions, resp, nil -} - -// ListDiscussionsBySlug lists all discussions on team's page given Organization name and Team's slug. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#list-discussions -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions -func (s *TeamsService) ListDiscussionsBySlug(ctx context.Context, org, slug string, opts *DiscussionListOptions) ([]*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions", org, slug) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teamDiscussions []*TeamDiscussion - resp, err := s.client.Do(ctx, req, &teamDiscussions) - if err != nil { - return nil, resp, err - } - - return teamDiscussions, resp, nil -} - -// GetDiscussionByID gets a specific discussion on a team's page given Organization and Team ID. -// Authenticated user must grant read:discussion scope. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#get-a-discussion -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) GetDiscussionByID(ctx context.Context, orgID, teamID int64, discussionNumber int) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v", orgID, teamID, discussionNumber) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// GetDiscussionBySlug gets a specific discussion on a team's page given Organization name and Team's slug. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#get-a-discussion -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) GetDiscussionBySlug(ctx context.Context, org, slug string, discussionNumber int) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v", org, slug, discussionNumber) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// CreateDiscussionByID creates a new discussion post on a team's page given Organization and Team ID. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#create-a-discussion -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions -func (s *TeamsService) CreateDiscussionByID(ctx context.Context, orgID, teamID int64, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions", orgID, teamID) - req, err := s.client.NewRequest("POST", u, discussion) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// CreateDiscussionBySlug creates a new discussion post on a team's page given Organization name and Team's slug. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#create-a-discussion -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions -func (s *TeamsService) CreateDiscussionBySlug(ctx context.Context, org, slug string, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions", org, slug) - req, err := s.client.NewRequest("POST", u, discussion) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// EditDiscussionByID edits the title and body text of a discussion post given Organization and Team ID. -// Authenticated user must grant write:discussion scope. -// User is allowed to change Title and Body of a discussion only. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#update-a-discussion -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) EditDiscussionByID(ctx context.Context, orgID, teamID int64, discussionNumber int, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v", orgID, teamID, discussionNumber) - req, err := s.client.NewRequest("PATCH", u, discussion) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// EditDiscussionBySlug edits the title and body text of a discussion post given Organization name and Team's slug. -// Authenticated user must grant write:discussion scope. -// User is allowed to change Title and Body of a discussion only. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#update-a-discussion -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) EditDiscussionBySlug(ctx context.Context, org, slug string, discussionNumber int, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v", org, slug, discussionNumber) - req, err := s.client.NewRequest("PATCH", u, discussion) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// DeleteDiscussionByID deletes a discussion from team's page given Organization and Team ID. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#delete-a-discussion -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) DeleteDiscussionByID(ctx context.Context, orgID, teamID int64, discussionNumber int) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v", orgID, teamID, discussionNumber) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteDiscussionBySlug deletes a discussion from team's page given Organization name and Team's slug. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#delete-a-discussion -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) DeleteDiscussionBySlug(ctx context.Context, org, slug string, discussionNumber int) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v", org, slug, discussionNumber) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/teams_members.go b/vendor/github.com/google/go-github/v57/github/teams_members.go deleted file mode 100644 index 059d993a..00000000 --- a/vendor/github.com/google/go-github/v57/github/teams_members.go +++ /dev/null @@ -1,263 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// TeamListTeamMembersOptions specifies the optional parameters to the -// TeamsService.ListTeamMembers method. -type TeamListTeamMembersOptions struct { - // Role filters members returned by their role in the team. Possible - // values are "all", "member", "maintainer". Default is "all". - Role string `url:"role,omitempty"` - - ListOptions -} - -// ListTeamMembersByID lists all of the users who are members of a team, given a specified -// organization ID, by team ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#list-team-members -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/members -func (s *TeamsService) ListTeamMembersByID(ctx context.Context, orgID, teamID int64, opts *TeamListTeamMembersOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/members", orgID, teamID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var members []*User - resp, err := s.client.Do(ctx, req, &members) - if err != nil { - return nil, resp, err - } - - return members, resp, nil -} - -// ListTeamMembersBySlug lists all of the users who are members of a team, given a specified -// organization name, by team slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#list-team-members -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/members -func (s *TeamsService) ListTeamMembersBySlug(ctx context.Context, org, slug string, opts *TeamListTeamMembersOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/members", org, slug) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var members []*User - resp, err := s.client.Do(ctx, req, &members) - if err != nil { - return nil, resp, err - } - - return members, resp, nil -} - -// GetTeamMembershipByID returns the membership status for a user in a team, given a specified -// organization ID, by team ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#list-team-members -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/members -func (s *TeamsService) GetTeamMembershipByID(ctx context.Context, orgID, teamID int64, user string) (*Membership, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/memberships/%v", orgID, teamID, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// GetTeamMembershipBySlug returns the membership status for a user in a team, given a specified -// organization name, by team slug. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/members#get-team-membership-for-a-user -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/memberships/{username} -func (s *TeamsService) GetTeamMembershipBySlug(ctx context.Context, org, slug, user string) (*Membership, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/memberships/%v", org, slug, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// TeamAddTeamMembershipOptions specifies the optional -// parameters to the TeamsService.AddTeamMembership method. -type TeamAddTeamMembershipOptions struct { - // Role specifies the role the user should have in the team. Possible - // values are: - // member - a normal member of the team - // maintainer - a team maintainer. Able to add/remove other team - // members, promote other team members to team - // maintainer, and edit the team’s name and description - // - // Default value is "member". - Role string `json:"role,omitempty"` -} - -// AddTeamMembershipByID adds or invites a user to a team, given a specified -// organization ID, by team ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#add-or-update-team-membership-for-a-user -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/memberships/{username} -func (s *TeamsService) AddTeamMembershipByID(ctx context.Context, orgID, teamID int64, user string, opts *TeamAddTeamMembershipOptions) (*Membership, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/memberships/%v", orgID, teamID, user) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// AddTeamMembershipBySlug adds or invites a user to a team, given a specified -// organization name, by team slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#add-or-update-team-membership-for-a-user -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/memberships/{username} -func (s *TeamsService) AddTeamMembershipBySlug(ctx context.Context, org, slug, user string, opts *TeamAddTeamMembershipOptions) (*Membership, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/memberships/%v", org, slug, user) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// RemoveTeamMembershipByID removes a user from a team, given a specified -// organization ID, by team ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#remove-team-membership-for-a-user -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/memberships/{username} -func (s *TeamsService) RemoveTeamMembershipByID(ctx context.Context, orgID, teamID int64, user string) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/memberships/%v", orgID, teamID, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamMembershipBySlug removes a user from a team, given a specified -// organization name, by team slug. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/members#remove-team-membership-for-a-user -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/memberships/{username} -func (s *TeamsService) RemoveTeamMembershipBySlug(ctx context.Context, org, slug, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/memberships/%v", org, slug, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListPendingTeamInvitationsByID gets pending invitation list of a team, given a specified -// organization ID, by team ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#list-pending-team-invitations -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/invitations -func (s *TeamsService) ListPendingTeamInvitationsByID(ctx context.Context, orgID, teamID int64, opts *ListOptions) ([]*Invitation, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/invitations", orgID, teamID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pendingInvitations []*Invitation - resp, err := s.client.Do(ctx, req, &pendingInvitations) - if err != nil { - return nil, resp, err - } - - return pendingInvitations, resp, nil -} - -// ListPendingTeamInvitationsBySlug get pending invitation list of a team, given a specified -// organization name, by team slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#list-pending-team-invitations -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/invitations -func (s *TeamsService) ListPendingTeamInvitationsBySlug(ctx context.Context, org, slug string, opts *ListOptions) ([]*Invitation, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/invitations", org, slug) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pendingInvitations []*Invitation - resp, err := s.client.Do(ctx, req, &pendingInvitations) - if err != nil { - return nil, resp, err - } - - return pendingInvitations, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/timestamp.go b/vendor/github.com/google/go-github/v57/github/timestamp.go deleted file mode 100644 index 00c1235e..00000000 --- a/vendor/github.com/google/go-github/v57/github/timestamp.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "strconv" - "time" -) - -// Timestamp represents a time that can be unmarshalled from a JSON string -// formatted as either an RFC3339 or Unix timestamp. This is necessary for some -// fields since the GitHub API is inconsistent in how it represents times. All -// exported methods of time.Time can be called on Timestamp. -type Timestamp struct { - time.Time -} - -func (t Timestamp) String() string { - return t.Time.String() -} - -// GetTime returns std time.Time. -func (t *Timestamp) GetTime() *time.Time { - if t == nil { - return nil - } - return &t.Time -} - -// UnmarshalJSON implements the json.Unmarshaler interface. -// Time is expected in RFC3339 or Unix format. 
-func (t *Timestamp) UnmarshalJSON(data []byte) (err error) { - str := string(data) - i, err := strconv.ParseInt(str, 10, 64) - if err == nil { - t.Time = time.Unix(i, 0) - if t.Time.Year() > 3000 { - t.Time = time.Unix(0, i*1e6) - } - } else { - t.Time, err = time.Parse(`"`+time.RFC3339+`"`, str) - } - return -} - -// Equal reports whether t and u are equal based on time.Equal -func (t Timestamp) Equal(u Timestamp) bool { - return t.Time.Equal(u.Time) -} diff --git a/vendor/github.com/google/go-github/v57/github/users.go b/vendor/github.com/google/go-github/v57/github/users.go deleted file mode 100644 index 51b2b219..00000000 --- a/vendor/github.com/google/go-github/v57/github/users.go +++ /dev/null @@ -1,294 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// UsersService handles communication with the user related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/users/ -type UsersService service - -// User represents a GitHub user. -type User struct { - Login *string `json:"login,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - GravatarID *string `json:"gravatar_id,omitempty"` - Name *string `json:"name,omitempty"` - Company *string `json:"company,omitempty"` - Blog *string `json:"blog,omitempty"` - Location *string `json:"location,omitempty"` - Email *string `json:"email,omitempty"` - Hireable *bool `json:"hireable,omitempty"` - Bio *string `json:"bio,omitempty"` - TwitterUsername *string `json:"twitter_username,omitempty"` - PublicRepos *int `json:"public_repos,omitempty"` - PublicGists *int `json:"public_gists,omitempty"` - Followers *int `json:"followers,omitempty"` - Following *int `json:"following,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - SuspendedAt *Timestamp `json:"suspended_at,omitempty"` - Type *string `json:"type,omitempty"` - SiteAdmin *bool `json:"site_admin,omitempty"` - TotalPrivateRepos *int64 `json:"total_private_repos,omitempty"` - OwnedPrivateRepos *int64 `json:"owned_private_repos,omitempty"` - PrivateGists *int `json:"private_gists,omitempty"` - DiskUsage *int `json:"disk_usage,omitempty"` - Collaborators *int `json:"collaborators,omitempty"` - TwoFactorAuthentication *bool `json:"two_factor_authentication,omitempty"` - Plan *Plan `json:"plan,omitempty"` - LdapDn *string `json:"ldap_dn,omitempty"` - - // API URLs - URL *string `json:"url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - FollowingURL *string `json:"following_url,omitempty"` - FollowersURL *string `json:"followers_url,omitempty"` - GistsURL *string `json:"gists_url,omitempty"` - OrganizationsURL *string `json:"organizations_url,omitempty"` - ReceivedEventsURL *string `json:"received_events_url,omitempty"` - ReposURL *string `json:"repos_url,omitempty"` - StarredURL *string `json:"starred_url,omitempty"` - SubscriptionsURL *string `json:"subscriptions_url,omitempty"` - - // TextMatches is only populated from search results that request text matches - // See: search.go and https://docs.github.com/rest/search/#text-match-metadata - TextMatches []*TextMatch `json:"text_matches,omitempty"` - - // Permissions and RoleName identify the permissions and role that 
a user has on a given - // repository. These are only populated when calling Repositories.ListCollaborators. - Permissions map[string]bool `json:"permissions,omitempty"` - RoleName *string `json:"role_name,omitempty"` -} - -func (u User) String() string { - return Stringify(u) -} - -// Get fetches a user. Passing the empty string will fetch the authenticated -// user. -// -// GitHub API docs: https://docs.github.com/rest/users/users#get-a-user -// GitHub API docs: https://docs.github.com/rest/users/users#get-the-authenticated-user -// -//meta:operation GET /user -//meta:operation GET /users/{username} -func (s *UsersService) Get(ctx context.Context, user string) (*User, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v", user) - } else { - u = "user" - } - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - uResp := new(User) - resp, err := s.client.Do(ctx, req, uResp) - if err != nil { - return nil, resp, err - } - - return uResp, resp, nil -} - -// GetByID fetches a user. -// -// Note: GetByID uses the undocumented GitHub API endpoint "GET /user/{user_id}". -// -//meta:operation GET /user/{user_id} -func (s *UsersService) GetByID(ctx context.Context, id int64) (*User, *Response, error) { - u := fmt.Sprintf("user/%d", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - user := new(User) - resp, err := s.client.Do(ctx, req, user) - if err != nil { - return nil, resp, err - } - - return user, resp, nil -} - -// Edit the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/users#update-the-authenticated-user -// -//meta:operation PATCH /user -func (s *UsersService) Edit(ctx context.Context, user *User) (*User, *Response, error) { - u := "user" - req, err := s.client.NewRequest("PATCH", u, user) - if err != nil { - return nil, nil, err - } - - uResp := new(User) - resp, err := s.client.Do(ctx, req, uResp) - if err != nil { - return nil, resp, err - } - - return uResp, resp, nil -} - -// HovercardOptions specifies optional parameters to the UsersService.GetHovercard -// method. -type HovercardOptions struct { - // SubjectType specifies the additional information to be received about the hovercard. - // Possible values are: organization, repository, issue, pull_request. (Required when using subject_id.) - SubjectType string `url:"subject_type"` - - // SubjectID specifies the ID for the SubjectType. (Required when using subject_type.) - SubjectID string `url:"subject_id"` -} - -// Hovercard represents hovercard information about a user. -type Hovercard struct { - Contexts []*UserContext `json:"contexts,omitempty"` -} - -// UserContext represents the contextual information about user. -type UserContext struct { - Message *string `json:"message,omitempty"` - Octicon *string `json:"octicon,omitempty"` -} - -// GetHovercard fetches contextual information about user. It requires authentication -// via Basic Auth or via OAuth with the repo scope. 
-// -// GitHub API docs: https://docs.github.com/rest/users/users#get-contextual-information-for-a-user -// -//meta:operation GET /users/{username}/hovercard -func (s *UsersService) GetHovercard(ctx context.Context, user string, opts *HovercardOptions) (*Hovercard, *Response, error) { - u := fmt.Sprintf("users/%v/hovercard", user) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - hc := new(Hovercard) - resp, err := s.client.Do(ctx, req, hc) - if err != nil { - return nil, resp, err - } - - return hc, resp, nil -} - -// UserListOptions specifies optional parameters to the UsersService.ListAll -// method. -type UserListOptions struct { - // ID of the last user seen - Since int64 `url:"since,omitempty"` - - // Note: Pagination is powered exclusively by the Since parameter, - // ListOptions.Page has no effect. - // ListOptions.PerPage controls an undocumented GitHub API parameter. - ListOptions -} - -// ListAll lists all GitHub users. -// -// To paginate through all users, populate 'Since' with the ID of the last user. -// -// GitHub API docs: https://docs.github.com/rest/users/users#list-users -// -//meta:operation GET /users -func (s *UsersService) ListAll(ctx context.Context, opts *UserListOptions) ([]*User, *Response, error) { - u, err := addOptions("users", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// ListInvitations lists all currently-open repository invitations for the -// authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#list-repository-invitations-for-the-authenticated-user -// -//meta:operation GET /user/repository_invitations -func (s *UsersService) ListInvitations(ctx context.Context, opts *ListOptions) ([]*RepositoryInvitation, *Response, error) { - u, err := addOptions("user/repository_invitations", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - invites := []*RepositoryInvitation{} - resp, err := s.client.Do(ctx, req, &invites) - if err != nil { - return nil, resp, err - } - - return invites, resp, nil -} - -// AcceptInvitation accepts the currently-open repository invitation for the -// authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#accept-a-repository-invitation -// -//meta:operation PATCH /user/repository_invitations/{invitation_id} -func (s *UsersService) AcceptInvitation(ctx context.Context, invitationID int64) (*Response, error) { - u := fmt.Sprintf("user/repository_invitations/%v", invitationID) - req, err := s.client.NewRequest("PATCH", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeclineInvitation declines the currently-open repository invitation for the -// authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#decline-a-repository-invitation -// -//meta:operation DELETE /user/repository_invitations/{invitation_id} -func (s *UsersService) DeclineInvitation(ctx context.Context, invitationID int64) (*Response, error) { - u := fmt.Sprintf("user/repository_invitations/%v", invitationID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/users_administration.go b/vendor/github.com/google/go-github/v57/github/users_administration.go deleted file mode 100644 index 02cb894b..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_administration.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// PromoteSiteAdmin promotes a user to a site administrator of a GitHub Enterprise instance. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#promote-a-user-to-be-a-site-administrator -// -//meta:operation PUT /users/{username}/site_admin -func (s *UsersService) PromoteSiteAdmin(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("users/%v/site_admin", user) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DemoteSiteAdmin demotes a user from site administrator of a GitHub Enterprise instance. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#demote-a-site-administrator -// -//meta:operation DELETE /users/{username}/site_admin -func (s *UsersService) DemoteSiteAdmin(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("users/%v/site_admin", user) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// UserSuspendOptions represents the reason a user is being suspended. -type UserSuspendOptions struct { - Reason *string `json:"reason,omitempty"` -} - -// Suspend a user on a GitHub Enterprise instance. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#suspend-a-user -// -//meta:operation PUT /users/{username}/suspended -func (s *UsersService) Suspend(ctx context.Context, user string, opts *UserSuspendOptions) (*Response, error) { - u := fmt.Sprintf("users/%v/suspended", user) - - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unsuspend a user on a GitHub Enterprise instance. 
-// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#unsuspend-a-user -// -//meta:operation DELETE /users/{username}/suspended -func (s *UsersService) Unsuspend(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("users/%v/suspended", user) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/users_blocking.go b/vendor/github.com/google/go-github/v57/github/users_blocking.go deleted file mode 100644 index 3f2af38f..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_blocking.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListBlockedUsers lists all the blocked users by the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/blocking#list-users-blocked-by-the-authenticated-user -// -//meta:operation GET /user/blocks -func (s *UsersService) ListBlockedUsers(ctx context.Context, opts *ListOptions) ([]*User, *Response, error) { - u := "user/blocks" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - var blockedUsers []*User - resp, err := s.client.Do(ctx, req, &blockedUsers) - if err != nil { - return nil, resp, err - } - - return blockedUsers, resp, nil -} - -// IsBlocked reports whether specified user is blocked by the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/blocking#check-if-a-user-is-blocked-by-the-authenticated-user -// -//meta:operation GET /user/blocks/{username} -func (s *UsersService) IsBlocked(ctx context.Context, user string) (bool, *Response, error) { - u := fmt.Sprintf("user/blocks/%v", user) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - resp, err := s.client.Do(ctx, req, nil) - isBlocked, err := parseBoolResponse(err) - return isBlocked, resp, err -} - -// BlockUser blocks specified user for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/blocking#block-a-user -// -//meta:operation PUT /user/blocks/{username} -func (s *UsersService) BlockUser(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("user/blocks/%v", user) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - return s.client.Do(ctx, req, nil) -} - -// UnblockUser unblocks specified user for the authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/users/blocking#unblock-a-user -// -//meta:operation DELETE /user/blocks/{username} -func (s *UsersService) UnblockUser(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("user/blocks/%v", user) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/users_emails.go b/vendor/github.com/google/go-github/v57/github/users_emails.go deleted file mode 100644 index 8386de25..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_emails.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import "context" - -// UserEmail represents user's email address -type UserEmail struct { - Email *string `json:"email,omitempty"` - Primary *bool `json:"primary,omitempty"` - Verified *bool `json:"verified,omitempty"` - Visibility *string `json:"visibility,omitempty"` -} - -// ListEmails lists all email addresses for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/emails#list-email-addresses-for-the-authenticated-user -// -//meta:operation GET /user/emails -func (s *UsersService) ListEmails(ctx context.Context, opts *ListOptions) ([]*UserEmail, *Response, error) { - u := "user/emails" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var emails []*UserEmail - resp, err := s.client.Do(ctx, req, &emails) - if err != nil { - return nil, resp, err - } - - return emails, resp, nil -} - -// AddEmails adds email addresses of the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/emails#add-an-email-address-for-the-authenticated-user -// -//meta:operation POST /user/emails -func (s *UsersService) AddEmails(ctx context.Context, emails []string) ([]*UserEmail, *Response, error) { - u := "user/emails" - req, err := s.client.NewRequest("POST", u, emails) - if err != nil { - return nil, nil, err - } - - var e []*UserEmail - resp, err := s.client.Do(ctx, req, &e) - if err != nil { - return nil, resp, err - } - - return e, resp, nil -} - -// DeleteEmails deletes email addresses from authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/emails#delete-an-email-address-for-the-authenticated-user -// -//meta:operation DELETE /user/emails -func (s *UsersService) DeleteEmails(ctx context.Context, emails []string) (*Response, error) { - u := "user/emails" - req, err := s.client.NewRequest("DELETE", u, emails) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// SetEmailVisibility sets the visibility for the primary email address of the authenticated user. -// `visibility` can be "private" or "public". 
-// -// GitHub API docs: https://docs.github.com/rest/users/emails#set-primary-email-visibility-for-the-authenticated-user -// -//meta:operation PATCH /user/email/visibility -func (s *UsersService) SetEmailVisibility(ctx context.Context, visibility string) ([]*UserEmail, *Response, error) { - u := "user/email/visibility" - - updateVisiblilityReq := &UserEmail{ - Visibility: &visibility, - } - - req, err := s.client.NewRequest("PATCH", u, updateVisiblilityReq) - if err != nil { - return nil, nil, err - } - - var e []*UserEmail - resp, err := s.client.Do(ctx, req, &e) - if err != nil { - return nil, resp, err - } - - return e, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/users_followers.go b/vendor/github.com/google/go-github/v57/github/users_followers.go deleted file mode 100644 index ec6f531e..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_followers.go +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListFollowers lists the followers for a user. Passing the empty string will -// fetch followers for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/followers#list-followers-of-a-user -// GitHub API docs: https://docs.github.com/rest/users/followers#list-followers-of-the-authenticated-user -// -//meta:operation GET /user/followers -//meta:operation GET /users/{username}/followers -func (s *UsersService) ListFollowers(ctx context.Context, user string, opts *ListOptions) ([]*User, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/followers", user) - } else { - u = "user/followers" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// ListFollowing lists the people that a user is following. Passing the empty -// string will list people the authenticated user is following. -// -// GitHub API docs: https://docs.github.com/rest/users/followers#list-the-people-a-user-follows -// GitHub API docs: https://docs.github.com/rest/users/followers#list-the-people-the-authenticated-user-follows -// -//meta:operation GET /user/following -//meta:operation GET /users/{username}/following -func (s *UsersService) ListFollowing(ctx context.Context, user string, opts *ListOptions) ([]*User, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/following", user) - } else { - u = "user/following" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// IsFollowing checks if "user" is following "target". Passing the empty -// string for "user" will check if the authenticated user is following "target". 
-// -// GitHub API docs: https://docs.github.com/rest/users/followers#check-if-a-person-is-followed-by-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/users/followers#check-if-a-user-follows-another-user -// -//meta:operation GET /user/following/{username} -//meta:operation GET /users/{username}/following/{target_user} -func (s *UsersService) IsFollowing(ctx context.Context, user, target string) (bool, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/following/%v", user, target) - } else { - u = fmt.Sprintf("user/following/%v", target) - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - following, err := parseBoolResponse(err) - return following, resp, err -} - -// Follow will cause the authenticated user to follow the specified user. -// -// GitHub API docs: https://docs.github.com/rest/users/followers#follow-a-user -// -//meta:operation PUT /user/following/{username} -func (s *UsersService) Follow(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("user/following/%v", user) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unfollow will cause the authenticated user to unfollow the specified user. -// -// GitHub API docs: https://docs.github.com/rest/users/followers#unfollow-a-user -// -//meta:operation DELETE /user/following/{username} -func (s *UsersService) Unfollow(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("user/following/%v", user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/users_gpg_keys.go b/vendor/github.com/google/go-github/v57/github/users_gpg_keys.go deleted file mode 100644 index de7caaf1..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_gpg_keys.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GPGKey represents a GitHub user's public GPG key used to verify GPG signed commits and tags. -// -// https://developer.github.com/changes/2016-04-04-git-signing-api-preview/ -type GPGKey struct { - ID *int64 `json:"id,omitempty"` - PrimaryKeyID *int64 `json:"primary_key_id,omitempty"` - KeyID *string `json:"key_id,omitempty"` - RawKey *string `json:"raw_key,omitempty"` - PublicKey *string `json:"public_key,omitempty"` - Emails []*GPGEmail `json:"emails,omitempty"` - Subkeys []*GPGKey `json:"subkeys,omitempty"` - CanSign *bool `json:"can_sign,omitempty"` - CanEncryptComms *bool `json:"can_encrypt_comms,omitempty"` - CanEncryptStorage *bool `json:"can_encrypt_storage,omitempty"` - CanCertify *bool `json:"can_certify,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - ExpiresAt *Timestamp `json:"expires_at,omitempty"` -} - -// String stringifies a GPGKey. -func (k GPGKey) String() string { - return Stringify(k) -} - -// GPGEmail represents an email address associated to a GPG key. -type GPGEmail struct { - Email *string `json:"email,omitempty"` - Verified *bool `json:"verified,omitempty"` -} - -// ListGPGKeys lists the public GPG keys for a user. 
Passing the empty -// string will fetch keys for the authenticated user. It requires authentication -// via Basic Auth or via OAuth with at least read:gpg_key scope. -// -// GitHub API docs: https://docs.github.com/rest/users/gpg-keys#list-gpg-keys-for-a-user -// GitHub API docs: https://docs.github.com/rest/users/gpg-keys#list-gpg-keys-for-the-authenticated-user -// -//meta:operation GET /user/gpg_keys -//meta:operation GET /users/{username}/gpg_keys -func (s *UsersService) ListGPGKeys(ctx context.Context, user string, opts *ListOptions) ([]*GPGKey, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/gpg_keys", user) - } else { - u = "user/gpg_keys" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var keys []*GPGKey - resp, err := s.client.Do(ctx, req, &keys) - if err != nil { - return nil, resp, err - } - - return keys, resp, nil -} - -// GetGPGKey gets extended details for a single GPG key. It requires authentication -// via Basic Auth or via OAuth with at least read:gpg_key scope. -// -// GitHub API docs: https://docs.github.com/rest/users/gpg-keys#get-a-gpg-key-for-the-authenticated-user -// -//meta:operation GET /user/gpg_keys/{gpg_key_id} -func (s *UsersService) GetGPGKey(ctx context.Context, id int64) (*GPGKey, *Response, error) { - u := fmt.Sprintf("user/gpg_keys/%v", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - key := &GPGKey{} - resp, err := s.client.Do(ctx, req, key) - if err != nil { - return nil, resp, err - } - - return key, resp, nil -} - -// CreateGPGKey creates a GPG key. It requires authenticatation via Basic Auth -// or OAuth with at least write:gpg_key scope. -// -// GitHub API docs: https://docs.github.com/rest/users/gpg-keys#create-a-gpg-key-for-the-authenticated-user -// -//meta:operation POST /user/gpg_keys -func (s *UsersService) CreateGPGKey(ctx context.Context, armoredPublicKey string) (*GPGKey, *Response, error) { - gpgKey := &struct { - ArmoredPublicKey string `json:"armored_public_key"` - }{ArmoredPublicKey: armoredPublicKey} - req, err := s.client.NewRequest("POST", "user/gpg_keys", gpgKey) - if err != nil { - return nil, nil, err - } - - key := &GPGKey{} - resp, err := s.client.Do(ctx, req, key) - if err != nil { - return nil, resp, err - } - - return key, resp, nil -} - -// DeleteGPGKey deletes a GPG key. It requires authentication via Basic Auth or -// via OAuth with at least admin:gpg_key scope. -// -// GitHub API docs: https://docs.github.com/rest/users/gpg-keys#delete-a-gpg-key-for-the-authenticated-user -// -//meta:operation DELETE /user/gpg_keys/{gpg_key_id} -func (s *UsersService) DeleteGPGKey(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("user/gpg_keys/%v", id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/users_keys.go b/vendor/github.com/google/go-github/v57/github/users_keys.go deleted file mode 100644 index 4d42986e..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_keys.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// Key represents a public SSH key used to authenticate a user or deploy script. -type Key struct { - ID *int64 `json:"id,omitempty"` - Key *string `json:"key,omitempty"` - URL *string `json:"url,omitempty"` - Title *string `json:"title,omitempty"` - ReadOnly *bool `json:"read_only,omitempty"` - Verified *bool `json:"verified,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - AddedBy *string `json:"added_by,omitempty"` - LastUsed *Timestamp `json:"last_used,omitempty"` -} - -func (k Key) String() string { - return Stringify(k) -} - -// ListKeys lists the verified public keys for a user. Passing the empty -// string will fetch keys for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/keys#list-public-keys-for-a-user -// GitHub API docs: https://docs.github.com/rest/users/keys#list-public-ssh-keys-for-the-authenticated-user -// -//meta:operation GET /user/keys -//meta:operation GET /users/{username}/keys -func (s *UsersService) ListKeys(ctx context.Context, user string, opts *ListOptions) ([]*Key, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/keys", user) - } else { - u = "user/keys" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var keys []*Key - resp, err := s.client.Do(ctx, req, &keys) - if err != nil { - return nil, resp, err - } - - return keys, resp, nil -} - -// GetKey fetches a single public key. -// -// GitHub API docs: https://docs.github.com/rest/users/keys#get-a-public-ssh-key-for-the-authenticated-user -// -//meta:operation GET /user/keys/{key_id} -func (s *UsersService) GetKey(ctx context.Context, id int64) (*Key, *Response, error) { - u := fmt.Sprintf("user/keys/%v", id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - key := new(Key) - resp, err := s.client.Do(ctx, req, key) - if err != nil { - return nil, resp, err - } - - return key, resp, nil -} - -// CreateKey adds a public key for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/keys#create-a-public-ssh-key-for-the-authenticated-user -// -//meta:operation POST /user/keys -func (s *UsersService) CreateKey(ctx context.Context, key *Key) (*Key, *Response, error) { - u := "user/keys" - - req, err := s.client.NewRequest("POST", u, key) - if err != nil { - return nil, nil, err - } - - k := new(Key) - resp, err := s.client.Do(ctx, req, k) - if err != nil { - return nil, resp, err - } - - return k, resp, nil -} - -// DeleteKey deletes a public key. -// -// GitHub API docs: https://docs.github.com/rest/users/keys#delete-a-public-ssh-key-for-the-authenticated-user -// -//meta:operation DELETE /user/keys/{key_id} -func (s *UsersService) DeleteKey(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("user/keys/%v", id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/users_packages.go b/vendor/github.com/google/go-github/v57/github/users_packages.go deleted file mode 100644 index 3ccf68a1..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_packages.go +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListPackages lists the packages for a user. Passing the empty string for "user" will -// list packages for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-packages-for-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-packages-for-the-authenticated-users-namespace -// -//meta:operation GET /user/packages -//meta:operation GET /users/{username}/packages -func (s *UsersService) ListPackages(ctx context.Context, user string, opts *PackageListOptions) ([]*Package, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages", user) - } else { - u = "user/packages" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var packages []*Package - resp, err := s.client.Do(ctx, req, &packages) - if err != nil { - return nil, resp, err - } - - return packages, resp, nil -} - -// GetPackage gets a package by name for a user. Passing the empty string for "user" will -// get the package for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-for-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-for-the-authenticated-user -// -//meta:operation GET /user/packages/{package_type}/{package_name} -//meta:operation GET /users/{username}/packages/{package_type}/{package_name} -func (s *UsersService) GetPackage(ctx context.Context, user, packageType, packageName string) (*Package, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v", user, packageType, packageName) - } else { - u = fmt.Sprintf("user/packages/%v/%v", packageType, packageName) - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pack *Package - resp, err := s.client.Do(ctx, req, &pack) - if err != nil { - return nil, resp, err - } - - return pack, resp, nil -} - -// DeletePackage deletes a package from a user. Passing the empty string for "user" will -// delete the package for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-a-package-for-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-a-package-for-the-authenticated-user -// -//meta:operation DELETE /user/packages/{package_type}/{package_name} -//meta:operation DELETE /users/{username}/packages/{package_type}/{package_name} -func (s *UsersService) DeletePackage(ctx context.Context, user, packageType, packageName string) (*Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v", user, packageType, packageName) - } else { - u = fmt.Sprintf("user/packages/%v/%v", packageType, packageName) - } - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RestorePackage restores a package to a user. Passing the empty string for "user" will -// restore the package for the authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-a-package-for-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-a-package-for-the-authenticated-user -// -//meta:operation POST /user/packages/{package_type}/{package_name}/restore -//meta:operation POST /users/{username}/packages/{package_type}/{package_name}/restore -func (s *UsersService) RestorePackage(ctx context.Context, user, packageType, packageName string) (*Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v/restore", user, packageType, packageName) - } else { - u = fmt.Sprintf("user/packages/%v/%v/restore", packageType, packageName) - } - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// PackageGetAllVersions gets all versions of a package for a user. Passing the empty string for "user" will -// get versions for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-package-versions-for-a-package-owned-by-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-package-versions-for-a-package-owned-by-the-authenticated-user -// -//meta:operation GET /user/packages/{package_type}/{package_name}/versions -//meta:operation GET /users/{username}/packages/{package_type}/{package_name}/versions -func (s *UsersService) PackageGetAllVersions(ctx context.Context, user, packageType, packageName string, opts *PackageListOptions) ([]*PackageVersion, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v/versions", user, packageType, packageName) - } else { - u = fmt.Sprintf("user/packages/%v/%v/versions", packageType, packageName) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var versions []*PackageVersion - resp, err := s.client.Do(ctx, req, &versions) - if err != nil { - return nil, resp, err - } - - return versions, resp, nil -} - -// PackageGetVersion gets a specific version of a package for a user. Passing the empty string for "user" will -// get the version for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-version-for-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-version-for-the-authenticated-user -// -//meta:operation GET /user/packages/{package_type}/{package_name}/versions/{package_version_id} -//meta:operation GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id} -func (s *UsersService) PackageGetVersion(ctx context.Context, user, packageType, packageName string, packageVersionID int64) (*PackageVersion, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v/versions/%v", user, packageType, packageName, packageVersionID) - } else { - u = fmt.Sprintf("user/packages/%v/%v/versions/%v", packageType, packageName, packageVersionID) - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var version *PackageVersion - resp, err := s.client.Do(ctx, req, &version) - if err != nil { - return nil, resp, err - } - - return version, resp, nil -} - -// PackageDeleteVersion deletes a package version for a user. 
Passing the empty string for "user" will -// delete the version for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-a-package-version-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-package-version-for-a-user -// -//meta:operation DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id} -//meta:operation DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id} -func (s *UsersService) PackageDeleteVersion(ctx context.Context, user, packageType, packageName string, packageVersionID int64) (*Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v/versions/%v", user, packageType, packageName, packageVersionID) - } else { - u = fmt.Sprintf("user/packages/%v/%v/versions/%v", packageType, packageName, packageVersionID) - } - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// PackageRestoreVersion restores a package version to a user. Passing the empty string for "user" will -// restore the version for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-a-package-version-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-package-version-for-a-user -// -//meta:operation POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore -//meta:operation POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore -func (s *UsersService) PackageRestoreVersion(ctx context.Context, user, packageType, packageName string, packageVersionID int64) (*Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v/versions/%v/restore", user, packageType, packageName, packageVersionID) - } else { - u = fmt.Sprintf("user/packages/%v/%v/versions/%v/restore", packageType, packageName, packageVersionID) - } - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/users_projects.go b/vendor/github.com/google/go-github/v57/github/users_projects.go deleted file mode 100644 index 0ab57e5c..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_projects.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2019 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListProjects lists the projects for the specified user. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#list-user-projects -// -//meta:operation GET /users/{username}/projects -func (s *UsersService) ListProjects(ctx context.Context, user string, opts *ProjectListOptions) ([]*Project, *Response, error) { - u := fmt.Sprintf("users/%v/projects", user) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeProjectsPreview) - - var projects []*Project - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// CreateUserProjectOptions specifies the parameters to the UsersService.CreateProject method. -type CreateUserProjectOptions struct { - // The name of the project. (Required.) - Name string `json:"name"` - // The description of the project. (Optional.) - Body *string `json:"body,omitempty"` -} - -// CreateProject creates a GitHub Project for the current user. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#create-a-user-project -// -//meta:operation POST /user/projects -func (s *UsersService) CreateProject(ctx context.Context, opts *CreateUserProjectOptions) (*Project, *Response, error) { - u := "user/projects" - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - project := &Project{} - resp, err := s.client.Do(ctx, req, project) - if err != nil { - return nil, resp, err - } - - return project, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/users_ssh_signing_keys.go b/vendor/github.com/google/go-github/v57/github/users_ssh_signing_keys.go deleted file mode 100644 index fcc930be..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_ssh_signing_keys.go +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// SSHSigningKey represents a public SSH key used to sign git commits. -type SSHSigningKey struct { - ID *int64 `json:"id,omitempty"` - Key *string `json:"key,omitempty"` - Title *string `json:"title,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` -} - -func (k SSHSigningKey) String() string { - return Stringify(k) -} - -// ListSSHSigningKeys lists the SSH signing keys for a user. Passing an empty -// username string will fetch SSH signing keys for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/ssh-signing-keys#list-ssh-signing-keys-for-a-user -// GitHub API docs: https://docs.github.com/rest/users/ssh-signing-keys#list-ssh-signing-keys-for-the-authenticated-user -// -//meta:operation GET /user/ssh_signing_keys -//meta:operation GET /users/{username}/ssh_signing_keys -func (s *UsersService) ListSSHSigningKeys(ctx context.Context, user string, opts *ListOptions) ([]*SSHSigningKey, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/ssh_signing_keys", user) - } else { - u = "user/ssh_signing_keys" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var keys []*SSHSigningKey - resp, err := s.client.Do(ctx, req, &keys) - if err != nil { - return nil, resp, err - } - - return keys, resp, nil -} - -// GetSSHSigningKey fetches a single SSH signing key for the authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/users/ssh-signing-keys#get-an-ssh-signing-key-for-the-authenticated-user -// -//meta:operation GET /user/ssh_signing_keys/{ssh_signing_key_id} -func (s *UsersService) GetSSHSigningKey(ctx context.Context, id int64) (*SSHSigningKey, *Response, error) { - u := fmt.Sprintf("user/ssh_signing_keys/%v", id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - key := new(SSHSigningKey) - resp, err := s.client.Do(ctx, req, key) - if err != nil { - return nil, resp, err - } - - return key, resp, nil -} - -// CreateSSHSigningKey adds a SSH signing key for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/ssh-signing-keys#create-a-ssh-signing-key-for-the-authenticated-user -// -//meta:operation POST /user/ssh_signing_keys -func (s *UsersService) CreateSSHSigningKey(ctx context.Context, key *Key) (*SSHSigningKey, *Response, error) { - u := "user/ssh_signing_keys" - - req, err := s.client.NewRequest("POST", u, key) - if err != nil { - return nil, nil, err - } - - k := new(SSHSigningKey) - resp, err := s.client.Do(ctx, req, k) - if err != nil { - return nil, resp, err - } - - return k, resp, nil -} - -// DeleteSSHSigningKey deletes a SSH signing key for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/ssh-signing-keys#delete-an-ssh-signing-key-for-the-authenticated-user -// -//meta:operation DELETE /user/ssh_signing_keys/{ssh_signing_key_id} -func (s *UsersService) DeleteSSHSigningKey(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("user/ssh_signing_keys/%v", id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/with_appengine.go b/vendor/github.com/google/go-github/v57/github/with_appengine.go deleted file mode 100644 index 9053ce10..00000000 --- a/vendor/github.com/google/go-github/v57/github/with_appengine.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build appengine -// +build appengine - -// This file provides glue for making github work on App Engine. - -package github - -import ( - "context" - "net/http" -) - -func withContext(ctx context.Context, req *http.Request) *http.Request { - // No-op because App Engine adds context to a request differently. - return req -} diff --git a/vendor/github.com/google/go-github/v57/github/without_appengine.go b/vendor/github.com/google/go-github/v57/github/without_appengine.go deleted file mode 100644 index 0024ae41..00000000 --- a/vendor/github.com/google/go-github/v57/github/without_appengine.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !appengine -// +build !appengine - -// This file provides glue for making github work without App Engine. 
- -package github - -import ( - "context" - "net/http" -) - -func withContext(ctx context.Context, req *http.Request) *http.Request { - return req.WithContext(ctx) -} diff --git a/vendor/modules.txt b/vendor/modules.txt index c19620d9..283eaf52 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -97,9 +97,6 @@ github.com/golang-jwt/jwt/v4 # github.com/golang-jwt/jwt/v5 v5.2.2 ## explicit; go 1.18 github.com/golang-jwt/jwt/v5 -# github.com/google/go-github/v57 v57.0.0 -## explicit; go 1.17 -github.com/google/go-github/v57/github # github.com/google/go-github/v71 v71.0.0 ## explicit; go 1.23.0 github.com/google/go-github/v71/github From 73340da32226ed5b8aa4234377caa3e90bb76d4e Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 28 Apr 2025 13:37:33 +0000 Subject: [PATCH 033/179] Add RateLimit() function to gh client Signed-off-by: Gabriel Adrian Samfira --- runner/common/util.go | 1 + runner/pool/stub_client.go | 4 ++ test/integration/organizations_test.go | 4 +- test/integration/repositories_test.go | 4 +- util/github/client.go | 70 +++++++++++++++++--------- 5 files changed, 54 insertions(+), 29 deletions(-) diff --git a/runner/common/util.go b/runner/common/util.go index 71b1849f..06751aa3 100644 --- a/runner/common/util.go +++ b/runner/common/util.go @@ -18,6 +18,7 @@ type GithubEntityOperations interface { ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) RemoveEntityRunner(ctx context.Context, runnerID int64) error + RateLimit(ctx context.Context) (*github.RateLimits, error) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (jitConfigMap map[string]string, runner *github.Runner, err error) diff --git a/runner/pool/stub_client.go b/runner/pool/stub_client.go index d01c834e..2518ce9c 100644 --- a/runner/pool/stub_client.go +++ b/runner/pool/stub_client.go @@ -64,3 +64,7 @@ func (s *stubGithubClient) GetEntity() params.GithubEntity { func (s *stubGithubClient) GithubBaseURL() *url.URL { return nil } + +func (s *stubGithubClient) RateLimit(_ context.Context) (*github.RateLimits, error) { + return nil, s.err +} diff --git a/test/integration/organizations_test.go b/test/integration/organizations_test.go index 0151d2fc..a96e625c 100644 --- a/test/integration/organizations_test.go +++ b/test/integration/organizations_test.go @@ -99,8 +99,8 @@ func getGhOrgWebhook(url, ghToken, orgName string) (*github.Hook, error) { } for _, hook := range ghOrgHooks { - hookURL, ok := hook.Config["url"].(string) - if ok && hookURL == url { + hookURL := hook.Config.GetURL() + if hookURL == url { return hook, nil } } diff --git a/test/integration/repositories_test.go b/test/integration/repositories_test.go index bcf948e5..7b396ffc 100644 --- a/test/integration/repositories_test.go +++ b/test/integration/repositories_test.go @@ -109,8 +109,8 @@ func getGhRepoWebhook(url, ghToken, orgName, repoName string) (*github.Hook, err } for _, hook := range ghRepoHooks { - hookURL, ok := hook.Config["url"].(string) - if ok && hookURL == url { + hookURL := hook.Config.GetURL() + if hookURL == url { return hook, nil } } diff --git a/util/github/client.go b/util/github/client.go index 1b899913..fcd661fa 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -37,6 +37,7 
@@ type githubClient struct { org *github.OrganizationsService repo *github.RepositoriesService enterprise *github.EnterpriseService + rateLimit *github.RateLimitService entity params.GithubEntity cli *github.Client @@ -226,6 +227,38 @@ func (g *githubClient) ListEntityRunnerApplicationDownloads(ctx context.Context) return ret, response, err } +func parseError(response *github.Response, err error) error { + switch response.StatusCode { + case http.StatusNotFound: + return runnerErrors.ErrNotFound + case http.StatusUnauthorized: + return runnerErrors.ErrUnauthorized + case http.StatusUnprocessableEntity: + return runnerErrors.ErrBadRequest + default: + if response.StatusCode >= 100 && response.StatusCode < 300 { + return nil + } + if err != nil { + errResp := &github.ErrorResponse{} + if errors.As(err, &errResp) && errResp.Response != nil { + switch errResp.Response.StatusCode { + case http.StatusNotFound: + return runnerErrors.ErrNotFound + case http.StatusUnauthorized: + return runnerErrors.ErrUnauthorized + case http.StatusUnprocessableEntity: + return runnerErrors.ErrBadRequest + default: + return err + } + } + return err + } + return errors.New("unknown error") + } +} + func (g *githubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) error { var response *github.Response var err error @@ -254,30 +287,8 @@ func (g *githubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) e return errors.New("invalid entity type") } - switch response.StatusCode { - case http.StatusNotFound: - return runnerErrors.NewNotFoundError("runner %d not found", runnerID) - case http.StatusUnauthorized: - return runnerErrors.ErrUnauthorized - case http.StatusUnprocessableEntity: - return runnerErrors.NewBadRequestError("cannot remove runner %d in its current state", runnerID) - default: - if err != nil { - errResp := &github.ErrorResponse{} - if errors.As(err, &errResp) && errResp.Response != nil { - switch errResp.Response.StatusCode { - case http.StatusNotFound: - return runnerErrors.NewNotFoundError("runner %d not found", runnerID) - case http.StatusUnauthorized: - return runnerErrors.ErrUnauthorized - case http.StatusUnprocessableEntity: - return runnerErrors.NewBadRequestError("cannot remove runner %d in its current state", runnerID) - default: - return errors.Wrap(err, "removing runner") - } - } - return errors.Wrap(err, "removing runner") - } + if err := parseError(response, err); err != nil { + return errors.Wrapf(err, "removing runner %d", runnerID) } return nil @@ -411,7 +422,7 @@ func (g *githubClient) GetEntityJITConfig(ctx context.Context, instance string, Labels: labels, // nolint:golangci-lint,godox // TODO(gabriel-samfira): Should we make this configurable? 
- WorkFolder: github.String("_work"), + WorkFolder: github.Ptr("_work"), } metrics.GithubOperationCount.WithLabelValues( @@ -463,6 +474,14 @@ func (g *githubClient) GetEntityJITConfig(ctx context.Context, instance string, return jitConfig, ret.Runner, nil } +func (g *githubClient) RateLimit(ctx context.Context) (*github.RateLimits, error) { + limits, resp, err := g.rateLimit.Get(ctx) + if err := parseError(resp, err); err != nil { + return nil, fmt.Errorf("getting rate limit: %w", err) + } + return limits, nil +} + func (g *githubClient) GetEntity() params.GithubEntity { return g.entity } @@ -494,6 +513,7 @@ func Client(ctx context.Context, entity params.GithubEntity) (common.GithubClien org: ghClient.Organizations, repo: ghClient.Repositories, enterprise: ghClient.Enterprise, + rateLimit: ghClient.RateLimit, cli: ghClient, entity: entity, } From 059734f0648bb9026c0ed9df577b7a5ef28f6335 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 1 May 2025 13:35:56 +0000 Subject: [PATCH 034/179] Add runner periodic cleanup check Adds a periodic cleanup function that cross checks runners between github, the provider and the GARM database. If an inconsistency is found, GARM will attempt to fix it. Signed-off-by: Gabriel Adrian Samfira --- database/watcher/filters.go | 3 +- locking/local_locker.go | 9 +- params/github.go | 21 ++ params/params.go | 15 ++ runner/common/util.go | 4 + runner/pool/pool.go | 12 +- util/github/client.go | 24 ++ util/github/scalesets/message_sessions.go | 13 +- workers/entity/worker_watcher.go | 40 ++-- workers/scaleset/controller.go | 108 +++++++++ workers/scaleset/scaleset.go | 262 +++++++++++++++++++++- workers/scaleset/util.go | 11 + 12 files changed, 479 insertions(+), 43 deletions(-) diff --git a/database/watcher/filters.go b/database/watcher/filters.go index 251a6bc6..dfcd54bb 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -255,9 +255,10 @@ func WithScaleSetInstanceFilter(scaleset params.ScaleSet) dbCommon.PayloadFilter } instance, ok := payload.Payload.(params.Instance) - if !ok { + if !ok || instance.ScaleSetID == 0 { return false } + return instance.ScaleSetID == scaleset.ID } } diff --git a/locking/local_locker.go b/locking/local_locker.go index 270138ef..aeae610f 100644 --- a/locking/local_locker.go +++ b/locking/local_locker.go @@ -33,19 +33,19 @@ var _ Locker = &keyMutex{} func (k *keyMutex) TryLock(key, identifier string) bool { mux, _ := k.muxes.LoadOrStore(key, &lockWithIdent{ - mux: sync.Mutex{}, - ident: identifier, + mux: sync.Mutex{}, }) keyMux := mux.(*lockWithIdent) + keyMux.ident = identifier return keyMux.mux.TryLock() } func (k *keyMutex) Lock(key, identifier string) { mux, _ := k.muxes.LoadOrStore(key, &lockWithIdent{ - mux: sync.Mutex{}, - ident: identifier, + mux: sync.Mutex{}, }) keyMux := mux.(*lockWithIdent) + keyMux.ident = identifier keyMux.mux.Lock() } @@ -60,6 +60,7 @@ func (k *keyMutex) Unlock(key string, remove bool) { } _, filename, line, _ := runtime.Caller(1) slog.Debug("unlocking", "key", key, "identifier", keyMux.ident, "caller", fmt.Sprintf("%s:%d", filename, line)) + keyMux.ident = "" keyMux.mux.Unlock() } diff --git a/params/github.go b/params/github.go index 4b37b83b..888288fc 100644 --- a/params/github.go +++ b/params/github.go @@ -421,6 +421,7 @@ func (r RunnerScaleSetMessage) GetJobsFromBody() ([]ScaleSetJobMessage, error) { type RunnerReference struct { ID int64 `json:"id"` Name string `json:"name"` + OS string `json:"os"` RunnerScaleSetID int `json:"runnerScaleSetId"` CreatedOn 
interface{} `json:"createdOn"` RunnerGroupID uint64 `json:"runnerGroupId"` @@ -431,9 +432,29 @@ type RunnerReference struct { Status interface{} `json:"status"` DisableUpdate bool `json:"disableUpdate"` ProvisioningState string `json:"provisioningState"` + Busy bool `json:"busy"` Labels []Label `json:"labels,omitempty"` } +func (r RunnerReference) GetStatus() RunnerStatus { + status, ok := r.Status.(string) + if !ok { + return RunnerUnknown + } + runnerStatus := RunnerStatus(status) + if !runnerStatus.IsValid() { + return RunnerUnknown + } + + if runnerStatus == RunnerOnline { + if r.Busy { + return RunnerActive + } + return RunnerIdle + } + return runnerStatus +} + type RunnerScaleSetJitRunnerConfig struct { Runner *RunnerReference `json:"runner"` EncodedJITConfig string `json:"encodedJITConfig"` diff --git a/params/params.go b/params/params.go index 69ec179c..5653e386 100644 --- a/params/params.go +++ b/params/params.go @@ -49,6 +49,18 @@ type ( ScaleSetMessageType string ) +func (s RunnerStatus) IsValid() bool { + switch s { + case RunnerIdle, RunnerPending, RunnerTerminated, + RunnerInstalling, RunnerFailed, + RunnerActive, RunnerOffline, + RunnerUnknown, RunnerOnline: + + return true + } + return false +} + const ( // PoolBalancerTypeRoundRobin will try to cycle through the pools of an entity // in a round robin fashion. For example, if a repository has multiple pools that @@ -117,6 +129,9 @@ const ( RunnerInstalling RunnerStatus = "installing" RunnerFailed RunnerStatus = "failed" RunnerActive RunnerStatus = "active" + RunnerOffline RunnerStatus = "offline" + RunnerOnline RunnerStatus = "online" + RunnerUnknown RunnerStatus = "unknown" ) const ( diff --git a/runner/common/util.go b/runner/common/util.go index 06751aa3..55e8fb00 100644 --- a/runner/common/util.go +++ b/runner/common/util.go @@ -28,6 +28,10 @@ type GithubEntityOperations interface { GithubBaseURL() *url.URL } +type RateLimitClient interface { + RateLimit(ctx context.Context) (*github.RateLimits, error) +} + // GithubClient that describes the minimum list of functions we need to interact with github. // Allows for easier testing. // diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 0173b3fc..1f2e96ec 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -439,10 +439,14 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []*github.Runne // github so we let them be for now. continue } + pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) + if err != nil { + return errors.Wrap(err, "fetching instance pool info") + } switch instance.RunnerStatus { case params.RunnerPending, params.RunnerInstalling: - if time.Since(instance.UpdatedAt).Minutes() < float64(instance.RunnerTimeout()) { + if time.Since(instance.UpdatedAt).Minutes() < float64(pool.RunnerTimeout()) { // runner is still installing. We give it a chance to finish. 
slog.DebugContext( r.ctx, "runner is still installing, give it a chance to finish", @@ -510,7 +514,11 @@ func (r *basePoolManager) reapTimedOutRunners(runners []*github.Runner) error { } defer locking.Unlock(instance.Name, false) - if time.Since(instance.UpdatedAt).Minutes() < float64(instance.RunnerTimeout()) { + pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) + if err != nil { + return errors.Wrap(err, "fetching instance pool info") + } + if time.Since(instance.UpdatedAt).Minutes() < float64(pool.RunnerTimeout()) { continue } diff --git a/util/github/client.go b/util/github/client.go index fcd661fa..77803f4f 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -490,6 +490,30 @@ func (g *githubClient) GithubBaseURL() *url.URL { return g.cli.BaseURL } +func NewRateLimitClient(ctx context.Context, credentials params.GithubCredentials) (common.RateLimitClient, error) { + httpClient, err := credentials.GetHTTPClient(ctx) + if err != nil { + return nil, errors.Wrap(err, "fetching http client") + } + + slog.DebugContext( + ctx, "creating rate limit client", + "base_url", credentials.APIBaseURL, + "upload_url", credentials.UploadBaseURL) + + ghClient, err := github.NewClient(httpClient).WithEnterpriseURLs( + credentials.APIBaseURL, credentials.UploadBaseURL) + if err != nil { + return nil, errors.Wrap(err, "fetching github client") + } + cli := &githubClient{ + rateLimit: ghClient.RateLimit, + cli: ghClient, + } + + return cli, nil +} + func Client(ctx context.Context, entity params.GithubEntity) (common.GithubClient, error) { // func GithubClient(ctx context.Context, entity params.GithubEntity) (common.GithubClient, error) { httpClient, err := entity.Credentials.GetHTTPClient(ctx) diff --git a/util/github/scalesets/message_sessions.go b/util/github/scalesets/message_sessions.go index efd684d4..79d5c26e 100644 --- a/util/github/scalesets/message_sessions.go +++ b/util/github/scalesets/message_sessions.go @@ -141,16 +141,17 @@ func (m *MessageSession) maybeRefreshToken(ctx context.Context) error { if m.session == nil { return fmt.Errorf("session is nil") } - // add some jitter - randInt, err := rand.Int(rand.Reader, big.NewInt(5000)) - if err != nil { - return fmt.Errorf("failed to get a random number") - } + expiresAt, err := m.session.ExiresAt() if err != nil { return fmt.Errorf("failed to get expires at: %w", err) } - expiresIn := time.Duration(randInt.Int64())*time.Millisecond + 10*time.Minute + // add some jitter (30 second interval) + randInt, err := rand.Int(rand.Reader, big.NewInt(30)) + if err != nil { + return fmt.Errorf("failed to get a random number") + } + expiresIn := time.Duration(randInt.Int64())*time.Second + 10*time.Minute slog.DebugContext(ctx, "checking if message session token needs refresh", "expires_at", expiresAt) if m.session.ExpiresIn(expiresIn) { if err := m.Refresh(ctx); err != nil { diff --git a/workers/entity/worker_watcher.go b/workers/entity/worker_watcher.go index 7e00112e..4ce83ddf 100644 --- a/workers/entity/worker_watcher.go +++ b/workers/entity/worker_watcher.go @@ -13,32 +13,28 @@ func (w *Worker) handleWorkerWatcherEvent(event dbCommon.ChangePayload) { entityType := dbCommon.DatabaseEntityType(w.Entity.EntityType) switch event.EntityType { case entityType: - entityGetter, ok := event.Payload.(params.EntityGetter) - if !ok { - slog.ErrorContext(w.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) - return - } - entity, err := entityGetter.GetEntity() - if err != nil { - 
slog.ErrorContext(w.ctx, "getting entity from repository", "entity_type", event.EntityType, "payload", event.Payload, "error", err) - return - } - w.handleEntityEventPayload(entity, event) + w.handleEntityEventPayload(event) return case dbCommon.GithubCredentialsEntityType: slog.DebugContext(w.ctx, "got github credentials payload event") - credentials, ok := event.Payload.(params.GithubCredentials) - if !ok { - slog.ErrorContext(w.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) - return - } - w.handleEntityCredentialsEventPayload(credentials, event) + w.handleEntityCredentialsEventPayload(event) default: slog.DebugContext(w.ctx, "invalid entity type; ignoring", "entity_type", event.EntityType) } } -func (w *Worker) handleEntityEventPayload(entity params.GithubEntity, event dbCommon.ChangePayload) { +func (w *Worker) handleEntityEventPayload(event dbCommon.ChangePayload) { + entityGetter, ok := event.Payload.(params.EntityGetter) + if !ok { + slog.ErrorContext(w.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + return + } + entity, err := entityGetter.GetEntity() + if err != nil { + slog.ErrorContext(w.ctx, "getting entity from repository", "entity_type", event.EntityType, "payload", event.Payload, "error", err) + return + } + switch event.Operation { case dbCommon.UpdateOperation: slog.DebugContext(w.ctx, "got update operation") @@ -57,7 +53,13 @@ func (w *Worker) handleEntityEventPayload(entity params.GithubEntity, event dbCo } } -func (w *Worker) handleEntityCredentialsEventPayload(credentials params.GithubCredentials, event dbCommon.ChangePayload) { +func (w *Worker) handleEntityCredentialsEventPayload(event dbCommon.ChangePayload) { + credentials, ok := event.Payload.(params.GithubCredentials) + if !ok { + slog.ErrorContext(w.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + return + } + switch event.Operation { case dbCommon.UpdateOperation: slog.DebugContext(w.ctx, "got delete operation") diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index 02528656..b6d61f54 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -8,6 +8,8 @@ import ( "sync" "time" + "golang.org/x/sync/errgroup" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" @@ -16,6 +18,14 @@ import ( "github.com/cloudbase/garm/runner/common" garmUtil "github.com/cloudbase/garm/util" "github.com/cloudbase/garm/util/github" + "github.com/cloudbase/garm/util/github/scalesets" +) + +const ( + // These are duplicated until we decide if we move the pool manager to the new + // worker flow. + poolIDLabelprefix = "runner-pool-id:" + controllerLabelPrefix = "runner-controller-id:" ) func NewController(ctx context.Context, store dbCommon.Store, entity params.GithubEntity, providers map[string]common.Provider) (*Controller, error) { @@ -176,11 +186,88 @@ func (c *Controller) updateTools() error { return nil } +// consolidateRunnerState will list all runners on GitHub for this entity, sort by +// pool or scale set and pass those runners to the appropriate worker. The worker will +// then have the responsibility to cross check the runners from github with what it +// knows should be true from the database. Any inconsistency needs to be handled. 
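+// Runners are bucketed by their scale set ID when one is set, or by the pool ID
+// label otherwise, so each batch can be passed to the worker that owns it.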
+// If we have an offline runner in github but no database entry for it, we remove the +// runner from github. If we have a runner that is active in the provider but does not +// exist in github, we remove it from the provider and the database. +func (c *Controller) consolidateRunnerState() error { + scaleSetCli, err := scalesets.NewClient(c.ghCli) + if err != nil { + return fmt.Errorf("creating scaleset client: %w", err) + } + // Client is scoped to the current entity. Only runners in a repo/org/enterprise + // will be listed. + runners, err := scaleSetCli.ListAllRunners(c.ctx) + if err != nil { + return fmt.Errorf("listing runners: %w", err) + } + + byPoolID := make(map[string][]params.RunnerReference) + byScaleSetID := make(map[int][]params.RunnerReference) + for _, runner := range runners.RunnerReferences { + if runner.RunnerScaleSetID != 0 { + byScaleSetID[runner.RunnerScaleSetID] = append(byScaleSetID[runner.RunnerScaleSetID], runner) + } else { + poolID := poolIDFromLabels(runner) + if poolID == "" { + continue + } + byPoolID[poolID] = append(byPoolID[poolID], runner) + } + } + + g, ctx := errgroup.WithContext(c.ctx) + for _, scaleSet := range c.ScaleSets { + runners := byScaleSetID[scaleSet.scaleSet.ScaleSetID] + g.Go(func() error { + slog.DebugContext(ctx, "consolidating runners for scale set", "scale_set_id", scaleSet.scaleSet.ScaleSetID, "runners", runners) + if err := scaleSet.worker.consolidateRunnerState(runners); err != nil { + return fmt.Errorf("consolidating runners for scale set %d: %w", scaleSet.scaleSet.ScaleSetID, err) + } + return nil + }) + } + if err := c.waitForErrorGroupOrContextCancelled(g); err != nil { + return fmt.Errorf("waiting for error group: %w", err) + } + return nil +} + +func (c *Controller) waitForErrorGroupOrContextCancelled(g *errgroup.Group) error { + if g == nil { + return nil + } + + done := make(chan error, 1) + go func() { + waitErr := g.Wait() + done <- waitErr + }() + + select { + case err := <-done: + return err + case <-c.ctx.Done(): + return c.ctx.Err() + case <-c.quit: + return nil + } +} + func (c *Controller) loop() { defer c.Stop() updateToolsTicker := time.NewTicker(common.PoolToolUpdateInterval) + defer updateToolsTicker.Stop() + + consilidateTicker := time.NewTicker(common.PoolReapTimeoutInterval) + defer consilidateTicker.Stop() + initialToolUpdate := make(chan struct{}, 1) defer close(initialToolUpdate) + go func() { slog.InfoContext(c.ctx, "running initial tool update") if err := c.updateTools(); err != nil { @@ -206,8 +293,29 @@ func (c *Controller) loop() { slog.InfoContext(c.ctx, "update tools ticker closed") return } + validCreds := c.forgeCredsAreValid if err := c.updateTools(); err != nil { + if err := c.store.AddEntityEvent(c.ctx, c.Entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update tools: %q", err.Error()), 30); err != nil { + slog.With(slog.Any("error", err)).Error("failed to add entity event") + } slog.With(slog.Any("error", err)).Error("failed to update tools") + continue + } + if validCreds != c.forgeCredsAreValid && c.forgeCredsAreValid { + if err := c.store.AddEntityEvent(c.ctx, c.Entity, params.StatusEvent, params.EventInfo, "tools updated successfully", 30); err != nil { + slog.With(slog.Any("error", err)).Error("failed to add entity event") + } + } + case _, ok := <-consilidateTicker.C: + if !ok { + slog.InfoContext(c.ctx, "consolidate ticker closed") + return + } + if err := c.consolidateRunnerState(); err != nil { + if err := c.store.AddEntityEvent(c.ctx, c.Entity, params.StatusEvent, 
params.EventError, fmt.Sprintf("failed to consolidate runner state: %q", err.Error()), 30); err != nil { + slog.With(slog.Any("error", err)).Error("failed to add entity event") + } + slog.With(slog.Any("error", err)).Error("failed to consolidate runner state") } case <-c.quit: return diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index a4b690ef..f740f051 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -63,14 +63,6 @@ type Worker struct { quit chan struct{} } -func (w *Worker) RunnersAndStatuses() map[string]string { - runners := make(map[string]string) - for _, runner := range w.runners { - runners[runner.Name] = string(runner.Status) - } - return runners -} - func (w *Worker) Stop() error { slog.DebugContext(w.ctx, "stopping scale set worker", "scale_set", w.consumerID) w.mux.Lock() @@ -239,6 +231,240 @@ func (w *Worker) Start() (err error) { return nil } +func (w *Worker) runnerByName() map[string]params.Instance { + runners := make(map[string]params.Instance) + for _, runner := range w.runners { + runners[runner.Name] = runner + } + return runners +} + +func (w *Worker) setRunnerDBStatus(runner string, status commonParams.InstanceStatus) (params.Instance, error) { + updateParams := params.UpdateInstanceParams{ + Status: status, + } + newDbInstance, err := w.store.UpdateInstance(w.ctx, runner, updateParams) + if err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + return params.Instance{}, fmt.Errorf("updating runner %s: %w", runner, err) + } + } + return newDbInstance, nil +} + +func (w *Worker) removeRunnerFromGithubAndSetPendingDelete(runnerName string, agentID int64) error { + if err := w.scaleSetCli.RemoveRunner(w.ctx, agentID); err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + return fmt.Errorf("removing runner %s: %w", runnerName, err) + } + } + instance, err := w.setRunnerDBStatus(runnerName, commonParams.InstancePendingDelete) + if err != nil { + return fmt.Errorf("updating runner %s: %w", instance.Name, err) + } + w.runners[instance.ID] = instance + return nil +} + +func (w *Worker) reapTimedOutRunners(runners map[string]params.RunnerReference) (func(), error) { + lockNames := []string{} + + unlockFn := func() { + for _, name := range lockNames { + slog.DebugContext(w.ctx, "unlockFn unlocking runner", "runner_name", name) + locking.Unlock(name, false) + } + } + + for _, runner := range w.runners { + if time.Since(runner.UpdatedAt).Minutes() < float64(w.scaleSet.RunnerBootstrapTimeout) { + continue + } + switch runner.Status { + case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete, + commonParams.InstanceDeleting, commonParams.InstanceDeleted: + continue + } + + if runner.RunnerStatus != params.RunnerPending && runner.RunnerStatus != params.RunnerInstalling { + slog.DebugContext(w.ctx, "runner is not pending or installing; skipping", "runner_name", runner.Name) + continue + } + if ghRunner, ok := runners[runner.Name]; !ok || ghRunner.GetStatus() == params.RunnerOffline { + if ok, err := locking.TryLock(runner.Name, w.consumerID); err != nil || !ok { + slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) + continue + } + lockNames = append(lockNames, runner.Name) + + slog.InfoContext( + w.ctx, "reaping timed-out/failed runner", + "runner_name", runner.Name) + + if err := w.removeRunnerFromGithubAndSetPendingDelete(runner.Name, runner.AgentID); err != nil { + slog.ErrorContext(w.ctx, "error removing runner", "runner_name", runner.Name, 
"error", err) + unlockFn() + return nil, fmt.Errorf("removing runner %s: %w", runner.Name, err) + } + } + } + return unlockFn, nil +} + +func (w *Worker) consolidateRunnerState(runners []params.RunnerReference) error { + w.mux.Lock() + defer w.mux.Unlock() + + ghRunnersByName := make(map[string]params.RunnerReference) + for _, runner := range runners { + ghRunnersByName[runner.Name] = runner + } + + dbRunnersByName := w.runnerByName() + // Cross check what exists in github with what we have in the database. + for name, runner := range ghRunnersByName { + status := runner.GetStatus() + if _, ok := dbRunnersByName[name]; !ok { + // runner appears to be active. Is it not managed by GARM? + if status != params.RunnerIdle && status != params.RunnerActive { + slog.InfoContext(w.ctx, "runner does not exist in GARM; removing from github", "runner_name", name) + if err := w.scaleSetCli.RemoveRunner(w.ctx, runner.ID); err != nil { + if errors.Is(err, runnerErrors.ErrNotFound) { + continue + } + slog.ErrorContext(w.ctx, "error removing runner", "runner_name", runner.Name, "error", err) + } + } + continue + } + } + + unlockFn, err := w.reapTimedOutRunners(ghRunnersByName) + if err != nil { + return fmt.Errorf("reaping timed out runners: %w", err) + } + defer unlockFn() + + // refresh the map. It may have been mutated above. + dbRunnersByName = w.runnerByName() + // Cross check what exists in the database with what we have in github. + for name, runner := range dbRunnersByName { + // in the case of scale sets, JIT configs re used. There is no situation + // in which we create a runner in the DB and one does not exist in github. + // We can safely assume that if the runner is not in github anymore, it can + // be removed from the provider and the DB. + switch runner.Status { + case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete, + commonParams.InstanceDeleting, commonParams.InstanceDeleted: + continue + } + + if _, ok := ghRunnersByName[name]; !ok { + if ok, err := locking.TryLock(name, w.consumerID); err != nil || !ok { + slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", name) + continue + } + // unlock the runner only after this function returns. This function also cross + // checks between the provider and the database, and removes left over runners. + // If we unlock early, the provider worker will attempt to remove runners that + // we set in pending_delete. This function holds the mutex, so we won't see those + // changes until we return. So we hold the instance lock here until we are done. + // That way, even if the provider sees the pending_delete status, it won't act on + // it until it manages to lock the instance. + defer locking.Unlock(name, false) + + slog.InfoContext(w.ctx, "runner does not exist in github; removing from provider", "runner_name", name) + instance, err := w.setRunnerDBStatus(runner.Name, commonParams.InstancePendingDelete) + if err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + return fmt.Errorf("updating runner %s: %w", instance.Name, err) + } + } + // We will get an update event anyway from the watcher, but updating the runner + // here, will prevent race conditions if some other event is already in the queue + // which involves this runner. For the duration of the lifetime of this function, we + // hold the lock, so no race condition can occur. + w.runners[runner.ID] = instance + } + } + + // Cross check what exists in the provider with the DB. 
+ pseudoPoolID, err := w.pseudoPoolID() + if err != nil { + return fmt.Errorf("getting pseudo pool ID: %w", err) + } + listParams := common.ListInstancesParams{ + ListInstancesV011: common.ListInstancesV011Params{ + ProviderBaseParams: common.ProviderBaseParams{ + ControllerInfo: w.controllerInfo, + }, + }, + } + + providerRunners, err := w.provider.ListInstances(w.ctx, pseudoPoolID, listParams) + if err != nil { + return fmt.Errorf("listing instances: %w", err) + } + + providerRunnersByName := make(map[string]commonParams.ProviderInstance) + for _, runner := range providerRunners { + providerRunnersByName[runner.Name] = runner + } + + deleteInstanceParams := common.DeleteInstanceParams{ + DeleteInstanceV011: common.DeleteInstanceV011Params{ + ProviderBaseParams: common.ProviderBaseParams{ + ControllerInfo: w.controllerInfo, + }, + }, + } + + // refresh the map. It may have been mutated above. + dbRunnersByName = w.runnerByName() + for _, runner := range providerRunners { + if _, ok := dbRunnersByName[runner.Name]; !ok { + slog.InfoContext(w.ctx, "runner does not exist in database; removing from provider", "runner_name", runner.Name) + // There is no situation in which the runner will disappear from the provider + // after it was removed from the database. The provider worker will remove the + // instance from the provider nd mark the instance as deleted in the database. + // It is the responsibility of the scaleset worker to then clean up the runners + // in the deleted state. + // That means that if we have a runner in the provider but not the DB, it is most + // likely an inconsistency. + if err := w.provider.DeleteInstance(w.ctx, runner.Name, deleteInstanceParams); err != nil { + slog.ErrorContext(w.ctx, "error removing instance", "instance_name", runner.Name, "error", err) + } + continue + } + } + + for _, runner := range dbRunnersByName { + switch runner.Status { + case commonParams.InstancePendingDelete, commonParams.InstancePendingForceDelete, + commonParams.InstanceDeleting, commonParams.InstanceDeleted: + // This instance is already being deleted. + continue + } + + locked, err := locking.TryLock(runner.Name, w.consumerID) + if err != nil || !locked { + slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) + continue + } + defer locking.Unlock(runner.Name, false) + + if _, ok := providerRunnersByName[runner.Name]; !ok { + // The runner is not in the provider anymore. Remove it from the DB. + slog.InfoContext(w.ctx, "runner does not exist in provider; removing from database", "runner_name", runner.Name) + if err := w.removeRunnerFromGithubAndSetPendingDelete(runner.Name, runner.AgentID); err != nil { + return fmt.Errorf("removing runner %s: %w", runner.Name, err) + } + } + } + + return nil +} + func (w *Worker) SetGithubClient(client common.GithubClient) error { w.mux.Lock() defer w.mux.Unlock() @@ -256,6 +482,15 @@ func (w *Worker) SetGithubClient(client common.GithubClient) error { return nil } +func (w *Worker) pseudoPoolID() (string, error) { + // This is temporary. We need to extend providers to know about scale sets. 
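+	// Providers only know about pools for now, so we synthesize a pool-like ID from
+	// the scale set name and the owning entity ID and use it wherever the provider
+	// API expects a pool ID.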
+ entity, err := w.scaleSet.GetEntity() + if err != nil { + return "", fmt.Errorf("getting entity: %w", err) + } + return fmt.Sprintf("%s-%s", w.scaleSet.Name, entity.ID), nil +} + func (w *Worker) handleScaleSetEvent(event dbCommon.ChangePayload) { scaleSet, ok := event.Payload.(params.ScaleSet) if !ok { @@ -418,7 +653,10 @@ func (w *Worker) keepListenerAlive() { w.mux.Unlock() continue } - // noop if already started + // noop if already started. If the scaleset was just enabled, we need to + // start the listener here, or the <-w.listener.Wait() channel receive bellow + // will block forever, even if we start the listener, as a nil channel will + // block forever. w.listener.Start() w.mux.Unlock() @@ -513,13 +751,15 @@ func (w *Worker) handleScaleUp(target, current uint) { AgentID: jitConfig.Runner.ID, } - if _, err := w.store.CreateScaleSetInstance(w.ctx, w.scaleSet.ID, runnerParams); err != nil { + dbInstance, err := w.store.CreateScaleSetInstance(w.ctx, w.scaleSet.ID, runnerParams) + if err != nil { slog.ErrorContext(w.ctx, "error creating instance", "error", err) if err := w.scaleSetCli.RemoveRunner(w.ctx, jitConfig.Runner.ID); err != nil { slog.ErrorContext(w.ctx, "error deleting runner", "error", err) } continue } + w.runners[dbInstance.ID] = dbInstance _, err = w.scaleSetCli.GetRunner(w.ctx, jitConfig.Runner.ID) if err != nil { @@ -636,7 +876,7 @@ func (w *Worker) handleAutoScale() { lastMsg := "" lastMsgDebugLog := func(msg string, targetRunners, currentRunners uint) { if lastMsg != msg { - slog.DebugContext(w.ctx, msg, "current_runners", currentRunners, "target_runners", targetRunners, "current_runners", w.RunnersAndStatuses()) + slog.DebugContext(w.ctx, msg, "current_runners", currentRunners, "target_runners", targetRunners) lastMsg = msg } } diff --git a/workers/scaleset/util.go b/workers/scaleset/util.go index a594f88c..aa3156c7 100644 --- a/workers/scaleset/util.go +++ b/workers/scaleset/util.go @@ -1,6 +1,8 @@ package scaleset import ( + "strings" + dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" @@ -26,3 +28,12 @@ func composeControllerWatcherFilters(entity params.GithubEntity) dbCommon.Payloa ), ) } + +func poolIDFromLabels(runner params.RunnerReference) string { + for _, lbl := range runner.Labels { + if strings.HasPrefix(lbl.Name, poolIDLabelprefix) { + return lbl.Name[len(poolIDLabelprefix):] + } + } + return "" +} From 92d04c8e8d70d912dcaa720e129b290d7acd654c Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 2 May 2025 09:32:24 +0000 Subject: [PATCH 035/179] Add tests for cache and locking Signed-off-by: Gabriel Adrian Samfira --- cache/cache_test.go | 87 ++++++++++ internal/testing/testing.go | 4 + locking/interface.go | 1 + locking/local_backoff_locker.go | 63 ++++++++ locking/local_backoff_locker_test.go | 75 +++++++++ locking/local_locker.go | 73 ++------- locking/local_locker_test.go | 228 +++++++++++++++++++++++++++ locking/locking.go | 58 ++++--- runner/pool/pool.go | 40 ++--- workers/scaleset/scaleset.go | 16 +- 10 files changed, 533 insertions(+), 112 deletions(-) create mode 100644 cache/cache_test.go create mode 100644 locking/local_backoff_locker.go create mode 100644 locking/local_backoff_locker_test.go create mode 100644 locking/local_locker_test.go diff --git a/cache/cache_test.go b/cache/cache_test.go new file mode 100644 index 00000000..a2155e97 --- /dev/null +++ b/cache/cache_test.go @@ -0,0 +1,87 @@ +package cache + +import ( + "testing" + "time" + + 
"github.com/stretchr/testify/suite" + + commonParams "github.com/cloudbase/garm-provider-common/params" + garmTesting "github.com/cloudbase/garm/internal/testing" + "github.com/cloudbase/garm/params" +) + +type CacheTestSuite struct { + suite.Suite + entity params.GithubEntity +} + +func (c *CacheTestSuite) SetupTest() { + c.entity = params.GithubEntity{ + ID: "1234", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } +} + +func (c *CacheTestSuite) TearDownTest() { + // Clean up the cache after each test + githubToolsCache.mux.Lock() + defer githubToolsCache.mux.Unlock() + githubToolsCache.entities = make(map[string]GithubEntityTools) +} + +func (c *CacheTestSuite) TestCacheIsInitialized() { + c.Require().NotNil(githubToolsCache) +} + +func (c *CacheTestSuite) TestSetCacheWorks() { + tools := []commonParams.RunnerApplicationDownload{ + { + DownloadURL: garmTesting.Ptr("https://example.com"), + }, + } + + c.Require().NotNil(githubToolsCache) + c.Require().Len(githubToolsCache.entities, 0) + SetGithubToolsCache(c.entity, tools) + c.Require().Len(githubToolsCache.entities, 1) + cachedTools, ok := GetGithubToolsCache(c.entity) + c.Require().True(ok) + c.Require().Len(cachedTools, 1) + c.Require().Equal(tools[0].GetDownloadURL(), cachedTools[0].GetDownloadURL()) +} + +func (c *CacheTestSuite) TestTimedOutToolsCache() { + tools := []commonParams.RunnerApplicationDownload{ + { + DownloadURL: garmTesting.Ptr("https://example.com"), + }, + } + + c.Require().NotNil(githubToolsCache) + c.Require().Len(githubToolsCache.entities, 0) + SetGithubToolsCache(c.entity, tools) + c.Require().Len(githubToolsCache.entities, 1) + entity := githubToolsCache.entities[c.entity.String()] + entity.updatedAt = entity.updatedAt.Add(-2 * time.Hour) + githubToolsCache.entities[c.entity.String()] = entity + + cachedTools, ok := GetGithubToolsCache(c.entity) + c.Require().False(ok) + c.Require().Nil(cachedTools) +} + +func (c *CacheTestSuite) TestGetInexistentCache() { + c.Require().NotNil(githubToolsCache) + c.Require().Len(githubToolsCache.entities, 0) + cachedTools, ok := GetGithubToolsCache(c.entity) + c.Require().False(ok) + c.Require().Nil(cachedTools) +} + +func TestCacheTestSuite(t *testing.T) { + t.Parallel() + suite.Run(t, new(CacheTestSuite)) +} diff --git a/internal/testing/testing.go b/internal/testing/testing.go index 1b937b6c..b3d049fd 100644 --- a/internal/testing/testing.go +++ b/internal/testing/testing.go @@ -153,6 +153,10 @@ type NameAndIDDBEntity interface { GetName() string } +func Ptr[T any](v T) *T { + return &v +} + func EqualDBEntityByName[T NameAndIDDBEntity](t *testing.T, expected, actual []T) { require.Equal(t, len(expected), len(actual)) diff --git a/locking/interface.go b/locking/interface.go index 7750167b..2b6ffb47 100644 --- a/locking/interface.go +++ b/locking/interface.go @@ -5,6 +5,7 @@ import "time" type Locker interface { TryLock(key, identifier string) bool Lock(key, identifier string) + LockedBy(key string) (string, bool) Unlock(key string, remove bool) Delete(key string) } diff --git a/locking/local_backoff_locker.go b/locking/local_backoff_locker.go new file mode 100644 index 00000000..9c2fecb1 --- /dev/null +++ b/locking/local_backoff_locker.go @@ -0,0 +1,63 @@ +package locking + +import ( + "context" + "sync" + "time" + + "github.com/cloudbase/garm/runner/common" +) + +func NewInstanceDeleteBackoff(_ context.Context) (InstanceDeleteBackoff, error) { + return &instanceDeleteBackoff{}, nil +} + +type instanceBackOff struct { + backoffSeconds 
float64 + lastRecordedFailureTime time.Time + mux sync.Mutex +} + +type instanceDeleteBackoff struct { + muxes sync.Map +} + +func (i *instanceDeleteBackoff) ShouldProcess(key string) (bool, time.Time) { + backoff, loaded := i.muxes.LoadOrStore(key, &instanceBackOff{}) + if !loaded { + return true, time.Time{} + } + + ib := backoff.(*instanceBackOff) + ib.mux.Lock() + defer ib.mux.Unlock() + + if ib.lastRecordedFailureTime.IsZero() || ib.backoffSeconds == 0 { + return true, time.Time{} + } + + now := time.Now().UTC() + deadline := ib.lastRecordedFailureTime.Add(time.Duration(ib.backoffSeconds) * time.Second) + return now.After(deadline), deadline +} + +func (i *instanceDeleteBackoff) Delete(key string) { + i.muxes.Delete(key) +} + +func (i *instanceDeleteBackoff) RecordFailure(key string) { + backoff, _ := i.muxes.LoadOrStore(key, &instanceBackOff{}) + ib := backoff.(*instanceBackOff) + ib.mux.Lock() + defer ib.mux.Unlock() + + ib.lastRecordedFailureTime = time.Now().UTC() + if ib.backoffSeconds == 0 { + ib.backoffSeconds = common.PoolConsilitationInterval.Seconds() + } else { + // Geometric progression of 1.5 + newBackoff := ib.backoffSeconds * 1.5 + // Cap the backoff to 20 minutes + ib.backoffSeconds = min(newBackoff, maxBackoffSeconds) + } +} diff --git a/locking/local_backoff_locker_test.go b/locking/local_backoff_locker_test.go new file mode 100644 index 00000000..a9a986e2 --- /dev/null +++ b/locking/local_backoff_locker_test.go @@ -0,0 +1,75 @@ +package locking + +import ( + "testing" + "time" + + "github.com/stretchr/testify/suite" +) + +type LockerBackoffTestSuite struct { + suite.Suite + + locker *instanceDeleteBackoff +} + +func (l *LockerBackoffTestSuite) SetupTest() { + l.locker = &instanceDeleteBackoff{} +} + +func (l *LockerBackoffTestSuite) TearDownTest() { + l.locker = nil +} + +func (l *LockerBackoffTestSuite) TestShouldProcess() { + shouldProcess, deadline := l.locker.ShouldProcess("test") + l.Require().True(shouldProcess) + l.Require().Equal(time.Time{}, deadline) + + l.locker.muxes.Store("test", &instanceBackOff{ + backoffSeconds: 0, + lastRecordedFailureTime: time.Time{}, + }) + + shouldProcess, deadline = l.locker.ShouldProcess("test") + l.Require().True(shouldProcess) + l.Require().Equal(time.Time{}, deadline) + + l.locker.muxes.Store("test", &instanceBackOff{ + backoffSeconds: 100, + lastRecordedFailureTime: time.Now().UTC(), + }) + + shouldProcess, deadline = l.locker.ShouldProcess("test") + l.Require().False(shouldProcess) + l.Require().NotEqual(time.Time{}, deadline) +} + +func (l *LockerBackoffTestSuite) TestRecordFailure() { + l.locker.RecordFailure("test") + + mux, ok := l.locker.muxes.Load("test") + l.Require().True(ok) + ib := mux.(*instanceBackOff) + l.Require().NotNil(ib) + l.Require().NotEqual(time.Time{}, ib.lastRecordedFailureTime) + l.Require().Equal(float64(5), ib.backoffSeconds) + + l.locker.RecordFailure("test") + mux, ok = l.locker.muxes.Load("test") + l.Require().True(ok) + ib = mux.(*instanceBackOff) + l.Require().NotNil(ib) + l.Require().NotEqual(time.Time{}, ib.lastRecordedFailureTime) + l.Require().Equal(7.5, ib.backoffSeconds) + + l.locker.Delete("test") + mux, ok = l.locker.muxes.Load("test") + l.Require().False(ok) + l.Require().Nil(mux) +} + +func TestBackoffTestSuite(t *testing.T) { + t.Parallel() + suite.Run(t, new(LockerBackoffTestSuite)) +} diff --git a/locking/local_locker.go b/locking/local_locker.go index aeae610f..fc5ea847 100644 --- a/locking/local_locker.go +++ b/locking/local_locker.go @@ -2,14 +2,9 @@ package locking import ( 
"context" - "fmt" - "log/slog" - "runtime" "sync" - "time" dbCommon "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/runner/common" ) const ( @@ -36,8 +31,11 @@ func (k *keyMutex) TryLock(key, identifier string) bool { mux: sync.Mutex{}, }) keyMux := mux.(*lockWithIdent) - keyMux.ident = identifier - return keyMux.mux.TryLock() + locked := keyMux.mux.TryLock() + if locked { + keyMux.ident = identifier + } + return locked } func (k *keyMutex) Lock(key, identifier string) { @@ -58,8 +56,6 @@ func (k *keyMutex) Unlock(key string, remove bool) { if remove { k.Delete(key) } - _, filename, line, _ := runtime.Caller(1) - slog.Debug("unlocking", "key", key, "identifier", keyMux.ident, "caller", fmt.Sprintf("%s:%d", filename, line)) keyMux.ident = "" keyMux.mux.Unlock() } @@ -68,56 +64,15 @@ func (k *keyMutex) Delete(key string) { k.muxes.Delete(key) } -func NewInstanceDeleteBackoff(_ context.Context) (InstanceDeleteBackoff, error) { - return &instanceDeleteBackoff{}, nil -} - -type instanceBackOff struct { - backoffSeconds float64 - lastRecordedFailureTime time.Time - mux sync.Mutex -} - -type instanceDeleteBackoff struct { - muxes sync.Map -} - -func (i *instanceDeleteBackoff) ShouldProcess(key string) (bool, time.Time) { - backoff, loaded := i.muxes.LoadOrStore(key, &instanceBackOff{}) - if !loaded { - return true, time.Time{} +func (k *keyMutex) LockedBy(key string) (string, bool) { + mux, ok := k.muxes.Load(key) + if !ok { + return "", false + } + keyMux := mux.(*lockWithIdent) + if keyMux.ident == "" { + return "", false } - ib := backoff.(*instanceBackOff) - ib.mux.Lock() - defer ib.mux.Unlock() - - if ib.lastRecordedFailureTime.IsZero() || ib.backoffSeconds == 0 { - return true, time.Time{} - } - - now := time.Now().UTC() - deadline := ib.lastRecordedFailureTime.Add(time.Duration(ib.backoffSeconds) * time.Second) - return deadline.After(now), deadline -} - -func (i *instanceDeleteBackoff) Delete(key string) { - i.muxes.Delete(key) -} - -func (i *instanceDeleteBackoff) RecordFailure(key string) { - backoff, _ := i.muxes.LoadOrStore(key, &instanceBackOff{}) - ib := backoff.(*instanceBackOff) - ib.mux.Lock() - defer ib.mux.Unlock() - - ib.lastRecordedFailureTime = time.Now().UTC() - if ib.backoffSeconds == 0 { - ib.backoffSeconds = common.PoolConsilitationInterval.Seconds() - } else { - // Geometric progression of 1.5 - newBackoff := ib.backoffSeconds * 1.5 - // Cap the backoff to 20 minutes - ib.backoffSeconds = min(newBackoff, maxBackoffSeconds) - } + return keyMux.ident, true } diff --git a/locking/local_locker_test.go b/locking/local_locker_test.go new file mode 100644 index 00000000..6decf512 --- /dev/null +++ b/locking/local_locker_test.go @@ -0,0 +1,228 @@ +package locking + +import ( + "testing" + + "github.com/stretchr/testify/suite" +) + +type LockerTestSuite struct { + suite.Suite + + mux *keyMutex +} + +func (l *LockerTestSuite) SetupTest() { + l.mux = &keyMutex{} + err := RegisterLocker(l.mux) + l.Require().NoError(err, "should register the locker") +} + +func (l *LockerTestSuite) TearDownTest() { + l.mux = nil + locker = nil +} + +func (l *LockerTestSuite) TestLocalLockerLockUnlock() { + l.mux.Lock("test", "test-identifier") + mux, ok := l.mux.muxes.Load("test") + l.Require().True(ok) + keyMux := mux.(*lockWithIdent) + l.Require().Equal("test-identifier", keyMux.ident) + l.mux.Unlock("test", true) + mux, ok = l.mux.muxes.Load("test") + l.Require().False(ok) + l.Require().Nil(mux) + l.mux.Unlock("test", false) +} + +func (l *LockerTestSuite) 
TestLocalLockerTryLock() { + locked := l.mux.TryLock("test", "test-identifier") + l.Require().True(locked) + mux, ok := l.mux.muxes.Load("test") + l.Require().True(ok) + keyMux := mux.(*lockWithIdent) + l.Require().Equal("test-identifier", keyMux.ident) + + locked = l.mux.TryLock("test", "another-identifier2") + l.Require().False(locked) + mux, ok = l.mux.muxes.Load("test") + l.Require().True(ok) + keyMux = mux.(*lockWithIdent) + l.Require().Equal("test-identifier", keyMux.ident) + + l.mux.Unlock("test", true) + locked = l.mux.TryLock("test", "another-identifier2") + l.Require().True(locked) + mux, ok = l.mux.muxes.Load("test") + l.Require().True(ok) + keyMux = mux.(*lockWithIdent) + l.Require().Equal("another-identifier2", keyMux.ident) + l.mux.Unlock("test", true) +} + +func (l *LockerTestSuite) TestLocalLockertLockedBy() { + l.mux.Lock("test", "test-identifier") + identifier, ok := l.mux.LockedBy("test") + l.Require().True(ok) + l.Require().Equal("test-identifier", identifier) + l.mux.Unlock("test", true) + identifier, ok = l.mux.LockedBy("test") + l.Require().False(ok) + l.Require().Equal("", identifier) + + l.mux.Lock("test", "test-identifier") + identifier, ok = l.mux.LockedBy("test") + l.Require().True(ok) + l.Require().Equal("test-identifier", identifier) + l.mux.Unlock("test", false) + identifier, ok = l.mux.LockedBy("test") + l.Require().False(ok) + l.Require().Equal("", identifier) +} + +func (l *LockerTestSuite) TestLockerPanicsIfNotInitialized() { + locker = nil + l.Require().Panics( + func() { + Lock("test", "test-identifier") + }, + "Lock should panic if locker is not initialized", + ) + + l.Require().Panics( + func() { + TryLock("test", "test-identifier") + }, + "TryLock should panic if locker is not initialized", + ) + + l.Require().Panics( + func() { + Unlock("test", false) + }, + "Unlock should panic if locker is not initialized", + ) + + l.Require().Panics( + func() { + Delete("test") + }, + "Delete should panic if locker is not initialized", + ) + + l.Require().Panics( + func() { + LockedBy("test") + }, + "LockedBy should panic if locker is not initialized", + ) +} + +func (l *LockerTestSuite) TestLockerAlreadyRegistered() { + err := RegisterLocker(l.mux) + l.Require().Error(err, "should not be able to register the same locker again") + l.Require().Equal("locker already registered", err.Error()) +} + +func (l *LockerTestSuite) TestLockerDelete() { + Lock("test", "test-identifier") + mux, ok := l.mux.muxes.Load("test") + l.Require().True(ok) + keyMux := mux.(*lockWithIdent) + l.Require().Equal("test-identifier", keyMux.ident) + + Delete("test") + mux, ok = l.mux.muxes.Load("test") + l.Require().False(ok) + l.Require().Nil(mux) + + identifier, ok := l.mux.LockedBy("test") + l.Require().False(ok) + l.Require().Equal("", identifier) +} + +func (l *LockerTestSuite) TestLockUnlock() { + Lock("test", "test-identifier") + mux, ok := l.mux.muxes.Load("test") + l.Require().True(ok) + keyMux := mux.(*lockWithIdent) + l.Require().Equal("test-identifier", keyMux.ident) + + Unlock("test", true) + mux, ok = l.mux.muxes.Load("test") + l.Require().False(ok) + l.Require().Nil(mux) + + identifier, ok := l.mux.LockedBy("test") + l.Require().False(ok) + l.Require().Equal("", identifier) +} + +func (l *LockerTestSuite) TestLockUnlockWithoutRemove() { + Lock("test", "test-identifier") + mux, ok := l.mux.muxes.Load("test") + l.Require().True(ok) + keyMux := mux.(*lockWithIdent) + l.Require().Equal("test-identifier", keyMux.ident) + + Unlock("test", false) + mux, ok = l.mux.muxes.Load("test") 
+ l.Require().True(ok) + keyMux = mux.(*lockWithIdent) + l.Require().Equal("", keyMux.ident) + + identifier, ok := l.mux.LockedBy("test") + l.Require().False(ok) + l.Require().Equal("", identifier) +} + +func (l *LockerTestSuite) TestTryLock() { + locked := TryLock("test", "test-identifier") + l.Require().True(locked) + mux, ok := l.mux.muxes.Load("test") + l.Require().True(ok) + keyMux := mux.(*lockWithIdent) + l.Require().Equal("test-identifier", keyMux.ident) + + locked = TryLock("test", "another-identifier2") + l.Require().False(locked) + mux, ok = l.mux.muxes.Load("test") + l.Require().True(ok) + keyMux = mux.(*lockWithIdent) + l.Require().Equal("test-identifier", keyMux.ident) + + Unlock("test", true) + locked = TryLock("test", "another-identifier2") + l.Require().True(locked) + mux, ok = l.mux.muxes.Load("test") + l.Require().True(ok) + keyMux = mux.(*lockWithIdent) + l.Require().Equal("another-identifier2", keyMux.ident) + Unlock("test", true) +} + +func (l *LockerTestSuite) TestLockedBy() { + Lock("test", "test-identifier") + identifier, ok := LockedBy("test") + l.Require().True(ok) + l.Require().Equal("test-identifier", identifier) + Unlock("test", true) + identifier, ok = LockedBy("test") + l.Require().False(ok) + l.Require().Equal("", identifier) + + Lock("test", "test-identifier2") + identifier, ok = LockedBy("test") + l.Require().True(ok) + l.Require().Equal("test-identifier2", identifier) + Unlock("test", false) + identifier, ok = LockedBy("test") + l.Require().False(ok) + l.Require().Equal("", identifier) +} + +func TestLockerTestSuite(t *testing.T) { + t.Parallel() + suite.Run(t, new(LockerTestSuite)) +} diff --git a/locking/locking.go b/locking/locking.go index c7ad89a3..d485f5ff 100644 --- a/locking/locking.go +++ b/locking/locking.go @@ -11,48 +11,56 @@ var locker Locker var lockerMux = sync.Mutex{} -func TryLock(key, identifier string) (ok bool, err error) { - _, filename, line, _ := runtime.Caller(1) - slog.Debug("attempting to try lock", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) - defer slog.Debug("try lock returned", "key", key, "identifier", identifier, "locked", ok, "caller", fmt.Sprintf("%s:%d", filename, line)) - if locker == nil { - return false, fmt.Errorf("no locker is registered") - } - - ok = locker.TryLock(key, identifier) - return ok, nil -} - -func Lock(key, identifier string) { - _, filename, line, _ := runtime.Caller(1) - slog.Debug("attempting to lock", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) - defer slog.Debug("lock acquired", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) - +func TryLock(key, identifier string) (ok bool) { if locker == nil { panic("no locker is registered") } + _, filename, line, _ := runtime.Caller(1) + slog.Debug("attempting to try lock", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) + defer slog.Debug("try lock returned", "key", key, "identifier", identifier, "locked", ok, "caller", fmt.Sprintf("%s:%d", filename, line)) + + ok = locker.TryLock(key, identifier) + return ok +} + +func Lock(key, identifier string) { + if locker == nil { + panic("no locker is registered") + } + + _, filename, line, _ := runtime.Caller(1) + slog.Debug("attempting to lock", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) + defer slog.Debug("lock acquired", "key", key, "identifier", identifier, "caller", fmt.Sprintf("%s:%d", filename, line)) + 
locker.Lock(key, identifier) } -func Unlock(key string, remove bool) error { - _, filename, line, _ := runtime.Caller(1) - slog.Debug("attempting to unlock", "key", key, "remove", remove, "caller", fmt.Sprintf("%s:%d", filename, line)) +func Unlock(key string, remove bool) { if locker == nil { - return fmt.Errorf("no locker is registered") + panic("no locker is registered") } + _, filename, line, _ := runtime.Caller(1) + slog.Debug("attempting to unlock", "key", key, "remove", remove, "caller", fmt.Sprintf("%s:%d", filename, line)) + defer slog.Debug("unlock completed", "key", key, "remove", remove, "caller", fmt.Sprintf("%s:%d", filename, line)) locker.Unlock(key, remove) - return nil } -func Delete(key string) error { +func LockedBy(key string) (string, bool) { if locker == nil { - return fmt.Errorf("no locker is registered") + panic("no locker is registered") + } + + return locker.LockedBy(key) +} + +func Delete(key string) { + if locker == nil { + panic("no locker is registered") } locker.Delete(key) - return nil } func RegisterLocker(lock Locker) error { diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 1f2e96ec..f5f9a13b 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -422,11 +422,11 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []*github.Runne continue } - lockAcquired, err := locking.TryLock(instance.Name, r.consumerID) - if !lockAcquired || err != nil { + lockAcquired := locking.TryLock(instance.Name, r.consumerID) + if !lockAcquired { slog.DebugContext( r.ctx, "failed to acquire lock for instance", - "runner_name", instance.Name, "error", err) + "runner_name", instance.Name) continue } defer locking.Unlock(instance.Name, false) @@ -505,11 +505,11 @@ func (r *basePoolManager) reapTimedOutRunners(runners []*github.Runner) error { slog.DebugContext( r.ctx, "attempting to lock instance", "runner_name", instance.Name) - lockAcquired, err := locking.TryLock(instance.Name, r.consumerID) - if !lockAcquired || err != nil { + lockAcquired := locking.TryLock(instance.Name, r.consumerID) + if !lockAcquired { slog.DebugContext( r.ctx, "failed to acquire lock for instance", - "runner_name", instance.Name, "error", err) + "runner_name", instance.Name) continue } defer locking.Unlock(instance.Name, false) @@ -639,11 +639,11 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) poolInstanceCache[dbInstance.PoolID] = poolInstances } - lockAcquired, err := locking.TryLock(dbInstance.Name, r.consumerID) - if !lockAcquired || err != nil { + lockAcquired := locking.TryLock(dbInstance.Name, r.consumerID) + if !lockAcquired { slog.DebugContext( r.ctx, "failed to acquire lock for instance", - "runner_name", dbInstance.Name, "error", err) + "runner_name", dbInstance.Name) continue } @@ -1076,11 +1076,11 @@ func (r *basePoolManager) scaleDownOnePool(ctx context.Context, pool params.Pool for _, instanceToDelete := range idleWorkers[:numScaleDown] { instanceToDelete := instanceToDelete - lockAcquired, err := locking.TryLock(instanceToDelete.Name, r.consumerID) - if !lockAcquired || err != nil { + lockAcquired := locking.TryLock(instanceToDelete.Name, r.consumerID) + if !lockAcquired { slog.With(slog.Any("error", err)).ErrorContext( ctx, "failed to acquire lock for instance", - "provider_id", instanceToDelete.Name, "error", err) + "provider_id", instanceToDelete.Name) continue } defer locking.Unlock(instanceToDelete.Name, false) @@ -1229,11 +1229,11 @@ func (r *basePoolManager) retryFailedInstancesForOnePool(ctx context.Context, po 
slog.DebugContext( ctx, "attempting to retry failed instance", "runner_name", instance.Name) - lockAcquired, err := locking.TryLock(instance.Name, r.consumerID) - if !lockAcquired || err != nil { + lockAcquired := locking.TryLock(instance.Name, r.consumerID) + if !lockAcquired { slog.DebugContext( ctx, "failed to acquire lock for instance", - "runner_name", instance.Name, "error", err) + "runner_name", instance.Name) continue } @@ -1413,8 +1413,8 @@ func (r *basePoolManager) deletePendingInstances() error { r.ctx, "removing instance from pool", "runner_name", instance.Name, "pool_id", instance.PoolID) - lockAcquired, err := locking.TryLock(instance.Name, r.consumerID) - if !lockAcquired || err != nil { + lockAcquired := locking.TryLock(instance.Name, r.consumerID) + if !lockAcquired { slog.InfoContext( r.ctx, "failed to acquire lock for instance", "runner_name", instance.Name) @@ -1525,11 +1525,11 @@ func (r *basePoolManager) addPendingInstances() error { r.ctx, "attempting to acquire lock for instance", "runner_name", instance.Name, "action", "create_pending") - lockAcquired, err := locking.TryLock(instance.Name, r.consumerID) - if !lockAcquired || err != nil { + lockAcquired := locking.TryLock(instance.Name, r.consumerID) + if !lockAcquired { slog.DebugContext( r.ctx, "failed to acquire lock for instance", - "runner_name", instance.Name, "error", err) + "runner_name", instance.Name) continue } diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index f740f051..660bbe97 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -291,7 +291,7 @@ func (w *Worker) reapTimedOutRunners(runners map[string]params.RunnerReference) continue } if ghRunner, ok := runners[runner.Name]; !ok || ghRunner.GetStatus() == params.RunnerOffline { - if ok, err := locking.TryLock(runner.Name, w.consumerID); err != nil || !ok { + if ok := locking.TryLock(runner.Name, w.consumerID); !ok { slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) continue } @@ -360,7 +360,7 @@ func (w *Worker) consolidateRunnerState(runners []params.RunnerReference) error } if _, ok := ghRunnersByName[name]; !ok { - if ok, err := locking.TryLock(name, w.consumerID); err != nil || !ok { + if ok := locking.TryLock(name, w.consumerID); !ok { slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", name) continue } @@ -446,8 +446,8 @@ func (w *Worker) consolidateRunnerState(runners []params.RunnerReference) error continue } - locked, err := locking.TryLock(runner.Name, w.consumerID) - if err != nil || !locked { + locked := locking.TryLock(runner.Name, w.consumerID) + if !locked { slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) continue } @@ -777,8 +777,8 @@ func (w *Worker) handleScaleDown(target, current uint) { removed := 0 candidates := []params.Instance{} for _, runner := range w.runners { - locked, err := locking.TryLock(runner.Name, w.consumerID) - if err != nil || !locked { + locked := locking.TryLock(runner.Name, w.consumerID) + if !locked { slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) continue } @@ -809,8 +809,8 @@ func (w *Worker) handleScaleDown(target, current uint) { break } - locked, err := locking.TryLock(runner.Name, w.consumerID) - if err != nil || !locked { + locked := locking.TryLock(runner.Name, w.consumerID) + if !locked { slog.DebugContext(w.ctx, "runner is locked; skipping", "runner_name", runner.Name) continue } From c601f88cf7fb68477f15a9be9812f0e88de16d19 
Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 2 May 2025 12:22:04 +0000 Subject: [PATCH 036/179] Add more tests Signed-off-by: Gabriel Adrian Samfira --- database/sql/instances_test.go | 6 + database/sql/pools_test.go | 141 +++++++++++++++++++++- database/watcher/watcher.go | 9 ++ database/watcher/watcher_store_test.go | 5 +- util/github/scalesets/message_sessions.go | 2 +- 5 files changed, 160 insertions(+), 3 deletions(-) diff --git a/database/sql/instances_test.go b/database/sql/instances_test.go index 9d000cef..90418be7 100644 --- a/database/sql/instances_test.go +++ b/database/sql/instances_test.go @@ -119,6 +119,12 @@ func (s *InstancesTestSuite) SetupTest() { CallbackURL: "https://garm.example.com/", Status: commonParams.InstanceRunning, RunnerStatus: params.RunnerIdle, + JitConfiguration: map[string]string{ + "secret": fmt.Sprintf("secret-%d", i), + }, + AditionalLabels: []string{ + fmt.Sprintf("label-%d", i), + }, }, ) if err != nil { diff --git a/database/sql/pools_test.go b/database/sql/pools_test.go index e6cf7f4a..990d6808 100644 --- a/database/sql/pools_test.go +++ b/database/sql/pools_test.go @@ -16,6 +16,7 @@ package sql import ( "context" + "encoding/json" "flag" "fmt" "regexp" @@ -27,7 +28,10 @@ import ( "gorm.io/gorm" "gorm.io/gorm/logger" + commonParams "github.com/cloudbase/garm-provider-common/params" + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" garmTesting "github.com/cloudbase/garm/internal/testing" "github.com/cloudbase/garm/params" ) @@ -40,7 +44,9 @@ type PoolsTestFixtures struct { type PoolsTestSuite struct { suite.Suite - Store dbCommon.Store + Store dbCommon.Store + ctx context.Context + StoreSQLMocked *sqlDatabase Fixtures *PoolsTestFixtures adminCtx context.Context @@ -53,13 +59,21 @@ func (s *PoolsTestSuite) assertSQLMockExpectations() { } } +func (s *PoolsTestSuite) TearDownTest() { + watcher.CloseWatcher() +} + func (s *PoolsTestSuite) SetupTest() { // create testing sqlite database + ctx := context.Background() + watcher.InitWatcher(ctx) + db, err := NewSQLDatabase(context.Background(), garmTesting.GetTestSqliteDBConfig(s.T())) if err != nil { s.FailNow(fmt.Sprintf("failed to create db connection: %s", err)) } s.Store = db + s.ctx = garmTesting.ImpersonateAdminContext(ctx, s.Store, s.T()) adminCtx := garmTesting.ImpersonateAdminContext(context.Background(), db, s.T()) s.adminCtx = adminCtx @@ -194,6 +208,131 @@ func (s *PoolsTestSuite) TestDeletePoolByIDDBRemoveErr() { s.Require().Equal("removing pool: mocked removing pool error", err.Error()) } +func (s *PoolsTestSuite) TestEntityPoolOperations() { + ep := garmTesting.CreateDefaultGithubEndpoint(s.ctx, s.Store, s.T()) + creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.Store, s.T(), ep) + s.T().Cleanup(func() { s.Store.DeleteGithubCredentials(s.ctx, creds.ID) }) + repo, err := s.Store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotEmpty(repo.ID) + s.T().Cleanup(func() { s.Store.DeleteRepository(s.ctx, repo.ID) }) + + entity, err := repo.GetEntity() + s.Require().NoError(err) + + createPoolParams := params.CreatePoolParams{ + ProviderName: "test-provider", + Image: "test-image", + Flavor: "test-flavor", + OSType: commonParams.Linux, + OSArch: commonParams.Amd64, + Tags: []string{"test-tag"}, + } + + pool, err := s.Store.CreateEntityPool(s.ctx, entity, createPoolParams) + s.Require().NoError(err) + 
s.Require().NotEmpty(pool.ID) + s.T().Cleanup(func() { s.Store.DeleteEntityPool(s.ctx, entity, pool.ID) }) + + entityPool, err := s.Store.GetEntityPool(s.ctx, entity, pool.ID) + s.Require().NoError(err) + s.Require().Equal(pool.ID, entityPool.ID) + s.Require().Equal(pool.ProviderName, entityPool.ProviderName) + + updatePoolParams := params.UpdatePoolParams{ + Enabled: garmTesting.Ptr(true), + Flavor: "new-flavor", + Image: "new-image", + RunnerPrefix: params.RunnerPrefix{ + Prefix: "new-prefix", + }, + MaxRunners: garmTesting.Ptr(uint(100)), + MinIdleRunners: garmTesting.Ptr(uint(50)), + OSType: commonParams.Windows, + OSArch: commonParams.Amd64, + Tags: []string{"new-tag"}, + RunnerBootstrapTimeout: garmTesting.Ptr(uint(10)), + ExtraSpecs: json.RawMessage(`{"extra": "specs"}`), + GitHubRunnerGroup: garmTesting.Ptr("new-group"), + Priority: garmTesting.Ptr(uint(1)), + } + pool, err = s.Store.UpdateEntityPool(s.ctx, entity, pool.ID, updatePoolParams) + s.Require().NoError(err) + s.Require().Equal(*updatePoolParams.Enabled, pool.Enabled) + s.Require().Equal(updatePoolParams.Flavor, pool.Flavor) + s.Require().Equal(updatePoolParams.Image, pool.Image) + s.Require().Equal(updatePoolParams.RunnerPrefix.Prefix, pool.RunnerPrefix.Prefix) + s.Require().Equal(*updatePoolParams.MaxRunners, pool.MaxRunners) + s.Require().Equal(*updatePoolParams.MinIdleRunners, pool.MinIdleRunners) + s.Require().Equal(updatePoolParams.OSType, pool.OSType) + s.Require().Equal(updatePoolParams.OSArch, pool.OSArch) + s.Require().Equal(*updatePoolParams.RunnerBootstrapTimeout, pool.RunnerBootstrapTimeout) + s.Require().Equal(updatePoolParams.ExtraSpecs, pool.ExtraSpecs) + s.Require().Equal(*updatePoolParams.GitHubRunnerGroup, pool.GitHubRunnerGroup) + s.Require().Equal(*updatePoolParams.Priority, pool.Priority) + + entityPools, err := s.Store.ListEntityPools(s.ctx, entity) + s.Require().NoError(err) + s.Require().Len(entityPools, 1) + s.Require().Equal(pool.ID, entityPools[0].ID) + + tagsToMatch := []string{"new-tag"} + pools, err := s.Store.FindPoolsMatchingAllTags(s.ctx, entity.EntityType, entity.ID, tagsToMatch) + s.Require().NoError(err) + s.Require().Len(pools, 1) + s.Require().Equal(pool.ID, pools[0].ID) + + invalidTagsToMatch := []string{"invalid-tag"} + pools, err = s.Store.FindPoolsMatchingAllTags(s.ctx, entity.EntityType, entity.ID, invalidTagsToMatch) + s.Require().NoError(err) + s.Require().Len(pools, 0) +} + +func (s *PoolsTestSuite) TestListEntityInstances() { + ep := garmTesting.CreateDefaultGithubEndpoint(s.ctx, s.Store, s.T()) + creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.Store, s.T(), ep) + s.T().Cleanup(func() { s.Store.DeleteGithubCredentials(s.ctx, creds.ID) }) + repo, err := s.Store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotEmpty(repo.ID) + s.T().Cleanup(func() { s.Store.DeleteRepository(s.ctx, repo.ID) }) + + entity, err := repo.GetEntity() + s.Require().NoError(err) + + createPoolParams := params.CreatePoolParams{ + ProviderName: "test-provider", + Image: "test-image", + Flavor: "test-flavor", + OSType: commonParams.Linux, + OSArch: commonParams.Amd64, + Tags: []string{"test-tag"}, + } + + pool, err := s.Store.CreateEntityPool(s.ctx, entity, createPoolParams) + s.Require().NoError(err) + s.Require().NotEmpty(pool.ID) + s.T().Cleanup(func() { s.Store.DeleteEntityPool(s.ctx, entity, pool.ID) }) + + createInstanceParams := params.CreateInstanceParams{ + Name: 
"test-instance", + OSType: commonParams.Linux, + OSArch: commonParams.Amd64, + Status: commonParams.InstanceCreating, + } + instance, err := s.Store.CreateInstance(s.ctx, pool.ID, createInstanceParams) + s.Require().NoError(err) + s.Require().NotEmpty(instance.ID) + + s.T().Cleanup(func() { s.Store.DeleteInstance(s.ctx, pool.ID, instance.ID) }) + + instances, err := s.Store.ListEntityInstances(s.ctx, entity) + s.Require().NoError(err) + s.Require().Len(instances, 1) + s.Require().Equal(instance.ID, instances[0].ID) + s.Require().Equal(instance.Name, instances[0].Name) +} + func TestPoolsTestSuite(t *testing.T) { t.Parallel() suite.Run(t, new(PoolsTestSuite)) diff --git a/database/watcher/watcher.go b/database/watcher/watcher.go index 2ef1aeee..fda318c6 100644 --- a/database/watcher/watcher.go +++ b/database/watcher/watcher.go @@ -29,6 +29,15 @@ func InitWatcher(ctx context.Context) { databaseWatcher = w } +func CloseWatcher() error { + if databaseWatcher == nil { + return nil + } + databaseWatcher.Close() + databaseWatcher = nil + return nil +} + func RegisterProducer(ctx context.Context, id string) (common.Producer, error) { if databaseWatcher == nil { return nil, common.ErrWatcherNotInitialized diff --git a/database/watcher/watcher_store_test.go b/database/watcher/watcher_store_test.go index a0845b9c..af3185db 100644 --- a/database/watcher/watcher_store_test.go +++ b/database/watcher/watcher_store_test.go @@ -748,8 +748,11 @@ func consumeEvents(consumer common.Consumer) { consume: for { select { - case <-consumer.Watch(): + case _, ok := <-consumer.Watch(): // throw away event. + if !ok { + return + } case <-time.After(100 * time.Millisecond): break consume } diff --git a/util/github/scalesets/message_sessions.go b/util/github/scalesets/message_sessions.go index 79d5c26e..8fafc2c4 100644 --- a/util/github/scalesets/message_sessions.go +++ b/util/github/scalesets/message_sessions.go @@ -132,7 +132,7 @@ func (m *MessageSession) Refresh(ctx context.Context) error { if err := json.NewDecoder(resp.Body).Decode(&refreshedSession); err != nil { return fmt.Errorf("failed to decode response: %w", err) } - slog.DebugContext(ctx, "refreshed message session token", "session", refreshedSession) + slog.DebugContext(ctx, "refreshed message session token") m.session = &refreshedSession return nil } From 2a5e374ae66af44238c0bd44777c7b4ad429bc20 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 2 May 2025 12:26:48 +0000 Subject: [PATCH 037/179] Remove unused field Signed-off-by: Gabriel Adrian Samfira --- database/sql/util.go | 2 -- params/params.go | 11 ----------- workers/provider/instance_manager.go | 4 ++-- 3 files changed, 2 insertions(+), 15 deletions(-) diff --git a/database/sql/util.go b/database/sql/util.go index 12513ede..62f22179 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -76,13 +76,11 @@ func (s *sqlDatabase) sqlToParamsInstance(instance Instance) (params.Instance, e if instance.ScaleSetFkID != nil { ret.ScaleSetID = *instance.ScaleSetFkID ret.ProviderName = instance.ScaleSet.ProviderName - ret.RunnerBootstrapTimeout = instance.ScaleSet.RunnerBootstrapTimeout } if instance.PoolID != nil { ret.PoolID = instance.PoolID.String() ret.ProviderName = instance.Pool.ProviderName - ret.RunnerBootstrapTimeout = instance.Pool.RunnerBootstrapTimeout } if ret.ScaleSetID == 0 && ret.PoolID == "" { diff --git a/params/params.go b/params/params.go index 5653e386..2c1ed042 100644 --- a/params/params.go +++ b/params/params.go @@ -243,10 +243,6 @@ type Instance struct { // Job 
is the current job that is being serviced by this runner. Job *Job `json:"job,omitempty"` - // RunnerBootstrapTimeout is the timeout in minutes after which the runner deployment - // will be considered failed. This value is caried over from the pool or scale set. - RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` - // Do not serialize sensitive info. CallbackURL string `json:"-"` MetadataURL string `json:"-"` @@ -264,13 +260,6 @@ func (i Instance) GetID() string { return i.ID } -func (i Instance) RunnerTimeout() uint { - if i.RunnerBootstrapTimeout == 0 { - return appdefaults.DefaultRunnerBootstrapTimeout - } - return i.RunnerBootstrapTimeout -} - // used by swagger client generated code type Instances []Instance diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go index 506e6ef1..95d29f69 100644 --- a/workers/provider/instance_manager.go +++ b/workers/provider/instance_manager.go @@ -149,9 +149,9 @@ func (i *instanceManager) handleCreateInstanceInProvider(instance params.Instanc if err != nil { return fmt.Errorf("getting entity: %w", err) } - jwtValidity := instance.RunnerTimeout() + token, err := i.helper.InstanceTokenGetter().NewInstanceJWTToken( - instance, entity.String(), entity.EntityType, jwtValidity) + instance, entity.String(), entity.EntityType, i.scaleSet.RunnerBootstrapTimeout) if err != nil { return fmt.Errorf("creating instance token: %w", err) } From ff383ea49338c9cd9a6feca51018553c703499a0 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 2 May 2025 14:24:25 +0000 Subject: [PATCH 038/179] Add db scaleset tests Signed-off-by: Gabriel Adrian Samfira --- database/sql/controller_test.go | 1 - database/sql/enterprise_test.go | 1 - database/sql/github_test.go | 1 - database/sql/instances_test.go | 1 - database/sql/organizations_test.go | 1 - database/sql/pools_test.go | 3 +- database/sql/repositories_test.go | 9 +- database/sql/scalesets_test.go | 354 +++++++++++++++++++++++++++++ database/sql/users_test.go | 7 + 9 files changed, 369 insertions(+), 9 deletions(-) create mode 100644 database/sql/scalesets_test.go diff --git a/database/sql/controller_test.go b/database/sql/controller_test.go index b4076e92..949f675f 100644 --- a/database/sql/controller_test.go +++ b/database/sql/controller_test.go @@ -69,6 +69,5 @@ func (s *CtrlTestSuite) TestInitControllerAlreadyInitialized() { } func TestCtrlTestSuite(t *testing.T) { - t.Parallel() suite.Run(t, new(CtrlTestSuite)) } diff --git a/database/sql/enterprise_test.go b/database/sql/enterprise_test.go index 5da67b1d..3cfcbc32 100644 --- a/database/sql/enterprise_test.go +++ b/database/sql/enterprise_test.go @@ -782,6 +782,5 @@ func (s *EnterpriseTestSuite) TestUpdateEnterprisePoolInvalidEnterpriseID() { } func TestEnterpriseTestSuite(t *testing.T) { - t.Parallel() suite.Run(t, new(EnterpriseTestSuite)) } diff --git a/database/sql/github_test.go b/database/sql/github_test.go index b0399a68..9d53569a 100644 --- a/database/sql/github_test.go +++ b/database/sql/github_test.go @@ -738,7 +738,6 @@ func (s *GithubTestSuite) TestAdminUserCanUpdateAnyGithubCredentials() { } func TestGithubTestSuite(t *testing.T) { - t.Parallel() suite.Run(t, new(GithubTestSuite)) } diff --git a/database/sql/instances_test.go b/database/sql/instances_test.go index 90418be7..8610409b 100644 --- a/database/sql/instances_test.go +++ b/database/sql/instances_test.go @@ -591,6 +591,5 @@ func (s *InstancesTestSuite) TestPoolInstanceCountDBCountErr() { } func TestInstTestSuite(t *testing.T) { - 
t.Parallel() suite.Run(t, new(InstancesTestSuite)) } diff --git a/database/sql/organizations_test.go b/database/sql/organizations_test.go index b80ae763..b30ea701 100644 --- a/database/sql/organizations_test.go +++ b/database/sql/organizations_test.go @@ -787,6 +787,5 @@ func (s *OrgTestSuite) TestUpdateOrganizationPoolInvalidOrgID() { } func TestOrgTestSuite(t *testing.T) { - t.Parallel() suite.Run(t, new(OrgTestSuite)) } diff --git a/database/sql/pools_test.go b/database/sql/pools_test.go index 990d6808..758dcacd 100644 --- a/database/sql/pools_test.go +++ b/database/sql/pools_test.go @@ -29,7 +29,6 @@ import ( "gorm.io/gorm/logger" commonParams "github.com/cloudbase/garm-provider-common/params" - dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" garmTesting "github.com/cloudbase/garm/internal/testing" @@ -331,9 +330,9 @@ func (s *PoolsTestSuite) TestListEntityInstances() { s.Require().Len(instances, 1) s.Require().Equal(instance.ID, instances[0].ID) s.Require().Equal(instance.Name, instances[0].Name) + s.Require().Equal(instance.ProviderName, pool.ProviderName) } func TestPoolsTestSuite(t *testing.T) { - t.Parallel() suite.Run(t, new(PoolsTestSuite)) } diff --git a/database/sql/repositories_test.go b/database/sql/repositories_test.go index 88fb577a..484742ae 100644 --- a/database/sql/repositories_test.go +++ b/database/sql/repositories_test.go @@ -94,6 +94,9 @@ func (s *RepoTestSuite) assertSQLMockExpectations() { func (s *RepoTestSuite) SetupTest() { // create testing sqlite database + ctx := context.Background() + watcher.InitWatcher(ctx) + db, err := NewSQLDatabase(context.Background(), garmTesting.GetTestSqliteDBConfig(s.T())) if err != nil { s.FailNow(fmt.Sprintf("failed to create db connection: %s", err)) @@ -191,6 +194,10 @@ func (s *RepoTestSuite) SetupTest() { s.Fixtures = fixtures } +func (s *RepoTestSuite) TearDownTest() { + watcher.CloseWatcher() +} + func (s *RepoTestSuite) TestCreateRepository() { // call tested function repo, err := s.Store.CreateRepository( @@ -831,7 +838,5 @@ func (s *RepoTestSuite) TestUpdateRepositoryPoolInvalidRepoID() { } func TestRepoTestSuite(t *testing.T) { - t.Parallel() - suite.Run(t, new(RepoTestSuite)) } diff --git a/database/sql/scalesets_test.go b/database/sql/scalesets_test.go new file mode 100644 index 00000000..951c3735 --- /dev/null +++ b/database/sql/scalesets_test.go @@ -0,0 +1,354 @@ +package sql + +import ( + "context" + "encoding/json" + "fmt" + "testing" + + "github.com/stretchr/testify/suite" + + commonParams "github.com/cloudbase/garm-provider-common/params" + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" + garmTesting "github.com/cloudbase/garm/internal/testing" + "github.com/cloudbase/garm/params" +) + +type ScaleSetsTestSuite struct { + suite.Suite + Store dbCommon.Store + adminCtx context.Context + creds params.GithubCredentials + + org params.Organization + repo params.Repository + enterprise params.Enterprise + + orgEntity params.GithubEntity + repoEntity params.GithubEntity + enterpriseEntity params.GithubEntity +} + +func (s *ScaleSetsTestSuite) SetupTest() { + // create testing sqlite database + ctx := context.Background() + watcher.InitWatcher(ctx) + + db, err := NewSQLDatabase(context.Background(), garmTesting.GetTestSqliteDBConfig(s.T())) + if err != nil { + s.FailNow(fmt.Sprintf("failed to create db connection: %s", err)) + } + s.Store = db + + adminCtx := garmTesting.ImpersonateAdminContext(ctx, db, s.T()) + 
s.adminCtx = adminCtx + + githubEndpoint := garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) + s.creds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), githubEndpoint) + + // create an organization for testing purposes + s.org, err = s.Store.CreateOrganization(s.adminCtx, "test-org", s.creds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) + if err != nil { + s.FailNow(fmt.Sprintf("failed to create org: %s", err)) + } + + s.repo, err = s.Store.CreateRepository(s.adminCtx, "test-org", "test-repo", s.creds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) + if err != nil { + s.FailNow(fmt.Sprintf("failed to create repo: %s", err)) + } + + s.enterprise, err = s.Store.CreateEnterprise(s.adminCtx, "test-enterprise", s.creds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) + if err != nil { + s.FailNow(fmt.Sprintf("failed to create enterprise: %s", err)) + } + + s.orgEntity, err = s.org.GetEntity() + if err != nil { + s.FailNow(fmt.Sprintf("failed to get org entity: %s", err)) + } + + s.repoEntity, err = s.repo.GetEntity() + if err != nil { + s.FailNow(fmt.Sprintf("failed to get repo entity: %s", err)) + } + + s.enterpriseEntity, err = s.enterprise.GetEntity() + if err != nil { + s.FailNow(fmt.Sprintf("failed to get enterprise entity: %s", err)) + } + + s.T().Cleanup(func() { + err := s.Store.DeleteOrganization(s.adminCtx, s.org.ID) + if err != nil { + s.FailNow(fmt.Sprintf("failed to delete org: %s", err)) + } + err = s.Store.DeleteRepository(s.adminCtx, s.repo.ID) + if err != nil { + s.FailNow(fmt.Sprintf("failed to delete repo: %s", err)) + } + err = s.Store.DeleteEnterprise(s.adminCtx, s.enterprise.ID) + if err != nil { + s.FailNow(fmt.Sprintf("failed to delete enterprise: %s", err)) + } + }) +} + +func (s *ScaleSetsTestSuite) TearDownTest() { + watcher.CloseWatcher() +} + +func (s *ScaleSetsTestSuite) callback(old, newSet params.ScaleSet) error { + s.Require().Equal(old.Name, "test-scaleset") + s.Require().Equal(newSet.Name, "test-scaleset-updated") + s.Require().Equal(old.OSType, commonParams.Linux) + s.Require().Equal(newSet.OSType, commonParams.Windows) + s.Require().Equal(old.OSArch, commonParams.Amd64) + s.Require().Equal(newSet.OSArch, commonParams.Arm64) + s.Require().Equal(old.ExtraSpecs, json.RawMessage(`{"test": 1}`)) + s.Require().Equal(newSet.ExtraSpecs, json.RawMessage(`{"test": 111}`)) + s.Require().Equal(old.MaxRunners, uint(10)) + s.Require().Equal(newSet.MaxRunners, uint(60)) + s.Require().Equal(old.MinIdleRunners, uint(5)) + s.Require().Equal(newSet.MinIdleRunners, uint(50)) + s.Require().Equal(old.Image, "test-image") + s.Require().Equal(newSet.Image, "new-test-image") + s.Require().Equal(old.Flavor, "test-flavor") + s.Require().Equal(newSet.Flavor, "new-test-flavor") + s.Require().Equal(old.GitHubRunnerGroup, "test-group") + s.Require().Equal(newSet.GitHubRunnerGroup, "new-test-group") + s.Require().Equal(old.RunnerPrefix.Prefix, "garm") + s.Require().Equal(newSet.RunnerPrefix.Prefix, "test-prefix2") + s.Require().Equal(old.Enabled, false) + s.Require().Equal(newSet.Enabled, true) + return nil +} + +func (s *ScaleSetsTestSuite) TestScaleSetOperations() { + // create a scale set for the organization + createScaleSetPrams := params.CreateScaleSetParams{ + Name: "test-scaleset", + ProviderName: "test-provider", + MaxRunners: 10, + MinIdleRunners: 5, + Image: "test-image", + Flavor: "test-flavor", + OSType: commonParams.Linux, + OSArch: commonParams.Amd64, + ExtraSpecs: json.RawMessage(`{"test": 
1}`), + GitHubRunnerGroup: "test-group", + } + + var orgScaleSet params.ScaleSet + var repoScaleSet params.ScaleSet + var enterpriseScaleSet params.ScaleSet + var err error + + s.T().Run("create org scaleset", func(_ *testing.T) { + orgScaleSet, err = s.Store.CreateEntityScaleSet(s.adminCtx, s.orgEntity, createScaleSetPrams) + s.Require().NoError(err) + s.Require().NotNil(orgScaleSet) + s.Require().Equal(orgScaleSet.Name, createScaleSetPrams.Name) + s.T().Cleanup(func() { + err := s.Store.DeleteScaleSetByID(s.adminCtx, orgScaleSet.ID) + if err != nil { + s.FailNow(fmt.Sprintf("failed to delete scaleset: %s", err)) + } + }) + }) + + s.T().Run("create repo scaleset", func(_ *testing.T) { + repoScaleSet, err = s.Store.CreateEntityScaleSet(s.adminCtx, s.repoEntity, createScaleSetPrams) + s.Require().NoError(err) + s.Require().NotNil(repoScaleSet) + s.Require().Equal(repoScaleSet.Name, createScaleSetPrams.Name) + s.T().Cleanup(func() { + err := s.Store.DeleteScaleSetByID(s.adminCtx, repoScaleSet.ID) + if err != nil { + s.FailNow(fmt.Sprintf("failed to delete scaleset: %s", err)) + } + }) + }) + + s.T().Run("create enterprise scaleset", func(_ *testing.T) { + enterpriseScaleSet, err = s.Store.CreateEntityScaleSet(s.adminCtx, s.enterpriseEntity, createScaleSetPrams) + s.Require().NoError(err) + s.Require().NotNil(enterpriseScaleSet) + s.Require().Equal(enterpriseScaleSet.Name, createScaleSetPrams.Name) + + s.T().Cleanup(func() { + err := s.Store.DeleteScaleSetByID(s.adminCtx, enterpriseScaleSet.ID) + if err != nil { + s.FailNow(fmt.Sprintf("failed to delete scaleset: %s", err)) + } + }) + }) + + s.T().Run("create list all scalesets", func(_ *testing.T) { + allScaleSets, err := s.Store.ListAllScaleSets(s.adminCtx) + s.Require().NoError(err) + s.Require().NotEmpty(allScaleSets) + s.Require().Len(allScaleSets, 3) + }) + + s.T().Run("list repo scalesets", func(_ *testing.T) { + repoScaleSets, err := s.Store.ListEntityScaleSets(s.adminCtx, s.repoEntity) + s.Require().NoError(err) + s.Require().NotEmpty(repoScaleSets) + s.Require().Len(repoScaleSets, 1) + }) + + s.T().Run("list org scalesets", func(_ *testing.T) { + orgScaleSets, err := s.Store.ListEntityScaleSets(s.adminCtx, s.orgEntity) + s.Require().NoError(err) + s.Require().NotEmpty(orgScaleSets) + s.Require().Len(orgScaleSets, 1) + }) + + s.T().Run("list enterprise scalesets", func(_ *testing.T) { + enterpriseScaleSets, err := s.Store.ListEntityScaleSets(s.adminCtx, s.enterpriseEntity) + s.Require().NoError(err) + s.Require().NotEmpty(enterpriseScaleSets) + s.Require().Len(enterpriseScaleSets, 1) + }) + + s.T().Run("get repo scaleset by ID", func(_ *testing.T) { + repoScaleSetByID, err := s.Store.GetScaleSetByID(s.adminCtx, repoScaleSet.ID) + s.Require().NoError(err) + s.Require().NotNil(repoScaleSetByID) + s.Require().Equal(repoScaleSetByID.ID, repoScaleSet.ID) + s.Require().Equal(repoScaleSetByID.Name, repoScaleSet.Name) + }) + + s.T().Run("get org scaleset by ID", func(_ *testing.T) { + orgScaleSetByID, err := s.Store.GetScaleSetByID(s.adminCtx, orgScaleSet.ID) + s.Require().NoError(err) + s.Require().NotNil(orgScaleSetByID) + s.Require().Equal(orgScaleSetByID.ID, orgScaleSet.ID) + s.Require().Equal(orgScaleSetByID.Name, orgScaleSet.Name) + }) + + s.T().Run("get enterprise scaleset by ID", func(_ *testing.T) { + enterpriseScaleSetByID, err := s.Store.GetScaleSetByID(s.adminCtx, enterpriseScaleSet.ID) + s.Require().NoError(err) + s.Require().NotNil(enterpriseScaleSetByID) + s.Require().Equal(enterpriseScaleSetByID.ID, enterpriseScaleSet.ID) + 
s.Require().Equal(enterpriseScaleSetByID.Name, enterpriseScaleSet.Name) + }) + + s.T().Run("get scaleset by ID not found", func(_ *testing.T) { + _, err = s.Store.GetScaleSetByID(s.adminCtx, 999) + s.Require().Error(err) + s.Require().Contains(err.Error(), "not found") + }) + + s.T().Run("Set scale set last message ID and desired count", func(_ *testing.T) { + err = s.Store.SetScaleSetLastMessageID(s.adminCtx, orgScaleSet.ID, 20) + s.Require().NoError(err) + err = s.Store.SetScaleSetDesiredRunnerCount(s.adminCtx, orgScaleSet.ID, 5) + s.Require().NoError(err) + orgScaleSetByID, err := s.Store.GetScaleSetByID(s.adminCtx, orgScaleSet.ID) + s.Require().NoError(err) + s.Require().NotNil(orgScaleSetByID) + s.Require().Equal(orgScaleSetByID.LastMessageID, int64(20)) + s.Require().Equal(orgScaleSetByID.DesiredRunnerCount, 5) + }) + + updateParams := params.UpdateScaleSetParams{ + Name: "test-scaleset-updated", + RunnerPrefix: params.RunnerPrefix{ + Prefix: "test-prefix2", + }, + OSType: commonParams.Windows, + OSArch: commonParams.Arm64, + ExtraSpecs: json.RawMessage(`{"test": 111}`), + Enabled: garmTesting.Ptr(true), + MaxRunners: garmTesting.Ptr(uint(60)), + MinIdleRunners: garmTesting.Ptr(uint(50)), + Image: "new-test-image", + Flavor: "new-test-flavor", + GitHubRunnerGroup: garmTesting.Ptr("new-test-group"), + } + + s.T().Run("update repo scaleset", func(_ *testing.T) { + newRepoScaleSet, err := s.Store.UpdateEntityScaleSet(s.adminCtx, s.repoEntity, repoScaleSet.ID, updateParams, s.callback) + s.Require().NoError(err) + s.Require().NotNil(newRepoScaleSet) + s.Require().NoError(s.callback(repoScaleSet, newRepoScaleSet)) + }) + + s.T().Run("update org scaleset", func(_ *testing.T) { + newOrgScaleSet, err := s.Store.UpdateEntityScaleSet(s.adminCtx, s.orgEntity, orgScaleSet.ID, updateParams, s.callback) + s.Require().NoError(err) + s.Require().NotNil(newOrgScaleSet) + s.Require().NoError(s.callback(orgScaleSet, newOrgScaleSet)) + }) + + s.T().Run("update enterprise scaleset", func(_ *testing.T) { + newEnterpriseScaleSet, err := s.Store.UpdateEntityScaleSet(s.adminCtx, s.enterpriseEntity, enterpriseScaleSet.ID, updateParams, s.callback) + s.Require().NoError(err) + s.Require().NotNil(newEnterpriseScaleSet) + s.Require().NoError(s.callback(enterpriseScaleSet, newEnterpriseScaleSet)) + }) + + s.T().Run("update scaleset not found", func(_ *testing.T) { + _, err = s.Store.UpdateEntityScaleSet(s.adminCtx, s.enterpriseEntity, 99999, updateParams, s.callback) + s.Require().Error(err) + s.Require().Contains(err.Error(), "not found") + }) + + s.T().Run("update scaleset with invalid entity", func(_ *testing.T) { + _, err = s.Store.UpdateEntityScaleSet(s.adminCtx, params.GithubEntity{}, enterpriseScaleSet.ID, params.UpdateScaleSetParams{}, nil) + s.Require().Error(err) + s.Require().Contains(err.Error(), "missing entity id") + }) + + s.T().Run("Create repo scale set instance", func(_ *testing.T) { + param := params.CreateInstanceParams{ + Name: "test-instance", + Status: commonParams.InstancePendingCreate, + RunnerStatus: params.RunnerPending, + OSType: commonParams.Linux, + OSArch: commonParams.Amd64, + CallbackURL: "http://localhost:8080/callback", + MetadataURL: "http://localhost:8080/metadata", + GitHubRunnerGroup: "test-group", + JitConfiguration: map[string]string{ + "test": "test", + }, + AgentID: 5, + } + + instance, err := s.Store.CreateScaleSetInstance(s.adminCtx, repoScaleSet.ID, param) + s.Require().NoError(err) + s.Require().NotNil(instance) + s.Require().Equal(instance.Name, param.Name) + 
s.Require().Equal(instance.Status, param.Status) + s.Require().Equal(instance.RunnerStatus, param.RunnerStatus) + s.Require().Equal(instance.OSType, param.OSType) + s.Require().Equal(instance.OSArch, param.OSArch) + s.Require().Equal(instance.CallbackURL, param.CallbackURL) + s.Require().Equal(instance.MetadataURL, param.MetadataURL) + s.Require().Equal(instance.GitHubRunnerGroup, param.GitHubRunnerGroup) + s.Require().Equal(instance.JitConfiguration, param.JitConfiguration) + s.Require().Equal(instance.AgentID, param.AgentID) + + s.T().Cleanup(func() { + err := s.Store.DeleteInstanceByName(s.adminCtx, instance.Name) + if err != nil { + s.FailNow(fmt.Sprintf("failed to delete scaleset instance: %s", err)) + } + }) + }) + + s.T().Run("List repo scale set instances", func(_ *testing.T) { + instances, err := s.Store.ListScaleSetInstances(s.adminCtx, repoScaleSet.ID) + s.Require().NoError(err) + s.Require().NotEmpty(instances) + s.Require().Len(instances, 1) + }) +} + +func TestScaleSetsTestSuite(t *testing.T) { + suite.Run(t, new(ScaleSetsTestSuite)) +} diff --git a/database/sql/users_test.go b/database/sql/users_test.go index 627c4b93..db24adc3 100644 --- a/database/sql/users_test.go +++ b/database/sql/users_test.go @@ -28,6 +28,7 @@ import ( "gorm.io/gorm/logger" dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" garmTesting "github.com/cloudbase/garm/internal/testing" "github.com/cloudbase/garm/params" ) @@ -53,7 +54,13 @@ func (s *UserTestSuite) assertSQLMockExpectations() { } } +func (s *UserTestSuite) TearDownTest() { + watcher.CloseWatcher() +} + func (s *UserTestSuite) SetupTest() { + ctx := context.Background() + watcher.InitWatcher(ctx) // create testing sqlite database db, err := NewSQLDatabase(context.Background(), garmTesting.GetTestSqliteDBConfig(s.T())) if err != nil { From 77895d9c894031bf791f9c76e70c3c764d00f9a0 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 2 May 2025 17:38:29 +0000 Subject: [PATCH 039/179] Add more tests Signed-off-by: Gabriel Adrian Samfira --- database/sql/instances.go | 2 +- database/sql/scalesets.go | 2 +- database/watcher/watcher_store_test.go | 234 +++++++++++++++++++++++++ 3 files changed, 236 insertions(+), 2 deletions(-) diff --git a/database/sql/instances.go b/database/sql/instances.go index 39e32211..dab81f10 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -188,7 +188,7 @@ func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceN } func (s *sqlDatabase) DeleteInstanceByName(ctx context.Context, instanceName string) error { - instance, err := s.getInstanceByName(ctx, instanceName) + instance, err := s.getInstanceByName(ctx, instanceName, "Pool", "ScaleSet") if err != nil { if errors.Is(err, runnerErrors.ErrNotFound) { return nil diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go index ea4878bf..e4bd28f3 100644 --- a/database/sql/scalesets.go +++ b/database/sql/scalesets.go @@ -357,7 +357,7 @@ func (s *sqlDatabase) DeleteScaleSetByID(_ context.Context, scaleSetID uint) (er } }() err = s.conn.Transaction(func(tx *gorm.DB) error { - dbSet, err := s.getScaleSetByID(tx, scaleSetID, "Instances") + dbSet, err := s.getScaleSetByID(tx, scaleSetID, "Instances", "Enterprise", "Organization", "Repository") if err != nil { return errors.Wrap(err, "fetching scale set") } diff --git a/database/watcher/watcher_store_test.go b/database/watcher/watcher_store_test.go index af3185db..5a1486a8 100644 --- 
a/database/watcher/watcher_store_test.go +++ b/database/watcher/watcher_store_test.go @@ -241,6 +241,112 @@ func (s *WatcherStoreTestSuite) TestInstanceWatcher() { } } +func (s *WatcherStoreTestSuite) TestScaleSetInstanceWatcher() { + consumer, err := watcher.RegisterConsumer( + s.ctx, "instance-test", + watcher.WithEntityTypeFilter(common.InstanceEntityType), + watcher.WithAny( + watcher.WithOperationTypeFilter(common.CreateOperation), + watcher.WithOperationTypeFilter(common.UpdateOperation), + watcher.WithOperationTypeFilter(common.DeleteOperation)), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + s.T().Cleanup(func() { consumer.Close() }) + consumeEvents(consumer) + + ep := garmTesting.CreateDefaultGithubEndpoint(s.ctx, s.store, s.T()) + creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) + s.T().Cleanup(func() { s.store.DeleteGithubCredentials(s.ctx, creds.ID) }) + + repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotEmpty(repo.ID) + s.T().Cleanup(func() { s.store.DeleteRepository(s.ctx, repo.ID) }) + + entity, err := repo.GetEntity() + s.Require().NoError(err) + + createScaleSetParams := params.CreateScaleSetParams{ + ProviderName: "test-provider", + Name: "test-scaleset", + Image: "test-image", + Flavor: "test-flavor", + MinIdleRunners: 0, + MaxRunners: 1, + OSType: commonParams.Linux, + OSArch: commonParams.Amd64, + } + + scaleSet, err := s.store.CreateEntityScaleSet(s.ctx, entity, createScaleSetParams) + s.Require().NoError(err) + s.Require().NotEmpty(scaleSet.ID) + s.T().Cleanup(func() { s.store.DeleteScaleSetByID(s.ctx, scaleSet.ID) }) + + createInstanceParams := params.CreateInstanceParams{ + Name: "test-instance", + OSType: commonParams.Linux, + OSArch: commonParams.Amd64, + Status: commonParams.InstanceCreating, + } + instance, err := s.store.CreateScaleSetInstance(s.ctx, scaleSet.ID, createInstanceParams) + s.Require().NoError(err) + s.Require().NotEmpty(instance.ID) + + select { + case event := <-consumer.Watch(): + s.Require().Equal(common.ChangePayload{ + EntityType: common.InstanceEntityType, + Operation: common.CreateOperation, + Payload: instance, + }, event) + asInstance, ok := event.Payload.(params.Instance) + s.Require().True(ok) + s.Require().Equal(instance.Name, "test-instance") + s.Require().Equal(asInstance.Name, "test-instance") + case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } + + updateParams := params.UpdateInstanceParams{ + RunnerStatus: params.RunnerActive, + } + + updatedInstance, err := s.store.UpdateInstance(s.ctx, instance.Name, updateParams) + s.Require().NoError(err) + + select { + case event := <-consumer.Watch(): + s.Require().Equal(common.ChangePayload{ + EntityType: common.InstanceEntityType, + Operation: common.UpdateOperation, + Payload: updatedInstance, + }, event) + case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } + + err = s.store.DeleteInstanceByName(s.ctx, updatedInstance.Name) + s.Require().NoError(err) + + select { + case event := <-consumer.Watch(): + s.Require().Equal(common.ChangePayload{ + EntityType: common.InstanceEntityType, + Operation: common.DeleteOperation, + Payload: params.Instance{ + ID: updatedInstance.ID, + Name: updatedInstance.Name, + ProviderID: updatedInstance.ProviderID, + AgentID: updatedInstance.AgentID, + ScaleSetID: updatedInstance.ScaleSetID, + }, + }, event) 
+ case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } +} + func (s *WatcherStoreTestSuite) TestPoolWatcher() { consumer, err := watcher.RegisterConsumer( s.ctx, "pool-test", @@ -362,6 +468,134 @@ func (s *WatcherStoreTestSuite) TestPoolWatcher() { } } +func (s *WatcherStoreTestSuite) TestScaleSetWatcher() { + consumer, err := watcher.RegisterConsumer( + s.ctx, "scaleset-test", + watcher.WithEntityTypeFilter(common.ScaleSetEntityType), + watcher.WithAny( + watcher.WithOperationTypeFilter(common.CreateOperation), + watcher.WithOperationTypeFilter(common.UpdateOperation), + watcher.WithOperationTypeFilter(common.DeleteOperation)), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + s.T().Cleanup(func() { consumer.Close() }) + consumeEvents(consumer) + + ep := garmTesting.CreateDefaultGithubEndpoint(s.ctx, s.store, s.T()) + creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) + s.T().Cleanup(func() { + if err := s.store.DeleteGithubCredentials(s.ctx, creds.ID); err != nil { + s.T().Logf("failed to delete Github credentials: %v", err) + } + }) + + repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotEmpty(repo.ID) + s.T().Cleanup(func() { s.store.DeleteRepository(s.ctx, repo.ID) }) + + entity, err := repo.GetEntity() + s.Require().NoError(err) + + createScaleSetParams := params.CreateScaleSetParams{ + ProviderName: "test-provider", + Name: "test-scaleset", + Image: "test-image", + Flavor: "test-flavor", + MinIdleRunners: 0, + MaxRunners: 1, + OSType: commonParams.Linux, + OSArch: commonParams.Amd64, + Tags: []string{"test-tag"}, + } + scaleSet, err := s.store.CreateEntityScaleSet(s.ctx, entity, createScaleSetParams) + s.Require().NoError(err) + s.Require().NotEmpty(scaleSet.ID) + + select { + case event := <-consumer.Watch(): + s.Require().Equal(common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.CreateOperation, + Payload: scaleSet, + }, event) + asScaleSet, ok := event.Payload.(params.ScaleSet) + s.Require().True(ok) + s.Require().Equal(scaleSet.Image, "test-image") + s.Require().Equal(asScaleSet.Image, "test-image") + case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } + + updateParams := params.UpdateScaleSetParams{ + Flavor: "updated-flavor", + } + + callbackFn := func(old, newScaleSet params.ScaleSet) error { + s.Require().Equal(old.ID, newScaleSet.ID) + s.Require().Equal(old.Flavor, "test-flavor") + s.Require().Equal(newScaleSet.Flavor, "updated-flavor") + return nil + } + updatedScaleSet, err := s.store.UpdateEntityScaleSet(s.ctx, entity, scaleSet.ID, updateParams, callbackFn) + s.Require().NoError(err) + + select { + case event := <-consumer.Watch(): + s.Require().Equal(common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: updatedScaleSet, + }, event) + case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } + + err = s.store.SetScaleSetLastMessageID(s.ctx, updatedScaleSet.ID, 99) + s.Require().NoError(err) + + select { + case event := <-consumer.Watch(): + asScaleSet, ok := event.Payload.(params.ScaleSet) + s.Require().True(ok) + s.Require().Equal(asScaleSet.ID, updatedScaleSet.ID) + s.Require().Equal(asScaleSet.LastMessageID, int64(99)) + case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } + + 
err = s.store.SetScaleSetDesiredRunnerCount(s.ctx, updatedScaleSet.ID, 5) + s.Require().NoError(err) + + select { + case event := <-consumer.Watch(): + asScaleSet, ok := event.Payload.(params.ScaleSet) + s.Require().True(ok) + s.Require().Equal(asScaleSet.ID, updatedScaleSet.ID) + s.Require().Equal(asScaleSet.DesiredRunnerCount, 5) + case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } + + err = s.store.DeleteScaleSetByID(s.ctx, scaleSet.ID) + s.Require().NoError(err) + + select { + case event := <-consumer.Watch(): + // We updated last message ID and desired runner count above. + updatedScaleSet.DesiredRunnerCount = 5 + updatedScaleSet.LastMessageID = 99 + s.Require().Equal(common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.DeleteOperation, + Payload: updatedScaleSet, + }, event) + case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } +} + func (s *WatcherStoreTestSuite) TestControllerWatcher() { consumer, err := watcher.RegisterConsumer( s.ctx, "controller-test", From 3b3095c5461c9acb997dff3f61cbd149102d14fa Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sat, 3 May 2025 22:49:15 +0000 Subject: [PATCH 040/179] Scale sets are unique within a runner group You can have multiple scale sets with the same name, as long as they live in different runner groups. Signed-off-by: Gabriel Adrian Samfira --- database/sql/models.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/database/sql/models.go b/database/sql/models.go index c1b6462d..d6fbb6e9 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -98,9 +98,10 @@ type ScaleSet struct { // the scale set was ceated in GARM but has not yet been created in GitHub. // The scale set ID is also not globally unique. It is only unique within the context // of an entity. - ScaleSetID int `gorm:"index:idx_scale_set"` - Name string `gorm:"index:idx_name"` - DisableUpdate bool + ScaleSetID int `gorm:"index:idx_scale_set"` + Name string `gorm:"unique_index:idx_name"` + GitHubRunnerGroup string `gorm:"unique_index:idx_name"` + DisableUpdate bool // State stores the provisioning state of the scale set in GitHub State params.ScaleSetState @@ -123,8 +124,7 @@ type ScaleSet struct { // ExtraSpecs is an opaque json that gets sent to the provider // as part of the bootstrap params for instances. It can contain // any kind of data needed by providers. 
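For clarity on the mechanism: the two gorm tags above share the index name idx_name, so the intent, as the commit message states, is one composite unique constraint over the pair rather than two independently unique columns. A minimal sketch of that shape, mirroring the tag style used in this file; the struct name is illustrative only and this assumes gorm treats the shared index name as a single composite unique index:

    // Name alone may repeat; the (Name, GitHubRunnerGroup) pair may not.
    type scaleSetIdentity struct {
        Name              string `gorm:"unique_index:idx_name"`
        GitHubRunnerGroup string `gorm:"unique_index:idx_name"`
    }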
- ExtraSpecs datatypes.JSON - GitHubRunnerGroup string + ExtraSpecs datatypes.JSON RepoID *uuid.UUID `gorm:"index"` Repository Repository `gorm:"foreignKey:RepoID;"` From 1d093cc33632bfca5ad9eb2e6067bc544192acb2 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 5 May 2025 18:21:57 +0000 Subject: [PATCH 041/179] Slight refactor; add creds cache worker * Split the main function into a couple of more functions * Add credentials, entity, pool and scaleset cache * add credentials worker that updates the cache Signed-off-by: Gabriel Adrian Samfira --- cache/credentials_cache.go | 73 ++++++++ cache/entity_cache.go | 189 +++++++++++++++++++++ cache/{cache.go => tools_cache.go} | 0 cmd/garm/main.go | 225 ++++++++++++++----------- workers/credentials/credentials.go | 133 +++++++++++++++ workers/entity/controller.go | 18 +- workers/entity/controller_watcher.go | 2 + workers/scaleset/controller_watcher.go | 15 +- 8 files changed, 554 insertions(+), 101 deletions(-) create mode 100644 cache/credentials_cache.go create mode 100644 cache/entity_cache.go rename cache/{cache.go => tools_cache.go} (100%) create mode 100644 workers/credentials/credentials.go diff --git a/cache/credentials_cache.go b/cache/credentials_cache.go new file mode 100644 index 00000000..731d1640 --- /dev/null +++ b/cache/credentials_cache.go @@ -0,0 +1,73 @@ +package cache + +import ( + "sync" + + "github.com/cloudbase/garm/params" +) + +var credentialsCache *GithubCredentials + +func init() { + ghCredentialsCache := &GithubCredentials{ + cache: make(map[uint]params.GithubCredentials), + } + credentialsCache = ghCredentialsCache +} + +type GithubCredentials struct { + mux sync.Mutex + + cache map[uint]params.GithubCredentials +} + +func (g *GithubCredentials) SetCredentials(credentials params.GithubCredentials) { + g.mux.Lock() + defer g.mux.Unlock() + + g.cache[credentials.ID] = credentials +} + +func (g *GithubCredentials) GetCredentials(id uint) (params.GithubCredentials, bool) { + g.mux.Lock() + defer g.mux.Unlock() + + if creds, ok := g.cache[id]; ok { + return creds, true + } + return params.GithubCredentials{}, false +} + +func (g *GithubCredentials) DeleteCredentials(id uint) { + g.mux.Lock() + defer g.mux.Unlock() + + delete(g.cache, id) +} + +func (g *GithubCredentials) GetAllCredentials() []params.GithubCredentials { + g.mux.Lock() + defer g.mux.Unlock() + + creds := make([]params.GithubCredentials, 0, len(g.cache)) + for _, cred := range g.cache { + creds = append(creds, cred) + } + return creds +} + +func SetGithubCredentials(credentials params.GithubCredentials) { + credentialsCache.SetCredentials(credentials) +} + +func GetGithubCredentials(id uint) (params.GithubCredentials, bool) { + return credentialsCache.GetCredentials(id) +} + +func DeleteGithubCredentials(id uint) { + credentialsCache.DeleteCredentials(id) +} + +func GetAllGithubCredentials() []params.GithubCredentials { + return credentialsCache.GetAllCredentials() +} diff --git a/cache/entity_cache.go b/cache/entity_cache.go new file mode 100644 index 00000000..920b9a9b --- /dev/null +++ b/cache/entity_cache.go @@ -0,0 +1,189 @@ +package cache + +import ( + "sync" + + "github.com/cloudbase/garm/params" +) + +var entityCache *EntityCache + +func init() { + ghEntityCache := &EntityCache{ + entities: make(map[string]EntityItem), + } + entityCache = ghEntityCache +} + +type EntityItem struct { + Entity params.GithubEntity + Pools map[string]params.Pool + ScaleSets map[uint]params.ScaleSet +} + +type EntityCache struct { + mux sync.Mutex + // entity 
IDs are UUID4s. It is highly unlikely they will collide (🤞). + entities map[string]EntityItem +} + +func (e *EntityCache) GetEntity(entity params.GithubEntity) (EntityItem, bool) { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entity.ID]; ok { + // Updating specific credential details will not update entity cache which + // uses those credentials. + // Entity credentials in the cache are only updated if you swap the creds + // on the entity. We get the updated credentials from the credentials cache. + creds, ok := GetGithubCredentials(cache.Entity.Credentials.ID) + if ok { + cache.Entity.Credentials = creds + } + return cache, true + } + return EntityItem{}, false +} + +func (e *EntityCache) SetEntity(entity params.GithubEntity) { + e.mux.Lock() + defer e.mux.Unlock() + + e.entities[entity.ID] = EntityItem{ + Entity: entity, + } +} + +func (e *EntityCache) ReplaceEntityPools(entityID string, pools map[string]params.Pool) { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entityID]; ok { + cache.Pools = pools + e.entities[entityID] = cache + } +} + +func (e *EntityCache) ReplaceEntityScaleSets(entityID string, scaleSets map[uint]params.ScaleSet) { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entityID]; ok { + cache.ScaleSets = scaleSets + e.entities[entityID] = cache + } +} + +func (e *EntityCache) DeleteEntity(entityID string) { + e.mux.Lock() + defer e.mux.Unlock() + delete(e.entities, entityID) +} + +func (e *EntityCache) SetEntityPool(entityID string, pool params.Pool) { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entityID]; ok { + cache.Pools[pool.ID] = pool + e.entities[entityID] = cache + } +} + +func (e *EntityCache) SetEntityScaleSet(entityID string, scaleSet params.ScaleSet) { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entityID]; ok { + cache.ScaleSets[scaleSet.ID] = scaleSet + e.entities[entityID] = cache + } +} + +func (e *EntityCache) DeleteEntityPool(entityID string, poolID string) { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entityID]; ok { + delete(cache.Pools, poolID) + e.entities[entityID] = cache + } +} + +func (e *EntityCache) DeleteEntityScaleSet(entityID string, scaleSetID uint) { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entityID]; ok { + delete(cache.ScaleSets, scaleSetID) + e.entities[entityID] = cache + } +} + +func (e *EntityCache) GetEntityPool(entityID string, poolID string) (params.Pool, bool) { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entityID]; ok { + if pool, ok := cache.Pools[poolID]; ok { + return pool, true + } + } + return params.Pool{}, false +} + +func (e *EntityCache) GetEntityScaleSet(entityID string, scaleSetID uint) (params.ScaleSet, bool) { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entityID]; ok { + if scaleSet, ok := cache.ScaleSets[scaleSetID]; ok { + return scaleSet, true + } + } + return params.ScaleSet{}, false +} + +func GetEntity(entity params.GithubEntity) (EntityItem, bool) { + return entityCache.GetEntity(entity) +} + +func SetEntity(entity params.GithubEntity) { + entityCache.SetEntity(entity) +} + +func ReplaceEntityPools(entityID string, pools map[string]params.Pool) { + entityCache.ReplaceEntityPools(entityID, pools) +} + +func ReplaceEntityScaleSets(entityID string, scaleSets map[uint]params.ScaleSet) { + entityCache.ReplaceEntityScaleSets(entityID, scaleSets) +} + +func DeleteEntity(entityID string) { + 
entityCache.DeleteEntity(entityID) +} + +func SetEntityPool(entityID string, pool params.Pool) { + entityCache.SetEntityPool(entityID, pool) +} + +func SetEntityScaleSet(entityID string, scaleSet params.ScaleSet) { + entityCache.SetEntityScaleSet(entityID, scaleSet) +} + +func DeleteEntityPool(entityID string, poolID string) { + entityCache.DeleteEntityPool(entityID, poolID) +} + +func DeleteEntityScaleSet(entityID string, scaleSetID uint) { + entityCache.DeleteEntityScaleSet(entityID, scaleSetID) +} + +func GetEntityPool(entityID string, poolID string) (params.Pool, bool) { + return entityCache.GetEntityPool(entityID, poolID) +} + +func GetEntityScaleSet(entityID string, scaleSetID uint) (params.ScaleSet, bool) { + return entityCache.GetEntityScaleSet(entityID, scaleSetID) +} diff --git a/cache/cache.go b/cache/tools_cache.go similarity index 100% rename from cache/cache.go rename to cache/tools_cache.go diff --git a/cmd/garm/main.go b/cmd/garm/main.go index c43e3c93..15ba7069 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -51,6 +51,7 @@ import ( garmUtil "github.com/cloudbase/garm/util" "github.com/cloudbase/garm/util/appdefaults" "github.com/cloudbase/garm/websocket" + "github.com/cloudbase/garm/workers/credentials" "github.com/cloudbase/garm/workers/entity" "github.com/cloudbase/garm/workers/provider" ) @@ -180,7 +181,127 @@ func maybeUpdateURLsFromConfig(cfg config.Config, store common.Store) error { return nil } -//gocyclo:ignore +func configureRouter(ctx context.Context, cfg config.Config, db common.Store, hub *websocket.Hub, runner *runner.Runner) (http.Handler, error) { + authenticator := auth.NewAuthenticator(cfg.JWTAuth, db) + controller, err := controllers.NewAPIController(runner, authenticator, hub) + if err != nil { + return nil, fmt.Errorf("creating controller: %w", err) + } + + instanceMiddleware, err := auth.NewInstanceMiddleware(db, cfg.JWTAuth) + if err != nil { + return nil, fmt.Errorf("creating instance middleware: %w", err) + } + + jwtMiddleware, err := auth.NewjwtMiddleware(db, cfg.JWTAuth) + if err != nil { + return nil, fmt.Errorf("creating jwt middleware: %w", err) + } + + initMiddleware, err := auth.NewInitRequiredMiddleware(db) + if err != nil { + return nil, fmt.Errorf("creating init required middleware: %w", err) + } + + urlsRequiredMiddleware, err := auth.NewUrlsRequiredMiddleware(db) + if err != nil { + return nil, fmt.Errorf("creating urls required middleware: %w", err) + } + + metricsMiddleware, err := auth.NewMetricsMiddleware(cfg.JWTAuth) + if err != nil { + return nil, fmt.Errorf("creating metrics middleware: %w", err) + } + + router := routers.NewAPIRouter(controller, jwtMiddleware, initMiddleware, urlsRequiredMiddleware, instanceMiddleware, cfg.Default.EnableWebhookManagement) + + // start the metrics collector + if cfg.Metrics.Enable { + slog.InfoContext(ctx, "setting up metric routes") + router = routers.WithMetricsRouter(router, cfg.Metrics.DisableAuth, metricsMiddleware) + + slog.InfoContext(ctx, "register metrics") + if err := metrics.RegisterMetrics(); err != nil { + return nil, fmt.Errorf("registering metrics: %w", err) + } + + slog.InfoContext(ctx, "start metrics collection") + runnerMetrics.CollectObjectMetric(ctx, runner, cfg.Metrics.Duration()) + } + + if cfg.Default.DebugServer { + runtime.SetBlockProfileRate(1) + runtime.SetMutexProfileFraction(1) + slog.InfoContext(ctx, "setting up debug routes") + router = routers.WithDebugServer(router) + } + + corsMw := mux.CORSMethodMiddleware(router) + router.Use(corsMw) + + allowedOrigins 
:= handlers.AllowedOrigins(cfg.APIServer.CORSOrigins) + methodsOk := handlers.AllowedMethods([]string{"GET", "HEAD", "POST", "PUT", "OPTIONS", "DELETE"}) + headersOk := handlers.AllowedHeaders([]string{"X-Requested-With", "Content-Type", "Authorization"}) + + handler := handlers.CORS(methodsOk, headersOk, allowedOrigins)(router) + return handler, nil +} + +func startWorkers(ctx context.Context, cfg config.Config, db common.Store, controllerID string) (func() error, error) { + credsWorker, err := credentials.NewWorker(ctx, db) + if err != nil { + return nil, fmt.Errorf("failed to create credentials worker: %+v", err) + } + + if err := credsWorker.Start(); err != nil { + return nil, fmt.Errorf("failed to start credentials worker: %+v", err) + } + + providers, err := providers.LoadProvidersFromConfig(ctx, cfg, controllerID) + if err != nil { + return nil, fmt.Errorf("loading providers: %+v", err) + } + + entityController, err := entity.NewController(ctx, db, providers) + if err != nil { + return nil, fmt.Errorf("failed to create entity controller: %+v", err) + } + if err := entityController.Start(); err != nil { + return nil, fmt.Errorf("failed to start entity controller: %+v", err) + } + + instanceTokenGetter, err := auth.NewInstanceTokenGetter(cfg.JWTAuth.Secret) + if err != nil { + return nil, fmt.Errorf("failed to create instance token getter: %+v", err) + } + + providerWorker, err := provider.NewWorker(ctx, db, providers, instanceTokenGetter) + if err != nil { + return nil, fmt.Errorf("failed to create provider worker: %+v", err) + } + if err := providerWorker.Start(); err != nil { + return nil, fmt.Errorf("failed to start provider worker: %+v", err) + } + + return func() error { + slog.InfoContext(ctx, "shutting down credentials worker") + if err := credsWorker.Stop(); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop credentials worker") + } + + slog.InfoContext(ctx, "shutting down entity controller") + if err := entityController.Stop(); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop entity controller") + } + + slog.InfoContext(ctx, "shutting down provider worker") + if err := providerWorker.Stop(); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop provider worker") + } + return nil + }, nil +} + func main() { flag.Parse() if *version { @@ -192,7 +313,6 @@ func main() { watcher.InitWatcher(ctx) ctx = auth.GetAdminContext(ctx) - cfg, err := config.NewConfig(*conf) if err != nil { log.Fatalf("Fetching config: %+v", err) //nolint:gocritic @@ -237,30 +357,9 @@ func main() { log.Fatal(err) } - providers, err := providers.LoadProvidersFromConfig(ctx, *cfg, controllerInfo.ControllerID.String()) + stopWorkersFn, err := startWorkers(ctx, *cfg, db, controllerInfo.ControllerID.String()) if err != nil { - log.Fatalf("loading providers: %+v", err) - } - - entityController, err := entity.NewController(ctx, db, providers) - if err != nil { - log.Fatalf("failed to create entity controller: %+v", err) - } - if err := entityController.Start(); err != nil { - log.Fatalf("failed to start entity controller: %+v", err) - } - - instanceTokenGetter, err := auth.NewInstanceTokenGetter(cfg.JWTAuth.Secret) - if err != nil { - log.Fatalf("failed to create instance token getter: %+v", err) - } - - providerWorker, err := provider.NewWorker(ctx, db, providers, instanceTokenGetter) - if err != nil { - log.Fatalf("failed to create provider worker: %+v", err) - } - if err := providerWorker.Start(); err != nil { - 
log.Fatalf("failed to start provider worker: %+v", err) + log.Fatalf("failed to start workers: %+v", err) } runner, err := runner.NewRunner(ctx, *cfg, db) @@ -273,73 +372,17 @@ func main() { log.Fatal(err) } - authenticator := auth.NewAuthenticator(cfg.JWTAuth, db) - controller, err := controllers.NewAPIController(runner, authenticator, hub) + handler, err := configureRouter(ctx, *cfg, db, hub, runner) if err != nil { - log.Fatalf("failed to create controller: %+v", err) + log.Fatalf("failed to configure router: %+v", err) } - instanceMiddleware, err := auth.NewInstanceMiddleware(db, cfg.JWTAuth) - if err != nil { - log.Fatal(err) - } - - jwtMiddleware, err := auth.NewjwtMiddleware(db, cfg.JWTAuth) - if err != nil { - log.Fatal(err) - } - - initMiddleware, err := auth.NewInitRequiredMiddleware(db) - if err != nil { - log.Fatal(err) - } - - urlsRequiredMiddleware, err := auth.NewUrlsRequiredMiddleware(db) - if err != nil { - log.Fatal(err) - } - - metricsMiddleware, err := auth.NewMetricsMiddleware(cfg.JWTAuth) - if err != nil { - log.Fatal(err) - } - - router := routers.NewAPIRouter(controller, jwtMiddleware, initMiddleware, urlsRequiredMiddleware, instanceMiddleware, cfg.Default.EnableWebhookManagement) - - // start the metrics collector - if cfg.Metrics.Enable { - slog.InfoContext(ctx, "setting up metric routes") - router = routers.WithMetricsRouter(router, cfg.Metrics.DisableAuth, metricsMiddleware) - - slog.InfoContext(ctx, "register metrics") - if err := metrics.RegisterMetrics(); err != nil { - log.Fatal(err) - } - - slog.InfoContext(ctx, "start metrics collection") - runnerMetrics.CollectObjectMetric(ctx, runner, cfg.Metrics.Duration()) - } - - if cfg.Default.DebugServer { - runtime.SetBlockProfileRate(1) - runtime.SetMutexProfileFraction(1) - slog.InfoContext(ctx, "setting up debug routes") - router = routers.WithDebugServer(router) - } - - corsMw := mux.CORSMethodMiddleware(router) - router.Use(corsMw) - - allowedOrigins := handlers.AllowedOrigins(cfg.APIServer.CORSOrigins) - methodsOk := handlers.AllowedMethods([]string{"GET", "HEAD", "POST", "PUT", "OPTIONS", "DELETE"}) - headersOk := handlers.AllowedHeaders([]string{"X-Requested-With", "Content-Type", "Authorization"}) - // nolint:golangci-lint,gosec // G112: Potential Slowloris Attack because ReadHeaderTimeout is not configured in the http.Server srv := &http.Server{ Addr: cfg.APIServer.BindAddress(), // Pass our instance of gorilla/mux in. 
- Handler: handlers.CORS(methodsOk, headersOk, allowedOrigins)(router), + Handler: handler, } listener, err := net.Listen("tcp", srv.Addr) @@ -361,22 +404,16 @@ func main() { <-ctx.Done() - slog.InfoContext(ctx, "shutting down entity controller") - if err := entityController.Stop(); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop entity controller") - } - - slog.InfoContext(ctx, "shutting down provider worker") - if err := providerWorker.Stop(); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop provider worker") - } - shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 60*time.Second) defer shutdownCancel() if err := srv.Shutdown(shutdownCtx); err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "graceful api server shutdown failed") } + if err := stopWorkersFn(); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop workers") + } + slog.With(slog.Any("error", err)).InfoContext(ctx, "waiting for runner to stop") if err := runner.Wait(); err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to shutdown workers") diff --git a/workers/credentials/credentials.go b/workers/credentials/credentials.go new file mode 100644 index 00000000..7c590401 --- /dev/null +++ b/workers/credentials/credentials.go @@ -0,0 +1,133 @@ +package credentials + +import ( + "context" + "fmt" + "log/slog" + "sync" + + "github.com/cloudbase/garm/cache" + dbCommon "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/params" + garmUtil "github.com/cloudbase/garm/util" +) + +func NewWorker(ctx context.Context, store dbCommon.Store) (*Worker, error) { + consumerID := "credentials-worker" + + ctx = garmUtil.WithSlogContext( + ctx, + slog.Any("worker", consumerID)) + + return &Worker{ + ctx: ctx, + consumerID: consumerID, + store: store, + running: false, + quit: make(chan struct{}), + credentials: make(map[uint]params.GithubCredentials), + }, nil +} + +// Worker is responsible for maintaining the credentials cache. 
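Before the struct that follows, a minimal sketch of how this worker is meant to be driven, based on the wiring added to cmd/garm/main.go earlier in this patch; the helper name and error wrapping here are illustrative, not part of the change:

    // startCredentialsWorker creates and starts the worker and returns a stop
    // function for use at shutdown. Start() loads all credentials into the
    // cache and registers a watcher consumer for credential events; Stop()
    // closes the quit channel and ends the event loop.
    func startCredentialsWorker(ctx context.Context, store dbCommon.Store) (func(), error) {
        w, err := credentials.NewWorker(ctx, store)
        if err != nil {
            return nil, fmt.Errorf("creating credentials worker: %w", err)
        }
        if err := w.Start(); err != nil {
            return nil, fmt.Errorf("starting credentials worker: %w", err)
        }
        stop := func() {
            if err := w.Stop(); err != nil {
                slog.ErrorContext(ctx, "failed to stop credentials worker", "error", err)
            }
        }
        return stop, nil
    }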
+type Worker struct { + consumerID string + ctx context.Context + + consumer dbCommon.Consumer + store dbCommon.Store + + credentials map[uint]params.GithubCredentials + + running bool + quit chan struct{} + + mux sync.Mutex +} + +func (w *Worker) loadAllCredentials() error { + creds, err := w.store.ListGithubCredentials(w.ctx) + if err != nil { + return err + } + + for _, cred := range creds { + w.credentials[cred.ID] = cred + cache.SetGithubCredentials(cred) + } + + return nil +} + +func (w *Worker) Start() error { + w.mux.Lock() + defer w.mux.Unlock() + + if w.running { + return nil + } + slog.DebugContext(w.ctx, "starting credentials worker") + if err := w.loadAllCredentials(); err != nil { + return fmt.Errorf("loading credentials: %w", err) + } + + consumer, err := watcher.RegisterConsumer( + w.ctx, w.consumerID, + watcher.WithEntityTypeFilter(dbCommon.GithubCredentialsEntityType), + ) + if err != nil { + return fmt.Errorf("failed to create consumer for entity controller: %w", err) + } + w.consumer = consumer + + w.running = true + go w.loop() + return nil +} + +func (w *Worker) Stop() error { + w.mux.Lock() + defer w.mux.Unlock() + + if !w.running { + return nil + } + + close(w.quit) + w.running = false + + return nil +} + +func (w *Worker) loop() { + defer w.Stop() + + for { + select { + case <-w.quit: + return + case event, ok := <-w.consumer.Watch(): + if !ok { + slog.ErrorContext(w.ctx, "consumer channel closed") + return + } + creds, ok := event.Payload.(params.GithubCredentials) + if !ok { + slog.ErrorContext(w.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + continue + } + w.mux.Lock() + switch event.Operation { + case dbCommon.DeleteOperation: + slog.DebugContext(w.ctx, "got delete operation") + delete(w.credentials, creds.ID) + cache.DeleteGithubCredentials(creds.ID) + default: + w.credentials[creds.ID] = creds + cache.SetGithubCredentials(creds) + } + w.mux.Unlock() + } + } +} diff --git a/workers/entity/controller.go b/workers/entity/controller.go index 41708ec2..066bdfe3 100644 --- a/workers/entity/controller.go +++ b/workers/entity/controller.go @@ -7,6 +7,7 @@ import ( "sync" "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/runner/common" @@ -66,6 +67,9 @@ func (c *Controller) loadAllRepositories() error { return fmt.Errorf("starting worker: %w", err) } c.Entities[entity.ID] = worker + // take advantage of the fact that we're loading all entities + // and set the cache. + cache.SetEntity(entity) } return nil } @@ -90,6 +94,9 @@ func (c *Controller) loadAllOrganizations() error { return fmt.Errorf("starting worker: %w", err) } c.Entities[entity.ID] = worker + // take advantage of the fact that we're loading all entities + // and set the cache. + cache.SetEntity(entity) } return nil } @@ -114,6 +121,9 @@ func (c *Controller) loadAllEnterprises() error { return fmt.Errorf("starting worker: %w", err) } c.Entities[entity.ID] = worker + // take advantage of the fact that we're loading all entities + // and set the cache. 
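The cache populated in these loadAll helpers is read back through cache.GetEntity, defined earlier in this patch. Worth noting, since it is easy to miss: reads re-resolve credentials through the credentials cache, so rotating a secret does not require rewriting every cached entity. A small sketch of that read path, assuming ctx and entity are already in scope; the log line is illustrative:

    // GetEntity returns a copy of the cached entity whose Credentials field has
    // been refreshed from the credentials cache, so updated credentials are
    // picked up without the entity entry itself changing.
    if item, ok := cache.GetEntity(entity); ok {
        slog.InfoContext(ctx, "entity resolved from cache",
            "entity_id", item.Entity.ID,
            "credentials_id", item.Entity.Credentials.ID)
    }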
+ cache.SetEntity(entity) } return nil } @@ -126,14 +136,14 @@ func (c *Controller) Start() error { } c.mux.Unlock() - if err := c.loadAllRepositories(); err != nil { - return fmt.Errorf("loading repositories: %w", err) + if err := c.loadAllEnterprises(); err != nil { + return fmt.Errorf("loading enterprises: %w", err) } if err := c.loadAllOrganizations(); err != nil { return fmt.Errorf("loading organizations: %w", err) } - if err := c.loadAllEnterprises(); err != nil { - return fmt.Errorf("loading enterprises: %w", err) + if err := c.loadAllRepositories(); err != nil { + return fmt.Errorf("loading repositories: %w", err) } consumer, err := watcher.RegisterConsumer( diff --git a/workers/entity/controller_watcher.go b/workers/entity/controller_watcher.go index ace63702..dcd6ee9a 100644 --- a/workers/entity/controller_watcher.go +++ b/workers/entity/controller_watcher.go @@ -3,6 +3,7 @@ package entity import ( "log/slog" + "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" ) @@ -95,4 +96,5 @@ func (c *Controller) handleWatcherDeleteOperation(entityGetter params.EntityGett return } delete(c.Entities, entity.ID) + cache.DeleteEntity(entity.ID) } diff --git a/workers/scaleset/controller_watcher.go b/workers/scaleset/controller_watcher.go index 04cfe1cd..131cb56c 100644 --- a/workers/scaleset/controller_watcher.go +++ b/workers/scaleset/controller_watcher.go @@ -4,6 +4,7 @@ import ( "fmt" "log/slog" + "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" @@ -63,6 +64,7 @@ func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet, ghCli c if _, ok := c.ScaleSets[sSet.ID]; ok { slog.DebugContext(c.ctx, "scale set already exists in worker list", "scale_set_id", sSet.ID) + cache.SetEntityScaleSet(c.Entity.ID, sSet) return nil } @@ -88,9 +90,9 @@ func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet, ghCli c } c.ScaleSets[sSet.ID] = &scaleSet{ scaleSet: sSet, - // status: scaleSetStatus{}, - worker: worker, + worker: worker, } + cache.SetEntityScaleSet(c.Entity.ID, sSet) return nil } @@ -109,6 +111,7 @@ func (c *Controller) handleScaleSetDeleteOperation(sSet params.ScaleSet) error { return fmt.Errorf("stopping scale set worker: %w", err) } delete(c.ScaleSets, sSet.ID) + cache.DeleteEntityScaleSet(c.Entity.ID, sSet.ID) return nil } @@ -116,12 +119,16 @@ func (c *Controller) handleScaleSetUpdateOperation(sSet params.ScaleSet) error { c.mux.Lock() defer c.mux.Unlock() - if _, ok := c.ScaleSets[sSet.ID]; !ok { + set, ok := c.ScaleSets[sSet.ID] + if !ok { // Some error may have occurred when the scale set was first created, so we // attempt to create it after the user updated the scale set, hopefully // fixing the reason for the failure. return c.handleScaleSetCreateOperation(sSet, c.ghCli) } + set.scaleSet = sSet + c.ScaleSets[sSet.ID] = set + cache.SetEntityScaleSet(c.Entity.ID, sSet) // We let the watcher in the scale set worker handle the update operation. return nil } @@ -139,6 +146,7 @@ func (c *Controller) handleCredentialsEvent(event dbCommon.ChangePayload) { c.mux.Lock() defer c.mux.Unlock() + cache.SetGithubCredentials(credentials) if c.Entity.Credentials.ID != credentials.ID { // stale update event. 
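Handlers like the one above are fed by the database watcher. For reference, a consumer that would receive exactly these credential events looks roughly like the registration already used by the credentials worker earlier in this patch; the consumer ID and function name here are illustrative:

    // watchCredentialEvents registers a filtered consumer and logs each
    // credential create/update/delete event until the channel is closed.
    func watchCredentialEvents(ctx context.Context) error {
        consumer, err := watcher.RegisterConsumer(
            ctx, "credentials-example",
            watcher.WithEntityTypeFilter(dbCommon.GithubCredentialsEntityType),
        )
        if err != nil {
            return err
        }
        defer consumer.Close()
        for event := range consumer.Watch() {
            // Each event is a dbCommon.ChangePayload carrying the Operation
            // and the typed Payload.
            slog.InfoContext(ctx, "credential event", "operation", event.Operation)
        }
        return nil
    }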
return @@ -177,6 +185,7 @@ func (c *Controller) handleEntityEvent(event dbCommon.ChangePayload) { } } c.Entity = entity + cache.SetEntity(c.Entity) default: slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation) return From 9f640965e2187c39fa962892b39d1c1b2755cf60 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 5 May 2025 21:12:20 +0000 Subject: [PATCH 042/179] revert main Signed-off-by: Gabriel Adrian Samfira --- cmd/garm/main.go | 236 +++++++++++++++++++++-------------------------- 1 file changed, 106 insertions(+), 130 deletions(-) diff --git a/cmd/garm/main.go b/cmd/garm/main.go index 15ba7069..958ea001 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -181,127 +181,7 @@ func maybeUpdateURLsFromConfig(cfg config.Config, store common.Store) error { return nil } -func configureRouter(ctx context.Context, cfg config.Config, db common.Store, hub *websocket.Hub, runner *runner.Runner) (http.Handler, error) { - authenticator := auth.NewAuthenticator(cfg.JWTAuth, db) - controller, err := controllers.NewAPIController(runner, authenticator, hub) - if err != nil { - return nil, fmt.Errorf("creating controller: %w", err) - } - - instanceMiddleware, err := auth.NewInstanceMiddleware(db, cfg.JWTAuth) - if err != nil { - return nil, fmt.Errorf("creating instance middleware: %w", err) - } - - jwtMiddleware, err := auth.NewjwtMiddleware(db, cfg.JWTAuth) - if err != nil { - return nil, fmt.Errorf("creating jwt middleware: %w", err) - } - - initMiddleware, err := auth.NewInitRequiredMiddleware(db) - if err != nil { - return nil, fmt.Errorf("creating init required middleware: %w", err) - } - - urlsRequiredMiddleware, err := auth.NewUrlsRequiredMiddleware(db) - if err != nil { - return nil, fmt.Errorf("creating urls required middleware: %w", err) - } - - metricsMiddleware, err := auth.NewMetricsMiddleware(cfg.JWTAuth) - if err != nil { - return nil, fmt.Errorf("creating metrics middleware: %w", err) - } - - router := routers.NewAPIRouter(controller, jwtMiddleware, initMiddleware, urlsRequiredMiddleware, instanceMiddleware, cfg.Default.EnableWebhookManagement) - - // start the metrics collector - if cfg.Metrics.Enable { - slog.InfoContext(ctx, "setting up metric routes") - router = routers.WithMetricsRouter(router, cfg.Metrics.DisableAuth, metricsMiddleware) - - slog.InfoContext(ctx, "register metrics") - if err := metrics.RegisterMetrics(); err != nil { - return nil, fmt.Errorf("registering metrics: %w", err) - } - - slog.InfoContext(ctx, "start metrics collection") - runnerMetrics.CollectObjectMetric(ctx, runner, cfg.Metrics.Duration()) - } - - if cfg.Default.DebugServer { - runtime.SetBlockProfileRate(1) - runtime.SetMutexProfileFraction(1) - slog.InfoContext(ctx, "setting up debug routes") - router = routers.WithDebugServer(router) - } - - corsMw := mux.CORSMethodMiddleware(router) - router.Use(corsMw) - - allowedOrigins := handlers.AllowedOrigins(cfg.APIServer.CORSOrigins) - methodsOk := handlers.AllowedMethods([]string{"GET", "HEAD", "POST", "PUT", "OPTIONS", "DELETE"}) - headersOk := handlers.AllowedHeaders([]string{"X-Requested-With", "Content-Type", "Authorization"}) - - handler := handlers.CORS(methodsOk, headersOk, allowedOrigins)(router) - return handler, nil -} - -func startWorkers(ctx context.Context, cfg config.Config, db common.Store, controllerID string) (func() error, error) { - credsWorker, err := credentials.NewWorker(ctx, db) - if err != nil { - return nil, fmt.Errorf("failed to create credentials worker: %+v", err) - } - - if err 
:= credsWorker.Start(); err != nil { - return nil, fmt.Errorf("failed to start credentials worker: %+v", err) - } - - providers, err := providers.LoadProvidersFromConfig(ctx, cfg, controllerID) - if err != nil { - return nil, fmt.Errorf("loading providers: %+v", err) - } - - entityController, err := entity.NewController(ctx, db, providers) - if err != nil { - return nil, fmt.Errorf("failed to create entity controller: %+v", err) - } - if err := entityController.Start(); err != nil { - return nil, fmt.Errorf("failed to start entity controller: %+v", err) - } - - instanceTokenGetter, err := auth.NewInstanceTokenGetter(cfg.JWTAuth.Secret) - if err != nil { - return nil, fmt.Errorf("failed to create instance token getter: %+v", err) - } - - providerWorker, err := provider.NewWorker(ctx, db, providers, instanceTokenGetter) - if err != nil { - return nil, fmt.Errorf("failed to create provider worker: %+v", err) - } - if err := providerWorker.Start(); err != nil { - return nil, fmt.Errorf("failed to start provider worker: %+v", err) - } - - return func() error { - slog.InfoContext(ctx, "shutting down credentials worker") - if err := credsWorker.Stop(); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop credentials worker") - } - - slog.InfoContext(ctx, "shutting down entity controller") - if err := entityController.Stop(); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop entity controller") - } - - slog.InfoContext(ctx, "shutting down provider worker") - if err := providerWorker.Stop(); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop provider worker") - } - return nil - }, nil -} - +//gocyclo:ignore func main() { flag.Parse() if *version { @@ -313,6 +193,7 @@ func main() { watcher.InitWatcher(ctx) ctx = auth.GetAdminContext(ctx) + cfg, err := config.NewConfig(*conf) if err != nil { log.Fatalf("Fetching config: %+v", err) //nolint:gocritic @@ -357,9 +238,38 @@ func main() { log.Fatal(err) } - stopWorkersFn, err := startWorkers(ctx, *cfg, db, controllerInfo.ControllerID.String()) + credsWorker, err := credentials.NewWorker(ctx, db) if err != nil { - log.Fatalf("failed to start workers: %+v", err) + log.Fatalf("failed to create credentials worker: %+v", err) + } + if err := credsWorker.Start(); err != nil { + log.Fatalf("failed to start credentials worker: %+v", err) + } + + providers, err := providers.LoadProvidersFromConfig(ctx, *cfg, controllerInfo.ControllerID.String()) + if err != nil { + log.Fatalf("loading providers: %+v", err) + } + + entityController, err := entity.NewController(ctx, db, providers) + if err != nil { + log.Fatalf("failed to create entity controller: %+v", err) + } + if err := entityController.Start(); err != nil { + log.Fatalf("failed to start entity controller: %+v", err) + } + + instanceTokenGetter, err := auth.NewInstanceTokenGetter(cfg.JWTAuth.Secret) + if err != nil { + log.Fatalf("failed to create instance token getter: %+v", err) + } + + providerWorker, err := provider.NewWorker(ctx, db, providers, instanceTokenGetter) + if err != nil { + log.Fatalf("failed to create provider worker: %+v", err) + } + if err := providerWorker.Start(); err != nil { + log.Fatalf("failed to start provider worker: %+v", err) } runner, err := runner.NewRunner(ctx, *cfg, db) @@ -372,17 +282,73 @@ func main() { log.Fatal(err) } - handler, err := configureRouter(ctx, *cfg, db, hub, runner) + authenticator := auth.NewAuthenticator(cfg.JWTAuth, db) + controller, err := 
controllers.NewAPIController(runner, authenticator, hub) if err != nil { - log.Fatalf("failed to configure router: %+v", err) + log.Fatalf("failed to create controller: %+v", err) } + instanceMiddleware, err := auth.NewInstanceMiddleware(db, cfg.JWTAuth) + if err != nil { + log.Fatal(err) + } + + jwtMiddleware, err := auth.NewjwtMiddleware(db, cfg.JWTAuth) + if err != nil { + log.Fatal(err) + } + + initMiddleware, err := auth.NewInitRequiredMiddleware(db) + if err != nil { + log.Fatal(err) + } + + urlsRequiredMiddleware, err := auth.NewUrlsRequiredMiddleware(db) + if err != nil { + log.Fatal(err) + } + + metricsMiddleware, err := auth.NewMetricsMiddleware(cfg.JWTAuth) + if err != nil { + log.Fatal(err) + } + + router := routers.NewAPIRouter(controller, jwtMiddleware, initMiddleware, urlsRequiredMiddleware, instanceMiddleware, cfg.Default.EnableWebhookManagement) + + // start the metrics collector + if cfg.Metrics.Enable { + slog.InfoContext(ctx, "setting up metric routes") + router = routers.WithMetricsRouter(router, cfg.Metrics.DisableAuth, metricsMiddleware) + + slog.InfoContext(ctx, "register metrics") + if err := metrics.RegisterMetrics(); err != nil { + log.Fatal(err) + } + + slog.InfoContext(ctx, "start metrics collection") + runnerMetrics.CollectObjectMetric(ctx, runner, cfg.Metrics.Duration()) + } + + if cfg.Default.DebugServer { + runtime.SetBlockProfileRate(1) + runtime.SetMutexProfileFraction(1) + slog.InfoContext(ctx, "setting up debug routes") + router = routers.WithDebugServer(router) + } + + corsMw := mux.CORSMethodMiddleware(router) + router.Use(corsMw) + + allowedOrigins := handlers.AllowedOrigins(cfg.APIServer.CORSOrigins) + methodsOk := handlers.AllowedMethods([]string{"GET", "HEAD", "POST", "PUT", "OPTIONS", "DELETE"}) + headersOk := handlers.AllowedHeaders([]string{"X-Requested-With", "Content-Type", "Authorization"}) + // nolint:golangci-lint,gosec // G112: Potential Slowloris Attack because ReadHeaderTimeout is not configured in the http.Server srv := &http.Server{ Addr: cfg.APIServer.BindAddress(), // Pass our instance of gorilla/mux in. 
- Handler: handler, + Handler: handlers.CORS(methodsOk, headersOk, allowedOrigins)(router), } listener, err := net.Listen("tcp", srv.Addr) @@ -404,16 +370,26 @@ func main() { <-ctx.Done() + if err := credsWorker.Stop(); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop credentials worker") + } + + slog.InfoContext(ctx, "shutting down entity controller") + if err := entityController.Stop(); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop entity controller") + } + + slog.InfoContext(ctx, "shutting down provider worker") + if err := providerWorker.Stop(); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop provider worker") + } + shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 60*time.Second) defer shutdownCancel() if err := srv.Shutdown(shutdownCtx); err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "graceful api server shutdown failed") } - if err := stopWorkersFn(); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop workers") - } - slog.With(slog.Any("error", err)).InfoContext(ctx, "waiting for runner to stop") if err := runner.Wait(); err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to shutdown workers") From 2f3c74562eee95a12f61659c5aefcac72e0df5b9 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 5 May 2025 23:34:53 +0000 Subject: [PATCH 043/179] Add instance cache Signed-off-by: Gabriel Adrian Samfira --- cache/entity_cache.go | 10 +++ cache/instance_cache.go | 107 +++++++++++++++++++++++++++ runner/pool/pool.go | 6 -- workers/provider/instance_manager.go | 69 +++++++++-------- workers/provider/provider.go | 18 ++++- workers/provider/util.go | 7 +- 6 files changed, 171 insertions(+), 46 deletions(-) create mode 100644 cache/instance_cache.go diff --git a/cache/entity_cache.go b/cache/entity_cache.go index 920b9a9b..d69d7099 100644 --- a/cache/entity_cache.go +++ b/cache/entity_cache.go @@ -49,6 +49,16 @@ func (e *EntityCache) SetEntity(entity params.GithubEntity) { e.mux.Lock() defer e.mux.Unlock() + _, ok := e.entities[entity.ID] + if !ok { + e.entities[entity.ID] = EntityItem{ + Entity: entity, + Pools: make(map[string]params.Pool), + ScaleSets: make(map[uint]params.ScaleSet), + } + return + } + e.entities[entity.ID] = EntityItem{ Entity: entity, } diff --git a/cache/instance_cache.go b/cache/instance_cache.go new file mode 100644 index 00000000..88074765 --- /dev/null +++ b/cache/instance_cache.go @@ -0,0 +1,107 @@ +package cache + +import ( + "sync" + + "github.com/cloudbase/garm/params" +) + +var instanceCache *InstanceCache + +func init() { + cache := &InstanceCache{ + cache: make(map[string]params.Instance), + } + instanceCache = cache +} + +type InstanceCache struct { + mux sync.Mutex + + cache map[string]params.Instance +} + +func (i *InstanceCache) SetInstance(instance params.Instance) { + i.mux.Lock() + defer i.mux.Unlock() + + i.cache[instance.ID] = instance +} + +func (i *InstanceCache) GetInstance(id string) (params.Instance, bool) { + i.mux.Lock() + defer i.mux.Unlock() + + if instance, ok := i.cache[id]; ok { + return instance, true + } + return params.Instance{}, false +} + +func (i *InstanceCache) DeleteInstance(id string) { + i.mux.Lock() + defer i.mux.Unlock() + + delete(i.cache, id) +} + +func (i *InstanceCache) GetAllInstances() []params.Instance { + i.mux.Lock() + defer i.mux.Unlock() + + instances := make([]params.Instance, 0, len(i.cache)) + for _, 
instance := range i.cache { + instances = append(instances, instance) + } + return instances +} + +func (i *InstanceCache) GetInstancesForPool(poolID string) []params.Instance { + i.mux.Lock() + defer i.mux.Unlock() + + var filteredInstances []params.Instance + for _, instance := range i.cache { + if instance.PoolID == poolID { + filteredInstances = append(filteredInstances, instance) + } + } + return filteredInstances +} + +func (i *InstanceCache) GetInstancesForScaleSet(scaleSetID uint) []params.Instance { + i.mux.Lock() + defer i.mux.Unlock() + + var filteredInstances []params.Instance + for _, instance := range i.cache { + if instance.ScaleSetID == scaleSetID { + filteredInstances = append(filteredInstances, instance) + } + } + return filteredInstances +} + +func SetInstanceCache(instance params.Instance) { + instanceCache.SetInstance(instance) +} + +func GetInstanceCache(id string) (params.Instance, bool) { + return instanceCache.GetInstance(id) +} + +func DeleteInstanceCache(id string) { + instanceCache.DeleteInstance(id) +} + +func GetAllInstancesCache() []params.Instance { + return instanceCache.GetAllInstances() +} + +func GetInstancesForPool(poolID string) []params.Instance { + return instanceCache.GetInstancesForPool(poolID) +} + +func GetInstancesForScaleSet(scaleSetID uint) []params.Instance { + return instanceCache.GetInstancesForScaleSet(scaleSetID) +} diff --git a/runner/pool/pool.go b/runner/pool/pool.go index f5f9a13b..f1134de8 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -226,12 +226,6 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { case "completed": jobParams, err = r.paramsWorkflowJobToParamsJob(job) if err != nil { - if errors.Is(err, runnerErrors.ErrNotFound) { - // Unassigned jobs will have an empty runner_name. - // We also need to ignore not found errors, as we may get a webhook regarding - // a workflow that is handled by a runner at a different hierarchy level. 
- return nil - } return errors.Wrap(err, "converting job to params") } diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go index 95d29f69..dfcd1cb5 100644 --- a/workers/provider/instance_manager.go +++ b/workers/provider/instance_manager.go @@ -18,7 +18,7 @@ import ( ) func newInstanceManager(ctx context.Context, instance params.Instance, scaleSet params.ScaleSet, provider common.Provider, helper providerHelper) (*instanceManager, error) { - ctx = garmUtil.WithSlogContext(ctx, slog.Any("instance", instance.Name)) + ctx = garmUtil.WithSlogContext(ctx, slog.Any("worker", fmt.Sprintf("instance-worker-%s", instance.Name))) githubEntity, err := scaleSet.GetEntity() if err != nil { @@ -66,25 +66,17 @@ func (i *instanceManager) Start() error { i.mux.Lock() defer i.mux.Unlock() + slog.DebugContext(i.ctx, "starting instance manager", "instance", i.instance.Name) if i.running { return nil } - // switch i.instance.Status { - // case commonParams.InstancePendingCreate, - // commonParams.InstancePendingDelete, - // commonParams.InstancePendingForceDelete: - // if err := i.consolidateState(); err != nil { - // return fmt.Errorf("consolidating state: %w", err) - // } - // case commonParams.InstanceDeleted: - // return ErrInstanceDeleted - // } i.running = true i.quit = make(chan struct{}) i.updates = make(chan dbCommon.ChangePayload) go i.loop() + go i.updatesLoop() return nil } @@ -106,6 +98,7 @@ func (i *instanceManager) sleepForBackOffOrCanceled() bool { timer := time.NewTimer(i.deleteBackoff) defer timer.Stop() + slog.DebugContext(i.ctx, "sleeping for backoff", "duration", i.deleteBackoff, "instance", i.instance.Name) select { case <-timer.C: return false @@ -274,6 +267,7 @@ func (i *instanceManager) handleDeleteInstanceInProvider(instance params.Instanc func (i *instanceManager) consolidateState() error { i.mux.Lock() defer i.mux.Unlock() + if !i.running { return nil } @@ -347,9 +341,6 @@ func (i *instanceManager) handleUpdate(update dbCommon.ChangePayload) error { // We need a better way to handle instance state. Database updates may fail, and we // end up with an inconsistent state between what we know about the instance and what // is reflected in the database. - i.mux.Lock() - defer i.mux.Unlock() - if !i.running { return nil } @@ -359,25 +350,23 @@ func (i *instanceManager) handleUpdate(update dbCommon.ChangePayload) error { return runnerErrors.NewBadRequestError("invalid payload type") } - i.instance = instance - if i.instance.Status == instance.Status { - // Nothing of interest happened. 
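For context on the surrounding changes in this file: Start() now launches a second goroutine dedicated to draining the updates channel, so Update() callers no longer wait on the periodic consolidation loop. A condensed sketch of the split, using only the names visible in this diff:

    // Both goroutines are started from Start(): one periodically consolidates
    // provider state, the other only drains update events and applies them via
    // handleUpdate(), exiting once the instance is reported deleted.
    go i.loop()        // ticker driven consolidateState()
    go i.updatesLoop() // receives from i.updates and calls handleUpdate()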
+ switch instance.Status { + case commonParams.InstanceDeleting, commonParams.InstanceCreating: return nil } + i.instance = instance return nil } func (i *instanceManager) Update(instance dbCommon.ChangePayload) error { - i.mux.Lock() - defer i.mux.Unlock() - if !i.running { return runnerErrors.NewBadRequestError("instance manager is not running") } - timer := time.NewTimer(60 * time.Second) + timer := time.NewTimer(10 * time.Second) defer timer.Stop() + slog.DebugContext(i.ctx, "sending update to instance manager") select { case i.updates <- instance: case <-i.quit: @@ -390,6 +379,33 @@ func (i *instanceManager) Update(instance dbCommon.ChangePayload) error { return nil } +func (i *instanceManager) updatesLoop() { + defer i.Stop() + ticker := time.NewTicker(5 * time.Second) + defer ticker.Stop() + for { + select { + case <-i.quit: + return + case <-i.ctx.Done(): + return + case update, ok := <-i.updates: + if !ok { + slog.InfoContext(i.ctx, "updates channel closed") + return + } + slog.DebugContext(i.ctx, "received update") + if err := i.handleUpdate(update); err != nil { + if errors.Is(err, ErrInstanceDeleted) { + // instance had been deleted, we can exit the loop. + return + } + slog.ErrorContext(i.ctx, "handling update", "error", err) + } + } + } +} + func (i *instanceManager) loop() { defer i.Stop() ticker := time.NewTicker(5 * time.Second) @@ -408,17 +424,6 @@ func (i *instanceManager) loop() { } slog.ErrorContext(i.ctx, "consolidating state", "error", err) } - case update, ok := <-i.updates: - if !ok { - return - } - if err := i.handleUpdate(update); err != nil { - if errors.Is(err, ErrInstanceDeleted) { - // instance had been deleted, we can exit the loop. - return - } - slog.ErrorContext(i.ctx, "handling update", "error", err) - } } } } diff --git a/workers/provider/provider.go b/workers/provider/provider.go index ba95d733..3a7447f6 100644 --- a/workers/provider/provider.go +++ b/workers/provider/provider.go @@ -8,16 +8,23 @@ import ( commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" + garmUtil "github.com/cloudbase/garm/util" ) func NewWorker(ctx context.Context, store dbCommon.Store, providers map[string]common.Provider, tokenGetter auth.InstanceTokenGetter) (*Provider, error) { consumerID := "provider-worker" + + ctx = garmUtil.WithSlogContext( + ctx, + slog.Any("worker", consumerID)) + return &Provider{ - ctx: context.Background(), + ctx: ctx, store: store, consumerID: consumerID, providers: providers, @@ -74,6 +81,7 @@ func (p *Provider) loadAllRunners() error { } for _, runner := range runners { + cache.SetInstanceCache(runner) // Skip non scale set instances for now. This condition needs to be // removed once we replace the current pool manager. 
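Every runner is mirrored into the instance cache just above, regardless of the scale set check that follows. A small sketch of the kind of lookup this enables for other workers, using only helpers added in this patch; poolID and the log line are illustrative:

    // Query side of the cache: list the runners of one pool without a database
    // round trip. GetInstancesForScaleSet offers the same for scale sets.
    instances := cache.GetInstancesForPool(poolID)
    slog.InfoContext(ctx, "cached instances for pool",
        "pool_id", poolID,
        "count", len(instances))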
if runner.ScaleSetID == 0 { @@ -246,29 +254,34 @@ func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) { return } + slog.DebugContext(p.ctx, "handling instance event", "instance_name", instance.Name) switch event.Operation { case dbCommon.CreateOperation: + cache.SetInstanceCache(instance) slog.DebugContext(p.ctx, "got create operation") if err := p.handleInstanceAdded(instance); err != nil { slog.ErrorContext(p.ctx, "failed to handle instance added", "error", err) return } case dbCommon.UpdateOperation: + cache.SetInstanceCache(instance) slog.DebugContext(p.ctx, "got update operation") existingInstance, ok := p.runners[instance.Name] if !ok { + slog.DebugContext(p.ctx, "instance not found, creating new instance", "instance_name", instance.Name) if err := p.handleInstanceAdded(instance); err != nil { slog.ErrorContext(p.ctx, "failed to handle instance added", "error", err) return } } else { + slog.DebugContext(p.ctx, "updating instance", "instance_name", instance.Name) if err := existingInstance.Update(event); err != nil { slog.ErrorContext(p.ctx, "failed to update instance", "error", err) return } } case dbCommon.DeleteOperation: - slog.DebugContext(p.ctx, "got delete operation") + slog.DebugContext(p.ctx, "got delete operation", "instance_name", instance.Name) existingInstance, ok := p.runners[instance.Name] if ok { if err := existingInstance.Stop(); err != nil { @@ -277,6 +290,7 @@ func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) { } } delete(p.runners, instance.Name) + cache.DeleteInstanceCache(instance.ID) default: slog.ErrorContext(p.ctx, "invalid operation type", "operation_type", event.Operation) return diff --git a/workers/provider/util.go b/workers/provider/util.go index 7e6395ff..8cd33525 100644 --- a/workers/provider/util.go +++ b/workers/provider/util.go @@ -1,18 +1,13 @@ package provider import ( - commonParams "github.com/cloudbase/garm-provider-common/params" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" ) func composeProviderWatcher() dbCommon.PayloadFilterFunc { return watcher.WithAny( - watcher.WithInstanceStatusFilter( - commonParams.InstancePendingCreate, - commonParams.InstancePendingDelete, - commonParams.InstancePendingForceDelete, - ), + watcher.WithEntityTypeFilter(dbCommon.InstanceEntityType), watcher.WithEntityTypeFilter(dbCommon.ScaleSetEntityType), ) } From 0e1fa0018b9af0e8c02545c5d133fe36723a5075 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 6 May 2025 17:50:12 +0000 Subject: [PATCH 044/179] Add some more caching, record scaleset jobs Signed-off-by: Gabriel Adrian Samfira --- cache/entity_cache.go | 57 ++++++++++++++++++-- cache/instance_cache.go | 18 +++---- cmd/garm-cli/cmd/organization.go | 2 - params/github.go | 35 ++++++++++++- params/params.go | 11 ++++ runner/pool/pool.go | 72 +++++++++++++------------- runner/pool/util.go | 2 + runner/pool/watcher.go | 20 +++++++ runner/scalesets.go | 16 +++--- workers/provider/instance_manager.go | 4 -- workers/provider/provider.go | 12 ++++- workers/scaleset/controller.go | 2 + workers/scaleset/controller_watcher.go | 6 +-- workers/scaleset/interfaces.go | 1 + workers/scaleset/scaleset_helper.go | 51 ++++++++++++++++++ workers/scaleset/scaleset_listener.go | 5 ++ 16 files changed, 244 insertions(+), 70 deletions(-) diff --git a/cache/entity_cache.go b/cache/entity_cache.go index d69d7099..08e218df 100644 --- a/cache/entity_cache.go +++ b/cache/entity_cache.go @@ -1,6 +1,7 @@ package cache import ( + "log/slog" 
"sync" "github.com/cloudbase/garm/params" @@ -64,14 +65,21 @@ func (e *EntityCache) SetEntity(entity params.GithubEntity) { } } -func (e *EntityCache) ReplaceEntityPools(entityID string, pools map[string]params.Pool) { +func (e *EntityCache) ReplaceEntityPools(entityID string, pools []params.Pool) { e.mux.Lock() defer e.mux.Unlock() - if cache, ok := e.entities[entityID]; ok { - cache.Pools = pools - e.entities[entityID] = cache + cache, ok := e.entities[entityID] + if !ok { + return } + + poolsByID := map[string]params.Pool{} + for _, pool := range pools { + poolsByID[pool.ID] = pool + } + cache.Pools = poolsByID + e.entities[entityID] = cache } func (e *EntityCache) ReplaceEntityScaleSets(entityID string, scaleSets map[uint]params.ScaleSet) { @@ -154,6 +162,37 @@ func (e *EntityCache) GetEntityScaleSet(entityID string, scaleSetID uint) (param return params.ScaleSet{}, false } +func (e *EntityCache) FindPoolsMatchingAllTags(entityID string, tags []string) []params.Pool { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entityID]; ok { + var pools []params.Pool + slog.Debug("Finding pools matching all tags", "entityID", entityID, "tags", tags, "pools", cache.Pools) + for _, pool := range cache.Pools { + if pool.HasRequiredLabels(tags) { + pools = append(pools, pool) + } + } + return pools + } + return nil +} + +func (e *EntityCache) GetEntityPools(entityID string) []params.Pool { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entityID]; ok { + var pools []params.Pool + for _, pool := range cache.Pools { + pools = append(pools, pool) + } + return pools + } + return nil +} + func GetEntity(entity params.GithubEntity) (EntityItem, bool) { return entityCache.GetEntity(entity) } @@ -162,7 +201,7 @@ func SetEntity(entity params.GithubEntity) { entityCache.SetEntity(entity) } -func ReplaceEntityPools(entityID string, pools map[string]params.Pool) { +func ReplaceEntityPools(entityID string, pools []params.Pool) { entityCache.ReplaceEntityPools(entityID, pools) } @@ -197,3 +236,11 @@ func GetEntityPool(entityID string, poolID string) (params.Pool, bool) { func GetEntityScaleSet(entityID string, scaleSetID uint) (params.ScaleSet, bool) { return entityCache.GetEntityScaleSet(entityID, scaleSetID) } + +func FindPoolsMatchingAllTags(entityID string, tags []string) []params.Pool { + return entityCache.FindPoolsMatchingAllTags(entityID, tags) +} + +func GetEntityPools(entityID string) []params.Pool { + return entityCache.GetEntityPools(entityID) +} diff --git a/cache/instance_cache.go b/cache/instance_cache.go index 88074765..44f95ec2 100644 --- a/cache/instance_cache.go +++ b/cache/instance_cache.go @@ -25,24 +25,24 @@ func (i *InstanceCache) SetInstance(instance params.Instance) { i.mux.Lock() defer i.mux.Unlock() - i.cache[instance.ID] = instance + i.cache[instance.Name] = instance } -func (i *InstanceCache) GetInstance(id string) (params.Instance, bool) { +func (i *InstanceCache) GetInstance(name string) (params.Instance, bool) { i.mux.Lock() defer i.mux.Unlock() - if instance, ok := i.cache[id]; ok { + if instance, ok := i.cache[name]; ok { return instance, true } return params.Instance{}, false } -func (i *InstanceCache) DeleteInstance(id string) { +func (i *InstanceCache) DeleteInstance(name string) { i.mux.Lock() defer i.mux.Unlock() - delete(i.cache, id) + delete(i.cache, name) } func (i *InstanceCache) GetAllInstances() []params.Instance { @@ -86,12 +86,12 @@ func SetInstanceCache(instance params.Instance) { instanceCache.SetInstance(instance) } -func 
GetInstanceCache(id string) (params.Instance, bool) { - return instanceCache.GetInstance(id) +func GetInstanceCache(name string) (params.Instance, bool) { + return instanceCache.GetInstance(name) } -func DeleteInstanceCache(id string) { - instanceCache.DeleteInstance(id) +func DeleteInstanceCache(name string) { + instanceCache.DeleteInstance(name) } func GetAllInstancesCache() []params.Instance { diff --git a/cmd/garm-cli/cmd/organization.go b/cmd/garm-cli/cmd/organization.go index c7be1f19..c7b80fec 100644 --- a/cmd/garm-cli/cmd/organization.go +++ b/cmd/garm-cli/cmd/organization.go @@ -379,8 +379,6 @@ func formatOneOrganization(org params.Organization) { t.AppendRow(table.Row{"Endpoint", org.Endpoint.Name}) t.AppendRow(table.Row{"Pool balancer type", org.GetBalancerType()}) t.AppendRow(table.Row{"Credentials", org.CredentialsName}) - t.AppendRow(table.Row{"Created at", org.CreatedAt}) - t.AppendRow(table.Row{"Updated at", org.UpdatedAt}) t.AppendRow(table.Row{"Pool manager running", org.PoolManagerStatus.IsRunning}) if !org.PoolManagerStatus.IsRunning { t.AppendRow(table.Row{"Failure reason", org.PoolManagerStatus.FailureReason}) diff --git a/params/github.go b/params/github.go index 888288fc..0f963090 100644 --- a/params/github.go +++ b/params/github.go @@ -242,7 +242,7 @@ type RunnerScaleSetStatistic struct { type RunnerScaleSet struct { ID int `json:"id,omitempty"` Name string `json:"name,omitempty"` - RunnerGroupID int `json:"runnerGroupId,omitempty"` + RunnerGroupID int64 `json:"runnerGroupId,omitempty"` RunnerGroupName string `json:"runnerGroupName,omitempty"` Labels []Label `json:"labels,omitempty"` RunnerSetting RunnerSetting `json:"RunnerSetting,omitempty"` @@ -523,7 +523,38 @@ type ScaleSetJobMessage struct { RunnerAssignTime time.Time `json:"runnerAssignTime,omitempty"` FinishTime time.Time `json:"finishTime,omitempty"` Result string `json:"result,omitempty"` - RunnerID int `json:"runnerId,omitempty"` + RunnerID int64 `json:"runnerId,omitempty"` RunnerName string `json:"runnerName,omitempty"` AcquireJobURL string `json:"acquireJobUrl,omitempty"` } + +func (s ScaleSetJobMessage) MessageTypeToStatus() JobStatus { + switch s.MessageType { + case MessageTypeJobAssigned: + return JobStatusQueued + case MessageTypeJobStarted: + return JobStatusInProgress + case MessageTypeJobCompleted: + return JobStatusCompleted + default: + return JobStatusQueued + } +} + +func (s ScaleSetJobMessage) ToJob() Job { + return Job{ + ID: s.RunnerRequestID, + Action: s.EventName, + RunID: s.WorkflowRunID, + Status: string(s.MessageTypeToStatus()), + Conclusion: s.Result, + CompletedAt: s.FinishTime, + StartedAt: s.RunnerAssignTime, + Name: s.JobDisplayName, + GithubRunnerID: s.RunnerID, + RunnerName: s.RunnerName, + RepositoryName: s.RepositoryName, + RepositoryOwner: s.OwnerName, + Labels: s.RequestLabels, + } +} diff --git a/params/params.go b/params/params.go index 2c1ed042..a15d2446 100644 --- a/params/params.go +++ b/params/params.go @@ -1039,6 +1039,17 @@ func (g GithubEntity) String() string { return "" } +func (g GithubEntity) GetIDAsUUID() (uuid.UUID, error) { + if g.ID == "" { + return uuid.Nil, nil + } + id, err := uuid.Parse(g.ID) + if err != nil { + return uuid.Nil, fmt.Errorf("failed to parse entity ID: %w", err) + } + return id, nil +} + // used by swagger client generated code type GithubEndpoints []GithubEndpoint diff --git a/runner/pool/pool.go b/runner/pool/pool.go index f1134de8..73a0b0fa 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -36,6 +36,7 @@ import ( 
commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm-provider-common/util" "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/locking" @@ -165,14 +166,13 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { var err error var triggeredBy int64 defer func() { + if jobParams.ID == 0 { + return + } // we're updating the job in the database, regardless of whether it was successful or not. // or if it was meant for this pool or not. Github will send the same job data to all hierarchies // that have been configured to work with garm. Updating the job at all levels should yield the same // outcome in the db. - if jobParams.ID == 0 { - return - } - _, err := r.store.GetJobByID(r.ctx, jobParams.ID) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { @@ -182,13 +182,7 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { return } // This job is new to us. Check if we have a pool that can handle it. - potentialPools, err := r.store.FindPoolsMatchingAllTags(r.ctx, r.entity.EntityType, r.entity.ID, jobParams.Labels) - if err != nil { - slog.With(slog.Any("error", err)).WarnContext( - r.ctx, "failed to find pools matching tags; not recording job", - "requested_tags", strings.Join(jobParams.Labels, ", ")) - return - } + potentialPools := cache.FindPoolsMatchingAllTags(r.entity.ID, jobParams.Labels) if len(potentialPools) == 0 { slog.WarnContext( r.ctx, "no pools matching tags; not recording job", @@ -236,6 +230,16 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { return nil } + fromCache, ok := cache.GetInstanceCache(jobParams.RunnerName) + if !ok { + return nil + } + + if _, ok := cache.GetEntityPool(r.entity.ID, fromCache.PoolID); !ok { + slog.DebugContext(r.ctx, "instance belongs to a pool not managed by this entity", "pool_id", fromCache.PoolID) + return nil + } + // update instance workload state. if _, err := r.setInstanceRunnerStatus(jobParams.RunnerName, params.RunnerTerminated); err != nil { if errors.Is(err, runnerErrors.ErrNotFound) { @@ -261,17 +265,20 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { case "in_progress": jobParams, err = r.paramsWorkflowJobToParamsJob(job) if err != nil { - if errors.Is(err, runnerErrors.ErrNotFound) { - // This is most likely a runner we're not managing. If we define a repo from within an org - // and also define that same org, we will get a hook from github from both the repo and the org - // regarding the same workflow. We look for the runner in the database, and make sure it exists and is - // part of a pool that this manager is responsible for. A not found error here will most likely mean - // that we are not responsible for that runner, and we should ignore it. - return nil - } return errors.Wrap(err, "converting job to params") } + fromCache, ok := cache.GetInstanceCache(jobParams.RunnerName) + if !ok { + slog.DebugContext(r.ctx, "instance not found in cache", "runner_name", jobParams.RunnerName) + return nil + } + + pool, ok := cache.GetEntityPool(r.entity.ID, fromCache.PoolID) + if !ok { + slog.DebugContext(r.ctx, "instance belongs to a pool not managed by this entity", "pool_id", fromCache.PoolID) + return nil + } // update instance workload state. 
instance, err := r.setInstanceRunnerStatus(jobParams.RunnerName, params.RunnerActive) if err != nil { @@ -288,10 +295,6 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { // A runner has picked up the job, and is now running it. It may need to be replaced if the pool has // a minimum number of idle runners configured. - pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) - if err != nil { - return errors.Wrap(err, "getting pool") - } if err := r.ensureIdleRunnersForOnePool(pool); err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "error ensuring idle runners for pool", @@ -1286,10 +1289,7 @@ func (r *basePoolManager) retryFailedInstancesForOnePool(ctx context.Context, po } func (r *basePoolManager) retryFailedInstances() error { - pools, err := r.store.ListEntityPools(r.ctx, r.entity) - if err != nil { - return fmt.Errorf("error listing pools: %w", err) - } + pools := cache.GetEntityPools(r.entity.ID) g, ctx := errgroup.WithContext(r.ctx) for _, pool := range pools { pool := pool @@ -1309,10 +1309,7 @@ func (r *basePoolManager) retryFailedInstances() error { } func (r *basePoolManager) scaleDown() error { - pools, err := r.store.ListEntityPools(r.ctx, r.entity) - if err != nil { - return fmt.Errorf("error listing pools: %w", err) - } + pools := cache.GetEntityPools(r.entity.ID) g, ctx := errgroup.WithContext(r.ctx) for _, pool := range pools { pool := pool @@ -1330,11 +1327,7 @@ func (r *basePoolManager) scaleDown() error { } func (r *basePoolManager) ensureMinIdleRunners() error { - pools, err := r.store.ListEntityPools(r.ctx, r.entity) - if err != nil { - return fmt.Errorf("error listing pools: %w", err) - } - + pools := cache.GetEntityPools(r.entity.ID) g, _ := errgroup.WithContext(r.ctx) for _, pool := range pools { pool := pool @@ -1613,6 +1606,13 @@ func (r *basePoolManager) cleanupOrphanedRunners(runners []*github.Runner) error } func (r *basePoolManager) Start() error { + // load pools in cache + pools, err := r.store.ListEntityPools(r.ctx, r.entity) + if err != nil { + return fmt.Errorf("failed to list pools: %w", err) + } + cache.ReplaceEntityPools(r.entity.ID, pools) + initialToolUpdate := make(chan struct{}, 1) go func() { slog.Info("running initial tool update") diff --git a/runner/pool/util.go b/runner/pool/util.go index 9b7b7f14..d7b2c416 100644 --- a/runner/pool/util.go +++ b/runner/pool/util.go @@ -132,5 +132,7 @@ func composeWatcherFilters(entity params.GithubEntity) dbCommon.PayloadFilterFun watcher.WithEntityFilter(entity), // Watch for changes to the github credentials watcher.WithGithubCredentialsFilter(entity.Credentials), + // Watch for entity pool operations + watcher.WithEntityPoolFilter(entity), ) } diff --git a/runner/pool/watcher.go b/runner/pool/watcher.go index 7f05d93b..61a1117c 100644 --- a/runner/pool/watcher.go +++ b/runner/pool/watcher.go @@ -6,6 +6,7 @@ import ( "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/cache" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" runnerCommon "github.com/cloudbase/garm/runner/common" @@ -121,6 +122,23 @@ func (r *basePoolManager) handleCredentialsUpdate(credentials params.GithubCrede r.mux.Unlock() } +func (r *basePoolManager) handleEntityPoolEvent(event common.ChangePayload) { + pool, ok := event.Payload.(params.Pool) + if !ok { + slog.ErrorContext(r.ctx, "failed to cast payload to pool") + return + } + + switch event.Operation { + case common.CreateOperation, 
common.UpdateOperation: + slog.DebugContext(r.ctx, "updating pool in cache", "pool_id", pool.ID) + cache.SetEntityPool(r.entity.ID, pool) + case common.DeleteOperation: + slog.DebugContext(r.ctx, "deleting pool from cache", "pool_id", pool.ID) + cache.DeleteEntityPool(r.entity.ID, pool.ID) + } +} + func (r *basePoolManager) handleWatcherEvent(event common.ChangePayload) { dbEntityType := common.DatabaseEntityType(r.entity.EntityType) switch event.EntityType { @@ -150,6 +168,8 @@ func (r *basePoolManager) handleWatcherEvent(event common.ChangePayload) { return } r.handleEntityUpdate(entityInfo, event.Operation) + case common.PoolEntityType: + r.handleEntityPoolEvent(event) } } diff --git a/runner/scalesets.go b/runner/scalesets.go index f55b5dca..83432e63 100644 --- a/runner/scalesets.go +++ b/runner/scalesets.go @@ -153,12 +153,12 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param return params.ScaleSet{}, errors.Wrap(err, "creating github client") } - callback := func(old, newSet params.ScaleSet) error { - scalesetCli, err := scalesets.NewClient(ghCli) - if err != nil { - return errors.Wrap(err, "getting scaleset client") - } + scalesetCli, err := scalesets.NewClient(ghCli) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "getting scaleset client") + } + callback := func(old, newSet params.ScaleSet) error { updateParams := params.RunnerScaleSet{} hasUpdates := false if old.Name != newSet.Name { @@ -171,7 +171,7 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param if err != nil { return fmt.Errorf("error fetching runner group from github: %w", err) } - updateParams.RunnerGroupID = int(runnerGroup.ID) + updateParams.RunnerGroupID = runnerGroup.ID hasUpdates = true } @@ -225,13 +225,13 @@ func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.Git if err != nil { return params.ScaleSet{}, errors.Wrap(err, "getting scaleset client") } - runnerGroupID := 1 + var runnerGroupID int64 = 1 if param.GitHubRunnerGroup != "Default" { runnerGroup, err := scalesetCli.GetRunnerGroupByName(ctx, param.GitHubRunnerGroup) if err != nil { return params.ScaleSet{}, errors.Wrap(err, "getting runner group") } - runnerGroupID = int(runnerGroup.ID) + runnerGroupID = runnerGroup.ID } createParam := ¶ms.RunnerScaleSet{ diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go index dfcd1cb5..dcb10257 100644 --- a/workers/provider/instance_manager.go +++ b/workers/provider/instance_manager.go @@ -350,10 +350,6 @@ func (i *instanceManager) handleUpdate(update dbCommon.ChangePayload) error { return runnerErrors.NewBadRequestError("invalid payload type") } - switch instance.Status { - case commonParams.InstanceDeleting, commonParams.InstanceCreating: - return nil - } i.instance = instance return nil } diff --git a/workers/provider/provider.go b/workers/provider/provider.go index 3a7447f6..05a78c7e 100644 --- a/workers/provider/provider.go +++ b/workers/provider/provider.go @@ -239,6 +239,15 @@ func (p *Provider) handleInstanceAdded(instance params.Instance) error { return nil } +func (p *Provider) updateInstanceCache(instance params.Instance, op dbCommon.OperationType) { + if op == dbCommon.DeleteOperation { + slog.DebugContext(p.ctx, "deleting instance from cache", "instance_name", instance.Name) + cache.DeleteInstanceCache(instance.Name) + return + } + cache.SetInstanceCache(instance) +} + func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) { p.mux.Lock() defer 
p.mux.Unlock() @@ -248,6 +257,7 @@ func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) { slog.ErrorContext(p.ctx, "invalid payload type", "payload_type", fmt.Sprintf("%T", event.Payload)) return } + p.updateInstanceCache(instance, event.Operation) if instance.ScaleSetID == 0 { slog.DebugContext(p.ctx, "skipping instance event for non scale set instance") @@ -290,7 +300,7 @@ func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) { } } delete(p.runners, instance.Name) - cache.DeleteInstanceCache(instance.ID) + cache.DeleteInstanceCache(instance.Name) default: slog.ErrorContext(p.ctx, "invalid operation type", "operation_type", event.Operation) return diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index b6d61f54..e1758550 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -91,6 +91,8 @@ func (c *Controller) loadAllScaleSets(cli common.GithubClient) error { } for _, sSet := range scaleSets { + cache.SetEntityScaleSet(c.Entity.ID, sSet) + slog.DebugContext(c.ctx, "loading scale set", "scale_set", sSet.ID) if err := c.handleScaleSetCreateOperation(sSet, cli); err != nil { slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set create operation") diff --git a/workers/scaleset/controller_watcher.go b/workers/scaleset/controller_watcher.go index 131cb56c..99fd4617 100644 --- a/workers/scaleset/controller_watcher.go +++ b/workers/scaleset/controller_watcher.go @@ -61,10 +61,10 @@ func (c *Controller) handleScaleSet(event dbCommon.ChangePayload) { func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet, ghCli common.GithubClient) error { c.mux.Lock() defer c.mux.Unlock() + cache.SetEntityScaleSet(c.Entity.ID, sSet) if _, ok := c.ScaleSets[sSet.ID]; ok { slog.DebugContext(c.ctx, "scale set already exists in worker list", "scale_set_id", sSet.ID) - cache.SetEntityScaleSet(c.Entity.ID, sSet) return nil } @@ -92,7 +92,6 @@ func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet, ghCli c scaleSet: sSet, worker: worker, } - cache.SetEntityScaleSet(c.Entity.ID, sSet) return nil } @@ -119,6 +118,8 @@ func (c *Controller) handleScaleSetUpdateOperation(sSet params.ScaleSet) error { c.mux.Lock() defer c.mux.Unlock() + cache.SetEntityScaleSet(c.Entity.ID, sSet) + set, ok := c.ScaleSets[sSet.ID] if !ok { // Some error may have occurred when the scale set was first created, so we @@ -128,7 +129,6 @@ func (c *Controller) handleScaleSetUpdateOperation(sSet params.ScaleSet) error { } set.scaleSet = sSet c.ScaleSets[sSet.ID] = set - cache.SetEntityScaleSet(c.Entity.ID, sSet) // We let the watcher in the scale set worker handle the update operation. 
return nil } diff --git a/workers/scaleset/interfaces.go b/workers/scaleset/interfaces.go index 51d1d54b..ee089c25 100644 --- a/workers/scaleset/interfaces.go +++ b/workers/scaleset/interfaces.go @@ -13,4 +13,5 @@ type scaleSetHelper interface { Owner() string HandleJobsCompleted(jobs []params.ScaleSetJobMessage) error HandleJobsStarted(jobs []params.ScaleSetJobMessage) error + HandleJobsAvailable(jobs []params.ScaleSetJobMessage) error } diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index b83351f2..82a8a052 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -31,6 +31,37 @@ func (w *Worker) SetLastMessageID(id int64) error { return nil } +func (w *Worker) recordOrUpdateJob(job params.ScaleSetJobMessage) error { + entity, err := w.scaleSet.GetEntity() + if err != nil { + return fmt.Errorf("getting entity: %w", err) + } + asUUID, err := entity.GetIDAsUUID() + if err != nil { + return fmt.Errorf("getting entity ID as UUID: %w", err) + } + + jobParams := job.ToJob() + jobParams.RunnerGroupName = w.scaleSet.GitHubRunnerGroup + + switch entity.EntityType { + case params.GithubEntityTypeEnterprise: + jobParams.EnterpriseID = &asUUID + case params.GithubEntityTypeRepository: + jobParams.RepoID = &asUUID + case params.GithubEntityTypeOrganization: + jobParams.OrgID = &asUUID + default: + return fmt.Errorf("unknown entity type: %s", entity.EntityType) + } + + if _, jobErr := w.store.CreateOrUpdateJob(w.ctx, jobParams); jobErr != nil { + slog.With(slog.Any("error", jobErr)).ErrorContext( + w.ctx, "failed to update job", "job_id", jobParams.ID) + } + return nil +} + // HandleJobCompleted handles a job completed message. If a job had a runner // assigned and was not canceled before it had a chance to run, then we mark // that runner as pending_delete. @@ -39,6 +70,11 @@ func (w *Worker) HandleJobsCompleted(jobs []params.ScaleSetJobMessage) (err erro defer slog.DebugContext(w.ctx, "finished handling job completed", "jobs", jobs, "error", err) for _, job := range jobs { + if err := w.recordOrUpdateJob(job); err != nil { + // recording scale set jobs are purely informational for now. + slog.ErrorContext(w.ctx, "recording job", "job", job, "error", err) + } + if job.RunnerName == "" { // This job was not assigned to a runner, so we can skip it. continue @@ -68,6 +104,11 @@ func (w *Worker) HandleJobsStarted(jobs []params.ScaleSetJobMessage) (err error) slog.DebugContext(w.ctx, "handling job started", "jobs", jobs) defer slog.DebugContext(w.ctx, "finished handling job started", "jobs", jobs, "error", err) for _, job := range jobs { + if err := w.recordOrUpdateJob(job); err != nil { + // recording scale set jobs are purely informational for now. + slog.ErrorContext(w.ctx, "recording job", "job", job, "error", err) + } + if job.RunnerName == "" { // This should not happen, but just in case. continue @@ -93,6 +134,16 @@ func (w *Worker) HandleJobsStarted(jobs []params.ScaleSetJobMessage) (err error) return nil } +func (w *Worker) HandleJobsAvailable(jobs []params.ScaleSetJobMessage) error { + for _, job := range jobs { + if err := w.recordOrUpdateJob(job); err != nil { + // recording scale set jobs are purely informational for now. 
+ slog.ErrorContext(w.ctx, "recording job", "job", job, "error", err) + } + } + return nil +} + func (w *Worker) SetDesiredRunnerCount(count int) error { if err := w.store.SetScaleSetDesiredRunnerCount(w.ctx, w.scaleSet.ID, count); err != nil { return fmt.Errorf("setting desired runner count: %w", err) diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index 07b3bf96..df4ab0bc 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -150,6 +150,11 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage slog.ErrorContext(l.ctx, "acquiring jobs", "error", err) return } + // HandleJobsAvailable only records jobs in the database for now. The jobs are purely + // informational, so an error here won't break anything. + if err := l.scaleSetHelper.HandleJobsAvailable(availableJobs); err != nil { + slog.ErrorContext(l.ctx, "error handling available jobs", "error", err) + } slog.DebugContext(l.ctx, "acquired jobs", "job_ids", idsAcquired) } From a80b900ee969f4b5e9fdb42d841e9588aeaaffcf Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 6 May 2025 18:27:20 +0000 Subject: [PATCH 045/179] Update dependencies Signed-off-by: Gabriel Adrian Samfira --- go.mod | 16 +- go.sum | 32 +- .../prometheus/procfs/.golangci.yml | 72 +-- .../prometheus/procfs/Makefile.common | 4 +- .../procfs/internal/util/sysreadfile.go | 20 + .../prometheus/procfs/mountstats.go | 23 +- .../prometheus/procfs/net_protocols.go | 21 +- vendor/github.com/prometheus/procfs/proc.go | 8 +- .../prometheus/procfs/proc_netstat.go | 224 ++++---- .../github.com/prometheus/procfs/proc_snmp.go | 120 ++--- .../prometheus/procfs/proc_snmp6.go | 150 +++--- .../github.com/prometheus/procfs/proc_sys.go | 2 +- .../github.com/prometheus/procfs/softirqs.go | 22 +- vendor/golang.org/x/oauth2/internal/doc.go | 2 +- vendor/golang.org/x/oauth2/internal/oauth2.go | 2 +- vendor/golang.org/x/oauth2/internal/token.go | 50 +- .../golang.org/x/oauth2/internal/transport.go | 4 +- vendor/golang.org/x/oauth2/oauth2.go | 55 +- vendor/golang.org/x/oauth2/pkce.go | 15 +- vendor/golang.org/x/oauth2/token.go | 15 +- vendor/golang.org/x/oauth2/transport.go | 24 +- vendor/golang.org/x/sync/errgroup/errgroup.go | 107 +++- vendor/golang.org/x/sys/cpu/cpu.go | 11 + .../golang.org/x/sys/cpu/cpu_linux_riscv64.go | 23 + vendor/golang.org/x/sys/cpu/cpu_riscv64.go | 12 + .../x/sys/windows/security_windows.go | 49 +- .../x/sys/windows/syscall_windows.go | 6 +- .../golang.org/x/sys/windows/types_windows.go | 212 ++++++++ .../x/sys/windows/zsyscall_windows.go | 9 + vendor/gorm.io/gorm/.golangci.yml | 15 +- vendor/gorm.io/gorm/CODE_OF_CONDUCT.md | 128 +++++ vendor/gorm.io/gorm/LICENSE | 2 +- vendor/gorm.io/gorm/callbacks/associations.go | 10 +- vendor/gorm.io/gorm/clause/returning.go | 9 +- vendor/gorm.io/gorm/finisher_api.go | 6 +- vendor/gorm.io/gorm/gorm.go | 18 +- vendor/gorm.io/gorm/internal/lru/lru.go | 493 ++++++++++++++++++ .../gorm/internal/stmt_store/stmt_store.go | 182 +++++++ vendor/gorm.io/gorm/logger/logger.go | 12 + vendor/gorm.io/gorm/migrator/migrator.go | 4 +- vendor/gorm.io/gorm/prepare_stmt.go | 144 ++--- vendor/gorm.io/gorm/scan.go | 4 +- vendor/gorm.io/gorm/schema/field.go | 2 +- vendor/gorm.io/gorm/schema/index.go | 31 +- vendor/gorm.io/gorm/schema/relationship.go | 6 +- vendor/gorm.io/gorm/schema/utils.go | 2 +- vendor/modules.txt | 20 +- workers/scaleset/scaleset_helper.go | 6 +- workers/scaleset/scaleset_listener.go | 8 + 49 files changed, 
1801 insertions(+), 611 deletions(-) create mode 100644 vendor/gorm.io/gorm/CODE_OF_CONDUCT.md create mode 100644 vendor/gorm.io/gorm/internal/lru/lru.go create mode 100644 vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go diff --git a/go.mod b/go.mod index afc1af60..db57a68b 100644 --- a/go.mod +++ b/go.mod @@ -28,15 +28,15 @@ require ( github.com/prometheus/client_golang v1.22.0 github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 - golang.org/x/crypto v0.37.0 - golang.org/x/oauth2 v0.29.0 - golang.org/x/sync v0.13.0 + golang.org/x/crypto v0.38.0 + golang.org/x/oauth2 v0.30.0 + golang.org/x/sync v0.14.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 gorm.io/datatypes v1.2.5 gorm.io/driver/mysql v1.5.7 gorm.io/driver/sqlite v1.5.7 - gorm.io/gorm v1.25.12 + gorm.io/gorm v1.26.0 ) require ( @@ -76,7 +76,7 @@ require ( github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_model v0.6.2 // indirect github.com/prometheus/common v0.63.0 // indirect - github.com/prometheus/procfs v0.16.0 // indirect + github.com/prometheus/procfs v0.16.1 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/spf13/pflag v1.0.6 // indirect github.com/stretchr/objx v0.5.2 // indirect @@ -86,9 +86,9 @@ require ( go.opentelemetry.io/otel v1.35.0 // indirect go.opentelemetry.io/otel/metric v1.35.0 // indirect go.opentelemetry.io/otel/trace v1.35.0 // indirect - golang.org/x/net v0.39.0 // indirect - golang.org/x/sys v0.32.0 // indirect - golang.org/x/text v0.24.0 // indirect + golang.org/x/net v0.40.0 // indirect + golang.org/x/sys v0.33.0 // indirect + golang.org/x/text v0.25.0 // indirect google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 14e83505..5ca7575d 100644 --- a/go.sum +++ b/go.sum @@ -155,8 +155,8 @@ github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNw github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k= github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18= -github.com/prometheus/procfs v0.16.0 h1:xh6oHhKwnOJKMYiYBDWmkHqQPyiY40sny36Cmx2bbsM= -github.com/prometheus/procfs v0.16.0/go.mod h1:8veyXUu3nGP7oaCxhX6yeaM5u4stL2FeMXnCqhDthZg= +github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= +github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= @@ -188,21 +188,21 @@ go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucg go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= -golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= -golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= -golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= -golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= 
-golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98= -golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= -golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= -golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= +golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= +golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= +golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= +golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= +golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= +golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= +golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= -golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= -golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= +golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= @@ -229,5 +229,5 @@ gorm.io/driver/sqlite v1.5.7/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDa gorm.io/driver/sqlserver v1.5.4 h1:xA+Y1KDNspv79q43bPyjDMUgHoYHLhXYmdFcYPobg8g= gorm.io/driver/sqlserver v1.5.4/go.mod h1:+frZ/qYmuna11zHPlh5oc2O6ZA/lS88Keb0XSH1Zh/g= gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= -gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8= -gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ= +gorm.io/gorm v1.26.0 h1:9lqQVPG5aNNS6AyHdRiwScAVnXHg/L/Srzx55G5fOgs= +gorm.io/gorm v1.26.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/vendor/github.com/prometheus/procfs/.golangci.yml b/vendor/github.com/prometheus/procfs/.golangci.yml index b43e09f6..3c3bf910 100644 --- a/vendor/github.com/prometheus/procfs/.golangci.yml +++ b/vendor/github.com/prometheus/procfs/.golangci.yml @@ -1,31 +1,45 @@ ---- +version: "2" linters: enable: - - errcheck - - forbidigo - - godot - - gofmt - - goimports - - gosimple - - govet - - ineffassign - - misspell - - revive - - staticcheck - - testifylint - - unused - -linters-settings: - forbidigo: - forbid: - - p: ^fmt\.Print.*$ - msg: Do not commit print statements. 
- godot: - capital: true - exclude: - # Ignore "See: URL" - - 'See:' - goimports: - local-prefixes: github.com/prometheus/procfs - misspell: - locale: US + - forbidigo + - godot + - misspell + - revive + - testifylint + settings: + forbidigo: + forbid: + - pattern: ^fmt\.Print.*$ + msg: Do not commit print statements. + godot: + exclude: + # Ignore "See: URL". + - 'See:' + capital: true + misspell: + locale: US + exclusions: + generated: lax + presets: + - comments + - common-false-positives + - legacy + - std-error-handling + paths: + - third_party$ + - builtin$ + - examples$ +formatters: + enable: + - gofmt + - goimports + settings: + goimports: + local-prefixes: + - github.com/prometheus/procfs + exclusions: + generated: lax + paths: + - third_party$ + - builtin$ + - examples$ diff --git a/vendor/github.com/prometheus/procfs/Makefile.common b/vendor/github.com/prometheus/procfs/Makefile.common index cbb5d863..0ed55c2b 100644 --- a/vendor/github.com/prometheus/procfs/Makefile.common +++ b/vendor/github.com/prometheus/procfs/Makefile.common @@ -33,7 +33,7 @@ GOHOSTOS ?= $(shell $(GO) env GOHOSTOS) GOHOSTARCH ?= $(shell $(GO) env GOHOSTARCH) GO_VERSION ?= $(shell $(GO) version) -GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION)) +GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION)) PRE_GO_111 ?= $(shell echo $(GO_VERSION_NUMBER) | grep -E 'go1\.(10|[0-9])\.') PROMU := $(FIRST_GOPATH)/bin/promu @@ -61,7 +61,7 @@ PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_ SKIP_GOLANGCI_LINT := GOLANGCI_LINT := GOLANGCI_LINT_OPTS ?= -GOLANGCI_LINT_VERSION ?= v1.60.2 +GOLANGCI_LINT_VERSION ?= v2.0.2 # golangci-lint only supports linux, darwin and windows platforms on i386/amd64/arm64. # windows isn't included here because of the path separator being different. ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin)) diff --git a/vendor/github.com/prometheus/procfs/internal/util/sysreadfile.go b/vendor/github.com/prometheus/procfs/internal/util/sysreadfile.go index 1ab875ce..d5404a6d 100644 --- a/vendor/github.com/prometheus/procfs/internal/util/sysreadfile.go +++ b/vendor/github.com/prometheus/procfs/internal/util/sysreadfile.go @@ -20,6 +20,8 @@ package util import ( "bytes" "os" + "strconv" + "strings" "syscall" ) @@ -48,3 +50,21 @@ func SysReadFile(file string) (string, error) { return string(bytes.TrimSpace(b[:n])), nil } + +// SysReadUintFromFile reads a file using SysReadFile and attempts to parse a uint64 from it. +func SysReadUintFromFile(path string) (uint64, error) { + data, err := SysReadFile(path) + if err != nil { + return 0, err + } + return strconv.ParseUint(strings.TrimSpace(string(data)), 10, 64) +} + +// SysReadIntFromFile reads a file using SysReadFile and attempts to parse a int64 from it.
+func SysReadIntFromFile(path string) (int64, error) { + data, err := SysReadFile(path) + if err != nil { + return 0, err + } + return strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64) +} diff --git a/vendor/github.com/prometheus/procfs/mountstats.go b/vendor/github.com/prometheus/procfs/mountstats.go index b6c8d1a5..50caa732 100644 --- a/vendor/github.com/prometheus/procfs/mountstats.go +++ b/vendor/github.com/prometheus/procfs/mountstats.go @@ -601,11 +601,12 @@ func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats switch statVersion { case statVersion10: var expectedLength int - if protocol == "tcp" { + switch protocol { + case "tcp": expectedLength = fieldTransport10TCPLen - } else if protocol == "udp" { + case "udp": expectedLength = fieldTransport10UDPLen - } else { + default: return nil, fmt.Errorf("%w: Invalid NFS protocol \"%s\" in stats 1.0 statement: %v", ErrFileParse, protocol, ss) } if len(ss) != expectedLength { @@ -613,13 +614,14 @@ func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats } case statVersion11: var expectedLength int - if protocol == "tcp" { + switch protocol { + case "tcp": expectedLength = fieldTransport11TCPLen - } else if protocol == "udp" { + case "udp": expectedLength = fieldTransport11UDPLen - } else if protocol == "rdma" { + case "rdma": expectedLength = fieldTransport11RDMAMinLen - } else { + default: return nil, fmt.Errorf("%w: invalid NFS protocol \"%s\" in stats 1.1 statement: %v", ErrFileParse, protocol, ss) } if (len(ss) != expectedLength && (protocol == "tcp" || protocol == "udp")) || @@ -655,11 +657,12 @@ func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats // For the udp RPC transport there is no connection count, connect idle time, // or idle time (fields #3, #4, and #5); all other fields are the same. So // we set them to 0 here. - if protocol == "udp" { + switch protocol { + case "udp": ns = append(ns[:2], append(make([]uint64, 3), ns[2:]...)...) - } else if protocol == "tcp" { + case "tcp": ns = append(ns[:fieldTransport11TCPLen], make([]uint64, fieldTransport11RDMAMaxLen-fieldTransport11TCPLen+3)...) - } else if protocol == "rdma" { + case "rdma": ns = append(ns[:fieldTransport10TCPLen], append(make([]uint64, 3), ns[fieldTransport10TCPLen:]...)...) 
} diff --git a/vendor/github.com/prometheus/procfs/net_protocols.go b/vendor/github.com/prometheus/procfs/net_protocols.go index b6c77b70..8d4b1ac0 100644 --- a/vendor/github.com/prometheus/procfs/net_protocols.go +++ b/vendor/github.com/prometheus/procfs/net_protocols.go @@ -115,22 +115,24 @@ func (ps NetProtocolStats) parseLine(rawLine string) (*NetProtocolStatLine, erro if err != nil { return nil, err } - if fields[4] == enabled { + switch fields[4] { + case enabled: line.Pressure = 1 - } else if fields[4] == disabled { + case disabled: line.Pressure = 0 - } else { + default: line.Pressure = -1 } line.MaxHeader, err = strconv.ParseUint(fields[5], 10, 64) if err != nil { return nil, err } - if fields[6] == enabled { + switch fields[6] { + case enabled: line.Slab = true - } else if fields[6] == disabled { + case disabled: line.Slab = false - } else { + default: return nil, fmt.Errorf("%w: capability for protocol: %s", ErrFileParse, line.Name) } line.ModuleName = fields[7] @@ -168,11 +170,12 @@ func (pc *NetProtocolCapabilities) parseCapabilities(capabilities []string) erro } for i := 0; i < len(capabilities); i++ { - if capabilities[i] == "y" { + switch capabilities[i] { + case "y": *capabilityFields[i] = true - } else if capabilities[i] == "n" { + case "n": *capabilityFields[i] = false - } else { + default: return fmt.Errorf("%w: capability block for protocol: position %d", ErrFileParse, i) } } diff --git a/vendor/github.com/prometheus/procfs/proc.go b/vendor/github.com/prometheus/procfs/proc.go index 14279636..368187fa 100644 --- a/vendor/github.com/prometheus/procfs/proc.go +++ b/vendor/github.com/prometheus/procfs/proc.go @@ -37,9 +37,9 @@ type Proc struct { type Procs []Proc var ( - ErrFileParse = errors.New("Error Parsing File") - ErrFileRead = errors.New("Error Reading File") - ErrMountPoint = errors.New("Error Accessing Mount point") + ErrFileParse = errors.New("error parsing file") + ErrFileRead = errors.New("error reading file") + ErrMountPoint = errors.New("error accessing mount point") ) func (p Procs) Len() int { return len(p) } @@ -79,7 +79,7 @@ func (fs FS) Self() (Proc, error) { if err != nil { return Proc{}, err } - pid, err := strconv.Atoi(strings.Replace(p, string(fs.proc), "", -1)) + pid, err := strconv.Atoi(strings.ReplaceAll(p, string(fs.proc), "")) if err != nil { return Proc{}, err } diff --git a/vendor/github.com/prometheus/procfs/proc_netstat.go b/vendor/github.com/prometheus/procfs/proc_netstat.go index 8e3ff4d7..4248c171 100644 --- a/vendor/github.com/prometheus/procfs/proc_netstat.go +++ b/vendor/github.com/prometheus/procfs/proc_netstat.go @@ -209,232 +209,232 @@ func parseProcNetstat(r io.Reader, fileName string) (ProcNetstat, error) { case "TcpExt": switch key { case "SyncookiesSent": - procNetstat.TcpExt.SyncookiesSent = &value + procNetstat.SyncookiesSent = &value case "SyncookiesRecv": - procNetstat.TcpExt.SyncookiesRecv = &value + procNetstat.SyncookiesRecv = &value case "SyncookiesFailed": - procNetstat.TcpExt.SyncookiesFailed = &value + procNetstat.SyncookiesFailed = &value case "EmbryonicRsts": - procNetstat.TcpExt.EmbryonicRsts = &value + procNetstat.EmbryonicRsts = &value case "PruneCalled": - procNetstat.TcpExt.PruneCalled = &value + procNetstat.PruneCalled = &value case "RcvPruned": - procNetstat.TcpExt.RcvPruned = &value + procNetstat.RcvPruned = &value case "OfoPruned": - procNetstat.TcpExt.OfoPruned = &value + procNetstat.OfoPruned = &value case "OutOfWindowIcmps": - procNetstat.TcpExt.OutOfWindowIcmps = &value + procNetstat.OutOfWindowIcmps = 
&value case "LockDroppedIcmps": - procNetstat.TcpExt.LockDroppedIcmps = &value + procNetstat.LockDroppedIcmps = &value case "ArpFilter": - procNetstat.TcpExt.ArpFilter = &value + procNetstat.ArpFilter = &value case "TW": - procNetstat.TcpExt.TW = &value + procNetstat.TW = &value case "TWRecycled": - procNetstat.TcpExt.TWRecycled = &value + procNetstat.TWRecycled = &value case "TWKilled": - procNetstat.TcpExt.TWKilled = &value + procNetstat.TWKilled = &value case "PAWSActive": - procNetstat.TcpExt.PAWSActive = &value + procNetstat.PAWSActive = &value case "PAWSEstab": - procNetstat.TcpExt.PAWSEstab = &value + procNetstat.PAWSEstab = &value case "DelayedACKs": - procNetstat.TcpExt.DelayedACKs = &value + procNetstat.DelayedACKs = &value case "DelayedACKLocked": - procNetstat.TcpExt.DelayedACKLocked = &value + procNetstat.DelayedACKLocked = &value case "DelayedACKLost": - procNetstat.TcpExt.DelayedACKLost = &value + procNetstat.DelayedACKLost = &value case "ListenOverflows": - procNetstat.TcpExt.ListenOverflows = &value + procNetstat.ListenOverflows = &value case "ListenDrops": - procNetstat.TcpExt.ListenDrops = &value + procNetstat.ListenDrops = &value case "TCPHPHits": - procNetstat.TcpExt.TCPHPHits = &value + procNetstat.TCPHPHits = &value case "TCPPureAcks": - procNetstat.TcpExt.TCPPureAcks = &value + procNetstat.TCPPureAcks = &value case "TCPHPAcks": - procNetstat.TcpExt.TCPHPAcks = &value + procNetstat.TCPHPAcks = &value case "TCPRenoRecovery": - procNetstat.TcpExt.TCPRenoRecovery = &value + procNetstat.TCPRenoRecovery = &value case "TCPSackRecovery": - procNetstat.TcpExt.TCPSackRecovery = &value + procNetstat.TCPSackRecovery = &value case "TCPSACKReneging": - procNetstat.TcpExt.TCPSACKReneging = &value + procNetstat.TCPSACKReneging = &value case "TCPSACKReorder": - procNetstat.TcpExt.TCPSACKReorder = &value + procNetstat.TCPSACKReorder = &value case "TCPRenoReorder": - procNetstat.TcpExt.TCPRenoReorder = &value + procNetstat.TCPRenoReorder = &value case "TCPTSReorder": - procNetstat.TcpExt.TCPTSReorder = &value + procNetstat.TCPTSReorder = &value case "TCPFullUndo": - procNetstat.TcpExt.TCPFullUndo = &value + procNetstat.TCPFullUndo = &value case "TCPPartialUndo": - procNetstat.TcpExt.TCPPartialUndo = &value + procNetstat.TCPPartialUndo = &value case "TCPDSACKUndo": - procNetstat.TcpExt.TCPDSACKUndo = &value + procNetstat.TCPDSACKUndo = &value case "TCPLossUndo": - procNetstat.TcpExt.TCPLossUndo = &value + procNetstat.TCPLossUndo = &value case "TCPLostRetransmit": - procNetstat.TcpExt.TCPLostRetransmit = &value + procNetstat.TCPLostRetransmit = &value case "TCPRenoFailures": - procNetstat.TcpExt.TCPRenoFailures = &value + procNetstat.TCPRenoFailures = &value case "TCPSackFailures": - procNetstat.TcpExt.TCPSackFailures = &value + procNetstat.TCPSackFailures = &value case "TCPLossFailures": - procNetstat.TcpExt.TCPLossFailures = &value + procNetstat.TCPLossFailures = &value case "TCPFastRetrans": - procNetstat.TcpExt.TCPFastRetrans = &value + procNetstat.TCPFastRetrans = &value case "TCPSlowStartRetrans": - procNetstat.TcpExt.TCPSlowStartRetrans = &value + procNetstat.TCPSlowStartRetrans = &value case "TCPTimeouts": - procNetstat.TcpExt.TCPTimeouts = &value + procNetstat.TCPTimeouts = &value case "TCPLossProbes": - procNetstat.TcpExt.TCPLossProbes = &value + procNetstat.TCPLossProbes = &value case "TCPLossProbeRecovery": - procNetstat.TcpExt.TCPLossProbeRecovery = &value + procNetstat.TCPLossProbeRecovery = &value case "TCPRenoRecoveryFail": - procNetstat.TcpExt.TCPRenoRecoveryFail = 
&value + procNetstat.TCPRenoRecoveryFail = &value case "TCPSackRecoveryFail": - procNetstat.TcpExt.TCPSackRecoveryFail = &value + procNetstat.TCPSackRecoveryFail = &value case "TCPRcvCollapsed": - procNetstat.TcpExt.TCPRcvCollapsed = &value + procNetstat.TCPRcvCollapsed = &value case "TCPDSACKOldSent": - procNetstat.TcpExt.TCPDSACKOldSent = &value + procNetstat.TCPDSACKOldSent = &value case "TCPDSACKOfoSent": - procNetstat.TcpExt.TCPDSACKOfoSent = &value + procNetstat.TCPDSACKOfoSent = &value case "TCPDSACKRecv": - procNetstat.TcpExt.TCPDSACKRecv = &value + procNetstat.TCPDSACKRecv = &value case "TCPDSACKOfoRecv": - procNetstat.TcpExt.TCPDSACKOfoRecv = &value + procNetstat.TCPDSACKOfoRecv = &value case "TCPAbortOnData": - procNetstat.TcpExt.TCPAbortOnData = &value + procNetstat.TCPAbortOnData = &value case "TCPAbortOnClose": - procNetstat.TcpExt.TCPAbortOnClose = &value + procNetstat.TCPAbortOnClose = &value case "TCPDeferAcceptDrop": - procNetstat.TcpExt.TCPDeferAcceptDrop = &value + procNetstat.TCPDeferAcceptDrop = &value case "IPReversePathFilter": - procNetstat.TcpExt.IPReversePathFilter = &value + procNetstat.IPReversePathFilter = &value case "TCPTimeWaitOverflow": - procNetstat.TcpExt.TCPTimeWaitOverflow = &value + procNetstat.TCPTimeWaitOverflow = &value case "TCPReqQFullDoCookies": - procNetstat.TcpExt.TCPReqQFullDoCookies = &value + procNetstat.TCPReqQFullDoCookies = &value case "TCPReqQFullDrop": - procNetstat.TcpExt.TCPReqQFullDrop = &value + procNetstat.TCPReqQFullDrop = &value case "TCPRetransFail": - procNetstat.TcpExt.TCPRetransFail = &value + procNetstat.TCPRetransFail = &value case "TCPRcvCoalesce": - procNetstat.TcpExt.TCPRcvCoalesce = &value + procNetstat.TCPRcvCoalesce = &value case "TCPRcvQDrop": - procNetstat.TcpExt.TCPRcvQDrop = &value + procNetstat.TCPRcvQDrop = &value case "TCPOFOQueue": - procNetstat.TcpExt.TCPOFOQueue = &value + procNetstat.TCPOFOQueue = &value case "TCPOFODrop": - procNetstat.TcpExt.TCPOFODrop = &value + procNetstat.TCPOFODrop = &value case "TCPOFOMerge": - procNetstat.TcpExt.TCPOFOMerge = &value + procNetstat.TCPOFOMerge = &value case "TCPChallengeACK": - procNetstat.TcpExt.TCPChallengeACK = &value + procNetstat.TCPChallengeACK = &value case "TCPSYNChallenge": - procNetstat.TcpExt.TCPSYNChallenge = &value + procNetstat.TCPSYNChallenge = &value case "TCPFastOpenActive": - procNetstat.TcpExt.TCPFastOpenActive = &value + procNetstat.TCPFastOpenActive = &value case "TCPFastOpenActiveFail": - procNetstat.TcpExt.TCPFastOpenActiveFail = &value + procNetstat.TCPFastOpenActiveFail = &value case "TCPFastOpenPassive": - procNetstat.TcpExt.TCPFastOpenPassive = &value + procNetstat.TCPFastOpenPassive = &value case "TCPFastOpenPassiveFail": - procNetstat.TcpExt.TCPFastOpenPassiveFail = &value + procNetstat.TCPFastOpenPassiveFail = &value case "TCPFastOpenListenOverflow": - procNetstat.TcpExt.TCPFastOpenListenOverflow = &value + procNetstat.TCPFastOpenListenOverflow = &value case "TCPFastOpenCookieReqd": - procNetstat.TcpExt.TCPFastOpenCookieReqd = &value + procNetstat.TCPFastOpenCookieReqd = &value case "TCPFastOpenBlackhole": - procNetstat.TcpExt.TCPFastOpenBlackhole = &value + procNetstat.TCPFastOpenBlackhole = &value case "TCPSpuriousRtxHostQueues": - procNetstat.TcpExt.TCPSpuriousRtxHostQueues = &value + procNetstat.TCPSpuriousRtxHostQueues = &value case "BusyPollRxPackets": - procNetstat.TcpExt.BusyPollRxPackets = &value + procNetstat.BusyPollRxPackets = &value case "TCPAutoCorking": - procNetstat.TcpExt.TCPAutoCorking = &value + 
procNetstat.TCPAutoCorking = &value case "TCPFromZeroWindowAdv": - procNetstat.TcpExt.TCPFromZeroWindowAdv = &value + procNetstat.TCPFromZeroWindowAdv = &value case "TCPToZeroWindowAdv": - procNetstat.TcpExt.TCPToZeroWindowAdv = &value + procNetstat.TCPToZeroWindowAdv = &value case "TCPWantZeroWindowAdv": - procNetstat.TcpExt.TCPWantZeroWindowAdv = &value + procNetstat.TCPWantZeroWindowAdv = &value case "TCPSynRetrans": - procNetstat.TcpExt.TCPSynRetrans = &value + procNetstat.TCPSynRetrans = &value case "TCPOrigDataSent": - procNetstat.TcpExt.TCPOrigDataSent = &value + procNetstat.TCPOrigDataSent = &value case "TCPHystartTrainDetect": - procNetstat.TcpExt.TCPHystartTrainDetect = &value + procNetstat.TCPHystartTrainDetect = &value case "TCPHystartTrainCwnd": - procNetstat.TcpExt.TCPHystartTrainCwnd = &value + procNetstat.TCPHystartTrainCwnd = &value case "TCPHystartDelayDetect": - procNetstat.TcpExt.TCPHystartDelayDetect = &value + procNetstat.TCPHystartDelayDetect = &value case "TCPHystartDelayCwnd": - procNetstat.TcpExt.TCPHystartDelayCwnd = &value + procNetstat.TCPHystartDelayCwnd = &value case "TCPACKSkippedSynRecv": - procNetstat.TcpExt.TCPACKSkippedSynRecv = &value + procNetstat.TCPACKSkippedSynRecv = &value case "TCPACKSkippedPAWS": - procNetstat.TcpExt.TCPACKSkippedPAWS = &value + procNetstat.TCPACKSkippedPAWS = &value case "TCPACKSkippedSeq": - procNetstat.TcpExt.TCPACKSkippedSeq = &value + procNetstat.TCPACKSkippedSeq = &value case "TCPACKSkippedFinWait2": - procNetstat.TcpExt.TCPACKSkippedFinWait2 = &value + procNetstat.TCPACKSkippedFinWait2 = &value case "TCPACKSkippedTimeWait": - procNetstat.TcpExt.TCPACKSkippedTimeWait = &value + procNetstat.TCPACKSkippedTimeWait = &value case "TCPACKSkippedChallenge": - procNetstat.TcpExt.TCPACKSkippedChallenge = &value + procNetstat.TCPACKSkippedChallenge = &value case "TCPWinProbe": - procNetstat.TcpExt.TCPWinProbe = &value + procNetstat.TCPWinProbe = &value case "TCPKeepAlive": - procNetstat.TcpExt.TCPKeepAlive = &value + procNetstat.TCPKeepAlive = &value case "TCPMTUPFail": - procNetstat.TcpExt.TCPMTUPFail = &value + procNetstat.TCPMTUPFail = &value case "TCPMTUPSuccess": - procNetstat.TcpExt.TCPMTUPSuccess = &value + procNetstat.TCPMTUPSuccess = &value case "TCPWqueueTooBig": - procNetstat.TcpExt.TCPWqueueTooBig = &value + procNetstat.TCPWqueueTooBig = &value } case "IpExt": switch key { case "InNoRoutes": - procNetstat.IpExt.InNoRoutes = &value + procNetstat.InNoRoutes = &value case "InTruncatedPkts": - procNetstat.IpExt.InTruncatedPkts = &value + procNetstat.InTruncatedPkts = &value case "InMcastPkts": - procNetstat.IpExt.InMcastPkts = &value + procNetstat.InMcastPkts = &value case "OutMcastPkts": - procNetstat.IpExt.OutMcastPkts = &value + procNetstat.OutMcastPkts = &value case "InBcastPkts": - procNetstat.IpExt.InBcastPkts = &value + procNetstat.InBcastPkts = &value case "OutBcastPkts": - procNetstat.IpExt.OutBcastPkts = &value + procNetstat.OutBcastPkts = &value case "InOctets": - procNetstat.IpExt.InOctets = &value + procNetstat.InOctets = &value case "OutOctets": - procNetstat.IpExt.OutOctets = &value + procNetstat.OutOctets = &value case "InMcastOctets": - procNetstat.IpExt.InMcastOctets = &value + procNetstat.InMcastOctets = &value case "OutMcastOctets": - procNetstat.IpExt.OutMcastOctets = &value + procNetstat.OutMcastOctets = &value case "InBcastOctets": - procNetstat.IpExt.InBcastOctets = &value + procNetstat.InBcastOctets = &value case "OutBcastOctets": - procNetstat.IpExt.OutBcastOctets = &value + 
procNetstat.OutBcastOctets = &value case "InCsumErrors": - procNetstat.IpExt.InCsumErrors = &value + procNetstat.InCsumErrors = &value case "InNoECTPkts": - procNetstat.IpExt.InNoECTPkts = &value + procNetstat.InNoECTPkts = &value case "InECT1Pkts": - procNetstat.IpExt.InECT1Pkts = &value + procNetstat.InECT1Pkts = &value case "InECT0Pkts": - procNetstat.IpExt.InECT0Pkts = &value + procNetstat.InECT0Pkts = &value case "InCEPkts": - procNetstat.IpExt.InCEPkts = &value + procNetstat.InCEPkts = &value case "ReasmOverlaps": - procNetstat.IpExt.ReasmOverlaps = &value + procNetstat.ReasmOverlaps = &value } } } diff --git a/vendor/github.com/prometheus/procfs/proc_snmp.go b/vendor/github.com/prometheus/procfs/proc_snmp.go index b9d2cf64..4bdc90b0 100644 --- a/vendor/github.com/prometheus/procfs/proc_snmp.go +++ b/vendor/github.com/prometheus/procfs/proc_snmp.go @@ -173,138 +173,138 @@ func parseSnmp(r io.Reader, fileName string) (ProcSnmp, error) { case "Ip": switch key { case "Forwarding": - procSnmp.Ip.Forwarding = &value + procSnmp.Forwarding = &value case "DefaultTTL": - procSnmp.Ip.DefaultTTL = &value + procSnmp.DefaultTTL = &value case "InReceives": - procSnmp.Ip.InReceives = &value + procSnmp.InReceives = &value case "InHdrErrors": - procSnmp.Ip.InHdrErrors = &value + procSnmp.InHdrErrors = &value case "InAddrErrors": - procSnmp.Ip.InAddrErrors = &value + procSnmp.InAddrErrors = &value case "ForwDatagrams": - procSnmp.Ip.ForwDatagrams = &value + procSnmp.ForwDatagrams = &value case "InUnknownProtos": - procSnmp.Ip.InUnknownProtos = &value + procSnmp.InUnknownProtos = &value case "InDiscards": - procSnmp.Ip.InDiscards = &value + procSnmp.InDiscards = &value case "InDelivers": - procSnmp.Ip.InDelivers = &value + procSnmp.InDelivers = &value case "OutRequests": - procSnmp.Ip.OutRequests = &value + procSnmp.OutRequests = &value case "OutDiscards": - procSnmp.Ip.OutDiscards = &value + procSnmp.OutDiscards = &value case "OutNoRoutes": - procSnmp.Ip.OutNoRoutes = &value + procSnmp.OutNoRoutes = &value case "ReasmTimeout": - procSnmp.Ip.ReasmTimeout = &value + procSnmp.ReasmTimeout = &value case "ReasmReqds": - procSnmp.Ip.ReasmReqds = &value + procSnmp.ReasmReqds = &value case "ReasmOKs": - procSnmp.Ip.ReasmOKs = &value + procSnmp.ReasmOKs = &value case "ReasmFails": - procSnmp.Ip.ReasmFails = &value + procSnmp.ReasmFails = &value case "FragOKs": - procSnmp.Ip.FragOKs = &value + procSnmp.FragOKs = &value case "FragFails": - procSnmp.Ip.FragFails = &value + procSnmp.FragFails = &value case "FragCreates": - procSnmp.Ip.FragCreates = &value + procSnmp.FragCreates = &value } case "Icmp": switch key { case "InMsgs": - procSnmp.Icmp.InMsgs = &value + procSnmp.InMsgs = &value case "InErrors": procSnmp.Icmp.InErrors = &value case "InCsumErrors": procSnmp.Icmp.InCsumErrors = &value case "InDestUnreachs": - procSnmp.Icmp.InDestUnreachs = &value + procSnmp.InDestUnreachs = &value case "InTimeExcds": - procSnmp.Icmp.InTimeExcds = &value + procSnmp.InTimeExcds = &value case "InParmProbs": - procSnmp.Icmp.InParmProbs = &value + procSnmp.InParmProbs = &value case "InSrcQuenchs": - procSnmp.Icmp.InSrcQuenchs = &value + procSnmp.InSrcQuenchs = &value case "InRedirects": - procSnmp.Icmp.InRedirects = &value + procSnmp.InRedirects = &value case "InEchos": - procSnmp.Icmp.InEchos = &value + procSnmp.InEchos = &value case "InEchoReps": - procSnmp.Icmp.InEchoReps = &value + procSnmp.InEchoReps = &value case "InTimestamps": - procSnmp.Icmp.InTimestamps = &value + procSnmp.InTimestamps = &value case "InTimestampReps": - 
procSnmp.Icmp.InTimestampReps = &value + procSnmp.InTimestampReps = &value case "InAddrMasks": - procSnmp.Icmp.InAddrMasks = &value + procSnmp.InAddrMasks = &value case "InAddrMaskReps": - procSnmp.Icmp.InAddrMaskReps = &value + procSnmp.InAddrMaskReps = &value case "OutMsgs": - procSnmp.Icmp.OutMsgs = &value + procSnmp.OutMsgs = &value case "OutErrors": - procSnmp.Icmp.OutErrors = &value + procSnmp.OutErrors = &value case "OutDestUnreachs": - procSnmp.Icmp.OutDestUnreachs = &value + procSnmp.OutDestUnreachs = &value case "OutTimeExcds": - procSnmp.Icmp.OutTimeExcds = &value + procSnmp.OutTimeExcds = &value case "OutParmProbs": - procSnmp.Icmp.OutParmProbs = &value + procSnmp.OutParmProbs = &value case "OutSrcQuenchs": - procSnmp.Icmp.OutSrcQuenchs = &value + procSnmp.OutSrcQuenchs = &value case "OutRedirects": - procSnmp.Icmp.OutRedirects = &value + procSnmp.OutRedirects = &value case "OutEchos": - procSnmp.Icmp.OutEchos = &value + procSnmp.OutEchos = &value case "OutEchoReps": - procSnmp.Icmp.OutEchoReps = &value + procSnmp.OutEchoReps = &value case "OutTimestamps": - procSnmp.Icmp.OutTimestamps = &value + procSnmp.OutTimestamps = &value case "OutTimestampReps": - procSnmp.Icmp.OutTimestampReps = &value + procSnmp.OutTimestampReps = &value case "OutAddrMasks": - procSnmp.Icmp.OutAddrMasks = &value + procSnmp.OutAddrMasks = &value case "OutAddrMaskReps": - procSnmp.Icmp.OutAddrMaskReps = &value + procSnmp.OutAddrMaskReps = &value } case "IcmpMsg": switch key { case "InType3": - procSnmp.IcmpMsg.InType3 = &value + procSnmp.InType3 = &value case "OutType3": - procSnmp.IcmpMsg.OutType3 = &value + procSnmp.OutType3 = &value } case "Tcp": switch key { case "RtoAlgorithm": - procSnmp.Tcp.RtoAlgorithm = &value + procSnmp.RtoAlgorithm = &value case "RtoMin": - procSnmp.Tcp.RtoMin = &value + procSnmp.RtoMin = &value case "RtoMax": - procSnmp.Tcp.RtoMax = &value + procSnmp.RtoMax = &value case "MaxConn": - procSnmp.Tcp.MaxConn = &value + procSnmp.MaxConn = &value case "ActiveOpens": - procSnmp.Tcp.ActiveOpens = &value + procSnmp.ActiveOpens = &value case "PassiveOpens": - procSnmp.Tcp.PassiveOpens = &value + procSnmp.PassiveOpens = &value case "AttemptFails": - procSnmp.Tcp.AttemptFails = &value + procSnmp.AttemptFails = &value case "EstabResets": - procSnmp.Tcp.EstabResets = &value + procSnmp.EstabResets = &value case "CurrEstab": - procSnmp.Tcp.CurrEstab = &value + procSnmp.CurrEstab = &value case "InSegs": - procSnmp.Tcp.InSegs = &value + procSnmp.InSegs = &value case "OutSegs": - procSnmp.Tcp.OutSegs = &value + procSnmp.OutSegs = &value case "RetransSegs": - procSnmp.Tcp.RetransSegs = &value + procSnmp.RetransSegs = &value case "InErrs": - procSnmp.Tcp.InErrs = &value + procSnmp.InErrs = &value case "OutRsts": - procSnmp.Tcp.OutRsts = &value + procSnmp.OutRsts = &value case "InCsumErrors": procSnmp.Tcp.InCsumErrors = &value } diff --git a/vendor/github.com/prometheus/procfs/proc_snmp6.go b/vendor/github.com/prometheus/procfs/proc_snmp6.go index 3059cc6a..fb7fd399 100644 --- a/vendor/github.com/prometheus/procfs/proc_snmp6.go +++ b/vendor/github.com/prometheus/procfs/proc_snmp6.go @@ -182,161 +182,161 @@ func parseSNMP6Stats(r io.Reader) (ProcSnmp6, error) { case "Ip6": switch key { case "InReceives": - procSnmp6.Ip6.InReceives = &value + procSnmp6.InReceives = &value case "InHdrErrors": - procSnmp6.Ip6.InHdrErrors = &value + procSnmp6.InHdrErrors = &value case "InTooBigErrors": - procSnmp6.Ip6.InTooBigErrors = &value + procSnmp6.InTooBigErrors = &value case "InNoRoutes": - 
procSnmp6.Ip6.InNoRoutes = &value + procSnmp6.InNoRoutes = &value case "InAddrErrors": - procSnmp6.Ip6.InAddrErrors = &value + procSnmp6.InAddrErrors = &value case "InUnknownProtos": - procSnmp6.Ip6.InUnknownProtos = &value + procSnmp6.InUnknownProtos = &value case "InTruncatedPkts": - procSnmp6.Ip6.InTruncatedPkts = &value + procSnmp6.InTruncatedPkts = &value case "InDiscards": - procSnmp6.Ip6.InDiscards = &value + procSnmp6.InDiscards = &value case "InDelivers": - procSnmp6.Ip6.InDelivers = &value + procSnmp6.InDelivers = &value case "OutForwDatagrams": - procSnmp6.Ip6.OutForwDatagrams = &value + procSnmp6.OutForwDatagrams = &value case "OutRequests": - procSnmp6.Ip6.OutRequests = &value + procSnmp6.OutRequests = &value case "OutDiscards": - procSnmp6.Ip6.OutDiscards = &value + procSnmp6.OutDiscards = &value case "OutNoRoutes": - procSnmp6.Ip6.OutNoRoutes = &value + procSnmp6.OutNoRoutes = &value case "ReasmTimeout": - procSnmp6.Ip6.ReasmTimeout = &value + procSnmp6.ReasmTimeout = &value case "ReasmReqds": - procSnmp6.Ip6.ReasmReqds = &value + procSnmp6.ReasmReqds = &value case "ReasmOKs": - procSnmp6.Ip6.ReasmOKs = &value + procSnmp6.ReasmOKs = &value case "ReasmFails": - procSnmp6.Ip6.ReasmFails = &value + procSnmp6.ReasmFails = &value case "FragOKs": - procSnmp6.Ip6.FragOKs = &value + procSnmp6.FragOKs = &value case "FragFails": - procSnmp6.Ip6.FragFails = &value + procSnmp6.FragFails = &value case "FragCreates": - procSnmp6.Ip6.FragCreates = &value + procSnmp6.FragCreates = &value case "InMcastPkts": - procSnmp6.Ip6.InMcastPkts = &value + procSnmp6.InMcastPkts = &value case "OutMcastPkts": - procSnmp6.Ip6.OutMcastPkts = &value + procSnmp6.OutMcastPkts = &value case "InOctets": - procSnmp6.Ip6.InOctets = &value + procSnmp6.InOctets = &value case "OutOctets": - procSnmp6.Ip6.OutOctets = &value + procSnmp6.OutOctets = &value case "InMcastOctets": - procSnmp6.Ip6.InMcastOctets = &value + procSnmp6.InMcastOctets = &value case "OutMcastOctets": - procSnmp6.Ip6.OutMcastOctets = &value + procSnmp6.OutMcastOctets = &value case "InBcastOctets": - procSnmp6.Ip6.InBcastOctets = &value + procSnmp6.InBcastOctets = &value case "OutBcastOctets": - procSnmp6.Ip6.OutBcastOctets = &value + procSnmp6.OutBcastOctets = &value case "InNoECTPkts": - procSnmp6.Ip6.InNoECTPkts = &value + procSnmp6.InNoECTPkts = &value case "InECT1Pkts": - procSnmp6.Ip6.InECT1Pkts = &value + procSnmp6.InECT1Pkts = &value case "InECT0Pkts": - procSnmp6.Ip6.InECT0Pkts = &value + procSnmp6.InECT0Pkts = &value case "InCEPkts": - procSnmp6.Ip6.InCEPkts = &value + procSnmp6.InCEPkts = &value } case "Icmp6": switch key { case "InMsgs": - procSnmp6.Icmp6.InMsgs = &value + procSnmp6.InMsgs = &value case "InErrors": procSnmp6.Icmp6.InErrors = &value case "OutMsgs": - procSnmp6.Icmp6.OutMsgs = &value + procSnmp6.OutMsgs = &value case "OutErrors": - procSnmp6.Icmp6.OutErrors = &value + procSnmp6.OutErrors = &value case "InCsumErrors": procSnmp6.Icmp6.InCsumErrors = &value case "InDestUnreachs": - procSnmp6.Icmp6.InDestUnreachs = &value + procSnmp6.InDestUnreachs = &value case "InPktTooBigs": - procSnmp6.Icmp6.InPktTooBigs = &value + procSnmp6.InPktTooBigs = &value case "InTimeExcds": - procSnmp6.Icmp6.InTimeExcds = &value + procSnmp6.InTimeExcds = &value case "InParmProblems": - procSnmp6.Icmp6.InParmProblems = &value + procSnmp6.InParmProblems = &value case "InEchos": - procSnmp6.Icmp6.InEchos = &value + procSnmp6.InEchos = &value case "InEchoReplies": - procSnmp6.Icmp6.InEchoReplies = &value + procSnmp6.InEchoReplies = &value case 
"InGroupMembQueries": - procSnmp6.Icmp6.InGroupMembQueries = &value + procSnmp6.InGroupMembQueries = &value case "InGroupMembResponses": - procSnmp6.Icmp6.InGroupMembResponses = &value + procSnmp6.InGroupMembResponses = &value case "InGroupMembReductions": - procSnmp6.Icmp6.InGroupMembReductions = &value + procSnmp6.InGroupMembReductions = &value case "InRouterSolicits": - procSnmp6.Icmp6.InRouterSolicits = &value + procSnmp6.InRouterSolicits = &value case "InRouterAdvertisements": - procSnmp6.Icmp6.InRouterAdvertisements = &value + procSnmp6.InRouterAdvertisements = &value case "InNeighborSolicits": - procSnmp6.Icmp6.InNeighborSolicits = &value + procSnmp6.InNeighborSolicits = &value case "InNeighborAdvertisements": - procSnmp6.Icmp6.InNeighborAdvertisements = &value + procSnmp6.InNeighborAdvertisements = &value case "InRedirects": - procSnmp6.Icmp6.InRedirects = &value + procSnmp6.InRedirects = &value case "InMLDv2Reports": - procSnmp6.Icmp6.InMLDv2Reports = &value + procSnmp6.InMLDv2Reports = &value case "OutDestUnreachs": - procSnmp6.Icmp6.OutDestUnreachs = &value + procSnmp6.OutDestUnreachs = &value case "OutPktTooBigs": - procSnmp6.Icmp6.OutPktTooBigs = &value + procSnmp6.OutPktTooBigs = &value case "OutTimeExcds": - procSnmp6.Icmp6.OutTimeExcds = &value + procSnmp6.OutTimeExcds = &value case "OutParmProblems": - procSnmp6.Icmp6.OutParmProblems = &value + procSnmp6.OutParmProblems = &value case "OutEchos": - procSnmp6.Icmp6.OutEchos = &value + procSnmp6.OutEchos = &value case "OutEchoReplies": - procSnmp6.Icmp6.OutEchoReplies = &value + procSnmp6.OutEchoReplies = &value case "OutGroupMembQueries": - procSnmp6.Icmp6.OutGroupMembQueries = &value + procSnmp6.OutGroupMembQueries = &value case "OutGroupMembResponses": - procSnmp6.Icmp6.OutGroupMembResponses = &value + procSnmp6.OutGroupMembResponses = &value case "OutGroupMembReductions": - procSnmp6.Icmp6.OutGroupMembReductions = &value + procSnmp6.OutGroupMembReductions = &value case "OutRouterSolicits": - procSnmp6.Icmp6.OutRouterSolicits = &value + procSnmp6.OutRouterSolicits = &value case "OutRouterAdvertisements": - procSnmp6.Icmp6.OutRouterAdvertisements = &value + procSnmp6.OutRouterAdvertisements = &value case "OutNeighborSolicits": - procSnmp6.Icmp6.OutNeighborSolicits = &value + procSnmp6.OutNeighborSolicits = &value case "OutNeighborAdvertisements": - procSnmp6.Icmp6.OutNeighborAdvertisements = &value + procSnmp6.OutNeighborAdvertisements = &value case "OutRedirects": - procSnmp6.Icmp6.OutRedirects = &value + procSnmp6.OutRedirects = &value case "OutMLDv2Reports": - procSnmp6.Icmp6.OutMLDv2Reports = &value + procSnmp6.OutMLDv2Reports = &value case "InType1": - procSnmp6.Icmp6.InType1 = &value + procSnmp6.InType1 = &value case "InType134": - procSnmp6.Icmp6.InType134 = &value + procSnmp6.InType134 = &value case "InType135": - procSnmp6.Icmp6.InType135 = &value + procSnmp6.InType135 = &value case "InType136": - procSnmp6.Icmp6.InType136 = &value + procSnmp6.InType136 = &value case "InType143": - procSnmp6.Icmp6.InType143 = &value + procSnmp6.InType143 = &value case "OutType133": - procSnmp6.Icmp6.OutType133 = &value + procSnmp6.OutType133 = &value case "OutType135": - procSnmp6.Icmp6.OutType135 = &value + procSnmp6.OutType135 = &value case "OutType136": - procSnmp6.Icmp6.OutType136 = &value + procSnmp6.OutType136 = &value case "OutType143": - procSnmp6.Icmp6.OutType143 = &value + procSnmp6.OutType143 = &value } case "Udp6": switch key { @@ -355,7 +355,7 @@ func parseSNMP6Stats(r io.Reader) (ProcSnmp6, error) { case 
"InCsumErrors": procSnmp6.Udp6.InCsumErrors = &value case "IgnoredMulti": - procSnmp6.Udp6.IgnoredMulti = &value + procSnmp6.IgnoredMulti = &value } case "UdpLite6": switch key { diff --git a/vendor/github.com/prometheus/procfs/proc_sys.go b/vendor/github.com/prometheus/procfs/proc_sys.go index 5eefbe2e..3810d1ac 100644 --- a/vendor/github.com/prometheus/procfs/proc_sys.go +++ b/vendor/github.com/prometheus/procfs/proc_sys.go @@ -21,7 +21,7 @@ import ( ) func sysctlToPath(sysctl string) string { - return strings.Replace(sysctl, ".", "/", -1) + return strings.ReplaceAll(sysctl, ".", "/") } func (fs FS) SysctlStrings(sysctl string) ([]string, error) { diff --git a/vendor/github.com/prometheus/procfs/softirqs.go b/vendor/github.com/prometheus/procfs/softirqs.go index 28708e07..403e6ae7 100644 --- a/vendor/github.com/prometheus/procfs/softirqs.go +++ b/vendor/github.com/prometheus/procfs/softirqs.go @@ -68,8 +68,8 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { if len(parts) < 2 { continue } - switch { - case parts[0] == "HI:": + switch parts[0] { + case "HI:": perCPU := parts[1:] softirqs.Hi = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -77,7 +77,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (HI%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "TIMER:": + case "TIMER:": perCPU := parts[1:] softirqs.Timer = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -85,7 +85,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (TIMER%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "NET_TX:": + case "NET_TX:": perCPU := parts[1:] softirqs.NetTx = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -93,7 +93,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (NET_TX%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "NET_RX:": + case "NET_RX:": perCPU := parts[1:] softirqs.NetRx = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -101,7 +101,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (NET_RX%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "BLOCK:": + case "BLOCK:": perCPU := parts[1:] softirqs.Block = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -109,7 +109,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (BLOCK%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "IRQ_POLL:": + case "IRQ_POLL:": perCPU := parts[1:] softirqs.IRQPoll = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -117,7 +117,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (IRQ_POLL%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "TASKLET:": + case "TASKLET:": perCPU := parts[1:] softirqs.Tasklet = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -125,7 +125,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (TASKLET%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "SCHED:": + case "SCHED:": perCPU := parts[1:] softirqs.Sched = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -133,7 +133,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (SCHED%d): %w", 
ErrFileParse, count, i, err) } } - case parts[0] == "HRTIMER:": + case "HRTIMER:": perCPU := parts[1:] softirqs.HRTimer = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -141,7 +141,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (HRTIMER%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "RCU:": + case "RCU:": perCPU := parts[1:] softirqs.RCU = make([]uint64, len(perCPU)) for i, count := range perCPU { diff --git a/vendor/golang.org/x/oauth2/internal/doc.go b/vendor/golang.org/x/oauth2/internal/doc.go index 03265e88..8c7c475f 100644 --- a/vendor/golang.org/x/oauth2/internal/doc.go +++ b/vendor/golang.org/x/oauth2/internal/doc.go @@ -2,5 +2,5 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package internal contains support packages for oauth2 package. +// Package internal contains support packages for [golang.org/x/oauth2]. package internal diff --git a/vendor/golang.org/x/oauth2/internal/oauth2.go b/vendor/golang.org/x/oauth2/internal/oauth2.go index 14989bea..71ea6ad1 100644 --- a/vendor/golang.org/x/oauth2/internal/oauth2.go +++ b/vendor/golang.org/x/oauth2/internal/oauth2.go @@ -13,7 +13,7 @@ import ( ) // ParseKey converts the binary contents of a private key file -// to an *rsa.PrivateKey. It detects whether the private key is in a +// to an [*rsa.PrivateKey]. It detects whether the private key is in a // PEM container or not. If so, it extracts the private key // from PEM container before conversion. It only supports PEM // containers with no passphrase. diff --git a/vendor/golang.org/x/oauth2/internal/token.go b/vendor/golang.org/x/oauth2/internal/token.go index e83ddeef..8389f246 100644 --- a/vendor/golang.org/x/oauth2/internal/token.go +++ b/vendor/golang.org/x/oauth2/internal/token.go @@ -10,7 +10,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "math" "mime" "net/http" @@ -26,9 +25,9 @@ import ( // the requests to access protected resources on the OAuth 2.0 // provider's backend. // -// This type is a mirror of oauth2.Token and exists to break +// This type is a mirror of [golang.org/x/oauth2.Token] and exists to break // an otherwise-circular dependency. Other internal packages -// should convert this Token into an oauth2.Token before use. +// should convert this Token into an [golang.org/x/oauth2.Token] before use. type Token struct { // AccessToken is the token that authorizes and authenticates // the requests. @@ -50,9 +49,16 @@ type Token struct { // mechanisms for that TokenSource will not be used. Expiry time.Time + // ExpiresIn is the OAuth2 wire format "expires_in" field, + // which specifies how many seconds later the token expires, + // relative to an unknown time base approximately around "now". + // It is the application's responsibility to populate + // `Expiry` from `ExpiresIn` when required. + ExpiresIn int64 `json:"expires_in,omitempty"` + // Raw optionally contains extra metadata from the server // when updating a token. - Raw interface{} + Raw any } // tokenJSON is the struct representing the HTTP response from OAuth2 @@ -99,14 +105,6 @@ func (e *expirationTime) UnmarshalJSON(b []byte) error { return nil } -// RegisterBrokenAuthHeaderProvider previously did something. It is now a no-op. -// -// Deprecated: this function no longer does anything. Caller code that -// wants to avoid potential extra HTTP requests made during -// auto-probing of the provider's auth style should set -// Endpoint.AuthStyle. 
-func RegisterBrokenAuthHeaderProvider(tokenURL string) {} - // AuthStyle is a copy of the golang.org/x/oauth2 package's AuthStyle type. type AuthStyle int @@ -143,6 +141,11 @@ func (lc *LazyAuthStyleCache) Get() *AuthStyleCache { return c } +type authStyleCacheKey struct { + url string + clientID string +} + // AuthStyleCache is the set of tokenURLs we've successfully used via // RetrieveToken and which style auth we ended up using. // It's called a cache, but it doesn't (yet?) shrink. It's expected that @@ -150,26 +153,26 @@ func (lc *LazyAuthStyleCache) Get() *AuthStyleCache { // small. type AuthStyleCache struct { mu sync.Mutex - m map[string]AuthStyle // keyed by tokenURL + m map[authStyleCacheKey]AuthStyle } // lookupAuthStyle reports which auth style we last used with tokenURL // when calling RetrieveToken and whether we have ever done so. -func (c *AuthStyleCache) lookupAuthStyle(tokenURL string) (style AuthStyle, ok bool) { +func (c *AuthStyleCache) lookupAuthStyle(tokenURL, clientID string) (style AuthStyle, ok bool) { c.mu.Lock() defer c.mu.Unlock() - style, ok = c.m[tokenURL] + style, ok = c.m[authStyleCacheKey{tokenURL, clientID}] return } // setAuthStyle adds an entry to authStyleCache, documented above. -func (c *AuthStyleCache) setAuthStyle(tokenURL string, v AuthStyle) { +func (c *AuthStyleCache) setAuthStyle(tokenURL, clientID string, v AuthStyle) { c.mu.Lock() defer c.mu.Unlock() if c.m == nil { - c.m = make(map[string]AuthStyle) + c.m = make(map[authStyleCacheKey]AuthStyle) } - c.m[tokenURL] = v + c.m[authStyleCacheKey{tokenURL, clientID}] = v } // newTokenRequest returns a new *http.Request to retrieve a new token @@ -210,9 +213,9 @@ func cloneURLValues(v url.Values) url.Values { } func RetrieveToken(ctx context.Context, clientID, clientSecret, tokenURL string, v url.Values, authStyle AuthStyle, styleCache *AuthStyleCache) (*Token, error) { - needsAuthStyleProbe := authStyle == 0 + needsAuthStyleProbe := authStyle == AuthStyleUnknown if needsAuthStyleProbe { - if style, ok := styleCache.lookupAuthStyle(tokenURL); ok { + if style, ok := styleCache.lookupAuthStyle(tokenURL, clientID); ok { authStyle = style needsAuthStyleProbe = false } else { @@ -242,7 +245,7 @@ func RetrieveToken(ctx context.Context, clientID, clientSecret, tokenURL string, token, err = doTokenRoundTrip(ctx, req) } if needsAuthStyleProbe && err == nil { - styleCache.setAuthStyle(tokenURL, authStyle) + styleCache.setAuthStyle(tokenURL, clientID, authStyle) } // Don't overwrite `RefreshToken` with an empty value // if this was a token refreshing request. 
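The hunks above change the auth-style probe cache from being keyed by token URL alone to being keyed by the (token URL, client ID) pair, presumably so that two clients of the same endpoint cannot overwrite each other's detected auth style. A minimal, self-contained sketch of that keying idea follows; the names (styleCache, cacheKey, and so on) are illustrative only and are not part of the vendored package:

package main

import (
	"fmt"
	"sync"
)

// authStyle mirrors the idea of oauth2's AuthStyle: how client
// credentials are sent to the token endpoint.
type authStyle int

const (
	styleUnknown authStyle = iota
	styleInParams
	styleInHeader
)

// cacheKey pairs the token URL with the client ID, so different
// clients of the same endpoint keep independent entries.
type cacheKey struct {
	url      string
	clientID string
}

// styleCache is an illustrative stand-in for the vendored AuthStyleCache.
type styleCache struct {
	mu sync.Mutex
	m  map[cacheKey]authStyle
}

func (c *styleCache) get(url, clientID string) (authStyle, bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	s, ok := c.m[cacheKey{url, clientID}]
	return s, ok
}

func (c *styleCache) set(url, clientID string, s authStyle) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.m == nil {
		c.m = make(map[cacheKey]authStyle)
	}
	c.m[cacheKey{url, clientID}] = s
}

func main() {
	var c styleCache
	c.set("https://idp.example/token", "client-a", styleInHeader)
	c.set("https://idp.example/token", "client-b", styleInParams)

	// Same endpoint, different clients: each keeps its own probed style.
	a, _ := c.get("https://idp.example/token", "client-a")
	b, _ := c.get("https://idp.example/token", "client-b")
	fmt.Println(a == b) // false
}

Keying on the pair keeps the cache small (one entry per client per endpoint) while preventing one client's probe result from being reused for another client of the same token URL.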
@@ -257,7 +260,7 @@ func doTokenRoundTrip(ctx context.Context, req *http.Request) (*Token, error) { if err != nil { return nil, err } - body, err := ioutil.ReadAll(io.LimitReader(r.Body, 1<<20)) + body, err := io.ReadAll(io.LimitReader(r.Body, 1<<20)) r.Body.Close() if err != nil { return nil, fmt.Errorf("oauth2: cannot fetch token: %v", err) @@ -312,7 +315,8 @@ func doTokenRoundTrip(ctx context.Context, req *http.Request) (*Token, error) { TokenType: tj.TokenType, RefreshToken: tj.RefreshToken, Expiry: tj.expiry(), - Raw: make(map[string]interface{}), + ExpiresIn: int64(tj.ExpiresIn), + Raw: make(map[string]any), } json.Unmarshal(body, &token.Raw) // no error checks for optional fields } diff --git a/vendor/golang.org/x/oauth2/internal/transport.go b/vendor/golang.org/x/oauth2/internal/transport.go index b9db01dd..afc0aeb2 100644 --- a/vendor/golang.org/x/oauth2/internal/transport.go +++ b/vendor/golang.org/x/oauth2/internal/transport.go @@ -9,8 +9,8 @@ import ( "net/http" ) -// HTTPClient is the context key to use with golang.org/x/net/context's -// WithValue function to associate an *http.Client value with a context. +// HTTPClient is the context key to use with [context.WithValue] +// to associate an [*http.Client] value with a context. var HTTPClient ContextKey // ContextKey is just an empty struct. It exists so HTTPClient can be diff --git a/vendor/golang.org/x/oauth2/oauth2.go b/vendor/golang.org/x/oauth2/oauth2.go index eacdd7fd..de34feb8 100644 --- a/vendor/golang.org/x/oauth2/oauth2.go +++ b/vendor/golang.org/x/oauth2/oauth2.go @@ -22,9 +22,9 @@ import ( ) // NoContext is the default context you should supply if not using -// your own context.Context (see https://golang.org/x/net/context). +// your own [context.Context]. // -// Deprecated: Use context.Background() or context.TODO() instead. +// Deprecated: Use [context.Background] or [context.TODO] instead. var NoContext = context.TODO() // RegisterBrokenAuthHeaderProvider previously did something. It is now a no-op. @@ -37,8 +37,8 @@ func RegisterBrokenAuthHeaderProvider(tokenURL string) {} // Config describes a typical 3-legged OAuth2 flow, with both the // client application information and the server's endpoint URLs. -// For the client credentials 2-legged OAuth2 flow, see the clientcredentials -// package (https://golang.org/x/oauth2/clientcredentials). +// For the client credentials 2-legged OAuth2 flow, see the +// [golang.org/x/oauth2/clientcredentials] package. type Config struct { // ClientID is the application's ID. ClientID string @@ -46,7 +46,7 @@ type Config struct { // ClientSecret is the application's secret. ClientSecret string - // Endpoint contains the resource server's token endpoint + // Endpoint contains the authorization server's token endpoint // URLs. These are constants specific to each server and are // often available via site-specific packages, such as // google.Endpoint or github.Endpoint. @@ -135,7 +135,7 @@ type setParam struct{ k, v string } func (p setParam) setValue(m url.Values) { m.Set(p.k, p.v) } -// SetAuthURLParam builds an AuthCodeOption which passes key/value parameters +// SetAuthURLParam builds an [AuthCodeOption] which passes key/value parameters // to a provider's authorization endpoint. func SetAuthURLParam(key, value string) AuthCodeOption { return setParam{key, value} @@ -148,8 +148,8 @@ func SetAuthURLParam(key, value string) AuthCodeOption { // request and callback. The authorization server includes this value when // redirecting the user agent back to the client. 
// -// Opts may include AccessTypeOnline or AccessTypeOffline, as well -// as ApprovalForce. +// Opts may include [AccessTypeOnline] or [AccessTypeOffline], as well +// as [ApprovalForce]. // // To protect against CSRF attacks, opts should include a PKCE challenge // (S256ChallengeOption). Not all servers support PKCE. An alternative is to @@ -194,7 +194,7 @@ func (c *Config) AuthCodeURL(state string, opts ...AuthCodeOption) string { // and when other authorization grant types are not available." // See https://tools.ietf.org/html/rfc6749#section-4.3 for more info. // -// The provided context optionally controls which HTTP client is used. See the HTTPClient variable. +// The provided context optionally controls which HTTP client is used. See the [HTTPClient] variable. func (c *Config) PasswordCredentialsToken(ctx context.Context, username, password string) (*Token, error) { v := url.Values{ "grant_type": {"password"}, @@ -212,10 +212,10 @@ func (c *Config) PasswordCredentialsToken(ctx context.Context, username, passwor // It is used after a resource provider redirects the user back // to the Redirect URI (the URL obtained from AuthCodeURL). // -// The provided context optionally controls which HTTP client is used. See the HTTPClient variable. +// The provided context optionally controls which HTTP client is used. See the [HTTPClient] variable. // -// The code will be in the *http.Request.FormValue("code"). Before -// calling Exchange, be sure to validate FormValue("state") if you are +// The code will be in the [http.Request.FormValue]("code"). Before +// calling Exchange, be sure to validate [http.Request.FormValue]("state") if you are // using it to protect against CSRF attacks. // // If using PKCE to protect against CSRF attacks, opts should include a @@ -242,10 +242,10 @@ func (c *Config) Client(ctx context.Context, t *Token) *http.Client { return NewClient(ctx, c.TokenSource(ctx, t)) } -// TokenSource returns a TokenSource that returns t until t expires, +// TokenSource returns a [TokenSource] that returns t until t expires, // automatically refreshing it as necessary using the provided context. // -// Most users will use Config.Client instead. +// Most users will use [Config.Client] instead. func (c *Config) TokenSource(ctx context.Context, t *Token) TokenSource { tkr := &tokenRefresher{ ctx: ctx, @@ -260,7 +260,7 @@ func (c *Config) TokenSource(ctx context.Context, t *Token) TokenSource { } } -// tokenRefresher is a TokenSource that makes "grant_type"=="refresh_token" +// tokenRefresher is a TokenSource that makes "grant_type=refresh_token" // HTTP requests to renew a token using a RefreshToken. type tokenRefresher struct { ctx context.Context // used to get HTTP requests @@ -305,8 +305,7 @@ type reuseTokenSource struct { } // Token returns the current token if it's still valid, else will -// refresh the current token (using r.Context for HTTP client -// information) and return the new one. +// refresh the current token and return the new one. func (s *reuseTokenSource) Token() (*Token, error) { s.mu.Lock() defer s.mu.Unlock() @@ -322,7 +321,7 @@ func (s *reuseTokenSource) Token() (*Token, error) { return t, nil } -// StaticTokenSource returns a TokenSource that always returns the same token. +// StaticTokenSource returns a [TokenSource] that always returns the same token. // Because the provided token t is never refreshed, StaticTokenSource is only // useful for tokens that never expire. 
func StaticTokenSource(t *Token) TokenSource { @@ -338,16 +337,16 @@ func (s staticTokenSource) Token() (*Token, error) { return s.t, nil } -// HTTPClient is the context key to use with golang.org/x/net/context's -// WithValue function to associate an *http.Client value with a context. +// HTTPClient is the context key to use with [context.WithValue] +// to associate a [*http.Client] value with a context. var HTTPClient internal.ContextKey -// NewClient creates an *http.Client from a Context and TokenSource. +// NewClient creates an [*http.Client] from a [context.Context] and [TokenSource]. // The returned client is not valid beyond the lifetime of the context. // -// Note that if a custom *http.Client is provided via the Context it +// Note that if a custom [*http.Client] is provided via the [context.Context] it // is used only for token acquisition and is not used to configure the -// *http.Client returned from NewClient. +// [*http.Client] returned from NewClient. // // As a special case, if src is nil, a non-OAuth2 client is returned // using the provided context. This exists to support related OAuth2 @@ -368,7 +367,7 @@ func NewClient(ctx context.Context, src TokenSource) *http.Client { } } -// ReuseTokenSource returns a TokenSource which repeatedly returns the +// ReuseTokenSource returns a [TokenSource] which repeatedly returns the // same token as long as it's valid, starting with t. // When its cached token is invalid, a new token is obtained from src. // @@ -376,10 +375,10 @@ func NewClient(ctx context.Context, src TokenSource) *http.Client { // (such as a file on disk) between runs of a program, rather than // obtaining new tokens unnecessarily. // -// The initial token t may be nil, in which case the TokenSource is +// The initial token t may be nil, in which case the [TokenSource] is // wrapped in a caching version if it isn't one already. This also // means it's always safe to wrap ReuseTokenSource around any other -// TokenSource without adverse effects. +// [TokenSource] without adverse effects. func ReuseTokenSource(t *Token, src TokenSource) TokenSource { // Don't wrap a reuseTokenSource in itself. That would work, // but cause an unnecessary number of mutex operations. @@ -397,8 +396,8 @@ func ReuseTokenSource(t *Token, src TokenSource) TokenSource { } } -// ReuseTokenSourceWithExpiry returns a TokenSource that acts in the same manner as the -// TokenSource returned by ReuseTokenSource, except the expiry buffer is +// ReuseTokenSourceWithExpiry returns a [TokenSource] that acts in the same manner as the +// [TokenSource] returned by [ReuseTokenSource], except the expiry buffer is // configurable. The expiration time of a token is calculated as // t.Expiry.Add(-earlyExpiry). func ReuseTokenSourceWithExpiry(t *Token, src TokenSource, earlyExpiry time.Duration) TokenSource { diff --git a/vendor/golang.org/x/oauth2/pkce.go b/vendor/golang.org/x/oauth2/pkce.go index 6a95da97..cea8374d 100644 --- a/vendor/golang.org/x/oauth2/pkce.go +++ b/vendor/golang.org/x/oauth2/pkce.go @@ -1,6 +1,7 @@ // Copyright 2023 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. + package oauth2 import ( @@ -20,9 +21,9 @@ const ( // This follows recommendations in RFC 7636. // // A fresh verifier should be generated for each authorization. 
-// S256ChallengeOption(verifier) should then be passed to Config.AuthCodeURL -// (or Config.DeviceAuth) and VerifierOption(verifier) to Config.Exchange -// (or Config.DeviceAccessToken). +// The resulting verifier should be passed to [Config.AuthCodeURL] or [Config.DeviceAuth] +// with [S256ChallengeOption], and to [Config.Exchange] or [Config.DeviceAccessToken] +// with [VerifierOption]. func GenerateVerifier() string { // "RECOMMENDED that the output of a suitable random number generator be // used to create a 32-octet sequence. The octet sequence is then @@ -36,22 +37,22 @@ func GenerateVerifier() string { return base64.RawURLEncoding.EncodeToString(data) } -// VerifierOption returns a PKCE code verifier AuthCodeOption. It should be -// passed to Config.Exchange or Config.DeviceAccessToken only. +// VerifierOption returns a PKCE code verifier [AuthCodeOption]. It should only be +// passed to [Config.Exchange] or [Config.DeviceAccessToken]. func VerifierOption(verifier string) AuthCodeOption { return setParam{k: codeVerifierKey, v: verifier} } // S256ChallengeFromVerifier returns a PKCE code challenge derived from verifier with method S256. // -// Prefer to use S256ChallengeOption where possible. +// Prefer to use [S256ChallengeOption] where possible. func S256ChallengeFromVerifier(verifier string) string { sha := sha256.Sum256([]byte(verifier)) return base64.RawURLEncoding.EncodeToString(sha[:]) } // S256ChallengeOption derives a PKCE code challenge derived from verifier with -// method S256. It should be passed to Config.AuthCodeURL or Config.DeviceAuth +// method S256. It should be passed to [Config.AuthCodeURL] or [Config.DeviceAuth] // only. func S256ChallengeOption(verifier string) AuthCodeOption { return challengeOption{ diff --git a/vendor/golang.org/x/oauth2/token.go b/vendor/golang.org/x/oauth2/token.go index 8c31136c..239ec329 100644 --- a/vendor/golang.org/x/oauth2/token.go +++ b/vendor/golang.org/x/oauth2/token.go @@ -44,7 +44,7 @@ type Token struct { // Expiry is the optional expiration time of the access token. // - // If zero, TokenSource implementations will reuse the same + // If zero, [TokenSource] implementations will reuse the same // token forever and RefreshToken or equivalent // mechanisms for that TokenSource will not be used. Expiry time.Time `json:"expiry,omitempty"` @@ -58,7 +58,7 @@ type Token struct { // raw optionally contains extra metadata from the server // when updating a token. - raw interface{} + raw any // expiryDelta is used to calculate when a token is considered // expired, by subtracting from Expiry. If zero, defaultExpiryDelta @@ -86,16 +86,16 @@ func (t *Token) Type() string { // SetAuthHeader sets the Authorization header to r using the access // token in t. // -// This method is unnecessary when using Transport or an HTTP Client +// This method is unnecessary when using [Transport] or an HTTP Client // returned by this package. func (t *Token) SetAuthHeader(r *http.Request) { r.Header.Set("Authorization", t.Type()+" "+t.AccessToken) } -// WithExtra returns a new Token that's a clone of t, but using the +// WithExtra returns a new [Token] that's a clone of t, but using the // provided raw extra map. This is only intended for use by packages // implementing derivative OAuth2 flows. -func (t *Token) WithExtra(extra interface{}) *Token { +func (t *Token) WithExtra(extra any) *Token { t2 := new(Token) *t2 = *t t2.raw = extra @@ -105,8 +105,8 @@ func (t *Token) WithExtra(extra interface{}) *Token { // Extra returns an extra field. 
// Extra fields are key-value pairs returned by the server as a // part of the token retrieval response. -func (t *Token) Extra(key string) interface{} { - if raw, ok := t.raw.(map[string]interface{}); ok { +func (t *Token) Extra(key string) any { + if raw, ok := t.raw.(map[string]any); ok { return raw[key] } @@ -163,6 +163,7 @@ func tokenFromInternal(t *internal.Token) *Token { TokenType: t.TokenType, RefreshToken: t.RefreshToken, Expiry: t.Expiry, + ExpiresIn: t.ExpiresIn, raw: t.Raw, } } diff --git a/vendor/golang.org/x/oauth2/transport.go b/vendor/golang.org/x/oauth2/transport.go index 90657915..8bbebbac 100644 --- a/vendor/golang.org/x/oauth2/transport.go +++ b/vendor/golang.org/x/oauth2/transport.go @@ -11,12 +11,12 @@ import ( "sync" ) -// Transport is an http.RoundTripper that makes OAuth 2.0 HTTP requests, -// wrapping a base RoundTripper and adding an Authorization header -// with a token from the supplied Sources. +// Transport is an [http.RoundTripper] that makes OAuth 2.0 HTTP requests, +// wrapping a base [http.RoundTripper] and adding an Authorization header +// with a token from the supplied [TokenSource]. // // Transport is a low-level mechanism. Most code will use the -// higher-level Config.Client method instead. +// higher-level [Config.Client] method instead. type Transport struct { // Source supplies the token to add to outgoing requests' // Authorization headers. @@ -47,7 +47,7 @@ func (t *Transport) RoundTrip(req *http.Request) (*http.Response, error) { return nil, err } - req2 := cloneRequest(req) // per RoundTripper contract + req2 := req.Clone(req.Context()) token.SetAuthHeader(req2) // req.Body is assumed to be closed by the base RoundTripper. @@ -73,17 +73,3 @@ func (t *Transport) base() http.RoundTripper { } return http.DefaultTransport } - -// cloneRequest returns a clone of the provided *http.Request. -// The clone is a shallow copy of the struct and its Header map. -func cloneRequest(r *http.Request) *http.Request { - // shallow copy of the struct - r2 := new(http.Request) - *r2 = *r - // deep copy of the Header - r2.Header = make(http.Header, len(r.Header)) - for k, s := range r.Header { - r2.Header[k] = append([]string(nil), s...) - } - return r2 -} diff --git a/vendor/golang.org/x/sync/errgroup/errgroup.go b/vendor/golang.org/x/sync/errgroup/errgroup.go index f8c3c092..cfafed5b 100644 --- a/vendor/golang.org/x/sync/errgroup/errgroup.go +++ b/vendor/golang.org/x/sync/errgroup/errgroup.go @@ -12,6 +12,8 @@ package errgroup import ( "context" "fmt" + "runtime" + "runtime/debug" "sync" ) @@ -31,6 +33,10 @@ type Group struct { errOnce sync.Once err error + + mu sync.Mutex + panicValue any // = PanicError | PanicValue; non-nil if some Group.Go goroutine panicked. + abnormal bool // some Group.Go goroutine terminated abnormally (panic or goexit). } func (g *Group) done() { @@ -50,13 +56,22 @@ func WithContext(ctx context.Context) (*Group, context.Context) { return &Group{cancel: cancel}, ctx } -// Wait blocks until all function calls from the Go method have returned, then -// returns the first non-nil error (if any) from them. +// Wait blocks until all function calls from the Go method have returned +// normally, then returns the first non-nil error (if any) from them. +// +// If any of the calls panics, Wait panics with a [PanicValue]; +// and if any of them calls [runtime.Goexit], Wait calls runtime.Goexit. 
func (g *Group) Wait() error { g.wg.Wait() if g.cancel != nil { g.cancel(g.err) } + if g.panicValue != nil { + panic(g.panicValue) + } + if g.abnormal { + runtime.Goexit() + } return g.err } @@ -65,18 +80,56 @@ func (g *Group) Wait() error { // It blocks until the new goroutine can be added without the number of // active goroutines in the group exceeding the configured limit. // -// The first call to return a non-nil error cancels the group's context, if the -// group was created by calling WithContext. The error will be returned by Wait. +// It blocks until the new goroutine can be added without the number of +// goroutines in the group exceeding the configured limit. +// +// The first goroutine in the group that returns a non-nil error, panics, or +// invokes [runtime.Goexit] will cancel the associated Context, if any. func (g *Group) Go(f func() error) { if g.sem != nil { g.sem <- token{} } + g.add(f) +} + +func (g *Group) add(f func() error) { g.wg.Add(1) go func() { defer g.done() + normalReturn := false + defer func() { + if normalReturn { + return + } + v := recover() + g.mu.Lock() + defer g.mu.Unlock() + if !g.abnormal { + if g.cancel != nil { + g.cancel(g.err) + } + g.abnormal = true + } + if v != nil && g.panicValue == nil { + switch v := v.(type) { + case error: + g.panicValue = PanicError{ + Recovered: v, + Stack: debug.Stack(), + } + default: + g.panicValue = PanicValue{ + Recovered: v, + Stack: debug.Stack(), + } + } + } + }() - if err := f(); err != nil { + err := f() + normalReturn = true + if err != nil { g.errOnce.Do(func() { g.err = err if g.cancel != nil { @@ -101,19 +154,7 @@ func (g *Group) TryGo(f func() error) bool { } } - g.wg.Add(1) - go func() { - defer g.done() - - if err := f(); err != nil { - g.errOnce.Do(func() { - g.err = err - if g.cancel != nil { - g.cancel(g.err) - } - }) - } - }() + g.add(f) return true } @@ -135,3 +176,33 @@ func (g *Group) SetLimit(n int) { } g.sem = make(chan token, n) } + +// PanicError wraps an error recovered from an unhandled panic +// when calling a function passed to Go or TryGo. +type PanicError struct { + Recovered error + Stack []byte // result of call to [debug.Stack] +} + +func (p PanicError) Error() string { + // A Go Error method conventionally does not include a stack dump, so omit it + // here. (Callers who care can extract it from the Stack field.) + return fmt.Sprintf("recovered from errgroup.Group: %v", p.Recovered) +} + +func (p PanicError) Unwrap() error { return p.Recovered } + +// PanicValue wraps a value that does not implement the error interface, +// recovered from an unhandled panic when calling a function passed to Go or +// TryGo. 
+type PanicValue struct { + Recovered any + Stack []byte // result of call to [debug.Stack] +} + +func (p PanicValue) String() string { + if len(p.Stack) > 0 { + return fmt.Sprintf("recovered from errgroup.Group: %v\n%s", p.Recovered, p.Stack) + } + return fmt.Sprintf("recovered from errgroup.Group: %v", p.Recovered) +} diff --git a/vendor/golang.org/x/sys/cpu/cpu.go b/vendor/golang.org/x/sys/cpu/cpu.go index 2e73ee19..63541994 100644 --- a/vendor/golang.org/x/sys/cpu/cpu.go +++ b/vendor/golang.org/x/sys/cpu/cpu.go @@ -232,6 +232,17 @@ var RISCV64 struct { HasZba bool // Address generation instructions extension HasZbb bool // Basic bit-manipulation extension HasZbs bool // Single-bit instructions extension + HasZvbb bool // Vector Basic Bit-manipulation + HasZvbc bool // Vector Carryless Multiplication + HasZvkb bool // Vector Cryptography Bit-manipulation + HasZvkt bool // Vector Data-Independent Execution Latency + HasZvkg bool // Vector GCM/GMAC + HasZvkn bool // NIST Algorithm Suite (AES/SHA256/SHA512) + HasZvknc bool // NIST Algorithm Suite with carryless multiply + HasZvkng bool // NIST Algorithm Suite with GCM + HasZvks bool // ShangMi Algorithm Suite + HasZvksc bool // ShangMi Algorithm Suite with carryless multiplication + HasZvksg bool // ShangMi Algorithm Suite with GCM _ CacheLinePad } diff --git a/vendor/golang.org/x/sys/cpu/cpu_linux_riscv64.go b/vendor/golang.org/x/sys/cpu/cpu_linux_riscv64.go index cb4a0c57..ad741536 100644 --- a/vendor/golang.org/x/sys/cpu/cpu_linux_riscv64.go +++ b/vendor/golang.org/x/sys/cpu/cpu_linux_riscv64.go @@ -58,6 +58,15 @@ const ( riscv_HWPROBE_EXT_ZBA = 0x8 riscv_HWPROBE_EXT_ZBB = 0x10 riscv_HWPROBE_EXT_ZBS = 0x20 + riscv_HWPROBE_EXT_ZVBB = 0x20000 + riscv_HWPROBE_EXT_ZVBC = 0x40000 + riscv_HWPROBE_EXT_ZVKB = 0x80000 + riscv_HWPROBE_EXT_ZVKG = 0x100000 + riscv_HWPROBE_EXT_ZVKNED = 0x200000 + riscv_HWPROBE_EXT_ZVKNHB = 0x800000 + riscv_HWPROBE_EXT_ZVKSED = 0x1000000 + riscv_HWPROBE_EXT_ZVKSH = 0x2000000 + riscv_HWPROBE_EXT_ZVKT = 0x4000000 riscv_HWPROBE_KEY_CPUPERF_0 = 0x5 riscv_HWPROBE_MISALIGNED_FAST = 0x3 riscv_HWPROBE_MISALIGNED_MASK = 0x7 @@ -99,6 +108,20 @@ func doinit() { RISCV64.HasZba = isSet(v, riscv_HWPROBE_EXT_ZBA) RISCV64.HasZbb = isSet(v, riscv_HWPROBE_EXT_ZBB) RISCV64.HasZbs = isSet(v, riscv_HWPROBE_EXT_ZBS) + RISCV64.HasZvbb = isSet(v, riscv_HWPROBE_EXT_ZVBB) + RISCV64.HasZvbc = isSet(v, riscv_HWPROBE_EXT_ZVBC) + RISCV64.HasZvkb = isSet(v, riscv_HWPROBE_EXT_ZVKB) + RISCV64.HasZvkg = isSet(v, riscv_HWPROBE_EXT_ZVKG) + RISCV64.HasZvkt = isSet(v, riscv_HWPROBE_EXT_ZVKT) + // Cryptography shorthand extensions + RISCV64.HasZvkn = isSet(v, riscv_HWPROBE_EXT_ZVKNED) && + isSet(v, riscv_HWPROBE_EXT_ZVKNHB) && RISCV64.HasZvkb && RISCV64.HasZvkt + RISCV64.HasZvknc = RISCV64.HasZvkn && RISCV64.HasZvbc + RISCV64.HasZvkng = RISCV64.HasZvkn && RISCV64.HasZvkg + RISCV64.HasZvks = isSet(v, riscv_HWPROBE_EXT_ZVKSED) && + isSet(v, riscv_HWPROBE_EXT_ZVKSH) && RISCV64.HasZvkb && RISCV64.HasZvkt + RISCV64.HasZvksc = RISCV64.HasZvks && RISCV64.HasZvbc + RISCV64.HasZvksg = RISCV64.HasZvks && RISCV64.HasZvkg } if pairs[1].key != -1 { v := pairs[1].value & riscv_HWPROBE_MISALIGNED_MASK diff --git a/vendor/golang.org/x/sys/cpu/cpu_riscv64.go b/vendor/golang.org/x/sys/cpu/cpu_riscv64.go index aca3199c..0f617aef 100644 --- a/vendor/golang.org/x/sys/cpu/cpu_riscv64.go +++ b/vendor/golang.org/x/sys/cpu/cpu_riscv64.go @@ -16,5 +16,17 @@ func initOptions() { {Name: "zba", Feature: &RISCV64.HasZba}, {Name: "zbb", Feature: &RISCV64.HasZbb}, {Name: "zbs", Feature: 
&RISCV64.HasZbs}, + // RISC-V Cryptography Extensions + {Name: "zvbb", Feature: &RISCV64.HasZvbb}, + {Name: "zvbc", Feature: &RISCV64.HasZvbc}, + {Name: "zvkb", Feature: &RISCV64.HasZvkb}, + {Name: "zvkg", Feature: &RISCV64.HasZvkg}, + {Name: "zvkt", Feature: &RISCV64.HasZvkt}, + {Name: "zvkn", Feature: &RISCV64.HasZvkn}, + {Name: "zvknc", Feature: &RISCV64.HasZvknc}, + {Name: "zvkng", Feature: &RISCV64.HasZvkng}, + {Name: "zvks", Feature: &RISCV64.HasZvks}, + {Name: "zvksc", Feature: &RISCV64.HasZvksc}, + {Name: "zvksg", Feature: &RISCV64.HasZvksg}, } } diff --git a/vendor/golang.org/x/sys/windows/security_windows.go b/vendor/golang.org/x/sys/windows/security_windows.go index b6e1ab76..a8b0364c 100644 --- a/vendor/golang.org/x/sys/windows/security_windows.go +++ b/vendor/golang.org/x/sys/windows/security_windows.go @@ -1303,7 +1303,10 @@ func (selfRelativeSD *SECURITY_DESCRIPTOR) ToAbsolute() (absoluteSD *SECURITY_DE return nil, err } if absoluteSDSize > 0 { - absoluteSD = (*SECURITY_DESCRIPTOR)(unsafe.Pointer(&make([]byte, absoluteSDSize)[0])) + absoluteSD = new(SECURITY_DESCRIPTOR) + if unsafe.Sizeof(*absoluteSD) < uintptr(absoluteSDSize) { + panic("sizeof(SECURITY_DESCRIPTOR) too small") + } } var ( dacl *ACL @@ -1312,19 +1315,55 @@ func (selfRelativeSD *SECURITY_DESCRIPTOR) ToAbsolute() (absoluteSD *SECURITY_DE group *SID ) if daclSize > 0 { - dacl = (*ACL)(unsafe.Pointer(&make([]byte, daclSize)[0])) + dacl = (*ACL)(unsafe.Pointer(unsafe.SliceData(make([]byte, daclSize)))) } if saclSize > 0 { - sacl = (*ACL)(unsafe.Pointer(&make([]byte, saclSize)[0])) + sacl = (*ACL)(unsafe.Pointer(unsafe.SliceData(make([]byte, saclSize)))) } if ownerSize > 0 { - owner = (*SID)(unsafe.Pointer(&make([]byte, ownerSize)[0])) + owner = (*SID)(unsafe.Pointer(unsafe.SliceData(make([]byte, ownerSize)))) } if groupSize > 0 { - group = (*SID)(unsafe.Pointer(&make([]byte, groupSize)[0])) + group = (*SID)(unsafe.Pointer(unsafe.SliceData(make([]byte, groupSize)))) } + // We call into Windows via makeAbsoluteSD, which sets up + // pointers within absoluteSD that point to other chunks of memory + // we pass into makeAbsoluteSD, and that happens outside the view of the GC. + // We therefore take some care here to then verify the pointers are as we expect + // and set them explicitly in view of the GC. See https://go.dev/issue/73199. + // TODO: consider weak pointers once Go 1.24 is appropriate. See suggestion in https://go.dev/cl/663575. err = makeAbsoluteSD(selfRelativeSD, absoluteSD, &absoluteSDSize, dacl, &daclSize, sacl, &saclSize, owner, &ownerSize, group, &groupSize) + if err != nil { + // Don't return absoluteSD, which might be partially initialized. + return nil, err + } + // Before using any fields, verify absoluteSD is in the format we expect according to Windows. 
+ // See https://learn.microsoft.com/en-us/windows/win32/secauthz/absolute-and-self-relative-security-descriptors + absControl, _, err := absoluteSD.Control() + if err != nil { + panic("absoluteSD: " + err.Error()) + } + if absControl&SE_SELF_RELATIVE != 0 { + panic("absoluteSD not in absolute format") + } + if absoluteSD.dacl != dacl { + panic("dacl pointer mismatch") + } + if absoluteSD.sacl != sacl { + panic("sacl pointer mismatch") + } + if absoluteSD.owner != owner { + panic("owner pointer mismatch") + } + if absoluteSD.group != group { + panic("group pointer mismatch") + } + absoluteSD.dacl = dacl + absoluteSD.sacl = sacl + absoluteSD.owner = owner + absoluteSD.group = group + return } diff --git a/vendor/golang.org/x/sys/windows/syscall_windows.go b/vendor/golang.org/x/sys/windows/syscall_windows.go index 4a325438..640f6b15 100644 --- a/vendor/golang.org/x/sys/windows/syscall_windows.go +++ b/vendor/golang.org/x/sys/windows/syscall_windows.go @@ -870,6 +870,7 @@ const socket_error = uintptr(^uint32(0)) //sys WSARecvFrom(s Handle, bufs *WSABuf, bufcnt uint32, recvd *uint32, flags *uint32, from *RawSockaddrAny, fromlen *int32, overlapped *Overlapped, croutine *byte) (err error) [failretval==socket_error] = ws2_32.WSARecvFrom //sys WSASendTo(s Handle, bufs *WSABuf, bufcnt uint32, sent *uint32, flags uint32, to *RawSockaddrAny, tolen int32, overlapped *Overlapped, croutine *byte) (err error) [failretval==socket_error] = ws2_32.WSASendTo //sys WSASocket(af int32, typ int32, protocol int32, protoInfo *WSAProtocolInfo, group uint32, flags uint32) (handle Handle, err error) [failretval==InvalidHandle] = ws2_32.WSASocketW +//sys WSADuplicateSocket(s Handle, processID uint32, info *WSAProtocolInfo) (err error) [failretval!=0] = ws2_32.WSADuplicateSocketW //sys GetHostByName(name string) (h *Hostent, err error) [failretval==nil] = ws2_32.gethostbyname //sys GetServByName(name string, proto string) (s *Servent, err error) [failretval==nil] = ws2_32.getservbyname //sys Ntohs(netshort uint16) (u uint16) = ws2_32.ntohs @@ -1698,8 +1699,9 @@ func NewNTUnicodeString(s string) (*NTUnicodeString, error) { // Slice returns a uint16 slice that aliases the data in the NTUnicodeString. func (s *NTUnicodeString) Slice() []uint16 { - slice := unsafe.Slice(s.Buffer, s.MaximumLength) - return slice[:s.Length] + // Note: this rounds the length down, if it happens + // to (incorrectly) be odd. Probably safer than rounding up. + return unsafe.Slice(s.Buffer, s.MaximumLength/2)[:s.Length/2] } func (s *NTUnicodeString) String() string { diff --git a/vendor/golang.org/x/sys/windows/types_windows.go b/vendor/golang.org/x/sys/windows/types_windows.go index ad67df2f..958bcf47 100644 --- a/vendor/golang.org/x/sys/windows/types_windows.go +++ b/vendor/golang.org/x/sys/windows/types_windows.go @@ -2700,6 +2700,8 @@ type CommTimeouts struct { // NTUnicodeString is a UTF-16 string for NT native APIs, corresponding to UNICODE_STRING. type NTUnicodeString struct { + // Note: Length and MaximumLength are in *bytes*, not uint16s. + // They should always be even. 
Length uint16 MaximumLength uint16 Buffer *uint16 @@ -3628,3 +3630,213 @@ const ( KLF_NOTELLSHELL = 0x00000080 KLF_SETFORPROCESS = 0x00000100 ) + +// Virtual Key codes +// https://docs.microsoft.com/en-us/windows/win32/inputdev/virtual-key-codes +const ( + VK_LBUTTON = 0x01 + VK_RBUTTON = 0x02 + VK_CANCEL = 0x03 + VK_MBUTTON = 0x04 + VK_XBUTTON1 = 0x05 + VK_XBUTTON2 = 0x06 + VK_BACK = 0x08 + VK_TAB = 0x09 + VK_CLEAR = 0x0C + VK_RETURN = 0x0D + VK_SHIFT = 0x10 + VK_CONTROL = 0x11 + VK_MENU = 0x12 + VK_PAUSE = 0x13 + VK_CAPITAL = 0x14 + VK_KANA = 0x15 + VK_HANGEUL = 0x15 + VK_HANGUL = 0x15 + VK_IME_ON = 0x16 + VK_JUNJA = 0x17 + VK_FINAL = 0x18 + VK_HANJA = 0x19 + VK_KANJI = 0x19 + VK_IME_OFF = 0x1A + VK_ESCAPE = 0x1B + VK_CONVERT = 0x1C + VK_NONCONVERT = 0x1D + VK_ACCEPT = 0x1E + VK_MODECHANGE = 0x1F + VK_SPACE = 0x20 + VK_PRIOR = 0x21 + VK_NEXT = 0x22 + VK_END = 0x23 + VK_HOME = 0x24 + VK_LEFT = 0x25 + VK_UP = 0x26 + VK_RIGHT = 0x27 + VK_DOWN = 0x28 + VK_SELECT = 0x29 + VK_PRINT = 0x2A + VK_EXECUTE = 0x2B + VK_SNAPSHOT = 0x2C + VK_INSERT = 0x2D + VK_DELETE = 0x2E + VK_HELP = 0x2F + VK_LWIN = 0x5B + VK_RWIN = 0x5C + VK_APPS = 0x5D + VK_SLEEP = 0x5F + VK_NUMPAD0 = 0x60 + VK_NUMPAD1 = 0x61 + VK_NUMPAD2 = 0x62 + VK_NUMPAD3 = 0x63 + VK_NUMPAD4 = 0x64 + VK_NUMPAD5 = 0x65 + VK_NUMPAD6 = 0x66 + VK_NUMPAD7 = 0x67 + VK_NUMPAD8 = 0x68 + VK_NUMPAD9 = 0x69 + VK_MULTIPLY = 0x6A + VK_ADD = 0x6B + VK_SEPARATOR = 0x6C + VK_SUBTRACT = 0x6D + VK_DECIMAL = 0x6E + VK_DIVIDE = 0x6F + VK_F1 = 0x70 + VK_F2 = 0x71 + VK_F3 = 0x72 + VK_F4 = 0x73 + VK_F5 = 0x74 + VK_F6 = 0x75 + VK_F7 = 0x76 + VK_F8 = 0x77 + VK_F9 = 0x78 + VK_F10 = 0x79 + VK_F11 = 0x7A + VK_F12 = 0x7B + VK_F13 = 0x7C + VK_F14 = 0x7D + VK_F15 = 0x7E + VK_F16 = 0x7F + VK_F17 = 0x80 + VK_F18 = 0x81 + VK_F19 = 0x82 + VK_F20 = 0x83 + VK_F21 = 0x84 + VK_F22 = 0x85 + VK_F23 = 0x86 + VK_F24 = 0x87 + VK_NUMLOCK = 0x90 + VK_SCROLL = 0x91 + VK_OEM_NEC_EQUAL = 0x92 + VK_OEM_FJ_JISHO = 0x92 + VK_OEM_FJ_MASSHOU = 0x93 + VK_OEM_FJ_TOUROKU = 0x94 + VK_OEM_FJ_LOYA = 0x95 + VK_OEM_FJ_ROYA = 0x96 + VK_LSHIFT = 0xA0 + VK_RSHIFT = 0xA1 + VK_LCONTROL = 0xA2 + VK_RCONTROL = 0xA3 + VK_LMENU = 0xA4 + VK_RMENU = 0xA5 + VK_BROWSER_BACK = 0xA6 + VK_BROWSER_FORWARD = 0xA7 + VK_BROWSER_REFRESH = 0xA8 + VK_BROWSER_STOP = 0xA9 + VK_BROWSER_SEARCH = 0xAA + VK_BROWSER_FAVORITES = 0xAB + VK_BROWSER_HOME = 0xAC + VK_VOLUME_MUTE = 0xAD + VK_VOLUME_DOWN = 0xAE + VK_VOLUME_UP = 0xAF + VK_MEDIA_NEXT_TRACK = 0xB0 + VK_MEDIA_PREV_TRACK = 0xB1 + VK_MEDIA_STOP = 0xB2 + VK_MEDIA_PLAY_PAUSE = 0xB3 + VK_LAUNCH_MAIL = 0xB4 + VK_LAUNCH_MEDIA_SELECT = 0xB5 + VK_LAUNCH_APP1 = 0xB6 + VK_LAUNCH_APP2 = 0xB7 + VK_OEM_1 = 0xBA + VK_OEM_PLUS = 0xBB + VK_OEM_COMMA = 0xBC + VK_OEM_MINUS = 0xBD + VK_OEM_PERIOD = 0xBE + VK_OEM_2 = 0xBF + VK_OEM_3 = 0xC0 + VK_OEM_4 = 0xDB + VK_OEM_5 = 0xDC + VK_OEM_6 = 0xDD + VK_OEM_7 = 0xDE + VK_OEM_8 = 0xDF + VK_OEM_AX = 0xE1 + VK_OEM_102 = 0xE2 + VK_ICO_HELP = 0xE3 + VK_ICO_00 = 0xE4 + VK_PROCESSKEY = 0xE5 + VK_ICO_CLEAR = 0xE6 + VK_OEM_RESET = 0xE9 + VK_OEM_JUMP = 0xEA + VK_OEM_PA1 = 0xEB + VK_OEM_PA2 = 0xEC + VK_OEM_PA3 = 0xED + VK_OEM_WSCTRL = 0xEE + VK_OEM_CUSEL = 0xEF + VK_OEM_ATTN = 0xF0 + VK_OEM_FINISH = 0xF1 + VK_OEM_COPY = 0xF2 + VK_OEM_AUTO = 0xF3 + VK_OEM_ENLW = 0xF4 + VK_OEM_BACKTAB = 0xF5 + VK_ATTN = 0xF6 + VK_CRSEL = 0xF7 + VK_EXSEL = 0xF8 + VK_EREOF = 0xF9 + VK_PLAY = 0xFA + VK_ZOOM = 0xFB + VK_NONAME = 0xFC + VK_PA1 = 0xFD + VK_OEM_CLEAR = 0xFE +) + +// Mouse button constants. 
+// https://docs.microsoft.com/en-us/windows/console/mouse-event-record-str
+const (
+	FROM_LEFT_1ST_BUTTON_PRESSED = 0x0001
+	RIGHTMOST_BUTTON_PRESSED     = 0x0002
+	FROM_LEFT_2ND_BUTTON_PRESSED = 0x0004
+	FROM_LEFT_3RD_BUTTON_PRESSED = 0x0008
+	FROM_LEFT_4TH_BUTTON_PRESSED = 0x0010
+)
+
+// Control key state constants.
+// https://docs.microsoft.com/en-us/windows/console/key-event-record-str
+// https://docs.microsoft.com/en-us/windows/console/mouse-event-record-str
+const (
+	CAPSLOCK_ON        = 0x0080
+	ENHANCED_KEY       = 0x0100
+	LEFT_ALT_PRESSED   = 0x0002
+	LEFT_CTRL_PRESSED  = 0x0008
+	NUMLOCK_ON         = 0x0020
+	RIGHT_ALT_PRESSED  = 0x0001
+	RIGHT_CTRL_PRESSED = 0x0004
+	SCROLLLOCK_ON      = 0x0040
+	SHIFT_PRESSED      = 0x0010
+)
+
+// Mouse event record event flags.
+// https://docs.microsoft.com/en-us/windows/console/mouse-event-record-str
+const (
+	MOUSE_MOVED    = 0x0001
+	DOUBLE_CLICK   = 0x0002
+	MOUSE_WHEELED  = 0x0004
+	MOUSE_HWHEELED = 0x0008
+)
+
+// Input Record Event Types
+// https://learn.microsoft.com/en-us/windows/console/input-record-str
+const (
+	FOCUS_EVENT              = 0x0010
+	KEY_EVENT                = 0x0001
+	MENU_EVENT               = 0x0008
+	MOUSE_EVENT              = 0x0002
+	WINDOW_BUFFER_SIZE_EVENT = 0x0004
+)
diff --git a/vendor/golang.org/x/sys/windows/zsyscall_windows.go b/vendor/golang.org/x/sys/windows/zsyscall_windows.go
index 01c0716c..a58bc48b 100644
--- a/vendor/golang.org/x/sys/windows/zsyscall_windows.go
+++ b/vendor/golang.org/x/sys/windows/zsyscall_windows.go
@@ -511,6 +511,7 @@ var (
 	procFreeAddrInfoW          = modws2_32.NewProc("FreeAddrInfoW")
 	procGetAddrInfoW           = modws2_32.NewProc("GetAddrInfoW")
 	procWSACleanup             = modws2_32.NewProc("WSACleanup")
+	procWSADuplicateSocketW    = modws2_32.NewProc("WSADuplicateSocketW")
 	procWSAEnumProtocolsW      = modws2_32.NewProc("WSAEnumProtocolsW")
 	procWSAGetOverlappedResult = modws2_32.NewProc("WSAGetOverlappedResult")
 	procWSAIoctl               = modws2_32.NewProc("WSAIoctl")
@@ -4391,6 +4392,14 @@ func WSACleanup() (err error) {
 	return
 }
 
+func WSADuplicateSocket(s Handle, processID uint32, info *WSAProtocolInfo) (err error) {
+	r1, _, e1 := syscall.Syscall(procWSADuplicateSocketW.Addr(), 3, uintptr(s), uintptr(processID), uintptr(unsafe.Pointer(info)))
+	if r1 != 0 {
+		err = errnoErr(e1)
+	}
+	return
+}
+
 func WSAEnumProtocols(protocols *int32, protocolBuffer *WSAProtocolInfo, bufferLength *uint32) (n int32, err error) {
 	r0, _, e1 := syscall.Syscall(procWSAEnumProtocolsW.Addr(), 3, uintptr(unsafe.Pointer(protocols)), uintptr(unsafe.Pointer(protocolBuffer)), uintptr(unsafe.Pointer(bufferLength)))
 	n = int32(r0)
diff --git a/vendor/gorm.io/gorm/.golangci.yml b/vendor/gorm.io/gorm/.golangci.yml
index b88bf672..6c48152c 100644
--- a/vendor/gorm.io/gorm/.golangci.yml
+++ b/vendor/gorm.io/gorm/.golangci.yml
@@ -1,7 +1,9 @@
+version: "2"
+
 linters:
+  default: standard
   enable:
     - cyclop
-    - exportloopref
     - gocritic
     - gosec
     - ineffassign
@@ -9,12 +11,9 @@
     - prealloc
     - unconvert
     - unparam
-    - goimports
     - whitespace
 
-linters-settings:
-  whitespace:
-    multi-func: true
-  goimports:
-    local-prefixes: gorm.io/gorm
-
+formatters:
+  enable:
+    - gofumpt
+    - goimports
diff --git a/vendor/gorm.io/gorm/CODE_OF_CONDUCT.md b/vendor/gorm.io/gorm/CODE_OF_CONDUCT.md
new file mode 100644
index 00000000..60117926
--- /dev/null
+++ b/vendor/gorm.io/gorm/CODE_OF_CONDUCT.md
@@ -0,0 +1,128 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, religion, or sexual identity
+and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+* Demonstrating empathy and kindness toward other people
+* Being respectful of differing opinions, viewpoints, and experiences
+* Giving and gracefully accepting constructive feedback
+* Accepting responsibility and apologizing to those affected by our mistakes,
+  and learning from the experience
+* Focusing on what is best not just for us as individuals, but for the
+  overall community
+
+Examples of unacceptable behavior include:
+
+* The use of sexualized language or imagery, and sexual attention or
+  advances of any kind
+* Trolling, insulting or derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or email
+  address, without their explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement at
+.
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing
+clarity around the nature of the violation and an explanation of why the
+behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series
+of actions.
+
+**Consequence**: A warning with consequences for continued behavior. No
+interaction with the people involved, including unsolicited interaction with
+those enforcing the Code of Conduct, for a specified period.
This +includes avoiding interactions in community spaces and external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any interaction or public +communication with the community for a specified period. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/vendor/gorm.io/gorm/LICENSE b/vendor/gorm.io/gorm/LICENSE index 037e1653..52964f13 100644 --- a/vendor/gorm.io/gorm/LICENSE +++ b/vendor/gorm.io/gorm/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2013-NOW Jinzhu +Copyright (c) 2013-present Jinzhu Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/gorm.io/gorm/callbacks/associations.go b/vendor/gorm.io/gorm/callbacks/associations.go index f3cd464a..67531127 100644 --- a/vendor/gorm.io/gorm/callbacks/associations.go +++ b/vendor/gorm.io/gorm/callbacks/associations.go @@ -47,7 +47,7 @@ func SaveBeforeAssociations(create bool) func(db *gorm.DB) { ) if !isPtr { - fieldType = reflect.PtrTo(fieldType) + fieldType = reflect.PointerTo(fieldType) } elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) @@ -126,7 +126,7 @@ func SaveAfterAssociations(create bool) func(db *gorm.DB) { ) if !isPtr { - fieldType = reflect.PtrTo(fieldType) + fieldType = reflect.PointerTo(fieldType) } elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) @@ -195,7 +195,7 @@ func SaveAfterAssociations(create bool) func(db *gorm.DB) { fieldType := rel.Field.IndirectFieldType.Elem() isPtr := fieldType.Kind() == reflect.Ptr if !isPtr { - fieldType = reflect.PtrTo(fieldType) + fieldType = reflect.PointerTo(fieldType) } elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) identityMap := map[string]bool{} @@ -268,11 +268,11 @@ func SaveAfterAssociations(create bool) func(db *gorm.DB) { fieldType := rel.Field.IndirectFieldType.Elem() isPtr := fieldType.Kind() == reflect.Ptr if !isPtr { - fieldType = reflect.PtrTo(fieldType) + fieldType = reflect.PointerTo(fieldType) } elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) distinctElems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) - joins := 
reflect.MakeSlice(reflect.SliceOf(reflect.PtrTo(rel.JoinTable.ModelType)), 0, 10) + joins := reflect.MakeSlice(reflect.SliceOf(reflect.PointerTo(rel.JoinTable.ModelType)), 0, 10) objs := []reflect.Value{} appendToJoins := func(obj reflect.Value, elem reflect.Value) { diff --git a/vendor/gorm.io/gorm/clause/returning.go b/vendor/gorm.io/gorm/clause/returning.go index d94b7a4c..76064c4a 100644 --- a/vendor/gorm.io/gorm/clause/returning.go +++ b/vendor/gorm.io/gorm/clause/returning.go @@ -26,9 +26,12 @@ func (returning Returning) Build(builder Builder) { // MergeClause merge order by clauses func (returning Returning) MergeClause(clause *Clause) { - if v, ok := clause.Expression.(Returning); ok { - returning.Columns = append(v.Columns, returning.Columns...) + if v, ok := clause.Expression.(Returning); ok && len(returning.Columns) > 0 { + if v.Columns != nil { + returning.Columns = append(v.Columns, returning.Columns...) + } else { + returning.Columns = nil + } } - clause.Expression = returning } diff --git a/vendor/gorm.io/gorm/finisher_api.go b/vendor/gorm.io/gorm/finisher_api.go index f97571ed..6802945c 100644 --- a/vendor/gorm.io/gorm/finisher_api.go +++ b/vendor/gorm.io/gorm/finisher_api.go @@ -4,6 +4,7 @@ import ( "database/sql" "errors" "fmt" + "hash/maphash" "reflect" "strings" @@ -623,14 +624,15 @@ func (db *DB) Transaction(fc func(tx *DB) error, opts ...*sql.TxOptions) (err er if committer, ok := db.Statement.ConnPool.(TxCommitter); ok && committer != nil { // nested transaction if !db.DisableNestedTransaction { - err = db.SavePoint(fmt.Sprintf("sp%p", fc)).Error + spID := new(maphash.Hash).Sum64() + err = db.SavePoint(fmt.Sprintf("sp%d", spID)).Error if err != nil { return } defer func() { // Make sure to rollback when panic, Block error or Commit error if panicked || err != nil { - db.RollbackTo(fmt.Sprintf("sp%p", fc)) + db.RollbackTo(fmt.Sprintf("sp%d", spID)) } }() } diff --git a/vendor/gorm.io/gorm/gorm.go b/vendor/gorm.io/gorm/gorm.go index 117d2fd0..d253736d 100644 --- a/vendor/gorm.io/gorm/gorm.go +++ b/vendor/gorm.io/gorm/gorm.go @@ -34,6 +34,11 @@ type Config struct { DryRun bool // PrepareStmt executes the given query in cached statement PrepareStmt bool + // PrepareStmt cache support LRU expired, + // default maxsize=int64 Max value and ttl=1h + PrepareStmtMaxSize int + PrepareStmtTTL time.Duration + // DisableAutomaticPing DisableAutomaticPing bool // DisableForeignKeyConstraintWhenMigrating @@ -105,6 +110,8 @@ type DB struct { type Session struct { DryRun bool PrepareStmt bool + PrepareStmtMaxSize int + PrepareStmtTTL time.Duration NewDB bool Initialized bool SkipHooks bool @@ -183,16 +190,21 @@ func Open(dialector Dialector, opts ...Option) (db *DB, err error) { if config.Dialector != nil { err = config.Dialector.Initialize(db) - if err != nil { if db, _ := db.DB(); db != nil { _ = db.Close() } } + + if config.TranslateError { + if _, ok := db.Dialector.(ErrorTranslator); !ok { + config.Logger.Warn(context.Background(), "The TranslateError option is enabled, but the Dialector %s does not implement ErrorTranslator.", db.Dialector.Name()) + } + } } if config.PrepareStmt { - preparedStmt := NewPreparedStmtDB(db.ConnPool) + preparedStmt := NewPreparedStmtDB(db.ConnPool, config.PrepareStmtMaxSize, config.PrepareStmtTTL) db.cacheStore.Store(preparedStmtDBKey, preparedStmt) db.ConnPool = preparedStmt } @@ -263,7 +275,7 @@ func (db *DB) Session(config *Session) *DB { if v, ok := db.cacheStore.Load(preparedStmtDBKey); ok { preparedStmt = v.(*PreparedStmtDB) } else { - 
preparedStmt = NewPreparedStmtDB(db.ConnPool) + preparedStmt = NewPreparedStmtDB(db.ConnPool, config.PrepareStmtMaxSize, config.PrepareStmtTTL) db.cacheStore.Store(preparedStmtDBKey, preparedStmt) } diff --git a/vendor/gorm.io/gorm/internal/lru/lru.go b/vendor/gorm.io/gorm/internal/lru/lru.go new file mode 100644 index 00000000..4f21589a --- /dev/null +++ b/vendor/gorm.io/gorm/internal/lru/lru.go @@ -0,0 +1,493 @@ +package lru + +// golang -lru +// https://github.com/hashicorp/golang-lru +import ( + "sync" + "time" +) + +// EvictCallback is used to get a callback when a cache entry is evicted +type EvictCallback[K comparable, V any] func(key K, value V) + +// LRU implements a thread-safe LRU with expirable entries. +type LRU[K comparable, V any] struct { + size int + evictList *LruList[K, V] + items map[K]*Entry[K, V] + onEvict EvictCallback[K, V] + + // expirable options + mu sync.Mutex + ttl time.Duration + done chan struct{} + + // buckets for expiration + buckets []bucket[K, V] + // uint8 because it's number between 0 and numBuckets + nextCleanupBucket uint8 +} + +// bucket is a container for holding entries to be expired +type bucket[K comparable, V any] struct { + entries map[K]*Entry[K, V] + newestEntry time.Time +} + +// noEvictionTTL - very long ttl to prevent eviction +const noEvictionTTL = time.Hour * 24 * 365 * 10 + +// because of uint8 usage for nextCleanupBucket, should not exceed 256. +// casting it as uint8 explicitly requires type conversions in multiple places +const numBuckets = 100 + +// NewLRU returns a new thread-safe cache with expirable entries. +// +// Size parameter set to 0 makes cache of unlimited size, e.g. turns LRU mechanism off. +// +// Providing 0 TTL turns expiring off. +// +// Delete expired entries every 1/100th of ttl value. Goroutine which deletes expired entries runs indefinitely. +func NewLRU[K comparable, V any](size int, onEvict EvictCallback[K, V], ttl time.Duration) *LRU[K, V] { + if size < 0 { + size = 0 + } + if ttl <= 0 { + ttl = noEvictionTTL + } + + res := LRU[K, V]{ + ttl: ttl, + size: size, + evictList: NewList[K, V](), + items: make(map[K]*Entry[K, V]), + onEvict: onEvict, + done: make(chan struct{}), + } + + // initialize the buckets + res.buckets = make([]bucket[K, V], numBuckets) + for i := 0; i < numBuckets; i++ { + res.buckets[i] = bucket[K, V]{entries: make(map[K]*Entry[K, V])} + } + + // enable deleteExpired() running in separate goroutine for cache with non-zero TTL + // + // Important: done channel is never closed, so deleteExpired() goroutine will never exit, + // it's decided to add functionality to close it in the version later than v2. + if res.ttl != noEvictionTTL { + go func(done <-chan struct{}) { + ticker := time.NewTicker(res.ttl / numBuckets) + defer ticker.Stop() + for { + select { + case <-done: + return + case <-ticker.C: + res.deleteExpired() + } + } + }(res.done) + } + return &res +} + +// Purge clears the cache completely. +// onEvict is called for each evicted key. +func (c *LRU[K, V]) Purge() { + c.mu.Lock() + defer c.mu.Unlock() + for k, v := range c.items { + if c.onEvict != nil { + c.onEvict(k, v.Value) + } + delete(c.items, k) + } + for _, b := range c.buckets { + for _, ent := range b.entries { + delete(b.entries, ent.Key) + } + } + c.evictList.Init() +} + +// Add adds a value to the cache. Returns true if an eviction occurred. +// Returns false if there was no eviction: the item was already in the cache, +// or the size was not exceeded. 
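+// A hit refreshes the entry: it is moved to the front of the recency list and
+// into a fresh expiry bucket with a renewed TTL. A miss pushes a new expirable
+// entry and, if the configured size limit is exceeded, evicts the oldest entry.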
+func (c *LRU[K, V]) Add(key K, value V) (evicted bool) { + c.mu.Lock() + defer c.mu.Unlock() + now := time.Now() + + // Check for existing item + if ent, ok := c.items[key]; ok { + c.evictList.MoveToFront(ent) + c.removeFromBucket(ent) // remove the entry from its current bucket as expiresAt is renewed + ent.Value = value + ent.ExpiresAt = now.Add(c.ttl) + c.addToBucket(ent) + return false + } + + // Add new item + ent := c.evictList.PushFrontExpirable(key, value, now.Add(c.ttl)) + c.items[key] = ent + c.addToBucket(ent) // adds the entry to the appropriate bucket and sets entry.expireBucket + + evict := c.size > 0 && c.evictList.Length() > c.size + // Verify size not exceeded + if evict { + c.removeOldest() + } + return evict +} + +// Get looks up a key's value from the cache. +func (c *LRU[K, V]) Get(key K) (value V, ok bool) { + c.mu.Lock() + defer c.mu.Unlock() + var ent *Entry[K, V] + if ent, ok = c.items[key]; ok { + // Expired item check + if time.Now().After(ent.ExpiresAt) { + return value, false + } + c.evictList.MoveToFront(ent) + return ent.Value, true + } + return +} + +// Contains checks if a key is in the cache, without updating the recent-ness +// or deleting it for being stale. +func (c *LRU[K, V]) Contains(key K) (ok bool) { + c.mu.Lock() + defer c.mu.Unlock() + _, ok = c.items[key] + return ok +} + +// Peek returns the key value (or undefined if not found) without updating +// the "recently used"-ness of the key. +func (c *LRU[K, V]) Peek(key K) (value V, ok bool) { + c.mu.Lock() + defer c.mu.Unlock() + var ent *Entry[K, V] + if ent, ok = c.items[key]; ok { + // Expired item check + if time.Now().After(ent.ExpiresAt) { + return value, false + } + return ent.Value, true + } + return +} + +// Remove removes the provided key from the cache, returning if the +// key was contained. +func (c *LRU[K, V]) Remove(key K) bool { + c.mu.Lock() + defer c.mu.Unlock() + if ent, ok := c.items[key]; ok { + c.removeElement(ent) + return true + } + return false +} + +// RemoveOldest removes the oldest item from the cache. +func (c *LRU[K, V]) RemoveOldest() (key K, value V, ok bool) { + c.mu.Lock() + defer c.mu.Unlock() + if ent := c.evictList.Back(); ent != nil { + c.removeElement(ent) + return ent.Key, ent.Value, true + } + return +} + +// GetOldest returns the oldest entry +func (c *LRU[K, V]) GetOldest() (key K, value V, ok bool) { + c.mu.Lock() + defer c.mu.Unlock() + if ent := c.evictList.Back(); ent != nil { + return ent.Key, ent.Value, true + } + return +} + +func (c *LRU[K, V]) KeyValues() map[K]V { + c.mu.Lock() + defer c.mu.Unlock() + maps := make(map[K]V) + now := time.Now() + for ent := c.evictList.Back(); ent != nil; ent = ent.PrevEntry() { + if now.After(ent.ExpiresAt) { + continue + } + maps[ent.Key] = ent.Value + // keys = append(keys, ent.Key) + } + return maps +} + +// Keys returns a slice of the keys in the cache, from oldest to newest. +// Expired entries are filtered out. +func (c *LRU[K, V]) Keys() []K { + c.mu.Lock() + defer c.mu.Unlock() + keys := make([]K, 0, len(c.items)) + now := time.Now() + for ent := c.evictList.Back(); ent != nil; ent = ent.PrevEntry() { + if now.After(ent.ExpiresAt) { + continue + } + keys = append(keys, ent.Key) + } + return keys +} + +// Values returns a slice of the values in the cache, from oldest to newest. +// Expired entries are filtered out. 
+func (c *LRU[K, V]) Values() []V { + c.mu.Lock() + defer c.mu.Unlock() + values := make([]V, 0, len(c.items)) + now := time.Now() + for ent := c.evictList.Back(); ent != nil; ent = ent.PrevEntry() { + if now.After(ent.ExpiresAt) { + continue + } + values = append(values, ent.Value) + } + return values +} + +// Len returns the number of items in the cache. +func (c *LRU[K, V]) Len() int { + c.mu.Lock() + defer c.mu.Unlock() + return c.evictList.Length() +} + +// Resize changes the cache size. Size of 0 means unlimited. +func (c *LRU[K, V]) Resize(size int) (evicted int) { + c.mu.Lock() + defer c.mu.Unlock() + if size <= 0 { + c.size = 0 + return 0 + } + diff := c.evictList.Length() - size + if diff < 0 { + diff = 0 + } + for i := 0; i < diff; i++ { + c.removeOldest() + } + c.size = size + return diff +} + +// Close destroys cleanup goroutine. To clean up the cache, run Purge() before Close(). +// func (c *LRU[K, V]) Close() { +// c.mu.Lock() +// defer c.mu.Unlock() +// select { +// case <-c.done: +// return +// default: +// } +// close(c.done) +// } + +// removeOldest removes the oldest item from the cache. Has to be called with lock! +func (c *LRU[K, V]) removeOldest() { + if ent := c.evictList.Back(); ent != nil { + c.removeElement(ent) + } +} + +// removeElement is used to remove a given list element from the cache. Has to be called with lock! +func (c *LRU[K, V]) removeElement(e *Entry[K, V]) { + c.evictList.Remove(e) + delete(c.items, e.Key) + c.removeFromBucket(e) + if c.onEvict != nil { + c.onEvict(e.Key, e.Value) + } +} + +// deleteExpired deletes expired records from the oldest bucket, waiting for the newest entry +// in it to expire first. +func (c *LRU[K, V]) deleteExpired() { + c.mu.Lock() + bucketIdx := c.nextCleanupBucket + timeToExpire := time.Until(c.buckets[bucketIdx].newestEntry) + // wait for newest entry to expire before cleanup without holding lock + if timeToExpire > 0 { + c.mu.Unlock() + time.Sleep(timeToExpire) + c.mu.Lock() + } + for _, ent := range c.buckets[bucketIdx].entries { + c.removeElement(ent) + } + c.nextCleanupBucket = (c.nextCleanupBucket + 1) % numBuckets + c.mu.Unlock() +} + +// addToBucket adds entry to expire bucket so that it will be cleaned up when the time comes. Has to be called with lock! +func (c *LRU[K, V]) addToBucket(e *Entry[K, V]) { + bucketID := (numBuckets + c.nextCleanupBucket - 1) % numBuckets + e.ExpireBucket = bucketID + c.buckets[bucketID].entries[e.Key] = e + if c.buckets[bucketID].newestEntry.Before(e.ExpiresAt) { + c.buckets[bucketID].newestEntry = e.ExpiresAt + } +} + +// removeFromBucket removes the entry from its corresponding bucket. Has to be called with lock! +func (c *LRU[K, V]) removeFromBucket(e *Entry[K, V]) { + delete(c.buckets[e.ExpireBucket].entries, e.Key) +} + +// Cap returns the capacity of the cache +func (c *LRU[K, V]) Cap() int { + return c.size +} + +// Entry is an LRU Entry +type Entry[K comparable, V any] struct { + // Next and previous pointers in the doubly-linked list of elements. + // To simplify the implementation, internally a list l is implemented + // as a ring, such that &l.root is both the next element of the last + // list element (l.Back()) and the previous element of the first list + // element (l.Front()). + next, prev *Entry[K, V] + + // The list to which this element belongs. + list *LruList[K, V] + + // The LRU Key of this element. + Key K + + // The Value stored with this element. 
+ Value V + + // The time this element would be cleaned up, optional + ExpiresAt time.Time + + // The expiry bucket item was put in, optional + ExpireBucket uint8 +} + +// PrevEntry returns the previous list element or nil. +func (e *Entry[K, V]) PrevEntry() *Entry[K, V] { + if p := e.prev; e.list != nil && p != &e.list.root { + return p + } + return nil +} + +// LruList represents a doubly linked list. +// The zero Value for LruList is an empty list ready to use. +type LruList[K comparable, V any] struct { + root Entry[K, V] // sentinel list element, only &root, root.prev, and root.next are used + len int // current list Length excluding (this) sentinel element +} + +// Init initializes or clears list l. +func (l *LruList[K, V]) Init() *LruList[K, V] { + l.root.next = &l.root + l.root.prev = &l.root + l.len = 0 + return l +} + +// NewList returns an initialized list. +func NewList[K comparable, V any]() *LruList[K, V] { return new(LruList[K, V]).Init() } + +// Length returns the number of elements of list l. +// The complexity is O(1). +func (l *LruList[K, V]) Length() int { return l.len } + +// Back returns the last element of list l or nil if the list is empty. +func (l *LruList[K, V]) Back() *Entry[K, V] { + if l.len == 0 { + return nil + } + return l.root.prev +} + +// lazyInit lazily initializes a zero List Value. +func (l *LruList[K, V]) lazyInit() { + if l.root.next == nil { + l.Init() + } +} + +// insert inserts e after at, increments l.len, and returns e. +func (l *LruList[K, V]) insert(e, at *Entry[K, V]) *Entry[K, V] { + e.prev = at + e.next = at.next + e.prev.next = e + e.next.prev = e + e.list = l + l.len++ + return e +} + +// insertValue is a convenience wrapper for insert(&Entry{Value: v, ExpiresAt: ExpiresAt}, at). +func (l *LruList[K, V]) insertValue(k K, v V, expiresAt time.Time, at *Entry[K, V]) *Entry[K, V] { + return l.insert(&Entry[K, V]{Value: v, Key: k, ExpiresAt: expiresAt}, at) +} + +// Remove removes e from its list, decrements l.len +func (l *LruList[K, V]) Remove(e *Entry[K, V]) V { + e.prev.next = e.next + e.next.prev = e.prev + e.next = nil // avoid memory leaks + e.prev = nil // avoid memory leaks + e.list = nil + l.len-- + + return e.Value +} + +// move moves e to next to at. +func (l *LruList[K, V]) move(e, at *Entry[K, V]) { + if e == at { + return + } + e.prev.next = e.next + e.next.prev = e.prev + + e.prev = at + e.next = at.next + e.prev.next = e + e.next.prev = e +} + +// PushFront inserts a new element e with value v at the front of list l and returns e. +func (l *LruList[K, V]) PushFront(k K, v V) *Entry[K, V] { + l.lazyInit() + return l.insertValue(k, v, time.Time{}, &l.root) +} + +// PushFrontExpirable inserts a new expirable element e with Value v at the front of list l and returns e. +func (l *LruList[K, V]) PushFrontExpirable(k K, v V, expiresAt time.Time) *Entry[K, V] { + l.lazyInit() + return l.insertValue(k, v, expiresAt, &l.root) +} + +// MoveToFront moves element e to the front of list l. +// If e is not an element of l, the list is not modified. +// The element must not be nil. 
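+// Moving an element that is already at the front of the list is a no-op.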
+func (l *LruList[K, V]) MoveToFront(e *Entry[K, V]) { + if e.list != l || l.root.next == e { + return + } + // see comment in List.Remove about initialization of l + l.move(e, &l.root) +} diff --git a/vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go b/vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go new file mode 100644 index 00000000..7068419d --- /dev/null +++ b/vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go @@ -0,0 +1,182 @@ +package stmt_store + +import ( + "context" + "database/sql" + "sync" + "time" + + "gorm.io/gorm/internal/lru" +) + +type Stmt struct { + *sql.Stmt + Transaction bool + prepared chan struct{} + prepareErr error +} + +func (stmt *Stmt) Error() error { + return stmt.prepareErr +} + +func (stmt *Stmt) Close() error { + <-stmt.prepared + + if stmt.Stmt != nil { + return stmt.Stmt.Close() + } + return nil +} + +// Store defines an interface for managing the caching operations of SQL statements (Stmt). +// This interface provides methods for creating new statements, retrieving all cache keys, +// getting cached statements, setting cached statements, and deleting cached statements. +type Store interface { + // New creates a new Stmt object and caches it. + // Parameters: + // ctx: The context for the request, which can carry deadlines, cancellation signals, etc. + // key: The key representing the SQL query, used for caching and preparing the statement. + // isTransaction: Indicates whether this operation is part of a transaction, which may affect the caching strategy. + // connPool: A connection pool that provides database connections. + // locker: A synchronization lock that is unlocked after initialization to avoid deadlocks. + // Returns: + // *Stmt: A newly created statement object for executing SQL operations. + // error: An error if the statement preparation fails. + New(ctx context.Context, key string, isTransaction bool, connPool ConnPool, locker sync.Locker) (*Stmt, error) + + // Keys returns a slice of all cache keys in the store. + Keys() []string + + // Get retrieves a Stmt object from the store based on the given key. + // Parameters: + // key: The key used to look up the Stmt object. + // Returns: + // *Stmt: The found Stmt object, or nil if not found. + // bool: Indicates whether the corresponding Stmt object was successfully found. + Get(key string) (*Stmt, bool) + + // Set stores the given Stmt object in the store and associates it with the specified key. + // Parameters: + // key: The key used to associate the Stmt object. + // value: The Stmt object to be stored. + Set(key string, value *Stmt) + + // Delete removes the Stmt object corresponding to the specified key from the store. + // Parameters: + // key: The key associated with the Stmt object to be deleted. + Delete(key string) +} + +// defaultMaxSize defines the default maximum capacity of the cache. +// Its value is the maximum value of the int64 type, which means that when the cache size is not specified, +// the cache can theoretically store as many elements as possible. +// (1 << 63) - 1 is the maximum value that an int64 type can represent. +const ( + defaultMaxSize = (1 << 63) - 1 + // defaultTTL defines the default time-to-live (TTL) for each cache entry. + // When the TTL for cache entries is not specified, each cache entry will expire after 24 hours. + defaultTTL = time.Hour * 24 +) + +// New creates and returns a new Store instance. +// +// Parameters: +// - size: The maximum capacity of the cache. 
If the provided size is less than or equal to 0, +// it defaults to defaultMaxSize. +// - ttl: The time-to-live duration for each cache entry. If the provided ttl is less than or equal to 0, +// it defaults to defaultTTL. +// +// This function defines an onEvicted callback that is invoked when a cache entry is evicted. +// The callback ensures that if the evicted value (v) is not nil, its Close method is called asynchronously +// to release associated resources. +// +// Returns: +// - A Store instance implemented by lruStore, which internally uses an LRU cache with the specified size, +// eviction callback, and TTL. +func New(size int, ttl time.Duration) Store { + if size <= 0 { + size = defaultMaxSize + } + + if ttl <= 0 { + ttl = defaultTTL + } + + onEvicted := func(k string, v *Stmt) { + if v != nil { + go v.Close() + } + } + return &lruStore{lru: lru.NewLRU[string, *Stmt](size, onEvicted, ttl)} +} + +type lruStore struct { + lru *lru.LRU[string, *Stmt] +} + +func (s *lruStore) Keys() []string { + return s.lru.Keys() +} + +func (s *lruStore) Get(key string) (*Stmt, bool) { + stmt, ok := s.lru.Get(key) + if ok && stmt != nil { + <-stmt.prepared + } + return stmt, ok +} + +func (s *lruStore) Set(key string, value *Stmt) { + s.lru.Add(key, value) +} + +func (s *lruStore) Delete(key string) { + s.lru.Remove(key) +} + +type ConnPool interface { + PrepareContext(ctx context.Context, query string) (*sql.Stmt, error) +} + +// New creates a new Stmt object for executing SQL queries. +// It caches the Stmt object for future use and handles preparation and error states. +// Parameters: +// +// ctx: Context for the request, used to carry deadlines, cancellation signals, etc. +// key: The key representing the SQL query, used for caching and preparing the statement. +// isTransaction: Indicates whether this operation is part of a transaction, affecting cache strategy. +// conn: A connection pool that provides database connections. +// locker: A synchronization lock that is unlocked after initialization to avoid deadlocks. +// +// Returns: +// +// *Stmt: A newly created statement object for executing SQL operations. +// error: An error if the statement preparation fails. +func (s *lruStore) New(ctx context.Context, key string, isTransaction bool, conn ConnPool, locker sync.Locker) (_ *Stmt, err error) { + // Create a Stmt object and set its Transaction property. + // The prepared channel is used to synchronize the statement preparation state. + cacheStmt := &Stmt{ + Transaction: isTransaction, + prepared: make(chan struct{}), + } + // Cache the Stmt object with the associated key. + s.Set(key, cacheStmt) + // Unlock after completing initialization to prevent deadlocks. + locker.Unlock() + + // Ensure the prepared channel is closed after the function execution completes. + defer close(cacheStmt.prepared) + + // Prepare the SQL statement using the provided connection. + cacheStmt.Stmt, err = conn.PrepareContext(ctx, key) + if err != nil { + // If statement preparation fails, record the error and remove the invalid Stmt object from the cache. + cacheStmt.prepareErr = err + s.Delete(key) + return &Stmt{}, err + } + + // Return the successfully prepared Stmt object. 
+ return cacheStmt, nil +} diff --git a/vendor/gorm.io/gorm/logger/logger.go b/vendor/gorm.io/gorm/logger/logger.go index 253f0325..8088cde2 100644 --- a/vendor/gorm.io/gorm/logger/logger.go +++ b/vendor/gorm.io/gorm/logger/logger.go @@ -80,6 +80,11 @@ var ( }) // Recorder logger records running SQL into a recorder instance Recorder = traceRecorder{Interface: Default, BeginAt: time.Now()} + + // RecorderParamsFilter defaults to no-op, allows to be run-over by a different implementation + RecorderParamsFilter = func(ctx context.Context, sql string, params ...interface{}) (string, []interface{}) { + return sql, params + } ) // New initialize logger @@ -211,3 +216,10 @@ func (l *traceRecorder) Trace(ctx context.Context, begin time.Time, fc func() (s l.SQL, l.RowsAffected = fc() l.Err = err } + +func (l *traceRecorder) ParamsFilter(ctx context.Context, sql string, params ...interface{}) (string, []interface{}) { + if RecorderParamsFilter == nil { + return sql, params + } + return RecorderParamsFilter(ctx, sql, params...) +} diff --git a/vendor/gorm.io/gorm/migrator/migrator.go b/vendor/gorm.io/gorm/migrator/migrator.go index 189a141f..cec4e30f 100644 --- a/vendor/gorm.io/gorm/migrator/migrator.go +++ b/vendor/gorm.io/gorm/migrator/migrator.go @@ -524,8 +524,8 @@ func (m Migrator) MigrateColumn(value interface{}, field *schema.Field, columnTy // check nullable if nullable, ok := columnType.Nullable(); ok && nullable == field.NotNull { - // not primary key & database is nullable - if !field.PrimaryKey && nullable { + // not primary key & current database is non-nullable(to be nullable) + if !field.PrimaryKey && !nullable { alterColumn = true } } diff --git a/vendor/gorm.io/gorm/prepare_stmt.go b/vendor/gorm.io/gorm/prepare_stmt.go index 094bb477..799df5bc 100644 --- a/vendor/gorm.io/gorm/prepare_stmt.go +++ b/vendor/gorm.io/gorm/prepare_stmt.go @@ -7,29 +7,35 @@ import ( "errors" "reflect" "sync" + "time" + + "gorm.io/gorm/internal/stmt_store" ) -type Stmt struct { - *sql.Stmt - Transaction bool - prepared chan struct{} - prepareErr error -} - type PreparedStmtDB struct { - Stmts map[string]*Stmt + Stmts stmt_store.Store Mux *sync.RWMutex ConnPool } -func NewPreparedStmtDB(connPool ConnPool) *PreparedStmtDB { +// NewPreparedStmtDB creates and initializes a new instance of PreparedStmtDB. +// +// Parameters: +// - connPool: A connection pool that implements the ConnPool interface, used for managing database connections. +// - maxSize: The maximum number of prepared statements that can be stored in the statement store. +// - ttl: The time-to-live duration for each prepared statement in the store. Statements older than this duration will be automatically removed. +// +// Returns: +// - A pointer to a PreparedStmtDB instance, which manages prepared statements using the provided connection pool and configuration. +func NewPreparedStmtDB(connPool ConnPool, maxSize int, ttl time.Duration) *PreparedStmtDB { return &PreparedStmtDB{ - ConnPool: connPool, - Stmts: make(map[string]*Stmt), - Mux: &sync.RWMutex{}, + ConnPool: connPool, // Assigns the provided connection pool to manage database connections. + Stmts: stmt_store.New(maxSize, ttl), // Initializes a new statement store with the specified maximum size and TTL. + Mux: &sync.RWMutex{}, // Sets up a read-write mutex for synchronizing access to the statement store. 
} } +// GetDBConn returns the underlying *sql.DB connection func (db *PreparedStmtDB) GetDBConn() (*sql.DB, error) { if sqldb, ok := db.ConnPool.(*sql.DB); ok { return sqldb, nil @@ -42,98 +48,41 @@ func (db *PreparedStmtDB) GetDBConn() (*sql.DB, error) { return nil, ErrInvalidDB } +// Close closes all prepared statements in the store func (db *PreparedStmtDB) Close() { db.Mux.Lock() defer db.Mux.Unlock() - for _, stmt := range db.Stmts { - go func(s *Stmt) { - // make sure the stmt must finish preparation first - <-s.prepared - if s.Stmt != nil { - _ = s.Close() - } - }(stmt) + for _, key := range db.Stmts.Keys() { + db.Stmts.Delete(key) } - // setting db.Stmts to nil to avoid further using - db.Stmts = nil } -func (sdb *PreparedStmtDB) Reset() { - sdb.Mux.Lock() - defer sdb.Mux.Unlock() - - for _, stmt := range sdb.Stmts { - go func(s *Stmt) { - // make sure the stmt must finish preparation first - <-s.prepared - if s.Stmt != nil { - _ = s.Close() - } - }(stmt) - } - sdb.Stmts = make(map[string]*Stmt) +// Reset Deprecated use Close instead +func (db *PreparedStmtDB) Reset() { + db.Close() } -func (db *PreparedStmtDB) prepare(ctx context.Context, conn ConnPool, isTransaction bool, query string) (Stmt, error) { +func (db *PreparedStmtDB) prepare(ctx context.Context, conn ConnPool, isTransaction bool, query string) (_ *stmt_store.Stmt, err error) { db.Mux.RLock() - if stmt, ok := db.Stmts[query]; ok && (!stmt.Transaction || isTransaction) { - db.Mux.RUnlock() - // wait for other goroutines prepared - <-stmt.prepared - if stmt.prepareErr != nil { - return Stmt{}, stmt.prepareErr + if db.Stmts != nil { + if stmt, ok := db.Stmts.Get(query); ok && (!stmt.Transaction || isTransaction) { + db.Mux.RUnlock() + return stmt, stmt.Error() } - - return *stmt, nil } db.Mux.RUnlock() + // retry db.Mux.Lock() - // double check - if stmt, ok := db.Stmts[query]; ok && (!stmt.Transaction || isTransaction) { - db.Mux.Unlock() - // wait for other goroutines prepared - <-stmt.prepared - if stmt.prepareErr != nil { - return Stmt{}, stmt.prepareErr + if db.Stmts != nil { + if stmt, ok := db.Stmts.Get(query); ok && (!stmt.Transaction || isTransaction) { + db.Mux.Unlock() + return stmt, stmt.Error() } - - return *stmt, nil - } - // check db.Stmts first to avoid Segmentation Fault(setting value to nil map) - // which cause by calling Close and executing SQL concurrently - if db.Stmts == nil { - db.Mux.Unlock() - return Stmt{}, ErrInvalidDB - } - // cache preparing stmt first - cacheStmt := Stmt{Transaction: isTransaction, prepared: make(chan struct{})} - db.Stmts[query] = &cacheStmt - db.Mux.Unlock() - - // prepare completed - defer close(cacheStmt.prepared) - - // Reason why cannot lock conn.PrepareContext - // suppose the maxopen is 1, g1 is creating record and g2 is querying record. - // 1. g1 begin tx, g1 is requeue because of waiting for the system call, now `db.ConnPool` db.numOpen == 1. - // 2. g2 select lock `conn.PrepareContext(ctx, query)`, now db.numOpen == db.maxOpen , wait for release. - // 3. g1 tx exec insert, wait for unlock `conn.PrepareContext(ctx, query)` to finish tx and release. 
- stmt, err := conn.PrepareContext(ctx, query) - if err != nil { - cacheStmt.prepareErr = err - db.Mux.Lock() - delete(db.Stmts, query) - db.Mux.Unlock() - return Stmt{}, err } - db.Mux.Lock() - cacheStmt.Stmt = stmt - db.Mux.Unlock() - - return cacheStmt, nil + return db.Stmts.New(ctx, query, isTransaction, conn, db.Mux) } func (db *PreparedStmtDB) BeginTx(ctx context.Context, opt *sql.TxOptions) (ConnPool, error) { @@ -162,10 +111,7 @@ func (db *PreparedStmtDB) ExecContext(ctx context.Context, query string, args .. if err == nil { result, err = stmt.ExecContext(ctx, args...) if errors.Is(err, driver.ErrBadConn) { - db.Mux.Lock() - defer db.Mux.Unlock() - go stmt.Close() - delete(db.Stmts, query) + db.Stmts.Delete(query) } } return result, err @@ -176,11 +122,7 @@ func (db *PreparedStmtDB) QueryContext(ctx context.Context, query string, args . if err == nil { rows, err = stmt.QueryContext(ctx, args...) if errors.Is(err, driver.ErrBadConn) { - db.Mux.Lock() - defer db.Mux.Unlock() - - go stmt.Close() - delete(db.Stmts, query) + db.Stmts.Delete(query) } } return rows, err @@ -230,11 +172,7 @@ func (tx *PreparedStmtTX) ExecContext(ctx context.Context, query string, args .. if err == nil { result, err = tx.Tx.StmtContext(ctx, stmt.Stmt).ExecContext(ctx, args...) if errors.Is(err, driver.ErrBadConn) { - tx.PreparedStmtDB.Mux.Lock() - defer tx.PreparedStmtDB.Mux.Unlock() - - go stmt.Close() - delete(tx.PreparedStmtDB.Stmts, query) + tx.PreparedStmtDB.Stmts.Delete(query) } } return result, err @@ -245,11 +183,7 @@ func (tx *PreparedStmtTX) QueryContext(ctx context.Context, query string, args . if err == nil { rows, err = tx.Tx.StmtContext(ctx, stmt.Stmt).QueryContext(ctx, args...) if errors.Is(err, driver.ErrBadConn) { - tx.PreparedStmtDB.Mux.Lock() - defer tx.PreparedStmtDB.Mux.Unlock() - - go stmt.Close() - delete(tx.PreparedStmtDB.Stmts, query) + tx.PreparedStmtDB.Stmts.Delete(query) } } return rows, err diff --git a/vendor/gorm.io/gorm/scan.go b/vendor/gorm.io/gorm/scan.go index d852c2c9..6dc55f62 100644 --- a/vendor/gorm.io/gorm/scan.go +++ b/vendor/gorm.io/gorm/scan.go @@ -15,7 +15,7 @@ func prepareValues(values []interface{}, db *DB, columnTypes []*sql.ColumnType, if db.Statement.Schema != nil { for idx, name := range columns { if field := db.Statement.Schema.LookUpField(name); field != nil { - values[idx] = reflect.New(reflect.PtrTo(field.FieldType)).Interface() + values[idx] = reflect.New(reflect.PointerTo(field.FieldType)).Interface() continue } values[idx] = new(interface{}) @@ -23,7 +23,7 @@ func prepareValues(values []interface{}, db *DB, columnTypes []*sql.ColumnType, } else if len(columnTypes) > 0 { for idx, columnType := range columnTypes { if columnType.ScanType() != nil { - values[idx] = reflect.New(reflect.PtrTo(columnType.ScanType())).Interface() + values[idx] = reflect.New(reflect.PointerTo(columnType.ScanType())).Interface() } else { values[idx] = new(interface{}) } diff --git a/vendor/gorm.io/gorm/schema/field.go b/vendor/gorm.io/gorm/schema/field.go index a16c98ab..d1a633ce 100644 --- a/vendor/gorm.io/gorm/schema/field.go +++ b/vendor/gorm.io/gorm/schema/field.go @@ -996,6 +996,6 @@ func (field *Field) setupNewValuePool() { } if field.NewValuePool == nil { - field.NewValuePool = poolInitializer(reflect.PtrTo(field.IndirectFieldType)) + field.NewValuePool = poolInitializer(reflect.PointerTo(field.IndirectFieldType)) } } diff --git a/vendor/gorm.io/gorm/schema/index.go b/vendor/gorm.io/gorm/schema/index.go index f4f36751..a1cdc639 100644 --- 
a/vendor/gorm.io/gorm/schema/index.go +++ b/vendor/gorm.io/gorm/schema/index.go @@ -23,12 +23,13 @@ type IndexOption struct { Sort string // DESC, ASC Collate string Length int - priority int + Priority int } // ParseIndexes parse schema indexes -func (schema *Schema) ParseIndexes() map[string]Index { - indexes := map[string]Index{} +func (schema *Schema) ParseIndexes() []*Index { + indexesByName := map[string]*Index{} + indexes := []*Index{} for _, field := range schema.Fields { if field.TagSettings["INDEX"] != "" || field.TagSettings["UNIQUEINDEX"] != "" { @@ -38,7 +39,12 @@ func (schema *Schema) ParseIndexes() map[string]Index { break } for _, index := range fieldIndexes { - idx := indexes[index.Name] + idx := indexesByName[index.Name] + if idx == nil { + idx = &Index{Name: index.Name} + indexesByName[index.Name] = idx + indexes = append(indexes, idx) + } idx.Name = index.Name if idx.Class == "" { idx.Class = index.Class @@ -58,10 +64,8 @@ func (schema *Schema) ParseIndexes() map[string]Index { idx.Fields = append(idx.Fields, index.Fields...) sort.Slice(idx.Fields, func(i, j int) bool { - return idx.Fields[i].priority < idx.Fields[j].priority + return idx.Fields[i].Priority < idx.Fields[j].Priority }) - - indexes[index.Name] = idx } } } @@ -78,12 +82,12 @@ func (schema *Schema) LookIndex(name string) *Index { indexes := schema.ParseIndexes() for _, index := range indexes { if index.Name == name { - return &index + return index } for _, field := range index.Fields { if field.Name == name { - return &index + return index } } } @@ -111,17 +115,14 @@ func parseFieldIndexes(field *Field) (indexes []Index, err error) { idx = len(tag) } - if idx != -1 { - name = tag[0:idx] - } - + name = tag[0:idx] if name == "" { subName := field.Name const key = "COMPOSITE" if composite, found := settings[key]; found { if len(composite) == 0 || composite == key { err = fmt.Errorf( - "The composite tag of %s.%s cannot be empty", + "the composite tag of %s.%s cannot be empty", field.Schema.Name, field.Name) return @@ -154,7 +155,7 @@ func parseFieldIndexes(field *Field) (indexes []Index, err error) { Sort: settings["SORT"], Collate: settings["COLLATE"], Length: length, - priority: priority, + Priority: priority, }}, }) } diff --git a/vendor/gorm.io/gorm/schema/relationship.go b/vendor/gorm.io/gorm/schema/relationship.go index 32676b39..def4a595 100644 --- a/vendor/gorm.io/gorm/schema/relationship.go +++ b/vendor/gorm.io/gorm/schema/relationship.go @@ -5,6 +5,7 @@ import ( "fmt" "reflect" "strings" + "sync" "github.com/jinzhu/inflection" "golang.org/x/text/cases" @@ -32,6 +33,8 @@ type Relationships struct { Relations map[string]*Relationship EmbeddedRelations map[string]*Relationships + + Mux sync.RWMutex } type Relationship struct { @@ -98,9 +101,10 @@ func (schema *Schema) parseRelation(field *Field) *Relationship { } if relation.Type == has { - // don't add relations to embedded schema, which might be shared if relation.FieldSchema != relation.Schema && relation.Polymorphic == nil && field.OwnerSchema == nil { + relation.FieldSchema.Relationships.Mux.Lock() relation.FieldSchema.Relationships.Relations["_"+relation.Schema.Name+"_"+relation.Name] = relation + relation.FieldSchema.Relationships.Mux.Unlock() } switch field.IndirectFieldType.Kind() { diff --git a/vendor/gorm.io/gorm/schema/utils.go b/vendor/gorm.io/gorm/schema/utils.go index 7fdda185..fa1c65d4 100644 --- a/vendor/gorm.io/gorm/schema/utils.go +++ b/vendor/gorm.io/gorm/schema/utils.go @@ -71,7 +71,7 @@ func appendSettingFromTag(tag 
reflect.StructTag, value string) reflect.StructTag // GetRelationsValues get relations's values from a reflect value func GetRelationsValues(ctx context.Context, reflectValue reflect.Value, rels []*Relationship) (reflectResults reflect.Value) { for _, rel := range rels { - reflectResults = reflect.MakeSlice(reflect.SliceOf(reflect.PtrTo(rel.FieldSchema.ModelType)), 0, 1) + reflectResults = reflect.MakeSlice(reflect.SliceOf(reflect.PointerTo(rel.FieldSchema.ModelType)), 0, 1) appendToResults := func(value reflect.Value) { if _, isZero := rel.Field.ValueOf(ctx, value); !isZero { diff --git a/vendor/modules.txt b/vendor/modules.txt index 283eaf52..9ca8e528 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -212,8 +212,8 @@ github.com/prometheus/client_model/go ## explicit; go 1.21 github.com/prometheus/common/expfmt github.com/prometheus/common/model -# github.com/prometheus/procfs v0.16.0 -## explicit; go 1.21 +# github.com/prometheus/procfs v0.16.1 +## explicit; go 1.23.0 github.com/prometheus/procfs github.com/prometheus/procfs/internal/fs github.com/prometheus/procfs/internal/util @@ -277,7 +277,7 @@ go.opentelemetry.io/otel/trace go.opentelemetry.io/otel/trace/embedded go.opentelemetry.io/otel/trace/internal/telemetry go.opentelemetry.io/otel/trace/noop -# golang.org/x/crypto v0.37.0 +# golang.org/x/crypto v0.38.0 ## explicit; go 1.23.0 golang.org/x/crypto/bcrypt golang.org/x/crypto/blowfish @@ -286,23 +286,23 @@ golang.org/x/crypto/chacha20poly1305 golang.org/x/crypto/hkdf golang.org/x/crypto/internal/alias golang.org/x/crypto/internal/poly1305 -# golang.org/x/net v0.39.0 +# golang.org/x/net v0.40.0 ## explicit; go 1.23.0 golang.org/x/net/internal/socks golang.org/x/net/proxy -# golang.org/x/oauth2 v0.29.0 +# golang.org/x/oauth2 v0.30.0 ## explicit; go 1.23.0 golang.org/x/oauth2 golang.org/x/oauth2/internal -# golang.org/x/sync v0.13.0 +# golang.org/x/sync v0.14.0 ## explicit; go 1.23.0 golang.org/x/sync/errgroup -# golang.org/x/sys v0.32.0 +# golang.org/x/sys v0.33.0 ## explicit; go 1.23.0 golang.org/x/sys/cpu golang.org/x/sys/unix golang.org/x/sys/windows -# golang.org/x/text v0.24.0 +# golang.org/x/text v0.25.0 ## explicit; go 1.23.0 golang.org/x/text/cases golang.org/x/text/internal @@ -362,11 +362,13 @@ gorm.io/driver/mysql # gorm.io/driver/sqlite v1.5.7 ## explicit; go 1.20 gorm.io/driver/sqlite -# gorm.io/gorm v1.25.12 +# gorm.io/gorm v1.26.0 ## explicit; go 1.18 gorm.io/gorm gorm.io/gorm/callbacks gorm.io/gorm/clause +gorm.io/gorm/internal/lru +gorm.io/gorm/internal/stmt_store gorm.io/gorm/logger gorm.io/gorm/migrator gorm.io/gorm/schema diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index 82a8a052..c3302f75 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -72,7 +72,7 @@ func (w *Worker) HandleJobsCompleted(jobs []params.ScaleSetJobMessage) (err erro for _, job := range jobs { if err := w.recordOrUpdateJob(job); err != nil { // recording scale set jobs are purely informational for now. - slog.ErrorContext(w.ctx, "recording job", "job", job, "error", err) + slog.ErrorContext(w.ctx, "failed to save job data", "job", job, "error", err) } if job.RunnerName == "" { @@ -106,7 +106,7 @@ func (w *Worker) HandleJobsStarted(jobs []params.ScaleSetJobMessage) (err error) for _, job := range jobs { if err := w.recordOrUpdateJob(job); err != nil { // recording scale set jobs are purely informational for now. 
- slog.ErrorContext(w.ctx, "recording job", "job", job, "error", err) + slog.ErrorContext(w.ctx, "failed to save job data", "job", job, "error", err) } if job.RunnerName == "" { @@ -138,7 +138,7 @@ func (w *Worker) HandleJobsAvailable(jobs []params.ScaleSetJobMessage) error { for _, job := range jobs { if err := w.recordOrUpdateJob(job); err != nil { // recording scale set jobs are purely informational for now. - slog.ErrorContext(w.ctx, "recording job", "job", job, "error", err) + slog.ErrorContext(w.ctx, "failed to save job data", "job", job, "error", err) } } return nil diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index df4ab0bc..9f2087d7 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -118,11 +118,13 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage var completedJobs []params.ScaleSetJobMessage var availableJobs []params.ScaleSetJobMessage var startedJobs []params.ScaleSetJobMessage + var assignedJobs []params.ScaleSetJobMessage for _, job := range body { switch job.MessageType { case params.MessageTypeJobAssigned: slog.InfoContext(l.ctx, "new job assigned", "job_id", job.RunnerRequestID, "job_name", job.JobDisplayName) + assignedJobs = append(assignedJobs, job) case params.MessageTypeJobStarted: slog.InfoContext(l.ctx, "job started", "job_id", job.RunnerRequestID, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) startedJobs = append(startedJobs, job) @@ -137,6 +139,12 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage } } + if len(assignedJobs) > 0 { + if err := l.scaleSetHelper.HandleJobsAvailable(assignedJobs); err != nil { + slog.ErrorContext(l.ctx, "error handling available jobs", "error", err) + } + } + if len(availableJobs) > 0 { jobIDs := make([]int64, len(availableJobs)) for idx, job := range availableJobs { From f7cd743a9c5ab8325b15c3837f09d06b6ff58326 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 6 May 2025 19:37:07 +0000 Subject: [PATCH 046/179] Add more tests Signed-off-by: Gabriel Adrian Samfira --- cache/cache_test.go | 532 ++++++++++++++++++++++++++++++++++++++++++ cache/entity_cache.go | 30 ++- 2 files changed, 556 insertions(+), 6 deletions(-) diff --git a/cache/cache_test.go b/cache/cache_test.go index a2155e97..aef4e94a 100644 --- a/cache/cache_test.go +++ b/cache/cache_test.go @@ -30,10 +30,18 @@ func (c *CacheTestSuite) TearDownTest() { githubToolsCache.mux.Lock() defer githubToolsCache.mux.Unlock() githubToolsCache.entities = make(map[string]GithubEntityTools) + credentialsCache.cache = make(map[uint]params.GithubCredentials) + instanceCache.cache = make(map[string]params.Instance) + entityCache = &EntityCache{ + entities: make(map[string]EntityItem), + } } func (c *CacheTestSuite) TestCacheIsInitialized() { c.Require().NotNil(githubToolsCache) + c.Require().NotNil(credentialsCache) + c.Require().NotNil(instanceCache) + c.Require().NotNil(entityCache) } func (c *CacheTestSuite) TestSetCacheWorks() { @@ -81,6 +89,530 @@ func (c *CacheTestSuite) TestGetInexistentCache() { c.Require().Nil(cachedTools) } +func (c *CacheTestSuite) TestSetGithubCredentials() { + credentials := params.GithubCredentials{ + ID: 1, + } + SetGithubCredentials(credentials) + cachedCreds, ok := GetGithubCredentials(1) + c.Require().True(ok) + c.Require().Equal(credentials.ID, cachedCreds.ID) +} + +func (c *CacheTestSuite) TestGetGithubCredentials() { + credentials := params.GithubCredentials{ + ID: 
1, + } + SetGithubCredentials(credentials) + cachedCreds, ok := GetGithubCredentials(1) + c.Require().True(ok) + c.Require().Equal(credentials.ID, cachedCreds.ID) + + nonExisting, ok := GetGithubCredentials(2) + c.Require().False(ok) + c.Require().Equal(params.GithubCredentials{}, nonExisting) +} + +func (c *CacheTestSuite) TestDeleteGithubCredentials() { + credentials := params.GithubCredentials{ + ID: 1, + } + SetGithubCredentials(credentials) + cachedCreds, ok := GetGithubCredentials(1) + c.Require().True(ok) + c.Require().Equal(credentials.ID, cachedCreds.ID) + + DeleteGithubCredentials(1) + cachedCreds, ok = GetGithubCredentials(1) + c.Require().False(ok) + c.Require().Equal(params.GithubCredentials{}, cachedCreds) +} + +func (c *CacheTestSuite) TestGetAllGithubCredentials() { + credentials1 := params.GithubCredentials{ + ID: 1, + } + credentials2 := params.GithubCredentials{ + ID: 2, + } + SetGithubCredentials(credentials1) + SetGithubCredentials(credentials2) + + cachedCreds := GetAllGithubCredentials() + c.Require().Len(cachedCreds, 2) + c.Require().Contains(cachedCreds, credentials1) + c.Require().Contains(cachedCreds, credentials2) +} + +func (c *CacheTestSuite) TestSetInstanceCache() { + instance := params.Instance{ + Name: "test-instance", + } + SetInstanceCache(instance) + cachedInstance, ok := GetInstanceCache("test-instance") + c.Require().True(ok) + c.Require().Equal(instance.Name, cachedInstance.Name) +} + +func (c *CacheTestSuite) TestGetInstanceCache() { + instance := params.Instance{ + Name: "test-instance", + } + SetInstanceCache(instance) + cachedInstance, ok := GetInstanceCache("test-instance") + c.Require().True(ok) + c.Require().Equal(instance.Name, cachedInstance.Name) + + nonExisting, ok := GetInstanceCache("non-existing") + c.Require().False(ok) + c.Require().Equal(params.Instance{}, nonExisting) +} + +func (c *CacheTestSuite) TestDeleteInstanceCache() { + instance := params.Instance{ + Name: "test-instance", + } + SetInstanceCache(instance) + cachedInstance, ok := GetInstanceCache("test-instance") + c.Require().True(ok) + c.Require().Equal(instance.Name, cachedInstance.Name) + + DeleteInstanceCache("test-instance") + cachedInstance, ok = GetInstanceCache("test-instance") + c.Require().False(ok) + c.Require().Equal(params.Instance{}, cachedInstance) +} + +func (c *CacheTestSuite) TestGetAllInstances() { + instance1 := params.Instance{ + Name: "test-instance-1", + } + instance2 := params.Instance{ + Name: "test-instance-2", + } + SetInstanceCache(instance1) + SetInstanceCache(instance2) + + cachedInstances := GetAllInstancesCache() + c.Require().Len(cachedInstances, 2) + c.Require().Contains(cachedInstances, instance1) + c.Require().Contains(cachedInstances, instance2) +} + +func (c *CacheTestSuite) TestGetInstancesForPool() { + instance1 := params.Instance{ + Name: "test-instance-1", + PoolID: "pool-1", + } + instance2 := params.Instance{ + Name: "test-instance-2", + PoolID: "pool-1", + } + instance3 := params.Instance{ + Name: "test-instance-3", + PoolID: "pool-2", + } + SetInstanceCache(instance1) + SetInstanceCache(instance2) + SetInstanceCache(instance3) + + cachedInstances := GetInstancesForPool("pool-1") + c.Require().Len(cachedInstances, 2) + c.Require().Contains(cachedInstances, instance1) + c.Require().Contains(cachedInstances, instance2) + + cachedInstances = GetInstancesForPool("pool-2") + c.Require().Len(cachedInstances, 1) + c.Require().Contains(cachedInstances, instance3) +} + +func (c *CacheTestSuite) TestGetInstancesForScaleSet() { + instance1 := 
params.Instance{ + Name: "test-instance-1", + ScaleSetID: 1, + } + instance2 := params.Instance{ + Name: "test-instance-2", + ScaleSetID: 1, + } + instance3 := params.Instance{ + Name: "test-instance-3", + ScaleSetID: 2, + } + SetInstanceCache(instance1) + SetInstanceCache(instance2) + SetInstanceCache(instance3) + + cachedInstances := GetInstancesForScaleSet(1) + c.Require().Len(cachedInstances, 2) + c.Require().Contains(cachedInstances, instance1) + c.Require().Contains(cachedInstances, instance2) + + cachedInstances = GetInstancesForScaleSet(2) + c.Require().Len(cachedInstances, 1) + c.Require().Contains(cachedInstances, instance3) +} + +func (c *CacheTestSuite) TestSetGetEntityCache() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } + SetEntity(entity) + cachedEntity, ok := GetEntity("test-entity") + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + + entity.Credentials.Description = "test description" + SetEntity(entity) + cachedEntity, ok = GetEntity("test-entity") + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + c.Require().Equal(entity.Credentials.Description, cachedEntity.Credentials.Description) +} + +func (c *CacheTestSuite) TestReplaceEntityPools() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + Credentials: params.GithubCredentials{ + ID: 1, + }, + } + pool1 := params.Pool{ + ID: "pool-1", + } + pool2 := params.Pool{ + ID: "pool-2", + } + + credentials := params.GithubCredentials{ + ID: 1, + Name: "test", + } + SetGithubCredentials(credentials) + + SetEntity(entity) + ReplaceEntityPools(entity.ID, []params.Pool{pool1, pool2}) + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + c.Require().Equal("test", cachedEntity.Credentials.Name) + + pools := GetEntityPools(entity.ID) + c.Require().Len(pools, 2) + c.Require().Contains(pools, pool1) + c.Require().Contains(pools, pool2) +} + +func (c *CacheTestSuite) TestReplaceEntityScaleSets() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } + scaleSet1 := params.ScaleSet{ + ID: 1, + } + scaleSet2 := params.ScaleSet{ + ID: 2, + } + + SetEntity(entity) + ReplaceEntityScaleSets(entity.ID, map[uint]params.ScaleSet{1: scaleSet1, 2: scaleSet2}) + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + + scaleSets := GetEntityScaleSets(entity.ID) + c.Require().Len(scaleSets, 2) + c.Require().Contains(scaleSets, scaleSet1) + c.Require().Contains(scaleSets, scaleSet2) +} + +func (c *CacheTestSuite) TestDeleteEntity() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } + SetEntity(entity) + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + + DeleteEntity(entity.ID) + cachedEntity, ok = GetEntity(entity.ID) + c.Require().False(ok) + c.Require().Equal(params.GithubEntity{}, cachedEntity) +} + +func (c *CacheTestSuite) TestSetEntityPool() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } + pool := params.Pool{ + ID: "pool-1", + } + + SetEntity(entity) + + 
SetEntityPool(entity.ID, pool) + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + pools := GetEntityPools(entity.ID) + c.Require().Len(pools, 1) + c.Require().Contains(pools, pool) + c.Require().False(pools[0].Enabled) + + pool.Enabled = true + SetEntityPool(entity.ID, pool) + cachedEntity, ok = GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + pools = GetEntityPools(entity.ID) + c.Require().Len(pools, 1) + c.Require().Contains(pools, pool) + c.Require().True(pools[0].Enabled) +} + +func (c *CacheTestSuite) TestSetEntityScaleSet() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } + scaleSet := params.ScaleSet{ + ID: 1, + } + + SetEntity(entity) + SetEntityScaleSet(entity.ID, scaleSet) + + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + scaleSets := GetEntityScaleSets(entity.ID) + c.Require().Len(scaleSets, 1) + c.Require().Contains(scaleSets, scaleSet) + c.Require().False(scaleSets[0].Enabled) + + scaleSet.Enabled = true + SetEntityScaleSet(entity.ID, scaleSet) + scaleSets = GetEntityScaleSets(entity.ID) + c.Require().Len(scaleSets, 1) + c.Require().Contains(scaleSets, scaleSet) + c.Require().True(scaleSets[0].Enabled) +} + +func (c *CacheTestSuite) TestDeleteEntityPool() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } + pool := params.Pool{ + ID: "pool-1", + } + + SetEntity(entity) + SetEntityPool(entity.ID, pool) + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + + DeleteEntityPool(entity.ID, pool.ID) + pools := GetEntityPools(entity.ID) + c.Require().Len(pools, 0) + c.Require().NotContains(pools, pool) +} + +func (c *CacheTestSuite) TestDeleteEntityScaleSet() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } + scaleSet := params.ScaleSet{ + ID: 1, + } + + SetEntity(entity) + SetEntityScaleSet(entity.ID, scaleSet) + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + + DeleteEntityScaleSet(entity.ID, scaleSet.ID) + scaleSets := GetEntityScaleSets(entity.ID) + c.Require().Len(scaleSets, 0) + c.Require().NotContains(scaleSets, scaleSet) +} + +func (c *CacheTestSuite) TestFindPoolsMatchingAllTags() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } + pool1 := params.Pool{ + ID: "pool-1", + Tags: []params.Tag{ + { + Name: "tag1", + }, + { + Name: "tag2", + }, + }, + } + pool2 := params.Pool{ + ID: "pool-2", + Tags: []params.Tag{ + { + Name: "tag1", + }, + }, + } + pool3 := params.Pool{ + ID: "pool-3", + Tags: []params.Tag{ + { + Name: "tag3", + }, + }, + } + + SetEntity(entity) + SetEntityPool(entity.ID, pool1) + SetEntityPool(entity.ID, pool2) + SetEntityPool(entity.ID, pool3) + + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + pools := FindPoolsMatchingAllTags(entity.ID, []string{"tag1", "tag2"}) + c.Require().Len(pools, 1) + c.Require().Contains(pools, pool1) + pools = FindPoolsMatchingAllTags(entity.ID, []string{"tag1"}) + c.Require().Len(pools, 2) + 
c.Require().Contains(pools, pool1) + c.Require().Contains(pools, pool2) + pools = FindPoolsMatchingAllTags(entity.ID, []string{"tag3"}) + c.Require().Len(pools, 1) + c.Require().Contains(pools, pool3) + pools = FindPoolsMatchingAllTags(entity.ID, []string{"tag4"}) + c.Require().Len(pools, 0) +} + +func (c *CacheTestSuite) TestGetEntityPools() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } + pool1 := params.Pool{ + ID: "pool-1", + Tags: []params.Tag{ + { + Name: "tag1", + }, + { + Name: "tag2", + }, + }, + } + pool2 := params.Pool{ + ID: "pool-2", + Tags: []params.Tag{ + { + Name: "tag1", + }, + { + Name: "tag3", + }, + }, + } + + SetEntity(entity) + SetEntityPool(entity.ID, pool1) + SetEntityPool(entity.ID, pool2) + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + pools := GetEntityPools(entity.ID) + c.Require().Len(pools, 2) + c.Require().Contains(pools, pool1) + c.Require().Contains(pools, pool2) +} + +func (c *CacheTestSuite) TestGetEntityScaleSet() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } + scaleSet := params.ScaleSet{ + ID: 1, + } + + SetEntity(entity) + SetEntityScaleSet(entity.ID, scaleSet) + + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + scaleSets, ok := GetEntityScaleSet(entity.ID, scaleSet.ID) + c.Require().True(ok) + c.Require().Equal(scaleSet.ID, scaleSets.ID) +} + +func (c *CacheTestSuite) TestGetEntityPool() { + entity := params.GithubEntity{ + ID: "test-entity", + EntityType: params.GithubEntityTypeOrganization, + Name: "test", + Owner: "test", + } + + pool := params.Pool{ + ID: "pool-1", + Tags: []params.Tag{ + { + Name: "tag1", + }, + { + Name: "tag2", + }, + }, + } + + SetEntity(entity) + SetEntityPool(entity.ID, pool) + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + poolFromCache, ok := GetEntityPool(entity.ID, pool.ID) + c.Require().True(ok) + c.Require().Equal(pool.ID, poolFromCache.ID) +} + func TestCacheTestSuite(t *testing.T) { t.Parallel() suite.Run(t, new(CacheTestSuite)) diff --git a/cache/entity_cache.go b/cache/entity_cache.go index 08e218df..cd8d80eb 100644 --- a/cache/entity_cache.go +++ b/cache/entity_cache.go @@ -28,11 +28,11 @@ type EntityCache struct { entities map[string]EntityItem } -func (e *EntityCache) GetEntity(entity params.GithubEntity) (EntityItem, bool) { +func (e *EntityCache) GetEntity(entityID string) (params.GithubEntity, bool) { e.mux.Lock() defer e.mux.Unlock() - if cache, ok := e.entities[entity.ID]; ok { + if cache, ok := e.entities[entityID]; ok { // Updating specific credential details will not update entity cache which // uses those credentials. 
// Entity credentials in the cache are only updated if you swap the creds @@ -41,9 +41,9 @@ func (e *EntityCache) GetEntity(entity params.GithubEntity) (EntityItem, bool) { if ok { cache.Entity.Credentials = creds } - return cache, true + return cache.Entity, true } - return EntityItem{}, false + return params.GithubEntity{}, false } func (e *EntityCache) SetEntity(entity params.GithubEntity) { @@ -193,8 +193,22 @@ func (e *EntityCache) GetEntityPools(entityID string) []params.Pool { return nil } -func GetEntity(entity params.GithubEntity) (EntityItem, bool) { - return entityCache.GetEntity(entity) +func (e *EntityCache) GetEntityScaleSets(entityID string) []params.ScaleSet { + e.mux.Lock() + defer e.mux.Unlock() + + if cache, ok := e.entities[entityID]; ok { + var scaleSets []params.ScaleSet + for _, scaleSet := range cache.ScaleSets { + scaleSets = append(scaleSets, scaleSet) + } + return scaleSets + } + return nil +} + +func GetEntity(entityID string) (params.GithubEntity, bool) { + return entityCache.GetEntity(entityID) } func SetEntity(entity params.GithubEntity) { @@ -244,3 +258,7 @@ func FindPoolsMatchingAllTags(entityID string, tags []string) []params.Pool { func GetEntityPools(entityID string) []params.Pool { return entityCache.GetEntityPools(entityID) } + +func GetEntityScaleSets(entityID string) []params.ScaleSet { + return entityCache.GetEntityScaleSets(entityID) +} From 2f2ff62411345da1ace70d0e867a5e91dd96c3cc Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 6 May 2025 20:59:41 +0000 Subject: [PATCH 047/179] Deduplicate code Signed-off-by: Gabriel Adrian Samfira --- runner/pool/pool.go | 21 +++++---------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 73a0b0fa..3e96d1e9 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -162,8 +162,11 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { return nil } - var jobParams params.Job - var err error + jobParams, err := r.paramsWorkflowJobToParamsJob(job) + if err != nil { + return errors.Wrap(err, "converting job to params") + } + var triggeredBy int64 defer func() { if jobParams.ID == 0 { @@ -213,16 +216,7 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { case "queued": // Record the job in the database. Queued jobs will be picked up by the consumeQueuedJobs() method // when reconciling. - jobParams, err = r.paramsWorkflowJobToParamsJob(job) - if err != nil { - return errors.Wrap(err, "converting job to params") - } case "completed": - jobParams, err = r.paramsWorkflowJobToParamsJob(job) - if err != nil { - return errors.Wrap(err, "converting job to params") - } - // If job was not assigned to a runner, we can ignore it. if jobParams.RunnerName == "" { slog.InfoContext( @@ -263,11 +257,6 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { return errors.Wrap(err, "updating runner") } case "in_progress": - jobParams, err = r.paramsWorkflowJobToParamsJob(job) - if err != nil { - return errors.Wrap(err, "converting job to params") - } - fromCache, ok := cache.GetInstanceCache(jobParams.RunnerName) if !ok { slog.DebugContext(r.ctx, "instance not found in cache", "runner_name", jobParams.RunnerName) From d0c9462a5d8023a0979a832044377612d249ab91 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 7 May 2025 08:01:36 +0000 Subject: [PATCH 048/179] Add cache worker Add dedicated worker to maintain cache. 
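This worker replaces the dedicated credentials worker: it subscribes to
database watcher events and keeps the in-memory caches (GitHub entities,
their pools and scale sets, and related state) in sync with the database,
so consumers such as the pool manager no longer need to load pools into
the cache themselves. Wiring it up in cmd/garm/main.go amounts to roughly
the following (illustrative excerpt only; the diff below has the exact code):

    cacheWorker := cache.NewWorker(ctx, db)
    if err := cacheWorker.Start(); err != nil {
        log.Fatalf("failed to start cache worker: %+v", err)
    }

    <-ctx.Done()

    if err := cacheWorker.Stop(); err != nil {
        slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop cache worker")
    }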
Signed-off-by: Gabriel Adrian Samfira --- cache/cache_test.go | 2 +- cache/entity_cache.go | 17 +- cmd/garm/main.go | 8 +- runner/pool/pool.go | 7 - runner/pool/util.go | 2 - runner/pool/watcher.go | 20 -- workers/cache/cache.go | 338 +++++++++++++++++++++++++ workers/credentials/credentials.go | 133 ---------- workers/entity/controller.go | 10 - workers/entity/controller_watcher.go | 2 - workers/provider/provider.go | 15 -- workers/scaleset/controller.go | 2 - workers/scaleset/controller_watcher.go | 7 - 13 files changed, 355 insertions(+), 208 deletions(-) create mode 100644 workers/cache/cache.go delete mode 100644 workers/credentials/credentials.go diff --git a/cache/cache_test.go b/cache/cache_test.go index aef4e94a..43b15953 100644 --- a/cache/cache_test.go +++ b/cache/cache_test.go @@ -324,7 +324,7 @@ func (c *CacheTestSuite) TestReplaceEntityScaleSets() { } SetEntity(entity) - ReplaceEntityScaleSets(entity.ID, map[uint]params.ScaleSet{1: scaleSet1, 2: scaleSet2}) + ReplaceEntityScaleSets(entity.ID, []params.ScaleSet{scaleSet1, scaleSet2}) cachedEntity, ok := GetEntity(entity.ID) c.Require().True(ok) c.Require().Equal(entity.ID, cachedEntity.ID) diff --git a/cache/entity_cache.go b/cache/entity_cache.go index cd8d80eb..5a71b184 100644 --- a/cache/entity_cache.go +++ b/cache/entity_cache.go @@ -82,14 +82,21 @@ func (e *EntityCache) ReplaceEntityPools(entityID string, pools []params.Pool) { e.entities[entityID] = cache } -func (e *EntityCache) ReplaceEntityScaleSets(entityID string, scaleSets map[uint]params.ScaleSet) { +func (e *EntityCache) ReplaceEntityScaleSets(entityID string, scaleSets []params.ScaleSet) { e.mux.Lock() defer e.mux.Unlock() - if cache, ok := e.entities[entityID]; ok { - cache.ScaleSets = scaleSets - e.entities[entityID] = cache + cache, ok := e.entities[entityID] + if !ok { + return } + + scaleSetsByID := map[uint]params.ScaleSet{} + for _, scaleSet := range scaleSets { + scaleSetsByID[scaleSet.ID] = scaleSet + } + cache.ScaleSets = scaleSetsByID + e.entities[entityID] = cache } func (e *EntityCache) DeleteEntity(entityID string) { @@ -219,7 +226,7 @@ func ReplaceEntityPools(entityID string, pools []params.Pool) { entityCache.ReplaceEntityPools(entityID, pools) } -func ReplaceEntityScaleSets(entityID string, scaleSets map[uint]params.ScaleSet) { +func ReplaceEntityScaleSets(entityID string, scaleSets []params.ScaleSet) { entityCache.ReplaceEntityScaleSets(entityID, scaleSets) } diff --git a/cmd/garm/main.go b/cmd/garm/main.go index 958ea001..20f34eba 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -51,7 +51,7 @@ import ( garmUtil "github.com/cloudbase/garm/util" "github.com/cloudbase/garm/util/appdefaults" "github.com/cloudbase/garm/websocket" - "github.com/cloudbase/garm/workers/credentials" + "github.com/cloudbase/garm/workers/cache" "github.com/cloudbase/garm/workers/entity" "github.com/cloudbase/garm/workers/provider" ) @@ -238,11 +238,11 @@ func main() { log.Fatal(err) } - credsWorker, err := credentials.NewWorker(ctx, db) + cacheWorker := cache.NewWorker(ctx, db) if err != nil { log.Fatalf("failed to create credentials worker: %+v", err) } - if err := credsWorker.Start(); err != nil { + if err := cacheWorker.Start(); err != nil { log.Fatalf("failed to start credentials worker: %+v", err) } @@ -370,7 +370,7 @@ func main() { <-ctx.Done() - if err := credsWorker.Stop(); err != nil { + if err := cacheWorker.Stop(); err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop credentials worker") } diff --git a/runner/pool/pool.go 
b/runner/pool/pool.go index 3e96d1e9..3cb8bff3 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -1595,13 +1595,6 @@ func (r *basePoolManager) cleanupOrphanedRunners(runners []*github.Runner) error } func (r *basePoolManager) Start() error { - // load pools in cache - pools, err := r.store.ListEntityPools(r.ctx, r.entity) - if err != nil { - return fmt.Errorf("failed to list pools: %w", err) - } - cache.ReplaceEntityPools(r.entity.ID, pools) - initialToolUpdate := make(chan struct{}, 1) go func() { slog.Info("running initial tool update") diff --git a/runner/pool/util.go b/runner/pool/util.go index d7b2c416..9b7b7f14 100644 --- a/runner/pool/util.go +++ b/runner/pool/util.go @@ -132,7 +132,5 @@ func composeWatcherFilters(entity params.GithubEntity) dbCommon.PayloadFilterFun watcher.WithEntityFilter(entity), // Watch for changes to the github credentials watcher.WithGithubCredentialsFilter(entity.Credentials), - // Watch for entity pool operations - watcher.WithEntityPoolFilter(entity), ) } diff --git a/runner/pool/watcher.go b/runner/pool/watcher.go index 61a1117c..7f05d93b 100644 --- a/runner/pool/watcher.go +++ b/runner/pool/watcher.go @@ -6,7 +6,6 @@ import ( "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm/cache" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" runnerCommon "github.com/cloudbase/garm/runner/common" @@ -122,23 +121,6 @@ func (r *basePoolManager) handleCredentialsUpdate(credentials params.GithubCrede r.mux.Unlock() } -func (r *basePoolManager) handleEntityPoolEvent(event common.ChangePayload) { - pool, ok := event.Payload.(params.Pool) - if !ok { - slog.ErrorContext(r.ctx, "failed to cast payload to pool") - return - } - - switch event.Operation { - case common.CreateOperation, common.UpdateOperation: - slog.DebugContext(r.ctx, "updating pool in cache", "pool_id", pool.ID) - cache.SetEntityPool(r.entity.ID, pool) - case common.DeleteOperation: - slog.DebugContext(r.ctx, "deleting pool from cache", "pool_id", pool.ID) - cache.DeleteEntityPool(r.entity.ID, pool.ID) - } -} - func (r *basePoolManager) handleWatcherEvent(event common.ChangePayload) { dbEntityType := common.DatabaseEntityType(r.entity.EntityType) switch event.EntityType { @@ -168,8 +150,6 @@ func (r *basePoolManager) handleWatcherEvent(event common.ChangePayload) { return } r.handleEntityUpdate(entityInfo, event.Operation) - case common.PoolEntityType: - r.handleEntityPoolEvent(event) } } diff --git a/workers/cache/cache.go b/workers/cache/cache.go new file mode 100644 index 00000000..3973e7c7 --- /dev/null +++ b/workers/cache/cache.go @@ -0,0 +1,338 @@ +package cache + +import ( + "context" + "fmt" + "log/slog" + "sync" + + "github.com/cloudbase/garm/cache" + "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/params" + garmUtil "github.com/cloudbase/garm/util" +) + +func NewWorker(ctx context.Context, store common.Store) *Worker { + consumerID := "cache" + ctx = garmUtil.WithSlogContext( + ctx, + slog.Any("worker", consumerID)) + + return &Worker{ + ctx: ctx, + store: store, + consumerID: consumerID, + quit: make(chan struct{}), + } +} + +type Worker struct { + ctx context.Context + consumerID string + + consumer common.Consumer + store common.Store + + mux sync.Mutex + running bool + quit chan struct{} +} + +func (w *Worker) setCacheForEntity(entityGetter params.EntityGetter, pools []params.Pool, scaleSets []params.ScaleSet) error { 
+ entity, err := entityGetter.GetEntity() + if err != nil { + return fmt.Errorf("getting entity: %w", err) + } + cache.SetEntity(entity) + var repoPools []params.Pool + var repoScaleSets []params.ScaleSet + + for _, pool := range pools { + if pool.RepoID == entity.ID { + repoPools = append(repoPools, pool) + } + } + + for _, scaleSet := range scaleSets { + if scaleSet.RepoID == entity.ID { + repoScaleSets = append(repoScaleSets, scaleSet) + } + } + + cache.ReplaceEntityPools(entity.ID, repoPools) + cache.ReplaceEntityScaleSets(entity.ID, repoScaleSets) + + return nil +} + +func (w *Worker) loadAllEntities() error { + pools, err := w.store.ListAllPools(w.ctx) + if err != nil { + return fmt.Errorf("listing pools: %w", err) + } + + scaleSets, err := w.store.ListAllScaleSets(w.ctx) + if err != nil { + return fmt.Errorf("listing scale sets: %w", err) + } + + repos, err := w.store.ListRepositories(w.ctx) + if err != nil { + return fmt.Errorf("listing repositories: %w", err) + } + + orgs, err := w.store.ListOrganizations(w.ctx) + if err != nil { + return fmt.Errorf("listing organizations: %w", err) + } + + enterprises, err := w.store.ListEnterprises(w.ctx) + if err != nil { + return fmt.Errorf("listing enterprises: %w", err) + } + + for _, repo := range repos { + if err := w.setCacheForEntity(repo, pools, scaleSets); err != nil { + return fmt.Errorf("setting cache for repo: %w", err) + } + } + + for _, org := range orgs { + if err := w.setCacheForEntity(org, pools, scaleSets); err != nil { + return fmt.Errorf("setting cache for org: %w", err) + } + } + + for _, enterprise := range enterprises { + if err := w.setCacheForEntity(enterprise, pools, scaleSets); err != nil { + return fmt.Errorf("setting cache for enterprise: %w", err) + } + } + + return nil +} + +func (w *Worker) loadAllInstances() error { + instances, err := w.store.ListAllInstances(w.ctx) + if err != nil { + return fmt.Errorf("listing instances: %w", err) + } + + for _, instance := range instances { + cache.SetInstanceCache(instance) + } + return nil +} + +func (w *Worker) loadAllCredentials() error { + creds, err := w.store.ListGithubCredentials(w.ctx) + if err != nil { + return fmt.Errorf("listing github credentials: %w", err) + } + + for _, cred := range creds { + cache.SetGithubCredentials(cred) + } + return nil +} + +func (w *Worker) Start() error { + slog.DebugContext(w.ctx, "starting cache worker") + w.mux.Lock() + defer w.mux.Unlock() + + if w.running { + return nil + } + + if err := w.loadAllEntities(); err != nil { + return fmt.Errorf("loading all entities: %w", err) + } + + if err := w.loadAllInstances(); err != nil { + return fmt.Errorf("loading all instances: %w", err) + } + + if err := w.loadAllCredentials(); err != nil { + return fmt.Errorf("loading all credentials: %w", err) + } + + consumer, err := watcher.RegisterConsumer( + w.ctx, w.consumerID, + watcher.WithAll()) + if err != nil { + return fmt.Errorf("registering consumer: %w", err) + } + w.consumer = consumer + w.running = true + w.quit = make(chan struct{}) + + go w.loop() + return nil +} + +func (w *Worker) Stop() error { + slog.DebugContext(w.ctx, "stopping cache worker") + w.mux.Lock() + defer w.mux.Unlock() + + if !w.running { + return nil + } + + w.consumer.Close() + w.running = false + close(w.quit) + return nil +} + +func (w *Worker) handleEntityEvent(entityGetter params.EntityGetter, op common.OperationType) { + entity, err := entityGetter.GetEntity() + if err != nil { + slog.DebugContext(w.ctx, "getting entity from event", "error", err) + return + } + 
switch op { + case common.CreateOperation, common.UpdateOperation: + cache.SetEntity(entity) + case common.DeleteOperation: + cache.DeleteEntity(entity.ID) + } +} + +func (w *Worker) handleRepositoryEvent(event common.ChangePayload) { + repo, ok := event.Payload.(params.Repository) + if !ok { + slog.DebugContext(w.ctx, "invalid payload type for repository event", "payload", event.Payload) + return + } + w.handleEntityEvent(repo, event.Operation) +} + +func (w *Worker) handleOrgEvent(event common.ChangePayload) { + org, ok := event.Payload.(params.Organization) + if !ok { + slog.DebugContext(w.ctx, "invalid payload type for org event", "payload", event.Payload) + return + } + w.handleEntityEvent(org, event.Operation) +} + +func (w *Worker) handleEnterpriseEvent(event common.ChangePayload) { + enterprise, ok := event.Payload.(params.Enterprise) + if !ok { + slog.DebugContext(w.ctx, "invalid payload type for enterprise event", "payload", event.Payload) + return + } + w.handleEntityEvent(enterprise, event.Operation) +} + +func (w *Worker) handlePoolEvent(event common.ChangePayload) { + pool, ok := event.Payload.(params.Pool) + if !ok { + slog.DebugContext(w.ctx, "invalid payload type for pool event", "payload", event.Payload) + return + } + entity, err := pool.GetEntity() + if err != nil { + slog.DebugContext(w.ctx, "getting entity from pool", "error", err) + return + } + + switch event.Operation { + case common.CreateOperation, common.UpdateOperation: + cache.SetEntityPool(entity.ID, pool) + case common.DeleteOperation: + cache.DeleteEntityPool(entity.ID, pool.ID) + } +} + +func (w *Worker) handleScaleSetEvent(event common.ChangePayload) { + scaleSet, ok := event.Payload.(params.ScaleSet) + if !ok { + slog.DebugContext(w.ctx, "invalid payload type for pool event", "payload", event.Payload) + return + } + entity, err := scaleSet.GetEntity() + if err != nil { + slog.DebugContext(w.ctx, "getting entity from pool", "error", err) + return + } + + switch event.Operation { + case common.CreateOperation, common.UpdateOperation: + cache.SetEntityScaleSet(entity.ID, scaleSet) + case common.DeleteOperation: + cache.DeleteEntityScaleSet(entity.ID, scaleSet.ID) + } +} + +func (w *Worker) handleInstanceEvent(event common.ChangePayload) { + instance, ok := event.Payload.(params.Instance) + if !ok { + slog.DebugContext(w.ctx, "invalid payload type for instance event", "payload", event.Payload) + return + } + switch event.Operation { + case common.CreateOperation, common.UpdateOperation: + cache.SetInstanceCache(instance) + case common.DeleteOperation: + cache.DeleteInstanceCache(instance.Name) + } +} + +func (w *Worker) handleCredentialsEvent(event common.ChangePayload) { + credentials, ok := event.Payload.(params.GithubCredentials) + if !ok { + slog.DebugContext(w.ctx, "invalid payload type for credentials event", "payload", event.Payload) + return + } + switch event.Operation { + case common.CreateOperation, common.UpdateOperation: + cache.SetGithubCredentials(credentials) + case common.DeleteOperation: + cache.DeleteGithubCredentials(credentials.ID) + } +} + +func (w *Worker) handleEvent(event common.ChangePayload) { + slog.DebugContext(w.ctx, "handling event", "event", event) + switch event.EntityType { + case common.PoolEntityType: + w.handlePoolEvent(event) + case common.ScaleSetEntityType: + w.handleScaleSetEvent(event) + case common.InstanceEntityType: + w.handleInstanceEvent(event) + case common.RepositoryEntityType: + w.handleRepositoryEvent(event) + case common.OrganizationEntityType: + 
w.handleOrgEvent(event) + case common.EnterpriseEntityType: + w.handleEnterpriseEvent(event) + case common.GithubCredentialsEntityType: + w.handleCredentialsEvent(event) + default: + slog.DebugContext(w.ctx, "unknown entity type", "entity_type", event.EntityType) + } +} + +func (w *Worker) loop() { + defer w.Stop() + for { + select { + case <-w.quit: + return + case event, ok := <-w.consumer.Watch(): + if !ok { + slog.InfoContext(w.ctx, "consumer channel closed") + return + } + w.handleEvent(event) + case <-w.ctx.Done(): + slog.DebugContext(w.ctx, "context done") + return + } + } +} diff --git a/workers/credentials/credentials.go b/workers/credentials/credentials.go deleted file mode 100644 index 7c590401..00000000 --- a/workers/credentials/credentials.go +++ /dev/null @@ -1,133 +0,0 @@ -package credentials - -import ( - "context" - "fmt" - "log/slog" - "sync" - - "github.com/cloudbase/garm/cache" - dbCommon "github.com/cloudbase/garm/database/common" - "github.com/cloudbase/garm/database/watcher" - "github.com/cloudbase/garm/params" - garmUtil "github.com/cloudbase/garm/util" -) - -func NewWorker(ctx context.Context, store dbCommon.Store) (*Worker, error) { - consumerID := "credentials-worker" - - ctx = garmUtil.WithSlogContext( - ctx, - slog.Any("worker", consumerID)) - - return &Worker{ - ctx: ctx, - consumerID: consumerID, - store: store, - running: false, - quit: make(chan struct{}), - credentials: make(map[uint]params.GithubCredentials), - }, nil -} - -// Worker is responsible for maintaining the credentials cache. -type Worker struct { - consumerID string - ctx context.Context - - consumer dbCommon.Consumer - store dbCommon.Store - - credentials map[uint]params.GithubCredentials - - running bool - quit chan struct{} - - mux sync.Mutex -} - -func (w *Worker) loadAllCredentials() error { - creds, err := w.store.ListGithubCredentials(w.ctx) - if err != nil { - return err - } - - for _, cred := range creds { - w.credentials[cred.ID] = cred - cache.SetGithubCredentials(cred) - } - - return nil -} - -func (w *Worker) Start() error { - w.mux.Lock() - defer w.mux.Unlock() - - if w.running { - return nil - } - slog.DebugContext(w.ctx, "starting credentials worker") - if err := w.loadAllCredentials(); err != nil { - return fmt.Errorf("loading credentials: %w", err) - } - - consumer, err := watcher.RegisterConsumer( - w.ctx, w.consumerID, - watcher.WithEntityTypeFilter(dbCommon.GithubCredentialsEntityType), - ) - if err != nil { - return fmt.Errorf("failed to create consumer for entity controller: %w", err) - } - w.consumer = consumer - - w.running = true - go w.loop() - return nil -} - -func (w *Worker) Stop() error { - w.mux.Lock() - defer w.mux.Unlock() - - if !w.running { - return nil - } - - close(w.quit) - w.running = false - - return nil -} - -func (w *Worker) loop() { - defer w.Stop() - - for { - select { - case <-w.quit: - return - case event, ok := <-w.consumer.Watch(): - if !ok { - slog.ErrorContext(w.ctx, "consumer channel closed") - return - } - creds, ok := event.Payload.(params.GithubCredentials) - if !ok { - slog.ErrorContext(w.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) - continue - } - w.mux.Lock() - switch event.Operation { - case dbCommon.DeleteOperation: - slog.DebugContext(w.ctx, "got delete operation") - delete(w.credentials, creds.ID) - cache.DeleteGithubCredentials(creds.ID) - default: - w.credentials[creds.ID] = creds - cache.SetGithubCredentials(creds) - } - w.mux.Unlock() - } - } -} diff --git 
a/workers/entity/controller.go b/workers/entity/controller.go index 066bdfe3..07fb38ce 100644 --- a/workers/entity/controller.go +++ b/workers/entity/controller.go @@ -7,7 +7,6 @@ import ( "sync" "github.com/cloudbase/garm/auth" - "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/runner/common" @@ -67,9 +66,6 @@ func (c *Controller) loadAllRepositories() error { return fmt.Errorf("starting worker: %w", err) } c.Entities[entity.ID] = worker - // take advantage of the fact that we're loading all entities - // and set the cache. - cache.SetEntity(entity) } return nil } @@ -94,9 +90,6 @@ func (c *Controller) loadAllOrganizations() error { return fmt.Errorf("starting worker: %w", err) } c.Entities[entity.ID] = worker - // take advantage of the fact that we're loading all entities - // and set the cache. - cache.SetEntity(entity) } return nil } @@ -121,9 +114,6 @@ func (c *Controller) loadAllEnterprises() error { return fmt.Errorf("starting worker: %w", err) } c.Entities[entity.ID] = worker - // take advantage of the fact that we're loading all entities - // and set the cache. - cache.SetEntity(entity) } return nil } diff --git a/workers/entity/controller_watcher.go b/workers/entity/controller_watcher.go index dcd6ee9a..ace63702 100644 --- a/workers/entity/controller_watcher.go +++ b/workers/entity/controller_watcher.go @@ -3,7 +3,6 @@ package entity import ( "log/slog" - "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" ) @@ -96,5 +95,4 @@ func (c *Controller) handleWatcherDeleteOperation(entityGetter params.EntityGett return } delete(c.Entities, entity.ID) - cache.DeleteEntity(entity.ID) } diff --git a/workers/provider/provider.go b/workers/provider/provider.go index 05a78c7e..b1ab1220 100644 --- a/workers/provider/provider.go +++ b/workers/provider/provider.go @@ -8,7 +8,6 @@ import ( commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/auth" - "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" @@ -81,7 +80,6 @@ func (p *Provider) loadAllRunners() error { } for _, runner := range runners { - cache.SetInstanceCache(runner) // Skip non scale set instances for now. This condition needs to be // removed once we replace the current pool manager. 
if runner.ScaleSetID == 0 { @@ -239,15 +237,6 @@ func (p *Provider) handleInstanceAdded(instance params.Instance) error { return nil } -func (p *Provider) updateInstanceCache(instance params.Instance, op dbCommon.OperationType) { - if op == dbCommon.DeleteOperation { - slog.DebugContext(p.ctx, "deleting instance from cache", "instance_name", instance.Name) - cache.DeleteInstanceCache(instance.Name) - return - } - cache.SetInstanceCache(instance) -} - func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) { p.mux.Lock() defer p.mux.Unlock() @@ -257,7 +246,6 @@ func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) { slog.ErrorContext(p.ctx, "invalid payload type", "payload_type", fmt.Sprintf("%T", event.Payload)) return } - p.updateInstanceCache(instance, event.Operation) if instance.ScaleSetID == 0 { slog.DebugContext(p.ctx, "skipping instance event for non scale set instance") @@ -267,14 +255,12 @@ func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) { slog.DebugContext(p.ctx, "handling instance event", "instance_name", instance.Name) switch event.Operation { case dbCommon.CreateOperation: - cache.SetInstanceCache(instance) slog.DebugContext(p.ctx, "got create operation") if err := p.handleInstanceAdded(instance); err != nil { slog.ErrorContext(p.ctx, "failed to handle instance added", "error", err) return } case dbCommon.UpdateOperation: - cache.SetInstanceCache(instance) slog.DebugContext(p.ctx, "got update operation") existingInstance, ok := p.runners[instance.Name] if !ok { @@ -300,7 +286,6 @@ func (p *Provider) handleInstanceEvent(event dbCommon.ChangePayload) { } } delete(p.runners, instance.Name) - cache.DeleteInstanceCache(instance.Name) default: slog.ErrorContext(p.ctx, "invalid operation type", "operation_type", event.Operation) return diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index e1758550..b6d61f54 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -91,8 +91,6 @@ func (c *Controller) loadAllScaleSets(cli common.GithubClient) error { } for _, sSet := range scaleSets { - cache.SetEntityScaleSet(c.Entity.ID, sSet) - slog.DebugContext(c.ctx, "loading scale set", "scale_set", sSet.ID) if err := c.handleScaleSetCreateOperation(sSet, cli); err != nil { slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set create operation") diff --git a/workers/scaleset/controller_watcher.go b/workers/scaleset/controller_watcher.go index 99fd4617..9d94c794 100644 --- a/workers/scaleset/controller_watcher.go +++ b/workers/scaleset/controller_watcher.go @@ -4,7 +4,6 @@ import ( "fmt" "log/slog" - "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" @@ -61,7 +60,6 @@ func (c *Controller) handleScaleSet(event dbCommon.ChangePayload) { func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet, ghCli common.GithubClient) error { c.mux.Lock() defer c.mux.Unlock() - cache.SetEntityScaleSet(c.Entity.ID, sSet) if _, ok := c.ScaleSets[sSet.ID]; ok { slog.DebugContext(c.ctx, "scale set already exists in worker list", "scale_set_id", sSet.ID) @@ -110,7 +108,6 @@ func (c *Controller) handleScaleSetDeleteOperation(sSet params.ScaleSet) error { return fmt.Errorf("stopping scale set worker: %w", err) } delete(c.ScaleSets, sSet.ID) - cache.DeleteEntityScaleSet(c.Entity.ID, sSet.ID) return nil } @@ -118,8 +115,6 @@ func (c *Controller) 
handleScaleSetUpdateOperation(sSet params.ScaleSet) error { c.mux.Lock() defer c.mux.Unlock() - cache.SetEntityScaleSet(c.Entity.ID, sSet) - set, ok := c.ScaleSets[sSet.ID] if !ok { // Some error may have occurred when the scale set was first created, so we @@ -146,7 +141,6 @@ func (c *Controller) handleCredentialsEvent(event dbCommon.ChangePayload) { c.mux.Lock() defer c.mux.Unlock() - cache.SetGithubCredentials(credentials) if c.Entity.Credentials.ID != credentials.ID { // stale update event. return @@ -185,7 +179,6 @@ func (c *Controller) handleEntityEvent(event dbCommon.ChangePayload) { } } c.Entity = entity - cache.SetEntity(c.Entity) default: slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation) return From 90200ffa71e66134f3e3fcea5ef9f3d1ba8e7453 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 7 May 2025 08:14:44 +0000 Subject: [PATCH 049/179] Do not clobber pools and scaleset caches on update Signed-off-by: Gabriel Adrian Samfira --- cache/entity_cache.go | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/cache/entity_cache.go b/cache/entity_cache.go index 5a71b184..3e3a1337 100644 --- a/cache/entity_cache.go +++ b/cache/entity_cache.go @@ -50,7 +50,7 @@ func (e *EntityCache) SetEntity(entity params.GithubEntity) { e.mux.Lock() defer e.mux.Unlock() - _, ok := e.entities[entity.ID] + cache, ok := e.entities[entity.ID] if !ok { e.entities[entity.ID] = EntityItem{ Entity: entity, @@ -59,10 +59,8 @@ func (e *EntityCache) SetEntity(entity params.GithubEntity) { } return } - - e.entities[entity.ID] = EntityItem{ - Entity: entity, - } + cache.Entity = entity + e.entities[entity.ID] = cache } func (e *EntityCache) ReplaceEntityPools(entityID string, pools []params.Pool) { From 52007f4ffad91cbbe9c36ca65c0b4ef3c442ee6e Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 7 May 2025 23:01:22 +0000 Subject: [PATCH 050/179] Add tools update routine and cleanup logging This change adds an update routine in the cache worker, for github tools downloads. 
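In broad strokes, each entity gets its own updater goroutine: it refreshes the cached tools well before they expire (the tools are typically valid for about an hour, so the refresh fires at roughly 40 minutes), retries after 5 minutes on failure, and adds jitter so multiple entities do not hit the API at the same instant. The sketch below only illustrates that scheduling rule; nextRefresh and jitter are invented names, and the actual updater also reacts to credential changes through a reset channel and draws its jitter from crypto/rand:

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // nextRefresh mirrors the scheduling rule: refresh well before the ~1h
    // expiry, retry quickly when an update fails.
    func nextRefresh(lastUpdate time.Time, failed bool) time.Time {
        if failed {
            return time.Now().UTC().Add(5 * time.Minute)
        }
        return lastUpdate.Add(40 * time.Minute)
    }

    // jitter spreads refreshes out so many entities do not hit the API at once.
    func jitter(max time.Duration) time.Duration {
        return time.Duration(rand.Int63n(int64(max)))
    }

    func main() {
        last := time.Now().UTC()
        next := nextRefresh(last, false).Add(jitter(5 * time.Minute))
        fmt.Println("next tools refresh around:", next)
    }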
Signed-off-by: Gabriel Adrian Samfira --- cache/cache_test.go | 10 +- cache/credentials_cache.go | 1 + cache/entity_cache.go | 60 ++++++++- cache/tools_cache.go | 12 +- workers/cache/cache.go | 57 +++++++-- workers/cache/tool_cache.go | 170 ++++++++++++++++++++++++++ workers/provider/instance_manager.go | 2 +- workers/scaleset/controller.go | 61 +-------- workers/scaleset/scaleset.go | 31 +++++ workers/scaleset/scaleset_listener.go | 2 +- 10 files changed, 320 insertions(+), 86 deletions(-) create mode 100644 workers/cache/tool_cache.go diff --git a/cache/cache_test.go b/cache/cache_test.go index 43b15953..7a977394 100644 --- a/cache/cache_test.go +++ b/cache/cache_test.go @@ -55,7 +55,7 @@ func (c *CacheTestSuite) TestSetCacheWorks() { c.Require().Len(githubToolsCache.entities, 0) SetGithubToolsCache(c.entity, tools) c.Require().Len(githubToolsCache.entities, 1) - cachedTools, ok := GetGithubToolsCache(c.entity) + cachedTools, ok := GetGithubToolsCache(c.entity.ID) c.Require().True(ok) c.Require().Len(cachedTools, 1) c.Require().Equal(tools[0].GetDownloadURL(), cachedTools[0].GetDownloadURL()) @@ -72,11 +72,11 @@ func (c *CacheTestSuite) TestTimedOutToolsCache() { c.Require().Len(githubToolsCache.entities, 0) SetGithubToolsCache(c.entity, tools) c.Require().Len(githubToolsCache.entities, 1) - entity := githubToolsCache.entities[c.entity.String()] + entity := githubToolsCache.entities[c.entity.ID] entity.updatedAt = entity.updatedAt.Add(-2 * time.Hour) - githubToolsCache.entities[c.entity.String()] = entity + githubToolsCache.entities[c.entity.ID] = entity - cachedTools, ok := GetGithubToolsCache(c.entity) + cachedTools, ok := GetGithubToolsCache(c.entity.ID) c.Require().False(ok) c.Require().Nil(cachedTools) } @@ -84,7 +84,7 @@ func (c *CacheTestSuite) TestTimedOutToolsCache() { func (c *CacheTestSuite) TestGetInexistentCache() { c.Require().NotNil(githubToolsCache) c.Require().Len(githubToolsCache.entities, 0) - cachedTools, ok := GetGithubToolsCache(c.entity) + cachedTools, ok := GetGithubToolsCache(c.entity.ID) c.Require().False(ok) c.Require().Nil(cachedTools) } diff --git a/cache/credentials_cache.go b/cache/credentials_cache.go index 731d1640..092d2e90 100644 --- a/cache/credentials_cache.go +++ b/cache/credentials_cache.go @@ -26,6 +26,7 @@ func (g *GithubCredentials) SetCredentials(credentials params.GithubCredentials) defer g.mux.Unlock() g.cache[credentials.ID] = credentials + UpdateCredentialsInAffectedEntities(credentials) } func (g *GithubCredentials) GetCredentials(id uint) (params.GithubCredentials, bool) { diff --git a/cache/entity_cache.go b/cache/entity_cache.go index 3e3a1337..0c549498 100644 --- a/cache/entity_cache.go +++ b/cache/entity_cache.go @@ -1,7 +1,6 @@ package cache import ( - "log/slog" "sync" "github.com/cloudbase/garm/params" @@ -28,15 +27,24 @@ type EntityCache struct { entities map[string]EntityItem } +func (e *EntityCache) UpdateCredentialsInAffectedEntities(creds params.GithubCredentials) { + e.mux.Lock() + defer e.mux.Unlock() + + for entityID, cache := range e.entities { + if cache.Entity.Credentials.ID == creds.ID { + cache.Entity.Credentials = creds + e.entities[entityID] = cache + } + } +} + func (e *EntityCache) GetEntity(entityID string) (params.GithubEntity, bool) { e.mux.Lock() defer e.mux.Unlock() if cache, ok := e.entities[entityID]; ok { - // Updating specific credential details will not update entity cache which - // uses those credentials. - // Entity credentials in the cache are only updated if you swap the creds - // on the entity. 
We get the updated credentials from the credentials cache. + // Get the credentials from the credentials cache. creds, ok := GetGithubCredentials(cache.Entity.Credentials.ID) if ok { cache.Entity.Credentials = creds @@ -173,7 +181,6 @@ func (e *EntityCache) FindPoolsMatchingAllTags(entityID string, tags []string) [ if cache, ok := e.entities[entityID]; ok { var pools []params.Pool - slog.Debug("Finding pools matching all tags", "entityID", entityID, "tags", tags, "pools", cache.Pools) for _, pool := range cache.Pools { if pool.HasRequiredLabels(tags) { pools = append(pools, pool) @@ -212,6 +219,35 @@ func (e *EntityCache) GetEntityScaleSets(entityID string) []params.ScaleSet { return nil } +func (e *EntityCache) GetEntitiesUsingGredentials(credsID uint) []params.GithubEntity { + e.mux.Lock() + defer e.mux.Unlock() + + var entities []params.GithubEntity + for _, cache := range e.entities { + if cache.Entity.Credentials.ID == credsID { + entities = append(entities, cache.Entity) + } + } + return entities +} + +func (e *EntityCache) GetAllEntities() []params.GithubEntity { + e.mux.Lock() + defer e.mux.Unlock() + + var entities []params.GithubEntity + for _, cache := range e.entities { + // Get the credentials from the credentials cache. + creds, ok := GetGithubCredentials(cache.Entity.Credentials.ID) + if ok { + cache.Entity.Credentials = creds + } + entities = append(entities, cache.Entity) + } + return entities +} + func GetEntity(entityID string) (params.GithubEntity, bool) { return entityCache.GetEntity(entityID) } @@ -267,3 +303,15 @@ func GetEntityPools(entityID string) []params.Pool { func GetEntityScaleSets(entityID string) []params.ScaleSet { return entityCache.GetEntityScaleSets(entityID) } + +func UpdateCredentialsInAffectedEntities(creds params.GithubCredentials) { + entityCache.UpdateCredentialsInAffectedEntities(creds) +} + +func GetEntitiesUsingGredentials(credsID uint) []params.GithubEntity { + return entityCache.GetEntitiesUsingGredentials(credsID) +} + +func GetAllEntities() []params.GithubEntity { + return entityCache.GetAllEntities() +} diff --git a/cache/tools_cache.go b/cache/tools_cache.go index 1960de38..233de2c1 100644 --- a/cache/tools_cache.go +++ b/cache/tools_cache.go @@ -29,14 +29,14 @@ type GithubToolsCache struct { entities map[string]GithubEntityTools } -func (g *GithubToolsCache) Get(entity params.GithubEntity) ([]commonParams.RunnerApplicationDownload, bool) { +func (g *GithubToolsCache) Get(entityID string) ([]commonParams.RunnerApplicationDownload, bool) { g.mux.Lock() defer g.mux.Unlock() - if cache, ok := g.entities[entity.String()]; ok { + if cache, ok := g.entities[entityID]; ok { if time.Since(cache.updatedAt) > 1*time.Hour { // Stale cache, remove it. - delete(g.entities, entity.String()) + delete(g.entities, entityID) return nil, false } return cache.tools, true @@ -48,7 +48,7 @@ func (g *GithubToolsCache) Set(entity params.GithubEntity, tools []commonParams. 
g.mux.Lock() defer g.mux.Unlock() - g.entities[entity.String()] = GithubEntityTools{ + g.entities[entity.ID] = GithubEntityTools{ updatedAt: time.Now(), entity: entity, tools: tools, @@ -59,6 +59,6 @@ func SetGithubToolsCache(entity params.GithubEntity, tools []commonParams.Runner githubToolsCache.Set(entity, tools) } -func GetGithubToolsCache(entity params.GithubEntity) ([]commonParams.RunnerApplicationDownload, bool) { - return githubToolsCache.Get(entity) +func GetGithubToolsCache(entityID string) ([]commonParams.RunnerApplicationDownload, bool) { + return githubToolsCache.Get(entityID) } diff --git a/workers/cache/cache.go b/workers/cache/cache.go index 3973e7c7..d19bbbaf 100644 --- a/workers/cache/cache.go +++ b/workers/cache/cache.go @@ -20,10 +20,11 @@ func NewWorker(ctx context.Context, store common.Store) *Worker { slog.Any("worker", consumerID)) return &Worker{ - ctx: ctx, - store: store, - consumerID: consumerID, - quit: make(chan struct{}), + ctx: ctx, + store: store, + consumerID: consumerID, + toolsWorkes: make(map[string]*toolsUpdater), + quit: make(chan struct{}), } } @@ -31,8 +32,9 @@ type Worker struct { ctx context.Context consumerID string - consumer common.Consumer - store common.Store + consumer common.Consumer + store common.Store + toolsWorkes map[string]*toolsUpdater mux sync.Mutex running bool @@ -110,6 +112,13 @@ func (w *Worker) loadAllEntities() error { } } + for _, entity := range cache.GetAllEntities() { + worker := newToolsUpdater(w.ctx, entity) + if err := worker.Start(); err != nil { + return fmt.Errorf("starting tools updater: %w", err) + } + w.toolsWorkes[entity.ID] = worker + } return nil } @@ -181,6 +190,11 @@ func (w *Worker) Stop() error { return nil } + for _, worker := range w.toolsWorkes { + if err := worker.Stop(); err != nil { + slog.ErrorContext(w.ctx, "stopping tools updater", "error", err) + } + } w.consumer.Close() w.running = false close(w.quit) @@ -195,9 +209,31 @@ func (w *Worker) handleEntityEvent(entityGetter params.EntityGetter, op common.O } switch op { case common.CreateOperation, common.UpdateOperation: + old, hasOld := cache.GetEntity(entity.ID) cache.SetEntity(entity) + worker, ok := w.toolsWorkes[entity.ID] + if !ok { + worker = newToolsUpdater(w.ctx, entity) + if err := worker.Start(); err != nil { + slog.ErrorContext(w.ctx, "starting tools updater", "error", err) + return + } + w.toolsWorkes[entity.ID] = worker + } else if hasOld { + // probably an update operation + if old.Credentials.ID != entity.Credentials.ID { + worker.Reset() + } + } case common.DeleteOperation: cache.DeleteEntity(entity.ID) + worker, ok := w.toolsWorkes[entity.ID] + if ok { + if err := worker.Stop(); err != nil { + slog.ErrorContext(w.ctx, "stopping tools updater", "error", err) + } + delete(w.toolsWorkes, entity.ID) + } } } @@ -291,13 +327,20 @@ func (w *Worker) handleCredentialsEvent(event common.ChangePayload) { switch event.Operation { case common.CreateOperation, common.UpdateOperation: cache.SetGithubCredentials(credentials) + entities := cache.GetEntitiesUsingGredentials(credentials.ID) + for _, entity := range entities { + worker, ok := w.toolsWorkes[entity.ID] + if ok { + worker.Reset() + } + } case common.DeleteOperation: cache.DeleteGithubCredentials(credentials.ID) } } func (w *Worker) handleEvent(event common.ChangePayload) { - slog.DebugContext(w.ctx, "handling event", "event", event) + slog.DebugContext(w.ctx, "handling event", "event_entity_type", event.EntityType, "event_operation", event.Operation) switch event.EntityType { case 
common.PoolEntityType: w.handlePoolEvent(event) diff --git a/workers/cache/tool_cache.go b/workers/cache/tool_cache.go new file mode 100644 index 00000000..6133580d --- /dev/null +++ b/workers/cache/tool_cache.go @@ -0,0 +1,170 @@ +package cache + +import ( + "context" + "crypto/rand" + "fmt" + "log/slog" + "math/big" + "sync" + "time" + + commonParams "github.com/cloudbase/garm-provider-common/params" + "github.com/cloudbase/garm/cache" + "github.com/cloudbase/garm/params" + garmUtil "github.com/cloudbase/garm/util" + "github.com/cloudbase/garm/util/github" +) + +func newToolsUpdater(ctx context.Context, entity params.GithubEntity) *toolsUpdater { + return &toolsUpdater{ + ctx: ctx, + entity: entity, + quit: make(chan struct{}), + } +} + +type toolsUpdater struct { + ctx context.Context + + entity params.GithubEntity + tools []commonParams.RunnerApplicationDownload + lastUpdate time.Time + + mux sync.Mutex + running bool + quit chan struct{} + + reset chan struct{} +} + +func (t *toolsUpdater) Start() error { + t.mux.Lock() + defer t.mux.Unlock() + + if t.running { + return nil + } + + t.running = true + t.quit = make(chan struct{}) + + go t.loop() + return nil +} + +func (t *toolsUpdater) Stop() error { + t.mux.Lock() + defer t.mux.Unlock() + + if !t.running { + return nil + } + + t.running = false + close(t.quit) + + return nil +} + +func (t *toolsUpdater) updateTools() error { + slog.DebugContext(t.ctx, "updating tools", "entity", t.entity.String()) + entity, ok := cache.GetEntity(t.entity.ID) + if !ok { + return fmt.Errorf("getting entity from cache: %s", t.entity.ID) + } + ghCli, err := github.Client(t.ctx, entity) + if err != nil { + return fmt.Errorf("getting github client: %w", err) + } + + tools, err := garmUtil.FetchTools(t.ctx, ghCli) + if err != nil { + return fmt.Errorf("fetching tools: %w", err) + } + t.lastUpdate = time.Now().UTC() + t.tools = tools + + slog.DebugContext(t.ctx, "updating tools cache", "entity", t.entity.String()) + cache.SetGithubToolsCache(entity, tools) + return nil +} + +func (t *toolsUpdater) Reset() { + t.mux.Lock() + defer t.mux.Unlock() + + if !t.running { + return + } + + if t.reset != nil { + close(t.reset) + t.reset = nil + } +} + +func (t *toolsUpdater) loop() { + defer t.Stop() + + // add some jitter. When spinning up multiple entities, we add + // jitter to prevent stampeeding herd. + randInt, err := rand.Int(rand.Reader, big.NewInt(3000)) + if err != nil { + randInt = big.NewInt(0) + } + time.Sleep(time.Duration(randInt.Int64()) * time.Millisecond) + + var resetTime time.Time + now := time.Now().UTC() + if now.After(t.lastUpdate.Add(40 * time.Minute)) { + if err := t.updateTools(); err != nil { + slog.ErrorContext(t.ctx, "initial tools update error", "error", err) + resetTime = now.Add(5 * time.Minute) + slog.ErrorContext(t.ctx, "initial tools update error", "error", err) + } else { + // Tools are usually valid for 1 hour. 
+                resetTime = t.lastUpdate.Add(40 * time.Minute)
+        }
+    }
+
+    for {
+        if t.reset == nil {
+            t.reset = make(chan struct{})
+        }
+        // add some jitter
+        randInt, err := rand.Int(rand.Reader, big.NewInt(300))
+        if err != nil {
+            randInt = big.NewInt(0)
+        }
+        timer := time.NewTimer(resetTime.Sub(now) + time.Duration(randInt.Int64())*time.Second)
+        select {
+        case <-t.quit:
+            slog.DebugContext(t.ctx, "stopping tools updater")
+            timer.Stop()
+            return
+        case <-timer.C:
+            slog.DebugContext(t.ctx, "updating tools")
+            now = time.Now().UTC()
+            if err := t.updateTools(); err != nil {
+                slog.ErrorContext(t.ctx, "updating tools", "error", err)
+                resetTime = now.Add(5 * time.Minute)
+            } else {
+                // Tools are usually valid for 1 hour.
+                resetTime = t.lastUpdate.Add(40 * time.Minute)
+            }
+        case <-t.reset:
+            slog.DebugContext(t.ctx, "resetting tools updater")
+            timer.Stop()
+            now = time.Now().UTC()
+            if err := t.updateTools(); err != nil {
+                slog.ErrorContext(t.ctx, "updating tools", "error", err)
+                resetTime = now.Add(5 * time.Minute)
+            } else {
+                // Tools are usually valid for 1 hour.
+                resetTime = t.lastUpdate.Add(40 * time.Minute)
+            }
+        }
+        timer.Stop()
+    }
+}
diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go
index dcb10257..37680cd0 100644
--- a/workers/provider/instance_manager.go
+++ b/workers/provider/instance_manager.go
@@ -148,7 +148,7 @@ func (i *instanceManager) handleCreateInstanceInProvider(instance params.Instanc
     if err != nil {
         return fmt.Errorf("creating instance token: %w", err)
     }
-    tools, ok := cache.GetGithubToolsCache(entity)
+    tools, ok := cache.GetGithubToolsCache(entity.ID)
     if !ok {
         return fmt.Errorf("tools not found in cache for entity %s", entity.String())
     }
diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go
index b6d61f54..41e7578c 100644
--- a/workers/scaleset/controller.go
+++ b/workers/scaleset/controller.go
@@ -2,7 +2,6 @@ package scaleset
 
 import (
     "context"
-    "errors"
     "fmt"
     "log/slog"
     "sync"
@@ -10,8 +9,6 @@ import (
 
     "golang.org/x/sync/errgroup"
 
-    runnerErrors "github.com/cloudbase/garm-provider-common/errors"
-    "github.com/cloudbase/garm/cache"
     dbCommon "github.com/cloudbase/garm/database/common"
     "github.com/cloudbase/garm/database/watcher"
     "github.com/cloudbase/garm/params"
@@ -76,8 +73,7 @@ type Controller struct {
     store     dbCommon.Store
     providers map[string]common.Provider
 
-    ghCli              common.GithubClient
-    forgeCredsAreValid bool
+    ghCli common.GithubClient
 
     mux     sync.Mutex
     running bool
@@ -163,29 +159,6 @@ func (c *Controller) Stop() error {
     return nil
 }
 
-func (c *Controller) updateTools() error {
-    c.mux.Lock()
-    defer c.mux.Unlock()
-
-    slog.DebugContext(c.ctx, "updating tools for entity", "entity", c.Entity.String())
-
-    tools, err := garmUtil.FetchTools(c.ctx, c.ghCli)
-    if err != nil {
-        slog.With(slog.Any("error", err)).ErrorContext(
-            c.ctx, "failed to update tools for entity", "entity", c.Entity.String())
-        if errors.Is(err, runnerErrors.ErrUnauthorized) {
-            // nolint:golangci-lint,godox
-            // TODO: block all scale sets
-            c.forgeCredsAreValid = false
-        }
-        return fmt.Errorf("failed to update tools for entity %s: %w", c.Entity.String(), err)
-    }
-    slog.DebugContext(c.ctx, "tools successfully updated for entity", "entity", c.Entity.String())
-    c.forgeCredsAreValid = true
-    cache.SetGithubToolsCache(c.Entity, tools)
-    return nil
-}
-
 // consolidateRunnerState will list all runners on GitHub for this entity, sort by
 // pool or scale set and pass those runners to the appropriate worker.
The worker will // then have the responsibility to cross check the runners from github with what it @@ -259,23 +232,10 @@ func (c *Controller) waitForErrorGroupOrContextCancelled(g *errgroup.Group) erro func (c *Controller) loop() { defer c.Stop() - updateToolsTicker := time.NewTicker(common.PoolToolUpdateInterval) - defer updateToolsTicker.Stop() consilidateTicker := time.NewTicker(common.PoolReapTimeoutInterval) defer consilidateTicker.Stop() - initialToolUpdate := make(chan struct{}, 1) - defer close(initialToolUpdate) - - go func() { - slog.InfoContext(c.ctx, "running initial tool update") - if err := c.updateTools(); err != nil { - slog.With(slog.Any("error", err)).Error("failed to update tools") - } - initialToolUpdate <- struct{}{} - }() - for { select { case payload, ok := <-c.consumer.Watch(): @@ -287,25 +247,6 @@ func (c *Controller) loop() { go c.handleWatcherEvent(payload) case <-c.ctx.Done(): return - case <-initialToolUpdate: - case _, ok := <-updateToolsTicker.C: - if !ok { - slog.InfoContext(c.ctx, "update tools ticker closed") - return - } - validCreds := c.forgeCredsAreValid - if err := c.updateTools(); err != nil { - if err := c.store.AddEntityEvent(c.ctx, c.Entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update tools: %q", err.Error()), 30); err != nil { - slog.With(slog.Any("error", err)).Error("failed to add entity event") - } - slog.With(slog.Any("error", err)).Error("failed to update tools") - continue - } - if validCreds != c.forgeCredsAreValid && c.forgeCredsAreValid { - if err := c.store.AddEntityEvent(c.ctx, c.Entity, params.StatusEvent, params.EventInfo, "tools updated successfully", 30); err != nil { - slog.With(slog.Any("error", err)).Error("failed to add entity event") - } - } case _, ok := <-consilidateTicker.C: if !ok { slog.InfoContext(c.ctx, "consolidate ticker closed") diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index 660bbe97..73d08c98 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -11,6 +11,7 @@ import ( runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm-provider-common/util" + "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/locking" @@ -769,6 +770,24 @@ func (w *Worker) handleScaleUp(target, current uint) { } } +func (w *Worker) waitForToolsOrCancel() (hasTools, stopped bool) { + ticker := time.NewTicker(1 * time.Second) + defer ticker.Stop() + select { + case <-ticker.C: + entity, err := w.scaleSet.GetEntity() + if err != nil { + slog.ErrorContext(w.ctx, "error getting entity", "error", err) + } + _, ok := cache.GetGithubToolsCache(entity.ID) + return ok, false + case <-w.quit: + return false, true + case <-w.ctx.Done(): + return false, true + } +} + func (w *Worker) handleScaleDown(target, current uint) { delta := current - target if delta <= 0 { @@ -880,7 +899,19 @@ func (w *Worker) handleAutoScale() { lastMsg = msg } } + for { + hasTools, stopped := w.waitForToolsOrCancel() + if stopped { + slog.DebugContext(w.ctx, "worker is stopped; exiting handleAutoScale") + return + } + + if !hasTools { + time.Sleep(1 * time.Second) + continue + } + select { case <-w.quit: return diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index 9f2087d7..7a521e46 100644 --- a/workers/scaleset/scaleset_listener.go +++ 
b/workers/scaleset/scaleset_listener.go @@ -109,7 +109,7 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage if err != nil { slog.ErrorContext(l.ctx, "getting jobs from body", "error", err) } - slog.InfoContext(l.ctx, "handling message", "message", msg, "body", body) + if msg.MessageID < l.lastMessageID { slog.DebugContext(l.ctx, "message is older than last message, ignoring") return From a38d72a01c47afe3eb4b335fc623f94f74d6a674 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 8 May 2025 08:09:54 +0000 Subject: [PATCH 051/179] Add runner group to the scale set list output Signed-off-by: Gabriel Adrian Samfira --- cmd/garm-cli/cmd/scalesets.go | 4 ++-- go.mod | 2 +- go.sum | 4 ++-- vendor/gorm.io/gorm/gorm.go | 4 +--- vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go | 3 ++- vendor/modules.txt | 2 +- 6 files changed, 9 insertions(+), 10 deletions(-) diff --git a/cmd/garm-cli/cmd/scalesets.go b/cmd/garm-cli/cmd/scalesets.go index 79486a0e..920b60cf 100644 --- a/cmd/garm-cli/cmd/scalesets.go +++ b/cmd/garm-cli/cmd/scalesets.go @@ -436,7 +436,7 @@ func formatScaleSets(scaleSets []params.ScaleSet) { return } t := table.NewWriter() - header := table.Row{"ID", "Scale Set Name", "Image", "Flavor", "Belongs to", "Level", "Enabled", "Runner Prefix", "Provider"} + header := table.Row{"ID", "Scale Set Name", "Image", "Flavor", "Belongs to", "Level", "Runner Group", "Enabled", "Runner Prefix", "Provider"} t.AppendHeader(header) for _, scaleSet := range scaleSets { @@ -454,7 +454,7 @@ func formatScaleSets(scaleSets []params.ScaleSet) { belongsTo = scaleSet.EnterpriseName level = entityTypeEnterprise } - t.AppendRow(table.Row{scaleSet.ID, scaleSet.Name, scaleSet.Image, scaleSet.Flavor, belongsTo, level, scaleSet.Enabled, scaleSet.GetRunnerPrefix(), scaleSet.ProviderName}) + t.AppendRow(table.Row{scaleSet.ID, scaleSet.Name, scaleSet.Image, scaleSet.Flavor, belongsTo, level, scaleSet.GitHubRunnerGroup, scaleSet.Enabled, scaleSet.GetRunnerPrefix(), scaleSet.ProviderName}) t.AppendSeparator() } fmt.Println(t.Render()) diff --git a/go.mod b/go.mod index db57a68b..a0b3901f 100644 --- a/go.mod +++ b/go.mod @@ -36,7 +36,7 @@ require ( gorm.io/datatypes v1.2.5 gorm.io/driver/mysql v1.5.7 gorm.io/driver/sqlite v1.5.7 - gorm.io/gorm v1.26.0 + gorm.io/gorm v1.26.1 ) require ( diff --git a/go.sum b/go.sum index 5ca7575d..3c9af9bb 100644 --- a/go.sum +++ b/go.sum @@ -229,5 +229,5 @@ gorm.io/driver/sqlite v1.5.7/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDa gorm.io/driver/sqlserver v1.5.4 h1:xA+Y1KDNspv79q43bPyjDMUgHoYHLhXYmdFcYPobg8g= gorm.io/driver/sqlserver v1.5.4/go.mod h1:+frZ/qYmuna11zHPlh5oc2O6ZA/lS88Keb0XSH1Zh/g= gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= -gorm.io/gorm v1.26.0 h1:9lqQVPG5aNNS6AyHdRiwScAVnXHg/L/Srzx55G5fOgs= -gorm.io/gorm v1.26.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= +gorm.io/gorm v1.26.1 h1:ghB2gUI9FkS46luZtn6DLZ0f6ooBJ5IbVej2ENFDjRw= +gorm.io/gorm v1.26.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/vendor/gorm.io/gorm/gorm.go b/vendor/gorm.io/gorm/gorm.go index d253736d..63a28b37 100644 --- a/vendor/gorm.io/gorm/gorm.go +++ b/vendor/gorm.io/gorm/gorm.go @@ -110,8 +110,6 @@ type DB struct { type Session struct { DryRun bool PrepareStmt bool - PrepareStmtMaxSize int - PrepareStmtTTL time.Duration NewDB bool Initialized bool SkipHooks bool @@ -275,7 +273,7 @@ func (db *DB) Session(config *Session) *DB { if v, ok := db.cacheStore.Load(preparedStmtDBKey); ok { 
preparedStmt = v.(*PreparedStmtDB) } else { - preparedStmt = NewPreparedStmtDB(db.ConnPool, config.PrepareStmtMaxSize, config.PrepareStmtTTL) + preparedStmt = NewPreparedStmtDB(db.ConnPool, db.PrepareStmtMaxSize, db.PrepareStmtTTL) db.cacheStore.Store(preparedStmtDBKey, preparedStmt) } diff --git a/vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go b/vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go index 7068419d..a82b2cf5 100644 --- a/vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go +++ b/vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go @@ -3,6 +3,7 @@ package stmt_store import ( "context" "database/sql" + "math" "sync" "time" @@ -73,7 +74,7 @@ type Store interface { // the cache can theoretically store as many elements as possible. // (1 << 63) - 1 is the maximum value that an int64 type can represent. const ( - defaultMaxSize = (1 << 63) - 1 + defaultMaxSize = math.MaxInt // defaultTTL defines the default time-to-live (TTL) for each cache entry. // When the TTL for cache entries is not specified, each cache entry will expire after 24 hours. defaultTTL = time.Hour * 24 diff --git a/vendor/modules.txt b/vendor/modules.txt index 9ca8e528..5cb70bb1 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -362,7 +362,7 @@ gorm.io/driver/mysql # gorm.io/driver/sqlite v1.5.7 ## explicit; go 1.20 gorm.io/driver/sqlite -# gorm.io/gorm v1.26.0 +# gorm.io/gorm v1.26.1 ## explicit; go 1.18 gorm.io/gorm gorm.io/gorm/callbacks From 2e9535530d4b43e8e06f692ca3b1a635980b56bf Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 8 May 2025 08:27:41 +0000 Subject: [PATCH 052/179] Fix entity update handler Signed-off-by: Gabriel Adrian Samfira --- workers/scaleset/controller.go | 2 +- workers/scaleset/controller_watcher.go | 23 +++++++++++++++++++---- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index 41e7578c..5d00471f 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -26,7 +26,7 @@ const ( ) func NewController(ctx context.Context, store dbCommon.Store, entity params.GithubEntity, providers map[string]common.Provider) (*Controller, error) { - consumerID := fmt.Sprintf("scaleset-worker-%s", entity.String()) + consumerID := fmt.Sprintf("scaleset-controller-%s", entity.String()) ctx = garmUtil.WithSlogContext( ctx, diff --git a/workers/scaleset/controller_watcher.go b/workers/scaleset/controller_watcher.go index 9d94c794..6702e0f0 100644 --- a/workers/scaleset/controller_watcher.go +++ b/workers/scaleset/controller_watcher.go @@ -31,7 +31,7 @@ func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) { func (c *Controller) handleScaleSet(event dbCommon.ChangePayload) { scaleSet, ok := event.Payload.(params.ScaleSet) if !ok { - slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + slog.ErrorContext(c.ctx, "invalid scale set payload for entity type", "entity_type", event.EntityType, "payload", event) return } @@ -131,7 +131,7 @@ func (c *Controller) handleScaleSetUpdateOperation(sSet params.ScaleSet) error { func (c *Controller) handleCredentialsEvent(event dbCommon.ChangePayload) { credentials, ok := event.Payload.(params.GithubCredentials) if !ok { - slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + slog.ErrorContext(c.ctx, "invalid credentials payload for entity type", "entity_type", event.EntityType, "payload", 
event) return } @@ -158,9 +158,24 @@ func (c *Controller) handleCredentialsEvent(event dbCommon.ChangePayload) { } func (c *Controller) handleEntityEvent(event dbCommon.ChangePayload) { - entity, ok := event.Payload.(params.GithubEntity) + var entityGetter params.EntityGetter + var ok bool + switch c.Entity.EntityType { + case params.GithubEntityTypeRepository: + entityGetter, ok = event.Payload.(params.Repository) + case params.GithubEntityTypeOrganization: + entityGetter, ok = event.Payload.(params.Organization) + case params.GithubEntityTypeEnterprise: + entityGetter, ok = event.Payload.(params.Enterprise) + } if !ok { - slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + slog.ErrorContext(c.ctx, "invalid entity payload for entity type", "entity_type", event.EntityType, "payload", event) + return + } + + entity, err := entityGetter.GetEntity() + if err != nil { + slog.ErrorContext(c.ctx, "invalid GitHub entity payload for entity type", "entity_type", event.EntityType, "payload", event) return } From 1a719567ff002b8e2d2f3ea3075fdb428c0240ca Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 8 May 2025 21:39:55 +0000 Subject: [PATCH 053/179] Add rate limit cache and fixes This change adds a loop that keeps a cache of credentials rate limits as reported by the github API. The cache is updated every 30 seconds and is purely informational for the user. This change also adds some caching improvements. Functions that return values from the cache as lists, will now sort by ID or creation date. Signed-off-by: Gabriel Adrian Samfira --- cache/credentials_cache.go | 33 +++++++++ cache/entity_cache.go | 44 ++++++++++++ cache/instance_cache.go | 3 + cache/util.go | 19 +++++ cmd/garm-cli/cmd/github_credentials.go | 6 ++ params/interfaces.go | 10 +++ params/params.go | 97 +++++++++++++++++++++++--- runner/github_credentials.go | 19 +++++ workers/cache/cache.go | 60 +++++++++++++--- workers/scaleset/controller.go | 8 +-- 10 files changed, 276 insertions(+), 23 deletions(-) create mode 100644 cache/util.go diff --git a/cache/credentials_cache.go b/cache/credentials_cache.go index 092d2e90..7cf65a03 100644 --- a/cache/credentials_cache.go +++ b/cache/credentials_cache.go @@ -21,6 +21,16 @@ type GithubCredentials struct { cache map[uint]params.GithubCredentials } +func (g *GithubCredentials) SetCredentialsRateLimit(credsID uint, rateLimit params.GithubRateLimit) { + g.mux.Lock() + defer g.mux.Unlock() + + if creds, ok := g.cache[credsID]; ok { + creds.RateLimit = rateLimit + g.cache[credsID] = creds + } +} + func (g *GithubCredentials) SetCredentials(credentials params.GithubCredentials) { g.mux.Lock() defer g.mux.Unlock() @@ -54,6 +64,21 @@ func (g *GithubCredentials) GetAllCredentials() []params.GithubCredentials { for _, cred := range g.cache { creds = append(creds, cred) } + + // Sort the credentials by ID + sortByID(creds) + return creds +} + +func (g *GithubCredentials) GetAllCredentialsAsMap() map[uint]params.GithubCredentials { + g.mux.Lock() + defer g.mux.Unlock() + + creds := make(map[uint]params.GithubCredentials, len(g.cache)) + for id, cred := range g.cache { + creds[id] = cred + } + return creds } @@ -72,3 +97,11 @@ func DeleteGithubCredentials(id uint) { func GetAllGithubCredentials() []params.GithubCredentials { return credentialsCache.GetAllCredentials() } + +func SetCredentialsRateLimit(credsID uint, rateLimit params.GithubRateLimit) { + credentialsCache.SetCredentialsRateLimit(credsID, rateLimit) +} + 
+func GetAllGithubCredentialsAsMap() map[uint]params.GithubCredentials { + return credentialsCache.GetAllCredentialsAsMap() +} diff --git a/cache/entity_cache.go b/cache/entity_cache.go index 0c549498..006f40db 100644 --- a/cache/entity_cache.go +++ b/cache/entity_cache.go @@ -186,6 +186,8 @@ func (e *EntityCache) FindPoolsMatchingAllTags(entityID string, tags []string) [ pools = append(pools, pool) } } + // Sort the pools by creation date. + sortByCreationDate(pools) return pools } return nil @@ -200,6 +202,8 @@ func (e *EntityCache) GetEntityPools(entityID string) []params.Pool { for _, pool := range cache.Pools { pools = append(pools, pool) } + // Sort the pools by creation date. + sortByCreationDate(pools) return pools } return nil @@ -214,6 +218,8 @@ func (e *EntityCache) GetEntityScaleSets(entityID string) []params.ScaleSet { for _, scaleSet := range cache.ScaleSets { scaleSets = append(scaleSets, scaleSet) } + // Sort the scale sets by creation date. + sortByID(scaleSets) return scaleSets } return nil @@ -229,6 +235,7 @@ func (e *EntityCache) GetEntitiesUsingGredentials(credsID uint) []params.GithubE entities = append(entities, cache.Entity) } } + sortByCreationDate(entities) return entities } @@ -245,9 +252,38 @@ func (e *EntityCache) GetAllEntities() []params.GithubEntity { } entities = append(entities, cache.Entity) } + sortByCreationDate(entities) return entities } +func (e *EntityCache) GetAllPools() []params.Pool { + e.mux.Lock() + defer e.mux.Unlock() + + var pools []params.Pool + for _, cache := range e.entities { + for _, pool := range cache.Pools { + pools = append(pools, pool) + } + } + sortByCreationDate(pools) + return pools +} + +func (e *EntityCache) GetAllScaleSets() []params.ScaleSet { + e.mux.Lock() + defer e.mux.Unlock() + + var scaleSets []params.ScaleSet + for _, cache := range e.entities { + for _, scaleSet := range cache.ScaleSets { + scaleSets = append(scaleSets, scaleSet) + } + } + sortByID(scaleSets) + return scaleSets +} + func GetEntity(entityID string) (params.GithubEntity, bool) { return entityCache.GetEntity(entityID) } @@ -315,3 +351,11 @@ func GetEntitiesUsingGredentials(credsID uint) []params.GithubEntity { func GetAllEntities() []params.GithubEntity { return entityCache.GetAllEntities() } + +func GetAllPools() []params.Pool { + return entityCache.GetAllPools() +} + +func GetAllScaleSets() []params.ScaleSet { + return entityCache.GetAllScaleSets() +} diff --git a/cache/instance_cache.go b/cache/instance_cache.go index 44f95ec2..b96db5e9 100644 --- a/cache/instance_cache.go +++ b/cache/instance_cache.go @@ -53,6 +53,7 @@ func (i *InstanceCache) GetAllInstances() []params.Instance { for _, instance := range i.cache { instances = append(instances, instance) } + sortByCreationDate(instances) return instances } @@ -66,6 +67,7 @@ func (i *InstanceCache) GetInstancesForPool(poolID string) []params.Instance { filteredInstances = append(filteredInstances, instance) } } + sortByCreationDate(filteredInstances) return filteredInstances } @@ -79,6 +81,7 @@ func (i *InstanceCache) GetInstancesForScaleSet(scaleSetID uint) []params.Instan filteredInstances = append(filteredInstances, instance) } } + sortByCreationDate(filteredInstances) return filteredInstances } diff --git a/cache/util.go b/cache/util.go new file mode 100644 index 00000000..f8769c65 --- /dev/null +++ b/cache/util.go @@ -0,0 +1,19 @@ +package cache + +import ( + "sort" + + "github.com/cloudbase/garm/params" +) + +func sortByID[T params.IDGetter](s []T) { + sort.Slice(s, func(i, j int) bool { + 
return s[i].GetID() < s[j].GetID() + }) +} + +func sortByCreationDate[T params.CreationDateGetter](s []T) { + sort.Slice(s, func(i, j int) bool { + return s[i].GetCreatedAt().Before(s[j].GetCreatedAt()) + }) +} diff --git a/cmd/garm-cli/cmd/github_credentials.go b/cmd/garm-cli/cmd/github_credentials.go index c4faec1a..bd3521bf 100644 --- a/cmd/garm-cli/cmd/github_credentials.go +++ b/cmd/garm-cli/cmd/github_credentials.go @@ -375,6 +375,8 @@ func formatOneGithubCredential(cred params.GithubCredentials) { header := table.Row{"Field", "Value"} t.AppendHeader(header) + resetMinutes := cred.RateLimit.ResetIn().Minutes() + t.AppendRow(table.Row{"ID", cred.ID}) t.AppendRow(table.Row{"Created At", cred.CreatedAt}) t.AppendRow(table.Row{"Updated At", cred.UpdatedAt}) @@ -385,6 +387,10 @@ func formatOneGithubCredential(cred params.GithubCredentials) { t.AppendRow(table.Row{"Upload URL", cred.UploadBaseURL}) t.AppendRow(table.Row{"Type", cred.AuthType}) t.AppendRow(table.Row{"Endpoint", cred.Endpoint.Name}) + if resetMinutes > 0 { + t.AppendRow(table.Row{"Remaining API requests", cred.RateLimit.Remaining}) + t.AppendRow(table.Row{"Rate limit reset", fmt.Sprintf("%d minutes", int64(resetMinutes))}) + } if len(cred.Repositories) > 0 { t.AppendRow(table.Row{"", ""}) diff --git a/params/interfaces.go b/params/interfaces.go index 95f02a9a..cd9b94ff 100644 --- a/params/interfaces.go +++ b/params/interfaces.go @@ -1,7 +1,17 @@ package params +import "time" + // EntityGetter is implemented by all github entities (repositories, organizations and enterprises). // It defines the GetEntity() function which returns a github entity. type EntityGetter interface { GetEntity() (GithubEntity, error) } + +type IDGetter interface { + GetID() uint +} + +type CreationDateGetter interface { + GetCreatedAt() time.Time +} diff --git a/params/params.go b/params/params.go index a15d2446..7636102f 100644 --- a/params/params.go +++ b/params/params.go @@ -252,6 +252,10 @@ type Instance struct { JitConfiguration map[string]string `json:"-"` } +func (i Instance) GetCreatedAt() time.Time { + return i.CreatedAt +} + func (i Instance) GetName() string { return i.Name } @@ -370,6 +374,22 @@ type Pool struct { Priority uint `json:"priority,omitempty"` } +func (p Pool) BelongsTo(entity GithubEntity) bool { + switch p.PoolType() { + case GithubEntityTypeRepository: + return p.RepoID == entity.ID + case GithubEntityTypeOrganization: + return p.OrgID == entity.ID + case GithubEntityTypeEnterprise: + return p.EnterpriseID == entity.ID + } + return false +} + +func (p Pool) GetCreatedAt() time.Time { + return p.CreatedAt +} + func (p Pool) MinIdleRunnersAsInt() int { if p.MinIdleRunners > math.MaxInt { return math.MaxInt @@ -493,6 +513,22 @@ type ScaleSet struct { LastMessageID int64 `json:"-"` } +func (p ScaleSet) BelongsTo(entity GithubEntity) bool { + switch p.ScaleSetType() { + case GithubEntityTypeRepository: + return p.RepoID == entity.ID + case GithubEntityTypeOrganization: + return p.OrgID == entity.ID + case GithubEntityTypeEnterprise: + return p.EnterpriseID == entity.ID + } + return false +} + +func (p ScaleSet) GetID() uint { + return p.ID +} + func (p ScaleSet) GetEntity() (GithubEntity, error) { switch p.ScaleSetType() { case GithubEntityTypeRepository: @@ -526,10 +562,6 @@ func (p *ScaleSet) ScaleSetType() GithubEntityType { return "" } -func (p ScaleSet) GetID() uint { - return p.ID -} - func (p *ScaleSet) RunnerTimeout() uint { if p.RunnerBootstrapTimeout == 0 { return appdefaults.DefaultRunnerBootstrapTimeout @@ -560,6 
+592,10 @@ type Repository struct { WebhookSecret string `json:"-"` } +func (r Repository) CreationDateGetter() time.Time { + return r.CreatedAt +} + func (r Repository) GetEntity() (GithubEntity, error) { if r.ID == "" { return GithubEntity{}, fmt.Errorf("repository has no ID") @@ -572,6 +608,7 @@ func (r Repository) GetEntity() (GithubEntity, error) { PoolBalancerType: r.PoolBalancerType, Credentials: r.Credentials, WebhookSecret: r.WebhookSecret, + CreatedAt: r.CreatedAt, }, nil } @@ -616,6 +653,10 @@ type Organization struct { WebhookSecret string `json:"-"` } +func (o Organization) GetCreatedAt() time.Time { + return o.CreatedAt +} + func (o Organization) GetEntity() (GithubEntity, error) { if o.ID == "" { return GithubEntity{}, fmt.Errorf("organization has no ID") @@ -627,6 +668,7 @@ func (o Organization) GetEntity() (GithubEntity, error) { WebhookSecret: o.WebhookSecret, PoolBalancerType: o.PoolBalancerType, Credentials: o.Credentials, + CreatedAt: o.CreatedAt, }, nil } @@ -667,6 +709,10 @@ type Enterprise struct { WebhookSecret string `json:"-"` } +func (e Enterprise) GetCreatedAt() time.Time { + return e.CreatedAt +} + func (e Enterprise) GetEntity() (GithubEntity, error) { if e.ID == "" { return GithubEntity{}, fmt.Errorf("enterprise has no ID") @@ -678,6 +724,7 @@ func (e Enterprise) GetEntity() (GithubEntity, error) { WebhookSecret: e.WebhookSecret, PoolBalancerType: e.PoolBalancerType, Credentials: e.Credentials, + CreatedAt: e.CreatedAt, }, nil } @@ -772,6 +819,24 @@ func (c *ControllerInfo) JobBackoff() time.Duration { return time.Duration(int64(c.MinimumJobAgeBackoff)) } +type GithubRateLimit struct { + Limit int `json:"limit,omitempty"` + Used int `json:"used,omitempty"` + Remaining int `json:"remaining,omitempty"` + Reset int64 `json:"reset,omitempty"` +} + +func (g GithubRateLimit) ResetIn() time.Duration { + return time.Until(g.ResetAt()) +} + +func (g GithubRateLimit) ResetAt() time.Time { + if g.Reset == 0 { + return time.Time{} + } + return time.Unix(g.Reset, 0) +} + type GithubCredentials struct { ID uint `json:"id,omitempty"` Name string `json:"name,omitempty"` @@ -782,17 +847,22 @@ type GithubCredentials struct { CABundle []byte `json:"ca_bundle,omitempty"` AuthType GithubAuthType `json:"auth-type,omitempty"` - Repositories []Repository `json:"repositories,omitempty"` - Organizations []Organization `json:"organizations,omitempty"` - Enterprises []Enterprise `json:"enterprises,omitempty"` - Endpoint GithubEndpoint `json:"endpoint,omitempty"` - CreatedAt time.Time `json:"created_at,omitempty"` - UpdatedAt time.Time `json:"updated_at,omitempty"` + Repositories []Repository `json:"repositories,omitempty"` + Organizations []Organization `json:"organizations,omitempty"` + Enterprises []Enterprise `json:"enterprises,omitempty"` + Endpoint GithubEndpoint `json:"endpoint,omitempty"` + CreatedAt time.Time `json:"created_at,omitempty"` + UpdatedAt time.Time `json:"updated_at,omitempty"` + RateLimit GithubRateLimit `json:"rate_limit,omitempty"` // Do not serialize sensitive info. 
CredentialsPayload []byte `json:"-"` } +func (g GithubCredentials) GetID() uint { + return g.ID +} + func (g GithubCredentials) GetHTTPClient(ctx context.Context) (*http.Client, error) { var roots *x509.CertPool if g.CABundle != nil { @@ -994,11 +1064,16 @@ type GithubEntity struct { EntityType GithubEntityType `json:"entity_type,omitempty"` Credentials GithubCredentials `json:"credentials,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` + CreatedAt time.Time `json:"created_at,omitempty"` WebhookSecret string `json:"-"` } -func (g *GithubEntity) GithubURL() string { +func (g GithubEntity) GetCreatedAt() time.Time { + return g.CreatedAt +} + +func (g GithubEntity) GithubURL() string { switch g.EntityType { case GithubEntityTypeRepository: return fmt.Sprintf("%s/%s/%s", g.Credentials.BaseURL, g.Owner, g.Name) diff --git a/runner/github_credentials.go b/runner/github_credentials.go index fbf9d330..7cd4e74c 100644 --- a/runner/github_credentials.go +++ b/runner/github_credentials.go @@ -7,6 +7,7 @@ import ( runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/cache" "github.com/cloudbase/garm/params" ) @@ -15,11 +16,24 @@ func (r *Runner) ListCredentials(ctx context.Context) ([]params.GithubCredential return nil, runnerErrors.ErrUnauthorized } + // Get the credentials from the store. The cache is always updated after the database successfully + // commits the transaction that created/updated the credentials. + // If we create a set of credentials then immediately after we call ListCredentials, + // there is a posibillity that not all creds will be in the cache. creds, err := r.store.ListGithubCredentials(ctx) if err != nil { return nil, errors.Wrap(err, "fetching github credentials") } + // If we do have cache, update the rate limit for each credential. The rate limits are queried + // every 30 seconds and set in cache. 
+ credsCache := cache.GetAllGithubCredentialsAsMap() + for idx, cred := range creds { + inCache, ok := credsCache[cred.ID] + if ok { + creds[idx].RateLimit = inCache.RateLimit + } + } return creds, nil } @@ -50,6 +64,11 @@ func (r *Runner) GetGithubCredentials(ctx context.Context, id uint) (params.Gith return params.GithubCredentials{}, errors.Wrap(err, "failed to get github credentials") } + cached, ok := cache.GetGithubCredentials((creds.ID)) + if ok { + creds.RateLimit = cached.RateLimit + } + return creds, nil } diff --git a/workers/cache/cache.go b/workers/cache/cache.go index d19bbbaf..315876d6 100644 --- a/workers/cache/cache.go +++ b/workers/cache/cache.go @@ -5,12 +5,14 @@ import ( "fmt" "log/slog" "sync" + "time" "github.com/cloudbase/garm/cache" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" garmUtil "github.com/cloudbase/garm/util" + "github.com/cloudbase/garm/util/github" ) func NewWorker(ctx context.Context, store common.Store) *Worker { @@ -47,23 +49,23 @@ func (w *Worker) setCacheForEntity(entityGetter params.EntityGetter, pools []par return fmt.Errorf("getting entity: %w", err) } cache.SetEntity(entity) - var repoPools []params.Pool - var repoScaleSets []params.ScaleSet + var entityPools []params.Pool + var entityScaleSets []params.ScaleSet for _, pool := range pools { - if pool.RepoID == entity.ID { - repoPools = append(repoPools, pool) + if pool.BelongsTo(entity) { + entityPools = append(entityPools, pool) } } for _, scaleSet := range scaleSets { - if scaleSet.RepoID == entity.ID { - repoScaleSets = append(repoScaleSets, scaleSet) + if scaleSet.BelongsTo(entity) { + entityScaleSets = append(entityScaleSets, scaleSet) } } - cache.ReplaceEntityPools(entity.ID, repoPools) - cache.ReplaceEntityScaleSets(entity.ID, repoScaleSets) + cache.ReplaceEntityPools(entity.ID, entityPools) + cache.ReplaceEntityScaleSets(entity.ID, entityScaleSets) return nil } @@ -178,6 +180,7 @@ func (w *Worker) Start() error { w.quit = make(chan struct{}) go w.loop() + go w.rateLimitLoop() return nil } @@ -379,3 +382,44 @@ func (w *Worker) loop() { } } } + +func (w *Worker) rateLimitLoop() { + defer w.Stop() + + ticker := time.NewTicker(30 * time.Second) + defer ticker.Stop() + + for { + select { + case <-w.quit: + return + case <-w.ctx.Done(): + slog.DebugContext(w.ctx, "context done") + return + case <-ticker.C: + // update credentials rate limits + for _, creds := range cache.GetAllGithubCredentials() { + rateCli, err := github.NewRateLimitClient(w.ctx, creds) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(w.ctx, "failed to create rate limit client") + continue + } + rateLimit, err := rateCli.RateLimit(w.ctx) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(w.ctx, "failed to get rate limit") + continue + } + if rateLimit != nil { + core := rateLimit.GetCore() + limit := params.GithubRateLimit{ + Limit: core.Limit, + Used: core.Used, + Remaining: core.Remaining, + Reset: core.Reset.Unix(), + } + cache.SetCredentialsRateLimit(creds.ID, limit) + } + } + } + } +} diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index 5d00471f..3b4287c2 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -233,8 +233,8 @@ func (c *Controller) waitForErrorGroupOrContextCancelled(g *errgroup.Group) erro func (c *Controller) loop() { defer c.Stop() - consilidateTicker := time.NewTicker(common.PoolReapTimeoutInterval) - defer consilidateTicker.Stop() + 
consolidateTicker := time.NewTicker(common.PoolReapTimeoutInterval) + defer consolidateTicker.Stop() for { select { @@ -244,10 +244,10 @@ func (c *Controller) loop() { return } slog.InfoContext(c.ctx, "received payload") - go c.handleWatcherEvent(payload) + c.handleWatcherEvent(payload) case <-c.ctx.Done(): return - case _, ok := <-consilidateTicker.C: + case _, ok := <-consolidateTicker.C: if !ok { slog.InfoContext(c.ctx, "consolidate ticker closed") return From 68183384dce1c74c2d568ac4125a3ffd96f1adfc Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 8 May 2025 22:19:02 +0000 Subject: [PATCH 054/179] Load entities in parallel This change uses an error group to load different DB resources in parallel. Signed-off-by: Gabriel Adrian Samfira --- cmd/garm-cli/cmd/github_credentials.go | 1 + workers/cache/cache.go | 54 ++++++++-- workers/entity/controller.go | 130 ++++++++++++++++--------- workers/entity/util.go | 23 +++++ workers/provider/provider.go | 24 ++++- workers/provider/util.go | 23 +++++ 6 files changed, 198 insertions(+), 57 deletions(-) diff --git a/cmd/garm-cli/cmd/github_credentials.go b/cmd/garm-cli/cmd/github_credentials.go index bd3521bf..2b2128d0 100644 --- a/cmd/garm-cli/cmd/github_credentials.go +++ b/cmd/garm-cli/cmd/github_credentials.go @@ -388,6 +388,7 @@ func formatOneGithubCredential(cred params.GithubCredentials) { t.AppendRow(table.Row{"Type", cred.AuthType}) t.AppendRow(table.Row{"Endpoint", cred.Endpoint.Name}) if resetMinutes > 0 { + t.AppendRow(table.Row{"", ""}) t.AppendRow(table.Row{"Remaining API requests", cred.RateLimit.Remaining}) t.AppendRow(table.Row{"Rate limit reset", fmt.Sprintf("%d minutes", int64(resetMinutes))}) } diff --git a/workers/cache/cache.go b/workers/cache/cache.go index 315876d6..13400a3a 100644 --- a/workers/cache/cache.go +++ b/workers/cache/cache.go @@ -7,6 +7,8 @@ import ( "sync" "time" + "golang.org/x/sync/errgroup" + "github.com/cloudbase/garm/cache" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" @@ -148,6 +150,27 @@ func (w *Worker) loadAllCredentials() error { return nil } +func (w *Worker) waitForErrorGroupOrContextCancelled(g *errgroup.Group) error { + if g == nil { + return nil + } + + done := make(chan error, 1) + go func() { + waitErr := g.Wait() + done <- waitErr + }() + + select { + case err := <-done: + return err + case <-w.ctx.Done(): + return w.ctx.Err() + case <-w.quit: + return nil + } +} + func (w *Worker) Start() error { slog.DebugContext(w.ctx, "starting cache worker") w.mux.Lock() @@ -157,16 +180,31 @@ func (w *Worker) Start() error { return nil } - if err := w.loadAllEntities(); err != nil { - return fmt.Errorf("loading all entities: %w", err) - } + g, _ := errgroup.WithContext(w.ctx) - if err := w.loadAllInstances(); err != nil { - return fmt.Errorf("loading all instances: %w", err) - } + g.Go(func() error { + if err := w.loadAllEntities(); err != nil { + return fmt.Errorf("loading all entities: %w", err) + } + return nil + }) - if err := w.loadAllCredentials(); err != nil { - return fmt.Errorf("loading all credentials: %w", err) + g.Go(func() error { + if err := w.loadAllInstances(); err != nil { + return fmt.Errorf("loading all instances: %w", err) + } + return nil + }) + + g.Go(func() error { + if err := w.loadAllCredentials(); err != nil { + return fmt.Errorf("loading all credentials: %w", err) + } + return nil + }) + + if err := w.waitForErrorGroupOrContextCancelled(g); err != nil { + return fmt.Errorf("waiting for error group: %w", err) } consumer, 
err := watcher.RegisterConsumer( diff --git a/workers/entity/controller.go b/workers/entity/controller.go index 07fb38ce..db353f0e 100644 --- a/workers/entity/controller.go +++ b/workers/entity/controller.go @@ -6,6 +6,8 @@ import ( "log/slog" "sync" + "golang.org/x/sync/errgroup" + "github.com/cloudbase/garm/auth" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" @@ -53,19 +55,26 @@ func (c *Controller) loadAllRepositories() error { return fmt.Errorf("fetching repositories: %w", err) } + g, _ := errgroup.WithContext(c.ctx) for _, repo := range repos { - entity, err := repo.GetEntity() - if err != nil { - return fmt.Errorf("getting entity: %w", err) - } - worker, err := NewWorker(c.ctx, c.store, entity, c.providers) - if err != nil { - return fmt.Errorf("creating worker: %w", err) - } - if err := worker.Start(); err != nil { - return fmt.Errorf("starting worker: %w", err) - } - c.Entities[entity.ID] = worker + g.Go(func() error { + entity, err := repo.GetEntity() + if err != nil { + return fmt.Errorf("getting entity: %w", err) + } + worker, err := NewWorker(c.ctx, c.store, entity, c.providers) + if err != nil { + return fmt.Errorf("creating worker: %w", err) + } + if err := worker.Start(); err != nil { + return fmt.Errorf("starting worker: %w", err) + } + c.Entities[entity.ID] = worker + return nil + }) + } + if err := c.waitForErrorGroupOrContextCancelled(g); err != nil { + return fmt.Errorf("waiting for error group: %w", err) } return nil } @@ -77,19 +86,27 @@ func (c *Controller) loadAllOrganizations() error { if err != nil { return fmt.Errorf("fetching organizations: %w", err) } + + g, _ := errgroup.WithContext(c.ctx) for _, org := range orgs { - entity, err := org.GetEntity() - if err != nil { - return fmt.Errorf("getting entity: %w", err) - } - worker, err := NewWorker(c.ctx, c.store, entity, c.providers) - if err != nil { - return fmt.Errorf("creating worker: %w", err) - } - if err := worker.Start(); err != nil { - return fmt.Errorf("starting worker: %w", err) - } - c.Entities[entity.ID] = worker + g.Go(func() error { + entity, err := org.GetEntity() + if err != nil { + return fmt.Errorf("getting entity: %w", err) + } + worker, err := NewWorker(c.ctx, c.store, entity, c.providers) + if err != nil { + return fmt.Errorf("creating worker: %w", err) + } + if err := worker.Start(); err != nil { + return fmt.Errorf("starting worker: %w", err) + } + c.Entities[entity.ID] = worker + return nil + }) + } + if err := c.waitForErrorGroupOrContextCancelled(g); err != nil { + return fmt.Errorf("waiting for error group: %w", err) } return nil } @@ -101,19 +118,28 @@ func (c *Controller) loadAllEnterprises() error { if err != nil { return fmt.Errorf("fetching enterprises: %w", err) } + + g, _ := errgroup.WithContext(c.ctx) + for _, enterprise := range enterprises { - entity, err := enterprise.GetEntity() - if err != nil { - return fmt.Errorf("getting entity: %w", err) - } - worker, err := NewWorker(c.ctx, c.store, entity, c.providers) - if err != nil { - return fmt.Errorf("creating worker: %w", err) - } - if err := worker.Start(); err != nil { - return fmt.Errorf("starting worker: %w", err) - } - c.Entities[entity.ID] = worker + g.Go(func() error { + entity, err := enterprise.GetEntity() + if err != nil { + return fmt.Errorf("getting entity: %w", err) + } + worker, err := NewWorker(c.ctx, c.store, entity, c.providers) + if err != nil { + return fmt.Errorf("creating worker: %w", err) + } + if err := worker.Start(); err != nil { + return 
fmt.Errorf("starting worker: %w", err) + } + c.Entities[entity.ID] = worker + return nil + }) + } + if err := c.waitForErrorGroupOrContextCancelled(g); err != nil { + return fmt.Errorf("waiting for error group: %w", err) } return nil } @@ -126,14 +152,30 @@ func (c *Controller) Start() error { } c.mux.Unlock() - if err := c.loadAllEnterprises(); err != nil { - return fmt.Errorf("loading enterprises: %w", err) - } - if err := c.loadAllOrganizations(); err != nil { - return fmt.Errorf("loading organizations: %w", err) - } - if err := c.loadAllRepositories(); err != nil { - return fmt.Errorf("loading repositories: %w", err) + g, _ := errgroup.WithContext(c.ctx) + g.Go(func() error { + if err := c.loadAllEnterprises(); err != nil { + return fmt.Errorf("loading enterprises: %w", err) + } + return nil + }) + + g.Go(func() error { + if err := c.loadAllOrganizations(); err != nil { + return fmt.Errorf("loading organizations: %w", err) + } + return nil + }) + + g.Go(func() error { + if err := c.loadAllRepositories(); err != nil { + return fmt.Errorf("loading repositories: %w", err) + } + return nil + }) + + if err := c.waitForErrorGroupOrContextCancelled(g); err != nil { + return fmt.Errorf("waiting for error group: %w", err) } consumer, err := watcher.RegisterConsumer( diff --git a/workers/entity/util.go b/workers/entity/util.go index 28b9f955..4912beba 100644 --- a/workers/entity/util.go +++ b/workers/entity/util.go @@ -1,6 +1,8 @@ package entity import ( + "golang.org/x/sync/errgroup" + dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" @@ -33,3 +35,24 @@ func composeWorkerWatcherFilters(entity params.GithubEntity) dbCommon.PayloadFil ), ) } + +func (c *Controller) waitForErrorGroupOrContextCancelled(g *errgroup.Group) error { + if g == nil { + return nil + } + + done := make(chan error, 1) + go func() { + waitErr := g.Wait() + done <- waitErr + }() + + select { + case err := <-done: + return err + case <-c.ctx.Done(): + return c.ctx.Err() + case <-c.quit: + return nil + } +} diff --git a/workers/provider/provider.go b/workers/provider/provider.go index b1ab1220..ffc5183d 100644 --- a/workers/provider/provider.go +++ b/workers/provider/provider.go @@ -6,6 +6,8 @@ import ( "log/slog" "sync" + "golang.org/x/sync/errgroup" + commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/auth" dbCommon "github.com/cloudbase/garm/database/common" @@ -131,12 +133,24 @@ func (p *Provider) Start() error { return nil } - if err := p.loadAllScaleSets(); err != nil { - return fmt.Errorf("loading all scale sets: %w", err) - } + g, _ := errgroup.WithContext(p.ctx) - if err := p.loadAllRunners(); err != nil { - return fmt.Errorf("loading all runners: %w", err) + g.Go(func() error { + if err := p.loadAllScaleSets(); err != nil { + return fmt.Errorf("loading all scale sets: %w", err) + } + return nil + }) + + g.Go(func() error { + if err := p.loadAllRunners(); err != nil { + return fmt.Errorf("loading all runners: %w", err) + } + return nil + }) + + if err := p.waitForErrorGroupOrContextCancelled(g); err != nil { + return fmt.Errorf("waiting for error group: %w", err) } consumer, err := watcher.RegisterConsumer( diff --git a/workers/provider/util.go b/workers/provider/util.go index 8cd33525..ca2626c0 100644 --- a/workers/provider/util.go +++ b/workers/provider/util.go @@ -1,6 +1,8 @@ package provider import ( + "golang.org/x/sync/errgroup" + dbCommon "github.com/cloudbase/garm/database/common" 
"github.com/cloudbase/garm/database/watcher" ) @@ -11,3 +13,24 @@ func composeProviderWatcher() dbCommon.PayloadFilterFunc { watcher.WithEntityTypeFilter(dbCommon.ScaleSetEntityType), ) } + +func (p *Provider) waitForErrorGroupOrContextCancelled(g *errgroup.Group) error { + if g == nil { + return nil + } + + done := make(chan error, 1) + go func() { + waitErr := g.Wait() + done <- waitErr + }() + + select { + case err := <-done: + return err + case <-p.ctx.Done(): + return p.ctx.Err() + case <-p.quit: + return nil + } +} From 979c07adbec9d7ca59144f13d69752355b97d286 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 9 May 2025 21:28:29 +0000 Subject: [PATCH 055/179] Add some info about scale sets Signed-off-by: Gabriel Adrian Samfira --- README.md | 38 ++++++++++++-------- doc/scalesets.md | 93 ++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 117 insertions(+), 14 deletions(-) create mode 100644 doc/scalesets.md diff --git a/README.md b/README.md index 4411834c..69f4ee6f 100644 --- a/README.md +++ b/README.md @@ -4,16 +4,17 @@ -- [About GARM](#about-garm) -- [Join us on slack](#join-us-on-slack) -- [Installing](#installing) - - [Quickstart](#quickstart) - - [Installing on Kubernetes](#installing-on-kubernetes) -- [Using GARM](#using-garm) -- [Supported providers](#supported-providers) - - [Installing external providers](#installing-external-providers) -- [Optimizing your runners](#optimizing-your-runners) -- [Write your own provider](#write-your-own-provider) +- [GitHub Actions Runner Manager GARM](#github-actions-runner-manager-garm) + - [About GARM](#about-garm) + - [Join us on slack](#join-us-on-slack) + - [Installing](#installing) + - [Quickstart](#quickstart) + - [Installing on Kubernetes](#installing-on-kubernetes) + - [Using GARM](#using-garm) + - [Supported providers](#supported-providers) + - [Installing external providers](#installing-external-providers) + - [Optimizing your runners](#optimizing-your-runners) + - [Write your own provider](#write-your-own-provider) @@ -23,19 +24,28 @@ Welcome to GARM! GARM enables you to create and automatically maintain pools of [self-hosted GitHub runners](https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners), with auto-scaling that can be used inside your github workflow runs. -The goal of ```GARM``` is to be simple to set up, simple to configure and simple to use. The server itself is a single binary that can run on any GNU/Linux machine without any other requirements other than the providers you want to enable in your setup. It is intended to be easy to deploy in any environment and can create runners in virtually any system you can write a provider for. There is no complicated setup process and no extremely complex concepts to understand. Once set up, it's meant to stay out of your way. +The goal of ```GARM``` is to be simple to set up, simple to configure and simple to use. The server itself is a single binary that can run on any GNU/Linux machine without any other requirements other than the providers you want to enable in your setup. It is intended to be easy to deploy in any environment and can create runners in virtually any system you can write a provider for (if one does not alreay exist). There is no complicated setup process and no extremely complex concepts to understand. Once set up, it's meant to stay out of your way. 
-GARM supports creating pools in either GitHub itself or in your own deployment of [GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@3.10/admin/overview/about-github-enterprise-server). For instructions on how to use ```GARM``` with GHE, see the [credentials](/doc/github_credentials.md) section of the documentation.
+GARM supports creating pools and scale sets in either GitHub itself or in your own deployment of [GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@3.10/admin/overview/about-github-enterprise-server). For instructions on how to use ```GARM``` with GHE, see the [credentials](/doc/github_credentials.md) section of the documentation.
 
-Through the use of providers, `GARM` can create runners in a variety of environments using the same `GARM` instance. Whether you want to create pools of runners in your OpenStack cloud, your Azure cloud or your Kubernetes cluster, that is easily achieved by just installing the appropriate providers, configuring them in `GARM` and creating pools that use them. You can create zero-runner pools for instances with high costs (large VMs, GPU enabled instances, etc) and have them spin up on demand, or you can create large pools of eagerly created k8s backed runners that can be used for your CI/CD pipelines at a moment's notice. You can mix them up and create pools in any combination of providers or resource allocations you want.
+Through the use of providers, `GARM` can create runners in a variety of environments using the same `GARM` instance. Whether you want to create runners in your OpenStack cloud, your Azure cloud or your Kubernetes cluster, that is easily achieved by installing the appropriate providers, configuring them in `GARM` and creating pools that use them. You can create zero-runner pools for instances with high costs (large VMs, GPU enabled instances, etc) and have them spin up on demand, or you can create large pools of eagerly created k8s backed runners that can be used for your CI/CD pipelines at a moment's notice. You can mix them up and create pools in any combination of providers or resource allocations you want.
 
-Here is a brief architectural diagram of how GARM reacts to workflows triggered in GitHub (click the image to see a larger version):
+GARM supports two modes of operation:
+
+* Pools
+* Scale sets
+
+Here is a brief architectural diagram of how pools work and how GARM reacts to workflows triggered in GitHub (click the image to see a larger version):
 
 ![GARM architecture diagram](/doc/images/garm-light.drawio.svg?raw=true#gh-light-mode-only)
 
 ![GARM architecture diagram](/doc/images/garm-dark.drawio.svg?raw=true#gh-dark-mode-only)
 
+**Scale sets** work differently. While pools (as they are defined in GARM) rely on webhooks to know when a job was started, and GARM needs to internally make the right decision about which pool should handle that runner, scale sets have a lot of the scheduling and decision-making logic done in GitHub itself.
+
 :warning: **Important note**: The README and documentation in the `main` branch are relevant to the not yet released code that is present in `main`. Following the documentation from the `main` branch for a stable release of GARM, may lead to errors. To view the documentation for the latest stable release, please switch to the appropriate tag. For information about setting up `v0.1.5`, please refer to the [v0.1.5 tag](https://github.com/cloudbase/garm/tree/v0.1.5).
+:warning: **Important note**: The `main` branch holds the latest code and is not guaranteed to be stable. If you are looking for a stable release, please check the releases page. If you plan to use the `main` branch, please do so on a new instance. Do not upgrade from a stable release to `main`. + ## Join us on slack Whether you're running into issues or just want to drop by and say "hi", feel free to [join us on slack](https://communityinviter.com/apps/garm-hq/garm). diff --git a/doc/scalesets.md b/doc/scalesets.md new file mode 100644 index 00000000..2bbd1a8e --- /dev/null +++ b/doc/scalesets.md @@ -0,0 +1,93 @@ +# Scale Sets + + + +- [Scale Sets](#scale-sets) + - [Create a new scale set](#create-a-new-scale-set) + - [Scale Set vs Pool](#scale-set-vs-pool) + + + +GARM supports [scale sets](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners-with-actions-runner-controller/deploying-runner-scale-sets-with-actions-runner-controller). This new mode of operation was added by GitHub to enable more efficient scheduling of runners using their own ARC (Actions Runner Controller) project. The APIs for enabling scale sets are not yet public and the scale set functionlity itself is not terribly well documented outside the context of ARC, but it can be implemented in third party auto scalers. + +In this document we will focus on how scale sets work, how they are different than pools and how to manage them. + +We'll start with detailing how to create a scale set. + +## Create a new scale set + +Creating a scale set is identical to [creating a pool](/doc/using_garm.md#creating-a-runner-pool), but instead of adding labels to a scale set, it takes a name. We'll assume you already have a provider enabled and you have added a repo, org or enterprise to GARM. 
+ +```bash +ubuntu@garm:~$ garm-cli repo ls ++--------------------------------------+-----------+--------------+------------+------------------+--------------------+------------------+ +| ID | OWNER | NAME | ENDPOINT | CREDENTIALS NAME | POOL BALANCER TYPE | POOL MGR RUNNING | ++--------------------------------------+-----------+--------------+------------+------------------+--------------------+------------------+ +| 84a5e82f-7ab1-427f-8ee0-4569b922296c | gsamfira | garm-testing | github.com | gabriel-samfira | roundrobin | true | ++--------------------------------------+-----------+--------------+------------+------------------+--------------------+------------------+ +``` + +List providers: + +```bash +ubuntu@garm:~$ garm-cli provider list ++--------------+---------------------------------+----------+ +| NAME | DESCRIPTION | TYPE | ++--------------+---------------------------------+----------+ +| incus | Incus external provider | external | ++--------------+---------------------------------+----------+ +| azure | azure provider | external | ++--------------+---------------------------------+----------+ +| aws_ec2 | Amazon EC2 provider | external | ++--------------+---------------------------------+----------+ +``` + +Create a new scale set: + +```bash +garm-cli scaleset add \ + --repo 84a5e82f-7ab1-427f-8ee0-4569b922296c \ + --provider-name incus \ + --image ubuntu:22.04 \ + --name garm-scale-set \ + --flavor default \ + --enabled true \ + --min-idle-runners=0 \ + --max-runners=20 ++--------------------------+-----------------------+ +| FIELD | VALUE | ++--------------------------+-----------------------+ +| ID | 8 | +| Scale Set ID | 14 | +| Scale Name | garm-scale-set | +| Provider Name | incus | +| Image | ubuntu:22.04 | +| Flavor | default | +| OS Type | linux | +| OS Architecture | amd64 | +| Max Runners | 20 | +| Min Idle Runners | 0 | +| Runner Bootstrap Timeout | 20 | +| Belongs to | gsamfira/garm-testing | +| Level | repo | +| Enabled | true | +| Runner Prefix | garm | +| Extra specs | | +| GitHub Runner Group | Default | ++--------------------------+-----------------------+ +``` + +That's it. You now have a scale set created, ready to accept jobs. + +## Scale Set vs Pool + +Scale sets are a new way of managing runners. They were introduced by GitHub to enable more efficient scheduling of runners using their own Actions Runner Controller (ARC) project. Scale sets are meant to reduce API calls, improve reliability of message deliveries and improve efficiency of runner management. While webhooks work great most of the time, under heavy load, they may not fire or they may fire while the auto scaler is offline. If webhooks are fired while GARM is down, we will never know about those jobs unless we query the current workflow runs. + +Listing workflow runs is not feisable for orgs or enterprises, as that would mean listing all repos withing an org then for each repository, listing all workflow runs. This gets worse for enterprises. Scale sets on the other hand allows GARM to subscribe to a message queue and get messages just for that scale set over HTTP long poll. + +Advantages of scale sets over pools: + +* No more need to install a webhook, reducing your security footprint. +* Scheduling is done by GitHub. GARM receives runner requests from GitHub and GARM can choose to acquire those jobs or leave them for some other scaler. +* Easier use of runner groups. 
While GARM supports runner groups, github currently [does not send the group name](https://github.com/orgs/community/discussions/158000) as part of webhooks in `queued` state. This prevents GARM (or any other auto scaler) to efficiently schedule runners to pools that have runner groups set. But given that in the case of scale sets, GitHub schedules the runners to the scaleset itself, we can efficiently create runners in certain runner groups. +* scale set names must be unique within a runner group \ No newline at end of file From 41700cd958f691ee5c81d4cd139f95252d3822bf Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sat, 10 May 2025 06:14:38 +0000 Subject: [PATCH 056/179] Slight rewording Signed-off-by: Gabriel Adrian Samfira --- doc/scalesets.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/scalesets.md b/doc/scalesets.md index 2bbd1a8e..867f8f61 100644 --- a/doc/scalesets.md +++ b/doc/scalesets.md @@ -81,7 +81,7 @@ That's it. You now have a scale set created, ready to accept jobs. ## Scale Set vs Pool -Scale sets are a new way of managing runners. They were introduced by GitHub to enable more efficient scheduling of runners using their own Actions Runner Controller (ARC) project. Scale sets are meant to reduce API calls, improve reliability of message deliveries and improve efficiency of runner management. While webhooks work great most of the time, under heavy load, they may not fire or they may fire while the auto scaler is offline. If webhooks are fired while GARM is down, we will never know about those jobs unless we query the current workflow runs. +Scale sets are a new way of managing runners. They were introduced by GitHub to enable more efficient scheduling of runners. Scale sets are meant to reduce API calls, improve reliability of message deliveries to the auto scaler and improve efficiency of runner scheduling. While webhooks work great most of the time, under heavy load, they may not fire or they may fire while the auto scaler is offline, leading to lost messages. If webhooks are fired while GARM is down, we will never know about those jobs unless we query the current workflow runs. Listing workflow runs is not feisable for orgs or enterprises, as that would mean listing all repos withing an org then for each repository, listing all workflow runs. This gets worse for enterprises. Scale sets on the other hand allows GARM to subscribe to a message queue and get messages just for that scale set over HTTP long poll. 
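
The scale set documentation above describes GARM subscribing to a message queue and receiving messages for a scale set over HTTP long poll. As a rough illustration of that pattern only — not GARM's actual scale set client — the sketch below shows the general shape of such a consumer loop. The session URL, bearer token and `message` fields are assumptions invented for the example.

```go
// longpoll_sketch.go: a minimal, self-contained illustration of an HTTP
// long-poll consumer. The endpoint, token and message fields are made up
// for the example and do not reflect GARM's real scale set API.
package main

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

type message struct {
	MessageID   int64  `json:"messageId"`
	MessageType string `json:"messageType"`
	Body        string `json:"body"`
}

// pollOnce issues a single long-poll request. The server holds the request
// open until a message is available or its own timeout expires, in which
// case it answers 204 No Content and the caller simply polls again.
func pollOnce(ctx context.Context, cli *http.Client, sessionURL, token string, lastID int64) (*message, error) {
	url := fmt.Sprintf("%s?lastMessageId=%d", sessionURL, lastID)
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := cli.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode == http.StatusNoContent {
		// Long poll timed out without a message; not an error.
		return nil, nil
	}
	var msg message
	if err := json.NewDecoder(resp.Body).Decode(&msg); err != nil {
		return nil, err
	}
	return &msg, nil
}

// consume loops forever, processing messages as they arrive and remembering
// the last message ID so a restart can resume where it left off.
func consume(ctx context.Context, sessionURL, token string) error {
	// The client timeout must be longer than the server's long-poll hold time.
	cli := &http.Client{Timeout: 70 * time.Second}
	var lastID int64
	for {
		select {
		case <-ctx.Done():
			return ctx.Err()
		default:
		}
		msg, err := pollOnce(ctx, cli, sessionURL, token, lastID)
		if err != nil {
			time.Sleep(5 * time.Second) // back off on transient errors
			continue
		}
		if msg == nil {
			continue
		}
		fmt.Printf("message %d (%s): %s\n", msg.MessageID, msg.MessageType, msg.Body)
		lastID = msg.MessageID
	}
}

func main() {
	_ = consume(context.Background(), "https://example.com/scaleset-sessions/1/messages", "REPLACE_ME")
}
```

Two details carry most of the weight in this pattern: the HTTP client timeout has to exceed the server's long-poll hold time, and the consumer resumes from the last message ID it processed, which is what lets it pick up messages that arrived while it was offline.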
From ef676488b70f2bc18067db9b0d14a64b6516e119 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sat, 10 May 2025 20:08:51 +0000 Subject: [PATCH 057/179] Use cache for github client Signed-off-by: Gabriel Adrian Samfira --- cache/github_client.go | 47 +++++++++++ workers/entity/util.go | 18 ++++ workers/entity/worker.go | 109 ++++++++++++++++++++++++- workers/entity/worker_watcher.go | 16 ++++ workers/pools/controller.go | 3 + workers/scaleset/controller.go | 76 ++--------------- workers/scaleset/controller_watcher.go | 68 +-------------- workers/scaleset/interfaces.go | 2 +- workers/scaleset/scaleset.go | 76 ++++++++--------- workers/scaleset/scaleset_helper.go | 19 ++++- workers/scaleset/scaleset_listener.go | 20 ++++- workers/scaleset/util.go | 15 ---- 12 files changed, 277 insertions(+), 192 deletions(-) create mode 100644 cache/github_client.go create mode 100644 workers/pools/controller.go diff --git a/cache/github_client.go b/cache/github_client.go new file mode 100644 index 00000000..0126e45b --- /dev/null +++ b/cache/github_client.go @@ -0,0 +1,47 @@ +package cache + +import ( + "sync" + + "github.com/cloudbase/garm/runner/common" +) + +var ghClientCache *GithubClientCache + +type GithubClientCache struct { + mux sync.Mutex + + cache map[string]common.GithubClient +} + +func init() { + clientCache := &GithubClientCache{ + cache: make(map[string]common.GithubClient), + } + ghClientCache = clientCache +} + +func (g *GithubClientCache) SetClient(entityID string, client common.GithubClient) { + g.mux.Lock() + defer g.mux.Unlock() + + g.cache[entityID] = client +} + +func (g *GithubClientCache) GetClient(entityID string) (common.GithubClient, bool) { + g.mux.Lock() + defer g.mux.Unlock() + + if client, ok := g.cache[entityID]; ok { + return client, true + } + return nil, false +} + +func SetGithubClient(entityID string, client common.GithubClient) { + ghClientCache.SetClient(entityID, client) +} + +func GetGithubClient(entityID string) (common.GithubClient, bool) { + return ghClientCache.GetClient(entityID) +} diff --git a/workers/entity/util.go b/workers/entity/util.go index 4912beba..95c9b2cc 100644 --- a/workers/entity/util.go +++ b/workers/entity/util.go @@ -1,6 +1,8 @@ package entity import ( + "strings" + "golang.org/x/sync/errgroup" dbCommon "github.com/cloudbase/garm/database/common" @@ -8,6 +10,13 @@ import ( "github.com/cloudbase/garm/params" ) +const ( + // These are duplicated until we decide if we move the pool manager to the new + // worker flow. 
+ poolIDLabelprefix = "runner-pool-id:" + controllerLabelPrefix = "runner-controller-id:" +) + func composeControllerWatcherFilters() dbCommon.PayloadFilterFunc { return watcher.WithAll( watcher.WithAny( @@ -56,3 +65,12 @@ func (c *Controller) waitForErrorGroupOrContextCancelled(g *errgroup.Group) erro return nil } } + +func poolIDFromLabels(runner params.RunnerReference) string { + for _, lbl := range runner.Labels { + if strings.HasPrefix(lbl.Name, poolIDLabelprefix) { + return lbl.Name[len(poolIDLabelprefix):] + } + } + return "" +} diff --git a/workers/entity/worker.go b/workers/entity/worker.go index 8aebb747..fcfd2a60 100644 --- a/workers/entity/worker.go +++ b/workers/entity/worker.go @@ -5,13 +5,18 @@ import ( "fmt" "log/slog" "sync" + "time" + "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" garmUtil "github.com/cloudbase/garm/util" + "github.com/cloudbase/garm/util/github" + "github.com/cloudbase/garm/util/github/scalesets" "github.com/cloudbase/garm/workers/scaleset" + "golang.org/x/sync/errgroup" ) func NewWorker(ctx context.Context, store dbCommon.Store, entity params.GithubEntity, providers map[string]common.Provider) (*Worker, error) { @@ -36,6 +41,7 @@ type Worker struct { consumer dbCommon.Consumer store dbCommon.Store + ghCli common.GithubClient Entity params.GithubEntity providers map[string]common.Provider @@ -71,6 +77,13 @@ func (w *Worker) Start() (err error) { w.mux.Lock() defer w.mux.Unlock() + ghCli, err := github.Client(w.ctx, w.Entity) + if err != nil { + return fmt.Errorf("creating github client: %w", err) + } + w.ghCli = ghCli + cache.SetGithubClient(w.Entity.ID, ghCli) + scaleSetController, err := scaleset.NewController(w.ctx, w.store, w.Entity, w.providers) if err != nil { return fmt.Errorf("creating scale set controller: %w", err) @@ -100,16 +113,110 @@ func (w *Worker) Start() (err error) { w.quit = make(chan struct{}) go w.loop() + go w.consolidateRunnerLoop() return nil } +// consolidateRunnerState will list all runners on GitHub for this entity, sort by +// pool or scale set and pass those runners to the appropriate controller (pools or scale sets). +// The controller will then pass along to their respective workers the list of runners +// they should be responsible for. The workers will then cross check the current state +// from github with their local state and reconcile any differences. This cleans up +// any runners that have been removed out of band in either the provider or github. +func (w *Worker) consolidateRunnerState() error { + scaleSetCli, err := scalesets.NewClient(w.ghCli) + if err != nil { + return fmt.Errorf("creating scaleset client: %w", err) + } + // Client is scoped to the current entity. Only runners in a repo/org/enterprise + // will be listed. 
+ runners, err := scaleSetCli.ListAllRunners(w.ctx) + if err != nil { + return fmt.Errorf("listing runners: %w", err) + } + + byPoolID := make(map[string][]params.RunnerReference) + byScaleSetID := make(map[int][]params.RunnerReference) + for _, runner := range runners.RunnerReferences { + if runner.RunnerScaleSetID != 0 { + byScaleSetID[runner.RunnerScaleSetID] = append(byScaleSetID[runner.RunnerScaleSetID], runner) + } else { + poolID := poolIDFromLabels(runner) + if poolID == "" { + continue + } + byPoolID[poolID] = append(byPoolID[poolID], runner) + } + } + + g, ctx := errgroup.WithContext(w.ctx) + g.Go(func() error { + slog.DebugContext(ctx, "consolidating scale set runners", "entity", w.Entity.String(), "runners", runners) + if err := w.scaleSetController.ConsolidateRunnerState(byScaleSetID); err != nil { + return fmt.Errorf("consolidating runners for scale set: %w", err) + } + return nil + }) + + if err := w.waitForErrorGroupOrContextCancelled(g); err != nil { + return fmt.Errorf("waiting for error group: %w", err) + } + return nil +} + +func (w *Worker) waitForErrorGroupOrContextCancelled(g *errgroup.Group) error { + if g == nil { + return nil + } + + done := make(chan error, 1) + go func() { + waitErr := g.Wait() + done <- waitErr + }() + + select { + case err := <-done: + return err + case <-w.ctx.Done(): + return w.ctx.Err() + case <-w.quit: + return nil + } +} + +func (w *Worker) consolidateRunnerLoop() { + ticker := time.NewTicker(common.PoolReapTimeoutInterval) + defer ticker.Stop() + + for { + select { + case _, ok := <-ticker.C: + if !ok { + slog.InfoContext(w.ctx, "consolidate ticker closed") + return + } + if err := w.consolidateRunnerState(); err != nil { + if err := w.store.AddEntityEvent(w.ctx, w.Entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to consolidate runner state: %q", err.Error()), 30); err != nil { + slog.With(slog.Any("error", err)).Error("failed to add entity event") + } + slog.With(slog.Any("error", err)).Error("failed to consolidate runner state") + } + case <-w.ctx.Done(): + return + case <-w.quit: + return + } + } +} + func (w *Worker) loop() { defer w.Stop() for { select { case payload := <-w.consumer.Watch(): slog.InfoContext(w.ctx, "received payload") - go w.handleWorkerWatcherEvent(payload) + w.handleWorkerWatcherEvent(payload) case <-w.ctx.Done(): return case <-w.quit: diff --git a/workers/entity/worker_watcher.go b/workers/entity/worker_watcher.go index 4ce83ddf..be0b6b3c 100644 --- a/workers/entity/worker_watcher.go +++ b/workers/entity/worker_watcher.go @@ -3,8 +3,10 @@ package entity import ( "log/slog" + "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" + "github.com/cloudbase/garm/util/github" ) func (w *Worker) handleWorkerWatcherEvent(event dbCommon.ChangePayload) { @@ -46,6 +48,13 @@ func (w *Worker) handleEntityEventPayload(event dbCommon.ChangePayload) { // credentials were swapped on the entity. We need to recompose the watcher // filters. 
w.consumer.SetFilters(composeWorkerWatcherFilters(entity)) + ghCli, err := github.Client(w.ctx, entity) + if err != nil { + slog.ErrorContext(w.ctx, "creating github client", "entity_id", entity.ID, "error", err) + return + } + w.ghCli = ghCli + cache.SetGithubClient(entity.ID, ghCli) } w.Entity = entity default: @@ -72,6 +81,13 @@ func (w *Worker) handleEntityCredentialsEventPayload(event dbCommon.ChangePayloa return } w.Entity.Credentials = credentials + ghCli, err := github.Client(w.ctx, w.Entity) + if err != nil { + slog.ErrorContext(w.ctx, "creating github client", "entity_id", w.Entity.ID, "error", err) + return + } + w.ghCli = ghCli + cache.SetGithubClient(w.Entity.ID, ghCli) default: slog.ErrorContext(w.ctx, "invalid operation type", "operation_type", event.Operation) } diff --git a/workers/pools/controller.go b/workers/pools/controller.go new file mode 100644 index 00000000..458766a9 --- /dev/null +++ b/workers/pools/controller.go @@ -0,0 +1,3 @@ +package pools + + diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index 3b4287c2..6e3170a0 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -5,7 +5,6 @@ import ( "fmt" "log/slog" "sync" - "time" "golang.org/x/sync/errgroup" @@ -14,15 +13,6 @@ import ( "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" garmUtil "github.com/cloudbase/garm/util" - "github.com/cloudbase/garm/util/github" - "github.com/cloudbase/garm/util/github/scalesets" -) - -const ( - // These are duplicated until we decide if we move the pool manager to the new - // worker flow. - poolIDLabelprefix = "runner-pool-id:" - controllerLabelPrefix = "runner-controller-id:" ) func NewController(ctx context.Context, store dbCommon.Store, entity params.GithubEntity, providers map[string]common.Provider) (*Controller, error) { @@ -73,14 +63,12 @@ type Controller struct { store dbCommon.Store providers map[string]common.Provider - ghCli common.GithubClient - mux sync.Mutex running bool quit chan struct{} } -func (c *Controller) loadAllScaleSets(cli common.GithubClient) error { +func (c *Controller) loadAllScaleSets() error { scaleSets, err := c.store.ListEntityScaleSets(c.ctx, c.Entity) if err != nil { return fmt.Errorf("listing scale sets: %w", err) @@ -88,7 +76,7 @@ func (c *Controller) loadAllScaleSets(cli common.GithubClient) error { for _, sSet := range scaleSets { slog.DebugContext(c.ctx, "loading scale set", "scale_set", sSet.ID) - if err := c.handleScaleSetCreateOperation(sSet, cli); err != nil { + if err := c.handleScaleSetCreateOperation(sSet); err != nil { slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set create operation") continue } @@ -105,13 +93,8 @@ func (c *Controller) Start() (err error) { } c.mux.Unlock() - ghCli, err := github.Client(c.ctx, c.Entity) - if err != nil { - return fmt.Errorf("creating github client: %w", err) - } - slog.DebugContext(c.ctx, "loaging scale sets", "entity", c.Entity.String()) - if err := c.loadAllScaleSets(ghCli); err != nil { + if err := c.loadAllScaleSets(); err != nil { return fmt.Errorf("loading all scale sets: %w", err) } @@ -124,7 +107,6 @@ func (c *Controller) Start() (err error) { } c.mux.Lock() - c.ghCli = ghCli c.consumer = consumer c.running = true c.quit = make(chan struct{}) @@ -159,39 +141,11 @@ func (c *Controller) Stop() error { return nil } -// consolidateRunnerState will list all runners on GitHub for this entity, sort by -// pool or scale set and pass those runners to the appropriate worker. 
The worker will -// then have the responsibility to cross check the runners from github with what it -// knows should be true from the database. Any inconsistency needs to be handled. -// If we have an offline runner in github but no database entry for it, we remove the -// runner from github. If we have a runner that is active in the provider but does not -// exist in github, we remove it from the provider and the database. -func (c *Controller) consolidateRunnerState() error { - scaleSetCli, err := scalesets.NewClient(c.ghCli) - if err != nil { - return fmt.Errorf("creating scaleset client: %w", err) - } - // Client is scoped to the current entity. Only runners in a repo/org/enterprise - // will be listed. - runners, err := scaleSetCli.ListAllRunners(c.ctx) - if err != nil { - return fmt.Errorf("listing runners: %w", err) - } - - byPoolID := make(map[string][]params.RunnerReference) - byScaleSetID := make(map[int][]params.RunnerReference) - for _, runner := range runners.RunnerReferences { - if runner.RunnerScaleSetID != 0 { - byScaleSetID[runner.RunnerScaleSetID] = append(byScaleSetID[runner.RunnerScaleSetID], runner) - } else { - poolID := poolIDFromLabels(runner) - if poolID == "" { - continue - } - byPoolID[poolID] = append(byPoolID[poolID], runner) - } - } - +// ConsolidateRunnerState will send a list of existing github runners to each scale set worker. +// The scale set worker will then need to cross check the existing runners in Github with the sate +// in the database. Any inconsistencies will b reconciliated. This cleans up any manually removed +// runners in either github or the providers. +func (c *Controller) ConsolidateRunnerState(byScaleSetID map[int][]params.RunnerReference) error { g, ctx := errgroup.WithContext(c.ctx) for _, scaleSet := range c.ScaleSets { runners := byScaleSetID[scaleSet.scaleSet.ScaleSetID] @@ -233,9 +187,6 @@ func (c *Controller) waitForErrorGroupOrContextCancelled(g *errgroup.Group) erro func (c *Controller) loop() { defer c.Stop() - consolidateTicker := time.NewTicker(common.PoolReapTimeoutInterval) - defer consolidateTicker.Stop() - for { select { case payload, ok := <-c.consumer.Watch(): @@ -247,17 +198,6 @@ func (c *Controller) loop() { c.handleWatcherEvent(payload) case <-c.ctx.Done(): return - case _, ok := <-consolidateTicker.C: - if !ok { - slog.InfoContext(c.ctx, "consolidate ticker closed") - return - } - if err := c.consolidateRunnerState(); err != nil { - if err := c.store.AddEntityEvent(c.ctx, c.Entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to consolidate runner state: %q", err.Error()), 30); err != nil { - slog.With(slog.Any("error", err)).Error("failed to add entity event") - } - slog.With(slog.Any("error", err)).Error("failed to consolidate runner state") - } case <-c.quit: return } diff --git a/workers/scaleset/controller_watcher.go b/workers/scaleset/controller_watcher.go index 6702e0f0..ec4771fc 100644 --- a/workers/scaleset/controller_watcher.go +++ b/workers/scaleset/controller_watcher.go @@ -6,8 +6,6 @@ import ( dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" - "github.com/cloudbase/garm/runner/common" - "github.com/cloudbase/garm/util/github" ) func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) { @@ -19,9 +17,6 @@ func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) { case entityType: slog.DebugContext(c.ctx, "got entity payload event") c.handleEntityEvent(event) - case dbCommon.GithubCredentialsEntityType: - 
slog.DebugContext(c.ctx, "got github credentials payload event") - c.handleCredentialsEvent(event) default: slog.ErrorContext(c.ctx, "invalid entity type", "entity_type", event.EntityType) return @@ -38,7 +33,7 @@ func (c *Controller) handleScaleSet(event dbCommon.ChangePayload) { switch event.Operation { case dbCommon.CreateOperation: slog.DebugContext(c.ctx, "got create operation for scale set", "scale_set_id", scaleSet.ID, "scale_set_name", scaleSet.Name) - if err := c.handleScaleSetCreateOperation(scaleSet, c.ghCli); err != nil { + if err := c.handleScaleSetCreateOperation(scaleSet); err != nil { slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to handle scale set create operation") } case dbCommon.UpdateOperation: @@ -57,7 +52,7 @@ func (c *Controller) handleScaleSet(event dbCommon.ChangePayload) { } } -func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet, ghCli common.GithubClient) error { +func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet) error { c.mux.Lock() defer c.mux.Unlock() @@ -74,7 +69,7 @@ func (c *Controller) handleScaleSetCreateOperation(sSet params.ScaleSet, ghCli c return fmt.Errorf("provider %s not found for scale set %s", sSet.ProviderName, sSet.Name) } - worker, err := NewWorker(c.ctx, c.store, sSet, provider, ghCli) + worker, err := NewWorker(c.ctx, c.store, sSet, provider) if err != nil { return fmt.Errorf("creating scale set worker: %w", err) } @@ -120,7 +115,7 @@ func (c *Controller) handleScaleSetUpdateOperation(sSet params.ScaleSet) error { // Some error may have occurred when the scale set was first created, so we // attempt to create it after the user updated the scale set, hopefully // fixing the reason for the failure. - return c.handleScaleSetCreateOperation(sSet, c.ghCli) + return c.handleScaleSetCreateOperation(sSet) } set.scaleSet = sSet c.ScaleSets[sSet.ID] = set @@ -128,35 +123,6 @@ func (c *Controller) handleScaleSetUpdateOperation(sSet params.ScaleSet) error { return nil } -func (c *Controller) handleCredentialsEvent(event dbCommon.ChangePayload) { - credentials, ok := event.Payload.(params.GithubCredentials) - if !ok { - slog.ErrorContext(c.ctx, "invalid credentials payload for entity type", "entity_type", event.EntityType, "payload", event) - return - } - - switch event.Operation { - case dbCommon.UpdateOperation: - slog.DebugContext(c.ctx, "got update operation") - c.mux.Lock() - defer c.mux.Unlock() - - if c.Entity.Credentials.ID != credentials.ID { - // stale update event. - return - } - c.Entity.Credentials = credentials - - if err := c.updateAndBroadcastCredentials(c.Entity); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to update credentials") - return - } - default: - slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation) - return - } -} - func (c *Controller) handleEntityEvent(event dbCommon.ChangePayload) { var entityGetter params.EntityGetter var ok bool @@ -184,35 +150,9 @@ func (c *Controller) handleEntityEvent(event dbCommon.ChangePayload) { slog.DebugContext(c.ctx, "got update operation") c.mux.Lock() defer c.mux.Unlock() - - if c.Entity.Credentials.ID != entity.Credentials.ID { - // credentials were swapped on the entity. We need to recompose the watcher - // filters. 
- c.consumer.SetFilters(composeControllerWatcherFilters(entity)) - if err := c.updateAndBroadcastCredentials(c.Entity); err != nil { - slog.With(slog.Any("error", err)).ErrorContext(c.ctx, "failed to update credentials") - } - } c.Entity = entity default: slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation) return } } - -func (c *Controller) updateAndBroadcastCredentials(entity params.GithubEntity) error { - ghCli, err := github.Client(c.ctx, entity) - if err != nil { - return fmt.Errorf("creating github client: %w", err) - } - - c.ghCli = ghCli - - for _, scaleSet := range c.ScaleSets { - if err := scaleSet.worker.SetGithubClient(ghCli); err != nil { - slog.ErrorContext(c.ctx, "setting github client on worker", "error", err) - continue - } - } - return nil -} diff --git a/workers/scaleset/interfaces.go b/workers/scaleset/interfaces.go index ee089c25..ca529ce1 100644 --- a/workers/scaleset/interfaces.go +++ b/workers/scaleset/interfaces.go @@ -6,8 +6,8 @@ import ( ) type scaleSetHelper interface { - ScaleSetCLI() *scalesets.ScaleSetClient GetScaleSet() params.ScaleSet + GetScaleSetClient() (*scalesets.ScaleSetClient, error) SetLastMessageID(id int64) error SetDesiredRunnerCount(count int) error Owner() string diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index 73d08c98..097a8680 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -17,19 +17,14 @@ import ( "github.com/cloudbase/garm/locking" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" - "github.com/cloudbase/garm/util/github/scalesets" ) -func NewWorker(ctx context.Context, store dbCommon.Store, scaleSet params.ScaleSet, provider common.Provider, ghCli common.GithubClient) (*Worker, error) { +func NewWorker(ctx context.Context, store dbCommon.Store, scaleSet params.ScaleSet, provider common.Provider) (*Worker, error) { consumerID := fmt.Sprintf("scaleset-worker-%s-%d", scaleSet.Name, scaleSet.ID) controllerInfo, err := store.ControllerInfo() if err != nil { return nil, fmt.Errorf("getting controller info: %w", err) } - scaleSetCli, err := scalesets.NewClient(ghCli) - if err != nil { - return nil, fmt.Errorf("creating scale set client: %w", err) - } return &Worker{ ctx: ctx, controllerInfo: controllerInfo, @@ -37,8 +32,6 @@ func NewWorker(ctx context.Context, store dbCommon.Store, scaleSet params.ScaleS store: store, provider: provider, scaleSet: scaleSet, - ghCli: ghCli, - scaleSetCli: scaleSetCli, runners: make(map[string]params.Instance), }, nil } @@ -53,9 +46,7 @@ type Worker struct { scaleSet params.ScaleSet runners map[string]params.Instance - ghCli common.GithubClient - scaleSetCli *scalesets.ScaleSetClient - consumer dbCommon.Consumer + consumer dbCommon.Consumer listener *scaleSetListener @@ -110,7 +101,12 @@ func (w *Worker) Start() (err error) { instanceState := commonParams.InstancePendingDelete locking.Lock(instance.Name, w.consumerID) if instance.AgentID != 0 { - if err := w.scaleSetCli.RemoveRunner(w.ctx, instance.AgentID); err != nil { + scaleSetCli, err := w.GetScaleSetClient() + if err != nil { + slog.ErrorContext(w.ctx, "error getting scale set client", "error", err) + return fmt.Errorf("getting scale set client: %w", err) + } + if err := scaleSetCli.RemoveRunner(w.ctx, instance.AgentID); err != nil { // scale sets use JIT runners. This means that we create the runner in github // before we create the actual instance that will use the credentials. We need // to remove the runner from github if it exists. 
@@ -128,7 +124,7 @@ func (w *Worker) Start() (err error) { } // The runner may have come up, registered and is currently running a // job, in which case, github will not allow us to remove it. - runnerInstance, err := w.scaleSetCli.GetRunner(w.ctx, instance.AgentID) + runnerInstance, err := scaleSetCli.GetRunner(w.ctx, instance.AgentID) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { // We could not get info about the runner and it wasn't not found @@ -254,7 +250,11 @@ func (w *Worker) setRunnerDBStatus(runner string, status commonParams.InstanceSt } func (w *Worker) removeRunnerFromGithubAndSetPendingDelete(runnerName string, agentID int64) error { - if err := w.scaleSetCli.RemoveRunner(w.ctx, agentID); err != nil { + scaleSetCli, err := w.GetScaleSetClient() + if err != nil { + return fmt.Errorf("getting scale set client: %w", err) + } + if err := scaleSetCli.RemoveRunner(w.ctx, agentID); err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { return fmt.Errorf("removing runner %s: %w", runnerName, err) } @@ -321,6 +321,10 @@ func (w *Worker) consolidateRunnerState(runners []params.RunnerReference) error ghRunnersByName[runner.Name] = runner } + scaleSetCli, err := w.GetScaleSetClient() + if err != nil { + return fmt.Errorf("getting scale set client: %w", err) + } dbRunnersByName := w.runnerByName() // Cross check what exists in github with what we have in the database. for name, runner := range ghRunnersByName { @@ -329,7 +333,7 @@ func (w *Worker) consolidateRunnerState(runners []params.RunnerReference) error // runner appears to be active. Is it not managed by GARM? if status != params.RunnerIdle && status != params.RunnerActive { slog.InfoContext(w.ctx, "runner does not exist in GARM; removing from github", "runner_name", name) - if err := w.scaleSetCli.RemoveRunner(w.ctx, runner.ID); err != nil { + if err := scaleSetCli.RemoveRunner(w.ctx, runner.ID); err != nil { if errors.Is(err, runnerErrors.ErrNotFound) { continue } @@ -466,23 +470,6 @@ func (w *Worker) consolidateRunnerState(runners []params.RunnerReference) error return nil } -func (w *Worker) SetGithubClient(client common.GithubClient) error { - w.mux.Lock() - defer w.mux.Unlock() - - if err := w.listener.Stop(); err != nil { - slog.ErrorContext(w.ctx, "error stopping listener", "error", err) - } - - w.ghCli = client - scaleSetCli, err := scalesets.NewClient(client) - if err != nil { - return fmt.Errorf("error creating scale set client: %w", err) - } - w.scaleSetCli = scaleSetCli - return nil -} - func (w *Worker) pseudoPoolID() (string, error) { // This is temporary. We need to extend providers to know about scale sets. 
entity, err := w.scaleSet.GetEntity() @@ -563,8 +550,13 @@ func (w *Worker) handleInstanceEntityEvent(event dbCommon.ChangePayload) { w.mux.Unlock() return } + scaleSetCli, err := w.GetScaleSetClient() + if err != nil { + slog.ErrorContext(w.ctx, "error getting scale set client", "error", err) + return + } if oldInstance.RunnerStatus != instance.RunnerStatus && instance.RunnerStatus == params.RunnerIdle { - serviceRuner, err := w.scaleSetCli.GetRunner(w.ctx, instance.AgentID) + serviceRuner, err := scaleSetCli.GetRunner(w.ctx, instance.AgentID) if err != nil { slog.ErrorContext(w.ctx, "error getting runner details", "error", err) w.mux.Unlock() @@ -725,9 +717,14 @@ func (w *Worker) handleScaleUp(target, current uint) { return } + scaleSetCli, err := w.GetScaleSetClient() + if err != nil { + slog.ErrorContext(w.ctx, "error getting scale set client", "error", err) + return + } for i := current; i < target; i++ { newRunnerName := fmt.Sprintf("%s-%s", w.scaleSet.GetRunnerPrefix(), util.NewID()) - jitConfig, err := w.scaleSetCli.GenerateJitRunnerConfig(w.ctx, newRunnerName, w.scaleSet.ScaleSetID) + jitConfig, err := scaleSetCli.GenerateJitRunnerConfig(w.ctx, newRunnerName, w.scaleSet.ScaleSetID) if err != nil { slog.ErrorContext(w.ctx, "error generating jit config", "error", err) continue @@ -755,14 +752,14 @@ func (w *Worker) handleScaleUp(target, current uint) { dbInstance, err := w.store.CreateScaleSetInstance(w.ctx, w.scaleSet.ID, runnerParams) if err != nil { slog.ErrorContext(w.ctx, "error creating instance", "error", err) - if err := w.scaleSetCli.RemoveRunner(w.ctx, jitConfig.Runner.ID); err != nil { + if err := scaleSetCli.RemoveRunner(w.ctx, jitConfig.Runner.ID); err != nil { slog.ErrorContext(w.ctx, "error deleting runner", "error", err) } continue } w.runners[dbInstance.ID] = dbInstance - _, err = w.scaleSetCli.GetRunner(w.ctx, jitConfig.Runner.ID) + _, err = scaleSetCli.GetRunner(w.ctx, jitConfig.Runner.ID) if err != nil { slog.ErrorContext(w.ctx, "error getting runner details", "error", err) continue @@ -854,8 +851,13 @@ func (w *Worker) handleScaleDown(target, current uint) { continue } + scaleSetCli, err := w.GetScaleSetClient() + if err != nil { + slog.ErrorContext(w.ctx, "error getting scale set client", "error", err) + return + } slog.DebugContext(w.ctx, "removing runner", "runner_name", runner.Name) - if err := w.scaleSetCli.RemoveRunner(w.ctx, runner.AgentID); err != nil { + if err := scaleSetCli.RemoveRunner(w.ctx, runner.AgentID); err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { slog.ErrorContext(w.ctx, "error removing runner", "runner_name", runner.Name, "error", err) locking.Unlock(runner.Name, false) diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index c3302f75..26f845ff 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -7,13 +7,28 @@ import ( runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" + "github.com/cloudbase/garm/cache" "github.com/cloudbase/garm/locking" "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/util/github/scalesets" ) -func (w *Worker) ScaleSetCLI() *scalesets.ScaleSetClient { - return w.scaleSetCli +func (w *Worker) GetScaleSetClient() (*scalesets.ScaleSetClient, error) { + scaleSetEntity, err := w.scaleSet.GetEntity() + if err != nil { + return nil, fmt.Errorf("getting entity: %w", err) + } + + ghCli, ok := cache.GetGithubClient(scaleSetEntity.ID) + if !ok { + return 
nil, fmt.Errorf("getting github client: %w", err) + } + scaleSetClient, err := scalesets.NewClient(ghCli) + if err != nil { + return nil, fmt.Errorf("creating scale set client: %w", err) + } + + return scaleSetClient, nil } func (w *Worker) GetScaleSet() params.ScaleSet { diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index 7a521e46..7e0ec869 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -48,8 +48,12 @@ func (l *scaleSetListener) Start() error { l.listenerCtx, l.cancelFunc = context.WithCancel(context.Background()) scaleSet := l.scaleSetHelper.GetScaleSet() + scaleSetClient, err := l.scaleSetHelper.GetScaleSetClient() + if err != nil { + return fmt.Errorf("getting scale set client: %w", err) + } slog.DebugContext(l.ctx, "creating new message session", "scale_set", scaleSet.ScaleSetID) - session, err := l.scaleSetHelper.ScaleSetCLI().CreateMessageSession( + session, err := scaleSetClient.CreateMessageSession( l.listenerCtx, scaleSet.ScaleSetID, l.scaleSetHelper.Owner(), ) @@ -72,13 +76,16 @@ func (l *scaleSetListener) Stop() error { if !l.running { return nil } - + scaleSetClient, err := l.scaleSetHelper.GetScaleSetClient() + if err != nil { + return fmt.Errorf("getting scale set client: %w", err) + } if l.messageSession != nil { slog.DebugContext(l.ctx, "closing message session", "scale_set", l.scaleSetHelper.GetScaleSet().ScaleSetID) if err := l.messageSession.Close(); err != nil { slog.ErrorContext(l.ctx, "closing message session", "error", err) } - if err := l.scaleSetHelper.ScaleSetCLI().DeleteMessageSession(context.Background(), l.messageSession); err != nil { + if err := scaleSetClient.DeleteMessageSession(context.Background(), l.messageSession); err != nil { slog.ErrorContext(l.ctx, "error deleting message session", "error", err) } } @@ -145,12 +152,17 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage } } + scaleSetClient, err := l.scaleSetHelper.GetScaleSetClient() + if err != nil { + slog.ErrorContext(l.ctx, "getting scale set client", "error", err) + return + } if len(availableJobs) > 0 { jobIDs := make([]int64, len(availableJobs)) for idx, job := range availableJobs { jobIDs[idx] = job.RunnerRequestID } - idsAcquired, err := l.scaleSetHelper.ScaleSetCLI().AcquireJobs( + idsAcquired, err := scaleSetClient.AcquireJobs( l.listenerCtx, l.scaleSetHelper.GetScaleSet().ScaleSetID, l.messageSession.MessageQueueAccessToken(), jobIDs) if err != nil { diff --git a/workers/scaleset/util.go b/workers/scaleset/util.go index aa3156c7..02d33b69 100644 --- a/workers/scaleset/util.go +++ b/workers/scaleset/util.go @@ -1,8 +1,6 @@ package scaleset import ( - "strings" - dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" @@ -22,18 +20,5 @@ func composeControllerWatcherFilters(entity params.GithubEntity) dbCommon.Payloa watcher.WithEntityFilter(entity), watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), ), - watcher.WithAll( - watcher.WithGithubCredentialsFilter(entity.Credentials), - watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), - ), ) } - -func poolIDFromLabels(runner params.RunnerReference) string { - for _, lbl := range runner.Labels { - if strings.HasPrefix(lbl.Name, poolIDLabelprefix) { - return lbl.Name[len(poolIDLabelprefix):] - } - } - return "" -} From 4890eb47325becea0bd2d8f3a88b09ac82f5cf0f Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 12 
May 2025 17:32:37 +0000 Subject: [PATCH 058/179] Add EndpointType Signed-off-by: Gabriel Adrian Samfira --- apiserver/controllers/controllers.go | 4 ++++ database/sql/github.go | 1 + database/sql/models.go | 2 ++ database/sql/sql.go | 11 ++++++++++ params/params.go | 8 +++++++ params/requests.go | 33 ++++++++++++++++++---------- 6 files changed, 48 insertions(+), 11 deletions(-) diff --git a/apiserver/controllers/controllers.go b/apiserver/controllers/controllers.go index d8750a50..3e6413e0 100644 --- a/apiserver/controllers/controllers.go +++ b/apiserver/controllers/controllers.go @@ -103,6 +103,7 @@ func (a *APIController) handleWorkflowJobEvent(ctx context.Context, w http.Respo handleError(ctx, w, gErrors.NewBadRequestError("invalid post body: %s", err)) return } + slog.Debug("received workflow job event", "body", string(body)) signature := r.Header.Get("X-Hub-Signature-256") hookType := r.Header.Get("X-Github-Hook-Installation-Target-Type") @@ -154,6 +155,9 @@ func (a *APIController) WebhookHandler(w http.ResponseWriter, r *http.Request) { } headers := r.Header.Clone() + for k, v := range headers { + slog.Debug("header", "key", k, "value", v) + } event := runnerParams.Event(headers.Get("X-Github-Event")) switch event { diff --git a/database/sql/github.go b/database/sql/github.go index 22e357bd..2fb46d65 100644 --- a/database/sql/github.go +++ b/database/sql/github.go @@ -97,6 +97,7 @@ func (s *sqlDatabase) sqlToCommonGithubEndpoint(ep GithubEndpoint) (params.Githu UploadBaseURL: ep.UploadBaseURL, CACertBundle: ep.CACertBundle, CreatedAt: ep.CreatedAt, + EndpointType: ep.EndpointType, UpdatedAt: ep.UpdatedAt, }, nil } diff --git a/database/sql/models.go b/database/sql/models.go index d6fbb6e9..2a04db0c 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -381,6 +381,8 @@ type GithubEndpoint struct { UpdatedAt time.Time DeletedAt gorm.DeletedAt `gorm:"index"` + EndpointType params.EndpointType + Description string `gorm:"type:text"` APIBaseURL string `gorm:"type:text collate nocase"` UploadBaseURL string `gorm:"type:text collate nocase"` diff --git a/database/sql/sql.go b/database/sql/sql.go index 76495732..86729ad6 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -409,6 +409,17 @@ func (s *sqlDatabase) migrateDB() error { } } + if s.conn.Migrator().HasTable(&GithubEndpoint{}) { + if !s.conn.Migrator().HasColumn(&GithubEndpoint{}, "endpoint_type") { + if err := s.conn.Migrator().AutoMigrate(&GithubEndpoint{}); err != nil { + return errors.Wrap(err, "migrating github endpoints") + } + if err := s.conn.Exec("update github_endpoints set endpoint_type = 'github' where endpoint_type is null").Error; err != nil { + return errors.Wrap(err, "updating github endpoints") + } + } + } + var needsCredentialMigration bool if !s.conn.Migrator().HasTable(&GithubCredentials{}) || !s.conn.Migrator().HasTable(&GithubEndpoint{}) { needsCredentialMigration = true diff --git a/params/params.go b/params/params.go index 7636102f..fdf3b836 100644 --- a/params/params.go +++ b/params/params.go @@ -44,6 +44,7 @@ type ( RunnerStatus string WebhookEndpointType string GithubAuthType string + EndpointType string PoolBalancerType string ScaleSetState string ScaleSetMessageType string @@ -76,6 +77,11 @@ const ( PoolBalancerTypeNone PoolBalancerType = "" ) +const ( + GithubEndpointType EndpointType = "github" + GiteaEndpointType EndpointType = "gitea" +) + const ( // LXDProvider represents the LXD provider. 
LXDProvider ProviderType = "lxd" @@ -1138,5 +1144,7 @@ type GithubEndpoint struct { CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` + EndpointType EndpointType `json:"endpoint_type,omitempty"` + Credentials []GithubCredentials `json:"credentials,omitempty"` } diff --git a/params/requests.go b/params/requests.go index 1166418f..12d9b838 100644 --- a/params/requests.go +++ b/params/requests.go @@ -281,6 +281,7 @@ type CreateGithubEndpointParams struct { APIBaseURL string `json:"api_base_url,omitempty"` UploadBaseURL string `json:"upload_base_url,omitempty"` BaseURL string `json:"base_url,omitempty"` + EndpointType string `json:"endpoint_type,omitempty"` CACertBundle []byte `json:"ca_cert_bundle,omitempty"` } @@ -289,6 +290,14 @@ func (c CreateGithubEndpointParams) Validate() error { return runnerErrors.NewBadRequestError("missing api_base_url") } + if c.EndpointType != "" { + switch c.EndpointType { + case string(GithubEndpointType), string(GiteaEndpointType): + default: + return runnerErrors.NewBadRequestError("invalid endpoint_type: %s", c.EndpointType) + } + } + url, err := url.Parse(c.APIBaseURL) if err != nil || url.Scheme == "" || url.Host == "" { return runnerErrors.NewBadRequestError("invalid api_base_url") @@ -299,19 +308,21 @@ func (c CreateGithubEndpointParams) Validate() error { return runnerErrors.NewBadRequestError("invalid api_base_url") } - if c.UploadBaseURL == "" { - return runnerErrors.NewBadRequestError("missing upload_base_url") - } + if c.EndpointType == string(GithubEndpointType) { + if c.UploadBaseURL == "" { + return runnerErrors.NewBadRequestError("missing upload_base_url") + } - url, err = url.Parse(c.UploadBaseURL) - if err != nil || url.Scheme == "" || url.Host == "" { - return runnerErrors.NewBadRequestError("invalid upload_base_url") - } + url, err = url.Parse(c.UploadBaseURL) + if err != nil || url.Scheme == "" || url.Host == "" { + return runnerErrors.NewBadRequestError("invalid upload_base_url") + } - switch url.Scheme { - case httpsScheme, httpScheme: - default: - return runnerErrors.NewBadRequestError("invalid api_base_url") + switch url.Scheme { + case httpsScheme, httpScheme: + default: + return runnerErrors.NewBadRequestError("invalid api_base_url") + } } if c.BaseURL == "" { From 40e6581a759d5174d6f3054245fd28d5b0582026 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 12 May 2025 21:47:13 +0000 Subject: [PATCH 059/179] Rename GitHub specific types This change renames a lot of variables, types and functions to be more generic. The goal is to allow GARM to add more forges in the future. 
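As an illustration of what the more generic types enable (a sketch only: the resolveCredentials helper and its error strings are made up for this example, while params.ForgeEntity, params.ForgeCredentials, the forge type constants and cache.GetGithubCredentials are taken from the hunks in this change):

    package example

    import (
        "fmt"

        "github.com/cloudbase/garm/cache"
        "github.com/cloudbase/garm/params"
    )

    // resolveCredentials dispatches on the forge type of an entity. GitHub
    // credentials come from the existing cache; a gitea case can be added
    // later without changing any caller.
    func resolveCredentials(entity params.ForgeEntity) (params.ForgeCredentials, error) {
        switch entity.Credentials.ForgeType {
        case params.GithubEndpointType:
            creds, ok := cache.GetGithubCredentials(entity.Credentials.GetID())
            if !ok {
                return params.ForgeCredentials{}, fmt.Errorf("credentials %v not cached", entity.Credentials.GetID())
            }
            return creds.GetForgeCredentials(), nil
        case params.GiteaEndpointType:
            return params.ForgeCredentials{}, fmt.Errorf("gitea credentials are not wired up yet")
        default:
            return params.ForgeCredentials{}, fmt.Errorf("unknown forge type: %v", entity.Credentials.ForgeType)
        }
    }

The same dispatch shape shows up in cache/entity_cache.go below, where the gitea branch is still a placeholder.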
Signed-off-by: Gabriel Adrian Samfira --- apiserver/controllers/enterprises.go | 4 +- apiserver/controllers/organizations.go | 4 +- apiserver/controllers/repositories.go | 4 +- auth/instance_middleware.go | 4 +- auth/interfaces.go | 2 +- cache/cache_test.go | 69 ++-- cache/credentials_cache.go | 2 +- cache/entity_cache.go | 60 ++-- cache/tools_cache.go | 6 +- .../create_github_endpoint_responses.go | 4 +- .../get_github_endpoint_responses.go | 4 +- .../list_github_endpoints_responses.go | 4 +- .../update_github_endpoint_responses.go | 4 +- cmd/garm-cli/cmd/github_credentials.go | 6 +- cmd/garm-cli/cmd/github_endpoints.go | 4 +- database/common/mocks/Store.go | 144 ++++---- database/common/store.go | 34 +- database/sql/enterprise_test.go | 26 +- database/sql/github.go | 38 +- database/sql/github_test.go | 30 +- database/sql/jobs.go | 8 +- database/sql/models.go | 6 +- database/sql/organizations_test.go | 26 +- database/sql/pools.go | 50 +-- database/sql/repositories_test.go | 26 +- database/sql/scalesets.go | 28 +- database/sql/scalesets_test.go | 8 +- database/sql/sql.go | 8 +- database/sql/util.go | 30 +- database/watcher/filters.go | 46 +-- database/watcher/watcher_store_test.go | 4 +- internal/testing/testing.go | 6 +- params/interfaces.go | 6 +- params/params.go | 329 ++++++++++++------ params/requests.go | 18 +- runner/common/mocks/GithubClient.go | 8 +- runner/common/mocks/GithubEntityOperations.go | 8 +- runner/common/util.go | 2 +- runner/enterprises.go | 24 +- runner/enterprises_test.go | 40 +-- runner/github_endpoints.go | 24 +- runner/metadata.go | 8 +- runner/organizations.go | 24 +- runner/organizations_test.go | 34 +- runner/pool/pool.go | 22 +- runner/pool/stub_client.go | 4 +- runner/pool/util.go | 4 +- runner/pool/watcher.go | 18 +- runner/pools_test.go | 6 +- runner/repositories.go | 24 +- runner/repositories_test.go | 38 +- runner/runner.go | 10 +- runner/scalesets.go | 12 +- test/integration/client_utils.go | 8 +- test/integration/credentials_test.go | 16 +- test/integration/endpoints.go | 2 +- test/integration/endpoints_test.go | 8 +- test/integration/repositories_test.go | 2 +- util/github/client.go | 122 +++++-- util/github/scalesets/token.go | 2 +- workers/cache/cache.go | 2 +- workers/cache/tool_cache.go | 4 +- workers/entity/util.go | 4 +- workers/entity/worker.go | 7 +- workers/entity/worker_watcher.go | 17 +- workers/pools/controller.go | 2 - workers/provider/instance_manager.go | 12 +- workers/provider/provider_helper.go | 8 +- workers/scaleset/controller.go | 4 +- workers/scaleset/controller_watcher.go | 6 +- workers/scaleset/scaleset_helper.go | 6 +- workers/scaleset/util.go | 2 +- 72 files changed, 896 insertions(+), 700 deletions(-) diff --git a/apiserver/controllers/enterprises.go b/apiserver/controllers/enterprises.go index 9be1f1bc..9ce278cd 100644 --- a/apiserver/controllers/enterprises.go +++ b/apiserver/controllers/enterprises.go @@ -320,7 +320,7 @@ func (a *APIController) CreateEnterpriseScaleSetHandler(w http.ResponseWriter, r return } - scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.GithubEntityTypeEnterprise, enterpriseID, scaleSetData) + scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.ForgeEntityTypeEnterprise, enterpriseID, scaleSetData) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "error creating enterprise scale set") handleError(ctx, w, err) @@ -404,7 +404,7 @@ func (a *APIController) ListEnterpriseScaleSetsHandler(w http.ResponseWriter, r return } - scaleSets, err := 
a.r.ListEntityScaleSets(ctx, runnerParams.GithubEntityTypeEnterprise, enterpriseID) + scaleSets, err := a.r.ListEntityScaleSets(ctx, runnerParams.ForgeEntityTypeEnterprise, enterpriseID) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing scale sets") handleError(ctx, w, err) diff --git a/apiserver/controllers/organizations.go b/apiserver/controllers/organizations.go index 149dd490..86f3c5d6 100644 --- a/apiserver/controllers/organizations.go +++ b/apiserver/controllers/organizations.go @@ -330,7 +330,7 @@ func (a *APIController) CreateOrgScaleSetHandler(w http.ResponseWriter, r *http. return } - scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.GithubEntityTypeOrganization, orgID, scalesetData) + scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.ForgeEntityTypeOrganization, orgID, scalesetData) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "error creating organization scale set") handleError(ctx, w, err) @@ -414,7 +414,7 @@ func (a *APIController) ListOrgScaleSetsHandler(w http.ResponseWriter, r *http.R return } - scaleSets, err := a.r.ListEntityScaleSets(ctx, runnerParams.GithubEntityTypeOrganization, orgID) + scaleSets, err := a.r.ListEntityScaleSets(ctx, runnerParams.ForgeEntityTypeOrganization, orgID) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing scale sets") handleError(ctx, w, err) diff --git a/apiserver/controllers/repositories.go b/apiserver/controllers/repositories.go index 14693aac..2eea0001 100644 --- a/apiserver/controllers/repositories.go +++ b/apiserver/controllers/repositories.go @@ -329,7 +329,7 @@ func (a *APIController) CreateRepoScaleSetHandler(w http.ResponseWriter, r *http return } - scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.GithubEntityTypeRepository, repoID, scaleSetData) + scaleSet, err := a.r.CreateEntityScaleSet(ctx, runnerParams.ForgeEntityTypeRepository, repoID, scaleSetData) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "error creating repository scale set") handleError(ctx, w, err) @@ -413,7 +413,7 @@ func (a *APIController) ListRepoScaleSetsHandler(w http.ResponseWriter, r *http. return } - scaleSets, err := a.r.ListEntityScaleSets(ctx, runnerParams.GithubEntityTypeRepository, repoID) + scaleSets, err := a.r.ListEntityScaleSets(ctx, runnerParams.ForgeEntityTypeRepository, repoID) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing scale sets") handleError(ctx, w, err) diff --git a/auth/instance_middleware.go b/auth/instance_middleware.go index dbd3cfb7..38e1fdcf 100644 --- a/auth/instance_middleware.go +++ b/auth/instance_middleware.go @@ -40,7 +40,7 @@ type InstanceJWTClaims struct { Name string `json:"name"` PoolID string `json:"provider_id"` // Scope is either repository or organization - Scope params.GithubEntityType `json:"scope"` + Scope params.ForgeEntityType `json:"scope"` // Entity is the repo or org name Entity string `json:"entity"` CreateAttempt int `json:"create_attempt"` @@ -60,7 +60,7 @@ type instanceToken struct { jwtSecret string } -func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity string, entityType params.GithubEntityType, ttlMinutes uint) (string, error) { +func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity string, entityType params.ForgeEntityType, ttlMinutes uint) (string, error) { // Token expiration is equal to the bootstrap timeout set on the pool plus the polling // interval garm uses to check for timed out runners. 
Runners that have not sent their info // by the end of this interval are most likely failed and will be reaped by garm anyway. diff --git a/auth/interfaces.go b/auth/interfaces.go index 4e4d370c..095db4b2 100644 --- a/auth/interfaces.go +++ b/auth/interfaces.go @@ -26,5 +26,5 @@ type Middleware interface { } type InstanceTokenGetter interface { - NewInstanceJWTToken(instance params.Instance, entity string, poolType params.GithubEntityType, ttlMinutes uint) (string, error) + NewInstanceJWTToken(instance params.Instance, entity string, poolType params.ForgeEntityType, ttlMinutes uint) (string, error) } diff --git a/cache/cache_test.go b/cache/cache_test.go index 7a977394..08b269b8 100644 --- a/cache/cache_test.go +++ b/cache/cache_test.go @@ -13,13 +13,13 @@ import ( type CacheTestSuite struct { suite.Suite - entity params.GithubEntity + entity params.ForgeEntity } func (c *CacheTestSuite) SetupTest() { - c.entity = params.GithubEntity{ + c.entity = params.ForgeEntity{ ID: "1234", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } @@ -254,9 +254,9 @@ func (c *CacheTestSuite) TestGetInstancesForScaleSet() { } func (c *CacheTestSuite) TestSetGetEntityCache() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } @@ -265,22 +265,25 @@ func (c *CacheTestSuite) TestSetGetEntityCache() { c.Require().True(ok) c.Require().Equal(entity.ID, cachedEntity.ID) - entity.Credentials.Description = "test description" + entity.Credentials.GithubCredentials.Description = "test description" SetEntity(entity) cachedEntity, ok = GetEntity("test-entity") c.Require().True(ok) c.Require().Equal(entity.ID, cachedEntity.ID) - c.Require().Equal(entity.Credentials.Description, cachedEntity.Credentials.Description) + c.Require().Equal(entity.Credentials.GithubCredentials.Description, cachedEntity.Credentials.GithubCredentials.Description) } func (c *CacheTestSuite) TestReplaceEntityPools() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", - Credentials: params.GithubCredentials{ - ID: 1, + Credentials: params.ForgeCredentials{ + ForgeType: params.GithubEndpointType, + GithubCredentials: params.GithubCredentials{ + ID: 1, + }, }, } pool1 := params.Pool{ @@ -301,7 +304,7 @@ func (c *CacheTestSuite) TestReplaceEntityPools() { cachedEntity, ok := GetEntity(entity.ID) c.Require().True(ok) c.Require().Equal(entity.ID, cachedEntity.ID) - c.Require().Equal("test", cachedEntity.Credentials.Name) + c.Require().Equal("test", cachedEntity.Credentials.GithubCredentials.Name) pools := GetEntityPools(entity.ID) c.Require().Len(pools, 2) @@ -310,9 +313,9 @@ func (c *CacheTestSuite) TestReplaceEntityPools() { } func (c *CacheTestSuite) TestReplaceEntityScaleSets() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } @@ -336,9 +339,9 @@ func (c *CacheTestSuite) TestReplaceEntityScaleSets() { } func (c *CacheTestSuite) TestDeleteEntity() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + 
EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } @@ -350,13 +353,13 @@ func (c *CacheTestSuite) TestDeleteEntity() { DeleteEntity(entity.ID) cachedEntity, ok = GetEntity(entity.ID) c.Require().False(ok) - c.Require().Equal(params.GithubEntity{}, cachedEntity) + c.Require().Equal(params.ForgeEntity{}, cachedEntity) } func (c *CacheTestSuite) TestSetEntityPool() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } @@ -387,9 +390,9 @@ func (c *CacheTestSuite) TestSetEntityPool() { } func (c *CacheTestSuite) TestSetEntityScaleSet() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } @@ -417,9 +420,9 @@ func (c *CacheTestSuite) TestSetEntityScaleSet() { } func (c *CacheTestSuite) TestDeleteEntityPool() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } @@ -440,9 +443,9 @@ func (c *CacheTestSuite) TestDeleteEntityPool() { } func (c *CacheTestSuite) TestDeleteEntityScaleSet() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } @@ -463,9 +466,9 @@ func (c *CacheTestSuite) TestDeleteEntityScaleSet() { } func (c *CacheTestSuite) TestFindPoolsMatchingAllTags() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } @@ -520,9 +523,9 @@ func (c *CacheTestSuite) TestFindPoolsMatchingAllTags() { } func (c *CacheTestSuite) TestGetEntityPools() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } @@ -562,9 +565,9 @@ func (c *CacheTestSuite) TestGetEntityPools() { } func (c *CacheTestSuite) TestGetEntityScaleSet() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } @@ -584,9 +587,9 @@ func (c *CacheTestSuite) TestGetEntityScaleSet() { } func (c *CacheTestSuite) TestGetEntityPool() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "test-entity", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", } diff --git a/cache/credentials_cache.go b/cache/credentials_cache.go index 7cf65a03..060b076a 100644 --- a/cache/credentials_cache.go +++ b/cache/credentials_cache.go @@ -36,7 +36,7 @@ func (g *GithubCredentials) SetCredentials(credentials params.GithubCredentials) defer g.mux.Unlock() g.cache[credentials.ID] = credentials - UpdateCredentialsInAffectedEntities(credentials) + UpdateCredentialsInAffectedEntities(credentials.GetForgeCredentials()) } func (g *GithubCredentials) GetCredentials(id uint) (params.GithubCredentials, bool) { diff --git 
a/cache/entity_cache.go b/cache/entity_cache.go index 006f40db..74f406de 100644 --- a/cache/entity_cache.go +++ b/cache/entity_cache.go @@ -16,7 +16,7 @@ func init() { } type EntityItem struct { - Entity params.GithubEntity + Entity params.ForgeEntity Pools map[string]params.Pool ScaleSets map[uint]params.ScaleSet } @@ -27,34 +27,42 @@ type EntityCache struct { entities map[string]EntityItem } -func (e *EntityCache) UpdateCredentialsInAffectedEntities(creds params.GithubCredentials) { +func (e *EntityCache) UpdateCredentialsInAffectedEntities(creds params.ForgeCredentials) { e.mux.Lock() defer e.mux.Unlock() for entityID, cache := range e.entities { - if cache.Entity.Credentials.ID == creds.ID { + if cache.Entity.Credentials.GetID() == creds.GetID() { cache.Entity.Credentials = creds e.entities[entityID] = cache } } } -func (e *EntityCache) GetEntity(entityID string) (params.GithubEntity, bool) { +func (e *EntityCache) GetEntity(entityID string) (params.ForgeEntity, bool) { e.mux.Lock() defer e.mux.Unlock() if cache, ok := e.entities[entityID]; ok { // Get the credentials from the credentials cache. - creds, ok := GetGithubCredentials(cache.Entity.Credentials.ID) - if ok { - cache.Entity.Credentials = creds + var forgeCredsGetter params.ForgeCredentialsGetter + var credsOk bool + switch cache.Entity.Credentials.ForgeType { + case params.GithubEndpointType: + forgeCredsGetter, credsOk = GetGithubCredentials(cache.Entity.Credentials.GetID()) + case params.GiteaEndpointType: + // add gitea credentials getter + return cache.Entity, false + } + if credsOk { + cache.Entity.Credentials = forgeCredsGetter.GetForgeCredentials() } return cache.Entity, true } - return params.GithubEntity{}, false + return params.ForgeEntity{}, false } -func (e *EntityCache) SetEntity(entity params.GithubEntity) { +func (e *EntityCache) SetEntity(entity params.ForgeEntity) { e.mux.Lock() defer e.mux.Unlock() @@ -225,13 +233,13 @@ func (e *EntityCache) GetEntityScaleSets(entityID string) []params.ScaleSet { return nil } -func (e *EntityCache) GetEntitiesUsingGredentials(credsID uint) []params.GithubEntity { +func (e *EntityCache) GetEntitiesUsingGredentials(credsID uint) []params.ForgeEntity { e.mux.Lock() defer e.mux.Unlock() - var entities []params.GithubEntity + var entities []params.ForgeEntity for _, cache := range e.entities { - if cache.Entity.Credentials.ID == credsID { + if cache.Entity.Credentials.GetID() == credsID { entities = append(entities, cache.Entity) } } @@ -239,16 +247,24 @@ func (e *EntityCache) GetEntitiesUsingGredentials(credsID uint) []params.GithubE return entities } -func (e *EntityCache) GetAllEntities() []params.GithubEntity { +func (e *EntityCache) GetAllEntities() []params.ForgeEntity { e.mux.Lock() defer e.mux.Unlock() - var entities []params.GithubEntity + var entities []params.ForgeEntity for _, cache := range e.entities { // Get the credentials from the credentials cache. 
- creds, ok := GetGithubCredentials(cache.Entity.Credentials.ID) - if ok { - cache.Entity.Credentials = creds + var forgeCredsGetter params.ForgeCredentialsGetter + var credsOk bool + switch cache.Entity.Credentials.ForgeType { + case params.GithubEndpointType: + forgeCredsGetter, credsOk = GetGithubCredentials(cache.Entity.Credentials.GetID()) + case params.GiteaEndpointType: + // add gitea credentials getter + return nil + } + if credsOk { + cache.Entity.Credentials = forgeCredsGetter.GetForgeCredentials() } entities = append(entities, cache.Entity) } @@ -284,11 +300,11 @@ func (e *EntityCache) GetAllScaleSets() []params.ScaleSet { return scaleSets } -func GetEntity(entityID string) (params.GithubEntity, bool) { +func GetEntity(entityID string) (params.ForgeEntity, bool) { return entityCache.GetEntity(entityID) } -func SetEntity(entity params.GithubEntity) { +func SetEntity(entity params.ForgeEntity) { entityCache.SetEntity(entity) } @@ -340,15 +356,15 @@ func GetEntityScaleSets(entityID string) []params.ScaleSet { return entityCache.GetEntityScaleSets(entityID) } -func UpdateCredentialsInAffectedEntities(creds params.GithubCredentials) { +func UpdateCredentialsInAffectedEntities(creds params.ForgeCredentials) { entityCache.UpdateCredentialsInAffectedEntities(creds) } -func GetEntitiesUsingGredentials(credsID uint) []params.GithubEntity { +func GetEntitiesUsingGredentials(credsID uint) []params.ForgeEntity { return entityCache.GetEntitiesUsingGredentials(credsID) } -func GetAllEntities() []params.GithubEntity { +func GetAllEntities() []params.ForgeEntity { return entityCache.GetAllEntities() } diff --git a/cache/tools_cache.go b/cache/tools_cache.go index 233de2c1..f4a2db62 100644 --- a/cache/tools_cache.go +++ b/cache/tools_cache.go @@ -19,7 +19,7 @@ func init() { type GithubEntityTools struct { updatedAt time.Time - entity params.GithubEntity + entity params.ForgeEntity tools []commonParams.RunnerApplicationDownload } @@ -44,7 +44,7 @@ func (g *GithubToolsCache) Get(entityID string) ([]commonParams.RunnerApplicatio return nil, false } -func (g *GithubToolsCache) Set(entity params.GithubEntity, tools []commonParams.RunnerApplicationDownload) { +func (g *GithubToolsCache) Set(entity params.ForgeEntity, tools []commonParams.RunnerApplicationDownload) { g.mux.Lock() defer g.mux.Unlock() @@ -55,7 +55,7 @@ func (g *GithubToolsCache) Set(entity params.GithubEntity, tools []commonParams. 
} } -func SetGithubToolsCache(entity params.GithubEntity, tools []commonParams.RunnerApplicationDownload) { +func SetGithubToolsCache(entity params.ForgeEntity, tools []commonParams.RunnerApplicationDownload) { githubToolsCache.Set(entity, tools) } diff --git a/client/endpoints/create_github_endpoint_responses.go b/client/endpoints/create_github_endpoint_responses.go index acd95088..57016978 100644 --- a/client/endpoints/create_github_endpoint_responses.go +++ b/client/endpoints/create_github_endpoint_responses.go @@ -54,7 +54,7 @@ CreateGithubEndpointOK describes a response with status code 200, with default h GithubEndpoint */ type CreateGithubEndpointOK struct { - Payload garm_params.GithubEndpoint + Payload garm_params.ForgeEndpoint } // IsSuccess returns true when this create github endpoint o k response has a 2xx status code @@ -97,7 +97,7 @@ func (o *CreateGithubEndpointOK) String() string { return fmt.Sprintf("[POST /github/endpoints][%d] createGithubEndpointOK %s", 200, payload) } -func (o *CreateGithubEndpointOK) GetPayload() garm_params.GithubEndpoint { +func (o *CreateGithubEndpointOK) GetPayload() garm_params.ForgeEndpoint { return o.Payload } diff --git a/client/endpoints/get_github_endpoint_responses.go b/client/endpoints/get_github_endpoint_responses.go index d84f9280..4b4881cd 100644 --- a/client/endpoints/get_github_endpoint_responses.go +++ b/client/endpoints/get_github_endpoint_responses.go @@ -54,7 +54,7 @@ GetGithubEndpointOK describes a response with status code 200, with default head GithubEndpoint */ type GetGithubEndpointOK struct { - Payload garm_params.GithubEndpoint + Payload garm_params.ForgeEndpoint } // IsSuccess returns true when this get github endpoint o k response has a 2xx status code @@ -97,7 +97,7 @@ func (o *GetGithubEndpointOK) String() string { return fmt.Sprintf("[GET /github/endpoints/{name}][%d] getGithubEndpointOK %s", 200, payload) } -func (o *GetGithubEndpointOK) GetPayload() garm_params.GithubEndpoint { +func (o *GetGithubEndpointOK) GetPayload() garm_params.ForgeEndpoint { return o.Payload } diff --git a/client/endpoints/list_github_endpoints_responses.go b/client/endpoints/list_github_endpoints_responses.go index 6c2dde6c..f7b10a1c 100644 --- a/client/endpoints/list_github_endpoints_responses.go +++ b/client/endpoints/list_github_endpoints_responses.go @@ -54,7 +54,7 @@ ListGithubEndpointsOK describes a response with status code 200, with default he GithubEndpoints */ type ListGithubEndpointsOK struct { - Payload garm_params.GithubEndpoints + Payload garm_params.ForgeEndpoints } // IsSuccess returns true when this list github endpoints o k response has a 2xx status code @@ -97,7 +97,7 @@ func (o *ListGithubEndpointsOK) String() string { return fmt.Sprintf("[GET /github/endpoints][%d] listGithubEndpointsOK %s", 200, payload) } -func (o *ListGithubEndpointsOK) GetPayload() garm_params.GithubEndpoints { +func (o *ListGithubEndpointsOK) GetPayload() garm_params.ForgeEndpoints { return o.Payload } diff --git a/client/endpoints/update_github_endpoint_responses.go b/client/endpoints/update_github_endpoint_responses.go index 234ed711..969d6bbd 100644 --- a/client/endpoints/update_github_endpoint_responses.go +++ b/client/endpoints/update_github_endpoint_responses.go @@ -54,7 +54,7 @@ UpdateGithubEndpointOK describes a response with status code 200, with default h GithubEndpoint */ type UpdateGithubEndpointOK struct { - Payload garm_params.GithubEndpoint + Payload garm_params.ForgeEndpoint } // IsSuccess returns true when this update github 
endpoint o k response has a 2xx status code @@ -97,7 +97,7 @@ func (o *UpdateGithubEndpointOK) String() string { return fmt.Sprintf("[PUT /github/endpoints/{name}][%d] updateGithubEndpointOK %s", 200, payload) } -func (o *UpdateGithubEndpointOK) GetPayload() garm_params.GithubEndpoint { +func (o *UpdateGithubEndpointOK) GetPayload() garm_params.ForgeEndpoint { return o.Payload } diff --git a/cmd/garm-cli/cmd/github_credentials.go b/cmd/garm-cli/cmd/github_credentials.go index 2b2128d0..fb3853d8 100644 --- a/cmd/garm-cli/cmd/github_credentials.go +++ b/cmd/garm-cli/cmd/github_credentials.go @@ -283,12 +283,12 @@ func parsePrivateKeyFromPath(path string) ([]byte, error) { func parseCredentialsAddParams() (ret params.CreateGithubCredentialsParams, err error) { ret.Name = credentialsName ret.Description = credentialsDescription - ret.AuthType = params.GithubAuthType(credentialsType) + ret.AuthType = params.ForgeAuthType(credentialsType) ret.Endpoint = credentialsEndpoint switch ret.AuthType { - case params.GithubAuthTypePAT: + case params.ForgeAuthTypePAT: ret.PAT.OAuth2Token = credentialsOAuthToken - case params.GithubAuthTypeApp: + case params.ForgeAuthTypeApp: ret.App.InstallationID = credentialsAppInstallationID ret.App.AppID = credentialsAppID keyContents, err := parsePrivateKeyFromPath(credentialsPrivateKeyPath) diff --git a/cmd/garm-cli/cmd/github_endpoints.go b/cmd/garm-cli/cmd/github_endpoints.go index 2be14f52..f119a1a2 100644 --- a/cmd/garm-cli/cmd/github_endpoints.go +++ b/cmd/garm-cli/cmd/github_endpoints.go @@ -252,7 +252,7 @@ func parseCreateParams() (params.CreateGithubEndpointParams, error) { return ret, nil } -func formatEndpoints(endpoints params.GithubEndpoints) { +func formatEndpoints(endpoints params.ForgeEndpoints) { if outputFormat == common.OutputFormatJSON { printAsJSON(endpoints) return @@ -274,7 +274,7 @@ func formatEndpoints(endpoints params.GithubEndpoints) { fmt.Println(t.Render()) } -func formatOneEndpoint(endpoint params.GithubEndpoint) { +func formatOneEndpoint(endpoint params.ForgeEndpoint) { if outputFormat == common.OutputFormatJSON { printAsJSON(endpoint) return diff --git a/database/common/mocks/Store.go b/database/common/mocks/Store.go index f7f508b5..53b90720 100644 --- a/database/common/mocks/Store.go +++ b/database/common/mocks/Store.go @@ -15,7 +15,7 @@ type Store struct { } // AddEntityEvent provides a mock function with given fields: ctx, entity, event, eventLevel, statusMessage, maxEvents -func (_m *Store) AddEntityEvent(ctx context.Context, entity params.GithubEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { +func (_m *Store) AddEntityEvent(ctx context.Context, entity params.ForgeEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { ret := _m.Called(ctx, entity, event, eventLevel, statusMessage, maxEvents) if len(ret) == 0 { @@ -23,7 +23,7 @@ func (_m *Store) AddEntityEvent(ctx context.Context, entity params.GithubEntity, } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, params.EventType, params.EventLevel, string, int) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, params.EventType, params.EventLevel, string, int) error); ok { r0 = rf(ctx, entity, event, eventLevel, statusMessage, maxEvents) } else { r0 = ret.Error(0) @@ -125,7 +125,7 @@ func (_m *Store) CreateEnterprise(ctx context.Context, name string, credentialsN } // CreateEntityPool provides a mock function with 
given fields: ctx, entity, param -func (_m *Store) CreateEntityPool(ctx context.Context, entity params.GithubEntity, param params.CreatePoolParams) (params.Pool, error) { +func (_m *Store) CreateEntityPool(ctx context.Context, entity params.ForgeEntity, param params.CreatePoolParams) (params.Pool, error) { ret := _m.Called(ctx, entity, param) if len(ret) == 0 { @@ -134,16 +134,16 @@ func (_m *Store) CreateEntityPool(ctx context.Context, entity params.GithubEntit var r0 params.Pool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, params.CreatePoolParams) (params.Pool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, params.CreatePoolParams) (params.Pool, error)); ok { return rf(ctx, entity, param) } - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, params.CreatePoolParams) params.Pool); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, params.CreatePoolParams) params.Pool); ok { r0 = rf(ctx, entity, param) } else { r0 = ret.Get(0).(params.Pool) } - if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity, params.CreatePoolParams) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity, params.CreatePoolParams) error); ok { r1 = rf(ctx, entity, param) } else { r1 = ret.Error(1) @@ -153,7 +153,7 @@ func (_m *Store) CreateEntityPool(ctx context.Context, entity params.GithubEntit } // CreateEntityScaleSet provides a mock function with given fields: _a0, entity, param -func (_m *Store) CreateEntityScaleSet(_a0 context.Context, entity params.GithubEntity, param params.CreateScaleSetParams) (params.ScaleSet, error) { +func (_m *Store) CreateEntityScaleSet(_a0 context.Context, entity params.ForgeEntity, param params.CreateScaleSetParams) (params.ScaleSet, error) { ret := _m.Called(_a0, entity, param) if len(ret) == 0 { @@ -162,16 +162,16 @@ func (_m *Store) CreateEntityScaleSet(_a0 context.Context, entity params.GithubE var r0 params.ScaleSet var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, params.CreateScaleSetParams) (params.ScaleSet, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, params.CreateScaleSetParams) (params.ScaleSet, error)); ok { return rf(_a0, entity, param) } - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, params.CreateScaleSetParams) params.ScaleSet); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, params.CreateScaleSetParams) params.ScaleSet); ok { r0 = rf(_a0, entity, param) } else { r0 = ret.Get(0).(params.ScaleSet) } - if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity, params.CreateScaleSetParams) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity, params.CreateScaleSetParams) error); ok { r1 = rf(_a0, entity, param) } else { r1 = ret.Error(1) @@ -209,22 +209,22 @@ func (_m *Store) CreateGithubCredentials(ctx context.Context, param params.Creat } // CreateGithubEndpoint provides a mock function with given fields: ctx, param -func (_m *Store) CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.GithubEndpoint, error) { +func (_m *Store) CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.ForgeEndpoint, error) { ret := _m.Called(ctx, param) if len(ret) == 0 { panic("no return value specified for CreateGithubEndpoint") } - var r0 params.GithubEndpoint + var r0 params.ForgeEndpoint var r1 error - if 
rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubEndpointParams) (params.GithubEndpoint, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubEndpointParams) (params.ForgeEndpoint, error)); ok { return rf(ctx, param) } - if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubEndpointParams) params.GithubEndpoint); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubEndpointParams) params.ForgeEndpoint); ok { r0 = rf(ctx, param) } else { - r0 = ret.Get(0).(params.GithubEndpoint) + r0 = ret.Get(0).(params.ForgeEndpoint) } if rf, ok := ret.Get(1).(func(context.Context, params.CreateGithubEndpointParams) error); ok { @@ -441,7 +441,7 @@ func (_m *Store) DeleteEnterprise(ctx context.Context, enterpriseID string) erro } // DeleteEntityPool provides a mock function with given fields: ctx, entity, poolID -func (_m *Store) DeleteEntityPool(ctx context.Context, entity params.GithubEntity, poolID string) error { +func (_m *Store) DeleteEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string) error { ret := _m.Called(ctx, entity, poolID) if len(ret) == 0 { @@ -449,7 +449,7 @@ func (_m *Store) DeleteEntityPool(ctx context.Context, entity params.GithubEntit } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, string) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, string) error); ok { r0 = rf(ctx, entity, poolID) } else { r0 = ret.Error(0) @@ -621,7 +621,7 @@ func (_m *Store) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error } // FindPoolsMatchingAllTags provides a mock function with given fields: ctx, entityType, entityID, tags -func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params.GithubEntityType, entityID string, tags []string) ([]params.Pool, error) { +func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params.ForgeEntityType, entityID string, tags []string) ([]params.Pool, error) { ret := _m.Called(ctx, entityType, entityID, tags) if len(ret) == 0 { @@ -630,10 +630,10 @@ func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params var r0 []params.Pool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string, []string) ([]params.Pool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string, []string) ([]params.Pool, error)); ok { return rf(ctx, entityType, entityID, tags) } - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string, []string) []params.Pool); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string, []string) []params.Pool); ok { r0 = rf(ctx, entityType, entityID, tags) } else { if ret.Get(0) != nil { @@ -641,7 +641,7 @@ func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params } } - if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntityType, string, []string) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntityType, string, []string) error); ok { r1 = rf(ctx, entityType, entityID, tags) } else { r1 = ret.Error(1) @@ -735,7 +735,7 @@ func (_m *Store) GetEnterpriseByID(ctx context.Context, enterpriseID string) (pa } // GetEntityPool provides a mock function with given fields: ctx, entity, poolID -func (_m *Store) GetEntityPool(ctx context.Context, entity params.GithubEntity, poolID string) (params.Pool, error) { +func (_m *Store) GetEntityPool(ctx context.Context, 
entity params.ForgeEntity, poolID string) (params.Pool, error) { ret := _m.Called(ctx, entity, poolID) if len(ret) == 0 { @@ -744,16 +744,16 @@ func (_m *Store) GetEntityPool(ctx context.Context, entity params.GithubEntity, var r0 params.Pool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, string) (params.Pool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, string) (params.Pool, error)); ok { return rf(ctx, entity, poolID) } - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, string) params.Pool); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, string) params.Pool); ok { r0 = rf(ctx, entity, poolID) } else { r0 = ret.Get(0).(params.Pool) } - if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity, string) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity, string) error); ok { r1 = rf(ctx, entity, poolID) } else { r1 = ret.Error(1) @@ -819,22 +819,22 @@ func (_m *Store) GetGithubCredentialsByName(ctx context.Context, name string, de } // GetGithubEndpoint provides a mock function with given fields: ctx, name -func (_m *Store) GetGithubEndpoint(ctx context.Context, name string) (params.GithubEndpoint, error) { +func (_m *Store) GetGithubEndpoint(ctx context.Context, name string) (params.ForgeEndpoint, error) { ret := _m.Called(ctx, name) if len(ret) == 0 { panic("no return value specified for GetGithubEndpoint") } - var r0 params.GithubEndpoint + var r0 params.ForgeEndpoint var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (params.GithubEndpoint, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) (params.ForgeEndpoint, error)); ok { return rf(ctx, name) } - if rf, ok := ret.Get(0).(func(context.Context, string) params.GithubEndpoint); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) params.ForgeEndpoint); ok { r0 = rf(ctx, name) } else { - r0 = ret.Get(0).(params.GithubEndpoint) + r0 = ret.Get(0).(params.ForgeEndpoint) } if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { @@ -846,26 +846,26 @@ func (_m *Store) GetGithubEndpoint(ctx context.Context, name string) (params.Git return r0, r1 } -// GetGithubEntity provides a mock function with given fields: _a0, entityType, entityID -func (_m *Store) GetGithubEntity(_a0 context.Context, entityType params.GithubEntityType, entityID string) (params.GithubEntity, error) { +// GetForgeEntity provides a mock function with given fields: _a0, entityType, entityID +func (_m *Store) GetForgeEntity(_a0 context.Context, entityType params.ForgeEntityType, entityID string) (params.ForgeEntity, error) { ret := _m.Called(_a0, entityType, entityID) if len(ret) == 0 { - panic("no return value specified for GetGithubEntity") + panic("no return value specified for GetForgeEntity") } - var r0 params.GithubEntity + var r0 params.ForgeEntity var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string) (params.GithubEntity, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string) (params.ForgeEntity, error)); ok { return rf(_a0, entityType, entityID) } - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string) params.GithubEntity); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string) params.ForgeEntity); ok { r0 = rf(_a0, entityType, entityID) } else { - r0 = ret.Get(0).(params.GithubEntity) + r0 = 
ret.Get(0).(params.ForgeEntity) } - if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntityType, string) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntityType, string) error); ok { r1 = rf(_a0, entityType, entityID) } else { r1 = ret.Error(1) @@ -1379,7 +1379,7 @@ func (_m *Store) ListEnterprises(ctx context.Context) ([]params.Enterprise, erro } // ListEntityInstances provides a mock function with given fields: ctx, entity -func (_m *Store) ListEntityInstances(ctx context.Context, entity params.GithubEntity) ([]params.Instance, error) { +func (_m *Store) ListEntityInstances(ctx context.Context, entity params.ForgeEntity) ([]params.Instance, error) { ret := _m.Called(ctx, entity) if len(ret) == 0 { @@ -1388,10 +1388,10 @@ func (_m *Store) ListEntityInstances(ctx context.Context, entity params.GithubEn var r0 []params.Instance var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) ([]params.Instance, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) ([]params.Instance, error)); ok { return rf(ctx, entity) } - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) []params.Instance); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) []params.Instance); ok { r0 = rf(ctx, entity) } else { if ret.Get(0) != nil { @@ -1399,7 +1399,7 @@ func (_m *Store) ListEntityInstances(ctx context.Context, entity params.GithubEn } } - if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity) error); ok { r1 = rf(ctx, entity) } else { r1 = ret.Error(1) @@ -1409,7 +1409,7 @@ func (_m *Store) ListEntityInstances(ctx context.Context, entity params.GithubEn } // ListEntityJobsByStatus provides a mock function with given fields: ctx, entityType, entityID, status -func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.GithubEntityType, entityID string, status params.JobStatus) ([]params.Job, error) { +func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.ForgeEntityType, entityID string, status params.JobStatus) ([]params.Job, error) { ret := _m.Called(ctx, entityType, entityID, status) if len(ret) == 0 { @@ -1418,10 +1418,10 @@ func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.G var r0 []params.Job var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string, params.JobStatus) ([]params.Job, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string, params.JobStatus) ([]params.Job, error)); ok { return rf(ctx, entityType, entityID, status) } - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntityType, string, params.JobStatus) []params.Job); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string, params.JobStatus) []params.Job); ok { r0 = rf(ctx, entityType, entityID, status) } else { if ret.Get(0) != nil { @@ -1429,7 +1429,7 @@ func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.G } } - if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntityType, string, params.JobStatus) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntityType, string, params.JobStatus) error); ok { r1 = rf(ctx, entityType, entityID, status) } else { r1 = ret.Error(1) @@ -1439,7 +1439,7 @@ func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.G 
} // ListEntityPools provides a mock function with given fields: ctx, entity -func (_m *Store) ListEntityPools(ctx context.Context, entity params.GithubEntity) ([]params.Pool, error) { +func (_m *Store) ListEntityPools(ctx context.Context, entity params.ForgeEntity) ([]params.Pool, error) { ret := _m.Called(ctx, entity) if len(ret) == 0 { @@ -1448,10 +1448,10 @@ func (_m *Store) ListEntityPools(ctx context.Context, entity params.GithubEntity var r0 []params.Pool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) ([]params.Pool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) ([]params.Pool, error)); ok { return rf(ctx, entity) } - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) []params.Pool); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) []params.Pool); ok { r0 = rf(ctx, entity) } else { if ret.Get(0) != nil { @@ -1459,7 +1459,7 @@ func (_m *Store) ListEntityPools(ctx context.Context, entity params.GithubEntity } } - if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity) error); ok { r1 = rf(ctx, entity) } else { r1 = ret.Error(1) @@ -1469,7 +1469,7 @@ func (_m *Store) ListEntityPools(ctx context.Context, entity params.GithubEntity } // ListEntityScaleSets provides a mock function with given fields: _a0, entity -func (_m *Store) ListEntityScaleSets(_a0 context.Context, entity params.GithubEntity) ([]params.ScaleSet, error) { +func (_m *Store) ListEntityScaleSets(_a0 context.Context, entity params.ForgeEntity) ([]params.ScaleSet, error) { ret := _m.Called(_a0, entity) if len(ret) == 0 { @@ -1478,10 +1478,10 @@ func (_m *Store) ListEntityScaleSets(_a0 context.Context, entity params.GithubEn var r0 []params.ScaleSet var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) ([]params.ScaleSet, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) ([]params.ScaleSet, error)); ok { return rf(_a0, entity) } - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity) []params.ScaleSet); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity) []params.ScaleSet); ok { r0 = rf(_a0, entity) } else { if ret.Get(0) != nil { @@ -1489,7 +1489,7 @@ func (_m *Store) ListEntityScaleSets(_a0 context.Context, entity params.GithubEn } } - if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity) error); ok { r1 = rf(_a0, entity) } else { r1 = ret.Error(1) @@ -1529,23 +1529,23 @@ func (_m *Store) ListGithubCredentials(ctx context.Context) ([]params.GithubCred } // ListGithubEndpoints provides a mock function with given fields: ctx -func (_m *Store) ListGithubEndpoints(ctx context.Context) ([]params.GithubEndpoint, error) { +func (_m *Store) ListGithubEndpoints(ctx context.Context) ([]params.ForgeEndpoint, error) { ret := _m.Called(ctx) if len(ret) == 0 { panic("no return value specified for ListGithubEndpoints") } - var r0 []params.GithubEndpoint + var r0 []params.ForgeEndpoint var r1 error - if rf, ok := ret.Get(0).(func(context.Context) ([]params.GithubEndpoint, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context) ([]params.ForgeEndpoint, error)); ok { return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context) []params.GithubEndpoint); ok { + if rf, ok := ret.Get(0).(func(context.Context) []params.ForgeEndpoint); ok 
{ r0 = rf(ctx) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]params.GithubEndpoint) + r0 = ret.Get(0).([]params.ForgeEndpoint) } } @@ -1865,7 +1865,7 @@ func (_m *Store) UpdateEnterprise(ctx context.Context, enterpriseID string, para } // UpdateEntityPool provides a mock function with given fields: ctx, entity, poolID, param -func (_m *Store) UpdateEntityPool(ctx context.Context, entity params.GithubEntity, poolID string, param params.UpdatePoolParams) (params.Pool, error) { +func (_m *Store) UpdateEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string, param params.UpdatePoolParams) (params.Pool, error) { ret := _m.Called(ctx, entity, poolID, param) if len(ret) == 0 { @@ -1874,16 +1874,16 @@ func (_m *Store) UpdateEntityPool(ctx context.Context, entity params.GithubEntit var r0 params.Pool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, string, params.UpdatePoolParams) (params.Pool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, string, params.UpdatePoolParams) (params.Pool, error)); ok { return rf(ctx, entity, poolID, param) } - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, string, params.UpdatePoolParams) params.Pool); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, string, params.UpdatePoolParams) params.Pool); ok { r0 = rf(ctx, entity, poolID, param) } else { r0 = ret.Get(0).(params.Pool) } - if rf, ok := ret.Get(1).(func(context.Context, params.GithubEntity, string, params.UpdatePoolParams) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity, string, params.UpdatePoolParams) error); ok { r1 = rf(ctx, entity, poolID, param) } else { r1 = ret.Error(1) @@ -1893,7 +1893,7 @@ func (_m *Store) UpdateEntityPool(ctx context.Context, entity params.GithubEntit } // UpdateEntityScaleSet provides a mock function with given fields: _a0, entity, scaleSetID, param, callback -func (_m *Store) UpdateEntityScaleSet(_a0 context.Context, entity params.GithubEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(params.ScaleSet, params.ScaleSet) error) (params.ScaleSet, error) { +func (_m *Store) UpdateEntityScaleSet(_a0 context.Context, entity params.ForgeEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(params.ScaleSet, params.ScaleSet) error) (params.ScaleSet, error) { ret := _m.Called(_a0, entity, scaleSetID, param, callback) if len(ret) == 0 { @@ -1902,16 +1902,16 @@ func (_m *Store) UpdateEntityScaleSet(_a0 context.Context, entity params.GithubE var r0 params.ScaleSet var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) (params.ScaleSet, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) (params.ScaleSet, error)); ok { return rf(_a0, entity, scaleSetID, param, callback) } - if rf, ok := ret.Get(0).(func(context.Context, params.GithubEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) params.ScaleSet); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) params.ScaleSet); ok { r0 = rf(_a0, entity, scaleSetID, param, callback) } else { r0 = ret.Get(0).(params.ScaleSet) } - if rf, ok := ret.Get(1).(func(context.Context, 
params.GithubEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) error); ok { r1 = rf(_a0, entity, scaleSetID, param, callback) } else { r1 = ret.Error(1) @@ -1949,22 +1949,22 @@ func (_m *Store) UpdateGithubCredentials(ctx context.Context, id uint, param par } // UpdateGithubEndpoint provides a mock function with given fields: ctx, name, param -func (_m *Store) UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.GithubEndpoint, error) { +func (_m *Store) UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.ForgeEndpoint, error) { ret := _m.Called(ctx, name, param) if len(ret) == 0 { panic("no return value specified for UpdateGithubEndpoint") } - var r0 params.GithubEndpoint + var r0 params.ForgeEndpoint var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGithubEndpointParams) (params.GithubEndpoint, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGithubEndpointParams) (params.ForgeEndpoint, error)); ok { return rf(ctx, name, param) } - if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGithubEndpointParams) params.GithubEndpoint); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGithubEndpointParams) params.ForgeEndpoint); ok { r0 = rf(ctx, name, param) } else { - r0 = ret.Get(0).(params.GithubEndpoint) + r0 = ret.Get(0).(params.ForgeEndpoint) } if rf, ok := ret.Get(1).(func(context.Context, string, params.UpdateGithubEndpointParams) error); ok { diff --git a/database/common/store.go b/database/common/store.go index 65fd1343..e5458eaf 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -21,10 +21,10 @@ import ( ) type GithubEndpointStore interface { - CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.GithubEndpoint, error) - GetGithubEndpoint(ctx context.Context, name string) (params.GithubEndpoint, error) - ListGithubEndpoints(ctx context.Context) ([]params.GithubEndpoint, error) - UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.GithubEndpoint, error) + CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.ForgeEndpoint, error) + GetGithubEndpoint(ctx context.Context, name string) (params.ForgeEndpoint, error) + ListGithubEndpoints(ctx context.Context) ([]params.ForgeEndpoint, error) + UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.ForgeEndpoint, error) DeleteGithubEndpoint(ctx context.Context, name string) error } @@ -76,7 +76,7 @@ type PoolStore interface { PoolInstanceCount(ctx context.Context, poolID string) (int64, error) GetPoolInstanceByName(ctx context.Context, poolID string, instanceName string) (params.Instance, error) - FindPoolsMatchingAllTags(ctx context.Context, entityType params.GithubEntityType, entityID string, tags []string) ([]params.Pool, error) + FindPoolsMatchingAllTags(ctx context.Context, entityType params.ForgeEntityType, entityID string, tags []string) ([]params.Pool, error) } type UserStore interface { @@ -107,7 +107,7 @@ type InstanceStore interface { type JobsStore interface { CreateOrUpdateJob(ctx context.Context, job params.Job) (params.Job, 
error) - ListEntityJobsByStatus(ctx context.Context, entityType params.GithubEntityType, entityID string, status params.JobStatus) ([]params.Job, error) + ListEntityJobsByStatus(ctx context.Context, entityType params.ForgeEntityType, entityID string, status params.JobStatus) ([]params.Job, error) ListJobsByStatus(ctx context.Context, status params.JobStatus) ([]params.Job, error) ListAllJobs(ctx context.Context) ([]params.Job, error) @@ -121,13 +121,13 @@ type JobsStore interface { } type EntityPoolStore interface { - CreateEntityPool(ctx context.Context, entity params.GithubEntity, param params.CreatePoolParams) (params.Pool, error) - GetEntityPool(ctx context.Context, entity params.GithubEntity, poolID string) (params.Pool, error) - DeleteEntityPool(ctx context.Context, entity params.GithubEntity, poolID string) error - UpdateEntityPool(ctx context.Context, entity params.GithubEntity, poolID string, param params.UpdatePoolParams) (params.Pool, error) + CreateEntityPool(ctx context.Context, entity params.ForgeEntity, param params.CreatePoolParams) (params.Pool, error) + GetEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string) (params.Pool, error) + DeleteEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string) error + UpdateEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string, param params.UpdatePoolParams) (params.Pool, error) - ListEntityPools(ctx context.Context, entity params.GithubEntity) ([]params.Pool, error) - ListEntityInstances(ctx context.Context, entity params.GithubEntity) ([]params.Instance, error) + ListEntityPools(ctx context.Context, entity params.ForgeEntity) ([]params.Pool, error) + ListEntityInstances(ctx context.Context, entity params.ForgeEntity) ([]params.Instance, error) } type ControllerStore interface { @@ -138,9 +138,9 @@ type ControllerStore interface { type ScaleSetsStore interface { ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error) - CreateEntityScaleSet(_ context.Context, entity params.GithubEntity, param params.CreateScaleSetParams) (scaleSet params.ScaleSet, err error) - ListEntityScaleSets(_ context.Context, entity params.GithubEntity) ([]params.ScaleSet, error) - UpdateEntityScaleSet(_ context.Context, entity params.GithubEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, newSet params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) + CreateEntityScaleSet(_ context.Context, entity params.ForgeEntity, param params.CreateScaleSetParams) (scaleSet params.ScaleSet, err error) + ListEntityScaleSets(_ context.Context, entity params.ForgeEntity) ([]params.ScaleSet, error) + UpdateEntityScaleSet(_ context.Context, entity params.ForgeEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, newSet params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) (err error) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error @@ -170,6 +170,6 @@ type Store interface { ControllerInfo() (params.ControllerInfo, error) InitController() (params.ControllerInfo, error) - GetGithubEntity(_ context.Context, entityType params.GithubEntityType, entityID string) (params.GithubEntity, error) - AddEntityEvent(ctx context.Context, entity params.GithubEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error + GetForgeEntity(_ 
context.Context, entityType params.ForgeEntityType, entityID string) (params.ForgeEntity, error) + AddEntityEvent(ctx context.Context, entity params.ForgeEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error } diff --git a/database/sql/enterprise_test.go b/database/sql/enterprise_test.go index 3cfcbc32..3e8f6493 100644 --- a/database/sql/enterprise_test.go +++ b/database/sql/enterprise_test.go @@ -55,7 +55,7 @@ type EnterpriseTestSuite struct { testCreds params.GithubCredentials secondaryTestCreds params.GithubCredentials - githubEndpoint params.GithubEndpoint + githubEndpoint params.ForgeEndpoint } func (s *EnterpriseTestSuite) equalInstancesByName(expected, actual []params.Instance) { @@ -490,9 +490,9 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolMissingTags() { } func (s *EnterpriseTestSuite) TestCreateEnterprisePoolInvalidEnterpriseID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-enterprise-id", - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } _, err := s.Store.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) @@ -637,9 +637,9 @@ func (s *EnterpriseTestSuite) TestListEnterprisePools() { } func (s *EnterpriseTestSuite) TestListEnterprisePoolsInvalidEnterpriseID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-enterprise-id", - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } _, err := s.Store.ListEntityPools(s.adminCtx, entity) @@ -662,9 +662,9 @@ func (s *EnterpriseTestSuite) TestGetEnterprisePool() { } func (s *EnterpriseTestSuite) TestGetEnterprisePoolInvalidEnterpriseID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-enterprise-id", - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } _, err := s.Store.GetEntityPool(s.adminCtx, entity, "dummy-pool-id") @@ -688,9 +688,9 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePool() { } func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolInvalidEnterpriseID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-enterprise-id", - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } err := s.Store.DeleteEntityPool(s.adminCtx, entity, "dummy-pool-id") @@ -743,9 +743,9 @@ func (s *EnterpriseTestSuite) TestListEnterpriseInstances() { } func (s *EnterpriseTestSuite) TestListEnterpriseInstancesInvalidEnterpriseID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-enterprise-id", - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } _, err := s.Store.ListEntityInstances(s.adminCtx, entity) @@ -771,9 +771,9 @@ func (s *EnterpriseTestSuite) TestUpdateEnterprisePool() { } func (s *EnterpriseTestSuite) TestUpdateEnterprisePoolInvalidEnterpriseID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-enterprise-id", - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } _, err := s.Store.UpdateEntityPool(s.adminCtx, entity, "dummy-pool-id", s.Fixtures.UpdatePoolParams) diff --git a/database/sql/github.go b/database/sql/github.go index 2fb46d65..861c824c 100644 --- a/database/sql/github.go +++ b/database/sql/github.go @@ -88,8 +88,8 @@ func (s *sqlDatabase) sqlToCommonGithubCredentials(creds GithubCredentials) (par return 
commonCreds, nil } -func (s *sqlDatabase) sqlToCommonGithubEndpoint(ep GithubEndpoint) (params.GithubEndpoint, error) { - return params.GithubEndpoint{ +func (s *sqlDatabase) sqlToCommonGithubEndpoint(ep GithubEndpoint) (params.ForgeEndpoint, error) { + return params.ForgeEndpoint{ Name: ep.Name, Description: ep.Description, APIBaseURL: ep.APIBaseURL, @@ -115,7 +115,7 @@ func getUIDFromContext(ctx context.Context) (uuid.UUID, error) { return asUUID, nil } -func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.CreateGithubEndpointParams) (ghEndpoint params.GithubEndpoint, err error) { +func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.CreateGithubEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) { defer func() { if err == nil { s.sendNotify(common.GithubEndpointEntityType, common.CreateOperation, ghEndpoint) @@ -141,23 +141,23 @@ func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.Creat return nil }) if err != nil { - return params.GithubEndpoint{}, errors.Wrap(err, "creating github endpoint") + return params.ForgeEndpoint{}, errors.Wrap(err, "creating github endpoint") } ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint) if err != nil { - return params.GithubEndpoint{}, errors.Wrap(err, "converting github endpoint") + return params.ForgeEndpoint{}, errors.Wrap(err, "converting github endpoint") } return ghEndpoint, nil } -func (s *sqlDatabase) ListGithubEndpoints(_ context.Context) ([]params.GithubEndpoint, error) { +func (s *sqlDatabase) ListGithubEndpoints(_ context.Context) ([]params.ForgeEndpoint, error) { var endpoints []GithubEndpoint err := s.conn.Find(&endpoints).Error if err != nil { return nil, errors.Wrap(err, "fetching github endpoints") } - var ret []params.GithubEndpoint + var ret []params.ForgeEndpoint for _, ep := range endpoints { commonEp, err := s.sqlToCommonGithubEndpoint(ep) if err != nil { @@ -168,9 +168,9 @@ func (s *sqlDatabase) ListGithubEndpoints(_ context.Context) ([]params.GithubEnd return ret, nil } -func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param params.UpdateGithubEndpointParams) (ghEndpoint params.GithubEndpoint, err error) { +func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param params.UpdateGithubEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) { if name == defaultGithubEndpoint { - return params.GithubEndpoint{}, errors.Wrap(runnerErrors.ErrBadRequest, "cannot update default github endpoint") + return params.ForgeEndpoint{}, errors.Wrap(runnerErrors.ErrBadRequest, "cannot update default github endpoint") } defer func() { @@ -213,24 +213,24 @@ func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param return nil }) if err != nil { - return params.GithubEndpoint{}, errors.Wrap(err, "updating github endpoint") + return params.ForgeEndpoint{}, errors.Wrap(err, "updating github endpoint") } ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint) if err != nil { - return params.GithubEndpoint{}, errors.Wrap(err, "converting github endpoint") + return params.ForgeEndpoint{}, errors.Wrap(err, "converting github endpoint") } return ghEndpoint, nil } -func (s *sqlDatabase) GetGithubEndpoint(_ context.Context, name string) (params.GithubEndpoint, error) { +func (s *sqlDatabase) GetGithubEndpoint(_ context.Context, name string) (params.ForgeEndpoint, error) { var endpoint GithubEndpoint err := s.conn.Where("name = ?", name).First(&endpoint).Error if err != nil { if errors.Is(err, 
gorm.ErrRecordNotFound) { - return params.GithubEndpoint{}, errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found") + return params.ForgeEndpoint{}, errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found") } - return params.GithubEndpoint{}, errors.Wrap(err, "fetching github endpoint") + return params.ForgeEndpoint{}, errors.Wrap(err, "fetching github endpoint") } return s.sqlToCommonGithubEndpoint(endpoint) @@ -243,7 +243,7 @@ func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err defer func() { if err == nil { - s.sendNotify(common.GithubEndpointEntityType, common.DeleteOperation, params.GithubEndpoint{Name: name}) + s.sendNotify(common.GithubEndpointEntityType, common.DeleteOperation, params.ForgeEndpoint{Name: name}) } }() err = s.conn.Transaction(func(tx *gorm.DB) error { @@ -329,9 +329,9 @@ func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params. var data []byte var err error switch param.AuthType { - case params.GithubAuthTypePAT: + case params.ForgeAuthTypePAT: data, err = s.marshalAndSeal(param.PAT) - case params.GithubAuthTypeApp: + case params.ForgeAuthTypeApp: data, err = s.marshalAndSeal(param.App) default: return errors.Wrap(runnerErrors.ErrBadRequest, "invalid auth type") @@ -495,7 +495,7 @@ func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, para var data []byte var err error switch creds.AuthType { - case params.GithubAuthTypePAT: + case params.ForgeAuthTypePAT: if param.PAT != nil { data, err = s.marshalAndSeal(param.PAT) } @@ -503,7 +503,7 @@ func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, para if param.App != nil { return errors.Wrap(runnerErrors.ErrBadRequest, "cannot update app credentials for PAT") } - case params.GithubAuthTypeApp: + case params.ForgeAuthTypeApp: if param.App != nil { data, err = s.marshalAndSeal(param.App) } diff --git a/database/sql/github_test.go b/database/sql/github_test.go index 9d53569a..e46d963d 100644 --- a/database/sql/github_test.go +++ b/database/sql/github_test.go @@ -266,7 +266,7 @@ func (s *GithubTestSuite) TestCreateCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -290,7 +290,7 @@ func (s *GithubTestSuite) TestCreateCredentialsFailsOnDuplicateCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -320,7 +320,7 @@ func (s *GithubTestSuite) TestNormalUsersCanOnlySeeTheirOwnCredentialsAdminCanSe Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -376,7 +376,7 @@ func (s *GithubTestSuite) TestGetGithubCredentialsByNameReturnsOnlyCurrentUserCr Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -421,7 +421,7 @@ func (s *GithubTestSuite) TestGetGithubCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ 
OAuth2Token: "test", }, @@ -451,7 +451,7 @@ func (s *GithubTestSuite) TestDeleteGithubCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -478,7 +478,7 @@ func (s *GithubTestSuite) TestDeleteGithubCredentialsByNonAdminUser() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test-creds4", }, @@ -523,7 +523,7 @@ func (s *GithubTestSuite) TestDeleteCredentialsFailsIfReposOrgsOrEntitiesUseIt() Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -581,7 +581,7 @@ func (s *GithubTestSuite) TestUpdateCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -616,7 +616,7 @@ func (s *GithubTestSuite) TestUpdateGithubCredentialsFailIfWrongCredentialTypeIs Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test", }, @@ -643,7 +643,7 @@ func (s *GithubTestSuite) TestUpdateGithubCredentialsFailIfWrongCredentialTypeIs Name: "test-credsApp", Description: "test credsApp", Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypeApp, + AuthType: params.ForgeAuthTypeApp, App: params.GithubApp{ AppID: 1, InstallationID: 2, @@ -688,7 +688,7 @@ func (s *GithubTestSuite) TestUpdateCredentialsFailsIfCredentialsAreOwnedByNonAd Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test-creds5", }, @@ -717,7 +717,7 @@ func (s *GithubTestSuite) TestAdminUserCanUpdateAnyGithubCredentials() { Name: testCredsName, Description: testCredsDescription, Endpoint: defaultGithubEndpoint, - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "test-creds5", }, @@ -836,10 +836,10 @@ func TestCredentialsAndEndpointMigration(t *testing.T) { t.Fatalf("expected ghes-test to be associated with example.com endpoint, got %s", creds[1].Endpoint.Name) } - if creds[0].AuthType != params.GithubAuthTypePAT { + if creds[0].AuthType != params.ForgeAuthTypePAT { t.Fatalf("expected test-creds to have PAT auth type, got %s", creds[0].AuthType) } - if creds[1].AuthType != params.GithubAuthTypeApp { + if creds[1].AuthType != params.ForgeAuthTypeApp { t.Fatalf("expected ghes-test to have App auth type, got %s", creds[1].AuthType) } if len(creds[0].CredentialsPayload) == 0 { diff --git a/database/sql/jobs.go b/database/sql/jobs.go index b7dda926..7f9b7b00 100644 --- a/database/sql/jobs.go +++ b/database/sql/jobs.go @@ -306,7 +306,7 @@ func (s *sqlDatabase) ListJobsByStatus(_ context.Context, status params.JobStatu } // ListEntityJobsByStatus lists all jobs for a given entity type and id. 
-func (s *sqlDatabase) ListEntityJobsByStatus(_ context.Context, entityType params.GithubEntityType, entityID string, status params.JobStatus) ([]params.Job, error) { +func (s *sqlDatabase) ListEntityJobsByStatus(_ context.Context, entityType params.ForgeEntityType, entityID string, status params.JobStatus) ([]params.Job, error) { u, err := uuid.Parse(entityID) if err != nil { return nil, err @@ -316,11 +316,11 @@ func (s *sqlDatabase) ListEntityJobsByStatus(_ context.Context, entityType param query := s.conn.Model(&WorkflowJob{}).Preload("Instance").Where("status = ?", status) switch entityType { - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: query = query.Where("org_id = ?", u) - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: query = query.Where("repo_id = ?", u) - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: query = query.Where("enterprise_id = ?", u) } diff --git a/database/sql/models.go b/database/sql/models.go index 2a04db0c..2accccc4 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -397,9 +397,9 @@ type GithubCredentials struct { UserID *uuid.UUID `gorm:"index:idx_github_credentials,unique"` User User `gorm:"foreignKey:UserID"` - Description string `gorm:"type:text"` - AuthType params.GithubAuthType `gorm:"index"` - Payload []byte `gorm:"type:longblob"` + Description string `gorm:"type:text"` + AuthType params.ForgeAuthType `gorm:"index"` + Payload []byte `gorm:"type:longblob"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName"` EndpointName *string `gorm:"index"` diff --git a/database/sql/organizations_test.go b/database/sql/organizations_test.go index b30ea701..030a3abe 100644 --- a/database/sql/organizations_test.go +++ b/database/sql/organizations_test.go @@ -55,7 +55,7 @@ type OrgTestSuite struct { testCreds params.GithubCredentials secondaryTestCreds params.GithubCredentials - githubEndpoint params.GithubEndpoint + githubEndpoint params.ForgeEndpoint } func (s *OrgTestSuite) equalInstancesByName(expected, actual []params.Instance) { @@ -492,9 +492,9 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolMissingTags() { } func (s *OrgTestSuite) TestCreateOrganizationPoolInvalidOrgID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-org-id", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } _, err := s.Store.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) @@ -640,9 +640,9 @@ func (s *OrgTestSuite) TestListOrgPools() { } func (s *OrgTestSuite) TestListOrgPoolsInvalidOrgID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-org-id", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } _, err := s.Store.ListEntityPools(s.adminCtx, entity) @@ -665,9 +665,9 @@ func (s *OrgTestSuite) TestGetOrganizationPool() { } func (s *OrgTestSuite) TestGetOrganizationPoolInvalidOrgID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-org-id", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } _, err := s.Store.GetEntityPool(s.adminCtx, entity, "dummy-pool-id") @@ -691,9 +691,9 @@ func (s *OrgTestSuite) TestDeleteOrganizationPool() { } func (s *OrgTestSuite) TestDeleteOrganizationPoolInvalidOrgID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-org-id", - EntityType: 
params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } err := s.Store.DeleteEntityPool(s.adminCtx, entity, "dummy-pool-id") @@ -748,9 +748,9 @@ func (s *OrgTestSuite) TestListOrgInstances() { } func (s *OrgTestSuite) TestListOrgInstancesInvalidOrgID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-org-id", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } _, err := s.Store.ListEntityInstances(s.adminCtx, entity) @@ -776,9 +776,9 @@ func (s *OrgTestSuite) TestUpdateOrganizationPool() { } func (s *OrgTestSuite) TestUpdateOrganizationPoolInvalidOrgID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-org-id", - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } _, err := s.Store.UpdateEntityPool(s.adminCtx, entity, "dummy-pool-id", s.Fixtures.UpdatePoolParams) diff --git a/database/sql/pools.go b/database/sql/pools.go index 5cb6d136..24476fe8 100644 --- a/database/sql/pools.go +++ b/database/sql/pools.go @@ -86,7 +86,7 @@ func (s *sqlDatabase) DeletePoolByID(_ context.Context, poolID string) (err erro return nil } -func (s *sqlDatabase) getEntityPool(tx *gorm.DB, entityType params.GithubEntityType, entityID, poolID string, preload ...string) (Pool, error) { +func (s *sqlDatabase) getEntityPool(tx *gorm.DB, entityType params.ForgeEntityType, entityID, poolID string, preload ...string) (Pool, error) { if entityID == "" { return Pool{}, errors.Wrap(runnerErrors.ErrBadRequest, "missing entity id") } @@ -99,13 +99,13 @@ func (s *sqlDatabase) getEntityPool(tx *gorm.DB, entityType params.GithubEntityT var fieldName string var entityField string switch entityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: fieldName = entityTypeRepoName entityField = repositoryFieldName - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: fieldName = entityTypeOrgName entityField = organizationFieldName - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: fieldName = entityTypeEnterpriseName entityField = enterpriseFieldName default: @@ -135,7 +135,7 @@ func (s *sqlDatabase) getEntityPool(tx *gorm.DB, entityType params.GithubEntityT return pool, nil } -func (s *sqlDatabase) listEntityPools(tx *gorm.DB, entityType params.GithubEntityType, entityID string, preload ...string) ([]Pool, error) { +func (s *sqlDatabase) listEntityPools(tx *gorm.DB, entityType params.ForgeEntityType, entityID string, preload ...string) ([]Pool, error) { if _, err := uuid.Parse(entityID); err != nil { return nil, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") } @@ -147,13 +147,13 @@ func (s *sqlDatabase) listEntityPools(tx *gorm.DB, entityType params.GithubEntit var preloadEntity string var fieldName string switch entityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: fieldName = entityTypeRepoName preloadEntity = "Repository" - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: fieldName = entityTypeOrgName preloadEntity = "Organization" - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: fieldName = entityTypeEnterpriseName preloadEntity = "Enterprise" default: @@ -184,7 +184,7 @@ func (s *sqlDatabase) listEntityPools(tx *gorm.DB, entityType params.GithubEntit return pools, nil } -func (s *sqlDatabase) findPoolByTags(id string, 
poolType params.GithubEntityType, tags []string) ([]params.Pool, error) { +func (s *sqlDatabase) findPoolByTags(id string, poolType params.ForgeEntityType, tags []string) ([]params.Pool, error) { if len(tags) == 0 { return nil, runnerErrors.NewBadRequestError("missing tags") } @@ -195,11 +195,11 @@ func (s *sqlDatabase) findPoolByTags(id string, poolType params.GithubEntityType var fieldName string switch poolType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: fieldName = entityTypeRepoName - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: fieldName = entityTypeOrgName - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: fieldName = entityTypeEnterpriseName default: return nil, fmt.Errorf("invalid poolType: %v", poolType) @@ -238,7 +238,7 @@ func (s *sqlDatabase) findPoolByTags(id string, poolType params.GithubEntityType return ret, nil } -func (s *sqlDatabase) FindPoolsMatchingAllTags(_ context.Context, entityType params.GithubEntityType, entityID string, tags []string) ([]params.Pool, error) { +func (s *sqlDatabase) FindPoolsMatchingAllTags(_ context.Context, entityType params.ForgeEntityType, entityID string, tags []string) ([]params.Pool, error) { if len(tags) == 0 { return nil, runnerErrors.NewBadRequestError("missing tags") } @@ -254,7 +254,7 @@ func (s *sqlDatabase) FindPoolsMatchingAllTags(_ context.Context, entityType par return pools, nil } -func (s *sqlDatabase) CreateEntityPool(_ context.Context, entity params.GithubEntity, param params.CreatePoolParams) (pool params.Pool, err error) { +func (s *sqlDatabase) CreateEntityPool(_ context.Context, entity params.ForgeEntity, param params.CreatePoolParams) (pool params.Pool, err error) { if len(param.Tags) == 0 { return params.Pool{}, runnerErrors.NewBadRequestError("no tags specified") } @@ -289,11 +289,11 @@ func (s *sqlDatabase) CreateEntityPool(_ context.Context, entity params.GithubEn } switch entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: newPool.RepoID = &entityID - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: newPool.OrgID = &entityID - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: newPool.EnterpriseID = &entityID } err = s.conn.Transaction(func(tx *gorm.DB) error { @@ -334,7 +334,7 @@ func (s *sqlDatabase) CreateEntityPool(_ context.Context, entity params.GithubEn return s.sqlToCommonPool(dbPool) } -func (s *sqlDatabase) GetEntityPool(_ context.Context, entity params.GithubEntity, poolID string) (params.Pool, error) { +func (s *sqlDatabase) GetEntityPool(_ context.Context, entity params.ForgeEntity, poolID string) (params.Pool, error) { pool, err := s.getEntityPool(s.conn, entity.EntityType, entity.ID, poolID, "Tags", "Instances") if err != nil { return params.Pool{}, fmt.Errorf("fetching pool: %w", err) @@ -342,7 +342,7 @@ func (s *sqlDatabase) GetEntityPool(_ context.Context, entity params.GithubEntit return s.sqlToCommonPool(pool) } -func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.GithubEntity, poolID string) (err error) { +func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.ForgeEntity, poolID string) (err error) { entityID, err := uuid.Parse(entity.ID) if err != nil { return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") @@ -363,11 +363,11 @@ func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.GithubEn } var fieldName 
string switch entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: fieldName = entityTypeRepoName - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: fieldName = entityTypeOrgName - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: fieldName = entityTypeEnterpriseName default: return fmt.Errorf("invalid entityType: %v", entity.EntityType) @@ -379,7 +379,7 @@ func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.GithubEn return nil } -func (s *sqlDatabase) UpdateEntityPool(_ context.Context, entity params.GithubEntity, poolID string, param params.UpdatePoolParams) (updatedPool params.Pool, err error) { +func (s *sqlDatabase) UpdateEntityPool(_ context.Context, entity params.ForgeEntity, poolID string, param params.UpdatePoolParams) (updatedPool params.Pool, err error) { defer func() { if err == nil { s.sendNotify(common.PoolEntityType, common.UpdateOperation, updatedPool) @@ -403,7 +403,7 @@ func (s *sqlDatabase) UpdateEntityPool(_ context.Context, entity params.GithubEn return updatedPool, nil } -func (s *sqlDatabase) ListEntityPools(_ context.Context, entity params.GithubEntity) ([]params.Pool, error) { +func (s *sqlDatabase) ListEntityPools(_ context.Context, entity params.ForgeEntity) ([]params.Pool, error) { pools, err := s.listEntityPools(s.conn, entity.EntityType, entity.ID, "Tags") if err != nil { return nil, errors.Wrap(err, "fetching pools") @@ -420,7 +420,7 @@ func (s *sqlDatabase) ListEntityPools(_ context.Context, entity params.GithubEnt return ret, nil } -func (s *sqlDatabase) ListEntityInstances(_ context.Context, entity params.GithubEntity) ([]params.Instance, error) { +func (s *sqlDatabase) ListEntityInstances(_ context.Context, entity params.ForgeEntity) ([]params.Instance, error) { pools, err := s.listEntityPools(s.conn, entity.EntityType, entity.ID, "Instances", "Instances.Job") if err != nil { return nil, errors.Wrap(err, "fetching entity") diff --git a/database/sql/repositories_test.go b/database/sql/repositories_test.go index 484742ae..f43b9357 100644 --- a/database/sql/repositories_test.go +++ b/database/sql/repositories_test.go @@ -60,7 +60,7 @@ type RepoTestSuite struct { testCreds params.GithubCredentials secondaryTestCreds params.GithubCredentials - githubEndpoint params.GithubEndpoint + githubEndpoint params.ForgeEndpoint } func (s *RepoTestSuite) equalReposByName(expected, actual []params.Repository) { @@ -541,9 +541,9 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolMissingTags() { } func (s *RepoTestSuite) TestCreateRepositoryPoolInvalidRepoID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-repo-id", - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } _, err := s.Store.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) @@ -692,9 +692,9 @@ func (s *RepoTestSuite) TestListRepoPools() { } func (s *RepoTestSuite) TestListRepoPoolsInvalidRepoID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-repo-id", - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } _, err := s.Store.ListEntityPools(s.adminCtx, entity) @@ -717,9 +717,9 @@ func (s *RepoTestSuite) TestGetRepositoryPool() { } func (s *RepoTestSuite) TestGetRepositoryPoolInvalidRepoID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-repo-id", - EntityType: 
params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } _, err := s.Store.GetEntityPool(s.adminCtx, entity, "dummy-pool-id") @@ -743,9 +743,9 @@ func (s *RepoTestSuite) TestDeleteRepositoryPool() { } func (s *RepoTestSuite) TestDeleteRepositoryPoolInvalidRepoID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-repo-id", - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } err := s.Store.DeleteEntityPool(s.adminCtx, entity, "dummy-pool-id") @@ -799,9 +799,9 @@ func (s *RepoTestSuite) TestListRepoInstances() { } func (s *RepoTestSuite) TestListRepoInstancesInvalidRepoID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-repo-id", - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } _, err := s.Store.ListEntityInstances(s.adminCtx, entity) @@ -827,9 +827,9 @@ func (s *RepoTestSuite) TestUpdateRepositoryPool() { } func (s *RepoTestSuite) TestUpdateRepositoryPoolInvalidRepoID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: "dummy-repo-id", - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } _, err := s.Store.UpdateEntityPool(s.adminCtx, entity, "dummy-repo-id", s.Fixtures.UpdatePoolParams) diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go index e4bd28f3..1d272704 100644 --- a/database/sql/scalesets.go +++ b/database/sql/scalesets.go @@ -53,7 +53,7 @@ func (s *sqlDatabase) ListAllScaleSets(_ context.Context) ([]params.ScaleSet, er return ret, nil } -func (s *sqlDatabase) CreateEntityScaleSet(_ context.Context, entity params.GithubEntity, param params.CreateScaleSetParams) (scaleSet params.ScaleSet, err error) { +func (s *sqlDatabase) CreateEntityScaleSet(_ context.Context, entity params.ForgeEntity, param params.CreateScaleSetParams) (scaleSet params.ScaleSet, err error) { if err := param.Validate(); err != nil { return params.ScaleSet{}, fmt.Errorf("failed to validate create params: %w", err) } @@ -92,11 +92,11 @@ func (s *sqlDatabase) CreateEntityScaleSet(_ context.Context, entity params.Gith } switch entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: newScaleSet.RepoID = &entityID - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: newScaleSet.OrgID = &entityID - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: newScaleSet.EnterpriseID = &entityID } err = s.conn.Transaction(func(tx *gorm.DB) error { @@ -123,7 +123,7 @@ func (s *sqlDatabase) CreateEntityScaleSet(_ context.Context, entity params.Gith return s.sqlToCommonScaleSet(dbScaleSet) } -func (s *sqlDatabase) listEntityScaleSets(tx *gorm.DB, entityType params.GithubEntityType, entityID string, preload ...string) ([]ScaleSet, error) { +func (s *sqlDatabase) listEntityScaleSets(tx *gorm.DB, entityType params.ForgeEntityType, entityID string, preload ...string) ([]ScaleSet, error) { if _, err := uuid.Parse(entityID); err != nil { return nil, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") } @@ -135,13 +135,13 @@ func (s *sqlDatabase) listEntityScaleSets(tx *gorm.DB, entityType params.GithubE var preloadEntity string var fieldName string switch entityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: fieldName = entityTypeRepoName preloadEntity = repositoryFieldName - case params.GithubEntityTypeOrganization: + case 
params.ForgeEntityTypeOrganization: fieldName = entityTypeOrgName preloadEntity = organizationFieldName - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: fieldName = entityTypeEnterpriseName preloadEntity = enterpriseFieldName default: @@ -173,7 +173,7 @@ func (s *sqlDatabase) listEntityScaleSets(tx *gorm.DB, entityType params.GithubE return scaleSets, nil } -func (s *sqlDatabase) ListEntityScaleSets(_ context.Context, entity params.GithubEntity) ([]params.ScaleSet, error) { +func (s *sqlDatabase) ListEntityScaleSets(_ context.Context, entity params.ForgeEntity) ([]params.ScaleSet, error) { scaleSets, err := s.listEntityScaleSets(s.conn, entity.EntityType, entity.ID) if err != nil { return nil, errors.Wrap(err, "fetching scale sets") @@ -190,7 +190,7 @@ func (s *sqlDatabase) ListEntityScaleSets(_ context.Context, entity params.Githu return ret, nil } -func (s *sqlDatabase) UpdateEntityScaleSet(_ context.Context, entity params.GithubEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, newSet params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) { +func (s *sqlDatabase) UpdateEntityScaleSet(_ context.Context, entity params.ForgeEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, newSet params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) { defer func() { if err == nil { s.sendNotify(common.ScaleSetEntityType, common.UpdateOperation, updatedScaleSet) @@ -225,7 +225,7 @@ func (s *sqlDatabase) UpdateEntityScaleSet(_ context.Context, entity params.Gith return updatedScaleSet, nil } -func (s *sqlDatabase) getEntityScaleSet(tx *gorm.DB, entityType params.GithubEntityType, entityID string, scaleSetID uint, preload ...string) (ScaleSet, error) { +func (s *sqlDatabase) getEntityScaleSet(tx *gorm.DB, entityType params.ForgeEntityType, entityID string, scaleSetID uint, preload ...string) (ScaleSet, error) { if entityID == "" { return ScaleSet{}, errors.Wrap(runnerErrors.ErrBadRequest, "missing entity id") } @@ -237,13 +237,13 @@ func (s *sqlDatabase) getEntityScaleSet(tx *gorm.DB, entityType params.GithubEnt var fieldName string var entityField string switch entityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: fieldName = entityTypeRepoName entityField = "Repository" - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: fieldName = entityTypeOrgName entityField = "Organization" - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: fieldName = entityTypeEnterpriseName entityField = "Enterprise" default: diff --git a/database/sql/scalesets_test.go b/database/sql/scalesets_test.go index 951c3735..9b8b241d 100644 --- a/database/sql/scalesets_test.go +++ b/database/sql/scalesets_test.go @@ -25,9 +25,9 @@ type ScaleSetsTestSuite struct { repo params.Repository enterprise params.Enterprise - orgEntity params.GithubEntity - repoEntity params.GithubEntity - enterpriseEntity params.GithubEntity + orgEntity params.ForgeEntity + repoEntity params.ForgeEntity + enterpriseEntity params.ForgeEntity } func (s *ScaleSetsTestSuite) SetupTest() { @@ -298,7 +298,7 @@ func (s *ScaleSetsTestSuite) TestScaleSetOperations() { }) s.T().Run("update scaleset with invalid entity", func(_ *testing.T) { - _, err = s.Store.UpdateEntityScaleSet(s.adminCtx, params.GithubEntity{}, enterpriseScaleSet.ID, params.UpdateScaleSetParams{}, nil) + _, err = s.Store.UpdateEntityScaleSet(s.adminCtx, params.ForgeEntity{}, 
enterpriseScaleSet.ID, params.UpdateScaleSetParams{}, nil) s.Require().Error(err) s.Require().Contains(err.Error(), "missing entity id") }) diff --git a/database/sql/sql.go b/database/sql/sql.go index 86729ad6..82601316 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -299,7 +299,7 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { CACertBundle: certBundle, } - var endpoint params.GithubEndpoint + var endpoint params.ForgeEndpoint endpoint, err = s.GetGithubEndpoint(adminCtx, hostname) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { @@ -315,10 +315,10 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { Name: cred.Name, Description: cred.Description, Endpoint: endpoint.Name, - AuthType: params.GithubAuthType(cred.GetAuthType()), + AuthType: params.ForgeAuthType(cred.GetAuthType()), } switch credParams.AuthType { - case params.GithubAuthTypeApp: + case params.ForgeAuthTypeApp: keyBytes, err := cred.App.PrivateKeyBytes() if err != nil { return errors.Wrap(err, "getting private key bytes") @@ -332,7 +332,7 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { if err := credParams.App.Validate(); err != nil { return errors.Wrap(err, "validating app credentials") } - case params.GithubAuthTypePAT: + case params.ForgeAuthTypePAT: token := cred.PAT.OAuth2Token if token == "" { token = cred.OAuth2Token diff --git a/database/sql/util.go b/database/sql/util.go index 62f22179..a2531449 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -546,18 +546,18 @@ func (s *sqlDatabase) getScaleSetByID(tx *gorm.DB, scaleSetID uint, preload ...s return scaleSet, nil } -func (s *sqlDatabase) hasGithubEntity(tx *gorm.DB, entityType params.GithubEntityType, entityID string) error { +func (s *sqlDatabase) hasGithubEntity(tx *gorm.DB, entityType params.ForgeEntityType, entityID string) error { u, err := uuid.Parse(entityID) if err != nil { return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") } var q *gorm.DB switch entityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: q = tx.Model(&Repository{}).Where("id = ?", u) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: q = tx.Model(&Organization{}).Where("id = ?", u) - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: q = tx.Model(&Enterprise{}).Where("id = ?", u) default: return errors.Wrap(runnerErrors.ErrBadRequest, "invalid entity type") @@ -608,26 +608,26 @@ func (s *sqlDatabase) sendNotify(entityType dbCommon.DatabaseEntityType, op dbCo return s.producer.Notify(message) } -func (s *sqlDatabase) GetGithubEntity(_ context.Context, entityType params.GithubEntityType, entityID string) (params.GithubEntity, error) { +func (s *sqlDatabase) GetForgeEntity(_ context.Context, entityType params.ForgeEntityType, entityID string) (params.ForgeEntity, error) { var ghEntity params.EntityGetter var err error switch entityType { - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: ghEntity, err = s.GetEnterpriseByID(s.ctx, entityID) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: ghEntity, err = s.GetOrganizationByID(s.ctx, entityID) - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: ghEntity, err = s.GetRepositoryByID(s.ctx, entityID) default: - return params.GithubEntity{}, errors.Wrap(runnerErrors.ErrBadRequest, "invalid entity type") + return params.ForgeEntity{}, 
errors.Wrap(runnerErrors.ErrBadRequest, "invalid entity type") } if err != nil { - return params.GithubEntity{}, errors.Wrap(err, "failed to get ") + return params.ForgeEntity{}, errors.Wrap(err, "failed to get ") } entity, err := ghEntity.GetEntity() if err != nil { - return params.GithubEntity{}, errors.Wrap(err, "failed to get entity") + return params.ForgeEntity{}, errors.Wrap(err, "failed to get entity") } return entity, nil } @@ -747,17 +747,17 @@ func (s *sqlDatabase) addEnterpriseEvent(ctx context.Context, entID string, even return nil } -func (s *sqlDatabase) AddEntityEvent(ctx context.Context, entity params.GithubEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { +func (s *sqlDatabase) AddEntityEvent(ctx context.Context, entity params.ForgeEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { if maxEvents == 0 { return errors.Wrap(runnerErrors.ErrBadRequest, "max events cannot be 0") } switch entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: return s.addRepositoryEvent(ctx, entity.ID, event, eventLevel, statusMessage, maxEvents) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: return s.addOrgEvent(ctx, entity.ID, event, eventLevel, statusMessage, maxEvents) - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: return s.addEnterpriseEvent(ctx, entity.ID, event, eventLevel, statusMessage, maxEvents) default: return errors.Wrap(runnerErrors.ErrBadRequest, "invalid entity type") diff --git a/database/watcher/filters.go b/database/watcher/filters.go index dfcd54bb..51820270 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -63,7 +63,7 @@ func WithOperationTypeFilter(operationType dbCommon.OperationType) dbCommon.Payl // WithEntityPoolFilter returns true if the change payload is a pool that belongs to the // supplied Github entity. This is useful when an entity worker wants to watch for changes // in pools that belong to it. -func WithEntityPoolFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilterFunc { +func WithEntityPoolFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { switch payload.EntityType { case dbCommon.PoolEntityType: @@ -72,11 +72,11 @@ func WithEntityPoolFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilterFu return false } switch ghEntity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: return pool.RepoID == ghEntity.ID - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: return pool.OrgID == ghEntity.ID - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: return pool.EnterpriseID == ghEntity.ID default: return false @@ -90,7 +90,7 @@ func WithEntityPoolFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilterFu // WithEntityPoolFilter returns true if the change payload is a pool that belongs to the // supplied Github entity. This is useful when an entity worker wants to watch for changes // in pools that belong to it. 
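To illustrate the pool filter above after the rename, a minimal sketch; the package names and import paths are assumptions, and the repoID is a placeholder:

package filterexample

import (
	dbCommon "github.com/cloudbase/garm/database/common" // assumed import path
	"github.com/cloudbase/garm/database/watcher"         // assumed import path
	"github.com/cloudbase/garm/params"                   // assumed import path
)

// poolBelongsToRepo builds a repository-scoped ForgeEntity and checks a pool
// change payload against WithEntityPoolFilter.
func poolBelongsToRepo(repoID string, pool params.Pool) bool {
	entity := params.ForgeEntity{
		ID:         repoID,
		EntityType: params.ForgeEntityTypeRepository,
	}
	filter := watcher.WithEntityPoolFilter(entity)
	payload := dbCommon.ChangePayload{
		EntityType: dbCommon.PoolEntityType,
		Payload:    pool,
	}
	// True only when the payload is a pool whose RepoID matches the entity ID.
	return filter(payload)
}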
-func WithEntityScaleSetFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilterFunc { +func WithEntityScaleSetFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { switch payload.EntityType { case dbCommon.ScaleSetEntityType: @@ -99,11 +99,11 @@ func WithEntityScaleSetFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilt return false } switch ghEntity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: return scaleSet.RepoID == ghEntity.ID - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: return scaleSet.OrgID == ghEntity.ID - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: return scaleSet.EnterpriseID == ghEntity.ID default: return false @@ -116,26 +116,26 @@ func WithEntityScaleSetFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilt // WithEntityFilter returns a filter function that filters payloads by entity. // Change payloads that match the entity type and ID will return true. -func WithEntityFilter(entity params.GithubEntity) dbCommon.PayloadFilterFunc { +func WithEntityFilter(entity params.ForgeEntity) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { - if params.GithubEntityType(payload.EntityType) != entity.EntityType { + if params.ForgeEntityType(payload.EntityType) != entity.EntityType { return false } var ent IDGetter var ok bool switch payload.EntityType { case dbCommon.RepositoryEntityType: - if entity.EntityType != params.GithubEntityTypeRepository { + if entity.EntityType != params.ForgeEntityTypeRepository { return false } ent, ok = payload.Payload.(params.Repository) case dbCommon.OrganizationEntityType: - if entity.EntityType != params.GithubEntityTypeOrganization { + if entity.EntityType != params.ForgeEntityTypeOrganization { return false } ent, ok = payload.Payload.(params.Organization) case dbCommon.EnterpriseEntityType: - if entity.EntityType != params.GithubEntityTypeEnterprise { + if entity.EntityType != params.ForgeEntityTypeEnterprise { return false } ent, ok = payload.Payload.(params.Enterprise) @@ -149,7 +149,7 @@ func WithEntityFilter(entity params.GithubEntity) dbCommon.PayloadFilterFunc { } } -func WithEntityJobFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilterFunc { +func WithEntityJobFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { switch payload.EntityType { case dbCommon.JobEntityType: @@ -159,15 +159,15 @@ func WithEntityJobFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilterFun } switch ghEntity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: if job.RepoID != nil && job.RepoID.String() != ghEntity.ID { return false } - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: if job.OrgID != nil && job.OrgID.String() != ghEntity.ID { return false } - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: if job.EnterpriseID != nil && job.EnterpriseID.String() != ghEntity.ID { return false } @@ -183,16 +183,20 @@ func WithEntityJobFilter(ghEntity params.GithubEntity) dbCommon.PayloadFilterFun } // WithGithubCredentialsFilter returns a filter function that filters payloads by Github credentials. 
-func WithGithubCredentialsFilter(creds params.GithubCredentials) dbCommon.PayloadFilterFunc { +func WithForgeCredentialsFilter(creds params.ForgeCredentials) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { - if payload.EntityType != dbCommon.GithubCredentialsEntityType { + var idGetter params.IDGetter + var ok bool + switch payload.EntityType { + case dbCommon.GithubCredentialsEntityType: + idGetter, ok = payload.Payload.(params.GithubCredentials) + default: return false } - credsPayload, ok := payload.Payload.(params.GithubCredentials) if !ok { return false } - return credsPayload.ID == creds.ID + return idGetter.GetID() == creds.GetID() } } diff --git a/database/watcher/watcher_store_test.go b/database/watcher/watcher_store_test.go index 5a1486a8..2300ac0a 100644 --- a/database/watcher/watcher_store_test.go +++ b/database/watcher/watcher_store_test.go @@ -848,7 +848,7 @@ func (s *WatcherStoreTestSuite) TestGithubCredentialsWatcher() { Name: "test-creds", Description: "test credentials", Endpoint: "github.com", - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "bogus", }, @@ -971,7 +971,7 @@ func (s *WatcherStoreTestSuite) TestGithubEndpointWatcher() { EntityType: common.GithubEndpointEntityType, Operation: common.DeleteOperation, // We only get the name of the deleted entity - Payload: params.GithubEndpoint{Name: ghEp.Name}, + Payload: params.ForgeEndpoint{Name: ghEp.Name}, }, event) case <-time.After(1 * time.Second): s.T().Fatal("expected payload not received") diff --git a/internal/testing/testing.go b/internal/testing/testing.go index b3d049fd..6f253267 100644 --- a/internal/testing/testing.go +++ b/internal/testing/testing.go @@ -85,7 +85,7 @@ func CreateGARMTestUser(ctx context.Context, username string, db common.Store, s return user } -func CreateDefaultGithubEndpoint(ctx context.Context, db common.Store, s *testing.T) params.GithubEndpoint { +func CreateDefaultGithubEndpoint(ctx context.Context, db common.Store, s *testing.T) params.ForgeEndpoint { endpointParams := params.CreateGithubEndpointParams{ Name: "github.com", Description: "github endpoint", @@ -110,11 +110,11 @@ func CreateDefaultGithubEndpoint(ctx context.Context, db common.Store, s *testin return ep } -func CreateTestGithubCredentials(ctx context.Context, credsName string, db common.Store, s *testing.T, endpoint params.GithubEndpoint) params.GithubCredentials { +func CreateTestGithubCredentials(ctx context.Context, credsName string, db common.Store, s *testing.T, endpoint params.ForgeEndpoint) params.GithubCredentials { newCredsParams := params.CreateGithubCredentialsParams{ Name: credsName, Description: "Test creds", - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, Endpoint: endpoint.Name, PAT: params.GithubPAT{ OAuth2Token: "test-token", diff --git a/params/interfaces.go b/params/interfaces.go index cd9b94ff..ccf04e06 100644 --- a/params/interfaces.go +++ b/params/interfaces.go @@ -5,7 +5,7 @@ import "time" // EntityGetter is implemented by all github entities (repositories, organizations and enterprises). // It defines the GetEntity() function which returns a github entity. 
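As a usage sketch of the EntityGetter contract and the renamed helpers on Pool; the import path and package name are assumptions:

package entityexample

import (
	"fmt"

	"github.com/cloudbase/garm/params" // assumed import path
)

// describePoolOwner derives the owning ForgeEntity from a pool and verifies
// that the pool reports itself as belonging to that entity.
func describePoolOwner(pool params.Pool) (string, error) {
	entity, err := pool.GetEntity()
	if err != nil {
		// Returned when none of RepoID, OrgID or EnterpriseID is set.
		return "", err
	}
	return fmt.Sprintf("%s owned, belongs: %t", entity.EntityType.String(), pool.BelongsTo(entity)), nil
}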
type EntityGetter interface { - GetEntity() (GithubEntity, error) + GetEntity() (ForgeEntity, error) } type IDGetter interface { @@ -15,3 +15,7 @@ type IDGetter interface { type CreationDateGetter interface { GetCreatedAt() time.Time } + +type ForgeCredentialsGetter interface { + GetForgeCredentials() ForgeCredentials +} diff --git a/params/params.go b/params/params.go index fdf3b836..daa54b61 100644 --- a/params/params.go +++ b/params/params.go @@ -36,14 +36,14 @@ import ( ) type ( - GithubEntityType string + ForgeEntityType string EventType string EventLevel string ProviderType string JobStatus string RunnerStatus string WebhookEndpointType string - GithubAuthType string + ForgeAuthType string EndpointType string PoolBalancerType string ScaleSetState string @@ -106,9 +106,9 @@ const ( ) const ( - GithubEntityTypeRepository GithubEntityType = "repository" - GithubEntityTypeOrganization GithubEntityType = "organization" - GithubEntityTypeEnterprise GithubEntityType = "enterprise" + ForgeEntityTypeRepository ForgeEntityType = "repository" + ForgeEntityTypeOrganization ForgeEntityType = "organization" + ForgeEntityTypeEnterprise ForgeEntityType = "enterprise" ) const ( @@ -141,13 +141,13 @@ const ( ) const ( - // GithubAuthTypePAT is the OAuth token based authentication - GithubAuthTypePAT GithubAuthType = "pat" - // GithubAuthTypeApp is the GitHub App based authentication - GithubAuthTypeApp GithubAuthType = "app" + // ForgeAuthTypePAT is the OAuth token based authentication + ForgeAuthTypePAT ForgeAuthType = "pat" + // ForgeAuthTypeApp is the GitHub App based authentication + ForgeAuthTypeApp ForgeAuthType = "app" ) -func (e GithubEntityType) String() string { +func (e ForgeEntityType) String() string { return string(e) } @@ -380,13 +380,13 @@ type Pool struct { Priority uint `json:"priority,omitempty"` } -func (p Pool) BelongsTo(entity GithubEntity) bool { +func (p Pool) BelongsTo(entity ForgeEntity) bool { switch p.PoolType() { - case GithubEntityTypeRepository: + case ForgeEntityTypeRepository: return p.RepoID == entity.ID - case GithubEntityTypeOrganization: + case ForgeEntityTypeOrganization: return p.OrgID == entity.ID - case GithubEntityTypeEnterprise: + case ForgeEntityTypeEnterprise: return p.EnterpriseID == entity.ID } return false @@ -411,25 +411,25 @@ func (p Pool) MaxRunnersAsInt() int { return int(p.MaxRunners) } -func (p Pool) GetEntity() (GithubEntity, error) { +func (p Pool) GetEntity() (ForgeEntity, error) { switch p.PoolType() { - case GithubEntityTypeRepository: - return GithubEntity{ + case ForgeEntityTypeRepository: + return ForgeEntity{ ID: p.RepoID, - EntityType: GithubEntityTypeRepository, + EntityType: ForgeEntityTypeRepository, }, nil - case GithubEntityTypeOrganization: - return GithubEntity{ + case ForgeEntityTypeOrganization: + return ForgeEntity{ ID: p.OrgID, - EntityType: GithubEntityTypeOrganization, + EntityType: ForgeEntityTypeOrganization, }, nil - case GithubEntityTypeEnterprise: - return GithubEntity{ + case ForgeEntityTypeEnterprise: + return ForgeEntity{ ID: p.EnterpriseID, - EntityType: GithubEntityTypeEnterprise, + EntityType: ForgeEntityTypeEnterprise, }, nil } - return GithubEntity{}, fmt.Errorf("pool has no associated entity") + return ForgeEntity{}, fmt.Errorf("pool has no associated entity") } func (p Pool) GetID() string { @@ -443,14 +443,14 @@ func (p *Pool) RunnerTimeout() uint { return p.RunnerBootstrapTimeout } -func (p *Pool) PoolType() GithubEntityType { +func (p *Pool) PoolType() ForgeEntityType { switch { case p.RepoID != "": - return 
GithubEntityTypeRepository + return ForgeEntityTypeRepository case p.OrgID != "": - return GithubEntityTypeOrganization + return ForgeEntityTypeOrganization case p.EnterpriseID != "": - return GithubEntityTypeEnterprise + return ForgeEntityTypeEnterprise } return "" } @@ -519,13 +519,13 @@ type ScaleSet struct { LastMessageID int64 `json:"-"` } -func (p ScaleSet) BelongsTo(entity GithubEntity) bool { +func (p ScaleSet) BelongsTo(entity ForgeEntity) bool { switch p.ScaleSetType() { - case GithubEntityTypeRepository: + case ForgeEntityTypeRepository: return p.RepoID == entity.ID - case GithubEntityTypeOrganization: + case ForgeEntityTypeOrganization: return p.OrgID == entity.ID - case GithubEntityTypeEnterprise: + case ForgeEntityTypeEnterprise: return p.EnterpriseID == entity.ID } return false @@ -535,35 +535,35 @@ func (p ScaleSet) GetID() uint { return p.ID } -func (p ScaleSet) GetEntity() (GithubEntity, error) { +func (p ScaleSet) GetEntity() (ForgeEntity, error) { switch p.ScaleSetType() { - case GithubEntityTypeRepository: - return GithubEntity{ + case ForgeEntityTypeRepository: + return ForgeEntity{ ID: p.RepoID, - EntityType: GithubEntityTypeRepository, + EntityType: ForgeEntityTypeRepository, }, nil - case GithubEntityTypeOrganization: - return GithubEntity{ + case ForgeEntityTypeOrganization: + return ForgeEntity{ ID: p.OrgID, - EntityType: GithubEntityTypeOrganization, + EntityType: ForgeEntityTypeOrganization, }, nil - case GithubEntityTypeEnterprise: - return GithubEntity{ + case ForgeEntityTypeEnterprise: + return ForgeEntity{ ID: p.EnterpriseID, - EntityType: GithubEntityTypeEnterprise, + EntityType: ForgeEntityTypeEnterprise, }, nil } - return GithubEntity{}, fmt.Errorf("pool has no associated entity") + return ForgeEntity{}, fmt.Errorf("pool has no associated entity") } -func (p *ScaleSet) ScaleSetType() GithubEntityType { +func (p *ScaleSet) ScaleSetType() ForgeEntityType { switch { case p.RepoID != "": - return GithubEntityTypeRepository + return ForgeEntityTypeRepository case p.OrgID != "": - return GithubEntityTypeOrganization + return ForgeEntityTypeOrganization case p.EnterpriseID != "": - return GithubEntityTypeEnterprise + return ForgeEntityTypeEnterprise } return "" } @@ -591,7 +591,7 @@ type Repository struct { Credentials GithubCredentials `json:"credentials,omitempty"` PoolManagerStatus PoolManagerStatus `json:"pool_manager_status,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` - Endpoint GithubEndpoint `json:"endpoint,omitempty"` + Endpoint ForgeEndpoint `json:"endpoint,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` // Do not serialize sensitive info. 
@@ -602,19 +602,23 @@ func (r Repository) CreationDateGetter() time.Time { return r.CreatedAt } -func (r Repository) GetEntity() (GithubEntity, error) { +func (r Repository) GetEntity() (ForgeEntity, error) { if r.ID == "" { - return GithubEntity{}, fmt.Errorf("repository has no ID") + return ForgeEntity{}, fmt.Errorf("repository has no ID") } - return GithubEntity{ + return ForgeEntity{ ID: r.ID, - EntityType: GithubEntityTypeRepository, + EntityType: ForgeEntityTypeRepository, Owner: r.Owner, Name: r.Name, PoolBalancerType: r.PoolBalancerType, - Credentials: r.Credentials, - WebhookSecret: r.WebhookSecret, - CreatedAt: r.CreatedAt, + Credentials: ForgeCredentials{ + ForgeType: GithubEndpointType, + GithubCredentials: r.Credentials, + }, + WebhookSecret: r.WebhookSecret, + CreatedAt: r.CreatedAt, + UpdatedAt: r.UpdatedAt, }, nil } @@ -652,7 +656,7 @@ type Organization struct { CredentialsID uint `json:"credentials_id,omitempty"` PoolManagerStatus PoolManagerStatus `json:"pool_manager_status,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` - Endpoint GithubEndpoint `json:"endpoint,omitempty"` + Endpoint ForgeEndpoint `json:"endpoint,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` // Do not serialize sensitive info. @@ -663,18 +667,22 @@ func (o Organization) GetCreatedAt() time.Time { return o.CreatedAt } -func (o Organization) GetEntity() (GithubEntity, error) { +func (o Organization) GetEntity() (ForgeEntity, error) { if o.ID == "" { - return GithubEntity{}, fmt.Errorf("organization has no ID") + return ForgeEntity{}, fmt.Errorf("organization has no ID") } - return GithubEntity{ + return ForgeEntity{ ID: o.ID, - EntityType: GithubEntityTypeOrganization, + EntityType: ForgeEntityTypeOrganization, Owner: o.Name, WebhookSecret: o.WebhookSecret, PoolBalancerType: o.PoolBalancerType, - Credentials: o.Credentials, - CreatedAt: o.CreatedAt, + Credentials: ForgeCredentials{ + ForgeType: GithubEndpointType, + GithubCredentials: o.Credentials, + }, + CreatedAt: o.CreatedAt, + UpdatedAt: o.UpdatedAt, }, nil } @@ -708,7 +716,7 @@ type Enterprise struct { CredentialsID uint `json:"credentials_id,omitempty"` PoolManagerStatus PoolManagerStatus `json:"pool_manager_status,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` - Endpoint GithubEndpoint `json:"endpoint,omitempty"` + Endpoint ForgeEndpoint `json:"endpoint,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` // Do not serialize sensitive info. 
@@ -719,18 +727,22 @@ func (e Enterprise) GetCreatedAt() time.Time { return e.CreatedAt } -func (e Enterprise) GetEntity() (GithubEntity, error) { +func (e Enterprise) GetEntity() (ForgeEntity, error) { if e.ID == "" { - return GithubEntity{}, fmt.Errorf("enterprise has no ID") + return ForgeEntity{}, fmt.Errorf("enterprise has no ID") } - return GithubEntity{ + return ForgeEntity{ ID: e.ID, - EntityType: GithubEntityTypeEnterprise, + EntityType: ForgeEntityTypeEnterprise, Owner: e.Name, WebhookSecret: e.WebhookSecret, PoolBalancerType: e.PoolBalancerType, - Credentials: e.Credentials, - CreatedAt: e.CreatedAt, + Credentials: ForgeCredentials{ + ForgeType: GithubEndpointType, + GithubCredentials: e.Credentials, + }, + CreatedAt: e.CreatedAt, + UpdatedAt: e.UpdatedAt, }, nil } @@ -843,20 +855,113 @@ func (g GithubRateLimit) ResetAt() time.Time { return time.Unix(g.Reset, 0) } +type ForgeCredentials struct { + ForgeType EndpointType `json:"type,omitempty"` + GithubCredentials GithubCredentials `json:"github,omitempty"` +} + +func (f ForgeCredentials) CABundle() []byte { + switch f.ForgeType { + case GithubEndpointType: + return f.GithubCredentials.CABundle + case GiteaEndpointType: + return nil + default: + return nil + } +} + +func (f ForgeCredentials) Endpoint() ForgeEndpoint { + switch f.ForgeType { + case GithubEndpointType: + return f.GithubCredentials.Endpoint + case GiteaEndpointType: + return ForgeEndpoint{} + default: + return ForgeEndpoint{} + } +} + +func (f ForgeCredentials) APIBaseURL() string { + switch f.ForgeType { + case GithubEndpointType: + return f.GithubCredentials.APIBaseURL + case GiteaEndpointType: + return "" + default: + return "" + } +} + +func (f ForgeCredentials) UploadBaseURL() string { + switch f.ForgeType { + case GithubEndpointType: + return f.GithubCredentials.UploadBaseURL + case GiteaEndpointType: + return "" + default: + return "" + } +} + +func (f ForgeCredentials) BaseURL() string { + switch f.ForgeType { + case GithubEndpointType: + return f.GithubCredentials.BaseURL + case GiteaEndpointType: + return "" + default: + return "" + } +} + +func (f ForgeCredentials) GetHTTPClient(ctx context.Context) (*http.Client, error) { + switch f.ForgeType { + case GithubEndpointType: + return f.GithubCredentials.GetHTTPClient(ctx) + case GiteaEndpointType: + return nil, fmt.Errorf("gitea credentials not supported") + default: + return nil, fmt.Errorf("unknown credentials type") + } +} + +func (f ForgeCredentials) GetID() uint { + switch f.ForgeType { + case GithubEndpointType: + return f.GithubCredentials.ID + case GiteaEndpointType: + return 0 + default: + return 0 + } +} + +func (f ForgeCredentials) RootCertificateBundle() (CertificateBundle, error) { + switch f.ForgeType { + case GithubEndpointType: + return f.GithubCredentials.RootCertificateBundle() + case GiteaEndpointType: + return CertificateBundle{}, fmt.Errorf("gitea credentials not supported") + default: + return CertificateBundle{}, fmt.Errorf("unknown credentials type") + } +} + type GithubCredentials struct { - ID uint `json:"id,omitempty"` - Name string `json:"name,omitempty"` - Description string `json:"description,omitempty"` - APIBaseURL string `json:"api_base_url,omitempty"` - UploadBaseURL string `json:"upload_base_url,omitempty"` - BaseURL string `json:"base_url,omitempty"` - CABundle []byte `json:"ca_bundle,omitempty"` - AuthType GithubAuthType `json:"auth-type,omitempty"` + ID uint `json:"id,omitempty"` + Name string `json:"name,omitempty"` + Description string `json:"description,omitempty"` + 
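Because ForgeCredentials wraps the concrete GithubCredentials and switches on ForgeType, call sites move from reading struct fields to calling accessor methods on the wrapper. The following is a hedged sketch of that usage; describeCredentials is an invented name, and only the types, constants and methods introduced above are assumed.

package example

import (
    "context"
    "fmt"

    "github.com/cloudbase/garm/params"
)

// describeCredentials shows the accessor-style API of ForgeCredentials: the
// wrapper decides, based on ForgeType, which concrete credential set to
// delegate to. For github endpoints it simply forwards to GithubCredentials.
func describeCredentials(ctx context.Context, creds params.GithubCredentials) error {
    // Wrap the concrete credentials; this is what GetForgeCredentials() on
    // GithubCredentials does further down in this patch.
    forge := params.ForgeCredentials{
        ForgeType:         params.GithubEndpointType,
        GithubCredentials: creds,
    }

    // Field access becomes method calls on the wrapper.
    fmt.Println("base URL:", forge.BaseURL())
    fmt.Println("API base URL:", forge.APIBaseURL())
    fmt.Println("credentials ID:", forge.GetID())

    // The HTTP client is also built through the wrapper; for gitea this
    // currently returns an error, as the patch only wires up github.
    if _, err := forge.GetHTTPClient(ctx); err != nil {
        return fmt.Errorf("building http client: %w", err)
    }
    return nil
}

The same shift is visible later in the pool manager, where r.entity.Credentials.CABundle and r.entity.Credentials.Endpoint.Name become CABundle() and Endpoint().Name calls.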
APIBaseURL string `json:"api_base_url,omitempty"` + UploadBaseURL string `json:"upload_base_url,omitempty"` + BaseURL string `json:"base_url,omitempty"` + CABundle []byte `json:"ca_bundle,omitempty"` + AuthType ForgeAuthType `json:"auth-type,omitempty"` Repositories []Repository `json:"repositories,omitempty"` Organizations []Organization `json:"organizations,omitempty"` Enterprises []Enterprise `json:"enterprises,omitempty"` - Endpoint GithubEndpoint `json:"endpoint,omitempty"` + Endpoint ForgeEndpoint `json:"endpoint,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` RateLimit GithubRateLimit `json:"rate_limit,omitempty"` @@ -869,6 +974,13 @@ func (g GithubCredentials) GetID() uint { return g.ID } +func (g GithubCredentials) GetForgeCredentials() ForgeCredentials { + return ForgeCredentials{ + ForgeType: GithubEndpointType, + GithubCredentials: g, + } +} + func (g GithubCredentials) GetHTTPClient(ctx context.Context) (*http.Client, error) { var roots *x509.CertPool if g.CABundle != nil { @@ -888,7 +1000,7 @@ func (g GithubCredentials) GetHTTPClient(ctx context.Context) (*http.Client, err var tc *http.Client switch g.AuthType { - case GithubAuthTypeApp: + case ForgeAuthTypeApp: var app GithubApp if err := json.Unmarshal(g.CredentialsPayload, &app); err != nil { return nil, fmt.Errorf("failed to unmarshal github app credentials: %w", err) @@ -1063,64 +1175,65 @@ type UpdateSystemInfoParams struct { AgentID *int64 `json:"agent_id,omitempty"` } -type GithubEntity struct { - Owner string `json:"owner,omitempty"` - Name string `json:"name,omitempty"` - ID string `json:"id,omitempty"` - EntityType GithubEntityType `json:"entity_type,omitempty"` - Credentials GithubCredentials `json:"credentials,omitempty"` - PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` - CreatedAt time.Time `json:"created_at,omitempty"` +type ForgeEntity struct { + Owner string `json:"owner,omitempty"` + Name string `json:"name,omitempty"` + ID string `json:"id,omitempty"` + EntityType ForgeEntityType `json:"entity_type,omitempty"` + Credentials ForgeCredentials `json:"credentials,omitempty"` + PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` + CreatedAt time.Time `json:"created_at,omitempty"` + UpdatedAt time.Time `json:"updated_at,omitempty"` WebhookSecret string `json:"-"` } -func (g GithubEntity) GetCreatedAt() time.Time { +func (g ForgeEntity) GetCreatedAt() time.Time { return g.CreatedAt } -func (g GithubEntity) GithubURL() string { +func (g ForgeEntity) ForgeURL() string { switch g.EntityType { - case GithubEntityTypeRepository: - return fmt.Sprintf("%s/%s/%s", g.Credentials.BaseURL, g.Owner, g.Name) - case GithubEntityTypeOrganization: - return fmt.Sprintf("%s/%s", g.Credentials.BaseURL, g.Owner) - case GithubEntityTypeEnterprise: - return fmt.Sprintf("%s/enterprises/%s", g.Credentials.BaseURL, g.Owner) + case ForgeEntityTypeRepository: + return fmt.Sprintf("%s/%s/%s", g.Credentials.BaseURL(), g.Owner, g.Name) + case ForgeEntityTypeOrganization: + return fmt.Sprintf("%s/%s", g.Credentials.BaseURL(), g.Owner) + case ForgeEntityTypeEnterprise: + return fmt.Sprintf("%s/enterprises/%s", g.Credentials.BaseURL(), g.Owner) } return "" } -func (g GithubEntity) GetPoolBalancerType() PoolBalancerType { +func (g ForgeEntity) GetPoolBalancerType() PoolBalancerType { if g.PoolBalancerType == "" { return PoolBalancerTypeRoundRobin } return g.PoolBalancerType } -func (g GithubEntity) LabelScope() string { +func (g 
ForgeEntity) LabelScope() string { switch g.EntityType { - case GithubEntityTypeRepository: + case ForgeEntityTypeRepository: return MetricsLabelRepositoryScope - case GithubEntityTypeOrganization: + case ForgeEntityTypeOrganization: return MetricsLabelOrganizationScope - case GithubEntityTypeEnterprise: + case ForgeEntityTypeEnterprise: return MetricsLabelEnterpriseScope } return "" } -func (g GithubEntity) String() string { +func (g ForgeEntity) String() string { switch g.EntityType { - case GithubEntityTypeRepository: + case ForgeEntityTypeRepository: return fmt.Sprintf("%s/%s", g.Owner, g.Name) - case GithubEntityTypeOrganization, GithubEntityTypeEnterprise: + case ForgeEntityTypeOrganization, ForgeEntityTypeEnterprise: return g.Owner } return "" } -func (g GithubEntity) GetIDAsUUID() (uuid.UUID, error) { +func (g ForgeEntity) GetIDAsUUID() (uuid.UUID, error) { if g.ID == "" { return uuid.Nil, nil } @@ -1132,9 +1245,9 @@ func (g GithubEntity) GetIDAsUUID() (uuid.UUID, error) { } // used by swagger client generated code -type GithubEndpoints []GithubEndpoint +type ForgeEndpoints []ForgeEndpoint -type GithubEndpoint struct { +type ForgeEndpoint struct { Name string `json:"name,omitempty"` Description string `json:"description,omitempty"` APIBaseURL string `json:"api_base_url,omitempty"` @@ -1145,6 +1258,4 @@ type GithubEndpoint struct { UpdatedAt time.Time `json:"updated_at,omitempty"` EndpointType EndpointType `json:"endpoint_type,omitempty"` - - Credentials []GithubCredentials `json:"credentials,omitempty"` } diff --git a/params/requests.go b/params/requests.go index 12d9b838..7ab1fa91 100644 --- a/params/requests.go +++ b/params/requests.go @@ -448,12 +448,12 @@ func (g GithubApp) Validate() error { } type CreateGithubCredentialsParams struct { - Name string `json:"name,omitempty"` - Description string `json:"description,omitempty"` - Endpoint string `json:"endpoint,omitempty"` - AuthType GithubAuthType `json:"auth_type,omitempty"` - PAT GithubPAT `json:"pat,omitempty"` - App GithubApp `json:"app,omitempty"` + Name string `json:"name,omitempty"` + Description string `json:"description,omitempty"` + Endpoint string `json:"endpoint,omitempty"` + AuthType ForgeAuthType `json:"auth_type,omitempty"` + PAT GithubPAT `json:"pat,omitempty"` + App GithubApp `json:"app,omitempty"` } func (c CreateGithubCredentialsParams) Validate() error { @@ -466,18 +466,18 @@ func (c CreateGithubCredentialsParams) Validate() error { } switch c.AuthType { - case GithubAuthTypePAT, GithubAuthTypeApp: + case ForgeAuthTypePAT, ForgeAuthTypeApp: default: return runnerErrors.NewBadRequestError("invalid auth_type") } - if c.AuthType == GithubAuthTypePAT { + if c.AuthType == ForgeAuthTypePAT { if c.PAT.OAuth2Token == "" { return runnerErrors.NewBadRequestError("missing oauth2_token") } } - if c.AuthType == GithubAuthTypeApp { + if c.AuthType == ForgeAuthTypeApp { if err := c.App.Validate(); err != nil { return errors.Wrap(err, "invalid app") } diff --git a/runner/common/mocks/GithubClient.go b/runner/common/mocks/GithubClient.go index f1009d5a..6ba39d48 100644 --- a/runner/common/mocks/GithubClient.go +++ b/runner/common/mocks/GithubClient.go @@ -118,18 +118,18 @@ func (_m *GithubClient) DeleteEntityHook(ctx context.Context, id int64) (*github } // GetEntity provides a mock function with no fields -func (_m *GithubClient) GetEntity() params.GithubEntity { +func (_m *GithubClient) GetEntity() params.ForgeEntity { ret := _m.Called() if len(ret) == 0 { panic("no return value specified for GetEntity") } - var r0 
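To make the renamed ForgeEntity helpers concrete, here is a small illustrative sketch of what ForgeURL() and String() produce for each entity type; the owner, repository and base URL values are made up.

package example

import (
    "fmt"

    "github.com/cloudbase/garm/params"
)

func forgeEntityExamples() {
    creds := params.ForgeCredentials{
        ForgeType: params.GithubEndpointType,
        GithubCredentials: params.GithubCredentials{
            BaseURL: "https://github.com",
        },
    }

    repo := params.ForgeEntity{
        Owner:       "example-org",
        Name:        "example-repo",
        EntityType:  params.ForgeEntityTypeRepository,
        Credentials: creds,
    }
    org := params.ForgeEntity{
        Owner:       "example-org",
        EntityType:  params.ForgeEntityTypeOrganization,
        Credentials: creds,
    }
    ent := params.ForgeEntity{
        Owner:       "example-enterprise",
        EntityType:  params.ForgeEntityTypeEnterprise,
        Credentials: creds,
    }

    // ForgeURL() builds the web URL from the credentials' base URL:
    fmt.Println(repo.ForgeURL()) // https://github.com/example-org/example-repo
    fmt.Println(org.ForgeURL())  // https://github.com/example-org
    fmt.Println(ent.ForgeURL())  // https://github.com/enterprises/example-enterprise

    // String() is used for things like consumer IDs and log fields:
    fmt.Println(repo.String()) // example-org/example-repo
    fmt.Println(org.String())  // example-org
}

LabelScope() follows the same switch, mapping the entity type to the corresponding metrics label scope.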
params.GithubEntity - if rf, ok := ret.Get(0).(func() params.GithubEntity); ok { + var r0 params.ForgeEntity + if rf, ok := ret.Get(0).(func() params.ForgeEntity); ok { r0 = rf() } else { - r0 = ret.Get(0).(params.GithubEntity) + r0 = ret.Get(0).(params.ForgeEntity) } return r0 diff --git a/runner/common/mocks/GithubEntityOperations.go b/runner/common/mocks/GithubEntityOperations.go index a482a985..567d4ebc 100644 --- a/runner/common/mocks/GithubEntityOperations.go +++ b/runner/common/mocks/GithubEntityOperations.go @@ -118,18 +118,18 @@ func (_m *GithubEntityOperations) DeleteEntityHook(ctx context.Context, id int64 } // GetEntity provides a mock function with no fields -func (_m *GithubEntityOperations) GetEntity() params.GithubEntity { +func (_m *GithubEntityOperations) GetEntity() params.ForgeEntity { ret := _m.Called() if len(ret) == 0 { panic("no return value specified for GetEntity") } - var r0 params.GithubEntity - if rf, ok := ret.Get(0).(func() params.GithubEntity); ok { + var r0 params.ForgeEntity + if rf, ok := ret.Get(0).(func() params.ForgeEntity); ok { r0 = rf() } else { - r0 = ret.Get(0).(params.GithubEntity) + r0 = ret.Get(0).(params.ForgeEntity) } return r0 diff --git a/runner/common/util.go b/runner/common/util.go index 55e8fb00..39c03651 100644 --- a/runner/common/util.go +++ b/runner/common/util.go @@ -23,7 +23,7 @@ type GithubEntityOperations interface { GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (jitConfigMap map[string]string, runner *github.Runner, err error) // GetEntity returns the GitHub entity for which the github client was instanciated. - GetEntity() params.GithubEntity + GetEntity() params.ForgeEntity // GithubBaseURL returns the base URL for the github or GHES API. GithubBaseURL() *url.URL } diff --git a/runner/enterprises.go b/runner/enterprises.go index fb3f528b..6cbe54d0 100644 --- a/runner/enterprises.go +++ b/runner/enterprises.go @@ -206,9 +206,9 @@ func (r *Runner) CreateEnterprisePool(ctx context.Context, enterpriseID string, param.RunnerBootstrapTimeout = appdefaults.DefaultRunnerBootstrapTimeout } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: enterpriseID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } pool, err := r.store.CreateEntityPool(ctx, entity, createPoolParams) @@ -223,9 +223,9 @@ func (r *Runner) GetEnterprisePoolByID(ctx context.Context, enterpriseID, poolID if !auth.IsAdmin(ctx) { return params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: enterpriseID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { @@ -239,9 +239,9 @@ func (r *Runner) DeleteEnterprisePool(ctx context.Context, enterpriseID, poolID return runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: enterpriseID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) @@ -270,9 +270,9 @@ func (r *Runner) ListEnterprisePools(ctx context.Context, enterpriseID string) ( return []params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: enterpriseID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } pools, err := 
r.store.ListEntityPools(ctx, entity) if err != nil { @@ -286,9 +286,9 @@ func (r *Runner) UpdateEnterprisePool(ctx context.Context, enterpriseID, poolID return params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: enterpriseID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { @@ -320,9 +320,9 @@ func (r *Runner) ListEnterpriseInstances(ctx context.Context, enterpriseID strin if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: enterpriseID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } instances, err := r.store.ListEntityInstances(ctx, entity) if err != nil { diff --git a/runner/enterprises_test.go b/runner/enterprises_test.go index 94bc4807..7e4545d4 100644 --- a/runner/enterprises_test.go +++ b/runner/enterprises_test.go @@ -58,7 +58,7 @@ type EnterpriseTestSuite struct { testCreds params.GithubCredentials secondaryTestCreds params.GithubCredentials - githubEndpoint params.GithubEndpoint + forgeEndpoint params.ForgeEndpoint } func (s *EnterpriseTestSuite) SetupTest() { @@ -70,9 +70,9 @@ func (s *EnterpriseTestSuite) SetupTest() { } adminCtx := garmTesting.ImpersonateAdminContext(context.Background(), db, s.T()) - s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) - s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint) - s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) + s.forgeEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) + s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.forgeEndpoint) + s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.forgeEndpoint) // create some organization objects in the database, for testing purposes enterprises := map[string]params.Enterprise{} @@ -270,9 +270,9 @@ func (s *EnterpriseTestSuite) TestDeleteEnterpriseErrUnauthorized() { } func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolDefinedFailed() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -377,9 +377,9 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolFetchPoolParamsFailed() { } func (s *EnterpriseTestSuite) TestGetEnterprisePoolByID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } enterprisePool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -399,9 +399,9 @@ func (s *EnterpriseTestSuite) TestGetEnterprisePoolByIDErrUnauthorized() { } func (s *EnterpriseTestSuite) TestDeleteEnterprisePool() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.GithubEntityTypeEnterprise, + 
EntityType: params.ForgeEntityTypeEnterprise, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -423,9 +423,9 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolErrUnauthorized() { } func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolRunnersFailed() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -442,9 +442,9 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolRunnersFailed() { } func (s *EnterpriseTestSuite) TestListEnterprisePools() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } enterprisePools := []params.Pool{} for i := 1; i <= 2; i++ { @@ -469,9 +469,9 @@ func (s *EnterpriseTestSuite) TestListOrgPoolsErrUnauthorized() { } func (s *EnterpriseTestSuite) TestUpdateEnterprisePool() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } enterprisePool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -492,9 +492,9 @@ func (s *EnterpriseTestSuite) TestUpdateEnterprisePoolErrUnauthorized() { } func (s *EnterpriseTestSuite) TestUpdateEnterprisePoolMinIdleGreaterThanMax() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -511,9 +511,9 @@ func (s *EnterpriseTestSuite) TestUpdateEnterprisePoolMinIdleGreaterThanMax() { } func (s *EnterpriseTestSuite) TestListEnterpriseInstances() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreEnterprises["test-enterprise-1"].ID, - EntityType: params.GithubEntityTypeEnterprise, + EntityType: params.ForgeEntityTypeEnterprise, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { diff --git a/runner/github_endpoints.go b/runner/github_endpoints.go index 1f6431ea..3f4fb308 100644 --- a/runner/github_endpoints.go +++ b/runner/github_endpoints.go @@ -10,30 +10,30 @@ import ( "github.com/cloudbase/garm/params" ) -func (r *Runner) CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.GithubEndpoint, error) { +func (r *Runner) CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.ForgeEndpoint, error) { if !auth.IsAdmin(ctx) { - return params.GithubEndpoint{}, runnerErrors.ErrUnauthorized + return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized } if err := param.Validate(); err != nil { - return params.GithubEndpoint{}, errors.Wrap(err, "failed to validate github endpoint params") + return params.ForgeEndpoint{}, errors.Wrap(err, "failed to validate github endpoint params") } ep, err := 
r.store.CreateGithubEndpoint(ctx, param) if err != nil { - return params.GithubEndpoint{}, errors.Wrap(err, "failed to create github endpoint") + return params.ForgeEndpoint{}, errors.Wrap(err, "failed to create github endpoint") } return ep, nil } -func (r *Runner) GetGithubEndpoint(ctx context.Context, name string) (params.GithubEndpoint, error) { +func (r *Runner) GetGithubEndpoint(ctx context.Context, name string) (params.ForgeEndpoint, error) { if !auth.IsAdmin(ctx) { - return params.GithubEndpoint{}, runnerErrors.ErrUnauthorized + return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized } endpoint, err := r.store.GetGithubEndpoint(ctx, name) if err != nil { - return params.GithubEndpoint{}, errors.Wrap(err, "failed to get github endpoint") + return params.ForgeEndpoint{}, errors.Wrap(err, "failed to get github endpoint") } return endpoint, nil @@ -52,23 +52,23 @@ func (r *Runner) DeleteGithubEndpoint(ctx context.Context, name string) error { return nil } -func (r *Runner) UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.GithubEndpoint, error) { +func (r *Runner) UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.ForgeEndpoint, error) { if !auth.IsAdmin(ctx) { - return params.GithubEndpoint{}, runnerErrors.ErrUnauthorized + return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized } if err := param.Validate(); err != nil { - return params.GithubEndpoint{}, errors.Wrap(err, "failed to validate github endpoint params") + return params.ForgeEndpoint{}, errors.Wrap(err, "failed to validate github endpoint params") } newEp, err := r.store.UpdateGithubEndpoint(ctx, name, param) if err != nil { - return params.GithubEndpoint{}, errors.Wrap(err, "failed to update github endpoint") + return params.ForgeEndpoint{}, errors.Wrap(err, "failed to update github endpoint") } return newEp, nil } -func (r *Runner) ListGithubEndpoints(ctx context.Context) ([]params.GithubEndpoint, error) { +func (r *Runner) ListGithubEndpoints(ctx context.Context) ([]params.ForgeEndpoint, error) { if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } diff --git a/runner/metadata.go b/runner/metadata.go index 3892d350..8a9c8469 100644 --- a/runner/metadata.go +++ b/runner/metadata.go @@ -56,7 +56,7 @@ func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { ctx, "failed to get instance params") return "", runnerErrors.ErrUnauthorized } - var entity params.GithubEntity + var entity params.ForgeEntity switch { case instance.PoolID != "": @@ -96,11 +96,11 @@ func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { tpl := "actions.runner.%s.%s" var serviceName string switch entity.EntityType { - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: serviceName = fmt.Sprintf(tpl, entity.Owner, instance.Name) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: serviceName = fmt.Sprintf(tpl, entity.Owner, instance.Name) - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: serviceName = fmt.Sprintf(tpl, fmt.Sprintf("%s-%s", entity.Owner, entity.Name), instance.Name) } return serviceName, nil diff --git a/runner/organizations.go b/runner/organizations.go index 4b5e3fd7..49f143ea 100644 --- a/runner/organizations.go +++ b/runner/organizations.go @@ -235,9 +235,9 @@ func (r *Runner) CreateOrgPool(ctx context.Context, orgID string, param params.C param.RunnerBootstrapTimeout = 
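The runner/metadata.go hunk above only swaps constant names, but the service-name scheme it implements is easy to miss in the diff. Here is a small sketch with invented entity and instance names, mirroring the logic of GetRunnerServiceName.

package example

import (
    "fmt"

    "github.com/cloudbase/garm/params"
)

// runnerServiceName reproduces the naming scheme from GetRunnerServiceName:
// "actions.runner.<entity>.<instance>", where repositories collapse owner and
// repo name into a single "<owner>-<repo>" token.
func runnerServiceName(entity params.ForgeEntity, instanceName string) string {
    tpl := "actions.runner.%s.%s"
    switch entity.EntityType {
    case params.ForgeEntityTypeEnterprise, params.ForgeEntityTypeOrganization:
        return fmt.Sprintf(tpl, entity.Owner, instanceName)
    case params.ForgeEntityTypeRepository:
        return fmt.Sprintf(tpl, fmt.Sprintf("%s-%s", entity.Owner, entity.Name), instanceName)
    }
    return ""
}

func serviceNameExamples() {
    org := params.ForgeEntity{Owner: "example-org", EntityType: params.ForgeEntityTypeOrganization}
    repo := params.ForgeEntity{Owner: "example-org", Name: "example-repo", EntityType: params.ForgeEntityTypeRepository}

    fmt.Println(runnerServiceName(org, "runner-01"))  // actions.runner.example-org.runner-01
    fmt.Println(runnerServiceName(repo, "runner-01")) // actions.runner.example-org-example-repo.runner-01
}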
appdefaults.DefaultRunnerBootstrapTimeout } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: orgID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } pool, err := r.store.CreateEntityPool(ctx, entity, createPoolParams) @@ -253,9 +253,9 @@ func (r *Runner) GetOrgPoolByID(ctx context.Context, orgID, poolID string) (para return params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: orgID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) @@ -271,9 +271,9 @@ func (r *Runner) DeleteOrgPool(ctx context.Context, orgID, poolID string) error return runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: orgID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) @@ -304,9 +304,9 @@ func (r *Runner) ListOrgPools(ctx context.Context, orgID string) ([]params.Pool, if !auth.IsAdmin(ctx) { return []params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: orgID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { @@ -320,9 +320,9 @@ func (r *Runner) UpdateOrgPool(ctx context.Context, orgID, poolID string, param return params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: orgID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) @@ -356,9 +356,9 @@ func (r *Runner) ListOrgInstances(ctx context.Context, orgID string) ([]params.I return nil, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: orgID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } instances, err := r.store.ListEntityInstances(ctx, entity) diff --git a/runner/organizations_test.go b/runner/organizations_test.go index ae0af3cf..3609b941 100644 --- a/runner/organizations_test.go +++ b/runner/organizations_test.go @@ -58,7 +58,7 @@ type OrgTestSuite struct { testCreds params.GithubCredentials secondaryTestCreds params.GithubCredentials - githubEndpoint params.GithubEndpoint + githubEndpoint params.ForgeEndpoint } func (s *OrgTestSuite) SetupTest() { @@ -284,9 +284,9 @@ func (s *OrgTestSuite) TestDeleteOrganizationErrUnauthorized() { } func (s *OrgTestSuite) TestDeleteOrganizationPoolDefinedFailed() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -402,9 +402,9 @@ func (s *OrgTestSuite) TestCreateOrgPoolFetchPoolParamsFailed() { } func (s *OrgTestSuite) TestGetOrgPoolByID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } orgPool, err := 
s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -424,9 +424,9 @@ func (s *OrgTestSuite) TestGetOrgPoolByIDErrUnauthorized() { } func (s *OrgTestSuite) TestDeleteOrgPool() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -448,9 +448,9 @@ func (s *OrgTestSuite) TestDeleteOrgPoolErrUnauthorized() { } func (s *OrgTestSuite) TestDeleteOrgPoolRunnersFailed() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -467,9 +467,9 @@ func (s *OrgTestSuite) TestDeleteOrgPoolRunnersFailed() { } func (s *OrgTestSuite) TestListOrgPools() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } orgPools := []params.Pool{} for i := 1; i <= 2; i++ { @@ -494,9 +494,9 @@ func (s *OrgTestSuite) TestListOrgPoolsErrUnauthorized() { } func (s *OrgTestSuite) TestUpdateOrgPool() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } orgPool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -517,9 +517,9 @@ func (s *OrgTestSuite) TestUpdateOrgPoolErrUnauthorized() { } func (s *OrgTestSuite) TestUpdateOrgPoolMinIdleGreaterThanMax() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -536,9 +536,9 @@ func (s *OrgTestSuite) TestUpdateOrgPoolMinIdleGreaterThanMax() { } func (s *OrgTestSuite) TestListOrgInstances() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreOrgs["test-org-1"].ID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 3cb8bff3..e24aa69b 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -67,7 +67,7 @@ const ( maxCreateAttempts = 5 ) -func NewEntityPoolManager(ctx context.Context, entity params.GithubEntity, instanceTokenGetter auth.InstanceTokenGetter, providers map[string]common.Provider, store dbCommon.Store) (common.PoolManager, error) { +func NewEntityPoolManager(ctx context.Context, entity params.ForgeEntity, instanceTokenGetter auth.InstanceTokenGetter, providers map[string]common.Provider, store dbCommon.Store) (common.PoolManager, error) { ctx = garmUtil.WithSlogContext(ctx, slog.Any("pool_mgr", entity.String()), 
slog.Any("pool_type", entity.EntityType)) ghc, err := ghClient.Client(ctx, entity) if err != nil { @@ -83,7 +83,7 @@ func NewEntityPoolManager(ctx context.Context, entity params.GithubEntity, insta return nil, errors.Wrap(err, "getting controller info") } - consumerID := fmt.Sprintf("pool-manager-%s-%s", entity.String(), entity.Credentials.Endpoint.Name) + consumerID := fmt.Sprintf("pool-manager-%s-%s", entity.String(), entity.Credentials.Endpoint().Name) slog.InfoContext(ctx, "registering consumer", "consumer_id", consumerID) consumer, err := watcher.RegisterConsumer( ctx, consumerID, @@ -120,7 +120,7 @@ func NewEntityPoolManager(ctx context.Context, entity params.GithubEntity, insta type basePoolManager struct { ctx context.Context consumerID string - entity params.GithubEntity + entity params.ForgeEntity ghcli common.GithubClient controllerInfo params.ControllerInfo instanceTokenGetter auth.InstanceTokenGetter @@ -877,7 +877,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error bootstrapArgs := commonParams.BootstrapInstance{ Name: instance.Name, Tools: r.tools, - RepoURL: r.entity.GithubURL(), + RepoURL: r.entity.ForgeURL(), MetadataURL: instance.MetadataURL, CallbackURL: instance.CallbackURL, InstanceToken: jwtToken, @@ -887,7 +887,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error Image: pool.Image, ExtraSpecs: pool.ExtraSpecs, PoolID: instance.PoolID, - CACertBundle: r.entity.Credentials.CABundle, + CACertBundle: r.entity.Credentials.CABundle(), GitHubRunnerGroup: instance.GitHubRunnerGroup, JitConfigEnabled: hasJITConfig, } @@ -981,11 +981,11 @@ func (r *basePoolManager) paramsWorkflowJobToParamsJob(job params.WorkflowJob) ( } switch r.entity.EntityType { - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: jobParams.EnterpriseID = &asUUID - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: jobParams.RepoID = &asUUID - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: jobParams.OrgID = &asUUID default: return jobParams, errors.Errorf("unknown pool type: %s", r.entity.EntityType) @@ -1931,15 +1931,15 @@ func (r *basePoolManager) InstallWebhook(ctx context.Context, param params.Insta func (r *basePoolManager) ValidateOwner(job params.WorkflowJob) error { switch r.entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: if !strings.EqualFold(job.Repository.Name, r.entity.Name) || !strings.EqualFold(job.Repository.Owner.Login, r.entity.Owner) { return runnerErrors.NewBadRequestError("job not meant for this pool manager") } - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: if !strings.EqualFold(job.Organization.Login, r.entity.Owner) { return runnerErrors.NewBadRequestError("job not meant for this pool manager") } - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: if !strings.EqualFold(job.Enterprise.Slug, r.entity.Owner) { return runnerErrors.NewBadRequestError("job not meant for this pool manager") } diff --git a/runner/pool/stub_client.go b/runner/pool/stub_client.go index 2518ce9c..e8f1e7c6 100644 --- a/runner/pool/stub_client.go +++ b/runner/pool/stub_client.go @@ -57,8 +57,8 @@ func (s *stubGithubClient) GetWorkflowJobByID(_ context.Context, _, _ string, _ return nil, nil, s.err } -func (s *stubGithubClient) GetEntity() params.GithubEntity { - return params.GithubEntity{} +func (s *stubGithubClient) 
GetEntity() params.ForgeEntity { + return params.ForgeEntity{} } func (s *stubGithubClient) GithubBaseURL() *url.URL { diff --git a/runner/pool/util.go b/runner/pool/util.go index 9b7b7f14..25fdc73f 100644 --- a/runner/pool/util.go +++ b/runner/pool/util.go @@ -119,7 +119,7 @@ func isManagedRunner(labels []string, controllerID string) bool { return runnerControllerID == controllerID } -func composeWatcherFilters(entity params.GithubEntity) dbCommon.PayloadFilterFunc { +func composeWatcherFilters(entity params.ForgeEntity) dbCommon.PayloadFilterFunc { // We want to watch for changes in either the controller or the // entity itself. return watcher.WithAny( @@ -131,6 +131,6 @@ func composeWatcherFilters(entity params.GithubEntity) dbCommon.PayloadFilterFun // Any operation on the entity we're managing the pool for. watcher.WithEntityFilter(entity), // Watch for changes to the github credentials - watcher.WithGithubCredentialsFilter(entity.Credentials), + watcher.WithForgeCredentialsFilter(entity.Credentials), ) } diff --git a/runner/pool/watcher.go b/runner/pool/watcher.go index 7f05d93b..56427e89 100644 --- a/runner/pool/watcher.go +++ b/runner/pool/watcher.go @@ -14,7 +14,7 @@ import ( // entityGetter is implemented by all github entities (repositories, organizations and enterprises) type entityGetter interface { - GetEntity() (params.GithubEntity, error) + GetEntity() (params.ForgeEntity, error) } func (r *basePoolManager) handleControllerUpdateEvent(controllerInfo params.ControllerInfo) { @@ -38,7 +38,7 @@ func (r *basePoolManager) getClientOrStub() runnerCommon.GithubClient { return ghc } -func (r *basePoolManager) handleEntityUpdate(entity params.GithubEntity, operation common.OperationType) { +func (r *basePoolManager) handleEntityUpdate(entity params.ForgeEntity, operation common.OperationType) { slog.DebugContext(r.ctx, "received entity operation", "entity", entity.ID, "operation", operation) if r.entity.ID != entity.ID { slog.WarnContext(r.ctx, "entity ID mismatch; stale event? refusing to update", "entity", entity.ID) @@ -56,7 +56,7 @@ func (r *basePoolManager) handleEntityUpdate(entity params.GithubEntity, operati return } - credentialsUpdate := r.entity.Credentials.ID != entity.Credentials.ID + credentialsUpdate := r.entity.Credentials.GetID() != entity.Credentials.GetID() defer func() { slog.DebugContext(r.ctx, "deferred tools update", "credentials_update", credentialsUpdate) if !credentialsUpdate { @@ -85,7 +85,7 @@ func (r *basePoolManager) handleEntityUpdate(entity params.GithubEntity, operati slog.DebugContext(r.ctx, "lock released", "entity", entity.ID) } -func (r *basePoolManager) handleCredentialsUpdate(credentials params.GithubCredentials) { +func (r *basePoolManager) handleCredentialsUpdate(credentials params.ForgeCredentials) { // when we switch credentials on an entity (like from one app to another or from an app // to a PAT), we may still get events for the previous credentials as the channel is buffered. // The watcher will watch for changes to the entity itself, which includes events that @@ -97,12 +97,12 @@ func (r *basePoolManager) handleCredentialsUpdate(credentials params.GithubCrede // test-repo. This function would handle situations where "org_pat" is updated. // If "test-repo" is updated with new credentials, that event is handled above in // handleEntityUpdate. 
- shouldUpdateTools := r.entity.Credentials.ID == credentials.ID + shouldUpdateTools := r.entity.Credentials.GetID() == credentials.GetID() defer func() { if !shouldUpdateTools { return } - slog.DebugContext(r.ctx, "deferred tools update", "credentials_id", credentials.ID) + slog.DebugContext(r.ctx, "deferred tools update", "credentials_id", credentials.GetID()) if err := r.updateTools(); err != nil { slog.ErrorContext(r.ctx, "failed to update tools", "error", err) } @@ -110,12 +110,12 @@ func (r *basePoolManager) handleCredentialsUpdate(credentials params.GithubCrede r.mux.Lock() if !shouldUpdateTools { - slog.InfoContext(r.ctx, "credential ID mismatch; stale event?", "credentials_id", credentials.ID) + slog.InfoContext(r.ctx, "credential ID mismatch; stale event?", "credentials_id", credentials.GetID()) r.mux.Unlock() return } - slog.DebugContext(r.ctx, "updating credentials", "credentials_id", credentials.ID) + slog.DebugContext(r.ctx, "updating credentials", "credentials_id", credentials.GetID()) r.entity.Credentials = credentials r.ghcli = r.getClientOrStub() r.mux.Unlock() @@ -130,7 +130,7 @@ func (r *basePoolManager) handleWatcherEvent(event common.ChangePayload) { slog.ErrorContext(r.ctx, "failed to cast payload to github credentials") return } - r.handleCredentialsUpdate(credentials) + r.handleCredentialsUpdate(credentials.GetForgeCredentials()) case common.ControllerEntityType: controllerInfo, ok := event.Payload.(params.ControllerInfo) if !ok { diff --git a/runner/pools_test.go b/runner/pools_test.go index 918598d1..95c6b6bd 100644 --- a/runner/pools_test.go +++ b/runner/pools_test.go @@ -49,7 +49,7 @@ type PoolTestSuite struct { adminCtx context.Context testCreds params.GithubCredentials secondaryTestCreds params.GithubCredentials - githubEndpoint params.GithubEndpoint + githubEndpoint params.ForgeEndpoint } func (s *PoolTestSuite) SetupTest() { @@ -75,9 +75,9 @@ func (s *PoolTestSuite) SetupTest() { } // create some pool objects in the database, for testing purposes - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: org.ID, - EntityType: params.GithubEntityTypeOrganization, + EntityType: params.ForgeEntityTypeOrganization, } orgPools := []params.Pool{} for i := 1; i <= 3; i++ { diff --git a/runner/repositories.go b/runner/repositories.go index ab4f8e90..83876c5d 100644 --- a/runner/repositories.go +++ b/runner/repositories.go @@ -235,9 +235,9 @@ func (r *Runner) CreateRepoPool(ctx context.Context, repoID string, param params createPoolParams.RunnerBootstrapTimeout = appdefaults.DefaultRunnerBootstrapTimeout } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: repoID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } pool, err := r.store.CreateEntityPool(ctx, entity, createPoolParams) @@ -253,9 +253,9 @@ func (r *Runner) GetRepoPoolByID(ctx context.Context, repoID, poolID string) (pa return params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: repoID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) @@ -271,9 +271,9 @@ func (r *Runner) DeleteRepoPool(ctx context.Context, repoID, poolID string) erro return runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: repoID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } pool, err := 
r.store.GetEntityPool(ctx, entity, poolID) if err != nil { @@ -300,9 +300,9 @@ func (r *Runner) ListRepoPools(ctx context.Context, repoID string) ([]params.Poo if !auth.IsAdmin(ctx) { return []params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: repoID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { @@ -328,9 +328,9 @@ func (r *Runner) UpdateRepoPool(ctx context.Context, repoID, poolID string, para return params.Pool{}, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: repoID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { @@ -362,9 +362,9 @@ func (r *Runner) ListRepoInstances(ctx context.Context, repoID string) ([]params if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: repoID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } instances, err := r.store.ListEntityInstances(ctx, entity) if err != nil { diff --git a/runner/repositories_test.go b/runner/repositories_test.go index c1aa04b4..47bfb003 100644 --- a/runner/repositories_test.go +++ b/runner/repositories_test.go @@ -62,7 +62,7 @@ type RepoTestSuite struct { testCreds params.GithubCredentials secondaryTestCreds params.GithubCredentials - githubEndpoint params.GithubEndpoint + githubEndpoint params.ForgeEndpoint } func (s *RepoTestSuite) SetupTest() { @@ -299,9 +299,9 @@ func (s *RepoTestSuite) TestDeleteRepositoryErrUnauthorized() { } func (s *RepoTestSuite) TestDeleteRepositoryPoolDefinedFailed() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreRepos["test-repo-1"].ID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -419,9 +419,9 @@ func (s *RepoTestSuite) TestCreateRepoPoolFetchPoolParamsFailed() { } func (s *RepoTestSuite) TestGetRepoPoolByID() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreRepos["test-repo-1"].ID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } repoPool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -441,9 +441,9 @@ func (s *RepoTestSuite) TestGetRepoPoolByIDErrUnauthorized() { } func (s *RepoTestSuite) TestDeleteRepoPool() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreRepos["test-repo-1"].ID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -465,9 +465,9 @@ func (s *RepoTestSuite) TestDeleteRepoPoolErrUnauthorized() { } func (s *RepoTestSuite) TestDeleteRepoPoolRunnersFailed() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreRepos["test-repo-1"].ID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } pool, err := 
s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -484,9 +484,9 @@ func (s *RepoTestSuite) TestDeleteRepoPoolRunnersFailed() { } func (s *RepoTestSuite) TestListRepoPools() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreRepos["test-repo-1"].ID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } repoPools := []params.Pool{} for i := 1; i <= 2; i++ { @@ -511,9 +511,9 @@ func (s *RepoTestSuite) TestListRepoPoolsErrUnauthorized() { } func (s *RepoTestSuite) TestListPoolInstances() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreRepos["test-repo-1"].ID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -542,9 +542,9 @@ func (s *RepoTestSuite) TestListPoolInstancesErrUnauthorized() { } func (s *RepoTestSuite) TestUpdateRepoPool() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreRepos["test-repo-1"].ID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } repoPool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -565,9 +565,9 @@ func (s *RepoTestSuite) TestUpdateRepoPoolErrUnauthorized() { } func (s *RepoTestSuite) TestUpdateRepoPoolMinIdleGreaterThanMax() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreRepos["test-repo-1"].ID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { @@ -584,9 +584,9 @@ func (s *RepoTestSuite) TestUpdateRepoPoolMinIdleGreaterThanMax() { } func (s *RepoTestSuite) TestListRepoInstances() { - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: s.Fixtures.StoreRepos["test-repo-1"].ID, - EntityType: params.GithubEntityTypeRepository, + EntityType: params.ForgeEntityTypeRepository, } pool, err := s.Fixtures.Store.CreateEntityPool(s.Fixtures.AdminContext, entity, s.Fixtures.CreatePoolParams) if err != nil { diff --git a/runner/runner.go b/runner/runner.go index 42a955fc..6d5bc5eb 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -602,10 +602,10 @@ func (r *Runner) validateHookBody(signature, secret string, body []byte) error { return nil } -func (r *Runner) findEndpointForJob(job params.WorkflowJob) (params.GithubEndpoint, error) { +func (r *Runner) findEndpointForJob(job params.WorkflowJob) (params.ForgeEndpoint, error) { uri, err := url.ParseRequestURI(job.WorkflowJob.HTMLURL) if err != nil { - return params.GithubEndpoint{}, errors.Wrap(err, "parsing job URL") + return params.ForgeEndpoint{}, errors.Wrap(err, "parsing job URL") } baseURI := fmt.Sprintf("%s://%s", uri.Scheme, uri.Host) @@ -616,7 +616,7 @@ func (r *Runner) findEndpointForJob(job params.WorkflowJob) (params.GithubEndpoi // that much about the performance of this function. 
endpoints, err := r.store.ListGithubEndpoints(r.ctx) if err != nil { - return params.GithubEndpoint{}, errors.Wrap(err, "fetching github endpoints") + return params.ForgeEndpoint{}, errors.Wrap(err, "fetching github endpoints") } for _, ep := range endpoints { if ep.BaseURL == baseURI { @@ -624,7 +624,7 @@ func (r *Runner) findEndpointForJob(job params.WorkflowJob) (params.GithubEndpoi } } - return params.GithubEndpoint{}, runnerErrors.NewNotFoundError("no endpoint found for job") + return params.ForgeEndpoint{}, runnerErrors.NewNotFoundError("no endpoint found for job") } func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, jobData []byte) error { @@ -928,7 +928,7 @@ func (r *Runner) getGHCliFromInstance(ctx context.Context, instance params.Insta } // Fetching the entity from the database will populate all fields, including credentials. - entity, err = r.store.GetGithubEntity(ctx, entity.EntityType, entity.ID) + entity, err = r.store.GetForgeEntity(ctx, entity.EntityType, entity.ID) if err != nil { return nil, nil, errors.Wrap(err, "fetching entity") } diff --git a/runner/scalesets.go b/runner/scalesets.go index 83432e63..6796dc2d 100644 --- a/runner/scalesets.go +++ b/runner/scalesets.go @@ -80,7 +80,7 @@ func (r *Runner) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error return errors.Wrap(err, "getting entity") } - entity, err := r.store.GetGithubEntity(ctx, paramEntity.EntityType, paramEntity.ID) + entity, err := r.store.GetForgeEntity(ctx, paramEntity.EntityType, paramEntity.ID) if err != nil { return errors.Wrap(err, "getting entity") } @@ -143,7 +143,7 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param return params.ScaleSet{}, errors.Wrap(err, "getting entity") } - entity, err := r.store.GetGithubEntity(ctx, paramEntity.EntityType, paramEntity.ID) + entity, err := r.store.GetForgeEntity(ctx, paramEntity.EntityType, paramEntity.ID) if err != nil { return params.ScaleSet{}, errors.Wrap(err, "getting entity") } @@ -198,7 +198,7 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param return newScaleSet, nil } -func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.GithubEntityType, entityID string, param params.CreateScaleSetParams) (scaleSetRet params.ScaleSet, err error) { +func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.ForgeEntityType, entityID string, param params.CreateScaleSetParams) (scaleSetRet params.ScaleSet, err error) { if !auth.IsAdmin(ctx) { return params.ScaleSet{}, runnerErrors.ErrUnauthorized } @@ -211,7 +211,7 @@ func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.Git param.GitHubRunnerGroup = "Default" } - entity, err := r.store.GetGithubEntity(ctx, entityType, entityID) + entity, err := r.store.GetForgeEntity(ctx, entityType, entityID) if err != nil { return params.ScaleSet{}, errors.Wrap(err, "getting entity") } @@ -287,11 +287,11 @@ func (r *Runner) ListScaleSetInstances(ctx context.Context, scalesetID uint) ([] return instances, nil } -func (r *Runner) ListEntityScaleSets(ctx context.Context, entityType params.GithubEntityType, entityID string) ([]params.ScaleSet, error) { +func (r *Runner) ListEntityScaleSets(ctx context.Context, entityType params.ForgeEntityType, entityID string) ([]params.ScaleSet, error) { if !auth.IsAdmin(ctx) { return []params.ScaleSet{}, runnerErrors.ErrUnauthorized } - entity := params.GithubEntity{ + entity := params.ForgeEntity{ ID: entityID, EntityType: entityType, } diff 
--git a/test/integration/client_utils.go b/test/integration/client_utils.go index a0f17893..977cc11c 100644 --- a/test/integration/client_utils.go +++ b/test/integration/client_utils.go @@ -77,7 +77,7 @@ func updateGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.Client return &updateCredentialsResponse.Payload, nil } -func createGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointParams params.CreateGithubEndpointParams) (*params.GithubEndpoint, error) { +func createGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointParams params.CreateGithubEndpointParams) (*params.ForgeEndpoint, error) { createEndpointResponse, err := apiCli.Endpoints.CreateGithubEndpoint( clientEndpoints.NewCreateGithubEndpointParams().WithBody(endpointParams), apiAuthToken) @@ -87,7 +87,7 @@ func createGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAut return &createEndpointResponse.Payload, nil } -func listGithubEndpoints(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter) (params.GithubEndpoints, error) { +func listGithubEndpoints(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter) (params.ForgeEndpoints, error) { listEndpointsResponse, err := apiCli.Endpoints.ListGithubEndpoints( clientEndpoints.NewListGithubEndpointsParams(), apiAuthToken) @@ -97,7 +97,7 @@ func listGithubEndpoints(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuth return listEndpointsResponse.Payload, nil } -func getGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointName string) (*params.GithubEndpoint, error) { +func getGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointName string) (*params.ForgeEndpoint, error) { getEndpointResponse, err := apiCli.Endpoints.GetGithubEndpoint( clientEndpoints.NewGetGithubEndpointParams().WithName(endpointName), apiAuthToken) @@ -113,7 +113,7 @@ func deleteGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAut apiAuthToken) } -func updateGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointName string, endpointParams params.UpdateGithubEndpointParams) (*params.GithubEndpoint, error) { +func updateGithubEndpoint(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, endpointName string, endpointParams params.UpdateGithubEndpointParams) (*params.ForgeEndpoint, error) { updateEndpointResponse, err := apiCli.Endpoints.UpdateGithubEndpoint( clientEndpoints.NewUpdateGithubEndpointParams().WithName(endpointName).WithBody(endpointParams), apiAuthToken) diff --git a/test/integration/credentials_test.go b/test/integration/credentials_test.go index 8d92bf22..f7c9c691 100644 --- a/test/integration/credentials_test.go +++ b/test/integration/credentials_test.go @@ -25,7 +25,7 @@ func (suite *GarmSuite) TestGithubCredentialsErrorOnDuplicateCredentialsName() { Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "dummy", }, @@ -68,7 +68,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailsOnInvalidAuthType() { Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.GithubAuthType("invalid"), + AuthType: params.ForgeAuthType("invalid"), PAT: params.GithubPAT{ OAuth2Token: "dummy", }, @@ -87,7 +87,7 @@ func (suite *GarmSuite) 
TestGithubCredentialsFailsWhenAuthTypeParamsAreIncorrect Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, App: params.GithubApp{ AppID: 123, InstallationID: 456, @@ -107,7 +107,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailsWhenAuthTypeParamsAreMissing() Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.GithubAuthTypeApp, + AuthType: params.ForgeAuthTypeApp, } _, err := createGithubCredentials(suite.cli, suite.authToken, createCredsParams) suite.Error(err, "expected error when creating credentials with missing auth type params") @@ -147,7 +147,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailWhenAppKeyIsInvalid() { Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.GithubAuthTypeApp, + AuthType: params.ForgeAuthTypeApp, App: params.GithubApp{ AppID: 123, InstallationID: 456, @@ -166,7 +166,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailWhenEndpointDoesntExist() { Name: dummyCredentialsName, Endpoint: "iDontExist.example.com", Description: "GARM test credentials", - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "dummy", }, @@ -189,7 +189,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailsOnDuplicateName() { Name: dummyCredentialsName, Endpoint: defaultEndpointName, Description: "GARM test credentials", - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "dummy", }, @@ -204,7 +204,7 @@ func (suite *GarmSuite) createDummyCredentials(name, endpointName string) (*para Name: name, Endpoint: endpointName, Description: "GARM test credentials", - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: "dummy", }, diff --git a/test/integration/endpoints.go b/test/integration/endpoints.go index 9e47d854..9f1320ca 100644 --- a/test/integration/endpoints.go +++ b/test/integration/endpoints.go @@ -8,7 +8,7 @@ import ( "github.com/cloudbase/garm/params" ) -func checkEndpointParamsAreEqual(a, b params.GithubEndpoint) error { +func checkEndpointParamsAreEqual(a, b params.ForgeEndpoint) error { if a.Name != b.Name { return fmt.Errorf("endpoint name mismatch") } diff --git a/test/integration/endpoints_test.go b/test/integration/endpoints_test.go index e09916bc..a958259f 100644 --- a/test/integration/endpoints_test.go +++ b/test/integration/endpoints_test.go @@ -163,7 +163,7 @@ func (suite *GarmSuite) MustDefaultGithubEndpoint() { suite.Equal(ep.Name, "github.com", "default GitHub endpoint name mismatch") } -func (suite *GarmSuite) GetGithubEndpoint(name string) *params.GithubEndpoint { +func (suite *GarmSuite) GetGithubEndpoint(name string) *params.ForgeEndpoint { t := suite.T() t.Log("Get GitHub endpoint") endpoint, err := getGithubEndpoint(suite.cli, suite.authToken, name) @@ -172,7 +172,7 @@ func (suite *GarmSuite) GetGithubEndpoint(name string) *params.GithubEndpoint { return endpoint } -func (suite *GarmSuite) CreateGithubEndpoint(params params.CreateGithubEndpointParams) (*params.GithubEndpoint, error) { +func (suite *GarmSuite) CreateGithubEndpoint(params params.CreateGithubEndpointParams) (*params.ForgeEndpoint, error) { t := suite.T() t.Log("Create GitHub endpoint") endpoint, err := createGithubEndpoint(suite.cli, suite.authToken, params) @@ -190,7 +190,7 
@@ func (suite *GarmSuite) DeleteGithubEndpoint(name string) error { return nil } -func (suite *GarmSuite) ListGithubEndpoints() params.GithubEndpoints { +func (suite *GarmSuite) ListGithubEndpoints() params.ForgeEndpoints { t := suite.T() t.Log("List GitHub endpoints") endpoints, err := listGithubEndpoints(suite.cli, suite.authToken) @@ -199,7 +199,7 @@ func (suite *GarmSuite) ListGithubEndpoints() params.GithubEndpoints { return endpoints } -func (suite *GarmSuite) createDummyEndpoint(name string) (*params.GithubEndpoint, error) { +func (suite *GarmSuite) createDummyEndpoint(name string) (*params.ForgeEndpoint, error) { endpointParams := params.CreateGithubEndpointParams{ Name: name, Description: "Dummy endpoint", diff --git a/test/integration/repositories_test.go b/test/integration/repositories_test.go index 7b396ffc..1f111fcc 100644 --- a/test/integration/repositories_test.go +++ b/test/integration/repositories_test.go @@ -22,7 +22,7 @@ func (suite *GarmSuite) EnsureTestCredentials(name string, oauthToken string, en Name: name, Endpoint: endpointName, Description: "GARM test credentials", - AuthType: params.GithubAuthTypePAT, + AuthType: params.ForgeAuthTypePAT, PAT: params.GithubPAT{ OAuth2Token: oauthToken, }, diff --git a/util/github/client.go b/util/github/client.go index 77803f4f..1480561a 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -22,6 +22,7 @@ import ( "log/slog" "net/http" "net/url" + "strings" "github.com/google/go-github/v71/github" "github.com/pkg/errors" @@ -39,7 +40,7 @@ type githubClient struct { enterprise *github.EnterpriseService rateLimit *github.RateLimitService - entity params.GithubEntity + entity params.ForgeEntity cli *github.Client } @@ -57,9 +58,9 @@ func (g *githubClient) ListEntityHooks(ctx context.Context, opts *github.ListOpt } }() switch g.entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: ret, response, err = g.repo.ListHooks(ctx, g.entity.Owner, g.entity.Name, opts) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: ret, response, err = g.org.ListHooks(ctx, g.entity.Owner, opts) default: return nil, nil, fmt.Errorf("invalid entity type: %s", g.entity.EntityType) @@ -81,9 +82,9 @@ func (g *githubClient) GetEntityHook(ctx context.Context, id int64) (ret *github } }() switch g.entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: ret, _, err = g.repo.GetHook(ctx, g.entity.Owner, g.entity.Name, id) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: ret, _, err = g.org.GetHook(ctx, g.entity.Owner, id) default: return nil, errors.New("invalid entity type") @@ -105,9 +106,9 @@ func (g *githubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) } }() switch g.entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: ret, _, err = g.repo.CreateHook(ctx, g.entity.Owner, g.entity.Name, hook) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: ret, _, err = g.org.CreateHook(ctx, g.entity.Owner, hook) default: return nil, errors.New("invalid entity type") @@ -129,9 +130,9 @@ func (g *githubClient) DeleteEntityHook(ctx context.Context, id int64) (ret *git } }() switch g.entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: ret, err = g.repo.DeleteHook(ctx, g.entity.Owner, g.entity.Name, id) - case params.GithubEntityTypeOrganization: + 
case params.ForgeEntityTypeOrganization: ret, err = g.org.DeleteHook(ctx, g.entity.Owner, id) default: return nil, errors.New("invalid entity type") @@ -153,9 +154,9 @@ func (g *githubClient) PingEntityHook(ctx context.Context, id int64) (ret *githu } }() switch g.entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: ret, err = g.repo.PingHook(ctx, g.entity.Owner, g.entity.Name, id) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: ret, err = g.org.PingHook(ctx, g.entity.Owner, id) default: return nil, errors.New("invalid entity type") @@ -182,11 +183,11 @@ func (g *githubClient) ListEntityRunners(ctx context.Context, opts *github.ListR }() switch g.entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: ret, response, err = g.ListRunners(ctx, g.entity.Owner, g.entity.Name, opts) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: ret, response, err = g.ListOrganizationRunners(ctx, g.entity.Owner, opts) - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: ret, response, err = g.enterprise.ListRunners(ctx, g.entity.Owner, opts) default: return nil, nil, errors.New("invalid entity type") @@ -214,11 +215,11 @@ func (g *githubClient) ListEntityRunnerApplicationDownloads(ctx context.Context) }() switch g.entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: ret, response, err = g.ListRunnerApplicationDownloads(ctx, g.entity.Owner, g.entity.Name) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: ret, response, err = g.ListOrganizationRunnerApplicationDownloads(ctx, g.entity.Owner) - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: ret, response, err = g.enterprise.ListRunnerApplicationDownloads(ctx, g.entity.Owner) default: return nil, nil, errors.New("invalid entity type") @@ -277,11 +278,11 @@ func (g *githubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) e }() switch g.entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: response, err = g.RemoveRunner(ctx, g.entity.Owner, g.entity.Name, runnerID) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: response, err = g.RemoveOrganizationRunner(ctx, g.entity.Owner, runnerID) - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: response, err = g.enterprise.RemoveRunner(ctx, g.entity.Owner, runnerID) default: return errors.New("invalid entity type") @@ -313,11 +314,11 @@ func (g *githubClient) CreateEntityRegistrationToken(ctx context.Context) (*gith }() switch g.entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: ret, response, err = g.CreateRegistrationToken(ctx, g.entity.Owner, g.entity.Name) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: ret, response, err = g.CreateOrganizationRegistrationToken(ctx, g.entity.Owner) - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: ret, response, err = g.enterprise.CreateRegistrationToken(ctx, g.entity.Owner) default: return nil, nil, errors.New("invalid entity type") @@ -326,7 +327,7 @@ func (g *githubClient) CreateEntityRegistrationToken(ctx context.Context) (*gith return ret, response, err } -func (g *githubClient) 
getOrganizationRunnerGroupIDByName(ctx context.Context, entity params.GithubEntity, rgName string) (int64, error) { +func (g *githubClient) getOrganizationRunnerGroupIDByName(ctx context.Context, entity params.ForgeEntity, rgName string) (int64, error) { opts := github.ListOrgRunnerGroupOptions{ ListOptions: github.ListOptions{ PerPage: 100, @@ -362,7 +363,7 @@ func (g *githubClient) getOrganizationRunnerGroupIDByName(ctx context.Context, e return 0, runnerErrors.NewNotFoundError("runner group %s not found", rgName) } -func (g *githubClient) getEnterpriseRunnerGroupIDByName(ctx context.Context, entity params.GithubEntity, rgName string) (int64, error) { +func (g *githubClient) getEnterpriseRunnerGroupIDByName(ctx context.Context, entity params.ForgeEntity, rgName string) (int64, error) { opts := github.ListEnterpriseRunnerGroupOptions{ ListOptions: github.ListOptions{ PerPage: 100, @@ -405,9 +406,9 @@ func (g *githubClient) GetEntityJITConfig(ctx context.Context, instance string, if pool.GitHubRunnerGroup != "" { switch g.entity.EntityType { - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: rgID, err = g.getOrganizationRunnerGroupIDByName(ctx, g.entity, pool.GitHubRunnerGroup) - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: rgID, err = g.getEnterpriseRunnerGroupIDByName(ctx, g.entity, pool.GitHubRunnerGroup) } @@ -434,11 +435,11 @@ func (g *githubClient) GetEntityJITConfig(ctx context.Context, instance string, var response *github.Response switch g.entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: ret, response, err = g.GenerateRepoJITConfig(ctx, g.entity.Owner, g.entity.Name, &req) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: ret, response, err = g.GenerateOrgJITConfig(ctx, g.entity.Owner, &req) - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: ret, response, err = g.enterprise.GenerateEnterpriseJITConfig(ctx, g.entity.Owner, &req) } if err != nil { @@ -482,7 +483,7 @@ func (g *githubClient) RateLimit(ctx context.Context) (*github.RateLimits, error return limits, nil } -func (g *githubClient) GetEntity() params.GithubEntity { +func (g *githubClient) GetEntity() params.ForgeEntity { return g.entity } @@ -514,8 +515,49 @@ func NewRateLimitClient(ctx context.Context, credentials params.GithubCredential return cli, nil } -func Client(ctx context.Context, entity params.GithubEntity) (common.GithubClient, error) { - // func GithubClient(ctx context.Context, entity params.GithubEntity) (common.GithubClient, error) { +func withGiteaURLs(client *github.Client, apiBaseURL, uploadBaseURL string) (*github.Client, error) { + if client == nil { + return nil, errors.New("client is nil") + } + + if apiBaseURL == "" || uploadBaseURL == "" { + return nil, errors.New("invalid gitea URLs") + } + + parsedBaseURL, err := url.ParseRequestURI(apiBaseURL) + if err != nil { + return nil, errors.Wrap(err, "parsing gitea base URL") + } + + if !strings.HasSuffix(parsedBaseURL.Path, "/") { + parsedBaseURL.Path += "/" + } + + if !strings.HasSuffix(parsedBaseURL.Path, "/api/v1/") { + parsedBaseURL.Path += "api/v1/" + } + + parsedUploadURL, err := url.ParseRequestURI(uploadBaseURL) + if err != nil { + return nil, errors.Wrap(err, "parsing gitea upload URL") + } + + if !strings.HasSuffix(parsedUploadURL.Path, "/") { + parsedUploadURL.Path += "/" + } + + if !strings.HasSuffix(parsedUploadURL.Path, "/api/v1/") { + 
parsedUploadURL.Path += "api/v1/" + } + + client.BaseURL = parsedBaseURL + client.UploadURL = parsedUploadURL + + return client, nil +} + +func Client(ctx context.Context, entity params.ForgeEntity) (common.GithubClient, error) { + // func GithubClient(ctx context.Context, entity params.ForgeEntity) (common.GithubClient, error) { httpClient, err := entity.Credentials.GetHTTPClient(ctx) if err != nil { return nil, errors.Wrap(err, "fetching http client") @@ -523,11 +565,17 @@ func Client(ctx context.Context, entity params.GithubEntity) (common.GithubClien slog.DebugContext( ctx, "creating client for entity", - "entity", entity.String(), "base_url", entity.Credentials.APIBaseURL, - "upload_url", entity.Credentials.UploadBaseURL) + "entity", entity.String(), "base_url", entity.Credentials.APIBaseURL(), + "upload_url", entity.Credentials.UploadBaseURL()) + + ghClient := github.NewClient(httpClient) + switch entity.Credentials.ForgeType { + case params.GithubEndpointType: + ghClient, err = ghClient.WithEnterpriseURLs(entity.Credentials.APIBaseURL(), entity.Credentials.UploadBaseURL()) + case params.GiteaEndpointType: + ghClient, err = withGiteaURLs(ghClient, entity.Credentials.APIBaseURL(), entity.Credentials.UploadBaseURL()) + } - ghClient, err := github.NewClient(httpClient).WithEnterpriseURLs( - entity.Credentials.APIBaseURL, entity.Credentials.UploadBaseURL) if err != nil { return nil, errors.Wrap(err, "fetching github client") } diff --git a/util/github/scalesets/token.go b/util/github/scalesets/token.go index 47aa764f..1491b748 100644 --- a/util/github/scalesets/token.go +++ b/util/github/scalesets/token.go @@ -36,7 +36,7 @@ func (s *ScaleSetClient) getActionServiceInfo(ctx context.Context) (params.Actio entity := s.ghCli.GetEntity() body := params.ActionsServiceAdminInfoRequest{ - URL: entity.GithubURL(), + URL: entity.ForgeURL(), RunnerEvent: "register", } diff --git a/workers/cache/cache.go b/workers/cache/cache.go index 13400a3a..ce23d269 100644 --- a/workers/cache/cache.go +++ b/workers/cache/cache.go @@ -262,7 +262,7 @@ func (w *Worker) handleEntityEvent(entityGetter params.EntityGetter, op common.O w.toolsWorkes[entity.ID] = worker } else if hasOld { // probably an update operation - if old.Credentials.ID != entity.Credentials.ID { + if old.Credentials.GetID() != entity.Credentials.GetID() { worker.Reset() } } diff --git a/workers/cache/tool_cache.go b/workers/cache/tool_cache.go index 6133580d..7d8d5737 100644 --- a/workers/cache/tool_cache.go +++ b/workers/cache/tool_cache.go @@ -16,7 +16,7 @@ import ( "github.com/cloudbase/garm/util/github" ) -func newToolsUpdater(ctx context.Context, entity params.GithubEntity) *toolsUpdater { +func newToolsUpdater(ctx context.Context, entity params.ForgeEntity) *toolsUpdater { return &toolsUpdater{ ctx: ctx, entity: entity, @@ -27,7 +27,7 @@ func newToolsUpdater(ctx context.Context, entity params.GithubEntity) *toolsUpda type toolsUpdater struct { ctx context.Context - entity params.GithubEntity + entity params.ForgeEntity tools []commonParams.RunnerApplicationDownload lastUpdate time.Time diff --git a/workers/entity/util.go b/workers/entity/util.go index 95c9b2cc..877758a7 100644 --- a/workers/entity/util.go +++ b/workers/entity/util.go @@ -31,7 +31,7 @@ func composeControllerWatcherFilters() dbCommon.PayloadFilterFunc { ) } -func composeWorkerWatcherFilters(entity params.GithubEntity) dbCommon.PayloadFilterFunc { +func composeWorkerWatcherFilters(entity params.ForgeEntity) dbCommon.PayloadFilterFunc { return watcher.WithAny( watcher.WithAll( 
watcher.WithEntityFilter(entity), @@ -39,7 +39,7 @@ func composeWorkerWatcherFilters(entity params.GithubEntity) dbCommon.PayloadFil ), // Watch for credentials updates. watcher.WithAll( - watcher.WithGithubCredentialsFilter(entity.Credentials), + watcher.WithForgeCredentialsFilter(entity.Credentials), watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), ), ) diff --git a/workers/entity/worker.go b/workers/entity/worker.go index fcfd2a60..7f0f79e6 100644 --- a/workers/entity/worker.go +++ b/workers/entity/worker.go @@ -7,6 +7,8 @@ import ( "sync" "time" + "golang.org/x/sync/errgroup" + "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" @@ -16,10 +18,9 @@ import ( "github.com/cloudbase/garm/util/github" "github.com/cloudbase/garm/util/github/scalesets" "github.com/cloudbase/garm/workers/scaleset" - "golang.org/x/sync/errgroup" ) -func NewWorker(ctx context.Context, store dbCommon.Store, entity params.GithubEntity, providers map[string]common.Provider) (*Worker, error) { +func NewWorker(ctx context.Context, store dbCommon.Store, entity params.ForgeEntity, providers map[string]common.Provider) (*Worker, error) { consumerID := fmt.Sprintf("entity-worker-%s", entity.String()) ctx = garmUtil.WithSlogContext( @@ -43,7 +44,7 @@ type Worker struct { store dbCommon.Store ghCli common.GithubClient - Entity params.GithubEntity + Entity params.ForgeEntity providers map[string]common.Provider scaleSetController *scaleset.Controller diff --git a/workers/entity/worker_watcher.go b/workers/entity/worker_watcher.go index be0b6b3c..04e20a65 100644 --- a/workers/entity/worker_watcher.go +++ b/workers/entity/worker_watcher.go @@ -44,7 +44,7 @@ func (w *Worker) handleEntityEventPayload(event dbCommon.ChangePayload) { defer w.mux.Unlock() credentials := entity.Credentials - if w.Entity.Credentials.ID != credentials.ID { + if w.Entity.Credentials.GetID() != credentials.GetID() { // credentials were swapped on the entity. We need to recompose the watcher // filters. w.consumer.SetFilters(composeWorkerWatcherFilters(entity)) @@ -63,18 +63,29 @@ func (w *Worker) handleEntityEventPayload(event dbCommon.ChangePayload) { } func (w *Worker) handleEntityCredentialsEventPayload(event dbCommon.ChangePayload) { - credentials, ok := event.Payload.(params.GithubCredentials) + var credsGetter params.ForgeCredentialsGetter + var ok bool + + switch event.EntityType { + case dbCommon.GithubCredentialsEntityType: + credsGetter, ok = event.Payload.(params.GithubCredentials) + default: + slog.ErrorContext(w.ctx, "invalid entity type", "entity_type", event.EntityType) + return + } if !ok { slog.ErrorContext(w.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) return } + credentials := credsGetter.GetForgeCredentials() + switch event.Operation { case dbCommon.UpdateOperation: slog.DebugContext(w.ctx, "got delete operation") w.mux.Lock() defer w.mux.Unlock() - if w.Entity.Credentials.ID != credentials.ID { + if w.Entity.Credentials.GetID() != credentials.GetID() { // The channel is buffered. We may get an old update. If credentials get updated // immediately after they are swapped on the entity, we may still get an update // pushed to the channel before the filters are swapped. We can ignore the update. 
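Note on the credentials handling in the worker watcher hunks above: the new code asserts the watcher payload to a params.ForgeCredentialsGetter and then compares credentials by GetID() instead of comparing GithubCredentials values directly. The interface and its helpers are not part of this hunk; the sketch below only illustrates the shape implied by the call sites (GetForgeCredentials, GetID), assuming both GitHub and, in later patches, Gitea credentials convert to a common forge-agnostic type. Names and fields here are inferred for illustration, not the actual definitions from the params package.

package params

// Sketch only, inferred from the call sites in the hunks above; the real
// types live in github.com/cloudbase/garm/params and may differ.

// ForgeCredentials is the forge-agnostic view that workers compare by ID.
type ForgeCredentials struct {
	ID uint
	// Endpoint, auth type and CA material would also live here.
}

// GetID is what worker code uses instead of touching the ID field directly.
func (f ForgeCredentials) GetID() uint { return f.ID }

// ForgeCredentialsGetter is the interface the watcher payload is asserted to.
type ForgeCredentialsGetter interface {
	GetForgeCredentials() ForgeCredentials
}

// GithubCredentials satisfies the interface by converting itself; a Gitea
// credentials type added in a later patch would do the same.
type GithubCredentials struct {
	ID uint
}

func (c GithubCredentials) GetForgeCredentials() ForgeCredentials {
	return ForgeCredentials{ID: c.ID}
}

Comparing by GetID() on the converted value, rather than by struct equality on a GitHub-specific type, is what lets the entity worker keep a single code path once credentials can come from either forge.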
diff --git a/workers/pools/controller.go b/workers/pools/controller.go index 458766a9..058ebec6 100644 --- a/workers/pools/controller.go +++ b/workers/pools/controller.go @@ -1,3 +1 @@ package pools - - diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go index 37680cd0..9ba94553 100644 --- a/workers/provider/instance_manager.go +++ b/workers/provider/instance_manager.go @@ -52,7 +52,7 @@ type instanceManager struct { helper providerHelper scaleSet params.ScaleSet - scaleSetEntity params.GithubEntity + scaleSetEntity params.ForgeEntity deleteBackoff time.Duration @@ -120,14 +120,14 @@ func (i *instanceManager) incrementBackOff() { } } -func (i *instanceManager) getEntity() (params.GithubEntity, error) { +func (i *instanceManager) getEntity() (params.ForgeEntity, error) { entity, err := i.scaleSet.GetEntity() if err != nil { - return params.GithubEntity{}, fmt.Errorf("getting entity: %w", err) + return params.ForgeEntity{}, fmt.Errorf("getting entity: %w", err) } ghEntity, err := i.helper.GetGithubEntity(entity) if err != nil { - return params.GithubEntity{}, fmt.Errorf("getting entity: %w", err) + return params.ForgeEntity{}, fmt.Errorf("getting entity: %w", err) } return ghEntity, nil } @@ -156,7 +156,7 @@ func (i *instanceManager) handleCreateInstanceInProvider(instance params.Instanc bootstrapArgs := commonParams.BootstrapInstance{ Name: instance.Name, Tools: tools, - RepoURL: entity.GithubURL(), + RepoURL: entity.ForgeURL(), MetadataURL: instance.MetadataURL, CallbackURL: instance.CallbackURL, InstanceToken: token, @@ -167,7 +167,7 @@ func (i *instanceManager) handleCreateInstanceInProvider(instance params.Instanc ExtraSpecs: i.scaleSet.ExtraSpecs, // This is temporary. We need to extend providers to know about scale sets. 
PoolID: i.pseudoPoolID(), - CACertBundle: entity.Credentials.CABundle, + CACertBundle: entity.Credentials.CABundle(), GitHubRunnerGroup: i.scaleSet.GitHubRunnerGroup, JitConfigEnabled: true, } diff --git a/workers/provider/provider_helper.go b/workers/provider/provider_helper.go index 6a53bab3..96762135 100644 --- a/workers/provider/provider_helper.go +++ b/workers/provider/provider_helper.go @@ -14,7 +14,7 @@ type providerHelper interface { InstanceTokenGetter() auth.InstanceTokenGetter updateArgsFromProviderInstance(instanceName string, providerInstance commonParams.ProviderInstance) (params.Instance, error) GetControllerInfo() (params.ControllerInfo, error) - GetGithubEntity(entity params.GithubEntity) (params.GithubEntity, error) + GetGithubEntity(entity params.ForgeEntity) (params.ForgeEntity, error) } func (p *Provider) updateArgsFromProviderInstance(instanceName string, providerInstance commonParams.ProviderInstance) (params.Instance, error) { @@ -71,10 +71,10 @@ func (p *Provider) InstanceTokenGetter() auth.InstanceTokenGetter { return p.tokenGetter } -func (p *Provider) GetGithubEntity(entity params.GithubEntity) (params.GithubEntity, error) { - ghEntity, err := p.store.GetGithubEntity(p.ctx, entity.EntityType, entity.ID) +func (p *Provider) GetGithubEntity(entity params.ForgeEntity) (params.ForgeEntity, error) { + ghEntity, err := p.store.GetForgeEntity(p.ctx, entity.EntityType, entity.ID) if err != nil { - return params.GithubEntity{}, fmt.Errorf("getting github entity: %w", err) + return params.ForgeEntity{}, fmt.Errorf("getting github entity: %w", err) } return ghEntity, nil diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index 6e3170a0..45dfbfa3 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -15,7 +15,7 @@ import ( garmUtil "github.com/cloudbase/garm/util" ) -func NewController(ctx context.Context, store dbCommon.Store, entity params.GithubEntity, providers map[string]common.Provider) (*Controller, error) { +func NewController(ctx context.Context, store dbCommon.Store, entity params.ForgeEntity, providers map[string]common.Provider) (*Controller, error) { consumerID := fmt.Sprintf("scaleset-controller-%s", entity.String()) ctx = garmUtil.WithSlogContext( @@ -57,7 +57,7 @@ type Controller struct { ScaleSets map[uint]*scaleSet - Entity params.GithubEntity + Entity params.ForgeEntity consumer dbCommon.Consumer store dbCommon.Store diff --git a/workers/scaleset/controller_watcher.go b/workers/scaleset/controller_watcher.go index ec4771fc..551d711d 100644 --- a/workers/scaleset/controller_watcher.go +++ b/workers/scaleset/controller_watcher.go @@ -127,11 +127,11 @@ func (c *Controller) handleEntityEvent(event dbCommon.ChangePayload) { var entityGetter params.EntityGetter var ok bool switch c.Entity.EntityType { - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: entityGetter, ok = event.Payload.(params.Repository) - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: entityGetter, ok = event.Payload.(params.Organization) - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: entityGetter, ok = event.Payload.(params.Enterprise) } if !ok { diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index 26f845ff..54d9b52e 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -60,11 +60,11 @@ func (w *Worker) recordOrUpdateJob(job params.ScaleSetJobMessage) 
error { jobParams.RunnerGroupName = w.scaleSet.GitHubRunnerGroup switch entity.EntityType { - case params.GithubEntityTypeEnterprise: + case params.ForgeEntityTypeEnterprise: jobParams.EnterpriseID = &asUUID - case params.GithubEntityTypeRepository: + case params.ForgeEntityTypeRepository: jobParams.RepoID = &asUUID - case params.GithubEntityTypeOrganization: + case params.ForgeEntityTypeOrganization: jobParams.OrgID = &asUUID default: return fmt.Errorf("unknown entity type: %s", entity.EntityType) diff --git a/workers/scaleset/util.go b/workers/scaleset/util.go index 02d33b69..1d8d6c51 100644 --- a/workers/scaleset/util.go +++ b/workers/scaleset/util.go @@ -6,7 +6,7 @@ import ( "github.com/cloudbase/garm/params" ) -func composeControllerWatcherFilters(entity params.GithubEntity) dbCommon.PayloadFilterFunc { +func composeControllerWatcherFilters(entity params.ForgeEntity) dbCommon.PayloadFilterFunc { return watcher.WithAny( watcher.WithAll( watcher.WithEntityScaleSetFilter(entity), From 823a9e4b82189284877f93d7c04ac90c39cd24fa Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 14 May 2025 00:34:54 +0000 Subject: [PATCH 060/179] Add Gitea endpoints and credentials Signed-off-by: Gabriel Adrian Samfira --- apiserver/controllers/gitea_credentials.go | 228 +++++++++ apiserver/controllers/gitea_endpoints.go | 186 +++++++ .../{credentials.go => github_credentials.go} | 6 +- .../{endpoints.go => github_endpoints.go} | 8 +- apiserver/routers/routers.go | 38 ++ apiserver/swagger-models.yaml | 44 +- apiserver/swagger.yaml | 278 ++++++++++- cache/cache_test.go | 29 +- cache/credentials_cache.go | 66 ++- cache/entity_cache.go | 27 +- cache/tools_cache.go | 19 +- .../create_credentials_responses.go | 6 +- .../create_gitea_credentials_parameters.go | 151 ++++++ .../create_gitea_credentials_responses.go | 179 +++++++ client/credentials/credentials_client.go | 198 ++++++++ .../delete_gitea_credentials_parameters.go | 152 ++++++ .../delete_gitea_credentials_responses.go | 106 ++++ .../credentials/get_credentials_responses.go | 6 +- .../get_gitea_credentials_parameters.go | 152 ++++++ .../get_gitea_credentials_responses.go | 179 +++++++ .../list_gitea_credentials_parameters.go | 128 +++++ .../list_gitea_credentials_responses.go | 179 +++++++ .../update_credentials_responses.go | 6 +- .../update_gitea_credentials_parameters.go | 174 +++++++ .../update_gitea_credentials_responses.go | 179 +++++++ .../create_gitea_endpoint_parameters.go | 151 ++++++ .../create_gitea_endpoint_responses.go | 184 +++++++ .../create_github_endpoint_responses.go | 2 +- .../delete_gitea_endpoint_parameters.go | 151 ++++++ .../delete_gitea_endpoint_responses.go | 106 ++++ client/endpoints/endpoints_client.go | 194 ++++++++ .../get_gitea_endpoint_parameters.go | 151 ++++++ .../endpoints/get_gitea_endpoint_responses.go | 184 +++++++ .../get_github_endpoint_responses.go | 2 +- .../list_gitea_endpoints_parameters.go | 128 +++++ .../list_gitea_endpoints_responses.go | 184 +++++++ .../list_github_endpoints_responses.go | 2 +- .../update_gitea_endpoint_parameters.go | 173 +++++++ .../update_gitea_endpoint_responses.go | 184 +++++++ .../update_github_endpoint_responses.go | 2 +- cmd/garm-cli/cmd/gitea.go | 21 + cmd/garm-cli/cmd/gitea_credentials.go | 317 ++++++++++++ cmd/garm-cli/cmd/gitea_endpoints.go | 218 ++++++++ cmd/garm-cli/cmd/github_credentials.go | 4 +- cmd/garm-cli/cmd/github_endpoints.go | 10 +- cmd/garm-cli/cmd/repository.go | 7 +- cmd/garm/main.go | 1 + database/common/mocks/Store.go | 416 +++++++++++++--- 
database/common/store.go | 31 +- database/common/watcher.go | 1 + database/sql/enterprise.go | 2 - database/sql/enterprise_test.go | 4 +- database/sql/gitea.go | 469 ++++++++++++++++++ database/sql/github.go | 140 ++---- database/sql/github_test.go | 2 +- database/sql/models.go | 78 +-- database/sql/organizations.go | 3 - database/sql/organizations_test.go | 4 +- database/sql/pools_test.go | 4 +- database/sql/repositories.go | 41 +- database/sql/repositories_test.go | 12 +- database/sql/scalesets_test.go | 4 +- database/sql/sql.go | 1 + database/sql/util.go | 171 ++++++- database/watcher/filters.go | 2 +- database/watcher/watcher_store_test.go | 12 +- go.mod | 1 + go.sum | 2 + internal/testing/testing.go | 2 +- params/params.go | 222 +++------ params/requests.go | 204 +++++++- runner/common/mocks/GithubClient.go | 40 +- runner/common/mocks/GithubEntityOperations.go | 40 +- runner/common/mocks/RateLimitClient.go | 59 +++ runner/enterprises_test.go | 8 +- runner/gitea_credentials.go | 86 ++++ runner/gitea_endpoints.go | 82 +++ runner/github_credentials.go | 24 +- runner/organizations_test.go | 8 +- runner/pool/pool.go | 21 +- runner/pool/watcher.go | 4 +- runner/pools_test.go | 4 +- runner/repositories.go | 13 +- runner/repositories_test.go | 10 +- test/integration/client_utils.go | 4 +- test/integration/credentials_test.go | 4 +- util/github/client.go | 37 +- vendor/golang.org/x/mod/LICENSE | 27 + vendor/golang.org/x/mod/PATENTS | 22 + vendor/golang.org/x/mod/semver/semver.go | 401 +++++++++++++++ vendor/modules.txt | 3 + workers/cache/cache.go | 37 +- workers/cache/gitea_tools.go | 152 ++++++ workers/cache/tool_cache.go | 70 ++- workers/entity/controller.go | 1 + workers/entity/worker.go | 1 + workers/entity/worker_watcher.go | 7 +- workers/provider/instance_manager.go | 2 +- workers/scaleset/controller.go | 2 +- workers/scaleset/scaleset.go | 2 +- 100 files changed, 7439 insertions(+), 660 deletions(-) create mode 100644 apiserver/controllers/gitea_credentials.go create mode 100644 apiserver/controllers/gitea_endpoints.go rename apiserver/controllers/{credentials.go => github_credentials.go} (98%) rename apiserver/controllers/{endpoints.go => github_endpoints.go} (98%) create mode 100644 client/credentials/create_gitea_credentials_parameters.go create mode 100644 client/credentials/create_gitea_credentials_responses.go create mode 100644 client/credentials/delete_gitea_credentials_parameters.go create mode 100644 client/credentials/delete_gitea_credentials_responses.go create mode 100644 client/credentials/get_gitea_credentials_parameters.go create mode 100644 client/credentials/get_gitea_credentials_responses.go create mode 100644 client/credentials/list_gitea_credentials_parameters.go create mode 100644 client/credentials/list_gitea_credentials_responses.go create mode 100644 client/credentials/update_gitea_credentials_parameters.go create mode 100644 client/credentials/update_gitea_credentials_responses.go create mode 100644 client/endpoints/create_gitea_endpoint_parameters.go create mode 100644 client/endpoints/create_gitea_endpoint_responses.go create mode 100644 client/endpoints/delete_gitea_endpoint_parameters.go create mode 100644 client/endpoints/delete_gitea_endpoint_responses.go create mode 100644 client/endpoints/get_gitea_endpoint_parameters.go create mode 100644 client/endpoints/get_gitea_endpoint_responses.go create mode 100644 client/endpoints/list_gitea_endpoints_parameters.go create mode 100644 client/endpoints/list_gitea_endpoints_responses.go create mode 100644 
client/endpoints/update_gitea_endpoint_parameters.go create mode 100644 client/endpoints/update_gitea_endpoint_responses.go create mode 100644 cmd/garm-cli/cmd/gitea.go create mode 100644 cmd/garm-cli/cmd/gitea_credentials.go create mode 100644 cmd/garm-cli/cmd/gitea_endpoints.go create mode 100644 database/sql/gitea.go create mode 100644 runner/common/mocks/RateLimitClient.go create mode 100644 runner/gitea_credentials.go create mode 100644 runner/gitea_endpoints.go create mode 100644 vendor/golang.org/x/mod/LICENSE create mode 100644 vendor/golang.org/x/mod/PATENTS create mode 100644 vendor/golang.org/x/mod/semver/semver.go create mode 100644 workers/cache/gitea_tools.go diff --git a/apiserver/controllers/gitea_credentials.go b/apiserver/controllers/gitea_credentials.go new file mode 100644 index 00000000..e1be0fb7 --- /dev/null +++ b/apiserver/controllers/gitea_credentials.go @@ -0,0 +1,228 @@ +package controllers + +import ( + "encoding/json" + "log/slog" + "math" + "net/http" + "strconv" + + "github.com/gorilla/mux" + + gErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/params" +) + +// swagger:route GET /gitea/credentials credentials ListGiteaCredentials +// +// List all credentials. +// +// Responses: +// 200: Credentials +// 400: APIErrorResponse +func (a *APIController) ListGiteaCredentials(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + creds, err := a.r.ListGiteaCredentials(ctx) + if err != nil { + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(creds); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + +// swagger:route POST /gitea/credentials credentials CreateGiteaCredentials +// +// Create a Gitea credential. +// +// Parameters: +// + name: Body +// description: Parameters used when creating a Gitea credential. +// type: CreateGiteaCredentialsParams +// in: body +// required: true +// +// Responses: +// 200: ForgeCredentials +// 400: APIErrorResponse +func (a *APIController) CreateGiteaCredential(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + var params params.CreateGiteaCredentialsParams + if err := json.NewDecoder(r.Body).Decode(¶ms); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode request") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + cred, err := a.r.CreateGiteaCredentials(ctx, params) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to create Gitea credential") + handleError(ctx, w, err) + return + } + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(cred); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + +// swagger:route GET /gitea/credentials/{id} credentials GetGiteaCredentials +// +// Get a Gitea credential. +// +// Parameters: +// + name: id +// description: ID of the Gitea credential. 
+// type: integer +// in: path +// required: true +// +// Responses: +// 200: ForgeCredentials +// 400: APIErrorResponse +func (a *APIController) GetGiteaCredential(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + vars := mux.Vars(r) + idParam, ok := vars["id"] + if !ok { + slog.ErrorContext(ctx, "missing id in request") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + id, err := strconv.ParseUint(idParam, 10, 64) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + if id > math.MaxUint { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "id is too large") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + cred, err := a.r.GetGiteaCredentials(ctx, uint(id)) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to get Gitea credential") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(cred); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + +// swagger:route DELETE /gitea/credentials/{id} credentials DeleteGiteaCredentials +// +// Delete a Gitea credential. +// +// Parameters: +// + name: id +// description: ID of the Gitea credential. +// type: integer +// in: path +// required: true +// +// Responses: +// default: APIErrorResponse +func (a *APIController) DeleteGiteaCredential(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + vars := mux.Vars(r) + idParam, ok := vars["id"] + if !ok { + slog.ErrorContext(ctx, "missing id in request") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + id, err := strconv.ParseUint(idParam, 10, 64) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + if id > math.MaxUint { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "id is too large") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + if err := a.r.DeleteGiteaCredentials(ctx, uint(id)); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to delete Gitea credential") + handleError(ctx, w, err) + return + } + + w.WriteHeader(http.StatusNoContent) +} + +// swagger:route PUT /gitea/credentials/{id} credentials UpdateGiteaCredentials +// +// Update a Gitea credential. +// +// Parameters: +// + name: id +// description: ID of the Gitea credential. +// type: integer +// in: path +// required: true +// + name: Body +// description: Parameters used when updating a Gitea credential. 
+// type: UpdateGiteaCredentialsParams +// in: body +// required: true +// +// Responses: +// 200: ForgeCredentials +// 400: APIErrorResponse +func (a *APIController) UpdateGiteaCredential(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + vars := mux.Vars(r) + idParam, ok := vars["id"] + if !ok { + slog.ErrorContext(ctx, "missing id in request") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + id, err := strconv.ParseUint(idParam, 10, 64) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to parse id") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + if id > math.MaxUint { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "id is too large") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + var params params.UpdateGiteaCredentialsParams + if err := json.NewDecoder(r.Body).Decode(¶ms); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode request") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + cred, err := a.r.UpdateGiteaCredentials(ctx, uint(id), params) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to update Gitea credential") + handleError(ctx, w, err) + return + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(cred); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} diff --git a/apiserver/controllers/gitea_endpoints.go b/apiserver/controllers/gitea_endpoints.go new file mode 100644 index 00000000..6f1525d5 --- /dev/null +++ b/apiserver/controllers/gitea_endpoints.go @@ -0,0 +1,186 @@ +package controllers + +import ( + "encoding/json" + "log/slog" + "net/http" + + "github.com/gorilla/mux" + + gErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/params" +) + +// swagger:route POST /gitea/endpoints endpoints CreateGiteaEndpoint +// +// Create a Gitea Endpoint. +// +// Parameters: +// + name: Body +// description: Parameters used when creating a Gitea endpoint. +// type: CreateGiteaEndpointParams +// in: body +// required: true +// +// Responses: +// 200: ForgeEndpoint +// default: APIErrorResponse +func (a *APIController) CreateGiteaEndpoint(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + var params params.CreateGiteaEndpointParams + if err := json.NewDecoder(r.Body).Decode(¶ms); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode request") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + endpoint, err := a.r.CreateGiteaEndpoint(ctx, params) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to create Gitea endpoint") + handleError(ctx, w, err) + return + } + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(endpoint); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + +// swagger:route GET /gitea/endpoints endpoints ListGiteaEndpoints +// +// List all Gitea Endpoints. 
+// +// Responses: +// 200: ForgeEndpoints +// default: APIErrorResponse +func (a *APIController) ListGiteaEndpoints(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + endpoints, err := a.r.ListGiteaEndpoints(ctx) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to list Gitea endpoints") + handleError(ctx, w, err) + return + } + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(endpoints); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + +// swagger:route GET /gitea/endpoints/{name} endpoints GetGiteaEndpoint +// +// Get a Gitea Endpoint. +// +// Parameters: +// + name: name +// description: The name of the Gitea endpoint. +// type: string +// in: path +// required: true +// +// Responses: +// 200: ForgeEndpoint +// default: APIErrorResponse +func (a *APIController) GetGiteaEndpoint(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + vars := mux.Vars(r) + name, ok := vars["name"] + if !ok { + slog.ErrorContext(ctx, "missing name in request") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + endpoint, err := a.r.GetGiteaEndpoint(ctx, name) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to get Gitea endpoint") + handleError(ctx, w, err) + return + } + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(endpoint); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} + +// swagger:route DELETE /gitea/endpoints/{name} endpoints DeleteGiteaEndpoint +// +// Delete a Gitea Endpoint. +// +// Parameters: +// + name: name +// description: The name of the Gitea endpoint. +// type: string +// in: path +// required: true +// +// Responses: +// default: APIErrorResponse +func (a *APIController) DeleteGiteaEndpoint(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + vars := mux.Vars(r) + name, ok := vars["name"] + if !ok { + slog.ErrorContext(ctx, "missing name in request") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + if err := a.r.DeleteGiteaEndpoint(ctx, name); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to delete Gitea endpoint") + handleError(ctx, w, err) + return + } + w.WriteHeader(http.StatusNoContent) +} + +// swagger:route PUT /gitea/endpoints/{name} endpoints UpdateGiteaEndpoint +// +// Update a Gitea Endpoint. +// +// Parameters: +// + name: name +// description: The name of the Gitea endpoint. +// type: string +// in: path +// required: true +// + name: Body +// description: Parameters used when updating a Gitea endpoint. 
+//	type: UpdateGiteaEndpointParams +//	in: body +//	required: true +// +//	Responses: +//	  200: ForgeEndpoint +//	  default: APIErrorResponse +func (a *APIController) UpdateGiteaEndpoint(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + vars := mux.Vars(r) + name, ok := vars["name"] + if !ok { + slog.ErrorContext(ctx, "missing name in request") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + var params params.UpdateGiteaEndpointParams + if err := json.NewDecoder(r.Body).Decode(&params); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to decode request") + handleError(ctx, w, gErrors.ErrBadRequest) + return + } + + endpoint, err := a.r.UpdateGiteaEndpoint(ctx, name, params) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to update Gitea endpoint") + handleError(ctx, w, err) + return + } + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(endpoint); err != nil { + slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to encode response") + } +} diff --git a/apiserver/controllers/credentials.go b/apiserver/controllers/github_credentials.go similarity index 98% rename from apiserver/controllers/credentials.go rename to apiserver/controllers/github_credentials.go index 70869b54..c7544357 100644 --- a/apiserver/controllers/credentials.go +++ b/apiserver/controllers/github_credentials.go @@ -47,7 +47,7 @@ func (a *APIController) ListCredentials(w http.ResponseWriter, r *http.Request) // required: true // // Responses: -// 200: GithubCredentials +// 200: ForgeCredentials // 400: APIErrorResponse func (a *APIController) CreateGithubCredential(w http.ResponseWriter, r *http.Request) { ctx := r.Context() @@ -83,7 +83,7 @@ func (a *APIController) CreateGithubCredential(w http.ResponseWriter, r *http.Re // required: true // // Responses: -// 200: GithubCredentials +// 200: ForgeCredentials // 400: APIErrorResponse func (a *APIController) GetGithubCredential(w http.ResponseWriter, r *http.Request) { ctx := r.Context() @@ -183,7 +183,7 @@ func (a *APIController) DeleteGithubCredential(w http.ResponseWriter, r *http.Re // required: true // // Responses: -// 200: GithubCredentials +// 200: ForgeCredentials // 400: APIErrorResponse func (a *APIController) UpdateGithubCredential(w http.ResponseWriter, r *http.Request) { ctx := r.Context() diff --git a/apiserver/controllers/endpoints.go b/apiserver/controllers/github_endpoints.go similarity index 98% rename from apiserver/controllers/endpoints.go rename to apiserver/controllers/github_endpoints.go index 81e984d4..491c5716 100644 --- a/apiserver/controllers/endpoints.go +++ b/apiserver/controllers/github_endpoints.go @@ -23,7 +23,7 @@ import ( // required: true // // Responses: -// 200: GithubEndpoint +// 200: ForgeEndpoint // default: APIErrorResponse func (a *APIController) CreateGithubEndpoint(w http.ResponseWriter, r *http.Request) { ctx := r.Context() @@ -52,7 +52,7 @@ func (a *APIController) CreateGithubEndpoint(w http.ResponseWriter, r *http.Requ // List all GitHub Endpoints.
// // Responses: -// 200: GithubEndpoints +// 200: ForgeEndpoints // default: APIErrorResponse func (a *APIController) ListGithubEndpoints(w http.ResponseWriter, r *http.Request) { ctx := r.Context() @@ -81,7 +81,7 @@ func (a *APIController) ListGithubEndpoints(w http.ResponseWriter, r *http.Reque // required: true // // Responses: -// 200: GithubEndpoint +// 200: ForgeEndpoint // default: APIErrorResponse func (a *APIController) GetGithubEndpoint(w http.ResponseWriter, r *http.Request) { ctx := r.Context() @@ -153,7 +153,7 @@ func (a *APIController) DeleteGithubEndpoint(w http.ResponseWriter, r *http.Requ // required: true // // Responses: -// 200: GithubEndpoint +// 200: ForgeEndpoint // default: APIErrorResponse func (a *APIController) UpdateGithubEndpoint(w http.ResponseWriter, r *http.Request) { ctx := r.Context() diff --git a/apiserver/routers/routers.go b/apiserver/routers/routers.go index ec135292..2036b5f1 100644 --- a/apiserver/routers/routers.go +++ b/apiserver/routers/routers.go @@ -454,6 +454,44 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware apiRouter.Handle("/github/credentials/{id}/", http.HandlerFunc(han.UpdateGithubCredential)).Methods("PUT", "OPTIONS") apiRouter.Handle("/github/credentials/{id}", http.HandlerFunc(han.UpdateGithubCredential)).Methods("PUT", "OPTIONS") + ////////////////////// + // Gitea Endpoints // + ////////////////////// + // Create Gitea Endpoint + apiRouter.Handle("/gitea/endpoints/", http.HandlerFunc(han.CreateGiteaEndpoint)).Methods("POST", "OPTIONS") + apiRouter.Handle("/gitea/endpoints", http.HandlerFunc(han.CreateGiteaEndpoint)).Methods("POST", "OPTIONS") + // List Gitea Endpoints + apiRouter.Handle("/gitea/endpoints/", http.HandlerFunc(han.ListGiteaEndpoints)).Methods("GET", "OPTIONS") + apiRouter.Handle("/gitea/endpoints", http.HandlerFunc(han.ListGiteaEndpoints)).Methods("GET", "OPTIONS") + // Get Gitea Endpoint + apiRouter.Handle("/gitea/endpoints/{name}/", http.HandlerFunc(han.GetGiteaEndpoint)).Methods("GET", "OPTIONS") + apiRouter.Handle("/gitea/endpoints/{name}", http.HandlerFunc(han.GetGiteaEndpoint)).Methods("GET", "OPTIONS") + // Delete Gitea Endpoint + apiRouter.Handle("/gitea/endpoints/{name}/", http.HandlerFunc(han.DeleteGiteaEndpoint)).Methods("DELETE", "OPTIONS") + apiRouter.Handle("/gitea/endpoints/{name}", http.HandlerFunc(han.DeleteGiteaEndpoint)).Methods("DELETE", "OPTIONS") + // Update Gitea Endpoint + apiRouter.Handle("/gitea/endpoints/{name}/", http.HandlerFunc(han.UpdateGiteaEndpoint)).Methods("PUT", "OPTIONS") + apiRouter.Handle("/gitea/endpoints/{name}", http.HandlerFunc(han.UpdateGiteaEndpoint)).Methods("PUT", "OPTIONS") + + //////////////////////// + // Gitea credentials // + //////////////////////// + // List Gitea Credentials + apiRouter.Handle("/gitea/credentials/", http.HandlerFunc(han.ListGiteaCredentials)).Methods("GET", "OPTIONS") + apiRouter.Handle("/gitea/credentials", http.HandlerFunc(han.ListGiteaCredentials)).Methods("GET", "OPTIONS") + // Create Gitea Credentials + apiRouter.Handle("/gitea/credentials/", http.HandlerFunc(han.CreateGiteaCredential)).Methods("POST", "OPTIONS") + apiRouter.Handle("/gitea/credentials", http.HandlerFunc(han.CreateGiteaCredential)).Methods("POST", "OPTIONS") + // Get Gitea Credential + apiRouter.Handle("/gitea/credentials/{id}/", http.HandlerFunc(han.GetGiteaCredential)).Methods("GET", "OPTIONS") + apiRouter.Handle("/gitea/credentials/{id}", http.HandlerFunc(han.GetGiteaCredential)).Methods("GET", "OPTIONS") + // Delete Gitea Credential + 
apiRouter.Handle("/gitea/credentials/{id}/", http.HandlerFunc(han.DeleteGiteaCredential)).Methods("DELETE", "OPTIONS") + apiRouter.Handle("/gitea/credentials/{id}", http.HandlerFunc(han.DeleteGiteaCredential)).Methods("DELETE", "OPTIONS") + // Update Gitea Credential + apiRouter.Handle("/gitea/credentials/{id}/", http.HandlerFunc(han.UpdateGiteaCredential)).Methods("PUT", "OPTIONS") + apiRouter.Handle("/gitea/credentials/{id}", http.HandlerFunc(han.UpdateGiteaCredential)).Methods("PUT", "OPTIONS") + ///////////////////////// // Websocket endpoints // ///////////////////////// diff --git a/apiserver/swagger-models.yaml b/apiserver/swagger-models.yaml index ad83d6c8..74eaac84 100644 --- a/apiserver/swagger-models.yaml +++ b/apiserver/swagger-models.yaml @@ -74,11 +74,11 @@ definitions: package: github.com/cloudbase/garm/params alias: garm_params items: - $ref: '#/definitions/GithubCredentials' - GithubCredentials: + $ref: '#/definitions/ForgeCredentials' + ForgeCredentials: type: object x-go-type: - type: GithubCredentials + type: ForgeCredentials import: package: github.com/cloudbase/garm/params alias: garm_params @@ -271,22 +271,29 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params - GithubEndpoint: + UpdateGiteaEndpointParams: type: object x-go-type: - type: GithubEndpoint + type: UpdateGiteaEndpointParams import: package: github.com/cloudbase/garm/params alias: garm_params - GithubEndpoints: + ForgeEndpoint: + type: object + x-go-type: + type: ForgeEndpoint + import: + package: github.com/cloudbase/garm/params + alias: garm_params + ForgeEndpoints: type: array x-go-type: - type: GithubEndpoints + type: ForgeEndpoints import: package: github.com/cloudbase/garm/params alias: garm_params items: - $ref: '#/definitions/GithubEndpoint' + $ref: '#/definitions/ForgeEndpoint' CreateGithubEndpointParams: type: object x-go-type: @@ -294,6 +301,13 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params + CreateGiteaEndpointParams: + type: object + x-go-type: + type: CreateGiteaEndpointParams + import: + package: github.com/cloudbase/garm/params + alias: garm_params CreateGithubCredentialsParams: type: object x-go-type: @@ -301,6 +315,13 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params + CreateGiteaCredentialsParams: + type: object + x-go-type: + type: CreateGiteaCredentialsParams + import: + package: github.com/cloudbase/garm/params + alias: garm_params UpdateGithubCredentialsParams: type: object x-go-type: @@ -308,6 +329,13 @@ definitions: import: package: github.com/cloudbase/garm/params alias: garm_params + UpdateGiteaCredentialsParams: + type: object + x-go-type: + type: UpdateGiteaCredentialsParams + import: + package: github.com/cloudbase/garm/params + alias: garm_params UpdateControllerParams: type: object x-go-type: diff --git a/apiserver/swagger.yaml b/apiserver/swagger.yaml index 2f89ab77..66e7a655 100644 --- a/apiserver/swagger.yaml +++ b/apiserver/swagger.yaml @@ -23,6 +23,20 @@ definitions: alias: garm_params package: github.com/cloudbase/garm/params type: CreateEnterpriseParams + CreateGiteaCredentialsParams: + type: object + x-go-type: + import: + alias: garm_params + package: github.com/cloudbase/garm/params + type: CreateGiteaCredentialsParams + CreateGiteaEndpointParams: + type: object + x-go-type: + import: + alias: garm_params + package: github.com/cloudbase/garm/params + type: CreateGiteaEndpointParams CreateGithubCredentialsParams: type: object x-go-type: @@ -74,7 +88,7 @@ 
definitions: type: CreateScaleSetParams Credentials: items: - $ref: '#/definitions/GithubCredentials' + $ref: '#/definitions/ForgeCredentials' type: array x-go-type: import: @@ -97,29 +111,29 @@ definitions: alias: garm_params package: github.com/cloudbase/garm/params type: Enterprises - GithubCredentials: + ForgeCredentials: type: object x-go-type: import: alias: garm_params package: github.com/cloudbase/garm/params - type: GithubCredentials - GithubEndpoint: + type: ForgeCredentials + ForgeEndpoint: type: object x-go-type: import: alias: garm_params package: github.com/cloudbase/garm/params - type: GithubEndpoint - GithubEndpoints: + type: ForgeEndpoint + ForgeEndpoints: items: - $ref: '#/definitions/GithubEndpoint' + $ref: '#/definitions/ForgeEndpoint' type: array x-go-type: import: alias: garm_params package: github.com/cloudbase/garm/params - type: GithubEndpoints + type: ForgeEndpoints HookInfo: type: object x-go-type: @@ -281,6 +295,20 @@ definitions: alias: garm_params package: github.com/cloudbase/garm/params type: UpdateEntityParams + UpdateGiteaCredentialsParams: + type: object + x-go-type: + import: + alias: garm_params + package: github.com/cloudbase/garm/params + type: UpdateGiteaCredentialsParams + UpdateGiteaEndpointParams: + type: object + x-go-type: + import: + alias: garm_params + package: github.com/cloudbase/garm/params + type: UpdateGiteaEndpointParams UpdateGithubCredentialsParams: type: object x-go-type: @@ -721,6 +749,212 @@ paths: summary: Initialize the first run of the controller. tags: - first-run + /gitea/credentials: + get: + operationId: ListGiteaCredentials + responses: + "200": + description: Credentials + schema: + $ref: '#/definitions/Credentials' + "400": + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: List all credentials. + tags: + - credentials + post: + operationId: CreateGiteaCredentials + parameters: + - description: Parameters used when creating a Gitea credential. + in: body + name: Body + required: true + schema: + $ref: '#/definitions/CreateGiteaCredentialsParams' + description: Parameters used when creating a Gitea credential. + type: object + responses: + "200": + description: ForgeCredentials + schema: + $ref: '#/definitions/ForgeCredentials' + "400": + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Create a Gitea credential. + tags: + - credentials + /gitea/credentials/{id}: + delete: + operationId: DeleteGiteaCredentials + parameters: + - description: ID of the Gitea credential. + in: path + name: id + required: true + type: integer + responses: + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Delete a Gitea credential. + tags: + - credentials + get: + operationId: GetGiteaCredentials + parameters: + - description: ID of the Gitea credential. + in: path + name: id + required: true + type: integer + responses: + "200": + description: ForgeCredentials + schema: + $ref: '#/definitions/ForgeCredentials' + "400": + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Get a Gitea credential. + tags: + - credentials + put: + operationId: UpdateGiteaCredentials + parameters: + - description: ID of the Gitea credential. + in: path + name: id + required: true + type: integer + - description: Parameters used when updating a Gitea credential. 
+ in: body + name: Body + required: true + schema: + $ref: '#/definitions/UpdateGiteaCredentialsParams' + description: Parameters used when updating a Gitea credential. + type: object + responses: + "200": + description: ForgeCredentials + schema: + $ref: '#/definitions/ForgeCredentials' + "400": + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Update a Gitea credential. + tags: + - credentials + /gitea/endpoints: + get: + operationId: ListGiteaEndpoints + responses: + "200": + description: ForgeEndpoints + schema: + $ref: '#/definitions/ForgeEndpoints' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: List all Gitea Endpoints. + tags: + - endpoints + post: + operationId: CreateGiteaEndpoint + parameters: + - description: Parameters used when creating a Gitea endpoint. + in: body + name: Body + required: true + schema: + $ref: '#/definitions/CreateGiteaEndpointParams' + description: Parameters used when creating a Gitea endpoint. + type: object + responses: + "200": + description: ForgeEndpoint + schema: + $ref: '#/definitions/ForgeEndpoint' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Create a Gitea Endpoint. + tags: + - endpoints + /gitea/endpoints/{name}: + delete: + operationId: DeleteGiteaEndpoint + parameters: + - description: The name of the Gitea endpoint. + in: path + name: name + required: true + type: string + responses: + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Delete a Gitea Endpoint. + tags: + - endpoints + get: + operationId: GetGiteaEndpoint + parameters: + - description: The name of the Gitea endpoint. + in: path + name: name + required: true + type: string + responses: + "200": + description: ForgeEndpoint + schema: + $ref: '#/definitions/ForgeEndpoint' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Get a Gitea Endpoint. + tags: + - endpoints + put: + operationId: UpdateGiteaEndpoint + parameters: + - description: The name of the Gitea endpoint. + in: path + name: name + required: true + type: string + - description: Parameters used when updating a Gitea endpoint. + in: body + name: Body + required: true + schema: + $ref: '#/definitions/UpdateGiteaEndpointParams' + description: Parameters used when updating a Gitea endpoint. + type: object + responses: + "200": + description: ForgeEndpoint + schema: + $ref: '#/definitions/ForgeEndpoint' + default: + description: APIErrorResponse + schema: + $ref: '#/definitions/APIErrorResponse' + summary: Update a Gitea Endpoint. 
+ tags: + - endpoints /github/credentials: get: operationId: ListCredentials @@ -749,9 +983,9 @@ paths: type: object responses: "200": - description: GithubCredentials + description: ForgeCredentials schema: - $ref: '#/definitions/GithubCredentials' + $ref: '#/definitions/ForgeCredentials' "400": description: APIErrorResponse schema: @@ -786,9 +1020,9 @@ paths: type: integer responses: "200": - description: GithubCredentials + description: ForgeCredentials schema: - $ref: '#/definitions/GithubCredentials' + $ref: '#/definitions/ForgeCredentials' "400": description: APIErrorResponse schema: @@ -814,9 +1048,9 @@ paths: type: object responses: "200": - description: GithubCredentials + description: ForgeCredentials schema: - $ref: '#/definitions/GithubCredentials' + $ref: '#/definitions/ForgeCredentials' "400": description: APIErrorResponse schema: @@ -829,9 +1063,9 @@ paths: operationId: ListGithubEndpoints responses: "200": - description: GithubEndpoints + description: ForgeEndpoints schema: - $ref: '#/definitions/GithubEndpoints' + $ref: '#/definitions/ForgeEndpoints' default: description: APIErrorResponse schema: @@ -852,9 +1086,9 @@ paths: type: object responses: "200": - description: GithubEndpoint + description: ForgeEndpoint schema: - $ref: '#/definitions/GithubEndpoint' + $ref: '#/definitions/ForgeEndpoint' default: description: APIErrorResponse schema: @@ -889,9 +1123,9 @@ paths: type: string responses: "200": - description: GithubEndpoint + description: ForgeEndpoint schema: - $ref: '#/definitions/GithubEndpoint' + $ref: '#/definitions/ForgeEndpoint' default: description: APIErrorResponse schema: @@ -917,9 +1151,9 @@ paths: type: object responses: "200": - description: GithubEndpoint + description: ForgeEndpoint schema: - $ref: '#/definitions/GithubEndpoint' + $ref: '#/definitions/ForgeEndpoint' default: description: APIErrorResponse schema: diff --git a/cache/cache_test.go b/cache/cache_test.go index 08b269b8..3e7ed559 100644 --- a/cache/cache_test.go +++ b/cache/cache_test.go @@ -30,7 +30,7 @@ func (c *CacheTestSuite) TearDownTest() { githubToolsCache.mux.Lock() defer githubToolsCache.mux.Unlock() githubToolsCache.entities = make(map[string]GithubEntityTools) - credentialsCache.cache = make(map[uint]params.GithubCredentials) + credentialsCache.cache = make(map[uint]params.ForgeCredentials) instanceCache.cache = make(map[string]params.Instance) entityCache = &EntityCache{ entities: make(map[string]EntityItem), @@ -90,7 +90,7 @@ func (c *CacheTestSuite) TestGetInexistentCache() { } func (c *CacheTestSuite) TestSetGithubCredentials() { - credentials := params.GithubCredentials{ + credentials := params.ForgeCredentials{ ID: 1, } SetGithubCredentials(credentials) @@ -100,7 +100,7 @@ func (c *CacheTestSuite) TestSetGithubCredentials() { } func (c *CacheTestSuite) TestGetGithubCredentials() { - credentials := params.GithubCredentials{ + credentials := params.ForgeCredentials{ ID: 1, } SetGithubCredentials(credentials) @@ -110,11 +110,11 @@ func (c *CacheTestSuite) TestGetGithubCredentials() { nonExisting, ok := GetGithubCredentials(2) c.Require().False(ok) - c.Require().Equal(params.GithubCredentials{}, nonExisting) + c.Require().Equal(params.ForgeCredentials{}, nonExisting) } func (c *CacheTestSuite) TestDeleteGithubCredentials() { - credentials := params.GithubCredentials{ + credentials := params.ForgeCredentials{ ID: 1, } SetGithubCredentials(credentials) @@ -125,14 +125,14 @@ func (c *CacheTestSuite) TestDeleteGithubCredentials() { DeleteGithubCredentials(1) cachedCreds, ok = 
GetGithubCredentials(1) c.Require().False(ok) - c.Require().Equal(params.GithubCredentials{}, cachedCreds) + c.Require().Equal(params.ForgeCredentials{}, cachedCreds) } func (c *CacheTestSuite) TestGetAllGithubCredentials() { - credentials1 := params.GithubCredentials{ + credentials1 := params.ForgeCredentials{ ID: 1, } - credentials2 := params.GithubCredentials{ + credentials2 := params.ForgeCredentials{ ID: 2, } SetGithubCredentials(credentials1) @@ -265,12 +265,12 @@ func (c *CacheTestSuite) TestSetGetEntityCache() { c.Require().True(ok) c.Require().Equal(entity.ID, cachedEntity.ID) - entity.Credentials.GithubCredentials.Description = "test description" + entity.Credentials.Description = "test description" SetEntity(entity) cachedEntity, ok = GetEntity("test-entity") c.Require().True(ok) c.Require().Equal(entity.ID, cachedEntity.ID) - c.Require().Equal(entity.Credentials.GithubCredentials.Description, cachedEntity.Credentials.GithubCredentials.Description) + c.Require().Equal(entity.Credentials.Description, cachedEntity.Credentials.Description) } func (c *CacheTestSuite) TestReplaceEntityPools() { @@ -280,10 +280,7 @@ func (c *CacheTestSuite) TestReplaceEntityPools() { Name: "test", Owner: "test", Credentials: params.ForgeCredentials{ - ForgeType: params.GithubEndpointType, - GithubCredentials: params.GithubCredentials{ - ID: 1, - }, + ID: 1, }, } pool1 := params.Pool{ @@ -293,7 +290,7 @@ func (c *CacheTestSuite) TestReplaceEntityPools() { ID: "pool-2", } - credentials := params.GithubCredentials{ + credentials := params.ForgeCredentials{ ID: 1, Name: "test", } @@ -304,7 +301,7 @@ func (c *CacheTestSuite) TestReplaceEntityPools() { cachedEntity, ok := GetEntity(entity.ID) c.Require().True(ok) c.Require().Equal(entity.ID, cachedEntity.ID) - c.Require().Equal("test", cachedEntity.Credentials.GithubCredentials.Name) + c.Require().Equal("test", cachedEntity.Credentials.Name) pools := GetEntityPools(entity.ID) c.Require().Len(pools, 2) diff --git a/cache/credentials_cache.go b/cache/credentials_cache.go index 060b076a..d5626f40 100644 --- a/cache/credentials_cache.go +++ b/cache/credentials_cache.go @@ -6,61 +6,67 @@ import ( "github.com/cloudbase/garm/params" ) -var credentialsCache *GithubCredentials +var credentialsCache *CredentialCache +var giteaCredentialsCache *CredentialCache func init() { - ghCredentialsCache := &GithubCredentials{ - cache: make(map[uint]params.GithubCredentials), + ghCredentialsCache := &CredentialCache{ + cache: make(map[uint]params.ForgeCredentials), } + gtCredentialsCache := &CredentialCache{ + cache: make(map[uint]params.ForgeCredentials), + } + credentialsCache = ghCredentialsCache + giteaCredentialsCache = gtCredentialsCache } -type GithubCredentials struct { +type CredentialCache struct { mux sync.Mutex - cache map[uint]params.GithubCredentials + cache map[uint]params.ForgeCredentials } -func (g *GithubCredentials) SetCredentialsRateLimit(credsID uint, rateLimit params.GithubRateLimit) { +func (g *CredentialCache) SetCredentialsRateLimit(credsID uint, rateLimit params.GithubRateLimit) { g.mux.Lock() defer g.mux.Unlock() if creds, ok := g.cache[credsID]; ok { - creds.RateLimit = rateLimit + creds.RateLimit = &rateLimit g.cache[credsID] = creds } } -func (g *GithubCredentials) SetCredentials(credentials params.GithubCredentials) { +func (g *CredentialCache) SetCredentials(credentials params.ForgeCredentials) { g.mux.Lock() defer g.mux.Unlock() g.cache[credentials.ID] = credentials - UpdateCredentialsInAffectedEntities(credentials.GetForgeCredentials()) + 
UpdateCredentialsInAffectedEntities(credentials) } -func (g *GithubCredentials) GetCredentials(id uint) (params.GithubCredentials, bool) { +func (g *CredentialCache) GetCredentials(id uint) (params.ForgeCredentials, bool) { g.mux.Lock() defer g.mux.Unlock() if creds, ok := g.cache[id]; ok { return creds, true } - return params.GithubCredentials{}, false + return params.ForgeCredentials{}, false } -func (g *GithubCredentials) DeleteCredentials(id uint) { +func (g *CredentialCache) DeleteCredentials(id uint) { g.mux.Lock() defer g.mux.Unlock() delete(g.cache, id) } -func (g *GithubCredentials) GetAllCredentials() []params.GithubCredentials { +func (g *CredentialCache) GetAllCredentials() []params.ForgeCredentials { g.mux.Lock() defer g.mux.Unlock() - creds := make([]params.GithubCredentials, 0, len(g.cache)) + creds := make([]params.ForgeCredentials, 0, len(g.cache)) for _, cred := range g.cache { creds = append(creds, cred) } @@ -70,11 +76,11 @@ func (g *GithubCredentials) GetAllCredentials() []params.GithubCredentials { return creds } -func (g *GithubCredentials) GetAllCredentialsAsMap() map[uint]params.GithubCredentials { +func (g *CredentialCache) GetAllCredentialsAsMap() map[uint]params.ForgeCredentials { g.mux.Lock() defer g.mux.Unlock() - creds := make(map[uint]params.GithubCredentials, len(g.cache)) + creds := make(map[uint]params.ForgeCredentials, len(g.cache)) for id, cred := range g.cache { creds[id] = cred } @@ -82,11 +88,11 @@ func (g *GithubCredentials) GetAllCredentialsAsMap() map[uint]params.GithubCrede return creds } -func SetGithubCredentials(credentials params.GithubCredentials) { +func SetGithubCredentials(credentials params.ForgeCredentials) { credentialsCache.SetCredentials(credentials) } -func GetGithubCredentials(id uint) (params.GithubCredentials, bool) { +func GetGithubCredentials(id uint) (params.ForgeCredentials, bool) { return credentialsCache.GetCredentials(id) } @@ -94,7 +100,7 @@ func DeleteGithubCredentials(id uint) { credentialsCache.DeleteCredentials(id) } -func GetAllGithubCredentials() []params.GithubCredentials { +func GetAllGithubCredentials() []params.ForgeCredentials { return credentialsCache.GetAllCredentials() } @@ -102,6 +108,26 @@ func SetCredentialsRateLimit(credsID uint, rateLimit params.GithubRateLimit) { credentialsCache.SetCredentialsRateLimit(credsID, rateLimit) } -func GetAllGithubCredentialsAsMap() map[uint]params.GithubCredentials { +func GetAllGithubCredentialsAsMap() map[uint]params.ForgeCredentials { return credentialsCache.GetAllCredentialsAsMap() } + +func SetGiteaCredentials(credentials params.ForgeCredentials) { + giteaCredentialsCache.SetCredentials(credentials) +} + +func GetGiteaCredentials(id uint) (params.ForgeCredentials, bool) { + return giteaCredentialsCache.GetCredentials(id) +} + +func DeleteGiteaCredentials(id uint) { + giteaCredentialsCache.DeleteCredentials(id) +} + +func GetAllGiteaCredentials() []params.ForgeCredentials { + return giteaCredentialsCache.GetAllCredentials() +} + +func GetAllGiteaCredentialsAsMap() map[uint]params.ForgeCredentials { + return giteaCredentialsCache.GetAllCredentialsAsMap() +} diff --git a/cache/entity_cache.go b/cache/entity_cache.go index 74f406de..6bd1f2c7 100644 --- a/cache/entity_cache.go +++ b/cache/entity_cache.go @@ -44,18 +44,16 @@ func (e *EntityCache) GetEntity(entityID string) (params.ForgeEntity, bool) { defer e.mux.Unlock() if cache, ok := e.entities[entityID]; ok { - // Get the credentials from the credentials cache. 
- var forgeCredsGetter params.ForgeCredentialsGetter - var credsOk bool + var creds params.ForgeCredentials + var ok bool switch cache.Entity.Credentials.ForgeType { case params.GithubEndpointType: - forgeCredsGetter, credsOk = GetGithubCredentials(cache.Entity.Credentials.GetID()) + creds, ok = GetGithubCredentials(cache.Entity.Credentials.ID) case params.GiteaEndpointType: - // add gitea credentials getter - return cache.Entity, false + creds, ok = GetGiteaCredentials(cache.Entity.Credentials.ID) } - if credsOk { - cache.Entity.Credentials = forgeCredsGetter.GetForgeCredentials() + if ok { + cache.Entity.Credentials = creds } return cache.Entity, true } @@ -254,17 +252,16 @@ func (e *EntityCache) GetAllEntities() []params.ForgeEntity { var entities []params.ForgeEntity for _, cache := range e.entities { // Get the credentials from the credentials cache. - var forgeCredsGetter params.ForgeCredentialsGetter - var credsOk bool + var creds params.ForgeCredentials + var ok bool switch cache.Entity.Credentials.ForgeType { case params.GithubEndpointType: - forgeCredsGetter, credsOk = GetGithubCredentials(cache.Entity.Credentials.GetID()) + creds, ok = GetGithubCredentials(cache.Entity.Credentials.ID) case params.GiteaEndpointType: - // add gitea credentials getter - return nil + creds, ok = GetGiteaCredentials(cache.Entity.Credentials.ID) } - if credsOk { - cache.Entity.Credentials = forgeCredsGetter.GetForgeCredentials() + if ok { + cache.Entity.Credentials = creds } entities = append(entities, cache.Entity) } diff --git a/cache/tools_cache.go b/cache/tools_cache.go index f4a2db62..0698c41e 100644 --- a/cache/tools_cache.go +++ b/cache/tools_cache.go @@ -19,6 +19,7 @@ func init() { type GithubEntityTools struct { updatedAt time.Time + expiresAt time.Time entity params.ForgeEntity tools []commonParams.RunnerApplicationDownload } @@ -34,10 +35,12 @@ func (g *GithubToolsCache) Get(entityID string) ([]commonParams.RunnerApplicatio defer g.mux.Unlock() if cache, ok := g.entities[entityID]; ok { - if time.Since(cache.updatedAt) > 1*time.Hour { - // Stale cache, remove it. - delete(g.entities, entityID) - return nil, false + if cache.entity.Credentials.ForgeType == params.GithubEndpointType { + if time.Now().UTC().After(cache.expiresAt.Add(-5 * time.Minute)) { + // Stale cache, remove it. + delete(g.entities, entityID) + return nil, false + } } return cache.tools, true } @@ -48,11 +51,17 @@ func (g *GithubToolsCache) Set(entity params.ForgeEntity, tools []commonParams.R g.mux.Lock() defer g.mux.Unlock() - g.entities[entity.ID] = GithubEntityTools{ + forgeTools := GithubEntityTools{ updatedAt: time.Now(), entity: entity, tools: tools, } + + if entity.Credentials.ForgeType == params.GithubEndpointType { + forgeTools.expiresAt = time.Now().Add(24 * time.Hour) + } + + g.entities[entity.ID] = forgeTools } func SetGithubToolsCache(entity params.ForgeEntity, tools []commonParams.RunnerApplicationDownload) { diff --git a/client/credentials/create_credentials_responses.go b/client/credentials/create_credentials_responses.go index cc5dc5dc..a0037edf 100644 --- a/client/credentials/create_credentials_responses.go +++ b/client/credentials/create_credentials_responses.go @@ -50,10 +50,10 @@ func NewCreateCredentialsOK() *CreateCredentialsOK { /* CreateCredentialsOK describes a response with status code 200, with default header values. 
-GithubCredentials +ForgeCredentials */ type CreateCredentialsOK struct { - Payload garm_params.GithubCredentials + Payload garm_params.ForgeCredentials } // IsSuccess returns true when this create credentials o k response has a 2xx status code @@ -96,7 +96,7 @@ func (o *CreateCredentialsOK) String() string { return fmt.Sprintf("[POST /github/credentials][%d] createCredentialsOK %s", 200, payload) } -func (o *CreateCredentialsOK) GetPayload() garm_params.GithubCredentials { +func (o *CreateCredentialsOK) GetPayload() garm_params.ForgeCredentials { return o.Payload } diff --git a/client/credentials/create_gitea_credentials_parameters.go b/client/credentials/create_gitea_credentials_parameters.go new file mode 100644 index 00000000..6e255bfa --- /dev/null +++ b/client/credentials/create_gitea_credentials_parameters.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package credentials + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + garm_params "github.com/cloudbase/garm/params" +) + +// NewCreateGiteaCredentialsParams creates a new CreateGiteaCredentialsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewCreateGiteaCredentialsParams() *CreateGiteaCredentialsParams { + return &CreateGiteaCredentialsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewCreateGiteaCredentialsParamsWithTimeout creates a new CreateGiteaCredentialsParams object +// with the ability to set a timeout on a request. +func NewCreateGiteaCredentialsParamsWithTimeout(timeout time.Duration) *CreateGiteaCredentialsParams { + return &CreateGiteaCredentialsParams{ + timeout: timeout, + } +} + +// NewCreateGiteaCredentialsParamsWithContext creates a new CreateGiteaCredentialsParams object +// with the ability to set a context for a request. +func NewCreateGiteaCredentialsParamsWithContext(ctx context.Context) *CreateGiteaCredentialsParams { + return &CreateGiteaCredentialsParams{ + Context: ctx, + } +} + +// NewCreateGiteaCredentialsParamsWithHTTPClient creates a new CreateGiteaCredentialsParams object +// with the ability to set a custom HTTPClient for a request. +func NewCreateGiteaCredentialsParamsWithHTTPClient(client *http.Client) *CreateGiteaCredentialsParams { + return &CreateGiteaCredentialsParams{ + HTTPClient: client, + } +} + +/* +CreateGiteaCredentialsParams contains all the parameters to send to the API endpoint + + for the create gitea credentials operation. + + Typically these are written to a http.Request. +*/ +type CreateGiteaCredentialsParams struct { + + /* Body. + + Parameters used when creating a Gitea credential. + */ + Body garm_params.CreateGiteaCredentialsParams + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the create gitea credentials params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *CreateGiteaCredentialsParams) WithDefaults() *CreateGiteaCredentialsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the create gitea credentials params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateGiteaCredentialsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the create gitea credentials params +func (o *CreateGiteaCredentialsParams) WithTimeout(timeout time.Duration) *CreateGiteaCredentialsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the create gitea credentials params +func (o *CreateGiteaCredentialsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the create gitea credentials params +func (o *CreateGiteaCredentialsParams) WithContext(ctx context.Context) *CreateGiteaCredentialsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the create gitea credentials params +func (o *CreateGiteaCredentialsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the create gitea credentials params +func (o *CreateGiteaCredentialsParams) WithHTTPClient(client *http.Client) *CreateGiteaCredentialsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the create gitea credentials params +func (o *CreateGiteaCredentialsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the create gitea credentials params +func (o *CreateGiteaCredentialsParams) WithBody(body garm_params.CreateGiteaCredentialsParams) *CreateGiteaCredentialsParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the create gitea credentials params +func (o *CreateGiteaCredentialsParams) SetBody(body garm_params.CreateGiteaCredentialsParams) { + o.Body = body +} + +// WriteToRequest writes these params to a swagger request +func (o *CreateGiteaCredentialsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/credentials/create_gitea_credentials_responses.go b/client/credentials/create_gitea_credentials_responses.go new file mode 100644 index 00000000..2389cb04 --- /dev/null +++ b/client/credentials/create_gitea_credentials_responses.go @@ -0,0 +1,179 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package credentials + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// CreateGiteaCredentialsReader is a Reader for the CreateGiteaCredentials structure. +type CreateGiteaCredentialsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *CreateGiteaCredentialsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewCreateGiteaCredentialsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + case 400: + result := NewCreateGiteaCredentialsBadRequest() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return nil, result + default: + return nil, runtime.NewAPIError("[POST /gitea/credentials] CreateGiteaCredentials", response, response.Code()) + } +} + +// NewCreateGiteaCredentialsOK creates a CreateGiteaCredentialsOK with default headers values +func NewCreateGiteaCredentialsOK() *CreateGiteaCredentialsOK { + return &CreateGiteaCredentialsOK{} +} + +/* +CreateGiteaCredentialsOK describes a response with status code 200, with default header values. + +ForgeCredentials +*/ +type CreateGiteaCredentialsOK struct { + Payload garm_params.ForgeCredentials +} + +// IsSuccess returns true when this create gitea credentials o k response has a 2xx status code +func (o *CreateGiteaCredentialsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this create gitea credentials o k response has a 3xx status code +func (o *CreateGiteaCredentialsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this create gitea credentials o k response has a 4xx status code +func (o *CreateGiteaCredentialsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this create gitea credentials o k response has a 5xx status code +func (o *CreateGiteaCredentialsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this create gitea credentials o k response a status code equal to that given +func (o *CreateGiteaCredentialsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the create gitea credentials o k response +func (o *CreateGiteaCredentialsOK) Code() int { + return 200 +} + +func (o *CreateGiteaCredentialsOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /gitea/credentials][%d] createGiteaCredentialsOK %s", 200, payload) +} + +func (o *CreateGiteaCredentialsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /gitea/credentials][%d] createGiteaCredentialsOK %s", 200, payload) +} + +func (o *CreateGiteaCredentialsOK) GetPayload() garm_params.ForgeCredentials { + return o.Payload +} + +func (o *CreateGiteaCredentialsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewCreateGiteaCredentialsBadRequest creates a CreateGiteaCredentialsBadRequest with default headers values +func NewCreateGiteaCredentialsBadRequest() *CreateGiteaCredentialsBadRequest { + return &CreateGiteaCredentialsBadRequest{} +} + +/* +CreateGiteaCredentialsBadRequest describes a response with status code 400, with default header values. 
+ +APIErrorResponse +*/ +type CreateGiteaCredentialsBadRequest struct { + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this create gitea credentials bad request response has a 2xx status code +func (o *CreateGiteaCredentialsBadRequest) IsSuccess() bool { + return false +} + +// IsRedirect returns true when this create gitea credentials bad request response has a 3xx status code +func (o *CreateGiteaCredentialsBadRequest) IsRedirect() bool { + return false +} + +// IsClientError returns true when this create gitea credentials bad request response has a 4xx status code +func (o *CreateGiteaCredentialsBadRequest) IsClientError() bool { + return true +} + +// IsServerError returns true when this create gitea credentials bad request response has a 5xx status code +func (o *CreateGiteaCredentialsBadRequest) IsServerError() bool { + return false +} + +// IsCode returns true when this create gitea credentials bad request response a status code equal to that given +func (o *CreateGiteaCredentialsBadRequest) IsCode(code int) bool { + return code == 400 +} + +// Code gets the status code for the create gitea credentials bad request response +func (o *CreateGiteaCredentialsBadRequest) Code() int { + return 400 +} + +func (o *CreateGiteaCredentialsBadRequest) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /gitea/credentials][%d] createGiteaCredentialsBadRequest %s", 400, payload) +} + +func (o *CreateGiteaCredentialsBadRequest) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /gitea/credentials][%d] createGiteaCredentialsBadRequest %s", 400, payload) +} + +func (o *CreateGiteaCredentialsBadRequest) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *CreateGiteaCredentialsBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/credentials/credentials_client.go b/client/credentials/credentials_client.go index 9d7b0563..3dfe1abd 100644 --- a/client/credentials/credentials_client.go +++ b/client/credentials/credentials_client.go @@ -58,14 +58,24 @@ type ClientOption func(*runtime.ClientOperation) type ClientService interface { CreateCredentials(params *CreateCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateCredentialsOK, error) + CreateGiteaCredentials(params *CreateGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateGiteaCredentialsOK, error) + DeleteCredentials(params *DeleteCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error + DeleteGiteaCredentials(params *DeleteGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error + GetCredentials(params *GetCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetCredentialsOK, error) + GetGiteaCredentials(params *GetGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetGiteaCredentialsOK, error) + ListCredentials(params *ListCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListCredentialsOK, error) + ListGiteaCredentials(params *ListGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListGiteaCredentialsOK, error) + 
UpdateCredentials(params *UpdateCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateCredentialsOK, error) + UpdateGiteaCredentials(params *UpdateGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateGiteaCredentialsOK, error) + SetTransport(transport runtime.ClientTransport) } @@ -108,6 +118,45 @@ func (a *Client) CreateCredentials(params *CreateCredentialsParams, authInfo run panic(msg) } +/* +CreateGiteaCredentials creates a gitea credential +*/ +func (a *Client) CreateGiteaCredentials(params *CreateGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateGiteaCredentialsOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewCreateGiteaCredentialsParams() + } + op := &runtime.ClientOperation{ + ID: "CreateGiteaCredentials", + Method: "POST", + PathPattern: "/gitea/credentials", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &CreateGiteaCredentialsReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*CreateGiteaCredentialsOK) + if ok { + return success, nil + } + // unexpected success response + // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue + msg := fmt.Sprintf("unexpected success response for CreateGiteaCredentials: API contract not enforced by server. Client expected to get an error, but got: %T", result) + panic(msg) +} + /* DeleteCredentials deletes a git hub credential */ @@ -140,6 +189,38 @@ func (a *Client) DeleteCredentials(params *DeleteCredentialsParams, authInfo run return nil } +/* +DeleteGiteaCredentials deletes a gitea credential +*/ +func (a *Client) DeleteGiteaCredentials(params *DeleteGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error { + // TODO: Validate the params before sending + if params == nil { + params = NewDeleteGiteaCredentialsParams() + } + op := &runtime.ClientOperation{ + ID: "DeleteGiteaCredentials", + Method: "DELETE", + PathPattern: "/gitea/credentials/{id}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &DeleteGiteaCredentialsReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + _, err := a.transport.Submit(op) + if err != nil { + return err + } + return nil +} + /* GetCredentials gets a git hub credential */ @@ -179,6 +260,45 @@ func (a *Client) GetCredentials(params *GetCredentialsParams, authInfo runtime.C panic(msg) } +/* +GetGiteaCredentials gets a gitea credential +*/ +func (a *Client) GetGiteaCredentials(params *GetGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetGiteaCredentialsOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewGetGiteaCredentialsParams() + } + op := &runtime.ClientOperation{ + ID: "GetGiteaCredentials", + Method: "GET", + PathPattern: "/gitea/credentials/{id}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: 
[]string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &GetGiteaCredentialsReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*GetGiteaCredentialsOK) + if ok { + return success, nil + } + // unexpected success response + // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue + msg := fmt.Sprintf("unexpected success response for GetGiteaCredentials: API contract not enforced by server. Client expected to get an error, but got: %T", result) + panic(msg) +} + /* ListCredentials lists all credentials */ @@ -218,6 +338,45 @@ func (a *Client) ListCredentials(params *ListCredentialsParams, authInfo runtime panic(msg) } +/* +ListGiteaCredentials lists all credentials +*/ +func (a *Client) ListGiteaCredentials(params *ListGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListGiteaCredentialsOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewListGiteaCredentialsParams() + } + op := &runtime.ClientOperation{ + ID: "ListGiteaCredentials", + Method: "GET", + PathPattern: "/gitea/credentials", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &ListGiteaCredentialsReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*ListGiteaCredentialsOK) + if ok { + return success, nil + } + // unexpected success response + // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue + msg := fmt.Sprintf("unexpected success response for ListGiteaCredentials: API contract not enforced by server. 
Client expected to get an error, but got: %T", result) + panic(msg) +} + /* UpdateCredentials updates a git hub credential */ @@ -257,6 +416,45 @@ func (a *Client) UpdateCredentials(params *UpdateCredentialsParams, authInfo run panic(msg) } +/* +UpdateGiteaCredentials updates a gitea credential +*/ +func (a *Client) UpdateGiteaCredentials(params *UpdateGiteaCredentialsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateGiteaCredentialsOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewUpdateGiteaCredentialsParams() + } + op := &runtime.ClientOperation{ + ID: "UpdateGiteaCredentials", + Method: "PUT", + PathPattern: "/gitea/credentials/{id}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &UpdateGiteaCredentialsReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*UpdateGiteaCredentialsOK) + if ok { + return success, nil + } + // unexpected success response + // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue + msg := fmt.Sprintf("unexpected success response for UpdateGiteaCredentials: API contract not enforced by server. Client expected to get an error, but got: %T", result) + panic(msg) +} + // SetTransport changes the transport on the client func (a *Client) SetTransport(transport runtime.ClientTransport) { a.transport = transport diff --git a/client/credentials/delete_gitea_credentials_parameters.go b/client/credentials/delete_gitea_credentials_parameters.go new file mode 100644 index 00000000..598ac477 --- /dev/null +++ b/client/credentials/delete_gitea_credentials_parameters.go @@ -0,0 +1,152 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package credentials + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// NewDeleteGiteaCredentialsParams creates a new DeleteGiteaCredentialsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewDeleteGiteaCredentialsParams() *DeleteGiteaCredentialsParams { + return &DeleteGiteaCredentialsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewDeleteGiteaCredentialsParamsWithTimeout creates a new DeleteGiteaCredentialsParams object +// with the ability to set a timeout on a request. +func NewDeleteGiteaCredentialsParamsWithTimeout(timeout time.Duration) *DeleteGiteaCredentialsParams { + return &DeleteGiteaCredentialsParams{ + timeout: timeout, + } +} + +// NewDeleteGiteaCredentialsParamsWithContext creates a new DeleteGiteaCredentialsParams object +// with the ability to set a context for a request. 
+func NewDeleteGiteaCredentialsParamsWithContext(ctx context.Context) *DeleteGiteaCredentialsParams { + return &DeleteGiteaCredentialsParams{ + Context: ctx, + } +} + +// NewDeleteGiteaCredentialsParamsWithHTTPClient creates a new DeleteGiteaCredentialsParams object +// with the ability to set a custom HTTPClient for a request. +func NewDeleteGiteaCredentialsParamsWithHTTPClient(client *http.Client) *DeleteGiteaCredentialsParams { + return &DeleteGiteaCredentialsParams{ + HTTPClient: client, + } +} + +/* +DeleteGiteaCredentialsParams contains all the parameters to send to the API endpoint + + for the delete gitea credentials operation. + + Typically these are written to a http.Request. +*/ +type DeleteGiteaCredentialsParams struct { + + /* ID. + + ID of the Gitea credential. + */ + ID int64 + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the delete gitea credentials params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *DeleteGiteaCredentialsParams) WithDefaults() *DeleteGiteaCredentialsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the delete gitea credentials params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *DeleteGiteaCredentialsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the delete gitea credentials params +func (o *DeleteGiteaCredentialsParams) WithTimeout(timeout time.Duration) *DeleteGiteaCredentialsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the delete gitea credentials params +func (o *DeleteGiteaCredentialsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the delete gitea credentials params +func (o *DeleteGiteaCredentialsParams) WithContext(ctx context.Context) *DeleteGiteaCredentialsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the delete gitea credentials params +func (o *DeleteGiteaCredentialsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the delete gitea credentials params +func (o *DeleteGiteaCredentialsParams) WithHTTPClient(client *http.Client) *DeleteGiteaCredentialsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the delete gitea credentials params +func (o *DeleteGiteaCredentialsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithID adds the id to the delete gitea credentials params +func (o *DeleteGiteaCredentialsParams) WithID(id int64) *DeleteGiteaCredentialsParams { + o.SetID(id) + return o +} + +// SetID adds the id to the delete gitea credentials params +func (o *DeleteGiteaCredentialsParams) SetID(id int64) { + o.ID = id +} + +// WriteToRequest writes these params to a swagger request +func (o *DeleteGiteaCredentialsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param id + if err := r.SetPathParam("id", swag.FormatInt64(o.ID)); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} diff --git a/client/credentials/delete_gitea_credentials_responses.go b/client/credentials/delete_gitea_credentials_responses.go new file mode 100644 index 00000000..d1df7b0b --- /dev/null +++ b/client/credentials/delete_gitea_credentials_responses.go @@ -0,0 +1,106 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package credentials + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" +) + +// DeleteGiteaCredentialsReader is a Reader for the DeleteGiteaCredentials structure. +type DeleteGiteaCredentialsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *DeleteGiteaCredentialsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + result := NewDeleteGiteaCredentialsDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result +} + +// NewDeleteGiteaCredentialsDefault creates a DeleteGiteaCredentialsDefault with default headers values +func NewDeleteGiteaCredentialsDefault(code int) *DeleteGiteaCredentialsDefault { + return &DeleteGiteaCredentialsDefault{ + _statusCode: code, + } +} + +/* +DeleteGiteaCredentialsDefault describes a response with status code -1, with default header values. + +APIErrorResponse +*/ +type DeleteGiteaCredentialsDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this delete gitea credentials default response has a 2xx status code +func (o *DeleteGiteaCredentialsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this delete gitea credentials default response has a 3xx status code +func (o *DeleteGiteaCredentialsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this delete gitea credentials default response has a 4xx status code +func (o *DeleteGiteaCredentialsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this delete gitea credentials default response has a 5xx status code +func (o *DeleteGiteaCredentialsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this delete gitea credentials default response a status code equal to that given +func (o *DeleteGiteaCredentialsDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the delete gitea credentials default response +func (o *DeleteGiteaCredentialsDefault) Code() int { + return o._statusCode +} + +func (o *DeleteGiteaCredentialsDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /gitea/credentials/{id}][%d] DeleteGiteaCredentials default %s", o._statusCode, payload) +} + +func (o *DeleteGiteaCredentialsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /gitea/credentials/{id}][%d] DeleteGiteaCredentials default %s", o._statusCode, payload) +} + +func (o *DeleteGiteaCredentialsDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o 
*DeleteGiteaCredentialsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/credentials/get_credentials_responses.go b/client/credentials/get_credentials_responses.go index 1c2b800b..4538c16e 100644 --- a/client/credentials/get_credentials_responses.go +++ b/client/credentials/get_credentials_responses.go @@ -50,10 +50,10 @@ func NewGetCredentialsOK() *GetCredentialsOK { /* GetCredentialsOK describes a response with status code 200, with default header values. -GithubCredentials +ForgeCredentials */ type GetCredentialsOK struct { - Payload garm_params.GithubCredentials + Payload garm_params.ForgeCredentials } // IsSuccess returns true when this get credentials o k response has a 2xx status code @@ -96,7 +96,7 @@ func (o *GetCredentialsOK) String() string { return fmt.Sprintf("[GET /github/credentials/{id}][%d] getCredentialsOK %s", 200, payload) } -func (o *GetCredentialsOK) GetPayload() garm_params.GithubCredentials { +func (o *GetCredentialsOK) GetPayload() garm_params.ForgeCredentials { return o.Payload } diff --git a/client/credentials/get_gitea_credentials_parameters.go b/client/credentials/get_gitea_credentials_parameters.go new file mode 100644 index 00000000..a844c326 --- /dev/null +++ b/client/credentials/get_gitea_credentials_parameters.go @@ -0,0 +1,152 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package credentials + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// NewGetGiteaCredentialsParams creates a new GetGiteaCredentialsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewGetGiteaCredentialsParams() *GetGiteaCredentialsParams { + return &GetGiteaCredentialsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewGetGiteaCredentialsParamsWithTimeout creates a new GetGiteaCredentialsParams object +// with the ability to set a timeout on a request. +func NewGetGiteaCredentialsParamsWithTimeout(timeout time.Duration) *GetGiteaCredentialsParams { + return &GetGiteaCredentialsParams{ + timeout: timeout, + } +} + +// NewGetGiteaCredentialsParamsWithContext creates a new GetGiteaCredentialsParams object +// with the ability to set a context for a request. +func NewGetGiteaCredentialsParamsWithContext(ctx context.Context) *GetGiteaCredentialsParams { + return &GetGiteaCredentialsParams{ + Context: ctx, + } +} + +// NewGetGiteaCredentialsParamsWithHTTPClient creates a new GetGiteaCredentialsParams object +// with the ability to set a custom HTTPClient for a request. +func NewGetGiteaCredentialsParamsWithHTTPClient(client *http.Client) *GetGiteaCredentialsParams { + return &GetGiteaCredentialsParams{ + HTTPClient: client, + } +} + +/* +GetGiteaCredentialsParams contains all the parameters to send to the API endpoint + + for the get gitea credentials operation. + + Typically these are written to a http.Request. 
+*/ +type GetGiteaCredentialsParams struct { + + /* ID. + + ID of the Gitea credential. + */ + ID int64 + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the get gitea credentials params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *GetGiteaCredentialsParams) WithDefaults() *GetGiteaCredentialsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the get gitea credentials params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *GetGiteaCredentialsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the get gitea credentials params +func (o *GetGiteaCredentialsParams) WithTimeout(timeout time.Duration) *GetGiteaCredentialsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the get gitea credentials params +func (o *GetGiteaCredentialsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the get gitea credentials params +func (o *GetGiteaCredentialsParams) WithContext(ctx context.Context) *GetGiteaCredentialsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the get gitea credentials params +func (o *GetGiteaCredentialsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the get gitea credentials params +func (o *GetGiteaCredentialsParams) WithHTTPClient(client *http.Client) *GetGiteaCredentialsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the get gitea credentials params +func (o *GetGiteaCredentialsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithID adds the id to the get gitea credentials params +func (o *GetGiteaCredentialsParams) WithID(id int64) *GetGiteaCredentialsParams { + o.SetID(id) + return o +} + +// SetID adds the id to the get gitea credentials params +func (o *GetGiteaCredentialsParams) SetID(id int64) { + o.ID = id +} + +// WriteToRequest writes these params to a swagger request +func (o *GetGiteaCredentialsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param id + if err := r.SetPathParam("id", swag.FormatInt64(o.ID)); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/credentials/get_gitea_credentials_responses.go b/client/credentials/get_gitea_credentials_responses.go new file mode 100644 index 00000000..ba116d63 --- /dev/null +++ b/client/credentials/get_gitea_credentials_responses.go @@ -0,0 +1,179 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package credentials + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// GetGiteaCredentialsReader is a Reader for the GetGiteaCredentials structure. +type GetGiteaCredentialsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *GetGiteaCredentialsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewGetGiteaCredentialsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + case 400: + result := NewGetGiteaCredentialsBadRequest() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return nil, result + default: + return nil, runtime.NewAPIError("[GET /gitea/credentials/{id}] GetGiteaCredentials", response, response.Code()) + } +} + +// NewGetGiteaCredentialsOK creates a GetGiteaCredentialsOK with default headers values +func NewGetGiteaCredentialsOK() *GetGiteaCredentialsOK { + return &GetGiteaCredentialsOK{} +} + +/* +GetGiteaCredentialsOK describes a response with status code 200, with default header values. + +ForgeCredentials +*/ +type GetGiteaCredentialsOK struct { + Payload garm_params.ForgeCredentials +} + +// IsSuccess returns true when this get gitea credentials o k response has a 2xx status code +func (o *GetGiteaCredentialsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this get gitea credentials o k response has a 3xx status code +func (o *GetGiteaCredentialsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this get gitea credentials o k response has a 4xx status code +func (o *GetGiteaCredentialsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this get gitea credentials o k response has a 5xx status code +func (o *GetGiteaCredentialsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this get gitea credentials o k response a status code equal to that given +func (o *GetGiteaCredentialsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the get gitea credentials o k response +func (o *GetGiteaCredentialsOK) Code() int { + return 200 +} + +func (o *GetGiteaCredentialsOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/credentials/{id}][%d] getGiteaCredentialsOK %s", 200, payload) +} + +func (o *GetGiteaCredentialsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/credentials/{id}][%d] getGiteaCredentialsOK %s", 200, payload) +} + +func (o *GetGiteaCredentialsOK) GetPayload() garm_params.ForgeCredentials { + return o.Payload +} + +func (o *GetGiteaCredentialsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewGetGiteaCredentialsBadRequest creates a GetGiteaCredentialsBadRequest with default headers values +func NewGetGiteaCredentialsBadRequest() *GetGiteaCredentialsBadRequest { + return &GetGiteaCredentialsBadRequest{} +} + +/* +GetGiteaCredentialsBadRequest describes a response with status code 400, with default header values. 
+ +APIErrorResponse +*/ +type GetGiteaCredentialsBadRequest struct { + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this get gitea credentials bad request response has a 2xx status code +func (o *GetGiteaCredentialsBadRequest) IsSuccess() bool { + return false +} + +// IsRedirect returns true when this get gitea credentials bad request response has a 3xx status code +func (o *GetGiteaCredentialsBadRequest) IsRedirect() bool { + return false +} + +// IsClientError returns true when this get gitea credentials bad request response has a 4xx status code +func (o *GetGiteaCredentialsBadRequest) IsClientError() bool { + return true +} + +// IsServerError returns true when this get gitea credentials bad request response has a 5xx status code +func (o *GetGiteaCredentialsBadRequest) IsServerError() bool { + return false +} + +// IsCode returns true when this get gitea credentials bad request response a status code equal to that given +func (o *GetGiteaCredentialsBadRequest) IsCode(code int) bool { + return code == 400 +} + +// Code gets the status code for the get gitea credentials bad request response +func (o *GetGiteaCredentialsBadRequest) Code() int { + return 400 +} + +func (o *GetGiteaCredentialsBadRequest) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/credentials/{id}][%d] getGiteaCredentialsBadRequest %s", 400, payload) +} + +func (o *GetGiteaCredentialsBadRequest) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/credentials/{id}][%d] getGiteaCredentialsBadRequest %s", 400, payload) +} + +func (o *GetGiteaCredentialsBadRequest) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *GetGiteaCredentialsBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/credentials/list_gitea_credentials_parameters.go b/client/credentials/list_gitea_credentials_parameters.go new file mode 100644 index 00000000..5e321a88 --- /dev/null +++ b/client/credentials/list_gitea_credentials_parameters.go @@ -0,0 +1,128 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package credentials + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewListGiteaCredentialsParams creates a new ListGiteaCredentialsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewListGiteaCredentialsParams() *ListGiteaCredentialsParams { + return &ListGiteaCredentialsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewListGiteaCredentialsParamsWithTimeout creates a new ListGiteaCredentialsParams object +// with the ability to set a timeout on a request. 
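A minimal usage sketch of the GetGiteaCredentials parameter and response types generated above. The credentials.ClientService interface with a GetGiteaCredentials method is an assumption here (it mirrors the endpoints ClientService added later in this patch); the params builder, the OK response and the ForgeCredentials payload are taken from the generated code above.

package example

import (
	"time"

	"github.com/go-openapi/runtime"

	"github.com/cloudbase/garm/client/credentials"
	garm_params "github.com/cloudbase/garm/params"
)

// fetchGiteaCredential fetches a single Gitea credential by ID. The
// ClientService argument is assumed to expose GetGiteaCredentials, analogous
// to the generated endpoints client added later in this patch.
func fetchGiteaCredential(cli credentials.ClientService, auth runtime.ClientAuthInfoWriter, id int64) (garm_params.ForgeCredentials, error) {
	params := credentials.NewGetGiteaCredentialsParams().
		WithID(id).
		WithTimeout(30 * time.Second)

	resp, err := cli.GetGiteaCredentials(params, auth)
	if err != nil {
		return garm_params.ForgeCredentials{}, err
	}
	// resp.Payload is a garm_params.ForgeCredentials, as declared on
	// GetGiteaCredentialsOK above.
	return resp.GetPayload(), nil
}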
+func NewListGiteaCredentialsParamsWithTimeout(timeout time.Duration) *ListGiteaCredentialsParams { + return &ListGiteaCredentialsParams{ + timeout: timeout, + } +} + +// NewListGiteaCredentialsParamsWithContext creates a new ListGiteaCredentialsParams object +// with the ability to set a context for a request. +func NewListGiteaCredentialsParamsWithContext(ctx context.Context) *ListGiteaCredentialsParams { + return &ListGiteaCredentialsParams{ + Context: ctx, + } +} + +// NewListGiteaCredentialsParamsWithHTTPClient creates a new ListGiteaCredentialsParams object +// with the ability to set a custom HTTPClient for a request. +func NewListGiteaCredentialsParamsWithHTTPClient(client *http.Client) *ListGiteaCredentialsParams { + return &ListGiteaCredentialsParams{ + HTTPClient: client, + } +} + +/* +ListGiteaCredentialsParams contains all the parameters to send to the API endpoint + + for the list gitea credentials operation. + + Typically these are written to a http.Request. +*/ +type ListGiteaCredentialsParams struct { + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the list gitea credentials params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListGiteaCredentialsParams) WithDefaults() *ListGiteaCredentialsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the list gitea credentials params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListGiteaCredentialsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the list gitea credentials params +func (o *ListGiteaCredentialsParams) WithTimeout(timeout time.Duration) *ListGiteaCredentialsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the list gitea credentials params +func (o *ListGiteaCredentialsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the list gitea credentials params +func (o *ListGiteaCredentialsParams) WithContext(ctx context.Context) *ListGiteaCredentialsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the list gitea credentials params +func (o *ListGiteaCredentialsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the list gitea credentials params +func (o *ListGiteaCredentialsParams) WithHTTPClient(client *http.Client) *ListGiteaCredentialsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the list gitea credentials params +func (o *ListGiteaCredentialsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WriteToRequest writes these params to a swagger request +func (o *ListGiteaCredentialsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/credentials/list_gitea_credentials_responses.go b/client/credentials/list_gitea_credentials_responses.go new file mode 100644 index 00000000..f27864be --- /dev/null +++ b/client/credentials/list_gitea_credentials_responses.go @@ -0,0 +1,179 @@ +// Code generated by go-swagger; DO NOT EDIT. 
+ +package credentials + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// ListGiteaCredentialsReader is a Reader for the ListGiteaCredentials structure. +type ListGiteaCredentialsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *ListGiteaCredentialsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewListGiteaCredentialsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + case 400: + result := NewListGiteaCredentialsBadRequest() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return nil, result + default: + return nil, runtime.NewAPIError("[GET /gitea/credentials] ListGiteaCredentials", response, response.Code()) + } +} + +// NewListGiteaCredentialsOK creates a ListGiteaCredentialsOK with default headers values +func NewListGiteaCredentialsOK() *ListGiteaCredentialsOK { + return &ListGiteaCredentialsOK{} +} + +/* +ListGiteaCredentialsOK describes a response with status code 200, with default header values. + +Credentials +*/ +type ListGiteaCredentialsOK struct { + Payload garm_params.Credentials +} + +// IsSuccess returns true when this list gitea credentials o k response has a 2xx status code +func (o *ListGiteaCredentialsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this list gitea credentials o k response has a 3xx status code +func (o *ListGiteaCredentialsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this list gitea credentials o k response has a 4xx status code +func (o *ListGiteaCredentialsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this list gitea credentials o k response has a 5xx status code +func (o *ListGiteaCredentialsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this list gitea credentials o k response a status code equal to that given +func (o *ListGiteaCredentialsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the list gitea credentials o k response +func (o *ListGiteaCredentialsOK) Code() int { + return 200 +} + +func (o *ListGiteaCredentialsOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/credentials][%d] listGiteaCredentialsOK %s", 200, payload) +} + +func (o *ListGiteaCredentialsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/credentials][%d] listGiteaCredentialsOK %s", 200, payload) +} + +func (o *ListGiteaCredentialsOK) GetPayload() garm_params.Credentials { + return o.Payload +} + +func (o *ListGiteaCredentialsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewListGiteaCredentialsBadRequest creates a ListGiteaCredentialsBadRequest with default headers values +func 
NewListGiteaCredentialsBadRequest() *ListGiteaCredentialsBadRequest { + return &ListGiteaCredentialsBadRequest{} +} + +/* +ListGiteaCredentialsBadRequest describes a response with status code 400, with default header values. + +APIErrorResponse +*/ +type ListGiteaCredentialsBadRequest struct { + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this list gitea credentials bad request response has a 2xx status code +func (o *ListGiteaCredentialsBadRequest) IsSuccess() bool { + return false +} + +// IsRedirect returns true when this list gitea credentials bad request response has a 3xx status code +func (o *ListGiteaCredentialsBadRequest) IsRedirect() bool { + return false +} + +// IsClientError returns true when this list gitea credentials bad request response has a 4xx status code +func (o *ListGiteaCredentialsBadRequest) IsClientError() bool { + return true +} + +// IsServerError returns true when this list gitea credentials bad request response has a 5xx status code +func (o *ListGiteaCredentialsBadRequest) IsServerError() bool { + return false +} + +// IsCode returns true when this list gitea credentials bad request response a status code equal to that given +func (o *ListGiteaCredentialsBadRequest) IsCode(code int) bool { + return code == 400 +} + +// Code gets the status code for the list gitea credentials bad request response +func (o *ListGiteaCredentialsBadRequest) Code() int { + return 400 +} + +func (o *ListGiteaCredentialsBadRequest) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/credentials][%d] listGiteaCredentialsBadRequest %s", 400, payload) +} + +func (o *ListGiteaCredentialsBadRequest) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/credentials][%d] listGiteaCredentialsBadRequest %s", 400, payload) +} + +func (o *ListGiteaCredentialsBadRequest) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *ListGiteaCredentialsBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/credentials/update_credentials_responses.go b/client/credentials/update_credentials_responses.go index b0254604..6a9f37f8 100644 --- a/client/credentials/update_credentials_responses.go +++ b/client/credentials/update_credentials_responses.go @@ -50,10 +50,10 @@ func NewUpdateCredentialsOK() *UpdateCredentialsOK { /* UpdateCredentialsOK describes a response with status code 200, with default header values. 
-GithubCredentials +ForgeCredentials */ type UpdateCredentialsOK struct { - Payload garm_params.GithubCredentials + Payload garm_params.ForgeCredentials } // IsSuccess returns true when this update credentials o k response has a 2xx status code @@ -96,7 +96,7 @@ func (o *UpdateCredentialsOK) String() string { return fmt.Sprintf("[PUT /github/credentials/{id}][%d] updateCredentialsOK %s", 200, payload) } -func (o *UpdateCredentialsOK) GetPayload() garm_params.GithubCredentials { +func (o *UpdateCredentialsOK) GetPayload() garm_params.ForgeCredentials { return o.Payload } diff --git a/client/credentials/update_gitea_credentials_parameters.go b/client/credentials/update_gitea_credentials_parameters.go new file mode 100644 index 00000000..1907a0f2 --- /dev/null +++ b/client/credentials/update_gitea_credentials_parameters.go @@ -0,0 +1,174 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package credentials + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" + + garm_params "github.com/cloudbase/garm/params" +) + +// NewUpdateGiteaCredentialsParams creates a new UpdateGiteaCredentialsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewUpdateGiteaCredentialsParams() *UpdateGiteaCredentialsParams { + return &UpdateGiteaCredentialsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewUpdateGiteaCredentialsParamsWithTimeout creates a new UpdateGiteaCredentialsParams object +// with the ability to set a timeout on a request. +func NewUpdateGiteaCredentialsParamsWithTimeout(timeout time.Duration) *UpdateGiteaCredentialsParams { + return &UpdateGiteaCredentialsParams{ + timeout: timeout, + } +} + +// NewUpdateGiteaCredentialsParamsWithContext creates a new UpdateGiteaCredentialsParams object +// with the ability to set a context for a request. +func NewUpdateGiteaCredentialsParamsWithContext(ctx context.Context) *UpdateGiteaCredentialsParams { + return &UpdateGiteaCredentialsParams{ + Context: ctx, + } +} + +// NewUpdateGiteaCredentialsParamsWithHTTPClient creates a new UpdateGiteaCredentialsParams object +// with the ability to set a custom HTTPClient for a request. +func NewUpdateGiteaCredentialsParamsWithHTTPClient(client *http.Client) *UpdateGiteaCredentialsParams { + return &UpdateGiteaCredentialsParams{ + HTTPClient: client, + } +} + +/* +UpdateGiteaCredentialsParams contains all the parameters to send to the API endpoint + + for the update gitea credentials operation. + + Typically these are written to a http.Request. +*/ +type UpdateGiteaCredentialsParams struct { + + /* Body. + + Parameters used when updating a Gitea credential. + */ + Body garm_params.UpdateGiteaCredentialsParams + + /* ID. + + ID of the Gitea credential. + */ + ID int64 + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the update gitea credentials params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *UpdateGiteaCredentialsParams) WithDefaults() *UpdateGiteaCredentialsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the update gitea credentials params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *UpdateGiteaCredentialsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the update gitea credentials params +func (o *UpdateGiteaCredentialsParams) WithTimeout(timeout time.Duration) *UpdateGiteaCredentialsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the update gitea credentials params +func (o *UpdateGiteaCredentialsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the update gitea credentials params +func (o *UpdateGiteaCredentialsParams) WithContext(ctx context.Context) *UpdateGiteaCredentialsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the update gitea credentials params +func (o *UpdateGiteaCredentialsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the update gitea credentials params +func (o *UpdateGiteaCredentialsParams) WithHTTPClient(client *http.Client) *UpdateGiteaCredentialsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the update gitea credentials params +func (o *UpdateGiteaCredentialsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the update gitea credentials params +func (o *UpdateGiteaCredentialsParams) WithBody(body garm_params.UpdateGiteaCredentialsParams) *UpdateGiteaCredentialsParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the update gitea credentials params +func (o *UpdateGiteaCredentialsParams) SetBody(body garm_params.UpdateGiteaCredentialsParams) { + o.Body = body +} + +// WithID adds the id to the update gitea credentials params +func (o *UpdateGiteaCredentialsParams) WithID(id int64) *UpdateGiteaCredentialsParams { + o.SetID(id) + return o +} + +// SetID adds the id to the update gitea credentials params +func (o *UpdateGiteaCredentialsParams) SetID(id int64) { + o.ID = id +} + +// WriteToRequest writes these params to a swagger request +func (o *UpdateGiteaCredentialsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + + // path param id + if err := r.SetPathParam("id", swag.FormatInt64(o.ID)); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/credentials/update_gitea_credentials_responses.go b/client/credentials/update_gitea_credentials_responses.go new file mode 100644 index 00000000..edbb54d8 --- /dev/null +++ b/client/credentials/update_gitea_credentials_responses.go @@ -0,0 +1,179 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package credentials + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// UpdateGiteaCredentialsReader is a Reader for the UpdateGiteaCredentials structure. +type UpdateGiteaCredentialsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *UpdateGiteaCredentialsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewUpdateGiteaCredentialsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + case 400: + result := NewUpdateGiteaCredentialsBadRequest() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return nil, result + default: + return nil, runtime.NewAPIError("[PUT /gitea/credentials/{id}] UpdateGiteaCredentials", response, response.Code()) + } +} + +// NewUpdateGiteaCredentialsOK creates a UpdateGiteaCredentialsOK with default headers values +func NewUpdateGiteaCredentialsOK() *UpdateGiteaCredentialsOK { + return &UpdateGiteaCredentialsOK{} +} + +/* +UpdateGiteaCredentialsOK describes a response with status code 200, with default header values. + +ForgeCredentials +*/ +type UpdateGiteaCredentialsOK struct { + Payload garm_params.ForgeCredentials +} + +// IsSuccess returns true when this update gitea credentials o k response has a 2xx status code +func (o *UpdateGiteaCredentialsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this update gitea credentials o k response has a 3xx status code +func (o *UpdateGiteaCredentialsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this update gitea credentials o k response has a 4xx status code +func (o *UpdateGiteaCredentialsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this update gitea credentials o k response has a 5xx status code +func (o *UpdateGiteaCredentialsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this update gitea credentials o k response a status code equal to that given +func (o *UpdateGiteaCredentialsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the update gitea credentials o k response +func (o *UpdateGiteaCredentialsOK) Code() int { + return 200 +} + +func (o *UpdateGiteaCredentialsOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PUT /gitea/credentials/{id}][%d] updateGiteaCredentialsOK %s", 200, payload) +} + +func (o *UpdateGiteaCredentialsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PUT /gitea/credentials/{id}][%d] updateGiteaCredentialsOK %s", 200, payload) +} + +func (o *UpdateGiteaCredentialsOK) GetPayload() garm_params.ForgeCredentials { + return o.Payload +} + +func (o *UpdateGiteaCredentialsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewUpdateGiteaCredentialsBadRequest creates a UpdateGiteaCredentialsBadRequest with default headers values 
+func NewUpdateGiteaCredentialsBadRequest() *UpdateGiteaCredentialsBadRequest { + return &UpdateGiteaCredentialsBadRequest{} +} + +/* +UpdateGiteaCredentialsBadRequest describes a response with status code 400, with default header values. + +APIErrorResponse +*/ +type UpdateGiteaCredentialsBadRequest struct { + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this update gitea credentials bad request response has a 2xx status code +func (o *UpdateGiteaCredentialsBadRequest) IsSuccess() bool { + return false +} + +// IsRedirect returns true when this update gitea credentials bad request response has a 3xx status code +func (o *UpdateGiteaCredentialsBadRequest) IsRedirect() bool { + return false +} + +// IsClientError returns true when this update gitea credentials bad request response has a 4xx status code +func (o *UpdateGiteaCredentialsBadRequest) IsClientError() bool { + return true +} + +// IsServerError returns true when this update gitea credentials bad request response has a 5xx status code +func (o *UpdateGiteaCredentialsBadRequest) IsServerError() bool { + return false +} + +// IsCode returns true when this update gitea credentials bad request response a status code equal to that given +func (o *UpdateGiteaCredentialsBadRequest) IsCode(code int) bool { + return code == 400 +} + +// Code gets the status code for the update gitea credentials bad request response +func (o *UpdateGiteaCredentialsBadRequest) Code() int { + return 400 +} + +func (o *UpdateGiteaCredentialsBadRequest) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PUT /gitea/credentials/{id}][%d] updateGiteaCredentialsBadRequest %s", 400, payload) +} + +func (o *UpdateGiteaCredentialsBadRequest) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PUT /gitea/credentials/{id}][%d] updateGiteaCredentialsBadRequest %s", 400, payload) +} + +func (o *UpdateGiteaCredentialsBadRequest) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *UpdateGiteaCredentialsBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/endpoints/create_gitea_endpoint_parameters.go b/client/endpoints/create_gitea_endpoint_parameters.go new file mode 100644 index 00000000..11dfa73f --- /dev/null +++ b/client/endpoints/create_gitea_endpoint_parameters.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package endpoints + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + garm_params "github.com/cloudbase/garm/params" +) + +// NewCreateGiteaEndpointParams creates a new CreateGiteaEndpointParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
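The BadRequest response type above also serves as the error value returned by the generated reader on a 400, so callers can recover the API error payload with errors.As. A short sketch, assuming the credentials ClientService exposes UpdateGiteaCredentials with the usual go-swagger signature; the params, OK and BadRequest types are the generated ones above.

package example

import (
	"errors"
	"fmt"

	"github.com/go-openapi/runtime"

	"github.com/cloudbase/garm/client/credentials"
	garm_params "github.com/cloudbase/garm/params"
)

// updateGiteaCredential updates a Gitea credential and surfaces the 400
// payload when the server rejects the request.
func updateGiteaCredential(cli credentials.ClientService, auth runtime.ClientAuthInfoWriter, id int64, body garm_params.UpdateGiteaCredentialsParams) (garm_params.ForgeCredentials, error) {
	params := credentials.NewUpdateGiteaCredentialsParams().
		WithID(id).
		WithBody(body)

	resp, err := cli.UpdateGiteaCredentials(params, auth)
	if err != nil {
		// On a 400 the generated reader returns *UpdateGiteaCredentialsBadRequest
		// as the error, carrying the APIErrorResponse payload.
		var badReq *credentials.UpdateGiteaCredentialsBadRequest
		if errors.As(err, &badReq) {
			return garm_params.ForgeCredentials{}, fmt.Errorf("update rejected (HTTP %d): %+v", badReq.Code(), badReq.GetPayload())
		}
		return garm_params.ForgeCredentials{}, err
	}
	return resp.GetPayload(), nil
}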
+func NewCreateGiteaEndpointParams() *CreateGiteaEndpointParams { + return &CreateGiteaEndpointParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewCreateGiteaEndpointParamsWithTimeout creates a new CreateGiteaEndpointParams object +// with the ability to set a timeout on a request. +func NewCreateGiteaEndpointParamsWithTimeout(timeout time.Duration) *CreateGiteaEndpointParams { + return &CreateGiteaEndpointParams{ + timeout: timeout, + } +} + +// NewCreateGiteaEndpointParamsWithContext creates a new CreateGiteaEndpointParams object +// with the ability to set a context for a request. +func NewCreateGiteaEndpointParamsWithContext(ctx context.Context) *CreateGiteaEndpointParams { + return &CreateGiteaEndpointParams{ + Context: ctx, + } +} + +// NewCreateGiteaEndpointParamsWithHTTPClient creates a new CreateGiteaEndpointParams object +// with the ability to set a custom HTTPClient for a request. +func NewCreateGiteaEndpointParamsWithHTTPClient(client *http.Client) *CreateGiteaEndpointParams { + return &CreateGiteaEndpointParams{ + HTTPClient: client, + } +} + +/* +CreateGiteaEndpointParams contains all the parameters to send to the API endpoint + + for the create gitea endpoint operation. + + Typically these are written to a http.Request. +*/ +type CreateGiteaEndpointParams struct { + + /* Body. + + Parameters used when creating a Gitea endpoint. + */ + Body garm_params.CreateGiteaEndpointParams + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the create gitea endpoint params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateGiteaEndpointParams) WithDefaults() *CreateGiteaEndpointParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the create gitea endpoint params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *CreateGiteaEndpointParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the create gitea endpoint params +func (o *CreateGiteaEndpointParams) WithTimeout(timeout time.Duration) *CreateGiteaEndpointParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the create gitea endpoint params +func (o *CreateGiteaEndpointParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the create gitea endpoint params +func (o *CreateGiteaEndpointParams) WithContext(ctx context.Context) *CreateGiteaEndpointParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the create gitea endpoint params +func (o *CreateGiteaEndpointParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the create gitea endpoint params +func (o *CreateGiteaEndpointParams) WithHTTPClient(client *http.Client) *CreateGiteaEndpointParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the create gitea endpoint params +func (o *CreateGiteaEndpointParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the create gitea endpoint params +func (o *CreateGiteaEndpointParams) WithBody(body garm_params.CreateGiteaEndpointParams) *CreateGiteaEndpointParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the create gitea endpoint params +func (o *CreateGiteaEndpointParams) SetBody(body garm_params.CreateGiteaEndpointParams) { + o.Body = body +} + +// WriteToRequest writes these params to a swagger request +func (o *CreateGiteaEndpointParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/endpoints/create_gitea_endpoint_responses.go b/client/endpoints/create_gitea_endpoint_responses.go new file mode 100644 index 00000000..6e99a973 --- /dev/null +++ b/client/endpoints/create_gitea_endpoint_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package endpoints + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// CreateGiteaEndpointReader is a Reader for the CreateGiteaEndpoint structure. +type CreateGiteaEndpointReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *CreateGiteaEndpointReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewCreateGiteaEndpointOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewCreateGiteaEndpointDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewCreateGiteaEndpointOK creates a CreateGiteaEndpointOK with default headers values +func NewCreateGiteaEndpointOK() *CreateGiteaEndpointOK { + return &CreateGiteaEndpointOK{} +} + +/* +CreateGiteaEndpointOK describes a response with status code 200, with default header values. + +ForgeEndpoint +*/ +type CreateGiteaEndpointOK struct { + Payload garm_params.ForgeEndpoint +} + +// IsSuccess returns true when this create gitea endpoint o k response has a 2xx status code +func (o *CreateGiteaEndpointOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this create gitea endpoint o k response has a 3xx status code +func (o *CreateGiteaEndpointOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this create gitea endpoint o k response has a 4xx status code +func (o *CreateGiteaEndpointOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this create gitea endpoint o k response has a 5xx status code +func (o *CreateGiteaEndpointOK) IsServerError() bool { + return false +} + +// IsCode returns true when this create gitea endpoint o k response a status code equal to that given +func (o *CreateGiteaEndpointOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the create gitea endpoint o k response +func (o *CreateGiteaEndpointOK) Code() int { + return 200 +} + +func (o *CreateGiteaEndpointOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /gitea/endpoints][%d] createGiteaEndpointOK %s", 200, payload) +} + +func (o *CreateGiteaEndpointOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /gitea/endpoints][%d] createGiteaEndpointOK %s", 200, payload) +} + +func (o *CreateGiteaEndpointOK) GetPayload() garm_params.ForgeEndpoint { + return o.Payload +} + +func (o *CreateGiteaEndpointOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewCreateGiteaEndpointDefault creates a CreateGiteaEndpointDefault with default headers values +func NewCreateGiteaEndpointDefault(code int) *CreateGiteaEndpointDefault { + return &CreateGiteaEndpointDefault{ + _statusCode: code, + } +} + +/* +CreateGiteaEndpointDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type CreateGiteaEndpointDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this create gitea endpoint default response has a 2xx status code +func (o *CreateGiteaEndpointDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this create gitea endpoint default response has a 3xx status code +func (o *CreateGiteaEndpointDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this create gitea endpoint default response has a 4xx status code +func (o *CreateGiteaEndpointDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this create gitea endpoint default response has a 5xx status code +func (o *CreateGiteaEndpointDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this create gitea endpoint default response a status code equal to that given +func (o *CreateGiteaEndpointDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the create gitea endpoint default response +func (o *CreateGiteaEndpointDefault) Code() int { + return o._statusCode +} + +func (o *CreateGiteaEndpointDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /gitea/endpoints][%d] CreateGiteaEndpoint default %s", o._statusCode, payload) +} + +func (o *CreateGiteaEndpointDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /gitea/endpoints][%d] CreateGiteaEndpoint default %s", o._statusCode, payload) +} + +func (o *CreateGiteaEndpointDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *CreateGiteaEndpointDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/endpoints/create_github_endpoint_responses.go b/client/endpoints/create_github_endpoint_responses.go index 57016978..60961f3a 100644 --- a/client/endpoints/create_github_endpoint_responses.go +++ b/client/endpoints/create_github_endpoint_responses.go @@ -51,7 +51,7 @@ func NewCreateGithubEndpointOK() *CreateGithubEndpointOK { /* CreateGithubEndpointOK describes a response with status code 200, with default header values. -GithubEndpoint +ForgeEndpoint */ type CreateGithubEndpointOK struct { Payload garm_params.ForgeEndpoint diff --git a/client/endpoints/delete_gitea_endpoint_parameters.go b/client/endpoints/delete_gitea_endpoint_parameters.go new file mode 100644 index 00000000..f7ea5a5d --- /dev/null +++ b/client/endpoints/delete_gitea_endpoint_parameters.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package endpoints + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewDeleteGiteaEndpointParams creates a new DeleteGiteaEndpointParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. 
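Unlike the credentials operations, CreateGiteaEndpoint uses a single catch-all default response for every non-200 status. A short, illustrative sketch against the endpoints ClientService added later in this patch, using the params, OK and Default types generated above.

package example

import (
	"errors"
	"fmt"

	"github.com/go-openapi/runtime"

	"github.com/cloudbase/garm/client/endpoints"
	garm_params "github.com/cloudbase/garm/params"
)

// createGiteaEndpoint creates a Gitea endpoint and unpacks the catch-all
// default response on failure.
func createGiteaEndpoint(cli endpoints.ClientService, auth runtime.ClientAuthInfoWriter, ep garm_params.CreateGiteaEndpointParams) (garm_params.ForgeEndpoint, error) {
	params := endpoints.NewCreateGiteaEndpointParams().WithBody(ep)

	resp, err := cli.CreateGiteaEndpoint(params, auth)
	if err != nil {
		// Any non-2xx status is decoded into *CreateGiteaEndpointDefault,
		// which wraps the APIErrorResponse payload.
		var apiErr *endpoints.CreateGiteaEndpointDefault
		if errors.As(err, &apiErr) {
			return garm_params.ForgeEndpoint{}, fmt.Errorf("create failed (HTTP %d): %+v", apiErr.Code(), apiErr.GetPayload())
		}
		return garm_params.ForgeEndpoint{}, err
	}
	return resp.GetPayload(), nil
}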
+// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewDeleteGiteaEndpointParams() *DeleteGiteaEndpointParams { + return &DeleteGiteaEndpointParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewDeleteGiteaEndpointParamsWithTimeout creates a new DeleteGiteaEndpointParams object +// with the ability to set a timeout on a request. +func NewDeleteGiteaEndpointParamsWithTimeout(timeout time.Duration) *DeleteGiteaEndpointParams { + return &DeleteGiteaEndpointParams{ + timeout: timeout, + } +} + +// NewDeleteGiteaEndpointParamsWithContext creates a new DeleteGiteaEndpointParams object +// with the ability to set a context for a request. +func NewDeleteGiteaEndpointParamsWithContext(ctx context.Context) *DeleteGiteaEndpointParams { + return &DeleteGiteaEndpointParams{ + Context: ctx, + } +} + +// NewDeleteGiteaEndpointParamsWithHTTPClient creates a new DeleteGiteaEndpointParams object +// with the ability to set a custom HTTPClient for a request. +func NewDeleteGiteaEndpointParamsWithHTTPClient(client *http.Client) *DeleteGiteaEndpointParams { + return &DeleteGiteaEndpointParams{ + HTTPClient: client, + } +} + +/* +DeleteGiteaEndpointParams contains all the parameters to send to the API endpoint + + for the delete gitea endpoint operation. + + Typically these are written to a http.Request. +*/ +type DeleteGiteaEndpointParams struct { + + /* Name. + + The name of the Gitea endpoint. + */ + Name string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the delete gitea endpoint params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *DeleteGiteaEndpointParams) WithDefaults() *DeleteGiteaEndpointParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the delete gitea endpoint params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *DeleteGiteaEndpointParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the delete gitea endpoint params +func (o *DeleteGiteaEndpointParams) WithTimeout(timeout time.Duration) *DeleteGiteaEndpointParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the delete gitea endpoint params +func (o *DeleteGiteaEndpointParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the delete gitea endpoint params +func (o *DeleteGiteaEndpointParams) WithContext(ctx context.Context) *DeleteGiteaEndpointParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the delete gitea endpoint params +func (o *DeleteGiteaEndpointParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the delete gitea endpoint params +func (o *DeleteGiteaEndpointParams) WithHTTPClient(client *http.Client) *DeleteGiteaEndpointParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the delete gitea endpoint params +func (o *DeleteGiteaEndpointParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithName adds the name to the delete gitea endpoint params +func (o *DeleteGiteaEndpointParams) WithName(name string) *DeleteGiteaEndpointParams { + o.SetName(name) + return o +} + +// SetName adds the name to the delete gitea endpoint params +func (o *DeleteGiteaEndpointParams) SetName(name string) { + o.Name = name +} + +// WriteToRequest writes these params to a swagger request +func (o *DeleteGiteaEndpointParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param name + if err := r.SetPathParam("name", o.Name); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/endpoints/delete_gitea_endpoint_responses.go b/client/endpoints/delete_gitea_endpoint_responses.go new file mode 100644 index 00000000..787d6585 --- /dev/null +++ b/client/endpoints/delete_gitea_endpoint_responses.go @@ -0,0 +1,106 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package endpoints + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" +) + +// DeleteGiteaEndpointReader is a Reader for the DeleteGiteaEndpoint structure. +type DeleteGiteaEndpointReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *DeleteGiteaEndpointReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + result := NewDeleteGiteaEndpointDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result +} + +// NewDeleteGiteaEndpointDefault creates a DeleteGiteaEndpointDefault with default headers values +func NewDeleteGiteaEndpointDefault(code int) *DeleteGiteaEndpointDefault { + return &DeleteGiteaEndpointDefault{ + _statusCode: code, + } +} + +/* +DeleteGiteaEndpointDefault describes a response with status code -1, with default header values. + +APIErrorResponse +*/ +type DeleteGiteaEndpointDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this delete gitea endpoint default response has a 2xx status code +func (o *DeleteGiteaEndpointDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this delete gitea endpoint default response has a 3xx status code +func (o *DeleteGiteaEndpointDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this delete gitea endpoint default response has a 4xx status code +func (o *DeleteGiteaEndpointDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this delete gitea endpoint default response has a 5xx status code +func (o *DeleteGiteaEndpointDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this delete gitea endpoint default response a status code equal to that given +func (o *DeleteGiteaEndpointDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the delete gitea endpoint default response +func (o *DeleteGiteaEndpointDefault) Code() int { + return o._statusCode +} + +func (o *DeleteGiteaEndpointDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /gitea/endpoints/{name}][%d] DeleteGiteaEndpoint default %s", o._statusCode, payload) +} + +func (o *DeleteGiteaEndpointDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /gitea/endpoints/{name}][%d] DeleteGiteaEndpoint default %s", o._statusCode, payload) +} + +func (o *DeleteGiteaEndpointDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *DeleteGiteaEndpointDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/endpoints/endpoints_client.go b/client/endpoints/endpoints_client.go index 9b951b2c..74019577 100644 --- a/client/endpoints/endpoints_client.go +++ b/client/endpoints/endpoints_client.go @@ -54,19 +54,67 @@ type ClientOption func(*runtime.ClientOperation) // ClientService is the interface for Client methods type ClientService interface { + CreateGiteaEndpoint(params *CreateGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateGiteaEndpointOK, error) + CreateGithubEndpoint(params *CreateGithubEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateGithubEndpointOK, error) + DeleteGiteaEndpoint(params 
*DeleteGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error + DeleteGithubEndpoint(params *DeleteGithubEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error + GetGiteaEndpoint(params *GetGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetGiteaEndpointOK, error) + GetGithubEndpoint(params *GetGithubEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetGithubEndpointOK, error) + ListGiteaEndpoints(params *ListGiteaEndpointsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListGiteaEndpointsOK, error) + ListGithubEndpoints(params *ListGithubEndpointsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListGithubEndpointsOK, error) + UpdateGiteaEndpoint(params *UpdateGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateGiteaEndpointOK, error) + UpdateGithubEndpoint(params *UpdateGithubEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateGithubEndpointOK, error) SetTransport(transport runtime.ClientTransport) } +/* +CreateGiteaEndpoint creates a gitea endpoint +*/ +func (a *Client) CreateGiteaEndpoint(params *CreateGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateGiteaEndpointOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewCreateGiteaEndpointParams() + } + op := &runtime.ClientOperation{ + ID: "CreateGiteaEndpoint", + Method: "POST", + PathPattern: "/gitea/endpoints", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &CreateGiteaEndpointReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*CreateGiteaEndpointOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*CreateGiteaEndpointDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + /* CreateGithubEndpoint creates a git hub endpoint */ @@ -105,6 +153,38 @@ func (a *Client) CreateGithubEndpoint(params *CreateGithubEndpointParams, authIn return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } +/* +DeleteGiteaEndpoint deletes a gitea endpoint +*/ +func (a *Client) DeleteGiteaEndpoint(params *DeleteGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) error { + // TODO: Validate the params before sending + if params == nil { + params = NewDeleteGiteaEndpointParams() + } + op := &runtime.ClientOperation{ + ID: "DeleteGiteaEndpoint", + Method: "DELETE", + PathPattern: "/gitea/endpoints/{name}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &DeleteGiteaEndpointReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + _, err := a.transport.Submit(op) + if err != nil { + return err + } + return nil +} + /* 
DeleteGithubEndpoint deletes a git hub endpoint */ @@ -137,6 +217,44 @@ func (a *Client) DeleteGithubEndpoint(params *DeleteGithubEndpointParams, authIn return nil } +/* +GetGiteaEndpoint gets a gitea endpoint +*/ +func (a *Client) GetGiteaEndpoint(params *GetGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetGiteaEndpointOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewGetGiteaEndpointParams() + } + op := &runtime.ClientOperation{ + ID: "GetGiteaEndpoint", + Method: "GET", + PathPattern: "/gitea/endpoints/{name}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &GetGiteaEndpointReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*GetGiteaEndpointOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*GetGiteaEndpointDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + /* GetGithubEndpoint gets a git hub endpoint */ @@ -175,6 +293,44 @@ func (a *Client) GetGithubEndpoint(params *GetGithubEndpointParams, authInfo run return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } +/* +ListGiteaEndpoints lists all gitea endpoints +*/ +func (a *Client) ListGiteaEndpoints(params *ListGiteaEndpointsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListGiteaEndpointsOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewListGiteaEndpointsParams() + } + op := &runtime.ClientOperation{ + ID: "ListGiteaEndpoints", + Method: "GET", + PathPattern: "/gitea/endpoints", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &ListGiteaEndpointsReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*ListGiteaEndpointsOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ListGiteaEndpointsDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + /* ListGithubEndpoints lists all git hub endpoints */ @@ -213,6 +369,44 @@ func (a *Client) ListGithubEndpoints(params *ListGithubEndpointsParams, authInfo return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } +/* +UpdateGiteaEndpoint updates a gitea endpoint +*/ +func (a *Client) UpdateGiteaEndpoint(params *UpdateGiteaEndpointParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateGiteaEndpointOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewUpdateGiteaEndpointParams() + } + op := &runtime.ClientOperation{ + ID: 
"UpdateGiteaEndpoint", + Method: "PUT", + PathPattern: "/gitea/endpoints/{name}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http"}, + Params: params, + Reader: &UpdateGiteaEndpointReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*UpdateGiteaEndpointOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*UpdateGiteaEndpointDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + /* UpdateGithubEndpoint updates a git hub endpoint */ diff --git a/client/endpoints/get_gitea_endpoint_parameters.go b/client/endpoints/get_gitea_endpoint_parameters.go new file mode 100644 index 00000000..0d7f883b --- /dev/null +++ b/client/endpoints/get_gitea_endpoint_parameters.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package endpoints + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewGetGiteaEndpointParams creates a new GetGiteaEndpointParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewGetGiteaEndpointParams() *GetGiteaEndpointParams { + return &GetGiteaEndpointParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewGetGiteaEndpointParamsWithTimeout creates a new GetGiteaEndpointParams object +// with the ability to set a timeout on a request. +func NewGetGiteaEndpointParamsWithTimeout(timeout time.Duration) *GetGiteaEndpointParams { + return &GetGiteaEndpointParams{ + timeout: timeout, + } +} + +// NewGetGiteaEndpointParamsWithContext creates a new GetGiteaEndpointParams object +// with the ability to set a context for a request. +func NewGetGiteaEndpointParamsWithContext(ctx context.Context) *GetGiteaEndpointParams { + return &GetGiteaEndpointParams{ + Context: ctx, + } +} + +// NewGetGiteaEndpointParamsWithHTTPClient creates a new GetGiteaEndpointParams object +// with the ability to set a custom HTTPClient for a request. +func NewGetGiteaEndpointParamsWithHTTPClient(client *http.Client) *GetGiteaEndpointParams { + return &GetGiteaEndpointParams{ + HTTPClient: client, + } +} + +/* +GetGiteaEndpointParams contains all the parameters to send to the API endpoint + + for the get gitea endpoint operation. + + Typically these are written to a http.Request. +*/ +type GetGiteaEndpointParams struct { + + /* Name. + + The name of the Gitea endpoint. + */ + Name string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the get gitea endpoint params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *GetGiteaEndpointParams) WithDefaults() *GetGiteaEndpointParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the get gitea endpoint params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *GetGiteaEndpointParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the get gitea endpoint params +func (o *GetGiteaEndpointParams) WithTimeout(timeout time.Duration) *GetGiteaEndpointParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the get gitea endpoint params +func (o *GetGiteaEndpointParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the get gitea endpoint params +func (o *GetGiteaEndpointParams) WithContext(ctx context.Context) *GetGiteaEndpointParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the get gitea endpoint params +func (o *GetGiteaEndpointParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the get gitea endpoint params +func (o *GetGiteaEndpointParams) WithHTTPClient(client *http.Client) *GetGiteaEndpointParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the get gitea endpoint params +func (o *GetGiteaEndpointParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithName adds the name to the get gitea endpoint params +func (o *GetGiteaEndpointParams) WithName(name string) *GetGiteaEndpointParams { + o.SetName(name) + return o +} + +// SetName adds the name to the get gitea endpoint params +func (o *GetGiteaEndpointParams) SetName(name string) { + o.Name = name +} + +// WriteToRequest writes these params to a swagger request +func (o *GetGiteaEndpointParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param name + if err := r.SetPathParam("name", o.Name); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/endpoints/get_gitea_endpoint_responses.go b/client/endpoints/get_gitea_endpoint_responses.go new file mode 100644 index 00000000..e4bacd03 --- /dev/null +++ b/client/endpoints/get_gitea_endpoint_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package endpoints + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// GetGiteaEndpointReader is a Reader for the GetGiteaEndpoint structure. +type GetGiteaEndpointReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *GetGiteaEndpointReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewGetGiteaEndpointOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewGetGiteaEndpointDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewGetGiteaEndpointOK creates a GetGiteaEndpointOK with default headers values +func NewGetGiteaEndpointOK() *GetGiteaEndpointOK { + return &GetGiteaEndpointOK{} +} + +/* +GetGiteaEndpointOK describes a response with status code 200, with default header values. + +ForgeEndpoint +*/ +type GetGiteaEndpointOK struct { + Payload garm_params.ForgeEndpoint +} + +// IsSuccess returns true when this get gitea endpoint o k response has a 2xx status code +func (o *GetGiteaEndpointOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this get gitea endpoint o k response has a 3xx status code +func (o *GetGiteaEndpointOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this get gitea endpoint o k response has a 4xx status code +func (o *GetGiteaEndpointOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this get gitea endpoint o k response has a 5xx status code +func (o *GetGiteaEndpointOK) IsServerError() bool { + return false +} + +// IsCode returns true when this get gitea endpoint o k response a status code equal to that given +func (o *GetGiteaEndpointOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the get gitea endpoint o k response +func (o *GetGiteaEndpointOK) Code() int { + return 200 +} + +func (o *GetGiteaEndpointOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/endpoints/{name}][%d] getGiteaEndpointOK %s", 200, payload) +} + +func (o *GetGiteaEndpointOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/endpoints/{name}][%d] getGiteaEndpointOK %s", 200, payload) +} + +func (o *GetGiteaEndpointOK) GetPayload() garm_params.ForgeEndpoint { + return o.Payload +} + +func (o *GetGiteaEndpointOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewGetGiteaEndpointDefault creates a GetGiteaEndpointDefault with default headers values +func NewGetGiteaEndpointDefault(code int) *GetGiteaEndpointDefault { + return &GetGiteaEndpointDefault{ + _statusCode: code, + } +} + +/* +GetGiteaEndpointDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type GetGiteaEndpointDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this get gitea endpoint default response has a 2xx status code +func (o *GetGiteaEndpointDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this get gitea endpoint default response has a 3xx status code +func (o *GetGiteaEndpointDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this get gitea endpoint default response has a 4xx status code +func (o *GetGiteaEndpointDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this get gitea endpoint default response has a 5xx status code +func (o *GetGiteaEndpointDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this get gitea endpoint default response a status code equal to that given +func (o *GetGiteaEndpointDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the get gitea endpoint default response +func (o *GetGiteaEndpointDefault) Code() int { + return o._statusCode +} + +func (o *GetGiteaEndpointDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/endpoints/{name}][%d] GetGiteaEndpoint default %s", o._statusCode, payload) +} + +func (o *GetGiteaEndpointDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/endpoints/{name}][%d] GetGiteaEndpoint default %s", o._statusCode, payload) +} + +func (o *GetGiteaEndpointDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *GetGiteaEndpointDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/endpoints/get_github_endpoint_responses.go b/client/endpoints/get_github_endpoint_responses.go index 4b4881cd..e2b97a60 100644 --- a/client/endpoints/get_github_endpoint_responses.go +++ b/client/endpoints/get_github_endpoint_responses.go @@ -51,7 +51,7 @@ func NewGetGithubEndpointOK() *GetGithubEndpointOK { /* GetGithubEndpointOK describes a response with status code 200, with default header values. -GithubEndpoint +ForgeEndpoint */ type GetGithubEndpointOK struct { Payload garm_params.ForgeEndpoint diff --git a/client/endpoints/list_gitea_endpoints_parameters.go b/client/endpoints/list_gitea_endpoints_parameters.go new file mode 100644 index 00000000..93ec6ae6 --- /dev/null +++ b/client/endpoints/list_gitea_endpoints_parameters.go @@ -0,0 +1,128 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package endpoints + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewListGiteaEndpointsParams creates a new ListGiteaEndpointsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
+func NewListGiteaEndpointsParams() *ListGiteaEndpointsParams { + return &ListGiteaEndpointsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewListGiteaEndpointsParamsWithTimeout creates a new ListGiteaEndpointsParams object +// with the ability to set a timeout on a request. +func NewListGiteaEndpointsParamsWithTimeout(timeout time.Duration) *ListGiteaEndpointsParams { + return &ListGiteaEndpointsParams{ + timeout: timeout, + } +} + +// NewListGiteaEndpointsParamsWithContext creates a new ListGiteaEndpointsParams object +// with the ability to set a context for a request. +func NewListGiteaEndpointsParamsWithContext(ctx context.Context) *ListGiteaEndpointsParams { + return &ListGiteaEndpointsParams{ + Context: ctx, + } +} + +// NewListGiteaEndpointsParamsWithHTTPClient creates a new ListGiteaEndpointsParams object +// with the ability to set a custom HTTPClient for a request. +func NewListGiteaEndpointsParamsWithHTTPClient(client *http.Client) *ListGiteaEndpointsParams { + return &ListGiteaEndpointsParams{ + HTTPClient: client, + } +} + +/* +ListGiteaEndpointsParams contains all the parameters to send to the API endpoint + + for the list gitea endpoints operation. + + Typically these are written to a http.Request. +*/ +type ListGiteaEndpointsParams struct { + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the list gitea endpoints params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListGiteaEndpointsParams) WithDefaults() *ListGiteaEndpointsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the list gitea endpoints params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListGiteaEndpointsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the list gitea endpoints params +func (o *ListGiteaEndpointsParams) WithTimeout(timeout time.Duration) *ListGiteaEndpointsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the list gitea endpoints params +func (o *ListGiteaEndpointsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the list gitea endpoints params +func (o *ListGiteaEndpointsParams) WithContext(ctx context.Context) *ListGiteaEndpointsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the list gitea endpoints params +func (o *ListGiteaEndpointsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the list gitea endpoints params +func (o *ListGiteaEndpointsParams) WithHTTPClient(client *http.Client) *ListGiteaEndpointsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the list gitea endpoints params +func (o *ListGiteaEndpointsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WriteToRequest writes these params to a swagger request +func (o *ListGiteaEndpointsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} diff --git a/client/endpoints/list_gitea_endpoints_responses.go b/client/endpoints/list_gitea_endpoints_responses.go new file mode 100644 index 00000000..0fdd90ec --- /dev/null +++ b/client/endpoints/list_gitea_endpoints_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package endpoints + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// ListGiteaEndpointsReader is a Reader for the ListGiteaEndpoints structure. +type ListGiteaEndpointsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *ListGiteaEndpointsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewListGiteaEndpointsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewListGiteaEndpointsDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewListGiteaEndpointsOK creates a ListGiteaEndpointsOK with default headers values +func NewListGiteaEndpointsOK() *ListGiteaEndpointsOK { + return &ListGiteaEndpointsOK{} +} + +/* +ListGiteaEndpointsOK describes a response with status code 200, with default header values. 
+ +ForgeEndpoints +*/ +type ListGiteaEndpointsOK struct { + Payload garm_params.ForgeEndpoints +} + +// IsSuccess returns true when this list gitea endpoints o k response has a 2xx status code +func (o *ListGiteaEndpointsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this list gitea endpoints o k response has a 3xx status code +func (o *ListGiteaEndpointsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this list gitea endpoints o k response has a 4xx status code +func (o *ListGiteaEndpointsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this list gitea endpoints o k response has a 5xx status code +func (o *ListGiteaEndpointsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this list gitea endpoints o k response a status code equal to that given +func (o *ListGiteaEndpointsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the list gitea endpoints o k response +func (o *ListGiteaEndpointsOK) Code() int { + return 200 +} + +func (o *ListGiteaEndpointsOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/endpoints][%d] listGiteaEndpointsOK %s", 200, payload) +} + +func (o *ListGiteaEndpointsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/endpoints][%d] listGiteaEndpointsOK %s", 200, payload) +} + +func (o *ListGiteaEndpointsOK) GetPayload() garm_params.ForgeEndpoints { + return o.Payload +} + +func (o *ListGiteaEndpointsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewListGiteaEndpointsDefault creates a ListGiteaEndpointsDefault with default headers values +func NewListGiteaEndpointsDefault(code int) *ListGiteaEndpointsDefault { + return &ListGiteaEndpointsDefault{ + _statusCode: code, + } +} + +/* +ListGiteaEndpointsDefault describes a response with status code -1, with default header values. 
+ +APIErrorResponse +*/ +type ListGiteaEndpointsDefault struct { + _statusCode int + + Payload apiserver_params.APIErrorResponse +} + +// IsSuccess returns true when this list gitea endpoints default response has a 2xx status code +func (o *ListGiteaEndpointsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this list gitea endpoints default response has a 3xx status code +func (o *ListGiteaEndpointsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this list gitea endpoints default response has a 4xx status code +func (o *ListGiteaEndpointsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this list gitea endpoints default response has a 5xx status code +func (o *ListGiteaEndpointsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this list gitea endpoints default response a status code equal to that given +func (o *ListGiteaEndpointsDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the list gitea endpoints default response +func (o *ListGiteaEndpointsDefault) Code() int { + return o._statusCode +} + +func (o *ListGiteaEndpointsDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/endpoints][%d] ListGiteaEndpoints default %s", o._statusCode, payload) +} + +func (o *ListGiteaEndpointsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /gitea/endpoints][%d] ListGiteaEndpoints default %s", o._statusCode, payload) +} + +func (o *ListGiteaEndpointsDefault) GetPayload() apiserver_params.APIErrorResponse { + return o.Payload +} + +func (o *ListGiteaEndpointsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/client/endpoints/list_github_endpoints_responses.go b/client/endpoints/list_github_endpoints_responses.go index f7b10a1c..33485f9b 100644 --- a/client/endpoints/list_github_endpoints_responses.go +++ b/client/endpoints/list_github_endpoints_responses.go @@ -51,7 +51,7 @@ func NewListGithubEndpointsOK() *ListGithubEndpointsOK { /* ListGithubEndpointsOK describes a response with status code 200, with default header values. -GithubEndpoints +ForgeEndpoints */ type ListGithubEndpointsOK struct { Payload garm_params.ForgeEndpoints diff --git a/client/endpoints/update_gitea_endpoint_parameters.go b/client/endpoints/update_gitea_endpoint_parameters.go new file mode 100644 index 00000000..bfd18e2e --- /dev/null +++ b/client/endpoints/update_gitea_endpoint_parameters.go @@ -0,0 +1,173 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package endpoints + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + garm_params "github.com/cloudbase/garm/params" +) + +// NewUpdateGiteaEndpointParams creates a new UpdateGiteaEndpointParams object, +// with the default timeout for this client. 
+// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewUpdateGiteaEndpointParams() *UpdateGiteaEndpointParams { + return &UpdateGiteaEndpointParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewUpdateGiteaEndpointParamsWithTimeout creates a new UpdateGiteaEndpointParams object +// with the ability to set a timeout on a request. +func NewUpdateGiteaEndpointParamsWithTimeout(timeout time.Duration) *UpdateGiteaEndpointParams { + return &UpdateGiteaEndpointParams{ + timeout: timeout, + } +} + +// NewUpdateGiteaEndpointParamsWithContext creates a new UpdateGiteaEndpointParams object +// with the ability to set a context for a request. +func NewUpdateGiteaEndpointParamsWithContext(ctx context.Context) *UpdateGiteaEndpointParams { + return &UpdateGiteaEndpointParams{ + Context: ctx, + } +} + +// NewUpdateGiteaEndpointParamsWithHTTPClient creates a new UpdateGiteaEndpointParams object +// with the ability to set a custom HTTPClient for a request. +func NewUpdateGiteaEndpointParamsWithHTTPClient(client *http.Client) *UpdateGiteaEndpointParams { + return &UpdateGiteaEndpointParams{ + HTTPClient: client, + } +} + +/* +UpdateGiteaEndpointParams contains all the parameters to send to the API endpoint + + for the update gitea endpoint operation. + + Typically these are written to a http.Request. +*/ +type UpdateGiteaEndpointParams struct { + + /* Body. + + Parameters used when updating a Gitea endpoint. + */ + Body garm_params.UpdateGiteaEndpointParams + + /* Name. + + The name of the Gitea endpoint. + */ + Name string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the update gitea endpoint params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *UpdateGiteaEndpointParams) WithDefaults() *UpdateGiteaEndpointParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the update gitea endpoint params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *UpdateGiteaEndpointParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the update gitea endpoint params +func (o *UpdateGiteaEndpointParams) WithTimeout(timeout time.Duration) *UpdateGiteaEndpointParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the update gitea endpoint params +func (o *UpdateGiteaEndpointParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the update gitea endpoint params +func (o *UpdateGiteaEndpointParams) WithContext(ctx context.Context) *UpdateGiteaEndpointParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the update gitea endpoint params +func (o *UpdateGiteaEndpointParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the update gitea endpoint params +func (o *UpdateGiteaEndpointParams) WithHTTPClient(client *http.Client) *UpdateGiteaEndpointParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the update gitea endpoint params +func (o *UpdateGiteaEndpointParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the update gitea endpoint params +func (o *UpdateGiteaEndpointParams) WithBody(body garm_params.UpdateGiteaEndpointParams) *UpdateGiteaEndpointParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the update gitea endpoint params +func (o *UpdateGiteaEndpointParams) SetBody(body garm_params.UpdateGiteaEndpointParams) { + o.Body = body +} + +// WithName adds the name to the update gitea endpoint params +func (o *UpdateGiteaEndpointParams) WithName(name string) *UpdateGiteaEndpointParams { + o.SetName(name) + return o +} + +// SetName adds the name to the update gitea endpoint params +func (o *UpdateGiteaEndpointParams) SetName(name string) { + o.Name = name +} + +// WriteToRequest writes these params to a swagger request +func (o *UpdateGiteaEndpointParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + + // path param name + if err := r.SetPathParam("name", o.Name); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/client/endpoints/update_gitea_endpoint_responses.go b/client/endpoints/update_gitea_endpoint_responses.go new file mode 100644 index 00000000..052f45fa --- /dev/null +++ b/client/endpoints/update_gitea_endpoint_responses.go @@ -0,0 +1,184 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package endpoints + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + apiserver_params "github.com/cloudbase/garm/apiserver/params" + garm_params "github.com/cloudbase/garm/params" +) + +// UpdateGiteaEndpointReader is a Reader for the UpdateGiteaEndpoint structure. +type UpdateGiteaEndpointReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *UpdateGiteaEndpointReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewUpdateGiteaEndpointOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewUpdateGiteaEndpointDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewUpdateGiteaEndpointOK creates a UpdateGiteaEndpointOK with default headers values +func NewUpdateGiteaEndpointOK() *UpdateGiteaEndpointOK { + return &UpdateGiteaEndpointOK{} +} + +/* +UpdateGiteaEndpointOK describes a response with status code 200, with default header values. + +ForgeEndpoint +*/ +type UpdateGiteaEndpointOK struct { + Payload garm_params.ForgeEndpoint +} + +// IsSuccess returns true when this update gitea endpoint o k response has a 2xx status code +func (o *UpdateGiteaEndpointOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this update gitea endpoint o k response has a 3xx status code +func (o *UpdateGiteaEndpointOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this update gitea endpoint o k response has a 4xx status code +func (o *UpdateGiteaEndpointOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this update gitea endpoint o k response has a 5xx status code +func (o *UpdateGiteaEndpointOK) IsServerError() bool { + return false +} + +// IsCode returns true when this update gitea endpoint o k response a status code equal to that given +func (o *UpdateGiteaEndpointOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the update gitea endpoint o k response +func (o *UpdateGiteaEndpointOK) Code() int { + return 200 +} + +func (o *UpdateGiteaEndpointOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PUT /gitea/endpoints/{name}][%d] updateGiteaEndpointOK %s", 200, payload) +} + +func (o *UpdateGiteaEndpointOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PUT /gitea/endpoints/{name}][%d] updateGiteaEndpointOK %s", 200, payload) +} + +func (o *UpdateGiteaEndpointOK) GetPayload() garm_params.ForgeEndpoint { + return o.Payload +} + +func (o *UpdateGiteaEndpointOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewUpdateGiteaEndpointDefault creates a UpdateGiteaEndpointDefault with default headers values +func NewUpdateGiteaEndpointDefault(code int) *UpdateGiteaEndpointDefault { + return &UpdateGiteaEndpointDefault{ + _statusCode: code, + } +} + +/* +UpdateGiteaEndpointDefault describes a response with status code -1, with default header values. 
+
+APIErrorResponse
+*/
+type UpdateGiteaEndpointDefault struct {
+	_statusCode int
+
+	Payload apiserver_params.APIErrorResponse
+}
+
+// IsSuccess returns true when this update gitea endpoint default response has a 2xx status code
+func (o *UpdateGiteaEndpointDefault) IsSuccess() bool {
+	return o._statusCode/100 == 2
+}
+
+// IsRedirect returns true when this update gitea endpoint default response has a 3xx status code
+func (o *UpdateGiteaEndpointDefault) IsRedirect() bool {
+	return o._statusCode/100 == 3
+}
+
+// IsClientError returns true when this update gitea endpoint default response has a 4xx status code
+func (o *UpdateGiteaEndpointDefault) IsClientError() bool {
+	return o._statusCode/100 == 4
+}
+
+// IsServerError returns true when this update gitea endpoint default response has a 5xx status code
+func (o *UpdateGiteaEndpointDefault) IsServerError() bool {
+	return o._statusCode/100 == 5
+}
+
+// IsCode returns true when this update gitea endpoint default response a status code equal to that given
+func (o *UpdateGiteaEndpointDefault) IsCode(code int) bool {
+	return o._statusCode == code
+}
+
+// Code gets the status code for the update gitea endpoint default response
+func (o *UpdateGiteaEndpointDefault) Code() int {
+	return o._statusCode
+}
+
+func (o *UpdateGiteaEndpointDefault) Error() string {
+	payload, _ := json.Marshal(o.Payload)
+	return fmt.Sprintf("[PUT /gitea/endpoints/{name}][%d] UpdateGiteaEndpoint default %s", o._statusCode, payload)
+}
+
+func (o *UpdateGiteaEndpointDefault) String() string {
+	payload, _ := json.Marshal(o.Payload)
+	return fmt.Sprintf("[PUT /gitea/endpoints/{name}][%d] UpdateGiteaEndpoint default %s", o._statusCode, payload)
+}
+
+func (o *UpdateGiteaEndpointDefault) GetPayload() apiserver_params.APIErrorResponse {
+	return o.Payload
+}
+
+func (o *UpdateGiteaEndpointDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
+
+	// response payload
+	if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
+		return err
+	}
+
+	return nil
+}
diff --git a/client/endpoints/update_github_endpoint_responses.go b/client/endpoints/update_github_endpoint_responses.go
index 969d6bbd..27cd4a71 100644
--- a/client/endpoints/update_github_endpoint_responses.go
+++ b/client/endpoints/update_github_endpoint_responses.go
@@ -51,7 +51,7 @@ func NewUpdateGithubEndpointOK() *UpdateGithubEndpointOK {
 /*
 UpdateGithubEndpointOK describes a response with status code 200, with default header values.
 
-GithubEndpoint
+ForgeEndpoint
 */
 type UpdateGithubEndpointOK struct {
 	Payload garm_params.ForgeEndpoint
diff --git a/cmd/garm-cli/cmd/gitea.go b/cmd/garm-cli/cmd/gitea.go
new file mode 100644
index 00000000..10d086bd
--- /dev/null
+++ b/cmd/garm-cli/cmd/gitea.go
@@ -0,0 +1,21 @@
+package cmd
+
+import "github.com/spf13/cobra"
+
+// giteaCmd represents the gitea command. This command has a set
+// of subcommands that allow configuring and managing Gitea endpoints
+// and credentials.
+var giteaCmd = &cobra.Command{
+	Use:          "gitea",
+	Aliases:      []string{"gt"},
+	SilenceUsage: true,
+	Short:        "Manage Gitea resources",
+	Long: `Manage Gitea related resources.
+ +This command allows you to configure and manage Gitea endpoints and credentials`, + Run: nil, +} + +func init() { + rootCmd.AddCommand(giteaCmd) +} diff --git a/cmd/garm-cli/cmd/gitea_credentials.go b/cmd/garm-cli/cmd/gitea_credentials.go new file mode 100644 index 00000000..c744c8a7 --- /dev/null +++ b/cmd/garm-cli/cmd/gitea_credentials.go @@ -0,0 +1,317 @@ +// Copyright 2022 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package cmd + +import ( + "fmt" + "strconv" + + "github.com/jedib0t/go-pretty/v6/table" + "github.com/spf13/cobra" + + apiClientCreds "github.com/cloudbase/garm/client/credentials" + "github.com/cloudbase/garm/cmd/garm-cli/common" + "github.com/cloudbase/garm/params" +) + +// giteaCredentialsCmd represents the gitea credentials command +var giteaCredentialsCmd = &cobra.Command{ + Use: "credentials", + Aliases: []string{"creds"}, + Short: "Manage gitea credentials", + Long: `Manage Gitea credentials stored in GARM. + +This command allows you to add, update, list and delete Gitea credentials.`, + Run: nil, +} + +var giteaCredentialsListCmd = &cobra.Command{ + Use: "list", + Aliases: []string{"ls"}, + Short: "List configured gitea credentials", + Long: `List the names of the gitea personal access tokens available to the garm.`, + SilenceUsage: true, + RunE: func(_ *cobra.Command, _ []string) error { + if needsInit { + return errNeedsInitError + } + + listCredsReq := apiClientCreds.NewListGiteaCredentialsParams() + response, err := apiCli.Credentials.ListGiteaCredentials(listCredsReq, authToken) + if err != nil { + return err + } + formatGiteaCredentials(response.Payload) + return nil + }, +} + +var giteaCredentialsShowCmd = &cobra.Command{ + Use: "show", + Aliases: []string{"get"}, + Short: "Show details of a configured gitea credential", + Long: `Show the details of a configured gitea credential.`, + SilenceUsage: true, + RunE: func(_ *cobra.Command, args []string) error { + if needsInit { + return errNeedsInitError + } + + if len(args) < 1 { + return fmt.Errorf("missing required argument: credential ID") + } + + credID, err := strconv.ParseInt(args[0], 10, 64) + if err != nil { + return fmt.Errorf("invalid credential ID: %s", args[0]) + } + showCredsReq := apiClientCreds.NewGetGiteaCredentialsParams().WithID(credID) + response, err := apiCli.Credentials.GetGiteaCredentials(showCredsReq, authToken) + if err != nil { + return err + } + formatOneGiteaCredential(response.Payload) + return nil + }, +} + +var giteaCredentialsUpdateCmd = &cobra.Command{ + Use: "update", + Short: "Update a gitea credential", + Long: "Update a gitea credential", + SilenceUsage: true, + RunE: func(_ *cobra.Command, args []string) error { + if needsInit { + return errNeedsInitError + } + + if len(args) < 1 { + return fmt.Errorf("missing required argument: credential ID") + } + + if len(args) > 1 { + return fmt.Errorf("too many arguments") + } + + credID, err := strconv.ParseInt(args[0], 10, 64) + if err != nil { + return fmt.Errorf("invalid credential ID: %s", 
args[0]) + } + + updateParams, err := parseGiteaCredentialsUpdateParams() + if err != nil { + return err + } + + updateCredsReq := apiClientCreds.NewUpdateGiteaCredentialsParams().WithID(credID) + updateCredsReq.Body = updateParams + + response, err := apiCli.Credentials.UpdateGiteaCredentials(updateCredsReq, authToken) + if err != nil { + return err + } + formatOneGiteaCredential(response.Payload) + return nil + }, +} + +var giteaCredentialsDeleteCmd = &cobra.Command{ + Use: "delete", + Aliases: []string{"remove", "rm"}, + Short: "Delete a gitea credential", + Long: "Delete a gitea credential", + SilenceUsage: true, + RunE: func(_ *cobra.Command, args []string) error { + if needsInit { + return errNeedsInitError + } + + if len(args) < 1 { + return fmt.Errorf("missing required argument: credential ID") + } + + if len(args) > 1 { + return fmt.Errorf("too many arguments") + } + + credID, err := strconv.ParseInt(args[0], 10, 64) + if err != nil { + return fmt.Errorf("invalid credential ID: %s", args[0]) + } + + deleteCredsReq := apiClientCreds.NewDeleteGiteaCredentialsParams().WithID(credID) + if err := apiCli.Credentials.DeleteGiteaCredentials(deleteCredsReq, authToken); err != nil { + return err + } + return nil + }, +} + +var giteaCredentialsAddCmd = &cobra.Command{ + Use: "add", + Short: "Add a gitea credential", + Long: "Add a gitea credential", + SilenceUsage: true, + RunE: func(_ *cobra.Command, args []string) error { + if needsInit { + return errNeedsInitError + } + + if len(args) > 0 { + return fmt.Errorf("too many arguments") + } + + addParams, err := parseGiteaCredentialsAddParams() + if err != nil { + return err + } + + addCredsReq := apiClientCreds.NewCreateGiteaCredentialsParams() + addCredsReq.Body = addParams + + response, err := apiCli.Credentials.CreateGiteaCredentials(addCredsReq, authToken) + if err != nil { + return err + } + formatOneGiteaCredential(response.Payload) + return nil + }, +} + +func init() { + giteaCredentialsUpdateCmd.Flags().StringVar(&credentialsName, "name", "", "Name of the credential") + giteaCredentialsUpdateCmd.Flags().StringVar(&credentialsDescription, "description", "", "Description of the credential") + giteaCredentialsUpdateCmd.Flags().StringVar(&credentialsOAuthToken, "pat-oauth-token", "", "If the credential is a personal access token, the OAuth token") + + giteaCredentialsListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.") + + giteaCredentialsAddCmd.Flags().StringVar(&credentialsName, "name", "", "Name of the credential") + giteaCredentialsAddCmd.Flags().StringVar(&credentialsDescription, "description", "", "Description of the credential") + giteaCredentialsAddCmd.Flags().StringVar(&credentialsOAuthToken, "pat-oauth-token", "", "If the credential is a personal access token, the OAuth token") + giteaCredentialsAddCmd.Flags().StringVar(&credentialsType, "auth-type", "", "The type of the credential") + giteaCredentialsAddCmd.Flags().StringVar(&credentialsEndpoint, "endpoint", "", "The endpoint to associate the credential with") + + giteaCredentialsAddCmd.MarkFlagRequired("name") + giteaCredentialsAddCmd.MarkFlagRequired("auth-type") + giteaCredentialsAddCmd.MarkFlagRequired("description") + giteaCredentialsAddCmd.MarkFlagRequired("endpoint") + + giteaCredentialsCmd.AddCommand( + giteaCredentialsListCmd, + giteaCredentialsShowCmd, + giteaCredentialsUpdateCmd, + giteaCredentialsDeleteCmd, + giteaCredentialsAddCmd, + ) + giteaCmd.AddCommand(giteaCredentialsCmd) +} + +func parseGiteaCredentialsAddParams() (ret 
params.CreateGiteaCredentialsParams, err error) { + ret.Name = credentialsName + ret.Description = credentialsDescription + ret.AuthType = params.ForgeAuthType(credentialsType) + ret.Endpoint = credentialsEndpoint + switch ret.AuthType { + case params.ForgeAuthTypePAT: + ret.PAT.OAuth2Token = credentialsOAuthToken + default: + return params.CreateGiteaCredentialsParams{}, fmt.Errorf("invalid auth type: %s (supported are: pat)", credentialsType) + } + + return ret, nil +} + +func parseGiteaCredentialsUpdateParams() (params.UpdateGiteaCredentialsParams, error) { + var updateParams params.UpdateGiteaCredentialsParams + + if credentialsName != "" { + updateParams.Name = &credentialsName + } + + if credentialsDescription != "" { + updateParams.Description = &credentialsDescription + } + + if credentialsOAuthToken != "" { + if updateParams.PAT == nil { + updateParams.PAT = ¶ms.GithubPAT{} + } + updateParams.PAT.OAuth2Token = credentialsOAuthToken + } + + return updateParams, nil +} + +func formatGiteaCredentials(creds []params.ForgeCredentials) { + if outputFormat == common.OutputFormatJSON { + printAsJSON(creds) + return + } + t := table.NewWriter() + header := table.Row{"ID", "Name", "Description", "Base URL", "API URL", "Type"} + if long { + header = append(header, "Created At", "Updated At") + } + t.AppendHeader(header) + for _, val := range creds { + row := table.Row{val.ID, val.Name, val.Description, val.BaseURL, val.APIBaseURL, val.AuthType} + if long { + row = append(row, val.CreatedAt, val.UpdatedAt) + } + t.AppendRow(row) + t.AppendSeparator() + } + fmt.Println(t.Render()) +} + +func formatOneGiteaCredential(cred params.ForgeCredentials) { + if outputFormat == common.OutputFormatJSON { + printAsJSON(cred) + return + } + t := table.NewWriter() + header := table.Row{"Field", "Value"} + t.AppendHeader(header) + + t.AppendRow(table.Row{"ID", cred.ID}) + t.AppendRow(table.Row{"Created At", cred.CreatedAt}) + t.AppendRow(table.Row{"Updated At", cred.UpdatedAt}) + t.AppendRow(table.Row{"Name", cred.Name}) + t.AppendRow(table.Row{"Description", cred.Description}) + t.AppendRow(table.Row{"Base URL", cred.BaseURL}) + t.AppendRow(table.Row{"API URL", cred.APIBaseURL}) + t.AppendRow(table.Row{"Type", cred.AuthType}) + t.AppendRow(table.Row{"Endpoint", cred.Endpoint.Name}) + + if len(cred.Repositories) > 0 { + t.AppendRow(table.Row{"", ""}) + for _, repo := range cred.Repositories { + t.AppendRow(table.Row{"Repositories", repo.String()}) + } + } + + if len(cred.Organizations) > 0 { + t.AppendRow(table.Row{"", ""}) + for _, org := range cred.Organizations { + t.AppendRow(table.Row{"Organizations", org.Name}) + } + } + + t.SetColumnConfigs([]table.ColumnConfig{ + {Number: 1, AutoMerge: true}, + {Number: 2, AutoMerge: false, WidthMax: 100}, + }) + fmt.Println(t.Render()) +} diff --git a/cmd/garm-cli/cmd/gitea_endpoints.go b/cmd/garm-cli/cmd/gitea_endpoints.go new file mode 100644 index 00000000..d3504f17 --- /dev/null +++ b/cmd/garm-cli/cmd/gitea_endpoints.go @@ -0,0 +1,218 @@ +package cmd + +import ( + "fmt" + + "github.com/spf13/cobra" + + apiClientEndpoints "github.com/cloudbase/garm/client/endpoints" + "github.com/cloudbase/garm/params" +) + +var giteaEndpointCmd = &cobra.Command{ + Use: "endpoint", + SilenceUsage: true, + Short: "Manage Gitea endpoints", + Long: `Manage Gitea endpoints. 
+ +This command allows you to configure and manage Gitea endpoints`, + Run: nil, +} + +var giteaEndpointListCmd = &cobra.Command{ + Use: "list", + Aliases: []string{"ls"}, + SilenceUsage: true, + Short: "List Gitea endpoints", + Long: `List all configured Gitea endpoints.`, + RunE: func(_ *cobra.Command, _ []string) error { + if needsInit { + return errNeedsInitError + } + + newListReq := apiClientEndpoints.NewListGiteaEndpointsParams() + response, err := apiCli.Endpoints.ListGiteaEndpoints(newListReq, authToken) + if err != nil { + return err + } + formatEndpoints(response.Payload) + return nil + }, +} + +var giteaEndpointShowCmd = &cobra.Command{ + Use: "show", + Aliases: []string{"get"}, + SilenceUsage: true, + Short: "Show Gitea endpoint", + Long: `Show details of a Gitea endpoint.`, + RunE: func(_ *cobra.Command, args []string) error { + if needsInit { + return errNeedsInitError + } + if len(args) == 0 { + return fmt.Errorf("requires an endpoint name") + } + if len(args) > 1 { + return fmt.Errorf("too many arguments") + } + + newShowReq := apiClientEndpoints.NewGetGiteaEndpointParams() + newShowReq.Name = args[0] + response, err := apiCli.Endpoints.GetGiteaEndpoint(newShowReq, authToken) + if err != nil { + return err + } + formatOneEndpoint(response.Payload) + return nil + }, +} + +var giteaEndpointCreateCmd = &cobra.Command{ + Use: "create", + SilenceUsage: true, + Short: "Create Gitea endpoint", + Long: `Create a new Gitea endpoint.`, + RunE: func(_ *cobra.Command, _ []string) error { + if needsInit { + return errNeedsInitError + } + + createParams, err := parseGiteaCreateParams() + if err != nil { + return err + } + + newCreateReq := apiClientEndpoints.NewCreateGiteaEndpointParams() + newCreateReq.Body = createParams + + response, err := apiCli.Endpoints.CreateGiteaEndpoint(newCreateReq, authToken) + if err != nil { + return err + } + formatOneEndpoint(response.Payload) + return nil + }, +} + +var giteaEndpointDeleteCmd = &cobra.Command{ + Use: "delete", + Aliases: []string{"remove", "rm"}, + SilenceUsage: true, + Short: "Delete Gitea endpoint", + Long: "Delete a Gitea endpoint", + RunE: func(_ *cobra.Command, args []string) error { + if needsInit { + return errNeedsInitError + } + if len(args) == 0 { + return fmt.Errorf("requires an endpoint name") + } + if len(args) > 1 { + return fmt.Errorf("too many arguments") + } + + newDeleteReq := apiClientEndpoints.NewDeleteGiteaEndpointParams() + newDeleteReq.Name = args[0] + if err := apiCli.Endpoints.DeleteGiteaEndpoint(newDeleteReq, authToken); err != nil { + return err + } + return nil + }, +} + +var giteaEndpointUpdateCmd = &cobra.Command{ + Use: "update", + Short: "Update Gitea endpoint", + Long: "Update a Gitea endpoint", + SilenceUsage: true, + RunE: func(cmd *cobra.Command, args []string) error { + if needsInit { + return errNeedsInitError + } + if len(args) == 0 { + return fmt.Errorf("requires an endpoint name") + } + if len(args) > 1 { + return fmt.Errorf("too many arguments") + } + + updateParams := params.UpdateGiteaEndpointParams{} + + if cmd.Flags().Changed("ca-cert-path") { + cert, err := parseAndReadCABundle() + if err != nil { + return err + } + updateParams.CACertBundle = cert + } + + if cmd.Flags().Changed("description") { + updateParams.Description = &endpointDescription + } + + if cmd.Flags().Changed("base-url") { + updateParams.BaseURL = &endpointBaseURL + } + + if cmd.Flags().Changed("api-base-url") { + updateParams.APIBaseURL = &endpointAPIBaseURL + } + + newEndpointUpdateReq := 
apiClientEndpoints.NewUpdateGiteaEndpointParams()
+		newEndpointUpdateReq.Name = args[0]
+		newEndpointUpdateReq.Body = updateParams
+
+		response, err := apiCli.Endpoints.UpdateGiteaEndpoint(newEndpointUpdateReq, authToken)
+		if err != nil {
+			return err
+		}
+		formatOneEndpoint(response.Payload)
+		return nil
+	},
+}
+
+func init() {
+	giteaEndpointCreateCmd.Flags().StringVar(&endpointName, "name", "", "Name of the Gitea endpoint")
+	giteaEndpointCreateCmd.Flags().StringVar(&endpointDescription, "description", "", "Description for the Gitea endpoint")
+	giteaEndpointCreateCmd.Flags().StringVar(&endpointBaseURL, "base-url", "", "Base URL of the Gitea endpoint")
+	giteaEndpointCreateCmd.Flags().StringVar(&endpointAPIBaseURL, "api-base-url", "", "API Base URL of the Gitea endpoint")
+	giteaEndpointCreateCmd.Flags().StringVar(&endpointCACertPath, "ca-cert-path", "", "CA Cert Path of the Gitea endpoint")
+
+	giteaEndpointListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.")
+
+	giteaEndpointCreateCmd.MarkFlagRequired("name")
+	giteaEndpointCreateCmd.MarkFlagRequired("base-url")
+	giteaEndpointCreateCmd.MarkFlagRequired("api-base-url")
+
+	giteaEndpointUpdateCmd.Flags().StringVar(&endpointDescription, "description", "", "Description for the Gitea endpoint")
+	giteaEndpointUpdateCmd.Flags().StringVar(&endpointBaseURL, "base-url", "", "Base URL of the Gitea endpoint")
+	giteaEndpointUpdateCmd.Flags().StringVar(&endpointAPIBaseURL, "api-base-url", "", "API Base URL of the Gitea endpoint")
+	giteaEndpointUpdateCmd.Flags().StringVar(&endpointCACertPath, "ca-cert-path", "", "CA Cert Path of the Gitea endpoint")
+
+	giteaEndpointCmd.AddCommand(
+		giteaEndpointListCmd,
+		giteaEndpointShowCmd,
+		giteaEndpointCreateCmd,
+		giteaEndpointDeleteCmd,
+		giteaEndpointUpdateCmd,
+	)
+
+	giteaCmd.AddCommand(giteaEndpointCmd)
+}
+
+func parseGiteaCreateParams() (params.CreateGiteaEndpointParams, error) {
+	certBundleBytes, err := parseAndReadCABundle()
+	if err != nil {
+		return params.CreateGiteaEndpointParams{}, err
+	}
+
+	ret := params.CreateGiteaEndpointParams{
+		Name:         endpointName,
+		BaseURL:      endpointBaseURL,
+		APIBaseURL:   endpointAPIBaseURL,
+		Description:  endpointDescription,
+		CACertBundle: certBundleBytes,
+	}
+	return ret, nil
+}
diff --git a/cmd/garm-cli/cmd/github_credentials.go b/cmd/garm-cli/cmd/github_credentials.go
index fb3853d8..ae2374f6 100644
--- a/cmd/garm-cli/cmd/github_credentials.go
+++ b/cmd/garm-cli/cmd/github_credentials.go
@@ -344,7 +344,7 @@ func parseCredentialsUpdateParams() (params.UpdateGithubCredentialsParams, error
 	return updateParams, nil
 }
 
-func formatGithubCredentials(creds []params.GithubCredentials) {
+func formatGithubCredentials(creds []params.ForgeCredentials) {
 	if outputFormat == common.OutputFormatJSON {
 		printAsJSON(creds)
 		return
@@ -366,7 +366,7 @@ func formatGithubCredentials(creds []params.GithubCredentials) {
 	fmt.Println(t.Render())
 }
 
-func formatOneGithubCredential(cred params.GithubCredentials) {
+func formatOneGithubCredential(cred params.ForgeCredentials) {
 	if outputFormat == common.OutputFormatJSON {
 		printAsJSON(cred)
 		return
diff --git a/cmd/garm-cli/cmd/github_endpoints.go b/cmd/garm-cli/cmd/github_endpoints.go
index f119a1a2..fbdca86c 100644
--- a/cmd/garm-cli/cmd/github_endpoints.go
+++ b/cmd/garm-cli/cmd/github_endpoints.go
@@ -145,7 +145,7 @@ var githubEndpointUpdateCmd = &cobra.Command{
 		updateParams := params.UpdateGithubEndpointParams{}
 
 		if cmd.Flags().Changed("ca-cert-path") {
-			cert, err := parseReadAndParsCABundle()
+			cert, err 
:= parseAndReadCABundle() if err != nil { return err } @@ -213,7 +213,7 @@ func init() { githubCmd.AddCommand(githubEndpointCmd) } -func parseReadAndParsCABundle() ([]byte, error) { +func parseAndReadCABundle() ([]byte, error) { if endpointCACertPath == "" { return nil, nil } @@ -236,7 +236,7 @@ func parseReadAndParsCABundle() ([]byte, error) { } func parseCreateParams() (params.CreateGithubEndpointParams, error) { - certBundleBytes, err := parseReadAndParsCABundle() + certBundleBytes, err := parseAndReadCABundle() if err != nil { return params.CreateGithubEndpointParams{}, err } @@ -287,7 +287,9 @@ func formatOneEndpoint(endpoint params.ForgeEndpoint) { t.AppendRow([]interface{}{"Created At", endpoint.CreatedAt}) t.AppendRow([]interface{}{"Updated At", endpoint.UpdatedAt}) t.AppendRow([]interface{}{"Base URL", endpoint.BaseURL}) - t.AppendRow([]interface{}{"Upload URL", endpoint.UploadBaseURL}) + if endpoint.UploadBaseURL != "" { + t.AppendRow([]interface{}{"Upload URL", endpoint.UploadBaseURL}) + } t.AppendRow([]interface{}{"API Base URL", endpoint.APIBaseURL}) if len(endpoint.CACertBundle) > 0 { t.AppendRow([]interface{}{"CA Cert Bundle", string(endpoint.CACertBundle)}) diff --git a/cmd/garm-cli/cmd/repository.go b/cmd/garm-cli/cmd/repository.go index 1c453836..b24cf039 100644 --- a/cmd/garm-cli/cmd/repository.go +++ b/cmd/garm-cli/cmd/repository.go @@ -31,6 +31,7 @@ var ( repoName string repoWebhookSecret string repoCreds string + forgeType string randomWebhookSecret bool insecureRepoWebhook bool keepRepoWebhook bool @@ -169,6 +170,7 @@ var repoAddCmd = &cobra.Command{ Name: repoName, WebhookSecret: repoWebhookSecret, CredentialsName: repoCreds, + ForgeType: params.EndpointType(forgeType), PoolBalancerType: params.PoolBalancerType(poolBalancerType), } response, err := apiCli.Repositories.CreateRepo(newRepoReq, authToken) @@ -309,6 +311,7 @@ func init() { repoAddCmd.Flags().StringVar(&repoOwner, "owner", "", "The owner of this repository") repoAddCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", string(params.PoolBalancerTypeRoundRobin), "The balancing strategy to use when creating runners in pools matching requested labels.") repoAddCmd.Flags().StringVar(&repoName, "name", "", "The name of the repository") + repoAddCmd.Flags().StringVar(&forgeType, "forge-type", string(params.GithubEndpointType), "The forge type of the repository. Supported values: github, gitea.") repoAddCmd.Flags().StringVar(&repoWebhookSecret, "webhook-secret", "", "The webhook secret for this repository") repoAddCmd.Flags().StringVar(&repoCreds, "credentials", "", "Credentials name. 
See credentials list.") repoAddCmd.Flags().BoolVar(&randomWebhookSecret, "random-webhook-secret", false, "Generate a random webhook secret for this repository.") @@ -360,7 +363,7 @@ func formatRepositories(repos []params.Repository) { } t.AppendHeader(header) for _, val := range repos { - row := table.Row{val.ID, val.Owner, val.Name, val.Endpoint.Name, val.CredentialsName, val.GetBalancerType(), val.PoolManagerStatus.IsRunning} + row := table.Row{val.ID, val.Owner, val.Name, val.Endpoint.Name, val.GetCredentialsName(), val.GetBalancerType(), val.PoolManagerStatus.IsRunning} if long { row = append(row, val.CreatedAt, val.UpdatedAt) } @@ -386,7 +389,7 @@ func formatOneRepository(repo params.Repository) { t.AppendRow(table.Row{"Name", repo.Name}) t.AppendRow(table.Row{"Endpoint", repo.Endpoint.Name}) t.AppendRow(table.Row{"Pool balancer type", repo.GetBalancerType()}) - t.AppendRow(table.Row{"Credentials", repo.CredentialsName}) + t.AppendRow(table.Row{"Credentials", repo.GetCredentialsName()}) t.AppendRow(table.Row{"Pool manager running", repo.PoolManagerStatus.IsRunning}) if !repo.PoolManagerStatus.IsRunning { t.AppendRow(table.Row{"Failure reason", repo.PoolManagerStatus.FailureReason}) diff --git a/cmd/garm/main.go b/cmd/garm/main.go index 20f34eba..f37248d3 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -384,6 +384,7 @@ func main() { slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to stop provider worker") } + slog.InfoContext(ctx, "shutting down http server") shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 60*time.Second) defer shutdownCancel() if err := srv.Shutdown(shutdownCtx); err != nil { diff --git a/database/common/mocks/Store.go b/database/common/mocks/Store.go index 53b90720..30f1774c 100644 --- a/database/common/mocks/Store.go +++ b/database/common/mocks/Store.go @@ -180,23 +180,79 @@ func (_m *Store) CreateEntityScaleSet(_a0 context.Context, entity params.ForgeEn return r0, r1 } +// CreateGiteaCredentials provides a mock function with given fields: ctx, param +func (_m *Store) CreateGiteaCredentials(ctx context.Context, param params.CreateGiteaCredentialsParams) (params.ForgeCredentials, error) { + ret := _m.Called(ctx, param) + + if len(ret) == 0 { + panic("no return value specified for CreateGiteaCredentials") + } + + var r0 params.ForgeCredentials + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGiteaCredentialsParams) (params.ForgeCredentials, error)); ok { + return rf(ctx, param) + } + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGiteaCredentialsParams) params.ForgeCredentials); ok { + r0 = rf(ctx, param) + } else { + r0 = ret.Get(0).(params.ForgeCredentials) + } + + if rf, ok := ret.Get(1).(func(context.Context, params.CreateGiteaCredentialsParams) error); ok { + r1 = rf(ctx, param) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateGiteaEndpoint provides a mock function with given fields: _a0, param +func (_m *Store) CreateGiteaEndpoint(_a0 context.Context, param params.CreateGiteaEndpointParams) (params.ForgeEndpoint, error) { + ret := _m.Called(_a0, param) + + if len(ret) == 0 { + panic("no return value specified for CreateGiteaEndpoint") + } + + var r0 params.ForgeEndpoint + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGiteaEndpointParams) (params.ForgeEndpoint, error)); ok { + return rf(_a0, param) + } + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGiteaEndpointParams) params.ForgeEndpoint); ok { + r0 = rf(_a0, 
param) + } else { + r0 = ret.Get(0).(params.ForgeEndpoint) + } + + if rf, ok := ret.Get(1).(func(context.Context, params.CreateGiteaEndpointParams) error); ok { + r1 = rf(_a0, param) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // CreateGithubCredentials provides a mock function with given fields: ctx, param -func (_m *Store) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.GithubCredentials, error) { +func (_m *Store) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.ForgeCredentials, error) { ret := _m.Called(ctx, param) if len(ret) == 0 { panic("no return value specified for CreateGithubCredentials") } - var r0 params.GithubCredentials + var r0 params.ForgeCredentials var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubCredentialsParams) (params.GithubCredentials, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubCredentialsParams) (params.ForgeCredentials, error)); ok { return rf(ctx, param) } - if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubCredentialsParams) params.GithubCredentials); ok { + if rf, ok := ret.Get(0).(func(context.Context, params.CreateGithubCredentialsParams) params.ForgeCredentials); ok { r0 = rf(ctx, param) } else { - r0 = ret.Get(0).(params.GithubCredentials) + r0 = ret.Get(0).(params.ForgeCredentials) } if rf, ok := ret.Get(1).(func(context.Context, params.CreateGithubCredentialsParams) error); ok { @@ -320,9 +376,9 @@ func (_m *Store) CreateOrganization(ctx context.Context, name string, credential return r0, r1 } -// CreateRepository provides a mock function with given fields: ctx, owner, name, credentialsName, webhookSecret, poolBalancerType -func (_m *Store) CreateRepository(ctx context.Context, owner string, name string, credentialsName string, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Repository, error) { - ret := _m.Called(ctx, owner, name, credentialsName, webhookSecret, poolBalancerType) +// CreateRepository provides a mock function with given fields: ctx, owner, name, credentials, webhookSecret, poolBalancerType +func (_m *Store) CreateRepository(ctx context.Context, owner string, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Repository, error) { + ret := _m.Called(ctx, owner, name, credentials, webhookSecret, poolBalancerType) if len(ret) == 0 { panic("no return value specified for CreateRepository") @@ -330,17 +386,17 @@ func (_m *Store) CreateRepository(ctx context.Context, owner string, name string var r0 params.Repository var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, params.PoolBalancerType) (params.Repository, error)); ok { - return rf(ctx, owner, name, credentialsName, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(0).(func(context.Context, string, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Repository, error)); ok { + return rf(ctx, owner, name, credentials, webhookSecret, poolBalancerType) } - if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string, params.PoolBalancerType) params.Repository); ok { - r0 = rf(ctx, owner, name, credentialsName, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(0).(func(context.Context, string, string, params.ForgeCredentials, string, params.PoolBalancerType) params.Repository); ok { + r0 = rf(ctx, owner, name, 
credentials, webhookSecret, poolBalancerType) } else { r0 = ret.Get(0).(params.Repository) } - if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string, params.PoolBalancerType) error); ok { - r1 = rf(ctx, owner, name, credentialsName, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(1).(func(context.Context, string, string, params.ForgeCredentials, string, params.PoolBalancerType) error); ok { + r1 = rf(ctx, owner, name, credentials, webhookSecret, poolBalancerType) } else { r1 = ret.Error(1) } @@ -458,6 +514,42 @@ func (_m *Store) DeleteEntityPool(ctx context.Context, entity params.ForgeEntity return r0 } +// DeleteGiteaCredentials provides a mock function with given fields: ctx, id +func (_m *Store) DeleteGiteaCredentials(ctx context.Context, id uint) error { + ret := _m.Called(ctx, id) + + if len(ret) == 0 { + panic("no return value specified for DeleteGiteaCredentials") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, uint) error); ok { + r0 = rf(ctx, id) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteGiteaEndpoint provides a mock function with given fields: _a0, name +func (_m *Store) DeleteGiteaEndpoint(_a0 context.Context, name string) error { + ret := _m.Called(_a0, name) + + if len(ret) == 0 { + panic("no return value specified for DeleteGiteaEndpoint") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(_a0, name) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // DeleteGithubCredentials provides a mock function with given fields: ctx, id func (_m *Store) DeleteGithubCredentials(ctx context.Context, id uint) error { ret := _m.Called(ctx, id) @@ -762,23 +854,135 @@ func (_m *Store) GetEntityPool(ctx context.Context, entity params.ForgeEntity, p return r0, r1 } +// GetForgeEntity provides a mock function with given fields: _a0, entityType, entityID +func (_m *Store) GetForgeEntity(_a0 context.Context, entityType params.ForgeEntityType, entityID string) (params.ForgeEntity, error) { + ret := _m.Called(_a0, entityType, entityID) + + if len(ret) == 0 { + panic("no return value specified for GetForgeEntity") + } + + var r0 params.ForgeEntity + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string) (params.ForgeEntity, error)); ok { + return rf(_a0, entityType, entityID) + } + if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string) params.ForgeEntity); ok { + r0 = rf(_a0, entityType, entityID) + } else { + r0 = ret.Get(0).(params.ForgeEntity) + } + + if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntityType, string) error); ok { + r1 = rf(_a0, entityType, entityID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetGiteaCredentials provides a mock function with given fields: ctx, id, detailed +func (_m *Store) GetGiteaCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) { + ret := _m.Called(ctx, id, detailed) + + if len(ret) == 0 { + panic("no return value specified for GetGiteaCredentials") + } + + var r0 params.ForgeCredentials + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, uint, bool) (params.ForgeCredentials, error)); ok { + return rf(ctx, id, detailed) + } + if rf, ok := ret.Get(0).(func(context.Context, uint, bool) params.ForgeCredentials); ok { + r0 = rf(ctx, id, detailed) + } else { + r0 = ret.Get(0).(params.ForgeCredentials) + } + + if rf, ok := ret.Get(1).(func(context.Context, uint, bool) error); ok { + r1 = rf(ctx, 
id, detailed) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetGiteaCredentialsByName provides a mock function with given fields: ctx, name, detailed +func (_m *Store) GetGiteaCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) { + ret := _m.Called(ctx, name, detailed) + + if len(ret) == 0 { + panic("no return value specified for GetGiteaCredentialsByName") + } + + var r0 params.ForgeCredentials + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, bool) (params.ForgeCredentials, error)); ok { + return rf(ctx, name, detailed) + } + if rf, ok := ret.Get(0).(func(context.Context, string, bool) params.ForgeCredentials); ok { + r0 = rf(ctx, name, detailed) + } else { + r0 = ret.Get(0).(params.ForgeCredentials) + } + + if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok { + r1 = rf(ctx, name, detailed) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetGiteaEndpoint provides a mock function with given fields: _a0, name +func (_m *Store) GetGiteaEndpoint(_a0 context.Context, name string) (params.ForgeEndpoint, error) { + ret := _m.Called(_a0, name) + + if len(ret) == 0 { + panic("no return value specified for GetGiteaEndpoint") + } + + var r0 params.ForgeEndpoint + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string) (params.ForgeEndpoint, error)); ok { + return rf(_a0, name) + } + if rf, ok := ret.Get(0).(func(context.Context, string) params.ForgeEndpoint); ok { + r0 = rf(_a0, name) + } else { + r0 = ret.Get(0).(params.ForgeEndpoint) + } + + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(_a0, name) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetGithubCredentials provides a mock function with given fields: ctx, id, detailed -func (_m *Store) GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.GithubCredentials, error) { +func (_m *Store) GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) { ret := _m.Called(ctx, id, detailed) if len(ret) == 0 { panic("no return value specified for GetGithubCredentials") } - var r0 params.GithubCredentials + var r0 params.ForgeCredentials var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint, bool) (params.GithubCredentials, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, uint, bool) (params.ForgeCredentials, error)); ok { return rf(ctx, id, detailed) } - if rf, ok := ret.Get(0).(func(context.Context, uint, bool) params.GithubCredentials); ok { + if rf, ok := ret.Get(0).(func(context.Context, uint, bool) params.ForgeCredentials); ok { r0 = rf(ctx, id, detailed) } else { - r0 = ret.Get(0).(params.GithubCredentials) + r0 = ret.Get(0).(params.ForgeCredentials) } if rf, ok := ret.Get(1).(func(context.Context, uint, bool) error); ok { @@ -791,22 +995,22 @@ func (_m *Store) GetGithubCredentials(ctx context.Context, id uint, detailed boo } // GetGithubCredentialsByName provides a mock function with given fields: ctx, name, detailed -func (_m *Store) GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.GithubCredentials, error) { +func (_m *Store) GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) { ret := _m.Called(ctx, name, detailed) if len(ret) == 0 { panic("no return value specified for GetGithubCredentialsByName") } - var r0 params.GithubCredentials + var r0 params.ForgeCredentials var r1 error - if rf, 
ok := ret.Get(0).(func(context.Context, string, bool) (params.GithubCredentials, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, bool) (params.ForgeCredentials, error)); ok { return rf(ctx, name, detailed) } - if rf, ok := ret.Get(0).(func(context.Context, string, bool) params.GithubCredentials); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, bool) params.ForgeCredentials); ok { r0 = rf(ctx, name, detailed) } else { - r0 = ret.Get(0).(params.GithubCredentials) + r0 = ret.Get(0).(params.ForgeCredentials) } if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok { @@ -846,34 +1050,6 @@ func (_m *Store) GetGithubEndpoint(ctx context.Context, name string) (params.For return r0, r1 } -// GetForgeEntity provides a mock function with given fields: _a0, entityType, entityID -func (_m *Store) GetForgeEntity(_a0 context.Context, entityType params.ForgeEntityType, entityID string) (params.ForgeEntity, error) { - ret := _m.Called(_a0, entityType, entityID) - - if len(ret) == 0 { - panic("no return value specified for GetForgeEntity") - } - - var r0 params.ForgeEntity - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string) (params.ForgeEntity, error)); ok { - return rf(_a0, entityType, entityID) - } - if rf, ok := ret.Get(0).(func(context.Context, params.ForgeEntityType, string) params.ForgeEntity); ok { - r0 = rf(_a0, entityType, entityID) - } else { - r0 = ret.Get(0).(params.ForgeEntity) - } - - if rf, ok := ret.Get(1).(func(context.Context, params.ForgeEntityType, string) error); ok { - r1 = rf(_a0, entityType, entityID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - // GetInstanceByName provides a mock function with given fields: ctx, instanceName func (_m *Store) GetInstanceByName(ctx context.Context, instanceName string) (params.Instance, error) { ret := _m.Called(ctx, instanceName) @@ -1498,24 +1674,84 @@ func (_m *Store) ListEntityScaleSets(_a0 context.Context, entity params.ForgeEnt return r0, r1 } +// ListGiteaCredentials provides a mock function with given fields: ctx +func (_m *Store) ListGiteaCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for ListGiteaCredentials") + } + + var r0 []params.ForgeCredentials + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) ([]params.ForgeCredentials, error)); ok { + return rf(ctx) + } + if rf, ok := ret.Get(0).(func(context.Context) []params.ForgeCredentials); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]params.ForgeCredentials) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListGiteaEndpoints provides a mock function with given fields: _a0 +func (_m *Store) ListGiteaEndpoints(_a0 context.Context) ([]params.ForgeEndpoint, error) { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for ListGiteaEndpoints") + } + + var r0 []params.ForgeEndpoint + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) ([]params.ForgeEndpoint, error)); ok { + return rf(_a0) + } + if rf, ok := ret.Get(0).(func(context.Context) []params.ForgeEndpoint); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]params.ForgeEndpoint) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + 
// ListGithubCredentials provides a mock function with given fields: ctx -func (_m *Store) ListGithubCredentials(ctx context.Context) ([]params.GithubCredentials, error) { +func (_m *Store) ListGithubCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { ret := _m.Called(ctx) if len(ret) == 0 { panic("no return value specified for ListGithubCredentials") } - var r0 []params.GithubCredentials + var r0 []params.ForgeCredentials var r1 error - if rf, ok := ret.Get(0).(func(context.Context) ([]params.GithubCredentials, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context) ([]params.ForgeCredentials, error)); ok { return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context) []params.GithubCredentials); ok { + if rf, ok := ret.Get(0).(func(context.Context) []params.ForgeCredentials); ok { r0 = rf(ctx) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]params.GithubCredentials) + r0 = ret.Get(0).([]params.ForgeCredentials) } } @@ -1920,23 +2156,79 @@ func (_m *Store) UpdateEntityScaleSet(_a0 context.Context, entity params.ForgeEn return r0, r1 } +// UpdateGiteaCredentials provides a mock function with given fields: ctx, id, param +func (_m *Store) UpdateGiteaCredentials(ctx context.Context, id uint, param params.UpdateGiteaCredentialsParams) (params.ForgeCredentials, error) { + ret := _m.Called(ctx, id, param) + + if len(ret) == 0 { + panic("no return value specified for UpdateGiteaCredentials") + } + + var r0 params.ForgeCredentials + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGiteaCredentialsParams) (params.ForgeCredentials, error)); ok { + return rf(ctx, id, param) + } + if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGiteaCredentialsParams) params.ForgeCredentials); ok { + r0 = rf(ctx, id, param) + } else { + r0 = ret.Get(0).(params.ForgeCredentials) + } + + if rf, ok := ret.Get(1).(func(context.Context, uint, params.UpdateGiteaCredentialsParams) error); ok { + r1 = rf(ctx, id, param) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateGiteaEndpoint provides a mock function with given fields: _a0, name, param +func (_m *Store) UpdateGiteaEndpoint(_a0 context.Context, name string, param params.UpdateGiteaEndpointParams) (params.ForgeEndpoint, error) { + ret := _m.Called(_a0, name, param) + + if len(ret) == 0 { + panic("no return value specified for UpdateGiteaEndpoint") + } + + var r0 params.ForgeEndpoint + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGiteaEndpointParams) (params.ForgeEndpoint, error)); ok { + return rf(_a0, name, param) + } + if rf, ok := ret.Get(0).(func(context.Context, string, params.UpdateGiteaEndpointParams) params.ForgeEndpoint); ok { + r0 = rf(_a0, name, param) + } else { + r0 = ret.Get(0).(params.ForgeEndpoint) + } + + if rf, ok := ret.Get(1).(func(context.Context, string, params.UpdateGiteaEndpointParams) error); ok { + r1 = rf(_a0, name, param) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // UpdateGithubCredentials provides a mock function with given fields: ctx, id, param -func (_m *Store) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.GithubCredentials, error) { +func (_m *Store) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.ForgeCredentials, error) { ret := _m.Called(ctx, id, param) if len(ret) == 0 { panic("no return value specified for UpdateGithubCredentials") } - var r0 
params.GithubCredentials + var r0 params.ForgeCredentials var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGithubCredentialsParams) (params.GithubCredentials, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGithubCredentialsParams) (params.ForgeCredentials, error)); ok { return rf(ctx, id, param) } - if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGithubCredentialsParams) params.GithubCredentials); ok { + if rf, ok := ret.Get(0).(func(context.Context, uint, params.UpdateGithubCredentialsParams) params.ForgeCredentials); ok { r0 = rf(ctx, id, param) } else { - r0 = ret.Get(0).(params.GithubCredentials) + r0 = ret.Get(0).(params.ForgeCredentials) } if rf, ok := ret.Get(1).(func(context.Context, uint, params.UpdateGithubCredentialsParams) error); ok { diff --git a/database/common/store.go b/database/common/store.go index e5458eaf..1f5b013b 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -29,16 +29,16 @@ type GithubEndpointStore interface { } type GithubCredentialsStore interface { - CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.GithubCredentials, error) - GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.GithubCredentials, error) - GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.GithubCredentials, error) - ListGithubCredentials(ctx context.Context) ([]params.GithubCredentials, error) - UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.GithubCredentials, error) + CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.ForgeCredentials, error) + GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) + GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) + ListGithubCredentials(ctx context.Context) ([]params.ForgeCredentials, error) + UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.ForgeCredentials, error) DeleteGithubCredentials(ctx context.Context, id uint) error } type RepoStore interface { - CreateRepository(ctx context.Context, owner, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Repository, error) + CreateRepository(ctx context.Context, owner, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (param params.Repository, err error) GetRepository(ctx context.Context, owner, name, endpointName string) (params.Repository, error) GetRepositoryByID(ctx context.Context, repoID string) (params.Repository, error) ListRepositories(ctx context.Context) ([]params.Repository, error) @@ -152,6 +152,23 @@ type ScaleSetInstanceStore interface { CreateScaleSetInstance(_ context.Context, scaleSetID uint, param params.CreateInstanceParams) (instance params.Instance, err error) } +type GiteaEndpointStore interface { + CreateGiteaEndpoint(_ context.Context, param params.CreateGiteaEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) + ListGiteaEndpoints(_ context.Context) ([]params.ForgeEndpoint, error) + DeleteGiteaEndpoint(_ context.Context, name string) (err error) + GetGiteaEndpoint(_ context.Context, name string) (params.ForgeEndpoint, error) + UpdateGiteaEndpoint(_ context.Context, name string, param 
params.UpdateGiteaEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) +} + +type GiteaCredentialsStore interface { + CreateGiteaCredentials(ctx context.Context, param params.CreateGiteaCredentialsParams) (gtCreds params.ForgeCredentials, err error) + GetGiteaCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) + GetGiteaCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) + ListGiteaCredentials(ctx context.Context) ([]params.ForgeCredentials, error) + UpdateGiteaCredentials(ctx context.Context, id uint, param params.UpdateGiteaCredentialsParams) (gtCreds params.ForgeCredentials, err error) + DeleteGiteaCredentials(ctx context.Context, id uint) (err error) +} + //go:generate mockery --name=Store type Store interface { RepoStore @@ -167,6 +184,8 @@ type Store interface { EntityPoolStore ScaleSetsStore ScaleSetInstanceStore + GiteaEndpointStore + GiteaCredentialsStore ControllerInfo() (params.ControllerInfo, error) InitController() (params.ControllerInfo, error) diff --git a/database/common/watcher.go b/database/common/watcher.go index 85df1151..4dc18437 100644 --- a/database/common/watcher.go +++ b/database/common/watcher.go @@ -18,6 +18,7 @@ const ( JobEntityType DatabaseEntityType = "job" ControllerEntityType DatabaseEntityType = "controller" GithubCredentialsEntityType DatabaseEntityType = "github_credentials" // #nosec G101 + GiteaCredentialsEntityType DatabaseEntityType = "gitea_credentials" // #nosec G101 GithubEndpointEntityType DatabaseEntityType = "github_endpoint" ScaleSetEntityType DatabaseEntityType = "scaleset" ) diff --git a/database/sql/enterprise.go b/database/sql/enterprise.go index 414a7aaf..26406ac5 100644 --- a/database/sql/enterprise.go +++ b/database/sql/enterprise.go @@ -45,7 +45,6 @@ func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name, credentialsNam newEnterprise := Enterprise{ Name: name, WebhookSecret: secret, - CredentialsName: credentialsName, PoolBalancerType: poolBalancerType, } err = s.conn.Transaction(func(tx *gorm.DB) error { @@ -57,7 +56,6 @@ func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name, credentialsNam return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") } newEnterprise.CredentialsID = &creds.ID - newEnterprise.CredentialsName = creds.Name newEnterprise.EndpointName = creds.EndpointName q := tx.Create(&newEnterprise) diff --git a/database/sql/enterprise_test.go b/database/sql/enterprise_test.go index 3e8f6493..4971f78f 100644 --- a/database/sql/enterprise_test.go +++ b/database/sql/enterprise_test.go @@ -53,8 +53,8 @@ type EnterpriseTestSuite struct { adminCtx context.Context adminUserID string - testCreds params.GithubCredentials - secondaryTestCreds params.GithubCredentials + testCreds params.ForgeCredentials + secondaryTestCreds params.ForgeCredentials githubEndpoint params.ForgeEndpoint } diff --git a/database/sql/gitea.go b/database/sql/gitea.go new file mode 100644 index 00000000..5ce46663 --- /dev/null +++ b/database/sql/gitea.go @@ -0,0 +1,469 @@ +package sql + +import ( + "context" + "log/slog" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/database/common" + "github.com/cloudbase/garm/params" + "github.com/pkg/errors" + "gorm.io/gorm" +) + +func (s *sqlDatabase) CreateGiteaEndpoint(_ context.Context, param params.CreateGiteaEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) { + defer func() { + if err 
== nil { + s.sendNotify(common.GithubEndpointEntityType, common.CreateOperation, ghEndpoint) + } + }() + var endpoint GithubEndpoint + err = s.conn.Transaction(func(tx *gorm.DB) error { + if err := tx.Where("name = ?", param.Name).First(&endpoint).Error; err == nil { + return errors.Wrap(runnerErrors.ErrDuplicateEntity, "github endpoint already exists") + } + endpoint = GithubEndpoint{ + Name: param.Name, + Description: param.Description, + APIBaseURL: param.APIBaseURL, + BaseURL: param.BaseURL, + CACertBundle: param.CACertBundle, + EndpointType: params.GiteaEndpointType, + } + + if err := tx.Create(&endpoint).Error; err != nil { + return errors.Wrap(err, "creating github endpoint") + } + return nil + }) + if err != nil { + return params.ForgeEndpoint{}, errors.Wrap(err, "creating github endpoint") + } + ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint) + if err != nil { + return params.ForgeEndpoint{}, errors.Wrap(err, "converting github endpoint") + } + return ghEndpoint, nil +} + +func (s *sqlDatabase) ListGiteaEndpoints(_ context.Context) ([]params.ForgeEndpoint, error) { + var endpoints []GithubEndpoint + err := s.conn.Where("endpoint_type = ?", params.GiteaEndpointType).Find(&endpoints).Error + if err != nil { + return nil, errors.Wrap(err, "fetching github endpoints") + } + + var ret []params.ForgeEndpoint + for _, ep := range endpoints { + commonEp, err := s.sqlToCommonGithubEndpoint(ep) + if err != nil { + return nil, errors.Wrap(err, "converting github endpoint") + } + ret = append(ret, commonEp) + } + return ret, nil +} + +func (s *sqlDatabase) UpdateGiteaEndpoint(_ context.Context, name string, param params.UpdateGiteaEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) { + if name == defaultGithubEndpoint { + return params.ForgeEndpoint{}, runnerErrors.NewBadRequestError("cannot update default endpoint %s", defaultGithubEndpoint) + } + + defer func() { + if err == nil { + s.sendNotify(common.GithubEndpointEntityType, common.UpdateOperation, ghEndpoint) + } + }() + var endpoint GithubEndpoint + err = s.conn.Transaction(func(tx *gorm.DB) error { + if err := tx.Where("name = ? and endpoint_type = ?", name, params.GiteaEndpointType).First(&endpoint).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(runnerErrors.ErrNotFound, "gitea endpoint not found") + } + return errors.Wrap(err, "fetching gitea endpoint") + } + if param.APIBaseURL != nil { + endpoint.APIBaseURL = *param.APIBaseURL + } + + if param.BaseURL != nil { + endpoint.BaseURL = *param.BaseURL + } + + if param.CACertBundle != nil { + endpoint.CACertBundle = param.CACertBundle + } + + if param.Description != nil { + endpoint.Description = *param.Description + } + + if err := tx.Save(&endpoint).Error; err != nil { + return errors.Wrap(err, "updating gitea endpoint") + } + + return nil + }) + if err != nil { + return params.ForgeEndpoint{}, errors.Wrap(err, "updating gitea endpoint") + } + ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint) + if err != nil { + return params.ForgeEndpoint{}, errors.Wrap(err, "converting gitea endpoint") + } + return ghEndpoint, nil +} + +func (s *sqlDatabase) GetGiteaEndpoint(_ context.Context, name string) (params.ForgeEndpoint, error) { + var endpoint GithubEndpoint + + err := s.conn.Where("name = ? 
and endpoint_type = ?", name, params.GiteaEndpointType).First(&endpoint).Error + if err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return params.ForgeEndpoint{}, errors.Wrap(runnerErrors.ErrNotFound, "gitea endpoint not found") + } + return params.ForgeEndpoint{}, errors.Wrap(err, "fetching gitea endpoint") + } + + return s.sqlToCommonGithubEndpoint(endpoint) +} + +func (s *sqlDatabase) DeleteGiteaEndpoint(_ context.Context, name string) (err error) { + if name == defaultGithubEndpoint { + return runnerErrors.NewBadRequestError("cannot delete default endpoint %s", defaultGithubEndpoint) + } + + defer func() { + if err == nil { + s.sendNotify(common.GithubEndpointEntityType, common.DeleteOperation, params.ForgeEndpoint{Name: name}) + } + }() + err = s.conn.Transaction(func(tx *gorm.DB) error { + var endpoint GithubEndpoint + if err := tx.Where("name = ? and endpoint_type = ?", name, params.GiteaEndpointType).First(&endpoint).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return nil + } + return errors.Wrap(err, "fetching gitea endpoint") + } + + var credsCount int64 + if err := tx.Model(&GithubCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(err, "fetching gitea credentials") + } + } + + var repoCnt int64 + if err := tx.Model(&Repository{}).Where("endpoint_name = ?", endpoint.Name).Count(&repoCnt).Error; err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(err, "fetching gitea repositories") + } + } + + var orgCnt int64 + if err := tx.Model(&Organization{}).Where("endpoint_name = ?", endpoint.Name).Count(&orgCnt).Error; err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(err, "fetching gitea organizations") + } + } + + var entCnt int64 + if err := tx.Model(&Enterprise{}).Where("endpoint_name = ?", endpoint.Name).Count(&entCnt).Error; err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(err, "fetching gitea enterprises") + } + } + + if credsCount > 0 || repoCnt > 0 || orgCnt > 0 || entCnt > 0 { + return errors.New("cannot delete endpoint with associated entities") + } + + if err := tx.Unscoped().Delete(&endpoint).Error; err != nil { + return errors.Wrap(err, "deleting gitea endpoint") + } + return nil + }) + if err != nil { + return errors.Wrap(err, "deleting gitea endpoint") + } + return nil +} + +func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.CreateGiteaCredentialsParams) (gtCreds params.ForgeCredentials, err error) { + userID, err := getUIDFromContext(ctx) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "creating github credentials") + } + if param.Endpoint == "" { + return params.ForgeCredentials{}, errors.Wrap(runnerErrors.ErrBadRequest, "endpoint name is required") + } + + defer func() { + if err == nil { + s.sendNotify(common.GiteaCredentialsEntityType, common.CreateOperation, gtCreds) + } + }() + var creds GiteaCredentials + err = s.conn.Transaction(func(tx *gorm.DB) error { + var endpoint GithubEndpoint + if err := tx.Where("name = ? and endpoint_type = ?", param.Endpoint, params.GiteaEndpointType).First(&endpoint).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found") + } + return errors.Wrap(err, "fetching github endpoint") + } + + if err := tx.Where("name = ? 
and user_id = ?", param.Name, userID).First(&creds).Error; err == nil { + return errors.Wrap(runnerErrors.ErrDuplicateEntity, "github credentials already exists") + } + + var data []byte + var err error + switch param.AuthType { + case params.ForgeAuthTypePAT: + data, err = s.marshalAndSeal(param.PAT) + case params.ForgeAuthTypeApp: + data, err = s.marshalAndSeal(param.App) + default: + return errors.Wrap(runnerErrors.ErrBadRequest, "invalid auth type") + } + if err != nil { + return errors.Wrap(err, "marshaling and sealing credentials") + } + + creds = GiteaCredentials{ + Name: param.Name, + Description: param.Description, + EndpointName: &endpoint.Name, + AuthType: param.AuthType, + Payload: data, + UserID: &userID, + } + + if err := tx.Create(&creds).Error; err != nil { + return errors.Wrap(err, "creating github credentials") + } + // Skip making an extra query. + creds.Endpoint = endpoint + + return nil + }) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "creating github credentials") + } + gtCreds, err = s.sqlGiteaToCommonForgeCredentials(creds) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "converting github credentials") + } + return gtCreds, nil +} + +func (s *sqlDatabase) getGiteaCredentialsByName(ctx context.Context, tx *gorm.DB, name string, detailed bool) (GiteaCredentials, error) { + var creds GiteaCredentials + q := tx.Preload("Endpoint") + + if detailed { + q = q. + Preload("Repositories"). + Preload("Organizations") + } + + userID, err := getUIDFromContext(ctx) + if err != nil { + return GiteaCredentials{}, errors.Wrap(err, "fetching gitea credentials") + } + q = q.Where("user_id = ?", userID) + + err = q.Where("name = ?", name).First(&creds).Error + if err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return GiteaCredentials{}, errors.Wrap(runnerErrors.ErrNotFound, "gitea credentials not found") + } + return GiteaCredentials{}, errors.Wrap(err, "fetching gitea credentials") + } + + return creds, nil +} + +func (s *sqlDatabase) GetGiteaCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) { + creds, err := s.getGiteaCredentialsByName(ctx, s.conn, name, detailed) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "fetching gitea credentials") + } + + return s.sqlGiteaToCommonForgeCredentials(creds) +} + +func (s *sqlDatabase) GetGiteaCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) { + var creds GiteaCredentials + q := s.conn.Preload("Endpoint") + + if detailed { + q = q. + Preload("Repositories"). 
+ Preload("Organizations") + } + + if !auth.IsAdmin(ctx) { + userID, err := getUIDFromContext(ctx) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "fetching gitea credentials") + } + q = q.Where("user_id = ?", userID) + } + + err := q.Where("id = ?", id).First(&creds).Error + if err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return params.ForgeCredentials{}, errors.Wrap(runnerErrors.ErrNotFound, "gitea credentials not found") + } + return params.ForgeCredentials{}, errors.Wrap(err, "fetching gitea credentials") + } + + return s.sqlGiteaToCommonForgeCredentials(creds) +} + +func (s *sqlDatabase) ListGiteaCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { + q := s.conn.Preload("Endpoint") + if !auth.IsAdmin(ctx) { + userID, err := getUIDFromContext(ctx) + if err != nil { + return nil, errors.Wrap(err, "fetching gitea credentials") + } + q = q.Where("user_id = ?", userID) + } + + var creds []GiteaCredentials + err := q.Preload("Endpoint").Find(&creds).Error + if err != nil { + return nil, errors.Wrap(err, "fetching gitea credentials") + } + + var ret []params.ForgeCredentials + for _, c := range creds { + commonCreds, err := s.sqlGiteaToCommonForgeCredentials(c) + if err != nil { + return nil, errors.Wrap(err, "converting gitea credentials") + } + ret = append(ret, commonCreds) + } + return ret, nil +} + +func (s *sqlDatabase) UpdateGiteaCredentials(ctx context.Context, id uint, param params.UpdateGiteaCredentialsParams) (gtCreds params.ForgeCredentials, err error) { + defer func() { + if err == nil { + s.sendNotify(common.GiteaCredentialsEntityType, common.UpdateOperation, gtCreds) + } + }() + var creds GiteaCredentials + err = s.conn.Transaction(func(tx *gorm.DB) error { + q := tx.Preload("Endpoint") + if !auth.IsAdmin(ctx) { + userID, err := getUIDFromContext(ctx) + if err != nil { + return errors.Wrap(err, "updating gitea credentials") + } + q = q.Where("user_id = ?", userID) + } + + if err := q.Where("id = ?", id).First(&creds).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(runnerErrors.ErrNotFound, "gitea credentials not found") + } + return errors.Wrap(err, "fetching gitea credentials") + } + + if param.Name != nil { + creds.Name = *param.Name + } + if param.Description != nil { + creds.Description = *param.Description + } + + var data []byte + var err error + switch creds.AuthType { + case params.ForgeAuthTypePAT: + if param.PAT != nil { + data, err = s.marshalAndSeal(param.PAT) + } + default: + return errors.Wrap(runnerErrors.ErrBadRequest, "invalid auth type") + } + + if err != nil { + return errors.Wrap(err, "marshaling and sealing credentials") + } + if len(data) > 0 { + creds.Payload = data + } + + if err := tx.Save(&creds).Error; err != nil { + return errors.Wrap(err, "updating gitea credentials") + } + return nil + }) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "updating gitea credentials") + } + + gtCreds, err = s.sqlGiteaToCommonForgeCredentials(creds) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "converting gitea credentials") + } + return gtCreds, nil +} + +func (s *sqlDatabase) DeleteGiteaCredentials(ctx context.Context, id uint) (err error) { + var creds GiteaCredentials + defer func() { + if err == nil { + forgeCreds, innerErr := s.sqlGiteaToCommonForgeCredentials(creds) + if innerErr != nil { + slog.ErrorContext(ctx, "converting gitea credentials", "error", innerErr) + } + if creds.ID == 0 || creds.Name == "" { + return + } 
+ s.sendNotify(common.GiteaCredentialsEntityType, common.DeleteOperation, forgeCreds) + } + }() + err = s.conn.Transaction(func(tx *gorm.DB) error { + q := tx.Where("id = ?", id). + Preload("Repositories"). + Preload("Organizations") + if !auth.IsAdmin(ctx) { + userID, err := getUIDFromContext(ctx) + if err != nil { + return errors.Wrap(err, "deleting gitea credentials") + } + q = q.Where("user_id = ?", userID) + } + + err := q.First(&creds).Error + if err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return nil + } + return errors.Wrap(err, "fetching gitea credentials") + } + + if len(creds.Repositories) > 0 { + return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete credentials with repositories") + } + if len(creds.Organizations) > 0 { + return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete credentials with organizations") + } + if err := tx.Unscoped().Delete(&creds).Error; err != nil { + return errors.Wrap(err, "deleting gitea credentials") + } + return nil + }) + if err != nil { + return errors.Wrap(err, "deleting gitea credentials") + } + return nil +} diff --git a/database/sql/github.go b/database/sql/github.go index 861c824c..d2c05244 100644 --- a/database/sql/github.go +++ b/database/sql/github.go @@ -17,12 +17,10 @@ package sql import ( "context" - "github.com/google/uuid" "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" - "github.com/cloudbase/garm-provider-common/util" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" @@ -32,89 +30,6 @@ const ( defaultGithubEndpoint string = "github.com" ) -func (s *sqlDatabase) sqlToCommonGithubCredentials(creds GithubCredentials) (params.GithubCredentials, error) { - if len(creds.Payload) == 0 { - return params.GithubCredentials{}, errors.New("empty credentials payload") - } - data, err := util.Unseal(creds.Payload, []byte(s.cfg.Passphrase)) - if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "unsealing credentials") - } - - ep, err := s.sqlToCommonGithubEndpoint(creds.Endpoint) - if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "converting github endpoint") - } - - commonCreds := params.GithubCredentials{ - ID: creds.ID, - Name: creds.Name, - Description: creds.Description, - APIBaseURL: creds.Endpoint.APIBaseURL, - BaseURL: creds.Endpoint.BaseURL, - UploadBaseURL: creds.Endpoint.UploadBaseURL, - CABundle: creds.Endpoint.CACertBundle, - AuthType: creds.AuthType, - CreatedAt: creds.CreatedAt, - UpdatedAt: creds.UpdatedAt, - Endpoint: ep, - CredentialsPayload: data, - } - - for _, repo := range creds.Repositories { - commonRepo, err := s.sqlToCommonRepository(repo, false) - if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "converting github repository") - } - commonCreds.Repositories = append(commonCreds.Repositories, commonRepo) - } - - for _, org := range creds.Organizations { - commonOrg, err := s.sqlToCommonOrganization(org, false) - if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "converting github organization") - } - commonCreds.Organizations = append(commonCreds.Organizations, commonOrg) - } - - for _, ent := range creds.Enterprises { - commonEnt, err := s.sqlToCommonEnterprise(ent, false) - if err != nil { - return params.GithubCredentials{}, errors.Wrapf(err, "converting github enterprise: %s", ent.Name) - } - commonCreds.Enterprises = append(commonCreds.Enterprises, commonEnt) - } - - return commonCreds, nil -} 
- -func (s *sqlDatabase) sqlToCommonGithubEndpoint(ep GithubEndpoint) (params.ForgeEndpoint, error) { - return params.ForgeEndpoint{ - Name: ep.Name, - Description: ep.Description, - APIBaseURL: ep.APIBaseURL, - BaseURL: ep.BaseURL, - UploadBaseURL: ep.UploadBaseURL, - CACertBundle: ep.CACertBundle, - CreatedAt: ep.CreatedAt, - EndpointType: ep.EndpointType, - UpdatedAt: ep.UpdatedAt, - }, nil -} - -func getUIDFromContext(ctx context.Context) (uuid.UUID, error) { - userID := auth.UserID(ctx) - if userID == "" { - return uuid.Nil, errors.Wrap(runnerErrors.ErrUnauthorized, "getting UID from context") - } - - asUUID, err := uuid.Parse(userID) - if err != nil { - return uuid.Nil, errors.Wrap(runnerErrors.ErrUnauthorized, "parsing UID from context") - } - return asUUID, nil -} - func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.CreateGithubEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) { defer func() { if err == nil { @@ -133,6 +48,7 @@ func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.Creat BaseURL: param.BaseURL, UploadBaseURL: param.UploadBaseURL, CACertBundle: param.CACertBundle, + EndpointType: params.GithubEndpointType, } if err := tx.Create(&endpoint).Error; err != nil { @@ -152,7 +68,7 @@ func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.Creat func (s *sqlDatabase) ListGithubEndpoints(_ context.Context) ([]params.ForgeEndpoint, error) { var endpoints []GithubEndpoint - err := s.conn.Find(&endpoints).Error + err := s.conn.Where("endpoint_type = ?", params.GithubEndpointType).Find(&endpoints).Error if err != nil { return nil, errors.Wrap(err, "fetching github endpoints") } @@ -180,7 +96,7 @@ func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param }() var endpoint GithubEndpoint err = s.conn.Transaction(func(tx *gorm.DB) error { - if err := tx.Where("name = ?", name).First(&endpoint).Error; err != nil { + if err := tx.Where("name = ? and endpoint_type = ?", name, params.GithubEndpointType).First(&endpoint).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { return errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found") } @@ -225,7 +141,7 @@ func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param func (s *sqlDatabase) GetGithubEndpoint(_ context.Context, name string) (params.ForgeEndpoint, error) { var endpoint GithubEndpoint - err := s.conn.Where("name = ?", name).First(&endpoint).Error + err := s.conn.Where("name = ? and endpoint_type = ?", name, params.GithubEndpointType).First(&endpoint).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { return params.ForgeEndpoint{}, errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found") @@ -248,7 +164,7 @@ func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err }() err = s.conn.Transaction(func(tx *gorm.DB) error { var endpoint GithubEndpoint - if err := tx.Where("name = ?", name).First(&endpoint).Error; err != nil { + if err := tx.Where("name = ? 
and endpoint_type = ?", name, params.GithubEndpointType).First(&endpoint).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { return nil } @@ -298,13 +214,13 @@ func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err return nil } -func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (ghCreds params.GithubCredentials, err error) { +func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (ghCreds params.ForgeCredentials, err error) { userID, err := getUIDFromContext(ctx) if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "creating github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "creating github credentials") } if param.Endpoint == "" { - return params.GithubCredentials{}, errors.Wrap(runnerErrors.ErrBadRequest, "endpoint name is required") + return params.ForgeCredentials{}, errors.Wrap(runnerErrors.ErrBadRequest, "endpoint name is required") } defer func() { @@ -315,7 +231,7 @@ func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params. var creds GithubCredentials err = s.conn.Transaction(func(tx *gorm.DB) error { var endpoint GithubEndpoint - if err := tx.Where("name = ?", param.Endpoint).First(&endpoint).Error; err != nil { + if err := tx.Where("name = ? and endpoint_type = ?", param.Endpoint, params.GithubEndpointType).First(&endpoint).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { return errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found") } @@ -358,11 +274,11 @@ func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params. return nil }) if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "creating github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "creating github credentials") } - ghCreds, err = s.sqlToCommonGithubCredentials(creds) + ghCreds, err = s.sqlToCommonForgeCredentials(creds) if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "converting github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "converting github credentials") } return ghCreds, nil } @@ -395,16 +311,16 @@ func (s *sqlDatabase) getGithubCredentialsByName(ctx context.Context, tx *gorm.D return creds, nil } -func (s *sqlDatabase) GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.GithubCredentials, error) { +func (s *sqlDatabase) GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) { creds, err := s.getGithubCredentialsByName(ctx, s.conn, name, detailed) if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "fetching github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "fetching github credentials") } - return s.sqlToCommonGithubCredentials(creds) + return s.sqlToCommonForgeCredentials(creds) } -func (s *sqlDatabase) GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.GithubCredentials, error) { +func (s *sqlDatabase) GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) { var creds GithubCredentials q := s.conn.Preload("Endpoint") @@ -418,7 +334,7 @@ func (s *sqlDatabase) GetGithubCredentials(ctx context.Context, id uint, detaile if !auth.IsAdmin(ctx) { userID, err := getUIDFromContext(ctx) if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "fetching github 
credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "fetching github credentials") } q = q.Where("user_id = ?", userID) } @@ -426,15 +342,15 @@ func (s *sqlDatabase) GetGithubCredentials(ctx context.Context, id uint, detaile err := q.Where("id = ?", id).First(&creds).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return params.GithubCredentials{}, errors.Wrap(runnerErrors.ErrNotFound, "github credentials not found") + return params.ForgeCredentials{}, errors.Wrap(runnerErrors.ErrNotFound, "github credentials not found") } - return params.GithubCredentials{}, errors.Wrap(err, "fetching github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "fetching github credentials") } - return s.sqlToCommonGithubCredentials(creds) + return s.sqlToCommonForgeCredentials(creds) } -func (s *sqlDatabase) ListGithubCredentials(ctx context.Context) ([]params.GithubCredentials, error) { +func (s *sqlDatabase) ListGithubCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { q := s.conn.Preload("Endpoint") if !auth.IsAdmin(ctx) { userID, err := getUIDFromContext(ctx) @@ -450,9 +366,9 @@ func (s *sqlDatabase) ListGithubCredentials(ctx context.Context) ([]params.Githu return nil, errors.Wrap(err, "fetching github credentials") } - var ret []params.GithubCredentials + var ret []params.ForgeCredentials for _, c := range creds { - commonCreds, err := s.sqlToCommonGithubCredentials(c) + commonCreds, err := s.sqlToCommonForgeCredentials(c) if err != nil { return nil, errors.Wrap(err, "converting github credentials") } @@ -461,7 +377,7 @@ func (s *sqlDatabase) ListGithubCredentials(ctx context.Context) ([]params.Githu return ret, nil } -func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (ghCreds params.GithubCredentials, err error) { +func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (ghCreds params.ForgeCredentials, err error) { defer func() { if err == nil { s.sendNotify(common.GithubCredentialsEntityType, common.UpdateOperation, ghCreds) @@ -530,12 +446,12 @@ func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, para return nil }) if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "updating github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "updating github credentials") } - ghCreds, err = s.sqlToCommonGithubCredentials(creds) + ghCreds, err = s.sqlToCommonForgeCredentials(creds) if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "converting github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "converting github credentials") } return ghCreds, nil } @@ -544,7 +460,7 @@ func (s *sqlDatabase) DeleteGithubCredentials(ctx context.Context, id uint) (err var name string defer func() { if err == nil { - s.sendNotify(common.GithubCredentialsEntityType, common.DeleteOperation, params.GithubCredentials{ID: id, Name: name}) + s.sendNotify(common.GithubCredentialsEntityType, common.DeleteOperation, params.ForgeCredentials{ID: id, Name: name}) } }() err = s.conn.Transaction(func(tx *gorm.DB) error { diff --git a/database/sql/github_test.go b/database/sql/github_test.go index e46d963d..49de9aa3 100644 --- a/database/sql/github_test.go +++ b/database/sql/github_test.go @@ -533,7 +533,7 @@ func (s *GithubTestSuite) TestDeleteCredentialsFailsIfReposOrgsOrEntitiesUseIt() s.Require().NoError(err) s.Require().NotNil(creds) - 
repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds.Name, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotNil(repo) diff --git a/database/sql/models.go b/database/sql/models.go index 2accccc4..0ff2d8f4 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -1,17 +1,3 @@ -// Copyright 2022 Cloudbase Solutions SRL -// -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - package sql import ( @@ -46,6 +32,22 @@ func (b *Base) BeforeCreate(_ *gorm.DB) error { return nil } +type ControllerInfo struct { + Base + + ControllerID uuid.UUID + + CallbackURL string + MetadataURL string + WebhookBaseURL string + // MinimumJobAgeBackoff is the minimum time that a job must be in the queue + // before GARM will attempt to allocate a runner to service it. This backoff + // is useful if you have idle runners in various pools that could potentially + // pick up the job. GARM would allow this amount of time for runners to react + // before spinning up a new one and potentially having to scale down later. + MinimumJobAgeBackoff uint +} + type Tag struct { Base @@ -152,11 +154,12 @@ type RepositoryEvent struct { type Repository struct { Base - CredentialsName string - CredentialsID *uint `gorm:"index"` Credentials GithubCredentials `gorm:"foreignKey:CredentialsID;constraint:OnDelete:SET NULL"` + GiteaCredentialsID *uint `gorm:"index"` + GiteaCredentials GiteaCredentials `gorm:"foreignKey:GiteaCredentialsID;constraint:OnDelete:SET NULL"` + Owner string `gorm:"index:idx_owner_nocase,unique,collate:nocase"` Name string `gorm:"index:idx_owner_nocase,unique,collate:nocase"` WebhookSecret []byte @@ -184,11 +187,12 @@ type OrganizationEvent struct { type Organization struct { Base - CredentialsName string - CredentialsID *uint `gorm:"index"` Credentials GithubCredentials `gorm:"foreignKey:CredentialsID;constraint:OnDelete:SET NULL"` + GiteaCredentialsID *uint `gorm:"index"` + GiteaCredentials GiteaCredentials `gorm:"foreignKey:GiteaCredentialsID;constraint:OnDelete:SET NULL"` + Name string `gorm:"index:idx_org_name_nocase,collate:nocase"` WebhookSecret []byte Pools []Pool `gorm:"foreignKey:OrgID"` @@ -216,8 +220,6 @@ type EnterpriseEvent struct { type Enterprise struct { Base - CredentialsName string - CredentialsID *uint `gorm:"index"` Credentials GithubCredentials `gorm:"foreignKey:CredentialsID;constraint:OnDelete:SET NULL"` @@ -300,22 +302,6 @@ type User struct { Enabled bool } -type ControllerInfo struct { - Base - - ControllerID uuid.UUID - - CallbackURL string - MetadataURL string - WebhookBaseURL string - // MinimumJobAgeBackoff is the minimum time that a job must be in the queue - // before GARM will attempt to allocate a runner to service it. This backoff - // is useful if you have idle runners in various pools that could potentially - // pick up the job. 
GARM would allow this amount of time for runners to react - // before spinning up a new one and potentially having to scale down later. - MinimumJobAgeBackoff uint -} - type WorkflowJob struct { // ID is the ID of the job. ID int64 `gorm:"index"` @@ -381,7 +367,7 @@ type GithubEndpoint struct { UpdatedAt time.Time DeletedAt gorm.DeletedAt `gorm:"index"` - EndpointType params.EndpointType + EndpointType params.EndpointType `gorm:"index:idx_endpoint_type"` Description string `gorm:"type:text"` APIBaseURL string `gorm:"type:text collate nocase"` @@ -408,3 +394,21 @@ type GithubCredentials struct { Organizations []Organization `gorm:"foreignKey:CredentialsID"` Enterprises []Enterprise `gorm:"foreignKey:CredentialsID"` } + +type GiteaCredentials struct { + gorm.Model + + Name string `gorm:"index:idx_gitea_credentials,unique;type:varchar(64) collate nocase"` + UserID *uuid.UUID `gorm:"index:idx_gitea_credentials,unique"` + User User `gorm:"foreignKey:UserID"` + + Description string `gorm:"type:text"` + AuthType params.ForgeAuthType `gorm:"index"` + Payload []byte `gorm:"type:longblob"` + + Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName"` + EndpointName *string `gorm:"index"` + + Repositories []Repository `gorm:"foreignKey:GiteaCredentialsID"` + Organizations []Organization `gorm:"foreignKey:GiteaCredentialsID"` +} diff --git a/database/sql/organizations.go b/database/sql/organizations.go index 07ce32d8..bf270445 100644 --- a/database/sql/organizations.go +++ b/database/sql/organizations.go @@ -46,7 +46,6 @@ func (s *sqlDatabase) CreateOrganization(ctx context.Context, name, credentialsN newOrg := Organization{ Name: name, WebhookSecret: secret, - CredentialsName: credentialsName, PoolBalancerType: poolBalancerType, } @@ -59,7 +58,6 @@ func (s *sqlDatabase) CreateOrganization(ctx context.Context, name, credentialsN return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") } newOrg.CredentialsID = &creds.ID - newOrg.CredentialsName = creds.Name newOrg.EndpointName = creds.EndpointName q := tx.Create(&newOrg) @@ -166,7 +164,6 @@ func (s *sqlDatabase) UpdateOrganization(ctx context.Context, orgID string, para } if param.CredentialsName != "" { - org.CredentialsName = param.CredentialsName creds, err = s.getGithubCredentialsByName(ctx, tx, param.CredentialsName, false) if err != nil { return errors.Wrap(err, "fetching credentials") diff --git a/database/sql/organizations_test.go b/database/sql/organizations_test.go index 030a3abe..a7ad23b4 100644 --- a/database/sql/organizations_test.go +++ b/database/sql/organizations_test.go @@ -53,8 +53,8 @@ type OrgTestSuite struct { adminCtx context.Context adminUserID string - testCreds params.GithubCredentials - secondaryTestCreds params.GithubCredentials + testCreds params.ForgeCredentials + secondaryTestCreds params.ForgeCredentials githubEndpoint params.ForgeEndpoint } diff --git a/database/sql/pools_test.go b/database/sql/pools_test.go index 758dcacd..dfb82510 100644 --- a/database/sql/pools_test.go +++ b/database/sql/pools_test.go @@ -211,7 +211,7 @@ func (s *PoolsTestSuite) TestEntityPoolOperations() { ep := garmTesting.CreateDefaultGithubEndpoint(s.ctx, s.Store, s.T()) creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.Store, s.T(), ep) s.T().Cleanup(func() { s.Store.DeleteGithubCredentials(s.ctx, creds.ID) }) - repo, err := s.Store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + repo, err := s.Store.CreateRepository(s.ctx, 
"test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(repo.ID) s.T().Cleanup(func() { s.Store.DeleteRepository(s.ctx, repo.ID) }) @@ -291,7 +291,7 @@ func (s *PoolsTestSuite) TestListEntityInstances() { ep := garmTesting.CreateDefaultGithubEndpoint(s.ctx, s.Store, s.T()) creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.Store, s.T(), ep) s.T().Cleanup(func() { s.Store.DeleteGithubCredentials(s.ctx, creds.ID) }) - repo, err := s.Store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + repo, err := s.Store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(repo.ID) s.T().Cleanup(func() { s.Store.DeleteRepository(s.ctx, repo.ID) }) diff --git a/database/sql/repositories.go b/database/sql/repositories.go index 6b744163..d7419070 100644 --- a/database/sql/repositories.go +++ b/database/sql/repositories.go @@ -29,7 +29,7 @@ import ( "github.com/cloudbase/garm/params" ) -func (s *sqlDatabase) CreateRepository(ctx context.Context, owner, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (param params.Repository, err error) { +func (s *sqlDatabase) CreateRepository(ctx context.Context, owner, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (param params.Repository, err error) { defer func() { if err == nil { s.sendNotify(common.RepositoryEntityType, common.CreateOperation, param) @@ -51,32 +51,32 @@ func (s *sqlDatabase) CreateRepository(ctx context.Context, owner, name, credent PoolBalancerType: poolBalancerType, } err = s.conn.Transaction(func(tx *gorm.DB) error { - creds, err := s.getGithubCredentialsByName(ctx, tx, credentialsName, false) - if err != nil { - return errors.Wrap(err, "creating repository") + switch credentials.ForgeType { + case params.GithubEndpointType: + newRepo.CredentialsID = &credentials.ID + case params.GiteaEndpointType: + newRepo.GiteaCredentialsID = &credentials.ID + default: + return errors.Wrap(runnerErrors.ErrBadRequest, "unsupported credentials type") } - if creds.EndpointName == nil { - return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") - } - newRepo.CredentialsID = &creds.ID - newRepo.CredentialsName = creds.Name - newRepo.EndpointName = creds.EndpointName + newRepo.EndpointName = &credentials.Endpoint.Name q := tx.Create(&newRepo) if q.Error != nil { return errors.Wrap(q.Error, "creating repository") } - - newRepo.Credentials = creds - newRepo.Endpoint = creds.Endpoint - return nil }) if err != nil { return params.Repository{}, errors.Wrap(err, "creating repository") } - param, err = s.sqlToCommonRepository(newRepo, true) + repo, err := s.getRepoByID(ctx, s.conn, newRepo.ID.String(), "Endpoint", "Credentials", "GiteaCredentials", "Credentials.Endpoint", "GiteaCredentials.Endpoint") + if err != nil { + return params.Repository{}, errors.Wrap(err, "creating repository") + } + + param, err = s.sqlToCommonRepository(repo, true) if err != nil { return params.Repository{}, errors.Wrap(err, "creating repository") } @@ -102,7 +102,9 @@ func (s *sqlDatabase) ListRepositories(_ context.Context) ([]params.Repository, var repos []Repository q := s.conn. Preload("Credentials"). + Preload("GiteaCredentials"). Preload("Credentials.Endpoint"). + Preload("GiteaCredentials.Endpoint"). 
Preload("Endpoint"). Find(&repos) if q.Error != nil { @@ -122,7 +124,7 @@ func (s *sqlDatabase) ListRepositories(_ context.Context) ([]params.Repository, } func (s *sqlDatabase) DeleteRepository(ctx context.Context, repoID string) (err error) { - repo, err := s.getRepoByID(ctx, s.conn, repoID, "Endpoint", "Credentials", "Credentials.Endpoint") + repo, err := s.getRepoByID(ctx, s.conn, repoID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") if err != nil { return errors.Wrap(err, "fetching repo") } @@ -165,7 +167,6 @@ func (s *sqlDatabase) UpdateRepository(ctx context.Context, repoID string, param } if param.CredentialsName != "" { - repo.CredentialsName = param.CredentialsName creds, err = s.getGithubCredentialsByName(ctx, tx, param.CredentialsName, false) if err != nil { return errors.Wrap(err, "fetching credentials") @@ -203,7 +204,7 @@ func (s *sqlDatabase) UpdateRepository(ctx context.Context, repoID string, param return params.Repository{}, errors.Wrap(err, "saving repo") } - repo, err = s.getRepoByID(ctx, s.conn, repoID, "Endpoint", "Credentials", "Credentials.Endpoint") + repo, err = s.getRepoByID(ctx, s.conn, repoID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") if err != nil { return params.Repository{}, errors.Wrap(err, "updating enterprise") } @@ -216,7 +217,7 @@ func (s *sqlDatabase) UpdateRepository(ctx context.Context, repoID string, param } func (s *sqlDatabase) GetRepositoryByID(ctx context.Context, repoID string) (params.Repository, error) { - repo, err := s.getRepoByID(ctx, s.conn, repoID, "Pools", "Credentials", "Endpoint", "Credentials.Endpoint") + repo, err := s.getRepoByID(ctx, s.conn, repoID, "Pools", "Credentials", "Endpoint", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") if err != nil { return params.Repository{}, errors.Wrap(err, "fetching repo") } @@ -234,6 +235,8 @@ func (s *sqlDatabase) getRepo(_ context.Context, owner, name, endpointName strin q := s.conn.Where("name = ? COLLATE NOCASE and owner = ? COLLATE NOCASE and endpoint_name = ? COLLATE NOCASE", name, owner, endpointName). Preload("Credentials"). Preload("Credentials.Endpoint"). + Preload("GiteaCredentials"). + Preload("GiteaCredentials.Endpoint"). Preload("Endpoint"). 
First(&repo) diff --git a/database/sql/repositories_test.go b/database/sql/repositories_test.go index f43b9357..73104a2f 100644 --- a/database/sql/repositories_test.go +++ b/database/sql/repositories_test.go @@ -58,8 +58,8 @@ type RepoTestSuite struct { adminCtx context.Context adminUserID string - testCreds params.GithubCredentials - secondaryTestCreds params.GithubCredentials + testCreds params.ForgeCredentials + secondaryTestCreds params.ForgeCredentials githubEndpoint params.ForgeEndpoint } @@ -119,7 +119,7 @@ func (s *RepoTestSuite) SetupTest() { adminCtx, fmt.Sprintf("test-owner-%d", i), fmt.Sprintf("test-repo-%d", i), - s.testCreds.Name, + s.testCreds, fmt.Sprintf("test-webhook-secret-%d", i), params.PoolBalancerTypeRoundRobin, ) @@ -204,7 +204,7 @@ func (s *RepoTestSuite) TestCreateRepository() { s.adminCtx, s.Fixtures.CreateRepoParams.Owner, s.Fixtures.CreateRepoParams.Name, - s.Fixtures.CreateRepoParams.CredentialsName, + s.testCreds, s.Fixtures.CreateRepoParams.WebhookSecret, params.PoolBalancerTypeRoundRobin, ) @@ -238,7 +238,7 @@ func (s *RepoTestSuite) TestCreateRepositoryInvalidDBPassphrase() { s.adminCtx, s.Fixtures.CreateRepoParams.Owner, s.Fixtures.CreateRepoParams.Name, - s.Fixtures.CreateRepoParams.CredentialsName, + s.testCreds, s.Fixtures.CreateRepoParams.WebhookSecret, params.PoolBalancerTypeRoundRobin, ) @@ -267,7 +267,7 @@ func (s *RepoTestSuite) TestCreateRepositoryInvalidDBCreateErr() { s.adminCtx, s.Fixtures.CreateRepoParams.Owner, s.Fixtures.CreateRepoParams.Name, - s.Fixtures.CreateRepoParams.CredentialsName, + s.testCreds, s.Fixtures.CreateRepoParams.WebhookSecret, params.PoolBalancerTypeRoundRobin, ) diff --git a/database/sql/scalesets_test.go b/database/sql/scalesets_test.go index 9b8b241d..1313af59 100644 --- a/database/sql/scalesets_test.go +++ b/database/sql/scalesets_test.go @@ -19,7 +19,7 @@ type ScaleSetsTestSuite struct { suite.Suite Store dbCommon.Store adminCtx context.Context - creds params.GithubCredentials + creds params.ForgeCredentials org params.Organization repo params.Repository @@ -53,7 +53,7 @@ func (s *ScaleSetsTestSuite) SetupTest() { s.FailNow(fmt.Sprintf("failed to create org: %s", err)) } - s.repo, err = s.Store.CreateRepository(s.adminCtx, "test-org", "test-repo", s.creds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) + s.repo, err = s.Store.CreateRepository(s.adminCtx, "test-org", "test-repo", s.creds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) if err != nil { s.FailNow(fmt.Sprintf("failed to create repo: %s", err)) } diff --git a/database/sql/sql.go b/database/sql/sql.go index 82601316..167e90ed 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -435,6 +435,7 @@ func (s *sqlDatabase) migrateDB() error { &User{}, &GithubEndpoint{}, &GithubCredentials{}, + &GiteaCredentials{}, &Tag{}, &Pool{}, &Repository{}, diff --git a/database/sql/util.go b/database/sql/util.go index a2531449..0c71261d 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -27,6 +27,7 @@ import ( runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm-provider-common/util" + "github.com/cloudbase/garm/auth" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" ) @@ -155,7 +156,7 @@ func (s *sqlDatabase) sqlToCommonOrganization(org Organization, detailed bool) ( } if detailed { - creds, err := s.sqlToCommonGithubCredentials(org.Credentials) + creds, err := 
s.sqlToCommonForgeCredentials(org.Credentials) if err != nil { return params.Organization{}, errors.Wrap(err, "converting credentials") } @@ -206,7 +207,7 @@ func (s *sqlDatabase) sqlToCommonEnterprise(enterprise Enterprise, detailed bool } if detailed { - creds, err := s.sqlToCommonGithubCredentials(enterprise.Credentials) + creds, err := s.sqlToCommonForgeCredentials(enterprise.Credentials) if err != nil { return params.Enterprise{}, errors.Wrap(err, "converting credentials") } @@ -371,16 +372,28 @@ func (s *sqlDatabase) sqlToCommonRepository(repo Repository, detailed bool) (par Endpoint: endpoint, } + if repo.CredentialsID != nil && repo.GiteaCredentialsID != nil { + return params.Repository{}, runnerErrors.NewConflictError("both gitea and github credentials are set for repo %s", repo.Name) + } + + var forgeCreds params.ForgeCredentials if repo.CredentialsID != nil { ret.CredentialsID = *repo.CredentialsID + forgeCreds, err = s.sqlToCommonForgeCredentials(repo.Credentials) + } + + if repo.GiteaCredentialsID != nil { + ret.CredentialsID = *repo.GiteaCredentialsID + forgeCreds, err = s.sqlGiteaToCommonForgeCredentials(repo.GiteaCredentials) + } + + if err != nil { + return params.Repository{}, errors.Wrap(err, "converting credentials") } if detailed { - creds, err := s.sqlToCommonGithubCredentials(repo.Credentials) - if err != nil { - return params.Repository{}, errors.Wrap(err, "converting credentials") - } - ret.Credentials = creds + ret.Credentials = forgeCreds + ret.CredentialsName = forgeCreds.Name } if ret.PoolBalancerType == "" { @@ -638,7 +651,7 @@ func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, eve return errors.Wrap(err, "updating instance") } - msg := InstanceStatusUpdate{ + msg := RepositoryEvent{ Message: statusMessage, EventType: event, EventLevel: eventLevel, @@ -653,8 +666,8 @@ func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, eve if err != nil { return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") } - var latestEvents []OrganizationEvent - q := s.conn.Model(&OrganizationEvent{}). + var latestEvents []RepositoryEvent + q := s.conn.Model(&RepositoryEvent{}). Limit(maxEvents).Order("id desc"). Where("repo_id = ?", repoID).Find(&latestEvents) if q.Error != nil { @@ -662,7 +675,7 @@ func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, eve } if len(latestEvents) == maxEvents { lastInList := latestEvents[len(latestEvents)-1] - if err := s.conn.Where("repo_id = ? and id < ?", repoID, lastInList.ID).Unscoped().Delete(&OrganizationEvent{}).Error; err != nil { + if err := s.conn.Where("repo_id = ? 
and id < ?", repoID, lastInList.ID).Unscoped().Delete(&RepositoryEvent{}).Error; err != nil { return errors.Wrap(err, "deleting old events") } } @@ -676,7 +689,7 @@ func (s *sqlDatabase) addOrgEvent(ctx context.Context, orgID string, event param return errors.Wrap(err, "updating instance") } - msg := InstanceStatusUpdate{ + msg := OrganizationEvent{ Message: statusMessage, EventType: event, EventLevel: eventLevel, @@ -714,7 +727,7 @@ func (s *sqlDatabase) addEnterpriseEvent(ctx context.Context, entID string, even return errors.Wrap(err, "updating instance") } - msg := InstanceStatusUpdate{ + msg := EnterpriseEvent{ Message: statusMessage, EventType: event, EventLevel: eventLevel, @@ -763,3 +776,135 @@ func (s *sqlDatabase) AddEntityEvent(ctx context.Context, entity params.ForgeEnt return errors.Wrap(runnerErrors.ErrBadRequest, "invalid entity type") } } + +func (s *sqlDatabase) sqlToCommonForgeCredentials(creds GithubCredentials) (params.ForgeCredentials, error) { + if len(creds.Payload) == 0 { + return params.ForgeCredentials{}, errors.New("empty credentials payload") + } + data, err := util.Unseal(creds.Payload, []byte(s.cfg.Passphrase)) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "unsealing credentials") + } + + ep, err := s.sqlToCommonGithubEndpoint(creds.Endpoint) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "converting github endpoint") + } + + commonCreds := params.ForgeCredentials{ + ID: creds.ID, + Name: creds.Name, + Description: creds.Description, + APIBaseURL: creds.Endpoint.APIBaseURL, + BaseURL: creds.Endpoint.BaseURL, + UploadBaseURL: creds.Endpoint.UploadBaseURL, + CABundle: creds.Endpoint.CACertBundle, + AuthType: creds.AuthType, + CreatedAt: creds.CreatedAt, + UpdatedAt: creds.UpdatedAt, + ForgeType: creds.Endpoint.EndpointType, + Endpoint: ep, + CredentialsPayload: data, + } + + for _, repo := range creds.Repositories { + commonRepo, err := s.sqlToCommonRepository(repo, false) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "converting github repository") + } + commonCreds.Repositories = append(commonCreds.Repositories, commonRepo) + } + + for _, org := range creds.Organizations { + commonOrg, err := s.sqlToCommonOrganization(org, false) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "converting github organization") + } + commonCreds.Organizations = append(commonCreds.Organizations, commonOrg) + } + + for _, ent := range creds.Enterprises { + commonEnt, err := s.sqlToCommonEnterprise(ent, false) + if err != nil { + return params.ForgeCredentials{}, errors.Wrapf(err, "converting github enterprise: %s", ent.Name) + } + commonCreds.Enterprises = append(commonCreds.Enterprises, commonEnt) + } + + return commonCreds, nil +} + +func (s *sqlDatabase) sqlGiteaToCommonForgeCredentials(creds GiteaCredentials) (params.ForgeCredentials, error) { + if len(creds.Payload) == 0 { + return params.ForgeCredentials{}, errors.New("empty credentials payload") + } + data, err := util.Unseal(creds.Payload, []byte(s.cfg.Passphrase)) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "unsealing credentials") + } + + ep, err := s.sqlToCommonGithubEndpoint(creds.Endpoint) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "converting github endpoint") + } + + commonCreds := params.ForgeCredentials{ + ID: creds.ID, + Name: creds.Name, + Description: creds.Description, + APIBaseURL: creds.Endpoint.APIBaseURL, + BaseURL: creds.Endpoint.BaseURL, + CABundle: 
creds.Endpoint.CACertBundle, + AuthType: creds.AuthType, + CreatedAt: creds.CreatedAt, + UpdatedAt: creds.UpdatedAt, + ForgeType: creds.Endpoint.EndpointType, + Endpoint: ep, + CredentialsPayload: data, + } + + for _, repo := range creds.Repositories { + commonRepo, err := s.sqlToCommonRepository(repo, false) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "converting github repository") + } + commonCreds.Repositories = append(commonCreds.Repositories, commonRepo) + } + + for _, org := range creds.Organizations { + commonOrg, err := s.sqlToCommonOrganization(org, false) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "converting github organization") + } + commonCreds.Organizations = append(commonCreds.Organizations, commonOrg) + } + + return commonCreds, nil +} + +func (s *sqlDatabase) sqlToCommonGithubEndpoint(ep GithubEndpoint) (params.ForgeEndpoint, error) { + return params.ForgeEndpoint{ + Name: ep.Name, + Description: ep.Description, + APIBaseURL: ep.APIBaseURL, + BaseURL: ep.BaseURL, + UploadBaseURL: ep.UploadBaseURL, + CACertBundle: ep.CACertBundle, + CreatedAt: ep.CreatedAt, + EndpointType: ep.EndpointType, + UpdatedAt: ep.UpdatedAt, + }, nil +} + +func getUIDFromContext(ctx context.Context) (uuid.UUID, error) { + userID := auth.UserID(ctx) + if userID == "" { + return uuid.Nil, errors.Wrap(runnerErrors.ErrUnauthorized, "getting UID from context") + } + + asUUID, err := uuid.Parse(userID) + if err != nil { + return uuid.Nil, errors.Wrap(runnerErrors.ErrUnauthorized, "parsing UID from context") + } + return asUUID, nil +} diff --git a/database/watcher/filters.go b/database/watcher/filters.go index 51820270..c355890b 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -189,7 +189,7 @@ func WithForgeCredentialsFilter(creds params.ForgeCredentials) dbCommon.PayloadF var ok bool switch payload.EntityType { case dbCommon.GithubCredentialsEntityType: - idGetter, ok = payload.Payload.(params.GithubCredentials) + idGetter, ok = payload.Payload.(params.ForgeCredentials) default: return false } diff --git a/database/watcher/watcher_store_test.go b/database/watcher/watcher_store_test.go index 2300ac0a..8791a514 100644 --- a/database/watcher/watcher_store_test.go +++ b/database/watcher/watcher_store_test.go @@ -155,7 +155,7 @@ func (s *WatcherStoreTestSuite) TestInstanceWatcher() { creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) s.T().Cleanup(func() { s.store.DeleteGithubCredentials(s.ctx, creds.ID) }) - repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(repo.ID) s.T().Cleanup(func() { s.store.DeleteRepository(s.ctx, repo.ID) }) @@ -259,7 +259,7 @@ func (s *WatcherStoreTestSuite) TestScaleSetInstanceWatcher() { creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) s.T().Cleanup(func() { s.store.DeleteGithubCredentials(s.ctx, creds.ID) }) - repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(repo.ID) s.T().Cleanup(func() { 
s.store.DeleteRepository(s.ctx, repo.ID) }) @@ -369,7 +369,7 @@ func (s *WatcherStoreTestSuite) TestPoolWatcher() { } }) - repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(repo.ID) s.T().Cleanup(func() { s.store.DeleteRepository(s.ctx, repo.ID) }) @@ -490,7 +490,7 @@ func (s *WatcherStoreTestSuite) TestScaleSetWatcher() { } }) - repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(repo.ID) s.T().Cleanup(func() { s.store.DeleteRepository(s.ctx, repo.ID) }) @@ -780,7 +780,7 @@ func (s *WatcherStoreTestSuite) TestRepoWatcher() { creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) s.T().Cleanup(func() { s.store.DeleteGithubCredentials(s.ctx, creds.ID) }) - repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + repo, err := s.store.CreateRepository(s.ctx, "test-owner", "test-repo", creds, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(repo.ID) @@ -898,7 +898,7 @@ func (s *WatcherStoreTestSuite) TestGithubCredentialsWatcher() { EntityType: common.GithubCredentialsEntityType, Operation: common.DeleteOperation, // We only get the ID and Name of the deleted entity - Payload: params.GithubCredentials{ID: ghCred.ID, Name: ghCred.Name}, + Payload: params.ForgeCredentials{ID: ghCred.ID, Name: ghCred.Name}, }, event) case <-time.After(1 * time.Second): s.T().Fatal("expected payload not received") diff --git a/go.mod b/go.mod index a0b3901f..5070dbfe 100644 --- a/go.mod +++ b/go.mod @@ -29,6 +29,7 @@ require ( github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 golang.org/x/crypto v0.38.0 + golang.org/x/mod v0.17.0 golang.org/x/oauth2 v0.30.0 golang.org/x/sync v0.14.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 diff --git a/go.sum b/go.sum index 3c9af9bb..1cbc5ee0 100644 --- a/go.sum +++ b/go.sum @@ -190,6 +190,8 @@ go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= +golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= diff --git a/internal/testing/testing.go b/internal/testing/testing.go index 6f253267..0fcc1dda 100644 --- a/internal/testing/testing.go +++ b/internal/testing/testing.go @@ -110,7 +110,7 @@ func CreateDefaultGithubEndpoint(ctx context.Context, db common.Store, s *testin return ep } -func CreateTestGithubCredentials(ctx context.Context, credsName string, db common.Store, s *testing.T, endpoint 
params.ForgeEndpoint) params.GithubCredentials { +func CreateTestGithubCredentials(ctx context.Context, credsName string, db common.Store, s *testing.T, endpoint params.ForgeEndpoint) params.ForgeCredentials { newCredsParams := params.CreateGithubCredentialsParams{ Name: credsName, Description: "Test creds", diff --git a/params/params.go b/params/params.go index daa54b61..73afa0f4 100644 --- a/params/params.go +++ b/params/params.go @@ -344,26 +344,30 @@ type Tag struct { type Pool struct { RunnerPrefix - ID string `json:"id,omitempty"` - ProviderName string `json:"provider_name,omitempty"` - MaxRunners uint `json:"max_runners,omitempty"` - MinIdleRunners uint `json:"min_idle_runners,omitempty"` - Image string `json:"image,omitempty"` - Flavor string `json:"flavor,omitempty"` - OSType commonParams.OSType `json:"os_type,omitempty"` - OSArch commonParams.OSArch `json:"os_arch,omitempty"` - Tags []Tag `json:"tags,omitempty"` - Enabled bool `json:"enabled,omitempty"` - Instances []Instance `json:"instances,omitempty"` - RepoID string `json:"repo_id,omitempty"` - RepoName string `json:"repo_name,omitempty"` - OrgID string `json:"org_id,omitempty"` - OrgName string `json:"org_name,omitempty"` - EnterpriseID string `json:"enterprise_id,omitempty"` - EnterpriseName string `json:"enterprise_name,omitempty"` - RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` - CreatedAt time.Time `json:"created_at,omitempty"` - UpdatedAt time.Time `json:"updated_at,omitempty"` + ID string `json:"id,omitempty"` + ProviderName string `json:"provider_name,omitempty"` + MaxRunners uint `json:"max_runners,omitempty"` + MinIdleRunners uint `json:"min_idle_runners,omitempty"` + Image string `json:"image,omitempty"` + Flavor string `json:"flavor,omitempty"` + OSType commonParams.OSType `json:"os_type,omitempty"` + OSArch commonParams.OSArch `json:"os_arch,omitempty"` + Tags []Tag `json:"tags,omitempty"` + Enabled bool `json:"enabled,omitempty"` + Instances []Instance `json:"instances,omitempty"` + + RepoID string `json:"repo_id,omitempty"` + RepoName string `json:"repo_name,omitempty"` + + OrgID string `json:"org_id,omitempty"` + OrgName string `json:"org_name,omitempty"` + + EnterpriseID string `json:"enterprise_id,omitempty"` + EnterpriseName string `json:"enterprise_name,omitempty"` + + RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` + CreatedAt time.Time `json:"created_at,omitempty"` + UpdatedAt time.Time `json:"updated_at,omitempty"` // ExtraSpecs is an opaque raw json that gets sent to the provider // as part of the bootstrap params for instances. It can contain // any kind of data needed by providers. The contents of this field means @@ -586,9 +590,11 @@ type Repository struct { // CredentialName is the name of the credentials associated with the enterprise. // This field is now deprecated. Use CredentialsID instead. This field will be // removed in v0.2.0. 
- CredentialsName string `json:"credentials_name,omitempty"` - CredentialsID uint `json:"credentials_id,omitempty"` - Credentials GithubCredentials `json:"credentials,omitempty"` + CredentialsName string `json:"credentials_name,omitempty"` + + CredentialsID uint `json:"credentials_id,omitempty"` + Credentials ForgeCredentials `json:"credentials,omitempty"` + PoolManagerStatus PoolManagerStatus `json:"pool_manager_status,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` Endpoint ForgeEndpoint `json:"endpoint,omitempty"` @@ -598,6 +604,13 @@ type Repository struct { WebhookSecret string `json:"-"` } +func (r Repository) GetCredentialsName() string { + if r.CredentialsName != "" { + return r.CredentialsName + } + return r.Credentials.Name +} + func (r Repository) CreationDateGetter() time.Time { return r.CreatedAt } @@ -612,13 +625,10 @@ func (r Repository) GetEntity() (ForgeEntity, error) { Owner: r.Owner, Name: r.Name, PoolBalancerType: r.PoolBalancerType, - Credentials: ForgeCredentials{ - ForgeType: GithubEndpointType, - GithubCredentials: r.Credentials, - }, - WebhookSecret: r.WebhookSecret, - CreatedAt: r.CreatedAt, - UpdatedAt: r.UpdatedAt, + Credentials: r.Credentials, + WebhookSecret: r.WebhookSecret, + CreatedAt: r.CreatedAt, + UpdatedAt: r.UpdatedAt, }, nil } @@ -652,7 +662,7 @@ type Organization struct { // This field is now deprecated. Use CredentialsID instead. This field will be // removed in v0.2.0. CredentialsName string `json:"credentials_name,omitempty"` - Credentials GithubCredentials `json:"credentials,omitempty"` + Credentials ForgeCredentials `json:"credentials,omitempty"` CredentialsID uint `json:"credentials_id,omitempty"` PoolManagerStatus PoolManagerStatus `json:"pool_manager_status,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` @@ -677,12 +687,9 @@ func (o Organization) GetEntity() (ForgeEntity, error) { Owner: o.Name, WebhookSecret: o.WebhookSecret, PoolBalancerType: o.PoolBalancerType, - Credentials: ForgeCredentials{ - ForgeType: GithubEndpointType, - GithubCredentials: o.Credentials, - }, - CreatedAt: o.CreatedAt, - UpdatedAt: o.UpdatedAt, + Credentials: o.Credentials, + CreatedAt: o.CreatedAt, + UpdatedAt: o.UpdatedAt, }, nil } @@ -712,7 +719,7 @@ type Enterprise struct { // This field is now deprecated. Use CredentialsID instead. This field will be // removed in v0.2.0. 
CredentialsName string `json:"credentials_name,omitempty"` - Credentials GithubCredentials `json:"credentials,omitempty"` + Credentials ForgeCredentials `json:"credentials,omitempty"` CredentialsID uint `json:"credentials_id,omitempty"` PoolManagerStatus PoolManagerStatus `json:"pool_manager_status,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancing_type,omitempty"` @@ -737,12 +744,9 @@ func (e Enterprise) GetEntity() (ForgeEntity, error) { Owner: e.Name, WebhookSecret: e.WebhookSecret, PoolBalancerType: e.PoolBalancerType, - Credentials: ForgeCredentials{ - ForgeType: GithubEndpointType, - GithubCredentials: e.Credentials, - }, - CreatedAt: e.CreatedAt, - UpdatedAt: e.UpdatedAt, + Credentials: e.Credentials, + CreatedAt: e.CreatedAt, + UpdatedAt: e.UpdatedAt, }, nil } @@ -856,99 +860,6 @@ func (g GithubRateLimit) ResetAt() time.Time { } type ForgeCredentials struct { - ForgeType EndpointType `json:"type,omitempty"` - GithubCredentials GithubCredentials `json:"github,omitempty"` -} - -func (f ForgeCredentials) CABundle() []byte { - switch f.ForgeType { - case GithubEndpointType: - return f.GithubCredentials.CABundle - case GiteaEndpointType: - return nil - default: - return nil - } -} - -func (f ForgeCredentials) Endpoint() ForgeEndpoint { - switch f.ForgeType { - case GithubEndpointType: - return f.GithubCredentials.Endpoint - case GiteaEndpointType: - return ForgeEndpoint{} - default: - return ForgeEndpoint{} - } -} - -func (f ForgeCredentials) APIBaseURL() string { - switch f.ForgeType { - case GithubEndpointType: - return f.GithubCredentials.APIBaseURL - case GiteaEndpointType: - return "" - default: - return "" - } -} - -func (f ForgeCredentials) UploadBaseURL() string { - switch f.ForgeType { - case GithubEndpointType: - return f.GithubCredentials.UploadBaseURL - case GiteaEndpointType: - return "" - default: - return "" - } -} - -func (f ForgeCredentials) BaseURL() string { - switch f.ForgeType { - case GithubEndpointType: - return f.GithubCredentials.BaseURL - case GiteaEndpointType: - return "" - default: - return "" - } -} - -func (f ForgeCredentials) GetHTTPClient(ctx context.Context) (*http.Client, error) { - switch f.ForgeType { - case GithubEndpointType: - return f.GithubCredentials.GetHTTPClient(ctx) - case GiteaEndpointType: - return nil, fmt.Errorf("gitea credentials not supported") - default: - return nil, fmt.Errorf("unknown credentials type") - } -} - -func (f ForgeCredentials) GetID() uint { - switch f.ForgeType { - case GithubEndpointType: - return f.GithubCredentials.ID - case GiteaEndpointType: - return 0 - default: - return 0 - } -} - -func (f ForgeCredentials) RootCertificateBundle() (CertificateBundle, error) { - switch f.ForgeType { - case GithubEndpointType: - return f.GithubCredentials.RootCertificateBundle() - case GiteaEndpointType: - return CertificateBundle{}, fmt.Errorf("gitea credentials not supported") - default: - return CertificateBundle{}, fmt.Errorf("unknown credentials type") - } -} - -type GithubCredentials struct { ID uint `json:"id,omitempty"` Name string `json:"name,omitempty"` Description string `json:"description,omitempty"` @@ -958,30 +869,25 @@ type GithubCredentials struct { CABundle []byte `json:"ca_bundle,omitempty"` AuthType ForgeAuthType `json:"auth-type,omitempty"` - Repositories []Repository `json:"repositories,omitempty"` - Organizations []Organization `json:"organizations,omitempty"` - Enterprises []Enterprise `json:"enterprises,omitempty"` - Endpoint ForgeEndpoint `json:"endpoint,omitempty"` - CreatedAt time.Time 
`json:"created_at,omitempty"` - UpdatedAt time.Time `json:"updated_at,omitempty"` - RateLimit GithubRateLimit `json:"rate_limit,omitempty"` + ForgeType EndpointType `json:"forge_type,omitempty"` + + Repositories []Repository `json:"repositories,omitempty"` + Organizations []Organization `json:"organizations,omitempty"` + Enterprises []Enterprise `json:"enterprises,omitempty"` + Endpoint ForgeEndpoint `json:"endpoint,omitempty"` + CreatedAt time.Time `json:"created_at,omitempty"` + UpdatedAt time.Time `json:"updated_at,omitempty"` + RateLimit *GithubRateLimit `json:"rate_limit,omitempty"` // Do not serialize sensitive info. CredentialsPayload []byte `json:"-"` } -func (g GithubCredentials) GetID() uint { +func (g ForgeCredentials) GetID() uint { return g.ID } -func (g GithubCredentials) GetForgeCredentials() ForgeCredentials { - return ForgeCredentials{ - ForgeType: GithubEndpointType, - GithubCredentials: g, - } -} - -func (g GithubCredentials) GetHTTPClient(ctx context.Context) (*http.Client, error) { +func (g ForgeCredentials) GetHTTPClient(ctx context.Context) (*http.Client, error) { var roots *x509.CertPool if g.CABundle != nil { roots = x509.NewCertPool() @@ -1036,7 +942,7 @@ func (g GithubCredentials) GetHTTPClient(ctx context.Context) (*http.Client, err return tc, nil } -func (g GithubCredentials) RootCertificateBundle() (CertificateBundle, error) { +func (g ForgeCredentials) RootCertificateBundle() (CertificateBundle, error) { if len(g.CABundle) == 0 { return CertificateBundle{}, nil } @@ -1067,7 +973,7 @@ func (g GithubCredentials) RootCertificateBundle() (CertificateBundle, error) { } // used by swagger client generated code -type Credentials []GithubCredentials +type Credentials []ForgeCredentials type Provider struct { Name string `json:"name,omitempty"` @@ -1195,11 +1101,11 @@ func (g ForgeEntity) GetCreatedAt() time.Time { func (g ForgeEntity) ForgeURL() string { switch g.EntityType { case ForgeEntityTypeRepository: - return fmt.Sprintf("%s/%s/%s", g.Credentials.BaseURL(), g.Owner, g.Name) + return fmt.Sprintf("%s/%s/%s", g.Credentials.BaseURL, g.Owner, g.Name) case ForgeEntityTypeOrganization: - return fmt.Sprintf("%s/%s", g.Credentials.BaseURL(), g.Owner) + return fmt.Sprintf("%s/%s", g.Credentials.BaseURL, g.Owner) case ForgeEntityTypeEnterprise: - return fmt.Sprintf("%s/enterprises/%s", g.Credentials.BaseURL(), g.Owner) + return fmt.Sprintf("%s/enterprises/%s", g.Credentials.BaseURL, g.Owner) } return "" } diff --git a/params/requests.go b/params/requests.go index 7ab1fa91..82cbf113 100644 --- a/params/requests.go +++ b/params/requests.go @@ -45,6 +45,16 @@ type CreateRepoParams struct { CredentialsName string `json:"credentials_name,omitempty"` WebhookSecret string `json:"webhook_secret,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancer_type,omitempty"` + ForgeType EndpointType `json:"forge_type,omitempty"` +} + +func (c CreateRepoParams) GetForgeType() EndpointType { + switch c.ForgeType { + case GithubEndpointType, GiteaEndpointType: + return c.ForgeType + default: + return GithubEndpointType + } } func (c *CreateRepoParams) Validate() error { @@ -77,6 +87,16 @@ type CreateOrgParams struct { CredentialsName string `json:"credentials_name,omitempty"` WebhookSecret string `json:"webhook_secret,omitempty"` PoolBalancerType PoolBalancerType `json:"pool_balancer_type,omitempty"` + ForgeType EndpointType `json:"forge_type,omitempty"` +} + +func (c CreateOrgParams) GetForgeType() EndpointType { + switch c.ForgeType { + case GithubEndpointType, 
GiteaEndpointType: + return c.ForgeType + default: + return GithubEndpointType + } } func (c *CreateOrgParams) Validate() error { @@ -281,7 +301,6 @@ type CreateGithubEndpointParams struct { APIBaseURL string `json:"api_base_url,omitempty"` UploadBaseURL string `json:"upload_base_url,omitempty"` BaseURL string `json:"base_url,omitempty"` - EndpointType string `json:"endpoint_type,omitempty"` CACertBundle []byte `json:"ca_cert_bundle,omitempty"` } @@ -290,14 +309,6 @@ func (c CreateGithubEndpointParams) Validate() error { return runnerErrors.NewBadRequestError("missing api_base_url") } - if c.EndpointType != "" { - switch c.EndpointType { - case string(GithubEndpointType), string(GiteaEndpointType): - default: - return runnerErrors.NewBadRequestError("invalid endpoint_type: %s", c.EndpointType) - } - } - url, err := url.Parse(c.APIBaseURL) if err != nil || url.Scheme == "" || url.Host == "" { return runnerErrors.NewBadRequestError("invalid api_base_url") @@ -308,21 +319,19 @@ func (c CreateGithubEndpointParams) Validate() error { return runnerErrors.NewBadRequestError("invalid api_base_url") } - if c.EndpointType == string(GithubEndpointType) { - if c.UploadBaseURL == "" { - return runnerErrors.NewBadRequestError("missing upload_base_url") - } + if c.UploadBaseURL == "" { + return runnerErrors.NewBadRequestError("missing upload_base_url") + } - url, err = url.Parse(c.UploadBaseURL) - if err != nil || url.Scheme == "" || url.Host == "" { - return runnerErrors.NewBadRequestError("invalid upload_base_url") - } + url, err = url.Parse(c.UploadBaseURL) + if err != nil || url.Scheme == "" || url.Host == "" { + return runnerErrors.NewBadRequestError("invalid upload_base_url") + } - switch url.Scheme { - case httpsScheme, httpScheme: - default: - return runnerErrors.NewBadRequestError("invalid api_base_url") - } + switch url.Scheme { + case httpsScheme, httpScheme: + default: + return runnerErrors.NewBadRequestError("invalid api_base_url") } if c.BaseURL == "" { @@ -617,3 +626,154 @@ type UpdateScaleSetParams struct { State *ScaleSetState `json:"state"` ExtendedState *string `json:"extended_state"` } + +type CreateGiteaEndpointParams struct { + Name string `json:"name,omitempty"` + Description string `json:"description,omitempty"` + APIBaseURL string `json:"api_base_url,omitempty"` + BaseURL string `json:"base_url,omitempty"` + CACertBundle []byte `json:"ca_cert_bundle,omitempty"` +} + +func (c CreateGiteaEndpointParams) Validate() error { + if c.APIBaseURL == "" { + return runnerErrors.NewBadRequestError("missing api_base_url") + } + + url, err := url.Parse(c.APIBaseURL) + if err != nil || url.Scheme == "" || url.Host == "" { + return runnerErrors.NewBadRequestError("invalid api_base_url") + } + switch url.Scheme { + case httpsScheme, httpScheme: + default: + return runnerErrors.NewBadRequestError("invalid api_base_url") + } + + switch url.Scheme { + case httpsScheme, httpScheme: + default: + return runnerErrors.NewBadRequestError("invalid api_base_url") + } + + if c.BaseURL == "" { + return runnerErrors.NewBadRequestError("missing base_url") + } + + url, err = url.Parse(c.BaseURL) + if err != nil || url.Scheme == "" || url.Host == "" { + return runnerErrors.NewBadRequestError("invalid base_url") + } + + switch url.Scheme { + case httpsScheme, httpScheme: + default: + return runnerErrors.NewBadRequestError("invalid api_base_url") + } + + if c.CACertBundle != nil { + block, _ := pem.Decode(c.CACertBundle) + if block == nil { + return runnerErrors.NewBadRequestError("invalid ca_cert_bundle") + } + if 
_, err := x509.ParseCertificates(block.Bytes); err != nil { + return runnerErrors.NewBadRequestError("invalid ca_cert_bundle") + } + } + + return nil +} + +type UpdateGiteaEndpointParams struct { + Description *string `json:"description,omitempty"` + APIBaseURL *string `json:"api_base_url,omitempty"` + BaseURL *string `json:"base_url,omitempty"` + CACertBundle []byte `json:"ca_cert_bundle,omitempty"` +} + +func (u UpdateGiteaEndpointParams) Validate() error { + if u.APIBaseURL != nil { + url, err := url.Parse(*u.APIBaseURL) + if err != nil || url.Scheme == "" || url.Host == "" { + return runnerErrors.NewBadRequestError("invalid api_base_url") + } + switch url.Scheme { + case httpsScheme, httpScheme: + default: + return runnerErrors.NewBadRequestError("invalid api_base_url") + } + } + + if u.BaseURL != nil { + url, err := url.Parse(*u.BaseURL) + if err != nil || url.Scheme == "" || url.Host == "" { + return runnerErrors.NewBadRequestError("invalid base_url") + } + switch url.Scheme { + case httpsScheme, httpScheme: + default: + return runnerErrors.NewBadRequestError("invalid api_base_url") + } + } + + if u.CACertBundle != nil { + block, _ := pem.Decode(u.CACertBundle) + if block == nil { + return runnerErrors.NewBadRequestError("invalid ca_cert_bundle") + } + if _, err := x509.ParseCertificates(block.Bytes); err != nil { + return runnerErrors.NewBadRequestError("invalid ca_cert_bundle") + } + } + + return nil +} + +type CreateGiteaCredentialsParams struct { + Name string `json:"name,omitempty"` + Description string `json:"description,omitempty"` + Endpoint string `json:"endpoint,omitempty"` + AuthType ForgeAuthType `json:"auth_type,omitempty"` + PAT GithubPAT `json:"pat,omitempty"` + App GithubApp `json:"app,omitempty"` +} + +func (c CreateGiteaCredentialsParams) Validate() error { + if c.Name == "" { + return runnerErrors.NewBadRequestError("missing name") + } + + if c.Endpoint == "" { + return runnerErrors.NewBadRequestError("missing endpoint") + } + + switch c.AuthType { + case ForgeAuthTypePAT: + default: + return runnerErrors.NewBadRequestError("invalid auth_type: %s", c.AuthType) + } + + if c.AuthType == ForgeAuthTypePAT { + if c.PAT.OAuth2Token == "" { + return runnerErrors.NewBadRequestError("missing oauth2_token") + } + } + + return nil +} + +type UpdateGiteaCredentialsParams struct { + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + PAT *GithubPAT `json:"pat,omitempty"` +} + +func (u UpdateGiteaCredentialsParams) Validate() error { + if u.PAT != nil { + if u.PAT.OAuth2Token == "" { + return runnerErrors.NewBadRequestError("missing oauth2_token") + } + } + + return nil +} diff --git a/runner/common/mocks/GithubClient.go b/runner/common/mocks/GithubClient.go index 6ba39d48..36ef1079 100644 --- a/runner/common/mocks/GithubClient.go +++ b/runner/common/mocks/GithubClient.go @@ -342,7 +342,7 @@ func (_m *GithubClient) ListEntityRunnerApplicationDownloads(ctx context.Context } // ListEntityRunners provides a mock function with given fields: ctx, opts -func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.ListOptions) (*github.Runners, *github.Response, error) { +func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { ret := _m.Called(ctx, opts) if len(ret) == 0 { @@ -352,10 +352,10 @@ func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.List var r0 *github.Runners var r1 *github.Response var r2 error - if rf, ok := 
ret.Get(0).(func(context.Context, *github.ListOptions) (*github.Runners, *github.Response, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, *github.ListRunnersOptions) (*github.Runners, *github.Response, error)); ok { return rf(ctx, opts) } - if rf, ok := ret.Get(0).(func(context.Context, *github.ListOptions) *github.Runners); ok { + if rf, ok := ret.Get(0).(func(context.Context, *github.ListRunnersOptions) *github.Runners); ok { r0 = rf(ctx, opts) } else { if ret.Get(0) != nil { @@ -363,7 +363,7 @@ func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.List } } - if rf, ok := ret.Get(1).(func(context.Context, *github.ListOptions) *github.Response); ok { + if rf, ok := ret.Get(1).(func(context.Context, *github.ListRunnersOptions) *github.Response); ok { r1 = rf(ctx, opts) } else { if ret.Get(1) != nil { @@ -371,7 +371,7 @@ func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.List } } - if rf, ok := ret.Get(2).(func(context.Context, *github.ListOptions) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, *github.ListRunnersOptions) error); ok { r2 = rf(ctx, opts) } else { r2 = ret.Error(2) @@ -410,6 +410,36 @@ func (_m *GithubClient) PingEntityHook(ctx context.Context, id int64) (*github.R return r0, r1 } +// RateLimit provides a mock function with given fields: ctx +func (_m *GithubClient) RateLimit(ctx context.Context) (*github.RateLimits, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for RateLimit") + } + + var r0 *github.RateLimits + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*github.RateLimits, error)); ok { + return rf(ctx) + } + if rf, ok := ret.Get(0).(func(context.Context) *github.RateLimits); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*github.RateLimits) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // RemoveEntityRunner provides a mock function with given fields: ctx, runnerID func (_m *GithubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) error { ret := _m.Called(ctx, runnerID) diff --git a/runner/common/mocks/GithubEntityOperations.go b/runner/common/mocks/GithubEntityOperations.go index 567d4ebc..0aab9943 100644 --- a/runner/common/mocks/GithubEntityOperations.go +++ b/runner/common/mocks/GithubEntityOperations.go @@ -303,7 +303,7 @@ func (_m *GithubEntityOperations) ListEntityRunnerApplicationDownloads(ctx conte } // ListEntityRunners provides a mock function with given fields: ctx, opts -func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *github.ListOptions) (*github.Runners, *github.Response, error) { +func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { ret := _m.Called(ctx, opts) if len(ret) == 0 { @@ -313,10 +313,10 @@ func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *g var r0 *github.Runners var r1 *github.Response var r2 error - if rf, ok := ret.Get(0).(func(context.Context, *github.ListOptions) (*github.Runners, *github.Response, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, *github.ListRunnersOptions) (*github.Runners, *github.Response, error)); ok { return rf(ctx, opts) } - if rf, ok := ret.Get(0).(func(context.Context, *github.ListOptions) *github.Runners); ok { + if rf, ok := ret.Get(0).(func(context.Context, 
*github.ListRunnersOptions) *github.Runners); ok { r0 = rf(ctx, opts) } else { if ret.Get(0) != nil { @@ -324,7 +324,7 @@ func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *g } } - if rf, ok := ret.Get(1).(func(context.Context, *github.ListOptions) *github.Response); ok { + if rf, ok := ret.Get(1).(func(context.Context, *github.ListRunnersOptions) *github.Response); ok { r1 = rf(ctx, opts) } else { if ret.Get(1) != nil { @@ -332,7 +332,7 @@ func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *g } } - if rf, ok := ret.Get(2).(func(context.Context, *github.ListOptions) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, *github.ListRunnersOptions) error); ok { r2 = rf(ctx, opts) } else { r2 = ret.Error(2) @@ -371,6 +371,36 @@ func (_m *GithubEntityOperations) PingEntityHook(ctx context.Context, id int64) return r0, r1 } +// RateLimit provides a mock function with given fields: ctx +func (_m *GithubEntityOperations) RateLimit(ctx context.Context) (*github.RateLimits, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for RateLimit") + } + + var r0 *github.RateLimits + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*github.RateLimits, error)); ok { + return rf(ctx) + } + if rf, ok := ret.Get(0).(func(context.Context) *github.RateLimits); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*github.RateLimits) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // RemoveEntityRunner provides a mock function with given fields: ctx, runnerID func (_m *GithubEntityOperations) RemoveEntityRunner(ctx context.Context, runnerID int64) error { ret := _m.Called(ctx, runnerID) diff --git a/runner/common/mocks/RateLimitClient.go b/runner/common/mocks/RateLimitClient.go new file mode 100644 index 00000000..2c360217 --- /dev/null +++ b/runner/common/mocks/RateLimitClient.go @@ -0,0 +1,59 @@ +// Code generated by mockery v2.53.3. DO NOT EDIT. + +package mocks + +import ( + context "context" + + github "github.com/google/go-github/v71/github" + mock "github.com/stretchr/testify/mock" +) + +// RateLimitClient is an autogenerated mock type for the RateLimitClient type +type RateLimitClient struct { + mock.Mock +} + +// RateLimit provides a mock function with given fields: ctx +func (_m *RateLimitClient) RateLimit(ctx context.Context) (*github.RateLimits, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for RateLimit") + } + + var r0 *github.RateLimits + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*github.RateLimits, error)); ok { + return rf(ctx) + } + if rf, ok := ret.Get(0).(func(context.Context) *github.RateLimits); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*github.RateLimits) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewRateLimitClient creates a new instance of RateLimitClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewRateLimitClient(t interface {
+ mock.TestingT
+ Cleanup(func())
+}) *RateLimitClient {
+ mock := &RateLimitClient{}
+ mock.Mock.Test(t)
+
+ t.Cleanup(func() { mock.AssertExpectations(t) })
+
+ return mock
+}
diff --git a/runner/enterprises_test.go b/runner/enterprises_test.go
index 7e4545d4..5c09ae5d 100644
--- a/runner/enterprises_test.go
+++ b/runner/enterprises_test.go
@@ -39,7 +39,7 @@ type EnterpriseTestFixtures struct {
 Store dbCommon.Store
 StoreEnterprises map[string]params.Enterprise
 Providers map[string]common.Provider
- Credentials map[string]params.GithubCredentials
+ Credentials map[string]params.ForgeCredentials
 CreateEnterpriseParams params.CreateEnterpriseParams
 CreatePoolParams params.CreatePoolParams
 CreateInstanceParams params.CreateInstanceParams
@@ -56,8 +56,8 @@ type EnterpriseTestSuite struct {
 Fixtures *EnterpriseTestFixtures
 Runner *Runner
- testCreds params.GithubCredentials
- secondaryTestCreds params.GithubCredentials
+ testCreds params.ForgeCredentials
+ secondaryTestCreds params.ForgeCredentials
 forgeEndpoint params.ForgeEndpoint
 }
@@ -103,7 +103,7 @@ func (s *EnterpriseTestSuite) SetupTest() {
 Providers: map[string]common.Provider{
 "test-provider": providerMock,
 },
- Credentials: map[string]params.GithubCredentials{
+ Credentials: map[string]params.ForgeCredentials{
 s.testCreds.Name: s.testCreds,
 s.secondaryTestCreds.Name: s.secondaryTestCreds,
 },
diff --git a/runner/gitea_credentials.go b/runner/gitea_credentials.go
new file mode 100644
index 00000000..749f2346
--- /dev/null
+++ b/runner/gitea_credentials.go
@@ -0,0 +1,86 @@
+package runner
+
+import (
+ "context"
+
+ "github.com/pkg/errors"
+
+ runnerErrors "github.com/cloudbase/garm-provider-common/errors"
+ "github.com/cloudbase/garm/auth"
+ "github.com/cloudbase/garm/params"
+)
+
+func (r *Runner) ListGiteaCredentials(ctx context.Context) ([]params.ForgeCredentials, error) {
+ if !auth.IsAdmin(ctx) {
+ return nil, runnerErrors.ErrUnauthorized
+ }
+
+ // Get the credentials from the store. The cache is always updated after the database successfully
+ // commits the transaction that created/updated the credentials.
+ // If we create a set of credentials and then immediately call ListGiteaCredentials,
+ // there is a possibility that not all creds will be in the cache.
+ creds, err := r.store.ListGiteaCredentials(ctx) + if err != nil { + return nil, errors.Wrap(err, "fetching gitea credentials") + } + return creds, nil +} + +func (r *Runner) CreateGiteaCredentials(ctx context.Context, param params.CreateGiteaCredentialsParams) (params.ForgeCredentials, error) { + if !auth.IsAdmin(ctx) { + return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized + } + + if err := param.Validate(); err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "failed to validate gitea credentials params") + } + + creds, err := r.store.CreateGiteaCredentials(ctx, param) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "failed to create gitea credentials") + } + + return creds, nil +} + +func (r *Runner) GetGiteaCredentials(ctx context.Context, id uint) (params.ForgeCredentials, error) { + if !auth.IsAdmin(ctx) { + return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized + } + + creds, err := r.store.GetGiteaCredentials(ctx, id, true) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "failed to get gitea credentials") + } + + return creds, nil +} + +func (r *Runner) DeleteGiteaCredentials(ctx context.Context, id uint) error { + if !auth.IsAdmin(ctx) { + return runnerErrors.ErrUnauthorized + } + + if err := r.store.DeleteGiteaCredentials(ctx, id); err != nil { + return errors.Wrap(err, "failed to delete gitea credentials") + } + + return nil +} + +func (r *Runner) UpdateGiteaCredentials(ctx context.Context, id uint, param params.UpdateGiteaCredentialsParams) (params.ForgeCredentials, error) { + if !auth.IsAdmin(ctx) { + return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized + } + + if err := param.Validate(); err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "failed to validate gitea credentials params") + } + + newCreds, err := r.store.UpdateGiteaCredentials(ctx, id, param) + if err != nil { + return params.ForgeCredentials{}, errors.Wrap(err, "failed to update gitea credentials") + } + + return newCreds, nil +} diff --git a/runner/gitea_endpoints.go b/runner/gitea_endpoints.go new file mode 100644 index 00000000..847dbab9 --- /dev/null +++ b/runner/gitea_endpoints.go @@ -0,0 +1,82 @@ +package runner + +import ( + "context" + + "github.com/pkg/errors" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/params" +) + +func (r *Runner) CreateGiteaEndpoint(ctx context.Context, param params.CreateGiteaEndpointParams) (params.ForgeEndpoint, error) { + if !auth.IsAdmin(ctx) { + return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized + } + + if err := param.Validate(); err != nil { + return params.ForgeEndpoint{}, errors.Wrap(err, "failed to validate gitea endpoint params") + } + + ep, err := r.store.CreateGiteaEndpoint(ctx, param) + if err != nil { + return params.ForgeEndpoint{}, errors.Wrap(err, "failed to create gitea endpoint") + } + + return ep, nil +} + +func (r *Runner) GetGiteaEndpoint(ctx context.Context, name string) (params.ForgeEndpoint, error) { + if !auth.IsAdmin(ctx) { + return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized + } + endpoint, err := r.store.GetGiteaEndpoint(ctx, name) + if err != nil { + return params.ForgeEndpoint{}, errors.Wrap(err, "failed to get gitea endpoint") + } + + return endpoint, nil +} + +func (r *Runner) DeleteGiteaEndpoint(ctx context.Context, name string) error { + if !auth.IsAdmin(ctx) { + return runnerErrors.ErrUnauthorized + } + + err := 
r.store.DeleteGiteaEndpoint(ctx, name) + if err != nil { + return errors.Wrap(err, "failed to delete gitea endpoint") + } + + return nil +} + +func (r *Runner) UpdateGiteaEndpoint(ctx context.Context, name string, param params.UpdateGiteaEndpointParams) (params.ForgeEndpoint, error) { + if !auth.IsAdmin(ctx) { + return params.ForgeEndpoint{}, runnerErrors.ErrUnauthorized + } + + if err := param.Validate(); err != nil { + return params.ForgeEndpoint{}, errors.Wrap(err, "failed to validate gitea endpoint params") + } + + newEp, err := r.store.UpdateGiteaEndpoint(ctx, name, param) + if err != nil { + return params.ForgeEndpoint{}, errors.Wrap(err, "failed to update gitea endpoint") + } + return newEp, nil +} + +func (r *Runner) ListGiteaEndpoints(ctx context.Context) ([]params.ForgeEndpoint, error) { + if !auth.IsAdmin(ctx) { + return nil, runnerErrors.ErrUnauthorized + } + + endpoints, err := r.store.ListGiteaEndpoints(ctx) + if err != nil { + return nil, errors.Wrap(err, "failed to list gitea endpoints") + } + + return endpoints, nil +} diff --git a/runner/github_credentials.go b/runner/github_credentials.go index 7cd4e74c..7c368c99 100644 --- a/runner/github_credentials.go +++ b/runner/github_credentials.go @@ -11,7 +11,7 @@ import ( "github.com/cloudbase/garm/params" ) -func (r *Runner) ListCredentials(ctx context.Context) ([]params.GithubCredentials, error) { +func (r *Runner) ListCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } @@ -37,31 +37,31 @@ func (r *Runner) ListCredentials(ctx context.Context) ([]params.GithubCredential return creds, nil } -func (r *Runner) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.GithubCredentials, error) { +func (r *Runner) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.ForgeCredentials, error) { if !auth.IsAdmin(ctx) { - return params.GithubCredentials{}, runnerErrors.ErrUnauthorized + return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized } if err := param.Validate(); err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "failed to validate github credentials params") + return params.ForgeCredentials{}, errors.Wrap(err, "failed to validate github credentials params") } creds, err := r.store.CreateGithubCredentials(ctx, param) if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "failed to create github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "failed to create github credentials") } return creds, nil } -func (r *Runner) GetGithubCredentials(ctx context.Context, id uint) (params.GithubCredentials, error) { +func (r *Runner) GetGithubCredentials(ctx context.Context, id uint) (params.ForgeCredentials, error) { if !auth.IsAdmin(ctx) { - return params.GithubCredentials{}, runnerErrors.ErrUnauthorized + return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized } creds, err := r.store.GetGithubCredentials(ctx, id, true) if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "failed to get github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "failed to get github credentials") } cached, ok := cache.GetGithubCredentials((creds.ID)) @@ -84,18 +84,18 @@ func (r *Runner) DeleteGithubCredentials(ctx context.Context, id uint) error { return nil } -func (r *Runner) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) 
(params.GithubCredentials, error) { +func (r *Runner) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.ForgeCredentials, error) { if !auth.IsAdmin(ctx) { - return params.GithubCredentials{}, runnerErrors.ErrUnauthorized + return params.ForgeCredentials{}, runnerErrors.ErrUnauthorized } if err := param.Validate(); err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "failed to validate github credentials params") + return params.ForgeCredentials{}, errors.Wrap(err, "failed to validate github credentials params") } newCreds, err := r.store.UpdateGithubCredentials(ctx, id, param) if err != nil { - return params.GithubCredentials{}, errors.Wrap(err, "failed to update github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "failed to update github credentials") } return newCreds, nil diff --git a/runner/organizations_test.go b/runner/organizations_test.go index 3609b941..4d4a26e1 100644 --- a/runner/organizations_test.go +++ b/runner/organizations_test.go @@ -39,7 +39,7 @@ type OrgTestFixtures struct { Store dbCommon.Store StoreOrgs map[string]params.Organization Providers map[string]common.Provider - Credentials map[string]params.GithubCredentials + Credentials map[string]params.ForgeCredentials CreateOrgParams params.CreateOrgParams CreatePoolParams params.CreatePoolParams CreateInstanceParams params.CreateInstanceParams @@ -56,8 +56,8 @@ type OrgTestSuite struct { Fixtures *OrgTestFixtures Runner *Runner - testCreds params.GithubCredentials - secondaryTestCreds params.GithubCredentials + testCreds params.ForgeCredentials + secondaryTestCreds params.ForgeCredentials githubEndpoint params.ForgeEndpoint } @@ -104,7 +104,7 @@ func (s *OrgTestSuite) SetupTest() { Providers: map[string]common.Provider{ "test-provider": providerMock, }, - Credentials: map[string]params.GithubCredentials{ + Credentials: map[string]params.ForgeCredentials{ s.testCreds.Name: s.testCreds, s.secondaryTestCreds.Name: s.secondaryTestCreds, }, diff --git a/runner/pool/pool.go b/runner/pool/pool.go index e24aa69b..68de0ec3 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -83,7 +83,7 @@ func NewEntityPoolManager(ctx context.Context, entity params.ForgeEntity, instan return nil, errors.Wrap(err, "getting controller info") } - consumerID := fmt.Sprintf("pool-manager-%s-%s", entity.String(), entity.Credentials.Endpoint().Name) + consumerID := fmt.Sprintf("pool-manager-%s-%s", entity.String(), entity.Credentials.Endpoint.Name) slog.InfoContext(ctx, "registering consumer", "consumer_id", consumerID) consumer, err := watcher.RegisterConsumer( ctx, consumerID, @@ -887,7 +887,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error Image: pool.Image, ExtraSpecs: pool.ExtraSpecs, PoolID: instance.PoolID, - CACertBundle: r.entity.Credentials.CABundle(), + CACertBundle: r.entity.Credentials.CABundle, GitHubRunnerGroup: instance.GitHubRunnerGroup, JitConfigEnabled: hasJITConfig, } @@ -1366,6 +1366,19 @@ func (r *basePoolManager) deleteInstanceFromProvider(ctx context.Context, instan return nil } +func (r *basePoolManager) sleepWithCancel(sleepTime time.Duration) (canceled bool) { + ticker := time.NewTicker(sleepTime) + defer ticker.Stop() + + select { + case <-ticker.C: + return false + case <-r.quit: + case <-r.ctx.Done(): + } + return true +} + func (r *basePoolManager) deletePendingInstances() error { instances, err := r.store.ListEntityInstances(r.ctx, r.entity) if err != nil { @@ -1414,7 +1427,9 @@ func 
(r *basePoolManager) deletePendingInstances() error { return fmt.Errorf("failed to generate random number: %w", err) } jitter := time.Duration(num.Int64()) * time.Millisecond - time.Sleep(jitter) + if canceled := r.sleepWithCancel(jitter); canceled { + return nil + } currentStatus := instance.Status deleteMux := false diff --git a/runner/pool/watcher.go b/runner/pool/watcher.go index 56427e89..455f4239 100644 --- a/runner/pool/watcher.go +++ b/runner/pool/watcher.go @@ -125,12 +125,12 @@ func (r *basePoolManager) handleWatcherEvent(event common.ChangePayload) { dbEntityType := common.DatabaseEntityType(r.entity.EntityType) switch event.EntityType { case common.GithubCredentialsEntityType: - credentials, ok := event.Payload.(params.GithubCredentials) + credentials, ok := event.Payload.(params.ForgeCredentials) if !ok { slog.ErrorContext(r.ctx, "failed to cast payload to github credentials") return } - r.handleCredentialsUpdate(credentials.GetForgeCredentials()) + r.handleCredentialsUpdate(credentials) case common.ControllerEntityType: controllerInfo, ok := event.Payload.(params.ControllerInfo) if !ok { diff --git a/runner/pools_test.go b/runner/pools_test.go index 95c6b6bd..587addce 100644 --- a/runner/pools_test.go +++ b/runner/pools_test.go @@ -47,8 +47,8 @@ type PoolTestSuite struct { Runner *Runner adminCtx context.Context - testCreds params.GithubCredentials - secondaryTestCreds params.GithubCredentials + testCreds params.ForgeCredentials + secondaryTestCreds params.ForgeCredentials githubEndpoint params.ForgeEndpoint } diff --git a/runner/repositories.go b/runner/repositories.go index 83876c5d..058e1a02 100644 --- a/runner/repositories.go +++ b/runner/repositories.go @@ -38,7 +38,16 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa return params.Repository{}, errors.Wrap(err, "validating params") } - creds, err := r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) + var creds params.ForgeCredentials + switch param.GetForgeType() { + case params.GithubEndpointType: + creds, err = r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) + case params.GiteaEndpointType: + creds, err = r.store.GetGiteaCredentialsByName(ctx, param.CredentialsName, true) + default: + return params.Repository{}, runnerErrors.NewBadRequestError("invalid forge type: %s", param.GetForgeType()) + } + if err != nil { return params.Repository{}, runnerErrors.NewBadRequestError("credentials %s not defined", param.CredentialsName) } @@ -52,7 +61,7 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa return params.Repository{}, runnerErrors.NewConflictError("repository %s/%s already exists", param.Owner, param.Name) } - repo, err = r.store.CreateRepository(ctx, param.Owner, param.Name, creds.Name, param.WebhookSecret, param.PoolBalancerType) + repo, err = r.store.CreateRepository(ctx, param.Owner, param.Name, creds, param.WebhookSecret, param.PoolBalancerType) if err != nil { return params.Repository{}, errors.Wrap(err, "creating repository") } diff --git a/runner/repositories_test.go b/runner/repositories_test.go index 47bfb003..4e891e4b 100644 --- a/runner/repositories_test.go +++ b/runner/repositories_test.go @@ -39,7 +39,7 @@ type RepoTestFixtures struct { Store dbCommon.Store StoreRepos map[string]params.Repository Providers map[string]common.Provider - Credentials map[string]params.GithubCredentials + Credentials map[string]params.ForgeCredentials CreateRepoParams params.CreateRepoParams CreatePoolParams 
params.CreatePoolParams CreateInstanceParams params.CreateInstanceParams @@ -60,8 +60,8 @@ type RepoTestSuite struct { Fixtures *RepoTestFixtures Runner *Runner - testCreds params.GithubCredentials - secondaryTestCreds params.GithubCredentials + testCreds params.ForgeCredentials + secondaryTestCreds params.ForgeCredentials githubEndpoint params.ForgeEndpoint } @@ -86,7 +86,7 @@ func (s *RepoTestSuite) SetupTest() { adminCtx, fmt.Sprintf("test-owner-%v", i), name, - s.testCreds.Name, + s.testCreds, fmt.Sprintf("test-webhook-secret-%v", i), params.PoolBalancerTypeRoundRobin, ) @@ -107,7 +107,7 @@ func (s *RepoTestSuite) SetupTest() { Providers: map[string]common.Provider{ "test-provider": providerMock, }, - Credentials: map[string]params.GithubCredentials{ + Credentials: map[string]params.ForgeCredentials{ s.testCreds.Name: s.testCreds, s.secondaryTestCreds.Name: s.secondaryTestCreds, }, diff --git a/test/integration/client_utils.go b/test/integration/client_utils.go index 977cc11c..c986be2d 100644 --- a/test/integration/client_utils.go +++ b/test/integration/client_utils.go @@ -51,7 +51,7 @@ func listCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfo return listCredentialsResponse.Payload, nil } -func createGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, credentialsParams params.CreateGithubCredentialsParams) (*params.GithubCredentials, error) { +func createGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, credentialsParams params.CreateGithubCredentialsParams) (*params.ForgeCredentials, error) { createCredentialsResponse, err := apiCli.Credentials.CreateCredentials( clientCredentials.NewCreateCredentialsParams().WithBody(credentialsParams), apiAuthToken) @@ -67,7 +67,7 @@ func deleteGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.Client apiAuthToken) } -func updateGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, credentialsID int64, credentialsParams params.UpdateGithubCredentialsParams) (*params.GithubCredentials, error) { +func updateGithubCredentials(apiCli *client.GarmAPI, apiAuthToken runtime.ClientAuthInfoWriter, credentialsID int64, credentialsParams params.UpdateGithubCredentialsParams) (*params.ForgeCredentials, error) { updateCredentialsResponse, err := apiCli.Credentials.UpdateCredentials( clientCredentials.NewUpdateCredentialsParams().WithID(credentialsID).WithBody(credentialsParams), apiAuthToken) diff --git a/test/integration/credentials_test.go b/test/integration/credentials_test.go index f7c9c691..f1594f87 100644 --- a/test/integration/credentials_test.go +++ b/test/integration/credentials_test.go @@ -199,7 +199,7 @@ func (suite *GarmSuite) TestGithubCredentialsFailsOnDuplicateName() { expectAPIStatusCode(err, 409) } -func (suite *GarmSuite) createDummyCredentials(name, endpointName string) (*params.GithubCredentials, error) { +func (suite *GarmSuite) createDummyCredentials(name, endpointName string) (*params.ForgeCredentials, error) { createCredsParams := params.CreateGithubCredentialsParams{ Name: name, Endpoint: endpointName, @@ -212,7 +212,7 @@ func (suite *GarmSuite) createDummyCredentials(name, endpointName string) (*para return suite.CreateGithubCredentials(createCredsParams) } -func (suite *GarmSuite) CreateGithubCredentials(credentialsParams params.CreateGithubCredentialsParams) (*params.GithubCredentials, error) { +func (suite *GarmSuite) CreateGithubCredentials(credentialsParams params.CreateGithubCredentialsParams) 
(*params.ForgeCredentials, error) { t := suite.T() t.Log("Create GitHub credentials") credentials, err := createGithubCredentials(suite.cli, suite.authToken, credentialsParams) diff --git a/util/github/client.go b/util/github/client.go index 1480561a..bcdebc13 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -477,6 +477,12 @@ func (g *githubClient) GetEntityJITConfig(ctx context.Context, instance string, func (g *githubClient) RateLimit(ctx context.Context) (*github.RateLimits, error) { limits, resp, err := g.rateLimit.Get(ctx) + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "GetRateLimit", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } if err := parseError(resp, err); err != nil { return nil, fmt.Errorf("getting rate limit: %w", err) } @@ -491,7 +497,7 @@ func (g *githubClient) GithubBaseURL() *url.URL { return g.cli.BaseURL } -func NewRateLimitClient(ctx context.Context, credentials params.GithubCredentials) (common.RateLimitClient, error) { +func NewRateLimitClient(ctx context.Context, credentials params.ForgeCredentials) (common.RateLimitClient, error) { httpClient, err := credentials.GetHTTPClient(ctx) if err != nil { return nil, errors.Wrap(err, "fetching http client") @@ -515,12 +521,12 @@ func NewRateLimitClient(ctx context.Context, credentials params.GithubCredential return cli, nil } -func withGiteaURLs(client *github.Client, apiBaseURL, uploadBaseURL string) (*github.Client, error) { +func withGiteaURLs(client *github.Client, apiBaseURL string) (*github.Client, error) { if client == nil { return nil, errors.New("client is nil") } - if apiBaseURL == "" || uploadBaseURL == "" { + if apiBaseURL == "" { return nil, errors.New("invalid gitea URLs") } @@ -537,21 +543,8 @@ func withGiteaURLs(client *github.Client, apiBaseURL, uploadBaseURL string) (*gi parsedBaseURL.Path += "api/v1/" } - parsedUploadURL, err := url.ParseRequestURI(uploadBaseURL) - if err != nil { - return nil, errors.Wrap(err, "parsing gitea upload URL") - } - - if !strings.HasSuffix(parsedUploadURL.Path, "/") { - parsedUploadURL.Path += "/" - } - - if !strings.HasSuffix(parsedUploadURL.Path, "/api/v1/") { - parsedUploadURL.Path += "api/v1/" - } - client.BaseURL = parsedBaseURL - client.UploadURL = parsedUploadURL + client.UploadURL = parsedBaseURL return client, nil } @@ -565,15 +558,15 @@ func Client(ctx context.Context, entity params.ForgeEntity) (common.GithubClient slog.DebugContext( ctx, "creating client for entity", - "entity", entity.String(), "base_url", entity.Credentials.APIBaseURL(), - "upload_url", entity.Credentials.UploadBaseURL()) + "entity", entity.String(), "base_url", entity.Credentials.APIBaseURL, + "upload_url", entity.Credentials.UploadBaseURL) ghClient := github.NewClient(httpClient) - switch entity.Credentials.ForgeType { + switch entity.Credentials.Endpoint.EndpointType { case params.GithubEndpointType: - ghClient, err = ghClient.WithEnterpriseURLs(entity.Credentials.APIBaseURL(), entity.Credentials.UploadBaseURL()) + ghClient, err = ghClient.WithEnterpriseURLs(entity.Credentials.APIBaseURL, entity.Credentials.UploadBaseURL) case params.GiteaEndpointType: - ghClient, err = withGiteaURLs(ghClient, entity.Credentials.APIBaseURL(), entity.Credentials.UploadBaseURL()) + ghClient, err = withGiteaURLs(ghClient, entity.Credentials.APIBaseURL) } if err != nil { diff --git a/vendor/golang.org/x/mod/LICENSE b/vendor/golang.org/x/mod/LICENSE new file mode 100644 index 00000000..6a66aea5 --- /dev/null +++ 
b/vendor/golang.org/x/mod/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/mod/PATENTS b/vendor/golang.org/x/mod/PATENTS new file mode 100644 index 00000000..73309904 --- /dev/null +++ b/vendor/golang.org/x/mod/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. + +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/mod/semver/semver.go b/vendor/golang.org/x/mod/semver/semver.go new file mode 100644 index 00000000..9a2dfd33 --- /dev/null +++ b/vendor/golang.org/x/mod/semver/semver.go @@ -0,0 +1,401 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package semver implements comparison of semantic version strings. 
+// In this package, semantic version strings must begin with a leading "v", +// as in "v1.0.0". +// +// The general form of a semantic version string accepted by this package is +// +// vMAJOR[.MINOR[.PATCH[-PRERELEASE][+BUILD]]] +// +// where square brackets indicate optional parts of the syntax; +// MAJOR, MINOR, and PATCH are decimal integers without extra leading zeros; +// PRERELEASE and BUILD are each a series of non-empty dot-separated identifiers +// using only alphanumeric characters and hyphens; and +// all-numeric PRERELEASE identifiers must not have leading zeros. +// +// This package follows Semantic Versioning 2.0.0 (see semver.org) +// with two exceptions. First, it requires the "v" prefix. Second, it recognizes +// vMAJOR and vMAJOR.MINOR (with no prerelease or build suffixes) +// as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0. +package semver + +import "sort" + +// parsed returns the parsed form of a semantic version string. +type parsed struct { + major string + minor string + patch string + short string + prerelease string + build string +} + +// IsValid reports whether v is a valid semantic version string. +func IsValid(v string) bool { + _, ok := parse(v) + return ok +} + +// Canonical returns the canonical formatting of the semantic version v. +// It fills in any missing .MINOR or .PATCH and discards build metadata. +// Two semantic versions compare equal only if their canonical formattings +// are identical strings. +// The canonical invalid semantic version is the empty string. +func Canonical(v string) string { + p, ok := parse(v) + if !ok { + return "" + } + if p.build != "" { + return v[:len(v)-len(p.build)] + } + if p.short != "" { + return v + p.short + } + return v +} + +// Major returns the major version prefix of the semantic version v. +// For example, Major("v2.1.0") == "v2". +// If v is an invalid semantic version string, Major returns the empty string. +func Major(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return v[:1+len(pv.major)] +} + +// MajorMinor returns the major.minor version prefix of the semantic version v. +// For example, MajorMinor("v2.1.0") == "v2.1". +// If v is an invalid semantic version string, MajorMinor returns the empty string. +func MajorMinor(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + i := 1 + len(pv.major) + if j := i + 1 + len(pv.minor); j <= len(v) && v[i] == '.' && v[i+1:j] == pv.minor { + return v[:j] + } + return v[:i] + "." + pv.minor +} + +// Prerelease returns the prerelease suffix of the semantic version v. +// For example, Prerelease("v2.1.0-pre+meta") == "-pre". +// If v is an invalid semantic version string, Prerelease returns the empty string. +func Prerelease(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return pv.prerelease +} + +// Build returns the build suffix of the semantic version v. +// For example, Build("v2.1.0+meta") == "+meta". +// If v is an invalid semantic version string, Build returns the empty string. +func Build(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return pv.build +} + +// Compare returns an integer comparing two versions according to +// semantic version precedence. +// The result will be 0 if v == w, -1 if v < w, or +1 if v > w. +// +// An invalid semantic version string is considered less than a valid one. +// All invalid semantic version strings compare equal to each other. 
+func Compare(v, w string) int { + pv, ok1 := parse(v) + pw, ok2 := parse(w) + if !ok1 && !ok2 { + return 0 + } + if !ok1 { + return -1 + } + if !ok2 { + return +1 + } + if c := compareInt(pv.major, pw.major); c != 0 { + return c + } + if c := compareInt(pv.minor, pw.minor); c != 0 { + return c + } + if c := compareInt(pv.patch, pw.patch); c != 0 { + return c + } + return comparePrerelease(pv.prerelease, pw.prerelease) +} + +// Max canonicalizes its arguments and then returns the version string +// that compares greater. +// +// Deprecated: use [Compare] instead. In most cases, returning a canonicalized +// version is not expected or desired. +func Max(v, w string) string { + v = Canonical(v) + w = Canonical(w) + if Compare(v, w) > 0 { + return v + } + return w +} + +// ByVersion implements [sort.Interface] for sorting semantic version strings. +type ByVersion []string + +func (vs ByVersion) Len() int { return len(vs) } +func (vs ByVersion) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] } +func (vs ByVersion) Less(i, j int) bool { + cmp := Compare(vs[i], vs[j]) + if cmp != 0 { + return cmp < 0 + } + return vs[i] < vs[j] +} + +// Sort sorts a list of semantic version strings using [ByVersion]. +func Sort(list []string) { + sort.Sort(ByVersion(list)) +} + +func parse(v string) (p parsed, ok bool) { + if v == "" || v[0] != 'v' { + return + } + p.major, v, ok = parseInt(v[1:]) + if !ok { + return + } + if v == "" { + p.minor = "0" + p.patch = "0" + p.short = ".0.0" + return + } + if v[0] != '.' { + ok = false + return + } + p.minor, v, ok = parseInt(v[1:]) + if !ok { + return + } + if v == "" { + p.patch = "0" + p.short = ".0" + return + } + if v[0] != '.' { + ok = false + return + } + p.patch, v, ok = parseInt(v[1:]) + if !ok { + return + } + if len(v) > 0 && v[0] == '-' { + p.prerelease, v, ok = parsePrerelease(v) + if !ok { + return + } + } + if len(v) > 0 && v[0] == '+' { + p.build, v, ok = parseBuild(v) + if !ok { + return + } + } + if v != "" { + ok = false + return + } + ok = true + return +} + +func parseInt(v string) (t, rest string, ok bool) { + if v == "" { + return + } + if v[0] < '0' || '9' < v[0] { + return + } + i := 1 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + if v[0] == '0' && i != 1 { + return + } + return v[:i], v[i:], true +} + +func parsePrerelease(v string) (t, rest string, ok bool) { + // "A pre-release version MAY be denoted by appending a hyphen and + // a series of dot separated identifiers immediately following the patch version. + // Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-]. + // Identifiers MUST NOT be empty. Numeric identifiers MUST NOT include leading zeroes." + if v == "" || v[0] != '-' { + return + } + i := 1 + start := 1 + for i < len(v) && v[i] != '+' { + if !isIdentChar(v[i]) && v[i] != '.' { + return + } + if v[i] == '.' { + if start == i || isBadNum(v[start:i]) { + return + } + start = i + 1 + } + i++ + } + if start == i || isBadNum(v[start:i]) { + return + } + return v[:i], v[i:], true +} + +func parseBuild(v string) (t, rest string, ok bool) { + if v == "" || v[0] != '+' { + return + } + i := 1 + start := 1 + for i < len(v) { + if !isIdentChar(v[i]) && v[i] != '.' { + return + } + if v[i] == '.' 
{ + if start == i { + return + } + start = i + 1 + } + i++ + } + if start == i { + return + } + return v[:i], v[i:], true +} + +func isIdentChar(c byte) bool { + return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '-' +} + +func isBadNum(v string) bool { + i := 0 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + return i == len(v) && i > 1 && v[0] == '0' +} + +func isNum(v string) bool { + i := 0 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + return i == len(v) +} + +func compareInt(x, y string) int { + if x == y { + return 0 + } + if len(x) < len(y) { + return -1 + } + if len(x) > len(y) { + return +1 + } + if x < y { + return -1 + } else { + return +1 + } +} + +func comparePrerelease(x, y string) int { + // "When major, minor, and patch are equal, a pre-release version has + // lower precedence than a normal version. + // Example: 1.0.0-alpha < 1.0.0. + // Precedence for two pre-release versions with the same major, minor, + // and patch version MUST be determined by comparing each dot separated + // identifier from left to right until a difference is found as follows: + // identifiers consisting of only digits are compared numerically and + // identifiers with letters or hyphens are compared lexically in ASCII + // sort order. Numeric identifiers always have lower precedence than + // non-numeric identifiers. A larger set of pre-release fields has a + // higher precedence than a smaller set, if all of the preceding + // identifiers are equal. + // Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < + // 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0." + if x == y { + return 0 + } + if x == "" { + return +1 + } + if y == "" { + return -1 + } + for x != "" && y != "" { + x = x[1:] // skip - or . + y = y[1:] // skip - or . + var dx, dy string + dx, x = nextIdent(x) + dy, y = nextIdent(y) + if dx != dy { + ix := isNum(dx) + iy := isNum(dy) + if ix != iy { + if ix { + return -1 + } else { + return +1 + } + } + if ix { + if len(dx) < len(dy) { + return -1 + } + if len(dx) > len(dy) { + return +1 + } + } + if dx < dy { + return -1 + } else { + return +1 + } + } + } + if x == "" { + return -1 + } else { + return +1 + } +} + +func nextIdent(x string) (dx, rest string) { + i := 0 + for i < len(x) && x[i] != '.' 
{ + i++ + } + return x[:i], x[i:] +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 5cb70bb1..dbd42ce3 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -286,6 +286,9 @@ golang.org/x/crypto/chacha20poly1305 golang.org/x/crypto/hkdf golang.org/x/crypto/internal/alias golang.org/x/crypto/internal/poly1305 +# golang.org/x/mod v0.17.0 +## explicit; go 1.18 +golang.org/x/mod/semver # golang.org/x/net v0.40.0 ## explicit; go 1.23.0 golang.org/x/net/internal/socks diff --git a/workers/cache/cache.go b/workers/cache/cache.go index ce23d269..918b3438 100644 --- a/workers/cache/cache.go +++ b/workers/cache/cache.go @@ -138,7 +138,7 @@ func (w *Worker) loadAllInstances() error { return nil } -func (w *Worker) loadAllCredentials() error { +func (w *Worker) loadAllGithubCredentials() error { creds, err := w.store.ListGithubCredentials(w.ctx) if err != nil { return fmt.Errorf("listing github credentials: %w", err) @@ -150,6 +150,18 @@ func (w *Worker) loadAllCredentials() error { return nil } +func (w *Worker) loadAllGiteaCredentials() error { + creds, err := w.store.ListGiteaCredentials(w.ctx) + if err != nil { + return fmt.Errorf("listing gitea credentials: %w", err) + } + + for _, cred := range creds { + cache.SetGiteaCredentials(cred) + } + return nil +} + func (w *Worker) waitForErrorGroupOrContextCancelled(g *errgroup.Group) error { if g == nil { return nil @@ -182,6 +194,20 @@ func (w *Worker) Start() error { g, _ := errgroup.WithContext(w.ctx) + g.Go(func() error { + if err := w.loadAllGithubCredentials(); err != nil { + return fmt.Errorf("loading all github credentials: %w", err) + } + return nil + }) + + g.Go(func() error { + if err := w.loadAllGiteaCredentials(); err != nil { + return fmt.Errorf("loading all gitea credentials: %w", err) + } + return nil + }) + g.Go(func() error { if err := w.loadAllEntities(); err != nil { return fmt.Errorf("loading all entities: %w", err) @@ -196,13 +222,6 @@ func (w *Worker) Start() error { return nil }) - g.Go(func() error { - if err := w.loadAllCredentials(); err != nil { - return fmt.Errorf("loading all credentials: %w", err) - } - return nil - }) - if err := w.waitForErrorGroupOrContextCancelled(g); err != nil { return fmt.Errorf("waiting for error group: %w", err) } @@ -360,7 +379,7 @@ func (w *Worker) handleInstanceEvent(event common.ChangePayload) { } func (w *Worker) handleCredentialsEvent(event common.ChangePayload) { - credentials, ok := event.Payload.(params.GithubCredentials) + credentials, ok := event.Payload.(params.ForgeCredentials) if !ok { slog.DebugContext(w.ctx, "invalid payload type for credentials event", "payload", event.Payload) return diff --git a/workers/cache/gitea_tools.go b/workers/cache/gitea_tools.go new file mode 100644 index 00000000..8b2fc758 --- /dev/null +++ b/workers/cache/gitea_tools.go @@ -0,0 +1,152 @@ +package cache + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + "time" + + commonParams "github.com/cloudbase/garm-provider-common/params" + "golang.org/x/mod/semver" +) + +const ( + // GiteaRunnerReleasesURL is the public API URL that returns a json of all Gitea runner releases. + // By default it returns the last 10 releases, which is enough for our needs. + GiteaRunnerReleasesURL = "https://gitea.com/api/v1/repos/gitea/act_runner/releases" + // GiteaRunnerMinimumVersion is the minimum version we need in order to support ephemeral runners. 
+ GiteaRunnerMinimumVersion = "v0.2.12" +) + +var nightlyActRunner = GiteaEntityTool{ + TagName: "nightly", + Name: "nightly", + TarballURL: "https://gitea.com/gitea/act_runner/archive/main.tar.gz", + Assets: []GiteaToolsAssets{ + { + Name: "act_runner-nightly-linux-amd64.xz", + DownloadURL: "https://dl.gitea.com/act_runner/nightly/act_runner-nightly-linux-amd64.xz", + }, + { + Name: "act_runner-nightly-linux-arm64.xz", + DownloadURL: "https://dl.gitea.com/act_runner/nightly/act_runner-nightly-linux-arm64.xz", + }, + { + Name: "act_runner-nightly-windows-amd64.exe.xz", + DownloadURL: "https://dl.gitea.com/act_runner/nightly/act_runner-nightly-windows-amd64.exe.xz", + }, + }, +} + +type GiteaToolsAssets struct { + ID uint `json:"id"` + Name string `json:"name"` + Size uint `json:"size"` + DownloadCount uint `json:"download_count"` + CreatedAt time.Time `json:"created_at"` + UUID string `json:"uuid"` + DownloadURL string `json:"browser_download_url"` +} + +func (g GiteaToolsAssets) GetOS() *string { + if g.Name == "" { + return nil + } + + parts := strings.SplitN(g.Name, "-", 4) + if len(parts) != 4 { + return nil + } + + os := parts[2] + return &os +} + +func (g GiteaToolsAssets) GetArch() *string { + if g.Name == "" { + return nil + } + + parts := strings.SplitN(g.Name, "-", 4) + if len(parts) != 4 { + return nil + } + + archParts := strings.SplitN(parts[3], ".", 2) + if len(archParts) == 0 { + return nil + } + arch := archParts[0] + return &arch +} + +type GiteaEntityTool struct { + // TagName is the semver version of the release. + TagName string `json:"tag_name"` + Name string `json:"name"` + TarballURL string `json:"tarball_url"` + Assets []GiteaToolsAssets `json:"assets"` +} + +type GiteaEntityTools []GiteaEntityTool + +func (g GiteaEntityTools) GetLatestVersion() string { + if len(g) == 0 { + return "" + } + return g[0].TagName +} + +func (g GiteaEntityTools) MinimumVersion() (GiteaEntityTool, bool) { + if len(g) == 0 { + return GiteaEntityTool{}, false + } + for _, tool := range g { + if semver.Compare(tool.TagName, GiteaRunnerMinimumVersion) >= 0 { + return tool, true + } + } + return GiteaEntityTool{}, false +} + +func getTools() ([]commonParams.RunnerApplicationDownload, error) { + resp, err := http.Get(GiteaRunnerReleasesURL) + if err != nil { + return nil, err + } + defer resp.Body.Close() + data, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + var tools GiteaEntityTools + err = json.Unmarshal(data, &tools) + if err != nil { + return nil, err + } + + if len(tools) == 0 { + return nil, fmt.Errorf("no tools found") + } + + latest, ok := tools.MinimumVersion() + if !ok { + latest = nightlyActRunner + } + + ret := []commonParams.RunnerApplicationDownload{} + + for _, asset := range latest.Assets { + ret = append(ret, commonParams.RunnerApplicationDownload{ + OS: asset.GetOS(), + Architecture: asset.GetArch(), + DownloadURL: &asset.DownloadURL, + Filename: &asset.Name, + }) + } + + return ret, nil +} diff --git a/workers/cache/tool_cache.go b/workers/cache/tool_cache.go index 7d8d5737..d3c74673 100644 --- a/workers/cache/tool_cache.go +++ b/workers/cache/tool_cache.go @@ -49,7 +49,14 @@ func (t *toolsUpdater) Start() error { t.running = true t.quit = make(chan struct{}) - go t.loop() + slog.DebugContext(t.ctx, "starting tools updater", "entity", t.entity.String(), "forge_type", t.entity.Credentials) + + switch t.entity.Credentials.ForgeType { + case params.GithubEndpointType: + go t.loop() + case params.GiteaEndpointType: + go t.giteaUpdateLoop() + } return 
nil } @@ -68,7 +75,7 @@ func (t *toolsUpdater) Stop() error { } func (t *toolsUpdater) updateTools() error { - slog.DebugContext(t.ctx, "updating tools", "entity", t.entity.String()) + slog.DebugContext(t.ctx, "updating tools", "entity", t.entity.String(), "forge_type", t.entity.Credentials.ForgeType) entity, ok := cache.GetEntity(t.entity.ID) if !ok { return fmt.Errorf("getting entity from cache: %s", t.entity.ID) @@ -98,12 +105,69 @@ func (t *toolsUpdater) Reset() { return } + if t.entity.Credentials.ForgeType == params.GiteaEndpointType { + // no need to reset the gitea tools updater when credentials + // are updated. + return + } + if t.reset != nil { close(t.reset) t.reset = nil } } +func (t *toolsUpdater) sleepWithCancel(sleepTime time.Duration) (canceled bool) { + ticker := time.NewTicker(sleepTime) + defer ticker.Stop() + + select { + case <-ticker.C: + return false + case <-t.quit: + case <-t.ctx.Done(): + } + return true +} + +// giteaUpdateLoop updates tools for gitea. The act runner can be downloaded +// without a token, unlike the github tools, which for GHES require a token. +func (t *toolsUpdater) giteaUpdateLoop() { + defer t.Stop() + + // add some jitter. When spinning up multiple entities, we add + // jitter to prevent stampeeding herd. + randInt, err := rand.Int(rand.Reader, big.NewInt(3000)) + if err != nil { + randInt = big.NewInt(0) + } + t.sleepWithCancel(time.Duration(randInt.Int64()) * time.Millisecond) + tools, err := getTools() + if err == nil { + cache.SetGithubToolsCache(t.entity, tools) + } + + // Once every 3 hours should be enough. Tools don't expire. + ticker := time.NewTicker(3 * time.Hour) + + for { + select { + case <-t.quit: + slog.DebugContext(t.ctx, "stopping tools updater") + return + case <-t.ctx.Done(): + return + case <-ticker.C: + tools, err := getTools() + if err != nil { + slog.DebugContext(t.ctx, "failed to update gitea tools", "error", err) + continue + } + cache.SetGithubToolsCache(t.entity, tools) + } + } +} + func (t *toolsUpdater) loop() { defer t.Stop() @@ -113,7 +177,7 @@ func (t *toolsUpdater) loop() { if err != nil { randInt = big.NewInt(0) } - time.Sleep(time.Duration(randInt.Int64()) * time.Millisecond) + t.sleepWithCancel(time.Duration(randInt.Int64()) * time.Millisecond) var resetTime time.Time now := time.Now().UTC() diff --git a/workers/entity/controller.go b/workers/entity/controller.go index db353f0e..2cb910b3 100644 --- a/workers/entity/controller.go +++ b/workers/entity/controller.go @@ -215,6 +215,7 @@ func (c *Controller) Stop() error { c.running = false close(c.quit) c.consumer.Close() + slog.DebugContext(c.ctx, "stopped entity controller", "entity", c.consumerID) return nil } diff --git a/workers/entity/worker.go b/workers/entity/worker.go index 7f0f79e6..597d5797 100644 --- a/workers/entity/worker.go +++ b/workers/entity/worker.go @@ -70,6 +70,7 @@ func (w *Worker) Stop() error { w.running = false close(w.quit) w.consumer.Close() + slog.DebugContext(w.ctx, "entity worker stopped", "entity", w.consumerID) return nil } diff --git a/workers/entity/worker_watcher.go b/workers/entity/worker_watcher.go index 04e20a65..f445b73d 100644 --- a/workers/entity/worker_watcher.go +++ b/workers/entity/worker_watcher.go @@ -63,12 +63,11 @@ func (w *Worker) handleEntityEventPayload(event dbCommon.ChangePayload) { } func (w *Worker) handleEntityCredentialsEventPayload(event dbCommon.ChangePayload) { - var credsGetter params.ForgeCredentialsGetter + var creds params.ForgeCredentials var ok bool - switch event.EntityType { case 
dbCommon.GithubCredentialsEntityType: - credsGetter, ok = event.Payload.(params.GithubCredentials) + creds, ok = event.Payload.(params.ForgeCredentials) default: slog.ErrorContext(w.ctx, "invalid entity type", "entity_type", event.EntityType) return @@ -78,7 +77,7 @@ func (w *Worker) handleEntityCredentialsEventPayload(event dbCommon.ChangePayloa return } - credentials := credsGetter.GetForgeCredentials() + credentials := creds switch event.Operation { case dbCommon.UpdateOperation: diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go index 9ba94553..47e875a0 100644 --- a/workers/provider/instance_manager.go +++ b/workers/provider/instance_manager.go @@ -167,7 +167,7 @@ func (i *instanceManager) handleCreateInstanceInProvider(instance params.Instanc ExtraSpecs: i.scaleSet.ExtraSpecs, // This is temporary. We need to extend providers to know about scale sets. PoolID: i.pseudoPoolID(), - CACertBundle: entity.Credentials.CABundle(), + CACertBundle: entity.Credentials.CABundle, GitHubRunnerGroup: i.scaleSet.GitHubRunnerGroup, JitConfigEnabled: true, } diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index 45dfbfa3..e28eb7ee 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -137,7 +137,7 @@ func (c *Controller) Stop() error { c.running = false close(c.quit) c.consumer.Close() - + slog.DebugContext(c.ctx, "stopped scale set controller", "entity", c.Entity.String()) return nil } diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index 097a8680..1090388d 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -910,7 +910,7 @@ func (w *Worker) handleAutoScale() { } if !hasTools { - time.Sleep(1 * time.Second) + w.sleepWithCancel(1 * time.Second) continue } From 8538a4ae8ad1fbb652671323a82ed3b903b34c44 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 14 May 2025 15:22:27 +0000 Subject: [PATCH 061/179] Make sure websocket hub exits properly Signed-off-by: Gabriel Adrian Samfira --- websocket/client.go | 4 ++-- websocket/websocket.go | 41 ++++++++++++++++++++++++++++++++++------- 2 files changed, 36 insertions(+), 9 deletions(-) diff --git a/websocket/client.go b/websocket/client.go index 5b80ba81..657aa49e 100644 --- a/websocket/client.go +++ b/websocket/client.go @@ -58,8 +58,6 @@ func NewClient(ctx context.Context, conn *websocket.Conn) (*Client, error) { userID: user, passwordGeneration: generation, consumer: consumer, - done: make(chan struct{}), - send: make(chan []byte, 100), }, nil } @@ -116,6 +114,8 @@ func (c *Client) Start() error { defer c.mux.Unlock() c.running = true + c.send = make(chan []byte, 100) + c.done = make(chan struct{}) go c.runWatcher() go c.clientReader() diff --git a/websocket/websocket.go b/websocket/websocket.go index 57820449..14b5e785 100644 --- a/websocket/websocket.go +++ b/websocket/websocket.go @@ -28,14 +28,15 @@ type Hub struct { // Inbound messages from the clients. 
broadcast chan []byte - mux sync.Mutex - once sync.Once + mux sync.Mutex + running bool + once sync.Once } func (h *Hub) run() { - defer func() { - close(h.closed) - }() + defer close(h.closed) + defer h.Stop() + for { select { case <-h.quit: @@ -59,8 +60,7 @@ func (h *Hub) run() { for _, id := range staleClients { if client, ok := h.clients[id]; ok { if client != nil { - client.conn.Close() - close(client.send) + client.Stop() } delete(h.clients, id) } @@ -105,6 +105,13 @@ func (h *Hub) Unregister(client *Client) error { } func (h *Hub) Write(msg []byte) (int, error) { + h.mux.Lock() + if !h.running { + h.mux.Unlock() + return 0, fmt.Errorf("websocket writer is not running") + } + h.mux.Unlock() + tmp := make([]byte, len(msg)) copy(tmp, msg) timer := time.NewTimer(5 * time.Second) @@ -118,6 +125,15 @@ func (h *Hub) Write(msg []byte) (int, error) { } func (h *Hub) Start() error { + h.mux.Lock() + defer h.mux.Unlock() + + if h.running { + return nil + } + + h.running = true + go h.run() return nil } @@ -130,11 +146,22 @@ func (h *Hub) Close() error { } func (h *Hub) Stop() error { + h.mux.Lock() + defer h.mux.Unlock() + + if !h.running { + return nil + } + + h.running = false h.Close() return h.Wait() } func (h *Hub) Wait() error { + if !h.running { + return nil + } timer := time.NewTimer(60 * time.Second) defer timer.Stop() select { From 56be5eb698ae251ef211174529aec662655b4f4d Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 14 May 2025 15:36:53 +0000 Subject: [PATCH 062/179] Do not load scalesets and runners in parallel Both functions read and write to the same map. We should switch to sync.Map Signed-off-by: Gabriel Adrian Samfira --- database/sql/models.go | 6 +++--- workers/provider/provider.go | 24 +++++------------------- workers/provider/util.go | 23 ----------------------- 3 files changed, 8 insertions(+), 45 deletions(-) diff --git a/database/sql/models.go b/database/sql/models.go index 0ff2d8f4..e7fad261 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -171,7 +171,7 @@ type Repository struct { EndpointName *string `gorm:"index:idx_owner_nocase,unique,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` - Events []RepositoryEvent `gorm:"foreignKey:RepoID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` + Events []*RepositoryEvent `gorm:"foreignKey:RepoID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` } type OrganizationEvent struct { @@ -203,7 +203,7 @@ type Organization struct { EndpointName *string `gorm:"index:idx_org_name_nocase,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` - Events []OrganizationEvent `gorm:"foreignKey:OrgID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` + Events []*OrganizationEvent `gorm:"foreignKey:OrgID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` } type EnterpriseEvent struct { @@ -233,7 +233,7 @@ type Enterprise struct { EndpointName *string `gorm:"index:idx_ent_name_nocase,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` - Events []EnterpriseEvent `gorm:"foreignKey:EnterpriseID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` + Events []*EnterpriseEvent `gorm:"foreignKey:EnterpriseID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` } type Address struct { diff --git a/workers/provider/provider.go b/workers/provider/provider.go index ffc5183d..b1ab1220 100644 --- a/workers/provider/provider.go +++ b/workers/provider/provider.go @@ -6,8 +6,6 @@ import ( 
"log/slog" "sync" - "golang.org/x/sync/errgroup" - commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/auth" dbCommon "github.com/cloudbase/garm/database/common" @@ -133,24 +131,12 @@ func (p *Provider) Start() error { return nil } - g, _ := errgroup.WithContext(p.ctx) + if err := p.loadAllScaleSets(); err != nil { + return fmt.Errorf("loading all scale sets: %w", err) + } - g.Go(func() error { - if err := p.loadAllScaleSets(); err != nil { - return fmt.Errorf("loading all scale sets: %w", err) - } - return nil - }) - - g.Go(func() error { - if err := p.loadAllRunners(); err != nil { - return fmt.Errorf("loading all runners: %w", err) - } - return nil - }) - - if err := p.waitForErrorGroupOrContextCancelled(g); err != nil { - return fmt.Errorf("waiting for error group: %w", err) + if err := p.loadAllRunners(); err != nil { + return fmt.Errorf("loading all runners: %w", err) } consumer, err := watcher.RegisterConsumer( diff --git a/workers/provider/util.go b/workers/provider/util.go index ca2626c0..8cd33525 100644 --- a/workers/provider/util.go +++ b/workers/provider/util.go @@ -1,8 +1,6 @@ package provider import ( - "golang.org/x/sync/errgroup" - dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" ) @@ -13,24 +11,3 @@ func composeProviderWatcher() dbCommon.PayloadFilterFunc { watcher.WithEntityTypeFilter(dbCommon.ScaleSetEntityType), ) } - -func (p *Provider) waitForErrorGroupOrContextCancelled(g *errgroup.Group) error { - if g == nil { - return nil - } - - done := make(chan error, 1) - go func() { - waitErr := g.Wait() - done <- waitErr - }() - - select { - case err := <-done: - return err - case <-p.ctx.Done(): - return p.ctx.Err() - case <-p.quit: - return nil - } -} From f66b651b594808fd4b070d5f6088676c68665c69 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 14 May 2025 21:09:02 +0000 Subject: [PATCH 063/179] Fix findEndpointForJob Signed-off-by: Gabriel Adrian Samfira --- apiserver/controllers/controllers.go | 9 +- cache/cache_test.go | 12 +- cache/tools_cache.go | 40 ++++++- params/params.go | 19 ++-- runner/metadata.go | 159 ++++++++++++++++++--------- runner/pool/pool.go | 31 ++++-- runner/pool/util.go | 21 +++- runner/runner.go | 42 ++++--- util/github/client.go | 5 + workers/cache/gitea_tools.go | 47 ++++++-- workers/cache/tool_cache.go | 2 +- workers/provider/instance_manager.go | 6 +- workers/scaleset/scaleset.go | 7 +- 13 files changed, 287 insertions(+), 113 deletions(-) diff --git a/apiserver/controllers/controllers.go b/apiserver/controllers/controllers.go index 3e6413e0..32da79b3 100644 --- a/apiserver/controllers/controllers.go +++ b/apiserver/controllers/controllers.go @@ -107,8 +107,15 @@ func (a *APIController) handleWorkflowJobEvent(ctx context.Context, w http.Respo signature := r.Header.Get("X-Hub-Signature-256") hookType := r.Header.Get("X-Github-Hook-Installation-Target-Type") + giteaTargetType := r.Header.Get("X-Gitea-Hook-Installation-Target-Type") - if err := a.r.DispatchWorkflowJob(hookType, signature, body); err != nil { + forgeType := runnerParams.GithubEndpointType + if giteaTargetType != "" { + forgeType = runnerParams.GiteaEndpointType + hookType = giteaTargetType + } + + if err := a.r.DispatchWorkflowJob(hookType, signature, forgeType, body); err != nil { switch { case errors.Is(err, gErrors.ErrNotFound): metrics.WebhooksReceived.WithLabelValues( diff --git a/cache/cache_test.go b/cache/cache_test.go index 3e7ed559..2ad63420 100644 --- 
a/cache/cache_test.go +++ b/cache/cache_test.go @@ -55,8 +55,8 @@ func (c *CacheTestSuite) TestSetCacheWorks() { c.Require().Len(githubToolsCache.entities, 0) SetGithubToolsCache(c.entity, tools) c.Require().Len(githubToolsCache.entities, 1) - cachedTools, ok := GetGithubToolsCache(c.entity.ID) - c.Require().True(ok) + cachedTools, err := GetGithubToolsCache(c.entity.ID) + c.Require().NoError(err) c.Require().Len(cachedTools, 1) c.Require().Equal(tools[0].GetDownloadURL(), cachedTools[0].GetDownloadURL()) } @@ -76,16 +76,16 @@ func (c *CacheTestSuite) TestTimedOutToolsCache() { entity.updatedAt = entity.updatedAt.Add(-2 * time.Hour) githubToolsCache.entities[c.entity.ID] = entity - cachedTools, ok := GetGithubToolsCache(c.entity.ID) - c.Require().False(ok) + cachedTools, err := GetGithubToolsCache(c.entity.ID) + c.Require().NoError(err) c.Require().Nil(cachedTools) } func (c *CacheTestSuite) TestGetInexistentCache() { c.Require().NotNil(githubToolsCache) c.Require().Len(githubToolsCache.entities, 0) - cachedTools, ok := GetGithubToolsCache(c.entity.ID) - c.Require().False(ok) + cachedTools, err := GetGithubToolsCache(c.entity.ID) + c.Require().NoError(err) c.Require().Nil(cachedTools) } diff --git a/cache/tools_cache.go b/cache/tools_cache.go index 0698c41e..98b58b19 100644 --- a/cache/tools_cache.go +++ b/cache/tools_cache.go @@ -1,6 +1,7 @@ package cache import ( + "fmt" "sync" "time" @@ -20,17 +21,25 @@ func init() { type GithubEntityTools struct { updatedAt time.Time expiresAt time.Time + err error entity params.ForgeEntity tools []commonParams.RunnerApplicationDownload } +func (g GithubEntityTools) Error() string { + if g.err != nil { + return g.err.Error() + } + return "" +} + type GithubToolsCache struct { mux sync.Mutex // entity IDs are UUID4s. It is highly unlikely they will collide (🤞). entities map[string]GithubEntityTools } -func (g *GithubToolsCache) Get(entityID string) ([]commonParams.RunnerApplicationDownload, bool) { +func (g *GithubToolsCache) Get(entityID string) ([]commonParams.RunnerApplicationDownload, error) { g.mux.Lock() defer g.mux.Unlock() @@ -39,12 +48,12 @@ func (g *GithubToolsCache) Get(entityID string) ([]commonParams.RunnerApplicatio if time.Now().UTC().After(cache.expiresAt.Add(-5 * time.Minute)) { // Stale cache, remove it. delete(g.entities, entityID) - return nil, false + return nil, fmt.Errorf("cache expired for entity %s", entityID) } } - return cache.tools, true + return cache.tools, cache.err } - return nil, false + return nil, fmt.Errorf("no cache found for entity %s", entityID) } func (g *GithubToolsCache) Set(entity params.ForgeEntity, tools []commonParams.RunnerApplicationDownload) { @@ -55,6 +64,7 @@ func (g *GithubToolsCache) Set(entity params.ForgeEntity, tools []commonParams.R updatedAt: time.Now(), entity: entity, tools: tools, + err: nil, } if entity.Credentials.ForgeType == params.GithubEndpointType { @@ -64,10 +74,30 @@ func (g *GithubToolsCache) Set(entity params.ForgeEntity, tools []commonParams.R g.entities[entity.ID] = forgeTools } +func (g *GithubToolsCache) SetToolsError(entity params.ForgeEntity, err error) { + g.mux.Lock() + defer g.mux.Unlock() + + // If the entity is not in the cache, add it with the error. + cache, ok := g.entities[entity.ID] + if !ok { + g.entities[entity.ID] = GithubEntityTools{ + updatedAt: time.Now(), + entity: entity, + err: err, + } + return + } + + // Update the error for the existing entity. 
+ cache.err = err + g.entities[entity.ID] = cache +} + func SetGithubToolsCache(entity params.ForgeEntity, tools []commonParams.RunnerApplicationDownload) { githubToolsCache.Set(entity, tools) } -func GetGithubToolsCache(entityID string) ([]commonParams.RunnerApplicationDownload, bool) { +func GetGithubToolsCache(entityID string) ([]commonParams.RunnerApplicationDownload, error) { return githubToolsCache.Get(entityID) } diff --git a/params/params.go b/params/params.go index 73afa0f4..052b2c8b 100644 --- a/params/params.go +++ b/params/params.go @@ -1099,13 +1099,18 @@ func (g ForgeEntity) GetCreatedAt() time.Time { } func (g ForgeEntity) ForgeURL() string { - switch g.EntityType { - case ForgeEntityTypeRepository: - return fmt.Sprintf("%s/%s/%s", g.Credentials.BaseURL, g.Owner, g.Name) - case ForgeEntityTypeOrganization: - return fmt.Sprintf("%s/%s", g.Credentials.BaseURL, g.Owner) - case ForgeEntityTypeEnterprise: - return fmt.Sprintf("%s/enterprises/%s", g.Credentials.BaseURL, g.Owner) + switch g.Credentials.ForgeType { + case GiteaEndpointType: + return g.Credentials.Endpoint.APIBaseURL + default: + switch g.EntityType { + case ForgeEntityTypeRepository: + return fmt.Sprintf("%s/%s/%s", g.Credentials.BaseURL, g.Owner, g.Name) + case ForgeEntityTypeOrganization: + return fmt.Sprintf("%s/%s", g.Credentials.BaseURL, g.Owner) + case ForgeEntityTypeEnterprise: + return fmt.Sprintf("%s/enterprises/%s", g.Credentials.BaseURL, g.Owner) + } } return "" } diff --git a/runner/metadata.go b/runner/metadata.go index 8a9c8469..3df7966a 100644 --- a/runner/metadata.go +++ b/runner/metadata.go @@ -16,7 +16,7 @@ import ( "github.com/cloudbase/garm/params" ) -var systemdUnitTemplate = `[Unit] +var githubSystemdUnitTemplate = `[Unit] Description=GitHub Actions Runner ({{.ServiceName}}) After=network.target @@ -32,11 +32,24 @@ TimeoutStopSec=5min WantedBy=multi-user.target ` -func validateInstanceState(ctx context.Context) (params.Instance, error) { - if !auth.InstanceHasJITConfig(ctx) { - return params.Instance{}, fmt.Errorf("instance not configured for JIT: %w", runnerErrors.ErrNotFound) - } +var giteaSystemdUnitTemplate = `[Unit] +Description=Act Runner ({{.ServiceName}}) +After=network.target +[Service] +ExecStart=/home/{{.RunAsUser}}/act-runner/act_runner daemon --once +User={{.RunAsUser}} +WorkingDirectory=/home/{{.RunAsUser}}/act-runner +KillMode=process +KillSignal=SIGTERM +TimeoutStopSec=5min +Restart=always + +[Install] +WantedBy=multi-user.target +` + +func validateInstanceState(ctx context.Context) (params.Instance, error) { status := auth.InstanceRunnerStatus(ctx) if status != params.RunnerPending && status != params.RunnerInstalling { return params.Instance{}, runnerErrors.ErrUnauthorized @@ -49,6 +62,56 @@ func validateInstanceState(ctx context.Context) (params.Instance, error) { return instance, nil } +func (r *Runner) getForgeEntityFromInstance(ctx context.Context, instance params.Instance) (params.ForgeEntity, error) { + var entityGetter params.EntityGetter + var err error + switch { + case instance.PoolID != "": + entityGetter, err = r.store.GetPoolByID(r.ctx, instance.PoolID) + case instance.ScaleSetID != 0: + entityGetter, err = r.store.GetScaleSetByID(r.ctx, instance.ScaleSetID) + default: + return params.ForgeEntity{}, errors.New("instance not associated with a pool or scale set") + } + + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext( + ctx, "failed to get entity getter", + "instance", instance.Name) + return params.ForgeEntity{}, errors.Wrap(err, "fetching entity 
getter") + } + + poolEntity, err := entityGetter.GetEntity() + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext( + ctx, "failed to get entity", + "instance", instance.Name) + return params.ForgeEntity{}, errors.Wrap(err, "fetching entity") + } + + entity, err := r.store.GetForgeEntity(r.ctx, poolEntity.EntityType, poolEntity.ID) + if err != nil { + slog.With(slog.Any("error", err)).ErrorContext( + ctx, "failed to get entity", + "instance", instance.Name) + return params.ForgeEntity{}, errors.Wrap(err, "fetching entity") + } + return entity, nil +} + +func (r *Runner) getServiceNameForEntity(entity params.ForgeEntity) (string, error) { + switch entity.EntityType { + case params.ForgeEntityTypeEnterprise: + return fmt.Sprintf("actions.runner.%s.%s", entity.Owner, entity.Name), nil + case params.ForgeEntityTypeOrganization: + return fmt.Sprintf("actions.runner.%s.%s", entity.Owner, entity.Name), nil + case params.ForgeEntityTypeRepository: + return fmt.Sprintf("actions.runner.%s-%s.%s", entity.Owner, entity.Name, entity.Name), nil + default: + return "", errors.New("unknown entity type") + } +} + func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { instance, err := validateInstanceState(ctx) if err != nil { @@ -56,64 +119,51 @@ func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { ctx, "failed to get instance params") return "", runnerErrors.ErrUnauthorized } - var entity params.ForgeEntity - - switch { - case instance.PoolID != "": - pool, err := r.store.GetPoolByID(r.ctx, instance.PoolID) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext( - ctx, "failed to get pool", - "pool_id", instance.PoolID) - return "", errors.Wrap(err, "fetching pool") - } - entity, err = pool.GetEntity() - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext( - ctx, "failed to get pool entity", - "pool_id", instance.PoolID) - return "", errors.Wrap(err, "fetching pool entity") - } - case instance.ScaleSetID != 0: - scaleSet, err := r.store.GetScaleSetByID(r.ctx, instance.ScaleSetID) - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext( - ctx, "failed to get scale set", - "scale_set_id", instance.ScaleSetID) - return "", errors.Wrap(err, "fetching scale set") - } - entity, err = scaleSet.GetEntity() - if err != nil { - slog.With(slog.Any("error", err)).ErrorContext( - ctx, "failed to get scale set entity", - "scale_set_id", instance.ScaleSetID) - return "", errors.Wrap(err, "fetching scale set entity") - } - default: - return "", errors.New("instance not associated with a pool or scale set") + entity, err := r.getForgeEntityFromInstance(ctx, instance) + if err != nil { + slog.ErrorContext(r.ctx, "failed to get entity", "error", err) + return "", errors.Wrap(err, "fetching entity") } - tpl := "actions.runner.%s.%s" - var serviceName string - switch entity.EntityType { - case params.ForgeEntityTypeEnterprise: - serviceName = fmt.Sprintf(tpl, entity.Owner, instance.Name) - case params.ForgeEntityTypeOrganization: - serviceName = fmt.Sprintf(tpl, entity.Owner, instance.Name) - case params.ForgeEntityTypeRepository: - serviceName = fmt.Sprintf(tpl, fmt.Sprintf("%s-%s", entity.Owner, entity.Name), instance.Name) + serviceName, err := r.getServiceNameForEntity(entity) + if err != nil { + slog.ErrorContext(r.ctx, "failed to get service name", "error", err) + return "", errors.Wrap(err, "fetching service name") } return serviceName, nil } func (r *Runner) GenerateSystemdUnitFile(ctx context.Context, runAsUser string) 
([]byte, error) { - serviceName, err := r.GetRunnerServiceName(ctx) + instance, err := validateInstanceState(ctx) if err != nil { - return nil, errors.Wrap(err, "fetching runner service name") + slog.With(slog.Any("error", err)).ErrorContext( + ctx, "failed to get instance params") + return nil, runnerErrors.ErrUnauthorized + } + entity, err := r.getForgeEntityFromInstance(ctx, instance) + if err != nil { + slog.ErrorContext(r.ctx, "failed to get entity", "error", err) + return nil, errors.Wrap(err, "fetching entity") } - unitTemplate, err := template.New("").Parse(systemdUnitTemplate) + serviceName, err := r.getServiceNameForEntity(entity) if err != nil { + slog.ErrorContext(r.ctx, "failed to get service name", "error", err) + return nil, errors.Wrap(err, "fetching service name") + } + + var unitTemplate *template.Template + switch entity.Credentials.ForgeType { + case params.GithubEndpointType: + unitTemplate, err = template.New("").Parse(githubSystemdUnitTemplate) + case params.GiteaEndpointType: + unitTemplate, err = template.New("").Parse(giteaSystemdUnitTemplate) + default: + slog.ErrorContext(r.ctx, "unknown forge type", "forge_type", entity.Credentials.ForgeType) + return nil, errors.New("unknown forge type") + } + if err != nil { + slog.ErrorContext(r.ctx, "failed to parse template", "error", err) return nil, errors.Wrap(err, "parsing template") } @@ -131,12 +181,17 @@ func (r *Runner) GenerateSystemdUnitFile(ctx context.Context, runAsUser string) var unitFile bytes.Buffer if err := unitTemplate.Execute(&unitFile, data); err != nil { + slog.ErrorContext(r.ctx, "failed to execute template", "error", err) return nil, errors.Wrap(err, "executing template") } return unitFile.Bytes(), nil } func (r *Runner) GetJITConfigFile(ctx context.Context, file string) ([]byte, error) { + if !auth.InstanceHasJITConfig(ctx) { + return nil, fmt.Errorf("instance not configured for JIT: %w", runnerErrors.ErrNotFound) + } + instance, err := validateInstanceState(ctx) if err != nil { slog.With(slog.Any("error", err)).ErrorContext( diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 68de0ec3..8b02b593 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -47,15 +47,15 @@ import ( ) var ( - poolIDLabelprefix = "runner-pool-id:" - controllerLabelPrefix = "runner-controller-id:" + poolIDLabelprefix = "runner-pool-id" + controllerLabelPrefix = "runner-controller-id" // We tag runners that have been spawned as a result of a queued job with the job ID // that spawned them. There is no way to guarantee that the runner spawned in response to a particular // job, will be picked up by that job. We mark them so as in the very likely event that the runner // has picked up a different job, we can clear the lock on the job that spaned it. // The job it picked up would already be transitioned to in_progress so it will be ignored by the // consume loop. 
- jobLabelPrefix = "in_response_to_job:" + jobLabelPrefix = "in_response_to_job" ) const ( @@ -296,7 +296,8 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { func jobIDFromLabels(labels []string) int64 { for _, lbl := range labels { if strings.HasPrefix(lbl, jobLabelPrefix) { - jobID, err := strconv.ParseInt(lbl[len(jobLabelPrefix):], 10, 64) + trimLength := min(len(jobLabelPrefix)+1, len(lbl)) + jobID, err := strconv.ParseInt(lbl[trimLength:], 10, 64) if err != nil { return 0 } @@ -361,21 +362,21 @@ func (r *basePoolManager) startLoopForFunction(f func() error, interval time.Dur } func (r *basePoolManager) updateTools() error { - // Update tools cache. - tools, err := r.FetchTools() + tools, err := cache.GetGithubToolsCache(r.entity.ID) if err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update tools for entity", "entity", r.entity.String()) r.SetPoolRunningState(false, err.Error()) return fmt.Errorf("failed to update tools for entity %s: %w", r.entity.String(), err) } + r.mux.Lock() r.tools = tools r.mux.Unlock() slog.DebugContext(r.ctx, "successfully updated tools") r.SetPoolRunningState(true, "") - return err + return nil } // cleanupOrphanedProviderRunners compares runners in github with local runners and removes @@ -995,11 +996,11 @@ func (r *basePoolManager) paramsWorkflowJobToParamsJob(job params.WorkflowJob) ( } func (r *basePoolManager) poolLabel(poolID string) string { - return fmt.Sprintf("%s%s", poolIDLabelprefix, poolID) + return fmt.Sprintf("%s=%s", poolIDLabelprefix, poolID) } func (r *basePoolManager) controllerLabel() string { - return fmt.Sprintf("%s%s", controllerLabelPrefix, r.controllerInfo.ControllerID.String()) + return fmt.Sprintf("%s=%s", controllerLabelPrefix, r.controllerInfo.ControllerID.String()) } func (r *basePoolManager) updateArgsFromProviderInstance(providerInstance commonParams.ProviderInstance) params.UpdateInstanceParams { @@ -1613,6 +1614,16 @@ func (r *basePoolManager) Start() error { initialToolUpdate := make(chan struct{}, 1) go func() { slog.Info("running initial tool update") + for { + slog.DebugContext(r.ctx, "waiting for tools to be available") + hasTools, stopped := r.waitForToolsOrCancel() + if stopped { + return + } + if hasTools { + break + } + } if err := r.updateTools(); err != nil { slog.With(slog.Any("error", err)).Error("failed to update tools") } @@ -1804,7 +1815,7 @@ func (r *basePoolManager) consumeQueuedJobs() error { } jobLabels := []string{ - fmt.Sprintf("%s%d", jobLabelPrefix, job.ID), + fmt.Sprintf("%s=%d", jobLabelPrefix, job.ID), } for i := 0; i < poolRR.Len(); i++ { pool, err := poolRR.Next() diff --git a/runner/pool/util.go b/runner/pool/util.go index 25fdc73f..4c4bf5b1 100644 --- a/runner/pool/util.go +++ b/runner/pool/util.go @@ -5,11 +5,13 @@ import ( "strings" "sync" "sync/atomic" + "time" "github.com/google/go-github/v71/github" runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" + "github.com/cloudbase/garm/cache" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" "github.com/cloudbase/garm/params" @@ -91,7 +93,8 @@ func instanceInList(instanceName string, instances []commonParams.ProviderInstan func controllerIDFromLabels(labels []string) string { for _, lbl := range labels { if strings.HasPrefix(lbl, controllerLabelPrefix) { - return lbl[len(controllerLabelPrefix):] + trimLength := min(len(controllerLabelPrefix)+1, len(lbl)) + return 
lbl[trimLength:] } } return "" @@ -134,3 +137,19 @@ func composeWatcherFilters(entity params.ForgeEntity) dbCommon.PayloadFilterFunc watcher.WithForgeCredentialsFilter(entity.Credentials), ) } + +func (r *basePoolManager) waitForToolsOrCancel() (hasTools, stopped bool) { + ticker := time.NewTicker(1 * time.Second) + defer ticker.Stop() + select { + case <-ticker.C: + if _, err := cache.GetGithubToolsCache(r.entity.ID); err != nil { + return false, false + } + return true, false + case <-r.quit: + return false, true + case <-r.ctx.Done(): + return false, true + } +} diff --git a/runner/runner.go b/runner/runner.go index 6d5bc5eb..e02ee698 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -602,7 +602,7 @@ func (r *Runner) validateHookBody(signature, secret string, body []byte) error { return nil } -func (r *Runner) findEndpointForJob(job params.WorkflowJob) (params.ForgeEndpoint, error) { +func (r *Runner) findEndpointForJob(job params.WorkflowJob, forgeType params.EndpointType) (params.ForgeEndpoint, error) { uri, err := url.ParseRequestURI(job.WorkflowJob.HTMLURL) if err != nil { return params.ForgeEndpoint{}, errors.Wrap(err, "parsing job URL") @@ -614,12 +614,23 @@ func (r *Runner) findEndpointForJob(job params.WorkflowJob) (params.ForgeEndpoin // a GHES involved, those users will have just one extra endpoint or 2 (if they also have a // test env). But there should be a relatively small number, regardless. So we don't really care // that much about the performance of this function. - endpoints, err := r.store.ListGithubEndpoints(r.ctx) + var endpoints []params.ForgeEndpoint + switch forgeType { + case params.GithubEndpointType: + endpoints, err = r.store.ListGithubEndpoints(r.ctx) + case params.GiteaEndpointType: + endpoints, err = r.store.ListGiteaEndpoints(r.ctx) + default: + return params.ForgeEndpoint{}, runnerErrors.NewBadRequestError("unknown forge type %s", forgeType) + } + if err != nil { return params.ForgeEndpoint{}, errors.Wrap(err, "fetching github endpoints") } for _, ep := range endpoints { - if ep.BaseURL == baseURI { + slog.DebugContext(r.ctx, "checking endpoint", "base_uri", baseURI, "endpoint", ep.BaseURL) + epBaseURI := strings.TrimSuffix(ep.BaseURL, "/") + if epBaseURI == baseURI { return ep, nil } } @@ -627,18 +638,21 @@ func (r *Runner) findEndpointForJob(job params.WorkflowJob) (params.ForgeEndpoin return params.ForgeEndpoint{}, runnerErrors.NewNotFoundError("no endpoint found for job") } -func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, jobData []byte) error { +func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, forgeType params.EndpointType, jobData []byte) error { if len(jobData) == 0 { + slog.ErrorContext(r.ctx, "missing job data") return runnerErrors.NewBadRequestError("missing job data") } var job params.WorkflowJob if err := json.Unmarshal(jobData, &job); err != nil { + slog.ErrorContext(r.ctx, "failed to unmarshal job data", "error", err) return errors.Wrapf(runnerErrors.ErrBadRequest, "invalid job data: %s", err) } - endpoint, err := r.findEndpointForJob(job) + endpoint, err := r.findEndpointForJob(job, forgeType) if err != nil { + slog.ErrorContext(r.ctx, "failed to find endpoint for job", "error", err) return errors.Wrap(err, "finding endpoint for job") } @@ -867,15 +881,17 @@ func (r *Runner) DeleteRunner(ctx context.Context, instanceName string, forceDel } if err != nil { - if errors.Is(err, runnerErrors.ErrUnauthorized) && instance.PoolID != "" { - poolMgr, err := r.getPoolManagerFromInstance(ctx, 
instance) - if err != nil { - return errors.Wrap(err, "fetching pool manager for instance") + if !errors.Is(err, runnerErrors.ErrNotFound) { + if errors.Is(err, runnerErrors.ErrUnauthorized) && instance.PoolID != "" { + poolMgr, err := r.getPoolManagerFromInstance(ctx, instance) + if err != nil { + return errors.Wrap(err, "fetching pool manager for instance") + } + poolMgr.SetPoolRunningState(false, fmt.Sprintf("failed to remove runner: %q", err)) + } + if !bypassGithubUnauthorized { + return errors.Wrap(err, "removing runner from github") } - poolMgr.SetPoolRunningState(false, fmt.Sprintf("failed to remove runner: %q", err)) - } - if !bypassGithubUnauthorized { - return errors.Wrap(err, "removing runner from github") } } } diff --git a/util/github/client.go b/util/github/client.go index bcdebc13..a46e4ab7 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -229,6 +229,7 @@ func (g *githubClient) ListEntityRunnerApplicationDownloads(ctx context.Context) } func parseError(response *github.Response, err error) error { + slog.Debug("parsing error", "status_code", response.StatusCode, "response", response, "error", err) switch response.StatusCode { case http.StatusNotFound: return runnerErrors.ErrNotFound @@ -251,6 +252,10 @@ func parseError(response *github.Response, err error) error { case http.StatusUnprocessableEntity: return runnerErrors.ErrBadRequest default: + // ugly hack. Gitea returns 500 if we try to remove a runner that does not exist. + if strings.Contains(err.Error(), "does not exist") { + return runnerErrors.ErrNotFound + } return err } } diff --git a/workers/cache/gitea_tools.go b/workers/cache/gitea_tools.go index 8b2fc758..9d6b2307 100644 --- a/workers/cache/gitea_tools.go +++ b/workers/cache/gitea_tools.go @@ -20,6 +20,18 @@ const ( GiteaRunnerMinimumVersion = "v0.2.12" ) +var ( + githubArchMapping map[string]string = map[string]string{ + "x86_64": "x64", + "amd64": "x64", + "armv7l": "arm", + "aarch64": "arm64", + "x64": "x64", + "arm": "arm", + "arm64": "arm64", + } +) + var nightlyActRunner = GiteaEntityTool{ TagName: "nightly", Name: "nightly", @@ -50,36 +62,39 @@ type GiteaToolsAssets struct { DownloadURL string `json:"browser_download_url"` } -func (g GiteaToolsAssets) GetOS() *string { +func (g GiteaToolsAssets) GetOS() (*string, error) { if g.Name == "" { - return nil + return nil, fmt.Errorf("gitea tools name is empty") } parts := strings.SplitN(g.Name, "-", 4) if len(parts) != 4 { - return nil + return nil, fmt.Errorf("could not parse asset name") } os := parts[2] - return &os + return &os, nil } -func (g GiteaToolsAssets) GetArch() *string { +func (g GiteaToolsAssets) GetArch() (*string, error) { if g.Name == "" { - return nil + return nil, fmt.Errorf("gitea tools name is empty") } parts := strings.SplitN(g.Name, "-", 4) if len(parts) != 4 { - return nil + return nil, fmt.Errorf("could not parse asset name") } archParts := strings.SplitN(parts[3], ".", 2) if len(archParts) == 0 { - return nil + return nil, fmt.Errorf("unexpected asset name format") } - arch := archParts[0] - return &arch + arch := githubArchMapping[archParts[0]] + if arch == "" { + return nil, fmt.Errorf("could not find arch for %s", archParts[0]) + } + return &arch, nil } type GiteaEntityTool struct { @@ -140,9 +155,17 @@ func getTools() ([]commonParams.RunnerApplicationDownload, error) { ret := []commonParams.RunnerApplicationDownload{} for _, asset := range latest.Assets { + arch, err := asset.GetArch() + if err != nil { + return nil, fmt.Errorf("getting arch: %w", err) + } + os, 
err := asset.GetOS() + if err != nil { + return nil, fmt.Errorf("getting os: %w", err) + } ret = append(ret, commonParams.RunnerApplicationDownload{ - OS: asset.GetOS(), - Architecture: asset.GetArch(), + OS: os, + Architecture: arch, DownloadURL: &asset.DownloadURL, Filename: &asset.Name, }) diff --git a/workers/cache/tool_cache.go b/workers/cache/tool_cache.go index d3c74673..941131d7 100644 --- a/workers/cache/tool_cache.go +++ b/workers/cache/tool_cache.go @@ -49,7 +49,7 @@ func (t *toolsUpdater) Start() error { t.running = true t.quit = make(chan struct{}) - slog.DebugContext(t.ctx, "starting tools updater", "entity", t.entity.String(), "forge_type", t.entity.Credentials) + slog.DebugContext(t.ctx, "starting tools updater", "entity", t.entity.String(), "forge_type", t.entity.Credentials.ForgeType) switch t.entity.Credentials.ForgeType { case params.GithubEndpointType: diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go index 47e875a0..d0e61b72 100644 --- a/workers/provider/instance_manager.go +++ b/workers/provider/instance_manager.go @@ -148,9 +148,9 @@ func (i *instanceManager) handleCreateInstanceInProvider(instance params.Instanc if err != nil { return fmt.Errorf("creating instance token: %w", err) } - tools, ok := cache.GetGithubToolsCache(entity.ID) - if !ok { - return fmt.Errorf("tools not found in cache for entity %s", entity.String()) + tools, err := cache.GetGithubToolsCache(entity.ID) + if err != nil { + return fmt.Errorf("tools not found in cache for entity %s: %w", entity.String(), err) } bootstrapArgs := commonParams.BootstrapInstance{ diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index 1090388d..b3bfe332 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -776,8 +776,11 @@ func (w *Worker) waitForToolsOrCancel() (hasTools, stopped bool) { if err != nil { slog.ErrorContext(w.ctx, "error getting entity", "error", err) } - _, ok := cache.GetGithubToolsCache(entity.ID) - return ok, false + if _, err := cache.GetGithubToolsCache(entity.ID); err != nil { + slog.DebugContext(w.ctx, "tools not found in cache; waiting for tools") + return false, false + } + return true, false case <-w.quit: return false, true case <-w.ctx.Done(): From 3fe4cef8849444b54c0dfce90480bc10ead869ce Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 14 May 2025 21:14:31 +0000 Subject: [PATCH 064/179] Cleanup unused code Signed-off-by: Gabriel Adrian Samfira --- workers/pools/controller.go | 1 - 1 file changed, 1 deletion(-) delete mode 100644 workers/pools/controller.go diff --git a/workers/pools/controller.go b/workers/pools/controller.go deleted file mode 100644 index 058ebec6..00000000 --- a/workers/pools/controller.go +++ /dev/null @@ -1 +0,0 @@ -package pools From 39ac658527c67b60161873e729870527d8b550e5 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 14 May 2025 21:33:46 +0000 Subject: [PATCH 065/179] Add forge type to repo list Signed-off-by: Gabriel Adrian Samfira --- cmd/garm-cli/cmd/repository.go | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/cmd/garm-cli/cmd/repository.go b/cmd/garm-cli/cmd/repository.go index b24cf039..c94495cd 100644 --- a/cmd/garm-cli/cmd/repository.go +++ b/cmd/garm-cli/cmd/repository.go @@ -357,13 +357,17 @@ func formatRepositories(repos []params.Repository) { return } t := table.NewWriter() - header := table.Row{"ID", "Owner", "Name", "Endpoint", "Credentials name", "Pool Balancer Type", "Pool mgr running"} + header := 
table.Row{"ID", "Owner", "Name", "Endpoint", "Credentials name", "Pool Balancer Type", "Forge type", "Pool mgr running"} if long { header = append(header, "Created At", "Updated At") } t.AppendHeader(header) for _, val := range repos { - row := table.Row{val.ID, val.Owner, val.Name, val.Endpoint.Name, val.GetCredentialsName(), val.GetBalancerType(), val.PoolManagerStatus.IsRunning} + forgeType := val.Endpoint.EndpointType + if forgeType == "" { + forgeType = params.GithubEndpointType + } + row := table.Row{val.ID, val.Owner, val.Name, val.Endpoint.Name, val.GetCredentialsName(), val.GetBalancerType(), forgeType, val.PoolManagerStatus.IsRunning} if long { row = append(row, val.CreatedAt, val.UpdatedAt) } From 0270117e8d0d3ced40e47e748c6498fb02c8e163 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 14 May 2025 22:01:11 +0000 Subject: [PATCH 066/179] Fix lint errors Signed-off-by: Gabriel Adrian Samfira --- cache/credentials_cache.go | 6 ++++-- database/sql/gitea.go | 5 +++-- workers/cache/gitea_tools.go | 23 +++++++++++------------ 3 files changed, 18 insertions(+), 16 deletions(-) diff --git a/cache/credentials_cache.go b/cache/credentials_cache.go index d5626f40..6dbef775 100644 --- a/cache/credentials_cache.go +++ b/cache/credentials_cache.go @@ -6,8 +6,10 @@ import ( "github.com/cloudbase/garm/params" ) -var credentialsCache *CredentialCache -var giteaCredentialsCache *CredentialCache +var ( + credentialsCache *CredentialCache + giteaCredentialsCache *CredentialCache +) func init() { ghCredentialsCache := &CredentialCache{ diff --git a/database/sql/gitea.go b/database/sql/gitea.go index 5ce46663..8d89b82d 100644 --- a/database/sql/gitea.go +++ b/database/sql/gitea.go @@ -4,12 +4,13 @@ import ( "context" "log/slog" + "github.com/pkg/errors" + "gorm.io/gorm" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" - "github.com/pkg/errors" - "gorm.io/gorm" ) func (s *sqlDatabase) CreateGiteaEndpoint(_ context.Context, param params.CreateGiteaEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) { diff --git a/workers/cache/gitea_tools.go b/workers/cache/gitea_tools.go index 9d6b2307..4474f7df 100644 --- a/workers/cache/gitea_tools.go +++ b/workers/cache/gitea_tools.go @@ -8,8 +8,9 @@ import ( "strings" "time" - commonParams "github.com/cloudbase/garm-provider-common/params" "golang.org/x/mod/semver" + + commonParams "github.com/cloudbase/garm-provider-common/params" ) const ( @@ -20,17 +21,15 @@ const ( GiteaRunnerMinimumVersion = "v0.2.12" ) -var ( - githubArchMapping map[string]string = map[string]string{ - "x86_64": "x64", - "amd64": "x64", - "armv7l": "arm", - "aarch64": "arm64", - "x64": "x64", - "arm": "arm", - "arm64": "arm64", - } -) +var githubArchMapping = map[string]string{ + "x86_64": "x64", + "amd64": "x64", + "armv7l": "arm", + "aarch64": "arm64", + "x64": "x64", + "arm": "arm", + "arm64": "arm64", +} var nightlyActRunner = GiteaEntityTool{ TagName: "nightly", From b4e92a69c98b4750f7003a3612f7e02902939776 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 14 May 2025 23:20:18 +0000 Subject: [PATCH 067/179] Fix existing tests Signed-off-by: Gabriel Adrian Samfira --- cache/cache_test.go | 26 ++++++++++++++++++-------- cache/tools_cache.go | 2 +- database/sql/github.go | 11 ++++++++--- database/sql/repositories_test.go | 9 --------- runner/repositories_test.go | 1 + 5 files changed, 28 insertions(+), 21 deletions(-) diff 
--git a/cache/cache_test.go b/cache/cache_test.go index 2ad63420..0f514329 100644 --- a/cache/cache_test.go +++ b/cache/cache_test.go @@ -22,6 +22,11 @@ func (c *CacheTestSuite) SetupTest() { EntityType: params.ForgeEntityTypeOrganization, Name: "test", Owner: "test", + Credentials: params.ForgeCredentials{ + ID: 1, + Name: "test", + ForgeType: params.GithubEndpointType, + }, } } @@ -50,7 +55,6 @@ func (c *CacheTestSuite) TestSetCacheWorks() { DownloadURL: garmTesting.Ptr("https://example.com"), }, } - c.Require().NotNil(githubToolsCache) c.Require().Len(githubToolsCache.entities, 0) SetGithubToolsCache(c.entity, tools) @@ -71,13 +75,17 @@ func (c *CacheTestSuite) TestTimedOutToolsCache() { c.Require().NotNil(githubToolsCache) c.Require().Len(githubToolsCache.entities, 0) SetGithubToolsCache(c.entity, tools) - c.Require().Len(githubToolsCache.entities, 1) entity := githubToolsCache.entities[c.entity.ID] - entity.updatedAt = entity.updatedAt.Add(-2 * time.Hour) + + c.Require().Equal(int64(entity.expiresAt.Sub(entity.updatedAt).Minutes()), int64(60)) + c.Require().Len(githubToolsCache.entities, 1) + entity = githubToolsCache.entities[c.entity.ID] + entity.updatedAt = entity.updatedAt.Add(-3 * time.Hour) + entity.expiresAt = entity.updatedAt.Add(-2 * time.Hour) githubToolsCache.entities[c.entity.ID] = entity cachedTools, err := GetGithubToolsCache(c.entity.ID) - c.Require().NoError(err) + c.Require().Error(err) c.Require().Nil(cachedTools) } @@ -85,7 +93,7 @@ func (c *CacheTestSuite) TestGetInexistentCache() { c.Require().NotNil(githubToolsCache) c.Require().Len(githubToolsCache.entities, 0) cachedTools, err := GetGithubToolsCache(c.entity.ID) - c.Require().NoError(err) + c.Require().Error(err) c.Require().Nil(cachedTools) } @@ -280,7 +288,8 @@ func (c *CacheTestSuite) TestReplaceEntityPools() { Name: "test", Owner: "test", Credentials: params.ForgeCredentials{ - ID: 1, + ID: 1, + ForgeType: params.GithubEndpointType, }, } pool1 := params.Pool{ @@ -291,8 +300,9 @@ func (c *CacheTestSuite) TestReplaceEntityPools() { } credentials := params.ForgeCredentials{ - ID: 1, - Name: "test", + ID: 1, + Name: "test", + ForgeType: params.GithubEndpointType, } SetGithubCredentials(credentials) diff --git a/cache/tools_cache.go b/cache/tools_cache.go index 98b58b19..8675bec9 100644 --- a/cache/tools_cache.go +++ b/cache/tools_cache.go @@ -68,7 +68,7 @@ func (g *GithubToolsCache) Set(entity params.ForgeEntity, tools []commonParams.R } if entity.Credentials.ForgeType == params.GithubEndpointType { - forgeTools.expiresAt = time.Now().Add(24 * time.Hour) + forgeTools.expiresAt = time.Now().Add(1 * time.Hour) } g.entities[entity.ID] = forgeTools diff --git a/database/sql/github.go b/database/sql/github.go index d2c05244..08d6bdb3 100644 --- a/database/sql/github.go +++ b/database/sql/github.go @@ -290,8 +290,11 @@ func (s *sqlDatabase) getGithubCredentialsByName(ctx context.Context, tx *gorm.D if detailed { q = q. Preload("Repositories"). + Preload("Repositories.Credentials"). Preload("Organizations"). - Preload("Enterprises") + Preload("Organizations.Credentials"). + Preload("Enterprises"). 
+ Preload("Enterprises.Credentials") } userID, err := getUIDFromContext(ctx) @@ -316,7 +319,6 @@ func (s *sqlDatabase) GetGithubCredentialsByName(ctx context.Context, name strin if err != nil { return params.ForgeCredentials{}, errors.Wrap(err, "fetching github credentials") } - return s.sqlToCommonForgeCredentials(creds) } @@ -327,8 +329,11 @@ func (s *sqlDatabase) GetGithubCredentials(ctx context.Context, id uint, detaile if detailed { q = q. Preload("Repositories"). + Preload("Repositories.Credentials"). Preload("Organizations"). - Preload("Enterprises") + Preload("Organizations.Credentials"). + Preload("Enterprises"). + Preload("Enterprises.Credentials") } if !auth.IsAdmin(ctx) { diff --git a/database/sql/repositories_test.go b/database/sql/repositories_test.go index 73104a2f..0e2fa08f 100644 --- a/database/sql/repositories_test.go +++ b/database/sql/repositories_test.go @@ -249,15 +249,6 @@ func (s *RepoTestSuite) TestCreateRepositoryInvalidDBPassphrase() { func (s *RepoTestSuite) TestCreateRepositoryInvalidDBCreateErr() { s.Fixtures.SQLMock.ExpectBegin() - s.Fixtures.SQLMock. - ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_credentials` WHERE user_id = ? AND name = ? AND `github_credentials`.`deleted_at` IS NULL ORDER BY `github_credentials`.`id` LIMIT ?")). - WithArgs(s.adminUserID, s.Fixtures.Repos[0].CredentialsName, 1). - WillReturnRows(sqlmock.NewRows([]string{"id", "endpoint_name"}). - AddRow(s.testCreds.ID, s.githubEndpoint.Name)) - s.Fixtures.SQLMock.ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_endpoints` WHERE `github_endpoints`.`name` = ? AND `github_endpoints`.`deleted_at` IS NULL")). - WithArgs(s.testCreds.Endpoint.Name). - WillReturnRows(sqlmock.NewRows([]string{"name"}). - AddRow(s.githubEndpoint.Name)) s.Fixtures.SQLMock. ExpectExec(regexp.QuoteMeta("INSERT INTO `repositories`")). 
WillReturnError(fmt.Errorf("creating repo mock error")) diff --git a/runner/repositories_test.go b/runner/repositories_test.go index 4e891e4b..0adf40d7 100644 --- a/runner/repositories_test.go +++ b/runner/repositories_test.go @@ -116,6 +116,7 @@ func (s *RepoTestSuite) SetupTest() { Name: "test-repo-create", CredentialsName: s.testCreds.Name, WebhookSecret: "test-create-repo-webhook-secret", + ForgeType: params.GithubEndpointType, }, CreatePoolParams: params.CreatePoolParams{ ProviderName: "test-provider", From 08511e2e7fa5b817a5f9aef7a032b621d1ce8bb0 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 15 May 2025 09:15:44 +0000 Subject: [PATCH 068/179] Account for gitea credentials in cache and watchers Signed-off-by: Gabriel Adrian Samfira --- apiserver/events/params.go | 2 +- cache/entity_cache.go | 12 ++++++++---- database/watcher/filters.go | 2 +- runner/pool/watcher.go | 2 +- workers/cache/cache.go | 14 +++++++++++--- workers/entity/worker_watcher.go | 4 ++-- 6 files changed, 24 insertions(+), 12 deletions(-) diff --git a/apiserver/events/params.go b/apiserver/events/params.go index 274d3f1e..49bf47fa 100644 --- a/apiserver/events/params.go +++ b/apiserver/events/params.go @@ -14,7 +14,7 @@ func (f Filter) Validate() error { case common.RepositoryEntityType, common.OrganizationEntityType, common.EnterpriseEntityType, common.PoolEntityType, common.UserEntityType, common.InstanceEntityType, common.JobEntityType, common.ControllerEntityType, common.GithubCredentialsEntityType, - common.GithubEndpointEntityType: + common.GiteaCredentialsEntityType, common.ScaleSetEntityType, common.GithubEndpointEntityType: default: return common.ErrInvalidEntityType } diff --git a/cache/entity_cache.go b/cache/entity_cache.go index 6bd1f2c7..bbbc385d 100644 --- a/cache/entity_cache.go +++ b/cache/entity_cache.go @@ -231,13 +231,17 @@ func (e *EntityCache) GetEntityScaleSets(entityID string) []params.ScaleSet { return nil } -func (e *EntityCache) GetEntitiesUsingGredentials(credsID uint) []params.ForgeEntity { +func (e *EntityCache) GetEntitiesUsingCredentials(creds params.ForgeCredentials) []params.ForgeEntity { e.mux.Lock() defer e.mux.Unlock() var entities []params.ForgeEntity for _, cache := range e.entities { - if cache.Entity.Credentials.GetID() == credsID { + if cache.Entity.Credentials.ForgeType != creds.ForgeType { + continue + } + + if cache.Entity.Credentials.GetID() == creds.GetID() { entities = append(entities, cache.Entity) } } @@ -357,8 +361,8 @@ func UpdateCredentialsInAffectedEntities(creds params.ForgeCredentials) { entityCache.UpdateCredentialsInAffectedEntities(creds) } -func GetEntitiesUsingGredentials(credsID uint) []params.ForgeEntity { - return entityCache.GetEntitiesUsingGredentials(credsID) +func GetEntitiesUsingCredentials(creds params.ForgeCredentials) []params.ForgeEntity { + return entityCache.GetEntitiesUsingCredentials(creds) } func GetAllEntities() []params.ForgeEntity { diff --git a/database/watcher/filters.go b/database/watcher/filters.go index c355890b..421fd6bf 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -188,7 +188,7 @@ func WithForgeCredentialsFilter(creds params.ForgeCredentials) dbCommon.PayloadF var idGetter params.IDGetter var ok bool switch payload.EntityType { - case dbCommon.GithubCredentialsEntityType: + case dbCommon.GithubCredentialsEntityType, dbCommon.GiteaCredentialsEntityType: idGetter, ok = payload.Payload.(params.ForgeCredentials) default: return false diff --git a/runner/pool/watcher.go 
b/runner/pool/watcher.go index 455f4239..97089133 100644 --- a/runner/pool/watcher.go +++ b/runner/pool/watcher.go @@ -124,7 +124,7 @@ func (r *basePoolManager) handleCredentialsUpdate(credentials params.ForgeCreden func (r *basePoolManager) handleWatcherEvent(event common.ChangePayload) { dbEntityType := common.DatabaseEntityType(r.entity.EntityType) switch event.EntityType { - case common.GithubCredentialsEntityType: + case common.GithubCredentialsEntityType, common.GiteaCredentialsEntityType: credentials, ok := event.Payload.(params.ForgeCredentials) if !ok { slog.ErrorContext(r.ctx, "failed to cast payload to github credentials") diff --git a/workers/cache/cache.go b/workers/cache/cache.go index 918b3438..8cc14224 100644 --- a/workers/cache/cache.go +++ b/workers/cache/cache.go @@ -386,8 +386,16 @@ func (w *Worker) handleCredentialsEvent(event common.ChangePayload) { } switch event.Operation { case common.CreateOperation, common.UpdateOperation: - cache.SetGithubCredentials(credentials) - entities := cache.GetEntitiesUsingGredentials(credentials.ID) + switch credentials.ForgeType { + case params.GithubEndpointType: + cache.SetGithubCredentials(credentials) + case params.GiteaEndpointType: + cache.SetGiteaCredentials(credentials) + default: + slog.DebugContext(w.ctx, "invalid credentials type", "credentials_type", credentials.ForgeType) + return + } + entities := cache.GetEntitiesUsingCredentials(credentials) for _, entity := range entities { worker, ok := w.toolsWorkes[entity.ID] if ok { @@ -414,7 +422,7 @@ func (w *Worker) handleEvent(event common.ChangePayload) { w.handleOrgEvent(event) case common.EnterpriseEntityType: w.handleEnterpriseEvent(event) - case common.GithubCredentialsEntityType: + case common.GithubCredentialsEntityType, common.GiteaCredentialsEntityType: w.handleCredentialsEvent(event) default: slog.DebugContext(w.ctx, "unknown entity type", "entity_type", event.EntityType) diff --git a/workers/entity/worker_watcher.go b/workers/entity/worker_watcher.go index f445b73d..273a8f02 100644 --- a/workers/entity/worker_watcher.go +++ b/workers/entity/worker_watcher.go @@ -17,7 +17,7 @@ func (w *Worker) handleWorkerWatcherEvent(event dbCommon.ChangePayload) { case entityType: w.handleEntityEventPayload(event) return - case dbCommon.GithubCredentialsEntityType: + case dbCommon.GithubCredentialsEntityType, dbCommon.GiteaCredentialsEntityType: slog.DebugContext(w.ctx, "got github credentials payload event") w.handleEntityCredentialsEventPayload(event) default: @@ -66,7 +66,7 @@ func (w *Worker) handleEntityCredentialsEventPayload(event dbCommon.ChangePayloa var creds params.ForgeCredentials var ok bool switch event.EntityType { - case dbCommon.GithubCredentialsEntityType: + case dbCommon.GithubCredentialsEntityType, dbCommon.GiteaCredentialsEntityType: creds, ok = event.Payload.(params.ForgeCredentials) default: slog.ErrorContext(w.ctx, "invalid entity type", "entity_type", event.EntityType) From 5dfcfc542e8de81891aa433766f29a45da18db19 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 16 May 2025 20:18:30 +0000 Subject: [PATCH 069/179] Implement webhooks install for gitea Signed-off-by: Gabriel Adrian Samfira --- database/watcher/filters.go | 2 +- util/github/client.go | 13 ++++- util/github/gitea.go | 100 ++++++++++++++++++++++++++++++++++++ 3 files changed, 113 insertions(+), 2 deletions(-) create mode 100644 util/github/gitea.go diff --git a/database/watcher/filters.go b/database/watcher/filters.go index 421fd6bf..1f747372 100644 --- 
a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -182,7 +182,7 @@ func WithEntityJobFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc } } -// WithGithubCredentialsFilter returns a filter function that filters payloads by Github credentials. +// WithForgeCredentialsFilter returns a filter function that filters payloads by Github credentials. func WithForgeCredentialsFilter(creds params.ForgeCredentials) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { var idGetter params.IDGetter diff --git a/util/github/client.go b/util/github/client.go index a46e4ab7..0e7fa7d6 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -92,7 +92,7 @@ func (g *githubClient) GetEntityHook(ctx context.Context, id int64) (ret *github return ret, err } -func (g *githubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) { +func (g *githubClient) createGithubEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) { metrics.GithubOperationCount.WithLabelValues( "CreateHook", // label: operation g.entity.LabelScope(), // label: scope @@ -116,6 +116,17 @@ func (g *githubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) return ret, err } +func (g *githubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) { + switch g.entity.Credentials.ForgeType { + case params.GithubEndpointType: + return g.createGithubEntityHook(ctx, hook) + case params.GiteaEndpointType: + return g.createGiteaEntityHook(ctx, hook) + default: + return nil, errors.New("invalid entity type") + } +} + func (g *githubClient) DeleteEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) { metrics.GithubOperationCount.WithLabelValues( "DeleteHook", // label: operation diff --git a/util/github/gitea.go b/util/github/gitea.go new file mode 100644 index 00000000..4c83846c --- /dev/null +++ b/util/github/gitea.go @@ -0,0 +1,100 @@ +package github + +import ( + "context" + "fmt" + "net/http" + + "github.com/google/go-github/v71/github" + "github.com/pkg/errors" + + "github.com/cloudbase/garm/metrics" + "github.com/cloudbase/garm/params" +) + +type createGiteaHookOptions struct { + Type string `json:"type"` + Config map[string]string `json:"config"` + Events []string `json:"events"` + BranchFilter string `json:"branch_filter"` + Active bool `json:"active"` + AuthorizationHeader string `json:"authorization_header"` +} + +func (g *githubClient) createGiteaRepoHook(ctx context.Context, owner, name string, hook *github.Hook) (ret *github.Hook, err error) { + u := fmt.Sprintf("repos/%v/%v/hooks", owner, name) + createOpts := &createGiteaHookOptions{ + Type: "gitea", + Events: hook.Events, + Active: hook.GetActive(), + BranchFilter: "*", + Config: map[string]string{ + "content_type": hook.GetConfig().GetContentType(), + "url": hook.GetConfig().GetURL(), + "http_method": "post", + }, + } + + req, err := g.cli.NewRequest(http.MethodPost, u, createOpts) + if err != nil { + return nil, fmt.Errorf("failed to construct request: %w", err) + } + + hook = new(github.Hook) + _, err = g.cli.Do(ctx, req, hook) + if err != nil { + return nil, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) + } + return hook, nil +} + +func (g *githubClient) createGiteaOrgHook(ctx context.Context, owner string, hook *github.Hook) (ret *github.Hook, err error) { + u := fmt.Sprintf("orgs/%v/hooks", owner) + createOpts := &createGiteaHookOptions{ + Type: "gitea", + Events: 
hook.Events, + Active: hook.GetActive(), + BranchFilter: "*", + Config: map[string]string{ + "content_type": hook.GetConfig().GetContentType(), + "url": hook.GetConfig().GetURL(), + "http_method": "post", + }, + } + + req, err := g.cli.NewRequest(http.MethodPost, u, createOpts) + if err != nil { + return nil, fmt.Errorf("failed to construct request: %w", err) + } + + hook = new(github.Hook) + _, err = g.cli.Do(ctx, req, hook) + if err != nil { + return nil, fmt.Errorf("request failed for %s: %w", req.URL.String(), err) + } + return hook, nil +} + +func (g *githubClient) createGiteaEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) { + metrics.GithubOperationCount.WithLabelValues( + "CreateHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + defer func() { + if err != nil { + metrics.GithubOperationFailedCount.WithLabelValues( + "CreateHook", // label: operation + g.entity.LabelScope(), // label: scope + ).Inc() + } + }() + switch g.entity.EntityType { + case params.ForgeEntityTypeRepository: + ret, err = g.createGiteaRepoHook(ctx, g.entity.Owner, g.entity.Name, hook) + case params.ForgeEntityTypeOrganization: + ret, err = g.createGiteaOrgHook(ctx, g.entity.Owner, hook) + default: + return nil, errors.New("invalid entity type") + } + return ret, err +} From 6a168ba81392f13985f346c2c52498c7de64a21f Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 16 May 2025 22:43:21 +0000 Subject: [PATCH 070/179] Enable orgs Signed-off-by: Gabriel Adrian Samfira --- cmd/garm-cli/cmd/organization.go | 10 ++++-- cmd/garm/main.go | 4 +-- database/common/mocks/Store.go | 18 +++++------ database/common/store.go | 2 +- database/sql/gitea.go | 12 +++++-- database/sql/github_test.go | 2 +- database/sql/instances_test.go | 2 +- database/sql/organizations.go | 44 ++++++++++++++------------ database/sql/organizations_test.go | 17 +++------- database/sql/pools_test.go | 2 +- database/sql/scalesets_test.go | 2 +- database/sql/util.go | 18 ++++++++--- database/watcher/filters.go | 8 +++++ database/watcher/watcher_store_test.go | 2 +- params/params.go | 7 ++++ runner/organizations.go | 15 +++++++-- runner/organizations_test.go | 2 +- runner/pools_test.go | 2 +- runner/scalesets.go | 12 +++---- util/github/client.go | 2 +- workers/entity/worker.go | 2 +- workers/scaleset/controller.go | 17 +++++++--- 22 files changed, 125 insertions(+), 77 deletions(-) diff --git a/cmd/garm-cli/cmd/organization.go b/cmd/garm-cli/cmd/organization.go index c7b80fec..c35fd75b 100644 --- a/cmd/garm-cli/cmd/organization.go +++ b/cmd/garm-cli/cmd/organization.go @@ -167,6 +167,7 @@ var orgAddCmd = &cobra.Command{ Name: orgName, WebhookSecret: orgWebhookSecret, CredentialsName: orgCreds, + ForgeType: params.EndpointType(forgeType), PoolBalancerType: params.PoolBalancerType(poolBalancerType), } response, err := apiCli.Organizations.CreateOrg(newOrgReq, authToken) @@ -306,6 +307,7 @@ func init() { orgAddCmd.Flags().StringVar(&orgName, "name", "", "The name of the organization") orgAddCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", string(params.PoolBalancerTypeRoundRobin), "The balancing strategy to use when creating runners in pools matching requested labels.") orgAddCmd.Flags().StringVar(&orgWebhookSecret, "webhook-secret", "", "The webhook secret for this organization") + orgAddCmd.Flags().StringVar(&forgeType, "forge-type", string(params.GithubEndpointType), "The forge type of the organization. 
Supported values: github, gitea.") orgAddCmd.Flags().StringVar(&orgCreds, "credentials", "", "Credentials name. See credentials list.") orgAddCmd.Flags().BoolVar(&orgRandomWebhookSecret, "random-webhook-secret", false, "Generate a random webhook secret for this organization.") orgAddCmd.Flags().BoolVar(&installOrgWebhook, "install-webhook", false, "Install the webhook as part of the add operation.") @@ -347,13 +349,17 @@ func formatOrganizations(orgs []params.Organization) { return } t := table.NewWriter() - header := table.Row{"ID", "Name", "Endpoint", "Credentials name", "Pool Balancer Type", "Pool mgr running"} + header := table.Row{"ID", "Name", "Endpoint", "Credentials name", "Pool Balancer Type", "Forge type", "Pool mgr running"} if long { header = append(header, "Created At", "Updated At") } t.AppendHeader(header) for _, val := range orgs { - row := table.Row{val.ID, val.Name, val.Endpoint.Name, val.CredentialsName, val.GetBalancerType(), val.PoolManagerStatus.IsRunning} + forgeType := val.Endpoint.EndpointType + if forgeType == "" { + forgeType = params.GithubEndpointType + } + row := table.Row{val.ID, val.Name, val.Endpoint.Name, val.CredentialsName, val.GetBalancerType(), forgeType, val.PoolManagerStatus.IsRunning} if long { row = append(row, val.CreatedAt, val.UpdatedAt) } diff --git a/cmd/garm/main.go b/cmd/garm/main.go index f37248d3..f0cca079 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -240,10 +240,10 @@ func main() { cacheWorker := cache.NewWorker(ctx, db) if err != nil { - log.Fatalf("failed to create credentials worker: %+v", err) + log.Fatalf("failed to create cache worker: %+v", err) } if err := cacheWorker.Start(); err != nil { - log.Fatalf("failed to start credentials worker: %+v", err) + log.Fatalf("failed to start cache worker: %+v", err) } providers, err := providers.LoadProvidersFromConfig(ctx, *cfg, controllerInfo.ControllerID.String()) diff --git a/database/common/mocks/Store.go b/database/common/mocks/Store.go index 30f1774c..c5994b87 100644 --- a/database/common/mocks/Store.go +++ b/database/common/mocks/Store.go @@ -348,9 +348,9 @@ func (_m *Store) CreateOrUpdateJob(ctx context.Context, job params.Job) (params. 
return r0, r1 } -// CreateOrganization provides a mock function with given fields: ctx, name, credentialsName, webhookSecret, poolBalancerType -func (_m *Store) CreateOrganization(ctx context.Context, name string, credentialsName string, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Organization, error) { - ret := _m.Called(ctx, name, credentialsName, webhookSecret, poolBalancerType) +// CreateOrganization provides a mock function with given fields: ctx, name, credentials, webhookSecret, poolBalancerType +func (_m *Store) CreateOrganization(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Organization, error) { + ret := _m.Called(ctx, name, credentials, webhookSecret, poolBalancerType) if len(ret) == 0 { panic("no return value specified for CreateOrganization") @@ -358,17 +358,17 @@ func (_m *Store) CreateOrganization(ctx context.Context, name string, credential var r0 params.Organization var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string, params.PoolBalancerType) (params.Organization, error)); ok { - return rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(0).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Organization, error)); ok { + return rf(ctx, name, credentials, webhookSecret, poolBalancerType) } - if rf, ok := ret.Get(0).(func(context.Context, string, string, string, params.PoolBalancerType) params.Organization); ok { - r0 = rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(0).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) params.Organization); ok { + r0 = rf(ctx, name, credentials, webhookSecret, poolBalancerType) } else { r0 = ret.Get(0).(params.Organization) } - if rf, ok := ret.Get(1).(func(context.Context, string, string, string, params.PoolBalancerType) error); ok { - r1 = rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) + if rf, ok := ret.Get(1).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) error); ok { + r1 = rf(ctx, name, credentials, webhookSecret, poolBalancerType) } else { r1 = ret.Error(1) } diff --git a/database/common/store.go b/database/common/store.go index 1f5b013b..7da0e5bb 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -47,7 +47,7 @@ type RepoStore interface { } type OrgStore interface { - CreateOrganization(ctx context.Context, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Organization, error) + CreateOrganization(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (org params.Organization, err error) GetOrganization(ctx context.Context, name, endpointName string) (params.Organization, error) GetOrganizationByID(ctx context.Context, orgID string) (params.Organization, error) ListOrganizations(ctx context.Context) ([]params.Organization, error) diff --git a/database/sql/gitea.go b/database/sql/gitea.go index 8d89b82d..27084a0a 100644 --- a/database/sql/gitea.go +++ b/database/sql/gitea.go @@ -268,7 +268,11 @@ func (s *sqlDatabase) getGiteaCredentialsByName(ctx context.Context, tx *gorm.DB if detailed { q = q. Preload("Repositories"). - Preload("Organizations") + Preload("Organizations"). + Preload("Repositories.GiteaCredentials"). 
+ Preload("Organizations.GiteaCredentials"). + Preload("Repositories.Credentials"). + Preload("Organizations.Credentials") } userID, err := getUIDFromContext(ctx) @@ -304,7 +308,11 @@ func (s *sqlDatabase) GetGiteaCredentials(ctx context.Context, id uint, detailed if detailed { q = q. Preload("Repositories"). - Preload("Organizations") + Preload("Organizations"). + Preload("Repositories.GiteaCredentials"). + Preload("Organizations.GiteaCredentials"). + Preload("Repositories.Credentials"). + Preload("Organizations.Credentials") } if !auth.IsAdmin(ctx) { diff --git a/database/sql/github_test.go b/database/sql/github_test.go index 49de9aa3..2e6eb507 100644 --- a/database/sql/github_test.go +++ b/database/sql/github_test.go @@ -544,7 +544,7 @@ func (s *GithubTestSuite) TestDeleteCredentialsFailsIfReposOrgsOrEntitiesUseIt() err = s.db.DeleteRepository(ctx, repo.ID) s.Require().NoError(err) - org, err := s.db.CreateOrganization(ctx, "test-org", creds.Name, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + org, err := s.db.CreateOrganization(ctx, "test-org", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotNil(org) diff --git a/database/sql/instances_test.go b/database/sql/instances_test.go index 8610409b..c70e35dd 100644 --- a/database/sql/instances_test.go +++ b/database/sql/instances_test.go @@ -84,7 +84,7 @@ func (s *InstancesTestSuite) SetupTest() { creds := garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), githubEndpoint) // create an organization for testing purposes - org, err := s.Store.CreateOrganization(s.adminCtx, "test-org", creds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) + org, err := s.Store.CreateOrganization(s.adminCtx, "test-org", creds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) if err != nil { s.FailNow(fmt.Sprintf("failed to create org: %s", err)) } diff --git a/database/sql/organizations.go b/database/sql/organizations.go index bf270445..6f8eaa10 100644 --- a/database/sql/organizations.go +++ b/database/sql/organizations.go @@ -29,7 +29,7 @@ import ( "github.com/cloudbase/garm/params" ) -func (s *sqlDatabase) CreateOrganization(ctx context.Context, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (org params.Organization, err error) { +func (s *sqlDatabase) CreateOrganization(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (param params.Organization, err error) { if webhookSecret == "" { return params.Organization{}, errors.New("creating org: missing secret") } @@ -40,7 +40,7 @@ func (s *sqlDatabase) CreateOrganization(ctx context.Context, name, credentialsN defer func() { if err == nil { - s.sendNotify(common.OrganizationEntityType, common.CreateOperation, org) + s.sendNotify(common.OrganizationEntityType, common.CreateOperation, param) } }() newOrg := Organization{ @@ -50,37 +50,37 @@ func (s *sqlDatabase) CreateOrganization(ctx context.Context, name, credentialsN } err = s.conn.Transaction(func(tx *gorm.DB) error { - creds, err := s.getGithubCredentialsByName(ctx, tx, credentialsName, false) - if err != nil { - return errors.Wrap(err, "creating org") + switch credentials.ForgeType { + case params.GithubEndpointType: + newOrg.CredentialsID = &credentials.ID + case params.GiteaEndpointType: + newOrg.GiteaCredentialsID = &credentials.ID + default: + return errors.Wrap(runnerErrors.ErrBadRequest, "unsupported credentials 
type") } - if creds.EndpointName == nil { - return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") - } - newOrg.CredentialsID = &creds.ID - newOrg.EndpointName = creds.EndpointName + newOrg.EndpointName = &credentials.Endpoint.Name q := tx.Create(&newOrg) if q.Error != nil { return errors.Wrap(q.Error, "creating org") } - - newOrg.Credentials = creds - newOrg.Endpoint = creds.Endpoint - return nil }) if err != nil { return params.Organization{}, errors.Wrap(err, "creating org") } - org, err = s.sqlToCommonOrganization(newOrg, true) + org, err := s.getOrgByID(ctx, s.conn, newOrg.ID.String(), "Pools", "Endpoint", "Credentials", "GiteaCredentials", "Credentials.Endpoint", "GiteaCredentials.Endpoint") if err != nil { return params.Organization{}, errors.Wrap(err, "creating org") } - org.WebhookSecret = webhookSecret - return org, nil + param, err = s.sqlToCommonOrganization(org, true) + if err != nil { + return params.Organization{}, errors.Wrap(err, "creating org") + } + + return param, nil } func (s *sqlDatabase) GetOrganization(ctx context.Context, name, endpointName string) (params.Organization, error) { @@ -101,7 +101,9 @@ func (s *sqlDatabase) ListOrganizations(_ context.Context) ([]params.Organizatio var orgs []Organization q := s.conn. Preload("Credentials"). + Preload("GiteaCredentials"). Preload("Credentials.Endpoint"). + Preload("GiteaCredentials.Endpoint"). Preload("Endpoint"). Find(&orgs) if q.Error != nil { @@ -121,7 +123,7 @@ func (s *sqlDatabase) ListOrganizations(_ context.Context) ([]params.Organizatio } func (s *sqlDatabase) DeleteOrganization(ctx context.Context, orgID string) (err error) { - org, err := s.getOrgByID(ctx, s.conn, orgID, "Endpoint", "Credentials", "Credentials.Endpoint") + org, err := s.getOrgByID(ctx, s.conn, orgID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") if err != nil { return errors.Wrap(err, "fetching org") } @@ -201,7 +203,7 @@ func (s *sqlDatabase) UpdateOrganization(ctx context.Context, orgID string, para return params.Organization{}, errors.Wrap(err, "saving org") } - org, err = s.getOrgByID(ctx, s.conn, orgID, "Endpoint", "Credentials", "Credentials.Endpoint") + org, err = s.getOrgByID(ctx, s.conn, orgID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") if err != nil { return params.Organization{}, errors.Wrap(err, "updating enterprise") } @@ -213,7 +215,7 @@ func (s *sqlDatabase) UpdateOrganization(ctx context.Context, orgID string, para } func (s *sqlDatabase) GetOrganizationByID(ctx context.Context, orgID string) (params.Organization, error) { - org, err := s.getOrgByID(ctx, s.conn, orgID, "Pools", "Credentials", "Endpoint", "Credentials.Endpoint") + org, err := s.getOrgByID(ctx, s.conn, orgID, "Pools", "Credentials", "Endpoint", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") if err != nil { return params.Organization{}, errors.Wrap(err, "fetching org") } @@ -254,7 +256,9 @@ func (s *sqlDatabase) getOrg(_ context.Context, name, endpointName string) (Orga q := s.conn.Where("name = ? COLLATE NOCASE and endpoint_name = ? COLLATE NOCASE", name, endpointName). Preload("Credentials"). + Preload("GiteaCredentials"). Preload("Credentials.Endpoint"). + Preload("GiteaCredentials.Endpoint"). Preload("Endpoint"). 
First(&org) if q.Error != nil { diff --git a/database/sql/organizations_test.go b/database/sql/organizations_test.go index a7ad23b4..3c2ba337 100644 --- a/database/sql/organizations_test.go +++ b/database/sql/organizations_test.go @@ -100,7 +100,7 @@ func (s *OrgTestSuite) SetupTest() { org, err := db.CreateOrganization( s.adminCtx, fmt.Sprintf("test-org-%d", i), - s.testCreds.Name, + s.testCreds, fmt.Sprintf("test-webhook-secret-%d", i), params.PoolBalancerTypeRoundRobin, ) @@ -179,7 +179,7 @@ func (s *OrgTestSuite) TestCreateOrganization() { org, err := s.Store.CreateOrganization( s.adminCtx, s.Fixtures.CreateOrgParams.Name, - s.Fixtures.CreateOrgParams.CredentialsName, + s.testCreds, s.Fixtures.CreateOrgParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) @@ -210,7 +210,7 @@ func (s *OrgTestSuite) TestCreateOrganizationInvalidDBPassphrase() { _, err = sqlDB.CreateOrganization( s.adminCtx, s.Fixtures.CreateOrgParams.Name, - s.Fixtures.CreateOrgParams.CredentialsName, + s.testCreds, s.Fixtures.CreateOrgParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) @@ -220,15 +220,6 @@ func (s *OrgTestSuite) TestCreateOrganizationInvalidDBPassphrase() { func (s *OrgTestSuite) TestCreateOrganizationDBCreateErr() { s.Fixtures.SQLMock.ExpectBegin() - s.Fixtures.SQLMock. - ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_credentials` WHERE user_id = ? AND name = ? AND `github_credentials`.`deleted_at` IS NULL ORDER BY `github_credentials`.`id` LIMIT ?")). - WithArgs(s.adminUserID, s.Fixtures.Orgs[0].CredentialsName, 1). - WillReturnRows(sqlmock.NewRows([]string{"id", "endpoint_name"}). - AddRow(s.testCreds.ID, s.githubEndpoint.Name)) - s.Fixtures.SQLMock.ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_endpoints` WHERE `github_endpoints`.`name` = ? AND `github_endpoints`.`deleted_at` IS NULL")). - WithArgs(s.testCreds.Endpoint.Name). - WillReturnRows(sqlmock.NewRows([]string{"name"}). - AddRow(s.githubEndpoint.Name)) s.Fixtures.SQLMock. ExpectExec(regexp.QuoteMeta("INSERT INTO `organizations`")). 
WillReturnError(fmt.Errorf("creating org mock error")) @@ -237,7 +228,7 @@ func (s *OrgTestSuite) TestCreateOrganizationDBCreateErr() { _, err := s.StoreSQLMocked.CreateOrganization( s.adminCtx, s.Fixtures.CreateOrgParams.Name, - s.Fixtures.CreateOrgParams.CredentialsName, + s.testCreds, s.Fixtures.CreateOrgParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) diff --git a/database/sql/pools_test.go b/database/sql/pools_test.go index dfb82510..9044bf18 100644 --- a/database/sql/pools_test.go +++ b/database/sql/pools_test.go @@ -81,7 +81,7 @@ func (s *PoolsTestSuite) SetupTest() { creds := garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), githubEndpoint) // create an organization for testing purposes - org, err := s.Store.CreateOrganization(s.adminCtx, "test-org", creds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) + org, err := s.Store.CreateOrganization(s.adminCtx, "test-org", creds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) if err != nil { s.FailNow(fmt.Sprintf("failed to create org: %s", err)) } diff --git a/database/sql/scalesets_test.go b/database/sql/scalesets_test.go index 1313af59..54937c2d 100644 --- a/database/sql/scalesets_test.go +++ b/database/sql/scalesets_test.go @@ -48,7 +48,7 @@ func (s *ScaleSetsTestSuite) SetupTest() { s.creds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), githubEndpoint) // create an organization for testing purposes - s.org, err = s.Store.CreateOrganization(s.adminCtx, "test-org", s.creds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) + s.org, err = s.Store.CreateOrganization(s.adminCtx, "test-org", s.creds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) if err != nil { s.FailNow(fmt.Sprintf("failed to create org: %s", err)) } diff --git a/database/sql/util.go b/database/sql/util.go index 0c71261d..11d338ba 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -151,16 +151,24 @@ func (s *sqlDatabase) sqlToCommonOrganization(org Organization, detailed bool) ( UpdatedAt: org.UpdatedAt, } + var forgeCreds params.ForgeCredentials if org.CredentialsID != nil { ret.CredentialsID = *org.CredentialsID + forgeCreds, err = s.sqlToCommonForgeCredentials(org.Credentials) + } + + if org.GiteaCredentialsID != nil { + ret.CredentialsID = *org.GiteaCredentialsID + forgeCreds, err = s.sqlGiteaToCommonForgeCredentials(org.GiteaCredentials) + } + + if err != nil { + return params.Organization{}, errors.Wrap(err, "converting credentials") } if detailed { - creds, err := s.sqlToCommonForgeCredentials(org.Credentials) - if err != nil { - return params.Organization{}, errors.Wrap(err, "converting credentials") - } - ret.Credentials = creds + ret.Credentials = forgeCreds + ret.CredentialsName = forgeCreds.Name } if ret.PoolBalancerType == "" { diff --git a/database/watcher/filters.go b/database/watcher/filters.go index 1f747372..e462183d 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -92,6 +92,14 @@ func WithEntityPoolFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFun // in pools that belong to it. 
func WithEntityScaleSetFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { + forgeType, err := ghEntity.GetForgeType() + if err != nil { + return false + } + if forgeType != params.GiteaEndpointType { + return false + } + switch payload.EntityType { case dbCommon.ScaleSetEntityType: scaleSet, ok := payload.Payload.(params.ScaleSet) diff --git a/database/watcher/watcher_store_test.go b/database/watcher/watcher_store_test.go index 8791a514..3de7c01a 100644 --- a/database/watcher/watcher_store_test.go +++ b/database/watcher/watcher_store_test.go @@ -713,7 +713,7 @@ func (s *WatcherStoreTestSuite) TestOrgWatcher() { creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) s.T().Cleanup(func() { s.store.DeleteGithubCredentials(s.ctx, creds.ID) }) - org, err := s.store.CreateOrganization(s.ctx, "test-org", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + org, err := s.store.CreateOrganization(s.ctx, "test-org", creds, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(org.ID) diff --git a/params/params.go b/params/params.go index 052b2c8b..e154b2df 100644 --- a/params/params.go +++ b/params/params.go @@ -1098,6 +1098,13 @@ func (g ForgeEntity) GetCreatedAt() time.Time { return g.CreatedAt } +func (g ForgeEntity) GetForgeType() (EndpointType, error) { + if g.Credentials.ForgeType == "" { + return "", fmt.Errorf("credentials forge type is empty") + } + return g.Credentials.ForgeType, nil +} + func (g ForgeEntity) ForgeURL() string { switch g.Credentials.ForgeType { case GiteaEndpointType: diff --git a/runner/organizations.go b/runner/organizations.go index 49f143ea..bddab87c 100644 --- a/runner/organizations.go +++ b/runner/organizations.go @@ -38,7 +38,18 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP return params.Organization{}, errors.Wrap(err, "validating params") } - creds, err := r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) + var creds params.ForgeCredentials + switch param.GetForgeType() { + case params.GithubEndpointType: + slog.DebugContext(ctx, "getting github credentials") + creds, err = r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) + case params.GiteaEndpointType: + slog.DebugContext(ctx, "getting gitea credentials") + creds, err = r.store.GetGiteaCredentialsByName(ctx, param.CredentialsName, true) + default: + return params.Organization{}, runnerErrors.NewBadRequestError("invalid forge type: %s", param.GetForgeType()) + } + if err != nil { return params.Organization{}, runnerErrors.NewBadRequestError("credentials %s not defined", param.CredentialsName) } @@ -52,7 +63,7 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP return params.Organization{}, runnerErrors.NewConflictError("organization %s already exists", param.Name) } - org, err = r.store.CreateOrganization(ctx, param.Name, creds.Name, param.WebhookSecret, param.PoolBalancerType) + org, err = r.store.CreateOrganization(ctx, param.Name, creds, param.WebhookSecret, param.PoolBalancerType) if err != nil { return params.Organization{}, errors.Wrap(err, "creating organization") } diff --git a/runner/organizations_test.go b/runner/organizations_test.go index 4d4a26e1..90075c87 100644 --- a/runner/organizations_test.go +++ b/runner/organizations_test.go @@ -82,7 +82,7 @@ func (s *OrgTestSuite) SetupTest() { org, err := db.CreateOrganization( adminCtx, name, - 
s.testCreds.Name, + s.testCreds, fmt.Sprintf("test-webhook-secret-%v", i), params.PoolBalancerTypeRoundRobin, ) diff --git a/runner/pools_test.go b/runner/pools_test.go index 587addce..3bc5d4b3 100644 --- a/runner/pools_test.go +++ b/runner/pools_test.go @@ -69,7 +69,7 @@ func (s *PoolTestSuite) SetupTest() { s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(s.adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) // create an organization for testing purposes - org, err := db.CreateOrganization(s.adminCtx, "test-org", s.testCreds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) + org, err := db.CreateOrganization(s.adminCtx, "test-org", s.testCreds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) if err != nil { s.FailNow(fmt.Sprintf("failed to create org: %s", err)) } diff --git a/runner/scalesets.go b/runner/scalesets.go index 6796dc2d..e7af9c22 100644 --- a/runner/scalesets.go +++ b/runner/scalesets.go @@ -16,7 +16,6 @@ package runner import ( "context" - "encoding/json" "fmt" "log/slog" @@ -181,12 +180,10 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param } if hasUpdates { - result, err := scalesetCli.UpdateRunnerScaleSet(ctx, newSet.ScaleSetID, updateParams) + _, err := scalesetCli.UpdateRunnerScaleSet(ctx, newSet.ScaleSetID, updateParams) if err != nil { return fmt.Errorf("failed to update scaleset in github: %w", err) } - asJs, _ := json.MarshalIndent(result, "", " ") - slog.Info("update result", "data", string(asJs)) } return nil } @@ -216,6 +213,10 @@ func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.For return params.ScaleSet{}, errors.Wrap(err, "getting entity") } + if entity.Credentials.ForgeType != params.GithubEndpointType { + return params.ScaleSet{}, runnerErrors.NewBadRequestError("scale sets are only supported for github entities") + } + ghCli, err := github.Client(ctx, entity) if err != nil { return params.ScaleSet{}, errors.Wrap(err, "creating github client") @@ -255,9 +256,6 @@ func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.For return params.ScaleSet{}, errors.Wrap(err, "creating runner scale set") } - asJs, _ := json.MarshalIndent(runnerScaleSet, "", " ") - slog.InfoContext(ctx, "scale set", "data", string(asJs)) - defer func() { if err != nil { if innerErr := scalesetCli.DeleteRunnerScaleSet(ctx, runnerScaleSet.ID); innerErr != nil { diff --git a/util/github/client.go b/util/github/client.go index 0e7fa7d6..f25329c7 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -578,7 +578,7 @@ func Client(ctx context.Context, entity params.ForgeEntity) (common.GithubClient "upload_url", entity.Credentials.UploadBaseURL) ghClient := github.NewClient(httpClient) - switch entity.Credentials.Endpoint.EndpointType { + switch entity.Credentials.ForgeType { case params.GithubEndpointType: ghClient, err = ghClient.WithEnterpriseURLs(entity.Credentials.APIBaseURL, entity.Credentials.UploadBaseURL) case params.GiteaEndpointType: diff --git a/workers/entity/worker.go b/workers/entity/worker.go index 597d5797..efb40ac1 100644 --- a/workers/entity/worker.go +++ b/workers/entity/worker.go @@ -21,7 +21,7 @@ import ( ) func NewWorker(ctx context.Context, store dbCommon.Store, entity params.ForgeEntity, providers map[string]common.Provider) (*Worker, error) { - consumerID := fmt.Sprintf("entity-worker-%s", entity.String()) + consumerID := fmt.Sprintf("entity-worker-%s", entity.ID) ctx = garmUtil.WithSlogContext( ctx, diff --git 
a/workers/scaleset/controller.go b/workers/scaleset/controller.go index e28eb7ee..4aba42c2 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -16,7 +16,7 @@ import ( ) func NewController(ctx context.Context, store dbCommon.Store, entity params.ForgeEntity, providers map[string]common.Provider) (*Controller, error) { - consumerID := fmt.Sprintf("scaleset-controller-%s", entity.String()) + consumerID := fmt.Sprintf("scaleset-controller-%s", entity.ID) ctx = garmUtil.WithSlogContext( ctx, @@ -93,9 +93,16 @@ func (c *Controller) Start() (err error) { } c.mux.Unlock() - slog.DebugContext(c.ctx, "loaging scale sets", "entity", c.Entity.String()) - if err := c.loadAllScaleSets(); err != nil { - return fmt.Errorf("loading all scale sets: %w", err) + forgeType, err := c.Entity.GetForgeType() + if err != nil { + return fmt.Errorf("getting forge type: %w", err) + } + if forgeType == params.GithubEndpointType { + // scale sets are only available in Github + slog.DebugContext(c.ctx, "loaging scale sets", "entity", c.Entity.String()) + if err := c.loadAllScaleSets(); err != nil { + return fmt.Errorf("loading all scale sets: %w", err) + } } consumer, err := watcher.RegisterConsumer( @@ -103,7 +110,7 @@ func (c *Controller) Start() (err error) { composeControllerWatcherFilters(c.Entity), ) if err != nil { - return fmt.Errorf("registering consumer: %w", err) + return fmt.Errorf("registering consumer %q: %w", c.consumerID, err) } c.mux.Lock() From bb798a288a263d6f482b5baf0905696a872b018c Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 16 May 2025 23:58:39 +0000 Subject: [PATCH 071/179] Properly set webhook secret Signed-off-by: Gabriel Adrian Samfira --- apiserver/controllers/controllers.go | 4 ---- params/github.go | 11 ++++++++++- runner/pool/pool.go | 4 +++- runner/runner.go | 8 ++++++-- util/github/gitea.go | 2 ++ 5 files changed, 21 insertions(+), 8 deletions(-) diff --git a/apiserver/controllers/controllers.go b/apiserver/controllers/controllers.go index 32da79b3..2a57f9cf 100644 --- a/apiserver/controllers/controllers.go +++ b/apiserver/controllers/controllers.go @@ -103,7 +103,6 @@ func (a *APIController) handleWorkflowJobEvent(ctx context.Context, w http.Respo handleError(ctx, w, gErrors.NewBadRequestError("invalid post body: %s", err)) return } - slog.Debug("received workflow job event", "body", string(body)) signature := r.Header.Get("X-Hub-Signature-256") hookType := r.Header.Get("X-Github-Hook-Installation-Target-Type") @@ -162,9 +161,6 @@ func (a *APIController) WebhookHandler(w http.ResponseWriter, r *http.Request) { } headers := r.Header.Clone() - for k, v := range headers { - slog.Debug("header", "key", k, "value", v) - } event := runnerParams.Event(headers.Get("X-Github-Event")) switch event { diff --git a/params/github.go b/params/github.go index 0f963090..9859f717 100644 --- a/params/github.go +++ b/params/github.go @@ -171,7 +171,9 @@ type WorkflowJob struct { DefaultBranch string `json:"default_branch"` } `json:"repository"` Organization struct { - Login string `json:"login"` + Login string `json:"login"` + // Name is a gitea specific field + Name string `json:"name"` ID int64 `json:"id"` NodeID string `json:"node_id"` URL string `json:"url"` @@ -218,6 +220,13 @@ type WorkflowJob struct { } `json:"sender"` } +func (w WorkflowJob) GetOrgName(forgeType EndpointType) string { + if forgeType == GiteaEndpointType { + return w.Organization.Name + } + return w.Organization.Login +} + type RunnerSetting struct { Ephemeral bool 
`json:"ephemeral,omitempty"` IsElastic bool `json:"isElastic,omitempty"` diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 8b02b593..86ce52f0 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -152,6 +152,7 @@ func (r *basePoolManager) getProviderBaseParams(pool params.Pool) common.Provide func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { if err := r.ValidateOwner(job); err != nil { + slog.ErrorContext(r.ctx, "failed to validate owner", "error", err) return errors.Wrap(err, "validating owner") } @@ -164,6 +165,7 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { jobParams, err := r.paramsWorkflowJobToParamsJob(job) if err != nil { + slog.ErrorContext(r.ctx, "failed to convert job to params", "error", err) return errors.Wrap(err, "converting job to params") } @@ -1962,7 +1964,7 @@ func (r *basePoolManager) ValidateOwner(job params.WorkflowJob) error { return runnerErrors.NewBadRequestError("job not meant for this pool manager") } case params.ForgeEntityTypeOrganization: - if !strings.EqualFold(job.Organization.Login, r.entity.Owner) { + if !strings.EqualFold(job.GetOrgName(r.entity.Credentials.ForgeType), r.entity.Owner) { return runnerErrors.NewBadRequestError("job not meant for this pool manager") } case params.ForgeEntityTypeEnterprise: diff --git a/runner/runner.go b/runner/runner.go index e02ee698..186799f6 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -668,8 +668,8 @@ func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, forgeType case OrganizationHook: slog.DebugContext( r.ctx, "got hook for organization", - "organization", util.SanitizeLogEntry(job.Organization.Login)) - poolManager, err = r.findOrgPoolManager(job.Organization.Login, endpoint.Name) + "organization", util.SanitizeLogEntry(job.GetOrgName(forgeType))) + poolManager, err = r.findOrgPoolManager(job.GetOrgName(forgeType), endpoint.Name) case EnterpriseHook: slog.DebugContext( r.ctx, "got hook for enterprise", @@ -679,7 +679,9 @@ func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, forgeType return runnerErrors.NewBadRequestError("cannot handle hook target type %s", hookTargetType) } + slog.DebugContext(r.ctx, "found pool manager", "pool_manager", poolManager.ID()) if err != nil { + slog.ErrorContext(r.ctx, "failed to find pool manager", "error", err, "hook_target_type", hookTargetType) // We don't have a repository or organization configured that // can handle this workflow job. return errors.Wrap(err, "fetching poolManager") @@ -689,10 +691,12 @@ func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, forgeType // we make sure that the source of this workflow job is valid. 
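// Editor's sketch: the validation step referenced here typically means
// recomputing the HMAC SHA256 of the raw request body with the pool manager's
// webhook secret and comparing it, in constant time, against the
// X-Hub-Signature-256 header. This is illustrative only and not GARM's exact
// validateHookBody implementation; the helper name validSignature is made up
// for this example.
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"strings"
)

func validSignature(secret string, body []byte, header string) bool {
	mac := hmac.New(sha256.New, []byte(secret))
	mac.Write(body)
	expected := "sha256=" + hex.EncodeToString(mac.Sum(nil))
	// hmac.Equal compares in constant time to avoid timing side channels.
	return hmac.Equal([]byte(expected), []byte(strings.TrimSpace(header)))
}

func main() {
	secret := "test-webhook-secret"
	body := []byte(`{"action":"queued"}`)

	mac := hmac.New(sha256.New, []byte(secret))
	mac.Write(body)
	header := "sha256=" + hex.EncodeToString(mac.Sum(nil))

	fmt.Println(validSignature(secret, body, header))            // true
	fmt.Println(validSignature(secret, body, "sha256=deadbeef")) // false
}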
secret := poolManager.WebhookSecret() if err := r.validateHookBody(signature, secret, jobData); err != nil { + slog.ErrorContext(r.ctx, "failed to validate webhook data", "error", err) return errors.Wrap(err, "validating webhook data") } if err := poolManager.HandleWorkflowJob(job); err != nil { + slog.ErrorContext(r.ctx, "failed to handle workflow job", "error", err) return errors.Wrap(err, "handling workflow job") } diff --git a/util/github/gitea.go b/util/github/gitea.go index 4c83846c..0359836e 100644 --- a/util/github/gitea.go +++ b/util/github/gitea.go @@ -32,6 +32,7 @@ func (g *githubClient) createGiteaRepoHook(ctx context.Context, owner, name stri "content_type": hook.GetConfig().GetContentType(), "url": hook.GetConfig().GetURL(), "http_method": "post", + "secret": hook.GetConfig().GetSecret(), }, } @@ -59,6 +60,7 @@ func (g *githubClient) createGiteaOrgHook(ctx context.Context, owner string, hoo "content_type": hook.GetConfig().GetContentType(), "url": hook.GetConfig().GetURL(), "http_method": "post", + "secret": hook.GetConfig().GetSecret(), }, } From b2d56093529d9acdaee2fe59b28f69e5326449c0 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 19 May 2025 19:45:45 +0000 Subject: [PATCH 072/179] Add some tests Signed-off-by: Gabriel Adrian Samfira --- auth/context.go | 16 +- auth/instance_middleware.go | 8 +- auth/interfaces.go | 2 +- cache/cache_test.go | 400 ++++++++++++++++++++++++++- cache/tools_cache.go | 16 +- database/watcher/filters.go | 9 +- runner/pool/pool.go | 3 +- runner/runner.go | 9 +- workers/provider/instance_manager.go | 2 +- 9 files changed, 441 insertions(+), 24 deletions(-) diff --git a/auth/context.go b/auth/context.go index 0d95be56..1b648bb6 100644 --- a/auth/context.go +++ b/auth/context.go @@ -44,8 +44,21 @@ const ( instanceTokenFetched contextFlags = "tokenFetched" instanceHasJITConfig contextFlags = "hasJITConfig" instanceParams contextFlags = "instanceParams" + instanceForgeTypeKey contextFlags = "forge_type" ) +func SetInstanceForgeType(ctx context.Context, val string) context.Context { + return context.WithValue(ctx, instanceForgeTypeKey, val) +} + +func InstanceForgeType(ctx context.Context) params.EndpointType { + elem := ctx.Value(instanceForgeTypeKey) + if elem == nil { + return "" + } + return elem.(params.EndpointType) +} + func SetInstanceID(ctx context.Context, id string) context.Context { return context.WithValue(ctx, instanceIDKey, id) } @@ -159,7 +172,7 @@ func InstanceEntity(ctx context.Context) string { return elem.(string) } -func PopulateInstanceContext(ctx context.Context, instance params.Instance) context.Context { +func PopulateInstanceContext(ctx context.Context, instance params.Instance, claims *InstanceJWTClaims) context.Context { ctx = SetInstanceID(ctx, instance.ID) ctx = SetInstanceName(ctx, instance.Name) ctx = SetInstancePoolID(ctx, instance.PoolID) @@ -167,6 +180,7 @@ func PopulateInstanceContext(ctx context.Context, instance params.Instance) cont ctx = SetInstanceTokenFetched(ctx, instance.TokenFetched) ctx = SetInstanceHasJITConfig(ctx, instance.JitConfiguration) ctx = SetInstanceParams(ctx, instance) + ctx = SetInstanceForgeType(ctx, claims.ForgeType) return ctx } diff --git a/auth/instance_middleware.go b/auth/instance_middleware.go index 38e1fdcf..bcae0b0a 100644 --- a/auth/instance_middleware.go +++ b/auth/instance_middleware.go @@ -44,6 +44,7 @@ type InstanceJWTClaims struct { // Entity is the repo or org name Entity string `json:"entity"` CreateAttempt int `json:"create_attempt"` + ForgeType string 
`json:"forge_type"` jwt.RegisteredClaims } @@ -60,7 +61,7 @@ type instanceToken struct { jwtSecret string } -func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity string, entityType params.ForgeEntityType, ttlMinutes uint) (string, error) { +func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity params.ForgeEntity, entityType params.ForgeEntityType, ttlMinutes uint) (string, error) { // Token expiration is equal to the bootstrap timeout set on the pool plus the polling // interval garm uses to check for timed out runners. Runners that have not sent their info // by the end of this interval are most likely failed and will be reaped by garm anyway. @@ -83,7 +84,8 @@ func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity str Name: instance.Name, PoolID: instance.PoolID, Scope: entityType, - Entity: entity, + Entity: entity.String(), + ForgeType: string(entity.Credentials.ForgeType), CreateAttempt: instance.CreateAttempt, } token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) @@ -124,7 +126,7 @@ func (amw *instanceMiddleware) claimsToContext(ctx context.Context, claims *Inst return ctx, runnerErrors.ErrUnauthorized } - ctx = PopulateInstanceContext(ctx, instanceInfo) + ctx = PopulateInstanceContext(ctx, instanceInfo, claims) return ctx, nil } diff --git a/auth/interfaces.go b/auth/interfaces.go index 095db4b2..ab68dbd7 100644 --- a/auth/interfaces.go +++ b/auth/interfaces.go @@ -26,5 +26,5 @@ type Middleware interface { } type InstanceTokenGetter interface { - NewInstanceJWTToken(instance params.Instance, entity string, poolType params.ForgeEntityType, ttlMinutes uint) (string, error) + NewInstanceJWTToken(instance params.Instance, entity params.ForgeEntity, poolType params.ForgeEntityType, ttlMinutes uint) (string, error) } diff --git a/cache/cache_test.go b/cache/cache_test.go index 0f514329..4b7cf332 100644 --- a/cache/cache_test.go +++ b/cache/cache_test.go @@ -6,6 +6,7 @@ import ( "github.com/stretchr/testify/suite" + runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" garmTesting "github.com/cloudbase/garm/internal/testing" "github.com/cloudbase/garm/params" @@ -35,6 +36,7 @@ func (c *CacheTestSuite) TearDownTest() { githubToolsCache.mux.Lock() defer githubToolsCache.mux.Unlock() githubToolsCache.entities = make(map[string]GithubEntityTools) + giteaCredentialsCache.cache = make(map[uint]params.ForgeCredentials) credentialsCache.cache = make(map[uint]params.ForgeCredentials) instanceCache.cache = make(map[string]params.Instance) entityCache = &EntityCache{ @@ -49,7 +51,7 @@ func (c *CacheTestSuite) TestCacheIsInitialized() { c.Require().NotNil(entityCache) } -func (c *CacheTestSuite) TestSetCacheWorks() { +func (c *CacheTestSuite) TestSetToolsCacheWorks() { tools := []commonParams.RunnerApplicationDownload{ { DownloadURL: garmTesting.Ptr("https://example.com"), @@ -65,6 +67,39 @@ func (c *CacheTestSuite) TestSetCacheWorks() { c.Require().Equal(tools[0].GetDownloadURL(), cachedTools[0].GetDownloadURL()) } +func (c *CacheTestSuite) TestSetToolsCacheWithError() { + tools := []commonParams.RunnerApplicationDownload{ + { + DownloadURL: garmTesting.Ptr("https://example.com"), + }, + } + c.Require().NotNil(githubToolsCache) + c.Require().Len(githubToolsCache.entities, 0) + SetGithubToolsCache(c.entity, tools) + entity := githubToolsCache.entities[c.entity.ID] + + c.Require().Equal(int64(entity.expiresAt.Sub(entity.updatedAt).Minutes()), int64(60)) + 
c.Require().Len(githubToolsCache.entities, 1) + SetGithubToolsCacheError(c.entity, runnerErrors.ErrNotFound) + + cachedTools, err := GetGithubToolsCache(c.entity.ID) + c.Require().Error(err) + c.Require().Nil(cachedTools) +} + +func (c *CacheTestSuite) TestSetErrorOnNonExistingCacheEntity() { + entity := params.ForgeEntity{ + ID: "non-existing-entity", + } + c.Require().NotNil(githubToolsCache) + c.Require().Len(githubToolsCache.entities, 0) + SetGithubToolsCacheError(entity, runnerErrors.ErrNotFound) + + storedEntity, err := GetGithubToolsCache(entity.ID) + c.Require().Error(err) + c.Require().Nil(storedEntity) +} + func (c *CacheTestSuite) TestTimedOutToolsCache() { tools := []commonParams.RunnerApplicationDownload{ { @@ -273,12 +308,23 @@ func (c *CacheTestSuite) TestSetGetEntityCache() { c.Require().True(ok) c.Require().Equal(entity.ID, cachedEntity.ID) + pool := params.Pool{ + ID: "pool-1", + } + SetEntityPool(entity.ID, pool) + cachedEntityPools := GetEntityPools("test-entity") + c.Require().Equal(1, len(cachedEntityPools)) + entity.Credentials.Description = "test description" SetEntity(entity) cachedEntity, ok = GetEntity("test-entity") c.Require().True(ok) c.Require().Equal(entity.ID, cachedEntity.ID) c.Require().Equal(entity.Credentials.Description, cachedEntity.Credentials.Description) + + // Make sure we don't clobber pools after updating the entity + cachedEntityPools = GetEntityPools("test-entity") + c.Require().Equal(1, len(cachedEntityPools)) } func (c *CacheTestSuite) TestReplaceEntityPools() { @@ -623,6 +669,358 @@ func (c *CacheTestSuite) TestGetEntityPool() { c.Require().Equal(pool.ID, poolFromCache.ID) } +func (c *CacheTestSuite) TestSetGiteaCredentials() { + credentials := params.ForgeCredentials{ + ID: 1, + Description: "test description", + } + SetGiteaCredentials(credentials) + cachedCreds, ok := GetGiteaCredentials(1) + c.Require().True(ok) + c.Require().Equal(credentials.ID, cachedCreds.ID) + + cachedCreds.Description = "new description" + SetGiteaCredentials(cachedCreds) + cachedCreds, ok = GetGiteaCredentials(1) + c.Require().True(ok) + c.Require().Equal(credentials.ID, cachedCreds.ID) + c.Require().Equal("new description", cachedCreds.Description) +} + +func (c *CacheTestSuite) TestGetAllGiteaCredentials() { + credentials1 := params.ForgeCredentials{ + ID: 1, + } + credentials2 := params.ForgeCredentials{ + ID: 2, + } + SetGiteaCredentials(credentials1) + SetGiteaCredentials(credentials2) + + cachedCreds := GetAllGiteaCredentials() + c.Require().Len(cachedCreds, 2) + c.Require().Contains(cachedCreds, credentials1) + c.Require().Contains(cachedCreds, credentials2) +} + +func (c *CacheTestSuite) TestDeleteGiteaCredentials() { + credentials := params.ForgeCredentials{ + ID: 1, + } + SetGiteaCredentials(credentials) + cachedCreds, ok := GetGiteaCredentials(1) + c.Require().True(ok) + c.Require().Equal(credentials.ID, cachedCreds.ID) + + DeleteGiteaCredentials(1) + cachedCreds, ok = GetGiteaCredentials(1) + c.Require().False(ok) + c.Require().Equal(params.ForgeCredentials{}, cachedCreds) +} + +func (c *CacheTestSuite) TestDeleteGiteaCredentialsNotFound() { + credentials := params.ForgeCredentials{ + ID: 1, + } + SetGiteaCredentials(credentials) + cachedCreds, ok := GetGiteaCredentials(1) + c.Require().True(ok) + c.Require().Equal(credentials.ID, cachedCreds.ID) + + DeleteGiteaCredentials(2) + cachedCreds, ok = GetGiteaCredentials(1) + c.Require().True(ok) + c.Require().Equal(credentials.ID, cachedCreds.ID) +} + +func (c *CacheTestSuite) 
TestUpdateCredentialsInAffectedEntities() { + credentials := params.ForgeCredentials{ + ID: 1, + Description: "test description", + } + entity1 := params.ForgeEntity{ + ID: "test-entity-1", + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + Owner: "test", + Credentials: credentials, + } + + entity2 := params.ForgeEntity{ + ID: "test-entity-2", + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + Owner: "test", + Credentials: credentials, + } + + SetEntity(entity1) + SetEntity(entity2) + + cachedEntity1, ok := GetEntity(entity1.ID) + c.Require().True(ok) + c.Require().Equal(entity1.ID, cachedEntity1.ID) + cachedEntity2, ok := GetEntity(entity2.ID) + c.Require().True(ok) + c.Require().Equal(entity2.ID, cachedEntity2.ID) + + c.Require().Equal(credentials.ID, cachedEntity1.Credentials.ID) + c.Require().Equal(credentials.ID, cachedEntity2.Credentials.ID) + c.Require().Equal(credentials.Description, cachedEntity1.Credentials.Description) + c.Require().Equal(credentials.Description, cachedEntity2.Credentials.Description) + + credentials.Description = "new description" + SetGiteaCredentials(credentials) + + cachedEntity1, ok = GetEntity(entity1.ID) + c.Require().True(ok) + c.Require().Equal(entity1.ID, cachedEntity1.ID) + cachedEntity2, ok = GetEntity(entity2.ID) + c.Require().True(ok) + c.Require().Equal(entity2.ID, cachedEntity2.ID) + + c.Require().Equal(credentials.ID, cachedEntity1.Credentials.ID) + c.Require().Equal(credentials.ID, cachedEntity2.Credentials.ID) + c.Require().Equal(credentials.Description, cachedEntity1.Credentials.Description) + c.Require().Equal(credentials.Description, cachedEntity2.Credentials.Description) +} + +func (c *CacheTestSuite) TestSetGiteaEntity() { + credentials := params.ForgeCredentials{ + ID: 1, + Description: "test description", + ForgeType: params.GiteaEndpointType, + } + entity := params.ForgeEntity{ + ID: "test-entity", + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + Owner: "test", + Credentials: credentials, + } + + SetGiteaCredentials(credentials) + SetEntity(entity) + + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + c.Require().Equal(credentials.ID, cachedEntity.Credentials.ID) + c.Require().Equal(credentials.Description, cachedEntity.Credentials.Description) + c.Require().Equal(credentials.ForgeType, cachedEntity.Credentials.ForgeType) +} + +func (c *CacheTestSuite) TestGetEntitiesUsingCredentials() { + credentials := params.ForgeCredentials{ + ID: 1, + Description: "test description", + Name: "test", + ForgeType: params.GithubEndpointType, + } + + credentials2 := params.ForgeCredentials{ + ID: 2, + Description: "test description2", + Name: "test", + ForgeType: params.GiteaEndpointType, + } + + entity1 := params.ForgeEntity{ + ID: "test-entity-1", + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + Owner: "test", + Credentials: credentials, + } + + entity2 := params.ForgeEntity{ + ID: "test-entity-2", + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + Owner: "test", + Credentials: credentials, + } + entity3 := params.ForgeEntity{ + ID: "test-entity-3", + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + Owner: "test", + Credentials: credentials2, + } + + SetEntity(entity1) + SetEntity(entity2) + SetEntity(entity3) + + cachedEntities := GetEntitiesUsingCredentials(credentials) + c.Require().Len(cachedEntities, 2) + c.Require().Contains(cachedEntities, entity1) + 
c.Require().Contains(cachedEntities, entity2) + + cachedEntities = GetEntitiesUsingCredentials(credentials2) + c.Require().Len(cachedEntities, 1) + c.Require().Contains(cachedEntities, entity3) +} + +func (c *CacheTestSuite) TestGetallEntities() { + credentials := params.ForgeCredentials{ + ID: 1, + Description: "test description", + Name: "test", + ForgeType: params.GithubEndpointType, + } + + credentials2 := params.ForgeCredentials{ + ID: 2, + Description: "test description2", + Name: "test", + ForgeType: params.GiteaEndpointType, + } + + entity1 := params.ForgeEntity{ + ID: "test-entity-1", + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + Owner: "test", + Credentials: credentials, + CreatedAt: time.Now(), + } + + entity2 := params.ForgeEntity{ + ID: "test-entity-2", + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + Owner: "test", + Credentials: credentials, + CreatedAt: time.Now().Add(1 * time.Second), + } + + entity3 := params.ForgeEntity{ + ID: "test-entity-3", + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + Owner: "test", + Credentials: credentials2, + CreatedAt: time.Now().Add(2 * time.Second), + } + + SetEntity(entity1) + SetEntity(entity2) + SetEntity(entity3) + + // Sorted by creation date + cachedEntities := GetAllEntities() + c.Require().Len(cachedEntities, 3) + c.Require().Equal(cachedEntities[0], entity1) + c.Require().Equal(cachedEntities[1], entity2) + c.Require().Equal(cachedEntities[2], entity3) +} + +func (c *CacheTestSuite) TestGetAllPools() { + entity := params.ForgeEntity{ + ID: "test-entity", + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + Owner: "test", + } + pool1 := params.Pool{ + ID: "pool-1", + CreatedAt: time.Now(), + Tags: []params.Tag{ + { + Name: "tag1", + }, + { + Name: "tag2", + }, + }, + } + + pool2 := params.Pool{ + ID: "pool-2", + CreatedAt: time.Now().Add(1 * time.Second), + Tags: []params.Tag{ + { + Name: "tag1", + }, + { + Name: "tag3", + }, + }, + } + + SetEntity(entity) + SetEntityPool(entity.ID, pool1) + SetEntityPool(entity.ID, pool2) + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + pools := GetAllPools() + c.Require().Len(pools, 2) + c.Require().Equal(pools[0].ID, pool1.ID) + c.Require().Equal(pools[1].ID, pool2.ID) +} + +func (c *CacheTestSuite) TestGetAllScaleSets() { + entity := params.ForgeEntity{ + ID: "test-entity", + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + Owner: "test", + } + scaleSet1 := params.ScaleSet{ + ID: 1, + } + scaleSet2 := params.ScaleSet{ + ID: 2, + } + + SetEntity(entity) + SetEntityScaleSet(entity.ID, scaleSet1) + SetEntityScaleSet(entity.ID, scaleSet2) + cachedEntity, ok := GetEntity(entity.ID) + c.Require().True(ok) + c.Require().Equal(entity.ID, cachedEntity.ID) + scaleSets := GetAllScaleSets() + c.Require().Len(scaleSets, 2) + c.Require().Equal(scaleSets[0].ID, scaleSet1.ID) + c.Require().Equal(scaleSets[1].ID, scaleSet2.ID) +} + +func (c *CacheTestSuite) TestGetAllGetAllGithubCredentialsAsMap() { + credentials1 := params.ForgeCredentials{ + ID: 1, + } + credentials2 := params.ForgeCredentials{ + ID: 2, + } + SetGithubCredentials(credentials1) + SetGithubCredentials(credentials2) + + cachedCreds := GetAllGithubCredentialsAsMap() + c.Require().Len(cachedCreds, 2) + c.Require().Contains(cachedCreds, credentials1.ID) + c.Require().Contains(cachedCreds, credentials2.ID) +} + +func (c *CacheTestSuite) TestGetAllGiteaCredentialsAsMap() { + credentials1 := 
params.ForgeCredentials{ + ID: 1, + CreatedAt: time.Now(), + } + credentials2 := params.ForgeCredentials{ + ID: 2, + CreatedAt: time.Now().Add(1 * time.Second), + } + SetGiteaCredentials(credentials1) + SetGiteaCredentials(credentials2) + + cachedCreds := GetAllGiteaCredentialsAsMap() + c.Require().Len(cachedCreds, 2) + c.Require().Contains(cachedCreds, credentials1.ID) + c.Require().Contains(cachedCreds, credentials2.ID) +} + func TestCacheTestSuite(t *testing.T) { t.Parallel() suite.Run(t, new(CacheTestSuite)) diff --git a/cache/tools_cache.go b/cache/tools_cache.go index 8675bec9..6e3c4636 100644 --- a/cache/tools_cache.go +++ b/cache/tools_cache.go @@ -26,13 +26,6 @@ type GithubEntityTools struct { tools []commonParams.RunnerApplicationDownload } -func (g GithubEntityTools) Error() string { - if g.err != nil { - return g.err.Error() - } - return "" -} - type GithubToolsCache struct { mux sync.Mutex // entity IDs are UUID4s. It is highly unlikely they will collide (🤞). @@ -51,7 +44,10 @@ func (g *GithubToolsCache) Get(entityID string) ([]commonParams.RunnerApplicatio return nil, fmt.Errorf("cache expired for entity %s", entityID) } } - return cache.tools, cache.err + if cache.err != nil { + return nil, cache.err + } + return cache.tools, nil } return nil, fmt.Errorf("no cache found for entity %s", entityID) } @@ -101,3 +97,7 @@ func SetGithubToolsCache(entity params.ForgeEntity, tools []commonParams.RunnerA func GetGithubToolsCache(entityID string) ([]commonParams.RunnerApplicationDownload, error) { return githubToolsCache.Get(entityID) } + +func SetGithubToolsCacheError(entity params.ForgeEntity, err error) { + githubToolsCache.SetToolsError(entity, err) +} diff --git a/database/watcher/filters.go b/database/watcher/filters.go index e462183d..7f7a7ab9 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -87,16 +87,17 @@ func WithEntityPoolFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFun } } -// WithEntityPoolFilter returns true if the change payload is a pool that belongs to the -// supplied Github entity. This is useful when an entity worker wants to watch for changes -// in pools that belong to it. +// WithEntityScaleSetFilter returns true if the change payload is a scale set that belongs to the +// supplied Github entity. func WithEntityScaleSetFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { forgeType, err := ghEntity.GetForgeType() if err != nil { return false } - if forgeType != params.GiteaEndpointType { + + // Gitea does not have scale sets. 
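// Editor's sketch of the error-first lookup pattern the tools cache change in
// this patch moves to: when an entry has a recorded error, Get returns that
// error and no data instead of returning both values. The types and names
// below are simplified assumptions for illustration, not the real cache package.
package main

import (
	"errors"
	"fmt"
	"sync"
)

type entry struct {
	tools []string
	err   error
}

type toolsCache struct {
	mu      sync.Mutex
	entries map[string]entry
}

func (c *toolsCache) Get(id string) ([]string, error) {
	c.mu.Lock()
	defer c.mu.Unlock()
	e, ok := c.entries[id]
	if !ok {
		return nil, fmt.Errorf("no cache found for entity %s", id)
	}
	if e.err != nil {
		// An error recorded for the entity wins over any stale tools.
		return nil, e.err
	}
	return e.tools, nil
}

func main() {
	c := &toolsCache{entries: map[string]entry{
		"ok":     {tools: []string{"runner-linux-x64"}},
		"broken": {err: errors.New("upstream returned 404")},
	}}
	fmt.Println(c.Get("ok"))
	fmt.Println(c.Get("broken"))
	fmt.Println(c.Get("missing"))
}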
+ if forgeType == params.GiteaEndpointType { return false } diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 86ce52f0..ca95867f 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -869,8 +869,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error jwtValidity := pool.RunnerTimeout() - entity := r.entity.String() - jwtToken, err := r.instanceTokenGetter.NewInstanceJWTToken(instance, entity, pool.PoolType(), jwtValidity) + jwtToken, err := r.instanceTokenGetter.NewInstanceJWTToken(instance, r.entity, pool.PoolType(), jwtValidity) if err != nil { return errors.Wrap(err, "fetching instance jwt token") } diff --git a/runner/runner.go b/runner/runner.go index 186799f6..aa55ee4f 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -663,17 +663,20 @@ func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, forgeType slog.DebugContext( r.ctx, "got hook for repo", "repo_owner", util.SanitizeLogEntry(job.Repository.Owner.Login), - "repo_name", util.SanitizeLogEntry(job.Repository.Name)) + "repo_name", util.SanitizeLogEntry(job.Repository.Name), + "endpoint", endpoint.Name) poolManager, err = r.findRepoPoolManager(job.Repository.Owner.Login, job.Repository.Name, endpoint.Name) case OrganizationHook: slog.DebugContext( r.ctx, "got hook for organization", - "organization", util.SanitizeLogEntry(job.GetOrgName(forgeType))) + "organization", util.SanitizeLogEntry(job.GetOrgName(forgeType)), + "endpoint", endpoint.Name) poolManager, err = r.findOrgPoolManager(job.GetOrgName(forgeType), endpoint.Name) case EnterpriseHook: slog.DebugContext( r.ctx, "got hook for enterprise", - "enterprise", util.SanitizeLogEntry(job.Enterprise.Slug)) + "enterprise", util.SanitizeLogEntry(job.Enterprise.Slug), + "endpoint", endpoint.Name) poolManager, err = r.findEnterprisePoolManager(job.Enterprise.Slug, endpoint.Name) default: return runnerErrors.NewBadRequestError("cannot handle hook target type %s", hookTargetType) diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go index d0e61b72..3900abaf 100644 --- a/workers/provider/instance_manager.go +++ b/workers/provider/instance_manager.go @@ -144,7 +144,7 @@ func (i *instanceManager) handleCreateInstanceInProvider(instance params.Instanc } token, err := i.helper.InstanceTokenGetter().NewInstanceJWTToken( - instance, entity.String(), entity.EntityType, i.scaleSet.RunnerBootstrapTimeout) + instance, entity, entity.EntityType, i.scaleSet.RunnerBootstrapTimeout) if err != nil { return fmt.Errorf("creating instance token: %w", err) } From f0753eeb22f3f41a68a204bfe9adcce830112589 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 20 May 2025 09:19:19 +0000 Subject: [PATCH 073/179] Add more tests Signed-off-by: Gabriel Adrian Samfira --- database/common/store.go | 2 +- database/sql/enterprise.go | 24 +- database/sql/enterprise_test.go | 8 +- database/sql/gitea.go | 46 +- database/sql/gitea_test.go | 793 +++++++++++++++++++++++++ database/sql/github.go | 2 +- database/sql/github_test.go | 64 +- database/sql/organizations_test.go | 60 ++ database/sql/repositories_test.go | 66 ++ database/sql/scalesets_test.go | 2 +- database/watcher/watcher_store_test.go | 2 +- internal/testing/testing.go | 41 ++ runner/enterprises.go | 2 +- runner/enterprises_test.go | 2 +- 14 files changed, 1060 insertions(+), 54 deletions(-) create mode 100644 database/sql/gitea_test.go diff --git a/database/common/store.go b/database/common/store.go index 7da0e5bb..db5fbb04 100644 --- 
a/database/common/store.go +++ b/database/common/store.go @@ -56,7 +56,7 @@ type OrgStore interface { } type EnterpriseStore interface { - CreateEnterprise(ctx context.Context, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Enterprise, error) + CreateEnterprise(ctx context.Context, name string, credentialsName params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Enterprise, error) GetEnterprise(ctx context.Context, name, endpointName string) (params.Enterprise, error) GetEnterpriseByID(ctx context.Context, enterpriseID string) (params.Enterprise, error) ListEnterprises(ctx context.Context) ([]params.Enterprise, error) diff --git a/database/sql/enterprise.go b/database/sql/enterprise.go index 26406ac5..e9c2ed08 100644 --- a/database/sql/enterprise.go +++ b/database/sql/enterprise.go @@ -28,10 +28,14 @@ import ( "github.com/cloudbase/garm/params" ) -func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (paramEnt params.Enterprise, err error) { +func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (paramEnt params.Enterprise, err error) { if webhookSecret == "" { return params.Enterprise{}, errors.New("creating enterprise: missing secret") } + if credentials.ForgeType != params.GithubEndpointType { + return params.Enterprise{}, errors.Wrap(runnerErrors.ErrBadRequest, "enterprises are not supported on this forge type") + } + secret, err := util.Seal([]byte(webhookSecret), []byte(s.cfg.Passphrase)) if err != nil { return params.Enterprise{}, errors.Wrap(err, "encoding secret") @@ -48,24 +52,18 @@ func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name, credentialsNam PoolBalancerType: poolBalancerType, } err = s.conn.Transaction(func(tx *gorm.DB) error { - creds, err := s.getGithubCredentialsByName(ctx, tx, credentialsName, false) - if err != nil { - return errors.Wrap(err, "creating enterprise") - } - if creds.EndpointName == nil { - return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") - } - newEnterprise.CredentialsID = &creds.ID - newEnterprise.EndpointName = creds.EndpointName + newEnterprise.CredentialsID = &credentials.ID + newEnterprise.EndpointName = &credentials.Endpoint.Name q := tx.Create(&newEnterprise) if q.Error != nil { return errors.Wrap(q.Error, "creating enterprise") } - newEnterprise.Credentials = creds - newEnterprise.Endpoint = creds.Endpoint - + newEnterprise, err = s.getEnterpriseByID(ctx, tx, newEnterprise.ID.String(), "Pools", "Credentials", "Endpoint", "Credentials.Endpoint") + if err != nil { + return errors.Wrap(err, "creating enterprise") + } return nil }) if err != nil { diff --git a/database/sql/enterprise_test.go b/database/sql/enterprise_test.go index 4971f78f..9d81287d 100644 --- a/database/sql/enterprise_test.go +++ b/database/sql/enterprise_test.go @@ -99,7 +99,7 @@ func (s *EnterpriseTestSuite) SetupTest() { enterprise, err := db.CreateEnterprise( s.adminCtx, fmt.Sprintf("test-enterprise-%d", i), - s.testCreds.Name, + s.testCreds, fmt.Sprintf("test-webhook-secret-%d", i), params.PoolBalancerTypeRoundRobin, ) @@ -178,7 +178,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprise() { enterprise, err := s.Store.CreateEnterprise( s.adminCtx, s.Fixtures.CreateEnterpriseParams.Name, - 
s.Fixtures.CreateEnterpriseParams.CredentialsName, + s.testCreds, s.Fixtures.CreateEnterpriseParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) @@ -209,7 +209,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterpriseInvalidDBPassphrase() { _, err = sqlDB.CreateEnterprise( s.adminCtx, s.Fixtures.CreateEnterpriseParams.Name, - s.Fixtures.CreateEnterpriseParams.CredentialsName, + s.testCreds, s.Fixtures.CreateEnterpriseParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) @@ -235,7 +235,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterpriseDBCreateErr() { _, err := s.StoreSQLMocked.CreateEnterprise( s.adminCtx, s.Fixtures.CreateEnterpriseParams.Name, - s.Fixtures.CreateEnterpriseParams.CredentialsName, + s.testCreds, s.Fixtures.CreateEnterpriseParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) diff --git a/database/sql/gitea.go b/database/sql/gitea.go index 27084a0a..79680287 100644 --- a/database/sql/gitea.go +++ b/database/sql/gitea.go @@ -22,7 +22,7 @@ func (s *sqlDatabase) CreateGiteaEndpoint(_ context.Context, param params.Create var endpoint GithubEndpoint err = s.conn.Transaction(func(tx *gorm.DB) error { if err := tx.Where("name = ?", param.Name).First(&endpoint).Error; err == nil { - return errors.Wrap(runnerErrors.ErrDuplicateEntity, "github endpoint already exists") + return errors.Wrap(runnerErrors.ErrDuplicateEntity, "gitea endpoint already exists") } endpoint = GithubEndpoint{ Name: param.Name, @@ -34,16 +34,16 @@ func (s *sqlDatabase) CreateGiteaEndpoint(_ context.Context, param params.Create } if err := tx.Create(&endpoint).Error; err != nil { - return errors.Wrap(err, "creating github endpoint") + return errors.Wrap(err, "creating gitea endpoint") } return nil }) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "creating github endpoint") + return params.ForgeEndpoint{}, errors.Wrap(err, "creating gitea endpoint") } ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "converting github endpoint") + return params.ForgeEndpoint{}, errors.Wrap(err, "converting gitea endpoint") } return ghEndpoint, nil } @@ -52,14 +52,14 @@ func (s *sqlDatabase) ListGiteaEndpoints(_ context.Context) ([]params.ForgeEndpo var endpoints []GithubEndpoint err := s.conn.Where("endpoint_type = ?", params.GiteaEndpointType).Find(&endpoints).Error if err != nil { - return nil, errors.Wrap(err, "fetching github endpoints") + return nil, errors.Wrap(err, "fetching gitea endpoints") } var ret []params.ForgeEndpoint for _, ep := range endpoints { commonEp, err := s.sqlToCommonGithubEndpoint(ep) if err != nil { - return nil, errors.Wrap(err, "converting github endpoint") + return nil, errors.Wrap(err, "converting gitea endpoint") } ret = append(ret, commonEp) } @@ -67,10 +67,6 @@ func (s *sqlDatabase) ListGiteaEndpoints(_ context.Context) ([]params.ForgeEndpo } func (s *sqlDatabase) UpdateGiteaEndpoint(_ context.Context, name string, param params.UpdateGiteaEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) { - if name == defaultGithubEndpoint { - return params.ForgeEndpoint{}, runnerErrors.NewBadRequestError("cannot update default endpoint %s", defaultGithubEndpoint) - } - defer func() { if err == nil { s.sendNotify(common.GithubEndpointEntityType, common.UpdateOperation, ghEndpoint) @@ -118,7 +114,6 @@ func (s *sqlDatabase) UpdateGiteaEndpoint(_ context.Context, name string, param func (s *sqlDatabase) GetGiteaEndpoint(_ context.Context, name string) (params.ForgeEndpoint, error) { var endpoint 
GithubEndpoint - err := s.conn.Where("name = ? and endpoint_type = ?", name, params.GiteaEndpointType).First(&endpoint).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { @@ -150,7 +145,7 @@ func (s *sqlDatabase) DeleteGiteaEndpoint(_ context.Context, name string) (err e } var credsCount int64 - if err := tx.Model(&GithubCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { + if err := tx.Model(&GiteaCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { return errors.Wrap(err, "fetching gitea credentials") } @@ -170,15 +165,8 @@ func (s *sqlDatabase) DeleteGiteaEndpoint(_ context.Context, name string) (err e } } - var entCnt int64 - if err := tx.Model(&Enterprise{}).Where("endpoint_name = ?", endpoint.Name).Count(&entCnt).Error; err != nil { - if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "fetching gitea enterprises") - } - } - - if credsCount > 0 || repoCnt > 0 || orgCnt > 0 || entCnt > 0 { - return errors.New("cannot delete endpoint with associated entities") + if credsCount > 0 || repoCnt > 0 || orgCnt > 0 { + return runnerErrors.NewBadRequestError("cannot delete endpoint with associated entities") } if err := tx.Unscoped().Delete(&endpoint).Error; err != nil { @@ -195,7 +183,7 @@ func (s *sqlDatabase) DeleteGiteaEndpoint(_ context.Context, name string) (err e func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.CreateGiteaCredentialsParams) (gtCreds params.ForgeCredentials, err error) { userID, err := getUIDFromContext(ctx) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "creating github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "creating gitea credentials") } if param.Endpoint == "" { return params.ForgeCredentials{}, errors.Wrap(runnerErrors.ErrBadRequest, "endpoint name is required") @@ -211,13 +199,13 @@ func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.C var endpoint GithubEndpoint if err := tx.Where("name = ? and endpoint_type = ?", param.Endpoint, params.GiteaEndpointType).First(&endpoint).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found") + return errors.Wrap(runnerErrors.ErrNotFound, "gitea endpoint not found") } - return errors.Wrap(err, "fetching github endpoint") + return errors.Wrap(err, "fetching gitea endpoint") } if err := tx.Where("name = ? and user_id = ?", param.Name, userID).First(&creds).Error; err == nil { - return errors.Wrap(runnerErrors.ErrDuplicateEntity, "github credentials already exists") + return errors.Wrap(runnerErrors.ErrDuplicateEntity, "gitea credentials already exists") } var data []byte @@ -225,8 +213,6 @@ func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.C switch param.AuthType { case params.ForgeAuthTypePAT: data, err = s.marshalAndSeal(param.PAT) - case params.ForgeAuthTypeApp: - data, err = s.marshalAndSeal(param.App) default: return errors.Wrap(runnerErrors.ErrBadRequest, "invalid auth type") } @@ -244,7 +230,7 @@ func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.C } if err := tx.Create(&creds).Error; err != nil { - return errors.Wrap(err, "creating github credentials") + return errors.Wrap(err, "creating gitea credentials") } // Skip making an extra query. 
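// Editor's sketch of what a "marshal and seal" style helper generally does
// before credential material is written to the database: serialize the secret
// and encrypt it with a key derived from the service passphrase. This is NOT
// the real s.marshalAndSeal/util.Seal implementation; the AES-GCM scheme and
// the toy key derivation below are assumptions made purely to illustrate the idea.
package main

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"crypto/sha256"
	"encoding/json"
	"fmt"
)

type pat struct {
	OAuth2Token string `json:"oauth2_token"`
}

func marshalAndSeal(passphrase string, v any) ([]byte, error) {
	plain, err := json.Marshal(v)
	if err != nil {
		return nil, err
	}
	key := sha256.Sum256([]byte(passphrase)) // toy key derivation for the example
	block, err := aes.NewCipher(key[:])
	if err != nil {
		return nil, err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return nil, err
	}
	nonce := make([]byte, gcm.NonceSize())
	if _, err := rand.Read(nonce); err != nil {
		return nil, err
	}
	// Prepend the nonce so the sealed blob is self-describing for later unsealing.
	return gcm.Seal(nonce, nonce, plain, nil), nil
}

func main() {
	sealed, err := marshalAndSeal("super-secret-passphrase", pat{OAuth2Token: "test"})
	if err != nil {
		panic(err)
	}
	fmt.Printf("sealed blob is %d bytes\n", len(sealed))
}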
creds.Endpoint = endpoint @@ -252,11 +238,11 @@ func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.C return nil }) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "creating github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "creating gitea credentials") } gtCreds, err = s.sqlGiteaToCommonForgeCredentials(creds) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "converting github credentials") + return params.ForgeCredentials{}, errors.Wrap(err, "converting gitea credentials") } return gtCreds, nil } diff --git a/database/sql/gitea_test.go b/database/sql/gitea_test.go new file mode 100644 index 00000000..c7a96d66 --- /dev/null +++ b/database/sql/gitea_test.go @@ -0,0 +1,793 @@ +// Copyright 2024 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package sql + +import ( + "context" + "fmt" + "os" + "testing" + + "github.com/stretchr/testify/suite" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/database/common" + garmTesting "github.com/cloudbase/garm/internal/testing" + "github.com/cloudbase/garm/params" +) + +type GiteaTestSuite struct { + suite.Suite + + giteaEndpoint params.ForgeEndpoint + db common.Store +} + +func (s *GiteaTestSuite) SetupTest() { + db, err := NewSQLDatabase(context.Background(), garmTesting.GetTestSqliteDBConfig(s.T())) + if err != nil { + s.FailNow(fmt.Sprintf("failed to create db connection: %s", err)) + } + + s.db = db + + createEpParams := params.CreateGiteaEndpointParams{ + Name: testEndpointName, + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + BaseURL: testBaseURL, + } + endpoint, err := s.db.CreateGiteaEndpoint(context.Background(), createEpParams) + s.Require().NoError(err) + s.Require().NotNil(endpoint) + s.Require().Equal(testEndpointName, endpoint.Name) + s.giteaEndpoint = endpoint +} + +func (s *GiteaTestSuite) TestCreatingEndpoint() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGiteaEndpointParams{ + Name: testEndpointName, + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + BaseURL: testBaseURL, + } + + endpoint, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) + s.Require().NoError(err) + s.Require().NotNil(endpoint) + s.Require().Equal(testEndpointName, endpoint.Name) +} + +func (s *GiteaTestSuite) TestCreatingDuplicateEndpointFails() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGiteaEndpointParams{ + Name: testEndpointName, + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + BaseURL: testBaseURL, + } + + _, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) + s.Require().NoError(err) + + _, err = s.db.CreateGiteaEndpoint(ctx, createEpParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrDuplicateEntity) +} + +func (s 
*GiteaTestSuite) TestGetEndpoint() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGiteaEndpointParams{ + Name: "deleteme", + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + BaseURL: testBaseURL, + } + + newEndpoint, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) + s.Require().NoError(err) + + endpoint, err := s.db.GetGiteaEndpoint(ctx, createEpParams.Name) + s.Require().NoError(err) + s.Require().NotNil(endpoint) + s.Require().Equal(newEndpoint.Name, endpoint.Name) +} + +func (s *GiteaTestSuite) TestGetNonExistingEndpointFailsWithNotFoundError() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + _, err := s.db.GetGiteaEndpoint(ctx, "non-existing") + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestDeletingNonExistingEndpointIsANoop() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + err := s.db.DeleteGiteaEndpoint(ctx, "non-existing") + s.Require().NoError(err) +} + +func (s *GiteaTestSuite) TestDeletingEndpoint() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGiteaEndpointParams{ + Name: testEndpointName, + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + BaseURL: testBaseURL, + } + + endpoint, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) + s.Require().NoError(err) + s.Require().NotNil(endpoint) + + err = s.db.DeleteGiteaEndpoint(ctx, testEndpointName) + s.Require().NoError(err) + + _, err = s.db.GetGiteaEndpoint(ctx, testEndpointName) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestUpdateEndpoint() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGiteaEndpointParams{ + Name: "deleteme", + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + BaseURL: testBaseURL, + } + + endpoint, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) + s.Require().NoError(err) + s.Require().NotNil(endpoint) + + newDescription := "new description" + newAPIBaseURL := "https://new-api.example.com" + newBaseURL := "https://new.example.com" + caCertBundle, err := os.ReadFile("../../testdata/certs/srv-pub.pem") + s.Require().NoError(err) + updateEpParams := params.UpdateGiteaEndpointParams{ + Description: &newDescription, + APIBaseURL: &newAPIBaseURL, + BaseURL: &newBaseURL, + CACertBundle: caCertBundle, + } + + updatedEndpoint, err := s.db.UpdateGiteaEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().NoError(err) + s.Require().NotNil(updatedEndpoint) + s.Require().Equal(newDescription, updatedEndpoint.Description) + s.Require().Equal(newAPIBaseURL, updatedEndpoint.APIBaseURL) + s.Require().Equal(newBaseURL, updatedEndpoint.BaseURL) + s.Require().Equal(caCertBundle, updatedEndpoint.CACertBundle) +} + +func (s *GiteaTestSuite) TestUpdatingNonExistingEndpointReturnsNotFoundError() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + newDescription := "test" + updateEpParams := params.UpdateGiteaEndpointParams{ + Description: &newDescription, + } + + _, err := s.db.UpdateGiteaEndpoint(ctx, "non-existing", updateEpParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestListEndpoints() { + ctx := 
garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGiteaEndpointParams{ + Name: testEndpointName, + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + BaseURL: testBaseURL, + } + + _, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) + s.Require().NoError(err) + + endpoints, err := s.db.ListGiteaEndpoints(ctx) + s.Require().NoError(err) + s.Require().Len(endpoints, 1) +} + +func (s *GiteaTestSuite) TestCreateCredentialsFailsWithUnauthorizedForAnonUser() { + ctx := context.Background() + + _, err := s.db.CreateGiteaCredentials(ctx, params.CreateGiteaCredentialsParams{}) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrUnauthorized) +} + +func (s *GiteaTestSuite) TestCreateCredentialsFailsWhenEndpointNameIsEmpty() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + _, err := s.db.CreateGiteaCredentials(ctx, params.CreateGiteaCredentialsParams{}) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().Regexp("endpoint name is required", err.Error()) +} + +func (s *GiteaTestSuite) TestCreateCredentialsFailsWhenEndpointDoesNotExist() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + _, err := s.db.CreateGiteaCredentials(ctx, params.CreateGiteaCredentialsParams{Endpoint: "non-existing"}) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) + s.Require().Regexp("endpoint not found", err.Error()) +} + +func (s *GiteaTestSuite) TestCreateCredentialsFailsWhenAuthTypeIsInvalid() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + _, err := s.db.CreateGiteaCredentials(ctx, params.CreateGiteaCredentialsParams{Endpoint: s.giteaEndpoint.Name, AuthType: "invalid"}) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().Regexp("invalid auth type", err.Error()) +} + +func (s *GiteaTestSuite) TestCreateCredentials() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + creds, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + s.Require().Equal(credParams.Name, creds.Name) + s.Require().Equal(credParams.Description, creds.Description) + s.Require().Equal(credParams.Endpoint, creds.Endpoint.Name) + s.Require().Equal(credParams.AuthType, creds.AuthType) +} + +func (s *GiteaTestSuite) TestCreateCredentialsFailsOnDuplicateCredentials() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + testUser := garmTesting.CreateGARMTestUser(ctx, "testuser", s.db, s.T()) + testUserCtx := auth.PopulateContext(context.Background(), testUser, nil) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + _, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + + // Creating creds with the same parameters should fail for the same user. 
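// Editor's sketch of the uniqueness rule this test exercises: credential names
// only need to be unique per user, so two different users can both own creds
// named "test-creds". This is a simplified in-memory model with assumed names,
// not the real GORM-backed store.
package main

import (
	"errors"
	"fmt"
)

var errDuplicate = errors.New("duplicate entity")

type credKey struct {
	UserID string
	Name   string
}

type credStore map[credKey]struct{}

// Create rejects a second credential with the same name for the same user,
// but allows the same name under a different user.
func (c credStore) Create(userID, name string) error {
	k := credKey{UserID: userID, Name: name}
	if _, ok := c[k]; ok {
		return errDuplicate
	}
	c[k] = struct{}{}
	return nil
}

func main() {
	store := credStore{}
	fmt.Println(store.Create("admin", "test-creds"))    // <nil>
	fmt.Println(store.Create("admin", "test-creds"))    // duplicate entity
	fmt.Println(store.Create("testuser", "test-creds")) // <nil>
}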
+ _, err = s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrDuplicateEntity) + + // Creating creds with the same parameters should work for different users. + _, err = s.db.CreateGiteaCredentials(testUserCtx, credParams) + s.Require().NoError(err) +} + +func (s *GiteaTestSuite) TestNormalUsersCanOnlySeeTheirOwnCredentialsAdminCanSeeAll() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + testUser := garmTesting.CreateGARMTestUser(ctx, "testuser1", s.db, s.T()) + testUser2 := garmTesting.CreateGARMTestUser(ctx, "testuser2", s.db, s.T()) + testUserCtx := auth.PopulateContext(context.Background(), testUser, nil) + testUser2Ctx := auth.PopulateContext(context.Background(), testUser2, nil) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + creds, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + credParams.Name = "test-creds2" + creds2, err := s.db.CreateGiteaCredentials(testUserCtx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds2) + + credParams.Name = "test-creds3" + creds3, err := s.db.CreateGiteaCredentials(testUser2Ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds3) + + credsList, err := s.db.ListGiteaCredentials(ctx) + s.Require().NoError(err) + s.Require().Len(credsList, 3) + + credsList, err = s.db.ListGiteaCredentials(testUserCtx) + s.Require().NoError(err) + s.Require().Len(credsList, 1) + s.Require().Equal("test-creds2", credsList[0].Name) + + credsList, err = s.db.ListGiteaCredentials(testUser2Ctx) + s.Require().NoError(err) + s.Require().Len(credsList, 1) + s.Require().Equal("test-creds3", credsList[0].Name) +} + +func (s *GiteaTestSuite) TestGetGiteaCredentialsFailsWhenCredentialsDontExist() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + _, err := s.db.GetGiteaCredentials(ctx, 1, true) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) + + _, err = s.db.GetGiteaCredentialsByName(ctx, "non-existing", true) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestGetGithubCredentialsByNameReturnsOnlyCurrentUserCredentials() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + testUser := garmTesting.CreateGARMTestUser(ctx, "test-user1", s.db, s.T()) + testUserCtx := auth.PopulateContext(context.Background(), testUser, nil) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + creds, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + creds2, err := s.db.CreateGiteaCredentials(testUserCtx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds2) + + creds2Get, err := s.db.GetGiteaCredentialsByName(testUserCtx, testCredsName, true) + s.Require().NoError(err) + s.Require().NotNil(creds2) + s.Require().Equal(testCredsName, creds2Get.Name) + s.Require().Equal(creds2.ID, creds2Get.ID) + + credsGet, err := s.db.GetGiteaCredentialsByName(ctx, testCredsName, true) + s.Require().NoError(err) + 
s.Require().NotNil(creds) + s.Require().Equal(testCredsName, credsGet.Name) + s.Require().Equal(creds.ID, credsGet.ID) + + // Admin can get any creds by ID + credsGet, err = s.db.GetGiteaCredentials(ctx, creds2.ID, true) + s.Require().NoError(err) + s.Require().NotNil(creds2) + s.Require().Equal(creds2.ID, credsGet.ID) + + // Normal user cannot get other user creds by ID + _, err = s.db.GetGiteaCredentials(testUserCtx, creds.ID, true) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestGetGithubCredentials() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + creds, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + creds2, err := s.db.GetGiteaCredentialsByName(ctx, testCredsName, true) + s.Require().NoError(err) + s.Require().NotNil(creds2) + s.Require().Equal(creds.Name, creds2.Name) + s.Require().Equal(creds.ID, creds2.ID) + + creds2, err = s.db.GetGiteaCredentials(ctx, creds.ID, true) + s.Require().NoError(err) + s.Require().NotNil(creds2) + s.Require().Equal(creds.Name, creds2.Name) + s.Require().Equal(creds.ID, creds2.ID) +} + +func (s *GiteaTestSuite) TestDeleteGiteaCredentials() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + creds, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + err = s.db.DeleteGiteaCredentials(ctx, creds.ID) + s.Require().NoError(err) + + _, err = s.db.GetGiteaCredentials(ctx, creds.ID, true) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestDeleteGiteaCredentialsByNonAdminUser() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + testUser := garmTesting.CreateGARMTestUser(ctx, "test-user4", s.db, s.T()) + testUserCtx := auth.PopulateContext(context.Background(), testUser, nil) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test-creds4", + }, + } + + // Create creds as admin + creds, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + // Deleting non existent creds will return a nil error. For the test user + // the creds created by the admin should not be visible, which leads to not found + // which in turn returns no error. + err = s.db.DeleteGiteaCredentials(testUserCtx, creds.ID) + s.Require().NoError(err) + + // Check that the creds created by the admin are still there. + credsGet, err := s.db.GetGiteaCredentials(ctx, creds.ID, true) + s.Require().NoError(err) + s.Require().NotNil(credsGet) + s.Require().Equal(creds.ID, credsGet.ID) + + // Create the same creds with the test user. 
+ creds2, err := s.db.CreateGiteaCredentials(testUserCtx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds2) + + // Remove creds created by test user. + err = s.db.DeleteGiteaCredentials(testUserCtx, creds2.ID) + s.Require().NoError(err) + + // The creds created by the test user should be gone. + _, err = s.db.GetGiteaCredentials(testUserCtx, creds2.ID, true) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestDeleteCredentialsFailsIfReposOrgsOrEntitiesUseIt() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + creds, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotNil(repo) + + err = s.db.DeleteGiteaCredentials(ctx, creds.ID) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + + err = s.db.DeleteRepository(ctx, repo.ID) + s.Require().NoError(err) + + org, err := s.db.CreateOrganization(ctx, "test-org", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotNil(org) + + err = s.db.DeleteGiteaCredentials(ctx, creds.ID) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + + err = s.db.DeleteOrganization(ctx, org.ID) + s.Require().NoError(err) + + enterprise, err := s.db.CreateEnterprise(ctx, "test-enterprise", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().Equal(params.Enterprise{}, enterprise) + + err = s.db.DeleteGiteaCredentials(ctx, creds.ID) + s.Require().NoError(err) + + _, err = s.db.GetGiteaCredentials(ctx, creds.ID, true) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestUpdateCredentials() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + creds, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + newDescription := "new description" + newName := "new-name" + newToken := "new-token" + updateCredParams := params.UpdateGiteaCredentialsParams{ + Description: &newDescription, + Name: &newName, + PAT: ¶ms.GithubPAT{ + OAuth2Token: newToken, + }, + } + + updatedCreds, err := s.db.UpdateGiteaCredentials(ctx, creds.ID, updateCredParams) + s.Require().NoError(err) + s.Require().NotNil(updatedCreds) + s.Require().Equal(newDescription, updatedCreds.Description) + s.Require().Equal(newName, updatedCreds.Name) +} + +func (s *GiteaTestSuite) TestUpdateCredentialsFailsForNonExistingCredentials() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + updateCredParams := params.UpdateGiteaCredentialsParams{ + Description: nil, + } + + _, err := s.db.UpdateGiteaCredentials(ctx, 1, 
updateCredParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestUpdateCredentialsFailsIfCredentialsAreOwnedByNonAdminUser() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + testUser := garmTesting.CreateGARMTestUser(ctx, "test-user5", s.db, s.T()) + testUserCtx := auth.PopulateContext(context.Background(), testUser, nil) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test-creds5", + }, + } + + creds, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + newDescription := "new description2" + updateCredParams := params.UpdateGiteaCredentialsParams{ + Description: &newDescription, + } + + _, err = s.db.UpdateGiteaCredentials(testUserCtx, creds.ID, updateCredParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestAdminUserCanUpdateAnyGiteaCredentials() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + testUser := garmTesting.CreateGARMTestUser(ctx, "test-user5", s.db, s.T()) + testUserCtx := auth.PopulateContext(context.Background(), testUser, nil) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test-creds5", + }, + } + + creds, err := s.db.CreateGiteaCredentials(testUserCtx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + newDescription := "new description2" + updateCredParams := params.UpdateGiteaCredentialsParams{ + Description: &newDescription, + } + + newCreds, err := s.db.UpdateGiteaCredentials(ctx, creds.ID, updateCredParams) + s.Require().NoError(err) + s.Require().Equal(newDescription, newCreds.Description) +} + +func (s *GiteaTestSuite) TestDeleteCredentialsWithOrgsOrReposFails() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: s.giteaEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test-creds5", + }, + } + + creds, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotNil(repo) + + err = s.db.DeleteGiteaCredentials(ctx, creds.ID) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + + err = s.db.DeleteRepository(ctx, repo.ID) + s.Require().NoError(err) + + org, err := s.db.CreateOrganization(ctx, "test-org", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotNil(org) + + err = s.db.DeleteGiteaCredentials(ctx, creds.ID) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + + err = s.db.DeleteOrganization(ctx, org.ID) + s.Require().NoError(err) + + err = s.db.DeleteGiteaCredentials(ctx, creds.ID) + s.Require().NoError(err) + + _, err = s.db.GetGiteaCredentials(ctx, creds.ID, true) + s.Require().Error(err) + 
s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestDeleteGiteaEndpointFailsWithOrgsReposOrCredentials() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + endpointParams := params.CreateGiteaEndpointParams{ + Name: "deleteme", + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + BaseURL: testBaseURL, + } + + ep, err := s.db.CreateGiteaEndpoint(ctx, endpointParams) + s.Require().NoError(err) + s.Require().NotNil(ep) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: ep.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test-creds5", + }, + } + + creds, err := s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotNil(repo) + + badRequest := &runnerErrors.BadRequestError{} + err = s.db.DeleteGiteaEndpoint(ctx, ep.Name) + s.Require().Error(err) + s.Require().ErrorAs(err, &badRequest) + + err = s.db.DeleteRepository(ctx, repo.ID) + s.Require().NoError(err) + + org, err := s.db.CreateOrganization(ctx, "test-org", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotNil(org) + + err = s.db.DeleteGiteaEndpoint(ctx, ep.Name) + s.Require().Error(err) + s.Require().ErrorAs(err, &badRequest) + + err = s.db.DeleteOrganization(ctx, org.ID) + s.Require().NoError(err) + + err = s.db.DeleteGiteaCredentials(ctx, creds.ID) + s.Require().NoError(err) + + err = s.db.DeleteGiteaEndpoint(ctx, ep.Name) + s.Require().NoError(err) + + _, err = s.db.GetGiteaEndpoint(ctx, ep.Name) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + +func (s *GiteaTestSuite) TestListGiteaEndpoints() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGiteaEndpointParams{ + Name: "deleteme", + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + BaseURL: testBaseURL, + } + + _, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) + s.Require().NoError(err) + + endpoints, err := s.db.ListGiteaEndpoints(ctx) + s.Require().NoError(err) + s.Require().Len(endpoints, 2) +} + +func TestGiteaTestSuite(t *testing.T) { + suite.Run(t, new(GiteaTestSuite)) +} diff --git a/database/sql/github.go b/database/sql/github.go index 08d6bdb3..8dd20225 100644 --- a/database/sql/github.go +++ b/database/sql/github.go @@ -200,7 +200,7 @@ func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err } if credsCount > 0 || repoCnt > 0 || orgCnt > 0 || entCnt > 0 { - return errors.New("cannot delete endpoint with associated entities") + return runnerErrors.NewBadRequestError("cannot delete endpoint with associated entities") } if err := tx.Unscoped().Delete(&endpoint).Error; err != nil { diff --git a/database/sql/github_test.go b/database/sql/github_test.go index 2e6eb507..f2e83fec 100644 --- a/database/sql/github_test.go +++ b/database/sql/github_test.go @@ -555,7 +555,7 @@ func (s *GithubTestSuite) TestDeleteCredentialsFailsIfReposOrgsOrEntitiesUseIt() err = s.db.DeleteOrganization(ctx, org.ID) s.Require().NoError(err) - enterprise, err := s.db.CreateEnterprise(ctx, "test-enterprise", creds.Name, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) 
+ enterprise, err := s.db.CreateEnterprise(ctx, "test-enterprise", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotNil(enterprise) @@ -737,6 +737,68 @@ func (s *GithubTestSuite) TestAdminUserCanUpdateAnyGithubCredentials() { s.Require().Equal(newDescription, newCreds.Description) } +func (s *GithubTestSuite) TestDeleteGithubEndpointFailsWithOrgsReposOrCredentials() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + endpointParams := params.CreateGithubEndpointParams{ + Name: "deleteme", + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + BaseURL: testBaseURL, + } + + ep, err := s.db.CreateGithubEndpoint(ctx, endpointParams) + s.Require().NoError(err) + s.Require().NotNil(ep) + + credParams := params.CreateGithubCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: ep.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test-creds5", + }, + } + + creds, err := s.db.CreateGithubCredentials(ctx, credParams) + s.Require().NoError(err) + s.Require().NotNil(creds) + + repo, err := s.db.CreateRepository(ctx, "test-owner", "test-repo", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotNil(repo) + + badRequest := &runnerErrors.BadRequestError{} + err = s.db.DeleteGithubEndpoint(ctx, ep.Name) + s.Require().Error(err) + s.Require().ErrorAs(err, &badRequest) + + err = s.db.DeleteRepository(ctx, repo.ID) + s.Require().NoError(err) + + org, err := s.db.CreateOrganization(ctx, "test-org", creds, "superSecret@123BlaBla", params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + s.Require().NotNil(org) + + err = s.db.DeleteGithubEndpoint(ctx, ep.Name) + s.Require().Error(err) + s.Require().ErrorAs(err, &badRequest) + + err = s.db.DeleteOrganization(ctx, org.ID) + s.Require().NoError(err) + + err = s.db.DeleteGithubCredentials(ctx, creds.ID) + s.Require().NoError(err) + + err = s.db.DeleteGithubEndpoint(ctx, ep.Name) + s.Require().NoError(err) + + _, err = s.db.GetGithubEndpoint(ctx, ep.Name) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrNotFound) +} + func TestGithubTestSuite(t *testing.T) { suite.Run(t, new(GithubTestSuite)) } diff --git a/database/sql/organizations_test.go b/database/sql/organizations_test.go index 3c2ba337..a93ef372 100644 --- a/database/sql/organizations_test.go +++ b/database/sql/organizations_test.go @@ -54,8 +54,10 @@ type OrgTestSuite struct { adminUserID string testCreds params.ForgeCredentials + testCredsGitea params.ForgeCredentials secondaryTestCreds params.ForgeCredentials githubEndpoint params.ForgeEndpoint + giteaEndpoint params.ForgeEndpoint } func (s *OrgTestSuite) equalInstancesByName(expected, actual []params.Instance) { @@ -91,7 +93,9 @@ func (s *OrgTestSuite) SetupTest() { s.Require().NotEmpty(s.adminUserID) s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) + s.giteaEndpoint = garmTesting.CreateDefaultGiteaEndpoint(adminCtx, db, s.T()) s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint) + s.testCredsGitea = garmTesting.CreateTestGiteaCredentials(adminCtx, "new-creds", db, s.T(), s.giteaEndpoint) s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) // create some organization objects in the database, for testing purposes @@ -192,6 +196,62 @@ func (s 
*OrgTestSuite) TestCreateOrganization() { s.Require().Equal(storeOrg.Name, org.Name) s.Require().Equal(storeOrg.Credentials.Name, org.Credentials.Name) s.Require().Equal(storeOrg.WebhookSecret, org.WebhookSecret) + + entity, err := org.GetEntity() + s.Require().Nil(err) + s.Require().Equal(entity.EntityType, params.ForgeEntityTypeOrganization) + s.Require().Equal(entity.ID, org.ID) + + forgeType, err := entity.GetForgeType() + s.Require().Nil(err) + s.Require().Equal(forgeType, params.GithubEndpointType) +} + +func (s *OrgTestSuite) TestCreateOrgForGitea() { + // call tested function + org, err := s.Store.CreateOrganization( + s.adminCtx, + s.Fixtures.CreateOrgParams.Name, + s.testCredsGitea, + s.Fixtures.CreateOrgParams.WebhookSecret, + params.PoolBalancerTypeRoundRobin) + + // assertions + s.Require().Nil(err) + storeOrg, err := s.Store.GetOrganizationByID(s.adminCtx, org.ID) + if err != nil { + s.FailNow(fmt.Sprintf("failed to get organization by id: %v", err)) + } + s.Require().Equal(storeOrg.Name, org.Name) + s.Require().Equal(storeOrg.Credentials.Name, org.Credentials.Name) + s.Require().Equal(storeOrg.WebhookSecret, org.WebhookSecret) + + entity, err := org.GetEntity() + s.Require().Nil(err) + s.Require().Equal(entity.EntityType, params.ForgeEntityTypeOrganization) + s.Require().Equal(entity.ID, org.ID) + + forgeType, err := entity.GetForgeType() + s.Require().Nil(err) + s.Require().Equal(forgeType, params.GiteaEndpointType) +} + +func (s *OrgTestSuite) TestCreateOrganizationInvalidForgeType() { + credentials := params.ForgeCredentials{ + Name: "test-creds", + Endpoint: s.githubEndpoint, + ID: 99, + ForgeType: params.EndpointType("invalid-forge-type"), + } + + _, err := s.Store.CreateOrganization( + s.adminCtx, + s.Fixtures.CreateOrgParams.Name, + credentials, + s.Fixtures.CreateOrgParams.WebhookSecret, + params.PoolBalancerTypeRoundRobin) + s.Require().NotNil(err) + s.Require().Equal("creating org: unsupported credentials type: invalid request", err.Error()) } func (s *OrgTestSuite) TestCreateOrganizationInvalidDBPassphrase() { diff --git a/database/sql/repositories_test.go b/database/sql/repositories_test.go index 0e2fa08f..3f8d8ca2 100644 --- a/database/sql/repositories_test.go +++ b/database/sql/repositories_test.go @@ -59,8 +59,10 @@ type RepoTestSuite struct { adminUserID string testCreds params.ForgeCredentials + testCredsGitea params.ForgeCredentials secondaryTestCreds params.ForgeCredentials githubEndpoint params.ForgeEndpoint + giteaEndpoint params.ForgeEndpoint } func (s *RepoTestSuite) equalReposByName(expected, actual []params.Repository) { @@ -109,7 +111,9 @@ func (s *RepoTestSuite) SetupTest() { s.Require().NotEmpty(s.adminUserID) s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) + s.giteaEndpoint = garmTesting.CreateDefaultGiteaEndpoint(adminCtx, db, s.T()) s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint) + s.testCredsGitea = garmTesting.CreateTestGiteaCredentials(adminCtx, "new-creds", db, s.T(), s.giteaEndpoint) s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) // create some repository objects in the database, for testing purposes @@ -219,6 +223,68 @@ func (s *RepoTestSuite) TestCreateRepository() { s.Require().Equal(storeRepo.Name, repo.Name) s.Require().Equal(storeRepo.Credentials.Name, repo.Credentials.Name) s.Require().Equal(storeRepo.WebhookSecret, repo.WebhookSecret) + + entity, err := repo.GetEntity() 
+ s.Require().Nil(err) + s.Require().Equal(s.Fixtures.CreateRepoParams.Owner, entity.ID) + s.Require().Equal(entity.EntityType, params.ForgeEntityTypeRepository) + + forgeType, err := entity.GetForgeType() + s.Require().Nil(err) + s.Require().Equal(forgeType, params.GithubEndpointType) +} + +func (s *RepoTestSuite) TestCreateRepositoryGitea() { + // call tested function + repo, err := s.Store.CreateRepository( + s.adminCtx, + s.Fixtures.CreateRepoParams.Owner, + s.Fixtures.CreateRepoParams.Name, + s.testCredsGitea, + s.Fixtures.CreateRepoParams.WebhookSecret, + params.PoolBalancerTypeRoundRobin, + ) + + // assertions + s.Require().Nil(err) + storeRepo, err := s.Store.GetRepositoryByID(s.adminCtx, repo.ID) + if err != nil { + s.FailNow(fmt.Sprintf("failed to get repository by id: %v", err)) + } + s.Require().Equal(storeRepo.Owner, repo.Owner) + s.Require().Equal(storeRepo.Name, repo.Name) + s.Require().Equal(storeRepo.Credentials.Name, repo.Credentials.Name) + s.Require().Equal(storeRepo.WebhookSecret, repo.WebhookSecret) + + entity, err := repo.GetEntity() + s.Require().Nil(err) + s.Require().Equal(repo.ID, entity.ID) + s.Require().Equal(entity.EntityType, params.ForgeEntityTypeRepository) + + forgeType, err := entity.GetForgeType() + s.Require().Nil(err) + s.Require().Equal(forgeType, params.GiteaEndpointType) +} + +func (s *RepoTestSuite) TestCreateRepositoryInvalidForgeType() { + // call tested function + _, err := s.Store.CreateRepository( + s.adminCtx, + s.Fixtures.CreateRepoParams.Owner, + s.Fixtures.CreateRepoParams.Name, + params.ForgeCredentials{ + Name: "test-creds", + ForgeType: "invalid-forge-type", + Endpoint: params.ForgeEndpoint{ + Name: "test-endpoint", + }, + }, + s.Fixtures.CreateRepoParams.WebhookSecret, + params.PoolBalancerTypeRoundRobin, + ) + + s.Require().NotNil(err) + s.Require().Equal("creating repository: unsupported credentials type: invalid request", err.Error()) } func (s *RepoTestSuite) TestCreateRepositoryInvalidDBPassphrase() { diff --git a/database/sql/scalesets_test.go b/database/sql/scalesets_test.go index 54937c2d..7d49b397 100644 --- a/database/sql/scalesets_test.go +++ b/database/sql/scalesets_test.go @@ -58,7 +58,7 @@ func (s *ScaleSetsTestSuite) SetupTest() { s.FailNow(fmt.Sprintf("failed to create repo: %s", err)) } - s.enterprise, err = s.Store.CreateEnterprise(s.adminCtx, "test-enterprise", s.creds.Name, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) + s.enterprise, err = s.Store.CreateEnterprise(s.adminCtx, "test-enterprise", s.creds, "test-webhookSecret", params.PoolBalancerTypeRoundRobin) if err != nil { s.FailNow(fmt.Sprintf("failed to create enterprise: %s", err)) } diff --git a/database/watcher/watcher_store_test.go b/database/watcher/watcher_store_test.go index 3de7c01a..7045455e 100644 --- a/database/watcher/watcher_store_test.go +++ b/database/watcher/watcher_store_test.go @@ -646,7 +646,7 @@ func (s *WatcherStoreTestSuite) TestEnterpriseWatcher() { creds := garmTesting.CreateTestGithubCredentials(s.ctx, "test-creds", s.store, s.T(), ep) s.T().Cleanup(func() { s.store.DeleteGithubCredentials(s.ctx, creds.ID) }) - ent, err := s.store.CreateEnterprise(s.ctx, "test-enterprise", creds.Name, "test-secret", params.PoolBalancerTypeRoundRobin) + ent, err := s.store.CreateEnterprise(s.ctx, "test-enterprise", creds, "test-secret", params.PoolBalancerTypeRoundRobin) s.Require().NoError(err) s.Require().NotEmpty(ent.ID) diff --git a/internal/testing/testing.go b/internal/testing/testing.go index 0fcc1dda..84b4d48c 100644 --- 
a/internal/testing/testing.go +++ b/internal/testing/testing.go @@ -110,6 +110,30 @@ func CreateDefaultGithubEndpoint(ctx context.Context, db common.Store, s *testin return ep } +func CreateDefaultGiteaEndpoint(ctx context.Context, db common.Store, s *testing.T) params.ForgeEndpoint { + endpointParams := params.CreateGiteaEndpointParams{ + Name: "gitea.example.com", + Description: "gitea endpoint", + APIBaseURL: "https://gitea.example.com/", + BaseURL: "https://gitea.example.com/", + } + + ep, err := db.GetGithubEndpoint(ctx, endpointParams.Name) + if err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + s.Fatalf("failed to get database object (github.com): %v", err) + } + ep, err = db.CreateGiteaEndpoint(ctx, endpointParams) + if err != nil { + if !errors.Is(err, runnerErrors.ErrDuplicateEntity) { + s.Fatalf("failed to create database object (github.com): %v", err) + } + } + } + + return ep +} + func CreateTestGithubCredentials(ctx context.Context, credsName string, db common.Store, s *testing.T, endpoint params.ForgeEndpoint) params.ForgeCredentials { newCredsParams := params.CreateGithubCredentialsParams{ Name: credsName, @@ -127,6 +151,23 @@ func CreateTestGithubCredentials(ctx context.Context, credsName string, db commo return newCreds } +func CreateTestGiteaCredentials(ctx context.Context, credsName string, db common.Store, s *testing.T, endpoint params.ForgeEndpoint) params.ForgeCredentials { + newCredsParams := params.CreateGiteaCredentialsParams{ + Name: credsName, + Description: "Test creds", + AuthType: params.ForgeAuthTypePAT, + Endpoint: endpoint.Name, + PAT: params.GithubPAT{ + OAuth2Token: "test-token", + }, + } + newCreds, err := db.CreateGiteaCredentials(ctx, newCredsParams) + if err != nil { + s.Fatalf("failed to create database object (%s): %v", credsName, err) + } + return newCreds +} + func GetTestSqliteDBConfig(t *testing.T) config.Database { dir, err := os.MkdirTemp("", "garm-config-test") if err != nil { diff --git a/runner/enterprises.go b/runner/enterprises.go index 6cbe54d0..1dbb2171 100644 --- a/runner/enterprises.go +++ b/runner/enterprises.go @@ -39,7 +39,7 @@ func (r *Runner) CreateEnterprise(ctx context.Context, param params.CreateEnterp return params.Enterprise{}, runnerErrors.NewConflictError("enterprise %s already exists", param.Name) } - enterprise, err = r.store.CreateEnterprise(ctx, param.Name, creds.Name, param.WebhookSecret, param.PoolBalancerType) + enterprise, err = r.store.CreateEnterprise(ctx, param.Name, creds, param.WebhookSecret, param.PoolBalancerType) if err != nil { return params.Enterprise{}, errors.Wrap(err, "creating enterprise") } diff --git a/runner/enterprises_test.go b/runner/enterprises_test.go index 5c09ae5d..d5eef463 100644 --- a/runner/enterprises_test.go +++ b/runner/enterprises_test.go @@ -81,7 +81,7 @@ func (s *EnterpriseTestSuite) SetupTest() { enterprise, err := db.CreateEnterprise( adminCtx, name, - s.testCreds.Name, + s.testCreds, fmt.Sprintf("test-webhook-secret-%v", i), params.PoolBalancerTypeRoundRobin, ) From 6994c8ce05e5930e3e98fa34e33cff839ff026df Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 20 May 2025 09:40:15 +0000 Subject: [PATCH 074/179] Add copyright header Signed-off-by: Gabriel Adrian Samfira --- apiserver/controllers/gitea_credentials.go | 13 +++++++++++++ apiserver/controllers/gitea_endpoints.go | 13 +++++++++++++ apiserver/controllers/github_credentials.go | 13 +++++++++++++ apiserver/controllers/github_endpoints.go | 13 +++++++++++++ apiserver/events/events.go | 13 
+++++++++++++ apiserver/events/params.go | 13 +++++++++++++ auth/admin_required.go | 13 +++++++++++++ auth/metrics.go | 13 +++++++++++++ cache/cache_test.go | 13 +++++++++++++ cache/credentials_cache.go | 13 +++++++++++++ cache/entity_cache.go | 13 +++++++++++++ cache/github_client.go | 13 +++++++++++++ cache/instance_cache.go | 13 +++++++++++++ cache/tools_cache.go | 13 +++++++++++++ cache/util.go | 13 +++++++++++++ cmd/garm-cli/cmd/events.go | 13 +++++++++++++ cmd/garm-cli/cmd/gitea.go | 13 +++++++++++++ cmd/garm-cli/cmd/gitea_credentials.go | 20 ++++++++++---------- cmd/garm-cli/cmd/gitea_endpoints.go | 13 +++++++++++++ cmd/garm-cli/cmd/github.go | 13 +++++++++++++ cmd/garm-cli/cmd/github_endpoints.go | 13 +++++++++++++ cmd/garm-cli/cmd/log.go | 13 +++++++++++++ cmd/garm-cli/common/cobra.go | 13 +++++++++++++ cmd/garm-cli/config/home_nix.go | 13 +++++++++++++ cmd/garm-cli/config/home_windows.go | 13 +++++++++++++ database/common/errors.go | 14 ++++++++++++++ database/common/watcher.go | 14 ++++++++++++++ database/sql/common_test.go | 14 ++++++++++++++ database/sql/gitea.go | 14 ++++++++++++++ database/sql/gitea_test.go | 10 +++++----- database/sql/jobs.go | 14 ++++++++++++++ database/sql/models.go | 14 ++++++++++++++ database/sql/scaleset_instances.go | 14 ++++++++++++++ database/sql/scalesets.go | 20 ++++++++++---------- database/sql/scalesets_test.go | 14 ++++++++++++++ database/watcher/consumer.go | 14 ++++++++++++++ database/watcher/filters.go | 14 ++++++++++++++ database/watcher/producer.go | 14 ++++++++++++++ database/watcher/test_export.go | 13 +++++++++++++ database/watcher/watcher.go | 14 ++++++++++++++ database/watcher/watcher_store_test.go | 14 ++++++++++++++ database/watcher/watcher_test.go | 13 +++++++++++++ internal/testing/mock_watcher.go | 14 ++++++++++++++ locking/interface.go | 14 ++++++++++++++ locking/local_backoff_locker.go | 14 ++++++++++++++ locking/local_backoff_locker_test.go | 14 ++++++++++++++ locking/local_locker.go | 14 ++++++++++++++ locking/local_locker_test.go | 13 +++++++++++++ locking/locking.go | 14 ++++++++++++++ metrics/enterprise.go | 14 ++++++++++++++ metrics/github.go | 14 ++++++++++++++ metrics/health.go | 14 ++++++++++++++ metrics/instance.go | 14 ++++++++++++++ metrics/metrics.go | 14 ++++++++++++++ metrics/organization.go | 14 ++++++++++++++ metrics/pool.go | 14 ++++++++++++++ metrics/provider.go | 14 ++++++++++++++ metrics/repository.go | 14 ++++++++++++++ metrics/util.go | 14 ++++++++++++++ metrics/webhooks.go | 14 ++++++++++++++ params/interfaces.go | 14 ++++++++++++++ runner/common/util.go | 14 ++++++++++++++ runner/common_test.go | 14 ++++++++++++++ runner/enterprises.go | 14 ++++++++++++++ runner/gitea_credentials.go | 14 ++++++++++++++ runner/gitea_endpoints.go | 14 ++++++++++++++ runner/github_credentials.go | 14 ++++++++++++++ runner/github_endpoints.go | 14 ++++++++++++++ runner/metadata.go | 14 ++++++++++++++ runner/metrics/enterprise.go | 14 ++++++++++++++ runner/metrics/health.go | 14 ++++++++++++++ runner/metrics/instance.go | 14 ++++++++++++++ runner/metrics/metrics.go | 14 ++++++++++++++ runner/metrics/organization.go | 14 ++++++++++++++ runner/metrics/pool.go | 14 ++++++++++++++ runner/metrics/provider.go | 14 ++++++++++++++ runner/metrics/repository.go | 14 ++++++++++++++ runner/pool/common.go | 14 ++++++++++++++ runner/pool/stub_client.go | 14 ++++++++++++++ runner/pool/util.go | 14 ++++++++++++++ runner/pool/util_test.go | 14 ++++++++++++++ runner/pool/watcher.go | 14 ++++++++++++++ runner/providers/common/common.go | 14 
++++++++++++++ runner/providers/external/external.go | 14 ++++++++++++++ runner/providers/util/util.go | 14 ++++++++++++++ runner/providers/v0.1.0/external.go | 14 ++++++++++++++ runner/providers/v0.1.1/external.go | 14 ++++++++++++++ test/integration/client_utils.go | 14 ++++++++++++++ test/integration/credentials_test.go | 13 +++++++++++++ test/integration/endpoints.go | 14 ++++++++++++++ test/integration/endpoints_test.go | 14 ++++++++++++++ test/integration/external_provider_test.go | 13 +++++++++++++ test/integration/gh_cleanup/main.go | 14 ++++++++++++++ test/integration/jobs_test.go | 13 +++++++++++++ test/integration/list_info_test.go | 13 +++++++++++++ test/integration/organizations_test.go | 13 +++++++++++++ test/integration/repositories_test.go | 13 +++++++++++++ test/integration/suite_test.go | 13 +++++++++++++ test/integration/utils.go | 13 +++++++++++++ util/appdefaults/appdefaults.go | 13 +++++++++++++ util/github/gitea.go | 14 ++++++++++++++ util/logging.go | 14 ++++++++++++++ websocket/client.go | 14 ++++++++++++++ websocket/websocket.go | 13 +++++++++++++ workers/cache/cache.go | 13 +++++++++++++ workers/cache/gitea_tools.go | 14 ++++++++++++++ workers/cache/tool_cache.go | 14 ++++++++++++++ workers/common/interfaces.go | 14 ++++++++++++++ workers/entity/controller.go | 13 +++++++++++++ workers/entity/controller_watcher.go | 13 +++++++++++++ workers/entity/util.go | 13 +++++++++++++ workers/entity/worker.go | 13 +++++++++++++ workers/entity/worker_watcher.go | 13 +++++++++++++ workers/provider/errors.go | 13 +++++++++++++ workers/provider/instance_manager.go | 13 +++++++++++++ workers/provider/provider.go | 13 +++++++++++++ workers/provider/provider_helper.go | 13 +++++++++++++ workers/provider/util.go | 13 +++++++++++++ workers/scaleset/controller.go | 13 +++++++++++++ workers/scaleset/controller_watcher.go | 13 +++++++++++++ workers/scaleset/interfaces.go | 13 +++++++++++++ workers/scaleset/scaleset.go | 13 +++++++++++++ workers/scaleset/scaleset_helper.go | 13 +++++++++++++ workers/scaleset/scaleset_listener.go | 13 +++++++++++++ workers/scaleset/util.go | 13 +++++++++++++ 125 files changed, 1678 insertions(+), 25 deletions(-) diff --git a/apiserver/controllers/gitea_credentials.go b/apiserver/controllers/gitea_credentials.go index e1be0fb7..777be982 100644 --- a/apiserver/controllers/gitea_credentials.go +++ b/apiserver/controllers/gitea_credentials.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package controllers import ( diff --git a/apiserver/controllers/gitea_endpoints.go b/apiserver/controllers/gitea_endpoints.go index 6f1525d5..67e85178 100644 --- a/apiserver/controllers/gitea_endpoints.go +++ b/apiserver/controllers/gitea_endpoints.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package controllers import ( diff --git a/apiserver/controllers/github_credentials.go b/apiserver/controllers/github_credentials.go index c7544357..04e087e5 100644 --- a/apiserver/controllers/github_credentials.go +++ b/apiserver/controllers/github_credentials.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package controllers import ( diff --git a/apiserver/controllers/github_endpoints.go b/apiserver/controllers/github_endpoints.go index 491c5716..482f9d03 100644 --- a/apiserver/controllers/github_endpoints.go +++ b/apiserver/controllers/github_endpoints.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package controllers import ( diff --git a/apiserver/events/events.go b/apiserver/events/events.go index 30e0b386..94d707f2 100644 --- a/apiserver/events/events.go +++ b/apiserver/events/events.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package events import ( diff --git a/apiserver/events/params.go b/apiserver/events/params.go index 49bf47fa..a2b996a9 100644 --- a/apiserver/events/params.go +++ b/apiserver/events/params.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package events import ( diff --git a/auth/admin_required.go b/auth/admin_required.go index 8ab6cbac..b3ca3624 100644 --- a/auth/admin_required.go +++ b/auth/admin_required.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package auth import "net/http" diff --git a/auth/metrics.go b/auth/metrics.go index 55cede44..5ea688e2 100644 --- a/auth/metrics.go +++ b/auth/metrics.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package auth import ( diff --git a/cache/cache_test.go b/cache/cache_test.go index 4b7cf332..7a8ebed3 100644 --- a/cache/cache_test.go +++ b/cache/cache_test.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cache import ( diff --git a/cache/credentials_cache.go b/cache/credentials_cache.go index 6dbef775..3cb5c71d 100644 --- a/cache/credentials_cache.go +++ b/cache/credentials_cache.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +// License for the specific language governing permissions and limitations +// under the License. package cache import ( diff --git a/cache/entity_cache.go b/cache/entity_cache.go index bbbc385d..4800dd9c 100644 --- a/cache/entity_cache.go +++ b/cache/entity_cache.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cache import ( diff --git a/cache/github_client.go b/cache/github_client.go index 0126e45b..179a9718 100644 --- a/cache/github_client.go +++ b/cache/github_client.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cache import ( diff --git a/cache/instance_cache.go b/cache/instance_cache.go index b96db5e9..baf09945 100644 --- a/cache/instance_cache.go +++ b/cache/instance_cache.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cache import ( diff --git a/cache/tools_cache.go b/cache/tools_cache.go index 6e3c4636..30e83a0e 100644 --- a/cache/tools_cache.go +++ b/cache/tools_cache.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
package cache import ( diff --git a/cache/util.go b/cache/util.go index f8769c65..5fd234a9 100644 --- a/cache/util.go +++ b/cache/util.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cache import ( diff --git a/cmd/garm-cli/cmd/events.go b/cmd/garm-cli/cmd/events.go index f38e9ea6..da44732a 100644 --- a/cmd/garm-cli/cmd/events.go +++ b/cmd/garm-cli/cmd/events.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cmd import ( diff --git a/cmd/garm-cli/cmd/gitea.go b/cmd/garm-cli/cmd/gitea.go index 10d086bd..6627fd6f 100644 --- a/cmd/garm-cli/cmd/gitea.go +++ b/cmd/garm-cli/cmd/gitea.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cmd import "github.com/spf13/cobra" diff --git a/cmd/garm-cli/cmd/gitea_credentials.go b/cmd/garm-cli/cmd/gitea_credentials.go index c744c8a7..d26f95ed 100644 --- a/cmd/garm-cli/cmd/gitea_credentials.go +++ b/cmd/garm-cli/cmd/gitea_credentials.go @@ -1,16 +1,16 @@ -// Copyright 2022 Cloudbase Solutions SRL +// Copyright 2025 Cloudbase Solutions SRL // -// Licensed under the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
+// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cmd diff --git a/cmd/garm-cli/cmd/gitea_endpoints.go b/cmd/garm-cli/cmd/gitea_endpoints.go index d3504f17..55fa09c9 100644 --- a/cmd/garm-cli/cmd/gitea_endpoints.go +++ b/cmd/garm-cli/cmd/gitea_endpoints.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cmd import ( diff --git a/cmd/garm-cli/cmd/github.go b/cmd/garm-cli/cmd/github.go index 8b79a381..71342026 100644 --- a/cmd/garm-cli/cmd/github.go +++ b/cmd/garm-cli/cmd/github.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cmd import "github.com/spf13/cobra" diff --git a/cmd/garm-cli/cmd/github_endpoints.go b/cmd/garm-cli/cmd/github_endpoints.go index fbdca86c..61f46810 100644 --- a/cmd/garm-cli/cmd/github_endpoints.go +++ b/cmd/garm-cli/cmd/github_endpoints.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cmd import ( diff --git a/cmd/garm-cli/cmd/log.go b/cmd/garm-cli/cmd/log.go index 901e8e0f..e930ae69 100644 --- a/cmd/garm-cli/cmd/log.go +++ b/cmd/garm-cli/cmd/log.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +// License for the specific language governing permissions and limitations +// under the License. package cmd import ( diff --git a/cmd/garm-cli/common/cobra.go b/cmd/garm-cli/common/cobra.go index e59a2aca..399a4b92 100644 --- a/cmd/garm-cli/common/cobra.go +++ b/cmd/garm-cli/common/cobra.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package common import "fmt" diff --git a/cmd/garm-cli/config/home_nix.go b/cmd/garm-cli/config/home_nix.go index 27aed4f8..e9ffa521 100644 --- a/cmd/garm-cli/config/home_nix.go +++ b/cmd/garm-cli/config/home_nix.go @@ -1,6 +1,19 @@ //go:build !windows // +build !windows +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package config import ( diff --git a/cmd/garm-cli/config/home_windows.go b/cmd/garm-cli/config/home_windows.go index d34379b4..c70fb645 100644 --- a/cmd/garm-cli/config/home_windows.go +++ b/cmd/garm-cli/config/home_windows.go @@ -1,6 +1,19 @@ //go:build windows && !linux // +build windows,!linux +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package config import ( diff --git a/database/common/errors.go b/database/common/errors.go index df2d936a..5e6a5087 100644 --- a/database/common/errors.go +++ b/database/common/errors.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package common import "fmt" diff --git a/database/common/watcher.go b/database/common/watcher.go index 4dc18437..94152094 100644 --- a/database/common/watcher.go +++ b/database/common/watcher.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package common import "context" diff --git a/database/sql/common_test.go b/database/sql/common_test.go index af0adcf9..a3c62e06 100644 --- a/database/sql/common_test.go +++ b/database/sql/common_test.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package sql const ( diff --git a/database/sql/gitea.go b/database/sql/gitea.go index 79680287..3b4c55ec 100644 --- a/database/sql/gitea.go +++ b/database/sql/gitea.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+
 package sql
 
 import (
diff --git a/database/sql/gitea_test.go b/database/sql/gitea_test.go
index c7a96d66..8eaaf23b 100644
--- a/database/sql/gitea_test.go
+++ b/database/sql/gitea_test.go
@@ -161,7 +161,7 @@ func (s *GiteaTestSuite) TestUpdateEndpoint() {
 	s.Require().NoError(err)
 	s.Require().NotNil(endpoint)
 
-	newDescription := "new description"
+	newDescription := "another description"
 	newAPIBaseURL := "https://new-api.example.com"
 	newBaseURL := "https://new.example.com"
 	caCertBundle, err := os.ReadFile("../../testdata/certs/srv-pub.pem")
@@ -185,7 +185,7 @@ func (s *GiteaTestSuite) TestUpdatingNonExistingEndpointReturnsNotFoundError() {
 
 	ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T())
 
-	newDescription := "test"
+	newDescription := "test desc"
 	updateEpParams := params.UpdateGiteaEndpointParams{
 		Description: &newDescription,
 	}
@@ -573,7 +573,7 @@ func (s *GiteaTestSuite) TestUpdateCredentials() {
 	s.Require().NoError(err)
 	s.Require().NotNil(creds)
 
-	newDescription := "new description"
+	newDescription := "just a description"
 	newName := "new-name"
 	newToken := "new-token"
 	updateCredParams := params.UpdateGiteaCredentialsParams{
@@ -622,7 +622,7 @@ func (s *GiteaTestSuite) TestUpdateCredentialsFailsIfCredentialsAreOwnedByNonAdm
 	s.Require().NoError(err)
 	s.Require().NotNil(creds)
 
-	newDescription := "new description2"
+	newDescription := "new params desc"
 	updateCredParams := params.UpdateGiteaCredentialsParams{
 		Description: &newDescription,
 	}
@@ -651,7 +651,7 @@ func (s *GiteaTestSuite) TestAdminUserCanUpdateAnyGiteaCredentials() {
 	s.Require().NoError(err)
 	s.Require().NotNil(creds)
 
-	newDescription := "new description2"
+	newDescription := "another new description"
 	updateCredParams := params.UpdateGiteaCredentialsParams{
 		Description: &newDescription,
 	}
diff --git a/database/sql/jobs.go b/database/sql/jobs.go
index 7f9b7b00..1215e3f3 100644
--- a/database/sql/jobs.go
+++ b/database/sql/jobs.go
@@ -1,3 +1,17 @@
+// Copyright 2025 Cloudbase Solutions SRL
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+
 package sql
 
 import (
diff --git a/database/sql/models.go b/database/sql/models.go
index e7fad261..154fb51d 100644
--- a/database/sql/models.go
+++ b/database/sql/models.go
@@ -1,3 +1,17 @@
+// Copyright 2025 Cloudbase Solutions SRL
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+
 package sql
 
 import (
diff --git a/database/sql/scaleset_instances.go b/database/sql/scaleset_instances.go
index fcb9e1f2..bbc4f593 100644
--- a/database/sql/scaleset_instances.go
+++ b/database/sql/scaleset_instances.go
@@ -1,3 +1,17 @@
+// Copyright 2025 Cloudbase Solutions SRL
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+
 package sql
 
 import (
diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go
index 1d272704..930ae17d 100644
--- a/database/sql/scalesets.go
+++ b/database/sql/scalesets.go
@@ -1,16 +1,16 @@
-// Copyright 2024 Cloudbase Solutions SRL
+// Copyright 2025 Cloudbase Solutions SRL
 //
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at
 //
-// http://www.apache.org/licenses/LICENSE-2.0
+// http://www.apache.org/licenses/LICENSE-2.0
 //
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations
-// under the License.
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
 
 package sql
 
diff --git a/database/sql/scalesets_test.go b/database/sql/scalesets_test.go
index 7d49b397..f1f9fbba 100644
--- a/database/sql/scalesets_test.go
+++ b/database/sql/scalesets_test.go
@@ -1,3 +1,17 @@
+// Copyright 2025 Cloudbase Solutions SRL
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+
 package sql
 
 import (
diff --git a/database/watcher/consumer.go b/database/watcher/consumer.go
index 9282ece8..ed0967e9 100644
--- a/database/watcher/consumer.go
+++ b/database/watcher/consumer.go
@@ -1,3 +1,17 @@
+// Copyright 2025 Cloudbase Solutions SRL
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License.
You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package watcher import ( diff --git a/database/watcher/filters.go b/database/watcher/filters.go index 7f7a7ab9..b09c422d 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package watcher import ( diff --git a/database/watcher/producer.go b/database/watcher/producer.go index 159ad843..927aada0 100644 --- a/database/watcher/producer.go +++ b/database/watcher/producer.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package watcher import ( diff --git a/database/watcher/test_export.go b/database/watcher/test_export.go index f9b4ecf1..eb3d38b6 100644 --- a/database/watcher/test_export.go +++ b/database/watcher/test_export.go @@ -1,6 +1,19 @@ //go:build testing // +build testing +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package watcher import "github.com/cloudbase/garm/database/common" diff --git a/database/watcher/watcher.go b/database/watcher/watcher.go index fda318c6..a7e1cd67 100644 --- a/database/watcher/watcher.go +++ b/database/watcher/watcher.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package watcher import ( diff --git a/database/watcher/watcher_store_test.go b/database/watcher/watcher_store_test.go index 7045455e..8fe7d0a4 100644 --- a/database/watcher/watcher_store_test.go +++ b/database/watcher/watcher_store_test.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package watcher_test import ( diff --git a/database/watcher/watcher_test.go b/database/watcher/watcher_test.go index c5b56fe2..b272bda7 100644 --- a/database/watcher/watcher_test.go +++ b/database/watcher/watcher_test.go @@ -1,5 +1,18 @@ //go:build testing +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package watcher_test import ( diff --git a/internal/testing/mock_watcher.go b/internal/testing/mock_watcher.go index 67ae5da4..112f0de5 100644 --- a/internal/testing/mock_watcher.go +++ b/internal/testing/mock_watcher.go @@ -1,6 +1,20 @@ //go:build testing // +build testing +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package testing import ( diff --git a/locking/interface.go b/locking/interface.go index 2b6ffb47..43ed1737 100644 --- a/locking/interface.go +++ b/locking/interface.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package locking import "time" diff --git a/locking/local_backoff_locker.go b/locking/local_backoff_locker.go index 9c2fecb1..93344566 100644 --- a/locking/local_backoff_locker.go +++ b/locking/local_backoff_locker.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package locking import ( diff --git a/locking/local_backoff_locker_test.go b/locking/local_backoff_locker_test.go index a9a986e2..00fe09c8 100644 --- a/locking/local_backoff_locker_test.go +++ b/locking/local_backoff_locker_test.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package locking import ( diff --git a/locking/local_locker.go b/locking/local_locker.go index fc5ea847..312d85ec 100644 --- a/locking/local_locker.go +++ b/locking/local_locker.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package locking import ( diff --git a/locking/local_locker_test.go b/locking/local_locker_test.go index 6decf512..75b4dac0 100644 --- a/locking/local_locker_test.go +++ b/locking/local_locker_test.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package locking import ( diff --git a/locking/locking.go b/locking/locking.go index d485f5ff..312d2e6a 100644 --- a/locking/locking.go +++ b/locking/locking.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package locking import ( diff --git a/metrics/enterprise.go b/metrics/enterprise.go index f8382edf..882b64df 100644 --- a/metrics/enterprise.go +++ b/metrics/enterprise.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/metrics/github.go b/metrics/github.go index 0c050652..0d6f5fa7 100644 --- a/metrics/github.go +++ b/metrics/github.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import "github.com/prometheus/client_golang/prometheus" diff --git a/metrics/health.go b/metrics/health.go index 4acfbb36..13194231 100644 --- a/metrics/health.go +++ b/metrics/health.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/metrics/instance.go b/metrics/instance.go index 7c2f2f96..b9d7e1cf 100644 --- a/metrics/instance.go +++ b/metrics/instance.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/metrics/metrics.go b/metrics/metrics.go index edceb30a..1a566116 100644 --- a/metrics/metrics.go +++ b/metrics/metrics.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/metrics/organization.go b/metrics/organization.go index 38d7c611..d04e7a4e 100644 --- a/metrics/organization.go +++ b/metrics/organization.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/metrics/pool.go b/metrics/pool.go index 5803af90..fc6f2520 100644 --- a/metrics/pool.go +++ b/metrics/pool.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package metrics import ( diff --git a/metrics/provider.go b/metrics/provider.go index 8285ca1e..3262ab3b 100644 --- a/metrics/provider.go +++ b/metrics/provider.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/metrics/repository.go b/metrics/repository.go index a84dd120..21714233 100644 --- a/metrics/repository.go +++ b/metrics/repository.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/metrics/util.go b/metrics/util.go index b2edb580..d83b4973 100644 --- a/metrics/util.go +++ b/metrics/util.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics func Bool2float64(b bool) float64 { diff --git a/metrics/webhooks.go b/metrics/webhooks.go index 839219a4..48a08f9c 100644 --- a/metrics/webhooks.go +++ b/metrics/webhooks.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package metrics import "github.com/prometheus/client_golang/prometheus" diff --git a/params/interfaces.go b/params/interfaces.go index ccf04e06..31ef635f 100644 --- a/params/interfaces.go +++ b/params/interfaces.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package params import "time" diff --git a/runner/common/util.go b/runner/common/util.go index 39c03651..2720c496 100644 --- a/runner/common/util.go +++ b/runner/common/util.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package common import ( diff --git a/runner/common_test.go b/runner/common_test.go index b9b53545..247b5ab1 100644 --- a/runner/common_test.go +++ b/runner/common_test.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package runner const ( diff --git a/runner/enterprises.go b/runner/enterprises.go index 1dbb2171..f192c7cd 100644 --- a/runner/enterprises.go +++ b/runner/enterprises.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package runner import ( diff --git a/runner/gitea_credentials.go b/runner/gitea_credentials.go index 749f2346..4fdad1d2 100644 --- a/runner/gitea_credentials.go +++ b/runner/gitea_credentials.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package runner import ( diff --git a/runner/gitea_endpoints.go b/runner/gitea_endpoints.go index 847dbab9..181f8e7e 100644 --- a/runner/gitea_endpoints.go +++ b/runner/gitea_endpoints.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package runner import ( diff --git a/runner/github_credentials.go b/runner/github_credentials.go index 7c368c99..ec524056 100644 --- a/runner/github_credentials.go +++ b/runner/github_credentials.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package runner import ( diff --git a/runner/github_endpoints.go b/runner/github_endpoints.go index 3f4fb308..0e144447 100644 --- a/runner/github_endpoints.go +++ b/runner/github_endpoints.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package runner import ( diff --git a/runner/metadata.go b/runner/metadata.go index 3df7966a..2c917ea0 100644 --- a/runner/metadata.go +++ b/runner/metadata.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package runner import ( diff --git a/runner/metrics/enterprise.go b/runner/metrics/enterprise.go index 407c0fc4..3ab9003c 100644 --- a/runner/metrics/enterprise.go +++ b/runner/metrics/enterprise.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/runner/metrics/health.go b/runner/metrics/health.go index 05e1ed9b..fcd254df 100644 --- a/runner/metrics/health.go +++ b/runner/metrics/health.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/runner/metrics/instance.go b/runner/metrics/instance.go index 06fd4881..bc6bed0a 100644 --- a/runner/metrics/instance.go +++ b/runner/metrics/instance.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package metrics import ( diff --git a/runner/metrics/metrics.go b/runner/metrics/metrics.go index f9f70864..772ba86a 100644 --- a/runner/metrics/metrics.go +++ b/runner/metrics/metrics.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/runner/metrics/organization.go b/runner/metrics/organization.go index 6b9f6b71..3716cca1 100644 --- a/runner/metrics/organization.go +++ b/runner/metrics/organization.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/runner/metrics/pool.go b/runner/metrics/pool.go index 44ad27a8..6b06a8b9 100644 --- a/runner/metrics/pool.go +++ b/runner/metrics/pool.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/runner/metrics/provider.go b/runner/metrics/provider.go index e2b38a9f..1d7a065d 100644 --- a/runner/metrics/provider.go +++ b/runner/metrics/provider.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package metrics import ( diff --git a/runner/metrics/repository.go b/runner/metrics/repository.go index b76fcc0e..36e07bf0 100644 --- a/runner/metrics/repository.go +++ b/runner/metrics/repository.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package metrics import ( diff --git a/runner/pool/common.go b/runner/pool/common.go index 6820be1a..5316e07e 100644 --- a/runner/pool/common.go +++ b/runner/pool/common.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package pool import ( diff --git a/runner/pool/stub_client.go b/runner/pool/stub_client.go index e8f1e7c6..6fa44e74 100644 --- a/runner/pool/stub_client.go +++ b/runner/pool/stub_client.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package pool import ( diff --git a/runner/pool/util.go b/runner/pool/util.go index 4c4bf5b1..dd55e1db 100644 --- a/runner/pool/util.go +++ b/runner/pool/util.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package pool import ( diff --git a/runner/pool/util_test.go b/runner/pool/util_test.go index bcfea879..67d31f76 100644 --- a/runner/pool/util_test.go +++ b/runner/pool/util_test.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package pool import ( diff --git a/runner/pool/watcher.go b/runner/pool/watcher.go index 97089133..324643ce 100644 --- a/runner/pool/watcher.go +++ b/runner/pool/watcher.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package pool import ( diff --git a/runner/providers/common/common.go b/runner/providers/common/common.go index 4e49e080..f1a5a66d 100644 --- a/runner/providers/common/common.go +++ b/runner/providers/common/common.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package common import ( diff --git a/runner/providers/external/external.go b/runner/providers/external/external.go index 23b9b894..46e3dd47 100644 --- a/runner/providers/external/external.go +++ b/runner/providers/external/external.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package external import ( diff --git a/runner/providers/util/util.go b/runner/providers/util/util.go index 2948730b..fb3c12bd 100644 --- a/runner/providers/util/util.go +++ b/runner/providers/util/util.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package util import ( diff --git a/runner/providers/v0.1.0/external.go b/runner/providers/v0.1.0/external.go index 6dd0ef46..60c5ca1b 100644 --- a/runner/providers/v0.1.0/external.go +++ b/runner/providers/v0.1.0/external.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package v010 import ( diff --git a/runner/providers/v0.1.1/external.go b/runner/providers/v0.1.1/external.go index 530a2645..192f735d 100644 --- a/runner/providers/v0.1.1/external.go +++ b/runner/providers/v0.1.1/external.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package v011 import ( diff --git a/test/integration/client_utils.go b/test/integration/client_utils.go index c986be2d..e423c107 100644 --- a/test/integration/client_utils.go +++ b/test/integration/client_utils.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package integration import ( diff --git a/test/integration/credentials_test.go b/test/integration/credentials_test.go index f1594f87..9b9387f6 100644 --- a/test/integration/credentials_test.go +++ b/test/integration/credentials_test.go @@ -1,6 +1,19 @@ //go:build integration // +build integration +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package integration import ( diff --git a/test/integration/endpoints.go b/test/integration/endpoints.go index 9f1320ca..720f43d2 100644 --- a/test/integration/endpoints.go +++ b/test/integration/endpoints.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package integration import ( diff --git a/test/integration/endpoints_test.go b/test/integration/endpoints_test.go index a958259f..fe0dd160 100644 --- a/test/integration/endpoints_test.go +++ b/test/integration/endpoints_test.go @@ -1,6 +1,20 @@ //go:build integration // +build integration +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package integration import ( diff --git a/test/integration/external_provider_test.go b/test/integration/external_provider_test.go index ceb5b162..2c85eb35 100644 --- a/test/integration/external_provider_test.go +++ b/test/integration/external_provider_test.go @@ -1,6 +1,19 @@ //go:build integration // +build integration +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +// License for the specific language governing permissions and limitations +// under the License. package integration import ( diff --git a/test/integration/gh_cleanup/main.go b/test/integration/gh_cleanup/main.go index 6ec50304..95f1aa78 100644 --- a/test/integration/gh_cleanup/main.go +++ b/test/integration/gh_cleanup/main.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package main import ( diff --git a/test/integration/jobs_test.go b/test/integration/jobs_test.go index 4d87c077..8da94414 100644 --- a/test/integration/jobs_test.go +++ b/test/integration/jobs_test.go @@ -1,6 +1,19 @@ //go:build integration // +build integration +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package integration import ( diff --git a/test/integration/list_info_test.go b/test/integration/list_info_test.go index 1eef816e..ddb3ff86 100644 --- a/test/integration/list_info_test.go +++ b/test/integration/list_info_test.go @@ -1,6 +1,19 @@ //go:build integration // +build integration +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package integration import ( diff --git a/test/integration/organizations_test.go b/test/integration/organizations_test.go index a96e625c..8acfb6d3 100644 --- a/test/integration/organizations_test.go +++ b/test/integration/organizations_test.go @@ -1,6 +1,19 @@ //go:build integration // +build integration +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +// License for the specific language governing permissions and limitations +// under the License. package integration import ( diff --git a/test/integration/repositories_test.go b/test/integration/repositories_test.go index 1f111fcc..43a5d8ec 100644 --- a/test/integration/repositories_test.go +++ b/test/integration/repositories_test.go @@ -1,6 +1,19 @@ //go:build integration // +build integration +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package integration import ( diff --git a/test/integration/suite_test.go b/test/integration/suite_test.go index c2f4bd5f..ca6b3030 100644 --- a/test/integration/suite_test.go +++ b/test/integration/suite_test.go @@ -1,6 +1,19 @@ //go:build integration // +build integration +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package integration import ( diff --git a/test/integration/utils.go b/test/integration/utils.go index 24e97b7f..1fa35b5e 100644 --- a/test/integration/utils.go +++ b/test/integration/utils.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package integration import ( diff --git a/util/appdefaults/appdefaults.go b/util/appdefaults/appdefaults.go index 479db08f..cc53f794 100644 --- a/util/appdefaults/appdefaults.go +++ b/util/appdefaults/appdefaults.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +// License for the specific language governing permissions and limitations +// under the License. package appdefaults import "time" diff --git a/util/github/gitea.go b/util/github/gitea.go index 0359836e..e657db48 100644 --- a/util/github/gitea.go +++ b/util/github/gitea.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package github import ( diff --git a/util/logging.go b/util/logging.go index bb7b0562..4c37ed34 100644 --- a/util/logging.go +++ b/util/logging.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package util import ( diff --git a/websocket/client.go b/websocket/client.go index 657aa49e..70777265 100644 --- a/websocket/client.go +++ b/websocket/client.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package websocket import ( diff --git a/websocket/websocket.go b/websocket/websocket.go index 14b5e785..e59eb70e 100644 --- a/websocket/websocket.go +++ b/websocket/websocket.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
package websocket import ( diff --git a/workers/cache/cache.go b/workers/cache/cache.go index 8cc14224..8f53cb67 100644 --- a/workers/cache/cache.go +++ b/workers/cache/cache.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package cache import ( diff --git a/workers/cache/gitea_tools.go b/workers/cache/gitea_tools.go index 4474f7df..5d09ccb3 100644 --- a/workers/cache/gitea_tools.go +++ b/workers/cache/gitea_tools.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package cache import ( diff --git a/workers/cache/tool_cache.go b/workers/cache/tool_cache.go index 941131d7..2e91bf50 100644 --- a/workers/cache/tool_cache.go +++ b/workers/cache/tool_cache.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + package cache import ( diff --git a/workers/common/interfaces.go b/workers/common/interfaces.go index 4791a500..a04f16a6 100644 --- a/workers/common/interfaces.go +++ b/workers/common/interfaces.go @@ -1,3 +1,17 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package common import ( diff --git a/workers/entity/controller.go b/workers/entity/controller.go index 2cb910b3..99618194 100644 --- a/workers/entity/controller.go +++ b/workers/entity/controller.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package entity import ( diff --git a/workers/entity/controller_watcher.go b/workers/entity/controller_watcher.go index ace63702..6bd3e173 100644 --- a/workers/entity/controller_watcher.go +++ b/workers/entity/controller_watcher.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package entity import ( diff --git a/workers/entity/util.go b/workers/entity/util.go index 877758a7..2216c326 100644 --- a/workers/entity/util.go +++ b/workers/entity/util.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package entity import ( diff --git a/workers/entity/worker.go b/workers/entity/worker.go index efb40ac1..583ab2c8 100644 --- a/workers/entity/worker.go +++ b/workers/entity/worker.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
package entity import ( diff --git a/workers/entity/worker_watcher.go b/workers/entity/worker_watcher.go index 273a8f02..9acfbc60 100644 --- a/workers/entity/worker_watcher.go +++ b/workers/entity/worker_watcher.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package entity import ( diff --git a/workers/provider/errors.go b/workers/provider/errors.go index 40cfc9a8..7c9247dc 100644 --- a/workers/provider/errors.go +++ b/workers/provider/errors.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package provider import "fmt" diff --git a/workers/provider/instance_manager.go b/workers/provider/instance_manager.go index 3900abaf..84e5bcca 100644 --- a/workers/provider/instance_manager.go +++ b/workers/provider/instance_manager.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package provider import ( diff --git a/workers/provider/provider.go b/workers/provider/provider.go index b1ab1220..78e50955 100644 --- a/workers/provider/provider.go +++ b/workers/provider/provider.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
package provider import ( diff --git a/workers/provider/provider_helper.go b/workers/provider/provider_helper.go index 96762135..13694794 100644 --- a/workers/provider/provider_helper.go +++ b/workers/provider/provider_helper.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package provider import ( diff --git a/workers/provider/util.go b/workers/provider/util.go index 8cd33525..cf27d14f 100644 --- a/workers/provider/util.go +++ b/workers/provider/util.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package provider import ( diff --git a/workers/scaleset/controller.go b/workers/scaleset/controller.go index 4aba42c2..63112f43 100644 --- a/workers/scaleset/controller.go +++ b/workers/scaleset/controller.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package scaleset import ( diff --git a/workers/scaleset/controller_watcher.go b/workers/scaleset/controller_watcher.go index 551d711d..8344cac5 100644 --- a/workers/scaleset/controller_watcher.go +++ b/workers/scaleset/controller_watcher.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
package scaleset import ( diff --git a/workers/scaleset/interfaces.go b/workers/scaleset/interfaces.go index ca529ce1..b8acfceb 100644 --- a/workers/scaleset/interfaces.go +++ b/workers/scaleset/interfaces.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package scaleset import ( diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index b3bfe332..e4faba70 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package scaleset import ( diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index 54d9b52e..7b3fdf03 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package scaleset import ( diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index 7e0ec869..76a321f4 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
package scaleset import ( diff --git a/workers/scaleset/util.go b/workers/scaleset/util.go index 1d8d6c51..7852cb89 100644 --- a/workers/scaleset/util.go +++ b/workers/scaleset/util.go @@ -1,3 +1,16 @@ +// Copyright 2025 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. package scaleset import ( From b4268e6bab4e6335f7057a948d6f248fa36f01a5 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 20 May 2025 09:52:13 +0000 Subject: [PATCH 075/179] Fix tests Signed-off-by: Gabriel Adrian Samfira --- database/sql/enterprise_test.go | 8 -------- database/sql/gitea_test.go | 18 +++++++++--------- database/sql/github_test.go | 15 ++++++++------- database/sql/repositories_test.go | 2 +- 4 files changed, 18 insertions(+), 25 deletions(-) diff --git a/database/sql/enterprise_test.go b/database/sql/enterprise_test.go index 9d81287d..24e7bee7 100644 --- a/database/sql/enterprise_test.go +++ b/database/sql/enterprise_test.go @@ -219,14 +219,6 @@ func (s *EnterpriseTestSuite) TestCreateEnterpriseInvalidDBPassphrase() { func (s *EnterpriseTestSuite) TestCreateEnterpriseDBCreateErr() { s.Fixtures.SQLMock.ExpectBegin() - s.Fixtures.SQLMock. - ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_credentials` WHERE user_id = ? AND name = ? AND `github_credentials`.`deleted_at` IS NULL ORDER BY `github_credentials`.`id` LIMIT ?")). - WithArgs(s.adminUserID, s.Fixtures.Enterprises[0].CredentialsName, 1). - WillReturnRows(sqlmock.NewRows([]string{"id", "endpoint_name"}).AddRow(s.testCreds.ID, s.testCreds.Endpoint.Name)) - s.Fixtures.SQLMock.ExpectQuery(regexp.QuoteMeta("SELECT * FROM `github_endpoints` WHERE `github_endpoints`.`name` = ? AND `github_endpoints`.`deleted_at` IS NULL")). - WithArgs(s.testCreds.Endpoint.Name). - WillReturnRows(sqlmock.NewRows([]string{"name"}). - AddRow(s.testCreds.Endpoint.Name)) s.Fixtures.SQLMock. ExpectExec(regexp.QuoteMeta("INSERT INTO `enterprises`")). 
WillReturnError(fmt.Errorf("creating enterprise mock error")) diff --git a/database/sql/gitea_test.go b/database/sql/gitea_test.go index 8eaaf23b..a70d3b1f 100644 --- a/database/sql/gitea_test.go +++ b/database/sql/gitea_test.go @@ -61,7 +61,7 @@ func (s *GiteaTestSuite) TestCreatingEndpoint() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) createEpParams := params.CreateGiteaEndpointParams{ - Name: testEndpointName, + Name: alternetTestEndpointName, Description: testEndpointDescription, APIBaseURL: testAPIBaseURL, BaseURL: testBaseURL, @@ -70,14 +70,14 @@ func (s *GiteaTestSuite) TestCreatingEndpoint() { endpoint, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) s.Require().NoError(err) s.Require().NotNil(endpoint) - s.Require().Equal(testEndpointName, endpoint.Name) + s.Require().Equal(alternetTestEndpointName, endpoint.Name) } func (s *GiteaTestSuite) TestCreatingDuplicateEndpointFails() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) createEpParams := params.CreateGiteaEndpointParams{ - Name: testEndpointName, + Name: alternetTestEndpointName, Description: testEndpointDescription, APIBaseURL: testAPIBaseURL, BaseURL: testBaseURL, @@ -95,7 +95,7 @@ func (s *GiteaTestSuite) TestGetEndpoint() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) createEpParams := params.CreateGiteaEndpointParams{ - Name: "deleteme", + Name: alternetTestEndpointName, Description: testEndpointDescription, APIBaseURL: testAPIBaseURL, BaseURL: testBaseURL, @@ -129,7 +129,7 @@ func (s *GiteaTestSuite) TestDeletingEndpoint() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) createEpParams := params.CreateGiteaEndpointParams{ - Name: testEndpointName, + Name: alternetTestEndpointName, Description: testEndpointDescription, APIBaseURL: testAPIBaseURL, BaseURL: testBaseURL, @@ -139,10 +139,10 @@ func (s *GiteaTestSuite) TestDeletingEndpoint() { s.Require().NoError(err) s.Require().NotNil(endpoint) - err = s.db.DeleteGiteaEndpoint(ctx, testEndpointName) + err = s.db.DeleteGiteaEndpoint(ctx, alternetTestEndpointName) s.Require().NoError(err) - _, err = s.db.GetGiteaEndpoint(ctx, testEndpointName) + _, err = s.db.GetGiteaEndpoint(ctx, alternetTestEndpointName) s.Require().Error(err) s.Require().ErrorIs(err, runnerErrors.ErrNotFound) } @@ -199,7 +199,7 @@ func (s *GiteaTestSuite) TestListEndpoints() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) createEpParams := params.CreateGiteaEndpointParams{ - Name: testEndpointName, + Name: alternetTestEndpointName, Description: testEndpointDescription, APIBaseURL: testAPIBaseURL, BaseURL: testBaseURL, @@ -210,7 +210,7 @@ func (s *GiteaTestSuite) TestListEndpoints() { endpoints, err := s.db.ListGiteaEndpoints(ctx) s.Require().NoError(err) - s.Require().Len(endpoints, 1) + s.Require().Len(endpoints, 2) } func (s *GiteaTestSuite) TestCreateCredentialsFailsWithUnauthorizedForAnonUser() { diff --git a/database/sql/github_test.go b/database/sql/github_test.go index f2e83fec..7b99d5e2 100644 --- a/database/sql/github_test.go +++ b/database/sql/github_test.go @@ -33,13 +33,14 @@ import ( ) const ( - testUploadBaseURL string = "https://uploads.example.com" - testBaseURL string = "https://example.com" - testAPIBaseURL string = "https://api.example.com" - testEndpointName string = "test-endpoint" - testEndpointDescription string = "test description" - testCredsName string = "test-creds" - testCredsDescription string = "test creds" + 
testUploadBaseURL string = "https://uploads.example.com" + testBaseURL string = "https://example.com" + testAPIBaseURL string = "https://api.example.com" + testEndpointName string = "test-endpoint" + alternetTestEndpointName string = "test-endpoint-alternate" + testEndpointDescription string = "test description" + testCredsName string = "test-creds" + testCredsDescription string = "test creds" ) type GithubTestSuite struct { diff --git a/database/sql/repositories_test.go b/database/sql/repositories_test.go index 3f8d8ca2..f27e10b5 100644 --- a/database/sql/repositories_test.go +++ b/database/sql/repositories_test.go @@ -226,7 +226,7 @@ func (s *RepoTestSuite) TestCreateRepository() { entity, err := repo.GetEntity() s.Require().Nil(err) - s.Require().Equal(s.Fixtures.CreateRepoParams.Owner, entity.ID) + s.Require().Equal(s.Fixtures.CreateRepoParams.Owner, entity.Owner) s.Require().Equal(entity.EntityType, params.ForgeEntityTypeRepository) forgeType, err := entity.GetForgeType() From 1fe09548bca0f770e9d8f09a4ae4ead38bbd1331 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 20 May 2025 13:50:56 +0000 Subject: [PATCH 076/179] Add more tests Signed-off-by: Gabriel Adrian Samfira --- database/watcher/filters.go | 13 +- database/watcher/util_test.go | 16 + database/watcher/watcher_test.go | 1274 +++++++++++++++++++++++++++++- 3 files changed, 1288 insertions(+), 15 deletions(-) create mode 100644 database/watcher/util_test.go diff --git a/database/watcher/filters.go b/database/watcher/filters.go index b09c422d..acf79ba8 100644 --- a/database/watcher/filters.go +++ b/database/watcher/filters.go @@ -205,21 +205,26 @@ func WithEntityJobFilter(ghEntity params.ForgeEntity) dbCommon.PayloadFilterFunc } } -// WithForgeCredentialsFilter returns a filter function that filters payloads by Github credentials. +// WithForgeCredentialsFilter returns a filter function that filters payloads by Github or Gitea credentials. func WithForgeCredentialsFilter(creds params.ForgeCredentials) dbCommon.PayloadFilterFunc { return func(payload dbCommon.ChangePayload) bool { - var idGetter params.IDGetter + var forgeCreds params.ForgeCredentials var ok bool switch payload.EntityType { case dbCommon.GithubCredentialsEntityType, dbCommon.GiteaCredentialsEntityType: - idGetter, ok = payload.Payload.(params.ForgeCredentials) + forgeCreds, ok = payload.Payload.(params.ForgeCredentials) default: return false } if !ok { return false } - return idGetter.GetID() == creds.GetID() + // Gitea and Github creds have different models. The ID is uint, so we + // need to explicitly check their type, or risk a clash.
+ if forgeCreds.ForgeType != creds.ForgeType { + return false + } + return forgeCreds.GetID() == creds.GetID() } } diff --git a/database/watcher/util_test.go b/database/watcher/util_test.go new file mode 100644 index 00000000..82b94491 --- /dev/null +++ b/database/watcher/util_test.go @@ -0,0 +1,16 @@ +package watcher_test + +import ( + "time" + + "github.com/cloudbase/garm/database/common" +) + +func waitForPayload(ch <-chan common.ChangePayload, timeout time.Duration) *common.ChangePayload { + select { + case payload := <-ch: + return &payload + case <-time.After(timeout): + return nil + } +} diff --git a/database/watcher/watcher_test.go b/database/watcher/watcher_test.go index b272bda7..ab4653a9 100644 --- a/database/watcher/watcher_test.go +++ b/database/watcher/watcher_test.go @@ -20,13 +20,16 @@ import ( "testing" "time" + "github.com/google/uuid" "github.com/pkg/errors" "github.com/stretchr/testify/suite" + commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/database" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" garmTesting "github.com/cloudbase/garm/internal/testing" + "github.com/cloudbase/garm/params" ) type WatcherTestSuite struct { @@ -50,6 +53,7 @@ func (s *WatcherTestSuite) TearDownTest() { currentWatcher := watcher.GetWatcher() if currentWatcher != nil { currentWatcher.Close() + watcher.SetWatcher(nil) } } @@ -126,7 +130,7 @@ func (s *WatcherTestSuite) TestProducerAndConsumer() { s.Require().Equal(payload, receivedPayload) } -func (s *WatcherTestSuite) TestConsumetWithFilter() { +func (s *WatcherTestSuite) TestConsumeWithFilter() { producer, err := watcher.RegisterProducer(s.ctx, "test-producer") s.Require().NoError(err) s.Require().NotNil(producer) @@ -146,12 +150,9 @@ func (s *WatcherTestSuite) TestConsumetWithFilter() { err = producer.Notify(payload) s.Require().NoError(err) - select { - case receivedPayload := <-consumer.Watch(): - s.Require().Equal(payload, receivedPayload) - case <-time.After(1 * time.Second): - s.T().Fatal("expected payload not received") - } + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) payload = common.ChangePayload{ EntityType: common.ControllerEntityType, @@ -161,11 +162,141 @@ func (s *WatcherTestSuite) TestConsumetWithFilter() { err = producer.Notify(payload) s.Require().NoError(err) - select { - case <-consumer.Watch(): - s.T().Fatal("unexpected payload received") - case <-time.After(1 * time.Second): + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithAnyFilter() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithAny( + watcher.WithEntityTypeFilter(common.ControllerEntityType), + watcher.WithEntityFilter(params.ForgeEntity{ + EntityType: params.ForgeEntityTypeRepository, + Owner: "test", + Name: "test", + ID: "test", + }), + )) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.ControllerEntityType, + Operation: common.UpdateOperation, + Payload: "test", } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + 
s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.RepositoryEntityType, + Operation: common.UpdateOperation, + Payload: params.Repository{ + Owner: "test", + Name: "test", + ID: "test", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + // We're not watching for this repo + payload = common.ChangePayload{ + EntityType: common.RepositoryEntityType, + Operation: common.UpdateOperation, + Payload: params.Repository{ + Owner: "test", + Name: "test", + ID: "test2", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) + + // We're not watching for orgs + payload = common.ChangePayload{ + EntityType: common.OrganizationEntityType, + Operation: common.UpdateOperation, + Payload: params.Repository{ + Owner: "test", + Name: "test", + ID: "test2", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithAllFilter() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithAll( + watcher.WithEntityFilter(params.ForgeEntity{ + EntityType: params.ForgeEntityTypeRepository, + Owner: "test", + Name: "test", + ID: "test", + }), + watcher.WithOperationTypeFilter(common.CreateOperation), + )) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.RepositoryEntityType, + Operation: common.CreateOperation, + Payload: params.Repository{ + Owner: "test", + Name: "test", + ID: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.RepositoryEntityType, + Operation: common.UpdateOperation, + Payload: params.Repository{ + Owner: "test", + Name: "test", + ID: "test", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) } func maybeInitController(db common.Store) error { @@ -180,6 +311,1127 @@ func maybeInitController(db common.Store) error { return nil } +func (s *WatcherTestSuite) TestWithEntityPoolFilterRepository() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeRepository, + Owner: "test", + Name: "test", + ID: "test", + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityPoolFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.PoolEntityType, + Operation: common.UpdateOperation, + Payload: params.Pool{ + ID: "test", + RepoID: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + 
+ receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.PoolEntityType, + Operation: common.UpdateOperation, + Payload: params.Pool{ + ID: "test", + RepoID: "test2", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityPoolFilterOrg() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + ID: "test", + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityPoolFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.PoolEntityType, + Operation: common.UpdateOperation, + Payload: params.Pool{ + ID: "test", + OrgID: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.PoolEntityType, + Operation: common.UpdateOperation, + Payload: params.Pool{ + ID: "test", + OrgID: "test2", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityPoolFilterEnterprise() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeEnterprise, + Name: "test", + ID: "test", + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityPoolFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.PoolEntityType, + Operation: common.UpdateOperation, + Payload: params.Pool{ + ID: "test", + EnterpriseID: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.PoolEntityType, + Operation: common.UpdateOperation, + Payload: params.Pool{ + ID: "test", + EnterpriseID: "test2", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) + + // Invalid payload for declared entity type + payload = common.ChangePayload{ + EntityType: common.PoolEntityType, + Operation: common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 1, + EnterpriseID: "test2", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityPoolFilterBogusEntityType() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + 
s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + // This should trigger the default branch in the filter and + // return false + EntityType: params.ForgeEntityType("bogus"), + Name: "test", + ID: "test", + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityPoolFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.PoolEntityType, + Operation: common.UpdateOperation, + Payload: params.Pool{ + ID: "test", + EnterpriseID: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.PoolEntityType, + Operation: common.UpdateOperation, + Payload: params.Pool{ + ID: "test", + EnterpriseID: "test2", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityScaleSetFilterRepository() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeRepository, + Owner: "test", + Name: "test", + ID: "test", + Credentials: params.ForgeCredentials{ + ForgeType: params.GithubEndpointType, + }, + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityScaleSetFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 1, + RepoID: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 1, + RepoID: "test2", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityScaleSetFilterOrg() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + ID: "test", + Credentials: params.ForgeCredentials{ + ForgeType: params.GithubEndpointType, + }, + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityScaleSetFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 1, + OrgID: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: 
common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 1, + OrgID: "test2", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityScaleSetFilterEnterprise() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeEnterprise, + Name: "test", + ID: "test", + Credentials: params.ForgeCredentials{ + ForgeType: params.GithubEndpointType, + }, + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityScaleSetFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 1, + EnterpriseID: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 1, + EnterpriseID: "test2", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityScaleSetFilterBogusEntityType() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + // This should trigger the default branch in the filter and + // return false + EntityType: params.ForgeEntityType("bogus"), + Name: "test", + ID: "test", + Credentials: params.ForgeCredentials{ + ForgeType: params.GithubEndpointType, + }, + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityScaleSetFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 1, + EnterpriseID: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 1, + EnterpriseID: "test2", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityScaleSetFilterReturnsFalseForGiteaEndpoints() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeRepository, + Owner: "test", + Name: "test", + ID: "test", + Credentials: params.ForgeCredentials{ + ForgeType: params.GiteaEndpointType, + }, + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityScaleSetFilter(entity), + ) + 
s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 1, + RepoID: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityFilterRepository() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeRepository, + Owner: "test", + Name: "test", + ID: "test", + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.RepositoryEntityType, + Operation: common.UpdateOperation, + Payload: params.Repository{ + ID: "test", + Name: "test", + Owner: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.RepositoryEntityType, + Operation: common.UpdateOperation, + Payload: params.Repository{ + ID: "test2", + Name: "test", + Owner: "test", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityFilterOrg() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + ID: "test", + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.OrganizationEntityType, + Operation: common.UpdateOperation, + Payload: params.Organization{ + ID: "test", + Name: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.OrganizationEntityType, + Operation: common.UpdateOperation, + Payload: params.Organization{ + ID: "test2", + Name: "test", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityFilterEnterprise() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeEnterprise, + Name: "test", + ID: "test", + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.EnterpriseEntityType, + Operation: 
common.UpdateOperation, + Payload: params.Enterprise{ + ID: "test", + Name: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.EnterpriseEntityType, + Operation: common.UpdateOperation, + Payload: params.Enterprise{ + ID: "test2", + Name: "test", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityJobFilterRepository() { + repoUUID, err := uuid.NewUUID() + s.Require().NoError(err) + + repoUUID2, err := uuid.NewUUID() + s.Require().NoError(err) + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeRepository, + Owner: "test", + Name: "test", + ID: repoUUID.String(), + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityJobFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.JobEntityType, + Operation: common.UpdateOperation, + Payload: params.Job{ + ID: 1, + Name: "test", + RepoID: &repoUUID, + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.JobEntityType, + Operation: common.UpdateOperation, + Payload: params.Job{ + ID: 1, + Name: "test", + RepoID: &repoUUID2, + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityJobFilterOrg() { + orgUUID, err := uuid.NewUUID() + s.Require().NoError(err) + + orgUUID2, err := uuid.NewUUID() + s.Require().NoError(err) + + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeOrganization, + Name: "test", + ID: orgUUID.String(), + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityJobFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.JobEntityType, + Operation: common.UpdateOperation, + Payload: params.Job{ + ID: 1, + Name: "test", + OrgID: &orgUUID, + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.JobEntityType, + Operation: common.UpdateOperation, + Payload: params.Job{ + ID: 1, + Name: "test", + OrgID: &orgUUID2, + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityJobFilterEnterprise() { + entUUID, err 
:= uuid.NewUUID() + s.Require().NoError(err) + + entUUID2, err := uuid.NewUUID() + s.Require().NoError(err) + + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + EntityType: params.ForgeEntityTypeEnterprise, + Name: "test", + ID: entUUID.String(), + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityJobFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.JobEntityType, + Operation: common.UpdateOperation, + Payload: params.Job{ + ID: 1, + Name: "test", + EnterpriseID: &entUUID, + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.JobEntityType, + Operation: common.UpdateOperation, + Payload: params.Job{ + ID: 1, + Name: "test", + EnterpriseID: &entUUID2, + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithEntityJobFilterBogusEntityType() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + entity := params.ForgeEntity{ + // This should trigger the default branch in the filter and + // return false + EntityType: params.ForgeEntityType("bogus"), + Name: "test", + ID: "test", + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithEntityJobFilter(entity), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.JobEntityType, + Operation: common.UpdateOperation, + Payload: params.Job{ + ID: 1, + Name: "test", + EnterpriseID: nil, + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.JobEntityType, + Operation: common.UpdateOperation, + Payload: params.Job{ + ID: 1, + Name: "test", + EnterpriseID: nil, + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithNone() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithNone(), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.RepositoryEntityType, + Operation: common.UpdateOperation, + Payload: params.Repository{ + ID: "test", + Name: "test", + Owner: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithUserIDFilter() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + userID, err := uuid.NewUUID() + 
s.Require().NoError(err) + + userID2, err := uuid.NewUUID() + s.Require().NoError(err) + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithUserIDFilter(userID.String()), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.UserEntityType, + Operation: common.UpdateOperation, + Payload: params.User{ + ID: userID.String(), + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.UserEntityType, + Operation: common.UpdateOperation, + Payload: params.User{ + ID: userID2.String(), + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.UserEntityType, + Operation: common.UpdateOperation, + // Declare as user, but payload is a pool. Filter should return false. + Payload: params.Pool{}, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithForgeCredentialsGithub() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + creds := params.ForgeCredentials{ + ForgeType: params.GithubEndpointType, + ID: 1, + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithForgeCredentialsFilter(creds), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.GithubCredentialsEntityType, + Operation: common.UpdateOperation, + Payload: params.ForgeCredentials{ + ForgeType: params.GithubEndpointType, + ID: 1, + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.GiteaCredentialsEntityType, + Operation: common.UpdateOperation, + Payload: params.ForgeCredentials{ + ForgeType: params.GiteaEndpointType, + ID: 1, + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.GiteaCredentialsEntityType, + Operation: common.UpdateOperation, + Payload: params.Pool{}, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithcaleSetFilter() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + scaleSet := params.ScaleSet{ + ID: 1, + } + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithScaleSetFilter(scaleSet), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 
1, + Name: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: params.ScaleSet{ + ID: 2, + Name: "test", + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.ScaleSetEntityType, + Operation: common.UpdateOperation, + Payload: params.Pool{}, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) +} + +func (s *WatcherTestSuite) TestWithExcludeEntityTypeFilter() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithExcludeEntityTypeFilter(common.RepositoryEntityType), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.RepositoryEntityType, + Operation: common.UpdateOperation, + Payload: params.Repository{ + ID: "test", + Name: "test", + Owner: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.OrganizationEntityType, + Operation: common.UpdateOperation, + Payload: params.Repository{ + ID: "test", + Name: "test", + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) +} + +func (s *WatcherTestSuite) TestWithInstanceStatusFilter() { + producer, err := watcher.RegisterProducer(s.ctx, "test-producer") + s.Require().NoError(err) + s.Require().NotNil(producer) + + consumer, err := watcher.RegisterConsumer( + s.ctx, "test-consumer", + watcher.WithInstanceStatusFilter( + commonParams.InstanceCreating, + commonParams.InstanceDeleting), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + + payload := common.ChangePayload{ + EntityType: common.InstanceEntityType, + Operation: common.UpdateOperation, + Payload: params.Instance{ + ID: "test-instance", + Status: commonParams.InstanceCreating, + }, + } + err = producer.Notify(payload) + s.Require().NoError(err) + + receivedPayload := waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.InstanceEntityType, + Operation: common.UpdateOperation, + Payload: params.Instance{ + ID: "test-instance", + Status: commonParams.InstanceDeleted, + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().Nil(receivedPayload) + + payload = common.ChangePayload{ + EntityType: common.InstanceEntityType, + Operation: common.UpdateOperation, + Payload: params.Instance{ + ID: "test-instance", + Status: 
commonParams.InstanceDeleting, + }, + } + + err = producer.Notify(payload) + s.Require().NoError(err) + receivedPayload = waitForPayload(consumer.Watch(), 100*time.Millisecond) + s.Require().NotNil(receivedPayload) + s.Require().Equal(payload, *receivedPayload) +} + func TestWatcherTestSuite(t *testing.T) { // Watcher tests watcherSuite := &WatcherTestSuite{ From 1fc72ab5c8c1f6d4f2bf3dcdb4a014939070733e Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 20 May 2025 16:11:37 +0000 Subject: [PATCH 077/179] Add more tests Signed-off-by: Gabriel Adrian Samfira --- database/watcher/watcher_store_test.go | 94 +++++++++++++++++++++++++- database/watcher/watcher_test.go | 27 ++++++++ 2 files changed, 120 insertions(+), 1 deletion(-) diff --git a/database/watcher/watcher_store_test.go b/database/watcher/watcher_store_test.go index 8fe7d0a4..e682270a 100644 --- a/database/watcher/watcher_store_test.go +++ b/database/watcher/watcher_store_test.go @@ -919,6 +919,98 @@ func (s *WatcherStoreTestSuite) TestGithubCredentialsWatcher() { } } +func (s *WatcherStoreTestSuite) TestGiteaCredentialsWatcher() { + consumer, err := watcher.RegisterConsumer( + s.ctx, "gitea-cred-test", + watcher.WithEntityTypeFilter(common.GiteaCredentialsEntityType), + watcher.WithAny( + watcher.WithOperationTypeFilter(common.CreateOperation), + watcher.WithOperationTypeFilter(common.UpdateOperation), + watcher.WithOperationTypeFilter(common.DeleteOperation)), + ) + s.Require().NoError(err) + s.Require().NotNil(consumer) + s.T().Cleanup(func() { consumer.Close() }) + consumeEvents(consumer) + + testEndpointParams := params.CreateGiteaEndpointParams{ + Name: "test", + Description: "test endpoint", + APIBaseURL: "https://api.gitea.example.com", + BaseURL: "https://gitea.example.com", + } + + testEndpoint, err := s.store.CreateGiteaEndpoint(s.ctx, testEndpointParams) + s.Require().NoError(err) + s.Require().NotEmpty(testEndpoint.Name) + + s.T().Cleanup(func() { + if err := s.store.DeleteGiteaEndpoint(s.ctx, testEndpoint.Name); err != nil { + s.T().Logf("failed to delete Gitea endpoint: %v", err) + } + consumeEvents(consumer) + }) + + giteaCredParams := params.CreateGiteaCredentialsParams{ + Name: "test-creds", + Description: "test credentials", + Endpoint: testEndpoint.Name, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "bogus", + }, + } + + giteaCred, err := s.store.CreateGiteaCredentials(s.ctx, giteaCredParams) + s.Require().NoError(err) + s.Require().NotEmpty(giteaCred.ID) + + select { + case event := <-consumer.Watch(): + s.Require().Equal(common.ChangePayload{ + EntityType: common.GiteaCredentialsEntityType, + Operation: common.CreateOperation, + Payload: giteaCred, + }, event) + case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } + + newDesc := "updated test description" + updateParams := params.UpdateGiteaCredentialsParams{ + Description: &newDesc, + } + + updatedGiteaCred, err := s.store.UpdateGiteaCredentials(s.ctx, giteaCred.ID, updateParams) + s.Require().NoError(err) + s.Require().Equal(newDesc, updatedGiteaCred.Description) + + select { + case event := <-consumer.Watch(): + s.Require().Equal(common.ChangePayload{ + EntityType: common.GiteaCredentialsEntityType, + Operation: common.UpdateOperation, + Payload: updatedGiteaCred, + }, event) + case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } + + err = s.store.DeleteGiteaCredentials(s.ctx, giteaCred.ID) + s.Require().NoError(err) + + select { + case event := 
<-consumer.Watch(): + asCreds, ok := event.Payload.(params.ForgeCredentials) + s.Require().True(ok) + s.Require().Equal(event.Operation, common.DeleteOperation) + s.Require().Equal(event.EntityType, common.GiteaCredentialsEntityType) + s.Require().Equal(asCreds.ID, updatedGiteaCred.ID) + case <-time.After(1 * time.Second): + s.T().Fatal("expected payload not received") + } +} + func (s *WatcherStoreTestSuite) TestGithubEndpointWatcher() { consumer, err := watcher.RegisterConsumer( s.ctx, "gh-ep-test", @@ -1001,7 +1093,7 @@ consume: if !ok { return } - case <-time.After(100 * time.Millisecond): + case <-time.After(20 * time.Millisecond): break consume } } diff --git a/database/watcher/watcher_test.go b/database/watcher/watcher_test.go index ab4653a9..5b7ecdce 100644 --- a/database/watcher/watcher_test.go +++ b/database/watcher/watcher_test.go @@ -61,6 +61,7 @@ func (s *WatcherTestSuite) TestRegisterConsumerTwiceWillError() { consumer, err := watcher.RegisterConsumer(s.ctx, "test") s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) consumer, err = watcher.RegisterConsumer(s.ctx, "test") s.Require().ErrorIs(err, common.ErrConsumerAlreadyRegistered) @@ -117,6 +118,7 @@ func (s *WatcherTestSuite) TestProducerAndConsumer() { watcher.WithOperationTypeFilter(common.UpdateOperation)) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.ControllerEntityType, @@ -141,6 +143,7 @@ func (s *WatcherTestSuite) TestConsumeWithFilter() { watcher.WithOperationTypeFilter(common.UpdateOperation)) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.ControllerEntityType, @@ -184,6 +187,7 @@ func (s *WatcherTestSuite) TestWithAnyFilter() { )) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.ControllerEntityType, @@ -265,6 +269,7 @@ func (s *WatcherTestSuite) TestWithAllFilter() { )) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.RepositoryEntityType, @@ -329,6 +334,7 @@ func (s *WatcherTestSuite) TestWithEntityPoolFilterRepository() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.PoolEntityType, @@ -377,6 +383,7 @@ func (s *WatcherTestSuite) TestWithEntityPoolFilterOrg() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.PoolEntityType, @@ -425,6 +432,7 @@ func (s *WatcherTestSuite) TestWithEntityPoolFilterEnterprise() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.PoolEntityType, @@ -490,6 +498,7 @@ func (s *WatcherTestSuite) TestWithEntityPoolFilterBogusEntityType() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.PoolEntityType, @@ -541,6 +550,7 @@ func (s *WatcherTestSuite) TestWithEntityScaleSetFilterRepository() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.ScaleSetEntityType, @@ -592,6 +602,7 @@ func (s *WatcherTestSuite) TestWithEntityScaleSetFilterOrg() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + 
consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.ScaleSetEntityType, @@ -643,6 +654,7 @@ func (s *WatcherTestSuite) TestWithEntityScaleSetFilterEnterprise() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.ScaleSetEntityType, @@ -696,6 +708,7 @@ func (s *WatcherTestSuite) TestWithEntityScaleSetFilterBogusEntityType() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.ScaleSetEntityType, @@ -747,6 +760,7 @@ func (s *WatcherTestSuite) TestWithEntityScaleSetFilterReturnsFalseForGiteaEndpo ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.ScaleSetEntityType, @@ -781,6 +795,7 @@ func (s *WatcherTestSuite) TestWithEntityFilterRepository() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.RepositoryEntityType, @@ -831,6 +846,7 @@ func (s *WatcherTestSuite) TestWithEntityFilterOrg() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.OrganizationEntityType, @@ -879,6 +895,7 @@ func (s *WatcherTestSuite) TestWithEntityFilterEnterprise() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.EnterpriseEntityType, @@ -933,6 +950,7 @@ func (s *WatcherTestSuite) TestWithEntityJobFilterRepository() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.JobEntityType, @@ -989,6 +1007,7 @@ func (s *WatcherTestSuite) TestWithEntityJobFilterOrg() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.JobEntityType, @@ -1045,6 +1064,7 @@ func (s *WatcherTestSuite) TestWithEntityJobFilterEnterprise() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.JobEntityType, @@ -1097,6 +1117,7 @@ func (s *WatcherTestSuite) TestWithEntityJobFilterBogusEntityType() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.JobEntityType, @@ -1140,6 +1161,7 @@ func (s *WatcherTestSuite) TestWithNone() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.RepositoryEntityType, @@ -1174,6 +1196,7 @@ func (s *WatcherTestSuite) TestWithUserIDFilter() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.UserEntityType, @@ -1231,6 +1254,7 @@ func (s *WatcherTestSuite) TestWithForgeCredentialsGithub() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.GithubCredentialsEntityType, @@ -1288,6 +1312,7 @@ func (s *WatcherTestSuite) TestWithcaleSetFilter() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.ScaleSetEntityType, @@ -1341,6 +1366,7 @@ func (s *WatcherTestSuite) TestWithExcludeEntityTypeFilter() { ) s.Require().NoError(err) s.Require().NotNil(consumer) 
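// NOTE: rough sketch only — the waitForPayload and consumeEvents helpers used
// throughout these tests are defined elsewhere in the watcher test files and
// are not part of this hunk; the shapes below are inferred from how the tests
// call them (consumer.Watch() delivering common.ChangePayload values):
//
//	// waitForPayload returns the next event from the channel, or nil if nothing
//	// arrives before the timeout expires.
//	func waitForPayload(ch <-chan common.ChangePayload, timeout time.Duration) *common.ChangePayload {
//		select {
//		case payload := <-ch:
//			return &payload
//		case <-time.After(timeout):
//			return nil
//		}
//	}
//
//	// consumeEvents drains any events already buffered on a consumer so each
//	// test starts from an empty channel; this patch adds a call to it right
//	// after every RegisterConsumer.
//	func consumeEvents(consumer common.Consumer) {
//	consume:
//		for {
//			select {
//			case _, ok := <-consumer.Watch():
//				if !ok {
//					return
//				}
//			case <-time.After(20 * time.Millisecond):
//				break consume
//			}
//		}
//	}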
+ consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.RepositoryEntityType, @@ -1386,6 +1412,7 @@ func (s *WatcherTestSuite) TestWithInstanceStatusFilter() { ) s.Require().NoError(err) s.Require().NotNil(consumer) + consumeEvents(consumer) payload := common.ChangePayload{ EntityType: common.InstanceEntityType, From b41318c9483e129aa41dac9687650b00ec0d52b5 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 20 May 2025 16:52:17 +0000 Subject: [PATCH 078/179] Allow for nightly builds Signed-off-by: Gabriel Adrian Samfira --- .github/workflows/build-and-push.yml | 30 +++++++++++++++++++++- Dockerfile | 38 +++++++++++++++++----------- 2 files changed, 52 insertions(+), 16 deletions(-) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 0adb62b6..117aa763 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -39,8 +39,28 @@ jobs: - name: Build and push run: | + set -x cd src/github.com/cloudbase/garm - VERSION=$(git describe --tags --match='v[0-9]*' --always ${{ github.event.inputs.ref }}) + VERSION=$(git describe --tags --match='v[0-9]*' --always) + AZURE_REF=v0.1.0 + OPENSTACK_REF=v0.1.0 + LXD_REF=v0.1.0 + INCUS_REF=v0.1.0 + AWS_REF=v0.1.0 + GCP_REF=v0.1.0 + EQUINIX_REF=v0.1.0 + K8S_REF=v0.3.2 + if [ "${{ github.event.inputs.ref }}" == "main" ]; then + AZURE_REF="main" + OPENSTACK_REF="main" + LXD_REF="main" + INCUS_REF="main" + AWS_REF="main" + GCP_REF="main" + EQUINIX_REF="main" + K8S_REF="main" + VERSION="nightly" + fi docker buildx build \ --provenance=false \ --platform linux/amd64,linux/arm64 \ @@ -48,5 +68,13 @@ jobs: --label "org.opencontainers.image.description=GARM ${{ github.event.inputs.ref }}" \ --label "org.opencontainers.image.licenses=Apache 2.0" \ --build-arg="GARM_REF=${{ github.event.inputs.ref }}" \ + --build-arg="AZURE_REF=${AZURE_REF}" \ + --build-arg="OPENSTACK_REF=${OPENSTACK_REF}" \ + --build-arg="LXD_REF=${LXD_REF}" \ + --build-arg="INCUS_REF=${INCUS_REF}" \ + --build-arg="AWS_REF=${AWS_REF}" \ + --build-arg="GCP_REF=${GCP_REF}" \ + --build-arg="EQUINIX_REF=${EQUINIX_REF}" \ + --build-arg="K8S_REF=${K8S_REF}" \ -t ${{ github.event.inputs.push_to_project }}/garm:"${VERSION}" \ --push . diff --git a/Dockerfile b/Dockerfile index 44f96a59..8e317723 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,13 @@ FROM docker.io/golang:alpine AS builder ARG GARM_REF +ARG AZURE_REF=v0.1.0 +ARG OPENSTACK_REF=v0.1.0 +ARG LXD_REF=v0.1.0 +ARG INCUS_REF=v0.1.0 +ARG AWS_REF=v0.1.0 +ARG GCP_REF=v0.1.0 +ARG EQUINIX_REF=v0.1.0 +ARG K8S_REF=v0.3.2 LABEL stage=builder @@ -8,28 +16,28 @@ RUN git config --global --add safe.directory /build ADD . 
/build/garm RUN cd /build/garm && git checkout ${GARM_REF} -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-azure /build/garm-provider-azure -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-openstack /build/garm-provider-openstack -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-lxd /build/garm-provider-lxd -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-incus /build/garm-provider-incus -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-aws /build/garm-provider-aws -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-gcp /build/garm-provider-gcp -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-equinix /build/garm-provider-equinix +RUN git clone --depth 1 --branch ${AZURE_REF} https://github.com/cloudbase/garm-provider-azure /build/garm-provider-azure +RUN git clone --depth 1 --branch ${OPENSTACK_REF} https://github.com/cloudbase/garm-provider-openstack /build/garm-provider-openstack +RUN git clone --depth 1 --branch ${LXD_REF} https://github.com/cloudbase/garm-provider-lxd /build/garm-provider-lxd +RUN git clone --depth 1 --branch ${INCUS_REF} https://github.com/cloudbase/garm-provider-incus /build/garm-provider-incus +RUN git clone --depth 1 --branch ${AWS_REF} https://github.com/cloudbase/garm-provider-aws /build/garm-provider-aws +RUN git clone --depth 1 --branch ${GCP_REF} https://github.com/cloudbase/garm-provider-gcp /build/garm-provider-gcp +RUN git clone --depth 1 --branch ${EQUINIX_REF} https://github.com/cloudbase/garm-provider-equinix /build/garm-provider-equinix -RUN git clone --depth 1 --branch v0.3.1 https://github.com/mercedes-benz/garm-provider-k8s /build/garm-provider-k8s +RUN git clone --depth 1 --branch ${K8S_REF} https://github.com/mercedes-benz/garm-provider-k8s /build/garm-provider-k8s RUN cd /build/garm && go build -o /bin/garm \ -tags osusergo,netgo,sqlite_omit_load_extension \ -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ /build/garm/cmd/garm && upx /bin/garm RUN mkdir -p /opt/garm/providers.d -RUN cd /build/garm-provider-azure && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-azure . && upx /opt/garm/providers.d/garm-provider-azure -RUN cd /build/garm-provider-openstack && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-openstack . && upx /opt/garm/providers.d/garm-provider-openstack -RUN cd /build/garm-provider-lxd && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-lxd . && upx /opt/garm/providers.d/garm-provider-lxd -RUN cd /build/garm-provider-incus && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-incus . && upx /opt/garm/providers.d/garm-provider-incus -RUN cd /build/garm-provider-aws && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-aws . 
&& upx /opt/garm/providers.d/garm-provider-aws -RUN cd /build/garm-provider-gcp && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-gcp . && upx /opt/garm/providers.d/garm-provider-gcp -RUN cd /build/garm-provider-equinix && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-equinix . && upx /opt/garm/providers.d/garm-provider-equinix +RUN cd /build/garm-provider-azure && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-azure . && upx /opt/garm/providers.d/garm-provider-azure +RUN cd /build/garm-provider-openstack && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-openstack . && upx /opt/garm/providers.d/garm-provider-openstack +RUN cd /build/garm-provider-lxd && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-lxd . && upx /opt/garm/providers.d/garm-provider-lxd +RUN cd /build/garm-provider-incus && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-incus . && upx /opt/garm/providers.d/garm-provider-incus +RUN cd /build/garm-provider-aws && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-aws . && upx /opt/garm/providers.d/garm-provider-aws +RUN cd /build/garm-provider-gcp && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-gcp . && upx /opt/garm/providers.d/garm-provider-gcp +RUN cd /build/garm-provider-equinix && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-equinix . && upx /opt/garm/providers.d/garm-provider-equinix RUN cd /build/garm-provider-k8s/cmd/garm-provider-k8s && go build -ldflags="-linkmode external -extldflags '-static' -s -w" -o /opt/garm/providers.d/garm-provider-k8s . && upx /opt/garm/providers.d/garm-provider-k8s From bdcc817def0282f6e22e7baa0bf9beb748a48f3a Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 20 May 2025 20:10:08 +0000 Subject: [PATCH 079/179] Enable daily cron Signed-off-by: Gabriel Adrian Samfira --- .github/workflows/build-and-push.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 117aa763..b7309242 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -10,6 +10,8 @@ on: description: "Ref to build" required: true default: "main" + schedule: + - cron: "0 2 * * *" permissions: contents: read From 269c6064e787f3602051e325cafcecb162425dd1 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 20 May 2025 21:12:13 +0000 Subject: [PATCH 080/179] Add CLI to docker image This change adds the GARM cli to the docker image. 
Signed-off-by: Gabriel Adrian Samfira --- Dockerfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Dockerfile b/Dockerfile index 8e317723..88e9bc27 100644 --- a/Dockerfile +++ b/Dockerfile @@ -30,6 +30,10 @@ RUN cd /build/garm && go build -o /bin/garm \ -tags osusergo,netgo,sqlite_omit_load_extension \ -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ /build/garm/cmd/garm && upx /bin/garm +RUN cd /build/garm/cmd/garm-cli && go build -o /bin/garm-cli \ + -tags osusergo,netgo,sqlite_omit_load_extension \ + -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ + . && upx /bin/garm-cli RUN mkdir -p /opt/garm/providers.d RUN cd /build/garm-provider-azure && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-azure . && upx /opt/garm/providers.d/garm-provider-azure RUN cd /build/garm-provider-openstack && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-openstack . && upx /opt/garm/providers.d/garm-provider-openstack From c5c35f13243dff0a78d9432d8f8f4be313b694ce Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 21 May 2025 04:57:47 +0000 Subject: [PATCH 081/179] Use default values for ref and push to project Signed-off-by: Gabriel Adrian Samfira --- .github/workflows/build-and-push.yml | 18 ++++++++++++------ .github/workflows/integration-tests.yml | 1 - 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index b7309242..475ec651 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -42,7 +42,13 @@ jobs: - name: Build and push run: | set -x - cd src/github.com/cloudbase/garm + REGISTRY_INPUT="${{ github.event.inputs.push_to_project }}" + REF_INPUT="${{ github.event.inputs.ref }}" + + PUSH_TO_PROJECT="${REGISTRY_INPUT:-ghcr.io/cloudbase}" + GH_REF="${REF_INPUT:-main}" + cd src/github.com/cloudbase/garm && git checkout "${GH_REF}" + VERSION=$(git describe --tags --match='v[0-9]*' --always) AZURE_REF=v0.1.0 OPENSTACK_REF=v0.1.0 @@ -52,7 +58,7 @@ jobs: GCP_REF=v0.1.0 EQUINIX_REF=v0.1.0 K8S_REF=v0.3.2 - if [ "${{ github.event.inputs.ref }}" == "main" ]; then + if [ "$GH_REF" == "main" ]; then AZURE_REF="main" OPENSTACK_REF="main" LXD_REF="main" @@ -66,10 +72,10 @@ jobs: docker buildx build \ --provenance=false \ --platform linux/amd64,linux/arm64 \ - --label "org.opencontainers.image.source=https://github.com/cloudbase/garm/tree/${{ github.event.inputs.ref }}" \ - --label "org.opencontainers.image.description=GARM ${{ github.event.inputs.ref }}" \ + --label "org.opencontainers.image.source=https://github.com/cloudbase/garm/tree/${GH_REF}" \ + --label "org.opencontainers.image.description=GARM ${GH_REF}" \ --label "org.opencontainers.image.licenses=Apache 2.0" \ - --build-arg="GARM_REF=${{ github.event.inputs.ref }}" \ + --build-arg="GARM_REF=${GH_REF}" \ --build-arg="AZURE_REF=${AZURE_REF}" \ --build-arg="OPENSTACK_REF=${OPENSTACK_REF}" \ --build-arg="LXD_REF=${LXD_REF}" \ @@ -78,5 +84,5 @@ jobs: --build-arg="GCP_REF=${GCP_REF}" \ 
--build-arg="EQUINIX_REF=${EQUINIX_REF}" \ --build-arg="K8S_REF=${K8S_REF}" \ - -t ${{ github.event.inputs.push_to_project }}/garm:"${VERSION}" \ + -t ${PUSH_TO_PROJECT}/garm:"${VERSION}" \ --push . diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index dc1a68b8..04072b20 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -107,7 +107,6 @@ jobs: with: name: garm-logs path: /artifacts-logs - merge-multiple: true - name: Cleanup orphan GARM resources via GitHub API if: always() From 4cf14689079ecc45355eacac82ee9e62f636057e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 06:30:29 +0000 Subject: [PATCH 082/179] Bump golang.org/x/mod from 0.17.0 to 0.24.0 Bumps [golang.org/x/mod](https://github.com/golang/mod) from 0.17.0 to 0.24.0. - [Commits](https://github.com/golang/mod/compare/v0.17.0...v0.24.0) --- updated-dependencies: - dependency-name: golang.org/x/mod dependency-version: 0.24.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 ++-- vendor/golang.org/x/mod/LICENSE | 4 ++-- vendor/modules.txt | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/go.mod b/go.mod index 5070dbfe..097353a1 100644 --- a/go.mod +++ b/go.mod @@ -29,7 +29,7 @@ require ( github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 golang.org/x/crypto v0.38.0 - golang.org/x/mod v0.17.0 + golang.org/x/mod v0.24.0 golang.org/x/oauth2 v0.30.0 golang.org/x/sync v0.14.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 diff --git a/go.sum b/go.sum index 1cbc5ee0..28e4606e 100644 --- a/go.sum +++ b/go.sum @@ -190,8 +190,8 @@ go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= -golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= -golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= +golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= diff --git a/vendor/golang.org/x/mod/LICENSE b/vendor/golang.org/x/mod/LICENSE index 6a66aea5..2a7cf70d 100644 --- a/vendor/golang.org/x/mod/LICENSE +++ b/vendor/golang.org/x/mod/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. +Copyright 2009 The Go Authors. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are @@ -10,7 +10,7 @@ notice, this list of conditions and the following disclaimer. copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - * Neither the name of Google Inc. 
nor the names of its + * Neither the name of Google LLC nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. diff --git a/vendor/modules.txt b/vendor/modules.txt index dbd42ce3..46b3a5e6 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -286,8 +286,8 @@ golang.org/x/crypto/chacha20poly1305 golang.org/x/crypto/hkdf golang.org/x/crypto/internal/alias golang.org/x/crypto/internal/poly1305 -# golang.org/x/mod v0.17.0 -## explicit; go 1.18 +# golang.org/x/mod v0.24.0 +## explicit; go 1.23.0 golang.org/x/mod/semver # golang.org/x/net v0.40.0 ## explicit; go 1.23.0 From 1c758d010a34a19f1ad5fe23f967157d9616b762 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 21 May 2025 10:27:44 +0000 Subject: [PATCH 083/179] Add examples for setting up GARM with Gitea Signed-off-by: Gabriel Adrian Samfira --- README.md | 12 +- doc/gitea.md | 361 +++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 370 insertions(+), 3 deletions(-) create mode 100644 doc/gitea.md diff --git a/README.md b/README.md index 69f4ee6f..175b0556 100644 --- a/README.md +++ b/README.md @@ -22,12 +22,10 @@ Welcome to GARM! -GARM enables you to create and automatically maintain pools of [self-hosted GitHub runners](https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners), with auto-scaling that can be used inside your github workflow runs. +GARM enables you to create and automatically maintain pools of self-hosted runners in both [Github](https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners) and [Gitea](https://github.com/go-gitea/gitea/) with auto-scaling that can be used inside your workflow runs. The goal of ```GARM``` is to be simple to set up, simple to configure and simple to use. The server itself is a single binary that can run on any GNU/Linux machine without any other requirements other than the providers you want to enable in your setup. It is intended to be easy to deploy in any environment and can create runners in virtually any system you can write a provider for (if one does not alreay exist). There is no complicated setup process and no extremely complex concepts to understand. Once set up, it's meant to stay out of your way. -GARM supports creating pools and scale sets in either GitHub itself or in your own deployment of [GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@3.10/admin/overview/about-github-enterprise-server). For instructions on how to use ```GARM``` with GHE, see the [credentials](/doc/github_credentials.md) section of the documentation. - Through the use of providers, `GARM` can create runners in a variety of environments using the same `GARM` instance. Whether you want to create runners in your OpenStack cloud, your Azure cloud or your Kubernetes cluster, that is easily achieved by installing the appropriate providers, configuring them in `GARM` and creating pools that use them. You can create zero-runner pools for instances with high costs (large VMs, GPU enabled instances, etc) and have them spin up on demand, or you can create large pools of eagerly created k8s backed runners that can be used for your CI/CD pipelines at a moment's notice. You can mix them up and create pools in any combination of providers or resource allocations you want. 
GARM supports two modes of operation: @@ -62,6 +60,14 @@ Check out the [quickstart](/doc/quickstart.md) document for instructions on how Thanks to the efforts of the amazing folks at [@mercedes-benz](https://github.com/mercedes-benz/), GARM can now be integrated into k8s via their operator. Check out the [GARM operator](https://github.com/mercedes-benz/garm-operator/) for more details. +## Configuring GARM for GHES + +GARM supports creating pools and scale sets in either GitHub itself or in your own deployment of [GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@3.10/admin/overview/about-github-enterprise-server). For instructions on how to use ```GARM``` with GHE, see the [credentials](/doc/github_credentials.md) section of the documentation. + +## Configuring GARM for Gitea + +GARM now has support for Gitea (>=1.24.0). For information on getting started with Gitea, see the [Gitea quickstart](/doc/gitea.md) document. + ## Using GARM GARM is designed with simplicity in mind. At least we try to keep it as simple as possible. We're aware that adding a new tool in your workflow can be painful, especially when you already have to deal with so many. The cognitive load for OPS has reached a level where it feels overwhelming at times to even wrap your head around a new tool. As such, we believe that tools should be simple, should take no more than a few hours to understand and set up and if you absolutely need to interact with the tool, it should be as intuitive as possible. Although we try our best to make this happen, we're aware that GARM has some rough edges, especially for new users. If you encounter issues or feel like the setup process was too complicated, please let us know. We're always looking to improve the user experience. diff --git a/doc/gitea.md b/doc/gitea.md new file mode 100644 index 00000000..2bf4e3d6 --- /dev/null +++ b/doc/gitea.md @@ -0,0 +1,361 @@ +# Using GARM with Gitea + +Starting with Gitea 1.24 and the latest version of GARM (upcoming v0.2.0 - currently `main`), GARM supports Gitea as a forge, side by side with GitHub/GHES. A new endpoint type has been added to represent Gitea instances, which you can configure and use alongside your GitHub runners. + +You can essentially create runners for both GitHub and Gitea using the same GARM instance, using the same CLI and the same API. It's simply a matter of adding an endpoint and credentials. The rest is the same as for GitHub. + +## Quickstart + +This is for testing purposes only. We'll assume you're running on an Ubuntu 24.04 VM or server. You can use anything you'd like, but this quickstart is tailored to get you up and running with the LXD provider. So we'll: + +* Initialize LXD +* Create a docker compose yaml +* Deploy Gitea and GARM +* Configure GARM to use Gitea + +You will have to install Docker-CE yourself. + +### Initialize LXD + +If you already have LXD initialized, you can skip this step. Otherwise, simply run: + +```bash +sudo lxd init --auto +``` + +This should set up LXD with default settings that should work on any system. + +LXD and Docker sometimes have issues with networking due to some conflicting iptables rules. In most cases, if you have docker installed and notice that you don't have access to the outside world from the containers, run the following command: + +```bash +sudo iptables -I DOCKER-USER -j ACCEPT +``` + +### Create the docker compose + +Create a docker compose file in `$HOME/compose.yaml`. This docker compose will deploy both Gitea and GARM.
If you already have a Gitea >=1.24.0, you can edit this docker compose to only deploy GARM. + +```yaml +version: "3" + +networks: + default: + external: false + +services: + gitea: + image: docker.gitea.com/gitea:1.24.0-rc0 + container_name: gitea + environment: + - USER_UID=1000 + - USER_GID=1000 + restart: always + networks: + - default + volumes: + - /etc/gitea/gitea:/data + - /etc/timezone:/etc/timezone:ro + - /etc/localtime:/etc/localtime:ro + ports: + - "80:80" + - "22:22" + garm: + image: ghcr.io/cloudbase/garm:${GARM_VERSION:-nightly} + container_name: garm + environment: + - USER_UID=1000 + - USER_GID=1000 + restart: always + networks: + - default + volumes: + - /etc/garm:/etc/garm + - /etc/timezone:/etc/timezone:ro + - /etc/localtime:/etc/localtime:ro + # Give GARM access to the LXD socker. We need this later in the LXD provider. + - /var/snap/lxd/common/lxd/unix.socket:/var/snap/lxd/common/lxd/unix.socket + ports: + - "9997:9997" +``` + +Create the folders for Gitea and GARM: + +```bash +sudo mkdir -p /etc/gitea /etc/garm +sudo chown 1000:1000 /etc/gitea /etc/garm +``` + +Create the GARM configuration file: + +```bash + +sudo tee /etc/garm/config.toml < Date: Wed, 21 May 2025 13:52:15 +0300 Subject: [PATCH 084/179] Update gitea.md --- doc/gitea.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/gitea.md b/doc/gitea.md index 2bf4e3d6..215a51ba 100644 --- a/doc/gitea.md +++ b/doc/gitea.md @@ -72,7 +72,7 @@ services: - /etc/garm:/etc/garm - /etc/timezone:/etc/timezone:ro - /etc/localtime:/etc/localtime:ro - # Give GARM access to the LXD socker. We need this later in the LXD provider. + # Give GARM access to the LXD socket. We need this later in the LXD provider. - /var/snap/lxd/common/lxd/unix.socket:/var/snap/lxd/common/lxd/unix.socket ports: - "9997:9997" @@ -358,4 +358,4 @@ To get more details about the runner, run: garm-cli runner show RUNNER_NAME_GOES_HERE ``` -That's it! You can now use GARM with Gitea. You can add more pools, more repos, more orgs, more endpoints and more providers. \ No newline at end of file +That's it! You can now use GARM with Gitea. You can add more pools, more repos, more orgs, more endpoints and more providers. From 8e8d7fd36407ed02e51162aa3141def27e156560 Mon Sep 17 00:00:00 2001 From: Kaspar Schleiser Date: Wed, 21 May 2025 16:39:22 +0200 Subject: [PATCH 085/179] Fix some typos in scalesets.md --- doc/scalesets.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/scalesets.md b/doc/scalesets.md index 867f8f61..de9d348e 100644 --- a/doc/scalesets.md +++ b/doc/scalesets.md @@ -83,11 +83,11 @@ That's it. You now have a scale set created, ready to accept jobs. Scale sets are a new way of managing runners. They were introduced by GitHub to enable more efficient scheduling of runners. Scale sets are meant to reduce API calls, improve reliability of message deliveries to the auto scaler and improve efficiency of runner scheduling. While webhooks work great most of the time, under heavy load, they may not fire or they may fire while the auto scaler is offline, leading to lost messages. If webhooks are fired while GARM is down, we will never know about those jobs unless we query the current workflow runs. -Listing workflow runs is not feisable for orgs or enterprises, as that would mean listing all repos withing an org then for each repository, listing all workflow runs. This gets worse for enterprises. 
Scale sets on the other hand allows GARM to subscribe to a message queue and get messages just for that scale set over HTTP long poll. +Listing workflow runs is not feasible for orgs or enterprises, as that would mean listing all repos within an org then for each repository, listing all workflow runs. This gets worse for enterprises. Scale sets on the other hand allow GARM to subscribe to a message queue and get messages just for that scale set over HTTP long poll. Advantages of scale sets over pools: * No more need to install a webhook, reducing your security footprint. * Scheduling is done by GitHub. GARM receives runner requests from GitHub and GARM can choose to acquire those jobs or leave them for some other scaler. * Easier use of runner groups. While GARM supports runner groups, github currently [does not send the group name](https://github.com/orgs/community/discussions/158000) as part of webhooks in `queued` state. This prevents GARM (or any other auto scaler) to efficiently schedule runners to pools that have runner groups set. But given that in the case of scale sets, GitHub schedules the runners to the scaleset itself, we can efficiently create runners in certain runner groups. -* scale set names must be unique within a runner group \ No newline at end of file +* scale set names must be unique within a runner group From cfe707e522bec0699d43e18ac066b4a1ebab9071 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 21 May 2025 19:45:23 +0000 Subject: [PATCH 086/179] Fix potential nil pointer dereference Signed-off-by: Gabriel Adrian Samfira --- util/github/client.go | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/util/github/client.go b/util/github/client.go index f25329c7..a553e1d8 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -240,8 +240,13 @@ func (g *githubClient) ListEntityRunnerApplicationDownloads(ctx context.Context) } func parseError(response *github.Response, err error) error { - slog.Debug("parsing error", "status_code", response.StatusCode, "response", response, "error", err) - switch response.StatusCode { + var statusCode int + if response != nil { + statusCode = response.StatusCode + } + + slog.Debug("parsing error", "status_code", statusCode, "response", response, "error", err) + switch statusCode { case http.StatusNotFound: return runnerErrors.ErrNotFound case http.StatusUnauthorized: @@ -249,7 +254,7 @@ func parseError(response *github.Response, err error) error { case http.StatusUnprocessableEntity: return runnerErrors.ErrBadRequest default: - if response.StatusCode >= 100 && response.StatusCode < 300 { + if statusCode >= 100 && statusCode < 300 { return nil } if err != nil { From 4e157fc4ccc434e6760ba0412eba901fde07d08a Mon Sep 17 00:00:00 2001 From: Gabriel Date: Wed, 21 May 2025 23:46:29 +0300 Subject: [PATCH 087/179] Update Dockerfile --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 88e9bc27..47723a6b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -48,6 +48,7 @@ RUN cd /build/garm-provider-k8s/cmd/garm-provider-k8s && go build -ldflags="-lin FROM busybox COPY --from=builder /bin/garm /bin/garm +COPY --from=builder /bin/garm-cli /bin/garm-cli COPY --from=builder /opt/garm/providers.d/garm-provider-openstack /opt/garm/providers.d/garm-provider-openstack COPY --from=builder /opt/garm/providers.d/garm-provider-lxd /opt/garm/providers.d/garm-provider-lxd COPY --from=builder /opt/garm/providers.d/garm-provider-incus /opt/garm/providers.d/garm-provider-incus From 
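// NOTE: minimal sketch (not part of the patch) of the failure mode the guard
// above protects against. go-github can hand back a nil *github.Response
// together with a non-nil error when the request never produced an HTTP
// response at all (DNS failure, connection refused, canceled context, etc.).
// Previously parseError read response.StatusCode unconditionally and would
// panic in that situation; with the guard, statusCode stays 0, none of the
// HTTP status cases match, and handling falls through to the default branch
// below, which works off the original error instead:
//
//	err := parseError(nil, errors.New("dial tcp: connect: connection refused"))
//	// err reflects the original transport error; no nil pointer dereference.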
9660b28019c54c9e8a215c9eb5d8289cdf3819e3 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 21 May 2025 20:58:21 +0000 Subject: [PATCH 088/179] Avoid nil pointer dereference in CLI Signed-off-by: Gabriel Adrian Samfira --- cmd/garm-cli/cmd/github_credentials.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/cmd/garm-cli/cmd/github_credentials.go b/cmd/garm-cli/cmd/github_credentials.go index ae2374f6..6f9b6409 100644 --- a/cmd/garm-cli/cmd/github_credentials.go +++ b/cmd/garm-cli/cmd/github_credentials.go @@ -375,7 +375,10 @@ func formatOneGithubCredential(cred params.ForgeCredentials) { header := table.Row{"Field", "Value"} t.AppendHeader(header) - resetMinutes := cred.RateLimit.ResetIn().Minutes() + var resetMinutes float64 + if cred.RateLimit != nil { + resetMinutes = cred.RateLimit.ResetIn().Minutes() + } t.AppendRow(table.Row{"ID", cred.ID}) t.AppendRow(table.Row{"Created At", cred.CreatedAt}) From 7e576dc631deb97eb5308001cdbca4b36904b146 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 21 May 2025 21:29:43 +0000 Subject: [PATCH 089/179] Ensure that admin token exists Make sure that the admin info is populated when calling GenerateJitRunnerConfig. Signed-off-by: Gabriel Adrian Samfira --- util/github/scalesets/runners.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/util/github/scalesets/runners.go b/util/github/scalesets/runners.go index 178361a1..252d7e94 100644 --- a/util/github/scalesets/runners.go +++ b/util/github/scalesets/runners.go @@ -41,6 +41,10 @@ func (s *ScaleSetClient) GenerateJitRunnerConfig(ctx context.Context, runnerName return params.RunnerScaleSetJitRunnerConfig{}, err } + if err := s.ensureAdminInfo(ctx); err != nil { + return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to ensure admin info: %w", err) + } + serviceURL, err := s.actionsServiceInfo.GetURL() if err != nil { return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to get pipeline URL: %w", err) From 9921a7bfc8d0845056e3439a488b23400de5147d Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 22 May 2025 18:43:32 +0000 Subject: [PATCH 090/179] Fix AddInstanceEvent and expose events * We were passing the wrong type to GORM for events * We now expose entity events in the API and CLI Signed-off-by: Gabriel Adrian Samfira --- cmd/garm-cli/cmd/enterprise.go | 9 +++- cmd/garm-cli/cmd/organization.go | 8 ++- cmd/garm-cli/cmd/pool.go | 6 ++- cmd/garm-cli/cmd/repository.go | 10 +++- database/common/mocks/Store.go | 8 +-- database/sql/enterprise.go | 13 +++-- database/sql/enterprise_test.go | 26 ++++++++++ database/sql/models.go | 6 +-- database/sql/organizations.go | 20 +++++--- database/sql/organizations_test.go | 26 ++++++++++ database/sql/pools.go | 15 +++++- database/sql/repositories.go | 20 +++++--- database/sql/repositories_test.go | 27 ++++++++++ database/sql/util.go | 80 ++++++++++++++++++++++-------- params/params.go | 14 ++++++ util/github/client.go | 1 - workers/cache/cache.go | 4 +- workers/cache/tool_cache.go | 40 +++++++++++++-- 18 files changed, 273 insertions(+), 60 deletions(-) diff --git a/cmd/garm-cli/cmd/enterprise.go b/cmd/garm-cli/cmd/enterprise.go index eabfad26..1e6c3930 100644 --- a/cmd/garm-cli/cmd/enterprise.go +++ b/cmd/garm-cli/cmd/enterprise.go @@ -16,6 +16,7 @@ package cmd import ( "fmt" + "strings" "github.com/jedib0t/go-pretty/v6/table" "github.com/spf13/cobra" @@ -250,9 +251,15 @@ func formatOneEnterprise(enterprise params.Enterprise) { t.AppendRow(table.Row{"Pools", pool.ID}, 
rowConfigAutoMerge) } } + + if len(enterprise.Events) > 0 { + for _, event := range enterprise.Events { + t.AppendRow(table.Row{"Events", fmt.Sprintf("%s %s: %s", event.CreatedAt.Format("2006-01-02T15:04:05"), strings.ToUpper(string(event.EventLevel)), event.Message)}, rowConfigAutoMerge) + } + } t.SetColumnConfigs([]table.ColumnConfig{ {Number: 1, AutoMerge: true}, - {Number: 2, AutoMerge: false}, + {Number: 2, AutoMerge: false, WidthMax: 100}, }) fmt.Println(t.Render()) diff --git a/cmd/garm-cli/cmd/organization.go b/cmd/garm-cli/cmd/organization.go index c35fd75b..58110053 100644 --- a/cmd/garm-cli/cmd/organization.go +++ b/cmd/garm-cli/cmd/organization.go @@ -16,6 +16,7 @@ package cmd import ( "fmt" + "strings" "github.com/jedib0t/go-pretty/v6/table" "github.com/spf13/cobra" @@ -394,9 +395,14 @@ func formatOneOrganization(org params.Organization) { t.AppendRow(table.Row{"Pools", pool.ID}, rowConfigAutoMerge) } } + if len(org.Events) > 0 { + for _, event := range org.Events { + t.AppendRow(table.Row{"Events", fmt.Sprintf("%s %s: %s", event.CreatedAt.Format("2006-01-02T15:04:05"), strings.ToUpper(string(event.EventLevel)), event.Message)}, rowConfigAutoMerge) + } + } t.SetColumnConfigs([]table.ColumnConfig{ {Number: 1, AutoMerge: true}, - {Number: 2, AutoMerge: false}, + {Number: 2, AutoMerge: false, WidthMax: 100}, }) fmt.Println(t.Render()) diff --git a/cmd/garm-cli/cmd/pool.go b/cmd/garm-cli/cmd/pool.go index 0b891e96..b2c324ea 100644 --- a/cmd/garm-cli/cmd/pool.go +++ b/cmd/garm-cli/cmd/pool.go @@ -476,7 +476,7 @@ func formatPools(pools []params.Pool) { t.SetColumnConfigs([]table.ColumnConfig{ {Number: 2, WidthMax: 40}, }) - header := table.Row{"ID", "Image", "Flavor", "Tags", "Belongs to", "Enabled"} + header := table.Row{"ID", "Image", "Flavor", "Tags", "Belongs to", "Endpoint", "Forge Type", "Enabled"} if long { header = append(header, "Level", "Created At", "Updated at", "Runner Prefix", "Priority") } @@ -501,7 +501,7 @@ func formatPools(pools []params.Pool) { belongsTo = pool.EnterpriseName level = entityTypeEnterprise } - row := table.Row{pool.ID, pool.Image, pool.Flavor, strings.Join(tags, " "), belongsTo, pool.Enabled} + row := table.Row{pool.ID, pool.Image, pool.Flavor, strings.Join(tags, " "), belongsTo, pool.Endpoint.Name, pool.Endpoint.EndpointType, pool.Enabled} if long { row = append(row, level, pool.CreatedAt, pool.UpdatedAt, pool.GetRunnerPrefix(), pool.Priority) } @@ -561,6 +561,8 @@ func formatOnePool(pool params.Pool) { t.AppendRow(table.Row{"Runner Prefix", pool.GetRunnerPrefix()}) t.AppendRow(table.Row{"Extra specs", string(pool.ExtraSpecs)}) t.AppendRow(table.Row{"GitHub Runner Group", pool.GitHubRunnerGroup}) + t.AppendRow(table.Row{"Forge Type", pool.Endpoint.EndpointType}) + t.AppendRow(table.Row{"Endpoint Name", pool.Endpoint.Name}) if len(pool.Instances) > 0 { for _, instance := range pool.Instances { diff --git a/cmd/garm-cli/cmd/repository.go b/cmd/garm-cli/cmd/repository.go index c94495cd..96f214fd 100644 --- a/cmd/garm-cli/cmd/repository.go +++ b/cmd/garm-cli/cmd/repository.go @@ -16,6 +16,7 @@ package cmd import ( "fmt" + "strings" "github.com/jedib0t/go-pretty/v6/table" "github.com/spf13/cobra" @@ -404,9 +405,16 @@ func formatOneRepository(repo params.Repository) { t.AppendRow(table.Row{"Pools", pool.ID}, rowConfigAutoMerge) } } + + if len(repo.Events) > 0 { + for _, event := range repo.Events { + t.AppendRow(table.Row{"Events", fmt.Sprintf("%s %s: %s", event.CreatedAt.Format("2006-01-02T15:04:05"), strings.ToUpper(string(event.EventLevel)), 
event.Message)}, rowConfigAutoMerge) + } + } + t.SetColumnConfigs([]table.ColumnConfig{ {Number: 1, AutoMerge: true}, - {Number: 2, AutoMerge: false}, + {Number: 2, AutoMerge: false, WidthMax: 100}, }) fmt.Println(t.Render()) diff --git a/database/common/mocks/Store.go b/database/common/mocks/Store.go index c5994b87..97da1c06 100644 --- a/database/common/mocks/Store.go +++ b/database/common/mocks/Store.go @@ -97,7 +97,7 @@ func (_m *Store) ControllerInfo() (params.ControllerInfo, error) { } // CreateEnterprise provides a mock function with given fields: ctx, name, credentialsName, webhookSecret, poolBalancerType -func (_m *Store) CreateEnterprise(ctx context.Context, name string, credentialsName string, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Enterprise, error) { +func (_m *Store) CreateEnterprise(ctx context.Context, name string, credentialsName params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Enterprise, error) { ret := _m.Called(ctx, name, credentialsName, webhookSecret, poolBalancerType) if len(ret) == 0 { @@ -106,16 +106,16 @@ func (_m *Store) CreateEnterprise(ctx context.Context, name string, credentialsN var r0 params.Enterprise var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, string, params.PoolBalancerType) (params.Enterprise, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Enterprise, error)); ok { return rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) } - if rf, ok := ret.Get(0).(func(context.Context, string, string, string, params.PoolBalancerType) params.Enterprise); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) params.Enterprise); ok { r0 = rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) } else { r0 = ret.Get(0).(params.Enterprise) } - if rf, ok := ret.Get(1).(func(context.Context, string, string, string, params.PoolBalancerType) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) error); ok { r1 = rf(ctx, name, credentialsName, webhookSecret, poolBalancerType) } else { r1 = ret.Error(1) diff --git a/database/sql/enterprise.go b/database/sql/enterprise.go index e9c2ed08..41d95b26 100644 --- a/database/sql/enterprise.go +++ b/database/sql/enterprise.go @@ -70,12 +70,12 @@ func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name string, credent return params.Enterprise{}, errors.Wrap(err, "creating enterprise") } - paramEnt, err = s.sqlToCommonEnterprise(newEnterprise, true) + ret, err := s.GetEnterpriseByID(ctx, newEnterprise.ID.String()) if err != nil { return params.Enterprise{}, errors.Wrap(err, "creating enterprise") } - return paramEnt, nil + return ret, nil } func (s *sqlDatabase) GetEnterprise(ctx context.Context, name, endpointName string) (params.Enterprise, error) { @@ -92,7 +92,14 @@ func (s *sqlDatabase) GetEnterprise(ctx context.Context, name, endpointName stri } func (s *sqlDatabase) GetEnterpriseByID(ctx context.Context, enterpriseID string) (params.Enterprise, error) { - enterprise, err := s.getEnterpriseByID(ctx, s.conn, enterpriseID, "Pools", "Credentials", "Endpoint", "Credentials.Endpoint") + preloadList := []string{ + "Pools", + "Credentials", + "Endpoint", + "Credentials.Endpoint", + "Events", + } + enterprise, err := s.getEnterpriseByID(ctx, s.conn, enterpriseID, 
preloadList...) if err != nil { return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") } diff --git a/database/sql/enterprise_test.go b/database/sql/enterprise_test.go index 24e7bee7..79b298d5 100644 --- a/database/sql/enterprise_test.go +++ b/database/sql/enterprise_test.go @@ -441,6 +441,10 @@ func (s *EnterpriseTestSuite) TestGetEnterpriseByIDDBDecryptingErr() { ExpectQuery(regexp.QuoteMeta("SELECT * FROM `enterprises` WHERE id = ? AND `enterprises`.`deleted_at` IS NULL ORDER BY `enterprises`.`id` LIMIT ?")). WithArgs(s.Fixtures.Enterprises[0].ID, 1). WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow(s.Fixtures.Enterprises[0].ID)) + s.Fixtures.SQLMock. + ExpectQuery(regexp.QuoteMeta("SELECT * FROM `enterprise_events` WHERE `enterprise_events`.`enterprise_id` = ? AND `enterprise_events`.`deleted_at` IS NULL")). + WithArgs(s.Fixtures.Enterprises[0].ID). + WillReturnRows(sqlmock.NewRows([]string{"enterprise_id"}).AddRow(s.Fixtures.Enterprises[0].ID)) s.Fixtures.SQLMock. ExpectQuery(regexp.QuoteMeta("SELECT * FROM `pools` WHERE `pools`.`enterprise_id` = ? AND `pools`.`deleted_at` IS NULL")). WithArgs(s.Fixtures.Enterprises[0].ID). @@ -773,6 +777,28 @@ func (s *EnterpriseTestSuite) TestUpdateEnterprisePoolInvalidEnterpriseID() { s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) } +func (s *EnterpriseTestSuite) TestAddRepoEntityEvent() { + enterprise, err := s.Store.CreateEnterprise( + s.adminCtx, + s.Fixtures.CreateEnterpriseParams.Name, + s.testCreds, + s.Fixtures.CreateEnterpriseParams.WebhookSecret, + params.PoolBalancerTypeRoundRobin) + + s.Require().Nil(err) + entity, err := enterprise.GetEntity() + s.Require().Nil(err) + err = s.Store.AddEntityEvent(s.adminCtx, entity, params.StatusEvent, params.EventInfo, "this is a test", 20) + s.Require().Nil(err) + + enterprise, err = s.Store.GetEnterpriseByID(s.adminCtx, enterprise.ID) + s.Require().Nil(err) + s.Require().Equal(1, len(enterprise.Events)) + s.Require().Equal(params.StatusEvent, enterprise.Events[0].EventType) + s.Require().Equal(params.EventInfo, enterprise.Events[0].EventLevel) + s.Require().Equal("this is a test", enterprise.Events[0].Message) +} + func TestEnterpriseTestSuite(t *testing.T) { suite.Run(t, new(EnterpriseTestSuite)) } diff --git a/database/sql/models.go b/database/sql/models.go index 154fb51d..4cdb9b8b 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -185,7 +185,7 @@ type Repository struct { EndpointName *string `gorm:"index:idx_owner_nocase,unique,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` - Events []*RepositoryEvent `gorm:"foreignKey:RepoID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` + Events []RepositoryEvent `gorm:"foreignKey:RepoID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` } type OrganizationEvent struct { @@ -217,7 +217,7 @@ type Organization struct { EndpointName *string `gorm:"index:idx_org_name_nocase,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` - Events []*OrganizationEvent `gorm:"foreignKey:OrgID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` + Events []OrganizationEvent `gorm:"foreignKey:OrgID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` } type EnterpriseEvent struct { @@ -247,7 +247,7 @@ type Enterprise struct { EndpointName *string `gorm:"index:idx_ent_name_nocase,collate:nocase"` Endpoint GithubEndpoint `gorm:"foreignKey:EndpointName;constraint:OnDelete:SET NULL"` - Events []*EnterpriseEvent 
`gorm:"foreignKey:EnterpriseID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` + Events []EnterpriseEvent `gorm:"foreignKey:EnterpriseID;constraint:OnDelete:CASCADE,OnUpdate:CASCADE;"` } type Address struct { diff --git a/database/sql/organizations.go b/database/sql/organizations.go index 6f8eaa10..73456362 100644 --- a/database/sql/organizations.go +++ b/database/sql/organizations.go @@ -70,17 +70,12 @@ func (s *sqlDatabase) CreateOrganization(ctx context.Context, name string, crede return params.Organization{}, errors.Wrap(err, "creating org") } - org, err := s.getOrgByID(ctx, s.conn, newOrg.ID.String(), "Pools", "Endpoint", "Credentials", "GiteaCredentials", "Credentials.Endpoint", "GiteaCredentials.Endpoint") + ret, err := s.GetOrganizationByID(ctx, newOrg.ID.String()) if err != nil { return params.Organization{}, errors.Wrap(err, "creating org") } - param, err = s.sqlToCommonOrganization(org, true) - if err != nil { - return params.Organization{}, errors.Wrap(err, "creating org") - } - - return param, nil + return ret, nil } func (s *sqlDatabase) GetOrganization(ctx context.Context, name, endpointName string) (params.Organization, error) { @@ -215,7 +210,16 @@ func (s *sqlDatabase) UpdateOrganization(ctx context.Context, orgID string, para } func (s *sqlDatabase) GetOrganizationByID(ctx context.Context, orgID string) (params.Organization, error) { - org, err := s.getOrgByID(ctx, s.conn, orgID, "Pools", "Credentials", "Endpoint", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") + preloadList := []string{ + "Pools", + "Credentials", + "Endpoint", + "Credentials.Endpoint", + "GiteaCredentials", + "GiteaCredentials.Endpoint", + "Events", + } + org, err := s.getOrgByID(ctx, s.conn, orgID, preloadList...) if err != nil { return params.Organization{}, errors.Wrap(err, "fetching org") } diff --git a/database/sql/organizations_test.go b/database/sql/organizations_test.go index a93ef372..5c053cec 100644 --- a/database/sql/organizations_test.go +++ b/database/sql/organizations_test.go @@ -502,6 +502,10 @@ func (s *OrgTestSuite) TestGetOrganizationByIDDBDecryptingErr() { ExpectQuery(regexp.QuoteMeta("SELECT * FROM `organizations` WHERE id = ? AND `organizations`.`deleted_at` IS NULL ORDER BY `organizations`.`id` LIMIT ?")). WithArgs(s.Fixtures.Orgs[0].ID, 1). WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow(s.Fixtures.Orgs[0].ID)) + s.Fixtures.SQLMock. + ExpectQuery(regexp.QuoteMeta("SELECT * FROM `organization_events` WHERE `organization_events`.`org_id` = ? AND `organization_events`.`deleted_at` IS NULL")). + WithArgs(s.Fixtures.Orgs[0].ID). + WillReturnRows(sqlmock.NewRows([]string{"org_id"}).AddRow(s.Fixtures.Orgs[0].ID)) s.Fixtures.SQLMock. ExpectQuery(regexp.QuoteMeta("SELECT * FROM `pools` WHERE `pools`.`org_id` = ? AND `pools`.`deleted_at` IS NULL")). WithArgs(s.Fixtures.Orgs[0].ID). 
@@ -826,6 +830,28 @@ func (s *OrgTestSuite) TestUpdateOrganizationPool() { s.Require().Equal(s.Fixtures.UpdatePoolParams.Flavor, pool.Flavor) } +func (s *OrgTestSuite) TestAddOrgEntityEvent() { + org, err := s.Store.CreateOrganization( + s.adminCtx, + s.Fixtures.CreateOrgParams.Name, + s.testCreds, + s.Fixtures.CreateOrgParams.WebhookSecret, + params.PoolBalancerTypeRoundRobin) + + s.Require().Nil(err) + entity, err := org.GetEntity() + s.Require().Nil(err) + err = s.Store.AddEntityEvent(s.adminCtx, entity, params.StatusEvent, params.EventInfo, "this is a test", 20) + s.Require().Nil(err) + + org, err = s.Store.GetOrganizationByID(s.adminCtx, org.ID) + s.Require().Nil(err) + s.Require().Equal(1, len(org.Events)) + s.Require().Equal(params.StatusEvent, org.Events[0].EventType) + s.Require().Equal(params.EventInfo, org.Events[0].EventLevel) + s.Require().Equal("this is a test", org.Events[0].Message) +} + func (s *OrgTestSuite) TestUpdateOrganizationPoolInvalidOrgID() { entity := params.ForgeEntity{ ID: "dummy-org-id", diff --git a/database/sql/pools.go b/database/sql/pools.go index 24476fe8..a4b3354e 100644 --- a/database/sql/pools.go +++ b/database/sql/pools.go @@ -40,8 +40,11 @@ func (s *sqlDatabase) ListAllPools(_ context.Context) ([]params.Pool, error) { q := s.conn.Model(&Pool{}). Preload("Tags"). Preload("Organization"). + Preload("Organization.Endpoint"). Preload("Repository"). + Preload("Repository.Endpoint"). Preload("Enterprise"). + Preload("Enterprise.Endpoint"). Omit("extra_specs"). Find(&pools) if q.Error != nil { @@ -60,7 +63,17 @@ func (s *sqlDatabase) ListAllPools(_ context.Context) ([]params.Pool, error) { } func (s *sqlDatabase) GetPoolByID(_ context.Context, poolID string) (params.Pool, error) { - pool, err := s.getPoolByID(s.conn, poolID, "Tags", "Instances", "Enterprise", "Organization", "Repository") + preloadList := []string{ + "Tags", + "Instances", + "Enterprise", + "Enterprise.Endpoint", + "Organization", + "Organization.Endpoint", + "Repository", + "Repository.Endpoint", + } + pool, err := s.getPoolByID(s.conn, poolID, preloadList...) 
if err != nil { return params.Pool{}, errors.Wrap(err, "fetching pool by ID") } diff --git a/database/sql/repositories.go b/database/sql/repositories.go index d7419070..03452df6 100644 --- a/database/sql/repositories.go +++ b/database/sql/repositories.go @@ -71,17 +71,12 @@ func (s *sqlDatabase) CreateRepository(ctx context.Context, owner, name string, return params.Repository{}, errors.Wrap(err, "creating repository") } - repo, err := s.getRepoByID(ctx, s.conn, newRepo.ID.String(), "Endpoint", "Credentials", "GiteaCredentials", "Credentials.Endpoint", "GiteaCredentials.Endpoint") + ret, err := s.GetRepositoryByID(ctx, newRepo.ID.String()) if err != nil { return params.Repository{}, errors.Wrap(err, "creating repository") } - param, err = s.sqlToCommonRepository(repo, true) - if err != nil { - return params.Repository{}, errors.Wrap(err, "creating repository") - } - - return param, nil + return ret, nil } func (s *sqlDatabase) GetRepository(ctx context.Context, owner, name, endpointName string) (params.Repository, error) { @@ -217,7 +212,16 @@ func (s *sqlDatabase) UpdateRepository(ctx context.Context, repoID string, param } func (s *sqlDatabase) GetRepositoryByID(ctx context.Context, repoID string) (params.Repository, error) { - repo, err := s.getRepoByID(ctx, s.conn, repoID, "Pools", "Credentials", "Endpoint", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") + preloadList := []string{ + "Pools", + "Credentials", + "Endpoint", + "Credentials.Endpoint", + "GiteaCredentials", + "GiteaCredentials.Endpoint", + "Events", + } + repo, err := s.getRepoByID(ctx, s.conn, repoID, preloadList...) if err != nil { return params.Repository{}, errors.Wrap(err, "fetching repo") } diff --git a/database/sql/repositories_test.go b/database/sql/repositories_test.go index f27e10b5..f593ddce 100644 --- a/database/sql/repositories_test.go +++ b/database/sql/repositories_test.go @@ -558,6 +558,10 @@ func (s *RepoTestSuite) TestGetRepositoryByIDDBDecryptingErr() { ExpectQuery(regexp.QuoteMeta("SELECT * FROM `repositories` WHERE id = ? AND `repositories`.`deleted_at` IS NULL ORDER BY `repositories`.`id` LIMIT ?")). WithArgs(s.Fixtures.Repos[0].ID, 1). WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow(s.Fixtures.Repos[0].ID)) + s.Fixtures.SQLMock. + ExpectQuery(regexp.QuoteMeta("SELECT * FROM `repository_events` WHERE `repository_events`.`repo_id` = ? AND `repository_events`.`deleted_at` IS NULL")). + WithArgs(s.Fixtures.Repos[0].ID). + WillReturnRows(sqlmock.NewRows([]string{"repo_id"}).AddRow(s.Fixtures.Repos[0].ID)) s.Fixtures.SQLMock. ExpectQuery(regexp.QuoteMeta("SELECT * FROM `pools` WHERE `pools`.`repo_id` = ? AND `pools`.`deleted_at` IS NULL")). WithArgs(s.Fixtures.Repos[0].ID). 
@@ -894,6 +898,29 @@ func (s *RepoTestSuite) TestUpdateRepositoryPoolInvalidRepoID() { s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) } +func (s *RepoTestSuite) TestAddRepoEntityEvent() { + repo, err := s.Store.CreateRepository( + s.adminCtx, + s.Fixtures.CreateRepoParams.Owner, + s.Fixtures.CreateRepoParams.Name, + s.testCreds, + s.Fixtures.CreateRepoParams.WebhookSecret, + params.PoolBalancerTypeRoundRobin) + + s.Require().Nil(err) + entity, err := repo.GetEntity() + s.Require().Nil(err) + err = s.Store.AddEntityEvent(s.adminCtx, entity, params.StatusEvent, params.EventInfo, "this is a test", 20) + s.Require().Nil(err) + + repo, err = s.Store.GetRepositoryByID(s.adminCtx, repo.ID) + s.Require().Nil(err) + s.Require().Equal(1, len(repo.Events)) + s.Require().Equal(params.StatusEvent, repo.Events[0].EventType) + s.Require().Equal(params.EventInfo, repo.Events[0].EventLevel) + s.Require().Equal("this is a test", repo.Events[0].Message) +} + func TestRepoTestSuite(t *testing.T) { suite.Run(t, new(RepoTestSuite)) } diff --git a/database/sql/util.go b/database/sql/util.go index 11d338ba..d55e0174 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -166,6 +166,19 @@ func (s *sqlDatabase) sqlToCommonOrganization(org Organization, detailed bool) ( return params.Organization{}, errors.Wrap(err, "converting credentials") } + if len(org.Events) > 0 { + ret.Events = make([]params.EntityEvent, len(org.Events)) + for idx, event := range org.Events { + ret.Events[idx] = params.EntityEvent{ + ID: event.ID, + Message: event.Message, + EventType: event.EventType, + EventLevel: event.EventLevel, + CreatedAt: event.CreatedAt, + } + } + } + if detailed { ret.Credentials = forgeCreds ret.CredentialsName = forgeCreds.Name @@ -214,6 +227,19 @@ func (s *sqlDatabase) sqlToCommonEnterprise(enterprise Enterprise, detailed bool ret.CredentialsID = *enterprise.CredentialsID } + if len(enterprise.Events) > 0 { + ret.Events = make([]params.EntityEvent, len(enterprise.Events)) + for idx, event := range enterprise.Events { + ret.Events[idx] = params.EntityEvent{ + ID: event.ID, + Message: event.Message, + EventType: event.EventType, + EventLevel: event.EventLevel, + CreatedAt: event.CreatedAt, + } + } + } + if detailed { creds, err := s.sqlToCommonForgeCredentials(enterprise.Credentials) if err != nil { @@ -260,28 +286,37 @@ func (s *sqlDatabase) sqlToCommonPool(pool Pool) (params.Pool, error) { UpdatedAt: pool.UpdatedAt, } + var ep GithubEndpoint if pool.RepoID != nil { ret.RepoID = pool.RepoID.String() if pool.Repository.Owner != "" && pool.Repository.Name != "" { ret.RepoName = fmt.Sprintf("%s/%s", pool.Repository.Owner, pool.Repository.Name) } + ep = pool.Repository.Endpoint } if pool.OrgID != nil && pool.Organization.Name != "" { ret.OrgID = pool.OrgID.String() ret.OrgName = pool.Organization.Name + ep = pool.Organization.Endpoint } if pool.EnterpriseID != nil && pool.Enterprise.Name != "" { ret.EnterpriseID = pool.EnterpriseID.String() ret.EnterpriseName = pool.Enterprise.Name + ep = pool.Enterprise.Endpoint } + endpoint, err := s.sqlToCommonGithubEndpoint(ep) + if err != nil { + return params.Pool{}, errors.Wrap(err, "converting endpoint") + } + ret.Endpoint = endpoint + for idx, val := range pool.Tags { ret.Tags[idx] = s.sqlToCommonTags(*val) } - var err error for idx, inst := range pool.Instances { ret.Instances[idx], err = s.sqlToParamsInstance(inst) if err != nil { @@ -399,6 +434,19 @@ func (s *sqlDatabase) sqlToCommonRepository(repo Repository, detailed bool) (par 
return params.Repository{}, errors.Wrap(err, "converting credentials") } + if len(repo.Events) > 0 { + ret.Events = make([]params.EntityEvent, len(repo.Events)) + for idx, event := range repo.Events { + ret.Events[idx] = params.EntityEvent{ + ID: event.ID, + Message: event.Message, + EventType: event.EventType, + EventLevel: event.EventLevel, + CreatedAt: event.CreatedAt, + } + } + } + if detailed { ret.Credentials = forgeCreds ret.CredentialsName = forgeCreds.Name @@ -654,7 +702,7 @@ func (s *sqlDatabase) GetForgeEntity(_ context.Context, entityType params.ForgeE } func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { - repo, err := s.GetRepositoryByID(ctx, repoID) + repo, err := s.getRepoByID(ctx, s.conn, repoID) if err != nil { return errors.Wrap(err, "updating instance") } @@ -670,20 +718,16 @@ func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, eve } if maxEvents > 0 { - repoID, err := uuid.Parse(repo.ID) - if err != nil { - return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") - } var latestEvents []RepositoryEvent q := s.conn.Model(&RepositoryEvent{}). Limit(maxEvents).Order("id desc"). - Where("repo_id = ?", repoID).Find(&latestEvents) + Where("repo_id = ?", repo.ID).Find(&latestEvents) if q.Error != nil { return errors.Wrap(q.Error, "fetching latest events") } if len(latestEvents) == maxEvents { lastInList := latestEvents[len(latestEvents)-1] - if err := s.conn.Where("repo_id = ? and id < ?", repoID, lastInList.ID).Unscoped().Delete(&RepositoryEvent{}).Error; err != nil { + if err := s.conn.Where("repo_id = ? and id < ?", repo.ID, lastInList.ID).Unscoped().Delete(&RepositoryEvent{}).Error; err != nil { return errors.Wrap(err, "deleting old events") } } @@ -692,7 +736,7 @@ func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, eve } func (s *sqlDatabase) addOrgEvent(ctx context.Context, orgID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { - org, err := s.GetOrganizationByID(ctx, orgID) + org, err := s.getOrgByID(ctx, s.conn, orgID) if err != nil { return errors.Wrap(err, "updating instance") } @@ -708,20 +752,16 @@ func (s *sqlDatabase) addOrgEvent(ctx context.Context, orgID string, event param } if maxEvents > 0 { - orgID, err := uuid.Parse(org.ID) - if err != nil { - return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") - } var latestEvents []OrganizationEvent q := s.conn.Model(&OrganizationEvent{}). Limit(maxEvents).Order("id desc"). - Where("org_id = ?", orgID).Find(&latestEvents) + Where("org_id = ?", org.ID).Find(&latestEvents) if q.Error != nil { return errors.Wrap(q.Error, "fetching latest events") } if len(latestEvents) == maxEvents { lastInList := latestEvents[len(latestEvents)-1] - if err := s.conn.Where("org_id = ? and id < ?", orgID, lastInList.ID).Unscoped().Delete(&OrganizationEvent{}).Error; err != nil { + if err := s.conn.Where("org_id = ? 
and id < ?", org.ID, lastInList.ID).Unscoped().Delete(&OrganizationEvent{}).Error; err != nil { return errors.Wrap(err, "deleting old events") } } @@ -730,7 +770,7 @@ func (s *sqlDatabase) addOrgEvent(ctx context.Context, orgID string, event param } func (s *sqlDatabase) addEnterpriseEvent(ctx context.Context, entID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { - ent, err := s.GetEnterpriseByID(ctx, entID) + ent, err := s.getEnterpriseByID(ctx, s.conn, entID) if err != nil { return errors.Wrap(err, "updating instance") } @@ -746,20 +786,16 @@ func (s *sqlDatabase) addEnterpriseEvent(ctx context.Context, entID string, even } if maxEvents > 0 { - entID, err := uuid.Parse(ent.ID) - if err != nil { - return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") - } var latestEvents []EnterpriseEvent q := s.conn.Model(&EnterpriseEvent{}). Limit(maxEvents).Order("id desc"). - Where("enterprise_id = ?", entID).Find(&latestEvents) + Where("enterprise_id = ?", ent.ID).Find(&latestEvents) if q.Error != nil { return errors.Wrap(q.Error, "fetching latest events") } if len(latestEvents) == maxEvents { lastInList := latestEvents[len(latestEvents)-1] - if err := s.conn.Where("enterprise_id = ? and id < ?", entID, lastInList.ID).Unscoped().Delete(&EnterpriseEvent{}).Error; err != nil { + if err := s.conn.Where("enterprise_id = ? and id < ?", ent.ID, lastInList.ID).Unscoped().Delete(&EnterpriseEvent{}).Error; err != nil { return errors.Wrap(err, "deleting old events") } } diff --git a/params/params.go b/params/params.go index e154b2df..a127d760 100644 --- a/params/params.go +++ b/params/params.go @@ -177,6 +177,15 @@ type StatusMessage struct { EventLevel EventLevel `json:"event_level,omitempty"` } +type EntityEvent struct { + ID uint `json:"id,omitempty"` + CreatedAt time.Time `json:"created_at,omitempty"` + + EventType EventType `json:"event_type,omitempty"` + EventLevel EventLevel `json:"event_level,omitempty"` + Message string `json:"message,omitempty"` +} + type Instance struct { // ID is the database ID of this instance. ID string `json:"id,omitempty"` @@ -365,6 +374,8 @@ type Pool struct { EnterpriseID string `json:"enterprise_id,omitempty"` EnterpriseName string `json:"enterprise_name,omitempty"` + Endpoint ForgeEndpoint `json:"forge_type,omitempty"` + RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` @@ -600,6 +611,7 @@ type Repository struct { Endpoint ForgeEndpoint `json:"endpoint,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` + Events []EntityEvent `json:"events,omitempty"` // Do not serialize sensitive info. WebhookSecret string `json:"-"` } @@ -669,6 +681,7 @@ type Organization struct { Endpoint ForgeEndpoint `json:"endpoint,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` + Events []EntityEvent `json:"events,omitempty"` // Do not serialize sensitive info. WebhookSecret string `json:"-"` } @@ -726,6 +739,7 @@ type Enterprise struct { Endpoint ForgeEndpoint `json:"endpoint,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"` + Events []EntityEvent `json:"events,omitempty"` // Do not serialize sensitive info. 
WebhookSecret string `json:"-"` } diff --git a/util/github/client.go b/util/github/client.go index a553e1d8..35d846ab 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -245,7 +245,6 @@ func parseError(response *github.Response, err error) error { statusCode = response.StatusCode } - slog.Debug("parsing error", "status_code", statusCode, "response", response, "error", err) switch statusCode { case http.StatusNotFound: return runnerErrors.ErrNotFound diff --git a/workers/cache/cache.go b/workers/cache/cache.go index 8f53cb67..a00c7667 100644 --- a/workers/cache/cache.go +++ b/workers/cache/cache.go @@ -130,7 +130,7 @@ func (w *Worker) loadAllEntities() error { } for _, entity := range cache.GetAllEntities() { - worker := newToolsUpdater(w.ctx, entity) + worker := newToolsUpdater(w.ctx, entity, w.store) if err := worker.Start(); err != nil { return fmt.Errorf("starting tools updater: %w", err) } @@ -286,7 +286,7 @@ func (w *Worker) handleEntityEvent(entityGetter params.EntityGetter, op common.O cache.SetEntity(entity) worker, ok := w.toolsWorkes[entity.ID] if !ok { - worker = newToolsUpdater(w.ctx, entity) + worker = newToolsUpdater(w.ctx, entity, w.store) if err := worker.Start(); err != nil { slog.ErrorContext(w.ctx, "starting tools updater", "error", err) return diff --git a/workers/cache/tool_cache.go b/workers/cache/tool_cache.go index 2e91bf50..3df103ec 100644 --- a/workers/cache/tool_cache.go +++ b/workers/cache/tool_cache.go @@ -25,16 +25,18 @@ import ( commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/cache" + "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" garmUtil "github.com/cloudbase/garm/util" "github.com/cloudbase/garm/util/github" ) -func newToolsUpdater(ctx context.Context, entity params.ForgeEntity) *toolsUpdater { +func newToolsUpdater(ctx context.Context, entity params.ForgeEntity, store common.Store) *toolsUpdater { return &toolsUpdater{ ctx: ctx, entity: entity, quit: make(chan struct{}), + store: store, } } @@ -44,6 +46,7 @@ type toolsUpdater struct { entity params.ForgeEntity tools []commonParams.RunnerApplicationDownload lastUpdate time.Time + store common.Store mux sync.Mutex running bool @@ -157,7 +160,14 @@ func (t *toolsUpdater) giteaUpdateLoop() { } t.sleepWithCancel(time.Duration(randInt.Int64()) * time.Millisecond) tools, err := getTools() - if err == nil { + if err != nil { + if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update gitea tools: %q", err), 30); err != nil { + slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) + } + } else { + if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventInfo, "successfully updated tools", 30); err != nil { + slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) + } cache.SetGithubToolsCache(t.entity, tools) } @@ -174,9 +184,15 @@ func (t *toolsUpdater) giteaUpdateLoop() { case <-ticker.C: tools, err := getTools() if err != nil { + if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update gitea tools: %q", err), 30); err != nil { + slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) + } slog.DebugContext(t.ctx, "failed to update gitea tools", "error", err) continue } + if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventInfo, "successfully updated tools", 30); err != nil { + slog.ErrorContext(t.ctx, "failed 
to add entity event", "error", err) + } cache.SetGithubToolsCache(t.entity, tools) } } @@ -197,12 +213,18 @@ func (t *toolsUpdater) loop() { now := time.Now().UTC() if now.After(t.lastUpdate.Add(40 * time.Minute)) { if err := t.updateTools(); err != nil { + if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update tools: %q", err), 30); err != nil { + slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) + } slog.ErrorContext(t.ctx, "initial tools update error", "error", err) resetTime = now.Add(5 * time.Minute) slog.ErrorContext(t.ctx, "initial tools update error", "error", err) } else { // Tools are usually valid for 1 hour. resetTime = t.lastUpdate.Add(40 * time.Minute) + if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventInfo, "successfully updated tools", 30); err != nil { + slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) + } } } @@ -224,12 +246,18 @@ func (t *toolsUpdater) loop() { case <-timer.C: slog.DebugContext(t.ctx, "updating tools") now = time.Now().UTC() - if err := t.updateTools(); err == nil { + if err := t.updateTools(); err != nil { slog.ErrorContext(t.ctx, "updating tools", "error", err) + if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update tools: %q", err), 30); err != nil { + slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) + } resetTime = now.Add(5 * time.Minute) } else { // Tools are usually valid for 1 hour. resetTime = t.lastUpdate.Add(40 * time.Minute) + if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventInfo, "successfully updated tools", 30); err != nil { + slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) + } } case <-t.reset: slog.DebugContext(t.ctx, "resetting tools updater") @@ -237,10 +265,16 @@ func (t *toolsUpdater) loop() { now = time.Now().UTC() if err := t.updateTools(); err != nil { slog.ErrorContext(t.ctx, "updating tools", "error", err) + if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update tools: %q", err), 30); err != nil { + slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) + } resetTime = now.Add(5 * time.Minute) } else { // Tools are usually valid for 1 hour. 
 				resetTime = t.lastUpdate.Add(40 * time.Minute)
+				if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventInfo, "successfully updated tools", 30); err != nil {
+					slog.ErrorContext(t.ctx, "failed to add entity event", "error", err)
+				}
 			}
 		}
 		timer.Stop()

From 9f3b65118495269dd1976cb6606d9db3751294f5 Mon Sep 17 00:00:00 2001
From: Gabriel Adrian Samfira
Date: Fri, 23 May 2025 12:40:57 +0000
Subject: [PATCH 091/179] Fix deadline check logic

Signed-off-by: Gabriel Adrian Samfira
---
 runner/pool/locking.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/runner/pool/locking.go b/runner/pool/locking.go
index 70471f98..7e20d990 100644
--- a/runner/pool/locking.go
+++ b/runner/pool/locking.go
@@ -63,7 +63,7 @@ func (i *instanceDeleteBackoff) ShouldProcess(key string) (bool, time.Time) {
 	now := time.Now().UTC()
 	deadline := ib.lastRecordedFailureTime.Add(time.Duration(ib.backoffSeconds) * time.Second)
-	return deadline.After(now), deadline
+	return now.After(deadline), deadline
 }

 func (i *instanceDeleteBackoff) Delete(key string) {

From 750446acec63db101d750659bbe2fa53a4b78186 Mon Sep 17 00:00:00 2001
From: Gabriel Adrian Samfira
Date: Fri, 23 May 2025 17:15:42 +0000
Subject: [PATCH 092/179] Make the default github.com endpoint mutable

The k8s operator seems to want to define its own endpoint. This change
allows the removal of the default gh endpoint if no credentials are tied
to it.

Signed-off-by: Gabriel Adrian Samfira
---
 database/common/mocks/Store.go | 6 +-
 database/sql/github.go | 24 +-
 database/sql/github_test.go | 111 +++-
 database/sql/sql.go | 15 +-
 go.mod | 24 +-
 go.sum | 48 +-
 runner/common/mocks/GithubClient.go | 2 +-
 runner/common/mocks/GithubEntityOperations.go | 2 +-
 runner/common/mocks/PoolManager.go | 18 +-
 runner/common/mocks/Provider.go | 92 ++--
 runner/mocks/PoolManagerController.go | 8 +-
 .../prometheus/common/expfmt/text_parse.go | 4 +-
 .../prometheus/common/model/labels.go | 3 +-
 .../prometheus/procfs/.golangci.yml | 72 +--
 .../prometheus/procfs/Makefile.common | 4 +-
 .../procfs/internal/util/sysreadfile.go | 20 +
 .../prometheus/procfs/mountstats.go | 23 +-
 .../prometheus/procfs/net_protocols.go | 21 +-
 vendor/github.com/prometheus/procfs/proc.go | 8 +-
 .../prometheus/procfs/proc_netstat.go | 224 ++++----
 .../github.com/prometheus/procfs/proc_snmp.go | 120 ++---
 .../prometheus/procfs/proc_snmp6.go | 150 +++---
 .../github.com/prometheus/procfs/proc_sys.go | 2 +-
 .../github.com/prometheus/procfs/softirqs.go | 22 +-
 vendor/go.opentelemetry.io/otel/.golangci.yml | 452 ++++++++--------
 vendor/go.opentelemetry.io/otel/CHANGELOG.md | 54 +-
 .../go.opentelemetry.io/otel/CONTRIBUTING.md | 1 +
 vendor/go.opentelemetry.io/otel/Makefile | 19 +-
 vendor/go.opentelemetry.io/otel/README.md | 8 +-
 vendor/go.opentelemetry.io/otel/RELEASING.md | 18 +
 .../otel/attribute/filter.go | 4 +-
 .../internal}/attribute.go | 2 +-
 .../otel/attribute/rawhelpers.go | 37 ++
 .../otel/attribute/value.go | 15 +-
 .../otel/dependencies.Dockerfile | 5 +-
 .../go.opentelemetry.io/otel/get_main_pkgs.sh | 30 --
 .../go.opentelemetry.io/otel/internal/gen.go | 18 -
 .../otel/internal/global/handler.go | 1 +
 .../otel/internal/global/meter.go | 45 +-
 .../otel/internal/global/trace.go | 13 +-
 .../otel/internal/rawhelpers.go | 48 --
 .../otel/metric/asyncfloat64.go | 12 +-
 .../otel/metric/asyncint64.go | 8 +-
 .../otel/metric/instrument.go | 16 +-
 .../go.opentelemetry.io/otel/metric/meter.go | 10 +-
 .../otel/propagation/baggage.go | 36 +-
 .../otel/propagation/propagation.go | 30 +-
 vendor/go.opentelemetry.io/otel/renovate.json | 7 +-
 .../otel/semconv/internal/v2/http.go | 1 +
 vendor/go.opentelemetry.io/otel/trace/auto.go | 5 +-
 .../otel/trace/internal/telemetry/span.go | 56 +-
 .../otel/trace/internal/telemetry/status.go | 12 +-
 .../otel/trace/internal/telemetry/traces.go | 4 +-
 .../otel/trace/internal/telemetry/value.go | 2 +-
 vendor/go.opentelemetry.io/otel/trace/noop.go | 2 +
 .../otel/verify_readmes.sh | 21 -
 vendor/go.opentelemetry.io/otel/version.go | 2 +-
 vendor/go.opentelemetry.io/otel/versions.yaml | 8 +-
 vendor/golang.org/x/oauth2/internal/doc.go | 2 +-
 vendor/golang.org/x/oauth2/internal/oauth2.go | 2 +-
 vendor/golang.org/x/oauth2/internal/token.go | 50 +-
 .../golang.org/x/oauth2/internal/transport.go | 4 +-
 vendor/golang.org/x/oauth2/oauth2.go | 55 +-
 vendor/golang.org/x/oauth2/pkce.go | 15 +-
 vendor/golang.org/x/oauth2/token.go | 15 +-
 vendor/golang.org/x/oauth2/transport.go | 24 +-
 vendor/golang.org/x/sync/errgroup/errgroup.go | 107 +++-
 vendor/golang.org/x/sys/cpu/cpu.go | 11 +
 .../golang.org/x/sys/cpu/cpu_linux_riscv64.go | 23 +
 vendor/golang.org/x/sys/cpu/cpu_riscv64.go | 12 +
 .../x/sys/windows/security_windows.go | 49 +-
 .../x/sys/windows/syscall_windows.go | 6 +-
 .../golang.org/x/sys/windows/types_windows.go | 212 ++++++++
 .../x/sys/windows/zsyscall_windows.go | 9 +
 vendor/gorm.io/gorm/.golangci.yml | 15 +-
 vendor/gorm.io/gorm/CODE_OF_CONDUCT.md | 128 +++++
 vendor/gorm.io/gorm/LICENSE | 2 +-
 vendor/gorm.io/gorm/callbacks/associations.go | 10 +-
 vendor/gorm.io/gorm/clause/returning.go | 9 +-
 vendor/gorm.io/gorm/finisher_api.go | 6 +-
 vendor/gorm.io/gorm/gorm.go | 16 +-
 vendor/gorm.io/gorm/internal/lru/lru.go | 493 ++++++++++++++++++
 .../gorm/internal/stmt_store/stmt_store.go | 183 +++++++
 vendor/gorm.io/gorm/logger/logger.go | 12 +
 vendor/gorm.io/gorm/migrator/migrator.go | 4 +-
 vendor/gorm.io/gorm/prepare_stmt.go | 144 ++---
 vendor/gorm.io/gorm/scan.go | 4 +-
 vendor/gorm.io/gorm/schema/field.go | 2 +-
 vendor/gorm.io/gorm/schema/index.go | 31 +-
 vendor/gorm.io/gorm/schema/relationship.go | 6 +-
 vendor/gorm.io/gorm/schema/utils.go | 2 +-
 vendor/modules.txt | 39 +-
 92 files changed, 2568 insertions(+), 1159 deletions(-)
 rename vendor/go.opentelemetry.io/otel/{internal/attribute => attribute/internal}/attribute.go (97%)
 create mode 100644 vendor/go.opentelemetry.io/otel/attribute/rawhelpers.go
 delete mode 100644 vendor/go.opentelemetry.io/otel/get_main_pkgs.sh
 delete mode 100644 vendor/go.opentelemetry.io/otel/internal/gen.go
 delete mode 100644 vendor/go.opentelemetry.io/otel/internal/rawhelpers.go
 delete mode 100644 vendor/go.opentelemetry.io/otel/verify_readmes.sh
 create mode 100644 vendor/gorm.io/gorm/CODE_OF_CONDUCT.md
 create mode 100644 vendor/gorm.io/gorm/internal/lru/lru.go
 create mode 100644 vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go

diff --git a/database/common/mocks/Store.go b/database/common/mocks/Store.go
index 0791ff36..841982b4 100644
--- a/database/common/mocks/Store.go
+++ b/database/common/mocks/Store.go
@@ -1,4 +1,4 @@
-// Code generated by mockery v2.42.0. DO NOT EDIT.
+// Code generated by mockery v2.53.3. DO NOT EDIT.
package mocks @@ -50,7 +50,7 @@ func (_m *Store) BreakLockJobIsQueued(ctx context.Context, jobID int64) error { return r0 } -// ControllerInfo provides a mock function with given fields: +// ControllerInfo provides a mock function with no fields func (_m *Store) ControllerInfo() (params.ControllerInfo, error) { ret := _m.Called() @@ -1034,7 +1034,7 @@ func (_m *Store) HasAdminUser(ctx context.Context) bool { return r0 } -// InitController provides a mock function with given fields: +// InitController provides a mock function with no fields func (_m *Store) InitController() (params.ControllerInfo, error) { ret := _m.Called() diff --git a/database/sql/github.go b/database/sql/github.go index 22e357bd..18526f5a 100644 --- a/database/sql/github.go +++ b/database/sql/github.go @@ -28,10 +28,6 @@ import ( "github.com/cloudbase/garm/params" ) -const ( - defaultGithubEndpoint string = "github.com" -) - func (s *sqlDatabase) sqlToCommonGithubCredentials(creds GithubCredentials) (params.GithubCredentials, error) { if len(creds.Payload) == 0 { return params.GithubCredentials{}, errors.New("empty credentials payload") @@ -168,10 +164,6 @@ func (s *sqlDatabase) ListGithubEndpoints(_ context.Context) ([]params.GithubEnd } func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param params.UpdateGithubEndpointParams) (ghEndpoint params.GithubEndpoint, err error) { - if name == defaultGithubEndpoint { - return params.GithubEndpoint{}, errors.Wrap(runnerErrors.ErrBadRequest, "cannot update default github endpoint") - } - defer func() { if err == nil { s.sendNotify(common.GithubEndpointEntityType, common.UpdateOperation, ghEndpoint) @@ -185,6 +177,16 @@ func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param } return errors.Wrap(err, "fetching github endpoint") } + + var credsCount int64 + if err := tx.Model(&GithubCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(err, "fetching github credentials") + } + } + if credsCount > 0 && (param.APIBaseURL != nil || param.BaseURL != nil || param.UploadBaseURL != nil) { + return errors.Wrap(runnerErrors.ErrBadRequest, "cannot update endpoint URLs with existing credentials") + } if param.APIBaseURL != nil { endpoint.APIBaseURL = *param.APIBaseURL } @@ -236,10 +238,6 @@ func (s *sqlDatabase) GetGithubEndpoint(_ context.Context, name string) (params. 
} func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err error) { - if name == defaultGithubEndpoint { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete default github endpoint") - } - defer func() { if err == nil { s.sendNotify(common.GithubEndpointEntityType, common.DeleteOperation, params.GithubEndpoint{Name: name}) @@ -283,7 +281,7 @@ func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err } if credsCount > 0 || repoCnt > 0 || orgCnt > 0 || entCnt > 0 { - return errors.New("cannot delete endpoint with associated entities") + return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete endpoint with associated entities") } if err := tx.Unscoped().Delete(&endpoint).Error; err != nil { diff --git a/database/sql/github_test.go b/database/sql/github_test.go index b0399a68..72617de4 100644 --- a/database/sql/github_test.go +++ b/database/sql/github_test.go @@ -40,6 +40,7 @@ const ( testEndpointDescription string = "test description" testCredsName string = "test-creds" testCredsDescription string = "test creds" + defaultGithubEndpoint string = "github.com" ) type GithubTestSuite struct { @@ -56,20 +57,13 @@ func (s *GithubTestSuite) SetupTest() { s.db = db } -func (s *GithubTestSuite) TestDefaultEndpointGetsCreatedAutomatically() { +func (s *GithubTestSuite) TestDefaultEndpointGetsCreatedAutomaticallyIfNoOtherEndpointExists() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) endpoint, err := s.db.GetGithubEndpoint(ctx, defaultGithubEndpoint) s.Require().NoError(err) s.Require().NotNil(endpoint) } -func (s *GithubTestSuite) TestDeletingDefaultEndpointFails() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - err := s.db.DeleteGithubEndpoint(ctx, defaultGithubEndpoint) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) -} - func (s *GithubTestSuite) TestCreatingEndpoint() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) @@ -153,6 +147,39 @@ func (s *GithubTestSuite) TestDeletingEndpoint() { s.Require().ErrorIs(err, runnerErrors.ErrNotFound) } +func (s *GithubTestSuite) TestDeleteGithubEndpointFailsWhenCredentialsExist() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGithubEndpointParams{ + Name: testEndpointName, + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + UploadBaseURL: testUploadBaseURL, + BaseURL: testBaseURL, + } + + endpoint, err := s.db.CreateGithubEndpoint(ctx, createEpParams) + s.Require().NoError(err) + s.Require().NotNil(endpoint) + + credParams := params.CreateGithubCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: testEndpointName, + AuthType: params.GithubAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + _, err = s.db.CreateGithubCredentials(ctx, credParams) + s.Require().NoError(err) + + err = s.db.DeleteGithubEndpoint(ctx, testEndpointName) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) +} + func (s *GithubTestSuite) TestUpdateEndpoint() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) @@ -168,7 +195,7 @@ func (s *GithubTestSuite) TestUpdateEndpoint() { s.Require().NoError(err) s.Require().NotNil(endpoint) - newDescription := "new description" + newDescription := "the new description" newAPIBaseURL := "https://new-api.example.com" newUploadBaseURL := 
"https://new-uploads.example.com" newBaseURL := "https://new.example.com" @@ -192,6 +219,72 @@ func (s *GithubTestSuite) TestUpdateEndpoint() { s.Require().Equal(caCertBundle, updatedEndpoint.CACertBundle) } +func (s *GithubTestSuite) TestUpdateEndpointUDLsFailsIfCredentialsAreAssociated() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGithubEndpointParams{ + Name: testEndpointName, + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + UploadBaseURL: testUploadBaseURL, + BaseURL: testBaseURL, + } + + endpoint, err := s.db.CreateGithubEndpoint(ctx, createEpParams) + s.Require().NoError(err) + s.Require().NotNil(endpoint) + + credParams := params.CreateGithubCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: testEndpointName, + AuthType: params.GithubAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + _, err = s.db.CreateGithubCredentials(ctx, credParams) + s.Require().NoError(err) + + newDescription := "new description" + newBaseURL := "https://new.example.com" + newAPIBaseURL := "https://new-api.example.com" + newUploadBaseURL := "https://new-uploads.example.com" + updateEpParams := params.UpdateGithubEndpointParams{ + BaseURL: &newBaseURL, + } + + _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().EqualError(err, "updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") + + updateEpParams = params.UpdateGithubEndpointParams{ + UploadBaseURL: &newUploadBaseURL, + } + + _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().EqualError(err, "updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") + + updateEpParams = params.UpdateGithubEndpointParams{ + APIBaseURL: &newAPIBaseURL, + } + _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().EqualError(err, "updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") + + updateEpParams = params.UpdateGithubEndpointParams{ + Description: &newDescription, + } + ret, err := s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().NoError(err) + s.Require().Equal(newDescription, ret.Description) +} + func (s *GithubTestSuite) TestUpdatingNonExistingEndpointReturnsNotFoundError() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) diff --git a/database/sql/sql.go b/database/sql/sql.go index 1a024516..d4e6895a 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -212,9 +212,18 @@ func (s *sqlDatabase) ensureGithubEndpoint() error { UploadBaseURL: appdefaults.GithubDefaultUploadBaseURL, } - if _, err := s.CreateGithubEndpoint(context.Background(), createEndpointParams); err != nil { - if !errors.Is(err, runnerErrors.ErrDuplicateEntity) { - return errors.Wrap(err, "creating default github endpoint") + var epCount int64 + if err := s.conn.Model(&GithubEndpoint{}).Count(&epCount).Error; err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(err, "counting github endpoints") + } + } + + if epCount == 0 { + if _, err := s.CreateGithubEndpoint(context.Background(), 
createEndpointParams); err != nil { + if !errors.Is(err, runnerErrors.ErrDuplicateEntity) { + return errors.Wrap(err, "creating default github endpoint") + } } } diff --git a/go.mod b/go.mod index 79a09894..30815255 100644 --- a/go.mod +++ b/go.mod @@ -28,15 +28,15 @@ require ( github.com/prometheus/client_golang v1.22.0 github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 - golang.org/x/crypto v0.37.0 - golang.org/x/oauth2 v0.29.0 - golang.org/x/sync v0.13.0 + golang.org/x/crypto v0.38.0 + golang.org/x/oauth2 v0.30.0 + golang.org/x/sync v0.14.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 gorm.io/datatypes v1.2.5 gorm.io/driver/mysql v1.5.7 gorm.io/driver/sqlite v1.5.7 - gorm.io/gorm v1.25.12 + gorm.io/gorm v1.26.1 ) require ( @@ -76,20 +76,20 @@ require ( github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_model v0.6.2 // indirect - github.com/prometheus/common v0.63.0 // indirect - github.com/prometheus/procfs v0.16.0 // indirect + github.com/prometheus/common v0.64.0 // indirect + github.com/prometheus/procfs v0.16.1 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/spf13/pflag v1.0.6 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 // indirect go.mongodb.org/mongo-driver v1.17.3 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/otel v1.35.0 // indirect - go.opentelemetry.io/otel/metric v1.35.0 // indirect - go.opentelemetry.io/otel/trace v1.35.0 // indirect - golang.org/x/net v0.39.0 // indirect - golang.org/x/sys v0.32.0 // indirect - golang.org/x/text v0.24.0 // indirect + go.opentelemetry.io/otel v1.36.0 // indirect + go.opentelemetry.io/otel/metric v1.36.0 // indirect + go.opentelemetry.io/otel/trace v1.36.0 // indirect + golang.org/x/net v0.40.0 // indirect + golang.org/x/sys v0.33.0 // indirect + golang.org/x/text v0.25.0 // indirect google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index e14f0c22..2f887203 100644 --- a/go.sum +++ b/go.sum @@ -155,10 +155,10 @@ github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/ github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0= github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= -github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k= -github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18= -github.com/prometheus/procfs v0.16.0 h1:xh6oHhKwnOJKMYiYBDWmkHqQPyiY40sny36Cmx2bbsM= -github.com/prometheus/procfs v0.16.0/go.mod h1:8veyXUu3nGP7oaCxhX6yeaM5u4stL2FeMXnCqhDthZg= +github.com/prometheus/common v0.64.0 h1:pdZeA+g617P7oGv1CzdTzyeShxAGrTBsolKNOLQPGO4= +github.com/prometheus/common v0.64.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= +github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= +github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod 
h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= @@ -182,29 +182,29 @@ go.mongodb.org/mongo-driver v1.17.3 h1:TQyXhnsWfWtgAhMtOgtYHMTkZIfBTpMTsMnd9ZBeH go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= -go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= -go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= -go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= +go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= +go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= +go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= +go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= -go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= -go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= -golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= -golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= -golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= -golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= -golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98= -golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= -golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= -golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= +go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= +golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= +golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= +golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= +golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= +golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= +golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= +golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= +golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= -golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= -golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= 
+golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= +golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= @@ -231,5 +231,5 @@ gorm.io/driver/sqlite v1.5.7/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDa gorm.io/driver/sqlserver v1.5.4 h1:xA+Y1KDNspv79q43bPyjDMUgHoYHLhXYmdFcYPobg8g= gorm.io/driver/sqlserver v1.5.4/go.mod h1:+frZ/qYmuna11zHPlh5oc2O6ZA/lS88Keb0XSH1Zh/g= gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= -gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8= -gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ= +gorm.io/gorm v1.26.1 h1:ghB2gUI9FkS46luZtn6DLZ0f6ooBJ5IbVej2ENFDjRw= +gorm.io/gorm v1.26.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/runner/common/mocks/GithubClient.go b/runner/common/mocks/GithubClient.go index c59c631b..3388a894 100644 --- a/runner/common/mocks/GithubClient.go +++ b/runner/common/mocks/GithubClient.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks diff --git a/runner/common/mocks/GithubEntityOperations.go b/runner/common/mocks/GithubEntityOperations.go index 488387f6..c04f9024 100644 --- a/runner/common/mocks/GithubEntityOperations.go +++ b/runner/common/mocks/GithubEntityOperations.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. package mocks diff --git a/runner/common/mocks/PoolManager.go b/runner/common/mocks/PoolManager.go index bf1af0c0..5bb16672 100644 --- a/runner/common/mocks/PoolManager.go +++ b/runner/common/mocks/PoolManager.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks @@ -60,7 +60,7 @@ func (_m *PoolManager) GetWebhookInfo(ctx context.Context) (params.HookInfo, err return r0, r1 } -// GithubRunnerRegistrationToken provides a mock function with given fields: +// GithubRunnerRegistrationToken provides a mock function with no fields func (_m *PoolManager) GithubRunnerRegistrationToken() (string, error) { ret := _m.Called() @@ -106,7 +106,7 @@ func (_m *PoolManager) HandleWorkflowJob(job params.WorkflowJob) error { return r0 } -// ID provides a mock function with given fields: +// ID provides a mock function with no fields func (_m *PoolManager) ID() string { ret := _m.Called() @@ -152,7 +152,7 @@ func (_m *PoolManager) InstallWebhook(ctx context.Context, param params.InstallW return r0, r1 } -// RootCABundle provides a mock function with given fields: +// RootCABundle provides a mock function with no fields func (_m *PoolManager) RootCABundle() (params.CertificateBundle, error) { ret := _m.Called() @@ -180,7 +180,7 @@ func (_m *PoolManager) RootCABundle() (params.CertificateBundle, error) { return r0, r1 } -// Start provides a mock function with given fields: +// Start provides a mock function with no fields func (_m *PoolManager) Start() error { ret := _m.Called() @@ -198,7 +198,7 @@ func (_m *PoolManager) Start() error { return r0 } -// Status provides a mock function with given fields: +// Status provides a mock function with no fields func (_m *PoolManager) Status() params.PoolManagerStatus { ret := _m.Called() @@ -216,7 +216,7 @@ func (_m *PoolManager) Status() params.PoolManagerStatus { return r0 } -// Stop provides a mock function with given fields: +// Stop provides a mock function with no fields func (_m *PoolManager) Stop() error { ret := _m.Called() @@ -252,7 +252,7 @@ func (_m *PoolManager) UninstallWebhook(ctx context.Context) error { return r0 } -// Wait provides a mock function with given fields: +// Wait provides a mock function with no fields func (_m *PoolManager) Wait() error { ret := _m.Called() @@ -270,7 +270,7 @@ func (_m *PoolManager) Wait() error { return r0 } -// WebhookSecret provides a mock function with given fields: +// WebhookSecret provides a mock function with no fields func (_m *PoolManager) WebhookSecret() string { ret := _m.Called() diff --git a/runner/common/mocks/Provider.go b/runner/common/mocks/Provider.go index 92dece39..e7491ac5 100644 --- a/runner/common/mocks/Provider.go +++ b/runner/common/mocks/Provider.go @@ -1,15 +1,17 @@ -// Code generated by mockery v2.42.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks import ( context "context" + common "github.com/cloudbase/garm/runner/common" + garm_provider_commonparams "github.com/cloudbase/garm-provider-common/params" + mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" - "github.com/cloudbase/garm/runner/common" ) // Provider is an autogenerated mock type for the Provider type @@ -17,7 +19,7 @@ type Provider struct { mock.Mock } -// AsParams provides a mock function with given fields: +// AsParams provides a mock function with no fields func (_m *Provider) AsParams() params.Provider { ret := _m.Called() @@ -35,9 +37,9 @@ func (_m *Provider) AsParams() params.Provider { return r0 } -// CreateInstance provides a mock function with given fields: ctx, bootstrapParams +// CreateInstance provides a mock function with given fields: ctx, bootstrapParams, createInstanceParams func (_m *Provider) CreateInstance(ctx context.Context, bootstrapParams garm_provider_commonparams.BootstrapInstance, createInstanceParams common.CreateInstanceParams) (garm_provider_commonparams.ProviderInstance, error) { - ret := _m.Called(ctx, bootstrapParams) + ret := _m.Called(ctx, bootstrapParams, createInstanceParams) if len(ret) == 0 { panic("no return value specified for CreateInstance") @@ -45,17 +47,17 @@ func (_m *Provider) CreateInstance(ctx context.Context, bootstrapParams garm_pro var r0 garm_provider_commonparams.ProviderInstance var r1 error - if rf, ok := ret.Get(0).(func(context.Context, garm_provider_commonparams.BootstrapInstance) (garm_provider_commonparams.ProviderInstance, error)); ok { - return rf(ctx, bootstrapParams) + if rf, ok := ret.Get(0).(func(context.Context, garm_provider_commonparams.BootstrapInstance, common.CreateInstanceParams) (garm_provider_commonparams.ProviderInstance, error)); ok { + return rf(ctx, bootstrapParams, createInstanceParams) } - if rf, ok := ret.Get(0).(func(context.Context, garm_provider_commonparams.BootstrapInstance) garm_provider_commonparams.ProviderInstance); ok { - r0 = rf(ctx, bootstrapParams) + if rf, ok := ret.Get(0).(func(context.Context, garm_provider_commonparams.BootstrapInstance, common.CreateInstanceParams) garm_provider_commonparams.ProviderInstance); ok { + r0 = rf(ctx, bootstrapParams, createInstanceParams) } else { r0 = ret.Get(0).(garm_provider_commonparams.ProviderInstance) } - if rf, ok := ret.Get(1).(func(context.Context, garm_provider_commonparams.BootstrapInstance) error); ok { - r1 = rf(ctx, bootstrapParams) + if rf, ok := ret.Get(1).(func(context.Context, garm_provider_commonparams.BootstrapInstance, common.CreateInstanceParams) error); ok { + r1 = rf(ctx, bootstrapParams, createInstanceParams) } else { r1 = ret.Error(1) } @@ -63,17 +65,17 @@ func (_m *Provider) CreateInstance(ctx context.Context, bootstrapParams garm_pro return r0, r1 } -// DeleteInstance provides a mock function with given fields: ctx, instance +// DeleteInstance provides a mock function with given fields: ctx, instance, deleteInstanceParams func (_m *Provider) DeleteInstance(ctx context.Context, instance string, deleteInstanceParams common.DeleteInstanceParams) error { - ret := _m.Called(ctx, instance) + ret := _m.Called(ctx, instance, deleteInstanceParams) if len(ret) == 0 { panic("no return value specified for DeleteInstance") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, instance) + if rf, ok := ret.Get(0).(func(context.Context, string, common.DeleteInstanceParams) error); ok { + r0 = rf(ctx, instance, deleteInstanceParams) } else { 
r0 = ret.Error(0) } @@ -81,7 +83,7 @@ func (_m *Provider) DeleteInstance(ctx context.Context, instance string, deleteI return r0 } -// DisableJITConfig provides a mock function with given fields: +// DisableJITConfig provides a mock function with no fields func (_m *Provider) DisableJITConfig() bool { ret := _m.Called() @@ -99,9 +101,9 @@ func (_m *Provider) DisableJITConfig() bool { return r0 } -// GetInstance provides a mock function with given fields: ctx, instance +// GetInstance provides a mock function with given fields: ctx, instance, getInstanceParams func (_m *Provider) GetInstance(ctx context.Context, instance string, getInstanceParams common.GetInstanceParams) (garm_provider_commonparams.ProviderInstance, error) { - ret := _m.Called(ctx, instance) + ret := _m.Called(ctx, instance, getInstanceParams) if len(ret) == 0 { panic("no return value specified for GetInstance") @@ -109,17 +111,17 @@ func (_m *Provider) GetInstance(ctx context.Context, instance string, getInstanc var r0 garm_provider_commonparams.ProviderInstance var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (garm_provider_commonparams.ProviderInstance, error)); ok { - return rf(ctx, instance) + if rf, ok := ret.Get(0).(func(context.Context, string, common.GetInstanceParams) (garm_provider_commonparams.ProviderInstance, error)); ok { + return rf(ctx, instance, getInstanceParams) } - if rf, ok := ret.Get(0).(func(context.Context, string) garm_provider_commonparams.ProviderInstance); ok { - r0 = rf(ctx, instance) + if rf, ok := ret.Get(0).(func(context.Context, string, common.GetInstanceParams) garm_provider_commonparams.ProviderInstance); ok { + r0 = rf(ctx, instance, getInstanceParams) } else { r0 = ret.Get(0).(garm_provider_commonparams.ProviderInstance) } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, instance) + if rf, ok := ret.Get(1).(func(context.Context, string, common.GetInstanceParams) error); ok { + r1 = rf(ctx, instance, getInstanceParams) } else { r1 = ret.Error(1) } @@ -127,9 +129,9 @@ func (_m *Provider) GetInstance(ctx context.Context, instance string, getInstanc return r0, r1 } -// ListInstances provides a mock function with given fields: ctx, poolID +// ListInstances provides a mock function with given fields: ctx, poolID, listInstancesParams func (_m *Provider) ListInstances(ctx context.Context, poolID string, listInstancesParams common.ListInstancesParams) ([]garm_provider_commonparams.ProviderInstance, error) { - ret := _m.Called(ctx, poolID) + ret := _m.Called(ctx, poolID, listInstancesParams) if len(ret) == 0 { panic("no return value specified for ListInstances") @@ -137,19 +139,19 @@ func (_m *Provider) ListInstances(ctx context.Context, poolID string, listInstan var r0 []garm_provider_commonparams.ProviderInstance var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) ([]garm_provider_commonparams.ProviderInstance, error)); ok { - return rf(ctx, poolID) + if rf, ok := ret.Get(0).(func(context.Context, string, common.ListInstancesParams) ([]garm_provider_commonparams.ProviderInstance, error)); ok { + return rf(ctx, poolID, listInstancesParams) } - if rf, ok := ret.Get(0).(func(context.Context, string) []garm_provider_commonparams.ProviderInstance); ok { - r0 = rf(ctx, poolID) + if rf, ok := ret.Get(0).(func(context.Context, string, common.ListInstancesParams) []garm_provider_commonparams.ProviderInstance); ok { + r0 = rf(ctx, poolID, listInstancesParams) } else { if ret.Get(0) != nil { r0 = 
ret.Get(0).([]garm_provider_commonparams.ProviderInstance) } } - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, poolID) + if rf, ok := ret.Get(1).(func(context.Context, string, common.ListInstancesParams) error); ok { + r1 = rf(ctx, poolID, listInstancesParams) } else { r1 = ret.Error(1) } @@ -157,17 +159,17 @@ func (_m *Provider) ListInstances(ctx context.Context, poolID string, listInstan return r0, r1 } -// RemoveAllInstances provides a mock function with given fields: ctx -func (_m *Provider) RemoveAllInstances(ctx context.Context, removeAllInstances common.RemoveAllInstancesParams) error { - ret := _m.Called(ctx) +// RemoveAllInstances provides a mock function with given fields: ctx, removeAllInstancesParams +func (_m *Provider) RemoveAllInstances(ctx context.Context, removeAllInstancesParams common.RemoveAllInstancesParams) error { + ret := _m.Called(ctx, removeAllInstancesParams) if len(ret) == 0 { panic("no return value specified for RemoveAllInstances") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context) error); ok { - r0 = rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, common.RemoveAllInstancesParams) error); ok { + r0 = rf(ctx, removeAllInstancesParams) } else { r0 = ret.Error(0) } @@ -175,17 +177,17 @@ func (_m *Provider) RemoveAllInstances(ctx context.Context, removeAllInstances c return r0 } -// Start provides a mock function with given fields: ctx, instance +// Start provides a mock function with given fields: ctx, instance, startParams func (_m *Provider) Start(ctx context.Context, instance string, startParams common.StartParams) error { - ret := _m.Called(ctx, instance) + ret := _m.Called(ctx, instance, startParams) if len(ret) == 0 { panic("no return value specified for Start") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, instance) + if rf, ok := ret.Get(0).(func(context.Context, string, common.StartParams) error); ok { + r0 = rf(ctx, instance, startParams) } else { r0 = ret.Error(0) } @@ -193,17 +195,17 @@ func (_m *Provider) Start(ctx context.Context, instance string, startParams comm return r0 } -// Stop provides a mock function with given fields: ctx, instance +// Stop provides a mock function with given fields: ctx, instance, stopParams func (_m *Provider) Stop(ctx context.Context, instance string, stopParams common.StopParams) error { - ret := _m.Called(ctx, instance) + ret := _m.Called(ctx, instance, stopParams) if len(ret) == 0 { panic("no return value specified for Stop") } var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { - r0 = rf(ctx, instance) + if rf, ok := ret.Get(0).(func(context.Context, string, common.StopParams) error); ok { + r0 = rf(ctx, instance, stopParams) } else { r0 = ret.Error(0) } diff --git a/runner/mocks/PoolManagerController.go b/runner/mocks/PoolManagerController.go index 2e680daa..05720ebe 100644 --- a/runner/mocks/PoolManagerController.go +++ b/runner/mocks/PoolManagerController.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.42.0. DO NOT EDIT. +// Code generated by mockery v2.53.3. DO NOT EDIT. 
package mocks @@ -193,7 +193,7 @@ func (_m *PoolManagerController) GetEnterprisePoolManager(enterprise params.Ente return r0, r1 } -// GetEnterprisePoolManagers provides a mock function with given fields: +// GetEnterprisePoolManagers provides a mock function with no fields func (_m *PoolManagerController) GetEnterprisePoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() @@ -253,7 +253,7 @@ func (_m *PoolManagerController) GetOrgPoolManager(org params.Organization) (com return r0, r1 } -// GetOrgPoolManagers provides a mock function with given fields: +// GetOrgPoolManagers provides a mock function with no fields func (_m *PoolManagerController) GetOrgPoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() @@ -313,7 +313,7 @@ func (_m *PoolManagerController) GetRepoPoolManager(repo params.Repository) (com return r0, r1 } -// GetRepoPoolManagers provides a mock function with given fields: +// GetRepoPoolManagers provides a mock function with no fields func (_m *PoolManagerController) GetRepoPoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() diff --git a/vendor/github.com/prometheus/common/expfmt/text_parse.go b/vendor/github.com/prometheus/common/expfmt/text_parse.go index b4607fe4..4067978a 100644 --- a/vendor/github.com/prometheus/common/expfmt/text_parse.go +++ b/vendor/github.com/prometheus/common/expfmt/text_parse.go @@ -345,8 +345,8 @@ func (p *TextParser) startLabelName() stateFn { } // Special summary/histogram treatment. Don't add 'quantile' and 'le' // labels to 'real' labels. - if !(p.currentMF.GetType() == dto.MetricType_SUMMARY && p.currentLabelPair.GetName() == model.QuantileLabel) && - !(p.currentMF.GetType() == dto.MetricType_HISTOGRAM && p.currentLabelPair.GetName() == model.BucketLabel) { + if (p.currentMF.GetType() != dto.MetricType_SUMMARY || p.currentLabelPair.GetName() != model.QuantileLabel) && + (p.currentMF.GetType() != dto.MetricType_HISTOGRAM || p.currentLabelPair.GetName() != model.BucketLabel) { p.currentLabelPairs = append(p.currentLabelPairs, p.currentLabelPair) } // Check for duplicate label names. diff --git a/vendor/github.com/prometheus/common/model/labels.go b/vendor/github.com/prometheus/common/model/labels.go index f4a38760..de83afe9 100644 --- a/vendor/github.com/prometheus/common/model/labels.go +++ b/vendor/github.com/prometheus/common/model/labels.go @@ -122,7 +122,8 @@ func (ln LabelName) IsValidLegacy() bool { return false } for i, b := range ln { - if !((b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' || (b >= '0' && b <= '9' && i > 0)) { + // TODO: Apply De Morgan's law. Make sure there are tests for this. + if !((b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' || (b >= '0' && b <= '9' && i > 0)) { //nolint:staticcheck return false } } diff --git a/vendor/github.com/prometheus/procfs/.golangci.yml b/vendor/github.com/prometheus/procfs/.golangci.yml index b43e09f6..3c3bf910 100644 --- a/vendor/github.com/prometheus/procfs/.golangci.yml +++ b/vendor/github.com/prometheus/procfs/.golangci.yml @@ -1,31 +1,45 @@ ---- +version: "2" linters: enable: - - errcheck - - forbidigo - - godot - - gofmt - - goimports - - gosimple - - govet - - ineffassign - - misspell - - revive - - staticcheck - - testifylint - - unused - -linters-settings: - forbidigo: - forbid: - - p: ^fmt\.Print.*$ - msg: Do not commit print statements. 
- godot: - capital: true - exclude: - # Ignore "See: URL" - - 'See:' - goimports: - local-prefixes: github.com/prometheus/procfs - misspell: - locale: US + - forbidigo + - godot + - misspell + - revive + - testifylint + settings: + forbidigo: + forbid: + - pattern: ^fmt\.Print.*$ + msg: Do not commit print statements. + godot: + exclude: + # Ignore "See: URL". + - 'See:' + capital: true + misspell: + locale: US + exclusions: + generated: lax + presets: + - comments + - common-false-positives + - legacy + - std-error-handling + paths: + - third_party$ + - builtin$ + - examples$ +formatters: + enable: + - gofmt + - goimports + settings: + goimports: + local-prefixes: + - github.com/prometheus/procfs + exclusions: + generated: lax + paths: + - third_party$ + - builtin$ + - examples$ diff --git a/vendor/github.com/prometheus/procfs/Makefile.common b/vendor/github.com/prometheus/procfs/Makefile.common index cbb5d863..0ed55c2b 100644 --- a/vendor/github.com/prometheus/procfs/Makefile.common +++ b/vendor/github.com/prometheus/procfs/Makefile.common @@ -33,7 +33,7 @@ GOHOSTOS ?= $(shell $(GO) env GOHOSTOS) GOHOSTARCH ?= $(shell $(GO) env GOHOSTARCH) GO_VERSION ?= $(shell $(GO) version) -GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION)) +GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION)) PRE_GO_111 ?= $(shell echo $(GO_VERSION_NUMBER) | grep -E 'go1\.(10|[0-9])\.') PROMU := $(FIRST_GOPATH)/bin/promu @@ -61,7 +61,7 @@ PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_ SKIP_GOLANGCI_LINT := GOLANGCI_LINT := GOLANGCI_LINT_OPTS ?= -GOLANGCI_LINT_VERSION ?= v1.60.2 +GOLANGCI_LINT_VERSION ?= v2.0.2 # golangci-lint only supports linux, darwin and windows platforms on i386/amd64/arm64. # windows isn't included here because of the path separator being different. ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin)) diff --git a/vendor/github.com/prometheus/procfs/internal/util/sysreadfile.go b/vendor/github.com/prometheus/procfs/internal/util/sysreadfile.go index 1ab875ce..d5404a6d 100644 --- a/vendor/github.com/prometheus/procfs/internal/util/sysreadfile.go +++ b/vendor/github.com/prometheus/procfs/internal/util/sysreadfile.go @@ -20,6 +20,8 @@ package util import ( "bytes" "os" + "strconv" + "strings" "syscall" ) @@ -48,3 +50,21 @@ func SysReadFile(file string) (string, error) { return string(bytes.TrimSpace(b[:n])), nil } + +// SysReadUintFromFile reads a file using SysReadFile and attempts to parse a uint64 from it. +func SysReadUintFromFile(path string) (uint64, error) { + data, err := SysReadFile(path) + if err != nil { + return 0, err + } + return strconv.ParseUint(strings.TrimSpace(string(data)), 10, 64) +} + +// SysReadIntFromFile reads a file using SysReadFile and attempts to parse a int64 from it.
+func SysReadIntFromFile(path string) (int64, error) { + data, err := SysReadFile(path) + if err != nil { + return 0, err + } + return strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64) +} diff --git a/vendor/github.com/prometheus/procfs/mountstats.go b/vendor/github.com/prometheus/procfs/mountstats.go index b6c8d1a5..50caa732 100644 --- a/vendor/github.com/prometheus/procfs/mountstats.go +++ b/vendor/github.com/prometheus/procfs/mountstats.go @@ -601,11 +601,12 @@ func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats switch statVersion { case statVersion10: var expectedLength int - if protocol == "tcp" { + switch protocol { + case "tcp": expectedLength = fieldTransport10TCPLen - } else if protocol == "udp" { + case "udp": expectedLength = fieldTransport10UDPLen - } else { + default: return nil, fmt.Errorf("%w: Invalid NFS protocol \"%s\" in stats 1.0 statement: %v", ErrFileParse, protocol, ss) } if len(ss) != expectedLength { @@ -613,13 +614,14 @@ func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats } case statVersion11: var expectedLength int - if protocol == "tcp" { + switch protocol { + case "tcp": expectedLength = fieldTransport11TCPLen - } else if protocol == "udp" { + case "udp": expectedLength = fieldTransport11UDPLen - } else if protocol == "rdma" { + case "rdma": expectedLength = fieldTransport11RDMAMinLen - } else { + default: return nil, fmt.Errorf("%w: invalid NFS protocol \"%s\" in stats 1.1 statement: %v", ErrFileParse, protocol, ss) } if (len(ss) != expectedLength && (protocol == "tcp" || protocol == "udp")) || @@ -655,11 +657,12 @@ func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats // For the udp RPC transport there is no connection count, connect idle time, // or idle time (fields #3, #4, and #5); all other fields are the same. So // we set them to 0 here. - if protocol == "udp" { + switch protocol { + case "udp": ns = append(ns[:2], append(make([]uint64, 3), ns[2:]...)...) - } else if protocol == "tcp" { + case "tcp": ns = append(ns[:fieldTransport11TCPLen], make([]uint64, fieldTransport11RDMAMaxLen-fieldTransport11TCPLen+3)...) - } else if protocol == "rdma" { + case "rdma": ns = append(ns[:fieldTransport10TCPLen], append(make([]uint64, 3), ns[fieldTransport10TCPLen:]...)...) 
} diff --git a/vendor/github.com/prometheus/procfs/net_protocols.go b/vendor/github.com/prometheus/procfs/net_protocols.go index b6c77b70..8d4b1ac0 100644 --- a/vendor/github.com/prometheus/procfs/net_protocols.go +++ b/vendor/github.com/prometheus/procfs/net_protocols.go @@ -115,22 +115,24 @@ func (ps NetProtocolStats) parseLine(rawLine string) (*NetProtocolStatLine, erro if err != nil { return nil, err } - if fields[4] == enabled { + switch fields[4] { + case enabled: line.Pressure = 1 - } else if fields[4] == disabled { + case disabled: line.Pressure = 0 - } else { + default: line.Pressure = -1 } line.MaxHeader, err = strconv.ParseUint(fields[5], 10, 64) if err != nil { return nil, err } - if fields[6] == enabled { + switch fields[6] { + case enabled: line.Slab = true - } else if fields[6] == disabled { + case disabled: line.Slab = false - } else { + default: return nil, fmt.Errorf("%w: capability for protocol: %s", ErrFileParse, line.Name) } line.ModuleName = fields[7] @@ -168,11 +170,12 @@ func (pc *NetProtocolCapabilities) parseCapabilities(capabilities []string) erro } for i := 0; i < len(capabilities); i++ { - if capabilities[i] == "y" { + switch capabilities[i] { + case "y": *capabilityFields[i] = true - } else if capabilities[i] == "n" { + case "n": *capabilityFields[i] = false - } else { + default: return fmt.Errorf("%w: capability block for protocol: position %d", ErrFileParse, i) } } diff --git a/vendor/github.com/prometheus/procfs/proc.go b/vendor/github.com/prometheus/procfs/proc.go index 14279636..368187fa 100644 --- a/vendor/github.com/prometheus/procfs/proc.go +++ b/vendor/github.com/prometheus/procfs/proc.go @@ -37,9 +37,9 @@ type Proc struct { type Procs []Proc var ( - ErrFileParse = errors.New("Error Parsing File") - ErrFileRead = errors.New("Error Reading File") - ErrMountPoint = errors.New("Error Accessing Mount point") + ErrFileParse = errors.New("error parsing file") + ErrFileRead = errors.New("error reading file") + ErrMountPoint = errors.New("error accessing mount point") ) func (p Procs) Len() int { return len(p) } @@ -79,7 +79,7 @@ func (fs FS) Self() (Proc, error) { if err != nil { return Proc{}, err } - pid, err := strconv.Atoi(strings.Replace(p, string(fs.proc), "", -1)) + pid, err := strconv.Atoi(strings.ReplaceAll(p, string(fs.proc), "")) if err != nil { return Proc{}, err } diff --git a/vendor/github.com/prometheus/procfs/proc_netstat.go b/vendor/github.com/prometheus/procfs/proc_netstat.go index 8e3ff4d7..4248c171 100644 --- a/vendor/github.com/prometheus/procfs/proc_netstat.go +++ b/vendor/github.com/prometheus/procfs/proc_netstat.go @@ -209,232 +209,232 @@ func parseProcNetstat(r io.Reader, fileName string) (ProcNetstat, error) { case "TcpExt": switch key { case "SyncookiesSent": - procNetstat.TcpExt.SyncookiesSent = &value + procNetstat.SyncookiesSent = &value case "SyncookiesRecv": - procNetstat.TcpExt.SyncookiesRecv = &value + procNetstat.SyncookiesRecv = &value case "SyncookiesFailed": - procNetstat.TcpExt.SyncookiesFailed = &value + procNetstat.SyncookiesFailed = &value case "EmbryonicRsts": - procNetstat.TcpExt.EmbryonicRsts = &value + procNetstat.EmbryonicRsts = &value case "PruneCalled": - procNetstat.TcpExt.PruneCalled = &value + procNetstat.PruneCalled = &value case "RcvPruned": - procNetstat.TcpExt.RcvPruned = &value + procNetstat.RcvPruned = &value case "OfoPruned": - procNetstat.TcpExt.OfoPruned = &value + procNetstat.OfoPruned = &value case "OutOfWindowIcmps": - procNetstat.TcpExt.OutOfWindowIcmps = &value + procNetstat.OutOfWindowIcmps = 
&value case "LockDroppedIcmps": - procNetstat.TcpExt.LockDroppedIcmps = &value + procNetstat.LockDroppedIcmps = &value case "ArpFilter": - procNetstat.TcpExt.ArpFilter = &value + procNetstat.ArpFilter = &value case "TW": - procNetstat.TcpExt.TW = &value + procNetstat.TW = &value case "TWRecycled": - procNetstat.TcpExt.TWRecycled = &value + procNetstat.TWRecycled = &value case "TWKilled": - procNetstat.TcpExt.TWKilled = &value + procNetstat.TWKilled = &value case "PAWSActive": - procNetstat.TcpExt.PAWSActive = &value + procNetstat.PAWSActive = &value case "PAWSEstab": - procNetstat.TcpExt.PAWSEstab = &value + procNetstat.PAWSEstab = &value case "DelayedACKs": - procNetstat.TcpExt.DelayedACKs = &value + procNetstat.DelayedACKs = &value case "DelayedACKLocked": - procNetstat.TcpExt.DelayedACKLocked = &value + procNetstat.DelayedACKLocked = &value case "DelayedACKLost": - procNetstat.TcpExt.DelayedACKLost = &value + procNetstat.DelayedACKLost = &value case "ListenOverflows": - procNetstat.TcpExt.ListenOverflows = &value + procNetstat.ListenOverflows = &value case "ListenDrops": - procNetstat.TcpExt.ListenDrops = &value + procNetstat.ListenDrops = &value case "TCPHPHits": - procNetstat.TcpExt.TCPHPHits = &value + procNetstat.TCPHPHits = &value case "TCPPureAcks": - procNetstat.TcpExt.TCPPureAcks = &value + procNetstat.TCPPureAcks = &value case "TCPHPAcks": - procNetstat.TcpExt.TCPHPAcks = &value + procNetstat.TCPHPAcks = &value case "TCPRenoRecovery": - procNetstat.TcpExt.TCPRenoRecovery = &value + procNetstat.TCPRenoRecovery = &value case "TCPSackRecovery": - procNetstat.TcpExt.TCPSackRecovery = &value + procNetstat.TCPSackRecovery = &value case "TCPSACKReneging": - procNetstat.TcpExt.TCPSACKReneging = &value + procNetstat.TCPSACKReneging = &value case "TCPSACKReorder": - procNetstat.TcpExt.TCPSACKReorder = &value + procNetstat.TCPSACKReorder = &value case "TCPRenoReorder": - procNetstat.TcpExt.TCPRenoReorder = &value + procNetstat.TCPRenoReorder = &value case "TCPTSReorder": - procNetstat.TcpExt.TCPTSReorder = &value + procNetstat.TCPTSReorder = &value case "TCPFullUndo": - procNetstat.TcpExt.TCPFullUndo = &value + procNetstat.TCPFullUndo = &value case "TCPPartialUndo": - procNetstat.TcpExt.TCPPartialUndo = &value + procNetstat.TCPPartialUndo = &value case "TCPDSACKUndo": - procNetstat.TcpExt.TCPDSACKUndo = &value + procNetstat.TCPDSACKUndo = &value case "TCPLossUndo": - procNetstat.TcpExt.TCPLossUndo = &value + procNetstat.TCPLossUndo = &value case "TCPLostRetransmit": - procNetstat.TcpExt.TCPLostRetransmit = &value + procNetstat.TCPLostRetransmit = &value case "TCPRenoFailures": - procNetstat.TcpExt.TCPRenoFailures = &value + procNetstat.TCPRenoFailures = &value case "TCPSackFailures": - procNetstat.TcpExt.TCPSackFailures = &value + procNetstat.TCPSackFailures = &value case "TCPLossFailures": - procNetstat.TcpExt.TCPLossFailures = &value + procNetstat.TCPLossFailures = &value case "TCPFastRetrans": - procNetstat.TcpExt.TCPFastRetrans = &value + procNetstat.TCPFastRetrans = &value case "TCPSlowStartRetrans": - procNetstat.TcpExt.TCPSlowStartRetrans = &value + procNetstat.TCPSlowStartRetrans = &value case "TCPTimeouts": - procNetstat.TcpExt.TCPTimeouts = &value + procNetstat.TCPTimeouts = &value case "TCPLossProbes": - procNetstat.TcpExt.TCPLossProbes = &value + procNetstat.TCPLossProbes = &value case "TCPLossProbeRecovery": - procNetstat.TcpExt.TCPLossProbeRecovery = &value + procNetstat.TCPLossProbeRecovery = &value case "TCPRenoRecoveryFail": - procNetstat.TcpExt.TCPRenoRecoveryFail = 
&value + procNetstat.TCPRenoRecoveryFail = &value case "TCPSackRecoveryFail": - procNetstat.TcpExt.TCPSackRecoveryFail = &value + procNetstat.TCPSackRecoveryFail = &value case "TCPRcvCollapsed": - procNetstat.TcpExt.TCPRcvCollapsed = &value + procNetstat.TCPRcvCollapsed = &value case "TCPDSACKOldSent": - procNetstat.TcpExt.TCPDSACKOldSent = &value + procNetstat.TCPDSACKOldSent = &value case "TCPDSACKOfoSent": - procNetstat.TcpExt.TCPDSACKOfoSent = &value + procNetstat.TCPDSACKOfoSent = &value case "TCPDSACKRecv": - procNetstat.TcpExt.TCPDSACKRecv = &value + procNetstat.TCPDSACKRecv = &value case "TCPDSACKOfoRecv": - procNetstat.TcpExt.TCPDSACKOfoRecv = &value + procNetstat.TCPDSACKOfoRecv = &value case "TCPAbortOnData": - procNetstat.TcpExt.TCPAbortOnData = &value + procNetstat.TCPAbortOnData = &value case "TCPAbortOnClose": - procNetstat.TcpExt.TCPAbortOnClose = &value + procNetstat.TCPAbortOnClose = &value case "TCPDeferAcceptDrop": - procNetstat.TcpExt.TCPDeferAcceptDrop = &value + procNetstat.TCPDeferAcceptDrop = &value case "IPReversePathFilter": - procNetstat.TcpExt.IPReversePathFilter = &value + procNetstat.IPReversePathFilter = &value case "TCPTimeWaitOverflow": - procNetstat.TcpExt.TCPTimeWaitOverflow = &value + procNetstat.TCPTimeWaitOverflow = &value case "TCPReqQFullDoCookies": - procNetstat.TcpExt.TCPReqQFullDoCookies = &value + procNetstat.TCPReqQFullDoCookies = &value case "TCPReqQFullDrop": - procNetstat.TcpExt.TCPReqQFullDrop = &value + procNetstat.TCPReqQFullDrop = &value case "TCPRetransFail": - procNetstat.TcpExt.TCPRetransFail = &value + procNetstat.TCPRetransFail = &value case "TCPRcvCoalesce": - procNetstat.TcpExt.TCPRcvCoalesce = &value + procNetstat.TCPRcvCoalesce = &value case "TCPRcvQDrop": - procNetstat.TcpExt.TCPRcvQDrop = &value + procNetstat.TCPRcvQDrop = &value case "TCPOFOQueue": - procNetstat.TcpExt.TCPOFOQueue = &value + procNetstat.TCPOFOQueue = &value case "TCPOFODrop": - procNetstat.TcpExt.TCPOFODrop = &value + procNetstat.TCPOFODrop = &value case "TCPOFOMerge": - procNetstat.TcpExt.TCPOFOMerge = &value + procNetstat.TCPOFOMerge = &value case "TCPChallengeACK": - procNetstat.TcpExt.TCPChallengeACK = &value + procNetstat.TCPChallengeACK = &value case "TCPSYNChallenge": - procNetstat.TcpExt.TCPSYNChallenge = &value + procNetstat.TCPSYNChallenge = &value case "TCPFastOpenActive": - procNetstat.TcpExt.TCPFastOpenActive = &value + procNetstat.TCPFastOpenActive = &value case "TCPFastOpenActiveFail": - procNetstat.TcpExt.TCPFastOpenActiveFail = &value + procNetstat.TCPFastOpenActiveFail = &value case "TCPFastOpenPassive": - procNetstat.TcpExt.TCPFastOpenPassive = &value + procNetstat.TCPFastOpenPassive = &value case "TCPFastOpenPassiveFail": - procNetstat.TcpExt.TCPFastOpenPassiveFail = &value + procNetstat.TCPFastOpenPassiveFail = &value case "TCPFastOpenListenOverflow": - procNetstat.TcpExt.TCPFastOpenListenOverflow = &value + procNetstat.TCPFastOpenListenOverflow = &value case "TCPFastOpenCookieReqd": - procNetstat.TcpExt.TCPFastOpenCookieReqd = &value + procNetstat.TCPFastOpenCookieReqd = &value case "TCPFastOpenBlackhole": - procNetstat.TcpExt.TCPFastOpenBlackhole = &value + procNetstat.TCPFastOpenBlackhole = &value case "TCPSpuriousRtxHostQueues": - procNetstat.TcpExt.TCPSpuriousRtxHostQueues = &value + procNetstat.TCPSpuriousRtxHostQueues = &value case "BusyPollRxPackets": - procNetstat.TcpExt.BusyPollRxPackets = &value + procNetstat.BusyPollRxPackets = &value case "TCPAutoCorking": - procNetstat.TcpExt.TCPAutoCorking = &value + 
procNetstat.TCPAutoCorking = &value case "TCPFromZeroWindowAdv": - procNetstat.TcpExt.TCPFromZeroWindowAdv = &value + procNetstat.TCPFromZeroWindowAdv = &value case "TCPToZeroWindowAdv": - procNetstat.TcpExt.TCPToZeroWindowAdv = &value + procNetstat.TCPToZeroWindowAdv = &value case "TCPWantZeroWindowAdv": - procNetstat.TcpExt.TCPWantZeroWindowAdv = &value + procNetstat.TCPWantZeroWindowAdv = &value case "TCPSynRetrans": - procNetstat.TcpExt.TCPSynRetrans = &value + procNetstat.TCPSynRetrans = &value case "TCPOrigDataSent": - procNetstat.TcpExt.TCPOrigDataSent = &value + procNetstat.TCPOrigDataSent = &value case "TCPHystartTrainDetect": - procNetstat.TcpExt.TCPHystartTrainDetect = &value + procNetstat.TCPHystartTrainDetect = &value case "TCPHystartTrainCwnd": - procNetstat.TcpExt.TCPHystartTrainCwnd = &value + procNetstat.TCPHystartTrainCwnd = &value case "TCPHystartDelayDetect": - procNetstat.TcpExt.TCPHystartDelayDetect = &value + procNetstat.TCPHystartDelayDetect = &value case "TCPHystartDelayCwnd": - procNetstat.TcpExt.TCPHystartDelayCwnd = &value + procNetstat.TCPHystartDelayCwnd = &value case "TCPACKSkippedSynRecv": - procNetstat.TcpExt.TCPACKSkippedSynRecv = &value + procNetstat.TCPACKSkippedSynRecv = &value case "TCPACKSkippedPAWS": - procNetstat.TcpExt.TCPACKSkippedPAWS = &value + procNetstat.TCPACKSkippedPAWS = &value case "TCPACKSkippedSeq": - procNetstat.TcpExt.TCPACKSkippedSeq = &value + procNetstat.TCPACKSkippedSeq = &value case "TCPACKSkippedFinWait2": - procNetstat.TcpExt.TCPACKSkippedFinWait2 = &value + procNetstat.TCPACKSkippedFinWait2 = &value case "TCPACKSkippedTimeWait": - procNetstat.TcpExt.TCPACKSkippedTimeWait = &value + procNetstat.TCPACKSkippedTimeWait = &value case "TCPACKSkippedChallenge": - procNetstat.TcpExt.TCPACKSkippedChallenge = &value + procNetstat.TCPACKSkippedChallenge = &value case "TCPWinProbe": - procNetstat.TcpExt.TCPWinProbe = &value + procNetstat.TCPWinProbe = &value case "TCPKeepAlive": - procNetstat.TcpExt.TCPKeepAlive = &value + procNetstat.TCPKeepAlive = &value case "TCPMTUPFail": - procNetstat.TcpExt.TCPMTUPFail = &value + procNetstat.TCPMTUPFail = &value case "TCPMTUPSuccess": - procNetstat.TcpExt.TCPMTUPSuccess = &value + procNetstat.TCPMTUPSuccess = &value case "TCPWqueueTooBig": - procNetstat.TcpExt.TCPWqueueTooBig = &value + procNetstat.TCPWqueueTooBig = &value } case "IpExt": switch key { case "InNoRoutes": - procNetstat.IpExt.InNoRoutes = &value + procNetstat.InNoRoutes = &value case "InTruncatedPkts": - procNetstat.IpExt.InTruncatedPkts = &value + procNetstat.InTruncatedPkts = &value case "InMcastPkts": - procNetstat.IpExt.InMcastPkts = &value + procNetstat.InMcastPkts = &value case "OutMcastPkts": - procNetstat.IpExt.OutMcastPkts = &value + procNetstat.OutMcastPkts = &value case "InBcastPkts": - procNetstat.IpExt.InBcastPkts = &value + procNetstat.InBcastPkts = &value case "OutBcastPkts": - procNetstat.IpExt.OutBcastPkts = &value + procNetstat.OutBcastPkts = &value case "InOctets": - procNetstat.IpExt.InOctets = &value + procNetstat.InOctets = &value case "OutOctets": - procNetstat.IpExt.OutOctets = &value + procNetstat.OutOctets = &value case "InMcastOctets": - procNetstat.IpExt.InMcastOctets = &value + procNetstat.InMcastOctets = &value case "OutMcastOctets": - procNetstat.IpExt.OutMcastOctets = &value + procNetstat.OutMcastOctets = &value case "InBcastOctets": - procNetstat.IpExt.InBcastOctets = &value + procNetstat.InBcastOctets = &value case "OutBcastOctets": - procNetstat.IpExt.OutBcastOctets = &value + 
procNetstat.OutBcastOctets = &value case "InCsumErrors": - procNetstat.IpExt.InCsumErrors = &value + procNetstat.InCsumErrors = &value case "InNoECTPkts": - procNetstat.IpExt.InNoECTPkts = &value + procNetstat.InNoECTPkts = &value case "InECT1Pkts": - procNetstat.IpExt.InECT1Pkts = &value + procNetstat.InECT1Pkts = &value case "InECT0Pkts": - procNetstat.IpExt.InECT0Pkts = &value + procNetstat.InECT0Pkts = &value case "InCEPkts": - procNetstat.IpExt.InCEPkts = &value + procNetstat.InCEPkts = &value case "ReasmOverlaps": - procNetstat.IpExt.ReasmOverlaps = &value + procNetstat.ReasmOverlaps = &value } } } diff --git a/vendor/github.com/prometheus/procfs/proc_snmp.go b/vendor/github.com/prometheus/procfs/proc_snmp.go index b9d2cf64..4bdc90b0 100644 --- a/vendor/github.com/prometheus/procfs/proc_snmp.go +++ b/vendor/github.com/prometheus/procfs/proc_snmp.go @@ -173,138 +173,138 @@ func parseSnmp(r io.Reader, fileName string) (ProcSnmp, error) { case "Ip": switch key { case "Forwarding": - procSnmp.Ip.Forwarding = &value + procSnmp.Forwarding = &value case "DefaultTTL": - procSnmp.Ip.DefaultTTL = &value + procSnmp.DefaultTTL = &value case "InReceives": - procSnmp.Ip.InReceives = &value + procSnmp.InReceives = &value case "InHdrErrors": - procSnmp.Ip.InHdrErrors = &value + procSnmp.InHdrErrors = &value case "InAddrErrors": - procSnmp.Ip.InAddrErrors = &value + procSnmp.InAddrErrors = &value case "ForwDatagrams": - procSnmp.Ip.ForwDatagrams = &value + procSnmp.ForwDatagrams = &value case "InUnknownProtos": - procSnmp.Ip.InUnknownProtos = &value + procSnmp.InUnknownProtos = &value case "InDiscards": - procSnmp.Ip.InDiscards = &value + procSnmp.InDiscards = &value case "InDelivers": - procSnmp.Ip.InDelivers = &value + procSnmp.InDelivers = &value case "OutRequests": - procSnmp.Ip.OutRequests = &value + procSnmp.OutRequests = &value case "OutDiscards": - procSnmp.Ip.OutDiscards = &value + procSnmp.OutDiscards = &value case "OutNoRoutes": - procSnmp.Ip.OutNoRoutes = &value + procSnmp.OutNoRoutes = &value case "ReasmTimeout": - procSnmp.Ip.ReasmTimeout = &value + procSnmp.ReasmTimeout = &value case "ReasmReqds": - procSnmp.Ip.ReasmReqds = &value + procSnmp.ReasmReqds = &value case "ReasmOKs": - procSnmp.Ip.ReasmOKs = &value + procSnmp.ReasmOKs = &value case "ReasmFails": - procSnmp.Ip.ReasmFails = &value + procSnmp.ReasmFails = &value case "FragOKs": - procSnmp.Ip.FragOKs = &value + procSnmp.FragOKs = &value case "FragFails": - procSnmp.Ip.FragFails = &value + procSnmp.FragFails = &value case "FragCreates": - procSnmp.Ip.FragCreates = &value + procSnmp.FragCreates = &value } case "Icmp": switch key { case "InMsgs": - procSnmp.Icmp.InMsgs = &value + procSnmp.InMsgs = &value case "InErrors": procSnmp.Icmp.InErrors = &value case "InCsumErrors": procSnmp.Icmp.InCsumErrors = &value case "InDestUnreachs": - procSnmp.Icmp.InDestUnreachs = &value + procSnmp.InDestUnreachs = &value case "InTimeExcds": - procSnmp.Icmp.InTimeExcds = &value + procSnmp.InTimeExcds = &value case "InParmProbs": - procSnmp.Icmp.InParmProbs = &value + procSnmp.InParmProbs = &value case "InSrcQuenchs": - procSnmp.Icmp.InSrcQuenchs = &value + procSnmp.InSrcQuenchs = &value case "InRedirects": - procSnmp.Icmp.InRedirects = &value + procSnmp.InRedirects = &value case "InEchos": - procSnmp.Icmp.InEchos = &value + procSnmp.InEchos = &value case "InEchoReps": - procSnmp.Icmp.InEchoReps = &value + procSnmp.InEchoReps = &value case "InTimestamps": - procSnmp.Icmp.InTimestamps = &value + procSnmp.InTimestamps = &value case "InTimestampReps": - 
procSnmp.Icmp.InTimestampReps = &value + procSnmp.InTimestampReps = &value case "InAddrMasks": - procSnmp.Icmp.InAddrMasks = &value + procSnmp.InAddrMasks = &value case "InAddrMaskReps": - procSnmp.Icmp.InAddrMaskReps = &value + procSnmp.InAddrMaskReps = &value case "OutMsgs": - procSnmp.Icmp.OutMsgs = &value + procSnmp.OutMsgs = &value case "OutErrors": - procSnmp.Icmp.OutErrors = &value + procSnmp.OutErrors = &value case "OutDestUnreachs": - procSnmp.Icmp.OutDestUnreachs = &value + procSnmp.OutDestUnreachs = &value case "OutTimeExcds": - procSnmp.Icmp.OutTimeExcds = &value + procSnmp.OutTimeExcds = &value case "OutParmProbs": - procSnmp.Icmp.OutParmProbs = &value + procSnmp.OutParmProbs = &value case "OutSrcQuenchs": - procSnmp.Icmp.OutSrcQuenchs = &value + procSnmp.OutSrcQuenchs = &value case "OutRedirects": - procSnmp.Icmp.OutRedirects = &value + procSnmp.OutRedirects = &value case "OutEchos": - procSnmp.Icmp.OutEchos = &value + procSnmp.OutEchos = &value case "OutEchoReps": - procSnmp.Icmp.OutEchoReps = &value + procSnmp.OutEchoReps = &value case "OutTimestamps": - procSnmp.Icmp.OutTimestamps = &value + procSnmp.OutTimestamps = &value case "OutTimestampReps": - procSnmp.Icmp.OutTimestampReps = &value + procSnmp.OutTimestampReps = &value case "OutAddrMasks": - procSnmp.Icmp.OutAddrMasks = &value + procSnmp.OutAddrMasks = &value case "OutAddrMaskReps": - procSnmp.Icmp.OutAddrMaskReps = &value + procSnmp.OutAddrMaskReps = &value } case "IcmpMsg": switch key { case "InType3": - procSnmp.IcmpMsg.InType3 = &value + procSnmp.InType3 = &value case "OutType3": - procSnmp.IcmpMsg.OutType3 = &value + procSnmp.OutType3 = &value } case "Tcp": switch key { case "RtoAlgorithm": - procSnmp.Tcp.RtoAlgorithm = &value + procSnmp.RtoAlgorithm = &value case "RtoMin": - procSnmp.Tcp.RtoMin = &value + procSnmp.RtoMin = &value case "RtoMax": - procSnmp.Tcp.RtoMax = &value + procSnmp.RtoMax = &value case "MaxConn": - procSnmp.Tcp.MaxConn = &value + procSnmp.MaxConn = &value case "ActiveOpens": - procSnmp.Tcp.ActiveOpens = &value + procSnmp.ActiveOpens = &value case "PassiveOpens": - procSnmp.Tcp.PassiveOpens = &value + procSnmp.PassiveOpens = &value case "AttemptFails": - procSnmp.Tcp.AttemptFails = &value + procSnmp.AttemptFails = &value case "EstabResets": - procSnmp.Tcp.EstabResets = &value + procSnmp.EstabResets = &value case "CurrEstab": - procSnmp.Tcp.CurrEstab = &value + procSnmp.CurrEstab = &value case "InSegs": - procSnmp.Tcp.InSegs = &value + procSnmp.InSegs = &value case "OutSegs": - procSnmp.Tcp.OutSegs = &value + procSnmp.OutSegs = &value case "RetransSegs": - procSnmp.Tcp.RetransSegs = &value + procSnmp.RetransSegs = &value case "InErrs": - procSnmp.Tcp.InErrs = &value + procSnmp.InErrs = &value case "OutRsts": - procSnmp.Tcp.OutRsts = &value + procSnmp.OutRsts = &value case "InCsumErrors": procSnmp.Tcp.InCsumErrors = &value } diff --git a/vendor/github.com/prometheus/procfs/proc_snmp6.go b/vendor/github.com/prometheus/procfs/proc_snmp6.go index 3059cc6a..fb7fd399 100644 --- a/vendor/github.com/prometheus/procfs/proc_snmp6.go +++ b/vendor/github.com/prometheus/procfs/proc_snmp6.go @@ -182,161 +182,161 @@ func parseSNMP6Stats(r io.Reader) (ProcSnmp6, error) { case "Ip6": switch key { case "InReceives": - procSnmp6.Ip6.InReceives = &value + procSnmp6.InReceives = &value case "InHdrErrors": - procSnmp6.Ip6.InHdrErrors = &value + procSnmp6.InHdrErrors = &value case "InTooBigErrors": - procSnmp6.Ip6.InTooBigErrors = &value + procSnmp6.InTooBigErrors = &value case "InNoRoutes": - 
procSnmp6.Ip6.InNoRoutes = &value + procSnmp6.InNoRoutes = &value case "InAddrErrors": - procSnmp6.Ip6.InAddrErrors = &value + procSnmp6.InAddrErrors = &value case "InUnknownProtos": - procSnmp6.Ip6.InUnknownProtos = &value + procSnmp6.InUnknownProtos = &value case "InTruncatedPkts": - procSnmp6.Ip6.InTruncatedPkts = &value + procSnmp6.InTruncatedPkts = &value case "InDiscards": - procSnmp6.Ip6.InDiscards = &value + procSnmp6.InDiscards = &value case "InDelivers": - procSnmp6.Ip6.InDelivers = &value + procSnmp6.InDelivers = &value case "OutForwDatagrams": - procSnmp6.Ip6.OutForwDatagrams = &value + procSnmp6.OutForwDatagrams = &value case "OutRequests": - procSnmp6.Ip6.OutRequests = &value + procSnmp6.OutRequests = &value case "OutDiscards": - procSnmp6.Ip6.OutDiscards = &value + procSnmp6.OutDiscards = &value case "OutNoRoutes": - procSnmp6.Ip6.OutNoRoutes = &value + procSnmp6.OutNoRoutes = &value case "ReasmTimeout": - procSnmp6.Ip6.ReasmTimeout = &value + procSnmp6.ReasmTimeout = &value case "ReasmReqds": - procSnmp6.Ip6.ReasmReqds = &value + procSnmp6.ReasmReqds = &value case "ReasmOKs": - procSnmp6.Ip6.ReasmOKs = &value + procSnmp6.ReasmOKs = &value case "ReasmFails": - procSnmp6.Ip6.ReasmFails = &value + procSnmp6.ReasmFails = &value case "FragOKs": - procSnmp6.Ip6.FragOKs = &value + procSnmp6.FragOKs = &value case "FragFails": - procSnmp6.Ip6.FragFails = &value + procSnmp6.FragFails = &value case "FragCreates": - procSnmp6.Ip6.FragCreates = &value + procSnmp6.FragCreates = &value case "InMcastPkts": - procSnmp6.Ip6.InMcastPkts = &value + procSnmp6.InMcastPkts = &value case "OutMcastPkts": - procSnmp6.Ip6.OutMcastPkts = &value + procSnmp6.OutMcastPkts = &value case "InOctets": - procSnmp6.Ip6.InOctets = &value + procSnmp6.InOctets = &value case "OutOctets": - procSnmp6.Ip6.OutOctets = &value + procSnmp6.OutOctets = &value case "InMcastOctets": - procSnmp6.Ip6.InMcastOctets = &value + procSnmp6.InMcastOctets = &value case "OutMcastOctets": - procSnmp6.Ip6.OutMcastOctets = &value + procSnmp6.OutMcastOctets = &value case "InBcastOctets": - procSnmp6.Ip6.InBcastOctets = &value + procSnmp6.InBcastOctets = &value case "OutBcastOctets": - procSnmp6.Ip6.OutBcastOctets = &value + procSnmp6.OutBcastOctets = &value case "InNoECTPkts": - procSnmp6.Ip6.InNoECTPkts = &value + procSnmp6.InNoECTPkts = &value case "InECT1Pkts": - procSnmp6.Ip6.InECT1Pkts = &value + procSnmp6.InECT1Pkts = &value case "InECT0Pkts": - procSnmp6.Ip6.InECT0Pkts = &value + procSnmp6.InECT0Pkts = &value case "InCEPkts": - procSnmp6.Ip6.InCEPkts = &value + procSnmp6.InCEPkts = &value } case "Icmp6": switch key { case "InMsgs": - procSnmp6.Icmp6.InMsgs = &value + procSnmp6.InMsgs = &value case "InErrors": procSnmp6.Icmp6.InErrors = &value case "OutMsgs": - procSnmp6.Icmp6.OutMsgs = &value + procSnmp6.OutMsgs = &value case "OutErrors": - procSnmp6.Icmp6.OutErrors = &value + procSnmp6.OutErrors = &value case "InCsumErrors": procSnmp6.Icmp6.InCsumErrors = &value case "InDestUnreachs": - procSnmp6.Icmp6.InDestUnreachs = &value + procSnmp6.InDestUnreachs = &value case "InPktTooBigs": - procSnmp6.Icmp6.InPktTooBigs = &value + procSnmp6.InPktTooBigs = &value case "InTimeExcds": - procSnmp6.Icmp6.InTimeExcds = &value + procSnmp6.InTimeExcds = &value case "InParmProblems": - procSnmp6.Icmp6.InParmProblems = &value + procSnmp6.InParmProblems = &value case "InEchos": - procSnmp6.Icmp6.InEchos = &value + procSnmp6.InEchos = &value case "InEchoReplies": - procSnmp6.Icmp6.InEchoReplies = &value + procSnmp6.InEchoReplies = &value case 
"InGroupMembQueries": - procSnmp6.Icmp6.InGroupMembQueries = &value + procSnmp6.InGroupMembQueries = &value case "InGroupMembResponses": - procSnmp6.Icmp6.InGroupMembResponses = &value + procSnmp6.InGroupMembResponses = &value case "InGroupMembReductions": - procSnmp6.Icmp6.InGroupMembReductions = &value + procSnmp6.InGroupMembReductions = &value case "InRouterSolicits": - procSnmp6.Icmp6.InRouterSolicits = &value + procSnmp6.InRouterSolicits = &value case "InRouterAdvertisements": - procSnmp6.Icmp6.InRouterAdvertisements = &value + procSnmp6.InRouterAdvertisements = &value case "InNeighborSolicits": - procSnmp6.Icmp6.InNeighborSolicits = &value + procSnmp6.InNeighborSolicits = &value case "InNeighborAdvertisements": - procSnmp6.Icmp6.InNeighborAdvertisements = &value + procSnmp6.InNeighborAdvertisements = &value case "InRedirects": - procSnmp6.Icmp6.InRedirects = &value + procSnmp6.InRedirects = &value case "InMLDv2Reports": - procSnmp6.Icmp6.InMLDv2Reports = &value + procSnmp6.InMLDv2Reports = &value case "OutDestUnreachs": - procSnmp6.Icmp6.OutDestUnreachs = &value + procSnmp6.OutDestUnreachs = &value case "OutPktTooBigs": - procSnmp6.Icmp6.OutPktTooBigs = &value + procSnmp6.OutPktTooBigs = &value case "OutTimeExcds": - procSnmp6.Icmp6.OutTimeExcds = &value + procSnmp6.OutTimeExcds = &value case "OutParmProblems": - procSnmp6.Icmp6.OutParmProblems = &value + procSnmp6.OutParmProblems = &value case "OutEchos": - procSnmp6.Icmp6.OutEchos = &value + procSnmp6.OutEchos = &value case "OutEchoReplies": - procSnmp6.Icmp6.OutEchoReplies = &value + procSnmp6.OutEchoReplies = &value case "OutGroupMembQueries": - procSnmp6.Icmp6.OutGroupMembQueries = &value + procSnmp6.OutGroupMembQueries = &value case "OutGroupMembResponses": - procSnmp6.Icmp6.OutGroupMembResponses = &value + procSnmp6.OutGroupMembResponses = &value case "OutGroupMembReductions": - procSnmp6.Icmp6.OutGroupMembReductions = &value + procSnmp6.OutGroupMembReductions = &value case "OutRouterSolicits": - procSnmp6.Icmp6.OutRouterSolicits = &value + procSnmp6.OutRouterSolicits = &value case "OutRouterAdvertisements": - procSnmp6.Icmp6.OutRouterAdvertisements = &value + procSnmp6.OutRouterAdvertisements = &value case "OutNeighborSolicits": - procSnmp6.Icmp6.OutNeighborSolicits = &value + procSnmp6.OutNeighborSolicits = &value case "OutNeighborAdvertisements": - procSnmp6.Icmp6.OutNeighborAdvertisements = &value + procSnmp6.OutNeighborAdvertisements = &value case "OutRedirects": - procSnmp6.Icmp6.OutRedirects = &value + procSnmp6.OutRedirects = &value case "OutMLDv2Reports": - procSnmp6.Icmp6.OutMLDv2Reports = &value + procSnmp6.OutMLDv2Reports = &value case "InType1": - procSnmp6.Icmp6.InType1 = &value + procSnmp6.InType1 = &value case "InType134": - procSnmp6.Icmp6.InType134 = &value + procSnmp6.InType134 = &value case "InType135": - procSnmp6.Icmp6.InType135 = &value + procSnmp6.InType135 = &value case "InType136": - procSnmp6.Icmp6.InType136 = &value + procSnmp6.InType136 = &value case "InType143": - procSnmp6.Icmp6.InType143 = &value + procSnmp6.InType143 = &value case "OutType133": - procSnmp6.Icmp6.OutType133 = &value + procSnmp6.OutType133 = &value case "OutType135": - procSnmp6.Icmp6.OutType135 = &value + procSnmp6.OutType135 = &value case "OutType136": - procSnmp6.Icmp6.OutType136 = &value + procSnmp6.OutType136 = &value case "OutType143": - procSnmp6.Icmp6.OutType143 = &value + procSnmp6.OutType143 = &value } case "Udp6": switch key { @@ -355,7 +355,7 @@ func parseSNMP6Stats(r io.Reader) (ProcSnmp6, error) { case 
"InCsumErrors": procSnmp6.Udp6.InCsumErrors = &value case "IgnoredMulti": - procSnmp6.Udp6.IgnoredMulti = &value + procSnmp6.IgnoredMulti = &value } case "UdpLite6": switch key { diff --git a/vendor/github.com/prometheus/procfs/proc_sys.go b/vendor/github.com/prometheus/procfs/proc_sys.go index 5eefbe2e..3810d1ac 100644 --- a/vendor/github.com/prometheus/procfs/proc_sys.go +++ b/vendor/github.com/prometheus/procfs/proc_sys.go @@ -21,7 +21,7 @@ import ( ) func sysctlToPath(sysctl string) string { - return strings.Replace(sysctl, ".", "/", -1) + return strings.ReplaceAll(sysctl, ".", "/") } func (fs FS) SysctlStrings(sysctl string) ([]string, error) { diff --git a/vendor/github.com/prometheus/procfs/softirqs.go b/vendor/github.com/prometheus/procfs/softirqs.go index 28708e07..403e6ae7 100644 --- a/vendor/github.com/prometheus/procfs/softirqs.go +++ b/vendor/github.com/prometheus/procfs/softirqs.go @@ -68,8 +68,8 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { if len(parts) < 2 { continue } - switch { - case parts[0] == "HI:": + switch parts[0] { + case "HI:": perCPU := parts[1:] softirqs.Hi = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -77,7 +77,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (HI%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "TIMER:": + case "TIMER:": perCPU := parts[1:] softirqs.Timer = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -85,7 +85,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (TIMER%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "NET_TX:": + case "NET_TX:": perCPU := parts[1:] softirqs.NetTx = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -93,7 +93,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (NET_TX%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "NET_RX:": + case "NET_RX:": perCPU := parts[1:] softirqs.NetRx = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -101,7 +101,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (NET_RX%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "BLOCK:": + case "BLOCK:": perCPU := parts[1:] softirqs.Block = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -109,7 +109,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (BLOCK%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "IRQ_POLL:": + case "IRQ_POLL:": perCPU := parts[1:] softirqs.IRQPoll = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -117,7 +117,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (IRQ_POLL%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "TASKLET:": + case "TASKLET:": perCPU := parts[1:] softirqs.Tasklet = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -125,7 +125,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (TASKLET%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "SCHED:": + case "SCHED:": perCPU := parts[1:] softirqs.Sched = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -133,7 +133,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (SCHED%d): %w", 
ErrFileParse, count, i, err) } } - case parts[0] == "HRTIMER:": + case "HRTIMER:": perCPU := parts[1:] softirqs.HRTimer = make([]uint64, len(perCPU)) for i, count := range perCPU { @@ -141,7 +141,7 @@ func parseSoftirqs(r io.Reader) (Softirqs, error) { return Softirqs{}, fmt.Errorf("%w: couldn't parse %q (HRTIMER%d): %w", ErrFileParse, count, i, err) } } - case parts[0] == "RCU:": + case "RCU:": perCPU := parts[1:] softirqs.RCU = make([]uint64, len(perCPU)) for i, count := range perCPU { diff --git a/vendor/go.opentelemetry.io/otel/.golangci.yml b/vendor/go.opentelemetry.io/otel/.golangci.yml index c58e48ab..888e5da8 100644 --- a/vendor/go.opentelemetry.io/otel/.golangci.yml +++ b/vendor/go.opentelemetry.io/otel/.golangci.yml @@ -1,13 +1,9 @@ -# See https://github.com/golangci/golangci-lint#config-file +version: "2" run: - issues-exit-code: 1 #Default - tests: true #Default - + issues-exit-code: 1 + tests: true linters: - # Disable everything by default so upgrades to not include new "default - # enabled" linters. - disable-all: true - # Specifically enable linters we want to use. + default: none enable: - asasalint - bodyclose @@ -15,10 +11,7 @@ linters: - errcheck - errorlint - godot - - gofumpt - - goimports - gosec - - gosimple - govet - ineffassign - misspell @@ -26,227 +19,230 @@ linters: - revive - staticcheck - testifylint - - typecheck - unconvert - - unused - unparam + - unused - usestdlibvars - usetesting - + settings: + depguard: + rules: + auto/sdk: + files: + - '!internal/global/trace.go' + - ~internal/global/trace_test.go + deny: + - pkg: go.opentelemetry.io/auto/sdk + desc: Do not use SDK from automatic instrumentation. + non-tests: + files: + - '!$test' + - '!**/*test/*.go' + - '!**/internal/matchers/*.go' + deny: + - pkg: testing + - pkg: github.com/stretchr/testify + - pkg: crypto/md5 + - pkg: crypto/sha1 + - pkg: crypto/**/pkix + otel-internal: + files: + - '**/sdk/*.go' + - '**/sdk/**/*.go' + - '**/exporters/*.go' + - '**/exporters/**/*.go' + - '**/schema/*.go' + - '**/schema/**/*.go' + - '**/metric/*.go' + - '**/metric/**/*.go' + - '**/bridge/*.go' + - '**/bridge/**/*.go' + - '**/trace/*.go' + - '**/trace/**/*.go' + - '**/log/*.go' + - '**/log/**/*.go' + deny: + - pkg: go.opentelemetry.io/otel/internal$ + desc: Do not use cross-module internal packages. + - pkg: go.opentelemetry.io/otel/internal/internaltest + desc: Do not use cross-module internal packages. + - pkg: go.opentelemetry.io/otel/internal/matchers + desc: Do not use cross-module internal packages. + otlp-internal: + files: + - '!**/exporters/otlp/internal/**/*.go' + deny: + - pkg: go.opentelemetry.io/otel/exporters/otlp/internal + desc: Do not use cross-module internal packages. + otlpmetric-internal: + files: + - '!**/exporters/otlp/otlpmetric/internal/*.go' + - '!**/exporters/otlp/otlpmetric/internal/**/*.go' + deny: + - pkg: go.opentelemetry.io/otel/exporters/otlp/otlpmetric/internal + desc: Do not use cross-module internal packages. + otlptrace-internal: + files: + - '!**/exporters/otlp/otlptrace/*.go' + - '!**/exporters/otlp/otlptrace/internal/**.go' + deny: + - pkg: go.opentelemetry.io/otel/exporters/otlp/otlptrace/internal + desc: Do not use cross-module internal packages. + godot: + exclude: + # Exclude links. + - '^ *\[[^]]+\]:' + # Exclude sentence fragments for lists. + - ^[ ]*[-•] + # Exclude sentences prefixing a list. 
+ - :$ + misspell: + locale: US + ignore-rules: + - cancelled + perfsprint: + int-conversion: true + err-error: true + errorf: true + sprintf1: true + strconcat: true + revive: + confidence: 0.01 + rules: + - name: blank-imports + - name: bool-literal-in-expr + - name: constant-logical-expr + - name: context-as-argument + arguments: + - allowTypesBefore: '*testing.T' + disabled: true + - name: context-keys-type + - name: deep-exit + - name: defer + arguments: + - - call-chain + - loop + - name: dot-imports + - name: duplicated-imports + - name: early-return + arguments: + - preserveScope + - name: empty-block + - name: empty-lines + - name: error-naming + - name: error-return + - name: error-strings + - name: errorf + - name: exported + arguments: + - sayRepetitiveInsteadOfStutters + - name: flag-parameter + - name: identical-branches + - name: if-return + - name: import-shadowing + - name: increment-decrement + - name: indent-error-flow + arguments: + - preserveScope + - name: package-comments + - name: range + - name: range-val-in-closure + - name: range-val-address + - name: redefines-builtin-id + - name: string-format + arguments: + - - panic + - /^[^\n]*$/ + - must not contain line breaks + - name: struct-tag + - name: superfluous-else + arguments: + - preserveScope + - name: time-equal + - name: unconditional-recursion + - name: unexported-return + - name: unhandled-error + arguments: + - fmt.Fprint + - fmt.Fprintf + - fmt.Fprintln + - fmt.Print + - fmt.Printf + - fmt.Println + - name: unnecessary-stmt + - name: useless-break + - name: var-declaration + - name: var-naming + arguments: + - ["ID"] # AllowList + - ["Otel", "Aws", "Gcp"] # DenyList + - name: waitgroup-by-value + testifylint: + enable-all: true + disable: + - float-compare + - go-require + - require-error + exclusions: + generated: lax + presets: + - common-false-positives + - legacy + - std-error-handling + rules: + # TODO: Having appropriate comments for exported objects helps development, + # even for objects in internal packages. Appropriate comments for all + # exported objects should be added and this exclusion removed. + - linters: + - revive + path: .*internal/.* + text: exported (method|function|type|const) (.+) should have comment or be unexported + # Yes, they are, but it's okay in a test. + - linters: + - revive + path: _test\.go + text: exported func.*returns unexported type.*which can be annoying to use + # Example test functions should be treated like main. + - linters: + - revive + path: example.*_test\.go + text: calls to (.+) only in main[(][)] or init[(][)] functions + # It's okay to not run gosec and perfsprint in a test. + - linters: + - gosec + - perfsprint + path: _test\.go + # Ignoring gosec G404: Use of weak random number generator (math/rand instead of crypto/rand) + # as we commonly use it in tests and examples. + - linters: + - gosec + text: 'G404:' + # Ignoring gosec G402: TLS MinVersion too low + # as the https://pkg.go.dev/crypto/tls#Config handles MinVersion default well. + - linters: + - gosec + text: 'G402: TLS MinVersion too low.' + paths: + - third_party$ + - builtin$ + - examples$ issues: - # Maximum issues count per one linter. - # Set to 0 to disable. - # Default: 50 - # Setting to unlimited so the linter only is run once to debug all issues. max-issues-per-linter: 0 - # Maximum count of issues with the same text. - # Set to 0 to disable. - # Default: 3 - # Setting to unlimited so the linter only is run once to debug all issues. 
max-same-issues: 0 - # Excluding configuration per-path, per-linter, per-text and per-source. - exclude-rules: - # TODO: Having appropriate comments for exported objects helps development, - # even for objects in internal packages. Appropriate comments for all - # exported objects should be added and this exclusion removed. - - path: '.*internal/.*' - text: "exported (method|function|type|const) (.+) should have comment or be unexported" - linters: - - revive - # Yes, they are, but it's okay in a test. - - path: _test\.go - text: "exported func.*returns unexported type.*which can be annoying to use" - linters: - - revive - # Example test functions should be treated like main. - - path: example.*_test\.go - text: "calls to (.+) only in main[(][)] or init[(][)] functions" - linters: - - revive - # It's okay to not run gosec and perfsprint in a test. - - path: _test\.go - linters: - - gosec - - perfsprint - # Ignoring gosec G404: Use of weak random number generator (math/rand instead of crypto/rand) - # as we commonly use it in tests and examples. - - text: "G404:" - linters: - - gosec - # Ignoring gosec G402: TLS MinVersion too low - # as the https://pkg.go.dev/crypto/tls#Config handles MinVersion default well. - - text: "G402: TLS MinVersion too low." - linters: - - gosec - include: - # revive exported should have comment or be unexported. - - EXC0012 - # revive package comment should be of the form ... - - EXC0013 - -linters-settings: - depguard: - rules: - non-tests: - files: - - "!$test" - - "!**/*test/*.go" - - "!**/internal/matchers/*.go" - deny: - - pkg: "testing" - - pkg: "github.com/stretchr/testify" - - pkg: "crypto/md5" - - pkg: "crypto/sha1" - - pkg: "crypto/**/pkix" - auto/sdk: - files: - - "!internal/global/trace.go" - - "~internal/global/trace_test.go" - deny: - - pkg: "go.opentelemetry.io/auto/sdk" - desc: Do not use SDK from automatic instrumentation. - otlp-internal: - files: - - "!**/exporters/otlp/internal/**/*.go" - deny: - - pkg: "go.opentelemetry.io/otel/exporters/otlp/internal" - desc: Do not use cross-module internal packages. - otlptrace-internal: - files: - - "!**/exporters/otlp/otlptrace/*.go" - - "!**/exporters/otlp/otlptrace/internal/**.go" - deny: - - pkg: "go.opentelemetry.io/otel/exporters/otlp/otlptrace/internal" - desc: Do not use cross-module internal packages. - otlpmetric-internal: - files: - - "!**/exporters/otlp/otlpmetric/internal/*.go" - - "!**/exporters/otlp/otlpmetric/internal/**/*.go" - deny: - - pkg: "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/internal" - desc: Do not use cross-module internal packages. - otel-internal: - files: - - "**/sdk/*.go" - - "**/sdk/**/*.go" - - "**/exporters/*.go" - - "**/exporters/**/*.go" - - "**/schema/*.go" - - "**/schema/**/*.go" - - "**/metric/*.go" - - "**/metric/**/*.go" - - "**/bridge/*.go" - - "**/bridge/**/*.go" - - "**/trace/*.go" - - "**/trace/**/*.go" - - "**/log/*.go" - - "**/log/**/*.go" - deny: - - pkg: "go.opentelemetry.io/otel/internal$" - desc: Do not use cross-module internal packages. - - pkg: "go.opentelemetry.io/otel/internal/attribute" - desc: Do not use cross-module internal packages. - - pkg: "go.opentelemetry.io/otel/internal/internaltest" - desc: Do not use cross-module internal packages. - - pkg: "go.opentelemetry.io/otel/internal/matchers" - desc: Do not use cross-module internal packages. - godot: - exclude: - # Exclude links. - - '^ *\[[^]]+\]:' - # Exclude sentence fragments for lists. - - '^[ ]*[-•]' - # Exclude sentences prefixing a list. 
- - ':$' - goimports: - local-prefixes: go.opentelemetry.io - misspell: - locale: US - ignore-words: - - cancelled - perfsprint: - err-error: true - errorf: true - int-conversion: true - sprintf1: true - strconcat: true - revive: - # Sets the default failure confidence. - # This means that linting errors with less than 0.8 confidence will be ignored. - # Default: 0.8 - confidence: 0.01 - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md - rules: - - name: blank-imports - - name: bool-literal-in-expr - - name: constant-logical-expr - - name: context-as-argument - disabled: true - arguments: - - allowTypesBefore: "*testing.T" - - name: context-keys-type - - name: deep-exit - - name: defer - arguments: - - ["call-chain", "loop"] - - name: dot-imports - - name: duplicated-imports - - name: early-return - arguments: - - "preserveScope" - - name: empty-block - - name: empty-lines - - name: error-naming - - name: error-return - - name: error-strings - - name: errorf - - name: exported - arguments: - - "sayRepetitiveInsteadOfStutters" - - name: flag-parameter - - name: identical-branches - - name: if-return - - name: import-shadowing - - name: increment-decrement - - name: indent-error-flow - arguments: - - "preserveScope" - - name: package-comments - - name: range - - name: range-val-in-closure - - name: range-val-address - - name: redefines-builtin-id - - name: string-format - arguments: - - - panic - - '/^[^\n]*$/' - - must not contain line breaks - - name: struct-tag - - name: superfluous-else - arguments: - - "preserveScope" - - name: time-equal - - name: unconditional-recursion - - name: unexported-return - - name: unhandled-error - arguments: - - "fmt.Fprint" - - "fmt.Fprintf" - - "fmt.Fprintln" - - "fmt.Print" - - "fmt.Printf" - - "fmt.Println" - - name: unnecessary-stmt - - name: useless-break - - name: var-declaration - - name: var-naming - arguments: - - ["ID"] # AllowList - - ["Otel", "Aws", "Gcp"] # DenyList - - name: waitgroup-by-value - testifylint: - enable-all: true - disable: - - float-compare - - go-require - - require-error +formatters: + enable: + - gofumpt + - goimports + - golines + settings: + goimports: + local-prefixes: + - go.opentelemetry.io + golines: + max-len: 120 + exclusions: + generated: lax + paths: + - third_party$ + - builtin$ + - examples$ diff --git a/vendor/go.opentelemetry.io/otel/CHANGELOG.md b/vendor/go.opentelemetry.io/otel/CHANGELOG.md index c076db28..648e4aba 100644 --- a/vendor/go.opentelemetry.io/otel/CHANGELOG.md +++ b/vendor/go.opentelemetry.io/otel/CHANGELOG.md @@ -11,6 +11,57 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm +## [1.36.0/0.58.0/0.12.0] 2025-05-20 + +### Added + +- Add exponential histogram support in `go.opentelemetry.io/otel/exporters/prometheus`. (#6421) +- The `go.opentelemetry.io/otel/semconv/v1.31.0` package. + The package contains semantic conventions from the `v1.31.0` version of the OpenTelemetry Semantic Conventions. + See the [migration documentation](./semconv/v1.31.0/MIGRATION.md) for information on how to upgrade from `go.opentelemetry.io/otel/semconv/v1.30.0`. (#6479) +- Add `Recording`, `Scope`, and `Record` types in `go.opentelemetry.io/otel/log/logtest`. (#6507) +- Add `WithHTTPClient` option to configure the `http.Client` used by `go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp`. (#6751) +- Add `WithHTTPClient` option to configure the `http.Client` used by `go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp`. 
(#6752) +- Add `WithHTTPClient` option to configure the `http.Client` used by `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp`. (#6688) +- Add `ValuesGetter` in `go.opentelemetry.io/otel/propagation`, a `TextMapCarrier` that supports retrieving multiple values for a single key. (#5973) +- Add `Values` method to `HeaderCarrier` to implement the new `ValuesGetter` interface in `go.opentelemetry.io/otel/propagation`. (#5973) +- Update `Baggage` in `go.opentelemetry.io/otel/propagation` to retrieve multiple values for a key when the carrier implements `ValuesGetter`. (#5973) +- Add `AssertEqual` function in `go.opentelemetry.io/otel/log/logtest`. (#6662) +- The `go.opentelemetry.io/otel/semconv/v1.32.0` package. + The package contains semantic conventions from the `v1.32.0` version of the OpenTelemetry Semantic Conventions. + See the [migration documentation](./semconv/v1.32.0/MIGRATION.md) for information on how to upgrade from `go.opentelemetry.io/otel/semconv/v1.31.0`(#6782) +- Add `Transform` option in `go.opentelemetry.io/otel/log/logtest`. (#6794) +- Add `Desc` option in `go.opentelemetry.io/otel/log/logtest`. (#6796) + +### Removed + +- Drop support for [Go 1.22]. (#6381, #6418) +- Remove `Resource` field from `EnabledParameters` in `go.opentelemetry.io/otel/sdk/log`. (#6494) +- Remove `RecordFactory` type from `go.opentelemetry.io/otel/log/logtest`. (#6492) +- Remove `ScopeRecords`, `EmittedRecord`, and `RecordFactory` types from `go.opentelemetry.io/otel/log/logtest`. (#6507) +- Remove `AssertRecordEqual` function in `go.opentelemetry.io/otel/log/logtest`, use `AssertEqual` instead. (#6662) + +### Changed + +- ⚠️ Update `github.com/prometheus/client_golang` to `v1.21.1`, which changes the `NameValidationScheme` to `UTF8Validation`. + This allows metrics names to keep original delimiters (e.g. `.`), rather than replacing with underscores. + This can be reverted by setting `github.com/prometheus/common/model.NameValidationScheme` to `LegacyValidation` in `github.com/prometheus/common/model`. (#6433) +- Initialize map with `len(keys)` in `NewAllowKeysFilter` and `NewDenyKeysFilter` to avoid unnecessary allocations in `go.opentelemetry.io/otel/attribute`. (#6455) +- `go.opentelemetry.io/otel/log/logtest` is now a separate Go module. (#6465) +- `go.opentelemetry.io/otel/sdk/log/logtest` is now a separate Go module. (#6466) +- `Recorder` in `go.opentelemetry.io/otel/log/logtest` no longer separately stores records emitted by loggers with the same instrumentation scope. (#6507) +- Improve performance of `BatchProcessor` in `go.opentelemetry.io/otel/sdk/log` by not exporting when exporter cannot accept more. (#6569, #6641) + +### Deprecated + +- Deprecate support for `model.LegacyValidation` for `go.opentelemetry.io/otel/exporters/prometheus`. (#6449) + +### Fixes + +- Stop percent encoding header environment variables in `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc` and `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp`. (#6392) +- Ensure the `noopSpan.tracerProvider` method is not inlined in `go.opentelemetry.io/otel/trace` so the `go.opentelemetry.io/auto` instrumentation can instrument non-recording spans. (#6456) +- Use a `sync.Pool` instead of allocating `metricdata.ResourceMetrics` in `go.opentelemetry.io/otel/exporters/prometheus`. (#6472) + ## [1.35.0/0.57.0/0.11.0] 2025-03-05 This release is the last to support [Go 1.22]. @@ -3237,7 +3288,8 @@ It contains api and sdk for trace and meter. - CircleCI build CI manifest files. 
- CODEOWNERS file to track owners of this project. -[Unreleased]: https://github.com/open-telemetry/opentelemetry-go/compare/v1.35.0...HEAD +[Unreleased]: https://github.com/open-telemetry/opentelemetry-go/compare/v1.36.0...HEAD +[1.36.0/0.58.0/0.12.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.36.0 [1.35.0/0.57.0/0.11.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.35.0 [1.34.0/0.56.0/0.10.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.34.0 [1.33.0/0.55.0/0.9.0/0.0.12]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.33.0 diff --git a/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md b/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md index 7b8af585..1902dac0 100644 --- a/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md +++ b/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md @@ -643,6 +643,7 @@ should be canceled. ### Triagers +- [Alex Kats](https://github.com/akats7), Capital One - [Cheng-Zhen Yang](https://github.com/scorpionknifes), Independent ### Approvers diff --git a/vendor/go.opentelemetry.io/otel/Makefile b/vendor/go.opentelemetry.io/otel/Makefile index 226410d7..62a56f4d 100644 --- a/vendor/go.opentelemetry.io/otel/Makefile +++ b/vendor/go.opentelemetry.io/otel/Makefile @@ -43,8 +43,11 @@ $(TOOLS)/crosslink: PACKAGE=go.opentelemetry.io/build-tools/crosslink SEMCONVKIT = $(TOOLS)/semconvkit $(TOOLS)/semconvkit: PACKAGE=go.opentelemetry.io/otel/$(TOOLS_MOD_DIR)/semconvkit +VERIFYREADMES = $(TOOLS)/verifyreadmes +$(TOOLS)/verifyreadmes: PACKAGE=go.opentelemetry.io/otel/$(TOOLS_MOD_DIR)/verifyreadmes + GOLANGCI_LINT = $(TOOLS)/golangci-lint -$(TOOLS)/golangci-lint: PACKAGE=github.com/golangci/golangci-lint/cmd/golangci-lint +$(TOOLS)/golangci-lint: PACKAGE=github.com/golangci/golangci-lint/v2/cmd/golangci-lint MISSPELL = $(TOOLS)/misspell $(TOOLS)/misspell: PACKAGE=github.com/client9/misspell/cmd/misspell @@ -68,7 +71,7 @@ GOVULNCHECK = $(TOOLS)/govulncheck $(TOOLS)/govulncheck: PACKAGE=golang.org/x/vuln/cmd/govulncheck .PHONY: tools -tools: $(CROSSLINK) $(GOLANGCI_LINT) $(MISSPELL) $(GOCOVMERGE) $(STRINGER) $(PORTO) $(SEMCONVGEN) $(MULTIMOD) $(SEMCONVKIT) $(GOTMPL) $(GORELEASE) +tools: $(CROSSLINK) $(GOLANGCI_LINT) $(MISSPELL) $(GOCOVMERGE) $(STRINGER) $(PORTO) $(SEMCONVGEN) $(VERIFYREADMES) $(MULTIMOD) $(SEMCONVKIT) $(GOTMPL) $(GORELEASE) # Virtualized python tools via docker @@ -213,11 +216,8 @@ go-mod-tidy/%: crosslink && cd $(DIR) \ && $(GO) mod tidy -compat=1.21 -.PHONY: lint-modules -lint-modules: go-mod-tidy - .PHONY: lint -lint: misspell lint-modules golangci-lint govulncheck +lint: misspell go-mod-tidy golangci-lint govulncheck .PHONY: vanity-import-check vanity-import-check: $(PORTO) @@ -319,10 +319,11 @@ add-tags: verify-mods @[ "${MODSET}" ] || ( echo ">> env var MODSET is not set"; exit 1 ) $(MULTIMOD) tag -m ${MODSET} -c ${COMMIT} +MARKDOWNIMAGE := $(shell awk '$$4=="markdown" {print $$2}' $(DEPENDENCIES_DOCKERFILE)) .PHONY: lint-markdown lint-markdown: - docker run -v "$(CURDIR):$(WORKDIR)" avtodev/markdown-lint:v1 -c $(WORKDIR)/.markdownlint.yaml $(WORKDIR)/**/*.md + docker run --rm -u $(DOCKER_USER) -v "$(CURDIR):$(WORKDIR)" $(MARKDOWNIMAGE) -c $(WORKDIR)/.markdownlint.yaml $(WORKDIR)/**/*.md .PHONY: verify-readmes -verify-readmes: - ./verify_readmes.sh +verify-readmes: $(VERIFYREADMES) + $(VERIFYREADMES) diff --git a/vendor/go.opentelemetry.io/otel/README.md b/vendor/go.opentelemetry.io/otel/README.md index 8421cd7e..b6007881 100644 --- a/vendor/go.opentelemetry.io/otel/README.md 
+++ b/vendor/go.opentelemetry.io/otel/README.md @@ -6,6 +6,7 @@ [![Go Report Card](https://goreportcard.com/badge/go.opentelemetry.io/otel)](https://goreportcard.com/report/go.opentelemetry.io/otel) [![OpenSSF Scorecard](https://api.scorecard.dev/projects/github.com/open-telemetry/opentelemetry-go/badge)](https://scorecard.dev/viewer/?uri=github.com/open-telemetry/opentelemetry-go) [![OpenSSF Best Practices](https://www.bestpractices.dev/projects/9996/badge)](https://www.bestpractices.dev/projects/9996) +[![Fuzzing Status](https://oss-fuzz-build-logs.storage.googleapis.com/badges/opentelemetry-go.svg)](https://issues.oss-fuzz.com/issues?q=project:opentelemetry-go) [![Slack](https://img.shields.io/badge/slack-@cncf/otel--go-brightgreen.svg?logo=slack)](https://cloud-native.slack.com/archives/C01NPAXACKT) OpenTelemetry-Go is the [Go](https://golang.org/) implementation of [OpenTelemetry](https://opentelemetry.io/). @@ -53,25 +54,18 @@ Currently, this project supports the following environments. |----------|------------|--------------| | Ubuntu | 1.24 | amd64 | | Ubuntu | 1.23 | amd64 | -| Ubuntu | 1.22 | amd64 | | Ubuntu | 1.24 | 386 | | Ubuntu | 1.23 | 386 | -| Ubuntu | 1.22 | 386 | | Ubuntu | 1.24 | arm64 | | Ubuntu | 1.23 | arm64 | -| Ubuntu | 1.22 | arm64 | | macOS 13 | 1.24 | amd64 | | macOS 13 | 1.23 | amd64 | -| macOS 13 | 1.22 | amd64 | | macOS | 1.24 | arm64 | | macOS | 1.23 | arm64 | -| macOS | 1.22 | arm64 | | Windows | 1.24 | amd64 | | Windows | 1.23 | amd64 | -| Windows | 1.22 | amd64 | | Windows | 1.24 | 386 | | Windows | 1.23 | 386 | -| Windows | 1.22 | 386 | While this project should work for other systems, no compatibility guarantees are made for those systems currently. diff --git a/vendor/go.opentelemetry.io/otel/RELEASING.md b/vendor/go.opentelemetry.io/otel/RELEASING.md index 1e13ae54..7c1a9119 100644 --- a/vendor/go.opentelemetry.io/otel/RELEASING.md +++ b/vendor/go.opentelemetry.io/otel/RELEASING.md @@ -1,5 +1,9 @@ # Release Process +## Create a `Version Release` issue + +Create a `Version Release` issue to track the release process. + ## Semantic Convention Generation New versions of the [OpenTelemetry Semantic Conventions] mean new versions of the `semconv` package need to be generated. @@ -123,6 +127,16 @@ Importantly, bump any package versions referenced to be the latest one you just [Go instrumentation documentation]: https://opentelemetry.io/docs/languages/go/ [content/en/docs/languages/go]: https://github.com/open-telemetry/opentelemetry.io/tree/main/content/en/docs/languages/go +### Close the milestone + +Once a release is made, ensure all issues that were fixed and PRs that were merged as part of this release are added to the corresponding milestone. +This helps track what changes were included in each release. + +- To find issues that haven't been included in a milestone, use this [GitHub search query](https://github.com/open-telemetry/opentelemetry-go/issues?q=is%3Aissue%20no%3Amilestone%20is%3Aclosed%20sort%3Aupdated-desc%20reason%3Acompleted%20-label%3AStale%20linked%3Apr) +- To find merged PRs that haven't been included in a milestone, use this [GitHub search query](https://github.com/open-telemetry/opentelemetry-go/pulls?q=is%3Apr+no%3Amilestone+is%3Amerged). + +Once all related issues and PRs have been added to the milestone, close the milestone. 
+ ### Demo Repository Bump the dependencies in the following Go services: @@ -130,3 +144,7 @@ Bump the dependencies in the following Go services: - [`accounting`](https://github.com/open-telemetry/opentelemetry-demo/tree/main/src/accounting) - [`checkoutservice`](https://github.com/open-telemetry/opentelemetry-demo/tree/main/src/checkout) - [`productcatalogservice`](https://github.com/open-telemetry/opentelemetry-demo/tree/main/src/product-catalog) + +### Close the `Version Release` issue + +Once the todo list in the `Version Release` issue is complete, close the issue. diff --git a/vendor/go.opentelemetry.io/otel/attribute/filter.go b/vendor/go.opentelemetry.io/otel/attribute/filter.go index be9cd922..3eeaa5d4 100644 --- a/vendor/go.opentelemetry.io/otel/attribute/filter.go +++ b/vendor/go.opentelemetry.io/otel/attribute/filter.go @@ -19,7 +19,7 @@ func NewAllowKeysFilter(keys ...Key) Filter { return func(kv KeyValue) bool { return false } } - allowed := make(map[Key]struct{}) + allowed := make(map[Key]struct{}, len(keys)) for _, k := range keys { allowed[k] = struct{}{} } @@ -38,7 +38,7 @@ func NewDenyKeysFilter(keys ...Key) Filter { return func(kv KeyValue) bool { return true } } - forbid := make(map[Key]struct{}) + forbid := make(map[Key]struct{}, len(keys)) for _, k := range keys { forbid[k] = struct{}{} } diff --git a/vendor/go.opentelemetry.io/otel/internal/attribute/attribute.go b/vendor/go.opentelemetry.io/otel/attribute/internal/attribute.go similarity index 97% rename from vendor/go.opentelemetry.io/otel/internal/attribute/attribute.go rename to vendor/go.opentelemetry.io/otel/attribute/internal/attribute.go index 691d96c7..b76d2bbf 100644 --- a/vendor/go.opentelemetry.io/otel/internal/attribute/attribute.go +++ b/vendor/go.opentelemetry.io/otel/attribute/internal/attribute.go @@ -5,7 +5,7 @@ Package attribute provide several helper functions for some commonly used logic of processing attributes. */ -package attribute // import "go.opentelemetry.io/otel/internal/attribute" +package attribute // import "go.opentelemetry.io/otel/attribute/internal" import ( "reflect" diff --git a/vendor/go.opentelemetry.io/otel/attribute/rawhelpers.go b/vendor/go.opentelemetry.io/otel/attribute/rawhelpers.go new file mode 100644 index 00000000..5791c6e7 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/attribute/rawhelpers.go @@ -0,0 +1,37 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package attribute // import "go.opentelemetry.io/otel/attribute" + +import ( + "math" +) + +func boolToRaw(b bool) uint64 { // nolint:revive // b is not a control flag. + if b { + return 1 + } + return 0 +} + +func rawToBool(r uint64) bool { + return r != 0 +} + +func int64ToRaw(i int64) uint64 { + // Assumes original was a valid int64 (overflow not checked). + return uint64(i) // nolint: gosec +} + +func rawToInt64(r uint64) int64 { + // Assumes original was a valid int64 (overflow not checked). 
+ return int64(r) // nolint: gosec +} + +func float64ToRaw(f float64) uint64 { + return math.Float64bits(f) +} + +func rawToFloat64(r uint64) float64 { + return math.Float64frombits(r) +} diff --git a/vendor/go.opentelemetry.io/otel/attribute/value.go b/vendor/go.opentelemetry.io/otel/attribute/value.go index 9ea0ecbb..817eecac 100644 --- a/vendor/go.opentelemetry.io/otel/attribute/value.go +++ b/vendor/go.opentelemetry.io/otel/attribute/value.go @@ -9,8 +9,7 @@ import ( "reflect" "strconv" - "go.opentelemetry.io/otel/internal" - "go.opentelemetry.io/otel/internal/attribute" + attribute "go.opentelemetry.io/otel/attribute/internal" ) //go:generate stringer -type=Type @@ -51,7 +50,7 @@ const ( func BoolValue(v bool) Value { return Value{ vtype: BOOL, - numeric: internal.BoolToRaw(v), + numeric: boolToRaw(v), } } @@ -82,7 +81,7 @@ func IntSliceValue(v []int) Value { func Int64Value(v int64) Value { return Value{ vtype: INT64, - numeric: internal.Int64ToRaw(v), + numeric: int64ToRaw(v), } } @@ -95,7 +94,7 @@ func Int64SliceValue(v []int64) Value { func Float64Value(v float64) Value { return Value{ vtype: FLOAT64, - numeric: internal.Float64ToRaw(v), + numeric: float64ToRaw(v), } } @@ -125,7 +124,7 @@ func (v Value) Type() Type { // AsBool returns the bool value. Make sure that the Value's type is // BOOL. func (v Value) AsBool() bool { - return internal.RawToBool(v.numeric) + return rawToBool(v.numeric) } // AsBoolSlice returns the []bool value. Make sure that the Value's type is @@ -144,7 +143,7 @@ func (v Value) asBoolSlice() []bool { // AsInt64 returns the int64 value. Make sure that the Value's type is // INT64. func (v Value) AsInt64() int64 { - return internal.RawToInt64(v.numeric) + return rawToInt64(v.numeric) } // AsInt64Slice returns the []int64 value. Make sure that the Value's type is @@ -163,7 +162,7 @@ func (v Value) asInt64Slice() []int64 { // AsFloat64 returns the float64 value. Make sure that the Value's // type is FLOAT64. func (v Value) AsFloat64() float64 { - return internal.RawToFloat64(v.numeric) + return rawToFloat64(v.numeric) } // AsFloat64Slice returns the []float64 value. Make sure that the Value's type is diff --git a/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile b/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile index e4c4a753..51fb76b3 100644 --- a/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile +++ b/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile @@ -1,3 +1,4 @@ # This is a renovate-friendly source of Docker images. -FROM python:3.13.2-slim-bullseye@sha256:31b581c8218e1f3c58672481b3b7dba8e898852866b408c6a984c22832523935 AS python -FROM otel/weaver:v0.13.2@sha256:ae7346b992e477f629ea327e0979e8a416a97f7956ab1f7e95ac1f44edf1a893 AS weaver +FROM python:3.13.3-slim-bullseye@sha256:9e3f9243e06fd68eb9519074b49878eda20ad39a855fac51aaffb741de20726e AS python +FROM otel/weaver:v0.15.0@sha256:1cf1c72eaed57dad813c2e359133b8a15bd4facf305aae5b13bdca6d3eccff56 AS weaver +FROM avtodev/markdown-lint:v1@sha256:6aeedc2f49138ce7a1cd0adffc1b1c0321b841dc2102408967d9301c031949ee AS markdown diff --git a/vendor/go.opentelemetry.io/otel/get_main_pkgs.sh b/vendor/go.opentelemetry.io/otel/get_main_pkgs.sh deleted file mode 100644 index 93e80ea3..00000000 --- a/vendor/go.opentelemetry.io/otel/get_main_pkgs.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env bash - -# Copyright The OpenTelemetry Authors -# SPDX-License-Identifier: Apache-2.0 - -set -euo pipefail - -top_dir='.' 
-if [[ $# -gt 0 ]]; then - top_dir="${1}" -fi - -p=$(pwd) -mod_dirs=() - -# Note `mapfile` does not exist in older bash versions: -# https://stackoverflow.com/questions/41475261/need-alternative-to-readarray-mapfile-for-script-on-older-version-of-bash - -while IFS= read -r line; do - mod_dirs+=("$line") -done < <(find "${top_dir}" -type f -name 'go.mod' -exec dirname {} \; | sort) - -for mod_dir in "${mod_dirs[@]}"; do - cd "${mod_dir}" - - while IFS= read -r line; do - echo ".${line#${p}}" - done < <(go list --find -f '{{.Name}}|{{.Dir}}' ./... | grep '^main|' | cut -f 2- -d '|') - cd "${p}" -done diff --git a/vendor/go.opentelemetry.io/otel/internal/gen.go b/vendor/go.opentelemetry.io/otel/internal/gen.go deleted file mode 100644 index 4259f032..00000000 --- a/vendor/go.opentelemetry.io/otel/internal/gen.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright The OpenTelemetry Authors -// SPDX-License-Identifier: Apache-2.0 - -package internal // import "go.opentelemetry.io/otel/internal" - -//go:generate gotmpl --body=./shared/matchers/expectation.go.tmpl "--data={}" --out=matchers/expectation.go -//go:generate gotmpl --body=./shared/matchers/expecter.go.tmpl "--data={}" --out=matchers/expecter.go -//go:generate gotmpl --body=./shared/matchers/temporal_matcher.go.tmpl "--data={}" --out=matchers/temporal_matcher.go - -//go:generate gotmpl --body=./shared/internaltest/alignment.go.tmpl "--data={}" --out=internaltest/alignment.go -//go:generate gotmpl --body=./shared/internaltest/env.go.tmpl "--data={}" --out=internaltest/env.go -//go:generate gotmpl --body=./shared/internaltest/env_test.go.tmpl "--data={}" --out=internaltest/env_test.go -//go:generate gotmpl --body=./shared/internaltest/errors.go.tmpl "--data={}" --out=internaltest/errors.go -//go:generate gotmpl --body=./shared/internaltest/harness.go.tmpl "--data={\"matchersImportPath\": \"go.opentelemetry.io/otel/internal/matchers\"}" --out=internaltest/harness.go -//go:generate gotmpl --body=./shared/internaltest/text_map_carrier.go.tmpl "--data={}" --out=internaltest/text_map_carrier.go -//go:generate gotmpl --body=./shared/internaltest/text_map_carrier_test.go.tmpl "--data={}" --out=internaltest/text_map_carrier_test.go -//go:generate gotmpl --body=./shared/internaltest/text_map_propagator.go.tmpl "--data={}" --out=internaltest/text_map_propagator.go -//go:generate gotmpl --body=./shared/internaltest/text_map_propagator_test.go.tmpl "--data={}" --out=internaltest/text_map_propagator_test.go diff --git a/vendor/go.opentelemetry.io/otel/internal/global/handler.go b/vendor/go.opentelemetry.io/otel/internal/global/handler.go index c657ff8e..2e47b296 100644 --- a/vendor/go.opentelemetry.io/otel/internal/global/handler.go +++ b/vendor/go.opentelemetry.io/otel/internal/global/handler.go @@ -1,6 +1,7 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 +// Package global provides the OpenTelemetry global API. 
package global // import "go.opentelemetry.io/otel/internal/global" import ( diff --git a/vendor/go.opentelemetry.io/otel/internal/global/meter.go b/vendor/go.opentelemetry.io/otel/internal/global/meter.go index a6acd8dc..adb37b5b 100644 --- a/vendor/go.opentelemetry.io/otel/internal/global/meter.go +++ b/vendor/go.opentelemetry.io/otel/internal/global/meter.go @@ -169,7 +169,10 @@ func (m *meter) Int64Counter(name string, options ...metric.Int64CounterOption) return i, nil } -func (m *meter) Int64UpDownCounter(name string, options ...metric.Int64UpDownCounterOption) (metric.Int64UpDownCounter, error) { +func (m *meter) Int64UpDownCounter( + name string, + options ...metric.Int64UpDownCounterOption, +) (metric.Int64UpDownCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -238,7 +241,10 @@ func (m *meter) Int64Gauge(name string, options ...metric.Int64GaugeOption) (met return i, nil } -func (m *meter) Int64ObservableCounter(name string, options ...metric.Int64ObservableCounterOption) (metric.Int64ObservableCounter, error) { +func (m *meter) Int64ObservableCounter( + name string, + options ...metric.Int64ObservableCounterOption, +) (metric.Int64ObservableCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -261,7 +267,10 @@ func (m *meter) Int64ObservableCounter(name string, options ...metric.Int64Obser return i, nil } -func (m *meter) Int64ObservableUpDownCounter(name string, options ...metric.Int64ObservableUpDownCounterOption) (metric.Int64ObservableUpDownCounter, error) { +func (m *meter) Int64ObservableUpDownCounter( + name string, + options ...metric.Int64ObservableUpDownCounterOption, +) (metric.Int64ObservableUpDownCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -284,7 +293,10 @@ func (m *meter) Int64ObservableUpDownCounter(name string, options ...metric.Int6 return i, nil } -func (m *meter) Int64ObservableGauge(name string, options ...metric.Int64ObservableGaugeOption) (metric.Int64ObservableGauge, error) { +func (m *meter) Int64ObservableGauge( + name string, + options ...metric.Int64ObservableGaugeOption, +) (metric.Int64ObservableGauge, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -330,7 +342,10 @@ func (m *meter) Float64Counter(name string, options ...metric.Float64CounterOpti return i, nil } -func (m *meter) Float64UpDownCounter(name string, options ...metric.Float64UpDownCounterOption) (metric.Float64UpDownCounter, error) { +func (m *meter) Float64UpDownCounter( + name string, + options ...metric.Float64UpDownCounterOption, +) (metric.Float64UpDownCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -353,7 +368,10 @@ func (m *meter) Float64UpDownCounter(name string, options ...metric.Float64UpDow return i, nil } -func (m *meter) Float64Histogram(name string, options ...metric.Float64HistogramOption) (metric.Float64Histogram, error) { +func (m *meter) Float64Histogram( + name string, + options ...metric.Float64HistogramOption, +) (metric.Float64Histogram, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -399,7 +417,10 @@ func (m *meter) Float64Gauge(name string, options ...metric.Float64GaugeOption) return i, nil } -func (m *meter) Float64ObservableCounter(name string, options ...metric.Float64ObservableCounterOption) (metric.Float64ObservableCounter, error) { +func (m *meter) Float64ObservableCounter( + name string, + options ...metric.Float64ObservableCounterOption, +) (metric.Float64ObservableCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -422,7 +443,10 @@ func (m *meter) Float64ObservableCounter(name string, options ...metric.Float64O return i, nil } 
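The hunks around this point only re-wrap long constructor signatures on the global `meter` to fit the new 120-column `golines` limit; behaviour is unchanged. As a reminder of how these constructors are reached from application code, here is a minimal sketch — the meter and instrument names are illustrative and not taken from this patch:

```go
package main

import (
	"context"

	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/metric"
)

func recordRequest(ctx context.Context) error {
	// otel.Meter delegates to the global meter whose methods are being
	// re-wrapped in the diff above until a real SDK MeterProvider is set.
	meter := otel.Meter("example/meter")

	reqCounter, err := meter.Int64Counter(
		"http.server.requests",
		metric.WithDescription("Number of handled requests."),
		metric.WithUnit("{request}"),
	)
	if err != nil {
		return err
	}

	inflight, err := meter.Int64UpDownCounter("http.server.inflight")
	if err != nil {
		return err
	}

	inflight.Add(ctx, 1)
	defer inflight.Add(ctx, -1)
	reqCounter.Add(ctx, 1)
	return nil
}

func main() {
	_ = recordRequest(context.Background())
}
```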
-func (m *meter) Float64ObservableUpDownCounter(name string, options ...metric.Float64ObservableUpDownCounterOption) (metric.Float64ObservableUpDownCounter, error) { +func (m *meter) Float64ObservableUpDownCounter( + name string, + options ...metric.Float64ObservableUpDownCounterOption, +) (metric.Float64ObservableUpDownCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -445,7 +469,10 @@ func (m *meter) Float64ObservableUpDownCounter(name string, options ...metric.Fl return i, nil } -func (m *meter) Float64ObservableGauge(name string, options ...metric.Float64ObservableGaugeOption) (metric.Float64ObservableGauge, error) { +func (m *meter) Float64ObservableGauge( + name string, + options ...metric.Float64ObservableGaugeOption, +) (metric.Float64ObservableGauge, error) { m.mtx.Lock() defer m.mtx.Unlock() diff --git a/vendor/go.opentelemetry.io/otel/internal/global/trace.go b/vendor/go.opentelemetry.io/otel/internal/global/trace.go index 8982aa0d..49e4ac4f 100644 --- a/vendor/go.opentelemetry.io/otel/internal/global/trace.go +++ b/vendor/go.opentelemetry.io/otel/internal/global/trace.go @@ -158,7 +158,18 @@ func (t *tracer) Start(ctx context.Context, name string, opts ...trace.SpanStart // a nonRecordingSpan by default. var autoInstEnabled = new(bool) -func (t *tracer) newSpan(ctx context.Context, autoSpan *bool, name string, opts []trace.SpanStartOption) (context.Context, trace.Span) { +// newSpan is called by tracer.Start so auto-instrumentation can attach an eBPF +// uprobe to this code. +// +// "noinline" pragma prevents the method from ever being inlined. +// +//go:noinline +func (t *tracer) newSpan( + ctx context.Context, + autoSpan *bool, + name string, + opts []trace.SpanStartOption, +) (context.Context, trace.Span) { // autoInstEnabled is passed to newSpan via the autoSpan parameter. This is // so the auto-instrumentation can define a uprobe for (*t).newSpan and be // provided with the address of the bool autoInstEnabled points to. It diff --git a/vendor/go.opentelemetry.io/otel/internal/rawhelpers.go b/vendor/go.opentelemetry.io/otel/internal/rawhelpers.go deleted file mode 100644 index b2fe3e41..00000000 --- a/vendor/go.opentelemetry.io/otel/internal/rawhelpers.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright The OpenTelemetry Authors -// SPDX-License-Identifier: Apache-2.0 - -package internal // import "go.opentelemetry.io/otel/internal" - -import ( - "math" - "unsafe" -) - -func BoolToRaw(b bool) uint64 { // nolint:revive // b is not a control flag. - if b { - return 1 - } - return 0 -} - -func RawToBool(r uint64) bool { - return r != 0 -} - -func Int64ToRaw(i int64) uint64 { - // Assumes original was a valid int64 (overflow not checked). - return uint64(i) // nolint: gosec -} - -func RawToInt64(r uint64) int64 { - // Assumes original was a valid int64 (overflow not checked). - return int64(r) // nolint: gosec -} - -func Float64ToRaw(f float64) uint64 { - return math.Float64bits(f) -} - -func RawToFloat64(r uint64) float64 { - return math.Float64frombits(r) -} - -func RawPtrToFloat64Ptr(r *uint64) *float64 { - // Assumes original was a valid *float64 (overflow not checked). - return (*float64)(unsafe.Pointer(r)) // nolint: gosec -} - -func RawPtrToInt64Ptr(r *uint64) *int64 { - // Assumes original was a valid *int64 (overflow not checked). 
- return (*int64)(unsafe.Pointer(r)) // nolint: gosec -} diff --git a/vendor/go.opentelemetry.io/otel/metric/asyncfloat64.go b/vendor/go.opentelemetry.io/otel/metric/asyncfloat64.go index f8435d8f..b7fc973a 100644 --- a/vendor/go.opentelemetry.io/otel/metric/asyncfloat64.go +++ b/vendor/go.opentelemetry.io/otel/metric/asyncfloat64.go @@ -106,7 +106,9 @@ type Float64ObservableUpDownCounterConfig struct { // NewFloat64ObservableUpDownCounterConfig returns a new // [Float64ObservableUpDownCounterConfig] with all opts applied. -func NewFloat64ObservableUpDownCounterConfig(opts ...Float64ObservableUpDownCounterOption) Float64ObservableUpDownCounterConfig { +func NewFloat64ObservableUpDownCounterConfig( + opts ...Float64ObservableUpDownCounterOption, +) Float64ObservableUpDownCounterConfig { var config Float64ObservableUpDownCounterConfig for _, o := range opts { config = o.applyFloat64ObservableUpDownCounter(config) @@ -239,12 +241,16 @@ type float64CallbackOpt struct { cback Float64Callback } -func (o float64CallbackOpt) applyFloat64ObservableCounter(cfg Float64ObservableCounterConfig) Float64ObservableCounterConfig { +func (o float64CallbackOpt) applyFloat64ObservableCounter( + cfg Float64ObservableCounterConfig, +) Float64ObservableCounterConfig { cfg.callbacks = append(cfg.callbacks, o.cback) return cfg } -func (o float64CallbackOpt) applyFloat64ObservableUpDownCounter(cfg Float64ObservableUpDownCounterConfig) Float64ObservableUpDownCounterConfig { +func (o float64CallbackOpt) applyFloat64ObservableUpDownCounter( + cfg Float64ObservableUpDownCounterConfig, +) Float64ObservableUpDownCounterConfig { cfg.callbacks = append(cfg.callbacks, o.cback) return cfg } diff --git a/vendor/go.opentelemetry.io/otel/metric/asyncint64.go b/vendor/go.opentelemetry.io/otel/metric/asyncint64.go index e079aaef..4404b71a 100644 --- a/vendor/go.opentelemetry.io/otel/metric/asyncint64.go +++ b/vendor/go.opentelemetry.io/otel/metric/asyncint64.go @@ -105,7 +105,9 @@ type Int64ObservableUpDownCounterConfig struct { // NewInt64ObservableUpDownCounterConfig returns a new // [Int64ObservableUpDownCounterConfig] with all opts applied. 
-func NewInt64ObservableUpDownCounterConfig(opts ...Int64ObservableUpDownCounterOption) Int64ObservableUpDownCounterConfig { +func NewInt64ObservableUpDownCounterConfig( + opts ...Int64ObservableUpDownCounterOption, +) Int64ObservableUpDownCounterConfig { var config Int64ObservableUpDownCounterConfig for _, o := range opts { config = o.applyInt64ObservableUpDownCounter(config) @@ -242,7 +244,9 @@ func (o int64CallbackOpt) applyInt64ObservableCounter(cfg Int64ObservableCounter return cfg } -func (o int64CallbackOpt) applyInt64ObservableUpDownCounter(cfg Int64ObservableUpDownCounterConfig) Int64ObservableUpDownCounterConfig { +func (o int64CallbackOpt) applyInt64ObservableUpDownCounter( + cfg Int64ObservableUpDownCounterConfig, +) Int64ObservableUpDownCounterConfig { cfg.callbacks = append(cfg.callbacks, o.cback) return cfg } diff --git a/vendor/go.opentelemetry.io/otel/metric/instrument.go b/vendor/go.opentelemetry.io/otel/metric/instrument.go index a535782e..9f48d5f1 100644 --- a/vendor/go.opentelemetry.io/otel/metric/instrument.go +++ b/vendor/go.opentelemetry.io/otel/metric/instrument.go @@ -63,7 +63,9 @@ func (o descOpt) applyFloat64ObservableCounter(c Float64ObservableCounterConfig) return c } -func (o descOpt) applyFloat64ObservableUpDownCounter(c Float64ObservableUpDownCounterConfig) Float64ObservableUpDownCounterConfig { +func (o descOpt) applyFloat64ObservableUpDownCounter( + c Float64ObservableUpDownCounterConfig, +) Float64ObservableUpDownCounterConfig { c.description = string(o) return c } @@ -98,7 +100,9 @@ func (o descOpt) applyInt64ObservableCounter(c Int64ObservableCounterConfig) Int return c } -func (o descOpt) applyInt64ObservableUpDownCounter(c Int64ObservableUpDownCounterConfig) Int64ObservableUpDownCounterConfig { +func (o descOpt) applyInt64ObservableUpDownCounter( + c Int64ObservableUpDownCounterConfig, +) Int64ObservableUpDownCounterConfig { c.description = string(o) return c } @@ -138,7 +142,9 @@ func (o unitOpt) applyFloat64ObservableCounter(c Float64ObservableCounterConfig) return c } -func (o unitOpt) applyFloat64ObservableUpDownCounter(c Float64ObservableUpDownCounterConfig) Float64ObservableUpDownCounterConfig { +func (o unitOpt) applyFloat64ObservableUpDownCounter( + c Float64ObservableUpDownCounterConfig, +) Float64ObservableUpDownCounterConfig { c.unit = string(o) return c } @@ -173,7 +179,9 @@ func (o unitOpt) applyInt64ObservableCounter(c Int64ObservableCounterConfig) Int return c } -func (o unitOpt) applyInt64ObservableUpDownCounter(c Int64ObservableUpDownCounterConfig) Int64ObservableUpDownCounterConfig { +func (o unitOpt) applyInt64ObservableUpDownCounter( + c Int64ObservableUpDownCounterConfig, +) Int64ObservableUpDownCounterConfig { c.unit = string(o) return c } diff --git a/vendor/go.opentelemetry.io/otel/metric/meter.go b/vendor/go.opentelemetry.io/otel/metric/meter.go index 14e08c24..fdd2a701 100644 --- a/vendor/go.opentelemetry.io/otel/metric/meter.go +++ b/vendor/go.opentelemetry.io/otel/metric/meter.go @@ -110,7 +110,10 @@ type Meter interface { // The name needs to conform to the OpenTelemetry instrument name syntax. // See the Instrument Name section of the package documentation for more // information. 
- Int64ObservableUpDownCounter(name string, options ...Int64ObservableUpDownCounterOption) (Int64ObservableUpDownCounter, error) + Int64ObservableUpDownCounter( + name string, + options ...Int64ObservableUpDownCounterOption, + ) (Int64ObservableUpDownCounter, error) // Int64ObservableGauge returns a new Int64ObservableGauge instrument // identified by name and configured with options. The instrument is used @@ -194,7 +197,10 @@ type Meter interface { // The name needs to conform to the OpenTelemetry instrument name syntax. // See the Instrument Name section of the package documentation for more // information. - Float64ObservableUpDownCounter(name string, options ...Float64ObservableUpDownCounterOption) (Float64ObservableUpDownCounter, error) + Float64ObservableUpDownCounter( + name string, + options ...Float64ObservableUpDownCounterOption, + ) (Float64ObservableUpDownCounter, error) // Float64ObservableGauge returns a new Float64ObservableGauge instrument // identified by name and configured with options. The instrument is used diff --git a/vendor/go.opentelemetry.io/otel/propagation/baggage.go b/vendor/go.opentelemetry.io/otel/propagation/baggage.go index 552263ba..ebda5026 100644 --- a/vendor/go.opentelemetry.io/otel/propagation/baggage.go +++ b/vendor/go.opentelemetry.io/otel/propagation/baggage.go @@ -28,7 +28,21 @@ func (b Baggage) Inject(ctx context.Context, carrier TextMapCarrier) { } // Extract returns a copy of parent with the baggage from the carrier added. +// If carrier implements [ValuesGetter] (e.g. [HeaderCarrier]), Values is invoked +// for multiple values extraction. Otherwise, Get is called. func (b Baggage) Extract(parent context.Context, carrier TextMapCarrier) context.Context { + if multiCarrier, ok := carrier.(ValuesGetter); ok { + return extractMultiBaggage(parent, multiCarrier) + } + return extractSingleBaggage(parent, carrier) +} + +// Fields returns the keys who's values are set with Inject. +func (b Baggage) Fields() []string { + return []string{baggageHeader} +} + +func extractSingleBaggage(parent context.Context, carrier TextMapCarrier) context.Context { bStr := carrier.Get(baggageHeader) if bStr == "" { return parent @@ -41,7 +55,23 @@ func (b Baggage) Extract(parent context.Context, carrier TextMapCarrier) context return baggage.ContextWithBaggage(parent, bag) } -// Fields returns the keys who's values are set with Inject. -func (b Baggage) Fields() []string { - return []string{baggageHeader} +func extractMultiBaggage(parent context.Context, carrier ValuesGetter) context.Context { + bVals := carrier.Values(baggageHeader) + if len(bVals) == 0 { + return parent + } + var members []baggage.Member + for _, bStr := range bVals { + currBag, err := baggage.Parse(bStr) + if err != nil { + continue + } + members = append(members, currBag.Members()...) + } + + b, err := baggage.New(members...) + if err != nil || b.Len() == 0 { + return parent + } + return baggage.ContextWithBaggage(parent, b) } diff --git a/vendor/go.opentelemetry.io/otel/propagation/propagation.go b/vendor/go.opentelemetry.io/otel/propagation/propagation.go index 8c8286aa..5c8c26ea 100644 --- a/vendor/go.opentelemetry.io/otel/propagation/propagation.go +++ b/vendor/go.opentelemetry.io/otel/propagation/propagation.go @@ -9,6 +9,7 @@ import ( ) // TextMapCarrier is the storage medium used by a TextMapPropagator. +// See ValuesGetter for how a TextMapCarrier can get multiple values for a key. 
type TextMapCarrier interface { // DO NOT CHANGE: any modification will not be backwards compatible and // must never be done outside of a new major release. @@ -29,6 +30,18 @@ type TextMapCarrier interface { // must never be done outside of a new major release. } +// ValuesGetter can return multiple values for a single key, +// with contrast to TextMapCarrier.Get which returns a single value. +type ValuesGetter interface { + // DO NOT CHANGE: any modification will not be backwards compatible and + // must never be done outside of a new major release. + + // Values returns all values associated with the passed key. + Values(key string) []string + // DO NOT CHANGE: any modification will not be backwards compatible and + // must never be done outside of a new major release. +} + // MapCarrier is a TextMapCarrier that uses a map held in memory as a storage // medium for propagated key-value pairs. type MapCarrier map[string]string @@ -55,14 +68,25 @@ func (c MapCarrier) Keys() []string { return keys } -// HeaderCarrier adapts http.Header to satisfy the TextMapCarrier interface. +// HeaderCarrier adapts http.Header to satisfy the TextMapCarrier and ValuesGetter interfaces. type HeaderCarrier http.Header -// Get returns the value associated with the passed key. +// Compile time check that HeaderCarrier implements ValuesGetter. +var _ TextMapCarrier = HeaderCarrier{} + +// Compile time check that HeaderCarrier implements TextMapCarrier. +var _ ValuesGetter = HeaderCarrier{} + +// Get returns the first value associated with the passed key. func (hc HeaderCarrier) Get(key string) string { return http.Header(hc).Get(key) } +// Values returns all values associated with the passed key. +func (hc HeaderCarrier) Values(key string) []string { + return http.Header(hc).Values(key) +} + // Set stores the key-value pair. func (hc HeaderCarrier) Set(key string, value string) { http.Header(hc).Set(key, value) @@ -89,6 +113,8 @@ type TextMapPropagator interface { // must never be done outside of a new major release. // Extract reads cross-cutting concerns from the carrier into a Context. + // Implementations may check if the carrier implements ValuesGetter, + // to support extraction of multiple values per key. Extract(ctx context.Context, carrier TextMapCarrier) context.Context // DO NOT CHANGE: any modification will not be backwards compatible and // must never be done outside of a new major release. 
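The new `ValuesGetter` interface and the `HeaderCarrier.Values` method appear here only as diff hunks, so a short sketch of the observable difference may help; the header contents below are made up for illustration:

```go
package main

import (
	"context"
	"fmt"
	"net/http"

	"go.opentelemetry.io/otel/propagation"
)

func main() {
	// Two Baggage headers on one request, as the W3C Baggage spec allows.
	h := http.Header{}
	h.Add("Baggage", "userId=alice")
	h.Add("Baggage", "tenant=acme")

	carrier := propagation.HeaderCarrier(h)

	// Get returns only the first value; the new Values method returns all of
	// them, which is what Baggage.Extract now uses when the carrier
	// implements ValuesGetter.
	fmt.Println(carrier.Get("Baggage"))    // first value only
	fmt.Println(carrier.Values("Baggage")) // both values

	ctx := propagation.Baggage{}.Extract(context.Background(), carrier)
	_ = ctx // ctx now carries members from both Baggage headers.
}
```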
diff --git a/vendor/go.opentelemetry.io/otel/renovate.json b/vendor/go.opentelemetry.io/otel/renovate.json index a6fa353f..fa5acf2d 100644 --- a/vendor/go.opentelemetry.io/otel/renovate.json +++ b/vendor/go.opentelemetry.io/otel/renovate.json @@ -1,7 +1,8 @@ { "$schema": "https://docs.renovatebot.com/renovate-schema.json", "extends": [ - "config:best-practices" + "config:best-practices", + "helpers:pinGitHubActionDigestsToSemver" ], "ignorePaths": [], "labels": ["Skip Changelog", "dependencies"], @@ -25,6 +26,10 @@ { "matchPackageNames": ["golang.org/x/**"], "groupName": "golang.org/x" + }, + { + "matchPackageNames": ["go.opentelemetry.io/otel/sdk/log/logtest"], + "enabled": false } ] } diff --git a/vendor/go.opentelemetry.io/otel/semconv/internal/v2/http.go b/vendor/go.opentelemetry.io/otel/semconv/internal/v2/http.go index 09e094de..3709ef09 100644 --- a/vendor/go.opentelemetry.io/otel/semconv/internal/v2/http.go +++ b/vendor/go.opentelemetry.io/otel/semconv/internal/v2/http.go @@ -1,6 +1,7 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 +// Package internal provides common semconv functionality. package internal // import "go.opentelemetry.io/otel/semconv/internal/v2" import ( diff --git a/vendor/go.opentelemetry.io/otel/trace/auto.go b/vendor/go.opentelemetry.io/otel/trace/auto.go index 7e291002..d90af8f6 100644 --- a/vendor/go.opentelemetry.io/otel/trace/auto.go +++ b/vendor/go.opentelemetry.io/otel/trace/auto.go @@ -57,14 +57,15 @@ type autoTracer struct { var _ Tracer = autoTracer{} func (t autoTracer) Start(ctx context.Context, name string, opts ...SpanStartOption) (context.Context, Span) { - var psc SpanContext + var psc, sc SpanContext sampled := true span := new(autoSpan) // Ask eBPF for sampling decision and span context info. - t.start(ctx, span, &psc, &sampled, &span.spanContext) + t.start(ctx, span, &psc, &sampled, &sc) span.sampled.Store(sampled) + span.spanContext = sc ctx = ContextWithSpan(ctx, span) diff --git a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/span.go b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/span.go index 3c5e1cdb..e7ca62c6 100644 --- a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/span.go +++ b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/span.go @@ -251,13 +251,20 @@ func (s *Span) UnmarshalJSON(data []byte) error { type SpanFlags int32 const ( + // SpanFlagsTraceFlagsMask is a mask for trace-flags. + // // Bits 0-7 are used for trace flags. SpanFlagsTraceFlagsMask SpanFlags = 255 - // Bits 8 and 9 are used to indicate that the parent span or link span is remote. - // Bit 8 (`HAS_IS_REMOTE`) indicates whether the value is known. - // Bit 9 (`IS_REMOTE`) indicates whether the span or link is remote. + // SpanFlagsContextHasIsRemoteMask is a mask for HAS_IS_REMOTE status. + // + // Bits 8 and 9 are used to indicate that the parent span or link span is + // remote. Bit 8 (`HAS_IS_REMOTE`) indicates whether the value is known. SpanFlagsContextHasIsRemoteMask SpanFlags = 256 - // SpanFlagsContextHasIsRemoteMask indicates the Span is remote. + // SpanFlagsContextIsRemoteMask is a mask for IS_REMOTE status. + // + // Bits 8 and 9 are used to indicate that the parent span or link span is + // remote. Bit 9 (`IS_REMOTE`) indicates whether the span or link is + // remote. 
SpanFlagsContextIsRemoteMask SpanFlags = 512 ) @@ -266,27 +273,31 @@ const ( type SpanKind int32 const ( - // Indicates that the span represents an internal operation within an application, - // as opposed to an operation happening at the boundaries. Default value. + // SpanKindInternal indicates that the span represents an internal + // operation within an application, as opposed to an operation happening at + // the boundaries. SpanKindInternal SpanKind = 1 - // Indicates that the span covers server-side handling of an RPC or other - // remote network request. + // SpanKindServer indicates that the span covers server-side handling of an + // RPC or other remote network request. SpanKindServer SpanKind = 2 - // Indicates that the span describes a request to some remote service. + // SpanKindClient indicates that the span describes a request to some + // remote service. SpanKindClient SpanKind = 3 - // Indicates that the span describes a producer sending a message to a broker. - // Unlike CLIENT and SERVER, there is often no direct critical path latency relationship - // between producer and consumer spans. A PRODUCER span ends when the message was accepted - // by the broker while the logical processing of the message might span a much longer time. + // SpanKindProducer indicates that the span describes a producer sending a + // message to a broker. Unlike SpanKindClient and SpanKindServer, there is + // often no direct critical path latency relationship between producer and + // consumer spans. A SpanKindProducer span ends when the message was + // accepted by the broker while the logical processing of the message might + // span a much longer time. SpanKindProducer SpanKind = 4 - // Indicates that the span describes consumer receiving a message from a broker. - // Like the PRODUCER kind, there is often no direct critical path latency relationship - // between producer and consumer spans. + // SpanKindConsumer indicates that the span describes a consumer receiving + // a message from a broker. Like SpanKindProducer, there is often no direct + // critical path latency relationship between producer and consumer spans. SpanKindConsumer SpanKind = 5 ) -// Event is a time-stamped annotation of the span, consisting of user-supplied -// text description and key-value pairs. +// SpanEvent is a time-stamped annotation of the span, consisting of +// user-supplied text description and key-value pairs. type SpanEvent struct { // time_unix_nano is the time the event occurred. Time time.Time `json:"timeUnixNano,omitempty"` @@ -369,10 +380,11 @@ func (se *SpanEvent) UnmarshalJSON(data []byte) error { return nil } -// A pointer from the current span to another span in the same trace or in a -// different trace. For example, this can be used in batching operations, -// where a single batch handler processes multiple requests from different -// traces or when the handler receives a request from a different project. +// SpanLink is a reference from the current span to another span in the same +// trace or in a different trace. For example, this can be used in batching +// operations, where a single batch handler processes multiple requests from +// different traces or when the handler receives a request from a different +// project. type SpanLink struct { // A unique identifier of a trace that this linked span is part of. The ID is a // 16-byte array. 
diff --git a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/status.go b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/status.go index 1d013a8f..1039bf40 100644 --- a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/status.go +++ b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/status.go @@ -3,17 +3,19 @@ package telemetry // import "go.opentelemetry.io/otel/trace/internal/telemetry" +// StatusCode is the status of a Span. +// // For the semantics of status codes see // https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/api.md#set-status type StatusCode int32 const ( - // The default status. + // StatusCodeUnset is the default status. StatusCodeUnset StatusCode = 0 - // The Span has been validated by an Application developer or Operator to - // have completed successfully. + // StatusCodeOK is used when the Span has been validated by an Application + // developer or Operator to have completed successfully. StatusCodeOK StatusCode = 1 - // The Span contains an error. + // StatusCodeError is used when the Span contains an error. StatusCodeError StatusCode = 2 ) @@ -30,7 +32,7 @@ func (s StatusCode) String() string { return "" } -// The Status type defines a logical error model that is suitable for different +// Status defines a logical error model that is suitable for different // programming environments, including REST APIs and RPC APIs. type Status struct { // A developer-facing human readable error message. diff --git a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/traces.go b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/traces.go index b0394070..e5f10767 100644 --- a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/traces.go +++ b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/traces.go @@ -71,7 +71,7 @@ func (td *Traces) UnmarshalJSON(data []byte) error { return nil } -// A collection of ScopeSpans from a Resource. +// ResourceSpans is a collection of ScopeSpans from a Resource. type ResourceSpans struct { // The resource for the spans in this message. // If this field is not set then no resource info is known. @@ -128,7 +128,7 @@ func (rs *ResourceSpans) UnmarshalJSON(data []byte) error { return nil } -// A collection of Spans produced by an InstrumentationScope. +// ScopeSpans is a collection of Spans produced by an InstrumentationScope. type ScopeSpans struct { // The instrumentation scope information for the spans in this message. 
// Semantically when InstrumentationScope isn't set, it is equivalent with diff --git a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/value.go b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/value.go index 7251492d..ae9ce102 100644 --- a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/value.go +++ b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/value.go @@ -316,7 +316,7 @@ func (v Value) String() string { case ValueKindBool: return strconv.FormatBool(v.asBool()) case ValueKindBytes: - return fmt.Sprint(v.asBytes()) + return string(v.asBytes()) case ValueKindMap: return fmt.Sprint(v.asMap()) case ValueKindSlice: diff --git a/vendor/go.opentelemetry.io/otel/trace/noop.go b/vendor/go.opentelemetry.io/otel/trace/noop.go index c8b1ae5d..0f56e4db 100644 --- a/vendor/go.opentelemetry.io/otel/trace/noop.go +++ b/vendor/go.opentelemetry.io/otel/trace/noop.go @@ -95,6 +95,8 @@ var autoInstEnabled = new(bool) // tracerProvider return a noopTracerProvider if autoEnabled is false, // otherwise it will return a TracerProvider from the sdk package used in // auto-instrumentation. +// +//go:noinline func (noopSpan) tracerProvider(autoEnabled *bool) TracerProvider { if *autoEnabled { return newAutoTracerProvider() diff --git a/vendor/go.opentelemetry.io/otel/verify_readmes.sh b/vendor/go.opentelemetry.io/otel/verify_readmes.sh deleted file mode 100644 index 1e87855e..00000000 --- a/vendor/go.opentelemetry.io/otel/verify_readmes.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -# Copyright The OpenTelemetry Authors -# SPDX-License-Identifier: Apache-2.0 - -set -euo pipefail - -dirs=$(find . -type d -not -path "*/internal*" -not -path "*/test*" -not -path "*/example*" -not -path "*/.*" | sort) - -missingReadme=false -for dir in $dirs; do - if [ ! -f "$dir/README.md" ]; then - echo "couldn't find README.md for $dir" - missingReadme=true - fi -done - -if [ "$missingReadme" = true ] ; then - echo "Error: some READMEs couldn't be found." - exit 1 -fi diff --git a/vendor/go.opentelemetry.io/otel/version.go b/vendor/go.opentelemetry.io/otel/version.go index d5fa71f6..ac3c0b15 100644 --- a/vendor/go.opentelemetry.io/otel/version.go +++ b/vendor/go.opentelemetry.io/otel/version.go @@ -5,5 +5,5 @@ package otel // import "go.opentelemetry.io/otel" // Version is the current release version of OpenTelemetry in use. 
func Version() string { - return "1.35.0" + return "1.36.0" } diff --git a/vendor/go.opentelemetry.io/otel/versions.yaml b/vendor/go.opentelemetry.io/otel/versions.yaml index 2b4cb4b4..79f82f3d 100644 --- a/vendor/go.opentelemetry.io/otel/versions.yaml +++ b/vendor/go.opentelemetry.io/otel/versions.yaml @@ -3,7 +3,7 @@ module-sets: stable-v1: - version: v1.35.0 + version: v1.36.0 modules: - go.opentelemetry.io/otel - go.opentelemetry.io/otel/bridge/opencensus @@ -23,11 +23,11 @@ module-sets: - go.opentelemetry.io/otel/sdk/metric - go.opentelemetry.io/otel/trace experimental-metrics: - version: v0.57.0 + version: v0.58.0 modules: - go.opentelemetry.io/otel/exporters/prometheus experimental-logs: - version: v0.11.0 + version: v0.12.0 modules: - go.opentelemetry.io/otel/log - go.opentelemetry.io/otel/sdk/log @@ -40,4 +40,6 @@ module-sets: - go.opentelemetry.io/otel/schema excluded-modules: - go.opentelemetry.io/otel/internal/tools + - go.opentelemetry.io/otel/log/logtest + - go.opentelemetry.io/otel/sdk/log/logtest - go.opentelemetry.io/otel/trace/internal/telemetry/test diff --git a/vendor/golang.org/x/oauth2/internal/doc.go b/vendor/golang.org/x/oauth2/internal/doc.go index 03265e88..8c7c475f 100644 --- a/vendor/golang.org/x/oauth2/internal/doc.go +++ b/vendor/golang.org/x/oauth2/internal/doc.go @@ -2,5 +2,5 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package internal contains support packages for oauth2 package. +// Package internal contains support packages for [golang.org/x/oauth2]. package internal diff --git a/vendor/golang.org/x/oauth2/internal/oauth2.go b/vendor/golang.org/x/oauth2/internal/oauth2.go index 14989bea..71ea6ad1 100644 --- a/vendor/golang.org/x/oauth2/internal/oauth2.go +++ b/vendor/golang.org/x/oauth2/internal/oauth2.go @@ -13,7 +13,7 @@ import ( ) // ParseKey converts the binary contents of a private key file -// to an *rsa.PrivateKey. It detects whether the private key is in a +// to an [*rsa.PrivateKey]. It detects whether the private key is in a // PEM container or not. If so, it extracts the private key // from PEM container before conversion. It only supports PEM // containers with no passphrase. diff --git a/vendor/golang.org/x/oauth2/internal/token.go b/vendor/golang.org/x/oauth2/internal/token.go index e83ddeef..8389f246 100644 --- a/vendor/golang.org/x/oauth2/internal/token.go +++ b/vendor/golang.org/x/oauth2/internal/token.go @@ -10,7 +10,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "math" "mime" "net/http" @@ -26,9 +25,9 @@ import ( // the requests to access protected resources on the OAuth 2.0 // provider's backend. // -// This type is a mirror of oauth2.Token and exists to break +// This type is a mirror of [golang.org/x/oauth2.Token] and exists to break // an otherwise-circular dependency. Other internal packages -// should convert this Token into an oauth2.Token before use. +// should convert this Token into an [golang.org/x/oauth2.Token] before use. type Token struct { // AccessToken is the token that authorizes and authenticates // the requests. @@ -50,9 +49,16 @@ type Token struct { // mechanisms for that TokenSource will not be used. Expiry time.Time + // ExpiresIn is the OAuth2 wire format "expires_in" field, + // which specifies how many seconds later the token expires, + // relative to an unknown time base approximately around "now". + // It is the application's responsibility to populate + // `Expiry` from `ExpiresIn` when required. 
+ ExpiresIn int64 `json:"expires_in,omitempty"` + // Raw optionally contains extra metadata from the server // when updating a token. - Raw interface{} + Raw any } // tokenJSON is the struct representing the HTTP response from OAuth2 @@ -99,14 +105,6 @@ func (e *expirationTime) UnmarshalJSON(b []byte) error { return nil } -// RegisterBrokenAuthHeaderProvider previously did something. It is now a no-op. -// -// Deprecated: this function no longer does anything. Caller code that -// wants to avoid potential extra HTTP requests made during -// auto-probing of the provider's auth style should set -// Endpoint.AuthStyle. -func RegisterBrokenAuthHeaderProvider(tokenURL string) {} - // AuthStyle is a copy of the golang.org/x/oauth2 package's AuthStyle type. type AuthStyle int @@ -143,6 +141,11 @@ func (lc *LazyAuthStyleCache) Get() *AuthStyleCache { return c } +type authStyleCacheKey struct { + url string + clientID string +} + // AuthStyleCache is the set of tokenURLs we've successfully used via // RetrieveToken and which style auth we ended up using. // It's called a cache, but it doesn't (yet?) shrink. It's expected that @@ -150,26 +153,26 @@ func (lc *LazyAuthStyleCache) Get() *AuthStyleCache { // small. type AuthStyleCache struct { mu sync.Mutex - m map[string]AuthStyle // keyed by tokenURL + m map[authStyleCacheKey]AuthStyle } // lookupAuthStyle reports which auth style we last used with tokenURL // when calling RetrieveToken and whether we have ever done so. -func (c *AuthStyleCache) lookupAuthStyle(tokenURL string) (style AuthStyle, ok bool) { +func (c *AuthStyleCache) lookupAuthStyle(tokenURL, clientID string) (style AuthStyle, ok bool) { c.mu.Lock() defer c.mu.Unlock() - style, ok = c.m[tokenURL] + style, ok = c.m[authStyleCacheKey{tokenURL, clientID}] return } // setAuthStyle adds an entry to authStyleCache, documented above. -func (c *AuthStyleCache) setAuthStyle(tokenURL string, v AuthStyle) { +func (c *AuthStyleCache) setAuthStyle(tokenURL, clientID string, v AuthStyle) { c.mu.Lock() defer c.mu.Unlock() if c.m == nil { - c.m = make(map[string]AuthStyle) + c.m = make(map[authStyleCacheKey]AuthStyle) } - c.m[tokenURL] = v + c.m[authStyleCacheKey{tokenURL, clientID}] = v } // newTokenRequest returns a new *http.Request to retrieve a new token @@ -210,9 +213,9 @@ func cloneURLValues(v url.Values) url.Values { } func RetrieveToken(ctx context.Context, clientID, clientSecret, tokenURL string, v url.Values, authStyle AuthStyle, styleCache *AuthStyleCache) (*Token, error) { - needsAuthStyleProbe := authStyle == 0 + needsAuthStyleProbe := authStyle == AuthStyleUnknown if needsAuthStyleProbe { - if style, ok := styleCache.lookupAuthStyle(tokenURL); ok { + if style, ok := styleCache.lookupAuthStyle(tokenURL, clientID); ok { authStyle = style needsAuthStyleProbe = false } else { @@ -242,7 +245,7 @@ func RetrieveToken(ctx context.Context, clientID, clientSecret, tokenURL string, token, err = doTokenRoundTrip(ctx, req) } if needsAuthStyleProbe && err == nil { - styleCache.setAuthStyle(tokenURL, authStyle) + styleCache.setAuthStyle(tokenURL, clientID, authStyle) } // Don't overwrite `RefreshToken` with an empty value // if this was a token refreshing request. 
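The comment on the new `ExpiresIn` field notes that populating `Expiry` is left to the application. A minimal sketch of that derivation, using a hypothetical struct that mirrors just the two fields involved:

```go
package main

import (
	"fmt"
	"time"
)

// tokenResponse is a stand-in for a token carrying the wire-format
// "expires_in" value; it is not a type from this patch.
type tokenResponse struct {
	ExpiresIn int64     // seconds until expiry, relative to roughly "now"
	Expiry    time.Time // absolute expiry, filled in by the caller
}

// setExpiry converts the relative ExpiresIn into an absolute Expiry,
// anchored to the caller-supplied clock reading.
func setExpiry(t *tokenResponse, now time.Time) {
	if t.ExpiresIn > 0 {
		t.Expiry = now.Add(time.Duration(t.ExpiresIn) * time.Second)
	}
}

func main() {
	tok := &tokenResponse{ExpiresIn: 3600}
	setExpiry(tok, time.Now())
	fmt.Println("token expires at:", tok.Expiry)
}
```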
@@ -257,7 +260,7 @@ func doTokenRoundTrip(ctx context.Context, req *http.Request) (*Token, error) { if err != nil { return nil, err } - body, err := ioutil.ReadAll(io.LimitReader(r.Body, 1<<20)) + body, err := io.ReadAll(io.LimitReader(r.Body, 1<<20)) r.Body.Close() if err != nil { return nil, fmt.Errorf("oauth2: cannot fetch token: %v", err) @@ -312,7 +315,8 @@ func doTokenRoundTrip(ctx context.Context, req *http.Request) (*Token, error) { TokenType: tj.TokenType, RefreshToken: tj.RefreshToken, Expiry: tj.expiry(), - Raw: make(map[string]interface{}), + ExpiresIn: int64(tj.ExpiresIn), + Raw: make(map[string]any), } json.Unmarshal(body, &token.Raw) // no error checks for optional fields } diff --git a/vendor/golang.org/x/oauth2/internal/transport.go b/vendor/golang.org/x/oauth2/internal/transport.go index b9db01dd..afc0aeb2 100644 --- a/vendor/golang.org/x/oauth2/internal/transport.go +++ b/vendor/golang.org/x/oauth2/internal/transport.go @@ -9,8 +9,8 @@ import ( "net/http" ) -// HTTPClient is the context key to use with golang.org/x/net/context's -// WithValue function to associate an *http.Client value with a context. +// HTTPClient is the context key to use with [context.WithValue] +// to associate an [*http.Client] value with a context. var HTTPClient ContextKey // ContextKey is just an empty struct. It exists so HTTPClient can be diff --git a/vendor/golang.org/x/oauth2/oauth2.go b/vendor/golang.org/x/oauth2/oauth2.go index eacdd7fd..de34feb8 100644 --- a/vendor/golang.org/x/oauth2/oauth2.go +++ b/vendor/golang.org/x/oauth2/oauth2.go @@ -22,9 +22,9 @@ import ( ) // NoContext is the default context you should supply if not using -// your own context.Context (see https://golang.org/x/net/context). +// your own [context.Context]. // -// Deprecated: Use context.Background() or context.TODO() instead. +// Deprecated: Use [context.Background] or [context.TODO] instead. var NoContext = context.TODO() // RegisterBrokenAuthHeaderProvider previously did something. It is now a no-op. @@ -37,8 +37,8 @@ func RegisterBrokenAuthHeaderProvider(tokenURL string) {} // Config describes a typical 3-legged OAuth2 flow, with both the // client application information and the server's endpoint URLs. -// For the client credentials 2-legged OAuth2 flow, see the clientcredentials -// package (https://golang.org/x/oauth2/clientcredentials). +// For the client credentials 2-legged OAuth2 flow, see the +// [golang.org/x/oauth2/clientcredentials] package. type Config struct { // ClientID is the application's ID. ClientID string @@ -46,7 +46,7 @@ type Config struct { // ClientSecret is the application's secret. ClientSecret string - // Endpoint contains the resource server's token endpoint + // Endpoint contains the authorization server's token endpoint // URLs. These are constants specific to each server and are // often available via site-specific packages, such as // google.Endpoint or github.Endpoint. @@ -135,7 +135,7 @@ type setParam struct{ k, v string } func (p setParam) setValue(m url.Values) { m.Set(p.k, p.v) } -// SetAuthURLParam builds an AuthCodeOption which passes key/value parameters +// SetAuthURLParam builds an [AuthCodeOption] which passes key/value parameters // to a provider's authorization endpoint. func SetAuthURLParam(key, value string) AuthCodeOption { return setParam{key, value} @@ -148,8 +148,8 @@ func SetAuthURLParam(key, value string) AuthCodeOption { // request and callback. The authorization server includes this value when // redirecting the user agent back to the client. 
// -// Opts may include AccessTypeOnline or AccessTypeOffline, as well -// as ApprovalForce. +// Opts may include [AccessTypeOnline] or [AccessTypeOffline], as well +// as [ApprovalForce]. // // To protect against CSRF attacks, opts should include a PKCE challenge // (S256ChallengeOption). Not all servers support PKCE. An alternative is to @@ -194,7 +194,7 @@ func (c *Config) AuthCodeURL(state string, opts ...AuthCodeOption) string { // and when other authorization grant types are not available." // See https://tools.ietf.org/html/rfc6749#section-4.3 for more info. // -// The provided context optionally controls which HTTP client is used. See the HTTPClient variable. +// The provided context optionally controls which HTTP client is used. See the [HTTPClient] variable. func (c *Config) PasswordCredentialsToken(ctx context.Context, username, password string) (*Token, error) { v := url.Values{ "grant_type": {"password"}, @@ -212,10 +212,10 @@ func (c *Config) PasswordCredentialsToken(ctx context.Context, username, passwor // It is used after a resource provider redirects the user back // to the Redirect URI (the URL obtained from AuthCodeURL). // -// The provided context optionally controls which HTTP client is used. See the HTTPClient variable. +// The provided context optionally controls which HTTP client is used. See the [HTTPClient] variable. // -// The code will be in the *http.Request.FormValue("code"). Before -// calling Exchange, be sure to validate FormValue("state") if you are +// The code will be in the [http.Request.FormValue]("code"). Before +// calling Exchange, be sure to validate [http.Request.FormValue]("state") if you are // using it to protect against CSRF attacks. // // If using PKCE to protect against CSRF attacks, opts should include a @@ -242,10 +242,10 @@ func (c *Config) Client(ctx context.Context, t *Token) *http.Client { return NewClient(ctx, c.TokenSource(ctx, t)) } -// TokenSource returns a TokenSource that returns t until t expires, +// TokenSource returns a [TokenSource] that returns t until t expires, // automatically refreshing it as necessary using the provided context. // -// Most users will use Config.Client instead. +// Most users will use [Config.Client] instead. func (c *Config) TokenSource(ctx context.Context, t *Token) TokenSource { tkr := &tokenRefresher{ ctx: ctx, @@ -260,7 +260,7 @@ func (c *Config) TokenSource(ctx context.Context, t *Token) TokenSource { } } -// tokenRefresher is a TokenSource that makes "grant_type"=="refresh_token" +// tokenRefresher is a TokenSource that makes "grant_type=refresh_token" // HTTP requests to renew a token using a RefreshToken. type tokenRefresher struct { ctx context.Context // used to get HTTP requests @@ -305,8 +305,7 @@ type reuseTokenSource struct { } // Token returns the current token if it's still valid, else will -// refresh the current token (using r.Context for HTTP client -// information) and return the new one. +// refresh the current token and return the new one. func (s *reuseTokenSource) Token() (*Token, error) { s.mu.Lock() defer s.mu.Unlock() @@ -322,7 +321,7 @@ func (s *reuseTokenSource) Token() (*Token, error) { return t, nil } -// StaticTokenSource returns a TokenSource that always returns the same token. +// StaticTokenSource returns a [TokenSource] that always returns the same token. // Because the provided token t is never refreshed, StaticTokenSource is only // useful for tokens that never expire. 
func StaticTokenSource(t *Token) TokenSource { @@ -338,16 +337,16 @@ func (s staticTokenSource) Token() (*Token, error) { return s.t, nil } -// HTTPClient is the context key to use with golang.org/x/net/context's -// WithValue function to associate an *http.Client value with a context. +// HTTPClient is the context key to use with [context.WithValue] +// to associate a [*http.Client] value with a context. var HTTPClient internal.ContextKey -// NewClient creates an *http.Client from a Context and TokenSource. +// NewClient creates an [*http.Client] from a [context.Context] and [TokenSource]. // The returned client is not valid beyond the lifetime of the context. // -// Note that if a custom *http.Client is provided via the Context it +// Note that if a custom [*http.Client] is provided via the [context.Context] it // is used only for token acquisition and is not used to configure the -// *http.Client returned from NewClient. +// [*http.Client] returned from NewClient. // // As a special case, if src is nil, a non-OAuth2 client is returned // using the provided context. This exists to support related OAuth2 @@ -368,7 +367,7 @@ func NewClient(ctx context.Context, src TokenSource) *http.Client { } } -// ReuseTokenSource returns a TokenSource which repeatedly returns the +// ReuseTokenSource returns a [TokenSource] which repeatedly returns the // same token as long as it's valid, starting with t. // When its cached token is invalid, a new token is obtained from src. // @@ -376,10 +375,10 @@ func NewClient(ctx context.Context, src TokenSource) *http.Client { // (such as a file on disk) between runs of a program, rather than // obtaining new tokens unnecessarily. // -// The initial token t may be nil, in which case the TokenSource is +// The initial token t may be nil, in which case the [TokenSource] is // wrapped in a caching version if it isn't one already. This also // means it's always safe to wrap ReuseTokenSource around any other -// TokenSource without adverse effects. +// [TokenSource] without adverse effects. func ReuseTokenSource(t *Token, src TokenSource) TokenSource { // Don't wrap a reuseTokenSource in itself. That would work, // but cause an unnecessary number of mutex operations. @@ -397,8 +396,8 @@ func ReuseTokenSource(t *Token, src TokenSource) TokenSource { } } -// ReuseTokenSourceWithExpiry returns a TokenSource that acts in the same manner as the -// TokenSource returned by ReuseTokenSource, except the expiry buffer is +// ReuseTokenSourceWithExpiry returns a [TokenSource] that acts in the same manner as the +// [TokenSource] returned by [ReuseTokenSource], except the expiry buffer is // configurable. The expiration time of a token is calculated as // t.Expiry.Add(-earlyExpiry). func ReuseTokenSourceWithExpiry(t *Token, src TokenSource, earlyExpiry time.Duration) TokenSource { diff --git a/vendor/golang.org/x/oauth2/pkce.go b/vendor/golang.org/x/oauth2/pkce.go index 6a95da97..cea8374d 100644 --- a/vendor/golang.org/x/oauth2/pkce.go +++ b/vendor/golang.org/x/oauth2/pkce.go @@ -1,6 +1,7 @@ // Copyright 2023 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. + package oauth2 import ( @@ -20,9 +21,9 @@ const ( // This follows recommendations in RFC 7636. // // A fresh verifier should be generated for each authorization. 
-// S256ChallengeOption(verifier) should then be passed to Config.AuthCodeURL -// (or Config.DeviceAuth) and VerifierOption(verifier) to Config.Exchange -// (or Config.DeviceAccessToken). +// The resulting verifier should be passed to [Config.AuthCodeURL] or [Config.DeviceAuth] +// with [S256ChallengeOption], and to [Config.Exchange] or [Config.DeviceAccessToken] +// with [VerifierOption]. func GenerateVerifier() string { // "RECOMMENDED that the output of a suitable random number generator be // used to create a 32-octet sequence. The octet sequence is then @@ -36,22 +37,22 @@ func GenerateVerifier() string { return base64.RawURLEncoding.EncodeToString(data) } -// VerifierOption returns a PKCE code verifier AuthCodeOption. It should be -// passed to Config.Exchange or Config.DeviceAccessToken only. +// VerifierOption returns a PKCE code verifier [AuthCodeOption]. It should only be +// passed to [Config.Exchange] or [Config.DeviceAccessToken]. func VerifierOption(verifier string) AuthCodeOption { return setParam{k: codeVerifierKey, v: verifier} } // S256ChallengeFromVerifier returns a PKCE code challenge derived from verifier with method S256. // -// Prefer to use S256ChallengeOption where possible. +// Prefer to use [S256ChallengeOption] where possible. func S256ChallengeFromVerifier(verifier string) string { sha := sha256.Sum256([]byte(verifier)) return base64.RawURLEncoding.EncodeToString(sha[:]) } // S256ChallengeOption derives a PKCE code challenge derived from verifier with -// method S256. It should be passed to Config.AuthCodeURL or Config.DeviceAuth +// method S256. It should be passed to [Config.AuthCodeURL] or [Config.DeviceAuth] // only. func S256ChallengeOption(verifier string) AuthCodeOption { return challengeOption{ diff --git a/vendor/golang.org/x/oauth2/token.go b/vendor/golang.org/x/oauth2/token.go index 8c31136c..239ec329 100644 --- a/vendor/golang.org/x/oauth2/token.go +++ b/vendor/golang.org/x/oauth2/token.go @@ -44,7 +44,7 @@ type Token struct { // Expiry is the optional expiration time of the access token. // - // If zero, TokenSource implementations will reuse the same + // If zero, [TokenSource] implementations will reuse the same // token forever and RefreshToken or equivalent // mechanisms for that TokenSource will not be used. Expiry time.Time `json:"expiry,omitempty"` @@ -58,7 +58,7 @@ type Token struct { // raw optionally contains extra metadata from the server // when updating a token. - raw interface{} + raw any // expiryDelta is used to calculate when a token is considered // expired, by subtracting from Expiry. If zero, defaultExpiryDelta @@ -86,16 +86,16 @@ func (t *Token) Type() string { // SetAuthHeader sets the Authorization header to r using the access // token in t. // -// This method is unnecessary when using Transport or an HTTP Client +// This method is unnecessary when using [Transport] or an HTTP Client // returned by this package. func (t *Token) SetAuthHeader(r *http.Request) { r.Header.Set("Authorization", t.Type()+" "+t.AccessToken) } -// WithExtra returns a new Token that's a clone of t, but using the +// WithExtra returns a new [Token] that's a clone of t, but using the // provided raw extra map. This is only intended for use by packages // implementing derivative OAuth2 flows. -func (t *Token) WithExtra(extra interface{}) *Token { +func (t *Token) WithExtra(extra any) *Token { t2 := new(Token) *t2 = *t t2.raw = extra @@ -105,8 +105,8 @@ func (t *Token) WithExtra(extra interface{}) *Token { // Extra returns an extra field. 
// Extra fields are key-value pairs returned by the server as a // part of the token retrieval response. -func (t *Token) Extra(key string) interface{} { - if raw, ok := t.raw.(map[string]interface{}); ok { +func (t *Token) Extra(key string) any { + if raw, ok := t.raw.(map[string]any); ok { return raw[key] } @@ -163,6 +163,7 @@ func tokenFromInternal(t *internal.Token) *Token { TokenType: t.TokenType, RefreshToken: t.RefreshToken, Expiry: t.Expiry, + ExpiresIn: t.ExpiresIn, raw: t.Raw, } } diff --git a/vendor/golang.org/x/oauth2/transport.go b/vendor/golang.org/x/oauth2/transport.go index 90657915..8bbebbac 100644 --- a/vendor/golang.org/x/oauth2/transport.go +++ b/vendor/golang.org/x/oauth2/transport.go @@ -11,12 +11,12 @@ import ( "sync" ) -// Transport is an http.RoundTripper that makes OAuth 2.0 HTTP requests, -// wrapping a base RoundTripper and adding an Authorization header -// with a token from the supplied Sources. +// Transport is an [http.RoundTripper] that makes OAuth 2.0 HTTP requests, +// wrapping a base [http.RoundTripper] and adding an Authorization header +// with a token from the supplied [TokenSource]. // // Transport is a low-level mechanism. Most code will use the -// higher-level Config.Client method instead. +// higher-level [Config.Client] method instead. type Transport struct { // Source supplies the token to add to outgoing requests' // Authorization headers. @@ -47,7 +47,7 @@ func (t *Transport) RoundTrip(req *http.Request) (*http.Response, error) { return nil, err } - req2 := cloneRequest(req) // per RoundTripper contract + req2 := req.Clone(req.Context()) token.SetAuthHeader(req2) // req.Body is assumed to be closed by the base RoundTripper. @@ -73,17 +73,3 @@ func (t *Transport) base() http.RoundTripper { } return http.DefaultTransport } - -// cloneRequest returns a clone of the provided *http.Request. -// The clone is a shallow copy of the struct and its Header map. -func cloneRequest(r *http.Request) *http.Request { - // shallow copy of the struct - r2 := new(http.Request) - *r2 = *r - // deep copy of the Header - r2.Header = make(http.Header, len(r.Header)) - for k, s := range r.Header { - r2.Header[k] = append([]string(nil), s...) - } - return r2 -} diff --git a/vendor/golang.org/x/sync/errgroup/errgroup.go b/vendor/golang.org/x/sync/errgroup/errgroup.go index f8c3c092..cfafed5b 100644 --- a/vendor/golang.org/x/sync/errgroup/errgroup.go +++ b/vendor/golang.org/x/sync/errgroup/errgroup.go @@ -12,6 +12,8 @@ package errgroup import ( "context" "fmt" + "runtime" + "runtime/debug" "sync" ) @@ -31,6 +33,10 @@ type Group struct { errOnce sync.Once err error + + mu sync.Mutex + panicValue any // = PanicError | PanicValue; non-nil if some Group.Go goroutine panicked. + abnormal bool // some Group.Go goroutine terminated abnormally (panic or goexit). } func (g *Group) done() { @@ -50,13 +56,22 @@ func WithContext(ctx context.Context) (*Group, context.Context) { return &Group{cancel: cancel}, ctx } -// Wait blocks until all function calls from the Go method have returned, then -// returns the first non-nil error (if any) from them. +// Wait blocks until all function calls from the Go method have returned +// normally, then returns the first non-nil error (if any) from them. +// +// If any of the calls panics, Wait panics with a [PanicValue]; +// and if any of them calls [runtime.Goexit], Wait calls runtime.Goexit. 
func (g *Group) Wait() error { g.wg.Wait() if g.cancel != nil { g.cancel(g.err) } + if g.panicValue != nil { + panic(g.panicValue) + } + if g.abnormal { + runtime.Goexit() + } return g.err } @@ -65,18 +80,56 @@ func (g *Group) Wait() error { // It blocks until the new goroutine can be added without the number of // active goroutines in the group exceeding the configured limit. // -// The first call to return a non-nil error cancels the group's context, if the -// group was created by calling WithContext. The error will be returned by Wait. +// It blocks until the new goroutine can be added without the number of +// goroutines in the group exceeding the configured limit. +// +// The first goroutine in the group that returns a non-nil error, panics, or +// invokes [runtime.Goexit] will cancel the associated Context, if any. func (g *Group) Go(f func() error) { if g.sem != nil { g.sem <- token{} } + g.add(f) +} + +func (g *Group) add(f func() error) { g.wg.Add(1) go func() { defer g.done() + normalReturn := false + defer func() { + if normalReturn { + return + } + v := recover() + g.mu.Lock() + defer g.mu.Unlock() + if !g.abnormal { + if g.cancel != nil { + g.cancel(g.err) + } + g.abnormal = true + } + if v != nil && g.panicValue == nil { + switch v := v.(type) { + case error: + g.panicValue = PanicError{ + Recovered: v, + Stack: debug.Stack(), + } + default: + g.panicValue = PanicValue{ + Recovered: v, + Stack: debug.Stack(), + } + } + } + }() - if err := f(); err != nil { + err := f() + normalReturn = true + if err != nil { g.errOnce.Do(func() { g.err = err if g.cancel != nil { @@ -101,19 +154,7 @@ func (g *Group) TryGo(f func() error) bool { } } - g.wg.Add(1) - go func() { - defer g.done() - - if err := f(); err != nil { - g.errOnce.Do(func() { - g.err = err - if g.cancel != nil { - g.cancel(g.err) - } - }) - } - }() + g.add(f) return true } @@ -135,3 +176,33 @@ func (g *Group) SetLimit(n int) { } g.sem = make(chan token, n) } + +// PanicError wraps an error recovered from an unhandled panic +// when calling a function passed to Go or TryGo. +type PanicError struct { + Recovered error + Stack []byte // result of call to [debug.Stack] +} + +func (p PanicError) Error() string { + // A Go Error method conventionally does not include a stack dump, so omit it + // here. (Callers who care can extract it from the Stack field.) + return fmt.Sprintf("recovered from errgroup.Group: %v", p.Recovered) +} + +func (p PanicError) Unwrap() error { return p.Recovered } + +// PanicValue wraps a value that does not implement the error interface, +// recovered from an unhandled panic when calling a function passed to Go or +// TryGo. 
+type PanicValue struct { + Recovered any + Stack []byte // result of call to [debug.Stack] +} + +func (p PanicValue) String() string { + if len(p.Stack) > 0 { + return fmt.Sprintf("recovered from errgroup.Group: %v\n%s", p.Recovered, p.Stack) + } + return fmt.Sprintf("recovered from errgroup.Group: %v", p.Recovered) +} diff --git a/vendor/golang.org/x/sys/cpu/cpu.go b/vendor/golang.org/x/sys/cpu/cpu.go index 2e73ee19..63541994 100644 --- a/vendor/golang.org/x/sys/cpu/cpu.go +++ b/vendor/golang.org/x/sys/cpu/cpu.go @@ -232,6 +232,17 @@ var RISCV64 struct { HasZba bool // Address generation instructions extension HasZbb bool // Basic bit-manipulation extension HasZbs bool // Single-bit instructions extension + HasZvbb bool // Vector Basic Bit-manipulation + HasZvbc bool // Vector Carryless Multiplication + HasZvkb bool // Vector Cryptography Bit-manipulation + HasZvkt bool // Vector Data-Independent Execution Latency + HasZvkg bool // Vector GCM/GMAC + HasZvkn bool // NIST Algorithm Suite (AES/SHA256/SHA512) + HasZvknc bool // NIST Algorithm Suite with carryless multiply + HasZvkng bool // NIST Algorithm Suite with GCM + HasZvks bool // ShangMi Algorithm Suite + HasZvksc bool // ShangMi Algorithm Suite with carryless multiplication + HasZvksg bool // ShangMi Algorithm Suite with GCM _ CacheLinePad } diff --git a/vendor/golang.org/x/sys/cpu/cpu_linux_riscv64.go b/vendor/golang.org/x/sys/cpu/cpu_linux_riscv64.go index cb4a0c57..ad741536 100644 --- a/vendor/golang.org/x/sys/cpu/cpu_linux_riscv64.go +++ b/vendor/golang.org/x/sys/cpu/cpu_linux_riscv64.go @@ -58,6 +58,15 @@ const ( riscv_HWPROBE_EXT_ZBA = 0x8 riscv_HWPROBE_EXT_ZBB = 0x10 riscv_HWPROBE_EXT_ZBS = 0x20 + riscv_HWPROBE_EXT_ZVBB = 0x20000 + riscv_HWPROBE_EXT_ZVBC = 0x40000 + riscv_HWPROBE_EXT_ZVKB = 0x80000 + riscv_HWPROBE_EXT_ZVKG = 0x100000 + riscv_HWPROBE_EXT_ZVKNED = 0x200000 + riscv_HWPROBE_EXT_ZVKNHB = 0x800000 + riscv_HWPROBE_EXT_ZVKSED = 0x1000000 + riscv_HWPROBE_EXT_ZVKSH = 0x2000000 + riscv_HWPROBE_EXT_ZVKT = 0x4000000 riscv_HWPROBE_KEY_CPUPERF_0 = 0x5 riscv_HWPROBE_MISALIGNED_FAST = 0x3 riscv_HWPROBE_MISALIGNED_MASK = 0x7 @@ -99,6 +108,20 @@ func doinit() { RISCV64.HasZba = isSet(v, riscv_HWPROBE_EXT_ZBA) RISCV64.HasZbb = isSet(v, riscv_HWPROBE_EXT_ZBB) RISCV64.HasZbs = isSet(v, riscv_HWPROBE_EXT_ZBS) + RISCV64.HasZvbb = isSet(v, riscv_HWPROBE_EXT_ZVBB) + RISCV64.HasZvbc = isSet(v, riscv_HWPROBE_EXT_ZVBC) + RISCV64.HasZvkb = isSet(v, riscv_HWPROBE_EXT_ZVKB) + RISCV64.HasZvkg = isSet(v, riscv_HWPROBE_EXT_ZVKG) + RISCV64.HasZvkt = isSet(v, riscv_HWPROBE_EXT_ZVKT) + // Cryptography shorthand extensions + RISCV64.HasZvkn = isSet(v, riscv_HWPROBE_EXT_ZVKNED) && + isSet(v, riscv_HWPROBE_EXT_ZVKNHB) && RISCV64.HasZvkb && RISCV64.HasZvkt + RISCV64.HasZvknc = RISCV64.HasZvkn && RISCV64.HasZvbc + RISCV64.HasZvkng = RISCV64.HasZvkn && RISCV64.HasZvkg + RISCV64.HasZvks = isSet(v, riscv_HWPROBE_EXT_ZVKSED) && + isSet(v, riscv_HWPROBE_EXT_ZVKSH) && RISCV64.HasZvkb && RISCV64.HasZvkt + RISCV64.HasZvksc = RISCV64.HasZvks && RISCV64.HasZvbc + RISCV64.HasZvksg = RISCV64.HasZvks && RISCV64.HasZvkg } if pairs[1].key != -1 { v := pairs[1].value & riscv_HWPROBE_MISALIGNED_MASK diff --git a/vendor/golang.org/x/sys/cpu/cpu_riscv64.go b/vendor/golang.org/x/sys/cpu/cpu_riscv64.go index aca3199c..0f617aef 100644 --- a/vendor/golang.org/x/sys/cpu/cpu_riscv64.go +++ b/vendor/golang.org/x/sys/cpu/cpu_riscv64.go @@ -16,5 +16,17 @@ func initOptions() { {Name: "zba", Feature: &RISCV64.HasZba}, {Name: "zbb", Feature: &RISCV64.HasZbb}, {Name: "zbs", Feature: 
&RISCV64.HasZbs}, + // RISC-V Cryptography Extensions + {Name: "zvbb", Feature: &RISCV64.HasZvbb}, + {Name: "zvbc", Feature: &RISCV64.HasZvbc}, + {Name: "zvkb", Feature: &RISCV64.HasZvkb}, + {Name: "zvkg", Feature: &RISCV64.HasZvkg}, + {Name: "zvkt", Feature: &RISCV64.HasZvkt}, + {Name: "zvkn", Feature: &RISCV64.HasZvkn}, + {Name: "zvknc", Feature: &RISCV64.HasZvknc}, + {Name: "zvkng", Feature: &RISCV64.HasZvkng}, + {Name: "zvks", Feature: &RISCV64.HasZvks}, + {Name: "zvksc", Feature: &RISCV64.HasZvksc}, + {Name: "zvksg", Feature: &RISCV64.HasZvksg}, } } diff --git a/vendor/golang.org/x/sys/windows/security_windows.go b/vendor/golang.org/x/sys/windows/security_windows.go index b6e1ab76..a8b0364c 100644 --- a/vendor/golang.org/x/sys/windows/security_windows.go +++ b/vendor/golang.org/x/sys/windows/security_windows.go @@ -1303,7 +1303,10 @@ func (selfRelativeSD *SECURITY_DESCRIPTOR) ToAbsolute() (absoluteSD *SECURITY_DE return nil, err } if absoluteSDSize > 0 { - absoluteSD = (*SECURITY_DESCRIPTOR)(unsafe.Pointer(&make([]byte, absoluteSDSize)[0])) + absoluteSD = new(SECURITY_DESCRIPTOR) + if unsafe.Sizeof(*absoluteSD) < uintptr(absoluteSDSize) { + panic("sizeof(SECURITY_DESCRIPTOR) too small") + } } var ( dacl *ACL @@ -1312,19 +1315,55 @@ func (selfRelativeSD *SECURITY_DESCRIPTOR) ToAbsolute() (absoluteSD *SECURITY_DE group *SID ) if daclSize > 0 { - dacl = (*ACL)(unsafe.Pointer(&make([]byte, daclSize)[0])) + dacl = (*ACL)(unsafe.Pointer(unsafe.SliceData(make([]byte, daclSize)))) } if saclSize > 0 { - sacl = (*ACL)(unsafe.Pointer(&make([]byte, saclSize)[0])) + sacl = (*ACL)(unsafe.Pointer(unsafe.SliceData(make([]byte, saclSize)))) } if ownerSize > 0 { - owner = (*SID)(unsafe.Pointer(&make([]byte, ownerSize)[0])) + owner = (*SID)(unsafe.Pointer(unsafe.SliceData(make([]byte, ownerSize)))) } if groupSize > 0 { - group = (*SID)(unsafe.Pointer(&make([]byte, groupSize)[0])) + group = (*SID)(unsafe.Pointer(unsafe.SliceData(make([]byte, groupSize)))) } + // We call into Windows via makeAbsoluteSD, which sets up + // pointers within absoluteSD that point to other chunks of memory + // we pass into makeAbsoluteSD, and that happens outside the view of the GC. + // We therefore take some care here to then verify the pointers are as we expect + // and set them explicitly in view of the GC. See https://go.dev/issue/73199. + // TODO: consider weak pointers once Go 1.24 is appropriate. See suggestion in https://go.dev/cl/663575. err = makeAbsoluteSD(selfRelativeSD, absoluteSD, &absoluteSDSize, dacl, &daclSize, sacl, &saclSize, owner, &ownerSize, group, &groupSize) + if err != nil { + // Don't return absoluteSD, which might be partially initialized. + return nil, err + } + // Before using any fields, verify absoluteSD is in the format we expect according to Windows. 
+ // See https://learn.microsoft.com/en-us/windows/win32/secauthz/absolute-and-self-relative-security-descriptors + absControl, _, err := absoluteSD.Control() + if err != nil { + panic("absoluteSD: " + err.Error()) + } + if absControl&SE_SELF_RELATIVE != 0 { + panic("absoluteSD not in absolute format") + } + if absoluteSD.dacl != dacl { + panic("dacl pointer mismatch") + } + if absoluteSD.sacl != sacl { + panic("sacl pointer mismatch") + } + if absoluteSD.owner != owner { + panic("owner pointer mismatch") + } + if absoluteSD.group != group { + panic("group pointer mismatch") + } + absoluteSD.dacl = dacl + absoluteSD.sacl = sacl + absoluteSD.owner = owner + absoluteSD.group = group + return } diff --git a/vendor/golang.org/x/sys/windows/syscall_windows.go b/vendor/golang.org/x/sys/windows/syscall_windows.go index 4a325438..640f6b15 100644 --- a/vendor/golang.org/x/sys/windows/syscall_windows.go +++ b/vendor/golang.org/x/sys/windows/syscall_windows.go @@ -870,6 +870,7 @@ const socket_error = uintptr(^uint32(0)) //sys WSARecvFrom(s Handle, bufs *WSABuf, bufcnt uint32, recvd *uint32, flags *uint32, from *RawSockaddrAny, fromlen *int32, overlapped *Overlapped, croutine *byte) (err error) [failretval==socket_error] = ws2_32.WSARecvFrom //sys WSASendTo(s Handle, bufs *WSABuf, bufcnt uint32, sent *uint32, flags uint32, to *RawSockaddrAny, tolen int32, overlapped *Overlapped, croutine *byte) (err error) [failretval==socket_error] = ws2_32.WSASendTo //sys WSASocket(af int32, typ int32, protocol int32, protoInfo *WSAProtocolInfo, group uint32, flags uint32) (handle Handle, err error) [failretval==InvalidHandle] = ws2_32.WSASocketW +//sys WSADuplicateSocket(s Handle, processID uint32, info *WSAProtocolInfo) (err error) [failretval!=0] = ws2_32.WSADuplicateSocketW //sys GetHostByName(name string) (h *Hostent, err error) [failretval==nil] = ws2_32.gethostbyname //sys GetServByName(name string, proto string) (s *Servent, err error) [failretval==nil] = ws2_32.getservbyname //sys Ntohs(netshort uint16) (u uint16) = ws2_32.ntohs @@ -1698,8 +1699,9 @@ func NewNTUnicodeString(s string) (*NTUnicodeString, error) { // Slice returns a uint16 slice that aliases the data in the NTUnicodeString. func (s *NTUnicodeString) Slice() []uint16 { - slice := unsafe.Slice(s.Buffer, s.MaximumLength) - return slice[:s.Length] + // Note: this rounds the length down, if it happens + // to (incorrectly) be odd. Probably safer than rounding up. + return unsafe.Slice(s.Buffer, s.MaximumLength/2)[:s.Length/2] } func (s *NTUnicodeString) String() string { diff --git a/vendor/golang.org/x/sys/windows/types_windows.go b/vendor/golang.org/x/sys/windows/types_windows.go index ad67df2f..958bcf47 100644 --- a/vendor/golang.org/x/sys/windows/types_windows.go +++ b/vendor/golang.org/x/sys/windows/types_windows.go @@ -2700,6 +2700,8 @@ type CommTimeouts struct { // NTUnicodeString is a UTF-16 string for NT native APIs, corresponding to UNICODE_STRING. type NTUnicodeString struct { + // Note: Length and MaximumLength are in *bytes*, not uint16s. + // They should always be even. 
Length uint16 MaximumLength uint16 Buffer *uint16 @@ -3628,3 +3630,213 @@ const ( KLF_NOTELLSHELL = 0x00000080 KLF_SETFORPROCESS = 0x00000100 ) + +// Virtual Key codes +// https://docs.microsoft.com/en-us/windows/win32/inputdev/virtual-key-codes +const ( + VK_LBUTTON = 0x01 + VK_RBUTTON = 0x02 + VK_CANCEL = 0x03 + VK_MBUTTON = 0x04 + VK_XBUTTON1 = 0x05 + VK_XBUTTON2 = 0x06 + VK_BACK = 0x08 + VK_TAB = 0x09 + VK_CLEAR = 0x0C + VK_RETURN = 0x0D + VK_SHIFT = 0x10 + VK_CONTROL = 0x11 + VK_MENU = 0x12 + VK_PAUSE = 0x13 + VK_CAPITAL = 0x14 + VK_KANA = 0x15 + VK_HANGEUL = 0x15 + VK_HANGUL = 0x15 + VK_IME_ON = 0x16 + VK_JUNJA = 0x17 + VK_FINAL = 0x18 + VK_HANJA = 0x19 + VK_KANJI = 0x19 + VK_IME_OFF = 0x1A + VK_ESCAPE = 0x1B + VK_CONVERT = 0x1C + VK_NONCONVERT = 0x1D + VK_ACCEPT = 0x1E + VK_MODECHANGE = 0x1F + VK_SPACE = 0x20 + VK_PRIOR = 0x21 + VK_NEXT = 0x22 + VK_END = 0x23 + VK_HOME = 0x24 + VK_LEFT = 0x25 + VK_UP = 0x26 + VK_RIGHT = 0x27 + VK_DOWN = 0x28 + VK_SELECT = 0x29 + VK_PRINT = 0x2A + VK_EXECUTE = 0x2B + VK_SNAPSHOT = 0x2C + VK_INSERT = 0x2D + VK_DELETE = 0x2E + VK_HELP = 0x2F + VK_LWIN = 0x5B + VK_RWIN = 0x5C + VK_APPS = 0x5D + VK_SLEEP = 0x5F + VK_NUMPAD0 = 0x60 + VK_NUMPAD1 = 0x61 + VK_NUMPAD2 = 0x62 + VK_NUMPAD3 = 0x63 + VK_NUMPAD4 = 0x64 + VK_NUMPAD5 = 0x65 + VK_NUMPAD6 = 0x66 + VK_NUMPAD7 = 0x67 + VK_NUMPAD8 = 0x68 + VK_NUMPAD9 = 0x69 + VK_MULTIPLY = 0x6A + VK_ADD = 0x6B + VK_SEPARATOR = 0x6C + VK_SUBTRACT = 0x6D + VK_DECIMAL = 0x6E + VK_DIVIDE = 0x6F + VK_F1 = 0x70 + VK_F2 = 0x71 + VK_F3 = 0x72 + VK_F4 = 0x73 + VK_F5 = 0x74 + VK_F6 = 0x75 + VK_F7 = 0x76 + VK_F8 = 0x77 + VK_F9 = 0x78 + VK_F10 = 0x79 + VK_F11 = 0x7A + VK_F12 = 0x7B + VK_F13 = 0x7C + VK_F14 = 0x7D + VK_F15 = 0x7E + VK_F16 = 0x7F + VK_F17 = 0x80 + VK_F18 = 0x81 + VK_F19 = 0x82 + VK_F20 = 0x83 + VK_F21 = 0x84 + VK_F22 = 0x85 + VK_F23 = 0x86 + VK_F24 = 0x87 + VK_NUMLOCK = 0x90 + VK_SCROLL = 0x91 + VK_OEM_NEC_EQUAL = 0x92 + VK_OEM_FJ_JISHO = 0x92 + VK_OEM_FJ_MASSHOU = 0x93 + VK_OEM_FJ_TOUROKU = 0x94 + VK_OEM_FJ_LOYA = 0x95 + VK_OEM_FJ_ROYA = 0x96 + VK_LSHIFT = 0xA0 + VK_RSHIFT = 0xA1 + VK_LCONTROL = 0xA2 + VK_RCONTROL = 0xA3 + VK_LMENU = 0xA4 + VK_RMENU = 0xA5 + VK_BROWSER_BACK = 0xA6 + VK_BROWSER_FORWARD = 0xA7 + VK_BROWSER_REFRESH = 0xA8 + VK_BROWSER_STOP = 0xA9 + VK_BROWSER_SEARCH = 0xAA + VK_BROWSER_FAVORITES = 0xAB + VK_BROWSER_HOME = 0xAC + VK_VOLUME_MUTE = 0xAD + VK_VOLUME_DOWN = 0xAE + VK_VOLUME_UP = 0xAF + VK_MEDIA_NEXT_TRACK = 0xB0 + VK_MEDIA_PREV_TRACK = 0xB1 + VK_MEDIA_STOP = 0xB2 + VK_MEDIA_PLAY_PAUSE = 0xB3 + VK_LAUNCH_MAIL = 0xB4 + VK_LAUNCH_MEDIA_SELECT = 0xB5 + VK_LAUNCH_APP1 = 0xB6 + VK_LAUNCH_APP2 = 0xB7 + VK_OEM_1 = 0xBA + VK_OEM_PLUS = 0xBB + VK_OEM_COMMA = 0xBC + VK_OEM_MINUS = 0xBD + VK_OEM_PERIOD = 0xBE + VK_OEM_2 = 0xBF + VK_OEM_3 = 0xC0 + VK_OEM_4 = 0xDB + VK_OEM_5 = 0xDC + VK_OEM_6 = 0xDD + VK_OEM_7 = 0xDE + VK_OEM_8 = 0xDF + VK_OEM_AX = 0xE1 + VK_OEM_102 = 0xE2 + VK_ICO_HELP = 0xE3 + VK_ICO_00 = 0xE4 + VK_PROCESSKEY = 0xE5 + VK_ICO_CLEAR = 0xE6 + VK_OEM_RESET = 0xE9 + VK_OEM_JUMP = 0xEA + VK_OEM_PA1 = 0xEB + VK_OEM_PA2 = 0xEC + VK_OEM_PA3 = 0xED + VK_OEM_WSCTRL = 0xEE + VK_OEM_CUSEL = 0xEF + VK_OEM_ATTN = 0xF0 + VK_OEM_FINISH = 0xF1 + VK_OEM_COPY = 0xF2 + VK_OEM_AUTO = 0xF3 + VK_OEM_ENLW = 0xF4 + VK_OEM_BACKTAB = 0xF5 + VK_ATTN = 0xF6 + VK_CRSEL = 0xF7 + VK_EXSEL = 0xF8 + VK_EREOF = 0xF9 + VK_PLAY = 0xFA + VK_ZOOM = 0xFB + VK_NONAME = 0xFC + VK_PA1 = 0xFD + VK_OEM_CLEAR = 0xFE +) + +// Mouse button constants. 
+// https://docs.microsoft.com/en-us/windows/console/mouse-event-record-str
+const (
+	FROM_LEFT_1ST_BUTTON_PRESSED = 0x0001
+	RIGHTMOST_BUTTON_PRESSED     = 0x0002
+	FROM_LEFT_2ND_BUTTON_PRESSED = 0x0004
+	FROM_LEFT_3RD_BUTTON_PRESSED = 0x0008
+	FROM_LEFT_4TH_BUTTON_PRESSED = 0x0010
+)
+
+// Control key state constraints.
+// https://docs.microsoft.com/en-us/windows/console/key-event-record-str
+// https://docs.microsoft.com/en-us/windows/console/mouse-event-record-str
+const (
+	CAPSLOCK_ON        = 0x0080
+	ENHANCED_KEY       = 0x0100
+	LEFT_ALT_PRESSED   = 0x0002
+	LEFT_CTRL_PRESSED  = 0x0008
+	NUMLOCK_ON         = 0x0020
+	RIGHT_ALT_PRESSED  = 0x0001
+	RIGHT_CTRL_PRESSED = 0x0004
+	SCROLLLOCK_ON      = 0x0040
+	SHIFT_PRESSED      = 0x0010
+)
+
+// Mouse event record event flags.
+// https://docs.microsoft.com/en-us/windows/console/mouse-event-record-str
+const (
+	MOUSE_MOVED    = 0x0001
+	DOUBLE_CLICK   = 0x0002
+	MOUSE_WHEELED  = 0x0004
+	MOUSE_HWHEELED = 0x0008
+)
+
+// Input Record Event Types
+// https://learn.microsoft.com/en-us/windows/console/input-record-str
+const (
+	FOCUS_EVENT              = 0x0010
+	KEY_EVENT                = 0x0001
+	MENU_EVENT               = 0x0008
+	MOUSE_EVENT              = 0x0002
+	WINDOW_BUFFER_SIZE_EVENT = 0x0004
+)
diff --git a/vendor/golang.org/x/sys/windows/zsyscall_windows.go b/vendor/golang.org/x/sys/windows/zsyscall_windows.go
index 01c0716c..a58bc48b 100644
--- a/vendor/golang.org/x/sys/windows/zsyscall_windows.go
+++ b/vendor/golang.org/x/sys/windows/zsyscall_windows.go
@@ -511,6 +511,7 @@ var (
 	procFreeAddrInfoW = modws2_32.NewProc("FreeAddrInfoW")
 	procGetAddrInfoW = modws2_32.NewProc("GetAddrInfoW")
 	procWSACleanup = modws2_32.NewProc("WSACleanup")
+	procWSADuplicateSocketW = modws2_32.NewProc("WSADuplicateSocketW")
 	procWSAEnumProtocolsW = modws2_32.NewProc("WSAEnumProtocolsW")
 	procWSAGetOverlappedResult = modws2_32.NewProc("WSAGetOverlappedResult")
 	procWSAIoctl = modws2_32.NewProc("WSAIoctl")
@@ -4391,6 +4392,14 @@ func WSACleanup() (err error) {
 	return
 }
 
+func WSADuplicateSocket(s Handle, processID uint32, info *WSAProtocolInfo) (err error) {
+	r1, _, e1 := syscall.Syscall(procWSADuplicateSocketW.Addr(), 3, uintptr(s), uintptr(processID), uintptr(unsafe.Pointer(info)))
+	if r1 != 0 {
+		err = errnoErr(e1)
+	}
+	return
+}
+
 func WSAEnumProtocols(protocols *int32, protocolBuffer *WSAProtocolInfo, bufferLength *uint32) (n int32, err error) {
 	r0, _, e1 := syscall.Syscall(procWSAEnumProtocolsW.Addr(), 3, uintptr(unsafe.Pointer(protocols)), uintptr(unsafe.Pointer(protocolBuffer)), uintptr(unsafe.Pointer(bufferLength)))
 	n = int32(r0)
diff --git a/vendor/gorm.io/gorm/.golangci.yml b/vendor/gorm.io/gorm/.golangci.yml
index b88bf672..6c48152c 100644
--- a/vendor/gorm.io/gorm/.golangci.yml
+++ b/vendor/gorm.io/gorm/.golangci.yml
@@ -1,7 +1,9 @@
+version: "2"
+
 linters:
+  default: standard
   enable:
     - cyclop
-    - exportloopref
     - gocritic
     - gosec
     - ineffassign
     - prealloc
     - unconvert
     - unparam
-    - goimports
     - whitespace
-linters-settings:
-  whitespace:
-    multi-func: true
-  goimports:
-    local-prefixes: gorm.io/gorm
-
+formatters:
+  enable:
+    - gofumpt
+    - goimports
diff --git a/vendor/gorm.io/gorm/CODE_OF_CONDUCT.md b/vendor/gorm.io/gorm/CODE_OF_CONDUCT.md
new file mode 100644
index 00000000..60117926
--- /dev/null
+++ b/vendor/gorm.io/gorm/CODE_OF_CONDUCT.md
@@ -0,0 +1,128 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, 
ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, religion, or sexual identity
+and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+* Demonstrating empathy and kindness toward other people
+* Being respectful of differing opinions, viewpoints, and experiences
+* Giving and gracefully accepting constructive feedback
+* Accepting responsibility and apologizing to those affected by our mistakes,
+  and learning from the experience
+* Focusing on what is best not just for us as individuals, but for the
+  overall community
+
+Examples of unacceptable behavior include:
+
+* The use of sexualized language or imagery, and sexual attention or
+  advances of any kind
+* Trolling, insulting or derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or email
+  address, without their explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement at
+.
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing
+clarity around the nature of the violation and an explanation of why the
+behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series
+of actions.
+
+**Consequence**: A warning with consequences for continued behavior. No
+interaction with the people involved, including unsolicited interaction with
+those enforcing the Code of Conduct, for a specified period.
This +includes avoiding interactions in community spaces and external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any interaction or public +communication with the community for a specified period. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/vendor/gorm.io/gorm/LICENSE b/vendor/gorm.io/gorm/LICENSE index 037e1653..52964f13 100644 --- a/vendor/gorm.io/gorm/LICENSE +++ b/vendor/gorm.io/gorm/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2013-NOW Jinzhu +Copyright (c) 2013-present Jinzhu Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/gorm.io/gorm/callbacks/associations.go b/vendor/gorm.io/gorm/callbacks/associations.go index f3cd464a..67531127 100644 --- a/vendor/gorm.io/gorm/callbacks/associations.go +++ b/vendor/gorm.io/gorm/callbacks/associations.go @@ -47,7 +47,7 @@ func SaveBeforeAssociations(create bool) func(db *gorm.DB) { ) if !isPtr { - fieldType = reflect.PtrTo(fieldType) + fieldType = reflect.PointerTo(fieldType) } elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) @@ -126,7 +126,7 @@ func SaveAfterAssociations(create bool) func(db *gorm.DB) { ) if !isPtr { - fieldType = reflect.PtrTo(fieldType) + fieldType = reflect.PointerTo(fieldType) } elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) @@ -195,7 +195,7 @@ func SaveAfterAssociations(create bool) func(db *gorm.DB) { fieldType := rel.Field.IndirectFieldType.Elem() isPtr := fieldType.Kind() == reflect.Ptr if !isPtr { - fieldType = reflect.PtrTo(fieldType) + fieldType = reflect.PointerTo(fieldType) } elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) identityMap := map[string]bool{} @@ -268,11 +268,11 @@ func SaveAfterAssociations(create bool) func(db *gorm.DB) { fieldType := rel.Field.IndirectFieldType.Elem() isPtr := fieldType.Kind() == reflect.Ptr if !isPtr { - fieldType = reflect.PtrTo(fieldType) + fieldType = reflect.PointerTo(fieldType) } elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) distinctElems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10) - joins := 
reflect.MakeSlice(reflect.SliceOf(reflect.PtrTo(rel.JoinTable.ModelType)), 0, 10) + joins := reflect.MakeSlice(reflect.SliceOf(reflect.PointerTo(rel.JoinTable.ModelType)), 0, 10) objs := []reflect.Value{} appendToJoins := func(obj reflect.Value, elem reflect.Value) { diff --git a/vendor/gorm.io/gorm/clause/returning.go b/vendor/gorm.io/gorm/clause/returning.go index d94b7a4c..76064c4a 100644 --- a/vendor/gorm.io/gorm/clause/returning.go +++ b/vendor/gorm.io/gorm/clause/returning.go @@ -26,9 +26,12 @@ func (returning Returning) Build(builder Builder) { // MergeClause merge order by clauses func (returning Returning) MergeClause(clause *Clause) { - if v, ok := clause.Expression.(Returning); ok { - returning.Columns = append(v.Columns, returning.Columns...) + if v, ok := clause.Expression.(Returning); ok && len(returning.Columns) > 0 { + if v.Columns != nil { + returning.Columns = append(v.Columns, returning.Columns...) + } else { + returning.Columns = nil + } } - clause.Expression = returning } diff --git a/vendor/gorm.io/gorm/finisher_api.go b/vendor/gorm.io/gorm/finisher_api.go index f97571ed..6802945c 100644 --- a/vendor/gorm.io/gorm/finisher_api.go +++ b/vendor/gorm.io/gorm/finisher_api.go @@ -4,6 +4,7 @@ import ( "database/sql" "errors" "fmt" + "hash/maphash" "reflect" "strings" @@ -623,14 +624,15 @@ func (db *DB) Transaction(fc func(tx *DB) error, opts ...*sql.TxOptions) (err er if committer, ok := db.Statement.ConnPool.(TxCommitter); ok && committer != nil { // nested transaction if !db.DisableNestedTransaction { - err = db.SavePoint(fmt.Sprintf("sp%p", fc)).Error + spID := new(maphash.Hash).Sum64() + err = db.SavePoint(fmt.Sprintf("sp%d", spID)).Error if err != nil { return } defer func() { // Make sure to rollback when panic, Block error or Commit error if panicked || err != nil { - db.RollbackTo(fmt.Sprintf("sp%p", fc)) + db.RollbackTo(fmt.Sprintf("sp%d", spID)) } }() } diff --git a/vendor/gorm.io/gorm/gorm.go b/vendor/gorm.io/gorm/gorm.go index 117d2fd0..63a28b37 100644 --- a/vendor/gorm.io/gorm/gorm.go +++ b/vendor/gorm.io/gorm/gorm.go @@ -34,6 +34,11 @@ type Config struct { DryRun bool // PrepareStmt executes the given query in cached statement PrepareStmt bool + // PrepareStmt cache support LRU expired, + // default maxsize=int64 Max value and ttl=1h + PrepareStmtMaxSize int + PrepareStmtTTL time.Duration + // DisableAutomaticPing DisableAutomaticPing bool // DisableForeignKeyConstraintWhenMigrating @@ -183,16 +188,21 @@ func Open(dialector Dialector, opts ...Option) (db *DB, err error) { if config.Dialector != nil { err = config.Dialector.Initialize(db) - if err != nil { if db, _ := db.DB(); db != nil { _ = db.Close() } } + + if config.TranslateError { + if _, ok := db.Dialector.(ErrorTranslator); !ok { + config.Logger.Warn(context.Background(), "The TranslateError option is enabled, but the Dialector %s does not implement ErrorTranslator.", db.Dialector.Name()) + } + } } if config.PrepareStmt { - preparedStmt := NewPreparedStmtDB(db.ConnPool) + preparedStmt := NewPreparedStmtDB(db.ConnPool, config.PrepareStmtMaxSize, config.PrepareStmtTTL) db.cacheStore.Store(preparedStmtDBKey, preparedStmt) db.ConnPool = preparedStmt } @@ -263,7 +273,7 @@ func (db *DB) Session(config *Session) *DB { if v, ok := db.cacheStore.Load(preparedStmtDBKey); ok { preparedStmt = v.(*PreparedStmtDB) } else { - preparedStmt = NewPreparedStmtDB(db.ConnPool) + preparedStmt = NewPreparedStmtDB(db.ConnPool, db.PrepareStmtMaxSize, db.PrepareStmtTTL) db.cacheStore.Store(preparedStmtDBKey, preparedStmt) } diff 
--git a/vendor/gorm.io/gorm/internal/lru/lru.go b/vendor/gorm.io/gorm/internal/lru/lru.go new file mode 100644 index 00000000..4f21589a --- /dev/null +++ b/vendor/gorm.io/gorm/internal/lru/lru.go @@ -0,0 +1,493 @@ +package lru + +// golang -lru +// https://github.com/hashicorp/golang-lru +import ( + "sync" + "time" +) + +// EvictCallback is used to get a callback when a cache entry is evicted +type EvictCallback[K comparable, V any] func(key K, value V) + +// LRU implements a thread-safe LRU with expirable entries. +type LRU[K comparable, V any] struct { + size int + evictList *LruList[K, V] + items map[K]*Entry[K, V] + onEvict EvictCallback[K, V] + + // expirable options + mu sync.Mutex + ttl time.Duration + done chan struct{} + + // buckets for expiration + buckets []bucket[K, V] + // uint8 because it's number between 0 and numBuckets + nextCleanupBucket uint8 +} + +// bucket is a container for holding entries to be expired +type bucket[K comparable, V any] struct { + entries map[K]*Entry[K, V] + newestEntry time.Time +} + +// noEvictionTTL - very long ttl to prevent eviction +const noEvictionTTL = time.Hour * 24 * 365 * 10 + +// because of uint8 usage for nextCleanupBucket, should not exceed 256. +// casting it as uint8 explicitly requires type conversions in multiple places +const numBuckets = 100 + +// NewLRU returns a new thread-safe cache with expirable entries. +// +// Size parameter set to 0 makes cache of unlimited size, e.g. turns LRU mechanism off. +// +// Providing 0 TTL turns expiring off. +// +// Delete expired entries every 1/100th of ttl value. Goroutine which deletes expired entries runs indefinitely. +func NewLRU[K comparable, V any](size int, onEvict EvictCallback[K, V], ttl time.Duration) *LRU[K, V] { + if size < 0 { + size = 0 + } + if ttl <= 0 { + ttl = noEvictionTTL + } + + res := LRU[K, V]{ + ttl: ttl, + size: size, + evictList: NewList[K, V](), + items: make(map[K]*Entry[K, V]), + onEvict: onEvict, + done: make(chan struct{}), + } + + // initialize the buckets + res.buckets = make([]bucket[K, V], numBuckets) + for i := 0; i < numBuckets; i++ { + res.buckets[i] = bucket[K, V]{entries: make(map[K]*Entry[K, V])} + } + + // enable deleteExpired() running in separate goroutine for cache with non-zero TTL + // + // Important: done channel is never closed, so deleteExpired() goroutine will never exit, + // it's decided to add functionality to close it in the version later than v2. + if res.ttl != noEvictionTTL { + go func(done <-chan struct{}) { + ticker := time.NewTicker(res.ttl / numBuckets) + defer ticker.Stop() + for { + select { + case <-done: + return + case <-ticker.C: + res.deleteExpired() + } + } + }(res.done) + } + return &res +} + +// Purge clears the cache completely. +// onEvict is called for each evicted key. +func (c *LRU[K, V]) Purge() { + c.mu.Lock() + defer c.mu.Unlock() + for k, v := range c.items { + if c.onEvict != nil { + c.onEvict(k, v.Value) + } + delete(c.items, k) + } + for _, b := range c.buckets { + for _, ent := range b.entries { + delete(b.entries, ent.Key) + } + } + c.evictList.Init() +} + +// Add adds a value to the cache. Returns true if an eviction occurred. +// Returns false if there was no eviction: the item was already in the cache, +// or the size was not exceeded. 
+func (c *LRU[K, V]) Add(key K, value V) (evicted bool) { + c.mu.Lock() + defer c.mu.Unlock() + now := time.Now() + + // Check for existing item + if ent, ok := c.items[key]; ok { + c.evictList.MoveToFront(ent) + c.removeFromBucket(ent) // remove the entry from its current bucket as expiresAt is renewed + ent.Value = value + ent.ExpiresAt = now.Add(c.ttl) + c.addToBucket(ent) + return false + } + + // Add new item + ent := c.evictList.PushFrontExpirable(key, value, now.Add(c.ttl)) + c.items[key] = ent + c.addToBucket(ent) // adds the entry to the appropriate bucket and sets entry.expireBucket + + evict := c.size > 0 && c.evictList.Length() > c.size + // Verify size not exceeded + if evict { + c.removeOldest() + } + return evict +} + +// Get looks up a key's value from the cache. +func (c *LRU[K, V]) Get(key K) (value V, ok bool) { + c.mu.Lock() + defer c.mu.Unlock() + var ent *Entry[K, V] + if ent, ok = c.items[key]; ok { + // Expired item check + if time.Now().After(ent.ExpiresAt) { + return value, false + } + c.evictList.MoveToFront(ent) + return ent.Value, true + } + return +} + +// Contains checks if a key is in the cache, without updating the recent-ness +// or deleting it for being stale. +func (c *LRU[K, V]) Contains(key K) (ok bool) { + c.mu.Lock() + defer c.mu.Unlock() + _, ok = c.items[key] + return ok +} + +// Peek returns the key value (or undefined if not found) without updating +// the "recently used"-ness of the key. +func (c *LRU[K, V]) Peek(key K) (value V, ok bool) { + c.mu.Lock() + defer c.mu.Unlock() + var ent *Entry[K, V] + if ent, ok = c.items[key]; ok { + // Expired item check + if time.Now().After(ent.ExpiresAt) { + return value, false + } + return ent.Value, true + } + return +} + +// Remove removes the provided key from the cache, returning if the +// key was contained. +func (c *LRU[K, V]) Remove(key K) bool { + c.mu.Lock() + defer c.mu.Unlock() + if ent, ok := c.items[key]; ok { + c.removeElement(ent) + return true + } + return false +} + +// RemoveOldest removes the oldest item from the cache. +func (c *LRU[K, V]) RemoveOldest() (key K, value V, ok bool) { + c.mu.Lock() + defer c.mu.Unlock() + if ent := c.evictList.Back(); ent != nil { + c.removeElement(ent) + return ent.Key, ent.Value, true + } + return +} + +// GetOldest returns the oldest entry +func (c *LRU[K, V]) GetOldest() (key K, value V, ok bool) { + c.mu.Lock() + defer c.mu.Unlock() + if ent := c.evictList.Back(); ent != nil { + return ent.Key, ent.Value, true + } + return +} + +func (c *LRU[K, V]) KeyValues() map[K]V { + c.mu.Lock() + defer c.mu.Unlock() + maps := make(map[K]V) + now := time.Now() + for ent := c.evictList.Back(); ent != nil; ent = ent.PrevEntry() { + if now.After(ent.ExpiresAt) { + continue + } + maps[ent.Key] = ent.Value + // keys = append(keys, ent.Key) + } + return maps +} + +// Keys returns a slice of the keys in the cache, from oldest to newest. +// Expired entries are filtered out. +func (c *LRU[K, V]) Keys() []K { + c.mu.Lock() + defer c.mu.Unlock() + keys := make([]K, 0, len(c.items)) + now := time.Now() + for ent := c.evictList.Back(); ent != nil; ent = ent.PrevEntry() { + if now.After(ent.ExpiresAt) { + continue + } + keys = append(keys, ent.Key) + } + return keys +} + +// Values returns a slice of the values in the cache, from oldest to newest. +// Expired entries are filtered out. 
+func (c *LRU[K, V]) Values() []V { + c.mu.Lock() + defer c.mu.Unlock() + values := make([]V, 0, len(c.items)) + now := time.Now() + for ent := c.evictList.Back(); ent != nil; ent = ent.PrevEntry() { + if now.After(ent.ExpiresAt) { + continue + } + values = append(values, ent.Value) + } + return values +} + +// Len returns the number of items in the cache. +func (c *LRU[K, V]) Len() int { + c.mu.Lock() + defer c.mu.Unlock() + return c.evictList.Length() +} + +// Resize changes the cache size. Size of 0 means unlimited. +func (c *LRU[K, V]) Resize(size int) (evicted int) { + c.mu.Lock() + defer c.mu.Unlock() + if size <= 0 { + c.size = 0 + return 0 + } + diff := c.evictList.Length() - size + if diff < 0 { + diff = 0 + } + for i := 0; i < diff; i++ { + c.removeOldest() + } + c.size = size + return diff +} + +// Close destroys cleanup goroutine. To clean up the cache, run Purge() before Close(). +// func (c *LRU[K, V]) Close() { +// c.mu.Lock() +// defer c.mu.Unlock() +// select { +// case <-c.done: +// return +// default: +// } +// close(c.done) +// } + +// removeOldest removes the oldest item from the cache. Has to be called with lock! +func (c *LRU[K, V]) removeOldest() { + if ent := c.evictList.Back(); ent != nil { + c.removeElement(ent) + } +} + +// removeElement is used to remove a given list element from the cache. Has to be called with lock! +func (c *LRU[K, V]) removeElement(e *Entry[K, V]) { + c.evictList.Remove(e) + delete(c.items, e.Key) + c.removeFromBucket(e) + if c.onEvict != nil { + c.onEvict(e.Key, e.Value) + } +} + +// deleteExpired deletes expired records from the oldest bucket, waiting for the newest entry +// in it to expire first. +func (c *LRU[K, V]) deleteExpired() { + c.mu.Lock() + bucketIdx := c.nextCleanupBucket + timeToExpire := time.Until(c.buckets[bucketIdx].newestEntry) + // wait for newest entry to expire before cleanup without holding lock + if timeToExpire > 0 { + c.mu.Unlock() + time.Sleep(timeToExpire) + c.mu.Lock() + } + for _, ent := range c.buckets[bucketIdx].entries { + c.removeElement(ent) + } + c.nextCleanupBucket = (c.nextCleanupBucket + 1) % numBuckets + c.mu.Unlock() +} + +// addToBucket adds entry to expire bucket so that it will be cleaned up when the time comes. Has to be called with lock! +func (c *LRU[K, V]) addToBucket(e *Entry[K, V]) { + bucketID := (numBuckets + c.nextCleanupBucket - 1) % numBuckets + e.ExpireBucket = bucketID + c.buckets[bucketID].entries[e.Key] = e + if c.buckets[bucketID].newestEntry.Before(e.ExpiresAt) { + c.buckets[bucketID].newestEntry = e.ExpiresAt + } +} + +// removeFromBucket removes the entry from its corresponding bucket. Has to be called with lock! +func (c *LRU[K, V]) removeFromBucket(e *Entry[K, V]) { + delete(c.buckets[e.ExpireBucket].entries, e.Key) +} + +// Cap returns the capacity of the cache +func (c *LRU[K, V]) Cap() int { + return c.size +} + +// Entry is an LRU Entry +type Entry[K comparable, V any] struct { + // Next and previous pointers in the doubly-linked list of elements. + // To simplify the implementation, internally a list l is implemented + // as a ring, such that &l.root is both the next element of the last + // list element (l.Back()) and the previous element of the first list + // element (l.Front()). + next, prev *Entry[K, V] + + // The list to which this element belongs. + list *LruList[K, V] + + // The LRU Key of this element. + Key K + + // The Value stored with this element. 
+ Value V + + // The time this element would be cleaned up, optional + ExpiresAt time.Time + + // The expiry bucket item was put in, optional + ExpireBucket uint8 +} + +// PrevEntry returns the previous list element or nil. +func (e *Entry[K, V]) PrevEntry() *Entry[K, V] { + if p := e.prev; e.list != nil && p != &e.list.root { + return p + } + return nil +} + +// LruList represents a doubly linked list. +// The zero Value for LruList is an empty list ready to use. +type LruList[K comparable, V any] struct { + root Entry[K, V] // sentinel list element, only &root, root.prev, and root.next are used + len int // current list Length excluding (this) sentinel element +} + +// Init initializes or clears list l. +func (l *LruList[K, V]) Init() *LruList[K, V] { + l.root.next = &l.root + l.root.prev = &l.root + l.len = 0 + return l +} + +// NewList returns an initialized list. +func NewList[K comparable, V any]() *LruList[K, V] { return new(LruList[K, V]).Init() } + +// Length returns the number of elements of list l. +// The complexity is O(1). +func (l *LruList[K, V]) Length() int { return l.len } + +// Back returns the last element of list l or nil if the list is empty. +func (l *LruList[K, V]) Back() *Entry[K, V] { + if l.len == 0 { + return nil + } + return l.root.prev +} + +// lazyInit lazily initializes a zero List Value. +func (l *LruList[K, V]) lazyInit() { + if l.root.next == nil { + l.Init() + } +} + +// insert inserts e after at, increments l.len, and returns e. +func (l *LruList[K, V]) insert(e, at *Entry[K, V]) *Entry[K, V] { + e.prev = at + e.next = at.next + e.prev.next = e + e.next.prev = e + e.list = l + l.len++ + return e +} + +// insertValue is a convenience wrapper for insert(&Entry{Value: v, ExpiresAt: ExpiresAt}, at). +func (l *LruList[K, V]) insertValue(k K, v V, expiresAt time.Time, at *Entry[K, V]) *Entry[K, V] { + return l.insert(&Entry[K, V]{Value: v, Key: k, ExpiresAt: expiresAt}, at) +} + +// Remove removes e from its list, decrements l.len +func (l *LruList[K, V]) Remove(e *Entry[K, V]) V { + e.prev.next = e.next + e.next.prev = e.prev + e.next = nil // avoid memory leaks + e.prev = nil // avoid memory leaks + e.list = nil + l.len-- + + return e.Value +} + +// move moves e to next to at. +func (l *LruList[K, V]) move(e, at *Entry[K, V]) { + if e == at { + return + } + e.prev.next = e.next + e.next.prev = e.prev + + e.prev = at + e.next = at.next + e.prev.next = e + e.next.prev = e +} + +// PushFront inserts a new element e with value v at the front of list l and returns e. +func (l *LruList[K, V]) PushFront(k K, v V) *Entry[K, V] { + l.lazyInit() + return l.insertValue(k, v, time.Time{}, &l.root) +} + +// PushFrontExpirable inserts a new expirable element e with Value v at the front of list l and returns e. +func (l *LruList[K, V]) PushFrontExpirable(k K, v V, expiresAt time.Time) *Entry[K, V] { + l.lazyInit() + return l.insertValue(k, v, expiresAt, &l.root) +} + +// MoveToFront moves element e to the front of list l. +// If e is not an element of l, the list is not modified. +// The element must not be nil. 
+func (l *LruList[K, V]) MoveToFront(e *Entry[K, V]) { + if e.list != l || l.root.next == e { + return + } + // see comment in List.Remove about initialization of l + l.move(e, &l.root) +} diff --git a/vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go b/vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go new file mode 100644 index 00000000..a82b2cf5 --- /dev/null +++ b/vendor/gorm.io/gorm/internal/stmt_store/stmt_store.go @@ -0,0 +1,183 @@ +package stmt_store + +import ( + "context" + "database/sql" + "math" + "sync" + "time" + + "gorm.io/gorm/internal/lru" +) + +type Stmt struct { + *sql.Stmt + Transaction bool + prepared chan struct{} + prepareErr error +} + +func (stmt *Stmt) Error() error { + return stmt.prepareErr +} + +func (stmt *Stmt) Close() error { + <-stmt.prepared + + if stmt.Stmt != nil { + return stmt.Stmt.Close() + } + return nil +} + +// Store defines an interface for managing the caching operations of SQL statements (Stmt). +// This interface provides methods for creating new statements, retrieving all cache keys, +// getting cached statements, setting cached statements, and deleting cached statements. +type Store interface { + // New creates a new Stmt object and caches it. + // Parameters: + // ctx: The context for the request, which can carry deadlines, cancellation signals, etc. + // key: The key representing the SQL query, used for caching and preparing the statement. + // isTransaction: Indicates whether this operation is part of a transaction, which may affect the caching strategy. + // connPool: A connection pool that provides database connections. + // locker: A synchronization lock that is unlocked after initialization to avoid deadlocks. + // Returns: + // *Stmt: A newly created statement object for executing SQL operations. + // error: An error if the statement preparation fails. + New(ctx context.Context, key string, isTransaction bool, connPool ConnPool, locker sync.Locker) (*Stmt, error) + + // Keys returns a slice of all cache keys in the store. + Keys() []string + + // Get retrieves a Stmt object from the store based on the given key. + // Parameters: + // key: The key used to look up the Stmt object. + // Returns: + // *Stmt: The found Stmt object, or nil if not found. + // bool: Indicates whether the corresponding Stmt object was successfully found. + Get(key string) (*Stmt, bool) + + // Set stores the given Stmt object in the store and associates it with the specified key. + // Parameters: + // key: The key used to associate the Stmt object. + // value: The Stmt object to be stored. + Set(key string, value *Stmt) + + // Delete removes the Stmt object corresponding to the specified key from the store. + // Parameters: + // key: The key associated with the Stmt object to be deleted. + Delete(key string) +} + +// defaultMaxSize defines the default maximum capacity of the cache. +// Its value is the maximum value of the int64 type, which means that when the cache size is not specified, +// the cache can theoretically store as many elements as possible. +// (1 << 63) - 1 is the maximum value that an int64 type can represent. +const ( + defaultMaxSize = math.MaxInt + // defaultTTL defines the default time-to-live (TTL) for each cache entry. + // When the TTL for cache entries is not specified, each cache entry will expire after 24 hours. + defaultTTL = time.Hour * 24 +) + +// New creates and returns a new Store instance. +// +// Parameters: +// - size: The maximum capacity of the cache. 
If the provided size is less than or equal to 0, +// it defaults to defaultMaxSize. +// - ttl: The time-to-live duration for each cache entry. If the provided ttl is less than or equal to 0, +// it defaults to defaultTTL. +// +// This function defines an onEvicted callback that is invoked when a cache entry is evicted. +// The callback ensures that if the evicted value (v) is not nil, its Close method is called asynchronously +// to release associated resources. +// +// Returns: +// - A Store instance implemented by lruStore, which internally uses an LRU cache with the specified size, +// eviction callback, and TTL. +func New(size int, ttl time.Duration) Store { + if size <= 0 { + size = defaultMaxSize + } + + if ttl <= 0 { + ttl = defaultTTL + } + + onEvicted := func(k string, v *Stmt) { + if v != nil { + go v.Close() + } + } + return &lruStore{lru: lru.NewLRU[string, *Stmt](size, onEvicted, ttl)} +} + +type lruStore struct { + lru *lru.LRU[string, *Stmt] +} + +func (s *lruStore) Keys() []string { + return s.lru.Keys() +} + +func (s *lruStore) Get(key string) (*Stmt, bool) { + stmt, ok := s.lru.Get(key) + if ok && stmt != nil { + <-stmt.prepared + } + return stmt, ok +} + +func (s *lruStore) Set(key string, value *Stmt) { + s.lru.Add(key, value) +} + +func (s *lruStore) Delete(key string) { + s.lru.Remove(key) +} + +type ConnPool interface { + PrepareContext(ctx context.Context, query string) (*sql.Stmt, error) +} + +// New creates a new Stmt object for executing SQL queries. +// It caches the Stmt object for future use and handles preparation and error states. +// Parameters: +// +// ctx: Context for the request, used to carry deadlines, cancellation signals, etc. +// key: The key representing the SQL query, used for caching and preparing the statement. +// isTransaction: Indicates whether this operation is part of a transaction, affecting cache strategy. +// conn: A connection pool that provides database connections. +// locker: A synchronization lock that is unlocked after initialization to avoid deadlocks. +// +// Returns: +// +// *Stmt: A newly created statement object for executing SQL operations. +// error: An error if the statement preparation fails. +func (s *lruStore) New(ctx context.Context, key string, isTransaction bool, conn ConnPool, locker sync.Locker) (_ *Stmt, err error) { + // Create a Stmt object and set its Transaction property. + // The prepared channel is used to synchronize the statement preparation state. + cacheStmt := &Stmt{ + Transaction: isTransaction, + prepared: make(chan struct{}), + } + // Cache the Stmt object with the associated key. + s.Set(key, cacheStmt) + // Unlock after completing initialization to prevent deadlocks. + locker.Unlock() + + // Ensure the prepared channel is closed after the function execution completes. + defer close(cacheStmt.prepared) + + // Prepare the SQL statement using the provided connection. + cacheStmt.Stmt, err = conn.PrepareContext(ctx, key) + if err != nil { + // If statement preparation fails, record the error and remove the invalid Stmt object from the cache. + cacheStmt.prepareErr = err + s.Delete(key) + return &Stmt{}, err + } + + // Return the successfully prepared Stmt object. 
+ return cacheStmt, nil +} diff --git a/vendor/gorm.io/gorm/logger/logger.go b/vendor/gorm.io/gorm/logger/logger.go index 253f0325..8088cde2 100644 --- a/vendor/gorm.io/gorm/logger/logger.go +++ b/vendor/gorm.io/gorm/logger/logger.go @@ -80,6 +80,11 @@ var ( }) // Recorder logger records running SQL into a recorder instance Recorder = traceRecorder{Interface: Default, BeginAt: time.Now()} + + // RecorderParamsFilter defaults to no-op, allows to be run-over by a different implementation + RecorderParamsFilter = func(ctx context.Context, sql string, params ...interface{}) (string, []interface{}) { + return sql, params + } ) // New initialize logger @@ -211,3 +216,10 @@ func (l *traceRecorder) Trace(ctx context.Context, begin time.Time, fc func() (s l.SQL, l.RowsAffected = fc() l.Err = err } + +func (l *traceRecorder) ParamsFilter(ctx context.Context, sql string, params ...interface{}) (string, []interface{}) { + if RecorderParamsFilter == nil { + return sql, params + } + return RecorderParamsFilter(ctx, sql, params...) +} diff --git a/vendor/gorm.io/gorm/migrator/migrator.go b/vendor/gorm.io/gorm/migrator/migrator.go index 189a141f..cec4e30f 100644 --- a/vendor/gorm.io/gorm/migrator/migrator.go +++ b/vendor/gorm.io/gorm/migrator/migrator.go @@ -524,8 +524,8 @@ func (m Migrator) MigrateColumn(value interface{}, field *schema.Field, columnTy // check nullable if nullable, ok := columnType.Nullable(); ok && nullable == field.NotNull { - // not primary key & database is nullable - if !field.PrimaryKey && nullable { + // not primary key & current database is non-nullable(to be nullable) + if !field.PrimaryKey && !nullable { alterColumn = true } } diff --git a/vendor/gorm.io/gorm/prepare_stmt.go b/vendor/gorm.io/gorm/prepare_stmt.go index 094bb477..799df5bc 100644 --- a/vendor/gorm.io/gorm/prepare_stmt.go +++ b/vendor/gorm.io/gorm/prepare_stmt.go @@ -7,29 +7,35 @@ import ( "errors" "reflect" "sync" + "time" + + "gorm.io/gorm/internal/stmt_store" ) -type Stmt struct { - *sql.Stmt - Transaction bool - prepared chan struct{} - prepareErr error -} - type PreparedStmtDB struct { - Stmts map[string]*Stmt + Stmts stmt_store.Store Mux *sync.RWMutex ConnPool } -func NewPreparedStmtDB(connPool ConnPool) *PreparedStmtDB { +// NewPreparedStmtDB creates and initializes a new instance of PreparedStmtDB. +// +// Parameters: +// - connPool: A connection pool that implements the ConnPool interface, used for managing database connections. +// - maxSize: The maximum number of prepared statements that can be stored in the statement store. +// - ttl: The time-to-live duration for each prepared statement in the store. Statements older than this duration will be automatically removed. +// +// Returns: +// - A pointer to a PreparedStmtDB instance, which manages prepared statements using the provided connection pool and configuration. +func NewPreparedStmtDB(connPool ConnPool, maxSize int, ttl time.Duration) *PreparedStmtDB { return &PreparedStmtDB{ - ConnPool: connPool, - Stmts: make(map[string]*Stmt), - Mux: &sync.RWMutex{}, + ConnPool: connPool, // Assigns the provided connection pool to manage database connections. + Stmts: stmt_store.New(maxSize, ttl), // Initializes a new statement store with the specified maximum size and TTL. + Mux: &sync.RWMutex{}, // Sets up a read-write mutex for synchronizing access to the statement store. 
} } +// GetDBConn returns the underlying *sql.DB connection func (db *PreparedStmtDB) GetDBConn() (*sql.DB, error) { if sqldb, ok := db.ConnPool.(*sql.DB); ok { return sqldb, nil @@ -42,98 +48,41 @@ func (db *PreparedStmtDB) GetDBConn() (*sql.DB, error) { return nil, ErrInvalidDB } +// Close closes all prepared statements in the store func (db *PreparedStmtDB) Close() { db.Mux.Lock() defer db.Mux.Unlock() - for _, stmt := range db.Stmts { - go func(s *Stmt) { - // make sure the stmt must finish preparation first - <-s.prepared - if s.Stmt != nil { - _ = s.Close() - } - }(stmt) + for _, key := range db.Stmts.Keys() { + db.Stmts.Delete(key) } - // setting db.Stmts to nil to avoid further using - db.Stmts = nil } -func (sdb *PreparedStmtDB) Reset() { - sdb.Mux.Lock() - defer sdb.Mux.Unlock() - - for _, stmt := range sdb.Stmts { - go func(s *Stmt) { - // make sure the stmt must finish preparation first - <-s.prepared - if s.Stmt != nil { - _ = s.Close() - } - }(stmt) - } - sdb.Stmts = make(map[string]*Stmt) +// Reset Deprecated use Close instead +func (db *PreparedStmtDB) Reset() { + db.Close() } -func (db *PreparedStmtDB) prepare(ctx context.Context, conn ConnPool, isTransaction bool, query string) (Stmt, error) { +func (db *PreparedStmtDB) prepare(ctx context.Context, conn ConnPool, isTransaction bool, query string) (_ *stmt_store.Stmt, err error) { db.Mux.RLock() - if stmt, ok := db.Stmts[query]; ok && (!stmt.Transaction || isTransaction) { - db.Mux.RUnlock() - // wait for other goroutines prepared - <-stmt.prepared - if stmt.prepareErr != nil { - return Stmt{}, stmt.prepareErr + if db.Stmts != nil { + if stmt, ok := db.Stmts.Get(query); ok && (!stmt.Transaction || isTransaction) { + db.Mux.RUnlock() + return stmt, stmt.Error() } - - return *stmt, nil } db.Mux.RUnlock() + // retry db.Mux.Lock() - // double check - if stmt, ok := db.Stmts[query]; ok && (!stmt.Transaction || isTransaction) { - db.Mux.Unlock() - // wait for other goroutines prepared - <-stmt.prepared - if stmt.prepareErr != nil { - return Stmt{}, stmt.prepareErr + if db.Stmts != nil { + if stmt, ok := db.Stmts.Get(query); ok && (!stmt.Transaction || isTransaction) { + db.Mux.Unlock() + return stmt, stmt.Error() } - - return *stmt, nil - } - // check db.Stmts first to avoid Segmentation Fault(setting value to nil map) - // which cause by calling Close and executing SQL concurrently - if db.Stmts == nil { - db.Mux.Unlock() - return Stmt{}, ErrInvalidDB - } - // cache preparing stmt first - cacheStmt := Stmt{Transaction: isTransaction, prepared: make(chan struct{})} - db.Stmts[query] = &cacheStmt - db.Mux.Unlock() - - // prepare completed - defer close(cacheStmt.prepared) - - // Reason why cannot lock conn.PrepareContext - // suppose the maxopen is 1, g1 is creating record and g2 is querying record. - // 1. g1 begin tx, g1 is requeue because of waiting for the system call, now `db.ConnPool` db.numOpen == 1. - // 2. g2 select lock `conn.PrepareContext(ctx, query)`, now db.numOpen == db.maxOpen , wait for release. - // 3. g1 tx exec insert, wait for unlock `conn.PrepareContext(ctx, query)` to finish tx and release. 
- stmt, err := conn.PrepareContext(ctx, query) - if err != nil { - cacheStmt.prepareErr = err - db.Mux.Lock() - delete(db.Stmts, query) - db.Mux.Unlock() - return Stmt{}, err } - db.Mux.Lock() - cacheStmt.Stmt = stmt - db.Mux.Unlock() - - return cacheStmt, nil + return db.Stmts.New(ctx, query, isTransaction, conn, db.Mux) } func (db *PreparedStmtDB) BeginTx(ctx context.Context, opt *sql.TxOptions) (ConnPool, error) { @@ -162,10 +111,7 @@ func (db *PreparedStmtDB) ExecContext(ctx context.Context, query string, args .. if err == nil { result, err = stmt.ExecContext(ctx, args...) if errors.Is(err, driver.ErrBadConn) { - db.Mux.Lock() - defer db.Mux.Unlock() - go stmt.Close() - delete(db.Stmts, query) + db.Stmts.Delete(query) } } return result, err @@ -176,11 +122,7 @@ func (db *PreparedStmtDB) QueryContext(ctx context.Context, query string, args . if err == nil { rows, err = stmt.QueryContext(ctx, args...) if errors.Is(err, driver.ErrBadConn) { - db.Mux.Lock() - defer db.Mux.Unlock() - - go stmt.Close() - delete(db.Stmts, query) + db.Stmts.Delete(query) } } return rows, err @@ -230,11 +172,7 @@ func (tx *PreparedStmtTX) ExecContext(ctx context.Context, query string, args .. if err == nil { result, err = tx.Tx.StmtContext(ctx, stmt.Stmt).ExecContext(ctx, args...) if errors.Is(err, driver.ErrBadConn) { - tx.PreparedStmtDB.Mux.Lock() - defer tx.PreparedStmtDB.Mux.Unlock() - - go stmt.Close() - delete(tx.PreparedStmtDB.Stmts, query) + tx.PreparedStmtDB.Stmts.Delete(query) } } return result, err @@ -245,11 +183,7 @@ func (tx *PreparedStmtTX) QueryContext(ctx context.Context, query string, args . if err == nil { rows, err = tx.Tx.StmtContext(ctx, stmt.Stmt).QueryContext(ctx, args...) if errors.Is(err, driver.ErrBadConn) { - tx.PreparedStmtDB.Mux.Lock() - defer tx.PreparedStmtDB.Mux.Unlock() - - go stmt.Close() - delete(tx.PreparedStmtDB.Stmts, query) + tx.PreparedStmtDB.Stmts.Delete(query) } } return rows, err diff --git a/vendor/gorm.io/gorm/scan.go b/vendor/gorm.io/gorm/scan.go index d852c2c9..6dc55f62 100644 --- a/vendor/gorm.io/gorm/scan.go +++ b/vendor/gorm.io/gorm/scan.go @@ -15,7 +15,7 @@ func prepareValues(values []interface{}, db *DB, columnTypes []*sql.ColumnType, if db.Statement.Schema != nil { for idx, name := range columns { if field := db.Statement.Schema.LookUpField(name); field != nil { - values[idx] = reflect.New(reflect.PtrTo(field.FieldType)).Interface() + values[idx] = reflect.New(reflect.PointerTo(field.FieldType)).Interface() continue } values[idx] = new(interface{}) @@ -23,7 +23,7 @@ func prepareValues(values []interface{}, db *DB, columnTypes []*sql.ColumnType, } else if len(columnTypes) > 0 { for idx, columnType := range columnTypes { if columnType.ScanType() != nil { - values[idx] = reflect.New(reflect.PtrTo(columnType.ScanType())).Interface() + values[idx] = reflect.New(reflect.PointerTo(columnType.ScanType())).Interface() } else { values[idx] = new(interface{}) } diff --git a/vendor/gorm.io/gorm/schema/field.go b/vendor/gorm.io/gorm/schema/field.go index a16c98ab..d1a633ce 100644 --- a/vendor/gorm.io/gorm/schema/field.go +++ b/vendor/gorm.io/gorm/schema/field.go @@ -996,6 +996,6 @@ func (field *Field) setupNewValuePool() { } if field.NewValuePool == nil { - field.NewValuePool = poolInitializer(reflect.PtrTo(field.IndirectFieldType)) + field.NewValuePool = poolInitializer(reflect.PointerTo(field.IndirectFieldType)) } } diff --git a/vendor/gorm.io/gorm/schema/index.go b/vendor/gorm.io/gorm/schema/index.go index f4f36751..a1cdc639 100644 --- 
a/vendor/gorm.io/gorm/schema/index.go +++ b/vendor/gorm.io/gorm/schema/index.go @@ -23,12 +23,13 @@ type IndexOption struct { Sort string // DESC, ASC Collate string Length int - priority int + Priority int } // ParseIndexes parse schema indexes -func (schema *Schema) ParseIndexes() map[string]Index { - indexes := map[string]Index{} +func (schema *Schema) ParseIndexes() []*Index { + indexesByName := map[string]*Index{} + indexes := []*Index{} for _, field := range schema.Fields { if field.TagSettings["INDEX"] != "" || field.TagSettings["UNIQUEINDEX"] != "" { @@ -38,7 +39,12 @@ func (schema *Schema) ParseIndexes() map[string]Index { break } for _, index := range fieldIndexes { - idx := indexes[index.Name] + idx := indexesByName[index.Name] + if idx == nil { + idx = &Index{Name: index.Name} + indexesByName[index.Name] = idx + indexes = append(indexes, idx) + } idx.Name = index.Name if idx.Class == "" { idx.Class = index.Class @@ -58,10 +64,8 @@ func (schema *Schema) ParseIndexes() map[string]Index { idx.Fields = append(idx.Fields, index.Fields...) sort.Slice(idx.Fields, func(i, j int) bool { - return idx.Fields[i].priority < idx.Fields[j].priority + return idx.Fields[i].Priority < idx.Fields[j].Priority }) - - indexes[index.Name] = idx } } } @@ -78,12 +82,12 @@ func (schema *Schema) LookIndex(name string) *Index { indexes := schema.ParseIndexes() for _, index := range indexes { if index.Name == name { - return &index + return index } for _, field := range index.Fields { if field.Name == name { - return &index + return index } } } @@ -111,17 +115,14 @@ func parseFieldIndexes(field *Field) (indexes []Index, err error) { idx = len(tag) } - if idx != -1 { - name = tag[0:idx] - } - + name = tag[0:idx] if name == "" { subName := field.Name const key = "COMPOSITE" if composite, found := settings[key]; found { if len(composite) == 0 || composite == key { err = fmt.Errorf( - "The composite tag of %s.%s cannot be empty", + "the composite tag of %s.%s cannot be empty", field.Schema.Name, field.Name) return @@ -154,7 +155,7 @@ func parseFieldIndexes(field *Field) (indexes []Index, err error) { Sort: settings["SORT"], Collate: settings["COLLATE"], Length: length, - priority: priority, + Priority: priority, }}, }) } diff --git a/vendor/gorm.io/gorm/schema/relationship.go b/vendor/gorm.io/gorm/schema/relationship.go index 32676b39..def4a595 100644 --- a/vendor/gorm.io/gorm/schema/relationship.go +++ b/vendor/gorm.io/gorm/schema/relationship.go @@ -5,6 +5,7 @@ import ( "fmt" "reflect" "strings" + "sync" "github.com/jinzhu/inflection" "golang.org/x/text/cases" @@ -32,6 +33,8 @@ type Relationships struct { Relations map[string]*Relationship EmbeddedRelations map[string]*Relationships + + Mux sync.RWMutex } type Relationship struct { @@ -98,9 +101,10 @@ func (schema *Schema) parseRelation(field *Field) *Relationship { } if relation.Type == has { - // don't add relations to embedded schema, which might be shared if relation.FieldSchema != relation.Schema && relation.Polymorphic == nil && field.OwnerSchema == nil { + relation.FieldSchema.Relationships.Mux.Lock() relation.FieldSchema.Relationships.Relations["_"+relation.Schema.Name+"_"+relation.Name] = relation + relation.FieldSchema.Relationships.Mux.Unlock() } switch field.IndirectFieldType.Kind() { diff --git a/vendor/gorm.io/gorm/schema/utils.go b/vendor/gorm.io/gorm/schema/utils.go index 7fdda185..fa1c65d4 100644 --- a/vendor/gorm.io/gorm/schema/utils.go +++ b/vendor/gorm.io/gorm/schema/utils.go @@ -71,7 +71,7 @@ func appendSettingFromTag(tag 
reflect.StructTag, value string) reflect.StructTag // GetRelationsValues get relations's values from a reflect value func GetRelationsValues(ctx context.Context, reflectValue reflect.Value, rels []*Relationship) (reflectResults reflect.Value) { for _, rel := range rels { - reflectResults = reflect.MakeSlice(reflect.SliceOf(reflect.PtrTo(rel.FieldSchema.ModelType)), 0, 1) + reflectResults = reflect.MakeSlice(reflect.SliceOf(reflect.PointerTo(rel.FieldSchema.ModelType)), 0, 1) appendToResults := func(value reflect.Value) { if _, isZero := rel.Field.ValueOf(ctx, value); !isZero { diff --git a/vendor/modules.txt b/vendor/modules.txt index a9d04a3c..ec6c70d9 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -211,12 +211,12 @@ github.com/prometheus/client_golang/prometheus/promhttp/internal # github.com/prometheus/client_model v0.6.2 ## explicit; go 1.22.0 github.com/prometheus/client_model/go -# github.com/prometheus/common v0.63.0 -## explicit; go 1.21 +# github.com/prometheus/common v0.64.0 +## explicit; go 1.23.0 github.com/prometheus/common/expfmt github.com/prometheus/common/model -# github.com/prometheus/procfs v0.16.0 -## explicit; go 1.21 +# github.com/prometheus/procfs v0.16.1 +## explicit; go 1.23.0 github.com/prometheus/procfs github.com/prometheus/procfs/internal/fs github.com/prometheus/procfs/internal/util @@ -255,14 +255,13 @@ go.mongodb.org/mongo-driver/x/bsonx/bsoncore ## explicit; go 1.22.0 go.opentelemetry.io/auto/sdk go.opentelemetry.io/auto/sdk/internal/telemetry -# go.opentelemetry.io/otel v1.35.0 -## explicit; go 1.22.0 +# go.opentelemetry.io/otel v1.36.0 +## explicit; go 1.23.0 go.opentelemetry.io/otel go.opentelemetry.io/otel/attribute +go.opentelemetry.io/otel/attribute/internal go.opentelemetry.io/otel/baggage go.opentelemetry.io/otel/codes -go.opentelemetry.io/otel/internal -go.opentelemetry.io/otel/internal/attribute go.opentelemetry.io/otel/internal/baggage go.opentelemetry.io/otel/internal/global go.opentelemetry.io/otel/propagation @@ -270,17 +269,17 @@ go.opentelemetry.io/otel/semconv/internal/v2 go.opentelemetry.io/otel/semconv/v1.17.0 go.opentelemetry.io/otel/semconv/v1.17.0/httpconv go.opentelemetry.io/otel/semconv/v1.26.0 -# go.opentelemetry.io/otel/metric v1.35.0 -## explicit; go 1.22.0 +# go.opentelemetry.io/otel/metric v1.36.0 +## explicit; go 1.23.0 go.opentelemetry.io/otel/metric go.opentelemetry.io/otel/metric/embedded -# go.opentelemetry.io/otel/trace v1.35.0 -## explicit; go 1.22.0 +# go.opentelemetry.io/otel/trace v1.36.0 +## explicit; go 1.23.0 go.opentelemetry.io/otel/trace go.opentelemetry.io/otel/trace/embedded go.opentelemetry.io/otel/trace/internal/telemetry go.opentelemetry.io/otel/trace/noop -# golang.org/x/crypto v0.37.0 +# golang.org/x/crypto v0.38.0 ## explicit; go 1.23.0 golang.org/x/crypto/bcrypt golang.org/x/crypto/blowfish @@ -289,23 +288,23 @@ golang.org/x/crypto/chacha20poly1305 golang.org/x/crypto/hkdf golang.org/x/crypto/internal/alias golang.org/x/crypto/internal/poly1305 -# golang.org/x/net v0.39.0 +# golang.org/x/net v0.40.0 ## explicit; go 1.23.0 golang.org/x/net/internal/socks golang.org/x/net/proxy -# golang.org/x/oauth2 v0.29.0 +# golang.org/x/oauth2 v0.30.0 ## explicit; go 1.23.0 golang.org/x/oauth2 golang.org/x/oauth2/internal -# golang.org/x/sync v0.13.0 +# golang.org/x/sync v0.14.0 ## explicit; go 1.23.0 golang.org/x/sync/errgroup -# golang.org/x/sys v0.32.0 +# golang.org/x/sys v0.33.0 ## explicit; go 1.23.0 golang.org/x/sys/cpu golang.org/x/sys/unix golang.org/x/sys/windows -# golang.org/x/text v0.24.0 +# 
golang.org/x/text v0.25.0 ## explicit; go 1.23.0 golang.org/x/text/cases golang.org/x/text/internal @@ -365,11 +364,13 @@ gorm.io/driver/mysql # gorm.io/driver/sqlite v1.5.7 ## explicit; go 1.20 gorm.io/driver/sqlite -# gorm.io/gorm v1.25.12 +# gorm.io/gorm v1.26.1 ## explicit; go 1.18 gorm.io/gorm gorm.io/gorm/callbacks gorm.io/gorm/clause +gorm.io/gorm/internal/lru +gorm.io/gorm/internal/stmt_store gorm.io/gorm/logger gorm.io/gorm/migrator gorm.io/gorm/schema From ae69e8c4732a5ec851f4d590bc859ef9581a45b2 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sat, 24 May 2025 19:33:39 +0000 Subject: [PATCH 093/179] Experiment with a logo Signed-off-by: Gabriel Adrian Samfira --- README.md | 12 +++++- ...-dark.drawio.svg => garm-dark.diagram.svg} | 0 doc/images/garm-dark.svg | 37 +++++++++++++++++++ ...ight.drawio.svg => garm-light.diagram.svg} | 0 doc/images/garm-light.svg | 36 ++++++++++++++++++ 5 files changed, 83 insertions(+), 2 deletions(-) rename doc/images/{garm-dark.drawio.svg => garm-dark.diagram.svg} (100%) create mode 100644 doc/images/garm-dark.svg rename doc/images/{garm-light.drawio.svg => garm-light.diagram.svg} (100%) create mode 100644 doc/images/garm-light.svg diff --git a/README.md b/README.md index 175b0556..ce596781 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,9 @@ + +

+ Light mode image + Dark mode image +

+ # GitHub Actions Runner Manager (GARM) [![Go Tests](https://github.com/cloudbase/garm/actions/workflows/go-tests.yml/badge.svg)](https://github.com/cloudbase/garm/actions/workflows/go-tests.yml) @@ -10,6 +16,8 @@ - [Installing](#installing) - [Quickstart](#quickstart) - [Installing on Kubernetes](#installing-on-kubernetes) + - [Configuring GARM for GHES](#configuring-garm-for-ghes) + - [Configuring GARM for Gitea](#configuring-garm-for-gitea) - [Using GARM](#using-garm) - [Supported providers](#supported-providers) - [Installing external providers](#installing-external-providers) @@ -35,8 +43,8 @@ GARM supports two modes of operation: Here is a brief architectural diagram of how pools work and how GARM reacts to workflows triggered in GitHub (click the image to see a larger version): -![GARM architecture diagram](/doc/images/garm-light.drawio.svg?raw=true#gh-light-mode-only) -![GARM architecture diagram](/doc/images/garm-dark.drawio.svg?raw=true#gh-dark-mode-only) +![GARM architecture diagram](/doc/images/garm-light.diagram.svg?raw=true#gh-light-mode-only) +![GARM architecture diagram](/doc/images/garm-dark.diagram.svg?raw=true#gh-dark-mode-only) **Scale sets** work differently. While pools (as they are defined in GARM) rely on webhooks to know when a job was started and GARM needs to internally make the right decission in terms of which pool should handle that runner, scale sets have a lot of the scheduling and decission making logic done in GitHub itself. diff --git a/doc/images/garm-dark.drawio.svg b/doc/images/garm-dark.diagram.svg similarity index 100% rename from doc/images/garm-dark.drawio.svg rename to doc/images/garm-dark.diagram.svg diff --git a/doc/images/garm-dark.svg b/doc/images/garm-dark.svg new file mode 100644 index 00000000..f0a0c564 --- /dev/null +++ b/doc/images/garm-dark.svg @@ -0,0 +1,37 @@ + + + + + + + + + + + diff --git a/doc/images/garm-light.drawio.svg b/doc/images/garm-light.diagram.svg similarity index 100% rename from doc/images/garm-light.drawio.svg rename to doc/images/garm-light.diagram.svg diff --git a/doc/images/garm-light.svg b/doc/images/garm-light.svg new file mode 100644 index 00000000..2495959d --- /dev/null +++ b/doc/images/garm-light.svg @@ -0,0 +1,36 @@ + + + + + + + + + + From 87055f23da90920516d98942942d664f0897a5db Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sat, 24 May 2025 21:10:05 +0000 Subject: [PATCH 094/179] Make the default github endpoint mutable This change allows users to remove the default github endpoint if no credentials are set on it. A new protection is added on URLs of any endpoint that prevents their update if the endpoint has credentials set. 
Signed-off-by: Gabriel Adrian Samfira --- database/sql/gitea.go | 17 ++++-- database/sql/gitea_test.go | 59 ++++++++++++++++++- database/sql/github.go | 25 ++++----- database/sql/github_test.go | 109 ++++++++++++++++++++++++++++++++++-- database/sql/sql.go | 15 ++++- 5 files changed, 197 insertions(+), 28 deletions(-) diff --git a/database/sql/gitea.go b/database/sql/gitea.go index 3b4c55ec..45dc30e5 100644 --- a/database/sql/gitea.go +++ b/database/sql/gitea.go @@ -94,6 +94,17 @@ func (s *sqlDatabase) UpdateGiteaEndpoint(_ context.Context, name string, param } return errors.Wrap(err, "fetching gitea endpoint") } + + var credsCount int64 + if err := tx.Model(&GiteaCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(err, "fetching gitea credentials") + } + } + if credsCount > 0 && (param.APIBaseURL != nil || param.BaseURL != nil) { + return errors.Wrap(runnerErrors.ErrBadRequest, "cannot update endpoint URLs with existing credentials") + } + if param.APIBaseURL != nil { endpoint.APIBaseURL = *param.APIBaseURL } @@ -140,10 +151,6 @@ func (s *sqlDatabase) GetGiteaEndpoint(_ context.Context, name string) (params.F } func (s *sqlDatabase) DeleteGiteaEndpoint(_ context.Context, name string) (err error) { - if name == defaultGithubEndpoint { - return runnerErrors.NewBadRequestError("cannot delete default endpoint %s", defaultGithubEndpoint) - } - defer func() { if err == nil { s.sendNotify(common.GithubEndpointEntityType, common.DeleteOperation, params.ForgeEndpoint{Name: name}) @@ -180,7 +187,7 @@ func (s *sqlDatabase) DeleteGiteaEndpoint(_ context.Context, name string) (err e } if credsCount > 0 || repoCnt > 0 || orgCnt > 0 { - return runnerErrors.NewBadRequestError("cannot delete endpoint with associated entities") + return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete endpoint with associated entities") } if err := tx.Unscoped().Delete(&endpoint).Error; err != nil { diff --git a/database/sql/gitea_test.go b/database/sql/gitea_test.go index a70d3b1f..7ce6fb02 100644 --- a/database/sql/gitea_test.go +++ b/database/sql/gitea_test.go @@ -162,8 +162,8 @@ func (s *GiteaTestSuite) TestUpdateEndpoint() { s.Require().NotNil(endpoint) newDescription := "another description" - newAPIBaseURL := "https://new-api.example.com" - newBaseURL := "https://new.example.com" + newAPIBaseURL := "https://updated.example.com" + newBaseURL := "https://updated.example.com" caCertBundle, err := os.ReadFile("../../testdata/certs/srv-pub.pem") s.Require().NoError(err) updateEpParams := params.UpdateGiteaEndpointParams{ @@ -770,6 +770,61 @@ func (s *GiteaTestSuite) TestDeleteGiteaEndpointFailsWithOrgsReposOrCredentials( s.Require().ErrorIs(err, runnerErrors.ErrNotFound) } +func (s *GiteaTestSuite) TestUpdateEndpointURLsFailsIfCredentialsAreAssociated() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGiteaEndpointParams{ + Name: "deleteme", + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + BaseURL: testBaseURL, + } + + endpoint, err := s.db.CreateGiteaEndpoint(ctx, createEpParams) + s.Require().NoError(err) + s.Require().NotNil(endpoint) + + credParams := params.CreateGiteaCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: testEndpointName, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + _, err = 
s.db.CreateGiteaCredentials(ctx, credParams) + s.Require().NoError(err) + + newDescription := "new gitea description" + newBaseURL := "https://new-gitea.example.com" + newAPIBaseURL := "https://new-gotea.example.com" + updateEpParams := params.UpdateGiteaEndpointParams{ + BaseURL: &newBaseURL, + } + + _, err = s.db.UpdateGiteaEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().EqualError(err, "updating gitea endpoint: cannot update endpoint URLs with existing credentials: invalid request") + + updateEpParams = params.UpdateGiteaEndpointParams{ + APIBaseURL: &newAPIBaseURL, + } + _, err = s.db.UpdateGiteaEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().EqualError(err, "updating gitea endpoint: cannot update endpoint URLs with existing credentials: invalid request") + + updateEpParams = params.UpdateGiteaEndpointParams{ + Description: &newDescription, + } + ret, err := s.db.UpdateGiteaEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().NoError(err) + s.Require().Equal(newDescription, ret.Description) +} + func (s *GiteaTestSuite) TestListGiteaEndpoints() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) diff --git a/database/sql/github.go b/database/sql/github.go index 8dd20225..0ad52049 100644 --- a/database/sql/github.go +++ b/database/sql/github.go @@ -26,10 +26,6 @@ import ( "github.com/cloudbase/garm/params" ) -const ( - defaultGithubEndpoint string = "github.com" -) - func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.CreateGithubEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) { defer func() { if err == nil { @@ -85,10 +81,6 @@ func (s *sqlDatabase) ListGithubEndpoints(_ context.Context) ([]params.ForgeEndp } func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param params.UpdateGithubEndpointParams) (ghEndpoint params.ForgeEndpoint, err error) { - if name == defaultGithubEndpoint { - return params.ForgeEndpoint{}, errors.Wrap(runnerErrors.ErrBadRequest, "cannot update default github endpoint") - } - defer func() { if err == nil { s.sendNotify(common.GithubEndpointEntityType, common.UpdateOperation, ghEndpoint) @@ -102,6 +94,17 @@ func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param } return errors.Wrap(err, "fetching github endpoint") } + + var credsCount int64 + if err := tx.Model(&GithubCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(err, "fetching github credentials") + } + } + if credsCount > 0 && (param.APIBaseURL != nil || param.BaseURL != nil || param.UploadBaseURL != nil) { + return errors.Wrap(runnerErrors.ErrBadRequest, "cannot update endpoint URLs with existing credentials") + } + if param.APIBaseURL != nil { endpoint.APIBaseURL = *param.APIBaseURL } @@ -153,10 +156,6 @@ func (s *sqlDatabase) GetGithubEndpoint(_ context.Context, name string) (params. 
} func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err error) { - if name == defaultGithubEndpoint { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete default github endpoint") - } - defer func() { if err == nil { s.sendNotify(common.GithubEndpointEntityType, common.DeleteOperation, params.ForgeEndpoint{Name: name}) @@ -200,7 +199,7 @@ func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err } if credsCount > 0 || repoCnt > 0 || orgCnt > 0 || entCnt > 0 { - return runnerErrors.NewBadRequestError("cannot delete endpoint with associated entities") + return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete endpoint with associated entities") } if err := tx.Unscoped().Delete(&endpoint).Error; err != nil { diff --git a/database/sql/github_test.go b/database/sql/github_test.go index 7b99d5e2..cca58a50 100644 --- a/database/sql/github_test.go +++ b/database/sql/github_test.go @@ -41,6 +41,7 @@ const ( testEndpointDescription string = "test description" testCredsName string = "test-creds" testCredsDescription string = "test creds" + defaultGithubEndpoint string = "github.com" ) type GithubTestSuite struct { @@ -57,18 +58,17 @@ func (s *GithubTestSuite) SetupTest() { s.db = db } -func (s *GithubTestSuite) TestDefaultEndpointGetsCreatedAutomatically() { +func (s *GithubTestSuite) TestDefaultEndpointGetsCreatedAutomaticallyIfNoOtherEndpointExists() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) endpoint, err := s.db.GetGithubEndpoint(ctx, defaultGithubEndpoint) s.Require().NoError(err) s.Require().NotNil(endpoint) } -func (s *GithubTestSuite) TestDeletingDefaultEndpointFails() { +func (s *GithubTestSuite) TestDeletingDefaultEndpointWorksIfNoCredentials() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) err := s.db.DeleteGithubEndpoint(ctx, defaultGithubEndpoint) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().NoError(err) } func (s *GithubTestSuite) TestCreatingEndpoint() { @@ -154,6 +154,39 @@ func (s *GithubTestSuite) TestDeletingEndpoint() { s.Require().ErrorIs(err, runnerErrors.ErrNotFound) } +func (s *GithubTestSuite) TestDeleteGithubEndpointFailsWhenCredentialsExist() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGithubEndpointParams{ + Name: testEndpointName, + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + UploadBaseURL: testUploadBaseURL, + BaseURL: testBaseURL, + } + + endpoint, err := s.db.CreateGithubEndpoint(ctx, createEpParams) + s.Require().NoError(err) + s.Require().NotNil(endpoint) + + credParams := params.CreateGithubCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: testEndpointName, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + _, err = s.db.CreateGithubCredentials(ctx, credParams) + s.Require().NoError(err) + + err = s.db.DeleteGithubEndpoint(ctx, testEndpointName) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) +} + func (s *GithubTestSuite) TestUpdateEndpoint() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) @@ -169,7 +202,7 @@ func (s *GithubTestSuite) TestUpdateEndpoint() { s.Require().NoError(err) s.Require().NotNil(endpoint) - newDescription := "new description" + newDescription := "the new description" newAPIBaseURL := "https://new-api.example.com" 
newUploadBaseURL := "https://new-uploads.example.com" newBaseURL := "https://new.example.com" @@ -193,6 +226,72 @@ func (s *GithubTestSuite) TestUpdateEndpoint() { s.Require().Equal(caCertBundle, updatedEndpoint.CACertBundle) } +func (s *GithubTestSuite) TestUpdateEndpointURLsFailsIfCredentialsAreAssociated() { + ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) + + createEpParams := params.CreateGithubEndpointParams{ + Name: testEndpointName, + Description: testEndpointDescription, + APIBaseURL: testAPIBaseURL, + UploadBaseURL: testUploadBaseURL, + BaseURL: testBaseURL, + } + + endpoint, err := s.db.CreateGithubEndpoint(ctx, createEpParams) + s.Require().NoError(err) + s.Require().NotNil(endpoint) + + credParams := params.CreateGithubCredentialsParams{ + Name: testCredsName, + Description: testCredsDescription, + Endpoint: testEndpointName, + AuthType: params.ForgeAuthTypePAT, + PAT: params.GithubPAT{ + OAuth2Token: "test", + }, + } + + _, err = s.db.CreateGithubCredentials(ctx, credParams) + s.Require().NoError(err) + + newDescription := "new description" + newBaseURL := "https://new.example.com" + newAPIBaseURL := "https://new-api.example.com" + newUploadBaseURL := "https://new-uploads.example.com" + updateEpParams := params.UpdateGithubEndpointParams{ + BaseURL: &newBaseURL, + } + + _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().EqualError(err, "updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") + + updateEpParams = params.UpdateGithubEndpointParams{ + UploadBaseURL: &newUploadBaseURL, + } + + _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().EqualError(err, "updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") + + updateEpParams = params.UpdateGithubEndpointParams{ + APIBaseURL: &newAPIBaseURL, + } + _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().Error(err) + s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) + s.Require().EqualError(err, "updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") + + updateEpParams = params.UpdateGithubEndpointParams{ + Description: &newDescription, + } + ret, err := s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) + s.Require().NoError(err) + s.Require().Equal(newDescription, ret.Description) +} + func (s *GithubTestSuite) TestUpdatingNonExistingEndpointReturnsNotFoundError() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) diff --git a/database/sql/sql.go b/database/sql/sql.go index 167e90ed..d6e60586 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -218,9 +218,18 @@ func (s *sqlDatabase) ensureGithubEndpoint() error { UploadBaseURL: appdefaults.GithubDefaultUploadBaseURL, } - if _, err := s.CreateGithubEndpoint(context.Background(), createEndpointParams); err != nil { - if !errors.Is(err, runnerErrors.ErrDuplicateEntity) { - return errors.Wrap(err, "creating default github endpoint") + var epCount int64 + if err := s.conn.Model(&GithubEndpoint{}).Count(&epCount).Error; err != nil { + if !errors.Is(err, gorm.ErrRecordNotFound) { + return errors.Wrap(err, "counting github endpoints") + } + } + + if epCount == 0 { + if _, err := 
s.CreateGithubEndpoint(context.Background(), createEndpointParams); err != nil { + if !errors.Is(err, runnerErrors.ErrDuplicateEntity) { + return errors.Wrap(err, "creating default github endpoint") + } } } From 914f7fbd49ea6a11cb32d5d507b8ab1d68886eca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 May 2025 06:15:56 +0000 Subject: [PATCH 095/179] Bump gorm.io/gorm from 1.26.1 to 1.30.0 Bumps [gorm.io/gorm](https://github.com/go-gorm/gorm) from 1.26.1 to 1.30.0. - [Release notes](https://github.com/go-gorm/gorm/releases) - [Commits](https://github.com/go-gorm/gorm/compare/v1.26.1...v1.30.0) --- updated-dependencies: - dependency-name: gorm.io/gorm dependency-version: 1.30.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 +- vendor/gorm.io/gorm/callbacks/create.go | 10 + vendor/gorm.io/gorm/callbacks/delete.go | 10 + vendor/gorm.io/gorm/callbacks/preload.go | 16 +- vendor/gorm.io/gorm/callbacks/query.go | 49 +- vendor/gorm.io/gorm/callbacks/raw.go | 5 + vendor/gorm.io/gorm/callbacks/update.go | 9 + vendor/gorm.io/gorm/chainable_api.go | 7 +- vendor/gorm.io/gorm/clause/joins.go | 32 ++ vendor/gorm.io/gorm/finisher_api.go | 12 +- vendor/gorm.io/gorm/generics.go | 605 +++++++++++++++++++++ vendor/gorm.io/gorm/gorm.go | 14 +- vendor/gorm.io/gorm/scan.go | 9 + vendor/gorm.io/gorm/schema/field.go | 5 +- vendor/gorm.io/gorm/schema/index.go | 2 +- vendor/gorm.io/gorm/schema/relationship.go | 8 +- vendor/gorm.io/gorm/statement.go | 57 +- vendor/modules.txt | 2 +- 19 files changed, 795 insertions(+), 63 deletions(-) create mode 100644 vendor/gorm.io/gorm/generics.go diff --git a/go.mod b/go.mod index 097353a1..94071dd7 100644 --- a/go.mod +++ b/go.mod @@ -37,7 +37,7 @@ require ( gorm.io/datatypes v1.2.5 gorm.io/driver/mysql v1.5.7 gorm.io/driver/sqlite v1.5.7 - gorm.io/gorm v1.26.1 + gorm.io/gorm v1.30.0 ) require ( diff --git a/go.sum b/go.sum index 28e4606e..467ebbcf 100644 --- a/go.sum +++ b/go.sum @@ -231,5 +231,5 @@ gorm.io/driver/sqlite v1.5.7/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDa gorm.io/driver/sqlserver v1.5.4 h1:xA+Y1KDNspv79q43bPyjDMUgHoYHLhXYmdFcYPobg8g= gorm.io/driver/sqlserver v1.5.4/go.mod h1:+frZ/qYmuna11zHPlh5oc2O6ZA/lS88Keb0XSH1Zh/g= gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= -gorm.io/gorm v1.26.1 h1:ghB2gUI9FkS46luZtn6DLZ0f6ooBJ5IbVej2ENFDjRw= -gorm.io/gorm v1.26.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= +gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs= +gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/vendor/gorm.io/gorm/callbacks/create.go b/vendor/gorm.io/gorm/callbacks/create.go index 8b7846b6..d8701f51 100644 --- a/vendor/gorm.io/gorm/callbacks/create.go +++ b/vendor/gorm.io/gorm/callbacks/create.go @@ -89,6 +89,10 @@ func Create(config *Config) func(db *gorm.DB) { db.AddError(rows.Close()) }() gorm.Scan(rows, db, mode) + + if db.Statement.Result != nil { + db.Statement.Result.RowsAffected = db.RowsAffected + } } return @@ -103,6 +107,12 @@ func Create(config *Config) func(db *gorm.DB) { } db.RowsAffected, _ = result.RowsAffected() + + if db.Statement.Result != nil { + db.Statement.Result.Result = result + db.Statement.Result.RowsAffected = db.RowsAffected + } + if db.RowsAffected == 0 { return } diff --git a/vendor/gorm.io/gorm/callbacks/delete.go b/vendor/gorm.io/gorm/callbacks/delete.go index 
84f446a3..07ed6fee 100644 --- a/vendor/gorm.io/gorm/callbacks/delete.go +++ b/vendor/gorm.io/gorm/callbacks/delete.go @@ -157,8 +157,14 @@ func Delete(config *Config) func(db *gorm.DB) { ok, mode := hasReturning(db, supportReturning) if !ok { result, err := db.Statement.ConnPool.ExecContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...) + if db.AddError(err) == nil { db.RowsAffected, _ = result.RowsAffected() + + if db.Statement.Result != nil { + db.Statement.Result.Result = result + db.Statement.Result.RowsAffected = db.RowsAffected + } } return @@ -166,6 +172,10 @@ func Delete(config *Config) func(db *gorm.DB) { if rows, err := db.Statement.ConnPool.QueryContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...); db.AddError(err) == nil { gorm.Scan(rows, db, mode) + + if db.Statement.Result != nil { + db.Statement.Result.RowsAffected = db.RowsAffected + } db.AddError(rows.Close()) } } diff --git a/vendor/gorm.io/gorm/callbacks/preload.go b/vendor/gorm.io/gorm/callbacks/preload.go index fd8214bb..225cda28 100644 --- a/vendor/gorm.io/gorm/callbacks/preload.go +++ b/vendor/gorm.io/gorm/callbacks/preload.go @@ -103,11 +103,11 @@ func preloadEntryPoint(db *gorm.DB, joins []string, relationships *schema.Relati joined = true continue } - joinNames := strings.SplitN(join, ".", 2) - if len(joinNames) == 2 { - if _, ok := relationships.Relations[joinNames[0]]; ok && name == joinNames[0] { + join0, join1, cut := strings.Cut(join, ".") + if cut { + if _, ok := relationships.Relations[join0]; ok && name == join0 { joined = true - nestedJoins = append(nestedJoins, joinNames[1]) + nestedJoins = append(nestedJoins, join1) } } } @@ -275,6 +275,8 @@ func preload(tx *gorm.DB, rel *schema.Relationship, conds []interface{}, preload column, values := schema.ToQueryValues(clause.CurrentTable, relForeignKeys, foreignValues) if len(values) != 0 { + tx = tx.Model(reflectResults.Addr().Interface()).Where(clause.IN{Column: column, Values: values}) + for _, cond := range conds { if fc, ok := cond.(func(*gorm.DB) *gorm.DB); ok { tx = fc(tx) @@ -283,7 +285,11 @@ func preload(tx *gorm.DB, rel *schema.Relationship, conds []interface{}, preload } } - if err := tx.Where(clause.IN{Column: column, Values: values}).Find(reflectResults.Addr().Interface(), inlineConds...).Error; err != nil { + if len(inlineConds) > 0 { + tx = tx.Where(inlineConds[0], inlineConds[1:]...) 
+ } + + if err := tx.Find(reflectResults.Addr().Interface()).Error; err != nil { return err } } diff --git a/vendor/gorm.io/gorm/callbacks/query.go b/vendor/gorm.io/gorm/callbacks/query.go index bbf238a9..548bf709 100644 --- a/vendor/gorm.io/gorm/callbacks/query.go +++ b/vendor/gorm.io/gorm/callbacks/query.go @@ -25,6 +25,10 @@ func Query(db *gorm.DB) { db.AddError(rows.Close()) }() gorm.Scan(rows, db, 0) + + if db.Statement.Result != nil { + db.Statement.Result.RowsAffected = db.RowsAffected + } } } } @@ -110,7 +114,7 @@ func BuildQuerySQL(db *gorm.DB) { } } - specifiedRelationsName := make(map[string]interface{}) + specifiedRelationsName := map[string]string{clause.CurrentTable: clause.CurrentTable} for _, join := range db.Statement.Joins { if db.Statement.Schema != nil { var isRelations bool // is relations or raw sql @@ -124,12 +128,12 @@ func BuildQuerySQL(db *gorm.DB) { nestedJoinNames := strings.Split(join.Name, ".") if len(nestedJoinNames) > 1 { isNestedJoin := true - gussNestedRelations := make([]*schema.Relationship, 0, len(nestedJoinNames)) + guessNestedRelations := make([]*schema.Relationship, 0, len(nestedJoinNames)) currentRelations := db.Statement.Schema.Relationships.Relations for _, relname := range nestedJoinNames { // incomplete match, only treated as raw sql if relation, ok = currentRelations[relname]; ok { - gussNestedRelations = append(gussNestedRelations, relation) + guessNestedRelations = append(guessNestedRelations, relation) currentRelations = relation.FieldSchema.Relationships.Relations } else { isNestedJoin = false @@ -139,18 +143,13 @@ func BuildQuerySQL(db *gorm.DB) { if isNestedJoin { isRelations = true - relations = gussNestedRelations + relations = guessNestedRelations } } } if isRelations { - genJoinClause := func(joinType clause.JoinType, parentTableName string, relation *schema.Relationship) clause.Join { - tableAliasName := relation.Name - if parentTableName != clause.CurrentTable { - tableAliasName = utils.NestedRelationName(parentTableName, tableAliasName) - } - + genJoinClause := func(joinType clause.JoinType, tableAliasName string, parentTableName string, relation *schema.Relationship) clause.Join { columnStmt := gorm.Statement{ Table: tableAliasName, DB: db, Schema: relation.FieldSchema, Selects: join.Selects, Omits: join.Omits, @@ -167,6 +166,13 @@ func BuildQuerySQL(db *gorm.DB) { } } + if join.Expression != nil { + return clause.Join{ + Type: join.JoinType, + Expression: join.Expression, + } + } + exprs := make([]clause.Expression, len(relation.References)) for idx, ref := range relation.References { if ref.OwnPrimaryKey { @@ -226,19 +232,24 @@ func BuildQuerySQL(db *gorm.DB) { } parentTableName := clause.CurrentTable - for _, rel := range relations { + for idx, rel := range relations { // joins table alias like "Manager, Company, Manager__Company" - nestedAlias := utils.NestedRelationName(parentTableName, rel.Name) - if _, ok := specifiedRelationsName[nestedAlias]; !ok { - fromClause.Joins = append(fromClause.Joins, genJoinClause(join.JoinType, parentTableName, rel)) - specifiedRelationsName[nestedAlias] = nil + curAliasName := rel.Name + if parentTableName != clause.CurrentTable { + curAliasName = utils.NestedRelationName(parentTableName, curAliasName) } - if parentTableName != clause.CurrentTable { - parentTableName = utils.NestedRelationName(parentTableName, rel.Name) - } else { - parentTableName = rel.Name + if _, ok := specifiedRelationsName[curAliasName]; !ok { + aliasName := curAliasName + if idx == len(relations)-1 && join.Alias != "" { 
+ aliasName = join.Alias + } + + fromClause.Joins = append(fromClause.Joins, genJoinClause(join.JoinType, aliasName, specifiedRelationsName[parentTableName], rel)) + specifiedRelationsName[curAliasName] = aliasName } + + parentTableName = curAliasName } } else { fromClause.Joins = append(fromClause.Joins, clause.Join{ diff --git a/vendor/gorm.io/gorm/callbacks/raw.go b/vendor/gorm.io/gorm/callbacks/raw.go index 013e638c..3bb647c4 100644 --- a/vendor/gorm.io/gorm/callbacks/raw.go +++ b/vendor/gorm.io/gorm/callbacks/raw.go @@ -13,5 +13,10 @@ func RawExec(db *gorm.DB) { } db.RowsAffected, _ = result.RowsAffected() + + if db.Statement.Result != nil { + db.Statement.Result.Result = result + db.Statement.Result.RowsAffected = db.RowsAffected + } } } diff --git a/vendor/gorm.io/gorm/callbacks/update.go b/vendor/gorm.io/gorm/callbacks/update.go index 7cde7f61..8e2782e1 100644 --- a/vendor/gorm.io/gorm/callbacks/update.go +++ b/vendor/gorm.io/gorm/callbacks/update.go @@ -92,6 +92,10 @@ func Update(config *Config) func(db *gorm.DB) { gorm.Scan(rows, db, mode) db.Statement.Dest = dest db.AddError(rows.Close()) + + if db.Statement.Result != nil { + db.Statement.Result.RowsAffected = db.RowsAffected + } } } else { result, err := db.Statement.ConnPool.ExecContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...) @@ -99,6 +103,11 @@ func Update(config *Config) func(db *gorm.DB) { if db.AddError(err) == nil { db.RowsAffected, _ = result.RowsAffected() } + + if db.Statement.Result != nil { + db.Statement.Result.Result = result + db.Statement.Result.RowsAffected = db.RowsAffected + } } } } diff --git a/vendor/gorm.io/gorm/chainable_api.go b/vendor/gorm.io/gorm/chainable_api.go index 8953413d..8a6aea34 100644 --- a/vendor/gorm.io/gorm/chainable_api.go +++ b/vendor/gorm.io/gorm/chainable_api.go @@ -448,9 +448,10 @@ func (db *DB) Assign(attrs ...interface{}) (tx *DB) { // Unscoped allows queries to include records marked as deleted, // overriding the soft deletion behavior. // Example: -// var users []User -// db.Unscoped().Find(&users) -// // Retrieves all users, including deleted ones. +// +// var users []User +// db.Unscoped().Find(&users) +// // Retrieves all users, including deleted ones. 
func (db *DB) Unscoped() (tx *DB) { tx = db.getInstance() tx.Statement.Unscoped = true diff --git a/vendor/gorm.io/gorm/clause/joins.go b/vendor/gorm.io/gorm/clause/joins.go index 879892be..a6f13e55 100644 --- a/vendor/gorm.io/gorm/clause/joins.go +++ b/vendor/gorm.io/gorm/clause/joins.go @@ -1,5 +1,7 @@ package clause +import "gorm.io/gorm/utils" + type JoinType string const ( @@ -9,6 +11,30 @@ const ( RightJoin JoinType = "RIGHT" ) +type JoinTarget struct { + Type JoinType + Association string + Subquery Expression + Table string +} + +func Has(name string) JoinTarget { + return JoinTarget{Type: InnerJoin, Association: name} +} + +func (jt JoinType) Association(name string) JoinTarget { + return JoinTarget{Type: jt, Association: name} +} + +func (jt JoinType) AssociationFrom(name string, subquery Expression) JoinTarget { + return JoinTarget{Type: jt, Association: name, Subquery: subquery} +} + +func (jt JoinTarget) As(name string) JoinTarget { + jt.Table = name + return jt +} + // Join clause for from type Join struct { Type JoinType @@ -18,6 +44,12 @@ type Join struct { Expression Expression } +func JoinTable(names ...string) Table { + return Table{ + Name: utils.JoinNestedRelationNames(names), + } +} + func (join Join) Build(builder Builder) { if join.Expression != nil { join.Expression.Build(builder) diff --git a/vendor/gorm.io/gorm/finisher_api.go b/vendor/gorm.io/gorm/finisher_api.go index 6802945c..57809d17 100644 --- a/vendor/gorm.io/gorm/finisher_api.go +++ b/vendor/gorm.io/gorm/finisher_api.go @@ -1,6 +1,7 @@ package gorm import ( + "context" "database/sql" "errors" "fmt" @@ -673,11 +674,18 @@ func (db *DB) Begin(opts ...*sql.TxOptions) *DB { opt = opts[0] } + ctx := tx.Statement.Context + if _, ok := ctx.Deadline(); !ok { + if db.Config.DefaultTransactionTimeout > 0 { + ctx, _ = context.WithTimeout(ctx, db.Config.DefaultTransactionTimeout) + } + } + switch beginner := tx.Statement.ConnPool.(type) { case TxBeginner: - tx.Statement.ConnPool, err = beginner.BeginTx(tx.Statement.Context, opt) + tx.Statement.ConnPool, err = beginner.BeginTx(ctx, opt) case ConnPoolBeginner: - tx.Statement.ConnPool, err = beginner.BeginTx(tx.Statement.Context, opt) + tx.Statement.ConnPool, err = beginner.BeginTx(ctx, opt) default: err = ErrInvalidTransaction } diff --git a/vendor/gorm.io/gorm/generics.go b/vendor/gorm.io/gorm/generics.go new file mode 100644 index 00000000..ad2d063f --- /dev/null +++ b/vendor/gorm.io/gorm/generics.go @@ -0,0 +1,605 @@ +package gorm + +import ( + "context" + "database/sql" + "fmt" + "sort" + "strings" + + "gorm.io/gorm/clause" + "gorm.io/gorm/logger" +) + +type result struct { + Result sql.Result + RowsAffected int64 +} + +func (info *result) ModifyStatement(stmt *Statement) { + stmt.Result = info +} + +// Build implements clause.Expression interface +func (result) Build(clause.Builder) { +} + +func WithResult() *result { + return &result{} +} + +type Interface[T any] interface { + Raw(sql string, values ...interface{}) ExecInterface[T] + Exec(ctx context.Context, sql string, values ...interface{}) error + CreateInterface[T] +} + +type CreateInterface[T any] interface { + ChainInterface[T] + Table(name string, args ...interface{}) CreateInterface[T] + Create(ctx context.Context, r *T) error + CreateInBatches(ctx context.Context, r *[]T, batchSize int) error +} + +type ChainInterface[T any] interface { + ExecInterface[T] + Scopes(scopes ...func(db *Statement)) ChainInterface[T] + Where(query interface{}, args ...interface{}) ChainInterface[T] + Not(query interface{}, 
args ...interface{}) ChainInterface[T] + Or(query interface{}, args ...interface{}) ChainInterface[T] + Limit(offset int) ChainInterface[T] + Offset(offset int) ChainInterface[T] + Joins(query clause.JoinTarget, on func(db JoinBuilder, joinTable clause.Table, curTable clause.Table) error) ChainInterface[T] + Preload(association string, query func(db PreloadBuilder) error) ChainInterface[T] + Select(query string, args ...interface{}) ChainInterface[T] + Omit(columns ...string) ChainInterface[T] + MapColumns(m map[string]string) ChainInterface[T] + Distinct(args ...interface{}) ChainInterface[T] + Group(name string) ChainInterface[T] + Having(query interface{}, args ...interface{}) ChainInterface[T] + Order(value interface{}) ChainInterface[T] + + Build(builder clause.Builder) + + Delete(ctx context.Context) (rowsAffected int, err error) + Update(ctx context.Context, name string, value any) (rowsAffected int, err error) + Updates(ctx context.Context, t T) (rowsAffected int, err error) + Count(ctx context.Context, column string) (result int64, err error) +} + +type ExecInterface[T any] interface { + Scan(ctx context.Context, r interface{}) error + First(context.Context) (T, error) + Last(ctx context.Context) (T, error) + Take(context.Context) (T, error) + Find(ctx context.Context) ([]T, error) + FindInBatches(ctx context.Context, batchSize int, fc func(data []T, batch int) error) error + Row(ctx context.Context) *sql.Row + Rows(ctx context.Context) (*sql.Rows, error) +} + +type JoinBuilder interface { + Select(...string) JoinBuilder + Omit(...string) JoinBuilder + Where(query interface{}, args ...interface{}) JoinBuilder + Not(query interface{}, args ...interface{}) JoinBuilder + Or(query interface{}, args ...interface{}) JoinBuilder +} + +type PreloadBuilder interface { + Select(...string) PreloadBuilder + Omit(...string) PreloadBuilder + Where(query interface{}, args ...interface{}) PreloadBuilder + Not(query interface{}, args ...interface{}) PreloadBuilder + Or(query interface{}, args ...interface{}) PreloadBuilder + Limit(offset int) PreloadBuilder + Offset(offset int) PreloadBuilder + Order(value interface{}) PreloadBuilder + LimitPerRecord(num int) PreloadBuilder +} + +type op func(*DB) *DB + +func G[T any](db *DB, opts ...clause.Expression) Interface[T] { + v := &g[T]{ + db: db, + ops: make([]op, 0, 5), + } + + if len(opts) > 0 { + v.ops = append(v.ops, func(db *DB) *DB { + return db.Clauses(opts...) + }) + } + + v.createG = &createG[T]{ + chainG: chainG[T]{ + execG: execG[T]{g: v}, + }, + } + return v +} + +type g[T any] struct { + *createG[T] + db *DB + ops []op +} + +func (g *g[T]) apply(ctx context.Context) *DB { + db := g.db + if !db.DryRun { + db = db.Session(&Session{NewDB: true, Context: ctx}).getInstance() + } + + for _, op := range g.ops { + db = op(db) + } + return db +} + +func (c *g[T]) Raw(sql string, values ...interface{}) ExecInterface[T] { + return execG[T]{g: &g[T]{ + db: c.db, + ops: append(c.ops, func(db *DB) *DB { + return db.Raw(sql, values...) + }), + }} +} + +func (c *g[T]) Exec(ctx context.Context, sql string, values ...interface{}) error { + return c.apply(ctx).Exec(sql, values...).Error +} + +type createG[T any] struct { + chainG[T] +} + +func (c createG[T]) Table(name string, args ...interface{}) CreateInterface[T] { + return createG[T]{c.with(func(db *DB) *DB { + return db.Table(name, args...) 
+ })} +} + +func (c createG[T]) Create(ctx context.Context, r *T) error { + return c.g.apply(ctx).Create(r).Error +} + +func (c createG[T]) CreateInBatches(ctx context.Context, r *[]T, batchSize int) error { + return c.g.apply(ctx).CreateInBatches(r, batchSize).Error +} + +type chainG[T any] struct { + execG[T] +} + +func (c chainG[T]) getInstance() *DB { + var r T + return c.g.apply(context.Background()).Model(r).getInstance() +} + +func (c chainG[T]) with(v op) chainG[T] { + return chainG[T]{ + execG: execG[T]{g: &g[T]{ + db: c.g.db, + ops: append(append([]op(nil), c.g.ops...), v), + }}, + } +} + +func (c chainG[T]) Scopes(scopes ...func(db *Statement)) ChainInterface[T] { + return c.with(func(db *DB) *DB { + for _, fc := range scopes { + fc(db.Statement) + } + return db + }) +} + +func (c chainG[T]) Table(name string, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Table(name, args...) + }) +} + +func (c chainG[T]) Where(query interface{}, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Where(query, args...) + }) +} + +func (c chainG[T]) Not(query interface{}, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Not(query, args...) + }) +} + +func (c chainG[T]) Or(query interface{}, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Or(query, args...) + }) +} + +func (c chainG[T]) Limit(offset int) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Limit(offset) + }) +} + +func (c chainG[T]) Offset(offset int) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Offset(offset) + }) +} + +type joinBuilder struct { + db *DB +} + +func (q *joinBuilder) Where(query interface{}, args ...interface{}) JoinBuilder { + q.db.Where(query, args...) + return q +} + +func (q *joinBuilder) Or(query interface{}, args ...interface{}) JoinBuilder { + q.db.Where(query, args...) + return q +} + +func (q *joinBuilder) Not(query interface{}, args ...interface{}) JoinBuilder { + q.db.Where(query, args...) + return q +} + +func (q *joinBuilder) Select(columns ...string) JoinBuilder { + q.db.Select(columns) + return q +} + +func (q *joinBuilder) Omit(columns ...string) JoinBuilder { + q.db.Omit(columns...) + return q +} + +type preloadBuilder struct { + limitPerRecord int + db *DB +} + +func (q *preloadBuilder) Where(query interface{}, args ...interface{}) PreloadBuilder { + q.db.Where(query, args...) + return q +} + +func (q *preloadBuilder) Or(query interface{}, args ...interface{}) PreloadBuilder { + q.db.Where(query, args...) + return q +} + +func (q *preloadBuilder) Not(query interface{}, args ...interface{}) PreloadBuilder { + q.db.Where(query, args...) + return q +} + +func (q *preloadBuilder) Select(columns ...string) PreloadBuilder { + q.db.Select(columns) + return q +} + +func (q *preloadBuilder) Omit(columns ...string) PreloadBuilder { + q.db.Omit(columns...) 
+ return q +} + +func (q *preloadBuilder) Limit(limit int) PreloadBuilder { + q.db.Limit(limit) + return q +} + +func (q *preloadBuilder) Offset(offset int) PreloadBuilder { + q.db.Offset(offset) + return q +} + +func (q *preloadBuilder) Order(value interface{}) PreloadBuilder { + q.db.Order(value) + return q +} + +func (q *preloadBuilder) LimitPerRecord(num int) PreloadBuilder { + q.limitPerRecord = num + return q +} + +func (c chainG[T]) Joins(jt clause.JoinTarget, on func(db JoinBuilder, joinTable clause.Table, curTable clause.Table) error) ChainInterface[T] { + return c.with(func(db *DB) *DB { + if jt.Table == "" { + jt.Table = clause.JoinTable(strings.Split(jt.Association, ".")...).Name + } + + q := joinBuilder{db: db.Session(&Session{NewDB: true, Initialized: true}).Table(jt.Table)} + if on != nil { + if err := on(&q, clause.Table{Name: jt.Table}, clause.Table{Name: clause.CurrentTable}); err != nil { + db.AddError(err) + } + } + + j := join{ + Name: jt.Association, + Alias: jt.Table, + Selects: q.db.Statement.Selects, + Omits: q.db.Statement.Omits, + JoinType: jt.Type, + } + + if where, ok := q.db.Statement.Clauses["WHERE"].Expression.(clause.Where); ok { + j.On = &where + } + + if jt.Subquery != nil { + joinType := j.JoinType + if joinType == "" { + joinType = clause.LeftJoin + } + + if db, ok := jt.Subquery.(interface{ getInstance() *DB }); ok { + stmt := db.getInstance().Statement + if len(j.Selects) == 0 { + j.Selects = stmt.Selects + } + if len(j.Omits) == 0 { + j.Omits = stmt.Omits + } + } + + expr := clause.NamedExpr{SQL: fmt.Sprintf("%s JOIN (?) AS ?", joinType), Vars: []interface{}{jt.Subquery, clause.Table{Name: j.Alias}}} + + if j.On != nil { + expr.SQL += " ON ?" + expr.Vars = append(expr.Vars, clause.AndConditions{Exprs: j.On.Exprs}) + } + + j.Expression = expr + } + + db.Statement.Joins = append(db.Statement.Joins, j) + sort.Slice(db.Statement.Joins, func(i, j int) bool { + return db.Statement.Joins[i].Name < db.Statement.Joins[j].Name + }) + return db + }) +} + +func (c chainG[T]) Select(query string, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Select(query, args...) + }) +} + +func (c chainG[T]) Omit(columns ...string) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Omit(columns...) + }) +} + +func (c chainG[T]) MapColumns(m map[string]string) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.MapColumns(m) + }) +} + +func (c chainG[T]) Distinct(args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Distinct(args...) + }) +} + +func (c chainG[T]) Group(name string) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Group(name) + }) +} + +func (c chainG[T]) Having(query interface{}, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Having(query, args...) 
+ }) +} + +func (c chainG[T]) Order(value interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Order(value) + }) +} + +func (c chainG[T]) Preload(association string, query func(db PreloadBuilder) error) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Preload(association, func(tx *DB) *DB { + q := preloadBuilder{db: tx.getInstance()} + if query != nil { + if err := query(&q); err != nil { + db.AddError(err) + } + } + + relation, ok := db.Statement.Schema.Relationships.Relations[association] + if !ok { + if preloadFields := strings.Split(association, "."); len(preloadFields) > 1 { + relationships := db.Statement.Schema.Relationships + for _, field := range preloadFields { + var ok bool + relation, ok = relationships.Relations[field] + if ok { + relationships = relation.FieldSchema.Relationships + } else { + db.AddError(fmt.Errorf("relation %s not found", association)) + return nil + } + } + } else { + db.AddError(fmt.Errorf("relation %s not found", association)) + return nil + } + } + + if q.limitPerRecord > 0 { + if relation.JoinTable != nil { + tx.AddError(fmt.Errorf("many2many relation %s don't support LimitPerRecord", association)) + return tx + } + + refColumns := []clause.Column{} + for _, rel := range relation.References { + if rel.OwnPrimaryKey { + refColumns = append(refColumns, clause.Column{Name: rel.ForeignKey.DBName}) + } + } + + if len(refColumns) != 0 { + selectExpr := clause.CommaExpression{} + for _, column := range q.db.Statement.Selects { + selectExpr.Exprs = append(selectExpr.Exprs, clause.Expr{SQL: "?", Vars: []interface{}{clause.Column{Name: column}}}) + } + + if len(selectExpr.Exprs) == 0 { + selectExpr.Exprs = []clause.Expression{clause.Expr{SQL: "*", Vars: []interface{}{}}} + } + + partitionBy := clause.CommaExpression{} + for _, column := range refColumns { + partitionBy.Exprs = append(partitionBy.Exprs, clause.Expr{SQL: "?", Vars: []interface{}{clause.Column{Name: column.Name}}}) + } + + rnnColumn := clause.Column{Name: "gorm_preload_rnn"} + sql := "ROW_NUMBER() OVER (PARTITION BY ? ?)" + vars := []interface{}{partitionBy} + if orderBy, ok := q.db.Statement.Clauses["ORDER BY"]; ok { + vars = append(vars, orderBy) + } else { + vars = append(vars, clause.Clause{Name: "ORDER BY", Expression: clause.OrderBy{ + Columns: []clause.OrderByColumn{{Column: clause.PrimaryColumn, Desc: true}}, + }}) + } + vars = append(vars, rnnColumn) + + selectExpr.Exprs = append(selectExpr.Exprs, clause.Expr{SQL: sql + " AS ?", Vars: vars}) + + q.db.Clauses(clause.Select{Expression: selectExpr}) + + return q.db.Session(&Session{NewDB: true}).Unscoped().Table("(?) t", q.db).Where("? 
<= ?", rnnColumn, q.limitPerRecord) + } + } + + return q.db + }) + }) +} + +func (c chainG[T]) Delete(ctx context.Context) (rowsAffected int, err error) { + r := new(T) + res := c.g.apply(ctx).Delete(r) + return int(res.RowsAffected), res.Error +} + +func (c chainG[T]) Update(ctx context.Context, name string, value any) (rowsAffected int, err error) { + var r T + res := c.g.apply(ctx).Model(r).Update(name, value) + return int(res.RowsAffected), res.Error +} + +func (c chainG[T]) Updates(ctx context.Context, t T) (rowsAffected int, err error) { + res := c.g.apply(ctx).Updates(t) + return int(res.RowsAffected), res.Error +} + +func (c chainG[T]) Count(ctx context.Context, column string) (result int64, err error) { + var r T + err = c.g.apply(ctx).Model(r).Select(column).Count(&result).Error + return +} + +func (c chainG[T]) Build(builder clause.Builder) { + subdb := c.getInstance() + subdb.Logger = logger.Discard + subdb.DryRun = true + + if stmt, ok := builder.(*Statement); ok { + if subdb.Statement.SQL.Len() > 0 { + var ( + vars = subdb.Statement.Vars + sql = subdb.Statement.SQL.String() + ) + + subdb.Statement.Vars = make([]interface{}, 0, len(vars)) + for _, vv := range vars { + subdb.Statement.Vars = append(subdb.Statement.Vars, vv) + bindvar := strings.Builder{} + subdb.BindVarTo(&bindvar, subdb.Statement, vv) + sql = strings.Replace(sql, bindvar.String(), "?", 1) + } + + subdb.Statement.SQL.Reset() + subdb.Statement.Vars = stmt.Vars + if strings.Contains(sql, "@") { + clause.NamedExpr{SQL: sql, Vars: vars}.Build(subdb.Statement) + } else { + clause.Expr{SQL: sql, Vars: vars}.Build(subdb.Statement) + } + } else { + subdb.Statement.Vars = append(stmt.Vars, subdb.Statement.Vars...) + subdb.callbacks.Query().Execute(subdb) + } + + builder.WriteString(subdb.Statement.SQL.String()) + stmt.Vars = subdb.Statement.Vars + } +} + +type execG[T any] struct { + g *g[T] +} + +func (g execG[T]) First(ctx context.Context) (T, error) { + var r T + err := g.g.apply(ctx).First(&r).Error + return r, err +} + +func (g execG[T]) Scan(ctx context.Context, result interface{}) error { + var r T + err := g.g.apply(ctx).Model(r).Find(&result).Error + return err +} + +func (g execG[T]) Last(ctx context.Context) (T, error) { + var r T + err := g.g.apply(ctx).Last(&r).Error + return r, err +} + +func (g execG[T]) Take(ctx context.Context) (T, error) { + var r T + err := g.g.apply(ctx).Take(&r).Error + return r, err +} + +func (g execG[T]) Find(ctx context.Context) ([]T, error) { + var r []T + err := g.g.apply(ctx).Find(&r).Error + return r, err +} + +func (g execG[T]) FindInBatches(ctx context.Context, batchSize int, fc func(data []T, batch int) error) error { + var data []T + return g.g.apply(ctx).FindInBatches(&data, batchSize, func(tx *DB, batch int) error { + return fc(data, batch) + }).Error +} + +func (g execG[T]) Row(ctx context.Context) *sql.Row { + return g.g.apply(ctx).Row() +} + +func (g execG[T]) Rows(ctx context.Context) (*sql.Rows, error) { + return g.g.apply(ctx).Rows() +} diff --git a/vendor/gorm.io/gorm/gorm.go b/vendor/gorm.io/gorm/gorm.go index 63a28b37..67889262 100644 --- a/vendor/gorm.io/gorm/gorm.go +++ b/vendor/gorm.io/gorm/gorm.go @@ -21,7 +21,9 @@ const preparedStmtDBKey = "preparedStmt" type Config struct { // GORM perform single create, update, delete operations in transactions by default to ensure database data integrity // You can disable it by setting `SkipDefaultTransaction` to true - SkipDefaultTransaction bool + SkipDefaultTransaction bool + DefaultTransactionTimeout time.Duration 
+ // NamingStrategy tables, columns naming strategy NamingStrategy schema.Namer // FullSaveAssociations full save associations @@ -135,12 +137,16 @@ func Open(dialector Dialector, opts ...Option) (db *DB, err error) { return isConfig && !isConfig2 }) + var skipAfterInitialize bool for _, opt := range opts { if opt != nil { if applyErr := opt.Apply(config); applyErr != nil { return nil, applyErr } defer func(opt Option) { + if skipAfterInitialize { + return + } if errr := opt.AfterInitialize(db); errr != nil { err = errr } @@ -192,6 +198,10 @@ func Open(dialector Dialector, opts ...Option) (db *DB, err error) { if db, _ := db.DB(); db != nil { _ = db.Close() } + + // DB is not initialized, so we skip AfterInitialize + skipAfterInitialize = true + return } if config.TranslateError { @@ -519,7 +529,7 @@ func (db *DB) Use(plugin Plugin) error { // .First(&User{}) // }) func (db *DB) ToSQL(queryFn func(tx *DB) *DB) string { - tx := queryFn(db.Session(&Session{DryRun: true, SkipDefaultTransaction: true})) + tx := queryFn(db.Session(&Session{DryRun: true, SkipDefaultTransaction: true}).getInstance()) stmt := tx.Statement return db.Dialector.Explain(stmt.SQL.String(), stmt.Vars...) diff --git a/vendor/gorm.io/gorm/scan.go b/vendor/gorm.io/gorm/scan.go index 6dc55f62..9a99d024 100644 --- a/vendor/gorm.io/gorm/scan.go +++ b/vendor/gorm.io/gorm/scan.go @@ -4,6 +4,7 @@ import ( "database/sql" "database/sql/driver" "reflect" + "strings" "time" "gorm.io/gorm/schema" @@ -244,6 +245,14 @@ func Scan(rows Rows, db *DB, mode ScanMode) { matchedFieldCount[column] = 1 } } else if names := utils.SplitNestedRelationName(column); len(names) > 1 { // has nested relation + aliasName := utils.JoinNestedRelationNames(names[0 : len(names)-1]) + for _, join := range db.Statement.Joins { + if join.Alias == aliasName { + names = append(strings.Split(join.Name, "."), names[len(names)-1]) + break + } + } + if rel, ok := sch.Relationships.Relations[names[0]]; ok { subNameCount := len(names) // nested relation fields diff --git a/vendor/gorm.io/gorm/schema/field.go b/vendor/gorm.io/gorm/schema/field.go index d1a633ce..a6ff1a72 100644 --- a/vendor/gorm.io/gorm/schema/field.go +++ b/vendor/gorm.io/gorm/schema/field.go @@ -318,9 +318,10 @@ func (schema *Schema) ParseField(fieldStruct reflect.StructField) *Field { } if val, ok := field.TagSettings["TYPE"]; ok { - switch DataType(strings.ToLower(val)) { + lowerVal := DataType(strings.ToLower(val)) + switch lowerVal { case Bool, Int, Uint, Float, String, Time, Bytes: - field.DataType = DataType(strings.ToLower(val)) + field.DataType = lowerVal default: field.DataType = DataType(val) } diff --git a/vendor/gorm.io/gorm/schema/index.go b/vendor/gorm.io/gorm/schema/index.go index a1cdc639..2690a0cb 100644 --- a/vendor/gorm.io/gorm/schema/index.go +++ b/vendor/gorm.io/gorm/schema/index.go @@ -105,7 +105,7 @@ func parseFieldIndexes(field *Field) (indexes []Index, err error) { var ( name string tag = strings.Join(v[1:], ":") - idx = strings.Index(tag, ",") + idx = strings.IndexByte(tag, ',') tagSetting = strings.Join(strings.Split(tag, ",")[1:], ",") settings = ParseTagSetting(tagSetting, ",") length, _ = strconv.Atoi(settings["LENGTH"]) diff --git a/vendor/gorm.io/gorm/schema/relationship.go b/vendor/gorm.io/gorm/schema/relationship.go index def4a595..f1ace924 100644 --- a/vendor/gorm.io/gorm/schema/relationship.go +++ b/vendor/gorm.io/gorm/schema/relationship.go @@ -78,7 +78,7 @@ func (schema *Schema) parseRelation(field *Field) *Relationship { cacheStore := schema.cacheStore if 
relation.FieldSchema, err = getOrParse(fieldValue, cacheStore, schema.namer); err != nil { - schema.err = err + schema.err = fmt.Errorf("failed to parse field: %s, error: %w", field.Name, err) return nil } @@ -663,6 +663,7 @@ func (rel *Relationship) ParseConstraint() *Constraint { if !(rel.References[idx].PrimaryKey == ref.PrimaryKey && rel.References[idx].ForeignKey == ref.ForeignKey && rel.References[idx].PrimaryValue == ref.PrimaryValue) { matched = false + break } } @@ -675,7 +676,7 @@ func (rel *Relationship) ParseConstraint() *Constraint { var ( name string - idx = strings.Index(str, ",") + idx = strings.IndexByte(str, ',') settings = ParseTagSetting(str, ",") ) @@ -762,8 +763,9 @@ func (rel *Relationship) ToQueryConditions(ctx context.Context, reflectValue ref } func copyableDataType(str DataType) bool { + lowerStr := strings.ToLower(string(str)) for _, s := range []string{"auto_increment", "primary key"} { - if strings.Contains(strings.ToLower(string(str)), s) { + if strings.Contains(lowerStr, s) { return false } } diff --git a/vendor/gorm.io/gorm/statement.go b/vendor/gorm.io/gorm/statement.go index 39e05d09..c6183724 100644 --- a/vendor/gorm.io/gorm/statement.go +++ b/vendor/gorm.io/gorm/statement.go @@ -47,15 +47,18 @@ type Statement struct { attrs []interface{} assigns []interface{} scopes []func(*DB) *DB + Result *result } type join struct { - Name string - Conds []interface{} - On *clause.Where - Selects []string - Omits []string - JoinType clause.JoinType + Name string + Alias string + Conds []interface{} + On *clause.Where + Selects []string + Omits []string + Expression clause.Expression + JoinType clause.JoinType } // StatementModifier statement modifier interface @@ -205,19 +208,21 @@ func (stmt *Statement) AddVar(writer clause.Writer, vars ...interface{}) { } else { writer.WriteString("(NULL)") } - case *DB: - subdb := v.Session(&Session{Logger: logger.Discard, DryRun: true}).getInstance() - if v.Statement.SQL.Len() > 0 { + case interface{ getInstance() *DB }: + cv := v.getInstance() + + subdb := cv.Session(&Session{Logger: logger.Discard, DryRun: true}).getInstance() + if cv.Statement.SQL.Len() > 0 { var ( vars = subdb.Statement.Vars - sql = v.Statement.SQL.String() + sql = cv.Statement.SQL.String() ) subdb.Statement.Vars = make([]interface{}, 0, len(vars)) for _, vv := range vars { subdb.Statement.Vars = append(subdb.Statement.Vars, vv) bindvar := strings.Builder{} - v.Dialector.BindVarTo(&bindvar, subdb.Statement, vv) + cv.BindVarTo(&bindvar, subdb.Statement, vv) sql = strings.Replace(sql, bindvar.String(), "?", 1) } @@ -321,6 +326,11 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] arg, _ = valuer.Value() } + curTable := stmt.Table + if curTable == "" { + curTable = clause.CurrentTable + } + switch v := arg.(type) { case clause.Expression: conds = append(conds, v) @@ -351,7 +361,8 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] sort.Strings(keys) for _, key := range keys { - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) + column := clause.Column{Name: key, Table: curTable} + conds = append(conds, clause.Eq{Column: column, Value: v[key]}) } case map[string]interface{}: keys := make([]string, 0, len(v)) @@ -362,12 +373,13 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] for _, key := range keys { reflectValue := reflect.Indirect(reflect.ValueOf(v[key])) + column := clause.Column{Name: key, Table: curTable} switch reflectValue.Kind() { case 
reflect.Slice, reflect.Array: if _, ok := v[key].(driver.Valuer); ok { - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) + conds = append(conds, clause.Eq{Column: column, Value: v[key]}) } else if _, ok := v[key].(Valuer); ok { - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) + conds = append(conds, clause.Eq{Column: column, Value: v[key]}) } else { // optimize reflect value length valueLen := reflectValue.Len() @@ -376,10 +388,10 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] values[i] = reflectValue.Index(i).Interface() } - conds = append(conds, clause.IN{Column: key, Values: values}) + conds = append(conds, clause.IN{Column: column, Values: values}) } default: - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) + conds = append(conds, clause.Eq{Column: column, Value: v[key]}) } } default: @@ -406,9 +418,9 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] if selected || (!restricted && field.Readable) { if v, isZero := field.ValueOf(stmt.Context, reflectValue); !isZero || selected { if field.DBName != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.DBName}, Value: v}) + conds = append(conds, clause.Eq{Column: clause.Column{Table: curTable, Name: field.DBName}, Value: v}) } else if field.DataType != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.Name}, Value: v}) + conds = append(conds, clause.Eq{Column: clause.Column{Table: curTable, Name: field.Name}, Value: v}) } } } @@ -420,9 +432,9 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] if selected || (!restricted && field.Readable) { if v, isZero := field.ValueOf(stmt.Context, reflectValue.Index(i)); !isZero || selected { if field.DBName != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.DBName}, Value: v}) + conds = append(conds, clause.Eq{Column: clause.Column{Table: curTable, Name: field.DBName}, Value: v}) } else if field.DataType != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.Name}, Value: v}) + conds = append(conds, clause.Eq{Column: clause.Column{Table: curTable, Name: field.Name}, Value: v}) } } } @@ -447,14 +459,14 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] } if len(values) > 0 { - conds = append(conds, clause.IN{Column: clause.PrimaryColumn, Values: values}) + conds = append(conds, clause.IN{Column: clause.Column{Table: curTable, Name: clause.PrimaryKey}, Values: values}) return []clause.Expression{clause.And(conds...)} } return nil } } - conds = append(conds, clause.IN{Column: clause.PrimaryColumn, Values: args}) + conds = append(conds, clause.IN{Column: clause.Column{Table: curTable, Name: clause.PrimaryKey}, Values: args}) } } } @@ -521,6 +533,7 @@ func (stmt *Statement) clone() *Statement { Context: stmt.Context, RaiseErrorOnNotFound: stmt.RaiseErrorOnNotFound, SkipHooks: stmt.SkipHooks, + Result: stmt.Result, } if stmt.SQL.Len() > 0 { diff --git a/vendor/modules.txt b/vendor/modules.txt index 46b3a5e6..b18b786a 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -365,7 +365,7 @@ gorm.io/driver/mysql # gorm.io/driver/sqlite v1.5.7 ## explicit; go 1.20 gorm.io/driver/sqlite -# gorm.io/gorm v1.26.1 +# gorm.io/gorm v1.30.0 ## explicit; go 1.18 gorm.io/gorm gorm.io/gorm/callbacks From 4c9f78f81b640fe195cde76226d1503622557860 
Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 26 May 2025 15:09:21 +0000 Subject: [PATCH 096/179] Relax update endpoint validation Signed-off-by: Gabriel Adrian Samfira --- database/sql/github.go | 9 ----- database/sql/github_test.go | 66 ------------------------------------- 2 files changed, 75 deletions(-) diff --git a/database/sql/github.go b/database/sql/github.go index 18526f5a..a66c7331 100644 --- a/database/sql/github.go +++ b/database/sql/github.go @@ -178,15 +178,6 @@ func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param return errors.Wrap(err, "fetching github endpoint") } - var credsCount int64 - if err := tx.Model(&GithubCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { - if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "fetching github credentials") - } - } - if credsCount > 0 && (param.APIBaseURL != nil || param.BaseURL != nil || param.UploadBaseURL != nil) { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot update endpoint URLs with existing credentials") - } if param.APIBaseURL != nil { endpoint.APIBaseURL = *param.APIBaseURL } diff --git a/database/sql/github_test.go b/database/sql/github_test.go index 72617de4..4d94a202 100644 --- a/database/sql/github_test.go +++ b/database/sql/github_test.go @@ -219,72 +219,6 @@ func (s *GithubTestSuite) TestUpdateEndpoint() { s.Require().Equal(caCertBundle, updatedEndpoint.CACertBundle) } -func (s *GithubTestSuite) TestUpdateEndpointUDLsFailsIfCredentialsAreAssociated() { - ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) - - createEpParams := params.CreateGithubEndpointParams{ - Name: testEndpointName, - Description: testEndpointDescription, - APIBaseURL: testAPIBaseURL, - UploadBaseURL: testUploadBaseURL, - BaseURL: testBaseURL, - } - - endpoint, err := s.db.CreateGithubEndpoint(ctx, createEpParams) - s.Require().NoError(err) - s.Require().NotNil(endpoint) - - credParams := params.CreateGithubCredentialsParams{ - Name: testCredsName, - Description: testCredsDescription, - Endpoint: testEndpointName, - AuthType: params.GithubAuthTypePAT, - PAT: params.GithubPAT{ - OAuth2Token: "test", - }, - } - - _, err = s.db.CreateGithubCredentials(ctx, credParams) - s.Require().NoError(err) - - newDescription := "new description" - newBaseURL := "https://new.example.com" - newAPIBaseURL := "https://new-api.example.com" - newUploadBaseURL := "https://new-uploads.example.com" - updateEpParams := params.UpdateGithubEndpointParams{ - BaseURL: &newBaseURL, - } - - _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") - - updateEpParams = params.UpdateGithubEndpointParams{ - UploadBaseURL: &newUploadBaseURL, - } - - _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") - - updateEpParams = params.UpdateGithubEndpointParams{ - APIBaseURL: &newAPIBaseURL, - } - _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) - s.Require().Error(err) - s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "updating 
github endpoint: cannot update endpoint URLs with existing credentials: invalid request") - - updateEpParams = params.UpdateGithubEndpointParams{ - Description: &newDescription, - } - ret, err := s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) - s.Require().NoError(err) - s.Require().Equal(newDescription, ret.Description) -} - func (s *GithubTestSuite) TestUpdatingNonExistingEndpointReturnsNotFoundError() { ctx := garmTesting.ImpersonateAdminContext(context.Background(), s.db, s.T()) From efd725ea94cf0594dd20c322536bf9b0c5234a52 Mon Sep 17 00:00:00 2001 From: Christopher Homberger Date: Thu, 29 May 2025 18:00:19 +0200 Subject: [PATCH 097/179] Create Repo / Org make --forge-type optional * This makes gitea garm usage unnessary complex Signed-off-by: Christopher Homberger --- cmd/garm-cli/cmd/organization.go | 2 +- cmd/garm-cli/cmd/repository.go | 2 +- doc/gitea.md | 3 +-- params/params.go | 1 + params/requests.go | 32 ++++++++++++++------------------ runner/common.go | 31 +++++++++++++++++++++++++++++++ runner/organizations.go | 4 ++-- runner/repositories.go | 4 ++-- 8 files changed, 53 insertions(+), 26 deletions(-) create mode 100644 runner/common.go diff --git a/cmd/garm-cli/cmd/organization.go b/cmd/garm-cli/cmd/organization.go index 58110053..a95f912f 100644 --- a/cmd/garm-cli/cmd/organization.go +++ b/cmd/garm-cli/cmd/organization.go @@ -308,7 +308,7 @@ func init() { orgAddCmd.Flags().StringVar(&orgName, "name", "", "The name of the organization") orgAddCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", string(params.PoolBalancerTypeRoundRobin), "The balancing strategy to use when creating runners in pools matching requested labels.") orgAddCmd.Flags().StringVar(&orgWebhookSecret, "webhook-secret", "", "The webhook secret for this organization") - orgAddCmd.Flags().StringVar(&forgeType, "forge-type", string(params.GithubEndpointType), "The forge type of the organization. Supported values: github, gitea.") + orgAddCmd.Flags().StringVar(&forgeType, "forge-type", "", "The forge type of the organization. Supported values: github, gitea.") orgAddCmd.Flags().StringVar(&orgCreds, "credentials", "", "Credentials name. See credentials list.") orgAddCmd.Flags().BoolVar(&orgRandomWebhookSecret, "random-webhook-secret", false, "Generate a random webhook secret for this organization.") orgAddCmd.Flags().BoolVar(&installOrgWebhook, "install-webhook", false, "Install the webhook as part of the add operation.") diff --git a/cmd/garm-cli/cmd/repository.go b/cmd/garm-cli/cmd/repository.go index 96f214fd..5bf588c5 100644 --- a/cmd/garm-cli/cmd/repository.go +++ b/cmd/garm-cli/cmd/repository.go @@ -312,7 +312,7 @@ func init() { repoAddCmd.Flags().StringVar(&repoOwner, "owner", "", "The owner of this repository") repoAddCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", string(params.PoolBalancerTypeRoundRobin), "The balancing strategy to use when creating runners in pools matching requested labels.") repoAddCmd.Flags().StringVar(&repoName, "name", "", "The name of the repository") - repoAddCmd.Flags().StringVar(&forgeType, "forge-type", string(params.GithubEndpointType), "The forge type of the repository. Supported values: github, gitea.") + repoAddCmd.Flags().StringVar(&forgeType, "forge-type", "", "The forge type of the repository. Supported values: github, gitea.") repoAddCmd.Flags().StringVar(&repoWebhookSecret, "webhook-secret", "", "The webhook secret for this repository") repoAddCmd.Flags().StringVar(&repoCreds, "credentials", "", "Credentials name. 
See credentials list.") repoAddCmd.Flags().BoolVar(&randomWebhookSecret, "random-webhook-secret", false, "Generate a random webhook secret for this repository.") diff --git a/doc/gitea.md b/doc/gitea.md index 215a51ba..e7833a32 100644 --- a/doc/gitea.md +++ b/doc/gitea.md @@ -295,8 +295,7 @@ garm-cli repo add \ --name testrepo \ --owner testorg \ --random-webhook-secret \ - --install-webhook \ - --forge-type gitea + --install-webhook ``` Make a note of the repo UUID. You will need it when adding a pool. diff --git a/params/params.go b/params/params.go index a127d760..a680fda4 100644 --- a/params/params.go +++ b/params/params.go @@ -78,6 +78,7 @@ const ( ) const ( + AutoEndpointType EndpointType = "" GithubEndpointType EndpointType = "github" GiteaEndpointType EndpointType = "gitea" ) diff --git a/params/requests.go b/params/requests.go index 82cbf113..5be0e3a1 100644 --- a/params/requests.go +++ b/params/requests.go @@ -48,15 +48,6 @@ type CreateRepoParams struct { ForgeType EndpointType `json:"forge_type,omitempty"` } -func (c CreateRepoParams) GetForgeType() EndpointType { - switch c.ForgeType { - case GithubEndpointType, GiteaEndpointType: - return c.ForgeType - default: - return GithubEndpointType - } -} - func (c *CreateRepoParams) Validate() error { if c.Owner == "" { return runnerErrors.NewBadRequestError("missing owner") @@ -73,6 +64,13 @@ func (c *CreateRepoParams) Validate() error { return runnerErrors.NewMissingSecretError("missing secret") } + switch c.ForgeType { + case GithubEndpointType, GiteaEndpointType, AutoEndpointType: + break + default: + return runnerErrors.NewBadRequestError("invalid forge type") + } + switch c.PoolBalancerType { case PoolBalancerTypeRoundRobin, PoolBalancerTypePack, PoolBalancerTypeNone: default: @@ -90,15 +88,6 @@ type CreateOrgParams struct { ForgeType EndpointType `json:"forge_type,omitempty"` } -func (c CreateOrgParams) GetForgeType() EndpointType { - switch c.ForgeType { - case GithubEndpointType, GiteaEndpointType: - return c.ForgeType - default: - return GithubEndpointType - } -} - func (c *CreateOrgParams) Validate() error { if c.Name == "" { return runnerErrors.NewBadRequestError("missing org name") @@ -111,6 +100,13 @@ func (c *CreateOrgParams) Validate() error { return runnerErrors.NewMissingSecretError("missing secret") } + switch c.ForgeType { + case GithubEndpointType, GiteaEndpointType, AutoEndpointType: + break + default: + return runnerErrors.NewBadRequestError("invalid forge type") + } + switch c.PoolBalancerType { case PoolBalancerTypeRoundRobin, PoolBalancerTypePack, PoolBalancerTypeNone: default: diff --git a/runner/common.go b/runner/common.go new file mode 100644 index 00000000..63d4887c --- /dev/null +++ b/runner/common.go @@ -0,0 +1,31 @@ +package runner + +import ( + "context" + + "github.com/pkg/errors" + + runnerErrors "github.com/cloudbase/garm-provider-common/errors" + "github.com/cloudbase/garm/params" +) + +func (r *Runner) ResolveForgeCredentialByName(ctx context.Context, credentialsName string) (params.ForgeCredentials, error) { + githubCred, err := r.store.GetGithubCredentialsByName(ctx, credentialsName, false) + if err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { + return params.ForgeCredentials{}, errors.Wrap(err, "fetching github credentials") + } + giteaCred, err := r.store.GetGiteaCredentialsByName(ctx, credentialsName, false) + if err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { + return params.ForgeCredentials{}, errors.Wrap(err, "fetching gitea credentials") + } + if githubCred.ID != 0 && 
giteaCred.ID != 0 { + return params.ForgeCredentials{}, runnerErrors.NewBadRequestError("credentials %s are defined for both GitHub and Gitea, please specify the forge type", credentialsName) + } + if githubCred.ID != 0 { + return githubCred, nil + } + if giteaCred.ID != 0 { + return giteaCred, nil + } + return params.ForgeCredentials{}, runnerErrors.NewBadRequestError("credentials %s not found", credentialsName) +} diff --git a/runner/organizations.go b/runner/organizations.go index bddab87c..26d4f6e9 100644 --- a/runner/organizations.go +++ b/runner/organizations.go @@ -39,7 +39,7 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP } var creds params.ForgeCredentials - switch param.GetForgeType() { + switch param.ForgeType { case params.GithubEndpointType: slog.DebugContext(ctx, "getting github credentials") creds, err = r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) @@ -47,7 +47,7 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP slog.DebugContext(ctx, "getting gitea credentials") creds, err = r.store.GetGiteaCredentialsByName(ctx, param.CredentialsName, true) default: - return params.Organization{}, runnerErrors.NewBadRequestError("invalid forge type: %s", param.GetForgeType()) + creds, err = r.ResolveForgeCredentialByName(ctx, param.CredentialsName) } if err != nil { diff --git a/runner/repositories.go b/runner/repositories.go index 058e1a02..d5118e96 100644 --- a/runner/repositories.go +++ b/runner/repositories.go @@ -39,13 +39,13 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa } var creds params.ForgeCredentials - switch param.GetForgeType() { + switch param.ForgeType { case params.GithubEndpointType: creds, err = r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) case params.GiteaEndpointType: creds, err = r.store.GetGiteaCredentialsByName(ctx, param.CredentialsName, true) default: - return params.Repository{}, runnerErrors.NewBadRequestError("invalid forge type: %s", param.GetForgeType()) + creds, err = r.ResolveForgeCredentialByName(ctx, param.CredentialsName) } if err != nil { From 48c4ea7d15c7b1a52d909409daacfc39f047f104 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 29 May 2025 17:07:46 +0000 Subject: [PATCH 098/179] Trigger tests on release branches This change triggers tests for PRs created against release/* branches. Signed-off-by: Gabriel Adrian Samfira --- .github/workflows/go-tests.yml | 2 ++ Makefile | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/go-tests.yml b/.github/workflows/go-tests.yml index f2792835..36d113d0 100644 --- a/.github/workflows/go-tests.yml +++ b/.github/workflows/go-tests.yml @@ -4,9 +4,11 @@ on: push: branches: - main + - 'release/**' pull_request: branches: - main + - 'release/**' concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.ref_name }} diff --git a/Makefile b/Makefile index ad9ed795..f5337783 100644 --- a/Makefile +++ b/Makefile @@ -67,7 +67,7 @@ lint-fix: golangci-lint $(GOLANGCI_LINT) ## Lint the codebase and run auto-fixer verify-vendor: ## verify if all the go.mod/go.sum files are up-to-date $(eval TMPDIR := $(shell mktemp -d)) - @cp -R ${ROOTDIR} ${TMPDIR} + @cp -R ${ROOTDIR} ${TMPDIR}/. @(cd ${TMPDIR}/garm && ${GO} mod tidy) @diff -r -u -q ${ROOTDIR} ${TMPDIR}/garm >/dev/null 2>&1; if [ "$$?" 
-ne 0 ];then echo "please run: go mod tidy && go mod vendor"; exit 1; fi @rm -rf ${TMPDIR} From 913388ed3bd859843915956e300364053d04e513 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 29 May 2025 17:07:46 +0000 Subject: [PATCH 099/179] Trigger tests on release branches This change triggers tests for PRs created against release/* branches. Signed-off-by: Gabriel Adrian Samfira --- .github/workflows/go-tests.yml | 2 ++ Makefile | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/go-tests.yml b/.github/workflows/go-tests.yml index f2792835..36d113d0 100644 --- a/.github/workflows/go-tests.yml +++ b/.github/workflows/go-tests.yml @@ -4,9 +4,11 @@ on: push: branches: - main + - 'release/**' pull_request: branches: - main + - 'release/**' concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.ref_name }} diff --git a/Makefile b/Makefile index a655e9f5..bce60045 100644 --- a/Makefile +++ b/Makefile @@ -67,7 +67,7 @@ lint-fix: golangci-lint $(GOLANGCI_LINT) ## Lint the codebase and run auto-fixer verify-vendor: ## verify if all the go.mod/go.sum files are up-to-date $(eval TMPDIR := $(shell mktemp -d)) - @cp -R ${ROOTDIR} ${TMPDIR} + @cp -R ${ROOTDIR} ${TMPDIR}/. @(cd ${TMPDIR}/garm && ${GO} mod tidy) @diff -r -u -q ${ROOTDIR} ${TMPDIR}/garm >/dev/null 2>&1; if [ "$$?" -ne 0 ];then echo "please run: go mod tidy && go mod vendor"; exit 1; fi @rm -rf ${TMPDIR} From ef3402bf17e0b092f54ca0b0d99ebed1d1146cf9 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 26 May 2025 19:12:59 +0000 Subject: [PATCH 100/179] Add write lock for sqlite3 Signed-off-by: Gabriel Adrian Samfira --- database/sql/controller.go | 6 ++++++ database/sql/enterprise.go | 9 +++++++++ database/sql/github.go | 18 ++++++++++++++++++ database/sql/instances.go | 12 ++++++++++++ database/sql/jobs.go | 18 ++++++++++++++++++ database/sql/organizations.go | 9 +++++++++ database/sql/pools.go | 12 ++++++++++++ database/sql/repositories.go | 9 +++++++++ database/sql/sql.go | 6 ++++++ database/sql/users.go | 6 ++++++ 10 files changed, 105 insertions(+) diff --git a/database/sql/controller.go b/database/sql/controller.go index fb360e00..71890c88 100644 --- a/database/sql/controller.go +++ b/database/sql/controller.go @@ -63,6 +63,9 @@ func (s *sqlDatabase) ControllerInfo() (params.ControllerInfo, error) { } func (s *sqlDatabase) InitController() (params.ControllerInfo, error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + if _, err := s.ControllerInfo(); err == nil { return params.ControllerInfo{}, runnerErrors.NewConflictError("controller already initialized") } @@ -88,6 +91,9 @@ func (s *sqlDatabase) InitController() (params.ControllerInfo, error) { } func (s *sqlDatabase) UpdateController(info params.UpdateControllerParams) (paramInfo params.ControllerInfo, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.ControllerEntityType, common.UpdateOperation, paramInfo) diff --git a/database/sql/enterprise.go b/database/sql/enterprise.go index dfcb10a2..9b927bed 100644 --- a/database/sql/enterprise.go +++ b/database/sql/enterprise.go @@ -29,6 +29,9 @@ import ( ) func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (paramEnt params.Enterprise, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + if webhookSecret == "" { return params.Enterprise{}, errors.New("creating enterprise: missing secret") } 
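The hunks in this patch all apply the same pattern: each write path acquires a process-wide mutex before touching the database. SQLite locks the whole database file for writes and has no row-level locking, so serializing writers in the application avoids the contention that `busy_timeout` alone can only retry around. A minimal sketch of the pattern, using hypothetical type and method names rather than GARM's own:

```go
package store

import "sync"

// store funnels every write through one mutex; reads stay unguarded,
// since SQLite copes fine with concurrent readers.
type store struct {
	writeMux sync.Mutex
	// the underlying DB handle would live here as well
}

func (s *store) createRecord( /* params */ ) error {
	s.writeMux.Lock()
	defer s.writeMux.Unlock()

	// perform the INSERT/UPDATE inside the critical section
	return nil
}
```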
@@ -132,6 +135,9 @@ func (s *sqlDatabase) ListEnterprises(_ context.Context) ([]params.Enterprise, e } func (s *sqlDatabase) DeleteEnterprise(ctx context.Context, enterpriseID string) error { + s.writeMux.Lock() + defer s.writeMux.Unlock() + enterprise, err := s.getEnterpriseByID(ctx, s.conn, enterpriseID, "Endpoint", "Credentials", "Credentials.Endpoint") if err != nil { return errors.Wrap(err, "fetching enterprise") @@ -157,6 +163,9 @@ func (s *sqlDatabase) DeleteEnterprise(ctx context.Context, enterpriseID string) } func (s *sqlDatabase) UpdateEnterprise(ctx context.Context, enterpriseID string, param params.UpdateEntityParams) (newParams params.Enterprise, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.EnterpriseEntityType, common.UpdateOperation, newParams) diff --git a/database/sql/github.go b/database/sql/github.go index a66c7331..d787653d 100644 --- a/database/sql/github.go +++ b/database/sql/github.go @@ -111,6 +111,9 @@ func getUIDFromContext(ctx context.Context) (uuid.UUID, error) { } func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.CreateGithubEndpointParams) (ghEndpoint params.GithubEndpoint, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.GithubEndpointEntityType, common.CreateOperation, ghEndpoint) @@ -164,6 +167,9 @@ func (s *sqlDatabase) ListGithubEndpoints(_ context.Context) ([]params.GithubEnd } func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param params.UpdateGithubEndpointParams) (ghEndpoint params.GithubEndpoint, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.GithubEndpointEntityType, common.UpdateOperation, ghEndpoint) @@ -229,6 +235,9 @@ func (s *sqlDatabase) GetGithubEndpoint(_ context.Context, name string) (params. 
} func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.GithubEndpointEntityType, common.DeleteOperation, params.GithubEndpoint{Name: name}) @@ -287,6 +296,9 @@ func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err } func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (ghCreds params.GithubCredentials, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + userID, err := getUIDFromContext(ctx) if err != nil { return params.GithubCredentials{}, errors.Wrap(err, "creating github credentials") @@ -450,6 +462,9 @@ func (s *sqlDatabase) ListGithubCredentials(ctx context.Context) ([]params.Githu } func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (ghCreds params.GithubCredentials, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.GithubCredentialsEntityType, common.UpdateOperation, ghCreds) @@ -529,6 +544,9 @@ func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, para } func (s *sqlDatabase) DeleteGithubCredentials(ctx context.Context, id uint) (err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + var name string defer func() { if err == nil { diff --git a/database/sql/instances.go b/database/sql/instances.go index 864e7ba2..c7fb02f6 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -31,6 +31,9 @@ import ( ) func (s *sqlDatabase) CreateInstance(_ context.Context, poolID string, param params.CreateInstanceParams) (instance params.Instance, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + pool, err := s.getPoolByID(s.conn, poolID) if err != nil { return params.Instance{}, errors.Wrap(err, "fetching pool") @@ -143,6 +146,9 @@ func (s *sqlDatabase) GetInstanceByName(ctx context.Context, instanceName string } func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceName string) (err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + instance, err := s.getPoolInstanceByName(poolID, instanceName) if err != nil { return errors.Wrap(err, "deleting instance") @@ -176,6 +182,9 @@ func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceN } func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string, event params.EventType, eventLevel params.EventLevel, statusMessage string) error { + s.writeMux.Lock() + defer s.writeMux.Unlock() + instance, err := s.getInstanceByName(ctx, instanceName) if err != nil { return errors.Wrap(err, "updating instance") @@ -194,6 +203,9 @@ func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string, } func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, param params.UpdateInstanceParams) (params.Instance, error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + instance, err := s.getInstanceByName(ctx, instanceName) if err != nil { return params.Instance{}, errors.Wrap(err, "updating instance") diff --git a/database/sql/jobs.go b/database/sql/jobs.go index b7dda926..9cbf2ffe 100644 --- a/database/sql/jobs.go +++ b/database/sql/jobs.go @@ -95,6 +95,9 @@ func (s *sqlDatabase) paramsJobToWorkflowJob(ctx context.Context, job params.Job } func (s *sqlDatabase) DeleteJob(_ context.Context, jobID int64) (err error) { + s.writeMux.Lock() + defer 
s.writeMux.Unlock() + defer func() { if err == nil { if notifyErr := s.sendNotify(common.JobEntityType, common.DeleteOperation, params.Job{ID: jobID}); notifyErr != nil { @@ -113,6 +116,9 @@ func (s *sqlDatabase) DeleteJob(_ context.Context, jobID int64) (err error) { } func (s *sqlDatabase) LockJob(_ context.Context, jobID int64, entityID string) error { + s.writeMux.Lock() + defer s.writeMux.Unlock() + entityUUID, err := uuid.Parse(entityID) if err != nil { return errors.Wrap(err, "parsing entity id") @@ -152,6 +158,9 @@ func (s *sqlDatabase) LockJob(_ context.Context, jobID int64, entityID string) e } func (s *sqlDatabase) BreakLockJobIsQueued(_ context.Context, jobID int64) (err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + var workflowJob WorkflowJob q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Preload("Instance").Where("id = ? and status = ?", jobID, params.JobStatusQueued).First(&workflowJob) @@ -180,6 +189,9 @@ func (s *sqlDatabase) BreakLockJobIsQueued(_ context.Context, jobID int64) (err } func (s *sqlDatabase) UnlockJob(_ context.Context, jobID int64, entityID string) error { + s.writeMux.Lock() + defer s.writeMux.Unlock() + var workflowJob WorkflowJob q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Where("id = ?", jobID).First(&workflowJob) @@ -213,6 +225,9 @@ func (s *sqlDatabase) UnlockJob(_ context.Context, jobID int64, entityID string) } func (s *sqlDatabase) CreateOrUpdateJob(ctx context.Context, job params.Job) (params.Job, error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + var workflowJob WorkflowJob var err error q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Preload("Instance").Where("id = ?", job.ID).First(&workflowJob) @@ -381,6 +396,9 @@ func (s *sqlDatabase) GetJobByID(_ context.Context, jobID int64) (params.Job, er // DeleteCompletedJobs deletes all completed jobs. 
func (s *sqlDatabase) DeleteCompletedJobs(_ context.Context) error { + s.writeMux.Lock() + defer s.writeMux.Unlock() + query := s.conn.Model(&WorkflowJob{}).Where("status = ?", params.JobStatusCompleted) if err := query.Unscoped().Delete(&WorkflowJob{}); err.Error != nil { diff --git a/database/sql/organizations.go b/database/sql/organizations.go index c41b9269..3c2cdbbf 100644 --- a/database/sql/organizations.go +++ b/database/sql/organizations.go @@ -30,6 +30,9 @@ import ( ) func (s *sqlDatabase) CreateOrganization(ctx context.Context, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (org params.Organization, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + if webhookSecret == "" { return params.Organization{}, errors.New("creating org: missing secret") } @@ -123,6 +126,9 @@ func (s *sqlDatabase) ListOrganizations(_ context.Context) ([]params.Organizatio } func (s *sqlDatabase) DeleteOrganization(ctx context.Context, orgID string) (err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + org, err := s.getOrgByID(ctx, s.conn, orgID, "Endpoint", "Credentials", "Credentials.Endpoint") if err != nil { return errors.Wrap(err, "fetching org") @@ -148,6 +154,9 @@ func (s *sqlDatabase) DeleteOrganization(ctx context.Context, orgID string) (err } func (s *sqlDatabase) UpdateOrganization(ctx context.Context, orgID string, param params.UpdateEntityParams) (paramOrg params.Organization, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.OrganizationEntityType, common.UpdateOperation, paramOrg) diff --git a/database/sql/pools.go b/database/sql/pools.go index fdcf3f5a..cd888505 100644 --- a/database/sql/pools.go +++ b/database/sql/pools.go @@ -68,6 +68,9 @@ func (s *sqlDatabase) GetPoolByID(_ context.Context, poolID string) (params.Pool } func (s *sqlDatabase) DeletePoolByID(_ context.Context, poolID string) (err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + pool, err := s.getPoolByID(s.conn, poolID) if err != nil { return errors.Wrap(err, "fetching pool by ID") @@ -255,6 +258,9 @@ func (s *sqlDatabase) FindPoolsMatchingAllTags(_ context.Context, entityType par } func (s *sqlDatabase) CreateEntityPool(_ context.Context, entity params.GithubEntity, param params.CreatePoolParams) (pool params.Pool, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + if len(param.Tags) == 0 { return params.Pool{}, runnerErrors.NewBadRequestError("no tags specified") } @@ -343,6 +349,9 @@ func (s *sqlDatabase) GetEntityPool(_ context.Context, entity params.GithubEntit } func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.GithubEntity, poolID string) (err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + entityID, err := uuid.Parse(entity.ID) if err != nil { return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") @@ -380,6 +389,9 @@ func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.GithubEn } func (s *sqlDatabase) UpdateEntityPool(_ context.Context, entity params.GithubEntity, poolID string, param params.UpdatePoolParams) (updatedPool params.Pool, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.PoolEntityType, common.UpdateOperation, updatedPool) diff --git a/database/sql/repositories.go b/database/sql/repositories.go index c1eaef3b..d6cefc64 100644 --- a/database/sql/repositories.go +++ b/database/sql/repositories.go @@ -30,6 +30,9 @@ import ( ) func (s 
*sqlDatabase) CreateRepository(ctx context.Context, owner, name, credentialsName, webhookSecret string, poolBalancerType params.PoolBalancerType) (param params.Repository, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.RepositoryEntityType, common.CreateOperation, param) @@ -122,6 +125,9 @@ func (s *sqlDatabase) ListRepositories(_ context.Context) ([]params.Repository, } func (s *sqlDatabase) DeleteRepository(ctx context.Context, repoID string) (err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + repo, err := s.getRepoByID(ctx, s.conn, repoID, "Endpoint", "Credentials", "Credentials.Endpoint") if err != nil { return errors.Wrap(err, "fetching repo") @@ -147,6 +153,9 @@ func (s *sqlDatabase) DeleteRepository(ctx context.Context, repoID string) (err } func (s *sqlDatabase) UpdateRepository(ctx context.Context, repoID string, param params.UpdateEntityParams) (newParams params.Repository, err error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + defer func() { if err == nil { s.sendNotify(common.RepositoryEntityType, common.UpdateOperation, newParams) diff --git a/database/sql/sql.go b/database/sql/sql.go index d4e6895a..290cce3f 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -20,6 +20,7 @@ import ( "log/slog" "net/url" "strings" + "sync" "github.com/pkg/errors" "gorm.io/driver/mysql" @@ -91,6 +92,11 @@ type sqlDatabase struct { ctx context.Context cfg config.Database producer common.Producer + + // while busy_timeout helps, in situations of high contention, we can still + // end up with multiple threads trying to write to the database. SQLite does now + // support row level locking. + writeMux sync.Mutex } var renameTemplate = ` diff --git a/database/sql/users.go b/database/sql/users.go index 7d604a83..6bc0973f 100644 --- a/database/sql/users.go +++ b/database/sql/users.go @@ -57,6 +57,9 @@ func (s *sqlDatabase) getUserByID(tx *gorm.DB, userID string) (User, error) { } func (s *sqlDatabase) CreateUser(_ context.Context, user params.NewUserParams) (params.User, error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + if user.Username == "" || user.Email == "" || user.Password == "" { return params.User{}, runnerErrors.NewBadRequestError("missing username, password or email") } @@ -119,6 +122,9 @@ func (s *sqlDatabase) GetUserByID(_ context.Context, userID string) (params.User } func (s *sqlDatabase) UpdateUser(_ context.Context, user string, param params.UpdateUserParams) (params.User, error) { + s.writeMux.Lock() + defer s.writeMux.Unlock() + var err error var dbUser User err = s.conn.Transaction(func(tx *gorm.DB) error { From 043359936b35af3478e30a82b3e7211e38a2077d Mon Sep 17 00:00:00 2001 From: Mathieu Tortuyaux Date: Tue, 10 Jun 2025 17:58:11 +0200 Subject: [PATCH 101/179] readme: sort external providers Signed-off-by: Mathieu Tortuyaux --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index ce596781..99c914d7 100644 --- a/README.md +++ b/README.md @@ -94,14 +94,14 @@ GARM uses providers to create runners in a particular IaaS. The providers are ex External providers are binaries that GARM calls into to create runners in a particular IaaS. 
There are several external providers available: -* [OpenStack](https://github.com/cloudbase/garm-provider-openstack) +* [Amazon EC2](https://github.com/cloudbase/garm-provider-aws) * [Azure](https://github.com/cloudbase/garm-provider-azure) +* [Equinix Metal](https://github.com/cloudbase/garm-provider-equinix) +* [Google Cloud Platform (GCP)](https://github.com/cloudbase/garm-provider-gcp) +* [Incus](https://github.com/cloudbase/garm-provider-incus) * [Kubernetes](https://github.com/mercedes-benz/garm-provider-k8s) - Thanks to the amazing folks at @mercedes-benz for sharing their awesome provider! * [LXD](https://github.com/cloudbase/garm-provider-lxd) -* [Incus](https://github.com/cloudbase/garm-provider-incus) -* [Equinix Metal](https://github.com/cloudbase/garm-provider-equinix) -* [Amazon EC2](https://github.com/cloudbase/garm-provider-aws) -* [Google Cloud Platform (GCP)](https://github.com/cloudbase/garm-provider-gcp) +* [OpenStack](https://github.com/cloudbase/garm-provider-openstack) * [Oracle Cloud Infrastructure (OCI)](https://github.com/cloudbase/garm-provider-oci) Follow the instructions in the README of each provider to install them. From 4c536f2584e33f13f1486f75feed52f658ea51a1 Mon Sep 17 00:00:00 2001 From: Mathieu Tortuyaux Date: Tue, 10 Jun 2025 17:59:29 +0200 Subject: [PATCH 102/179] readme: add Akamai/Linode external provider The provider is quite new and awaits feedback; let's mention the "experimental" status. Signed-off-by: Mathieu Tortuyaux --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 99c914d7..92664859 100644 --- a/README.md +++ b/README.md @@ -94,6 +94,7 @@ GARM uses providers to create runners in a particular IaaS. The providers are ex External providers are binaries that GARM calls into to create runners in a particular IaaS. There are several external providers available: +* [Akamai/Linode](https://github.com/flatcar/garm-provider-linode) - Experimental * [Amazon EC2](https://github.com/cloudbase/garm-provider-aws) * [Azure](https://github.com/cloudbase/garm-provider-azure) * [Equinix Metal](https://github.com/cloudbase/garm-provider-equinix) * [Google Cloud Platform (GCP)](https://github.com/cloudbase/garm-provider-gcp) * [Incus](https://github.com/cloudbase/garm-provider-incus) * [Kubernetes](https://github.com/mercedes-benz/garm-provider-k8s) - Thanks to the amazing folks at @mercedes-benz for sharing their awesome provider! * [LXD](https://github.com/cloudbase/garm-provider-lxd) * [OpenStack](https://github.com/cloudbase/garm-provider-openstack) * [Oracle Cloud Infrastructure (OCI)](https://github.com/cloudbase/garm-provider-oci) Follow the instructions in the README of each provider to install them. From 98fa085bc739eddc9adfb2903d0ce7b32d7506d1 Mon Sep 17 00:00:00 2001 From: Mathieu Tortuyaux Date: Thu, 12 Jun 2025 09:53:59 +0200 Subject: [PATCH 103/179] doc/gitea: remove 'version' This field is deprecated: ``` $ docker compose version Docker Compose version v2.36.2 ... WARN[0000] /home/core/docker-compose.yaml: the attribute `version` is obsolete, it will be ignored, please remove it to avoid potential confusion ``` Signed-off-by: Mathieu Tortuyaux --- doc/gitea.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/doc/gitea.md b/doc/gitea.md index e7833a32..923d59fd 100644 --- a/doc/gitea.md +++ b/doc/gitea.md @@ -36,8 +36,6 @@ sudo iptables -I DOCKER-USER -j ACCEPT Create a docker compose file in `$HOME/compose.yaml`. This docker compose will deploy both gitea and GARM. If you already have a Gitea >=1.24.0, you can edit this docker compose to only deploy GARM. ```yaml -version: "3" - networks: default: external: false From d42160cab2d7e3f93ddc5bf09173236dc952f77c Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 17 Jun 2025 21:03:46 +0000 Subject: [PATCH 104/179] Update dependencies This change updates all dependencies.
Signed-off-by: Gabriel Adrian Samfira --- go.mod | 32 +- go.sum | 66 ++- params/params.go | 2 +- runner/common/mocks/GithubClient.go | 2 +- runner/common/mocks/GithubEnterpriseClient.go | 2 +- runner/common/mocks/GithubEntityOperations.go | 2 +- runner/common/mocks/OrganizationHooks.go | 2 +- runner/common/mocks/RateLimitClient.go | 2 +- runner/common/mocks/RepositoryHooks.go | 2 +- runner/common/util.go | 2 +- runner/pool/common.go | 2 +- runner/pool/pool.go | 2 +- runner/pool/stub_client.go | 2 +- runner/pool/util.go | 2 +- test/integration/gh_cleanup/main.go | 2 +- test/integration/jobs_test.go | 2 +- test/integration/organizations_test.go | 2 +- test/integration/repositories_test.go | 2 +- util/github/client.go | 2 +- util/github/gitea.go | 2 +- util/github/scalesets/client.go | 2 +- .../ghinstallation/v2/transport.go | 2 +- vendor/github.com/go-logr/logr/.golangci.yaml | 16 +- vendor/github.com/go-logr/logr/funcr/funcr.go | 8 +- vendor/github.com/go-sql-driver/mysql/AUTHORS | 3 + .../go-sql-driver/mysql/CHANGELOG.md | 11 + .../github.com/go-sql-driver/mysql/buffer.go | 18 +- .../go-sql-driver/mysql/compress.go | 21 +- .../github.com/go-sql-driver/mysql/packets.go | 49 +- .../go-sql-driver/mysql/transaction.go | 18 +- .../v71/github/actions_required_workflows.go | 267 ----------- .../google/go-github/{v71 => v72}/AUTHORS | 0 .../google/go-github/{v71 => v72}/LICENSE | 0 .../go-github/{v71 => v72}/github/actions.go | 0 .../{v71 => v72}/github/actions_artifacts.go | 0 .../{v71 => v72}/github/actions_cache.go | 0 .../github/actions_hosted_runners.go | 0 .../{v71 => v72}/github/actions_oidc.go | 0 .../github/actions_permissions_enterprise.go | 0 .../github/actions_permissions_orgs.go | 0 .../github/actions_runner_groups.go | 0 .../{v71 => v72}/github/actions_runners.go | 0 .../{v71 => v72}/github/actions_secrets.go | 0 .../{v71 => v72}/github/actions_variables.go | 0 .../github/actions_workflow_jobs.go | 0 .../github/actions_workflow_runs.go | 0 .../{v71 => v72}/github/actions_workflows.go | 0 .../go-github/{v71 => v72}/github/activity.go | 0 .../{v71 => v72}/github/activity_events.go | 0 .../github/activity_notifications.go | 0 .../{v71 => v72}/github/activity_star.go | 0 .../{v71 => v72}/github/activity_watching.go | 0 .../go-github/{v71 => v72}/github/admin.go | 0 .../{v71 => v72}/github/admin_orgs.go | 0 .../{v71 => v72}/github/admin_stats.go | 0 .../{v71 => v72}/github/admin_users.go | 0 .../go-github/{v71 => v72}/github/apps.go | 0 .../{v71 => v72}/github/apps_hooks.go | 0 .../github/apps_hooks_deliveries.go | 0 .../{v71 => v72}/github/apps_installation.go | 0 .../{v71 => v72}/github/apps_manifest.go | 0 .../{v71 => v72}/github/apps_marketplace.go | 0 .../{v71 => v72}/github/attestations.go | 0 .../{v71 => v72}/github/authorizations.go | 0 .../go-github/{v71 => v72}/github/billing.go | 0 .../go-github/{v71 => v72}/github/checks.go | 0 .../{v71 => v72}/github/code_scanning.go | 0 .../{v71 => v72}/github/codesofconduct.go | 0 .../{v71 => v72}/github/codespaces.go | 0 .../{v71 => v72}/github/codespaces_secrets.go | 0 .../go-github/{v71 => v72}/github/copilot.go | 0 .../{v71 => v72}/github/dependabot.go | 0 .../{v71 => v72}/github/dependabot_alerts.go | 10 + .../{v71 => v72}/github/dependabot_secrets.go | 0 .../{v71 => v72}/github/dependency_graph.go | 0 .../github/dependency_graph_snapshots.go | 0 .../go-github/{v71 => v72}/github/doc.go | 2 +- .../go-github/{v71 => v72}/github/emojis.go | 0 .../{v71 => v72}/github/enterprise.go | 0 .../enterprise_actions_hosted_runners.go | 0 
.../enterprise_actions_runner_groups.go | 0 .../github/enterprise_actions_runners.go | 0 .../github/enterprise_audit_log.go | 0 .../enterprise_code_security_and_analysis.go | 0 .../github/enterprise_manage_ghes.go | 0 .../github/enterprise_manage_ghes_config.go | 0 .../enterprise_manage_ghes_maintenance.go | 0 .../github/enterprise_manage_ghes_ssh.go | 0 .../enterprise_network_configurations.go | 0 .../github/enterprise_properties.go | 0 .../{v71 => v72}/github/enterprise_rules.go | 0 .../go-github/{v71 => v72}/github/event.go | 0 .../{v71 => v72}/github/event_types.go | 20 + .../go-github/{v71 => v72}/github/gists.go | 0 .../{v71 => v72}/github/gists_comments.go | 0 .../go-github/{v71 => v72}/github/git.go | 0 .../{v71 => v72}/github/git_blobs.go | 0 .../{v71 => v72}/github/git_commits.go | 0 .../go-github/{v71 => v72}/github/git_refs.go | 0 .../go-github/{v71 => v72}/github/git_tags.go | 0 .../{v71 => v72}/github/git_trees.go | 0 .../{v71 => v72}/github/github-accessors.go | 368 ++++++-------- .../go-github/{v71 => v72}/github/github.go | 2 +- .../{v71 => v72}/github/gitignore.go | 0 .../{v71 => v72}/github/interactions.go | 0 .../{v71 => v72}/github/interactions_orgs.go | 0 .../{v71 => v72}/github/interactions_repos.go | 0 .../{v71 => v72}/github/issue_import.go | 0 .../go-github/{v71 => v72}/github/issues.go | 9 + .../{v71 => v72}/github/issues_assignees.go | 0 .../{v71 => v72}/github/issues_comments.go | 0 .../{v71 => v72}/github/issues_events.go | 0 .../{v71 => v72}/github/issues_labels.go | 0 .../{v71 => v72}/github/issues_milestones.go | 0 .../{v71 => v72}/github/issues_timeline.go | 0 .../go-github/{v71 => v72}/github/licenses.go | 0 .../go-github/{v71 => v72}/github/markdown.go | 0 .../go-github/{v71 => v72}/github/messages.go | 1 + .../go-github/{v71 => v72}/github/meta.go | 0 .../{v71 => v72}/github/migrations.go | 0 .../github/migrations_source_import.go | 0 .../{v71 => v72}/github/migrations_user.go | 0 .../go-github/{v71 => v72}/github/orgs.go | 0 .../github/orgs_actions_allowed.go | 0 .../github/orgs_actions_permissions.go | 0 .../{v71 => v72}/github/orgs_attestations.go | 0 .../{v71 => v72}/github/orgs_audit_log.go | 0 .../orgs_codesecurity_configurations.go | 0 .../github/orgs_credential_authorizations.go | 0 .../github/orgs_custom_repository_roles.go | 0 .../{v71 => v72}/github/orgs_hooks.go | 0 .../github/orgs_hooks_configuration.go | 0 .../github/orgs_hooks_deliveries.go | 0 .../{v71 => v72}/github/orgs_issue_types.go | 0 .../{v71 => v72}/github/orgs_members.go | 0 .../github/orgs_network_configurations.go | 0 .../github/orgs_organization_roles.go | 0 .../github/orgs_outside_collaborators.go | 0 .../{v71 => v72}/github/orgs_packages.go | 0 .../github/orgs_personal_access_tokens.go | 0 .../{v71 => v72}/github/orgs_properties.go | 0 .../{v71 => v72}/github/orgs_rules.go | 7 +- .../github/orgs_security_managers.go | 0 .../github/orgs_users_blocking.go | 0 .../go-github/{v71 => v72}/github/packages.go | 0 .../go-github/{v71 => v72}/github/pulls.go | 0 .../{v71 => v72}/github/pulls_comments.go | 0 .../{v71 => v72}/github/pulls_reviewers.go | 0 .../{v71 => v72}/github/pulls_reviews.go | 0 .../{v71 => v72}/github/pulls_threads.go | 0 .../{v71 => v72}/github/rate_limit.go | 0 .../{v71 => v72}/github/reactions.go | 0 .../go-github/{v71 => v72}/github/repos.go | 0 .../github/repos_actions_access.go | 0 .../github/repos_actions_allowed.go | 0 .../github/repos_actions_permissions.go | 0 .../{v71 => v72}/github/repos_attestations.go | 0 .../{v71 => v72}/github/repos_autolinks.go | 
0 .../{v71 => v72}/github/repos_codeowners.go | 0 .../github/repos_collaborators.go | 0 .../{v71 => v72}/github/repos_comments.go | 0 .../{v71 => v72}/github/repos_commits.go | 0 .../github/repos_community_health.go | 0 .../{v71 => v72}/github/repos_contents.go | 0 .../repos_deployment_branch_policies.go | 0 .../repos_deployment_protection_rules.go | 0 .../{v71 => v72}/github/repos_deployments.go | 0 .../{v71 => v72}/github/repos_environments.go | 0 .../{v71 => v72}/github/repos_forks.go | 0 .../{v71 => v72}/github/repos_hooks.go | 0 .../github/repos_hooks_configuration.go | 0 .../github/repos_hooks_deliveries.go | 21 + .../{v71 => v72}/github/repos_invitations.go | 0 .../{v71 => v72}/github/repos_keys.go | 0 .../{v71 => v72}/github/repos_lfs.go | 0 .../{v71 => v72}/github/repos_merging.go | 0 .../{v71 => v72}/github/repos_pages.go | 0 .../github/repos_prereceive_hooks.go | 0 .../{v71 => v72}/github/repos_properties.go | 0 .../{v71 => v72}/github/repos_releases.go | 0 .../{v71 => v72}/github/repos_rules.go | 27 +- .../{v71 => v72}/github/repos_stats.go | 0 .../{v71 => v72}/github/repos_statuses.go | 0 .../{v71 => v72}/github/repos_tags.go | 0 .../{v71 => v72}/github/repos_traffic.go | 0 .../go-github/{v71 => v72}/github/rules.go | 48 +- .../go-github/{v71 => v72}/github/scim.go | 0 .../go-github/{v71 => v72}/github/search.go | 0 .../{v71 => v72}/github/secret_scanning.go | 50 +- .../github/security_advisories.go | 0 .../go-github/{v71 => v72}/github/strings.go | 0 .../go-github/{v71 => v72}/github/teams.go | 0 .../github/teams_discussion_comments.go | 0 .../{v71 => v72}/github/teams_discussions.go | 0 .../{v71 => v72}/github/teams_members.go | 0 .../{v71 => v72}/github/timestamp.go | 0 .../go-github/{v71 => v72}/github/users.go | 0 .../github/users_administration.go | 0 .../{v71 => v72}/github/users_attestations.go | 0 .../{v71 => v72}/github/users_blocking.go | 0 .../{v71 => v72}/github/users_emails.go | 0 .../{v71 => v72}/github/users_followers.go | 0 .../{v71 => v72}/github/users_gpg_keys.go | 0 .../{v71 => v72}/github/users_keys.go | 0 .../{v71 => v72}/github/users_packages.go | 0 .../github/users_ssh_signing_keys.go | 0 .../{v71 => v72}/github/with_appengine.go | 0 .../{v71 => v72}/github/without_appengine.go | 0 .../prometheus/common/expfmt/text_parse.go | 4 +- .../prometheus/common/model/labels.go | 3 +- .../bson/bsonrw/extjson_writer.go | 7 +- vendor/go.opentelemetry.io/otel/.golangci.yml | 452 +++++++++--------- vendor/go.opentelemetry.io/otel/CHANGELOG.md | 54 ++- .../go.opentelemetry.io/otel/CONTRIBUTING.md | 1 + vendor/go.opentelemetry.io/otel/Makefile | 19 +- vendor/go.opentelemetry.io/otel/README.md | 8 +- vendor/go.opentelemetry.io/otel/RELEASING.md | 18 + .../otel/attribute/filter.go | 4 +- .../internal}/attribute.go | 2 +- .../otel/attribute/rawhelpers.go | 37 ++ .../otel/attribute/value.go | 15 +- .../otel/dependencies.Dockerfile | 5 +- .../go.opentelemetry.io/otel/get_main_pkgs.sh | 30 -- .../go.opentelemetry.io/otel/internal/gen.go | 18 - .../otel/internal/global/handler.go | 1 + .../otel/internal/global/meter.go | 45 +- .../otel/internal/global/trace.go | 13 +- .../otel/internal/rawhelpers.go | 48 -- .../otel/metric/asyncfloat64.go | 12 +- .../otel/metric/asyncint64.go | 8 +- .../otel/metric/instrument.go | 16 +- .../go.opentelemetry.io/otel/metric/meter.go | 10 +- .../otel/propagation/baggage.go | 36 +- .../otel/propagation/propagation.go | 30 +- vendor/go.opentelemetry.io/otel/renovate.json | 7 +- .../otel/semconv/internal/v2/http.go | 1 + 
vendor/go.opentelemetry.io/otel/trace/auto.go | 5 +- .../otel/trace/internal/telemetry/span.go | 56 ++- .../otel/trace/internal/telemetry/status.go | 12 +- .../otel/trace/internal/telemetry/traces.go | 4 +- .../otel/trace/internal/telemetry/value.go | 2 +- vendor/go.opentelemetry.io/otel/trace/noop.go | 2 + .../otel/verify_readmes.sh | 21 - vendor/go.opentelemetry.io/otel/version.go | 2 +- vendor/go.opentelemetry.io/otel/versions.yaml | 8 +- vendor/golang.org/x/crypto/bcrypt/bcrypt.go | 2 +- vendor/golang.org/x/mod/semver/semver.go | 30 +- vendor/golang.org/x/sync/errgroup/errgroup.go | 9 +- vendor/gorm.io/driver/mysql/migrator.go | 2 +- vendor/gorm.io/driver/mysql/mysql.go | 4 +- vendor/gorm.io/driver/sqlite/ddlmod.go | 10 +- vendor/gorm.io/driver/sqlite/sqlite.go | 4 +- vendor/modules.txt | 47 +- 253 files changed, 1092 insertions(+), 1146 deletions(-) delete mode 100644 vendor/github.com/google/go-github/v71/github/actions_required_workflows.go rename vendor/github.com/google/go-github/{v71 => v72}/AUTHORS (100%) rename vendor/github.com/google/go-github/{v71 => v72}/LICENSE (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_artifacts.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_cache.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_hosted_runners.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_oidc.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_permissions_enterprise.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_permissions_orgs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_runner_groups.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_runners.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_secrets.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_variables.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_workflow_jobs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_workflow_runs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_workflows.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/activity.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/activity_events.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/activity_notifications.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/activity_star.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/activity_watching.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/admin.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/admin_orgs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/admin_stats.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/admin_users.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/apps.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/apps_hooks.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/apps_hooks_deliveries.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/apps_installation.go (100%) rename vendor/github.com/google/go-github/{v71 => 
v72}/github/apps_manifest.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/apps_marketplace.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/attestations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/authorizations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/billing.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/checks.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/code_scanning.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/codesofconduct.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/codespaces.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/codespaces_secrets.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/copilot.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/dependabot.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/dependabot_alerts.go (94%) rename vendor/github.com/google/go-github/{v71 => v72}/github/dependabot_secrets.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/dependency_graph.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/dependency_graph_snapshots.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/doc.go (99%) rename vendor/github.com/google/go-github/{v71 => v72}/github/emojis.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_actions_hosted_runners.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_actions_runner_groups.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_actions_runners.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_audit_log.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_code_security_and_analysis.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_manage_ghes.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_manage_ghes_config.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_manage_ghes_maintenance.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_manage_ghes_ssh.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_network_configurations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_properties.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_rules.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/event.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/event_types.go (99%) rename vendor/github.com/google/go-github/{v71 => v72}/github/gists.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/gists_comments.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/git.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/git_blobs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/git_commits.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/git_refs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/git_tags.go (100%) rename vendor/github.com/google/go-github/{v71 
=> v72}/github/git_trees.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/github-accessors.go (99%) rename vendor/github.com/google/go-github/{v71 => v72}/github/github.go (99%) rename vendor/github.com/google/go-github/{v71 => v72}/github/gitignore.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/interactions.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/interactions_orgs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/interactions_repos.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/issue_import.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/issues.go (97%) rename vendor/github.com/google/go-github/{v71 => v72}/github/issues_assignees.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/issues_comments.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/issues_events.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/issues_labels.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/issues_milestones.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/issues_timeline.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/licenses.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/markdown.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/messages.go (99%) rename vendor/github.com/google/go-github/{v71 => v72}/github/meta.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/migrations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/migrations_source_import.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/migrations_user.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_actions_allowed.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_actions_permissions.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_attestations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_audit_log.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_codesecurity_configurations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_credential_authorizations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_custom_repository_roles.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_hooks.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_hooks_configuration.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_hooks_deliveries.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_issue_types.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_members.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_network_configurations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_organization_roles.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_outside_collaborators.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_packages.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_personal_access_tokens.go (100%) rename vendor/github.com/google/go-github/{v71 
=> v72}/github/orgs_properties.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_rules.go (96%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_security_managers.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_users_blocking.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/packages.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/pulls.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/pulls_comments.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/pulls_reviewers.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/pulls_reviews.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/pulls_threads.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/rate_limit.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/reactions.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_actions_access.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_actions_allowed.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_actions_permissions.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_attestations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_autolinks.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_codeowners.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_collaborators.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_comments.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_commits.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_community_health.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_contents.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_deployment_branch_policies.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_deployment_protection_rules.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_deployments.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_environments.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_forks.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_hooks.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_hooks_configuration.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_hooks_deliveries.go (89%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_invitations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_keys.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_lfs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_merging.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_pages.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_prereceive_hooks.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_properties.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_releases.go (100%) rename vendor/github.com/google/go-github/{v71 
=> v72}/github/repos_rules.go (88%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_stats.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_statuses.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_tags.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_traffic.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/rules.go (95%) rename vendor/github.com/google/go-github/{v71 => v72}/github/scim.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/search.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/secret_scanning.go (78%) rename vendor/github.com/google/go-github/{v71 => v72}/github/security_advisories.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/strings.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/teams.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/teams_discussion_comments.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/teams_discussions.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/teams_members.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/timestamp.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_administration.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_attestations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_blocking.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_emails.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_followers.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_gpg_keys.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_keys.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_packages.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_ssh_signing_keys.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/with_appengine.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/without_appengine.go (100%) rename vendor/go.opentelemetry.io/otel/{internal/attribute => attribute/internal}/attribute.go (97%) create mode 100644 vendor/go.opentelemetry.io/otel/attribute/rawhelpers.go delete mode 100644 vendor/go.opentelemetry.io/otel/get_main_pkgs.sh delete mode 100644 vendor/go.opentelemetry.io/otel/internal/gen.go delete mode 100644 vendor/go.opentelemetry.io/otel/internal/rawhelpers.go delete mode 100644 vendor/go.opentelemetry.io/otel/verify_readmes.sh diff --git a/go.mod b/go.mod index 94071dd7..ad7cdb66 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ toolchain go1.23.6 require ( github.com/BurntSushi/toml v1.5.0 - github.com/bradleyfalzon/ghinstallation/v2 v2.15.0 + github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 github.com/cloudbase/garm-provider-common v0.1.5-0.20250417155201-8ef03502d06e github.com/felixge/httpsnoop v1.0.4 github.com/go-openapi/errors v0.22.1 @@ -14,7 +14,7 @@ require ( github.com/go-openapi/strfmt v0.23.0 github.com/go-openapi/swag v0.23.1 github.com/golang-jwt/jwt/v5 v5.2.2 - github.com/google/go-github/v71 v71.0.0 + github.com/google/go-github/v72 v72.0.0 github.com/google/uuid v1.6.0 github.com/gorilla/handlers v1.5.2 github.com/gorilla/mux v1.8.1 @@ 
-28,15 +28,15 @@ require ( github.com/prometheus/client_golang v1.22.0 github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 - golang.org/x/crypto v0.38.0 - golang.org/x/mod v0.24.0 + golang.org/x/crypto v0.39.0 + golang.org/x/mod v0.25.0 golang.org/x/oauth2 v0.30.0 - golang.org/x/sync v0.14.0 + golang.org/x/sync v0.15.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 gorm.io/datatypes v1.2.5 - gorm.io/driver/mysql v1.5.7 - gorm.io/driver/sqlite v1.5.7 + gorm.io/driver/mysql v1.6.0 + gorm.io/driver/sqlite v1.6.0 gorm.io/gorm v1.30.0 ) @@ -47,7 +47,7 @@ require ( github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/chzyer/readline v1.5.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/analysis v0.23.0 // indirect github.com/go-openapi/jsonpointer v0.21.1 // indirect @@ -55,7 +55,7 @@ require ( github.com/go-openapi/loads v0.22.0 // indirect github.com/go-openapi/spec v0.21.0 // indirect github.com/go-openapi/validate v0.24.0 // indirect - github.com/go-sql-driver/mysql v1.9.2 // indirect + github.com/go-sql-driver/mysql v1.9.3 // indirect github.com/golang-jwt/jwt/v4 v4.5.2 // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect @@ -76,20 +76,20 @@ require ( github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_model v0.6.2 // indirect - github.com/prometheus/common v0.63.0 // indirect + github.com/prometheus/common v0.64.0 // indirect github.com/prometheus/procfs v0.16.1 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/spf13/pflag v1.0.6 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 // indirect - go.mongodb.org/mongo-driver v1.17.3 // indirect + go.mongodb.org/mongo-driver v1.17.4 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/otel v1.35.0 // indirect - go.opentelemetry.io/otel/metric v1.35.0 // indirect - go.opentelemetry.io/otel/trace v1.35.0 // indirect - golang.org/x/net v0.40.0 // indirect + go.opentelemetry.io/otel v1.36.0 // indirect + go.opentelemetry.io/otel/metric v1.36.0 // indirect + go.opentelemetry.io/otel/trace v1.36.0 // indirect + golang.org/x/net v0.41.0 // indirect golang.org/x/sys v0.33.0 // indirect - golang.org/x/text v0.25.0 // indirect + golang.org/x/text v0.26.0 // indirect google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 467ebbcf..99587a2c 100644 --- a/go.sum +++ b/go.sum @@ -6,8 +6,8 @@ github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3d github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bradleyfalzon/ghinstallation/v2 v2.15.0 h1:7r2rPUM04rgszMP0U1UZ1M5VoVVIlsaBSnpABfYxcQY= -github.com/bradleyfalzon/ghinstallation/v2 v2.15.0/go.mod h1:PoH9Vhy82OeRFZfxsVrk3mfQhVkEzou9OOwPOsEhiXE= +github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 h1:B91r9bHtXp/+XRgS5aZm6ZzTdz3ahgJYmkt4xZkgDz8= 
+github.com/bradleyfalzon/ghinstallation/v2 v2.16.0/go.mod h1:OeVe5ggFzoBnmgitZe/A+BqGOnv1DvU/0uiLQi1wutM= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= @@ -28,8 +28,8 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= -github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= @@ -52,9 +52,8 @@ github.com/go-openapi/swag v0.23.1 h1:lpsStH0n2ittzTnbaSloVZLuB5+fvSY/+hnagBjSNZ github.com/go-openapi/swag v0.23.1/go.mod h1:STZs8TbRvEQQKUA+JZNAm3EWlgaOBGpyFDqQnDHMef0= github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58= github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ= -github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= -github.com/go-sql-driver/mysql v1.9.2 h1:4cNKDYQ1I84SXslGddlsrMhc8k4LeDVj6Ad6WRjiHuU= -github.com/go-sql-driver/mysql v1.9.2/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= +github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo= +github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= @@ -66,8 +65,8 @@ github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EO github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/google/go-github/v71 v71.0.0 h1:Zi16OymGKZZMm8ZliffVVJ/Q9YZreDKONCr+WUd0Z30= -github.com/google/go-github/v71 v71.0.0/go.mod h1:URZXObp2BLlMjwu0O8g4y6VBneUj2bCHgnI8FfgZ51M= +github.com/google/go-github/v72 v72.0.0 h1:FcIO37BLoVPBO9igQQ6tStsv2asG4IPcYFi655PPvBM= +github.com/google/go-github/v72 v72.0.0/go.mod h1:WWtw8GMRiL62mvIquf1kO3onRHeWWKmK01qdCY8c5fg= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= @@ -153,8 +152,8 @@ github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/ github.com/prometheus/client_golang v1.22.0/go.mod 
h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0= github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= -github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k= -github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18= +github.com/prometheus/common v0.64.0 h1:pdZeA+g617P7oGv1CzdTzyeShxAGrTBsolKNOLQPGO4= +github.com/prometheus/common v0.64.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= @@ -176,35 +175,35 @@ github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOf github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 h1:xzABM9let0HLLqFypcxvLmlvEciCHL7+Lv+4vwZqecI= github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569/go.mod h1:2Ly+NIftZN4de9zRmENdYbvPQeaVIYKWpLFStLFEBgI= -go.mongodb.org/mongo-driver v1.17.3 h1:TQyXhnsWfWtgAhMtOgtYHMTkZIfBTpMTsMnd9ZBeHxQ= -go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +go.mongodb.org/mongo-driver v1.17.4 h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFXVw= +go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= -go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= -go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= -go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= +go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= +go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= +go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= +go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= -go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= -go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= -golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= -golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= -golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= -golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= -golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= -golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= +go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= +go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= +golang.org/x/crypto 
v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= +golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= +golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= +golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= -golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= -golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= +golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= -golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= +golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= +golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= @@ -222,14 +221,13 @@ gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gorm.io/datatypes v1.2.5 h1:9UogU3jkydFVW1bIVVeoYsTpLRgwDVW3rHfJG6/Ek9I= gorm.io/datatypes v1.2.5/go.mod h1:I5FUdlKpLb5PMqeMQhm30CQ6jXP8Rj89xkTeCSAaAD4= -gorm.io/driver/mysql v1.5.7 h1:MndhOPYOfEp2rHKgkZIhJ16eVUIRf2HmzgoPmh7FCWo= -gorm.io/driver/mysql v1.5.7/go.mod h1:sEtPWMiqiN1N1cMXoXmBbd8C6/l+TESwriotuRRpkDM= +gorm.io/driver/mysql v1.6.0 h1:eNbLmNTpPpTOVZi8MMxCi2aaIm0ZpInbORNXDwyLGvg= +gorm.io/driver/mysql v1.6.0/go.mod h1:D/oCC2GWK3M/dqoLxnOlaNKmXz8WNTfcS9y5ovaSqKo= gorm.io/driver/postgres v1.5.0 h1:u2FXTy14l45qc3UeCJ7QaAXZmZfDDv0YrthvmRq1l0U= gorm.io/driver/postgres v1.5.0/go.mod h1:FUZXzO+5Uqg5zzwzv4KK49R8lvGIyscBOqYrtI1Ce9A= -gorm.io/driver/sqlite v1.5.7 h1:8NvsrhP0ifM7LX9G4zPB97NwovUakUxc+2V2uuf3Z1I= -gorm.io/driver/sqlite v1.5.7/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDah4= +gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ= +gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8= gorm.io/driver/sqlserver v1.5.4 h1:xA+Y1KDNspv79q43bPyjDMUgHoYHLhXYmdFcYPobg8g= gorm.io/driver/sqlserver v1.5.4/go.mod h1:+frZ/qYmuna11zHPlh5oc2O6ZA/lS88Keb0XSH1Zh/g= -gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs= gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/params/params.go b/params/params.go index 
a680fda4..2a7fdef9 100644 --- a/params/params.go +++ b/params/params.go @@ -27,7 +27,7 @@ import ( "time" "github.com/bradleyfalzon/ghinstallation/v2" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" "github.com/google/uuid" "golang.org/x/oauth2" diff --git a/runner/common/mocks/GithubClient.go b/runner/common/mocks/GithubClient.go index 36ef1079..f44d54cb 100644 --- a/runner/common/mocks/GithubClient.go +++ b/runner/common/mocks/GithubClient.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v71/github" + github "github.com/google/go-github/v72/github" mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" diff --git a/runner/common/mocks/GithubEnterpriseClient.go b/runner/common/mocks/GithubEnterpriseClient.go index fa2966f0..5606e340 100644 --- a/runner/common/mocks/GithubEnterpriseClient.go +++ b/runner/common/mocks/GithubEnterpriseClient.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v71/github" + github "github.com/google/go-github/v72/github" mock "github.com/stretchr/testify/mock" ) diff --git a/runner/common/mocks/GithubEntityOperations.go b/runner/common/mocks/GithubEntityOperations.go index 0aab9943..15326795 100644 --- a/runner/common/mocks/GithubEntityOperations.go +++ b/runner/common/mocks/GithubEntityOperations.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v71/github" + github "github.com/google/go-github/v72/github" mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" diff --git a/runner/common/mocks/OrganizationHooks.go b/runner/common/mocks/OrganizationHooks.go index 46638f02..73528638 100644 --- a/runner/common/mocks/OrganizationHooks.go +++ b/runner/common/mocks/OrganizationHooks.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v71/github" + github "github.com/google/go-github/v72/github" mock "github.com/stretchr/testify/mock" ) diff --git a/runner/common/mocks/RateLimitClient.go b/runner/common/mocks/RateLimitClient.go index 2c360217..119f62e1 100644 --- a/runner/common/mocks/RateLimitClient.go +++ b/runner/common/mocks/RateLimitClient.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v71/github" + github "github.com/google/go-github/v72/github" mock "github.com/stretchr/testify/mock" ) diff --git a/runner/common/mocks/RepositoryHooks.go b/runner/common/mocks/RepositoryHooks.go index 76ed0db0..3f38915e 100644 --- a/runner/common/mocks/RepositoryHooks.go +++ b/runner/common/mocks/RepositoryHooks.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v71/github" + github "github.com/google/go-github/v72/github" mock "github.com/stretchr/testify/mock" ) diff --git a/runner/common/util.go b/runner/common/util.go index 2720c496..588ab68e 100644 --- a/runner/common/util.go +++ b/runner/common/util.go @@ -18,7 +18,7 @@ import ( "context" "net/url" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" "github.com/cloudbase/garm/params" ) diff --git a/runner/pool/common.go b/runner/pool/common.go index 5316e07e..066866a2 100644 --- a/runner/pool/common.go +++ b/runner/pool/common.go @@ -20,7 +20,7 @@ import ( "net/url" "strings" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" "github.com/pkg/errors" runnerErrors 
"github.com/cloudbase/garm-provider-common/errors" diff --git a/runner/pool/pool.go b/runner/pool/pool.go index ca95867f..17285e1d 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -27,7 +27,7 @@ import ( "sync" "time" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" "github.com/google/uuid" "github.com/pkg/errors" "golang.org/x/sync/errgroup" diff --git a/runner/pool/stub_client.go b/runner/pool/stub_client.go index 6fa44e74..6493f7a5 100644 --- a/runner/pool/stub_client.go +++ b/runner/pool/stub_client.go @@ -18,7 +18,7 @@ import ( "context" "net/url" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" "github.com/cloudbase/garm/params" ) diff --git a/runner/pool/util.go b/runner/pool/util.go index dd55e1db..c6c311c8 100644 --- a/runner/pool/util.go +++ b/runner/pool/util.go @@ -21,7 +21,7 @@ import ( "sync/atomic" "time" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" diff --git a/test/integration/gh_cleanup/main.go b/test/integration/gh_cleanup/main.go index 95f1aa78..86d39ea7 100644 --- a/test/integration/gh_cleanup/main.go +++ b/test/integration/gh_cleanup/main.go @@ -20,7 +20,7 @@ import ( "log/slog" "os" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" "golang.org/x/oauth2" ) diff --git a/test/integration/jobs_test.go b/test/integration/jobs_test.go index 8da94414..4b2d9d5d 100644 --- a/test/integration/jobs_test.go +++ b/test/integration/jobs_test.go @@ -21,7 +21,7 @@ import ( "fmt" "time" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/params" diff --git a/test/integration/organizations_test.go b/test/integration/organizations_test.go index 8acfb6d3..d587f4a5 100644 --- a/test/integration/organizations_test.go +++ b/test/integration/organizations_test.go @@ -21,7 +21,7 @@ import ( "fmt" "time" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/params" diff --git a/test/integration/repositories_test.go b/test/integration/repositories_test.go index 43a5d8ec..1b0558f9 100644 --- a/test/integration/repositories_test.go +++ b/test/integration/repositories_test.go @@ -21,7 +21,7 @@ import ( "fmt" "time" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" "golang.org/x/oauth2" commonParams "github.com/cloudbase/garm-provider-common/params" diff --git a/util/github/client.go b/util/github/client.go index 35d846ab..46b6a170 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -24,7 +24,7 @@ import ( "net/url" "strings" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" diff --git a/util/github/gitea.go b/util/github/gitea.go index e657db48..51f340b7 100644 --- a/util/github/gitea.go +++ b/util/github/gitea.go @@ -19,7 +19,7 @@ import ( "fmt" "net/http" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" "github.com/pkg/errors" "github.com/cloudbase/garm/metrics" diff --git a/util/github/scalesets/client.go b/util/github/scalesets/client.go index 
7a8a53fd..5b01a539 100644 --- a/util/github/scalesets/client.go +++ b/util/github/scalesets/client.go @@ -20,7 +20,7 @@ import ( "net/http" "sync" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/params" diff --git a/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go b/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go index 39be5917..7794dd9b 100644 --- a/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go +++ b/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go @@ -13,7 +13,7 @@ import ( "sync" "time" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" ) const ( diff --git a/vendor/github.com/go-logr/logr/.golangci.yaml b/vendor/github.com/go-logr/logr/.golangci.yaml index 0cffafa7..0ed62c1a 100644 --- a/vendor/github.com/go-logr/logr/.golangci.yaml +++ b/vendor/github.com/go-logr/logr/.golangci.yaml @@ -1,26 +1,28 @@ +version: "2" + run: timeout: 1m tests: true linters: - disable-all: true - enable: + default: none + enable: # please keep this alphabetized + - asasalint - asciicheck + - copyloopvar + - dupl - errcheck - forcetypeassert + - goconst - gocritic - - gofmt - - goimports - - gosimple - govet - ineffassign - misspell + - musttag - revive - staticcheck - - typecheck - unused issues: - exclude-use-default: false max-issues-per-linter: 0 max-same-issues: 10 diff --git a/vendor/github.com/go-logr/logr/funcr/funcr.go b/vendor/github.com/go-logr/logr/funcr/funcr.go index 30568e76..b22c57d7 100644 --- a/vendor/github.com/go-logr/logr/funcr/funcr.go +++ b/vendor/github.com/go-logr/logr/funcr/funcr.go @@ -77,7 +77,7 @@ func newSink(fn func(prefix, args string), formatter Formatter) logr.LogSink { write: fn, } // For skipping fnlogger.Info and fnlogger.Error. - l.Formatter.AddCallDepth(1) + l.AddCallDepth(1) // via Formatter return l } @@ -164,17 +164,17 @@ type fnlogger struct { } func (l fnlogger) WithName(name string) logr.LogSink { - l.Formatter.AddName(name) + l.AddName(name) // via Formatter return &l } func (l fnlogger) WithValues(kvList ...any) logr.LogSink { - l.Formatter.AddValues(kvList) + l.AddValues(kvList) // via Formatter return &l } func (l fnlogger) WithCallDepth(depth int) logr.LogSink { - l.Formatter.AddCallDepth(depth) + l.AddCallDepth(depth) // via Formatter return &l } diff --git a/vendor/github.com/go-sql-driver/mysql/AUTHORS b/vendor/github.com/go-sql-driver/mysql/AUTHORS index 510b869b..ec346e20 100644 --- a/vendor/github.com/go-sql-driver/mysql/AUTHORS +++ b/vendor/github.com/go-sql-driver/mysql/AUTHORS @@ -25,6 +25,7 @@ Asta Xie B Lamarche Bes Dollma Bogdan Constantinescu +Brad Higgins Brian Hendriks Bulat Gaifullin Caine Jette @@ -37,6 +38,7 @@ Daniel Montoya Daniel Nichter Daniël van Eeden Dave Protasowski +Diego Dupin Dirkjan Bussink DisposaBoy Egor Smolyakov @@ -133,6 +135,7 @@ Ziheng Lyu Barracuda Networks, Inc. Counting Ltd. +Defined Networking Inc. DigitalOcean Inc. Dolthub Inc. dyves labs AG diff --git a/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md b/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md index 66189eda..75674b60 100644 --- a/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md +++ b/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## v1.9.3 (2025-06-13) + +* `tx.Commit()` and `tx.Rollback()` returned `ErrInvalidConn` always. + Now they return cached real error if present. 
(#1690) + +* Optimize reading small resultsets to fix performance regression + introduced by compression protocol support. (#1707) + +* Fix `db.Ping()` on compressed connection. (#1723) + + ## v1.9.2 (2025-04-07) v1.9.2 is a re-release of v1.9.1 due to a release process issue; no changes were made to the content. diff --git a/vendor/github.com/go-sql-driver/mysql/buffer.go b/vendor/github.com/go-sql-driver/mysql/buffer.go index a6532431..f895e87b 100644 --- a/vendor/github.com/go-sql-driver/mysql/buffer.go +++ b/vendor/github.com/go-sql-driver/mysql/buffer.go @@ -42,6 +42,11 @@ func (b *buffer) busy() bool { return len(b.buf) > 0 } +// len returns how many bytes in the read buffer. +func (b *buffer) len() int { + return len(b.buf) +} + // fill reads into the read buffer until at least _need_ bytes are in it. func (b *buffer) fill(need int, r readerFunc) error { // we'll move the contents of the current buffer to dest before filling it. @@ -86,17 +91,10 @@ func (b *buffer) fill(need int, r readerFunc) error { // returns next N bytes from buffer. // The returned slice is only guaranteed to be valid until the next read -func (b *buffer) readNext(need int, r readerFunc) ([]byte, error) { - if len(b.buf) < need { - // refill - if err := b.fill(need, r); err != nil { - return nil, err - } - } - - data := b.buf[:need] +func (b *buffer) readNext(need int) []byte { + data := b.buf[:need:need] b.buf = b.buf[need:] - return data, nil + return data } // takeBuffer returns a buffer with the requested size. diff --git a/vendor/github.com/go-sql-driver/mysql/compress.go b/vendor/github.com/go-sql-driver/mysql/compress.go index fa42772a..38bfa000 100644 --- a/vendor/github.com/go-sql-driver/mysql/compress.go +++ b/vendor/github.com/go-sql-driver/mysql/compress.go @@ -84,9 +84,9 @@ func (c *compIO) reset() { c.buff.Reset() } -func (c *compIO) readNext(need int, r readerFunc) ([]byte, error) { +func (c *compIO) readNext(need int) ([]byte, error) { for c.buff.Len() < need { - if err := c.readCompressedPacket(r); err != nil { + if err := c.readCompressedPacket(); err != nil { return nil, err } } @@ -94,8 +94,8 @@ func (c *compIO) readNext(need int, r readerFunc) ([]byte, error) { return data[:need:need], nil // prevent caller writes into c.buff } -func (c *compIO) readCompressedPacket(r readerFunc) error { - header, err := c.mc.buf.readNext(7, r) // size of compressed header +func (c *compIO) readCompressedPacket() error { + header, err := c.mc.readNext(7) if err != nil { return err } @@ -103,7 +103,7 @@ func (c *compIO) readCompressedPacket(r readerFunc) error { // compressed header structure comprLength := getUint24(header[0:3]) - compressionSequence := uint8(header[3]) + compressionSequence := header[3] uncompressedLength := getUint24(header[4:7]) if debug { fmt.Printf("uncompress cmplen=%v uncomplen=%v pkt_cmp_seq=%v expected_cmp_seq=%v\n", @@ -113,14 +113,13 @@ func (c *compIO) readCompressedPacket(r readerFunc) error { // Server may return error packet (e.g. 1153 Got a packet bigger than 'max_allowed_packet' bytes) // before receiving all packets from client. In this case, seqnr is younger than expected. // NOTE: Both of mariadbclient and mysqlclient do not check seqnr. Only server checks it. 
- if debug && compressionSequence != c.mc.sequence { + if debug && compressionSequence != c.mc.compressSequence { fmt.Printf("WARN: unexpected cmpress seq nr: expected %v, got %v", - c.mc.sequence, compressionSequence) + c.mc.compressSequence, compressionSequence) } - c.mc.sequence = compressionSequence + 1 - c.mc.compressSequence = c.mc.sequence + c.mc.compressSequence = compressionSequence + 1 - comprData, err := c.mc.buf.readNext(comprLength, r) + comprData, err := c.mc.readNext(comprLength) if err != nil { return err } @@ -200,7 +199,7 @@ func (c *compIO) writeCompressedPacket(data []byte, uncompressedLen int) (int, e comprLength := len(data) - 7 if debug { fmt.Printf( - "writeCompressedPacket: comprLength=%v, uncompressedLen=%v, seq=%v", + "writeCompressedPacket: comprLength=%v, uncompressedLen=%v, seq=%v\n", comprLength, uncompressedLen, mc.compressSequence) } diff --git a/vendor/github.com/go-sql-driver/mysql/packets.go b/vendor/github.com/go-sql-driver/mysql/packets.go index 4b836216..831fca6c 100644 --- a/vendor/github.com/go-sql-driver/mysql/packets.go +++ b/vendor/github.com/go-sql-driver/mysql/packets.go @@ -17,6 +17,7 @@ import ( "fmt" "io" "math" + "os" "strconv" "time" ) @@ -25,19 +26,30 @@ import ( // https://dev.mysql.com/doc/dev/mysql-server/latest/PAGE_PROTOCOL.html // https://mariadb.com/kb/en/clientserver-protocol/ +// read n bytes from mc.buf +func (mc *mysqlConn) readNext(n int) ([]byte, error) { + if mc.buf.len() < n { + err := mc.buf.fill(n, mc.readWithTimeout) + if err != nil { + return nil, err + } + } + return mc.buf.readNext(n), nil +} + // Read packet to buffer 'data' func (mc *mysqlConn) readPacket() ([]byte, error) { var prevData []byte invalidSequence := false - readNext := mc.buf.readNext + readNext := mc.readNext if mc.compress { readNext = mc.compIO.readNext } for { // read packet header - data, err := readNext(4, mc.readWithTimeout) + data, err := readNext(4) if err != nil { mc.close() if cerr := mc.canceled.Value(); cerr != nil { @@ -51,17 +63,11 @@ func (mc *mysqlConn) readPacket() ([]byte, error) { pktLen := getUint24(data[:3]) seq := data[3] - if mc.compress { + // check packet sync [8 bit] + if seq != mc.sequence { + mc.log(fmt.Sprintf("[warn] unexpected sequence nr: expected %v, got %v", mc.sequence, seq)) // MySQL and MariaDB doesn't check packet nr in compressed packet. - if debug && seq != mc.compressSequence { - fmt.Printf("[debug] mismatched compression sequence nr: expected: %v, got %v", - mc.compressSequence, seq) - } - mc.compressSequence = seq + 1 - } else { - // check packet sync [8 bit] - if seq != mc.sequence { - mc.log(fmt.Sprintf("[warn] unexpected seq nr: expected %v, got %v", mc.sequence, seq)) + if !mc.compress { // For large packets, we stop reading as soon as sync error. 
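Aside: the packets.go hunk above introduces mc.readNext, which fills the connection buffer on demand and then lets buffer.readNext hand back an exact, capacity-capped slice via b.buf[:need:need]. A driver-independent sketch of that fill-then-slice pattern; the byteReader type and chunk size below are illustrative, not the driver's:

    package main

    import (
        "bytes"
        "fmt"
        "io"
    )

    // byteReader buffers an io.Reader and hands out exact-length slices.
    type byteReader struct {
        r   io.Reader
        buf []byte
    }

    // readNext returns the next n bytes, filling the buffer first if needed.
    // The returned slice's capacity is capped at n (buf[:n:n]) so the caller
    // cannot append into bytes that still belong to the buffer.
    func (b *byteReader) readNext(n int) ([]byte, error) {
        for len(b.buf) < n {
            chunk := make([]byte, 4096)
            read, err := b.r.Read(chunk)
            b.buf = append(b.buf, chunk[:read]...)
            if err != nil {
                return nil, err
            }
        }
        out := b.buf[:n:n]
        b.buf = b.buf[n:]
        return out, nil
    }

    func main() {
        br := &byteReader{r: bytes.NewReader([]byte("header+payload"))}
        head, _ := br.readNext(6)
        rest, _ := br.readNext(8)
        fmt.Printf("%s %s\n", head, rest) // header +payload
    }
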
if len(prevData) > 0 { mc.close() @@ -69,8 +75,8 @@ func (mc *mysqlConn) readPacket() ([]byte, error) { } invalidSequence = true } - mc.sequence++ } + mc.sequence = seq + 1 // packets with length 0 terminate a previous packet which is a // multiple of (2^24)-1 bytes long @@ -85,7 +91,7 @@ func (mc *mysqlConn) readPacket() ([]byte, error) { } // read packet body [pktLen bytes] - data, err = readNext(pktLen, mc.readWithTimeout) + data, err = readNext(pktLen) if err != nil { mc.close() if cerr := mc.canceled.Value(); cerr != nil { @@ -135,7 +141,7 @@ func (mc *mysqlConn) writePacket(data []byte) error { // Write packet if debug { - fmt.Printf("writePacket: size=%v seq=%v", size, mc.sequence) + fmt.Fprintf(os.Stderr, "writePacket: size=%v seq=%v\n", size, mc.sequence) } n, err := writeFunc(data[:4+size]) @@ -434,7 +440,9 @@ func (mc *mysqlConn) writeCommandPacket(command byte) error { data[4] = command // Send CMD packet - return mc.writePacket(data) + err = mc.writePacket(data) + mc.syncSequence() + return err } func (mc *mysqlConn) writeCommandPacketStr(command byte, arg string) error { @@ -475,7 +483,9 @@ func (mc *mysqlConn) writeCommandPacketUint32(command byte, arg uint32) error { binary.LittleEndian.PutUint32(data[5:], arg) // Send CMD packet - return mc.writePacket(data) + err = mc.writePacket(data) + mc.syncSequence() + return err } /****************************************************************************** @@ -945,7 +955,6 @@ func (stmt *mysqlStmt) writeCommandLongData(paramID int, arg []byte) error { pktLen = dataOffset + argLen } - stmt.mc.resetSequence() // Add command byte [1 byte] data[4] = comStmtSendLongData @@ -957,6 +966,8 @@ func (stmt *mysqlStmt) writeCommandLongData(paramID int, arg []byte) error { // Send CMD packet err := stmt.mc.writePacket(data[:4+pktLen]) + // Every COM_LONG_DATA packet reset Packet Sequence + stmt.mc.resetSequence() if err == nil { data = data[pktLen-dataOffset:] continue @@ -964,8 +975,6 @@ func (stmt *mysqlStmt) writeCommandLongData(paramID int, arg []byte) error { return err } - // Reset Packet Sequence - stmt.mc.resetSequence() return nil } diff --git a/vendor/github.com/go-sql-driver/mysql/transaction.go b/vendor/github.com/go-sql-driver/mysql/transaction.go index 4a4b6100..8c502f49 100644 --- a/vendor/github.com/go-sql-driver/mysql/transaction.go +++ b/vendor/github.com/go-sql-driver/mysql/transaction.go @@ -13,18 +13,32 @@ type mysqlTx struct { } func (tx *mysqlTx) Commit() (err error) { - if tx.mc == nil || tx.mc.closed.Load() { + if tx.mc == nil { return ErrInvalidConn } + if tx.mc.closed.Load() { + err = tx.mc.error() + if err == nil { + err = ErrInvalidConn + } + return + } err = tx.mc.exec("COMMIT") tx.mc = nil return } func (tx *mysqlTx) Rollback() (err error) { - if tx.mc == nil || tx.mc.closed.Load() { + if tx.mc == nil { return ErrInvalidConn } + if tx.mc.closed.Load() { + err = tx.mc.error() + if err == nil { + err = ErrInvalidConn + } + return + } err = tx.mc.exec("ROLLBACK") tx.mc = nil return diff --git a/vendor/github.com/google/go-github/v71/github/actions_required_workflows.go b/vendor/github.com/google/go-github/v71/github/actions_required_workflows.go deleted file mode 100644 index b89741a8..00000000 --- a/vendor/github.com/google/go-github/v71/github/actions_required_workflows.go +++ /dev/null @@ -1,267 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
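Aside: the transaction.go hunk above implements the v1.9.3 changelog entry, so Commit and Rollback on a connection that was closed by an underlying error now return that cached error instead of always ErrInvalidConn. Calling code through database/sql stays the same; only the observed error becomes more specific. A hedged sketch with placeholder DSN and table names:

    package main

    import (
        "database/sql"
        "log"

        _ "github.com/go-sql-driver/mysql"
    )

    func main() {
        // Placeholder DSN; adjust user, password, host and schema for a real setup.
        db, err := sql.Open("mysql", "user:password@tcp(127.0.0.1:3306)/appdb")
        if err != nil {
            log.Fatal(err)
        }
        defer db.Close()

        tx, err := db.Begin()
        if err != nil {
            log.Fatal(err)
        }
        if _, err := tx.Exec("UPDATE jobs SET state = ? WHERE id = ?", "done", 42); err != nil {
            _ = tx.Rollback()
            log.Fatal(err)
        }
        // With go-sql-driver/mysql >= 1.9.3, if the connection died mid-transaction,
        // Commit surfaces the cached underlying error (for example a network error)
        // rather than the generic "invalid connection" error, which makes logs more useful.
        if err := tx.Commit(); err != nil {
            log.Printf("commit failed: %v", err)
        }
    }
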
- -package github - -import ( - "context" - "fmt" -) - -// OrgRequiredWorkflow represents a required workflow object at the org level. -type OrgRequiredWorkflow struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - Scope *string `json:"scope,omitempty"` - Ref *string `json:"ref,omitempty"` - State *string `json:"state,omitempty"` - SelectedRepositoriesURL *string `json:"selected_repositories_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Repository *Repository `json:"repository,omitempty"` -} - -// OrgRequiredWorkflows represents the required workflows for the org. -type OrgRequiredWorkflows struct { - TotalCount *int `json:"total_count,omitempty"` - RequiredWorkflows []*OrgRequiredWorkflow `json:"required_workflows,omitempty"` -} - -// CreateUpdateRequiredWorkflowOptions represents the input object used to create or update required workflows. -type CreateUpdateRequiredWorkflowOptions struct { - WorkflowFilePath *string `json:"workflow_file_path,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - Scope *string `json:"scope,omitempty"` - SelectedRepositoryIDs *SelectedRepoIDs `json:"selected_repository_ids,omitempty"` -} - -// RequiredWorkflowSelectedRepos represents the repos that a required workflow is applied to. -type RequiredWorkflowSelectedRepos struct { - TotalCount *int `json:"total_count,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -// RepoRequiredWorkflow represents a required workflow object at the repo level. -type RepoRequiredWorkflow struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - State *string `json:"state,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - BadgeURL *string `json:"badge_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - SourceRepository *Repository `json:"source_repository,omitempty"` -} - -// RepoRequiredWorkflows represents the required workflows for a repo. -type RepoRequiredWorkflows struct { - TotalCount *int `json:"total_count,omitempty"` - RequiredWorkflows []*RepoRequiredWorkflow `json:"required_workflows,omitempty"` -} - -// ListOrgRequiredWorkflows lists the RequiredWorkflows for an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows -func (s *ActionsService) ListOrgRequiredWorkflows(ctx context.Context, org string, opts *ListOptions) (*OrgRequiredWorkflows, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows", org) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflows := new(OrgRequiredWorkflows) - resp, err := s.client.Do(ctx, req, &requiredWorkflows) - if err != nil { - return nil, resp, err - } - - return requiredWorkflows, resp, nil -} - -// CreateRequiredWorkflow creates the required workflow in an org. 
-// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation POST /orgs/{org}/actions/required_workflows -func (s *ActionsService) CreateRequiredWorkflow(ctx context.Context, org string, createRequiredWorkflowOptions *CreateUpdateRequiredWorkflowOptions) (*OrgRequiredWorkflow, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows", org) - req, err := s.client.NewRequest("POST", url, createRequiredWorkflowOptions) - if err != nil { - return nil, nil, err - } - - orgRequiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, orgRequiredWorkflow) - if err != nil { - return nil, resp, err - } - - return orgRequiredWorkflow, resp, nil -} - -// GetRequiredWorkflowByID get the RequiredWorkflows for an org by its ID. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) GetRequiredWorkflowByID(ctx context.Context, owner string, requiredWorkflowID int64) (*OrgRequiredWorkflow, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", owner, requiredWorkflowID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, &requiredWorkflow) - if err != nil { - return nil, resp, err - } - - return requiredWorkflow, resp, nil -} - -// UpdateRequiredWorkflow updates a required workflow in an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PATCH /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) UpdateRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID int64, updateRequiredWorkflowOptions *CreateUpdateRequiredWorkflowOptions) (*OrgRequiredWorkflow, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", org, requiredWorkflowID) - req, err := s.client.NewRequest("PATCH", url, updateRequiredWorkflowOptions) - if err != nil { - return nil, nil, err - } - - orgRequiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, orgRequiredWorkflow) - if err != nil { - return nil, resp, err - } - - return orgRequiredWorkflow, resp, nil -} - -// DeleteRequiredWorkflow deletes a required workflow in an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation DELETE /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) DeleteRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", org, requiredWorkflowID) - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListRequiredWorkflowSelectedRepos lists the Repositories selected for a workflow. 
-// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows/{workflow_id}/repositories -func (s *ActionsService) ListRequiredWorkflowSelectedRepos(ctx context.Context, org string, requiredWorkflowID int64, opts *ListOptions) (*RequiredWorkflowSelectedRepos, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories", org, requiredWorkflowID) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflowRepos := new(RequiredWorkflowSelectedRepos) - resp, err := s.client.Do(ctx, req, &requiredWorkflowRepos) - if err != nil { - return nil, resp, err - } - - return requiredWorkflowRepos, resp, nil -} - -// SetRequiredWorkflowSelectedRepos sets the Repositories selected for a workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PUT /orgs/{org}/actions/required_workflows/{workflow_id}/repositories -func (s *ActionsService) SetRequiredWorkflowSelectedRepos(ctx context.Context, org string, requiredWorkflowID int64, ids SelectedRepoIDs) (*Response, error) { - type repoIDs struct { - SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` - } - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories", org, requiredWorkflowID) - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddRepoToRequiredWorkflow adds the Repository to a required workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PUT /orgs/{org}/actions/required_workflows/{workflow_id}/repositories/{repository_id} -func (s *ActionsService) AddRepoToRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID, repoID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories/%v", org, requiredWorkflowID, repoID) - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// RemoveRepoFromRequiredWorkflow removes the Repository from a required workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation DELETE /orgs/{org}/actions/required_workflows/{workflow_id}/repositories/{repository_id} -func (s *ActionsService) RemoveRepoFromRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID, repoID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories/%v", org, requiredWorkflowID, repoID) - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListRepoRequiredWorkflows lists the RequiredWorkflows for a repo. 
-// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /repos/{owner}/{repo}/actions/required_workflows -func (s *ActionsService) ListRepoRequiredWorkflows(ctx context.Context, owner, repo string, opts *ListOptions) (*RepoRequiredWorkflows, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/required_workflows", owner, repo) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflows := new(RepoRequiredWorkflows) - resp, err := s.client.Do(ctx, req, &requiredWorkflows) - if err != nil { - return nil, resp, err - } - - return requiredWorkflows, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/AUTHORS b/vendor/github.com/google/go-github/v72/AUTHORS similarity index 100% rename from vendor/github.com/google/go-github/v71/AUTHORS rename to vendor/github.com/google/go-github/v72/AUTHORS diff --git a/vendor/github.com/google/go-github/v71/LICENSE b/vendor/github.com/google/go-github/v72/LICENSE similarity index 100% rename from vendor/github.com/google/go-github/v71/LICENSE rename to vendor/github.com/google/go-github/v72/LICENSE diff --git a/vendor/github.com/google/go-github/v71/github/actions.go b/vendor/github.com/google/go-github/v72/github/actions.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions.go rename to vendor/github.com/google/go-github/v72/github/actions.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_artifacts.go b/vendor/github.com/google/go-github/v72/github/actions_artifacts.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_artifacts.go rename to vendor/github.com/google/go-github/v72/github/actions_artifacts.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_cache.go b/vendor/github.com/google/go-github/v72/github/actions_cache.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_cache.go rename to vendor/github.com/google/go-github/v72/github/actions_cache.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_hosted_runners.go b/vendor/github.com/google/go-github/v72/github/actions_hosted_runners.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_hosted_runners.go rename to vendor/github.com/google/go-github/v72/github/actions_hosted_runners.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_oidc.go b/vendor/github.com/google/go-github/v72/github/actions_oidc.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_oidc.go rename to vendor/github.com/google/go-github/v72/github/actions_oidc.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_permissions_enterprise.go b/vendor/github.com/google/go-github/v72/github/actions_permissions_enterprise.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_permissions_enterprise.go rename to vendor/github.com/google/go-github/v72/github/actions_permissions_enterprise.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_permissions_orgs.go b/vendor/github.com/google/go-github/v72/github/actions_permissions_orgs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_permissions_orgs.go rename to 
vendor/github.com/google/go-github/v72/github/actions_permissions_orgs.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_runner_groups.go b/vendor/github.com/google/go-github/v72/github/actions_runner_groups.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_runner_groups.go rename to vendor/github.com/google/go-github/v72/github/actions_runner_groups.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_runners.go b/vendor/github.com/google/go-github/v72/github/actions_runners.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_runners.go rename to vendor/github.com/google/go-github/v72/github/actions_runners.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_secrets.go b/vendor/github.com/google/go-github/v72/github/actions_secrets.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_secrets.go rename to vendor/github.com/google/go-github/v72/github/actions_secrets.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_variables.go b/vendor/github.com/google/go-github/v72/github/actions_variables.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_variables.go rename to vendor/github.com/google/go-github/v72/github/actions_variables.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_workflow_jobs.go b/vendor/github.com/google/go-github/v72/github/actions_workflow_jobs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_workflow_jobs.go rename to vendor/github.com/google/go-github/v72/github/actions_workflow_jobs.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_workflow_runs.go b/vendor/github.com/google/go-github/v72/github/actions_workflow_runs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_workflow_runs.go rename to vendor/github.com/google/go-github/v72/github/actions_workflow_runs.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_workflows.go b/vendor/github.com/google/go-github/v72/github/actions_workflows.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_workflows.go rename to vendor/github.com/google/go-github/v72/github/actions_workflows.go diff --git a/vendor/github.com/google/go-github/v71/github/activity.go b/vendor/github.com/google/go-github/v72/github/activity.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/activity.go rename to vendor/github.com/google/go-github/v72/github/activity.go diff --git a/vendor/github.com/google/go-github/v71/github/activity_events.go b/vendor/github.com/google/go-github/v72/github/activity_events.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/activity_events.go rename to vendor/github.com/google/go-github/v72/github/activity_events.go diff --git a/vendor/github.com/google/go-github/v71/github/activity_notifications.go b/vendor/github.com/google/go-github/v72/github/activity_notifications.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/activity_notifications.go rename to vendor/github.com/google/go-github/v72/github/activity_notifications.go diff --git a/vendor/github.com/google/go-github/v71/github/activity_star.go b/vendor/github.com/google/go-github/v72/github/activity_star.go similarity index 100% rename from 
vendor/github.com/google/go-github/v71/github/activity_star.go rename to vendor/github.com/google/go-github/v72/github/activity_star.go diff --git a/vendor/github.com/google/go-github/v71/github/activity_watching.go b/vendor/github.com/google/go-github/v72/github/activity_watching.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/activity_watching.go rename to vendor/github.com/google/go-github/v72/github/activity_watching.go diff --git a/vendor/github.com/google/go-github/v71/github/admin.go b/vendor/github.com/google/go-github/v72/github/admin.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/admin.go rename to vendor/github.com/google/go-github/v72/github/admin.go diff --git a/vendor/github.com/google/go-github/v71/github/admin_orgs.go b/vendor/github.com/google/go-github/v72/github/admin_orgs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/admin_orgs.go rename to vendor/github.com/google/go-github/v72/github/admin_orgs.go diff --git a/vendor/github.com/google/go-github/v71/github/admin_stats.go b/vendor/github.com/google/go-github/v72/github/admin_stats.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/admin_stats.go rename to vendor/github.com/google/go-github/v72/github/admin_stats.go diff --git a/vendor/github.com/google/go-github/v71/github/admin_users.go b/vendor/github.com/google/go-github/v72/github/admin_users.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/admin_users.go rename to vendor/github.com/google/go-github/v72/github/admin_users.go diff --git a/vendor/github.com/google/go-github/v71/github/apps.go b/vendor/github.com/google/go-github/v72/github/apps.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/apps.go rename to vendor/github.com/google/go-github/v72/github/apps.go diff --git a/vendor/github.com/google/go-github/v71/github/apps_hooks.go b/vendor/github.com/google/go-github/v72/github/apps_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/apps_hooks.go rename to vendor/github.com/google/go-github/v72/github/apps_hooks.go diff --git a/vendor/github.com/google/go-github/v71/github/apps_hooks_deliveries.go b/vendor/github.com/google/go-github/v72/github/apps_hooks_deliveries.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/apps_hooks_deliveries.go rename to vendor/github.com/google/go-github/v72/github/apps_hooks_deliveries.go diff --git a/vendor/github.com/google/go-github/v71/github/apps_installation.go b/vendor/github.com/google/go-github/v72/github/apps_installation.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/apps_installation.go rename to vendor/github.com/google/go-github/v72/github/apps_installation.go diff --git a/vendor/github.com/google/go-github/v71/github/apps_manifest.go b/vendor/github.com/google/go-github/v72/github/apps_manifest.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/apps_manifest.go rename to vendor/github.com/google/go-github/v72/github/apps_manifest.go diff --git a/vendor/github.com/google/go-github/v71/github/apps_marketplace.go b/vendor/github.com/google/go-github/v72/github/apps_marketplace.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/apps_marketplace.go rename to vendor/github.com/google/go-github/v72/github/apps_marketplace.go diff --git 
a/vendor/github.com/google/go-github/v71/github/attestations.go b/vendor/github.com/google/go-github/v72/github/attestations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/attestations.go rename to vendor/github.com/google/go-github/v72/github/attestations.go diff --git a/vendor/github.com/google/go-github/v71/github/authorizations.go b/vendor/github.com/google/go-github/v72/github/authorizations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/authorizations.go rename to vendor/github.com/google/go-github/v72/github/authorizations.go diff --git a/vendor/github.com/google/go-github/v71/github/billing.go b/vendor/github.com/google/go-github/v72/github/billing.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/billing.go rename to vendor/github.com/google/go-github/v72/github/billing.go diff --git a/vendor/github.com/google/go-github/v71/github/checks.go b/vendor/github.com/google/go-github/v72/github/checks.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/checks.go rename to vendor/github.com/google/go-github/v72/github/checks.go diff --git a/vendor/github.com/google/go-github/v71/github/code_scanning.go b/vendor/github.com/google/go-github/v72/github/code_scanning.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/code_scanning.go rename to vendor/github.com/google/go-github/v72/github/code_scanning.go diff --git a/vendor/github.com/google/go-github/v71/github/codesofconduct.go b/vendor/github.com/google/go-github/v72/github/codesofconduct.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/codesofconduct.go rename to vendor/github.com/google/go-github/v72/github/codesofconduct.go diff --git a/vendor/github.com/google/go-github/v71/github/codespaces.go b/vendor/github.com/google/go-github/v72/github/codespaces.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/codespaces.go rename to vendor/github.com/google/go-github/v72/github/codespaces.go diff --git a/vendor/github.com/google/go-github/v71/github/codespaces_secrets.go b/vendor/github.com/google/go-github/v72/github/codespaces_secrets.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/codespaces_secrets.go rename to vendor/github.com/google/go-github/v72/github/codespaces_secrets.go diff --git a/vendor/github.com/google/go-github/v71/github/copilot.go b/vendor/github.com/google/go-github/v72/github/copilot.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/copilot.go rename to vendor/github.com/google/go-github/v72/github/copilot.go diff --git a/vendor/github.com/google/go-github/v71/github/dependabot.go b/vendor/github.com/google/go-github/v72/github/dependabot.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/dependabot.go rename to vendor/github.com/google/go-github/v72/github/dependabot.go diff --git a/vendor/github.com/google/go-github/v71/github/dependabot_alerts.go b/vendor/github.com/google/go-github/v72/github/dependabot_alerts.go similarity index 94% rename from vendor/github.com/google/go-github/v71/github/dependabot_alerts.go rename to vendor/github.com/google/go-github/v72/github/dependabot_alerts.go index c274f07b..67e624c9 100644 --- a/vendor/github.com/google/go-github/v71/github/dependabot_alerts.go +++ b/vendor/github.com/google/go-github/v72/github/dependabot_alerts.go @@ -29,6 +29,15 @@ type 
AdvisoryCWEs struct { Name *string `json:"name,omitempty"` } +// AdvisoryEPSS represents the advisory pertaining to the Exploit Prediction Scoring System. +// +// For more information, see: +// https://github.blog/changelog/2024-10-10-epss-scores-in-the-github-advisory-database/ +type AdvisoryEPSS struct { + Percentage float64 `json:"percentage"` + Percentile float64 `json:"percentile"` +} + // DependabotSecurityAdvisory represents the GitHub Security Advisory. type DependabotSecurityAdvisory struct { GHSAID *string `json:"ghsa_id,omitempty"` @@ -39,6 +48,7 @@ type DependabotSecurityAdvisory struct { Severity *string `json:"severity,omitempty"` CVSS *AdvisoryCVSS `json:"cvss,omitempty"` CWEs []*AdvisoryCWEs `json:"cwes,omitempty"` + EPSS *AdvisoryEPSS `json:"epss,omitempty"` Identifiers []*AdvisoryIdentifier `json:"identifiers,omitempty"` References []*AdvisoryReference `json:"references,omitempty"` PublishedAt *Timestamp `json:"published_at,omitempty"` diff --git a/vendor/github.com/google/go-github/v71/github/dependabot_secrets.go b/vendor/github.com/google/go-github/v72/github/dependabot_secrets.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/dependabot_secrets.go rename to vendor/github.com/google/go-github/v72/github/dependabot_secrets.go diff --git a/vendor/github.com/google/go-github/v71/github/dependency_graph.go b/vendor/github.com/google/go-github/v72/github/dependency_graph.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/dependency_graph.go rename to vendor/github.com/google/go-github/v72/github/dependency_graph.go diff --git a/vendor/github.com/google/go-github/v71/github/dependency_graph_snapshots.go b/vendor/github.com/google/go-github/v72/github/dependency_graph_snapshots.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/dependency_graph_snapshots.go rename to vendor/github.com/google/go-github/v72/github/dependency_graph_snapshots.go diff --git a/vendor/github.com/google/go-github/v71/github/doc.go b/vendor/github.com/google/go-github/v72/github/doc.go similarity index 99% rename from vendor/github.com/google/go-github/v71/github/doc.go rename to vendor/github.com/google/go-github/v72/github/doc.go index b963051f..8a6112fe 100644 --- a/vendor/github.com/google/go-github/v71/github/doc.go +++ b/vendor/github.com/google/go-github/v72/github/doc.go @@ -8,7 +8,7 @@ Package github provides a client for using the GitHub API. 
Usage: - import "github.com/google/go-github/v71/github" // with go modules enabled (GO111MODULE=on or outside GOPATH) + import "github.com/google/go-github/v72/github" // with go modules enabled (GO111MODULE=on or outside GOPATH) import "github.com/google/go-github/github" // with go modules disabled Construct a new GitHub client, then use the various services on the client to diff --git a/vendor/github.com/google/go-github/v71/github/emojis.go b/vendor/github.com/google/go-github/v72/github/emojis.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/emojis.go rename to vendor/github.com/google/go-github/v72/github/emojis.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise.go b/vendor/github.com/google/go-github/v72/github/enterprise.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise.go rename to vendor/github.com/google/go-github/v72/github/enterprise.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_actions_hosted_runners.go b/vendor/github.com/google/go-github/v72/github/enterprise_actions_hosted_runners.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_actions_hosted_runners.go rename to vendor/github.com/google/go-github/v72/github/enterprise_actions_hosted_runners.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_actions_runner_groups.go b/vendor/github.com/google/go-github/v72/github/enterprise_actions_runner_groups.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_actions_runner_groups.go rename to vendor/github.com/google/go-github/v72/github/enterprise_actions_runner_groups.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_actions_runners.go b/vendor/github.com/google/go-github/v72/github/enterprise_actions_runners.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_actions_runners.go rename to vendor/github.com/google/go-github/v72/github/enterprise_actions_runners.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_audit_log.go b/vendor/github.com/google/go-github/v72/github/enterprise_audit_log.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_audit_log.go rename to vendor/github.com/google/go-github/v72/github/enterprise_audit_log.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_code_security_and_analysis.go b/vendor/github.com/google/go-github/v72/github/enterprise_code_security_and_analysis.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_code_security_and_analysis.go rename to vendor/github.com/google/go-github/v72/github/enterprise_code_security_and_analysis.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes.go b/vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes.go rename to vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_config.go b/vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_config.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_config.go rename to 
vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_config.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_maintenance.go b/vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_maintenance.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_maintenance.go rename to vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_maintenance.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_ssh.go b/vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_ssh.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_ssh.go rename to vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_ssh.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_network_configurations.go b/vendor/github.com/google/go-github/v72/github/enterprise_network_configurations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_network_configurations.go rename to vendor/github.com/google/go-github/v72/github/enterprise_network_configurations.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_properties.go b/vendor/github.com/google/go-github/v72/github/enterprise_properties.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_properties.go rename to vendor/github.com/google/go-github/v72/github/enterprise_properties.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_rules.go b/vendor/github.com/google/go-github/v72/github/enterprise_rules.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_rules.go rename to vendor/github.com/google/go-github/v72/github/enterprise_rules.go diff --git a/vendor/github.com/google/go-github/v71/github/event.go b/vendor/github.com/google/go-github/v72/github/event.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/event.go rename to vendor/github.com/google/go-github/v72/github/event.go diff --git a/vendor/github.com/google/go-github/v71/github/event_types.go b/vendor/github.com/google/go-github/v72/github/event_types.go similarity index 99% rename from vendor/github.com/google/go-github/v71/github/event_types.go rename to vendor/github.com/google/go-github/v72/github/event_types.go index 6a6caf19..b5369865 100644 --- a/vendor/github.com/google/go-github/v71/github/event_types.go +++ b/vendor/github.com/google/go-github/v72/github/event_types.go @@ -1446,6 +1446,26 @@ type PushEventRepoOwner struct { Email *string `json:"email,omitempty"` } +// RegistryPackageEvent represents activity related to GitHub Packages. +// The Webhook event name is "registry_package". +// +// This event is triggered when a GitHub Package is published or updated. +// +// GitHub API docs: https://docs.github.com/en/webhooks/webhook-events-and-payloads#registry_package +type RegistryPackageEvent struct { + // Action is the action that was performed. + // Can be "published" or "updated". + Action *string `json:"action,omitempty"` + RegistryPackage *Package `json:"registry_package,omitempty"` + Repository *Repository `json:"repository,omitempty"` + Organization *Organization `json:"organization,omitempty"` + Enterprise *Enterprise `json:"enterprise,omitempty"` + Sender *User `json:"sender,omitempty"` + + // The following fields are only populated by Webhook events. 
+ Installation *Installation `json:"installation,omitempty"` +} + // ReleaseEvent is triggered when a release is published, unpublished, created, // edited, deleted, or prereleased. // The Webhook event name is "release". diff --git a/vendor/github.com/google/go-github/v71/github/gists.go b/vendor/github.com/google/go-github/v72/github/gists.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/gists.go rename to vendor/github.com/google/go-github/v72/github/gists.go diff --git a/vendor/github.com/google/go-github/v71/github/gists_comments.go b/vendor/github.com/google/go-github/v72/github/gists_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/gists_comments.go rename to vendor/github.com/google/go-github/v72/github/gists_comments.go diff --git a/vendor/github.com/google/go-github/v71/github/git.go b/vendor/github.com/google/go-github/v72/github/git.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/git.go rename to vendor/github.com/google/go-github/v72/github/git.go diff --git a/vendor/github.com/google/go-github/v71/github/git_blobs.go b/vendor/github.com/google/go-github/v72/github/git_blobs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/git_blobs.go rename to vendor/github.com/google/go-github/v72/github/git_blobs.go diff --git a/vendor/github.com/google/go-github/v71/github/git_commits.go b/vendor/github.com/google/go-github/v72/github/git_commits.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/git_commits.go rename to vendor/github.com/google/go-github/v72/github/git_commits.go diff --git a/vendor/github.com/google/go-github/v71/github/git_refs.go b/vendor/github.com/google/go-github/v72/github/git_refs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/git_refs.go rename to vendor/github.com/google/go-github/v72/github/git_refs.go diff --git a/vendor/github.com/google/go-github/v71/github/git_tags.go b/vendor/github.com/google/go-github/v72/github/git_tags.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/git_tags.go rename to vendor/github.com/google/go-github/v72/github/git_tags.go diff --git a/vendor/github.com/google/go-github/v71/github/git_trees.go b/vendor/github.com/google/go-github/v72/github/git_trees.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/git_trees.go rename to vendor/github.com/google/go-github/v72/github/git_trees.go diff --git a/vendor/github.com/google/go-github/v71/github/github-accessors.go b/vendor/github.com/google/go-github/v72/github/github-accessors.go similarity index 99% rename from vendor/github.com/google/go-github/v71/github/github-accessors.go rename to vendor/github.com/google/go-github/v72/github/github-accessors.go index 45fa9075..6378b40b 100644 --- a/vendor/github.com/google/go-github/v71/github/github-accessors.go +++ b/vendor/github.com/google/go-github/v72/github/github-accessors.go @@ -6022,38 +6022,6 @@ func (c *CreateUpdateEnvironment) GetWaitTimer() int { return *c.WaitTimer } -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (c *CreateUpdateRequiredWorkflowOptions) GetRepositoryID() int64 { - if c == nil || c.RepositoryID == nil { - return 0 - } - return *c.RepositoryID -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. 
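Aside: go-github v72, vendored here, adds RegistryPackageEvent and wires it to the "registry_package" webhook name, so generic webhook parsers now receive a typed value for package publish and update deliveries. A hedged sketch using the library's standard webhook helpers; the secret, route, and logging are placeholders and not GARM code:

    package main

    import (
        "log"
        "net/http"

        "github.com/google/go-github/v72/github"
    )

    func handleWebhook(w http.ResponseWriter, r *http.Request) {
        // Placeholder secret; in a real service this comes from configuration.
        payload, err := github.ValidatePayload(r, []byte("webhook-secret"))
        if err != nil {
            http.Error(w, "invalid payload", http.StatusBadRequest)
            return
        }
        event, err := github.ParseWebHook(github.WebHookType(r), payload)
        if err != nil {
            http.Error(w, "cannot parse event", http.StatusBadRequest)
            return
        }
        switch ev := event.(type) {
        case *github.RegistryPackageEvent:
            // Action is "published" or "updated", per the new event type in this bump.
            log.Printf("registry_package %s in %s", ev.GetAction(), ev.GetRepository().GetFullName())
        default:
            log.Printf("ignoring %T", ev)
        }
        w.WriteHeader(http.StatusOK)
    }

    func main() {
        http.HandleFunc("/webhook", handleWebhook)
        log.Fatal(http.ListenAndServe(":8080", nil))
    }
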
-func (c *CreateUpdateRequiredWorkflowOptions) GetScope() string { - if c == nil || c.Scope == nil { - return "" - } - return *c.Scope -} - -// GetSelectedRepositoryIDs returns the SelectedRepositoryIDs field. -func (c *CreateUpdateRequiredWorkflowOptions) GetSelectedRepositoryIDs() *SelectedRepoIDs { - if c == nil { - return nil - } - return c.SelectedRepositoryIDs -} - -// GetWorkflowFilePath returns the WorkflowFilePath field if it's non-nil, zero value otherwise. -func (c *CreateUpdateRequiredWorkflowOptions) GetWorkflowFilePath() string { - if c == nil || c.WorkflowFilePath == nil { - return "" - } - return *c.WorkflowFilePath -} - // GetEmail returns the Email field if it's non-nil, zero value otherwise. func (c *CreateUserRequest) GetEmail() string { if c == nil || c.Email == nil { @@ -6886,6 +6854,14 @@ func (d *DependabotSecurityAdvisory) GetDescription() string { return *d.Description } +// GetEPSS returns the EPSS field. +func (d *DependabotSecurityAdvisory) GetEPSS() *AdvisoryEPSS { + if d == nil { + return nil + } + return d.EPSS +} + // GetGHSAID returns the GHSAID field if it's non-nil, zero value otherwise. func (d *DependabotSecurityAdvisory) GetGHSAID() string { if d == nil || d.GHSAID == nil { @@ -12430,6 +12406,14 @@ func (i *IssueRequest) GetTitle() string { return *i.Title } +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetType() string { + if i == nil || i.Type == nil { + return "" + } + return *i.Type +} + // GetAction returns the Action field if it's non-nil, zero value otherwise. func (i *IssuesEvent) GetAction() string { if i == nil || i.Action == nil { @@ -15694,94 +15678,6 @@ func (o *OrgBlockEvent) GetSender() *User { return o.Sender } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetCreatedAt() Timestamp { - if o == nil || o.CreatedAt == nil { - return Timestamp{} - } - return *o.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetID() int64 { - if o == nil || o.ID == nil { - return 0 - } - return *o.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetName() string { - if o == nil || o.Name == nil { - return "" - } - return *o.Name -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetPath() string { - if o == nil || o.Path == nil { - return "" - } - return *o.Path -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetRef() string { - if o == nil || o.Ref == nil { - return "" - } - return *o.Ref -} - -// GetRepository returns the Repository field. -func (o *OrgRequiredWorkflow) GetRepository() *Repository { - if o == nil { - return nil - } - return o.Repository -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetScope() string { - if o == nil || o.Scope == nil { - return "" - } - return *o.Scope -} - -// GetSelectedRepositoriesURL returns the SelectedRepositoriesURL field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetSelectedRepositoriesURL() string { - if o == nil || o.SelectedRepositoriesURL == nil { - return "" - } - return *o.SelectedRepositoriesURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. 
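Aside: the accessor diff above also gains GetEPSS on DependabotSecurityAdvisory, exposing the EPSS exploit-likelihood score GitHub now attaches to advisories. A small sketch of reading it; the advisory value below is made up for illustration:

    package main

    import (
        "fmt"

        "github.com/google/go-github/v72/github"
    )

    // summarizeAdvisory prints severity plus the EPSS score when the advisory
    // carries one. GetEPSS is nil-safe and returns nil when no EPSS data exists.
    func summarizeAdvisory(adv *github.DependabotSecurityAdvisory) string {
        if epss := adv.GetEPSS(); epss != nil {
            return fmt.Sprintf("%s (%s) EPSS %.4f, percentile %.4f",
                adv.GetGHSAID(), adv.GetSeverity(), epss.Percentage, epss.Percentile)
        }
        return fmt.Sprintf("%s (%s) no EPSS data", adv.GetGHSAID(), adv.GetSeverity())
    }

    func main() {
        ghsa, sev := "GHSA-xxxx-xxxx-xxxx", "high"
        adv := &github.DependabotSecurityAdvisory{
            GHSAID:   &ghsa,
            Severity: &sev,
            EPSS:     &github.AdvisoryEPSS{Percentage: 0.0421, Percentile: 0.91},
        }
        fmt.Println(summarizeAdvisory(adv))
    }
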
-func (o *OrgRequiredWorkflow) GetState() string { - if o == nil || o.State == nil { - return "" - } - return *o.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetUpdatedAt() Timestamp { - if o == nil || o.UpdatedAt == nil { - return Timestamp{} - } - return *o.UpdatedAt -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflows) GetTotalCount() int { - if o == nil || o.TotalCount == nil { - return 0 - } - return *o.TotalCount -} - // GetDisabledOrgs returns the DisabledOrgs field if it's non-nil, zero value otherwise. func (o *OrgStats) GetDisabledOrgs() int { if o == nil || o.DisabledOrgs == nil { @@ -20982,6 +20878,62 @@ func (r *RegistrationToken) GetToken() string { return *r.Token } +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (r *RegistryPackageEvent) GetAction() string { + if r == nil || r.Action == nil { + return "" + } + return *r.Action +} + +// GetEnterprise returns the Enterprise field. +func (r *RegistryPackageEvent) GetEnterprise() *Enterprise { + if r == nil { + return nil + } + return r.Enterprise +} + +// GetInstallation returns the Installation field. +func (r *RegistryPackageEvent) GetInstallation() *Installation { + if r == nil { + return nil + } + return r.Installation +} + +// GetOrganization returns the Organization field. +func (r *RegistryPackageEvent) GetOrganization() *Organization { + if r == nil { + return nil + } + return r.Organization +} + +// GetRegistryPackage returns the RegistryPackage field. +func (r *RegistryPackageEvent) GetRegistryPackage() *Package { + if r == nil { + return nil + } + return r.RegistryPackage +} + +// GetRepository returns the Repository field. +func (r *RegistryPackageEvent) GetRepository() *Repository { + if r == nil { + return nil + } + return r.Repository +} + +// GetSender returns the Sender field. +func (r *RegistryPackageEvent) GetSender() *User { + if r == nil { + return nil + } + return r.Sender +} + // GetBrowserDownloadURL returns the BrowserDownloadURL field if it's non-nil, zero value otherwise. func (r *ReleaseAsset) GetBrowserDownloadURL() string { if r == nil || r.BrowserDownloadURL == nil { @@ -21350,102 +21302,6 @@ func (r *RepoName) GetFrom() string { return *r.From } -// GetBadgeURL returns the BadgeURL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetBadgeURL() string { - if r == nil || r.BadgeURL == nil { - return "" - } - return *r.BadgeURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
-func (r *RepoRequiredWorkflow) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetSourceRepository returns the SourceRepository field. -func (r *RepoRequiredWorkflow) GetSourceRepository() *Repository { - if r == nil { - return nil - } - return r.SourceRepository -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetState() string { - if r == nil || r.State == nil { - return "" - } - return *r.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflows) GetTotalCount() int { - if r == nil || r.TotalCount == nil { - return 0 - } - return *r.TotalCount -} - // GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. func (r *RepositoriesSearchResult) GetIncompleteResults() bool { if r == nil || r.IncompleteResults == nil { @@ -22934,6 +22790,14 @@ func (r *RepositoryLicense) GetURL() string { return *r.URL } +// GetIncludesParents returns the IncludesParents field if it's non-nil, zero value otherwise. +func (r *RepositoryListRulesetsOptions) GetIncludesParents() bool { + if r == nil || r.IncludesParents == nil { + return false + } + return *r.IncludesParents +} + // GetBase returns the Base field if it's non-nil, zero value otherwise. func (r *RepositoryMergeRequest) GetBase() string { if r == nil || r.Base == nil { @@ -24078,14 +23942,6 @@ func (r *RequiredStatusChecksRuleParameters) GetDoNotEnforceOnCreate() bool { return *r.DoNotEnforceOnCreate } -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (r *RequiredWorkflowSelectedRepos) GetTotalCount() int { - if r == nil || r.TotalCount == nil { - return 0 - } - return *r.TotalCount -} - // GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. func (r *ReviewersRequest) GetNodeID() string { if r == nil || r.NodeID == nil { @@ -24846,6 +24702,14 @@ func (s *SecretScanningAlert) GetHTMLURL() string { return *s.HTMLURL } +// GetIsBase64Encoded returns the IsBase64Encoded field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetIsBase64Encoded() bool { + if s == nil || s.IsBase64Encoded == nil { + return false + } + return *s.IsBase64Encoded +} + // GetLocationsURL returns the LocationsURL field if it's non-nil, zero value otherwise. func (s *SecretScanningAlert) GetLocationsURL() string { if s == nil || s.LocationsURL == nil { @@ -24854,6 +24718,14 @@ func (s *SecretScanningAlert) GetLocationsURL() string { return *s.LocationsURL } +// GetMultiRepo returns the MultiRepo field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetMultiRepo() bool { + if s == nil || s.MultiRepo == nil { + return false + } + return *s.MultiRepo +} + // GetNumber returns the Number field if it's non-nil, zero value otherwise. 
func (s *SecretScanningAlert) GetNumber() int { if s == nil || s.Number == nil { @@ -24862,6 +24734,14 @@ func (s *SecretScanningAlert) GetNumber() int { return *s.Number } +// GetPubliclyLeaked returns the PubliclyLeaked field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetPubliclyLeaked() bool { + if s == nil || s.PubliclyLeaked == nil { + return false + } + return *s.PubliclyLeaked +} + // GetPushProtectionBypassed returns the PushProtectionBypassed field if it's non-nil, zero value otherwise. func (s *SecretScanningAlert) GetPushProtectionBypassed() bool { if s == nil || s.PushProtectionBypassed == nil { @@ -24886,6 +24766,38 @@ func (s *SecretScanningAlert) GetPushProtectionBypassedBy() *User { return s.PushProtectionBypassedBy } +// GetPushProtectionBypassRequestComment returns the PushProtectionBypassRequestComment field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetPushProtectionBypassRequestComment() string { + if s == nil || s.PushProtectionBypassRequestComment == nil { + return "" + } + return *s.PushProtectionBypassRequestComment +} + +// GetPushProtectionBypassRequestHTMLURL returns the PushProtectionBypassRequestHTMLURL field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetPushProtectionBypassRequestHTMLURL() string { + if s == nil || s.PushProtectionBypassRequestHTMLURL == nil { + return "" + } + return *s.PushProtectionBypassRequestHTMLURL +} + +// GetPushProtectionBypassRequestReviewer returns the PushProtectionBypassRequestReviewer field. +func (s *SecretScanningAlert) GetPushProtectionBypassRequestReviewer() *User { + if s == nil { + return nil + } + return s.PushProtectionBypassRequestReviewer +} + +// GetPushProtectionBypassRequestReviewerComment returns the PushProtectionBypassRequestReviewerComment field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetPushProtectionBypassRequestReviewerComment() string { + if s == nil || s.PushProtectionBypassRequestReviewerComment == nil { + return "" + } + return *s.PushProtectionBypassRequestReviewerComment +} + // GetRepository returns the Repository field. func (s *SecretScanningAlert) GetRepository() *Repository { if s == nil { @@ -24974,6 +24886,14 @@ func (s *SecretScanningAlert) GetURL() string { return *s.URL } +// GetValidity returns the Validity field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetValidity() string { + if s == nil || s.Validity == nil { + return "" + } + return *s.Validity +} + // GetAction returns the Action field if it's non-nil, zero value otherwise. 
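Aside: the new SecretScanningAlert accessors above (validity, publicly_leaked, multi_repo, base64 encoding, push-protection bypass requests) read the same nil-safe way as the existing ones. A short sketch with a made-up alert value:

    package main

    import (
        "fmt"

        "github.com/google/go-github/v72/github"
    )

    // describeAlert condenses the new secret-scanning metadata into one line.
    func describeAlert(a *github.SecretScanningAlert) string {
        return fmt.Sprintf("alert #%d validity=%s publicly_leaked=%t multi_repo=%t base64=%t",
            a.GetNumber(), a.GetValidity(), a.GetPubliclyLeaked(), a.GetMultiRepo(), a.GetIsBase64Encoded())
    }

    func main() {
        num := 7
        validity := "active"
        leaked := true
        alert := &github.SecretScanningAlert{Number: &num, Validity: &validity, PubliclyLeaked: &leaked}
        fmt.Println(describeAlert(alert))
    }
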
func (s *SecretScanningAlertEvent) GetAction() string { if s == nil || s.Action == nil { diff --git a/vendor/github.com/google/go-github/v71/github/github.go b/vendor/github.com/google/go-github/v72/github/github.go similarity index 99% rename from vendor/github.com/google/go-github/v71/github/github.go rename to vendor/github.com/google/go-github/v72/github/github.go index ddafffb2..a3b1941e 100644 --- a/vendor/github.com/google/go-github/v71/github/github.go +++ b/vendor/github.com/google/go-github/v72/github/github.go @@ -29,7 +29,7 @@ import ( ) const ( - Version = "v71.0.0" + Version = "v72.0.0" defaultAPIVersion = "2022-11-28" defaultBaseURL = "https://api.github.com/" diff --git a/vendor/github.com/google/go-github/v71/github/gitignore.go b/vendor/github.com/google/go-github/v72/github/gitignore.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/gitignore.go rename to vendor/github.com/google/go-github/v72/github/gitignore.go diff --git a/vendor/github.com/google/go-github/v71/github/interactions.go b/vendor/github.com/google/go-github/v72/github/interactions.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/interactions.go rename to vendor/github.com/google/go-github/v72/github/interactions.go diff --git a/vendor/github.com/google/go-github/v71/github/interactions_orgs.go b/vendor/github.com/google/go-github/v72/github/interactions_orgs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/interactions_orgs.go rename to vendor/github.com/google/go-github/v72/github/interactions_orgs.go diff --git a/vendor/github.com/google/go-github/v71/github/interactions_repos.go b/vendor/github.com/google/go-github/v72/github/interactions_repos.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/interactions_repos.go rename to vendor/github.com/google/go-github/v72/github/interactions_repos.go diff --git a/vendor/github.com/google/go-github/v71/github/issue_import.go b/vendor/github.com/google/go-github/v72/github/issue_import.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/issue_import.go rename to vendor/github.com/google/go-github/v72/github/issue_import.go diff --git a/vendor/github.com/google/go-github/v71/github/issues.go b/vendor/github.com/google/go-github/v72/github/issues.go similarity index 97% rename from vendor/github.com/google/go-github/v71/github/issues.go rename to vendor/github.com/google/go-github/v72/github/issues.go index 6d3a6b15..395d64cf 100644 --- a/vendor/github.com/google/go-github/v71/github/issues.go +++ b/vendor/github.com/google/go-github/v72/github/issues.go @@ -90,6 +90,7 @@ type IssueRequest struct { StateReason *string `json:"state_reason,omitempty"` Milestone *int `json:"milestone,omitempty"` Assignees *[]string `json:"assignees,omitempty"` + Type *string `json:"type,omitempty"` } // IssueListOptions specifies the optional parameters to the IssuesService.List @@ -117,6 +118,10 @@ type IssueListOptions struct { // Since filters issues by time. Since time.Time `url:"since,omitempty"` + ListCursorOptions + + // Add ListOptions so offset pagination with integer type "page" query parameter is accepted + // since ListCursorOptions accepts "page" as string only. ListOptions } @@ -233,6 +238,10 @@ type IssueListByRepoOptions struct { // Since filters issues by time. 
Since time.Time `url:"since,omitempty"` + ListCursorOptions + + // Add ListOptions so offset pagination with integer type "page" query parameter is accepted + // since ListCursorOptions accepts "page" as string only. ListOptions } diff --git a/vendor/github.com/google/go-github/v71/github/issues_assignees.go b/vendor/github.com/google/go-github/v72/github/issues_assignees.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/issues_assignees.go rename to vendor/github.com/google/go-github/v72/github/issues_assignees.go diff --git a/vendor/github.com/google/go-github/v71/github/issues_comments.go b/vendor/github.com/google/go-github/v72/github/issues_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/issues_comments.go rename to vendor/github.com/google/go-github/v72/github/issues_comments.go diff --git a/vendor/github.com/google/go-github/v71/github/issues_events.go b/vendor/github.com/google/go-github/v72/github/issues_events.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/issues_events.go rename to vendor/github.com/google/go-github/v72/github/issues_events.go diff --git a/vendor/github.com/google/go-github/v71/github/issues_labels.go b/vendor/github.com/google/go-github/v72/github/issues_labels.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/issues_labels.go rename to vendor/github.com/google/go-github/v72/github/issues_labels.go diff --git a/vendor/github.com/google/go-github/v71/github/issues_milestones.go b/vendor/github.com/google/go-github/v72/github/issues_milestones.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/issues_milestones.go rename to vendor/github.com/google/go-github/v72/github/issues_milestones.go diff --git a/vendor/github.com/google/go-github/v71/github/issues_timeline.go b/vendor/github.com/google/go-github/v72/github/issues_timeline.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/issues_timeline.go rename to vendor/github.com/google/go-github/v72/github/issues_timeline.go diff --git a/vendor/github.com/google/go-github/v71/github/licenses.go b/vendor/github.com/google/go-github/v72/github/licenses.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/licenses.go rename to vendor/github.com/google/go-github/v72/github/licenses.go diff --git a/vendor/github.com/google/go-github/v71/github/markdown.go b/vendor/github.com/google/go-github/v72/github/markdown.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/markdown.go rename to vendor/github.com/google/go-github/v72/github/markdown.go diff --git a/vendor/github.com/google/go-github/v71/github/messages.go b/vendor/github.com/google/go-github/v72/github/messages.go similarity index 99% rename from vendor/github.com/google/go-github/v71/github/messages.go rename to vendor/github.com/google/go-github/v72/github/messages.go index 59b214b3..2b5cce75 100644 --- a/vendor/github.com/google/go-github/v71/github/messages.go +++ b/vendor/github.com/google/go-github/v72/github/messages.go @@ -95,6 +95,7 @@ var ( "pull_request_review_thread": &PullRequestReviewThreadEvent{}, "pull_request_target": &PullRequestTargetEvent{}, "push": &PushEvent{}, + "registry_package": &RegistryPackageEvent{}, "repository": &RepositoryEvent{}, "repository_dispatch": &RepositoryDispatchEvent{}, "repository_import": &RepositoryImportEvent{}, diff --git 
a/vendor/github.com/google/go-github/v71/github/meta.go b/vendor/github.com/google/go-github/v72/github/meta.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/meta.go rename to vendor/github.com/google/go-github/v72/github/meta.go diff --git a/vendor/github.com/google/go-github/v71/github/migrations.go b/vendor/github.com/google/go-github/v72/github/migrations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/migrations.go rename to vendor/github.com/google/go-github/v72/github/migrations.go diff --git a/vendor/github.com/google/go-github/v71/github/migrations_source_import.go b/vendor/github.com/google/go-github/v72/github/migrations_source_import.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/migrations_source_import.go rename to vendor/github.com/google/go-github/v72/github/migrations_source_import.go diff --git a/vendor/github.com/google/go-github/v71/github/migrations_user.go b/vendor/github.com/google/go-github/v72/github/migrations_user.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/migrations_user.go rename to vendor/github.com/google/go-github/v72/github/migrations_user.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs.go b/vendor/github.com/google/go-github/v72/github/orgs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs.go rename to vendor/github.com/google/go-github/v72/github/orgs.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_actions_allowed.go b/vendor/github.com/google/go-github/v72/github/orgs_actions_allowed.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_actions_allowed.go rename to vendor/github.com/google/go-github/v72/github/orgs_actions_allowed.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_actions_permissions.go b/vendor/github.com/google/go-github/v72/github/orgs_actions_permissions.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_actions_permissions.go rename to vendor/github.com/google/go-github/v72/github/orgs_actions_permissions.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_attestations.go b/vendor/github.com/google/go-github/v72/github/orgs_attestations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_attestations.go rename to vendor/github.com/google/go-github/v72/github/orgs_attestations.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_audit_log.go b/vendor/github.com/google/go-github/v72/github/orgs_audit_log.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_audit_log.go rename to vendor/github.com/google/go-github/v72/github/orgs_audit_log.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_codesecurity_configurations.go b/vendor/github.com/google/go-github/v72/github/orgs_codesecurity_configurations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_codesecurity_configurations.go rename to vendor/github.com/google/go-github/v72/github/orgs_codesecurity_configurations.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_credential_authorizations.go b/vendor/github.com/google/go-github/v72/github/orgs_credential_authorizations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_credential_authorizations.go rename to 
vendor/github.com/google/go-github/v72/github/orgs_credential_authorizations.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_custom_repository_roles.go b/vendor/github.com/google/go-github/v72/github/orgs_custom_repository_roles.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_custom_repository_roles.go rename to vendor/github.com/google/go-github/v72/github/orgs_custom_repository_roles.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_hooks.go b/vendor/github.com/google/go-github/v72/github/orgs_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_hooks.go rename to vendor/github.com/google/go-github/v72/github/orgs_hooks.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_hooks_configuration.go b/vendor/github.com/google/go-github/v72/github/orgs_hooks_configuration.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_hooks_configuration.go rename to vendor/github.com/google/go-github/v72/github/orgs_hooks_configuration.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_hooks_deliveries.go b/vendor/github.com/google/go-github/v72/github/orgs_hooks_deliveries.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_hooks_deliveries.go rename to vendor/github.com/google/go-github/v72/github/orgs_hooks_deliveries.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_issue_types.go b/vendor/github.com/google/go-github/v72/github/orgs_issue_types.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_issue_types.go rename to vendor/github.com/google/go-github/v72/github/orgs_issue_types.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_members.go b/vendor/github.com/google/go-github/v72/github/orgs_members.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_members.go rename to vendor/github.com/google/go-github/v72/github/orgs_members.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_network_configurations.go b/vendor/github.com/google/go-github/v72/github/orgs_network_configurations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_network_configurations.go rename to vendor/github.com/google/go-github/v72/github/orgs_network_configurations.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_organization_roles.go b/vendor/github.com/google/go-github/v72/github/orgs_organization_roles.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_organization_roles.go rename to vendor/github.com/google/go-github/v72/github/orgs_organization_roles.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_outside_collaborators.go b/vendor/github.com/google/go-github/v72/github/orgs_outside_collaborators.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_outside_collaborators.go rename to vendor/github.com/google/go-github/v72/github/orgs_outside_collaborators.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_packages.go b/vendor/github.com/google/go-github/v72/github/orgs_packages.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_packages.go rename to vendor/github.com/google/go-github/v72/github/orgs_packages.go diff --git 
a/vendor/github.com/google/go-github/v71/github/orgs_personal_access_tokens.go b/vendor/github.com/google/go-github/v72/github/orgs_personal_access_tokens.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_personal_access_tokens.go rename to vendor/github.com/google/go-github/v72/github/orgs_personal_access_tokens.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_properties.go b/vendor/github.com/google/go-github/v72/github/orgs_properties.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_properties.go rename to vendor/github.com/google/go-github/v72/github/orgs_properties.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_rules.go b/vendor/github.com/google/go-github/v72/github/orgs_rules.go similarity index 96% rename from vendor/github.com/google/go-github/v71/github/orgs_rules.go rename to vendor/github.com/google/go-github/v72/github/orgs_rules.go index 8cb2e5d1..357eb8ce 100644 --- a/vendor/github.com/google/go-github/v71/github/orgs_rules.go +++ b/vendor/github.com/google/go-github/v72/github/orgs_rules.go @@ -15,9 +15,14 @@ import ( // GitHub API docs: https://docs.github.com/rest/orgs/rules#get-all-organization-repository-rulesets // //meta:operation GET /orgs/{org}/rulesets -func (s *OrganizationsService) GetAllRepositoryRulesets(ctx context.Context, org string) ([]*RepositoryRuleset, *Response, error) { +func (s *OrganizationsService) GetAllRepositoryRulesets(ctx context.Context, org string, opts *ListOptions) ([]*RepositoryRuleset, *Response, error) { u := fmt.Sprintf("orgs/%v/rulesets", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err diff --git a/vendor/github.com/google/go-github/v71/github/orgs_security_managers.go b/vendor/github.com/google/go-github/v72/github/orgs_security_managers.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_security_managers.go rename to vendor/github.com/google/go-github/v72/github/orgs_security_managers.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_users_blocking.go b/vendor/github.com/google/go-github/v72/github/orgs_users_blocking.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_users_blocking.go rename to vendor/github.com/google/go-github/v72/github/orgs_users_blocking.go diff --git a/vendor/github.com/google/go-github/v71/github/packages.go b/vendor/github.com/google/go-github/v72/github/packages.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/packages.go rename to vendor/github.com/google/go-github/v72/github/packages.go diff --git a/vendor/github.com/google/go-github/v71/github/pulls.go b/vendor/github.com/google/go-github/v72/github/pulls.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/pulls.go rename to vendor/github.com/google/go-github/v72/github/pulls.go diff --git a/vendor/github.com/google/go-github/v71/github/pulls_comments.go b/vendor/github.com/google/go-github/v72/github/pulls_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/pulls_comments.go rename to vendor/github.com/google/go-github/v72/github/pulls_comments.go diff --git a/vendor/github.com/google/go-github/v71/github/pulls_reviewers.go b/vendor/github.com/google/go-github/v72/github/pulls_reviewers.go similarity index 100% rename from 
vendor/github.com/google/go-github/v71/github/pulls_reviewers.go rename to vendor/github.com/google/go-github/v72/github/pulls_reviewers.go diff --git a/vendor/github.com/google/go-github/v71/github/pulls_reviews.go b/vendor/github.com/google/go-github/v72/github/pulls_reviews.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/pulls_reviews.go rename to vendor/github.com/google/go-github/v72/github/pulls_reviews.go diff --git a/vendor/github.com/google/go-github/v71/github/pulls_threads.go b/vendor/github.com/google/go-github/v72/github/pulls_threads.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/pulls_threads.go rename to vendor/github.com/google/go-github/v72/github/pulls_threads.go diff --git a/vendor/github.com/google/go-github/v71/github/rate_limit.go b/vendor/github.com/google/go-github/v72/github/rate_limit.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/rate_limit.go rename to vendor/github.com/google/go-github/v72/github/rate_limit.go diff --git a/vendor/github.com/google/go-github/v71/github/reactions.go b/vendor/github.com/google/go-github/v72/github/reactions.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/reactions.go rename to vendor/github.com/google/go-github/v72/github/reactions.go diff --git a/vendor/github.com/google/go-github/v71/github/repos.go b/vendor/github.com/google/go-github/v72/github/repos.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos.go rename to vendor/github.com/google/go-github/v72/github/repos.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_actions_access.go b/vendor/github.com/google/go-github/v72/github/repos_actions_access.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_actions_access.go rename to vendor/github.com/google/go-github/v72/github/repos_actions_access.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_actions_allowed.go b/vendor/github.com/google/go-github/v72/github/repos_actions_allowed.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_actions_allowed.go rename to vendor/github.com/google/go-github/v72/github/repos_actions_allowed.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_actions_permissions.go b/vendor/github.com/google/go-github/v72/github/repos_actions_permissions.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_actions_permissions.go rename to vendor/github.com/google/go-github/v72/github/repos_actions_permissions.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_attestations.go b/vendor/github.com/google/go-github/v72/github/repos_attestations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_attestations.go rename to vendor/github.com/google/go-github/v72/github/repos_attestations.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_autolinks.go b/vendor/github.com/google/go-github/v72/github/repos_autolinks.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_autolinks.go rename to vendor/github.com/google/go-github/v72/github/repos_autolinks.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_codeowners.go b/vendor/github.com/google/go-github/v72/github/repos_codeowners.go similarity index 100% rename from 
vendor/github.com/google/go-github/v71/github/repos_codeowners.go rename to vendor/github.com/google/go-github/v72/github/repos_codeowners.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_collaborators.go b/vendor/github.com/google/go-github/v72/github/repos_collaborators.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_collaborators.go rename to vendor/github.com/google/go-github/v72/github/repos_collaborators.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_comments.go b/vendor/github.com/google/go-github/v72/github/repos_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_comments.go rename to vendor/github.com/google/go-github/v72/github/repos_comments.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_commits.go b/vendor/github.com/google/go-github/v72/github/repos_commits.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_commits.go rename to vendor/github.com/google/go-github/v72/github/repos_commits.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_community_health.go b/vendor/github.com/google/go-github/v72/github/repos_community_health.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_community_health.go rename to vendor/github.com/google/go-github/v72/github/repos_community_health.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_contents.go b/vendor/github.com/google/go-github/v72/github/repos_contents.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_contents.go rename to vendor/github.com/google/go-github/v72/github/repos_contents.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_deployment_branch_policies.go b/vendor/github.com/google/go-github/v72/github/repos_deployment_branch_policies.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_deployment_branch_policies.go rename to vendor/github.com/google/go-github/v72/github/repos_deployment_branch_policies.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_deployment_protection_rules.go b/vendor/github.com/google/go-github/v72/github/repos_deployment_protection_rules.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_deployment_protection_rules.go rename to vendor/github.com/google/go-github/v72/github/repos_deployment_protection_rules.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_deployments.go b/vendor/github.com/google/go-github/v72/github/repos_deployments.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_deployments.go rename to vendor/github.com/google/go-github/v72/github/repos_deployments.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_environments.go b/vendor/github.com/google/go-github/v72/github/repos_environments.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_environments.go rename to vendor/github.com/google/go-github/v72/github/repos_environments.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_forks.go b/vendor/github.com/google/go-github/v72/github/repos_forks.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_forks.go rename to vendor/github.com/google/go-github/v72/github/repos_forks.go diff --git 
a/vendor/github.com/google/go-github/v71/github/repos_hooks.go b/vendor/github.com/google/go-github/v72/github/repos_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_hooks.go rename to vendor/github.com/google/go-github/v72/github/repos_hooks.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_hooks_configuration.go b/vendor/github.com/google/go-github/v72/github/repos_hooks_configuration.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_hooks_configuration.go rename to vendor/github.com/google/go-github/v72/github/repos_hooks_configuration.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_hooks_deliveries.go b/vendor/github.com/google/go-github/v72/github/repos_hooks_deliveries.go similarity index 89% rename from vendor/github.com/google/go-github/v71/github/repos_hooks_deliveries.go rename to vendor/github.com/google/go-github/v72/github/repos_hooks_deliveries.go index c8029f62..bcd4b336 100644 --- a/vendor/github.com/google/go-github/v71/github/repos_hooks_deliveries.go +++ b/vendor/github.com/google/go-github/v72/github/repos_hooks_deliveries.go @@ -9,6 +9,7 @@ import ( "context" "encoding/json" "fmt" + "strings" ) // HookDelivery represents the data that is received from GitHub's Webhook Delivery API @@ -39,6 +40,16 @@ func (d HookDelivery) String() string { return Stringify(d) } +// getHeader common function for GetHeader funcs of HookRequest & HookResponse. +func getHeader(headers map[string]string, key string) string { + for k, v := range headers { + if strings.EqualFold(k, key) { + return v + } + } + return "" +} + // HookRequest is a part of HookDelivery that contains // the HTTP headers and the JSON payload of the webhook request. type HookRequest struct { @@ -46,6 +57,11 @@ type HookRequest struct { RawPayload *json.RawMessage `json:"payload,omitempty"` } +// GetHeader gets the value associated with the given key (ignoring key case). +func (r *HookRequest) GetHeader(key string) string { + return getHeader(r.Headers, key) +} + func (r HookRequest) String() string { return Stringify(r) } @@ -57,6 +73,11 @@ type HookResponse struct { RawPayload *json.RawMessage `json:"payload,omitempty"` } +// GetHeader gets the value associated with the given key (ignoring key case). 
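The new GetHeader helpers on HookRequest and HookResponse make the recorded header maps usable without worrying about header-name casing. A small sketch under the assumption that the delivery comes from one of the existing hook-delivery endpoints; the function and package names are illustrative.

package example

import (
	"fmt"

	"github.com/google/go-github/v72/github"
)

// PrintDeliveryHeaders sketches the case-insensitive header lookup added
// to HookRequest and HookResponse in this hunk.
func PrintDeliveryHeaders(d *github.HookDelivery) {
	req, resp := d.GetRequest(), d.GetResponse()
	if req == nil || resp == nil {
		return
	}
	// The casing GitHub used when recording the headers no longer matters.
	fmt.Printf("event=%s request content-type=%s response content-type=%s\n",
		d.GetEvent(), req.GetHeader("Content-Type"), resp.GetHeader("content-type"))
}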
+func (r *HookResponse) GetHeader(key string) string { + return getHeader(r.Headers, key) +} + func (r HookResponse) String() string { return Stringify(r) } diff --git a/vendor/github.com/google/go-github/v71/github/repos_invitations.go b/vendor/github.com/google/go-github/v72/github/repos_invitations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_invitations.go rename to vendor/github.com/google/go-github/v72/github/repos_invitations.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_keys.go b/vendor/github.com/google/go-github/v72/github/repos_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_keys.go rename to vendor/github.com/google/go-github/v72/github/repos_keys.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_lfs.go b/vendor/github.com/google/go-github/v72/github/repos_lfs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_lfs.go rename to vendor/github.com/google/go-github/v72/github/repos_lfs.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_merging.go b/vendor/github.com/google/go-github/v72/github/repos_merging.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_merging.go rename to vendor/github.com/google/go-github/v72/github/repos_merging.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_pages.go b/vendor/github.com/google/go-github/v72/github/repos_pages.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_pages.go rename to vendor/github.com/google/go-github/v72/github/repos_pages.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_prereceive_hooks.go b/vendor/github.com/google/go-github/v72/github/repos_prereceive_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_prereceive_hooks.go rename to vendor/github.com/google/go-github/v72/github/repos_prereceive_hooks.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_properties.go b/vendor/github.com/google/go-github/v72/github/repos_properties.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_properties.go rename to vendor/github.com/google/go-github/v72/github/repos_properties.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_releases.go b/vendor/github.com/google/go-github/v72/github/repos_releases.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_releases.go rename to vendor/github.com/google/go-github/v72/github/repos_releases.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_rules.go b/vendor/github.com/google/go-github/v72/github/repos_rules.go similarity index 88% rename from vendor/github.com/google/go-github/v71/github/repos_rules.go rename to vendor/github.com/google/go-github/v72/github/repos_rules.go index d38e35cd..038cefd7 100644 --- a/vendor/github.com/google/go-github/v71/github/repos_rules.go +++ b/vendor/github.com/google/go-github/v72/github/repos_rules.go @@ -38,9 +38,14 @@ type rulesetClearBypassActors struct { // GitHub API docs: https://docs.github.com/rest/repos/rules#get-rules-for-a-branch // //meta:operation GET /repos/{owner}/{repo}/rules/branches/{branch} -func (s *RepositoriesService) GetRulesForBranch(ctx context.Context, owner, repo, branch string) (*BranchRules, *Response, error) { +func (s *RepositoriesService) GetRulesForBranch(ctx 
context.Context, owner, repo, branch string, opts *ListOptions) (*BranchRules, *Response, error) { u := fmt.Sprintf("repos/%v/%v/rules/branches/%v", owner, repo, branch) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err @@ -55,14 +60,28 @@ func (s *RepositoriesService) GetRulesForBranch(ctx context.Context, owner, repo return rules, resp, nil } +// RepositoryListRulesetsOptions specifies optional parameters to the +// RepositoriesService.GetAllRulesets method. +type RepositoryListRulesetsOptions struct { + // IncludesParents indicates whether to include rulesets configured at the organization or enterprise level that apply to the repository. + IncludesParents *bool `url:"includes_parents,omitempty"` + ListOptions +} + // GetAllRulesets gets all the repository rulesets for the specified repository. -// If includesParents is true, rulesets configured at the organization or enterprise level that apply to the repository will be returned. +// By default, this endpoint will include rulesets configured at the organization or enterprise level that apply to the repository. +// To exclude those rulesets, set the `RepositoryListRulesetsOptions.IncludesParents` parameter to `false`. // // GitHub API docs: https://docs.github.com/rest/repos/rules#get-all-repository-rulesets // //meta:operation GET /repos/{owner}/{repo}/rulesets -func (s *RepositoriesService) GetAllRulesets(ctx context.Context, owner, repo string, includesParents bool) ([]*RepositoryRuleset, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets?includes_parents=%v", owner, repo, includesParents) +func (s *RepositoriesService) GetAllRulesets(ctx context.Context, owner, repo string, opts *RepositoryListRulesetsOptions) ([]*RepositoryRuleset, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/rulesets", owner, repo) + + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } req, err := s.client.NewRequest("GET", u, nil) if err != nil { diff --git a/vendor/github.com/google/go-github/v71/github/repos_stats.go b/vendor/github.com/google/go-github/v72/github/repos_stats.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_stats.go rename to vendor/github.com/google/go-github/v72/github/repos_stats.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_statuses.go b/vendor/github.com/google/go-github/v72/github/repos_statuses.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_statuses.go rename to vendor/github.com/google/go-github/v72/github/repos_statuses.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_tags.go b/vendor/github.com/google/go-github/v72/github/repos_tags.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_tags.go rename to vendor/github.com/google/go-github/v72/github/repos_tags.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_traffic.go b/vendor/github.com/google/go-github/v72/github/repos_traffic.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_traffic.go rename to vendor/github.com/google/go-github/v72/github/repos_traffic.go diff --git a/vendor/github.com/google/go-github/v71/github/rules.go b/vendor/github.com/google/go-github/v72/github/rules.go similarity index 95% rename from vendor/github.com/google/go-github/v71/github/rules.go rename to 
vendor/github.com/google/go-github/v72/github/rules.go index 985f0aac..4def2d19 100644 --- a/vendor/github.com/google/go-github/v71/github/rules.go +++ b/vendor/github.com/google/go-github/v72/github/rules.go @@ -99,14 +99,34 @@ const ( MergeGroupingStrategyHeadGreen MergeGroupingStrategy = "HEADGREEN" ) -// MergeMethod models a GitHub merge method. -type MergeMethod string +// PullRequestMergeMethod is used in PullRequestRuleParameters, +// where the GitHub API expects lowercase merge method values: "merge", "rebase", "squash". +// +// NOTE: GitHub's API inconsistently uses different casing for the same logical values +// across different rules. +// +// TODO: Unify with MergeQueueMergeMethod once the GitHub API uses consistent casing. +type PullRequestMergeMethod string -// This is the set of GitHub merge methods. const ( - MergeMethodMerge MergeMethod = "merge" - MergeMethodRebase MergeMethod = "rebase" - MergeMethodSquash MergeMethod = "squash" + PullRequestMergeMethodMerge PullRequestMergeMethod = "merge" + PullRequestMergeMethodRebase PullRequestMergeMethod = "rebase" + PullRequestMergeMethodSquash PullRequestMergeMethod = "squash" +) + +// MergeQueueMergeMethod is used in MergeQueueRuleParameters, +// where the GitHub API expects uppercase merge method values: "MERGE", "REBASE", "SQUASH". +// +// NOTE: This type exists alongside PullRequestMergeMethod solely due to API casing inconsistencies. +// It enforces the correct usage by API context. +// +// TODO: Unify with PullRequestMergeMethod once the GitHub API uses consistent casing. +type MergeQueueMergeMethod string + +const ( + MergeQueueMergeMethodMerge MergeQueueMergeMethod = "MERGE" + MergeQueueMergeMethodRebase MergeQueueMergeMethod = "REBASE" + MergeQueueMergeMethodSquash MergeQueueMergeMethod = "SQUASH" ) // PatternRuleOperator models a GitHub pattern rule operator. @@ -383,7 +403,7 @@ type MergeQueueRuleParameters struct { GroupingStrategy MergeGroupingStrategy `json:"grouping_strategy"` MaxEntriesToBuild int `json:"max_entries_to_build"` MaxEntriesToMerge int `json:"max_entries_to_merge"` - MergeMethod MergeMethod `json:"merge_method"` + MergeMethod MergeQueueMergeMethod `json:"merge_method"` MinEntriesToMerge int `json:"min_entries_to_merge"` MinEntriesToMergeWaitMinutes int `json:"min_entries_to_merge_wait_minutes"` } @@ -395,13 +415,13 @@ type RequiredDeploymentsRuleParameters struct { // PullRequestRuleParameters represents the pull_request rule parameters. 
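Stepping back to the repos_rules.go hunk above: GetRulesForBranch and GetAllRulesets (like the org-level GetAllRepositoryRulesets) now take options structs instead of positional flags, so ruleset listings can be paginated. A sketch of the repository-level call under the new signature; the helper name and owner/repo arguments are placeholders, and the per-page size is arbitrary.

package example

import (
	"context"

	"github.com/google/go-github/v72/github"
)

// countRepoRulesets pages through a repository's rulesets using the new
// GetAllRulesets signature: the old includesParents bool is now a field on
// RepositoryListRulesetsOptions, and the embedded ListOptions drives paging.
func countRepoRulesets(ctx context.Context, client *github.Client, owner, repo string) (int, error) {
	includeParents := false // mirror the old includesParents=false behaviour
	opts := &github.RepositoryListRulesetsOptions{
		IncludesParents: &includeParents,
		ListOptions:     github.ListOptions{PerPage: 50},
	}
	total := 0
	for {
		rulesets, resp, err := client.Repositories.GetAllRulesets(ctx, owner, repo, opts)
		if err != nil {
			return 0, err
		}
		total += len(rulesets)
		if resp.NextPage == 0 {
			return total, nil
		}
		opts.Page = resp.NextPage
	}
}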
type PullRequestRuleParameters struct { - AllowedMergeMethods []MergeMethod `json:"allowed_merge_methods"` - AutomaticCopilotCodeReviewEnabled *bool `json:"automatic_copilot_code_review_enabled,omitempty"` - DismissStaleReviewsOnPush bool `json:"dismiss_stale_reviews_on_push"` - RequireCodeOwnerReview bool `json:"require_code_owner_review"` - RequireLastPushApproval bool `json:"require_last_push_approval"` - RequiredApprovingReviewCount int `json:"required_approving_review_count"` - RequiredReviewThreadResolution bool `json:"required_review_thread_resolution"` + AllowedMergeMethods []PullRequestMergeMethod `json:"allowed_merge_methods"` + AutomaticCopilotCodeReviewEnabled *bool `json:"automatic_copilot_code_review_enabled,omitempty"` + DismissStaleReviewsOnPush bool `json:"dismiss_stale_reviews_on_push"` + RequireCodeOwnerReview bool `json:"require_code_owner_review"` + RequireLastPushApproval bool `json:"require_last_push_approval"` + RequiredApprovingReviewCount int `json:"required_approving_review_count"` + RequiredReviewThreadResolution bool `json:"required_review_thread_resolution"` } // RequiredStatusChecksRuleParameters represents the required status checks rule parameters. diff --git a/vendor/github.com/google/go-github/v71/github/scim.go b/vendor/github.com/google/go-github/v72/github/scim.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/scim.go rename to vendor/github.com/google/go-github/v72/github/scim.go diff --git a/vendor/github.com/google/go-github/v71/github/search.go b/vendor/github.com/google/go-github/v72/github/search.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/search.go rename to vendor/github.com/google/go-github/v72/github/search.go diff --git a/vendor/github.com/google/go-github/v71/github/secret_scanning.go b/vendor/github.com/google/go-github/v72/github/secret_scanning.go similarity index 78% rename from vendor/github.com/google/go-github/v71/github/secret_scanning.go rename to vendor/github.com/google/go-github/v72/github/secret_scanning.go index 4eeeba3d..ad2312d0 100644 --- a/vendor/github.com/google/go-github/v71/github/secret_scanning.go +++ b/vendor/github.com/google/go-github/v72/github/secret_scanning.go @@ -16,24 +16,32 @@ type SecretScanningService service // SecretScanningAlert represents a GitHub secret scanning alert. 
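Returning to the merge-method split above: callers now pick the type that matches the casing each rule expects, lowercase PullRequestMergeMethod values for pull_request rules and uppercase MergeQueueMergeMethod values for merge_queue rules. A brief sketch populating the two parameter structs from this hunk; the numeric values are illustrative, and fields not shown are simply left at their zero values.

package example

import "github.com/google/go-github/v72/github"

// ruleParameters shows the two merge-method types introduced above being
// used in the rule-parameter structs they belong to.
func ruleParameters() (github.PullRequestRuleParameters, github.MergeQueueRuleParameters) {
	pr := github.PullRequestRuleParameters{
		// pull_request rules take the lowercase constants.
		AllowedMergeMethods: []github.PullRequestMergeMethod{
			github.PullRequestMergeMethodSquash,
			github.PullRequestMergeMethodRebase,
		},
		RequiredApprovingReviewCount:   1,
		RequiredReviewThreadResolution: true,
	}
	mq := github.MergeQueueRuleParameters{
		// merge_queue rules take the uppercase constants.
		GroupingStrategy:             github.MergeGroupingStrategyHeadGreen,
		MergeMethod:                  github.MergeQueueMergeMethodSquash,
		MaxEntriesToBuild:            5,
		MaxEntriesToMerge:            5,
		MinEntriesToMerge:            1,
		MinEntriesToMergeWaitMinutes: 5,
	}
	return pr, mq
}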
type SecretScanningAlert struct { - Number *int `json:"number,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - LocationsURL *string `json:"locations_url,omitempty"` - State *string `json:"state,omitempty"` - Resolution *string `json:"resolution,omitempty"` - ResolvedAt *Timestamp `json:"resolved_at,omitempty"` - ResolvedBy *User `json:"resolved_by,omitempty"` - SecretType *string `json:"secret_type,omitempty"` - SecretTypeDisplayName *string `json:"secret_type_display_name,omitempty"` - Secret *string `json:"secret,omitempty"` - Repository *Repository `json:"repository,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - PushProtectionBypassed *bool `json:"push_protection_bypassed,omitempty"` - PushProtectionBypassedBy *User `json:"push_protection_bypassed_by,omitempty"` - PushProtectionBypassedAt *Timestamp `json:"push_protection_bypassed_at,omitempty"` - ResolutionComment *string `json:"resolution_comment,omitempty"` + Number *int `json:"number,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + LocationsURL *string `json:"locations_url,omitempty"` + State *string `json:"state,omitempty"` + Resolution *string `json:"resolution,omitempty"` + ResolvedAt *Timestamp `json:"resolved_at,omitempty"` + ResolvedBy *User `json:"resolved_by,omitempty"` + SecretType *string `json:"secret_type,omitempty"` + SecretTypeDisplayName *string `json:"secret_type_display_name,omitempty"` + Secret *string `json:"secret,omitempty"` + Repository *Repository `json:"repository,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + IsBase64Encoded *bool `json:"is_base64_encoded,omitempty"` + MultiRepo *bool `json:"multi_repo,omitempty"` + PubliclyLeaked *bool `json:"publicly_leaked,omitempty"` + PushProtectionBypassed *bool `json:"push_protection_bypassed,omitempty"` + PushProtectionBypassedBy *User `json:"push_protection_bypassed_by,omitempty"` + PushProtectionBypassedAt *Timestamp `json:"push_protection_bypassed_at,omitempty"` + ResolutionComment *string `json:"resolution_comment,omitempty"` + PushProtectionBypassRequestComment *string `json:"push_protection_bypass_request_comment,omitempty"` + PushProtectionBypassRequestHTMLURL *string `json:"push_protection_bypass_request_html_url,omitempty"` + PushProtectionBypassRequestReviewer *User `json:"push_protection_bypass_request_reviewer,omitempty"` + PushProtectionBypassRequestReviewerComment *string `json:"push_protection_bypass_request_reviewer_comment,omitempty"` + Validity *string `json:"validity,omitempty"` } // SecretScanningAlertLocation represents the location for a secret scanning alert. @@ -72,6 +80,12 @@ type SecretScanningAlertListOptions struct { // Valid options are active, inactive, and unknown. Validity string `url:"validity,omitempty"` + // A boolean value representing whether or not to filter alerts by the publicly-leaked tag being present. Default: false. + IsPubliclyLeaked bool `url:"is_publicly_leaked,omitempty"` + + // A boolean value representing whether or not to filter alerts by the multi-repo tag being present. Default: false. + IsMultiRepo bool `url:"is_multi_repo,omitempty"` + // The direction to sort the results by. Possible values are: asc, desc. Default: desc. 
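SecretScanningAlertListOptions gains is_publicly_leaked and is_multi_repo filters alongside the existing validity filter. A rough sketch of listing repository alerts with the new filters, assuming the standard SecretScanning.ListAlertsForRepo wrapper; the function name and owner/repo arguments are placeholders.

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v72/github"
)

// listLeakedActiveAlerts lists open alerts whose secret is still valid and
// is known to have been publicly leaked, using the new filter fields.
func listLeakedActiveAlerts(ctx context.Context, client *github.Client, owner, repo string) error {
	opts := &github.SecretScanningAlertListOptions{
		State:            "open",
		Validity:         "active",
		IsPubliclyLeaked: true,
	}
	alerts, _, err := client.SecretScanning.ListAlertsForRepo(ctx, owner, repo, opts)
	if err != nil {
		return err
	}
	for _, a := range alerts {
		fmt.Printf("#%d %s %s\n", a.GetNumber(), a.GetSecretType(), a.GetHTMLURL())
	}
	return nil
}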
Direction string `url:"direction,omitempty"` diff --git a/vendor/github.com/google/go-github/v71/github/security_advisories.go b/vendor/github.com/google/go-github/v72/github/security_advisories.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/security_advisories.go rename to vendor/github.com/google/go-github/v72/github/security_advisories.go diff --git a/vendor/github.com/google/go-github/v71/github/strings.go b/vendor/github.com/google/go-github/v72/github/strings.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/strings.go rename to vendor/github.com/google/go-github/v72/github/strings.go diff --git a/vendor/github.com/google/go-github/v71/github/teams.go b/vendor/github.com/google/go-github/v72/github/teams.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/teams.go rename to vendor/github.com/google/go-github/v72/github/teams.go diff --git a/vendor/github.com/google/go-github/v71/github/teams_discussion_comments.go b/vendor/github.com/google/go-github/v72/github/teams_discussion_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/teams_discussion_comments.go rename to vendor/github.com/google/go-github/v72/github/teams_discussion_comments.go diff --git a/vendor/github.com/google/go-github/v71/github/teams_discussions.go b/vendor/github.com/google/go-github/v72/github/teams_discussions.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/teams_discussions.go rename to vendor/github.com/google/go-github/v72/github/teams_discussions.go diff --git a/vendor/github.com/google/go-github/v71/github/teams_members.go b/vendor/github.com/google/go-github/v72/github/teams_members.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/teams_members.go rename to vendor/github.com/google/go-github/v72/github/teams_members.go diff --git a/vendor/github.com/google/go-github/v71/github/timestamp.go b/vendor/github.com/google/go-github/v72/github/timestamp.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/timestamp.go rename to vendor/github.com/google/go-github/v72/github/timestamp.go diff --git a/vendor/github.com/google/go-github/v71/github/users.go b/vendor/github.com/google/go-github/v72/github/users.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users.go rename to vendor/github.com/google/go-github/v72/github/users.go diff --git a/vendor/github.com/google/go-github/v71/github/users_administration.go b/vendor/github.com/google/go-github/v72/github/users_administration.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_administration.go rename to vendor/github.com/google/go-github/v72/github/users_administration.go diff --git a/vendor/github.com/google/go-github/v71/github/users_attestations.go b/vendor/github.com/google/go-github/v72/github/users_attestations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_attestations.go rename to vendor/github.com/google/go-github/v72/github/users_attestations.go diff --git a/vendor/github.com/google/go-github/v71/github/users_blocking.go b/vendor/github.com/google/go-github/v72/github/users_blocking.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_blocking.go rename to vendor/github.com/google/go-github/v72/github/users_blocking.go diff --git 
a/vendor/github.com/google/go-github/v71/github/users_emails.go b/vendor/github.com/google/go-github/v72/github/users_emails.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_emails.go rename to vendor/github.com/google/go-github/v72/github/users_emails.go diff --git a/vendor/github.com/google/go-github/v71/github/users_followers.go b/vendor/github.com/google/go-github/v72/github/users_followers.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_followers.go rename to vendor/github.com/google/go-github/v72/github/users_followers.go diff --git a/vendor/github.com/google/go-github/v71/github/users_gpg_keys.go b/vendor/github.com/google/go-github/v72/github/users_gpg_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_gpg_keys.go rename to vendor/github.com/google/go-github/v72/github/users_gpg_keys.go diff --git a/vendor/github.com/google/go-github/v71/github/users_keys.go b/vendor/github.com/google/go-github/v72/github/users_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_keys.go rename to vendor/github.com/google/go-github/v72/github/users_keys.go diff --git a/vendor/github.com/google/go-github/v71/github/users_packages.go b/vendor/github.com/google/go-github/v72/github/users_packages.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_packages.go rename to vendor/github.com/google/go-github/v72/github/users_packages.go diff --git a/vendor/github.com/google/go-github/v71/github/users_ssh_signing_keys.go b/vendor/github.com/google/go-github/v72/github/users_ssh_signing_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_ssh_signing_keys.go rename to vendor/github.com/google/go-github/v72/github/users_ssh_signing_keys.go diff --git a/vendor/github.com/google/go-github/v71/github/with_appengine.go b/vendor/github.com/google/go-github/v72/github/with_appengine.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/with_appengine.go rename to vendor/github.com/google/go-github/v72/github/with_appengine.go diff --git a/vendor/github.com/google/go-github/v71/github/without_appengine.go b/vendor/github.com/google/go-github/v72/github/without_appengine.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/without_appengine.go rename to vendor/github.com/google/go-github/v72/github/without_appengine.go diff --git a/vendor/github.com/prometheus/common/expfmt/text_parse.go b/vendor/github.com/prometheus/common/expfmt/text_parse.go index b4607fe4..4067978a 100644 --- a/vendor/github.com/prometheus/common/expfmt/text_parse.go +++ b/vendor/github.com/prometheus/common/expfmt/text_parse.go @@ -345,8 +345,8 @@ func (p *TextParser) startLabelName() stateFn { } // Special summary/histogram treatment. Don't add 'quantile' and 'le' // labels to 'real' labels. - if !(p.currentMF.GetType() == dto.MetricType_SUMMARY && p.currentLabelPair.GetName() == model.QuantileLabel) && - !(p.currentMF.GetType() == dto.MetricType_HISTOGRAM && p.currentLabelPair.GetName() == model.BucketLabel) { + if (p.currentMF.GetType() != dto.MetricType_SUMMARY || p.currentLabelPair.GetName() != model.QuantileLabel) && + (p.currentMF.GetType() != dto.MetricType_HISTOGRAM || p.currentLabelPair.GetName() != model.BucketLabel) { p.currentLabelPairs = append(p.currentLabelPairs, p.currentLabelPair) } // Check for duplicate label names. 
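The expfmt change above is a pure boolean refactor: `!(A && B) && !(C && D)` is rewritten via De Morgan's law as `(!A || !B) && (!C || !D)`, so the parser's behaviour is unchanged. A tiny self-contained check of that equivalence; the predicate names are illustrative stand-ins for the summary/histogram type and label comparisons.

package main

import "fmt"

func main() {
	// Exhaustively verify that the old and new forms of the condition in
	// startLabelName agree for every combination of the four predicates.
	for mask := 0; mask < 16; mask++ {
		isSummary := mask&1 != 0   // metric type is SUMMARY
		isQuantile := mask&2 != 0  // label name is "quantile"
		isHistogram := mask&4 != 0 // metric type is HISTOGRAM
		isBucket := mask&8 != 0    // label name is "le"

		oldForm := !(isSummary && isQuantile) && !(isHistogram && isBucket)
		newForm := (!isSummary || !isQuantile) && (!isHistogram || !isBucket)

		if oldForm != newForm {
			fmt.Println("mismatch at", mask) // never reached
		}
	}
	fmt.Println("old and new conditions are equivalent")
}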
diff --git a/vendor/github.com/prometheus/common/model/labels.go b/vendor/github.com/prometheus/common/model/labels.go index f4a38760..de83afe9 100644 --- a/vendor/github.com/prometheus/common/model/labels.go +++ b/vendor/github.com/prometheus/common/model/labels.go @@ -122,7 +122,8 @@ func (ln LabelName) IsValidLegacy() bool { return false } for i, b := range ln { - if !((b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' || (b >= '0' && b <= '9' && i > 0)) { + // TODO: Apply De Morgan's law. Make sure there are tests for this. + if !((b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' || (b >= '0' && b <= '9' && i > 0)) { //nolint:staticcheck return false } } diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go index 57781ff9..86a29357 100644 --- a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go @@ -468,12 +468,13 @@ func (ejvw *extJSONValueWriter) WriteRegex(pattern string, options string) error return err } + options = sortStringAlphebeticAscending(options) var buf bytes.Buffer buf.WriteString(`{"$regularExpression":{"pattern":`) writeStringWithEscapes(pattern, &buf, ejvw.escapeHTML) - buf.WriteString(`,"options":"`) - buf.WriteString(sortStringAlphebeticAscending(options)) - buf.WriteString(`"}},`) + buf.WriteString(`,"options":`) + writeStringWithEscapes(options, &buf, ejvw.escapeHTML) + buf.WriteString(`}},`) ejvw.buf = append(ejvw.buf, buf.Bytes()...) diff --git a/vendor/go.opentelemetry.io/otel/.golangci.yml b/vendor/go.opentelemetry.io/otel/.golangci.yml index c58e48ab..888e5da8 100644 --- a/vendor/go.opentelemetry.io/otel/.golangci.yml +++ b/vendor/go.opentelemetry.io/otel/.golangci.yml @@ -1,13 +1,9 @@ -# See https://github.com/golangci/golangci-lint#config-file +version: "2" run: - issues-exit-code: 1 #Default - tests: true #Default - + issues-exit-code: 1 + tests: true linters: - # Disable everything by default so upgrades to not include new "default - # enabled" linters. - disable-all: true - # Specifically enable linters we want to use. + default: none enable: - asasalint - bodyclose @@ -15,10 +11,7 @@ linters: - errcheck - errorlint - godot - - gofumpt - - goimports - gosec - - gosimple - govet - ineffassign - misspell @@ -26,227 +19,230 @@ linters: - revive - staticcheck - testifylint - - typecheck - unconvert - - unused - unparam + - unused - usestdlibvars - usetesting - + settings: + depguard: + rules: + auto/sdk: + files: + - '!internal/global/trace.go' + - ~internal/global/trace_test.go + deny: + - pkg: go.opentelemetry.io/auto/sdk + desc: Do not use SDK from automatic instrumentation. + non-tests: + files: + - '!$test' + - '!**/*test/*.go' + - '!**/internal/matchers/*.go' + deny: + - pkg: testing + - pkg: github.com/stretchr/testify + - pkg: crypto/md5 + - pkg: crypto/sha1 + - pkg: crypto/**/pkix + otel-internal: + files: + - '**/sdk/*.go' + - '**/sdk/**/*.go' + - '**/exporters/*.go' + - '**/exporters/**/*.go' + - '**/schema/*.go' + - '**/schema/**/*.go' + - '**/metric/*.go' + - '**/metric/**/*.go' + - '**/bridge/*.go' + - '**/bridge/**/*.go' + - '**/trace/*.go' + - '**/trace/**/*.go' + - '**/log/*.go' + - '**/log/**/*.go' + deny: + - pkg: go.opentelemetry.io/otel/internal$ + desc: Do not use cross-module internal packages. + - pkg: go.opentelemetry.io/otel/internal/internaltest + desc: Do not use cross-module internal packages. 
+ - pkg: go.opentelemetry.io/otel/internal/matchers + desc: Do not use cross-module internal packages. + otlp-internal: + files: + - '!**/exporters/otlp/internal/**/*.go' + deny: + - pkg: go.opentelemetry.io/otel/exporters/otlp/internal + desc: Do not use cross-module internal packages. + otlpmetric-internal: + files: + - '!**/exporters/otlp/otlpmetric/internal/*.go' + - '!**/exporters/otlp/otlpmetric/internal/**/*.go' + deny: + - pkg: go.opentelemetry.io/otel/exporters/otlp/otlpmetric/internal + desc: Do not use cross-module internal packages. + otlptrace-internal: + files: + - '!**/exporters/otlp/otlptrace/*.go' + - '!**/exporters/otlp/otlptrace/internal/**.go' + deny: + - pkg: go.opentelemetry.io/otel/exporters/otlp/otlptrace/internal + desc: Do not use cross-module internal packages. + godot: + exclude: + # Exclude links. + - '^ *\[[^]]+\]:' + # Exclude sentence fragments for lists. + - ^[ ]*[-•] + # Exclude sentences prefixing a list. + - :$ + misspell: + locale: US + ignore-rules: + - cancelled + perfsprint: + int-conversion: true + err-error: true + errorf: true + sprintf1: true + strconcat: true + revive: + confidence: 0.01 + rules: + - name: blank-imports + - name: bool-literal-in-expr + - name: constant-logical-expr + - name: context-as-argument + arguments: + - allowTypesBefore: '*testing.T' + disabled: true + - name: context-keys-type + - name: deep-exit + - name: defer + arguments: + - - call-chain + - loop + - name: dot-imports + - name: duplicated-imports + - name: early-return + arguments: + - preserveScope + - name: empty-block + - name: empty-lines + - name: error-naming + - name: error-return + - name: error-strings + - name: errorf + - name: exported + arguments: + - sayRepetitiveInsteadOfStutters + - name: flag-parameter + - name: identical-branches + - name: if-return + - name: import-shadowing + - name: increment-decrement + - name: indent-error-flow + arguments: + - preserveScope + - name: package-comments + - name: range + - name: range-val-in-closure + - name: range-val-address + - name: redefines-builtin-id + - name: string-format + arguments: + - - panic + - /^[^\n]*$/ + - must not contain line breaks + - name: struct-tag + - name: superfluous-else + arguments: + - preserveScope + - name: time-equal + - name: unconditional-recursion + - name: unexported-return + - name: unhandled-error + arguments: + - fmt.Fprint + - fmt.Fprintf + - fmt.Fprintln + - fmt.Print + - fmt.Printf + - fmt.Println + - name: unnecessary-stmt + - name: useless-break + - name: var-declaration + - name: var-naming + arguments: + - ["ID"] # AllowList + - ["Otel", "Aws", "Gcp"] # DenyList + - name: waitgroup-by-value + testifylint: + enable-all: true + disable: + - float-compare + - go-require + - require-error + exclusions: + generated: lax + presets: + - common-false-positives + - legacy + - std-error-handling + rules: + # TODO: Having appropriate comments for exported objects helps development, + # even for objects in internal packages. Appropriate comments for all + # exported objects should be added and this exclusion removed. + - linters: + - revive + path: .*internal/.* + text: exported (method|function|type|const) (.+) should have comment or be unexported + # Yes, they are, but it's okay in a test. + - linters: + - revive + path: _test\.go + text: exported func.*returns unexported type.*which can be annoying to use + # Example test functions should be treated like main. 
+ - linters: + - revive + path: example.*_test\.go + text: calls to (.+) only in main[(][)] or init[(][)] functions + # It's okay to not run gosec and perfsprint in a test. + - linters: + - gosec + - perfsprint + path: _test\.go + # Ignoring gosec G404: Use of weak random number generator (math/rand instead of crypto/rand) + # as we commonly use it in tests and examples. + - linters: + - gosec + text: 'G404:' + # Ignoring gosec G402: TLS MinVersion too low + # as the https://pkg.go.dev/crypto/tls#Config handles MinVersion default well. + - linters: + - gosec + text: 'G402: TLS MinVersion too low.' + paths: + - third_party$ + - builtin$ + - examples$ issues: - # Maximum issues count per one linter. - # Set to 0 to disable. - # Default: 50 - # Setting to unlimited so the linter only is run once to debug all issues. max-issues-per-linter: 0 - # Maximum count of issues with the same text. - # Set to 0 to disable. - # Default: 3 - # Setting to unlimited so the linter only is run once to debug all issues. max-same-issues: 0 - # Excluding configuration per-path, per-linter, per-text and per-source. - exclude-rules: - # TODO: Having appropriate comments for exported objects helps development, - # even for objects in internal packages. Appropriate comments for all - # exported objects should be added and this exclusion removed. - - path: '.*internal/.*' - text: "exported (method|function|type|const) (.+) should have comment or be unexported" - linters: - - revive - # Yes, they are, but it's okay in a test. - - path: _test\.go - text: "exported func.*returns unexported type.*which can be annoying to use" - linters: - - revive - # Example test functions should be treated like main. - - path: example.*_test\.go - text: "calls to (.+) only in main[(][)] or init[(][)] functions" - linters: - - revive - # It's okay to not run gosec and perfsprint in a test. - - path: _test\.go - linters: - - gosec - - perfsprint - # Ignoring gosec G404: Use of weak random number generator (math/rand instead of crypto/rand) - # as we commonly use it in tests and examples. - - text: "G404:" - linters: - - gosec - # Ignoring gosec G402: TLS MinVersion too low - # as the https://pkg.go.dev/crypto/tls#Config handles MinVersion default well. - - text: "G402: TLS MinVersion too low." - linters: - - gosec - include: - # revive exported should have comment or be unexported. - - EXC0012 - # revive package comment should be of the form ... - - EXC0013 - -linters-settings: - depguard: - rules: - non-tests: - files: - - "!$test" - - "!**/*test/*.go" - - "!**/internal/matchers/*.go" - deny: - - pkg: "testing" - - pkg: "github.com/stretchr/testify" - - pkg: "crypto/md5" - - pkg: "crypto/sha1" - - pkg: "crypto/**/pkix" - auto/sdk: - files: - - "!internal/global/trace.go" - - "~internal/global/trace_test.go" - deny: - - pkg: "go.opentelemetry.io/auto/sdk" - desc: Do not use SDK from automatic instrumentation. - otlp-internal: - files: - - "!**/exporters/otlp/internal/**/*.go" - deny: - - pkg: "go.opentelemetry.io/otel/exporters/otlp/internal" - desc: Do not use cross-module internal packages. - otlptrace-internal: - files: - - "!**/exporters/otlp/otlptrace/*.go" - - "!**/exporters/otlp/otlptrace/internal/**.go" - deny: - - pkg: "go.opentelemetry.io/otel/exporters/otlp/otlptrace/internal" - desc: Do not use cross-module internal packages. 
- otlpmetric-internal: - files: - - "!**/exporters/otlp/otlpmetric/internal/*.go" - - "!**/exporters/otlp/otlpmetric/internal/**/*.go" - deny: - - pkg: "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/internal" - desc: Do not use cross-module internal packages. - otel-internal: - files: - - "**/sdk/*.go" - - "**/sdk/**/*.go" - - "**/exporters/*.go" - - "**/exporters/**/*.go" - - "**/schema/*.go" - - "**/schema/**/*.go" - - "**/metric/*.go" - - "**/metric/**/*.go" - - "**/bridge/*.go" - - "**/bridge/**/*.go" - - "**/trace/*.go" - - "**/trace/**/*.go" - - "**/log/*.go" - - "**/log/**/*.go" - deny: - - pkg: "go.opentelemetry.io/otel/internal$" - desc: Do not use cross-module internal packages. - - pkg: "go.opentelemetry.io/otel/internal/attribute" - desc: Do not use cross-module internal packages. - - pkg: "go.opentelemetry.io/otel/internal/internaltest" - desc: Do not use cross-module internal packages. - - pkg: "go.opentelemetry.io/otel/internal/matchers" - desc: Do not use cross-module internal packages. - godot: - exclude: - # Exclude links. - - '^ *\[[^]]+\]:' - # Exclude sentence fragments for lists. - - '^[ ]*[-•]' - # Exclude sentences prefixing a list. - - ':$' - goimports: - local-prefixes: go.opentelemetry.io - misspell: - locale: US - ignore-words: - - cancelled - perfsprint: - err-error: true - errorf: true - int-conversion: true - sprintf1: true - strconcat: true - revive: - # Sets the default failure confidence. - # This means that linting errors with less than 0.8 confidence will be ignored. - # Default: 0.8 - confidence: 0.01 - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md - rules: - - name: blank-imports - - name: bool-literal-in-expr - - name: constant-logical-expr - - name: context-as-argument - disabled: true - arguments: - - allowTypesBefore: "*testing.T" - - name: context-keys-type - - name: deep-exit - - name: defer - arguments: - - ["call-chain", "loop"] - - name: dot-imports - - name: duplicated-imports - - name: early-return - arguments: - - "preserveScope" - - name: empty-block - - name: empty-lines - - name: error-naming - - name: error-return - - name: error-strings - - name: errorf - - name: exported - arguments: - - "sayRepetitiveInsteadOfStutters" - - name: flag-parameter - - name: identical-branches - - name: if-return - - name: import-shadowing - - name: increment-decrement - - name: indent-error-flow - arguments: - - "preserveScope" - - name: package-comments - - name: range - - name: range-val-in-closure - - name: range-val-address - - name: redefines-builtin-id - - name: string-format - arguments: - - - panic - - '/^[^\n]*$/' - - must not contain line breaks - - name: struct-tag - - name: superfluous-else - arguments: - - "preserveScope" - - name: time-equal - - name: unconditional-recursion - - name: unexported-return - - name: unhandled-error - arguments: - - "fmt.Fprint" - - "fmt.Fprintf" - - "fmt.Fprintln" - - "fmt.Print" - - "fmt.Printf" - - "fmt.Println" - - name: unnecessary-stmt - - name: useless-break - - name: var-declaration - - name: var-naming - arguments: - - ["ID"] # AllowList - - ["Otel", "Aws", "Gcp"] # DenyList - - name: waitgroup-by-value - testifylint: - enable-all: true - disable: - - float-compare - - go-require - - require-error +formatters: + enable: + - gofumpt + - goimports + - golines + settings: + goimports: + local-prefixes: + - go.opentelemetry.io + golines: + max-len: 120 + exclusions: + generated: lax + paths: + - third_party$ + - builtin$ + - examples$ diff --git 
a/vendor/go.opentelemetry.io/otel/CHANGELOG.md b/vendor/go.opentelemetry.io/otel/CHANGELOG.md index c076db28..648e4aba 100644 --- a/vendor/go.opentelemetry.io/otel/CHANGELOG.md +++ b/vendor/go.opentelemetry.io/otel/CHANGELOG.md @@ -11,6 +11,57 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm +## [1.36.0/0.58.0/0.12.0] 2025-05-20 + +### Added + +- Add exponential histogram support in `go.opentelemetry.io/otel/exporters/prometheus`. (#6421) +- The `go.opentelemetry.io/otel/semconv/v1.31.0` package. + The package contains semantic conventions from the `v1.31.0` version of the OpenTelemetry Semantic Conventions. + See the [migration documentation](./semconv/v1.31.0/MIGRATION.md) for information on how to upgrade from `go.opentelemetry.io/otel/semconv/v1.30.0`. (#6479) +- Add `Recording`, `Scope`, and `Record` types in `go.opentelemetry.io/otel/log/logtest`. (#6507) +- Add `WithHTTPClient` option to configure the `http.Client` used by `go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp`. (#6751) +- Add `WithHTTPClient` option to configure the `http.Client` used by `go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp`. (#6752) +- Add `WithHTTPClient` option to configure the `http.Client` used by `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp`. (#6688) +- Add `ValuesGetter` in `go.opentelemetry.io/otel/propagation`, a `TextMapCarrier` that supports retrieving multiple values for a single key. (#5973) +- Add `Values` method to `HeaderCarrier` to implement the new `ValuesGetter` interface in `go.opentelemetry.io/otel/propagation`. (#5973) +- Update `Baggage` in `go.opentelemetry.io/otel/propagation` to retrieve multiple values for a key when the carrier implements `ValuesGetter`. (#5973) +- Add `AssertEqual` function in `go.opentelemetry.io/otel/log/logtest`. (#6662) +- The `go.opentelemetry.io/otel/semconv/v1.32.0` package. + The package contains semantic conventions from the `v1.32.0` version of the OpenTelemetry Semantic Conventions. + See the [migration documentation](./semconv/v1.32.0/MIGRATION.md) for information on how to upgrade from `go.opentelemetry.io/otel/semconv/v1.31.0`(#6782) +- Add `Transform` option in `go.opentelemetry.io/otel/log/logtest`. (#6794) +- Add `Desc` option in `go.opentelemetry.io/otel/log/logtest`. (#6796) + +### Removed + +- Drop support for [Go 1.22]. (#6381, #6418) +- Remove `Resource` field from `EnabledParameters` in `go.opentelemetry.io/otel/sdk/log`. (#6494) +- Remove `RecordFactory` type from `go.opentelemetry.io/otel/log/logtest`. (#6492) +- Remove `ScopeRecords`, `EmittedRecord`, and `RecordFactory` types from `go.opentelemetry.io/otel/log/logtest`. (#6507) +- Remove `AssertRecordEqual` function in `go.opentelemetry.io/otel/log/logtest`, use `AssertEqual` instead. (#6662) + +### Changed + +- ⚠️ Update `github.com/prometheus/client_golang` to `v1.21.1`, which changes the `NameValidationScheme` to `UTF8Validation`. + This allows metrics names to keep original delimiters (e.g. `.`), rather than replacing with underscores. + This can be reverted by setting `github.com/prometheus/common/model.NameValidationScheme` to `LegacyValidation` in `github.com/prometheus/common/model`. (#6433) +- Initialize map with `len(keys)` in `NewAllowKeysFilter` and `NewDenyKeysFilter` to avoid unnecessary allocations in `go.opentelemetry.io/otel/attribute`. (#6455) +- `go.opentelemetry.io/otel/log/logtest` is now a separate Go module. 
(#6465) +- `go.opentelemetry.io/otel/sdk/log/logtest` is now a separate Go module. (#6466) +- `Recorder` in `go.opentelemetry.io/otel/log/logtest` no longer separately stores records emitted by loggers with the same instrumentation scope. (#6507) +- Improve performance of `BatchProcessor` in `go.opentelemetry.io/otel/sdk/log` by not exporting when exporter cannot accept more. (#6569, #6641) + +### Deprecated + +- Deprecate support for `model.LegacyValidation` for `go.opentelemetry.io/otel/exporters/prometheus`. (#6449) + +### Fixes + +- Stop percent encoding header environment variables in `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc` and `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp`. (#6392) +- Ensure the `noopSpan.tracerProvider` method is not inlined in `go.opentelemetry.io/otel/trace` so the `go.opentelemetry.io/auto` instrumentation can instrument non-recording spans. (#6456) +- Use a `sync.Pool` instead of allocating `metricdata.ResourceMetrics` in `go.opentelemetry.io/otel/exporters/prometheus`. (#6472) + ## [1.35.0/0.57.0/0.11.0] 2025-03-05 This release is the last to support [Go 1.22]. @@ -3237,7 +3288,8 @@ It contains api and sdk for trace and meter. - CircleCI build CI manifest files. - CODEOWNERS file to track owners of this project. -[Unreleased]: https://github.com/open-telemetry/opentelemetry-go/compare/v1.35.0...HEAD +[Unreleased]: https://github.com/open-telemetry/opentelemetry-go/compare/v1.36.0...HEAD +[1.36.0/0.58.0/0.12.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.36.0 [1.35.0/0.57.0/0.11.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.35.0 [1.34.0/0.56.0/0.10.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.34.0 [1.33.0/0.55.0/0.9.0/0.0.12]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.33.0 diff --git a/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md b/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md index 7b8af585..1902dac0 100644 --- a/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md +++ b/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md @@ -643,6 +643,7 @@ should be canceled. 
### Triagers +- [Alex Kats](https://github.com/akats7), Capital One - [Cheng-Zhen Yang](https://github.com/scorpionknifes), Independent ### Approvers diff --git a/vendor/go.opentelemetry.io/otel/Makefile b/vendor/go.opentelemetry.io/otel/Makefile index 226410d7..62a56f4d 100644 --- a/vendor/go.opentelemetry.io/otel/Makefile +++ b/vendor/go.opentelemetry.io/otel/Makefile @@ -43,8 +43,11 @@ $(TOOLS)/crosslink: PACKAGE=go.opentelemetry.io/build-tools/crosslink SEMCONVKIT = $(TOOLS)/semconvkit $(TOOLS)/semconvkit: PACKAGE=go.opentelemetry.io/otel/$(TOOLS_MOD_DIR)/semconvkit +VERIFYREADMES = $(TOOLS)/verifyreadmes +$(TOOLS)/verifyreadmes: PACKAGE=go.opentelemetry.io/otel/$(TOOLS_MOD_DIR)/verifyreadmes + GOLANGCI_LINT = $(TOOLS)/golangci-lint -$(TOOLS)/golangci-lint: PACKAGE=github.com/golangci/golangci-lint/cmd/golangci-lint +$(TOOLS)/golangci-lint: PACKAGE=github.com/golangci/golangci-lint/v2/cmd/golangci-lint MISSPELL = $(TOOLS)/misspell $(TOOLS)/misspell: PACKAGE=github.com/client9/misspell/cmd/misspell @@ -68,7 +71,7 @@ GOVULNCHECK = $(TOOLS)/govulncheck $(TOOLS)/govulncheck: PACKAGE=golang.org/x/vuln/cmd/govulncheck .PHONY: tools -tools: $(CROSSLINK) $(GOLANGCI_LINT) $(MISSPELL) $(GOCOVMERGE) $(STRINGER) $(PORTO) $(SEMCONVGEN) $(MULTIMOD) $(SEMCONVKIT) $(GOTMPL) $(GORELEASE) +tools: $(CROSSLINK) $(GOLANGCI_LINT) $(MISSPELL) $(GOCOVMERGE) $(STRINGER) $(PORTO) $(SEMCONVGEN) $(VERIFYREADMES) $(MULTIMOD) $(SEMCONVKIT) $(GOTMPL) $(GORELEASE) # Virtualized python tools via docker @@ -213,11 +216,8 @@ go-mod-tidy/%: crosslink && cd $(DIR) \ && $(GO) mod tidy -compat=1.21 -.PHONY: lint-modules -lint-modules: go-mod-tidy - .PHONY: lint -lint: misspell lint-modules golangci-lint govulncheck +lint: misspell go-mod-tidy golangci-lint govulncheck .PHONY: vanity-import-check vanity-import-check: $(PORTO) @@ -319,10 +319,11 @@ add-tags: verify-mods @[ "${MODSET}" ] || ( echo ">> env var MODSET is not set"; exit 1 ) $(MULTIMOD) tag -m ${MODSET} -c ${COMMIT} +MARKDOWNIMAGE := $(shell awk '$$4=="markdown" {print $$2}' $(DEPENDENCIES_DOCKERFILE)) .PHONY: lint-markdown lint-markdown: - docker run -v "$(CURDIR):$(WORKDIR)" avtodev/markdown-lint:v1 -c $(WORKDIR)/.markdownlint.yaml $(WORKDIR)/**/*.md + docker run --rm -u $(DOCKER_USER) -v "$(CURDIR):$(WORKDIR)" $(MARKDOWNIMAGE) -c $(WORKDIR)/.markdownlint.yaml $(WORKDIR)/**/*.md .PHONY: verify-readmes -verify-readmes: - ./verify_readmes.sh +verify-readmes: $(VERIFYREADMES) + $(VERIFYREADMES) diff --git a/vendor/go.opentelemetry.io/otel/README.md b/vendor/go.opentelemetry.io/otel/README.md index 8421cd7e..b6007881 100644 --- a/vendor/go.opentelemetry.io/otel/README.md +++ b/vendor/go.opentelemetry.io/otel/README.md @@ -6,6 +6,7 @@ [![Go Report Card](https://goreportcard.com/badge/go.opentelemetry.io/otel)](https://goreportcard.com/report/go.opentelemetry.io/otel) [![OpenSSF Scorecard](https://api.scorecard.dev/projects/github.com/open-telemetry/opentelemetry-go/badge)](https://scorecard.dev/viewer/?uri=github.com/open-telemetry/opentelemetry-go) [![OpenSSF Best Practices](https://www.bestpractices.dev/projects/9996/badge)](https://www.bestpractices.dev/projects/9996) +[![Fuzzing Status](https://oss-fuzz-build-logs.storage.googleapis.com/badges/opentelemetry-go.svg)](https://issues.oss-fuzz.com/issues?q=project:opentelemetry-go) [![Slack](https://img.shields.io/badge/slack-@cncf/otel--go-brightgreen.svg?logo=slack)](https://cloud-native.slack.com/archives/C01NPAXACKT) OpenTelemetry-Go is the [Go](https://golang.org/) implementation of 
[OpenTelemetry](https://opentelemetry.io/). @@ -53,25 +54,18 @@ Currently, this project supports the following environments. |----------|------------|--------------| | Ubuntu | 1.24 | amd64 | | Ubuntu | 1.23 | amd64 | -| Ubuntu | 1.22 | amd64 | | Ubuntu | 1.24 | 386 | | Ubuntu | 1.23 | 386 | -| Ubuntu | 1.22 | 386 | | Ubuntu | 1.24 | arm64 | | Ubuntu | 1.23 | arm64 | -| Ubuntu | 1.22 | arm64 | | macOS 13 | 1.24 | amd64 | | macOS 13 | 1.23 | amd64 | -| macOS 13 | 1.22 | amd64 | | macOS | 1.24 | arm64 | | macOS | 1.23 | arm64 | -| macOS | 1.22 | arm64 | | Windows | 1.24 | amd64 | | Windows | 1.23 | amd64 | -| Windows | 1.22 | amd64 | | Windows | 1.24 | 386 | | Windows | 1.23 | 386 | -| Windows | 1.22 | 386 | While this project should work for other systems, no compatibility guarantees are made for those systems currently. diff --git a/vendor/go.opentelemetry.io/otel/RELEASING.md b/vendor/go.opentelemetry.io/otel/RELEASING.md index 1e13ae54..7c1a9119 100644 --- a/vendor/go.opentelemetry.io/otel/RELEASING.md +++ b/vendor/go.opentelemetry.io/otel/RELEASING.md @@ -1,5 +1,9 @@ # Release Process +## Create a `Version Release` issue + +Create a `Version Release` issue to track the release process. + ## Semantic Convention Generation New versions of the [OpenTelemetry Semantic Conventions] mean new versions of the `semconv` package need to be generated. @@ -123,6 +127,16 @@ Importantly, bump any package versions referenced to be the latest one you just [Go instrumentation documentation]: https://opentelemetry.io/docs/languages/go/ [content/en/docs/languages/go]: https://github.com/open-telemetry/opentelemetry.io/tree/main/content/en/docs/languages/go +### Close the milestone + +Once a release is made, ensure all issues that were fixed and PRs that were merged as part of this release are added to the corresponding milestone. +This helps track what changes were included in each release. + +- To find issues that haven't been included in a milestone, use this [GitHub search query](https://github.com/open-telemetry/opentelemetry-go/issues?q=is%3Aissue%20no%3Amilestone%20is%3Aclosed%20sort%3Aupdated-desc%20reason%3Acompleted%20-label%3AStale%20linked%3Apr) +- To find merged PRs that haven't been included in a milestone, use this [GitHub search query](https://github.com/open-telemetry/opentelemetry-go/pulls?q=is%3Apr+no%3Amilestone+is%3Amerged). + +Once all related issues and PRs have been added to the milestone, close the milestone. + ### Demo Repository Bump the dependencies in the following Go services: @@ -130,3 +144,7 @@ Bump the dependencies in the following Go services: - [`accounting`](https://github.com/open-telemetry/opentelemetry-demo/tree/main/src/accounting) - [`checkoutservice`](https://github.com/open-telemetry/opentelemetry-demo/tree/main/src/checkout) - [`productcatalogservice`](https://github.com/open-telemetry/opentelemetry-demo/tree/main/src/product-catalog) + +### Close the `Version Release` issue + +Once the todo list in the `Version Release` issue is complete, close the issue. 
diff --git a/vendor/go.opentelemetry.io/otel/attribute/filter.go b/vendor/go.opentelemetry.io/otel/attribute/filter.go index be9cd922..3eeaa5d4 100644 --- a/vendor/go.opentelemetry.io/otel/attribute/filter.go +++ b/vendor/go.opentelemetry.io/otel/attribute/filter.go @@ -19,7 +19,7 @@ func NewAllowKeysFilter(keys ...Key) Filter { return func(kv KeyValue) bool { return false } } - allowed := make(map[Key]struct{}) + allowed := make(map[Key]struct{}, len(keys)) for _, k := range keys { allowed[k] = struct{}{} } @@ -38,7 +38,7 @@ func NewDenyKeysFilter(keys ...Key) Filter { return func(kv KeyValue) bool { return true } } - forbid := make(map[Key]struct{}) + forbid := make(map[Key]struct{}, len(keys)) for _, k := range keys { forbid[k] = struct{}{} } diff --git a/vendor/go.opentelemetry.io/otel/internal/attribute/attribute.go b/vendor/go.opentelemetry.io/otel/attribute/internal/attribute.go similarity index 97% rename from vendor/go.opentelemetry.io/otel/internal/attribute/attribute.go rename to vendor/go.opentelemetry.io/otel/attribute/internal/attribute.go index 691d96c7..b76d2bbf 100644 --- a/vendor/go.opentelemetry.io/otel/internal/attribute/attribute.go +++ b/vendor/go.opentelemetry.io/otel/attribute/internal/attribute.go @@ -5,7 +5,7 @@ Package attribute provide several helper functions for some commonly used logic of processing attributes. */ -package attribute // import "go.opentelemetry.io/otel/internal/attribute" +package attribute // import "go.opentelemetry.io/otel/attribute/internal" import ( "reflect" diff --git a/vendor/go.opentelemetry.io/otel/attribute/rawhelpers.go b/vendor/go.opentelemetry.io/otel/attribute/rawhelpers.go new file mode 100644 index 00000000..5791c6e7 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/attribute/rawhelpers.go @@ -0,0 +1,37 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package attribute // import "go.opentelemetry.io/otel/attribute" + +import ( + "math" +) + +func boolToRaw(b bool) uint64 { // nolint:revive // b is not a control flag. + if b { + return 1 + } + return 0 +} + +func rawToBool(r uint64) bool { + return r != 0 +} + +func int64ToRaw(i int64) uint64 { + // Assumes original was a valid int64 (overflow not checked). + return uint64(i) // nolint: gosec +} + +func rawToInt64(r uint64) int64 { + // Assumes original was a valid int64 (overflow not checked). 
+ return int64(r) // nolint: gosec +} + +func float64ToRaw(f float64) uint64 { + return math.Float64bits(f) +} + +func rawToFloat64(r uint64) float64 { + return math.Float64frombits(r) +} diff --git a/vendor/go.opentelemetry.io/otel/attribute/value.go b/vendor/go.opentelemetry.io/otel/attribute/value.go index 9ea0ecbb..817eecac 100644 --- a/vendor/go.opentelemetry.io/otel/attribute/value.go +++ b/vendor/go.opentelemetry.io/otel/attribute/value.go @@ -9,8 +9,7 @@ import ( "reflect" "strconv" - "go.opentelemetry.io/otel/internal" - "go.opentelemetry.io/otel/internal/attribute" + attribute "go.opentelemetry.io/otel/attribute/internal" ) //go:generate stringer -type=Type @@ -51,7 +50,7 @@ const ( func BoolValue(v bool) Value { return Value{ vtype: BOOL, - numeric: internal.BoolToRaw(v), + numeric: boolToRaw(v), } } @@ -82,7 +81,7 @@ func IntSliceValue(v []int) Value { func Int64Value(v int64) Value { return Value{ vtype: INT64, - numeric: internal.Int64ToRaw(v), + numeric: int64ToRaw(v), } } @@ -95,7 +94,7 @@ func Int64SliceValue(v []int64) Value { func Float64Value(v float64) Value { return Value{ vtype: FLOAT64, - numeric: internal.Float64ToRaw(v), + numeric: float64ToRaw(v), } } @@ -125,7 +124,7 @@ func (v Value) Type() Type { // AsBool returns the bool value. Make sure that the Value's type is // BOOL. func (v Value) AsBool() bool { - return internal.RawToBool(v.numeric) + return rawToBool(v.numeric) } // AsBoolSlice returns the []bool value. Make sure that the Value's type is @@ -144,7 +143,7 @@ func (v Value) asBoolSlice() []bool { // AsInt64 returns the int64 value. Make sure that the Value's type is // INT64. func (v Value) AsInt64() int64 { - return internal.RawToInt64(v.numeric) + return rawToInt64(v.numeric) } // AsInt64Slice returns the []int64 value. Make sure that the Value's type is @@ -163,7 +162,7 @@ func (v Value) asInt64Slice() []int64 { // AsFloat64 returns the float64 value. Make sure that the Value's // type is FLOAT64. func (v Value) AsFloat64() float64 { - return internal.RawToFloat64(v.numeric) + return rawToFloat64(v.numeric) } // AsFloat64Slice returns the []float64 value. Make sure that the Value's type is diff --git a/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile b/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile index e4c4a753..51fb76b3 100644 --- a/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile +++ b/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile @@ -1,3 +1,4 @@ # This is a renovate-friendly source of Docker images. -FROM python:3.13.2-slim-bullseye@sha256:31b581c8218e1f3c58672481b3b7dba8e898852866b408c6a984c22832523935 AS python -FROM otel/weaver:v0.13.2@sha256:ae7346b992e477f629ea327e0979e8a416a97f7956ab1f7e95ac1f44edf1a893 AS weaver +FROM python:3.13.3-slim-bullseye@sha256:9e3f9243e06fd68eb9519074b49878eda20ad39a855fac51aaffb741de20726e AS python +FROM otel/weaver:v0.15.0@sha256:1cf1c72eaed57dad813c2e359133b8a15bd4facf305aae5b13bdca6d3eccff56 AS weaver +FROM avtodev/markdown-lint:v1@sha256:6aeedc2f49138ce7a1cd0adffc1b1c0321b841dc2102408967d9301c031949ee AS markdown diff --git a/vendor/go.opentelemetry.io/otel/get_main_pkgs.sh b/vendor/go.opentelemetry.io/otel/get_main_pkgs.sh deleted file mode 100644 index 93e80ea3..00000000 --- a/vendor/go.opentelemetry.io/otel/get_main_pkgs.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env bash - -# Copyright The OpenTelemetry Authors -# SPDX-License-Identifier: Apache-2.0 - -set -euo pipefail - -top_dir='.' 
-if [[ $# -gt 0 ]]; then - top_dir="${1}" -fi - -p=$(pwd) -mod_dirs=() - -# Note `mapfile` does not exist in older bash versions: -# https://stackoverflow.com/questions/41475261/need-alternative-to-readarray-mapfile-for-script-on-older-version-of-bash - -while IFS= read -r line; do - mod_dirs+=("$line") -done < <(find "${top_dir}" -type f -name 'go.mod' -exec dirname {} \; | sort) - -for mod_dir in "${mod_dirs[@]}"; do - cd "${mod_dir}" - - while IFS= read -r line; do - echo ".${line#${p}}" - done < <(go list --find -f '{{.Name}}|{{.Dir}}' ./... | grep '^main|' | cut -f 2- -d '|') - cd "${p}" -done diff --git a/vendor/go.opentelemetry.io/otel/internal/gen.go b/vendor/go.opentelemetry.io/otel/internal/gen.go deleted file mode 100644 index 4259f032..00000000 --- a/vendor/go.opentelemetry.io/otel/internal/gen.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright The OpenTelemetry Authors -// SPDX-License-Identifier: Apache-2.0 - -package internal // import "go.opentelemetry.io/otel/internal" - -//go:generate gotmpl --body=./shared/matchers/expectation.go.tmpl "--data={}" --out=matchers/expectation.go -//go:generate gotmpl --body=./shared/matchers/expecter.go.tmpl "--data={}" --out=matchers/expecter.go -//go:generate gotmpl --body=./shared/matchers/temporal_matcher.go.tmpl "--data={}" --out=matchers/temporal_matcher.go - -//go:generate gotmpl --body=./shared/internaltest/alignment.go.tmpl "--data={}" --out=internaltest/alignment.go -//go:generate gotmpl --body=./shared/internaltest/env.go.tmpl "--data={}" --out=internaltest/env.go -//go:generate gotmpl --body=./shared/internaltest/env_test.go.tmpl "--data={}" --out=internaltest/env_test.go -//go:generate gotmpl --body=./shared/internaltest/errors.go.tmpl "--data={}" --out=internaltest/errors.go -//go:generate gotmpl --body=./shared/internaltest/harness.go.tmpl "--data={\"matchersImportPath\": \"go.opentelemetry.io/otel/internal/matchers\"}" --out=internaltest/harness.go -//go:generate gotmpl --body=./shared/internaltest/text_map_carrier.go.tmpl "--data={}" --out=internaltest/text_map_carrier.go -//go:generate gotmpl --body=./shared/internaltest/text_map_carrier_test.go.tmpl "--data={}" --out=internaltest/text_map_carrier_test.go -//go:generate gotmpl --body=./shared/internaltest/text_map_propagator.go.tmpl "--data={}" --out=internaltest/text_map_propagator.go -//go:generate gotmpl --body=./shared/internaltest/text_map_propagator_test.go.tmpl "--data={}" --out=internaltest/text_map_propagator_test.go diff --git a/vendor/go.opentelemetry.io/otel/internal/global/handler.go b/vendor/go.opentelemetry.io/otel/internal/global/handler.go index c657ff8e..2e47b296 100644 --- a/vendor/go.opentelemetry.io/otel/internal/global/handler.go +++ b/vendor/go.opentelemetry.io/otel/internal/global/handler.go @@ -1,6 +1,7 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 +// Package global provides the OpenTelemetry global API. 
package global // import "go.opentelemetry.io/otel/internal/global" import ( diff --git a/vendor/go.opentelemetry.io/otel/internal/global/meter.go b/vendor/go.opentelemetry.io/otel/internal/global/meter.go index a6acd8dc..adb37b5b 100644 --- a/vendor/go.opentelemetry.io/otel/internal/global/meter.go +++ b/vendor/go.opentelemetry.io/otel/internal/global/meter.go @@ -169,7 +169,10 @@ func (m *meter) Int64Counter(name string, options ...metric.Int64CounterOption) return i, nil } -func (m *meter) Int64UpDownCounter(name string, options ...metric.Int64UpDownCounterOption) (metric.Int64UpDownCounter, error) { +func (m *meter) Int64UpDownCounter( + name string, + options ...metric.Int64UpDownCounterOption, +) (metric.Int64UpDownCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -238,7 +241,10 @@ func (m *meter) Int64Gauge(name string, options ...metric.Int64GaugeOption) (met return i, nil } -func (m *meter) Int64ObservableCounter(name string, options ...metric.Int64ObservableCounterOption) (metric.Int64ObservableCounter, error) { +func (m *meter) Int64ObservableCounter( + name string, + options ...metric.Int64ObservableCounterOption, +) (metric.Int64ObservableCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -261,7 +267,10 @@ func (m *meter) Int64ObservableCounter(name string, options ...metric.Int64Obser return i, nil } -func (m *meter) Int64ObservableUpDownCounter(name string, options ...metric.Int64ObservableUpDownCounterOption) (metric.Int64ObservableUpDownCounter, error) { +func (m *meter) Int64ObservableUpDownCounter( + name string, + options ...metric.Int64ObservableUpDownCounterOption, +) (metric.Int64ObservableUpDownCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -284,7 +293,10 @@ func (m *meter) Int64ObservableUpDownCounter(name string, options ...metric.Int6 return i, nil } -func (m *meter) Int64ObservableGauge(name string, options ...metric.Int64ObservableGaugeOption) (metric.Int64ObservableGauge, error) { +func (m *meter) Int64ObservableGauge( + name string, + options ...metric.Int64ObservableGaugeOption, +) (metric.Int64ObservableGauge, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -330,7 +342,10 @@ func (m *meter) Float64Counter(name string, options ...metric.Float64CounterOpti return i, nil } -func (m *meter) Float64UpDownCounter(name string, options ...metric.Float64UpDownCounterOption) (metric.Float64UpDownCounter, error) { +func (m *meter) Float64UpDownCounter( + name string, + options ...metric.Float64UpDownCounterOption, +) (metric.Float64UpDownCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -353,7 +368,10 @@ func (m *meter) Float64UpDownCounter(name string, options ...metric.Float64UpDow return i, nil } -func (m *meter) Float64Histogram(name string, options ...metric.Float64HistogramOption) (metric.Float64Histogram, error) { +func (m *meter) Float64Histogram( + name string, + options ...metric.Float64HistogramOption, +) (metric.Float64Histogram, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -399,7 +417,10 @@ func (m *meter) Float64Gauge(name string, options ...metric.Float64GaugeOption) return i, nil } -func (m *meter) Float64ObservableCounter(name string, options ...metric.Float64ObservableCounterOption) (metric.Float64ObservableCounter, error) { +func (m *meter) Float64ObservableCounter( + name string, + options ...metric.Float64ObservableCounterOption, +) (metric.Float64ObservableCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -422,7 +443,10 @@ func (m *meter) Float64ObservableCounter(name string, options ...metric.Float64O return i, nil } 
-func (m *meter) Float64ObservableUpDownCounter(name string, options ...metric.Float64ObservableUpDownCounterOption) (metric.Float64ObservableUpDownCounter, error) { +func (m *meter) Float64ObservableUpDownCounter( + name string, + options ...metric.Float64ObservableUpDownCounterOption, +) (metric.Float64ObservableUpDownCounter, error) { m.mtx.Lock() defer m.mtx.Unlock() @@ -445,7 +469,10 @@ func (m *meter) Float64ObservableUpDownCounter(name string, options ...metric.Fl return i, nil } -func (m *meter) Float64ObservableGauge(name string, options ...metric.Float64ObservableGaugeOption) (metric.Float64ObservableGauge, error) { +func (m *meter) Float64ObservableGauge( + name string, + options ...metric.Float64ObservableGaugeOption, +) (metric.Float64ObservableGauge, error) { m.mtx.Lock() defer m.mtx.Unlock() diff --git a/vendor/go.opentelemetry.io/otel/internal/global/trace.go b/vendor/go.opentelemetry.io/otel/internal/global/trace.go index 8982aa0d..49e4ac4f 100644 --- a/vendor/go.opentelemetry.io/otel/internal/global/trace.go +++ b/vendor/go.opentelemetry.io/otel/internal/global/trace.go @@ -158,7 +158,18 @@ func (t *tracer) Start(ctx context.Context, name string, opts ...trace.SpanStart // a nonRecordingSpan by default. var autoInstEnabled = new(bool) -func (t *tracer) newSpan(ctx context.Context, autoSpan *bool, name string, opts []trace.SpanStartOption) (context.Context, trace.Span) { +// newSpan is called by tracer.Start so auto-instrumentation can attach an eBPF +// uprobe to this code. +// +// "noinline" pragma prevents the method from ever being inlined. +// +//go:noinline +func (t *tracer) newSpan( + ctx context.Context, + autoSpan *bool, + name string, + opts []trace.SpanStartOption, +) (context.Context, trace.Span) { // autoInstEnabled is passed to newSpan via the autoSpan parameter. This is // so the auto-instrumentation can define a uprobe for (*t).newSpan and be // provided with the address of the bool autoInstEnabled points to. It diff --git a/vendor/go.opentelemetry.io/otel/internal/rawhelpers.go b/vendor/go.opentelemetry.io/otel/internal/rawhelpers.go deleted file mode 100644 index b2fe3e41..00000000 --- a/vendor/go.opentelemetry.io/otel/internal/rawhelpers.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright The OpenTelemetry Authors -// SPDX-License-Identifier: Apache-2.0 - -package internal // import "go.opentelemetry.io/otel/internal" - -import ( - "math" - "unsafe" -) - -func BoolToRaw(b bool) uint64 { // nolint:revive // b is not a control flag. - if b { - return 1 - } - return 0 -} - -func RawToBool(r uint64) bool { - return r != 0 -} - -func Int64ToRaw(i int64) uint64 { - // Assumes original was a valid int64 (overflow not checked). - return uint64(i) // nolint: gosec -} - -func RawToInt64(r uint64) int64 { - // Assumes original was a valid int64 (overflow not checked). - return int64(r) // nolint: gosec -} - -func Float64ToRaw(f float64) uint64 { - return math.Float64bits(f) -} - -func RawToFloat64(r uint64) float64 { - return math.Float64frombits(r) -} - -func RawPtrToFloat64Ptr(r *uint64) *float64 { - // Assumes original was a valid *float64 (overflow not checked). - return (*float64)(unsafe.Pointer(r)) // nolint: gosec -} - -func RawPtrToInt64Ptr(r *uint64) *int64 { - // Assumes original was a valid *int64 (overflow not checked). 
- return (*int64)(unsafe.Pointer(r)) // nolint: gosec -} diff --git a/vendor/go.opentelemetry.io/otel/metric/asyncfloat64.go b/vendor/go.opentelemetry.io/otel/metric/asyncfloat64.go index f8435d8f..b7fc973a 100644 --- a/vendor/go.opentelemetry.io/otel/metric/asyncfloat64.go +++ b/vendor/go.opentelemetry.io/otel/metric/asyncfloat64.go @@ -106,7 +106,9 @@ type Float64ObservableUpDownCounterConfig struct { // NewFloat64ObservableUpDownCounterConfig returns a new // [Float64ObservableUpDownCounterConfig] with all opts applied. -func NewFloat64ObservableUpDownCounterConfig(opts ...Float64ObservableUpDownCounterOption) Float64ObservableUpDownCounterConfig { +func NewFloat64ObservableUpDownCounterConfig( + opts ...Float64ObservableUpDownCounterOption, +) Float64ObservableUpDownCounterConfig { var config Float64ObservableUpDownCounterConfig for _, o := range opts { config = o.applyFloat64ObservableUpDownCounter(config) @@ -239,12 +241,16 @@ type float64CallbackOpt struct { cback Float64Callback } -func (o float64CallbackOpt) applyFloat64ObservableCounter(cfg Float64ObservableCounterConfig) Float64ObservableCounterConfig { +func (o float64CallbackOpt) applyFloat64ObservableCounter( + cfg Float64ObservableCounterConfig, +) Float64ObservableCounterConfig { cfg.callbacks = append(cfg.callbacks, o.cback) return cfg } -func (o float64CallbackOpt) applyFloat64ObservableUpDownCounter(cfg Float64ObservableUpDownCounterConfig) Float64ObservableUpDownCounterConfig { +func (o float64CallbackOpt) applyFloat64ObservableUpDownCounter( + cfg Float64ObservableUpDownCounterConfig, +) Float64ObservableUpDownCounterConfig { cfg.callbacks = append(cfg.callbacks, o.cback) return cfg } diff --git a/vendor/go.opentelemetry.io/otel/metric/asyncint64.go b/vendor/go.opentelemetry.io/otel/metric/asyncint64.go index e079aaef..4404b71a 100644 --- a/vendor/go.opentelemetry.io/otel/metric/asyncint64.go +++ b/vendor/go.opentelemetry.io/otel/metric/asyncint64.go @@ -105,7 +105,9 @@ type Int64ObservableUpDownCounterConfig struct { // NewInt64ObservableUpDownCounterConfig returns a new // [Int64ObservableUpDownCounterConfig] with all opts applied. 
-func NewInt64ObservableUpDownCounterConfig(opts ...Int64ObservableUpDownCounterOption) Int64ObservableUpDownCounterConfig { +func NewInt64ObservableUpDownCounterConfig( + opts ...Int64ObservableUpDownCounterOption, +) Int64ObservableUpDownCounterConfig { var config Int64ObservableUpDownCounterConfig for _, o := range opts { config = o.applyInt64ObservableUpDownCounter(config) @@ -242,7 +244,9 @@ func (o int64CallbackOpt) applyInt64ObservableCounter(cfg Int64ObservableCounter return cfg } -func (o int64CallbackOpt) applyInt64ObservableUpDownCounter(cfg Int64ObservableUpDownCounterConfig) Int64ObservableUpDownCounterConfig { +func (o int64CallbackOpt) applyInt64ObservableUpDownCounter( + cfg Int64ObservableUpDownCounterConfig, +) Int64ObservableUpDownCounterConfig { cfg.callbacks = append(cfg.callbacks, o.cback) return cfg } diff --git a/vendor/go.opentelemetry.io/otel/metric/instrument.go b/vendor/go.opentelemetry.io/otel/metric/instrument.go index a535782e..9f48d5f1 100644 --- a/vendor/go.opentelemetry.io/otel/metric/instrument.go +++ b/vendor/go.opentelemetry.io/otel/metric/instrument.go @@ -63,7 +63,9 @@ func (o descOpt) applyFloat64ObservableCounter(c Float64ObservableCounterConfig) return c } -func (o descOpt) applyFloat64ObservableUpDownCounter(c Float64ObservableUpDownCounterConfig) Float64ObservableUpDownCounterConfig { +func (o descOpt) applyFloat64ObservableUpDownCounter( + c Float64ObservableUpDownCounterConfig, +) Float64ObservableUpDownCounterConfig { c.description = string(o) return c } @@ -98,7 +100,9 @@ func (o descOpt) applyInt64ObservableCounter(c Int64ObservableCounterConfig) Int return c } -func (o descOpt) applyInt64ObservableUpDownCounter(c Int64ObservableUpDownCounterConfig) Int64ObservableUpDownCounterConfig { +func (o descOpt) applyInt64ObservableUpDownCounter( + c Int64ObservableUpDownCounterConfig, +) Int64ObservableUpDownCounterConfig { c.description = string(o) return c } @@ -138,7 +142,9 @@ func (o unitOpt) applyFloat64ObservableCounter(c Float64ObservableCounterConfig) return c } -func (o unitOpt) applyFloat64ObservableUpDownCounter(c Float64ObservableUpDownCounterConfig) Float64ObservableUpDownCounterConfig { +func (o unitOpt) applyFloat64ObservableUpDownCounter( + c Float64ObservableUpDownCounterConfig, +) Float64ObservableUpDownCounterConfig { c.unit = string(o) return c } @@ -173,7 +179,9 @@ func (o unitOpt) applyInt64ObservableCounter(c Int64ObservableCounterConfig) Int return c } -func (o unitOpt) applyInt64ObservableUpDownCounter(c Int64ObservableUpDownCounterConfig) Int64ObservableUpDownCounterConfig { +func (o unitOpt) applyInt64ObservableUpDownCounter( + c Int64ObservableUpDownCounterConfig, +) Int64ObservableUpDownCounterConfig { c.unit = string(o) return c } diff --git a/vendor/go.opentelemetry.io/otel/metric/meter.go b/vendor/go.opentelemetry.io/otel/metric/meter.go index 14e08c24..fdd2a701 100644 --- a/vendor/go.opentelemetry.io/otel/metric/meter.go +++ b/vendor/go.opentelemetry.io/otel/metric/meter.go @@ -110,7 +110,10 @@ type Meter interface { // The name needs to conform to the OpenTelemetry instrument name syntax. // See the Instrument Name section of the package documentation for more // information. 
- Int64ObservableUpDownCounter(name string, options ...Int64ObservableUpDownCounterOption) (Int64ObservableUpDownCounter, error) + Int64ObservableUpDownCounter( + name string, + options ...Int64ObservableUpDownCounterOption, + ) (Int64ObservableUpDownCounter, error) // Int64ObservableGauge returns a new Int64ObservableGauge instrument // identified by name and configured with options. The instrument is used @@ -194,7 +197,10 @@ type Meter interface { // The name needs to conform to the OpenTelemetry instrument name syntax. // See the Instrument Name section of the package documentation for more // information. - Float64ObservableUpDownCounter(name string, options ...Float64ObservableUpDownCounterOption) (Float64ObservableUpDownCounter, error) + Float64ObservableUpDownCounter( + name string, + options ...Float64ObservableUpDownCounterOption, + ) (Float64ObservableUpDownCounter, error) // Float64ObservableGauge returns a new Float64ObservableGauge instrument // identified by name and configured with options. The instrument is used diff --git a/vendor/go.opentelemetry.io/otel/propagation/baggage.go b/vendor/go.opentelemetry.io/otel/propagation/baggage.go index 552263ba..ebda5026 100644 --- a/vendor/go.opentelemetry.io/otel/propagation/baggage.go +++ b/vendor/go.opentelemetry.io/otel/propagation/baggage.go @@ -28,7 +28,21 @@ func (b Baggage) Inject(ctx context.Context, carrier TextMapCarrier) { } // Extract returns a copy of parent with the baggage from the carrier added. +// If carrier implements [ValuesGetter] (e.g. [HeaderCarrier]), Values is invoked +// for multiple values extraction. Otherwise, Get is called. func (b Baggage) Extract(parent context.Context, carrier TextMapCarrier) context.Context { + if multiCarrier, ok := carrier.(ValuesGetter); ok { + return extractMultiBaggage(parent, multiCarrier) + } + return extractSingleBaggage(parent, carrier) +} + +// Fields returns the keys who's values are set with Inject. +func (b Baggage) Fields() []string { + return []string{baggageHeader} +} + +func extractSingleBaggage(parent context.Context, carrier TextMapCarrier) context.Context { bStr := carrier.Get(baggageHeader) if bStr == "" { return parent @@ -41,7 +55,23 @@ func (b Baggage) Extract(parent context.Context, carrier TextMapCarrier) context return baggage.ContextWithBaggage(parent, bag) } -// Fields returns the keys who's values are set with Inject. -func (b Baggage) Fields() []string { - return []string{baggageHeader} +func extractMultiBaggage(parent context.Context, carrier ValuesGetter) context.Context { + bVals := carrier.Values(baggageHeader) + if len(bVals) == 0 { + return parent + } + var members []baggage.Member + for _, bStr := range bVals { + currBag, err := baggage.Parse(bStr) + if err != nil { + continue + } + members = append(members, currBag.Members()...) + } + + b, err := baggage.New(members...) + if err != nil || b.Len() == 0 { + return parent + } + return baggage.ContextWithBaggage(parent, b) } diff --git a/vendor/go.opentelemetry.io/otel/propagation/propagation.go b/vendor/go.opentelemetry.io/otel/propagation/propagation.go index 8c8286aa..5c8c26ea 100644 --- a/vendor/go.opentelemetry.io/otel/propagation/propagation.go +++ b/vendor/go.opentelemetry.io/otel/propagation/propagation.go @@ -9,6 +9,7 @@ import ( ) // TextMapCarrier is the storage medium used by a TextMapPropagator. +// See ValuesGetter for how a TextMapCarrier can get multiple values for a key. 
type TextMapCarrier interface { // DO NOT CHANGE: any modification will not be backwards compatible and // must never be done outside of a new major release. @@ -29,6 +30,18 @@ type TextMapCarrier interface { // must never be done outside of a new major release. } +// ValuesGetter can return multiple values for a single key, +// with contrast to TextMapCarrier.Get which returns a single value. +type ValuesGetter interface { + // DO NOT CHANGE: any modification will not be backwards compatible and + // must never be done outside of a new major release. + + // Values returns all values associated with the passed key. + Values(key string) []string + // DO NOT CHANGE: any modification will not be backwards compatible and + // must never be done outside of a new major release. +} + // MapCarrier is a TextMapCarrier that uses a map held in memory as a storage // medium for propagated key-value pairs. type MapCarrier map[string]string @@ -55,14 +68,25 @@ func (c MapCarrier) Keys() []string { return keys } -// HeaderCarrier adapts http.Header to satisfy the TextMapCarrier interface. +// HeaderCarrier adapts http.Header to satisfy the TextMapCarrier and ValuesGetter interfaces. type HeaderCarrier http.Header -// Get returns the value associated with the passed key. +// Compile time check that HeaderCarrier implements ValuesGetter. +var _ TextMapCarrier = HeaderCarrier{} + +// Compile time check that HeaderCarrier implements TextMapCarrier. +var _ ValuesGetter = HeaderCarrier{} + +// Get returns the first value associated with the passed key. func (hc HeaderCarrier) Get(key string) string { return http.Header(hc).Get(key) } +// Values returns all values associated with the passed key. +func (hc HeaderCarrier) Values(key string) []string { + return http.Header(hc).Values(key) +} + // Set stores the key-value pair. func (hc HeaderCarrier) Set(key string, value string) { http.Header(hc).Set(key, value) @@ -89,6 +113,8 @@ type TextMapPropagator interface { // must never be done outside of a new major release. // Extract reads cross-cutting concerns from the carrier into a Context. + // Implementations may check if the carrier implements ValuesGetter, + // to support extraction of multiple values per key. Extract(ctx context.Context, carrier TextMapCarrier) context.Context // DO NOT CHANGE: any modification will not be backwards compatible and // must never be done outside of a new major release. 
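The propagation changes above let Baggage.Extract consult the new ValuesGetter interface when the carrier (such as HeaderCarrier) can return every value stored under a key, merging the members of all `baggage` headers instead of reading only the first one. A minimal sketch of that behaviour against the public API shown in this diff; the header values are illustrative only:

package main

import (
	"context"
	"fmt"
	"net/http"

	"go.opentelemetry.io/otel/baggage"
	"go.opentelemetry.io/otel/propagation"
)

func main() {
	// Two separate baggage headers on the same request; HeaderCarrier.Values
	// returns both, so Extract merges the members from each header.
	h := http.Header{}
	h.Add("baggage", "user_id=42")
	h.Add("baggage", "region=eu-west-1")

	prop := propagation.Baggage{}
	ctx := prop.Extract(context.Background(), propagation.HeaderCarrier(h))

	bag := baggage.FromContext(ctx)
	fmt.Println(bag.Member("user_id").Value()) // 42
	fmt.Println(bag.Member("region").Value())  // eu-west-1
}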
diff --git a/vendor/go.opentelemetry.io/otel/renovate.json b/vendor/go.opentelemetry.io/otel/renovate.json index a6fa353f..fa5acf2d 100644 --- a/vendor/go.opentelemetry.io/otel/renovate.json +++ b/vendor/go.opentelemetry.io/otel/renovate.json @@ -1,7 +1,8 @@ { "$schema": "https://docs.renovatebot.com/renovate-schema.json", "extends": [ - "config:best-practices" + "config:best-practices", + "helpers:pinGitHubActionDigestsToSemver" ], "ignorePaths": [], "labels": ["Skip Changelog", "dependencies"], @@ -25,6 +26,10 @@ { "matchPackageNames": ["golang.org/x/**"], "groupName": "golang.org/x" + }, + { + "matchPackageNames": ["go.opentelemetry.io/otel/sdk/log/logtest"], + "enabled": false } ] } diff --git a/vendor/go.opentelemetry.io/otel/semconv/internal/v2/http.go b/vendor/go.opentelemetry.io/otel/semconv/internal/v2/http.go index 09e094de..3709ef09 100644 --- a/vendor/go.opentelemetry.io/otel/semconv/internal/v2/http.go +++ b/vendor/go.opentelemetry.io/otel/semconv/internal/v2/http.go @@ -1,6 +1,7 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 +// Package internal provides common semconv functionality. package internal // import "go.opentelemetry.io/otel/semconv/internal/v2" import ( diff --git a/vendor/go.opentelemetry.io/otel/trace/auto.go b/vendor/go.opentelemetry.io/otel/trace/auto.go index 7e291002..d90af8f6 100644 --- a/vendor/go.opentelemetry.io/otel/trace/auto.go +++ b/vendor/go.opentelemetry.io/otel/trace/auto.go @@ -57,14 +57,15 @@ type autoTracer struct { var _ Tracer = autoTracer{} func (t autoTracer) Start(ctx context.Context, name string, opts ...SpanStartOption) (context.Context, Span) { - var psc SpanContext + var psc, sc SpanContext sampled := true span := new(autoSpan) // Ask eBPF for sampling decision and span context info. - t.start(ctx, span, &psc, &sampled, &span.spanContext) + t.start(ctx, span, &psc, &sampled, &sc) span.sampled.Store(sampled) + span.spanContext = sc ctx = ContextWithSpan(ctx, span) diff --git a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/span.go b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/span.go index 3c5e1cdb..e7ca62c6 100644 --- a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/span.go +++ b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/span.go @@ -251,13 +251,20 @@ func (s *Span) UnmarshalJSON(data []byte) error { type SpanFlags int32 const ( + // SpanFlagsTraceFlagsMask is a mask for trace-flags. + // // Bits 0-7 are used for trace flags. SpanFlagsTraceFlagsMask SpanFlags = 255 - // Bits 8 and 9 are used to indicate that the parent span or link span is remote. - // Bit 8 (`HAS_IS_REMOTE`) indicates whether the value is known. - // Bit 9 (`IS_REMOTE`) indicates whether the span or link is remote. + // SpanFlagsContextHasIsRemoteMask is a mask for HAS_IS_REMOTE status. + // + // Bits 8 and 9 are used to indicate that the parent span or link span is + // remote. Bit 8 (`HAS_IS_REMOTE`) indicates whether the value is known. SpanFlagsContextHasIsRemoteMask SpanFlags = 256 - // SpanFlagsContextHasIsRemoteMask indicates the Span is remote. + // SpanFlagsContextIsRemoteMask is a mask for IS_REMOTE status. + // + // Bits 8 and 9 are used to indicate that the parent span or link span is + // remote. Bit 9 (`IS_REMOTE`) indicates whether the span or link is + // remote. 
SpanFlagsContextIsRemoteMask SpanFlags = 512 ) @@ -266,27 +273,31 @@ const ( type SpanKind int32 const ( - // Indicates that the span represents an internal operation within an application, - // as opposed to an operation happening at the boundaries. Default value. + // SpanKindInternal indicates that the span represents an internal + // operation within an application, as opposed to an operation happening at + // the boundaries. SpanKindInternal SpanKind = 1 - // Indicates that the span covers server-side handling of an RPC or other - // remote network request. + // SpanKindServer indicates that the span covers server-side handling of an + // RPC or other remote network request. SpanKindServer SpanKind = 2 - // Indicates that the span describes a request to some remote service. + // SpanKindClient indicates that the span describes a request to some + // remote service. SpanKindClient SpanKind = 3 - // Indicates that the span describes a producer sending a message to a broker. - // Unlike CLIENT and SERVER, there is often no direct critical path latency relationship - // between producer and consumer spans. A PRODUCER span ends when the message was accepted - // by the broker while the logical processing of the message might span a much longer time. + // SpanKindProducer indicates that the span describes a producer sending a + // message to a broker. Unlike SpanKindClient and SpanKindServer, there is + // often no direct critical path latency relationship between producer and + // consumer spans. A SpanKindProducer span ends when the message was + // accepted by the broker while the logical processing of the message might + // span a much longer time. SpanKindProducer SpanKind = 4 - // Indicates that the span describes consumer receiving a message from a broker. - // Like the PRODUCER kind, there is often no direct critical path latency relationship - // between producer and consumer spans. + // SpanKindConsumer indicates that the span describes a consumer receiving + // a message from a broker. Like SpanKindProducer, there is often no direct + // critical path latency relationship between producer and consumer spans. SpanKindConsumer SpanKind = 5 ) -// Event is a time-stamped annotation of the span, consisting of user-supplied -// text description and key-value pairs. +// SpanEvent is a time-stamped annotation of the span, consisting of +// user-supplied text description and key-value pairs. type SpanEvent struct { // time_unix_nano is the time the event occurred. Time time.Time `json:"timeUnixNano,omitempty"` @@ -369,10 +380,11 @@ func (se *SpanEvent) UnmarshalJSON(data []byte) error { return nil } -// A pointer from the current span to another span in the same trace or in a -// different trace. For example, this can be used in batching operations, -// where a single batch handler processes multiple requests from different -// traces or when the handler receives a request from a different project. +// SpanLink is a reference from the current span to another span in the same +// trace or in a different trace. For example, this can be used in batching +// operations, where a single batch handler processes multiple requests from +// different traces or when the handler receives a request from a different +// project. type SpanLink struct { // A unique identifier of a trace that this linked span is part of. The ID is a // 16-byte array. 
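The renamed SpanFlags constants above document the bit layout used by the internal telemetry span representation: bits 0-7 carry the W3C trace flags, bit 8 records whether the remote status is known, and bit 9 carries the remote bit itself. A small standalone sketch of decoding those bits, with the constant values copied locally because the telemetry package is internal and not importable:

package main

import "fmt"

// Values mirrored from the internal telemetry package in the diff above.
const (
	spanFlagsTraceFlagsMask  int32 = 255 // bits 0-7: trace flags
	spanFlagsHasIsRemoteMask int32 = 256 // bit 8: remote status is known
	spanFlagsIsRemoteMask    int32 = 512 // bit 9: parent/link span is remote
)

func main() {
	flags := int32(0x01) | spanFlagsHasIsRemoteMask | spanFlagsIsRemoteMask

	fmt.Printf("trace flags: %#02x\n", flags&spanFlagsTraceFlagsMask)
	fmt.Println("remote known:", flags&spanFlagsHasIsRemoteMask != 0)
	fmt.Println("is remote:", flags&spanFlagsIsRemoteMask != 0)
}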
diff --git a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/status.go b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/status.go index 1d013a8f..1039bf40 100644 --- a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/status.go +++ b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/status.go @@ -3,17 +3,19 @@ package telemetry // import "go.opentelemetry.io/otel/trace/internal/telemetry" +// StatusCode is the status of a Span. +// // For the semantics of status codes see // https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/api.md#set-status type StatusCode int32 const ( - // The default status. + // StatusCodeUnset is the default status. StatusCodeUnset StatusCode = 0 - // The Span has been validated by an Application developer or Operator to - // have completed successfully. + // StatusCodeOK is used when the Span has been validated by an Application + // developer or Operator to have completed successfully. StatusCodeOK StatusCode = 1 - // The Span contains an error. + // StatusCodeError is used when the Span contains an error. StatusCodeError StatusCode = 2 ) @@ -30,7 +32,7 @@ func (s StatusCode) String() string { return "" } -// The Status type defines a logical error model that is suitable for different +// Status defines a logical error model that is suitable for different // programming environments, including REST APIs and RPC APIs. type Status struct { // A developer-facing human readable error message. diff --git a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/traces.go b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/traces.go index b0394070..e5f10767 100644 --- a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/traces.go +++ b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/traces.go @@ -71,7 +71,7 @@ func (td *Traces) UnmarshalJSON(data []byte) error { return nil } -// A collection of ScopeSpans from a Resource. +// ResourceSpans is a collection of ScopeSpans from a Resource. type ResourceSpans struct { // The resource for the spans in this message. // If this field is not set then no resource info is known. @@ -128,7 +128,7 @@ func (rs *ResourceSpans) UnmarshalJSON(data []byte) error { return nil } -// A collection of Spans produced by an InstrumentationScope. +// ScopeSpans is a collection of Spans produced by an InstrumentationScope. type ScopeSpans struct { // The instrumentation scope information for the spans in this message. 
// Semantically when InstrumentationScope isn't set, it is equivalent with diff --git a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/value.go b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/value.go index 7251492d..ae9ce102 100644 --- a/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/value.go +++ b/vendor/go.opentelemetry.io/otel/trace/internal/telemetry/value.go @@ -316,7 +316,7 @@ func (v Value) String() string { case ValueKindBool: return strconv.FormatBool(v.asBool()) case ValueKindBytes: - return fmt.Sprint(v.asBytes()) + return string(v.asBytes()) case ValueKindMap: return fmt.Sprint(v.asMap()) case ValueKindSlice: diff --git a/vendor/go.opentelemetry.io/otel/trace/noop.go b/vendor/go.opentelemetry.io/otel/trace/noop.go index c8b1ae5d..0f56e4db 100644 --- a/vendor/go.opentelemetry.io/otel/trace/noop.go +++ b/vendor/go.opentelemetry.io/otel/trace/noop.go @@ -95,6 +95,8 @@ var autoInstEnabled = new(bool) // tracerProvider return a noopTracerProvider if autoEnabled is false, // otherwise it will return a TracerProvider from the sdk package used in // auto-instrumentation. +// +//go:noinline func (noopSpan) tracerProvider(autoEnabled *bool) TracerProvider { if *autoEnabled { return newAutoTracerProvider() diff --git a/vendor/go.opentelemetry.io/otel/verify_readmes.sh b/vendor/go.opentelemetry.io/otel/verify_readmes.sh deleted file mode 100644 index 1e87855e..00000000 --- a/vendor/go.opentelemetry.io/otel/verify_readmes.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -# Copyright The OpenTelemetry Authors -# SPDX-License-Identifier: Apache-2.0 - -set -euo pipefail - -dirs=$(find . -type d -not -path "*/internal*" -not -path "*/test*" -not -path "*/example*" -not -path "*/.*" | sort) - -missingReadme=false -for dir in $dirs; do - if [ ! -f "$dir/README.md" ]; then - echo "couldn't find README.md for $dir" - missingReadme=true - fi -done - -if [ "$missingReadme" = true ] ; then - echo "Error: some READMEs couldn't be found." - exit 1 -fi diff --git a/vendor/go.opentelemetry.io/otel/version.go b/vendor/go.opentelemetry.io/otel/version.go index d5fa71f6..ac3c0b15 100644 --- a/vendor/go.opentelemetry.io/otel/version.go +++ b/vendor/go.opentelemetry.io/otel/version.go @@ -5,5 +5,5 @@ package otel // import "go.opentelemetry.io/otel" // Version is the current release version of OpenTelemetry in use. 
func Version() string { - return "1.35.0" + return "1.36.0" } diff --git a/vendor/go.opentelemetry.io/otel/versions.yaml b/vendor/go.opentelemetry.io/otel/versions.yaml index 2b4cb4b4..79f82f3d 100644 --- a/vendor/go.opentelemetry.io/otel/versions.yaml +++ b/vendor/go.opentelemetry.io/otel/versions.yaml @@ -3,7 +3,7 @@ module-sets: stable-v1: - version: v1.35.0 + version: v1.36.0 modules: - go.opentelemetry.io/otel - go.opentelemetry.io/otel/bridge/opencensus @@ -23,11 +23,11 @@ module-sets: - go.opentelemetry.io/otel/sdk/metric - go.opentelemetry.io/otel/trace experimental-metrics: - version: v0.57.0 + version: v0.58.0 modules: - go.opentelemetry.io/otel/exporters/prometheus experimental-logs: - version: v0.11.0 + version: v0.12.0 modules: - go.opentelemetry.io/otel/log - go.opentelemetry.io/otel/sdk/log @@ -40,4 +40,6 @@ module-sets: - go.opentelemetry.io/otel/schema excluded-modules: - go.opentelemetry.io/otel/internal/tools + - go.opentelemetry.io/otel/log/logtest + - go.opentelemetry.io/otel/sdk/log/logtest - go.opentelemetry.io/otel/trace/internal/telemetry/test diff --git a/vendor/golang.org/x/crypto/bcrypt/bcrypt.go b/vendor/golang.org/x/crypto/bcrypt/bcrypt.go index dc931187..3e7f8df8 100644 --- a/vendor/golang.org/x/crypto/bcrypt/bcrypt.go +++ b/vendor/golang.org/x/crypto/bcrypt/bcrypt.go @@ -50,7 +50,7 @@ func (ih InvalidHashPrefixError) Error() string { type InvalidCostError int func (ic InvalidCostError) Error() string { - return fmt.Sprintf("crypto/bcrypt: cost %d is outside allowed range (%d,%d)", int(ic), MinCost, MaxCost) + return fmt.Sprintf("crypto/bcrypt: cost %d is outside allowed inclusive range %d..%d", int(ic), MinCost, MaxCost) } const ( diff --git a/vendor/golang.org/x/mod/semver/semver.go b/vendor/golang.org/x/mod/semver/semver.go index 9a2dfd33..628f8fd6 100644 --- a/vendor/golang.org/x/mod/semver/semver.go +++ b/vendor/golang.org/x/mod/semver/semver.go @@ -22,7 +22,10 @@ // as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0. package semver -import "sort" +import ( + "slices" + "strings" +) // parsed returns the parsed form of a semantic version string. type parsed struct { @@ -154,19 +157,22 @@ func Max(v, w string) string { // ByVersion implements [sort.Interface] for sorting semantic version strings. type ByVersion []string -func (vs ByVersion) Len() int { return len(vs) } -func (vs ByVersion) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] } -func (vs ByVersion) Less(i, j int) bool { - cmp := Compare(vs[i], vs[j]) - if cmp != 0 { - return cmp < 0 - } - return vs[i] < vs[j] +func (vs ByVersion) Len() int { return len(vs) } +func (vs ByVersion) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] } +func (vs ByVersion) Less(i, j int) bool { return compareVersion(vs[i], vs[j]) < 0 } + +// Sort sorts a list of semantic version strings using [Compare] and falls back +// to use [strings.Compare] if both versions are considered equal. +func Sort(list []string) { + slices.SortFunc(list, compareVersion) } -// Sort sorts a list of semantic version strings using [ByVersion]. 
-func Sort(list []string) { - sort.Sort(ByVersion(list)) +func compareVersion(a, b string) int { + cmp := Compare(a, b) + if cmp != 0 { + return cmp + } + return strings.Compare(a, b) } func parse(v string) (p parsed, ok bool) { diff --git a/vendor/golang.org/x/sync/errgroup/errgroup.go b/vendor/golang.org/x/sync/errgroup/errgroup.go index cfafed5b..cb6bb9ad 100644 --- a/vendor/golang.org/x/sync/errgroup/errgroup.go +++ b/vendor/golang.org/x/sync/errgroup/errgroup.go @@ -76,10 +76,8 @@ func (g *Group) Wait() error { } // Go calls the given function in a new goroutine. -// The first call to Go must happen before a Wait. -// It blocks until the new goroutine can be added without the number of -// active goroutines in the group exceeding the configured limit. // +// The first call to Go must happen before a Wait. // It blocks until the new goroutine can be added without the number of // goroutines in the group exceeding the configured limit. // @@ -185,8 +183,9 @@ type PanicError struct { } func (p PanicError) Error() string { - // A Go Error method conventionally does not include a stack dump, so omit it - // here. (Callers who care can extract it from the Stack field.) + if len(p.Stack) > 0 { + return fmt.Sprintf("recovered from errgroup.Group: %v\n%s", p.Recovered, p.Stack) + } return fmt.Sprintf("recovered from errgroup.Group: %v", p.Recovered) } diff --git a/vendor/gorm.io/driver/mysql/migrator.go b/vendor/gorm.io/driver/mysql/migrator.go index e266cfdc..018368d1 100644 --- a/vendor/gorm.io/driver/mysql/migrator.go +++ b/vendor/gorm.io/driver/mysql/migrator.go @@ -109,7 +109,7 @@ func (m Migrator) MigrateColumnUnique(value interface{}, field *schema.Field, co return err } } - if field.UniqueIndex != "" { + if field.UniqueIndex != "" && !queryTx.Migrator().HasIndex(value, field.UniqueIndex) { if err := execTx.Migrator().CreateIndex(value, field.UniqueIndex); err != nil { return err } diff --git a/vendor/gorm.io/driver/mysql/mysql.go b/vendor/gorm.io/driver/mysql/mysql.go index 8b6ef3db..d203577f 100644 --- a/vendor/gorm.io/driver/mysql/mysql.go +++ b/vendor/gorm.io/driver/mysql/mysql.go @@ -185,7 +185,9 @@ func (dialector Dialector) Initialize(db *gorm.DB) (err error) { callbacks.RegisterDefaultCallbacks(db, callbackConfig) for k, v := range dialector.ClauseBuilders() { - db.ClauseBuilders[k] = v + if _, ok := db.ClauseBuilders[k]; !ok { + db.ClauseBuilders[k] = v + } } return } diff --git a/vendor/gorm.io/driver/sqlite/ddlmod.go b/vendor/gorm.io/driver/sqlite/ddlmod.go index c839cd79..e7e58597 100644 --- a/vendor/gorm.io/driver/sqlite/ddlmod.go +++ b/vendor/gorm.io/driver/sqlite/ddlmod.go @@ -209,8 +209,12 @@ func (d *ddl) renameTable(dst, src string) error { return nil } +func compileConstraintRegexp(name string) *regexp.Regexp { + return regexp.MustCompile("^(?i:CONSTRAINT)\\s+[\"`]?" + regexp.QuoteMeta(name) + "[\"`\\s]") +} + func (d *ddl) addConstraint(name string, sql string) { - reg := regexp.MustCompile("^CONSTRAINT [\"`]?" + regexp.QuoteMeta(name) + "[\"` ]") + reg := compileConstraintRegexp(name) for i := 0; i < len(d.fields); i++ { if reg.MatchString(d.fields[i]) { @@ -223,7 +227,7 @@ func (d *ddl) addConstraint(name string, sql string) { } func (d *ddl) removeConstraint(name string) bool { - reg := regexp.MustCompile("^CONSTRAINT [\"`]?" 
+ regexp.QuoteMeta(name) + "[\"` ]") + reg := compileConstraintRegexp(name) for i := 0; i < len(d.fields); i++ { if reg.MatchString(d.fields[i]) { @@ -235,7 +239,7 @@ func (d *ddl) removeConstraint(name string) bool { } func (d *ddl) hasConstraint(name string) bool { - reg := regexp.MustCompile("^CONSTRAINT [\"`]?" + regexp.QuoteMeta(name) + "[\"` ]") + reg := compileConstraintRegexp(name) for _, f := range d.fields { if reg.MatchString(f) { diff --git a/vendor/gorm.io/driver/sqlite/sqlite.go b/vendor/gorm.io/driver/sqlite/sqlite.go index 0543d0b1..418842aa 100644 --- a/vendor/gorm.io/driver/sqlite/sqlite.go +++ b/vendor/gorm.io/driver/sqlite/sqlite.go @@ -76,7 +76,9 @@ func (dialector Dialector) Initialize(db *gorm.DB) (err error) { } for k, v := range dialector.ClauseBuilders() { - db.ClauseBuilders[k] = v + if _, ok := db.ClauseBuilders[k]; !ok { + db.ClauseBuilders[k] = v + } } return } diff --git a/vendor/modules.txt b/vendor/modules.txt index b18b786a..b269cd6f 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -12,7 +12,7 @@ github.com/asaskevich/govalidator # github.com/beorn7/perks v1.0.1 ## explicit; go 1.11 github.com/beorn7/perks/quantile -# github.com/bradleyfalzon/ghinstallation/v2 v2.15.0 +# github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 ## explicit; go 1.23.0 github.com/bradleyfalzon/ghinstallation/v2 # github.com/cespare/xxhash/v2 v2.3.0 @@ -36,7 +36,7 @@ github.com/davecgh/go-spew/spew # github.com/felixge/httpsnoop v1.0.4 ## explicit; go 1.13 github.com/felixge/httpsnoop -# github.com/go-logr/logr v1.4.2 +# github.com/go-logr/logr v1.4.3 ## explicit; go 1.18 github.com/go-logr/logr github.com/go-logr/logr/funcr @@ -88,7 +88,7 @@ github.com/go-openapi/swag # github.com/go-openapi/validate v0.24.0 ## explicit; go 1.20 github.com/go-openapi/validate -# github.com/go-sql-driver/mysql v1.9.2 +# github.com/go-sql-driver/mysql v1.9.3 ## explicit; go 1.21.0 github.com/go-sql-driver/mysql # github.com/golang-jwt/jwt/v4 v4.5.2 @@ -97,9 +97,9 @@ github.com/golang-jwt/jwt/v4 # github.com/golang-jwt/jwt/v5 v5.2.2 ## explicit; go 1.18 github.com/golang-jwt/jwt/v5 -# github.com/google/go-github/v71 v71.0.0 +# github.com/google/go-github/v72 v72.0.0 ## explicit; go 1.23.0 -github.com/google/go-github/v71/github +github.com/google/go-github/v72/github # github.com/google/go-querystring v1.1.0 ## explicit; go 1.10 github.com/google/go-querystring/query @@ -208,8 +208,8 @@ github.com/prometheus/client_golang/prometheus/promhttp/internal # github.com/prometheus/client_model v0.6.2 ## explicit; go 1.22.0 github.com/prometheus/client_model/go -# github.com/prometheus/common v0.63.0 -## explicit; go 1.21 +# github.com/prometheus/common v0.64.0 +## explicit; go 1.23.0 github.com/prometheus/common/expfmt github.com/prometheus/common/model # github.com/prometheus/procfs v0.16.1 @@ -239,7 +239,7 @@ github.com/stretchr/testify/suite # github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 ## explicit; go 1.18 github.com/teris-io/shortid -# go.mongodb.org/mongo-driver v1.17.3 +# go.mongodb.org/mongo-driver v1.17.4 ## explicit; go 1.18 go.mongodb.org/mongo-driver/bson go.mongodb.org/mongo-driver/bson/bsoncodec @@ -252,14 +252,13 @@ go.mongodb.org/mongo-driver/x/bsonx/bsoncore ## explicit; go 1.22.0 go.opentelemetry.io/auto/sdk go.opentelemetry.io/auto/sdk/internal/telemetry -# go.opentelemetry.io/otel v1.35.0 -## explicit; go 1.22.0 +# go.opentelemetry.io/otel v1.36.0 +## explicit; go 1.23.0 go.opentelemetry.io/otel go.opentelemetry.io/otel/attribute 
+go.opentelemetry.io/otel/attribute/internal go.opentelemetry.io/otel/baggage go.opentelemetry.io/otel/codes -go.opentelemetry.io/otel/internal -go.opentelemetry.io/otel/internal/attribute go.opentelemetry.io/otel/internal/baggage go.opentelemetry.io/otel/internal/global go.opentelemetry.io/otel/propagation @@ -267,17 +266,17 @@ go.opentelemetry.io/otel/semconv/internal/v2 go.opentelemetry.io/otel/semconv/v1.17.0 go.opentelemetry.io/otel/semconv/v1.17.0/httpconv go.opentelemetry.io/otel/semconv/v1.26.0 -# go.opentelemetry.io/otel/metric v1.35.0 -## explicit; go 1.22.0 +# go.opentelemetry.io/otel/metric v1.36.0 +## explicit; go 1.23.0 go.opentelemetry.io/otel/metric go.opentelemetry.io/otel/metric/embedded -# go.opentelemetry.io/otel/trace v1.35.0 -## explicit; go 1.22.0 +# go.opentelemetry.io/otel/trace v1.36.0 +## explicit; go 1.23.0 go.opentelemetry.io/otel/trace go.opentelemetry.io/otel/trace/embedded go.opentelemetry.io/otel/trace/internal/telemetry go.opentelemetry.io/otel/trace/noop -# golang.org/x/crypto v0.38.0 +# golang.org/x/crypto v0.39.0 ## explicit; go 1.23.0 golang.org/x/crypto/bcrypt golang.org/x/crypto/blowfish @@ -286,10 +285,10 @@ golang.org/x/crypto/chacha20poly1305 golang.org/x/crypto/hkdf golang.org/x/crypto/internal/alias golang.org/x/crypto/internal/poly1305 -# golang.org/x/mod v0.24.0 +# golang.org/x/mod v0.25.0 ## explicit; go 1.23.0 golang.org/x/mod/semver -# golang.org/x/net v0.40.0 +# golang.org/x/net v0.41.0 ## explicit; go 1.23.0 golang.org/x/net/internal/socks golang.org/x/net/proxy @@ -297,7 +296,7 @@ golang.org/x/net/proxy ## explicit; go 1.23.0 golang.org/x/oauth2 golang.org/x/oauth2/internal -# golang.org/x/sync v0.14.0 +# golang.org/x/sync v0.15.0 ## explicit; go 1.23.0 golang.org/x/sync/errgroup # golang.org/x/sys v0.33.0 @@ -305,7 +304,7 @@ golang.org/x/sync/errgroup golang.org/x/sys/cpu golang.org/x/sys/unix golang.org/x/sys/windows -# golang.org/x/text v0.25.0 +# golang.org/x/text v0.26.0 ## explicit; go 1.23.0 golang.org/x/text/cases golang.org/x/text/internal @@ -359,10 +358,10 @@ gopkg.in/yaml.v3 # gorm.io/datatypes v1.2.5 ## explicit; go 1.19 gorm.io/datatypes -# gorm.io/driver/mysql v1.5.7 -## explicit; go 1.14 +# gorm.io/driver/mysql v1.6.0 +## explicit; go 1.18 gorm.io/driver/mysql -# gorm.io/driver/sqlite v1.5.7 +# gorm.io/driver/sqlite v1.6.0 ## explicit; go 1.20 gorm.io/driver/sqlite # gorm.io/gorm v1.30.0 From 499fbde60cfa0eaddf4d5901d873545bba52fe36 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 17 Jun 2025 22:37:18 +0000 Subject: [PATCH 105/179] Add a rudimentary filter option when listing entities This change adds the ability to filter the list of entities returned by the API by entity owner, name or endpoint, depending on the entity type. 
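
For illustration, a minimal sketch of how the new filters are intended to be used, both at the store layer and through the generated API client. The owner/name/endpoint values below are placeholders, and the store, apiCli and authToken objects stand in for the ones already used elsewhere in this patch; an empty filter keeps the previous behaviour and returns every entity.

    // Store layer: pass a filter struct; zero-value fields are ignored,
    // so only the fields you set narrow the result.
    repos, err := store.ListRepositories(ctx, params.RepositoryFilter{
        Owner:    "example-owner", // placeholder
        Name:     "example-repo",  // placeholder
        Endpoint: "github.com",    // placeholder endpoint name
    })
    if err != nil {
        return fmt.Errorf("listing repositories: %w", err)
    }

    // Generated API client: the list parameters gained optional
    // owner, name and endpoint query parameters.
    owner := "example-owner" // placeholder
    name := "example-repo"   // placeholder
    listReposReq := apiClientRepos.NewListReposParams()
    listReposReq.Owner = &owner
    listReposReq.Name = &name
    response, err := apiCli.Repositories.ListRepos(listReposReq, authToken)
    if err != nil {
        return err
    }
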
Signed-off-by: Gabriel Adrian Samfira --- apiserver/controllers/enterprises.go | 19 +++- apiserver/controllers/organizations.go | 19 +++- apiserver/controllers/repositories.go | 26 ++++- apiserver/swagger.yaml | 31 ++++++ .../list_enterprises_parameters.go | 69 ++++++++++++ client/organizations/list_orgs_parameters.go | 69 ++++++++++++ client/repositories/list_repos_parameters.go | 103 ++++++++++++++++++ cmd/garm-cli/cmd/enterprise.go | 5 + cmd/garm-cli/cmd/organization.go | 6 + cmd/garm-cli/cmd/repository.go | 7 ++ database/common/mocks/Store.go | 54 ++++----- database/common/store.go | 6 +- database/sql/enterprise.go | 12 +- database/sql/enterprise_test.go | 58 +++++++++- database/sql/organizations.go | 14 ++- database/sql/organizations_test.go | 60 +++++++++- database/sql/repositories.go | 15 ++- database/sql/repositories_test.go | 87 ++++++++++++++- internal/testing/testing.go | 25 +++++ params/params.go | 16 +++ runner/enterprises.go | 4 +- runner/enterprises_test.go | 68 +++++++++++- runner/metrics/enterprise.go | 3 +- runner/metrics/organization.go | 3 +- runner/metrics/repository.go | 3 +- runner/organizations.go | 4 +- runner/organizations_test.go | 68 +++++++++++- runner/repositories.go | 4 +- runner/repositories_test.go | 75 ++++++++++++- runner/runner.go | 6 +- workers/cache/cache.go | 6 +- workers/entity/controller.go | 7 +- 32 files changed, 879 insertions(+), 73 deletions(-) diff --git a/apiserver/controllers/enterprises.go b/apiserver/controllers/enterprises.go index 9ce278cd..b4b3e528 100644 --- a/apiserver/controllers/enterprises.go +++ b/apiserver/controllers/enterprises.go @@ -66,13 +66,30 @@ func (a *APIController) CreateEnterpriseHandler(w http.ResponseWriter, r *http.R // // List all enterprises. // +// Parameters: +// + name: name +// description: Exact enterprise name to filter by +// type: string +// in: query +// required: false +// +// + name: endpoint +// description: Exact endpoint name to filter by +// type: string +// in: query +// required: false +// // Responses: // 200: Enterprises // default: APIErrorResponse func (a *APIController) ListEnterprisesHandler(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - enterprise, err := a.r.ListEnterprises(ctx) + filter := runnerParams.EnterpriseFilter{ + Name: r.URL.Query().Get("name"), + Endpoint: r.URL.Query().Get("endpoint"), + } + enterprise, err := a.r.ListEnterprises(ctx, filter) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing enterprise") handleError(ctx, w, err) diff --git a/apiserver/controllers/organizations.go b/apiserver/controllers/organizations.go index 86f3c5d6..9089f440 100644 --- a/apiserver/controllers/organizations.go +++ b/apiserver/controllers/organizations.go @@ -67,13 +67,30 @@ func (a *APIController) CreateOrgHandler(w http.ResponseWriter, r *http.Request) // // List organizations. 
// +// Parameters: +// + name: name +// description: Exact organization name to filter by +// type: string +// in: query +// required: false +// +// + name: endpoint +// description: Exact endpoint name to filter by +// type: string +// in: query +// required: false +// // Responses: // 200: Organizations // default: APIErrorResponse func (a *APIController) ListOrgsHandler(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - orgs, err := a.r.ListOrganizations(ctx) + filter := runnerParams.OrganizationFilter{ + Name: r.URL.Query().Get("name"), + Endpoint: r.URL.Query().Get("endpoint"), + } + orgs, err := a.r.ListOrganizations(ctx, filter) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing orgs") handleError(ctx, w, err) diff --git a/apiserver/controllers/repositories.go b/apiserver/controllers/repositories.go index 2eea0001..f3675790 100644 --- a/apiserver/controllers/repositories.go +++ b/apiserver/controllers/repositories.go @@ -67,13 +67,37 @@ func (a *APIController) CreateRepoHandler(w http.ResponseWriter, r *http.Request // // List repositories. // +// Parameters: +// + name: owner +// description: Exact owner name to filter by +// type: string +// in: query +// required: false +// +// + name: name +// description: Exact repository name to filter by +// type: string +// in: query +// required: false +// +// + name: endpoint +// description: Exact endpoint name to filter by +// type: string +// in: query +// required: false +// // Responses: // 200: Repositories // default: APIErrorResponse func (a *APIController) ListReposHandler(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - repos, err := a.r.ListRepositories(ctx) + filter := runnerParams.RepositoryFilter{ + Name: r.URL.Query().Get("name"), + Owner: r.URL.Query().Get("owner"), + Endpoint: r.URL.Query().Get("endpoint"), + } + repos, err := a.r.ListRepositories(ctx, filter) if err != nil { slog.With(slog.Any("error", err)).ErrorContext(ctx, "listing repositories") handleError(ctx, w, err) diff --git a/apiserver/swagger.yaml b/apiserver/swagger.yaml index 66e7a655..bf02a2d7 100644 --- a/apiserver/swagger.yaml +++ b/apiserver/swagger.yaml @@ -418,6 +418,15 @@ paths: /enterprises: get: operationId: ListEnterprises + parameters: + - description: Exact enterprise name to filter by + in: query + name: name + type: string + - description: Exact endpoint name to filter by + in: query + name: endpoint + type: string responses: "200": description: Enterprises @@ -1254,6 +1263,15 @@ paths: /organizations: get: operationId: ListOrgs + parameters: + - description: Exact organization name to filter by + in: query + name: name + type: string + - description: Exact endpoint name to filter by + in: query + name: endpoint + type: string responses: "200": description: Organizations @@ -1754,6 +1772,19 @@ paths: /repositories: get: operationId: ListRepos + parameters: + - description: Exact owner name to filter by + in: query + name: owner + type: string + - description: Exact repository name to filter by + in: query + name: name + type: string + - description: Exact endpoint name to filter by + in: query + name: endpoint + type: string responses: "200": description: Repositories diff --git a/client/enterprises/list_enterprises_parameters.go b/client/enterprises/list_enterprises_parameters.go index 83291c5f..44ba108b 100644 --- a/client/enterprises/list_enterprises_parameters.go +++ b/client/enterprises/list_enterprises_parameters.go @@ -60,6 +60,19 @@ ListEnterprisesParams contains all the parameters to 
send to the API endpoint Typically these are written to a http.Request. */ type ListEnterprisesParams struct { + + /* Endpoint. + + Exact endpoint name to filter by + */ + Endpoint *string + + /* Name. + + Exact enterprise name to filter by + */ + Name *string + timeout time.Duration Context context.Context HTTPClient *http.Client @@ -113,6 +126,28 @@ func (o *ListEnterprisesParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } +// WithEndpoint adds the endpoint to the list enterprises params +func (o *ListEnterprisesParams) WithEndpoint(endpoint *string) *ListEnterprisesParams { + o.SetEndpoint(endpoint) + return o +} + +// SetEndpoint adds the endpoint to the list enterprises params +func (o *ListEnterprisesParams) SetEndpoint(endpoint *string) { + o.Endpoint = endpoint +} + +// WithName adds the name to the list enterprises params +func (o *ListEnterprisesParams) WithName(name *string) *ListEnterprisesParams { + o.SetName(name) + return o +} + +// SetName adds the name to the list enterprises params +func (o *ListEnterprisesParams) SetName(name *string) { + o.Name = name +} + // WriteToRequest writes these params to a swagger request func (o *ListEnterprisesParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -121,6 +156,40 @@ func (o *ListEnterprisesParams) WriteToRequest(r runtime.ClientRequest, reg strf } var res []error + if o.Endpoint != nil { + + // query param endpoint + var qrEndpoint string + + if o.Endpoint != nil { + qrEndpoint = *o.Endpoint + } + qEndpoint := qrEndpoint + if qEndpoint != "" { + + if err := r.SetQueryParam("endpoint", qEndpoint); err != nil { + return err + } + } + } + + if o.Name != nil { + + // query param name + var qrName string + + if o.Name != nil { + qrName = *o.Name + } + qName := qrName + if qName != "" { + + if err := r.SetQueryParam("name", qName); err != nil { + return err + } + } + } + if len(res) > 0 { return errors.CompositeValidationError(res...) } diff --git a/client/organizations/list_orgs_parameters.go b/client/organizations/list_orgs_parameters.go index 1441722f..af4c19c8 100644 --- a/client/organizations/list_orgs_parameters.go +++ b/client/organizations/list_orgs_parameters.go @@ -60,6 +60,19 @@ ListOrgsParams contains all the parameters to send to the API endpoint Typically these are written to a http.Request. */ type ListOrgsParams struct { + + /* Endpoint. + + Exact endpoint name to filter by + */ + Endpoint *string + + /* Name. 
+ + Exact organization name to filter by + */ + Name *string + timeout time.Duration Context context.Context HTTPClient *http.Client @@ -113,6 +126,28 @@ func (o *ListOrgsParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } +// WithEndpoint adds the endpoint to the list orgs params +func (o *ListOrgsParams) WithEndpoint(endpoint *string) *ListOrgsParams { + o.SetEndpoint(endpoint) + return o +} + +// SetEndpoint adds the endpoint to the list orgs params +func (o *ListOrgsParams) SetEndpoint(endpoint *string) { + o.Endpoint = endpoint +} + +// WithName adds the name to the list orgs params +func (o *ListOrgsParams) WithName(name *string) *ListOrgsParams { + o.SetName(name) + return o +} + +// SetName adds the name to the list orgs params +func (o *ListOrgsParams) SetName(name *string) { + o.Name = name +} + // WriteToRequest writes these params to a swagger request func (o *ListOrgsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -121,6 +156,40 @@ func (o *ListOrgsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Regi } var res []error + if o.Endpoint != nil { + + // query param endpoint + var qrEndpoint string + + if o.Endpoint != nil { + qrEndpoint = *o.Endpoint + } + qEndpoint := qrEndpoint + if qEndpoint != "" { + + if err := r.SetQueryParam("endpoint", qEndpoint); err != nil { + return err + } + } + } + + if o.Name != nil { + + // query param name + var qrName string + + if o.Name != nil { + qrName = *o.Name + } + qName := qrName + if qName != "" { + + if err := r.SetQueryParam("name", qName); err != nil { + return err + } + } + } + if len(res) > 0 { return errors.CompositeValidationError(res...) } diff --git a/client/repositories/list_repos_parameters.go b/client/repositories/list_repos_parameters.go index f4e17d79..9998a1ba 100644 --- a/client/repositories/list_repos_parameters.go +++ b/client/repositories/list_repos_parameters.go @@ -60,6 +60,25 @@ ListReposParams contains all the parameters to send to the API endpoint Typically these are written to a http.Request. */ type ListReposParams struct { + + /* Endpoint. + + Exact endpoint name to filter by + */ + Endpoint *string + + /* Name. + + Exact repository name to filter by + */ + Name *string + + /* Owner. 
+ + Exact owner name to filter by + */ + Owner *string + timeout time.Duration Context context.Context HTTPClient *http.Client @@ -113,6 +132,39 @@ func (o *ListReposParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } +// WithEndpoint adds the endpoint to the list repos params +func (o *ListReposParams) WithEndpoint(endpoint *string) *ListReposParams { + o.SetEndpoint(endpoint) + return o +} + +// SetEndpoint adds the endpoint to the list repos params +func (o *ListReposParams) SetEndpoint(endpoint *string) { + o.Endpoint = endpoint +} + +// WithName adds the name to the list repos params +func (o *ListReposParams) WithName(name *string) *ListReposParams { + o.SetName(name) + return o +} + +// SetName adds the name to the list repos params +func (o *ListReposParams) SetName(name *string) { + o.Name = name +} + +// WithOwner adds the owner to the list repos params +func (o *ListReposParams) WithOwner(owner *string) *ListReposParams { + o.SetOwner(owner) + return o +} + +// SetOwner adds the owner to the list repos params +func (o *ListReposParams) SetOwner(owner *string) { + o.Owner = owner +} + // WriteToRequest writes these params to a swagger request func (o *ListReposParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -121,6 +173,57 @@ func (o *ListReposParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Reg } var res []error + if o.Endpoint != nil { + + // query param endpoint + var qrEndpoint string + + if o.Endpoint != nil { + qrEndpoint = *o.Endpoint + } + qEndpoint := qrEndpoint + if qEndpoint != "" { + + if err := r.SetQueryParam("endpoint", qEndpoint); err != nil { + return err + } + } + } + + if o.Name != nil { + + // query param name + var qrName string + + if o.Name != nil { + qrName = *o.Name + } + qName := qrName + if qName != "" { + + if err := r.SetQueryParam("name", qName); err != nil { + return err + } + } + } + + if o.Owner != nil { + + // query param owner + var qrOwner string + + if o.Owner != nil { + qrOwner = *o.Owner + } + qOwner := qrOwner + if qOwner != "" { + + if err := r.SetQueryParam("owner", qOwner); err != nil { + return err + } + } + } + if len(res) > 0 { return errors.CompositeValidationError(res...) 
} diff --git a/cmd/garm-cli/cmd/enterprise.go b/cmd/garm-cli/cmd/enterprise.go index 1e6c3930..b8850e1b 100644 --- a/cmd/garm-cli/cmd/enterprise.go +++ b/cmd/garm-cli/cmd/enterprise.go @@ -28,6 +28,7 @@ import ( var ( enterpriseName string + enterpriseEndpoint string enterpriseWebhookSecret string enterpriseCreds string ) @@ -85,6 +86,8 @@ var enterpriseListCmd = &cobra.Command{ } listEnterprisesReq := apiClientEnterprises.NewListEnterprisesParams() + listEnterprisesReq.Name = &enterpriseName + listEnterprisesReq.Endpoint = &enterpriseEndpoint response, err := apiCli.Enterprises.ListEnterprises(listEnterprisesReq, authToken) if err != nil { return err @@ -185,6 +188,8 @@ func init() { enterpriseAddCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", string(params.PoolBalancerTypeRoundRobin), "The balancing strategy to use when creating runners in pools matching requested labels.") enterpriseListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.") + enterpriseListCmd.Flags().StringVarP(&enterpriseName, "name", "n", "", "Exact enterprise name to filter by.") + enterpriseListCmd.Flags().StringVarP(&enterpriseEndpoint, "endpoint", "e", "", "Exact endpoint name to filter by.") enterpriseAddCmd.MarkFlagRequired("credentials") //nolint enterpriseAddCmd.MarkFlagRequired("name") //nolint diff --git a/cmd/garm-cli/cmd/organization.go b/cmd/garm-cli/cmd/organization.go index a95f912f..9f23888a 100644 --- a/cmd/garm-cli/cmd/organization.go +++ b/cmd/garm-cli/cmd/organization.go @@ -29,6 +29,7 @@ import ( var ( orgName string + orgEndpoint string orgWebhookSecret string orgCreds string orgRandomWebhookSecret bool @@ -243,6 +244,8 @@ var orgListCmd = &cobra.Command{ } listOrgsReq := apiClientOrgs.NewListOrgsParams() + listOrgsReq.Name = &orgName + listOrgsReq.Endpoint = &orgEndpoint response, err := apiCli.Organizations.ListOrgs(listOrgsReq, authToken) if err != nil { return err @@ -314,7 +317,10 @@ func init() { orgAddCmd.Flags().BoolVar(&installOrgWebhook, "install-webhook", false, "Install the webhook as part of the add operation.") orgAddCmd.MarkFlagsMutuallyExclusive("webhook-secret", "random-webhook-secret") orgAddCmd.MarkFlagsOneRequired("webhook-secret", "random-webhook-secret") + orgListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.") + orgListCmd.Flags().StringVarP(&orgName, "name", "n", "", "Exact org name to filter by.") + orgListCmd.Flags().StringVarP(&orgEndpoint, "endpoint", "e", "", "Exact endpoint name to filter by.") orgAddCmd.MarkFlagRequired("credentials") //nolint orgAddCmd.MarkFlagRequired("name") //nolint diff --git a/cmd/garm-cli/cmd/repository.go b/cmd/garm-cli/cmd/repository.go index 5bf588c5..91db23ea 100644 --- a/cmd/garm-cli/cmd/repository.go +++ b/cmd/garm-cli/cmd/repository.go @@ -30,6 +30,7 @@ import ( var ( repoOwner string repoName string + repoEndpoint string repoWebhookSecret string repoCreds string forgeType string @@ -213,6 +214,9 @@ var repoListCmd = &cobra.Command{ } listReposReq := apiClientRepos.NewListReposParams() + listReposReq.Name = &repoName + listReposReq.Owner = &repoOwner + listReposReq.Endpoint = &repoEndpoint response, err := apiCli.Repositories.ListRepos(listReposReq, authToken) if err != nil { return err @@ -321,6 +325,9 @@ func init() { repoAddCmd.MarkFlagsOneRequired("webhook-secret", "random-webhook-secret") repoListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.") + repoListCmd.Flags().StringVarP(&repoName, "name", "n", "", "Exact repo name to filter by.") + 
repoListCmd.Flags().StringVarP(&repoOwner, "owner", "o", "", "Exact repo owner to filter by.") + repoListCmd.Flags().StringVarP(&repoEndpoint, "endpoint", "e", "", "Exact endpoint name to filter by.") repoAddCmd.MarkFlagRequired("credentials") //nolint repoAddCmd.MarkFlagRequired("owner") //nolint diff --git a/database/common/mocks/Store.go b/database/common/mocks/Store.go index 97da1c06..ec107854 100644 --- a/database/common/mocks/Store.go +++ b/database/common/mocks/Store.go @@ -1524,9 +1524,9 @@ func (_m *Store) ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error return r0, r1 } -// ListEnterprises provides a mock function with given fields: ctx -func (_m *Store) ListEnterprises(ctx context.Context) ([]params.Enterprise, error) { - ret := _m.Called(ctx) +// ListEnterprises provides a mock function with given fields: ctx, filter +func (_m *Store) ListEnterprises(ctx context.Context, filter params.EnterpriseFilter) ([]params.Enterprise, error) { + ret := _m.Called(ctx, filter) if len(ret) == 0 { panic("no return value specified for ListEnterprises") @@ -1534,19 +1534,19 @@ func (_m *Store) ListEnterprises(ctx context.Context) ([]params.Enterprise, erro var r0 []params.Enterprise var r1 error - if rf, ok := ret.Get(0).(func(context.Context) ([]params.Enterprise, error)); ok { - return rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, params.EnterpriseFilter) ([]params.Enterprise, error)); ok { + return rf(ctx, filter) } - if rf, ok := ret.Get(0).(func(context.Context) []params.Enterprise); ok { - r0 = rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, params.EnterpriseFilter) []params.Enterprise); ok { + r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]params.Enterprise) } } - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) + if rf, ok := ret.Get(1).(func(context.Context, params.EnterpriseFilter) error); ok { + r1 = rf(ctx, filter) } else { r1 = ret.Error(1) } @@ -1824,9 +1824,9 @@ func (_m *Store) ListJobsByStatus(ctx context.Context, status params.JobStatus) return r0, r1 } -// ListOrganizations provides a mock function with given fields: ctx -func (_m *Store) ListOrganizations(ctx context.Context) ([]params.Organization, error) { - ret := _m.Called(ctx) +// ListOrganizations provides a mock function with given fields: ctx, filter +func (_m *Store) ListOrganizations(ctx context.Context, filter params.OrganizationFilter) ([]params.Organization, error) { + ret := _m.Called(ctx, filter) if len(ret) == 0 { panic("no return value specified for ListOrganizations") @@ -1834,19 +1834,19 @@ func (_m *Store) ListOrganizations(ctx context.Context) ([]params.Organization, var r0 []params.Organization var r1 error - if rf, ok := ret.Get(0).(func(context.Context) ([]params.Organization, error)); ok { - return rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, params.OrganizationFilter) ([]params.Organization, error)); ok { + return rf(ctx, filter) } - if rf, ok := ret.Get(0).(func(context.Context) []params.Organization); ok { - r0 = rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, params.OrganizationFilter) []params.Organization); ok { + r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]params.Organization) } } - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) + if rf, ok := ret.Get(1).(func(context.Context, params.OrganizationFilter) error); ok { + r1 = rf(ctx, filter) } else { r1 = ret.Error(1) } @@ -1884,9 +1884,9 @@ func (_m *Store) ListPoolInstances(ctx 
context.Context, poolID string) ([]params return r0, r1 } -// ListRepositories provides a mock function with given fields: ctx -func (_m *Store) ListRepositories(ctx context.Context) ([]params.Repository, error) { - ret := _m.Called(ctx) +// ListRepositories provides a mock function with given fields: ctx, filter +func (_m *Store) ListRepositories(ctx context.Context, filter params.RepositoryFilter) ([]params.Repository, error) { + ret := _m.Called(ctx, filter) if len(ret) == 0 { panic("no return value specified for ListRepositories") @@ -1894,19 +1894,19 @@ func (_m *Store) ListRepositories(ctx context.Context) ([]params.Repository, err var r0 []params.Repository var r1 error - if rf, ok := ret.Get(0).(func(context.Context) ([]params.Repository, error)); ok { - return rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, params.RepositoryFilter) ([]params.Repository, error)); ok { + return rf(ctx, filter) } - if rf, ok := ret.Get(0).(func(context.Context) []params.Repository); ok { - r0 = rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, params.RepositoryFilter) []params.Repository); ok { + r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]params.Repository) } } - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) + if rf, ok := ret.Get(1).(func(context.Context, params.RepositoryFilter) error); ok { + r1 = rf(ctx, filter) } else { r1 = ret.Error(1) } diff --git a/database/common/store.go b/database/common/store.go index db5fbb04..8b3c4f7c 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -41,7 +41,7 @@ type RepoStore interface { CreateRepository(ctx context.Context, owner, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (param params.Repository, err error) GetRepository(ctx context.Context, owner, name, endpointName string) (params.Repository, error) GetRepositoryByID(ctx context.Context, repoID string) (params.Repository, error) - ListRepositories(ctx context.Context) ([]params.Repository, error) + ListRepositories(ctx context.Context, filter params.RepositoryFilter) ([]params.Repository, error) DeleteRepository(ctx context.Context, repoID string) error UpdateRepository(ctx context.Context, repoID string, param params.UpdateEntityParams) (params.Repository, error) } @@ -50,7 +50,7 @@ type OrgStore interface { CreateOrganization(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (org params.Organization, err error) GetOrganization(ctx context.Context, name, endpointName string) (params.Organization, error) GetOrganizationByID(ctx context.Context, orgID string) (params.Organization, error) - ListOrganizations(ctx context.Context) ([]params.Organization, error) + ListOrganizations(ctx context.Context, filter params.OrganizationFilter) ([]params.Organization, error) DeleteOrganization(ctx context.Context, orgID string) error UpdateOrganization(ctx context.Context, orgID string, param params.UpdateEntityParams) (params.Organization, error) } @@ -59,7 +59,7 @@ type EnterpriseStore interface { CreateEnterprise(ctx context.Context, name string, credentialsName params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Enterprise, error) GetEnterprise(ctx context.Context, name, endpointName string) (params.Enterprise, error) GetEnterpriseByID(ctx context.Context, enterpriseID string) (params.Enterprise, error) - ListEnterprises(ctx 
context.Context) ([]params.Enterprise, error) + ListEnterprises(ctx context.Context, filter params.EnterpriseFilter) ([]params.Enterprise, error) DeleteEnterprise(ctx context.Context, enterpriseID string) error UpdateEnterprise(ctx context.Context, enterpriseID string, param params.UpdateEntityParams) (params.Enterprise, error) } diff --git a/database/sql/enterprise.go b/database/sql/enterprise.go index 41d95b26..fc273165 100644 --- a/database/sql/enterprise.go +++ b/database/sql/enterprise.go @@ -111,13 +111,19 @@ func (s *sqlDatabase) GetEnterpriseByID(ctx context.Context, enterpriseID string return param, nil } -func (s *sqlDatabase) ListEnterprises(_ context.Context) ([]params.Enterprise, error) { +func (s *sqlDatabase) ListEnterprises(_ context.Context, filter params.EnterpriseFilter) ([]params.Enterprise, error) { var enterprises []Enterprise q := s.conn. Preload("Credentials"). Preload("Credentials.Endpoint"). - Preload("Endpoint"). - Find(&enterprises) + Preload("Endpoint") + if filter.Name != "" { + q = q.Where("name = ?", filter.Name) + } + if filter.Endpoint != "" { + q = q.Where("endpoint_name = ?", filter.Endpoint) + } + q = q.Find(&enterprises) if q.Error != nil { return []params.Enterprise{}, errors.Wrap(q.Error, "fetching enterprises") } diff --git a/database/sql/enterprise_test.go b/database/sql/enterprise_test.go index 79b298d5..056bb7fa 100644 --- a/database/sql/enterprise_test.go +++ b/database/sql/enterprise_test.go @@ -54,8 +54,10 @@ type EnterpriseTestSuite struct { adminUserID string testCreds params.ForgeCredentials + ghesCreds params.ForgeCredentials secondaryTestCreds params.ForgeCredentials githubEndpoint params.ForgeEndpoint + ghesEndpoint params.ForgeEndpoint } func (s *EnterpriseTestSuite) equalInstancesByName(expected, actual []params.Instance) { @@ -90,7 +92,9 @@ func (s *EnterpriseTestSuite) SetupTest() { s.Require().NotEmpty(s.adminUserID) s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) + s.ghesEndpoint = garmTesting.CreateGHESEndpoint(adminCtx, db, s.T()) s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint) + s.ghesCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "ghes-creds", db, s.T(), s.ghesEndpoint) s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) // create some enterprise objects in the database, for testing purposes @@ -272,18 +276,68 @@ func (s *EnterpriseTestSuite) TestGetEnterpriseDBDecryptingErr() { } func (s *EnterpriseTestSuite) TestListEnterprises() { - enterprises, err := s.Store.ListEnterprises(s.adminCtx) + enterprises, err := s.Store.ListEnterprises(s.adminCtx, params.EnterpriseFilter{}) s.Require().Nil(err) garmTesting.EqualDBEntityByName(s.T(), s.Fixtures.Enterprises, enterprises) } +func (s *EnterpriseTestSuite) TestListEnterprisesWithFilter() { + enterprise, err := s.Store.CreateEnterprise( + s.adminCtx, + "test-enterprise", + s.ghesCreds, + "test-secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + + enterprise2, err := s.Store.CreateEnterprise( + s.adminCtx, + "test-enterprise", + s.testCreds, + "test-secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + + enterprise3, err := s.Store.CreateEnterprise( + s.adminCtx, + "test-enterprise2", + s.testCreds, + "test-secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + enterprises, err := s.Store.ListEnterprises(s.adminCtx, params.EnterpriseFilter{ + 
Name: "test-enterprise", + }) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise, enterprise2}, enterprises) + + enterprises, err = s.Store.ListEnterprises(s.adminCtx, params.EnterpriseFilter{ + Name: "test-enterprise", + Endpoint: s.ghesEndpoint.Name, + }) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise}, enterprises) + + enterprises, err = s.Store.ListEnterprises(s.adminCtx, params.EnterpriseFilter{ + Name: "test-enterprise2", + }) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise3}, enterprises) +} + func (s *EnterpriseTestSuite) TestListEnterprisesDBFetchErr() { s.Fixtures.SQLMock. ExpectQuery(regexp.QuoteMeta("SELECT * FROM `enterprises` WHERE `enterprises`.`deleted_at` IS NULL")). WillReturnError(fmt.Errorf("fetching user from database mock error")) - _, err := s.StoreSQLMocked.ListEnterprises(s.adminCtx) + _, err := s.StoreSQLMocked.ListEnterprises(s.adminCtx, params.EnterpriseFilter{}) s.assertSQLMockExpectations() s.Require().NotNil(err) diff --git a/database/sql/organizations.go b/database/sql/organizations.go index 73456362..3b1a05fa 100644 --- a/database/sql/organizations.go +++ b/database/sql/organizations.go @@ -92,15 +92,23 @@ func (s *sqlDatabase) GetOrganization(ctx context.Context, name, endpointName st return param, nil } -func (s *sqlDatabase) ListOrganizations(_ context.Context) ([]params.Organization, error) { +func (s *sqlDatabase) ListOrganizations(_ context.Context, filter params.OrganizationFilter) ([]params.Organization, error) { var orgs []Organization q := s.conn. Preload("Credentials"). Preload("GiteaCredentials"). Preload("Credentials.Endpoint"). Preload("GiteaCredentials.Endpoint"). - Preload("Endpoint"). 
- Find(&orgs) + Preload("Endpoint") + + if filter.Name != "" { + q = q.Where("name = ?", filter.Name) + } + + if filter.Endpoint != "" { + q = q.Where("endpoint_name = ?", filter.Endpoint) + } + q = q.Find(&orgs) if q.Error != nil { return []params.Organization{}, errors.Wrap(q.Error, "fetching org from database") } diff --git a/database/sql/organizations_test.go b/database/sql/organizations_test.go index 5c053cec..df876ba1 100644 --- a/database/sql/organizations_test.go +++ b/database/sql/organizations_test.go @@ -333,18 +333,74 @@ func (s *OrgTestSuite) TestGetOrganizationDBDecryptingErr() { } func (s *OrgTestSuite) TestListOrganizations() { - orgs, err := s.Store.ListOrganizations(s.adminCtx) + orgs, err := s.Store.ListOrganizations(s.adminCtx, params.OrganizationFilter{}) s.Require().Nil(err) garmTesting.EqualDBEntityByName(s.T(), s.Fixtures.Orgs, orgs) } +func (s *OrgTestSuite) TestListOrganizationsWithFilters() { + org, err := s.Store.CreateOrganization( + s.adminCtx, + "test-org", + s.testCreds, + "super secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + + org2, err := s.Store.CreateOrganization( + s.adminCtx, + "test-org", + s.testCredsGitea, + "super secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + + org3, err := s.Store.CreateOrganization( + s.adminCtx, + "test-org2", + s.testCreds, + "super secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + orgs, err := s.Store.ListOrganizations( + s.adminCtx, + params.OrganizationFilter{ + Name: "test-org", + }) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org, org2}, orgs) + + orgs, err = s.Store.ListOrganizations( + s.adminCtx, + params.OrganizationFilter{ + Name: "test-org", + Endpoint: s.giteaEndpoint.Name, + }) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org2}, orgs) + + orgs, err = s.Store.ListOrganizations( + s.adminCtx, + params.OrganizationFilter{ + Name: "test-org2", + }) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org3}, orgs) +} + func (s *OrgTestSuite) TestListOrganizationsDBFetchErr() { s.Fixtures.SQLMock. ExpectQuery(regexp.QuoteMeta("SELECT * FROM `organizations` WHERE `organizations`.`deleted_at` IS NULL")). WillReturnError(fmt.Errorf("fetching user from database mock error")) - _, err := s.StoreSQLMocked.ListOrganizations(s.adminCtx) + _, err := s.StoreSQLMocked.ListOrganizations(s.adminCtx, params.OrganizationFilter{}) s.assertSQLMockExpectations() s.Require().NotNil(err) diff --git a/database/sql/repositories.go b/database/sql/repositories.go index 03452df6..a18eb001 100644 --- a/database/sql/repositories.go +++ b/database/sql/repositories.go @@ -93,15 +93,24 @@ func (s *sqlDatabase) GetRepository(ctx context.Context, owner, name, endpointNa return param, nil } -func (s *sqlDatabase) ListRepositories(_ context.Context) ([]params.Repository, error) { +func (s *sqlDatabase) ListRepositories(_ context.Context, filter params.RepositoryFilter) ([]params.Repository, error) { var repos []Repository q := s.conn. Preload("Credentials"). Preload("GiteaCredentials"). Preload("Credentials.Endpoint"). Preload("GiteaCredentials.Endpoint"). - Preload("Endpoint"). 
- Find(&repos) + Preload("Endpoint") + if filter.Owner != "" { + q = q.Where("owner = ?", filter.Owner) + } + if filter.Name != "" { + q = q.Where("name = ?", filter.Name) + } + if filter.Endpoint != "" { + q = q.Where("endpoint_name = ?", filter.Endpoint) + } + q = q.Find(&repos) if q.Error != nil { return []params.Repository{}, errors.Wrap(q.Error, "fetching user from database") } diff --git a/database/sql/repositories_test.go b/database/sql/repositories_test.go index f593ddce..4609a357 100644 --- a/database/sql/repositories_test.go +++ b/database/sql/repositories_test.go @@ -376,18 +376,99 @@ func (s *RepoTestSuite) TestGetRepositoryDBDecryptingErr() { } func (s *RepoTestSuite) TestListRepositories() { - repos, err := s.Store.ListRepositories(s.adminCtx) + repos, err := s.Store.ListRepositories(s.adminCtx, params.RepositoryFilter{}) s.Require().Nil(err) s.equalReposByName(s.Fixtures.Repos, repos) } +func (s *RepoTestSuite) TestListRepositoriesWithFilters() { + repo, err := s.Store.CreateRepository( + s.adminCtx, + "test-owner", + "test-repo", + s.testCreds, + "super secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + + repo2, err := s.Store.CreateRepository( + s.adminCtx, + "test-owner", + "test-repo", + s.testCredsGitea, + "super secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + + repo3, err := s.Store.CreateRepository( + s.adminCtx, + "test-owner", + "test-repo2", + s.testCreds, + "super secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + + repo4, err := s.Store.CreateRepository( + s.adminCtx, + "test-owner2", + "test-repo", + s.testCreds, + "super secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + + repos, err := s.Store.ListRepositories( + s.adminCtx, + params.RepositoryFilter{ + Name: "test-repo", + }) + + s.Require().Nil(err) + s.equalReposByName([]params.Repository{repo, repo2, repo4}, repos) + + repos, err = s.Store.ListRepositories( + s.adminCtx, + params.RepositoryFilter{ + Name: "test-repo", + Owner: "test-owner", + }) + + s.Require().Nil(err) + s.equalReposByName([]params.Repository{repo, repo2}, repos) + + repos, err = s.Store.ListRepositories( + s.adminCtx, + params.RepositoryFilter{ + Name: "test-repo", + Owner: "test-owner", + Endpoint: s.giteaEndpoint.Name, + }) + + s.Require().Nil(err) + s.equalReposByName([]params.Repository{repo2}, repos) + + repos, err = s.Store.ListRepositories( + s.adminCtx, + params.RepositoryFilter{ + Name: "test-repo2", + }) + + s.Require().Nil(err) + s.equalReposByName([]params.Repository{repo3}, repos) +} + func (s *RepoTestSuite) TestListRepositoriesDBFetchErr() { s.Fixtures.SQLMock. ExpectQuery(regexp.QuoteMeta("SELECT * FROM `repositories` WHERE `repositories`.`deleted_at` IS NULL")). WillReturnError(fmt.Errorf("fetching user from database mock error")) - _, err := s.StoreSQLMocked.ListRepositories(s.adminCtx) + _, err := s.StoreSQLMocked.ListRepositories(s.adminCtx, params.RepositoryFilter{}) s.Require().NotNil(err) s.Require().Equal("fetching user from database: fetching user from database mock error", err.Error()) @@ -401,7 +482,7 @@ func (s *RepoTestSuite) TestListRepositoriesDBDecryptingErr() { ExpectQuery(regexp.QuoteMeta("SELECT * FROM `repositories` WHERE `repositories`.`deleted_at` IS NULL")). 
WillReturnRows(sqlmock.NewRows([]string{"id", "webhook_secret"}).AddRow(s.Fixtures.Repos[0].ID, s.Fixtures.Repos[0].WebhookSecret)) - _, err := s.StoreSQLMocked.ListRepositories(s.adminCtx) + _, err := s.StoreSQLMocked.ListRepositories(s.adminCtx, params.RepositoryFilter{}) s.Require().NotNil(err) s.Require().Equal("fetching repositories: decrypting secret: invalid passphrase length (expected length 32 characters)", err.Error()) diff --git a/internal/testing/testing.go b/internal/testing/testing.go index 84b4d48c..98bfd34c 100644 --- a/internal/testing/testing.go +++ b/internal/testing/testing.go @@ -85,6 +85,31 @@ func CreateGARMTestUser(ctx context.Context, username string, db common.Store, s return user } +func CreateGHESEndpoint(ctx context.Context, db common.Store, s *testing.T) params.ForgeEndpoint { + endpointParams := params.CreateGithubEndpointParams{ + Name: "ghes.example.com", + Description: "GHES endpoint", + APIBaseURL: "https://ghes.example.com", + UploadBaseURL: "https://upload.ghes.example.com/", + BaseURL: "https://ghes.example.com", + } + + ep, err := db.GetGithubEndpoint(ctx, endpointParams.Name) + if err != nil { + if !errors.Is(err, runnerErrors.ErrNotFound) { + s.Fatalf("failed to get database object (%s): %v", endpointParams.Name, err) + } + ep, err = db.CreateGithubEndpoint(ctx, endpointParams) + if err != nil { + if !errors.Is(err, runnerErrors.ErrDuplicateEntity) { + s.Fatalf("failed to create database object (%s): %v", endpointParams.Name, err) + } + } + } + + return ep +} + func CreateDefaultGithubEndpoint(ctx context.Context, db common.Store, s *testing.T) params.ForgeEndpoint { endpointParams := params.CreateGithubEndpointParams{ Name: "github.com", diff --git a/params/params.go b/params/params.go index 2a7fdef9..c9d4fb94 100644 --- a/params/params.go +++ b/params/params.go @@ -1192,3 +1192,19 @@ type ForgeEndpoint struct { EndpointType EndpointType `json:"endpoint_type,omitempty"` } + +type RepositoryFilter struct { + Owner string + Name string + Endpoint string +} + +type OrganizationFilter struct { + Name string + Endpoint string +} + +type EnterpriseFilter struct { + Name string + Endpoint string +} diff --git a/runner/enterprises.go b/runner/enterprises.go index f192c7cd..341cf5b9 100644 --- a/runner/enterprises.go +++ b/runner/enterprises.go @@ -86,12 +86,12 @@ func (r *Runner) CreateEnterprise(ctx context.Context, param params.CreateEnterp return enterprise, nil } -func (r *Runner) ListEnterprises(ctx context.Context) ([]params.Enterprise, error) { +func (r *Runner) ListEnterprises(ctx context.Context, filter params.EnterpriseFilter) ([]params.Enterprise, error) { if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } - enterprises, err := r.store.ListEnterprises(ctx) + enterprises, err := r.store.ListEnterprises(ctx, filter) if err != nil { return nil, errors.Wrap(err, "listing enterprises") } diff --git a/runner/enterprises_test.go b/runner/enterprises_test.go index d5eef463..ce791e55 100644 --- a/runner/enterprises_test.go +++ b/runner/enterprises_test.go @@ -59,6 +59,8 @@ type EnterpriseTestSuite struct { testCreds params.ForgeCredentials secondaryTestCreds params.ForgeCredentials forgeEndpoint params.ForgeEndpoint + ghesEndpoint params.ForgeEndpoint + ghesCreds params.ForgeCredentials } func (s *EnterpriseTestSuite) SetupTest() { @@ -71,8 +73,10 @@ func (s *EnterpriseTestSuite) SetupTest() { adminCtx := garmTesting.ImpersonateAdminContext(context.Background(), db, s.T()) s.forgeEndpoint = 
garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) + s.ghesEndpoint = garmTesting.CreateGHESEndpoint(adminCtx, db, s.T()) s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.forgeEndpoint) s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.forgeEndpoint) + s.ghesCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "ghes-creds", db, s.T(), s.ghesEndpoint) // create some organization objects in the database, for testing purposes enterprises := map[string]params.Enterprise{} @@ -224,14 +228,74 @@ func (s *EnterpriseTestSuite) TestCreateEnterpriseStartPoolMgrFailed() { func (s *EnterpriseTestSuite) TestListEnterprises() { s.Fixtures.PoolMgrCtrlMock.On("GetEnterprisePoolManager", mock.AnythingOfType("params.Enterprise")).Return(s.Fixtures.PoolMgrMock, nil) s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil) - orgs, err := s.Runner.ListEnterprises(s.Fixtures.AdminContext) + orgs, err := s.Runner.ListEnterprises(s.Fixtures.AdminContext, params.EnterpriseFilter{}) s.Require().Nil(err) garmTesting.EqualDBEntityByName(s.T(), garmTesting.DBEntityMapToSlice(s.Fixtures.StoreEnterprises), orgs) } +func (s *EnterpriseTestSuite) TestListEnterprisesWithFilters() { + s.Fixtures.PoolMgrCtrlMock.On("GetEnterprisePoolManager", mock.AnythingOfType("params.Enterprise")).Return(s.Fixtures.PoolMgrMock, nil) + s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil) + + enterprise, err := s.Fixtures.Store.CreateEnterprise( + s.Fixtures.AdminContext, + "test-enterprise", + s.testCreds, + "super secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + enterprise2, err := s.Fixtures.Store.CreateEnterprise( + s.Fixtures.AdminContext, + "test-enterprise2", + s.testCreds, + "super secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + enterprise3, err := s.Fixtures.Store.CreateEnterprise( + s.Fixtures.AdminContext, + "test-enterprise", + s.ghesCreds, + "super secret", + params.PoolBalancerTypeRoundRobin, + ) + s.Require().NoError(err) + orgs, err := s.Runner.ListEnterprises( + s.Fixtures.AdminContext, + params.EnterpriseFilter{ + Name: "test-enterprise", + }, + ) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise, enterprise3}, orgs) + + orgs, err = s.Runner.ListEnterprises( + s.Fixtures.AdminContext, + params.EnterpriseFilter{ + Name: "test-enterprise", + Endpoint: s.ghesEndpoint.Name, + }, + ) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise3}, orgs) + + orgs, err = s.Runner.ListEnterprises( + s.Fixtures.AdminContext, + params.EnterpriseFilter{ + Name: "test-enterprise2", + }, + ) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Enterprise{enterprise2}, orgs) +} + func (s *EnterpriseTestSuite) TestListEnterprisesErrUnauthorized() { - _, err := s.Runner.ListEnterprises(context.Background()) + _, err := s.Runner.ListEnterprises(context.Background(), params.EnterpriseFilter{}) s.Require().Equal(runnerErrors.ErrUnauthorized, err) } diff --git a/runner/metrics/enterprise.go b/runner/metrics/enterprise.go index 3ab9003c..be6eba66 100644 --- a/runner/metrics/enterprise.go +++ b/runner/metrics/enterprise.go @@ -19,6 +19,7 @@ import ( "strconv" "github.com/cloudbase/garm/metrics" + "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner" //nolint:typecheck ) @@ -28,7 +29,7 @@ 
func CollectEnterpriseMetric(ctx context.Context, r *runner.Runner) error { metrics.EnterpriseInfo.Reset() metrics.EnterprisePoolManagerStatus.Reset() - enterprises, err := r.ListEnterprises(ctx) + enterprises, err := r.ListEnterprises(ctx, params.EnterpriseFilter{}) if err != nil { return err } diff --git a/runner/metrics/organization.go b/runner/metrics/organization.go index 3716cca1..6bf6d9e5 100644 --- a/runner/metrics/organization.go +++ b/runner/metrics/organization.go @@ -19,6 +19,7 @@ import ( "strconv" "github.com/cloudbase/garm/metrics" + "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner" ) @@ -28,7 +29,7 @@ func CollectOrganizationMetric(ctx context.Context, r *runner.Runner) error { metrics.OrganizationInfo.Reset() metrics.OrganizationPoolManagerStatus.Reset() - organizations, err := r.ListOrganizations(ctx) + organizations, err := r.ListOrganizations(ctx, params.OrganizationFilter{}) if err != nil { return err } diff --git a/runner/metrics/repository.go b/runner/metrics/repository.go index 36e07bf0..a2e8fa57 100644 --- a/runner/metrics/repository.go +++ b/runner/metrics/repository.go @@ -19,6 +19,7 @@ import ( "strconv" "github.com/cloudbase/garm/metrics" + "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner" ) @@ -27,7 +28,7 @@ func CollectRepositoryMetric(ctx context.Context, r *runner.Runner) error { metrics.EnterpriseInfo.Reset() metrics.EnterprisePoolManagerStatus.Reset() - repositories, err := r.ListRepositories(ctx) + repositories, err := r.ListRepositories(ctx, params.RepositoryFilter{}) if err != nil { return err } diff --git a/runner/organizations.go b/runner/organizations.go index 26d4f6e9..0ec4bfa2 100644 --- a/runner/organizations.go +++ b/runner/organizations.go @@ -95,12 +95,12 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP return org, nil } -func (r *Runner) ListOrganizations(ctx context.Context) ([]params.Organization, error) { +func (r *Runner) ListOrganizations(ctx context.Context, filter params.OrganizationFilter) ([]params.Organization, error) { if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } - orgs, err := r.store.ListOrganizations(ctx) + orgs, err := r.store.ListOrganizations(ctx, filter) if err != nil { return nil, errors.Wrap(err, "listing organizations") } diff --git a/runner/organizations_test.go b/runner/organizations_test.go index 90075c87..9de6d2b4 100644 --- a/runner/organizations_test.go +++ b/runner/organizations_test.go @@ -58,7 +58,9 @@ type OrgTestSuite struct { testCreds params.ForgeCredentials secondaryTestCreds params.ForgeCredentials + giteaTestCreds params.ForgeCredentials githubEndpoint params.ForgeEndpoint + giteaEndpoint params.ForgeEndpoint } func (s *OrgTestSuite) SetupTest() { @@ -72,7 +74,9 @@ func (s *OrgTestSuite) SetupTest() { adminCtx := garmTesting.ImpersonateAdminContext(context.Background(), db, s.T()) s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) + s.giteaEndpoint = garmTesting.CreateDefaultGiteaEndpoint(adminCtx, db, s.T()) s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint) + s.giteaTestCreds = garmTesting.CreateTestGiteaCredentials(adminCtx, "gitea-creds", db, s.T(), s.giteaEndpoint) s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) // create some organization objects in the database, for testing purposes @@ -238,14 +242,74 @@ func (s *OrgTestSuite) 
TestCreateOrganizationStartPoolMgrFailed() { func (s *OrgTestSuite) TestListOrganizations() { s.Fixtures.PoolMgrCtrlMock.On("GetOrgPoolManager", mock.AnythingOfType("params.Organization")).Return(s.Fixtures.PoolMgrMock, nil) s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil) - orgs, err := s.Runner.ListOrganizations(s.Fixtures.AdminContext) + orgs, err := s.Runner.ListOrganizations(s.Fixtures.AdminContext, params.OrganizationFilter{}) s.Require().Nil(err) garmTesting.EqualDBEntityByName(s.T(), garmTesting.DBEntityMapToSlice(s.Fixtures.StoreOrgs), orgs) } +func (s *OrgTestSuite) TestListOrganizationsWithFilter() { + s.Fixtures.PoolMgrCtrlMock.On("GetOrgPoolManager", mock.AnythingOfType("params.Organization")).Return(s.Fixtures.PoolMgrMock, nil) + s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil) + + org, err := s.Fixtures.Store.CreateOrganization( + s.Fixtures.AdminContext, + "test-org", + s.testCreds, + "super-secret", + params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + + org2, err := s.Fixtures.Store.CreateOrganization( + s.Fixtures.AdminContext, + "test-org", + s.giteaTestCreds, + "super-secret", + params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + + org3, err := s.Fixtures.Store.CreateOrganization( + s.Fixtures.AdminContext, + "test-org2", + s.giteaTestCreds, + "super-secret", + params.PoolBalancerTypeRoundRobin) + s.Require().NoError(err) + + orgs, err := s.Runner.ListOrganizations( + s.Fixtures.AdminContext, + params.OrganizationFilter{ + Name: "test-org", + }, + ) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org, org2}, orgs) + + orgs, err = s.Runner.ListOrganizations( + s.Fixtures.AdminContext, + params.OrganizationFilter{ + Name: "test-org", + Endpoint: s.giteaEndpoint.Name, + }, + ) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org2}, orgs) + + orgs, err = s.Runner.ListOrganizations( + s.Fixtures.AdminContext, + params.OrganizationFilter{ + Name: "test-org2", + }, + ) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Organization{org3}, orgs) +} + func (s *OrgTestSuite) TestListOrganizationsErrUnauthorized() { - _, err := s.Runner.ListOrganizations(context.Background()) + _, err := s.Runner.ListOrganizations(context.Background(), params.OrganizationFilter{}) s.Require().Equal(runnerErrors.ErrUnauthorized, err) } diff --git a/runner/repositories.go b/runner/repositories.go index d5118e96..24beaa07 100644 --- a/runner/repositories.go +++ b/runner/repositories.go @@ -93,12 +93,12 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa return repo, nil } -func (r *Runner) ListRepositories(ctx context.Context) ([]params.Repository, error) { +func (r *Runner) ListRepositories(ctx context.Context, filter params.RepositoryFilter) ([]params.Repository, error) { if !auth.IsAdmin(ctx) { return nil, runnerErrors.ErrUnauthorized } - repos, err := r.store.ListRepositories(ctx) + repos, err := r.store.ListRepositories(ctx, filter) if err != nil { return nil, errors.Wrap(err, "listing repositories") } diff --git a/runner/repositories_test.go b/runner/repositories_test.go index 0adf40d7..53fe5869 100644 --- a/runner/repositories_test.go +++ b/runner/repositories_test.go @@ -62,7 +62,9 @@ type RepoTestSuite struct { testCreds params.ForgeCredentials secondaryTestCreds params.ForgeCredentials + giteaTestCreds params.ForgeCredentials githubEndpoint 
params.ForgeEndpoint + giteaEndpoint params.ForgeEndpoint } func (s *RepoTestSuite) SetupTest() { @@ -75,8 +77,10 @@ func (s *RepoTestSuite) SetupTest() { adminCtx := garmTesting.ImpersonateAdminContext(context.Background(), db, s.T()) s.githubEndpoint = garmTesting.CreateDefaultGithubEndpoint(adminCtx, db, s.T()) + s.giteaEndpoint = garmTesting.CreateDefaultGiteaEndpoint(adminCtx, db, s.T()) s.testCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "new-creds", db, s.T(), s.githubEndpoint) s.secondaryTestCreds = garmTesting.CreateTestGithubCredentials(adminCtx, "secondary-creds", db, s.T(), s.githubEndpoint) + s.giteaTestCreds = garmTesting.CreateTestGiteaCredentials(adminCtx, "gitea-creds", db, s.T(), s.giteaEndpoint) // create some repository objects in the database, for testing purposes repos := map[string]params.Repository{} @@ -254,14 +258,81 @@ func (s *RepoTestSuite) TestCreateRepositoryStartPoolMgrFailed() { func (s *RepoTestSuite) TestListRepositories() { s.Fixtures.PoolMgrCtrlMock.On("GetRepoPoolManager", mock.AnythingOfType("params.Repository")).Return(s.Fixtures.PoolMgrMock, nil) s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil) - repos, err := s.Runner.ListRepositories(s.Fixtures.AdminContext) + repos, err := s.Runner.ListRepositories(s.Fixtures.AdminContext, params.RepositoryFilter{}) s.Require().Nil(err) garmTesting.EqualDBEntityByName(s.T(), garmTesting.DBEntityMapToSlice(s.Fixtures.StoreRepos), repos) } +func (s *RepoTestSuite) TestListRepositoriesWithFilters() { + s.Fixtures.PoolMgrCtrlMock.On("GetRepoPoolManager", mock.AnythingOfType("params.Repository")).Return(s.Fixtures.PoolMgrMock, nil) + s.Fixtures.PoolMgrMock.On("Status").Return(params.PoolManagerStatus{IsRunning: true}, nil) + + repo, err := s.Fixtures.Store.CreateRepository( + s.Fixtures.AdminContext, + "example-owner", + "example-repo", + s.testCreds, + "test-webhook-secret", + params.PoolBalancerTypeRoundRobin, + ) + if err != nil { + s.FailNow(fmt.Sprintf("failed to create database object (example-repo): %q", err)) + } + + repo2, err := s.Fixtures.Store.CreateRepository( + s.Fixtures.AdminContext, + "another-example-owner", + "example-repo", + s.testCreds, + "test-webhook-secret", + params.PoolBalancerTypeRoundRobin, + ) + if err != nil { + s.FailNow(fmt.Sprintf("failed to create database object (example-repo): %q", err)) + } + + repo3, err := s.Fixtures.Store.CreateRepository( + s.Fixtures.AdminContext, + "example-owner", + "example-repo", + s.giteaTestCreds, + "test-webhook-secret", + params.PoolBalancerTypeRoundRobin, + ) + if err != nil { + s.FailNow(fmt.Sprintf("failed to create database object (example-repo): %q", err)) + } + + repos, err := s.Runner.ListRepositories(s.Fixtures.AdminContext, params.RepositoryFilter{Name: "example-repo"}) + + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Repository{repo, repo2, repo3}, repos) + + repos, err = s.Runner.ListRepositories( + s.Fixtures.AdminContext, + params.RepositoryFilter{ + Name: "example-repo", + Owner: "example-owner", + }, + ) + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Repository{repo, repo3}, repos) + + repos, err = s.Runner.ListRepositories( + s.Fixtures.AdminContext, + params.RepositoryFilter{ + Name: "example-repo", + Owner: "example-owner", + Endpoint: s.giteaEndpoint.Name, + }, + ) + s.Require().Nil(err) + garmTesting.EqualDBEntityByName(s.T(), []params.Repository{repo3}, repos) +} + func (s *RepoTestSuite) TestListRepositoriesErrUnauthorized() 
{ - _, err := s.Runner.ListRepositories(context.Background()) + _, err := s.Runner.ListRepositories(context.Background(), params.RepositoryFilter{}) s.Require().Equal(runnerErrors.ErrUnauthorized, err) } diff --git a/runner/runner.go b/runner/runner.go index aa55ee4f..da3f35ea 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -327,17 +327,17 @@ func (r *Runner) loadReposOrgsAndEnterprises() error { r.mux.Lock() defer r.mux.Unlock() - repos, err := r.store.ListRepositories(r.ctx) + repos, err := r.store.ListRepositories(r.ctx, params.RepositoryFilter{}) if err != nil { return errors.Wrap(err, "fetching repositories") } - orgs, err := r.store.ListOrganizations(r.ctx) + orgs, err := r.store.ListOrganizations(r.ctx, params.OrganizationFilter{}) if err != nil { return errors.Wrap(err, "fetching organizations") } - enterprises, err := r.store.ListEnterprises(r.ctx) + enterprises, err := r.store.ListEnterprises(r.ctx, params.EnterpriseFilter{}) if err != nil { return errors.Wrap(err, "fetching enterprises") } diff --git a/workers/cache/cache.go b/workers/cache/cache.go index a00c7667..3f589edd 100644 --- a/workers/cache/cache.go +++ b/workers/cache/cache.go @@ -96,17 +96,17 @@ func (w *Worker) loadAllEntities() error { return fmt.Errorf("listing scale sets: %w", err) } - repos, err := w.store.ListRepositories(w.ctx) + repos, err := w.store.ListRepositories(w.ctx, params.RepositoryFilter{}) if err != nil { return fmt.Errorf("listing repositories: %w", err) } - orgs, err := w.store.ListOrganizations(w.ctx) + orgs, err := w.store.ListOrganizations(w.ctx, params.OrganizationFilter{}) if err != nil { return fmt.Errorf("listing organizations: %w", err) } - enterprises, err := w.store.ListEnterprises(w.ctx) + enterprises, err := w.store.ListEnterprises(w.ctx, params.EnterpriseFilter{}) if err != nil { return fmt.Errorf("listing enterprises: %w", err) } diff --git a/workers/entity/controller.go b/workers/entity/controller.go index 99618194..3ad52108 100644 --- a/workers/entity/controller.go +++ b/workers/entity/controller.go @@ -24,6 +24,7 @@ import ( "github.com/cloudbase/garm/auth" dbCommon "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/database/watcher" + "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" garmUtil "github.com/cloudbase/garm/util" ) @@ -63,7 +64,7 @@ type Controller struct { func (c *Controller) loadAllRepositories() error { c.mux.Lock() defer c.mux.Unlock() - repos, err := c.store.ListRepositories(c.ctx) + repos, err := c.store.ListRepositories(c.ctx, params.RepositoryFilter{}) if err != nil { return fmt.Errorf("fetching repositories: %w", err) } @@ -95,7 +96,7 @@ func (c *Controller) loadAllRepositories() error { func (c *Controller) loadAllOrganizations() error { c.mux.Lock() defer c.mux.Unlock() - orgs, err := c.store.ListOrganizations(c.ctx) + orgs, err := c.store.ListOrganizations(c.ctx, params.OrganizationFilter{}) if err != nil { return fmt.Errorf("fetching organizations: %w", err) } @@ -127,7 +128,7 @@ func (c *Controller) loadAllOrganizations() error { func (c *Controller) loadAllEnterprises() error { c.mux.Lock() defer c.mux.Unlock() - enterprises, err := c.store.ListEnterprises(c.ctx) + enterprises, err := c.store.ListEnterprises(c.ctx, params.EnterpriseFilter{}) if err != nil { return fmt.Errorf("fetching enterprises: %w", err) } From e92b2c111154daa78eb464ef377c1bf8f45679d6 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 19 Jun 2025 09:15:34 +0000 Subject: [PATCH 106/179] Allow usage of friendly 
names in most commands This change adds the ability to use the repo/org/enterprise names instead of UUID in most garm-cli commands, at the expense of an extra list API call, leveraging the recently added filter options. Signed-off-by: Gabriel Adrian Samfira --- cmd/garm-cli/cmd/enterprise.go | 23 ++++++- cmd/garm-cli/cmd/organization.go | 49 ++++++++++++--- cmd/garm-cli/cmd/pool.go | 24 ++++++++ cmd/garm-cli/cmd/repository.go | 49 ++++++++++++--- cmd/garm-cli/cmd/scalesets.go | 24 ++++++++ cmd/garm-cli/cmd/util.go | 100 +++++++++++++++++++++++++++++++ 6 files changed, 249 insertions(+), 20 deletions(-) create mode 100644 cmd/garm-cli/cmd/util.go diff --git a/cmd/garm-cli/cmd/enterprise.go b/cmd/garm-cli/cmd/enterprise.go index b8850e1b..0f688fe5 100644 --- a/cmd/garm-cli/cmd/enterprise.go +++ b/cmd/garm-cli/cmd/enterprise.go @@ -112,8 +112,14 @@ var enterpriseShowCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } + + enterpriseID, err := resolveEnterprise(args[0]) + if err != nil { + return err + } + showEnterpriseReq := apiClientEnterprises.NewGetEnterpriseParams() - showEnterpriseReq.EnterpriseID = args[0] + showEnterpriseReq.EnterpriseID = enterpriseID response, err := apiCli.Enterprises.GetEnterprise(showEnterpriseReq, authToken) if err != nil { return err @@ -139,8 +145,14 @@ var enterpriseDeleteCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } + + enterpriseID, err := resolveEnterprise(args[0]) + if err != nil { + return err + } + deleteEnterpriseReq := apiClientEnterprises.NewDeleteEnterpriseParams() - deleteEnterpriseReq.EnterpriseID = args[0] + deleteEnterpriseReq.EnterpriseID = enterpriseID if err := apiCli.Enterprises.DeleteEnterprise(deleteEnterpriseReq, authToken); err != nil { return err } @@ -165,13 +177,18 @@ var enterpriseUpdateCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } + enterpriseID, err := resolveEnterprise(args[0]) + if err != nil { + return err + } + updateEnterpriseReq := apiClientEnterprises.NewUpdateEnterpriseParams() updateEnterpriseReq.Body = params.UpdateEntityParams{ WebhookSecret: repoWebhookSecret, CredentialsName: repoCreds, PoolBalancerType: params.PoolBalancerType(poolBalancerType), } - updateEnterpriseReq.EnterpriseID = args[0] + updateEnterpriseReq.EnterpriseID = enterpriseID response, err := apiCli.Enterprises.UpdateEnterprise(updateEnterpriseReq, authToken) if err != nil { return err diff --git a/cmd/garm-cli/cmd/organization.go b/cmd/garm-cli/cmd/organization.go index 9f23888a..4cb7222f 100644 --- a/cmd/garm-cli/cmd/organization.go +++ b/cmd/garm-cli/cmd/organization.go @@ -76,8 +76,13 @@ var orgWebhookInstallCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } + orgID, err := resolveOrganization(args[0]) + if err != nil { + return err + } + installWebhookReq := apiClientOrgs.NewInstallOrgWebhookParams() - installWebhookReq.OrgID = args[0] + installWebhookReq.OrgID = orgID installWebhookReq.Body.InsecureSSL = insecureOrgWebhook installWebhookReq.Body.WebhookEndpointType = params.WebhookEndpointDirect @@ -105,9 +110,12 @@ var orgHookInfoShowCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } - + orgID, err := resolveOrganization(args[0]) + if err != nil { + return err + } showWebhookInfoReq := apiClientOrgs.NewGetOrgWebhookInfoParams() - showWebhookInfoReq.OrgID = args[0] + showWebhookInfoReq.OrgID = orgID response, err := apiCli.Organizations.GetOrgWebhookInfo(showWebhookInfoReq, authToken) if err != nil { 
@@ -134,10 +142,15 @@ var orgWebhookUninstallCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - uninstallWebhookReq := apiClientOrgs.NewUninstallOrgWebhookParams() - uninstallWebhookReq.OrgID = args[0] + orgID, err := resolveOrganization(args[0]) + if err != nil { + return err + } - err := apiCli.Organizations.UninstallOrgWebhook(uninstallWebhookReq, authToken) + uninstallWebhookReq := apiClientOrgs.NewUninstallOrgWebhookParams() + uninstallWebhookReq.OrgID = orgID + + err = apiCli.Organizations.UninstallOrgWebhook(uninstallWebhookReq, authToken) if err != nil { return err } @@ -216,13 +229,19 @@ var orgUpdateCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } + + orgID, err := resolveOrganization(args[0]) + if err != nil { + return err + } + updateOrgReq := apiClientOrgs.NewUpdateOrgParams() updateOrgReq.Body = params.UpdateEntityParams{ WebhookSecret: orgWebhookSecret, CredentialsName: orgCreds, PoolBalancerType: params.PoolBalancerType(poolBalancerType), } - updateOrgReq.OrgID = args[0] + updateOrgReq.OrgID = orgID response, err := apiCli.Organizations.UpdateOrg(updateOrgReq, authToken) if err != nil { return err @@ -270,8 +289,14 @@ var orgShowCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } + + orgID, err := resolveOrganization(args[0]) + if err != nil { + return err + } + showOrgReq := apiClientOrgs.NewGetOrgParams() - showOrgReq.OrgID = args[0] + showOrgReq.OrgID = orgID response, err := apiCli.Organizations.GetOrg(showOrgReq, authToken) if err != nil { return err @@ -297,8 +322,14 @@ var orgDeleteCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } + + orgID, err := resolveOrganization(args[0]) + if err != nil { + return err + } + deleteOrgReq := apiClientOrgs.NewDeleteOrgParams() - deleteOrgReq.OrgID = args[0] + deleteOrgReq.OrgID = orgID deleteOrgReq.KeepWebhook = &keepOrgWebhook if err := apiCli.Organizations.DeleteOrg(deleteOrgReq, authToken); err != nil { return err diff --git a/cmd/garm-cli/cmd/pool.go b/cmd/garm-cli/cmd/pool.go index b2c324ea..096210fa 100644 --- a/cmd/garm-cli/cmd/pool.go +++ b/cmd/garm-cli/cmd/pool.go @@ -105,14 +105,26 @@ Example: switch len(args) { case 0: if cmd.Flags().Changed("repo") { + poolRepository, err = resolveRepository(poolRepository) + if err != nil { + return err + } listRepoPoolsReq := apiClientRepos.NewListRepoPoolsParams() listRepoPoolsReq.RepoID = poolRepository response, err = apiCli.Repositories.ListRepoPools(listRepoPoolsReq, authToken) } else if cmd.Flags().Changed("org") { + poolOrganization, err = resolveOrganization(poolOrganization) + if err != nil { + return err + } listOrgPoolsReq := apiClientOrgs.NewListOrgPoolsParams() listOrgPoolsReq.OrgID = poolOrganization response, err = apiCli.Organizations.ListOrgPools(listOrgPoolsReq, authToken) } else if cmd.Flags().Changed("enterprise") { + poolEnterprise, err = resolveEnterprise(poolEnterprise) + if err != nil { + return err + } listEnterprisePoolsReq := apiClientEnterprises.NewListEnterprisePoolsParams() listEnterprisePoolsReq.EnterpriseID = poolEnterprise response, err = apiCli.Enterprises.ListEnterprisePools(listEnterprisePoolsReq, authToken) @@ -250,16 +262,28 @@ var poolAddCmd = &cobra.Command{ var err error var response poolPayloadGetter if cmd.Flags().Changed("repo") { + poolRepository, err = resolveRepository(poolRepository) + if err != nil { + return err + } newRepoPoolReq := apiClientRepos.NewCreateRepoPoolParams() newRepoPoolReq.RepoID = poolRepository 
newRepoPoolReq.Body = newPoolParams response, err = apiCli.Repositories.CreateRepoPool(newRepoPoolReq, authToken) } else if cmd.Flags().Changed("org") { + poolOrganization, err = resolveOrganization(poolOrganization) + if err != nil { + return err + } newOrgPoolReq := apiClientOrgs.NewCreateOrgPoolParams() newOrgPoolReq.OrgID = poolOrganization newOrgPoolReq.Body = newPoolParams response, err = apiCli.Organizations.CreateOrgPool(newOrgPoolReq, authToken) } else if cmd.Flags().Changed("enterprise") { + poolEnterprise, err = resolveEnterprise(poolEnterprise) + if err != nil { + return err + } newEnterprisePoolReq := apiClientEnterprises.NewCreateEnterprisePoolParams() newEnterprisePoolReq.EnterpriseID = poolEnterprise newEnterprisePoolReq.Body = newPoolParams diff --git a/cmd/garm-cli/cmd/repository.go b/cmd/garm-cli/cmd/repository.go index 91db23ea..eef936da 100644 --- a/cmd/garm-cli/cmd/repository.go +++ b/cmd/garm-cli/cmd/repository.go @@ -78,8 +78,13 @@ var repoWebhookInstallCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } + repoID, err := resolveRepository(args[0]) + if err != nil { + return err + } + installWebhookReq := apiClientRepos.NewInstallRepoWebhookParams() - installWebhookReq.RepoID = args[0] + installWebhookReq.RepoID = repoID installWebhookReq.Body.InsecureSSL = insecureRepoWebhook installWebhookReq.Body.WebhookEndpointType = params.WebhookEndpointDirect @@ -108,8 +113,13 @@ var repoHookInfoShowCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } + repoID, err := resolveRepository(args[0]) + if err != nil { + return err + } + showWebhookInfoReq := apiClientRepos.NewGetRepoWebhookInfoParams() - showWebhookInfoReq.RepoID = args[0] + showWebhookInfoReq.RepoID = repoID response, err := apiCli.Repositories.GetRepoWebhookInfo(showWebhookInfoReq, authToken) if err != nil { @@ -136,10 +146,15 @@ var repoWebhookUninstallCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - uninstallWebhookReq := apiClientRepos.NewUninstallRepoWebhookParams() - uninstallWebhookReq.RepoID = args[0] + repoID, err := resolveRepository(args[0]) + if err != nil { + return err + } - err := apiCli.Repositories.UninstallRepoWebhook(uninstallWebhookReq, authToken) + uninstallWebhookReq := apiClientRepos.NewUninstallRepoWebhookParams() + uninstallWebhookReq.RepoID = repoID + + err = apiCli.Repositories.UninstallRepoWebhook(uninstallWebhookReq, authToken) if err != nil { return err } @@ -243,13 +258,19 @@ var repoUpdateCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } + + repoID, err := resolveRepository(args[0]) + if err != nil { + return err + } + updateReposReq := apiClientRepos.NewUpdateRepoParams() updateReposReq.Body = params.UpdateEntityParams{ WebhookSecret: repoWebhookSecret, CredentialsName: repoCreds, PoolBalancerType: params.PoolBalancerType(poolBalancerType), } - updateReposReq.RepoID = args[0] + updateReposReq.RepoID = repoID response, err := apiCli.Repositories.UpdateRepo(updateReposReq, authToken) if err != nil { @@ -275,8 +296,14 @@ var repoShowCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } + + repoID, err := resolveRepository(args[0]) + if err != nil { + return err + } + showRepoReq := apiClientRepos.NewGetRepoParams() - showRepoReq.RepoID = args[0] + showRepoReq.RepoID = repoID response, err := apiCli.Repositories.GetRepo(showRepoReq, authToken) if err != nil { return err @@ -302,8 +329,14 @@ var repoDeleteCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } 
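+	// args[0] may be either the repository UUID or its owner/repo friendly name; it is resolved to an ID below.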
+ + repoID, err := resolveRepository(args[0]) + if err != nil { + return err + } + deleteRepoReq := apiClientRepos.NewDeleteRepoParams() - deleteRepoReq.RepoID = args[0] + deleteRepoReq.RepoID = repoID deleteRepoReq.KeepWebhook = &keepRepoWebhook if err := apiCli.Repositories.DeleteRepo(deleteRepoReq, authToken); err != nil { return err diff --git a/cmd/garm-cli/cmd/scalesets.go b/cmd/garm-cli/cmd/scalesets.go index 920b60cf..ece9b7a2 100644 --- a/cmd/garm-cli/cmd/scalesets.go +++ b/cmd/garm-cli/cmd/scalesets.go @@ -105,14 +105,26 @@ Example: switch len(args) { case 0: if cmd.Flags().Changed("repo") { + scalesetRepository, err = resolveRepository(scalesetRepository) + if err != nil { + return err + } listRepoScaleSetsReq := apiClientRepos.NewListRepoScaleSetsParams() listRepoScaleSetsReq.RepoID = scalesetRepository response, err = apiCli.Repositories.ListRepoScaleSets(listRepoScaleSetsReq, authToken) } else if cmd.Flags().Changed("org") { + scalesetOrganization, err = resolveOrganization(scalesetOrganization) + if err != nil { + return err + } listOrgScaleSetsReq := apiClientOrgs.NewListOrgScaleSetsParams() listOrgScaleSetsReq.OrgID = scalesetOrganization response, err = apiCli.Organizations.ListOrgScaleSets(listOrgScaleSetsReq, authToken) } else if cmd.Flags().Changed("enterprise") { + scalesetEnterprise, err = resolveEnterprise(scalesetEnterprise) + if err != nil { + return err + } listEnterpriseScaleSetsReq := apiClientEnterprises.NewListEnterpriseScaleSetsParams() listEnterpriseScaleSetsReq.EnterpriseID = scalesetEnterprise response, err = apiCli.Enterprises.ListEnterpriseScaleSets(listEnterpriseScaleSetsReq, authToken) @@ -244,16 +256,28 @@ var scaleSetAddCmd = &cobra.Command{ var err error var response scalesetPayloadGetter if cmd.Flags().Changed("repo") { + scalesetRepository, err = resolveRepository(scalesetRepository) + if err != nil { + return err + } newRepoScaleSetReq := apiClientRepos.NewCreateRepoScaleSetParams() newRepoScaleSetReq.RepoID = scalesetRepository newRepoScaleSetReq.Body = newScaleSetParams response, err = apiCli.Repositories.CreateRepoScaleSet(newRepoScaleSetReq, authToken) } else if cmd.Flags().Changed("org") { + scalesetOrganization, err = resolveOrganization(scalesetOrganization) + if err != nil { + return err + } newOrgScaleSetReq := apiClientOrgs.NewCreateOrgScaleSetParams() newOrgScaleSetReq.OrgID = scalesetOrganization newOrgScaleSetReq.Body = newScaleSetParams response, err = apiCli.Organizations.CreateOrgScaleSet(newOrgScaleSetReq, authToken) } else if cmd.Flags().Changed("enterprise") { + scalesetEnterprise, err = resolveEnterprise(scalesetEnterprise) + if err != nil { + return err + } newEnterpriseScaleSetReq := apiClientEnterprises.NewCreateEnterpriseScaleSetParams() newEnterpriseScaleSetReq.EnterpriseID = scalesetEnterprise newEnterpriseScaleSetReq.Body = newScaleSetParams diff --git a/cmd/garm-cli/cmd/util.go b/cmd/garm-cli/cmd/util.go new file mode 100644 index 00000000..584ad9c4 --- /dev/null +++ b/cmd/garm-cli/cmd/util.go @@ -0,0 +1,100 @@ +package cmd + +import ( + "fmt" + "strings" + + "github.com/google/uuid" + + apiClientEnterprises "github.com/cloudbase/garm/client/enterprises" + apiClientOrgs "github.com/cloudbase/garm/client/organizations" + apiClientRepos "github.com/cloudbase/garm/client/repositories" +) + +func resolveRepository(nameOrID string) (string, error) { + if nameOrID == "" { + return "", fmt.Errorf("missing repository name or ID") + } + entityID, err := uuid.Parse(nameOrID) + if err == nil { + return entityID.String(), nil + } 
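+
+	// The argument is not a UUID. Treat it as a friendly name of the form
+	// owner/repo and resolve it to a repository ID through the list API.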
+	parts := strings.SplitN(nameOrID, "/", 2)
+	if len(parts) < 2 {
+		// format of friendly name is invalid for a repository.
+		// Return the string as is.
+		return nameOrID, nil
+	}
+
+	listReposReq := apiClientRepos.NewListReposParams()
+	listReposReq.Owner = &parts[0]
+	listReposReq.Name = &parts[1]
+	response, err := apiCli.Repositories.ListRepos(listReposReq, authToken)
+	if err != nil {
+		return "", err
+	}
+	if len(response.Payload) == 0 {
+		return "", fmt.Errorf("repository %s was not found", nameOrID)
+	}
+
+	if len(response.Payload) > 1 {
+		return "", fmt.Errorf("multiple repositories with the name %s exist, please use the repository ID", nameOrID)
+	}
+	return response.Payload[0].ID, nil
+}
+
+func resolveOrganization(nameOrID string) (string, error) {
+	if nameOrID == "" {
+		return "", fmt.Errorf("missing organization name or ID")
+	}
+	entityID, err := uuid.Parse(nameOrID)
+	if err == nil {
+		return entityID.String(), nil
+	}
+
+	listOrgsReq := apiClientOrgs.NewListOrgsParams()
+	listOrgsReq.Name = &nameOrID
+	response, err := apiCli.Organizations.ListOrgs(listOrgsReq, authToken)
+	if err != nil {
+		return "", err
+	}
+
+	if len(response.Payload) == 0 {
+		return "", fmt.Errorf("organization %s was not found", nameOrID)
+	}
+
+	if len(response.Payload) > 1 {
+		return "", fmt.Errorf("multiple organizations with the name %s exist, please use the organization ID", nameOrID)
+	}
+
+	return response.Payload[0].ID, nil
+}
+
+func resolveEnterprise(nameOrID string) (string, error) {
+	if nameOrID == "" {
+		return "", fmt.Errorf("missing enterprise name or ID")
+	}
+	entityID, err := uuid.Parse(nameOrID)
+	if err == nil {
+		return entityID.String(), nil
+	}
+
+	listEnterprisesReq := apiClientEnterprises.NewListEnterprisesParams()
+	listEnterprisesReq.Name = &nameOrID
+	response, err := apiCli.Enterprises.ListEnterprises(listEnterprisesReq, authToken)
+	if err != nil {
+		return "", err
+	}
+
+	if len(response.Payload) == 0 {
+		return "", fmt.Errorf("enterprise %s was not found", nameOrID)
+	}
+
+	if len(response.Payload) > 1 {
+		return "", fmt.Errorf("multiple enterprises with the name %s exist, please use the enterprise ID", nameOrID)
+	}
+
+	return response.Payload[0].ID, nil
+}

From 584bd73553a2b62dba71943a5849c5dcec5b20d6 Mon Sep 17 00:00:00 2001
From: Gabriel Adrian Samfira
Date: Thu, 19 Jun 2025 19:52:47 +0000
Subject: [PATCH 107/179] Update dependencies.
Signed-off-by: Gabriel Adrian Samfira --- go.mod | 27 +- go.sum | 56 +- params/params.go | 2 +- runner/common/mocks/GithubClient.go | 12 +- runner/common/mocks/GithubEnterpriseClient.go | 2 +- runner/common/mocks/GithubEntityOperations.go | 12 +- runner/common/mocks/OrganizationHooks.go | 2 +- runner/common/mocks/RepositoryHooks.go | 2 +- runner/common/util.go | 4 +- runner/pool/common.go | 28 +- runner/pool/pool.go | 20 +- runner/pool/stub_client.go | 4 +- runner/pool/util.go | 2 +- test/integration/gh_cleanup/main.go | 10 +- test/integration/jobs_test.go | 2 +- test/integration/organizations_test.go | 6 +- test/integration/repositories_test.go | 6 +- util/util.go | 6 +- .../ghinstallation/v2/transport.go | 2 +- .../garm-provider-common/params/params.go | 1 + vendor/github.com/go-logr/logr/.golangci.yaml | 16 +- vendor/github.com/go-logr/logr/funcr/funcr.go | 8 +- vendor/github.com/go-sql-driver/mysql/AUTHORS | 3 + .../go-sql-driver/mysql/CHANGELOG.md | 11 + .../github.com/go-sql-driver/mysql/buffer.go | 18 +- .../go-sql-driver/mysql/compress.go | 21 +- .../github.com/go-sql-driver/mysql/packets.go | 49 +- .../go-sql-driver/mysql/transaction.go | 18 +- .../github.com/google/go-github/v57/AUTHORS | 487 - .../go-github/v57/github/actions_artifacts.go | 167 - .../go-github/v57/github/actions_cache.go | 249 - .../github/actions_permissions_enterprise.go | 207 - .../v57/github/actions_permissions_orgs.go | 220 - .../v57/github/actions_required_workflows.go | 267 - .../v57/github/actions_runner_groups.go | 337 - .../go-github/v57/github/actions_runners.go | 371 - .../go-github/v57/github/actions_secrets.go | 396 - .../go-github/v57/github/actions_variables.go | 331 - .../v57/github/actions_workflow_jobs.go | 139 - .../v57/github/actions_workflow_runs.go | 410 - .../v57/github/activity_notifications.go | 241 - .../google/go-github/v57/github/admin.go | 123 - .../google/go-github/v57/github/admin_orgs.go | 95 - .../go-github/v57/github/admin_stats.go | 172 - .../go-github/v57/github/admin_users.go | 141 - .../google/go-github/v57/github/apps.go | 420 - .../go-github/v57/github/authorizations.go | 293 - .../google/go-github/v57/github/billing.go | 215 - .../google/go-github/v57/github/checks.go | 475 - .../go-github/v57/github/code-scanning.go | 652 - .../go-github/v57/github/codesofconduct.go | 87 - .../go-github/v57/github/dependabot_alerts.go | 176 - .../google/go-github/v57/github/doc.go | 194 - .../google/go-github/v57/github/emojis.go | 40 - .../v57/github/enterprise_actions_runners.go | 118 - .../enterprise_code_security_and_analysis.go | 84 - .../go-github/v57/github/event_types.go | 1795 -- .../google/go-github/v57/github/gists.go | 397 - .../go-github/v57/github/git_commits.go | 225 - .../google/go-github/v57/github/git_refs.go | 185 - .../go-github/v57/github/github-accessors.go | 25375 ---------------- .../google/go-github/v57/github/github.go | 1537 - .../go-github/v57/github/interactions_orgs.go | 86 - .../v57/github/interactions_repos.go | 86 - .../google/go-github/v57/github/issues.go | 382 - .../go-github/v57/github/issues_events.go | 186 - .../go-github/v57/github/issues_labels.go | 253 - .../go-github/v57/github/issues_timeline.go | 195 - .../google/go-github/v57/github/markdown.go | 69 - .../google/go-github/v57/github/messages.go | 352 - .../google/go-github/v57/github/meta.go | 160 - .../google/go-github/v57/github/migrations.go | 240 - .../v57/github/migrations_source_import.go | 321 - .../go-github/v57/github/migrations_user.go | 230 - 
.../google/go-github/v57/github/orgs.go | 316 - .../go-github/v57/github/orgs_audit_log.go | 160 - .../github/orgs_credential_authorizations.go | 99 - .../go-github/v57/github/orgs_custom_roles.go | 128 - .../go-github/v57/github/orgs_members.go | 422 - .../go-github/v57/github/orgs_packages.go | 165 - .../v57/github/orgs_personal_access_tokens.go | 36 - .../go-github/v57/github/orgs_projects.go | 64 - .../go-github/v57/github/orgs_properties.go | 198 - .../google/go-github/v57/github/orgs_rules.go | 115 - .../v57/github/orgs_security_managers.go | 63 - .../google/go-github/v57/github/packages.go | 143 - .../google/go-github/v57/github/projects.go | 634 - .../google/go-github/v57/github/pulls.go | 508 - .../go-github/v57/github/pulls_reviewers.go | 86 - .../google/go-github/v57/github/rate_limit.go | 113 - .../google/go-github/v57/github/reactions.go | 570 - .../google/go-github/v57/github/repos.go | 2387 -- .../v57/github/repos_actions_permissions.go | 66 - .../go-github/v57/github/repos_autolinks.go | 112 - .../v57/github/repos_collaborators.go | 176 - .../go-github/v57/github/repos_contents.go | 359 - .../go-github/v57/github/repos_deployments.go | 264 - .../v57/github/repos_environments.go | 252 - .../go-github/v57/github/repos_hooks.go | 271 - .../v57/github/repos_hooks_configuration.go | 53 - .../v57/github/repos_hooks_deliveries.go | 142 - .../go-github/v57/github/repos_invitations.go | 95 - .../go-github/v57/github/repos_pages.go | 324 - .../v57/github/repos_prereceive_hooks.go | 118 - .../go-github/v57/github/repos_projects.go | 73 - .../go-github/v57/github/repos_releases.go | 475 - .../go-github/v57/github/repos_rules.go | 511 - .../go-github/v57/github/repos_stats.go | 242 - .../google/go-github/v57/github/repos_tags.go | 82 - .../google/go-github/v57/github/scim.go | 217 - .../google/go-github/v57/github/search.go | 347 - .../go-github/v57/github/secret_scanning.go | 257 - .../v57/github/security_advisories.go | 248 - .../google/go-github/v57/github/strings.go | 94 - .../google/go-github/v57/github/teams.go | 1067 - .../v57/github/teams_discussion_comments.go | 262 - .../go-github/v57/github/teams_discussions.go | 267 - .../google/go-github/v57/github/timestamp.go | 52 - .../google/go-github/v57/github/users.go | 294 - .../v57/github/users_administration.go | 80 - .../go-github/v57/github/users_emails.go | 105 - .../go-github/v57/github/users_gpg_keys.go | 138 - .../go-github/v57/github/users_packages.go | 235 - .../go-github/v57/github/users_projects.go | 72 - .../go-github/v57/github/with_appengine.go | 21 - .../go-github/v57/github/without_appengine.go | 20 - .../github.com/google/go-github/v71/LICENSE | 27 - .../google/go-github/v71/github/actions.go | 12 - .../go-github/v71/github/actions_oidc.go | 81 - .../v71/github/actions_required_workflows.go | 267 - .../go-github/v71/github/actions_workflows.go | 237 - .../google/go-github/v71/github/activity.go | 77 - .../go-github/v71/github/activity_events.go | 235 - .../go-github/v71/github/activity_star.go | 152 - .../go-github/v71/github/activity_watching.go | 158 - .../google/go-github/v71/github/apps_hooks.go | 52 - .../v71/github/apps_hooks_deliveries.go | 78 - .../go-github/v71/github/apps_installation.go | 138 - .../go-github/v71/github/apps_manifest.go | 51 - .../go-github/v71/github/apps_marketplace.go | 207 - .../google/go-github/v71/github/codespaces.go | 266 - .../v71/github/codespaces_secrets.go | 451 - .../google/go-github/v71/github/dependabot.go | 12 - .../v71/github/dependabot_secrets.go | 289 - 
.../go-github/v71/github/dependency_graph.go | 82 - .../google/go-github/v71/github/enterprise.go | 12 - .../enterprise_actions_runner_groups.go | 336 - .../v71/github/enterprise_audit_log.go | 37 - .../google/go-github/v71/github/event.go | 54 - .../go-github/v71/github/gists_comments.go | 128 - .../google/go-github/v71/github/git.go | 12 - .../google/go-github/v71/github/git_blobs.go | 88 - .../google/go-github/v71/github/git_tags.go | 88 - .../google/go-github/v71/github/git_trees.go | 166 - .../google/go-github/v71/github/gitignore.go | 68 - .../go-github/v71/github/interactions.go | 28 - .../go-github/v71/github/issue_import.go | 154 - .../go-github/v71/github/issues_assignees.go | 103 - .../go-github/v71/github/issues_comments.go | 165 - .../go-github/v71/github/issues_milestones.go | 157 - .../google/go-github/v71/github/licenses.go | 101 - .../v71/github/orgs_actions_allowed.go | 34 - .../v71/github/orgs_actions_permissions.go | 34 - .../google/go-github/v71/github/orgs_hooks.go | 142 - .../v71/github/orgs_hooks_configuration.go | 53 - .../v71/github/orgs_hooks_deliveries.go | 79 - .../v71/github/orgs_outside_collaborators.go | 87 - .../v71/github/orgs_users_blocking.go | 99 - .../go-github/v71/github/pulls_comments.go | 217 - .../go-github/v71/github/pulls_reviews.go | 329 - .../go-github/v71/github/pulls_threads.go | 17 - .../v71/github/repos_actions_access.go | 59 - .../v71/github/repos_actions_allowed.go | 53 - .../go-github/v71/github/repos_codeowners.go | 61 - .../go-github/v71/github/repos_comments.go | 173 - .../go-github/v71/github/repos_commits.go | 325 - .../v71/github/repos_community_health.go | 63 - .../repos_deployment_branch_policies.go | 135 - .../go-github/v71/github/repos_forks.go | 97 - .../google/go-github/v71/github/repos_keys.go | 99 - .../google/go-github/v71/github/repos_lfs.go | 53 - .../go-github/v71/github/repos_merging.go | 76 - .../go-github/v71/github/repos_statuses.go | 138 - .../go-github/v71/github/repos_traffic.go | 149 - .../go-github/v71/github/teams_members.go | 263 - .../go-github/v71/github/users_blocking.go | 99 - .../go-github/v71/github/users_followers.go | 135 - .../google/go-github/v71/github/users_keys.go | 122 - .../v71/github/users_ssh_signing_keys.go | 117 - .../google/go-github/{v71 => v72}/AUTHORS | 0 .../google/go-github/{v57 => v72}/LICENSE | 0 .../go-github/{v57 => v72}/github/actions.go | 0 .../{v71 => v72}/github/actions_artifacts.go | 0 .../{v71 => v72}/github/actions_cache.go | 0 .../github/actions_hosted_runners.go | 0 .../{v57 => v72}/github/actions_oidc.go | 0 .../github/actions_permissions_enterprise.go | 0 .../github/actions_permissions_orgs.go | 0 .../github/actions_runner_groups.go | 0 .../{v71 => v72}/github/actions_runners.go | 0 .../{v71 => v72}/github/actions_secrets.go | 0 .../{v71 => v72}/github/actions_variables.go | 0 .../github/actions_workflow_jobs.go | 0 .../github/actions_workflow_runs.go | 0 .../{v57 => v72}/github/actions_workflows.go | 0 .../go-github/{v57 => v72}/github/activity.go | 0 .../{v57 => v72}/github/activity_events.go | 0 .../github/activity_notifications.go | 0 .../{v57 => v72}/github/activity_star.go | 0 .../{v57 => v72}/github/activity_watching.go | 0 .../go-github/{v71 => v72}/github/admin.go | 0 .../{v71 => v72}/github/admin_orgs.go | 0 .../{v71 => v72}/github/admin_stats.go | 0 .../{v71 => v72}/github/admin_users.go | 0 .../go-github/{v71 => v72}/github/apps.go | 0 .../{v57 => v72}/github/apps_hooks.go | 0 .../github/apps_hooks_deliveries.go | 0 .../{v57 => 
v72}/github/apps_installation.go | 0 .../{v57 => v72}/github/apps_manifest.go | 0 .../{v57 => v72}/github/apps_marketplace.go | 0 .../{v71 => v72}/github/attestations.go | 0 .../{v71 => v72}/github/authorizations.go | 0 .../go-github/{v71 => v72}/github/billing.go | 0 .../go-github/{v71 => v72}/github/checks.go | 0 .../{v71 => v72}/github/code_scanning.go | 0 .../{v71 => v72}/github/codesofconduct.go | 0 .../{v57 => v72}/github/codespaces.go | 0 .../{v57 => v72}/github/codespaces_secrets.go | 0 .../go-github/{v71 => v72}/github/copilot.go | 0 .../{v57 => v72}/github/dependabot.go | 0 .../{v71 => v72}/github/dependabot_alerts.go | 10 + .../{v57 => v72}/github/dependabot_secrets.go | 0 .../{v57 => v72}/github/dependency_graph.go | 0 .../github/dependency_graph_snapshots.go | 0 .../go-github/{v71 => v72}/github/doc.go | 2 +- .../go-github/{v71 => v72}/github/emojis.go | 0 .../{v57 => v72}/github/enterprise.go | 0 .../enterprise_actions_hosted_runners.go | 0 .../enterprise_actions_runner_groups.go | 0 .../github/enterprise_actions_runners.go | 0 .../github/enterprise_audit_log.go | 0 .../enterprise_code_security_and_analysis.go | 0 .../github/enterprise_manage_ghes.go | 0 .../github/enterprise_manage_ghes_config.go | 0 .../enterprise_manage_ghes_maintenance.go | 0 .../github/enterprise_manage_ghes_ssh.go | 0 .../enterprise_network_configurations.go | 0 .../github/enterprise_properties.go | 0 .../{v71 => v72}/github/enterprise_rules.go | 0 .../go-github/{v57 => v72}/github/event.go | 0 .../{v71 => v72}/github/event_types.go | 20 + .../go-github/{v71 => v72}/github/gists.go | 0 .../{v57 => v72}/github/gists_comments.go | 0 .../go-github/{v57 => v72}/github/git.go | 0 .../{v57 => v72}/github/git_blobs.go | 0 .../{v71 => v72}/github/git_commits.go | 0 .../go-github/{v71 => v72}/github/git_refs.go | 0 .../go-github/{v57 => v72}/github/git_tags.go | 0 .../{v57 => v72}/github/git_trees.go | 0 .../{v71 => v72}/github/github-accessors.go | 368 +- .../go-github/{v71 => v72}/github/github.go | 2 +- .../{v57 => v72}/github/gitignore.go | 0 .../{v57 => v72}/github/interactions.go | 0 .../{v71 => v72}/github/interactions_orgs.go | 0 .../{v71 => v72}/github/interactions_repos.go | 0 .../{v57 => v72}/github/issue_import.go | 0 .../go-github/{v71 => v72}/github/issues.go | 9 + .../{v57 => v72}/github/issues_assignees.go | 0 .../{v57 => v72}/github/issues_comments.go | 0 .../{v71 => v72}/github/issues_events.go | 0 .../{v71 => v72}/github/issues_labels.go | 0 .../{v57 => v72}/github/issues_milestones.go | 0 .../{v71 => v72}/github/issues_timeline.go | 0 .../go-github/{v57 => v72}/github/licenses.go | 0 .../go-github/{v71 => v72}/github/markdown.go | 0 .../go-github/{v71 => v72}/github/messages.go | 1 + .../go-github/{v71 => v72}/github/meta.go | 0 .../{v71 => v72}/github/migrations.go | 0 .../github/migrations_source_import.go | 0 .../{v71 => v72}/github/migrations_user.go | 0 .../go-github/{v71 => v72}/github/orgs.go | 0 .../github/orgs_actions_allowed.go | 0 .../github/orgs_actions_permissions.go | 0 .../{v71 => v72}/github/orgs_attestations.go | 0 .../{v71 => v72}/github/orgs_audit_log.go | 0 .../orgs_codesecurity_configurations.go | 0 .../github/orgs_credential_authorizations.go | 0 .../github/orgs_custom_repository_roles.go | 0 .../{v57 => v72}/github/orgs_hooks.go | 0 .../github/orgs_hooks_configuration.go | 0 .../github/orgs_hooks_deliveries.go | 0 .../{v71 => v72}/github/orgs_issue_types.go | 0 .../{v71 => v72}/github/orgs_members.go | 0 .../github/orgs_network_configurations.go | 0 
.../github/orgs_organization_roles.go | 0 .../github/orgs_outside_collaborators.go | 0 .../{v71 => v72}/github/orgs_packages.go | 0 .../github/orgs_personal_access_tokens.go | 0 .../{v71 => v72}/github/orgs_properties.go | 0 .../{v71 => v72}/github/orgs_rules.go | 7 +- .../github/orgs_security_managers.go | 0 .../github/orgs_users_blocking.go | 0 .../go-github/{v71 => v72}/github/packages.go | 0 .../go-github/{v71 => v72}/github/pulls.go | 0 .../{v57 => v72}/github/pulls_comments.go | 0 .../{v71 => v72}/github/pulls_reviewers.go | 0 .../{v57 => v72}/github/pulls_reviews.go | 0 .../{v57 => v72}/github/pulls_threads.go | 0 .../{v71 => v72}/github/rate_limit.go | 0 .../{v71 => v72}/github/reactions.go | 0 .../go-github/{v71 => v72}/github/repos.go | 0 .../github/repos_actions_access.go | 0 .../github/repos_actions_allowed.go | 0 .../github/repos_actions_permissions.go | 0 .../{v71 => v72}/github/repos_attestations.go | 0 .../{v71 => v72}/github/repos_autolinks.go | 0 .../{v57 => v72}/github/repos_codeowners.go | 0 .../github/repos_collaborators.go | 0 .../{v57 => v72}/github/repos_comments.go | 0 .../{v57 => v72}/github/repos_commits.go | 0 .../github/repos_community_health.go | 0 .../{v71 => v72}/github/repos_contents.go | 0 .../repos_deployment_branch_policies.go | 0 .../repos_deployment_protection_rules.go | 0 .../{v71 => v72}/github/repos_deployments.go | 0 .../{v71 => v72}/github/repos_environments.go | 0 .../{v57 => v72}/github/repos_forks.go | 0 .../{v71 => v72}/github/repos_hooks.go | 0 .../github/repos_hooks_configuration.go | 0 .../github/repos_hooks_deliveries.go | 21 + .../{v71 => v72}/github/repos_invitations.go | 0 .../{v57 => v72}/github/repos_keys.go | 0 .../{v57 => v72}/github/repos_lfs.go | 0 .../{v57 => v72}/github/repos_merging.go | 0 .../{v71 => v72}/github/repos_pages.go | 0 .../github/repos_prereceive_hooks.go | 0 .../{v71 => v72}/github/repos_properties.go | 0 .../{v71 => v72}/github/repos_releases.go | 0 .../{v71 => v72}/github/repos_rules.go | 27 +- .../{v71 => v72}/github/repos_stats.go | 0 .../{v57 => v72}/github/repos_statuses.go | 0 .../{v71 => v72}/github/repos_tags.go | 0 .../{v57 => v72}/github/repos_traffic.go | 0 .../go-github/{v71 => v72}/github/rules.go | 48 +- .../go-github/{v71 => v72}/github/scim.go | 0 .../go-github/{v71 => v72}/github/search.go | 0 .../{v71 => v72}/github/secret_scanning.go | 50 +- .../github/security_advisories.go | 0 .../go-github/{v71 => v72}/github/strings.go | 0 .../go-github/{v71 => v72}/github/teams.go | 0 .../github/teams_discussion_comments.go | 0 .../{v71 => v72}/github/teams_discussions.go | 0 .../{v57 => v72}/github/teams_members.go | 0 .../{v71 => v72}/github/timestamp.go | 0 .../go-github/{v71 => v72}/github/users.go | 0 .../github/users_administration.go | 0 .../{v71 => v72}/github/users_attestations.go | 0 .../{v57 => v72}/github/users_blocking.go | 0 .../{v71 => v72}/github/users_emails.go | 0 .../{v57 => v72}/github/users_followers.go | 0 .../{v71 => v72}/github/users_gpg_keys.go | 0 .../{v57 => v72}/github/users_keys.go | 0 .../{v71 => v72}/github/users_packages.go | 0 .../github/users_ssh_signing_keys.go | 0 .../{v71 => v72}/github/with_appengine.go | 0 .../{v71 => v72}/github/without_appengine.go | 0 .../bson/bsonrw/extjson_writer.go | 7 +- vendor/golang.org/x/crypto/bcrypt/bcrypt.go | 2 +- vendor/golang.org/x/sync/errgroup/errgroup.go | 9 +- vendor/gorm.io/driver/mysql/migrator.go | 2 +- vendor/gorm.io/driver/mysql/mysql.go | 4 +- vendor/gorm.io/driver/sqlite/ddlmod.go | 10 +- 
vendor/gorm.io/driver/sqlite/sqlite.go | 4 +- vendor/gorm.io/gorm/callbacks/create.go | 10 + vendor/gorm.io/gorm/callbacks/delete.go | 10 + vendor/gorm.io/gorm/callbacks/preload.go | 16 +- vendor/gorm.io/gorm/callbacks/query.go | 49 +- vendor/gorm.io/gorm/callbacks/raw.go | 5 + vendor/gorm.io/gorm/callbacks/update.go | 9 + vendor/gorm.io/gorm/chainable_api.go | 7 +- vendor/gorm.io/gorm/clause/joins.go | 32 + vendor/gorm.io/gorm/finisher_api.go | 12 +- vendor/gorm.io/gorm/generics.go | 605 + vendor/gorm.io/gorm/gorm.go | 14 +- vendor/gorm.io/gorm/scan.go | 9 + vendor/gorm.io/gorm/schema/field.go | 5 +- vendor/gorm.io/gorm/schema/index.go | 2 +- vendor/gorm.io/gorm/schema/relationship.go | 8 +- vendor/gorm.io/gorm/statement.go | 57 +- vendor/modules.txt | 35 +- 390 files changed, 1318 insertions(+), 61176 deletions(-) delete mode 100644 vendor/github.com/google/go-github/v57/AUTHORS delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_artifacts.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_cache.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_permissions_enterprise.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_permissions_orgs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_required_workflows.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_runner_groups.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_runners.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_secrets.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_variables.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_workflow_jobs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/actions_workflow_runs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/activity_notifications.go delete mode 100644 vendor/github.com/google/go-github/v57/github/admin.go delete mode 100644 vendor/github.com/google/go-github/v57/github/admin_orgs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/admin_stats.go delete mode 100644 vendor/github.com/google/go-github/v57/github/admin_users.go delete mode 100644 vendor/github.com/google/go-github/v57/github/apps.go delete mode 100644 vendor/github.com/google/go-github/v57/github/authorizations.go delete mode 100644 vendor/github.com/google/go-github/v57/github/billing.go delete mode 100644 vendor/github.com/google/go-github/v57/github/checks.go delete mode 100644 vendor/github.com/google/go-github/v57/github/code-scanning.go delete mode 100644 vendor/github.com/google/go-github/v57/github/codesofconduct.go delete mode 100644 vendor/github.com/google/go-github/v57/github/dependabot_alerts.go delete mode 100644 vendor/github.com/google/go-github/v57/github/doc.go delete mode 100644 vendor/github.com/google/go-github/v57/github/emojis.go delete mode 100644 vendor/github.com/google/go-github/v57/github/enterprise_actions_runners.go delete mode 100644 vendor/github.com/google/go-github/v57/github/enterprise_code_security_and_analysis.go delete mode 100644 vendor/github.com/google/go-github/v57/github/event_types.go delete mode 100644 vendor/github.com/google/go-github/v57/github/gists.go delete mode 100644 vendor/github.com/google/go-github/v57/github/git_commits.go delete mode 100644 vendor/github.com/google/go-github/v57/github/git_refs.go delete mode 100644 
vendor/github.com/google/go-github/v57/github/github-accessors.go delete mode 100644 vendor/github.com/google/go-github/v57/github/github.go delete mode 100644 vendor/github.com/google/go-github/v57/github/interactions_orgs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/interactions_repos.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issues.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issues_events.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issues_labels.go delete mode 100644 vendor/github.com/google/go-github/v57/github/issues_timeline.go delete mode 100644 vendor/github.com/google/go-github/v57/github/markdown.go delete mode 100644 vendor/github.com/google/go-github/v57/github/messages.go delete mode 100644 vendor/github.com/google/go-github/v57/github/meta.go delete mode 100644 vendor/github.com/google/go-github/v57/github/migrations.go delete mode 100644 vendor/github.com/google/go-github/v57/github/migrations_source_import.go delete mode 100644 vendor/github.com/google/go-github/v57/github/migrations_user.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_audit_log.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_credential_authorizations.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_custom_roles.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_members.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_packages.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_personal_access_tokens.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_projects.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_properties.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_rules.go delete mode 100644 vendor/github.com/google/go-github/v57/github/orgs_security_managers.go delete mode 100644 vendor/github.com/google/go-github/v57/github/packages.go delete mode 100644 vendor/github.com/google/go-github/v57/github/projects.go delete mode 100644 vendor/github.com/google/go-github/v57/github/pulls.go delete mode 100644 vendor/github.com/google/go-github/v57/github/pulls_reviewers.go delete mode 100644 vendor/github.com/google/go-github/v57/github/rate_limit.go delete mode 100644 vendor/github.com/google/go-github/v57/github/reactions.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_actions_permissions.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_autolinks.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_collaborators.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_contents.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_deployments.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_environments.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_hooks.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_hooks_configuration.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_hooks_deliveries.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_invitations.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_pages.go 
delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_prereceive_hooks.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_projects.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_releases.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_rules.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_stats.go delete mode 100644 vendor/github.com/google/go-github/v57/github/repos_tags.go delete mode 100644 vendor/github.com/google/go-github/v57/github/scim.go delete mode 100644 vendor/github.com/google/go-github/v57/github/search.go delete mode 100644 vendor/github.com/google/go-github/v57/github/secret_scanning.go delete mode 100644 vendor/github.com/google/go-github/v57/github/security_advisories.go delete mode 100644 vendor/github.com/google/go-github/v57/github/strings.go delete mode 100644 vendor/github.com/google/go-github/v57/github/teams.go delete mode 100644 vendor/github.com/google/go-github/v57/github/teams_discussion_comments.go delete mode 100644 vendor/github.com/google/go-github/v57/github/teams_discussions.go delete mode 100644 vendor/github.com/google/go-github/v57/github/timestamp.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_administration.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_emails.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_gpg_keys.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_packages.go delete mode 100644 vendor/github.com/google/go-github/v57/github/users_projects.go delete mode 100644 vendor/github.com/google/go-github/v57/github/with_appengine.go delete mode 100644 vendor/github.com/google/go-github/v57/github/without_appengine.go delete mode 100644 vendor/github.com/google/go-github/v71/LICENSE delete mode 100644 vendor/github.com/google/go-github/v71/github/actions.go delete mode 100644 vendor/github.com/google/go-github/v71/github/actions_oidc.go delete mode 100644 vendor/github.com/google/go-github/v71/github/actions_required_workflows.go delete mode 100644 vendor/github.com/google/go-github/v71/github/actions_workflows.go delete mode 100644 vendor/github.com/google/go-github/v71/github/activity.go delete mode 100644 vendor/github.com/google/go-github/v71/github/activity_events.go delete mode 100644 vendor/github.com/google/go-github/v71/github/activity_star.go delete mode 100644 vendor/github.com/google/go-github/v71/github/activity_watching.go delete mode 100644 vendor/github.com/google/go-github/v71/github/apps_hooks.go delete mode 100644 vendor/github.com/google/go-github/v71/github/apps_hooks_deliveries.go delete mode 100644 vendor/github.com/google/go-github/v71/github/apps_installation.go delete mode 100644 vendor/github.com/google/go-github/v71/github/apps_manifest.go delete mode 100644 vendor/github.com/google/go-github/v71/github/apps_marketplace.go delete mode 100644 vendor/github.com/google/go-github/v71/github/codespaces.go delete mode 100644 vendor/github.com/google/go-github/v71/github/codespaces_secrets.go delete mode 100644 vendor/github.com/google/go-github/v71/github/dependabot.go delete mode 100644 vendor/github.com/google/go-github/v71/github/dependabot_secrets.go delete mode 100644 vendor/github.com/google/go-github/v71/github/dependency_graph.go delete mode 100644 vendor/github.com/google/go-github/v71/github/enterprise.go delete mode 
100644 vendor/github.com/google/go-github/v71/github/enterprise_actions_runner_groups.go delete mode 100644 vendor/github.com/google/go-github/v71/github/enterprise_audit_log.go delete mode 100644 vendor/github.com/google/go-github/v71/github/event.go delete mode 100644 vendor/github.com/google/go-github/v71/github/gists_comments.go delete mode 100644 vendor/github.com/google/go-github/v71/github/git.go delete mode 100644 vendor/github.com/google/go-github/v71/github/git_blobs.go delete mode 100644 vendor/github.com/google/go-github/v71/github/git_tags.go delete mode 100644 vendor/github.com/google/go-github/v71/github/git_trees.go delete mode 100644 vendor/github.com/google/go-github/v71/github/gitignore.go delete mode 100644 vendor/github.com/google/go-github/v71/github/interactions.go delete mode 100644 vendor/github.com/google/go-github/v71/github/issue_import.go delete mode 100644 vendor/github.com/google/go-github/v71/github/issues_assignees.go delete mode 100644 vendor/github.com/google/go-github/v71/github/issues_comments.go delete mode 100644 vendor/github.com/google/go-github/v71/github/issues_milestones.go delete mode 100644 vendor/github.com/google/go-github/v71/github/licenses.go delete mode 100644 vendor/github.com/google/go-github/v71/github/orgs_actions_allowed.go delete mode 100644 vendor/github.com/google/go-github/v71/github/orgs_actions_permissions.go delete mode 100644 vendor/github.com/google/go-github/v71/github/orgs_hooks.go delete mode 100644 vendor/github.com/google/go-github/v71/github/orgs_hooks_configuration.go delete mode 100644 vendor/github.com/google/go-github/v71/github/orgs_hooks_deliveries.go delete mode 100644 vendor/github.com/google/go-github/v71/github/orgs_outside_collaborators.go delete mode 100644 vendor/github.com/google/go-github/v71/github/orgs_users_blocking.go delete mode 100644 vendor/github.com/google/go-github/v71/github/pulls_comments.go delete mode 100644 vendor/github.com/google/go-github/v71/github/pulls_reviews.go delete mode 100644 vendor/github.com/google/go-github/v71/github/pulls_threads.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_actions_access.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_actions_allowed.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_codeowners.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_comments.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_commits.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_community_health.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_deployment_branch_policies.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_forks.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_keys.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_lfs.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_merging.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_statuses.go delete mode 100644 vendor/github.com/google/go-github/v71/github/repos_traffic.go delete mode 100644 vendor/github.com/google/go-github/v71/github/teams_members.go delete mode 100644 vendor/github.com/google/go-github/v71/github/users_blocking.go delete mode 100644 vendor/github.com/google/go-github/v71/github/users_followers.go delete mode 100644 vendor/github.com/google/go-github/v71/github/users_keys.go delete mode 100644 
vendor/github.com/google/go-github/v71/github/users_ssh_signing_keys.go rename vendor/github.com/google/go-github/{v71 => v72}/AUTHORS (100%) rename vendor/github.com/google/go-github/{v57 => v72}/LICENSE (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/actions.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_artifacts.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_cache.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_hosted_runners.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/actions_oidc.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_permissions_enterprise.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_permissions_orgs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_runner_groups.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_runners.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_secrets.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_variables.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_workflow_jobs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/actions_workflow_runs.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/actions_workflows.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/activity.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/activity_events.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/activity_notifications.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/activity_star.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/activity_watching.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/admin.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/admin_orgs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/admin_stats.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/admin_users.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/apps.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/apps_hooks.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/apps_hooks_deliveries.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/apps_installation.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/apps_manifest.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/apps_marketplace.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/attestations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/authorizations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/billing.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/checks.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/code_scanning.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/codesofconduct.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/codespaces.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/codespaces_secrets.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/copilot.go (100%) rename 
vendor/github.com/google/go-github/{v57 => v72}/github/dependabot.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/dependabot_alerts.go (94%) rename vendor/github.com/google/go-github/{v57 => v72}/github/dependabot_secrets.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/dependency_graph.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/dependency_graph_snapshots.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/doc.go (99%) rename vendor/github.com/google/go-github/{v71 => v72}/github/emojis.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/enterprise.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_actions_hosted_runners.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/enterprise_actions_runner_groups.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_actions_runners.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/enterprise_audit_log.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_code_security_and_analysis.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_manage_ghes.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_manage_ghes_config.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_manage_ghes_maintenance.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_manage_ghes_ssh.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_network_configurations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_properties.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/enterprise_rules.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/event.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/event_types.go (99%) rename vendor/github.com/google/go-github/{v71 => v72}/github/gists.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/gists_comments.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/git.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/git_blobs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/git_commits.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/git_refs.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/git_tags.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/git_trees.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/github-accessors.go (99%) rename vendor/github.com/google/go-github/{v71 => v72}/github/github.go (99%) rename vendor/github.com/google/go-github/{v57 => v72}/github/gitignore.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/interactions.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/interactions_orgs.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/interactions_repos.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/issue_import.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/issues.go (97%) rename vendor/github.com/google/go-github/{v57 => v72}/github/issues_assignees.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/issues_comments.go (100%) 
rename vendor/github.com/google/go-github/{v71 => v72}/github/issues_events.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/issues_labels.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/issues_milestones.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/issues_timeline.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/licenses.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/markdown.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/messages.go (99%) rename vendor/github.com/google/go-github/{v71 => v72}/github/meta.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/migrations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/migrations_source_import.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/migrations_user.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/orgs_actions_allowed.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/orgs_actions_permissions.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_attestations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_audit_log.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_codesecurity_configurations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_credential_authorizations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_custom_repository_roles.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/orgs_hooks.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/orgs_hooks_configuration.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/orgs_hooks_deliveries.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_issue_types.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_members.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_network_configurations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_organization_roles.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/orgs_outside_collaborators.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_packages.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_personal_access_tokens.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_properties.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_rules.go (96%) rename vendor/github.com/google/go-github/{v71 => v72}/github/orgs_security_managers.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/orgs_users_blocking.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/packages.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/pulls.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/pulls_comments.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/pulls_reviewers.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/pulls_reviews.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/pulls_threads.go (100%) rename vendor/github.com/google/go-github/{v71 => 
v72}/github/rate_limit.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/reactions.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_actions_access.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_actions_allowed.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_actions_permissions.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_attestations.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_autolinks.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_codeowners.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_collaborators.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_comments.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_commits.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_community_health.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_contents.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_deployment_branch_policies.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_deployment_protection_rules.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_deployments.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_environments.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_forks.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_hooks.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_hooks_configuration.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_hooks_deliveries.go (89%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_invitations.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_keys.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_lfs.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_merging.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_pages.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_prereceive_hooks.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_properties.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_releases.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_rules.go (88%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_stats.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_statuses.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/repos_tags.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/repos_traffic.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/rules.go (95%) rename vendor/github.com/google/go-github/{v71 => v72}/github/scim.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/search.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/secret_scanning.go (78%) rename vendor/github.com/google/go-github/{v71 => v72}/github/security_advisories.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/strings.go 
(100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/teams.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/teams_discussion_comments.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/teams_discussions.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/teams_members.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/timestamp.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_administration.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_attestations.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/users_blocking.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_emails.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/users_followers.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_gpg_keys.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/users_keys.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/users_packages.go (100%) rename vendor/github.com/google/go-github/{v57 => v72}/github/users_ssh_signing_keys.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/with_appengine.go (100%) rename vendor/github.com/google/go-github/{v71 => v72}/github/without_appengine.go (100%) create mode 100644 vendor/gorm.io/gorm/generics.go diff --git a/go.mod b/go.mod index 30815255..b572643f 100644 --- a/go.mod +++ b/go.mod @@ -6,15 +6,15 @@ toolchain go1.23.6 require ( github.com/BurntSushi/toml v1.5.0 - github.com/bradleyfalzon/ghinstallation/v2 v2.15.0 - github.com/cloudbase/garm-provider-common v0.1.4 + github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 + github.com/cloudbase/garm-provider-common v0.1.5 github.com/felixge/httpsnoop v1.0.4 github.com/go-openapi/errors v0.22.1 github.com/go-openapi/runtime v0.28.0 github.com/go-openapi/strfmt v0.23.0 github.com/go-openapi/swag v0.23.1 github.com/golang-jwt/jwt/v5 v5.2.2 - github.com/google/go-github/v57 v57.0.0 + github.com/google/go-github/v72 v72.0.0 github.com/google/uuid v1.6.0 github.com/gorilla/handlers v1.5.2 github.com/gorilla/mux v1.8.1 @@ -28,15 +28,15 @@ require ( github.com/prometheus/client_golang v1.22.0 github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 - golang.org/x/crypto v0.38.0 + golang.org/x/crypto v0.39.0 golang.org/x/oauth2 v0.30.0 - golang.org/x/sync v0.14.0 + golang.org/x/sync v0.15.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 gorm.io/datatypes v1.2.5 - gorm.io/driver/mysql v1.5.7 - gorm.io/driver/sqlite v1.5.7 - gorm.io/gorm v1.26.1 + gorm.io/driver/mysql v1.6.0 + gorm.io/driver/sqlite v1.6.0 + gorm.io/gorm v1.30.0 ) require ( @@ -46,7 +46,7 @@ require ( github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/chzyer/readline v1.5.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/analysis v0.23.0 // indirect github.com/go-openapi/jsonpointer v0.21.1 // indirect @@ -54,9 +54,8 @@ require ( github.com/go-openapi/loads v0.22.0 // indirect github.com/go-openapi/spec v0.21.0 // indirect github.com/go-openapi/validate v0.24.0 // indirect - github.com/go-sql-driver/mysql v1.9.2 // indirect + github.com/go-sql-driver/mysql 
v1.9.3 // indirect github.com/golang-jwt/jwt/v4 v4.5.2 // indirect - github.com/google/go-github/v71 v71.0.0 // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jinzhu/inflection v1.0.0 // indirect @@ -82,14 +81,14 @@ require ( github.com/spf13/pflag v1.0.6 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 // indirect - go.mongodb.org/mongo-driver v1.17.3 // indirect + go.mongodb.org/mongo-driver v1.17.4 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect go.opentelemetry.io/otel v1.36.0 // indirect go.opentelemetry.io/otel/metric v1.36.0 // indirect go.opentelemetry.io/otel/trace v1.36.0 // indirect - golang.org/x/net v0.40.0 // indirect + golang.org/x/net v0.41.0 // indirect golang.org/x/sys v0.33.0 // indirect - golang.org/x/text v0.25.0 // indirect + golang.org/x/text v0.26.0 // indirect google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 2f887203..e557709f 100644 --- a/go.sum +++ b/go.sum @@ -6,8 +6,8 @@ github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3d github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bradleyfalzon/ghinstallation/v2 v2.15.0 h1:7r2rPUM04rgszMP0U1UZ1M5VoVVIlsaBSnpABfYxcQY= -github.com/bradleyfalzon/ghinstallation/v2 v2.15.0/go.mod h1:PoH9Vhy82OeRFZfxsVrk3mfQhVkEzou9OOwPOsEhiXE= +github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 h1:B91r9bHtXp/+XRgS5aZm6ZzTdz3ahgJYmkt4xZkgDz8= +github.com/bradleyfalzon/ghinstallation/v2 v2.16.0/go.mod h1:OeVe5ggFzoBnmgitZe/A+BqGOnv1DvU/0uiLQi1wutM= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= @@ -19,8 +19,8 @@ github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObk github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04= github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= -github.com/cloudbase/garm-provider-common v0.1.4 h1:spRjl0PV4r8vKaCTNp6xBQbRKfls/cmbBEl/i/eGWSo= -github.com/cloudbase/garm-provider-common v0.1.4/go.mod h1:sK26i2NpjjAjhanNKiWw8iPkqt+XeohTKpFnEP7JdZ4= +github.com/cloudbase/garm-provider-common v0.1.5 h1:aJL646l+VnZceQ2grbDYhWfxYpaQR2/QsUSD76kSZVs= +github.com/cloudbase/garm-provider-common v0.1.5/go.mod h1:2O51WbcfqRx5fDHyyJgIFq7KdTZZnefsM+aoOchyleU= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -28,8 +28,8 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/go-logr/logr v1.2.2/go.mod 
h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= -github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= @@ -52,9 +52,8 @@ github.com/go-openapi/swag v0.23.1 h1:lpsStH0n2ittzTnbaSloVZLuB5+fvSY/+hnagBjSNZ github.com/go-openapi/swag v0.23.1/go.mod h1:STZs8TbRvEQQKUA+JZNAm3EWlgaOBGpyFDqQnDHMef0= github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58= github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ= -github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= -github.com/go-sql-driver/mysql v1.9.2 h1:4cNKDYQ1I84SXslGddlsrMhc8k4LeDVj6Ad6WRjiHuU= -github.com/go-sql-driver/mysql v1.9.2/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= +github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo= +github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= @@ -66,10 +65,8 @@ github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EO github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/google/go-github/v57 v57.0.0 h1:L+Y3UPTY8ALM8x+TV0lg+IEBI+upibemtBD8Q9u7zHs= -github.com/google/go-github/v57 v57.0.0/go.mod h1:s0omdnye0hvK/ecLvpsGfJMiRt85PimQh4oygmLIxHw= -github.com/google/go-github/v71 v71.0.0 h1:Zi16OymGKZZMm8ZliffVVJ/Q9YZreDKONCr+WUd0Z30= -github.com/google/go-github/v71 v71.0.0/go.mod h1:URZXObp2BLlMjwu0O8g4y6VBneUj2bCHgnI8FfgZ51M= +github.com/google/go-github/v72 v72.0.0 h1:FcIO37BLoVPBO9igQQ6tStsv2asG4IPcYFi655PPvBM= +github.com/google/go-github/v72 v72.0.0/go.mod h1:WWtw8GMRiL62mvIquf1kO3onRHeWWKmK01qdCY8c5fg= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= @@ -178,8 +175,8 @@ github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOf github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 h1:xzABM9let0HLLqFypcxvLmlvEciCHL7+Lv+4vwZqecI= github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569/go.mod h1:2Ly+NIftZN4de9zRmENdYbvPQeaVIYKWpLFStLFEBgI= -go.mongodb.org/mongo-driver v1.17.3 h1:TQyXhnsWfWtgAhMtOgtYHMTkZIfBTpMTsMnd9ZBeHxQ= -go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +go.mongodb.org/mongo-driver v1.17.4 
h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFXVw= +go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= @@ -190,21 +187,21 @@ go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucg go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= -golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= -golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= -golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= -golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= +golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= +golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= +golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= -golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= -golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= +golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= -golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= +golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= +golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= @@ -222,14 +219,13 @@ gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gorm.io/datatypes v1.2.5 h1:9UogU3jkydFVW1bIVVeoYsTpLRgwDVW3rHfJG6/Ek9I= gorm.io/datatypes v1.2.5/go.mod h1:I5FUdlKpLb5PMqeMQhm30CQ6jXP8Rj89xkTeCSAaAD4= -gorm.io/driver/mysql v1.5.7 h1:MndhOPYOfEp2rHKgkZIhJ16eVUIRf2HmzgoPmh7FCWo= -gorm.io/driver/mysql v1.5.7/go.mod h1:sEtPWMiqiN1N1cMXoXmBbd8C6/l+TESwriotuRRpkDM= +gorm.io/driver/mysql v1.6.0 h1:eNbLmNTpPpTOVZi8MMxCi2aaIm0ZpInbORNXDwyLGvg= +gorm.io/driver/mysql v1.6.0/go.mod 
h1:D/oCC2GWK3M/dqoLxnOlaNKmXz8WNTfcS9y5ovaSqKo= gorm.io/driver/postgres v1.5.0 h1:u2FXTy14l45qc3UeCJ7QaAXZmZfDDv0YrthvmRq1l0U= gorm.io/driver/postgres v1.5.0/go.mod h1:FUZXzO+5Uqg5zzwzv4KK49R8lvGIyscBOqYrtI1Ce9A= -gorm.io/driver/sqlite v1.5.7 h1:8NvsrhP0ifM7LX9G4zPB97NwovUakUxc+2V2uuf3Z1I= -gorm.io/driver/sqlite v1.5.7/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDah4= +gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ= +gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8= gorm.io/driver/sqlserver v1.5.4 h1:xA+Y1KDNspv79q43bPyjDMUgHoYHLhXYmdFcYPobg8g= gorm.io/driver/sqlserver v1.5.4/go.mod h1:+frZ/qYmuna11zHPlh5oc2O6ZA/lS88Keb0XSH1Zh/g= -gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= -gorm.io/gorm v1.26.1 h1:ghB2gUI9FkS46luZtn6DLZ0f6ooBJ5IbVej2ENFDjRw= -gorm.io/gorm v1.26.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= +gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs= +gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/params/params.go b/params/params.go index 3a337dba..c44891bd 100644 --- a/params/params.go +++ b/params/params.go @@ -26,7 +26,7 @@ import ( "time" "github.com/bradleyfalzon/ghinstallation/v2" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v72/github" "github.com/google/uuid" "golang.org/x/oauth2" diff --git a/runner/common/mocks/GithubClient.go b/runner/common/mocks/GithubClient.go index 3388a894..c867d32e 100644 --- a/runner/common/mocks/GithubClient.go +++ b/runner/common/mocks/GithubClient.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v57/github" + github "github.com/google/go-github/v72/github" mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" @@ -302,7 +302,7 @@ func (_m *GithubClient) ListEntityRunnerApplicationDownloads(ctx context.Context } // ListEntityRunners provides a mock function with given fields: ctx, opts -func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.ListOptions) (*github.Runners, *github.Response, error) { +func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { ret := _m.Called(ctx, opts) if len(ret) == 0 { @@ -312,10 +312,10 @@ func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.List var r0 *github.Runners var r1 *github.Response var r2 error - if rf, ok := ret.Get(0).(func(context.Context, *github.ListOptions) (*github.Runners, *github.Response, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, *github.ListRunnersOptions) (*github.Runners, *github.Response, error)); ok { return rf(ctx, opts) } - if rf, ok := ret.Get(0).(func(context.Context, *github.ListOptions) *github.Runners); ok { + if rf, ok := ret.Get(0).(func(context.Context, *github.ListRunnersOptions) *github.Runners); ok { r0 = rf(ctx, opts) } else { if ret.Get(0) != nil { @@ -323,7 +323,7 @@ func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.List } } - if rf, ok := ret.Get(1).(func(context.Context, *github.ListOptions) *github.Response); ok { + if rf, ok := ret.Get(1).(func(context.Context, *github.ListRunnersOptions) *github.Response); ok { r1 = rf(ctx, opts) } else { if ret.Get(1) != nil { @@ -331,7 +331,7 @@ func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.List } } - if rf, ok := ret.Get(2).(func(context.Context, 
*github.ListOptions) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, *github.ListRunnersOptions) error); ok { r2 = rf(ctx, opts) } else { r2 = ret.Error(2) diff --git a/runner/common/mocks/GithubEnterpriseClient.go b/runner/common/mocks/GithubEnterpriseClient.go index 36b6517d..5606e340 100644 --- a/runner/common/mocks/GithubEnterpriseClient.go +++ b/runner/common/mocks/GithubEnterpriseClient.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v57/github" + github "github.com/google/go-github/v72/github" mock "github.com/stretchr/testify/mock" ) diff --git a/runner/common/mocks/GithubEntityOperations.go b/runner/common/mocks/GithubEntityOperations.go index c04f9024..0015a485 100644 --- a/runner/common/mocks/GithubEntityOperations.go +++ b/runner/common/mocks/GithubEntityOperations.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v57/github" + github "github.com/google/go-github/v72/github" mock "github.com/stretchr/testify/mock" params "github.com/cloudbase/garm/params" @@ -263,7 +263,7 @@ func (_m *GithubEntityOperations) ListEntityRunnerApplicationDownloads(ctx conte } // ListEntityRunners provides a mock function with given fields: ctx, opts -func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *github.ListOptions) (*github.Runners, *github.Response, error) { +func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { ret := _m.Called(ctx, opts) if len(ret) == 0 { @@ -273,10 +273,10 @@ func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *g var r0 *github.Runners var r1 *github.Response var r2 error - if rf, ok := ret.Get(0).(func(context.Context, *github.ListOptions) (*github.Runners, *github.Response, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, *github.ListRunnersOptions) (*github.Runners, *github.Response, error)); ok { return rf(ctx, opts) } - if rf, ok := ret.Get(0).(func(context.Context, *github.ListOptions) *github.Runners); ok { + if rf, ok := ret.Get(0).(func(context.Context, *github.ListRunnersOptions) *github.Runners); ok { r0 = rf(ctx, opts) } else { if ret.Get(0) != nil { @@ -284,7 +284,7 @@ func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *g } } - if rf, ok := ret.Get(1).(func(context.Context, *github.ListOptions) *github.Response); ok { + if rf, ok := ret.Get(1).(func(context.Context, *github.ListRunnersOptions) *github.Response); ok { r1 = rf(ctx, opts) } else { if ret.Get(1) != nil { @@ -292,7 +292,7 @@ func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *g } } - if rf, ok := ret.Get(2).(func(context.Context, *github.ListOptions) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, *github.ListRunnersOptions) error); ok { r2 = rf(ctx, opts) } else { r2 = ret.Error(2) diff --git a/runner/common/mocks/OrganizationHooks.go b/runner/common/mocks/OrganizationHooks.go index 67d17a30..73528638 100644 --- a/runner/common/mocks/OrganizationHooks.go +++ b/runner/common/mocks/OrganizationHooks.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v57/github" + github "github.com/google/go-github/v72/github" mock "github.com/stretchr/testify/mock" ) diff --git a/runner/common/mocks/RepositoryHooks.go b/runner/common/mocks/RepositoryHooks.go index 72b7831a..3f38915e 100644 --- 
a/runner/common/mocks/RepositoryHooks.go +++ b/runner/common/mocks/RepositoryHooks.go @@ -5,7 +5,7 @@ package mocks import ( context "context" - github "github.com/google/go-github/v57/github" + github "github.com/google/go-github/v72/github" mock "github.com/stretchr/testify/mock" ) diff --git a/runner/common/util.go b/runner/common/util.go index d2e6c16b..37e9b60c 100644 --- a/runner/common/util.go +++ b/runner/common/util.go @@ -3,7 +3,7 @@ package common import ( "context" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v72/github" "github.com/cloudbase/garm/params" ) @@ -14,7 +14,7 @@ type GithubEntityOperations interface { CreateEntityHook(ctx context.Context, hook *github.Hook) (ret *github.Hook, err error) DeleteEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) PingEntityHook(ctx context.Context, id int64) (ret *github.Response, err error) - ListEntityRunners(ctx context.Context, opts *github.ListOptions) (*github.Runners, *github.Response, error) + ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) RemoveEntityRunner(ctx context.Context, runnerID int64) (*github.Response, error) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) diff --git a/runner/pool/common.go b/runner/pool/common.go index fcf4f73f..5a15659f 100644 --- a/runner/pool/common.go +++ b/runner/pool/common.go @@ -6,7 +6,7 @@ import ( "net/url" "strings" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v72/github" "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -21,16 +21,12 @@ func validateHookRequest(controllerID, baseURL string, allHooks []*github.Hook, partialMatches := []string{} for _, hook := range allHooks { - hookURL, ok := hook.Config["url"].(string) - if !ok { - continue - } - hookURL = strings.ToLower(hookURL) + hookURL := strings.ToLower(hook.GetURL()) - if hook.Config["url"] == req.Config["url"] { + if hook.GetURL() == req.GetURL() { return runnerErrors.NewConflictError("hook already installed") } else if strings.Contains(hookURL, controllerID) || strings.Contains(hookURL, parsed.Hostname()) { - partialMatches = append(partialMatches, hook.Config["url"].(string)) + partialMatches = append(partialMatches, hook.GetURL()) } } @@ -42,23 +38,15 @@ func validateHookRequest(controllerID, baseURL string, allHooks []*github.Hook, } func hookToParamsHookInfo(hook *github.Hook) params.HookInfo { - var hookURL string - url, ok := hook.Config["url"] - if ok { - hookURL = url.(string) - } - var insecureSSL bool - insecureSSLConfig, ok := hook.Config["insecure_ssl"] - if ok { - if insecureSSLConfig.(string) == "1" { - insecureSSL = true - } + config := hook.GetConfig() + if config != nil { + insecureSSL = config.GetInsecureSSL() == "1" } return params.HookInfo{ ID: *hook.ID, - URL: hookURL, + URL: hook.GetURL(), Events: hook.Events, Active: *hook.Active, InsecureSSL: insecureSSL, diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 7e2a6080..0b9f3675 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -27,7 +27,7 @@ import ( "sync" "time" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v72/github" "github.com/google/uuid" "github.com/pkg/errors" "golang.org/x/sync/errgroup" @@ -1931,12 +1931,12 @@ func (r *basePoolManager) 
InstallWebhook(ctx context.Context, param params.Insta insecureSSL = "1" } req := &github.Hook{ - Active: github.Bool(true), - Config: map[string]interface{}{ - "url": r.controllerInfo.ControllerWebhookURL, - "content_type": "json", - "insecure_ssl": insecureSSL, - "secret": r.WebhookSecret(), + Active: github.Ptr(true), + Config: &github.HookConfig{ + URL: github.Ptr(r.controllerInfo.ControllerWebhookURL), + ContentType: github.Ptr("json"), + InsecureSSL: github.Ptr(insecureSSL), + Secret: github.Ptr(r.WebhookSecret()), }, Events: []string{ "workflow_job", @@ -1998,8 +1998,10 @@ func (r *basePoolManager) FetchTools() ([]commonParams.RunnerApplicationDownload } func (r *basePoolManager) GetGithubRunners() ([]*github.Runner, error) { - opts := github.ListOptions{ - PerPage: 100, + opts := github.ListRunnersOptions{ + ListOptions: github.ListOptions{ + PerPage: 100, + }, } var allRunners []*github.Runner diff --git a/runner/pool/stub_client.go b/runner/pool/stub_client.go index df547501..eef4afde 100644 --- a/runner/pool/stub_client.go +++ b/runner/pool/stub_client.go @@ -3,7 +3,7 @@ package pool import ( "context" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v72/github" "github.com/cloudbase/garm/params" ) @@ -32,7 +32,7 @@ func (s *stubGithubClient) PingEntityHook(_ context.Context, _ int64) (*github.R return nil, s.err } -func (s *stubGithubClient) ListEntityRunners(_ context.Context, _ *github.ListOptions) (*github.Runners, *github.Response, error) { +func (s *stubGithubClient) ListEntityRunners(_ context.Context, _ *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { return nil, nil, s.err } diff --git a/runner/pool/util.go b/runner/pool/util.go index e2308160..f78d0e32 100644 --- a/runner/pool/util.go +++ b/runner/pool/util.go @@ -6,7 +6,7 @@ import ( "sync" "sync/atomic" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v72/github" runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" diff --git a/test/integration/gh_cleanup/main.go b/test/integration/gh_cleanup/main.go index 0095dba8..5d70705a 100644 --- a/test/integration/gh_cleanup/main.go +++ b/test/integration/gh_cleanup/main.go @@ -6,7 +6,7 @@ import ( "log/slog" "os" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v72/github" "golang.org/x/oauth2" ) @@ -141,8 +141,8 @@ func getGhOrgWebhook(url, ghToken, orgName string) (*github.Hook, error) { } for _, hook := range ghOrgHooks { - hookURL, ok := hook.Config["url"].(string) - if ok && hookURL == url { + hookURL := hook.GetURL() + if hookURL == url { return hook, nil } } @@ -158,8 +158,8 @@ func getGhRepoWebhook(url, ghToken, orgName, repoName string) (*github.Hook, err } for _, hook := range ghRepoHooks { - hookURL, ok := hook.Config["url"].(string) - if ok && hookURL == url { + hookURL := hook.GetURL() + if hookURL == url { return hook, nil } } diff --git a/test/integration/jobs_test.go b/test/integration/jobs_test.go index e9483e17..002ebeda 100644 --- a/test/integration/jobs_test.go +++ b/test/integration/jobs_test.go @@ -8,7 +8,7 @@ import ( "fmt" "time" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v72/github" commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/params" diff --git a/test/integration/organizations_test.go b/test/integration/organizations_test.go index 5089725b..ed8f7f02 100644 --- a/test/integration/organizations_test.go +++ 
b/test/integration/organizations_test.go @@ -8,7 +8,7 @@ import ( "fmt" "time" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v72/github" commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/params" @@ -99,8 +99,8 @@ func getGhOrgWebhook(url, ghToken, orgName string) (*github.Hook, error) { } for _, hook := range ghOrgHooks { - hookURL, ok := hook.Config["url"].(string) - if ok && hookURL == url { + hookURL := hook.GetURL() + if hookURL == url { return hook, nil } } diff --git a/test/integration/repositories_test.go b/test/integration/repositories_test.go index 0c516a74..36887664 100644 --- a/test/integration/repositories_test.go +++ b/test/integration/repositories_test.go @@ -8,7 +8,7 @@ import ( "fmt" "time" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v72/github" "golang.org/x/oauth2" commonParams "github.com/cloudbase/garm-provider-common/params" @@ -109,8 +109,8 @@ func getGhRepoWebhook(url, ghToken, orgName, repoName string) (*github.Hook, err } for _, hook := range ghRepoHooks { - hookURL, ok := hook.Config["url"].(string) - if ok && hookURL == url { + hookURL := hook.GetURL() + if hookURL == url { return hook, nil } } diff --git a/util/util.go b/util/util.go index eb390743..b6d8ac6d 100644 --- a/util/util.go +++ b/util/util.go @@ -22,7 +22,7 @@ import ( "log/slog" "net/http" - "github.com/google/go-github/v57/github" + "github.com/google/go-github/v72/github" "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -160,7 +160,7 @@ func (g *githubClient) PingEntityHook(ctx context.Context, id int64) (ret *githu return ret, err } -func (g *githubClient) ListEntityRunners(ctx context.Context, opts *github.ListOptions) (*github.Runners, *github.Response, error) { +func (g *githubClient) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { var ret *github.Runners var response *github.Response var err error @@ -383,7 +383,7 @@ func (g *githubClient) GetEntityJITConfig(ctx context.Context, instance string, Labels: labels, // nolint:golangci-lint,godox // TODO(gabriel-samfira): Should we make this configurable? 
- WorkFolder: github.String("_work"), + WorkFolder: github.Ptr("_work"), } metrics.GithubOperationCount.WithLabelValues( diff --git a/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go b/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go index 39be5917..7794dd9b 100644 --- a/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go +++ b/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go @@ -13,7 +13,7 @@ import ( "sync" "time" - "github.com/google/go-github/v71/github" + "github.com/google/go-github/v72/github" ) const ( diff --git a/vendor/github.com/cloudbase/garm-provider-common/params/params.go b/vendor/github.com/cloudbase/garm-provider-common/params/params.go index 95a6e6bb..0a63f709 100644 --- a/vendor/github.com/cloudbase/garm-provider-common/params/params.go +++ b/vendor/github.com/cloudbase/garm-provider-common/params/params.go @@ -45,6 +45,7 @@ const ( InstancePendingDelete InstanceStatus = "pending_delete" InstancePendingForceDelete InstanceStatus = "pending_force_delete" InstanceDeleting InstanceStatus = "deleting" + InstanceDeleted InstanceStatus = "deleted" InstancePendingCreate InstanceStatus = "pending_create" InstanceCreating InstanceStatus = "creating" InstanceStatusUnknown InstanceStatus = "unknown" diff --git a/vendor/github.com/go-logr/logr/.golangci.yaml b/vendor/github.com/go-logr/logr/.golangci.yaml index 0cffafa7..0ed62c1a 100644 --- a/vendor/github.com/go-logr/logr/.golangci.yaml +++ b/vendor/github.com/go-logr/logr/.golangci.yaml @@ -1,26 +1,28 @@ +version: "2" + run: timeout: 1m tests: true linters: - disable-all: true - enable: + default: none + enable: # please keep this alphabetized + - asasalint - asciicheck + - copyloopvar + - dupl - errcheck - forcetypeassert + - goconst - gocritic - - gofmt - - goimports - - gosimple - govet - ineffassign - misspell + - musttag - revive - staticcheck - - typecheck - unused issues: - exclude-use-default: false max-issues-per-linter: 0 max-same-issues: 10 diff --git a/vendor/github.com/go-logr/logr/funcr/funcr.go b/vendor/github.com/go-logr/logr/funcr/funcr.go index 30568e76..b22c57d7 100644 --- a/vendor/github.com/go-logr/logr/funcr/funcr.go +++ b/vendor/github.com/go-logr/logr/funcr/funcr.go @@ -77,7 +77,7 @@ func newSink(fn func(prefix, args string), formatter Formatter) logr.LogSink { write: fn, } // For skipping fnlogger.Info and fnlogger.Error. - l.Formatter.AddCallDepth(1) + l.AddCallDepth(1) // via Formatter return l } @@ -164,17 +164,17 @@ type fnlogger struct { } func (l fnlogger) WithName(name string) logr.LogSink { - l.Formatter.AddName(name) + l.AddName(name) // via Formatter return &l } func (l fnlogger) WithValues(kvList ...any) logr.LogSink { - l.Formatter.AddValues(kvList) + l.AddValues(kvList) // via Formatter return &l } func (l fnlogger) WithCallDepth(depth int) logr.LogSink { - l.Formatter.AddCallDepth(depth) + l.AddCallDepth(depth) // via Formatter return &l } diff --git a/vendor/github.com/go-sql-driver/mysql/AUTHORS b/vendor/github.com/go-sql-driver/mysql/AUTHORS index 510b869b..ec346e20 100644 --- a/vendor/github.com/go-sql-driver/mysql/AUTHORS +++ b/vendor/github.com/go-sql-driver/mysql/AUTHORS @@ -25,6 +25,7 @@ Asta Xie B Lamarche Bes Dollma Bogdan Constantinescu +Brad Higgins Brian Hendriks Bulat Gaifullin Caine Jette @@ -37,6 +38,7 @@ Daniel Montoya Daniel Nichter Daniël van Eeden Dave Protasowski +Diego Dupin Dirkjan Bussink DisposaBoy Egor Smolyakov @@ -133,6 +135,7 @@ Ziheng Lyu Barracuda Networks, Inc. Counting Ltd. +Defined Networking Inc. 
DigitalOcean Inc. Dolthub Inc. dyves labs AG diff --git a/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md b/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md index 66189eda..75674b60 100644 --- a/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md +++ b/vendor/github.com/go-sql-driver/mysql/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## v1.9.3 (2025-06-13) + +* `tx.Commit()` and `tx.Rollback()` returned `ErrInvalidConn` always. + Now they return cached real error if present. (#1690) + +* Optimize reading small resultsets to fix performance regression + introduced by compression protocol support. (#1707) + +* Fix `db.Ping()` on compressed connection. (#1723) + + ## v1.9.2 (2025-04-07) v1.9.2 is a re-release of v1.9.1 due to a release process issue; no changes were made to the content. diff --git a/vendor/github.com/go-sql-driver/mysql/buffer.go b/vendor/github.com/go-sql-driver/mysql/buffer.go index a6532431..f895e87b 100644 --- a/vendor/github.com/go-sql-driver/mysql/buffer.go +++ b/vendor/github.com/go-sql-driver/mysql/buffer.go @@ -42,6 +42,11 @@ func (b *buffer) busy() bool { return len(b.buf) > 0 } +// len returns how many bytes in the read buffer. +func (b *buffer) len() int { + return len(b.buf) +} + // fill reads into the read buffer until at least _need_ bytes are in it. func (b *buffer) fill(need int, r readerFunc) error { // we'll move the contents of the current buffer to dest before filling it. @@ -86,17 +91,10 @@ func (b *buffer) fill(need int, r readerFunc) error { // returns next N bytes from buffer. // The returned slice is only guaranteed to be valid until the next read -func (b *buffer) readNext(need int, r readerFunc) ([]byte, error) { - if len(b.buf) < need { - // refill - if err := b.fill(need, r); err != nil { - return nil, err - } - } - - data := b.buf[:need] +func (b *buffer) readNext(need int) []byte { + data := b.buf[:need:need] b.buf = b.buf[need:] - return data, nil + return data } // takeBuffer returns a buffer with the requested size. diff --git a/vendor/github.com/go-sql-driver/mysql/compress.go b/vendor/github.com/go-sql-driver/mysql/compress.go index fa42772a..38bfa000 100644 --- a/vendor/github.com/go-sql-driver/mysql/compress.go +++ b/vendor/github.com/go-sql-driver/mysql/compress.go @@ -84,9 +84,9 @@ func (c *compIO) reset() { c.buff.Reset() } -func (c *compIO) readNext(need int, r readerFunc) ([]byte, error) { +func (c *compIO) readNext(need int) ([]byte, error) { for c.buff.Len() < need { - if err := c.readCompressedPacket(r); err != nil { + if err := c.readCompressedPacket(); err != nil { return nil, err } } @@ -94,8 +94,8 @@ func (c *compIO) readNext(need int, r readerFunc) ([]byte, error) { return data[:need:need], nil // prevent caller writes into c.buff } -func (c *compIO) readCompressedPacket(r readerFunc) error { - header, err := c.mc.buf.readNext(7, r) // size of compressed header +func (c *compIO) readCompressedPacket() error { + header, err := c.mc.readNext(7) if err != nil { return err } @@ -103,7 +103,7 @@ func (c *compIO) readCompressedPacket(r readerFunc) error { // compressed header structure comprLength := getUint24(header[0:3]) - compressionSequence := uint8(header[3]) + compressionSequence := header[3] uncompressedLength := getUint24(header[4:7]) if debug { fmt.Printf("uncompress cmplen=%v uncomplen=%v pkt_cmp_seq=%v expected_cmp_seq=%v\n", @@ -113,14 +113,13 @@ func (c *compIO) readCompressedPacket(r readerFunc) error { // Server may return error packet (e.g. 
1153 Got a packet bigger than 'max_allowed_packet' bytes) // before receiving all packets from client. In this case, seqnr is younger than expected. // NOTE: Both of mariadbclient and mysqlclient do not check seqnr. Only server checks it. - if debug && compressionSequence != c.mc.sequence { + if debug && compressionSequence != c.mc.compressSequence { fmt.Printf("WARN: unexpected cmpress seq nr: expected %v, got %v", - c.mc.sequence, compressionSequence) + c.mc.compressSequence, compressionSequence) } - c.mc.sequence = compressionSequence + 1 - c.mc.compressSequence = c.mc.sequence + c.mc.compressSequence = compressionSequence + 1 - comprData, err := c.mc.buf.readNext(comprLength, r) + comprData, err := c.mc.readNext(comprLength) if err != nil { return err } @@ -200,7 +199,7 @@ func (c *compIO) writeCompressedPacket(data []byte, uncompressedLen int) (int, e comprLength := len(data) - 7 if debug { fmt.Printf( - "writeCompressedPacket: comprLength=%v, uncompressedLen=%v, seq=%v", + "writeCompressedPacket: comprLength=%v, uncompressedLen=%v, seq=%v\n", comprLength, uncompressedLen, mc.compressSequence) } diff --git a/vendor/github.com/go-sql-driver/mysql/packets.go b/vendor/github.com/go-sql-driver/mysql/packets.go index 4b836216..831fca6c 100644 --- a/vendor/github.com/go-sql-driver/mysql/packets.go +++ b/vendor/github.com/go-sql-driver/mysql/packets.go @@ -17,6 +17,7 @@ import ( "fmt" "io" "math" + "os" "strconv" "time" ) @@ -25,19 +26,30 @@ import ( // https://dev.mysql.com/doc/dev/mysql-server/latest/PAGE_PROTOCOL.html // https://mariadb.com/kb/en/clientserver-protocol/ +// read n bytes from mc.buf +func (mc *mysqlConn) readNext(n int) ([]byte, error) { + if mc.buf.len() < n { + err := mc.buf.fill(n, mc.readWithTimeout) + if err != nil { + return nil, err + } + } + return mc.buf.readNext(n), nil +} + // Read packet to buffer 'data' func (mc *mysqlConn) readPacket() ([]byte, error) { var prevData []byte invalidSequence := false - readNext := mc.buf.readNext + readNext := mc.readNext if mc.compress { readNext = mc.compIO.readNext } for { // read packet header - data, err := readNext(4, mc.readWithTimeout) + data, err := readNext(4) if err != nil { mc.close() if cerr := mc.canceled.Value(); cerr != nil { @@ -51,17 +63,11 @@ func (mc *mysqlConn) readPacket() ([]byte, error) { pktLen := getUint24(data[:3]) seq := data[3] - if mc.compress { + // check packet sync [8 bit] + if seq != mc.sequence { + mc.log(fmt.Sprintf("[warn] unexpected sequence nr: expected %v, got %v", mc.sequence, seq)) // MySQL and MariaDB doesn't check packet nr in compressed packet. - if debug && seq != mc.compressSequence { - fmt.Printf("[debug] mismatched compression sequence nr: expected: %v, got %v", - mc.compressSequence, seq) - } - mc.compressSequence = seq + 1 - } else { - // check packet sync [8 bit] - if seq != mc.sequence { - mc.log(fmt.Sprintf("[warn] unexpected seq nr: expected %v, got %v", mc.sequence, seq)) + if !mc.compress { // For large packets, we stop reading as soon as sync error. 
if len(prevData) > 0 { mc.close() @@ -69,8 +75,8 @@ func (mc *mysqlConn) readPacket() ([]byte, error) { } invalidSequence = true } - mc.sequence++ } + mc.sequence = seq + 1 // packets with length 0 terminate a previous packet which is a // multiple of (2^24)-1 bytes long @@ -85,7 +91,7 @@ func (mc *mysqlConn) readPacket() ([]byte, error) { } // read packet body [pktLen bytes] - data, err = readNext(pktLen, mc.readWithTimeout) + data, err = readNext(pktLen) if err != nil { mc.close() if cerr := mc.canceled.Value(); cerr != nil { @@ -135,7 +141,7 @@ func (mc *mysqlConn) writePacket(data []byte) error { // Write packet if debug { - fmt.Printf("writePacket: size=%v seq=%v", size, mc.sequence) + fmt.Fprintf(os.Stderr, "writePacket: size=%v seq=%v\n", size, mc.sequence) } n, err := writeFunc(data[:4+size]) @@ -434,7 +440,9 @@ func (mc *mysqlConn) writeCommandPacket(command byte) error { data[4] = command // Send CMD packet - return mc.writePacket(data) + err = mc.writePacket(data) + mc.syncSequence() + return err } func (mc *mysqlConn) writeCommandPacketStr(command byte, arg string) error { @@ -475,7 +483,9 @@ func (mc *mysqlConn) writeCommandPacketUint32(command byte, arg uint32) error { binary.LittleEndian.PutUint32(data[5:], arg) // Send CMD packet - return mc.writePacket(data) + err = mc.writePacket(data) + mc.syncSequence() + return err } /****************************************************************************** @@ -945,7 +955,6 @@ func (stmt *mysqlStmt) writeCommandLongData(paramID int, arg []byte) error { pktLen = dataOffset + argLen } - stmt.mc.resetSequence() // Add command byte [1 byte] data[4] = comStmtSendLongData @@ -957,6 +966,8 @@ func (stmt *mysqlStmt) writeCommandLongData(paramID int, arg []byte) error { // Send CMD packet err := stmt.mc.writePacket(data[:4+pktLen]) + // Every COM_LONG_DATA packet reset Packet Sequence + stmt.mc.resetSequence() if err == nil { data = data[pktLen-dataOffset:] continue @@ -964,8 +975,6 @@ func (stmt *mysqlStmt) writeCommandLongData(paramID int, arg []byte) error { return err } - // Reset Packet Sequence - stmt.mc.resetSequence() return nil } diff --git a/vendor/github.com/go-sql-driver/mysql/transaction.go b/vendor/github.com/go-sql-driver/mysql/transaction.go index 4a4b6100..8c502f49 100644 --- a/vendor/github.com/go-sql-driver/mysql/transaction.go +++ b/vendor/github.com/go-sql-driver/mysql/transaction.go @@ -13,18 +13,32 @@ type mysqlTx struct { } func (tx *mysqlTx) Commit() (err error) { - if tx.mc == nil || tx.mc.closed.Load() { + if tx.mc == nil { return ErrInvalidConn } + if tx.mc.closed.Load() { + err = tx.mc.error() + if err == nil { + err = ErrInvalidConn + } + return + } err = tx.mc.exec("COMMIT") tx.mc = nil return } func (tx *mysqlTx) Rollback() (err error) { - if tx.mc == nil || tx.mc.closed.Load() { + if tx.mc == nil { return ErrInvalidConn } + if tx.mc.closed.Load() { + err = tx.mc.error() + if err == nil { + err = ErrInvalidConn + } + return + } err = tx.mc.exec("ROLLBACK") tx.mc = nil return diff --git a/vendor/github.com/google/go-github/v57/AUTHORS b/vendor/github.com/google/go-github/v57/AUTHORS deleted file mode 100644 index 74a21dc6..00000000 --- a/vendor/github.com/google/go-github/v57/AUTHORS +++ /dev/null @@ -1,487 +0,0 @@ -# This is the official list of go-github authors for copyright purposes. -# -# This does not necessarily list everyone who has contributed code, since in -# some cases, their employer may be the copyright holder. 
To see the full list -# of contributors, see the revision history in source control or -# https://github.com/google/go-github/graphs/contributors. -# -# Authors who wish to be recognized in this file should add themselves (or -# their employer, as appropriate). - -178inaba -2BFL -413x -6543 <6543@obermui.de> -Abed Kibbe -Abhinav Gupta -Abhishek Veeramalla -aboy -Adam Kohring -adrienzieba -afdesk -Ahmad Nurus S -Ahmed Hagy -Aidan -Aidan Steele -Ainsley Chong -ajz01 -Akeda Bagus -Akhil Mohan -Alec Thomas -Aleks Clark -Alex Bramley -Alex Ellis -Alex Orr -Alex Su -Alex Unger -Alexander Harkness -Alexis Gauthiez -Ali Farooq -Allan Guwatudde -Allen Sun -Amey Sakhadeo -Anders Janmyr -Andreas Garnæs -Andrew Ryabchun -Andrew Svoboda -Andy Grunwald -Andy Hume -Andy Lindeman -angie pinilla -anjanashenoy -Anshuman Bhartiya -Antoine -Antoine Pelisse -Anton Nguyen -Anubha Kushwaha -appilon -aprp -apurwaj2 -Aravind -Arda Kuyumcu -Arıl Bozoluk -Asier Marruedo -Austin Burdine -Austin Dizzy -Azuka Okuleye -Ben Batha -Benjamen Keroack -Berkay Tacyildiz -Beshr Kayali -Beyang Liu -Billy Keyes -Billy Lynch -Bingtan Lu -Bjorn Neergaard -Björn Häuser -Bo Huang -boljen -Bracken -Brad Harris -Brad Moylan -Bradley Falzon -Bradley McAllister -Brandon Butler -Brandon Cook -Brandon Stubbs -Brett Kuhlman -Brett Logan -Brian Egizi -Bryan Boreham -Bryan Peterson -Cami Diez -Carl Johnson -Carlos Alexandro Becker -Carlos Tadeu Panato Junior -ChandanChainani -chandresh-pancholi -Charles Fenwick Elliott -Charlie Yan -Chmouel Boudjnah -Chris King -Chris Mc -Chris Raborg -Chris Roche -Chris Schaefer -chrisforrette -Christian Bargmann -Christian Muehlhaeuser -Christoph Jerolimov -Christoph Sassenberg -CI Monk -Colin Misare -Craig Gumbley -Craig Peterson -Cristian Maglie -Cyb3r Jak3 -Daehyeok Mun -Dalton Hubble -Daniel Lanner -Daniel Leavitt -Daniel Nilsson -Daoq -Dave Du Cros -Dave Henderson -Dave Perrett -Dave Protasowski -David Deng -David Gamba -David J. M. Karlsen -David Jannotta -David Ji -David Lopez Reyes -Davide Zipeto -Dennis Webb -Derek Jobst -DeviousLab -Dhi Aurrahman -Diego Lapiduz -Diogo Vilela -Dmitri Shuralyov -dmnlk -Don Petersen -Doug Turner -Drew Fradette -Dustin Deus -Dustin Lish -Eivind -Eli Uriegas -Elliott Beach -Emerson Wood -Emil V -Eng Zer Jun -eperm -Erick Fejta -Erik Nobel -erwinvaneyk -Evan Anderson -Evan Elias -Fabian Holler -Fabrice -Fatema-Moaiyadi -Federico Di Pierro -Felix Geisendörfer -Filippo Valsorda -Florian Forster -Florian Wagner -Francesc Gil -Francis -Francisco Guimarães -François de Metz -Fredrik Jönsson -Gabriel -Gal Ofri -Garrett Squire -George Kontridze -Georgy Buranov -Glen Mailer -Gnahz -Google Inc. 
-Grachev Mikhail -griffin_stewie -guangwu -Guillaume Jacquet -Guz Alexander -Guðmundur Bjarni Ólafsson -Hanno Hecker -Hari haran -Harikesh00 -haya14busa -haya14busa -Hiroki Ito -Hubot Jr -Huy Tr -huydx -i2bskn -Iain Steers -Ikko Ashimine -Ilia Choly -Ioannis Georgoulas -Isao Jonas -ishan upadhyay -isqua -Jacob Valdemar -Jake Krammer -Jake White -Jameel Haffejee -James Bowes -James Cockbain -James Loh -James Maguire -James Turley -Jamie West -Jan Kosecki -Jan Švábík -Jason Field -Javier Campanini -Jef LeCompte -Jeff Wenzbauer -Jens Rantil -Jeremy Morris -Jesse Haka -Jesse Newland -Jihoon Chung -Jille Timmermans -Jimmi Dyson -Joan Saum -Joe Tsai -John Barton -John Engelman -John Jones -John Liu -Jordan Brockopp -Jordan Burandt -Jordan Sussman -Jorge Gómez Reus -Joshua Bezaleel Abednego -João Cerqueira -JP Phillips -jpbelanger-mtl -Juan -Juan Basso -Julien Garcia Gonzalez -Julien Rostand -Junya Kono -Justin Abrahms -Justin Toh -Jusung Lee -jzhoucliqr -k0ral -k1rnt -kadern0 -Karthik Sundari -Katrina Owen -Kautilya Tripathi -Keita Urashima -Kevin Burke -Kevin Wang -Kevin Zhao -kgalli -Kirill -Konrad Malawski -Kookheon Kwon -Krishna Indani -Krzysztof Kowalczyk -Kshitij Saraogi -Kumar Saurabh -Kyle Kurz -kyokomi -Lars Lehtonen -Laurent Verdoïa -leopoldwang -Liam Galvin -Lluis Campos -Lovro Mažgon -Loïs Postula -Luca Campese -Lucas Alcantara -Lucas Martin-King -Luis Davim -Luke Evers -Luke Hinds -Luke Kysow -Luke Roberts -Luke Young -lynn [they] -Magnus Kulke -Maksim Zhylinski -Marc Binder -Marcelo Carlos -Mark Tareshawty -Martin Holman -Martin-Louis Bright -Martins Sipenko -Marwan Sulaiman -Masayuki Izumi -Mat Geist -Matija Horvat -Matin Rahmanian -Matt -Matt Brender -Matt Dainty -Matt Gaunt -Matt Landis -Matt Moore -Matt Simons -Maxime Bury -Michael Meng -Michael Spiegel -Michael Tiller -Michał Glapa -Michelangelo Morrillo -Miguel Elias dos Santos -Mike Chen -mohammad ali <2018cs92@student.uet.edu.pk> -Mohammed AlDujaili -Mukundan Senthil -Munia Balayil -Mustafa Abban -Nadav Kaner -Naoki Kanatani -Nathan VanBenschoten -Navaneeth Suresh -Neal Caffery -Neil O'Toole -Nick Miyake -Nick Platt -Nick Spragg -Nicolas Chapurlat -Nikhita Raghunath -Nilesh Singh -Noah Hanjun Lee -Noah Zoschke -ns-cweber -nxya -Ole Orhagen -Oleg Kovalov -Ondřej Kupka -Ori Talmor -Osama Faqhruldin -oslowalk -Pablo Pérez Schröder -Palash Nigam -Panagiotis Moustafellos -Parham Alvani -pari-27 -Parker Moore -parkhyukjun89 -Pat Alwell -Patrick DeVivo -Patrick Marabeas -Patrik Nordlén -Pavel Dvoinos -Pavel Shtanko -Pete Wagner -Petr Shevtsov -Pierce McEntagart -Pierre Carrier -Piotr Zurek -Piyush Chugh -Pratik Mallya -Qais Patankar -Quang Le Hong -Quentin Leffray -Quinn Slack -Rackspace US, Inc. -Radek Simko -Radliński Ignacy -Rafael Aramizu Gomes -Rajat Jindal -Rajendra arora -Rajkumar -Ranbir Singh -Ravi Shekhar Jethani -RaviTeja Pothana -rc1140 -Red Hat, Inc. 
-Reetuparna Mukherjee -reeves122 -Reinier Timmer -Renjith R -Ricco Førgaard -Richard de Vries -Rob Figueiredo -Rohit Upadhyay -Rojan Dinc -Ronak Jain -Ronan Pelliard -Ross Gustafson -Ruben Vereecken -Russell Boley -Ryan Leung -Ryan Lower -Ryo Nakao -Saaarah -Safwan Olaimat -Sahil Dua -Sai Ravi Teja Chintakrindi -saisi -Sam Minnée -Sandeep Sukhani -Sander Knape -Sander van Harmelen -Sanket Payghan -Sarah Funkhouser -Sarasa Kisaragi -Sasha Melentyev -Sean Wang -Sebastian Mandrean -Sebastian Mæland Pedersen -Sergei Popinevskii -Sergey Romanov -Sergio Garcia -Seth Vargo -Sevki -Shagun Khemka -shakeelrao -Shawn Catanzarite -Shawn Smith -Shibasis Patel -Sho Okada -Shrikrishna Singh -Simon Davis -sona-tar -soniachikh -SoundCloud, Ltd. -Sridhar Mocherla -SriVignessh Pss -Stefan Sedich -Steve Teuber -Stian Eikeland -Suhaib Mujahid -sushmita wable -Szymon Kodrebski -Søren Hansen -T.J. Corrigan -Takashi Yoneuchi -Takayuki Watanabe -Taketoshi Fujiwara -Taketoshi Fujiwara -Takuma Kajikawa -Tasya Aditya Rukmana -Theo Henson -Theofilos Petsios -Thomas Aidan Curran -Thomas Bruyelle -Tim Rogers -Timothy O'Brien -Timothée Peignier -Tingluo Huang -tkhandel -Tobias Gesellchen -Tom Payne -Trey Tacon -tsbkw -ttacon -Vaibhav Singh -Varadarajan Aravamudhan -Victor Castell -Victor Vrantchan -Victory Osikwemhe -vikkyomkar -Vivek -Vlad Ungureanu -Wasim Thabraze -Weslei Juan Moser Pereira -Wheeler Law -Will Maier -Will Norris -Willem D'Haeseleer -William Bailey -William Cooke -Xabi -xibz -Yann Malet -Yannick Utard -Yarden Shoham -Yicheng Qin -Yosuke Akatsuka -Yumikiyo Osanai -Yurii Soldak -Yusef Mohamadi -Yusuke Kuoka -Zach Latta -zhouhaibing089 -六开箱 -缘生 -蒋航 diff --git a/vendor/github.com/google/go-github/v57/github/actions_artifacts.go b/vendor/github.com/google/go-github/v57/github/actions_artifacts.go deleted file mode 100644 index f804b809..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_artifacts.go +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" - "net/url" -) - -// ArtifactWorkflowRun represents a GitHub artifact's workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts -type ArtifactWorkflowRun struct { - ID *int64 `json:"id,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - HeadRepositoryID *int64 `json:"head_repository_id,omitempty"` - HeadBranch *string `json:"head_branch,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` -} - -// Artifact represents a GitHub artifact. Artifacts allow sharing -// data between jobs in a workflow and provide storage for data -// once a workflow is complete. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts -type Artifact struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - SizeInBytes *int64 `json:"size_in_bytes,omitempty"` - URL *string `json:"url,omitempty"` - ArchiveDownloadURL *string `json:"archive_download_url,omitempty"` - Expired *bool `json:"expired,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ExpiresAt *Timestamp `json:"expires_at,omitempty"` - WorkflowRun *ArtifactWorkflowRun `json:"workflow_run,omitempty"` -} - -// ArtifactList represents a list of GitHub artifacts. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#artifacts -type ArtifactList struct { - TotalCount *int64 `json:"total_count,omitempty"` - Artifacts []*Artifact `json:"artifacts,omitempty"` -} - -// ListArtifacts lists all artifacts that belong to a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#list-artifacts-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/artifacts -func (s *ActionsService) ListArtifacts(ctx context.Context, owner, repo string, opts *ListOptions) (*ArtifactList, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/artifacts", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - artifactList := new(ArtifactList) - resp, err := s.client.Do(ctx, req, artifactList) - if err != nil { - return nil, resp, err - } - - return artifactList, resp, nil -} - -// ListWorkflowRunArtifacts lists all artifacts that belong to a workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#list-workflow-run-artifacts -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts -func (s *ActionsService) ListWorkflowRunArtifacts(ctx context.Context, owner, repo string, runID int64, opts *ListOptions) (*ArtifactList, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/artifacts", owner, repo, runID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - artifactList := new(ArtifactList) - resp, err := s.client.Do(ctx, req, artifactList) - if err != nil { - return nil, resp, err - } - - return artifactList, resp, nil -} - -// GetArtifact gets a specific artifact for a workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#get-an-artifact -// -//meta:operation GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id} -func (s *ActionsService) GetArtifact(ctx context.Context, owner, repo string, artifactID int64) (*Artifact, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/artifacts/%v", owner, repo, artifactID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - artifact := new(Artifact) - resp, err := s.client.Do(ctx, req, artifact) - if err != nil { - return nil, resp, err - } - - return artifact, resp, nil -} - -// DownloadArtifact gets a redirect URL to download an archive for a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#download-an-artifact -// -//meta:operation GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format} -func (s *ActionsService) DownloadArtifact(ctx context.Context, owner, repo string, artifactID int64, maxRedirects int) (*url.URL, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/artifacts/%v/zip", owner, repo, artifactID) - - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusFound { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - parsedURL, err := url.Parse(resp.Header.Get("Location")) - if err != nil { - return nil, newResponse(resp), err - } - - return parsedURL, newResponse(resp), nil -} - -// DeleteArtifact deletes a workflow run artifact. -// -// GitHub API docs: https://docs.github.com/rest/actions/artifacts#delete-an-artifact -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id} -func (s *ActionsService) DeleteArtifact(ctx context.Context, owner, repo string, artifactID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/artifacts/%v", owner, repo, artifactID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_cache.go b/vendor/github.com/google/go-github/v57/github/actions_cache.go deleted file mode 100644 index 271d7d82..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_cache.go +++ /dev/null @@ -1,249 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ActionsCache represents a GitHub action cache. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#about-the-cache-api -type ActionsCache struct { - ID *int64 `json:"id,omitempty" url:"-"` - Ref *string `json:"ref,omitempty" url:"ref"` - Key *string `json:"key,omitempty" url:"key"` - Version *string `json:"version,omitempty" url:"-"` - LastAccessedAt *Timestamp `json:"last_accessed_at,omitempty" url:"-"` - CreatedAt *Timestamp `json:"created_at,omitempty" url:"-"` - SizeInBytes *int64 `json:"size_in_bytes,omitempty" url:"-"` -} - -// ActionsCacheList represents a list of GitHub actions Cache. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository -type ActionsCacheList struct { - TotalCount int `json:"total_count"` - ActionsCaches []*ActionsCache `json:"actions_caches,omitempty"` -} - -// ActionsCacheUsage represents a GitHub Actions Cache Usage object. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#get-github-actions-cache-usage-for-a-repository -type ActionsCacheUsage struct { - FullName string `json:"full_name"` - ActiveCachesSizeInBytes int64 `json:"active_caches_size_in_bytes"` - ActiveCachesCount int `json:"active_caches_count"` -} - -// ActionsCacheUsageList represents a list of repositories with GitHub Actions cache usage for an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/cache#get-github-actions-cache-usage-for-a-repository -type ActionsCacheUsageList struct { - TotalCount int `json:"total_count"` - RepoCacheUsage []*ActionsCacheUsage `json:"repository_cache_usages,omitempty"` -} - -// TotalCacheUsage represents total GitHub actions cache usage of an organization or enterprise. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#get-github-actions-cache-usage-for-an-enterprise -type TotalCacheUsage struct { - TotalActiveCachesUsageSizeInBytes int64 `json:"total_active_caches_size_in_bytes"` - TotalActiveCachesCount int `json:"total_active_caches_count"` -} - -// ActionsCacheListOptions represents a list of all possible optional Query parameters for ListCaches method. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository -type ActionsCacheListOptions struct { - ListOptions - // The Git reference for the results you want to list. - // The ref for a branch can be formatted either as refs/heads/ - // or simply . To reference a pull request use refs/pull//merge - Ref *string `url:"ref,omitempty"` - Key *string `url:"key,omitempty"` - // Can be one of: "created_at", "last_accessed_at", "size_in_bytes". Default: "last_accessed_at" - Sort *string `url:"sort,omitempty"` - // Can be one of: "asc", "desc" Default: desc - Direction *string `url:"direction,omitempty"` -} - -// ListCaches lists the GitHub Actions caches for a repository. -// You must authenticate using an access token with the repo scope to use this endpoint. -// -// Permissions: must have the actions:read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/caches -func (s *ActionsService) ListCaches(ctx context.Context, owner, repo string, opts *ActionsCacheListOptions) (*ActionsCacheList, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/caches", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionCacheList := new(ActionsCacheList) - resp, err := s.client.Do(ctx, req, actionCacheList) - if err != nil { - return nil, resp, err - } - - return actionCacheList, resp, nil -} - -// DeleteCachesByKey deletes one or more GitHub Actions caches for a repository, using a complete cache key. -// By default, all caches that match the provided key are deleted, but you can optionally provide -// a Git ref to restrict deletions to caches that match both the provided key and the Git ref. -// The ref for a branch can be formatted either as "refs/heads/" or simply "". -// To reference a pull request use "refs/pull//merge". If you don't want to use ref just pass nil in parameter. -// -// Permissions: You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have the actions:write permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/cache#delete-github-actions-caches-for-a-repository-using-a-cache-key -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/caches -func (s *ActionsService) DeleteCachesByKey(ctx context.Context, owner, repo, key string, ref *string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/caches", owner, repo) - u, err := addOptions(u, ActionsCache{Key: &key, Ref: ref}) - if err != nil { - return nil, err - } - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteCachesByID deletes a GitHub Actions cache for a repository, using a cache ID. -// -// Permissions: You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have the actions:write permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#delete-a-github-actions-cache-for-a-repository-using-a-cache-id -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/caches/{cache_id} -func (s *ActionsService) DeleteCachesByID(ctx context.Context, owner, repo string, cacheID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/caches/%v", owner, repo, cacheID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetCacheUsageForRepo gets GitHub Actions cache usage for a repository. The data fetched using this API is refreshed approximately every 5 minutes, -// so values returned from this endpoint may take at least 5 minutes to get updated. -// -// Permissions: Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an -// access token with the repo scope. GitHub Apps must have the actions:read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#get-github-actions-cache-usage-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/cache/usage -func (s *ActionsService) GetCacheUsageForRepo(ctx context.Context, owner, repo string) (*ActionsCacheUsage, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/cache/usage", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - cacheUsage := new(ActionsCacheUsage) - res, err := s.client.Do(ctx, req, cacheUsage) - if err != nil { - return nil, res, err - } - - return cacheUsage, res, err -} - -// ListCacheUsageByRepoForOrg lists repositories and their GitHub Actions cache usage for an organization. The data fetched using this API is -// refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. -// -// Permissions: You must authenticate using an access token with the read:org scope to use this endpoint. -// GitHub Apps must have the organization_admistration:read permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/cache#list-repositories-with-github-actions-cache-usage-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/cache/usage-by-repository -func (s *ActionsService) ListCacheUsageByRepoForOrg(ctx context.Context, org string, opts *ListOptions) (*ActionsCacheUsageList, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/cache/usage-by-repository", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - cacheUsage := new(ActionsCacheUsageList) - res, err := s.client.Do(ctx, req, cacheUsage) - if err != nil { - return nil, res, err - } - - return cacheUsage, res, err -} - -// GetTotalCacheUsageForOrg gets the total GitHub Actions cache usage for an organization. The data fetched using this API is refreshed approximately every -// 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. -// -// Permissions: You must authenticate using an access token with the read:org scope to use this endpoint. -// GitHub Apps must have the organization_admistration:read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/actions/cache#get-github-actions-cache-usage-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/cache/usage -func (s *ActionsService) GetTotalCacheUsageForOrg(ctx context.Context, org string) (*TotalCacheUsage, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/cache/usage", org) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - cacheUsage := new(TotalCacheUsage) - res, err := s.client.Do(ctx, req, cacheUsage) - if err != nil { - return nil, res, err - } - - return cacheUsage, res, err -} - -// GetTotalCacheUsageForEnterprise gets the total GitHub Actions cache usage for an enterprise. The data fetched using this API is refreshed approximately every 5 minutes, -// so values returned from this endpoint may take at least 5 minutes to get updated. -// -// Permissions: You must authenticate using an access token with the "admin:enterprise" scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/cache#get-github-actions-cache-usage-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/cache/usage -func (s *ActionsService) GetTotalCacheUsageForEnterprise(ctx context.Context, enterprise string) (*TotalCacheUsage, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/cache/usage", enterprise) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - cacheUsage := new(TotalCacheUsage) - res, err := s.client.Do(ctx, req, cacheUsage) - if err != nil { - return nil, res, err - } - - return cacheUsage, res, err -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_permissions_enterprise.go b/vendor/github.com/google/go-github/v57/github/actions_permissions_enterprise.go deleted file mode 100644 index 7e10444a..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_permissions_enterprise.go +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// ActionsEnabledOnEnterpriseRepos represents all the repositories in an enterprise for which Actions is enabled. -type ActionsEnabledOnEnterpriseRepos struct { - TotalCount int `json:"total_count"` - Organizations []*Organization `json:"organizations"` -} - -// ActionsPermissionsEnterprise represents a policy for allowed actions in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions -type ActionsPermissionsEnterprise struct { - EnabledOrganizations *string `json:"enabled_organizations,omitempty"` - AllowedActions *string `json:"allowed_actions,omitempty"` - SelectedActionsURL *string `json:"selected_actions_url,omitempty"` -} - -func (a ActionsPermissionsEnterprise) String() string { - return Stringify(a) -} - -// GetActionsPermissionsInEnterprise gets the GitHub Actions permissions policy for an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#get-github-actions-permissions-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/permissions -func (s *ActionsService) GetActionsPermissionsInEnterprise(ctx context.Context, enterprise string) (*ActionsPermissionsEnterprise, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions", enterprise) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - permissions := new(ActionsPermissionsEnterprise) - resp, err := s.client.Do(ctx, req, permissions) - if err != nil { - return nil, resp, err - } - - return permissions, resp, nil -} - -// EditActionsPermissionsInEnterprise sets the permissions policy in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#set-github-actions-permissions-for-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/permissions -func (s *ActionsService) EditActionsPermissionsInEnterprise(ctx context.Context, enterprise string, actionsPermissionsEnterprise ActionsPermissionsEnterprise) (*ActionsPermissionsEnterprise, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions", enterprise) - req, err := s.client.NewRequest("PUT", u, actionsPermissionsEnterprise) - if err != nil { - return nil, nil, err - } - - p := new(ActionsPermissionsEnterprise) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// ListEnabledOrgsInEnterprise lists the selected organizations that are enabled for GitHub Actions in an enterprise. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#list-selected-organizations-enabled-for-github-actions-in-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/permissions/organizations -func (s *ActionsService) ListEnabledOrgsInEnterprise(ctx context.Context, owner string, opts *ListOptions) (*ActionsEnabledOnEnterpriseRepos, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations", owner) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - orgs := &ActionsEnabledOnEnterpriseRepos{} - resp, err := s.client.Do(ctx, req, orgs) - if err != nil { - return nil, resp, err - } - - return orgs, resp, nil -} - -// SetEnabledOrgsInEnterprise replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#set-selected-organizations-enabled-for-github-actions-in-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/permissions/organizations -func (s *ActionsService) SetEnabledOrgsInEnterprise(ctx context.Context, owner string, organizationIDs []int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations", owner) - - req, err := s.client.NewRequest("PUT", u, struct { - IDs []int64 `json:"selected_organization_ids"` - }{IDs: organizationIDs}) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// AddEnabledOrgInEnterprise adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#enable-a-selected-organization-for-github-actions-in-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id} -func (s *ActionsService) AddEnabledOrgInEnterprise(ctx context.Context, owner string, organizationID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations/%v", owner, organizationID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// RemoveEnabledOrgInEnterprise removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#disable-a-selected-organization-for-github-actions-in-an-enterprise -// -//meta:operation DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id} -func (s *ActionsService) RemoveEnabledOrgInEnterprise(ctx context.Context, owner string, organizationID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations/%v", owner, organizationID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// GetActionsAllowedInEnterprise gets the actions that are allowed in an enterprise. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/permissions/selected-actions -func (s *ActionsService) GetActionsAllowedInEnterprise(ctx context.Context, enterprise string) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/selected-actions", enterprise) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionsAllowed := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, actionsAllowed) - if err != nil { - return nil, resp, err - } - - return actionsAllowed, resp, nil -} - -// EditActionsAllowedInEnterprise sets the actions that are allowed in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/permissions/selected-actions -func (s *ActionsService) EditActionsAllowedInEnterprise(ctx context.Context, enterprise string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/permissions/selected-actions", enterprise) - req, err := s.client.NewRequest("PUT", u, actionsAllowed) - if err != nil { - return nil, nil, err - } - - p := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_permissions_orgs.go b/vendor/github.com/google/go-github/v57/github/actions_permissions_orgs.go deleted file mode 100644 index 1a31e4c6..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_permissions_orgs.go +++ /dev/null @@ -1,220 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ActionsPermissions represents a policy for repositories and allowed actions in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions -type ActionsPermissions struct { - EnabledRepositories *string `json:"enabled_repositories,omitempty"` - AllowedActions *string `json:"allowed_actions,omitempty"` - SelectedActionsURL *string `json:"selected_actions_url,omitempty"` -} - -func (a ActionsPermissions) String() string { - return Stringify(a) -} - -// ActionsEnabledOnOrgRepos represents all the repositories in an organization for which Actions is enabled. -type ActionsEnabledOnOrgRepos struct { - TotalCount int `json:"total_count"` - Repositories []*Repository `json:"repositories"` -} - -// ActionsAllowed represents selected actions that are allowed. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions -type ActionsAllowed struct { - GithubOwnedAllowed *bool `json:"github_owned_allowed,omitempty"` - VerifiedAllowed *bool `json:"verified_allowed,omitempty"` - PatternsAllowed []string `json:"patterns_allowed,omitempty"` -} - -func (a ActionsAllowed) String() string { - return Stringify(a) -} - -// GetActionsPermissions gets the GitHub Actions permissions policy for repositories and allowed actions in an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-github-actions-permissions-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/permissions -func (s *ActionsService) GetActionsPermissions(ctx context.Context, org string) (*ActionsPermissions, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - permissions := new(ActionsPermissions) - resp, err := s.client.Do(ctx, req, permissions) - if err != nil { - return nil, resp, err - } - - return permissions, resp, nil -} - -// EditActionsPermissions sets the permissions policy for repositories and allowed actions in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-github-actions-permissions-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions -func (s *ActionsService) EditActionsPermissions(ctx context.Context, org string, actionsPermissions ActionsPermissions) (*ActionsPermissions, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions", org) - req, err := s.client.NewRequest("PUT", u, actionsPermissions) - if err != nil { - return nil, nil, err - } - - p := new(ActionsPermissions) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// ListEnabledReposInOrg lists the selected repositories that are enabled for GitHub Actions in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#list-selected-repositories-enabled-for-github-actions-in-an-organization -// -//meta:operation GET /orgs/{org}/actions/permissions/repositories -func (s *ActionsService) ListEnabledReposInOrg(ctx context.Context, owner string, opts *ListOptions) (*ActionsEnabledOnOrgRepos, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/repositories", owner) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - repos := &ActionsEnabledOnOrgRepos{} - resp, err := s.client.Do(ctx, req, repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// SetEnabledReposInOrg replaces the list of selected repositories that are enabled for GitHub Actions in an organization.. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-selected-repositories-enabled-for-github-actions-in-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions/repositories -func (s *ActionsService) SetEnabledReposInOrg(ctx context.Context, owner string, repositoryIDs []int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/repositories", owner) - - req, err := s.client.NewRequest("PUT", u, struct { - IDs []int64 `json:"selected_repository_ids"` - }{IDs: repositoryIDs}) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// AddEnabledReposInOrg adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#enable-a-selected-repository-for-github-actions-in-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions/repositories/{repository_id} -func (s *ActionsService) AddEnabledReposInOrg(ctx context.Context, owner string, repositoryID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/repositories/%v", owner, repositoryID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// RemoveEnabledReposInOrg removes a single repository from the list of enabled repos for GitHub Actions in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#disable-a-selected-repository-for-github-actions-in-an-organization -// -//meta:operation DELETE /orgs/{org}/actions/permissions/repositories/{repository_id} -func (s *ActionsService) RemoveEnabledReposInOrg(ctx context.Context, owner string, repositoryID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/repositories/%v", owner, repositoryID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// GetActionsAllowed gets the actions that are allowed in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/permissions/selected-actions -func (s *ActionsService) GetActionsAllowed(ctx context.Context, org string) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/selected-actions", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionsAllowed := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, actionsAllowed) - if err != nil { - return nil, resp, err - } - - return actionsAllowed, resp, nil -} - -// EditActionsAllowed sets the actions that are allowed in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions/selected-actions -func (s *ActionsService) EditActionsAllowed(ctx context.Context, org string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/permissions/selected-actions", org) - req, err := s.client.NewRequest("PUT", u, actionsAllowed) - if err != nil { - return nil, nil, err - } - - p := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_required_workflows.go b/vendor/github.com/google/go-github/v57/github/actions_required_workflows.go deleted file mode 100644 index b89741a8..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_required_workflows.go +++ /dev/null @@ -1,267 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// OrgRequiredWorkflow represents a required workflow object at the org level. -type OrgRequiredWorkflow struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - Scope *string `json:"scope,omitempty"` - Ref *string `json:"ref,omitempty"` - State *string `json:"state,omitempty"` - SelectedRepositoriesURL *string `json:"selected_repositories_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Repository *Repository `json:"repository,omitempty"` -} - -// OrgRequiredWorkflows represents the required workflows for the org. -type OrgRequiredWorkflows struct { - TotalCount *int `json:"total_count,omitempty"` - RequiredWorkflows []*OrgRequiredWorkflow `json:"required_workflows,omitempty"` -} - -// CreateUpdateRequiredWorkflowOptions represents the input object used to create or update required workflows. -type CreateUpdateRequiredWorkflowOptions struct { - WorkflowFilePath *string `json:"workflow_file_path,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - Scope *string `json:"scope,omitempty"` - SelectedRepositoryIDs *SelectedRepoIDs `json:"selected_repository_ids,omitempty"` -} - -// RequiredWorkflowSelectedRepos represents the repos that a required workflow is applied to. -type RequiredWorkflowSelectedRepos struct { - TotalCount *int `json:"total_count,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -// RepoRequiredWorkflow represents a required workflow object at the repo level. -type RepoRequiredWorkflow struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - State *string `json:"state,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - BadgeURL *string `json:"badge_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - SourceRepository *Repository `json:"source_repository,omitempty"` -} - -// RepoRequiredWorkflows represents the required workflows for a repo. -type RepoRequiredWorkflows struct { - TotalCount *int `json:"total_count,omitempty"` - RequiredWorkflows []*RepoRequiredWorkflow `json:"required_workflows,omitempty"` -} - -// ListOrgRequiredWorkflows lists the RequiredWorkflows for an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows -func (s *ActionsService) ListOrgRequiredWorkflows(ctx context.Context, org string, opts *ListOptions) (*OrgRequiredWorkflows, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows", org) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflows := new(OrgRequiredWorkflows) - resp, err := s.client.Do(ctx, req, &requiredWorkflows) - if err != nil { - return nil, resp, err - } - - return requiredWorkflows, resp, nil -} - -// CreateRequiredWorkflow creates the required workflow in an org. 
-// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation POST /orgs/{org}/actions/required_workflows -func (s *ActionsService) CreateRequiredWorkflow(ctx context.Context, org string, createRequiredWorkflowOptions *CreateUpdateRequiredWorkflowOptions) (*OrgRequiredWorkflow, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows", org) - req, err := s.client.NewRequest("POST", url, createRequiredWorkflowOptions) - if err != nil { - return nil, nil, err - } - - orgRequiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, orgRequiredWorkflow) - if err != nil { - return nil, resp, err - } - - return orgRequiredWorkflow, resp, nil -} - -// GetRequiredWorkflowByID get the RequiredWorkflows for an org by its ID. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) GetRequiredWorkflowByID(ctx context.Context, owner string, requiredWorkflowID int64) (*OrgRequiredWorkflow, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", owner, requiredWorkflowID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, &requiredWorkflow) - if err != nil { - return nil, resp, err - } - - return requiredWorkflow, resp, nil -} - -// UpdateRequiredWorkflow updates a required workflow in an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PATCH /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) UpdateRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID int64, updateRequiredWorkflowOptions *CreateUpdateRequiredWorkflowOptions) (*OrgRequiredWorkflow, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", org, requiredWorkflowID) - req, err := s.client.NewRequest("PATCH", url, updateRequiredWorkflowOptions) - if err != nil { - return nil, nil, err - } - - orgRequiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, orgRequiredWorkflow) - if err != nil { - return nil, resp, err - } - - return orgRequiredWorkflow, resp, nil -} - -// DeleteRequiredWorkflow deletes a required workflow in an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation DELETE /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) DeleteRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", org, requiredWorkflowID) - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListRequiredWorkflowSelectedRepos lists the Repositories selected for a workflow. 
-// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows/{workflow_id}/repositories -func (s *ActionsService) ListRequiredWorkflowSelectedRepos(ctx context.Context, org string, requiredWorkflowID int64, opts *ListOptions) (*RequiredWorkflowSelectedRepos, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories", org, requiredWorkflowID) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflowRepos := new(RequiredWorkflowSelectedRepos) - resp, err := s.client.Do(ctx, req, &requiredWorkflowRepos) - if err != nil { - return nil, resp, err - } - - return requiredWorkflowRepos, resp, nil -} - -// SetRequiredWorkflowSelectedRepos sets the Repositories selected for a workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PUT /orgs/{org}/actions/required_workflows/{workflow_id}/repositories -func (s *ActionsService) SetRequiredWorkflowSelectedRepos(ctx context.Context, org string, requiredWorkflowID int64, ids SelectedRepoIDs) (*Response, error) { - type repoIDs struct { - SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` - } - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories", org, requiredWorkflowID) - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddRepoToRequiredWorkflow adds the Repository to a required workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PUT /orgs/{org}/actions/required_workflows/{workflow_id}/repositories/{repository_id} -func (s *ActionsService) AddRepoToRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID, repoID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories/%v", org, requiredWorkflowID, repoID) - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// RemoveRepoFromRequiredWorkflow removes the Repository from a required workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation DELETE /orgs/{org}/actions/required_workflows/{workflow_id}/repositories/{repository_id} -func (s *ActionsService) RemoveRepoFromRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID, repoID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories/%v", org, requiredWorkflowID, repoID) - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListRepoRequiredWorkflows lists the RequiredWorkflows for a repo. 
-// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /repos/{owner}/{repo}/actions/required_workflows -func (s *ActionsService) ListRepoRequiredWorkflows(ctx context.Context, owner, repo string, opts *ListOptions) (*RepoRequiredWorkflows, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/required_workflows", owner, repo) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflows := new(RepoRequiredWorkflows) - resp, err := s.client.Do(ctx, req, &requiredWorkflows) - if err != nil { - return nil, resp, err - } - - return requiredWorkflows, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_runner_groups.go b/vendor/github.com/google/go-github/v57/github/actions_runner_groups.go deleted file mode 100644 index a1f453f3..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_runner_groups.go +++ /dev/null @@ -1,337 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RunnerGroup represents a self-hosted runner group configured in an organization. -type RunnerGroup struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - Default *bool `json:"default,omitempty"` - SelectedRepositoriesURL *string `json:"selected_repositories_url,omitempty"` - RunnersURL *string `json:"runners_url,omitempty"` - Inherited *bool `json:"inherited,omitempty"` - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - SelectedWorkflows []string `json:"selected_workflows,omitempty"` - WorkflowRestrictionsReadOnly *bool `json:"workflow_restrictions_read_only,omitempty"` -} - -// RunnerGroups represents a collection of self-hosted runner groups configured for an organization. -type RunnerGroups struct { - TotalCount int `json:"total_count"` - RunnerGroups []*RunnerGroup `json:"runner_groups"` -} - -// CreateRunnerGroupRequest represents a request to create a Runner group for an organization. -type CreateRunnerGroupRequest struct { - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - // List of repository IDs that can access the runner group. - SelectedRepositoryIDs []int64 `json:"selected_repository_ids,omitempty"` - // Runners represent a list of runner IDs to add to the runner group. - Runners []int64 `json:"runners,omitempty"` - // If set to True, public repos can use this runner group - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - // If true, the runner group will be restricted to running only the workflows specified in the SelectedWorkflows slice. - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - // List of workflows the runner group should be allowed to run. This setting will be ignored unless RestrictedToWorkflows is set to true. - SelectedWorkflows []string `json:"selected_workflows,omitempty"` -} - -// UpdateRunnerGroupRequest represents a request to update a Runner group for an organization. 
-type UpdateRunnerGroupRequest struct { - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - SelectedWorkflows []string `json:"selected_workflows,omitempty"` -} - -// SetRepoAccessRunnerGroupRequest represents a request to replace the list of repositories -// that can access a self-hosted runner group configured in an organization. -type SetRepoAccessRunnerGroupRequest struct { - // Updated list of repository IDs that should be given access to the runner group. - SelectedRepositoryIDs []int64 `json:"selected_repository_ids"` -} - -// SetRunnerGroupRunnersRequest represents a request to replace the list of -// self-hosted runners that are part of an organization runner group. -type SetRunnerGroupRunnersRequest struct { - // Updated list of runner IDs that should be given access to the runner group. - Runners []int64 `json:"runners"` -} - -// ListOrgRunnerGroupOptions extend ListOptions to have the optional parameters VisibleToRepository. -type ListOrgRunnerGroupOptions struct { - ListOptions - - // Only return runner groups that are allowed to be used by this repository. - VisibleToRepository string `url:"visible_to_repository,omitempty"` -} - -// ListOrganizationRunnerGroups lists all self-hosted runner groups configured in an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-self-hosted-runner-groups-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runner-groups -func (s *ActionsService) ListOrganizationRunnerGroups(ctx context.Context, org string, opts *ListOrgRunnerGroupOptions) (*RunnerGroups, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - groups := &RunnerGroups{} - resp, err := s.client.Do(ctx, req, &groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// GetOrganizationRunnerGroup gets a specific self-hosted runner group for an organization using its RunnerGroup ID. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#get-a-self-hosted-runner-group-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runner-groups/{runner_group_id} -func (s *ActionsService) GetOrganizationRunnerGroup(ctx context.Context, org string, groupID int64) (*RunnerGroup, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v", org, groupID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(RunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// DeleteOrganizationRunnerGroup deletes a self-hosted runner group from an organization. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#delete-a-self-hosted-runner-group-from-an-organization -// -//meta:operation DELETE /orgs/{org}/actions/runner-groups/{runner_group_id} -func (s *ActionsService) DeleteOrganizationRunnerGroup(ctx context.Context, org string, groupID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v", org, groupID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CreateOrganizationRunnerGroup creates a new self-hosted runner group for an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#create-a-self-hosted-runner-group-for-an-organization -// -//meta:operation POST /orgs/{org}/actions/runner-groups -func (s *ActionsService) CreateOrganizationRunnerGroup(ctx context.Context, org string, createReq CreateRunnerGroupRequest) (*RunnerGroup, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups", org) - req, err := s.client.NewRequest("POST", u, createReq) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(RunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// UpdateOrganizationRunnerGroup updates a self-hosted runner group for an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#update-a-self-hosted-runner-group-for-an-organization -// -//meta:operation PATCH /orgs/{org}/actions/runner-groups/{runner_group_id} -func (s *ActionsService) UpdateOrganizationRunnerGroup(ctx context.Context, org string, groupID int64, updateReq UpdateRunnerGroupRequest) (*RunnerGroup, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v", org, groupID) - req, err := s.client.NewRequest("PATCH", u, updateReq) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(RunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// ListRepositoryAccessRunnerGroup lists the repositories with access to a self-hosted runner group configured in an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-repository-access-to-a-self-hosted-runner-group-in-an-organization -// -//meta:operation GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories -func (s *ActionsService) ListRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID int64, opts *ListOptions) (*ListRepositories, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories", org, groupID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - repos := &ListRepositories{} - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// SetRepositoryAccessRunnerGroup replaces the list of repositories that have access to a self-hosted runner group configured in an organization -// with a new List of repositories. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#set-repository-access-for-a-self-hosted-runner-group-in-an-organization -// -//meta:operation PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories -func (s *ActionsService) SetRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID int64, ids SetRepoAccessRunnerGroupRequest) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories", org, groupID) - - req, err := s.client.NewRequest("PUT", u, ids) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddRepositoryAccessRunnerGroup adds a repository to the list of selected repositories that can access a self-hosted runner group. -// The runner group must have visibility set to 'selected'. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#add-repository-access-to-a-self-hosted-runner-group-in-an-organization -// -//meta:operation PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id} -func (s *ActionsService) AddRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID, repoID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories/%v", org, groupID, repoID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveRepositoryAccessRunnerGroup removes a repository from the list of selected repositories that can access a self-hosted runner group. -// The runner group must have visibility set to 'selected'. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#remove-repository-access-to-a-self-hosted-runner-group-in-an-organization -// -//meta:operation DELETE /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id} -func (s *ActionsService) RemoveRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID, repoID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories/%v", org, groupID, repoID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListRunnerGroupRunners lists self-hosted runners that are in a specific organization group. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-self-hosted-runners-in-a-group-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners -func (s *ActionsService) ListRunnerGroupRunners(ctx context.Context, org string, groupID int64, opts *ListOptions) (*Runners, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners", org, groupID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runners := &Runners{} - resp, err := s.client.Do(ctx, req, &runners) - if err != nil { - return nil, resp, err - } - - return runners, resp, nil -} - -// SetRunnerGroupRunners replaces the list of self-hosted runners that are part of an organization runner group -// with a new list of runners. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#set-self-hosted-runners-in-a-group-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/runners -func (s *ActionsService) SetRunnerGroupRunners(ctx context.Context, org string, groupID int64, ids SetRunnerGroupRunnersRequest) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners", org, groupID) - - req, err := s.client.NewRequest("PUT", u, ids) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddRunnerGroupRunners adds a self-hosted runner to a runner group configured in an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#add-a-self-hosted-runner-to-a-group-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id} -func (s *ActionsService) AddRunnerGroupRunners(ctx context.Context, org string, groupID, runnerID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners/%v", org, groupID, runnerID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveRunnerGroupRunners removes a self-hosted runner from a group configured in an organization. -// The runner is then returned to the default group. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#remove-a-self-hosted-runner-from-a-group-for-an-organization -// -//meta:operation DELETE /orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id} -func (s *ActionsService) RemoveRunnerGroupRunners(ctx context.Context, org string, groupID, runnerID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners/%v", org, groupID, runnerID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_runners.go b/vendor/github.com/google/go-github/v57/github/actions_runners.go deleted file mode 100644 index 90cf5804..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_runners.go +++ /dev/null @@ -1,371 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RunnerApplicationDownload represents a binary for the self-hosted runner application that can be downloaded. -type RunnerApplicationDownload struct { - OS *string `json:"os,omitempty"` - Architecture *string `json:"architecture,omitempty"` - DownloadURL *string `json:"download_url,omitempty"` - Filename *string `json:"filename,omitempty"` - TempDownloadToken *string `json:"temp_download_token,omitempty"` - SHA256Checksum *string `json:"sha256_checksum,omitempty"` -} - -// ListRunnerApplicationDownloads lists self-hosted runner application binaries that can be downloaded and run. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#list-runner-applications-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/runners/downloads -func (s *ActionsService) ListRunnerApplicationDownloads(ctx context.Context, owner, repo string) ([]*RunnerApplicationDownload, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/downloads", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rads []*RunnerApplicationDownload - resp, err := s.client.Do(ctx, req, &rads) - if err != nil { - return nil, resp, err - } - - return rads, resp, nil -} - -// GenerateJITConfigRequest specifies body parameters to GenerateRepoJITConfig. -type GenerateJITConfigRequest struct { - Name string `json:"name"` - RunnerGroupID int64 `json:"runner_group_id"` - WorkFolder *string `json:"work_folder,omitempty"` - - // Labels represents the names of the custom labels to add to the runner. - // Minimum items: 1. Maximum items: 100. - Labels []string `json:"labels"` -} - -// JITRunnerConfig represents encoded JIT configuration that can be used to bootstrap a self-hosted runner. -type JITRunnerConfig struct { - Runner *Runner `json:"runner,omitempty"` - EncodedJITConfig *string `json:"encoded_jit_config,omitempty"` -} - -// GenerateOrgJITConfig generate a just-in-time configuration for an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-configuration-for-a-just-in-time-runner-for-an-organization -// -//meta:operation POST /orgs/{org}/actions/runners/generate-jitconfig -func (s *ActionsService) GenerateOrgJITConfig(ctx context.Context, owner string, request *GenerateJITConfigRequest) (*JITRunnerConfig, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/generate-jitconfig", owner) - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - jitConfig := new(JITRunnerConfig) - resp, err := s.client.Do(ctx, req, jitConfig) - if err != nil { - return nil, resp, err - } - - return jitConfig, resp, nil -} - -// GenerateRepoJITConfig generates a just-in-time configuration for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-configuration-for-a-just-in-time-runner-for-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig -func (s *ActionsService) GenerateRepoJITConfig(ctx context.Context, owner, repo string, request *GenerateJITConfigRequest) (*JITRunnerConfig, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/generate-jitconfig", owner, repo) - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - jitConfig := new(JITRunnerConfig) - resp, err := s.client.Do(ctx, req, jitConfig) - if err != nil { - return nil, resp, err - } - - return jitConfig, resp, nil -} - -// RegistrationToken represents a token that can be used to add a self-hosted runner to a repository. -type RegistrationToken struct { - Token *string `json:"token,omitempty"` - ExpiresAt *Timestamp `json:"expires_at,omitempty"` -} - -// CreateRegistrationToken creates a token that can be used to add a self-hosted runner. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-a-registration-token-for-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/actions/runners/registration-token -func (s *ActionsService) CreateRegistrationToken(ctx context.Context, owner, repo string) (*RegistrationToken, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/registration-token", owner, repo) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - registrationToken := new(RegistrationToken) - resp, err := s.client.Do(ctx, req, registrationToken) - if err != nil { - return nil, resp, err - } - - return registrationToken, resp, nil -} - -// Runner represents a self-hosted runner registered with a repository. -type Runner struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - OS *string `json:"os,omitempty"` - Status *string `json:"status,omitempty"` - Busy *bool `json:"busy,omitempty"` - Labels []*RunnerLabels `json:"labels,omitempty"` -} - -// RunnerLabels represents a collection of labels attached to each runner. -type RunnerLabels struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Type *string `json:"type,omitempty"` -} - -// Runners represents a collection of self-hosted runners for a repository. -type Runners struct { - TotalCount int `json:"total_count"` - Runners []*Runner `json:"runners"` -} - -// ListRunners lists all the self-hosted runners for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#list-self-hosted-runners-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/runners -func (s *ActionsService) ListRunners(ctx context.Context, owner, repo string, opts *ListOptions) (*Runners, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runners := &Runners{} - resp, err := s.client.Do(ctx, req, &runners) - if err != nil { - return nil, resp, err - } - - return runners, resp, nil -} - -// GetRunner gets a specific self-hosted runner for a repository using its runner ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#get-a-self-hosted-runner-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/runners/{runner_id} -func (s *ActionsService) GetRunner(ctx context.Context, owner, repo string, runnerID int64) (*Runner, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/%v", owner, repo, runnerID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runner := new(Runner) - resp, err := s.client.Do(ctx, req, runner) - if err != nil { - return nil, resp, err - } - - return runner, resp, nil -} - -// RemoveToken represents a token that can be used to remove a self-hosted runner from a repository. -type RemoveToken struct { - Token *string `json:"token,omitempty"` - ExpiresAt *Timestamp `json:"expires_at,omitempty"` -} - -// CreateRemoveToken creates a token that can be used to remove a self-hosted runner from a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-a-remove-token-for-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/actions/runners/remove-token -func (s *ActionsService) CreateRemoveToken(ctx context.Context, owner, repo string) (*RemoveToken, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/remove-token", owner, repo) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - removeToken := new(RemoveToken) - resp, err := s.client.Do(ctx, req, removeToken) - if err != nil { - return nil, resp, err - } - - return removeToken, resp, nil -} - -// RemoveRunner forces the removal of a self-hosted runner in a repository using the runner id. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#delete-a-self-hosted-runner-from-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/runners/{runner_id} -func (s *ActionsService) RemoveRunner(ctx context.Context, owner, repo string, runnerID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runners/%v", owner, repo, runnerID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListOrganizationRunnerApplicationDownloads lists self-hosted runner application binaries that can be downloaded and run. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#list-runner-applications-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runners/downloads -func (s *ActionsService) ListOrganizationRunnerApplicationDownloads(ctx context.Context, owner string) ([]*RunnerApplicationDownload, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/downloads", owner) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rads []*RunnerApplicationDownload - resp, err := s.client.Do(ctx, req, &rads) - if err != nil { - return nil, resp, err - } - - return rads, resp, nil -} - -// CreateOrganizationRegistrationToken creates a token that can be used to add a self-hosted runner to an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-a-registration-token-for-an-organization -// -//meta:operation POST /orgs/{org}/actions/runners/registration-token -func (s *ActionsService) CreateOrganizationRegistrationToken(ctx context.Context, owner string) (*RegistrationToken, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/registration-token", owner) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - registrationToken := new(RegistrationToken) - resp, err := s.client.Do(ctx, req, registrationToken) - if err != nil { - return nil, resp, err - } - - return registrationToken, resp, nil -} - -// ListOrganizationRunners lists all the self-hosted runners for an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#list-self-hosted-runners-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runners -func (s *ActionsService) ListOrganizationRunners(ctx context.Context, owner string, opts *ListOptions) (*Runners, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners", owner) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runners := &Runners{} - resp, err := s.client.Do(ctx, req, &runners) - if err != nil { - return nil, resp, err - } - - return runners, resp, nil -} - -// GetOrganizationRunner gets a specific self-hosted runner for an organization using its runner ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#get-a-self-hosted-runner-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/runners/{runner_id} -func (s *ActionsService) GetOrganizationRunner(ctx context.Context, owner string, runnerID int64) (*Runner, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/%v", owner, runnerID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runner := new(Runner) - resp, err := s.client.Do(ctx, req, runner) - if err != nil { - return nil, resp, err - } - - return runner, resp, nil -} - -// CreateOrganizationRemoveToken creates a token that can be used to remove a self-hosted runner from an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#create-a-remove-token-for-an-organization -// -//meta:operation POST /orgs/{org}/actions/runners/remove-token -func (s *ActionsService) CreateOrganizationRemoveToken(ctx context.Context, owner string) (*RemoveToken, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/remove-token", owner) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - removeToken := new(RemoveToken) - resp, err := s.client.Do(ctx, req, removeToken) - if err != nil { - return nil, resp, err - } - - return removeToken, resp, nil -} - -// RemoveOrganizationRunner forces the removal of a self-hosted runner from an organization using the runner id. -// -// GitHub API docs: https://docs.github.com/rest/actions/self-hosted-runners#delete-a-self-hosted-runner-from-an-organization -// -//meta:operation DELETE /orgs/{org}/actions/runners/{runner_id} -func (s *ActionsService) RemoveOrganizationRunner(ctx context.Context, owner string, runnerID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/runners/%v", owner, runnerID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_secrets.go b/vendor/github.com/google/go-github/v57/github/actions_secrets.go deleted file mode 100644 index 2d4ba98d..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_secrets.go +++ /dev/null @@ -1,396 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" - "strconv" -) - -// PublicKey represents the public key that should be used to encrypt secrets. 
-type PublicKey struct { - KeyID *string `json:"key_id"` - Key *string `json:"key"` -} - -// UnmarshalJSON implements the json.Unmarshaler interface. -// This ensures GitHub Enterprise versions which return a numeric key id -// do not error out when unmarshaling. -func (p *PublicKey) UnmarshalJSON(data []byte) error { - var pk struct { - KeyID interface{} `json:"key_id"` - Key *string `json:"key"` - } - - if err := json.Unmarshal(data, &pk); err != nil { - return err - } - - p.Key = pk.Key - - switch v := pk.KeyID.(type) { - case nil: - return nil - case string: - p.KeyID = &v - case float64: - p.KeyID = String(strconv.FormatFloat(v, 'f', -1, 64)) - default: - return fmt.Errorf("unable to unmarshal %T as a string", v) - } - - return nil -} - -func (s *ActionsService) getPublicKey(ctx context.Context, url string) (*PublicKey, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - pubKey := new(PublicKey) - resp, err := s.client.Do(ctx, req, pubKey) - if err != nil { - return nil, resp, err - } - - return pubKey, resp, nil -} - -// GetRepoPublicKey gets a public key that should be used for secret encryption. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-a-repository-public-key -// -//meta:operation GET /repos/{owner}/{repo}/actions/secrets/public-key -func (s *ActionsService) GetRepoPublicKey(ctx context.Context, owner, repo string) (*PublicKey, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/secrets/public-key", owner, repo) - return s.getPublicKey(ctx, url) -} - -// GetOrgPublicKey gets a public key that should be used for secret encryption. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-an-organization-public-key -// -//meta:operation GET /orgs/{org}/actions/secrets/public-key -func (s *ActionsService) GetOrgPublicKey(ctx context.Context, org string) (*PublicKey, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/public-key", org) - return s.getPublicKey(ctx, url) -} - -// GetEnvPublicKey gets a public key that should be used for secret encryption. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-an-environment-public-key -// -//meta:operation GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key -func (s *ActionsService) GetEnvPublicKey(ctx context.Context, repoID int, env string) (*PublicKey, *Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/secrets/public-key", repoID, env) - return s.getPublicKey(ctx, url) -} - -// Secret represents a repository action secret. -type Secret struct { - Name string `json:"name"` - CreatedAt Timestamp `json:"created_at"` - UpdatedAt Timestamp `json:"updated_at"` - Visibility string `json:"visibility,omitempty"` - SelectedRepositoriesURL string `json:"selected_repositories_url,omitempty"` -} - -// Secrets represents one item from the ListSecrets response. 
-type Secrets struct { - TotalCount int `json:"total_count"` - Secrets []*Secret `json:"secrets"` -} - -func (s *ActionsService) listSecrets(ctx context.Context, url string, opts *ListOptions) (*Secrets, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - secrets := new(Secrets) - resp, err := s.client.Do(ctx, req, &secrets) - if err != nil { - return nil, resp, err - } - - return secrets, resp, nil -} - -// ListRepoSecrets lists all secrets available in a repository -// without revealing their encrypted values. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#list-repository-secrets -// -//meta:operation GET /repos/{owner}/{repo}/actions/secrets -func (s *ActionsService) ListRepoSecrets(ctx context.Context, owner, repo string, opts *ListOptions) (*Secrets, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/secrets", owner, repo) - return s.listSecrets(ctx, url, opts) -} - -// ListOrgSecrets lists all secrets available in an organization -// without revealing their encrypted values. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#list-organization-secrets -// -//meta:operation GET /orgs/{org}/actions/secrets -func (s *ActionsService) ListOrgSecrets(ctx context.Context, org string, opts *ListOptions) (*Secrets, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets", org) - return s.listSecrets(ctx, url, opts) -} - -// ListEnvSecrets lists all secrets available in an environment. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#list-environment-secrets -// -//meta:operation GET /repositories/{repository_id}/environments/{environment_name}/secrets -func (s *ActionsService) ListEnvSecrets(ctx context.Context, repoID int, env string, opts *ListOptions) (*Secrets, *Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/secrets", repoID, env) - return s.listSecrets(ctx, url, opts) -} - -func (s *ActionsService) getSecret(ctx context.Context, url string) (*Secret, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - secret := new(Secret) - resp, err := s.client.Do(ctx, req, secret) - if err != nil { - return nil, resp, err - } - - return secret, resp, nil -} - -// GetRepoSecret gets a single repository secret without revealing its encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-a-repository-secret -// -//meta:operation GET /repos/{owner}/{repo}/actions/secrets/{secret_name} -func (s *ActionsService) GetRepoSecret(ctx context.Context, owner, repo, name string) (*Secret, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/secrets/%v", owner, repo, name) - return s.getSecret(ctx, url) -} - -// GetOrgSecret gets a single organization secret without revealing its encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-an-organization-secret -// -//meta:operation GET /orgs/{org}/actions/secrets/{secret_name} -func (s *ActionsService) GetOrgSecret(ctx context.Context, org, name string) (*Secret, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v", org, name) - return s.getSecret(ctx, url) -} - -// GetEnvSecret gets a single environment secret without revealing its encrypted value. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#get-an-environment-secret -// -//meta:operation GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name} -func (s *ActionsService) GetEnvSecret(ctx context.Context, repoID int, env, secretName string) (*Secret, *Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/secrets/%v", repoID, env, secretName) - return s.getSecret(ctx, url) -} - -// SelectedRepoIDs are the repository IDs that have access to the actions secrets. -type SelectedRepoIDs []int64 - -// EncryptedSecret represents a secret that is encrypted using a public key. -// -// The value of EncryptedValue must be your secret, encrypted with -// LibSodium (see documentation here: https://libsodium.gitbook.io/doc/bindings_for_other_languages) -// using the public key retrieved using the GetPublicKey method. -type EncryptedSecret struct { - Name string `json:"-"` - KeyID string `json:"key_id"` - EncryptedValue string `json:"encrypted_value"` - Visibility string `json:"visibility,omitempty"` - SelectedRepositoryIDs SelectedRepoIDs `json:"selected_repository_ids,omitempty"` -} - -func (s *ActionsService) putSecret(ctx context.Context, url string, eSecret *EncryptedSecret) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, eSecret) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CreateOrUpdateRepoSecret creates or updates a repository secret with an encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#create-or-update-a-repository-secret -// -//meta:operation PUT /repos/{owner}/{repo}/actions/secrets/{secret_name} -func (s *ActionsService) CreateOrUpdateRepoSecret(ctx context.Context, owner, repo string, eSecret *EncryptedSecret) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/secrets/%v", owner, repo, eSecret.Name) - return s.putSecret(ctx, url, eSecret) -} - -// CreateOrUpdateOrgSecret creates or updates an organization secret with an encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#create-or-update-an-organization-secret -// -//meta:operation PUT /orgs/{org}/actions/secrets/{secret_name} -func (s *ActionsService) CreateOrUpdateOrgSecret(ctx context.Context, org string, eSecret *EncryptedSecret) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v", org, eSecret.Name) - return s.putSecret(ctx, url, eSecret) -} - -// CreateOrUpdateEnvSecret creates or updates a single environment secret with an encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#create-or-update-an-environment-secret -// -//meta:operation PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name} -func (s *ActionsService) CreateOrUpdateEnvSecret(ctx context.Context, repoID int, env string, eSecret *EncryptedSecret) (*Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/secrets/%v", repoID, env, eSecret.Name) - return s.putSecret(ctx, url, eSecret) -} - -func (s *ActionsService) deleteSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteRepoSecret deletes a secret in a repository using the secret name. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#delete-a-repository-secret -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name} -func (s *ActionsService) DeleteRepoSecret(ctx context.Context, owner, repo, name string) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/secrets/%v", owner, repo, name) - return s.deleteSecret(ctx, url) -} - -// DeleteOrgSecret deletes a secret in an organization using the secret name. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#delete-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/actions/secrets/{secret_name} -func (s *ActionsService) DeleteOrgSecret(ctx context.Context, org, name string) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v", org, name) - return s.deleteSecret(ctx, url) -} - -// DeleteEnvSecret deletes a secret in an environment using the secret name. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#delete-an-environment-secret -// -//meta:operation DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name} -func (s *ActionsService) DeleteEnvSecret(ctx context.Context, repoID int, env, secretName string) (*Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/secrets/%v", repoID, env, secretName) - return s.deleteSecret(ctx, url) -} - -// SelectedReposList represents the list of repositories selected for an organization secret. -type SelectedReposList struct { - TotalCount *int `json:"total_count,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -func (s *ActionsService) listSelectedReposForSecret(ctx context.Context, url string, opts *ListOptions) (*SelectedReposList, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - result := new(SelectedReposList) - resp, err := s.client.Do(ctx, req, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// ListSelectedReposForOrgSecret lists all repositories that have access to a secret. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#list-selected-repositories-for-an-organization-secret -// -//meta:operation GET /orgs/{org}/actions/secrets/{secret_name}/repositories -func (s *ActionsService) ListSelectedReposForOrgSecret(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories", org, name) - return s.listSelectedReposForSecret(ctx, url, opts) -} - -func (s *ActionsService) setSelectedReposForSecret(ctx context.Context, url string, ids SelectedRepoIDs) (*Response, error) { - type repoIDs struct { - SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` - } - - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// SetSelectedReposForOrgSecret sets the repositories that have access to a secret. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#set-selected-repositories-for-an-organization-secret -// -//meta:operation PUT /orgs/{org}/actions/secrets/{secret_name}/repositories -func (s *ActionsService) SetSelectedReposForOrgSecret(ctx context.Context, org, name string, ids SelectedRepoIDs) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories", org, name) - return s.setSelectedReposForSecret(ctx, url, ids) -} - -func (s *ActionsService) addSelectedRepoToSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddSelectedRepoToOrgSecret adds a repository to an organization secret. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#add-selected-repository-to-an-organization-secret -// -//meta:operation PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id} -func (s *ActionsService) AddSelectedRepoToOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories/%v", org, name, *repo.ID) - return s.addSelectedRepoToSecret(ctx, url) -} - -func (s *ActionsService) removeSelectedRepoFromSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveSelectedRepoFromOrgSecret removes a repository from an organization secret. -// -// GitHub API docs: https://docs.github.com/rest/actions/secrets#remove-selected-repository-from-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id} -func (s *ActionsService) RemoveSelectedRepoFromOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories/%v", org, name, *repo.ID) - return s.removeSelectedRepoFromSecret(ctx, url) -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_variables.go b/vendor/github.com/google/go-github/v57/github/actions_variables.go deleted file mode 100644 index 244d1590..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_variables.go +++ /dev/null @@ -1,331 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ActionsVariable represents a repository action variable. -type ActionsVariable struct { - Name string `json:"name"` - Value string `json:"value"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Visibility *string `json:"visibility,omitempty"` - // Used by ListOrgVariables and GetOrgVariables - SelectedRepositoriesURL *string `json:"selected_repositories_url,omitempty"` - // Used by UpdateOrgVariable and CreateOrgVariable - SelectedRepositoryIDs *SelectedRepoIDs `json:"selected_repository_ids,omitempty"` -} - -// ActionsVariables represents one item from the ListVariables response. 
-type ActionsVariables struct { - TotalCount int `json:"total_count"` - Variables []*ActionsVariable `json:"variables"` -} - -func (s *ActionsService) listVariables(ctx context.Context, url string, opts *ListOptions) (*ActionsVariables, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - variables := new(ActionsVariables) - resp, err := s.client.Do(ctx, req, &variables) - if err != nil { - return nil, resp, err - } - - return variables, resp, nil -} - -// ListRepoVariables lists all variables available in a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#list-repository-variables -// -//meta:operation GET /repos/{owner}/{repo}/actions/variables -func (s *ActionsService) ListRepoVariables(ctx context.Context, owner, repo string, opts *ListOptions) (*ActionsVariables, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/variables", owner, repo) - return s.listVariables(ctx, url, opts) -} - -// ListOrgVariables lists all variables available in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#list-organization-variables -// -//meta:operation GET /orgs/{org}/actions/variables -func (s *ActionsService) ListOrgVariables(ctx context.Context, org string, opts *ListOptions) (*ActionsVariables, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables", org) - return s.listVariables(ctx, url, opts) -} - -// ListEnvVariables lists all variables available in an environment. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#list-environment-variables -// -//meta:operation GET /repositories/{repository_id}/environments/{environment_name}/variables -func (s *ActionsService) ListEnvVariables(ctx context.Context, repoID int, env string, opts *ListOptions) (*ActionsVariables, *Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/variables", repoID, env) - return s.listVariables(ctx, url, opts) -} - -func (s *ActionsService) getVariable(ctx context.Context, url string) (*ActionsVariable, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - variable := new(ActionsVariable) - resp, err := s.client.Do(ctx, req, variable) - if err != nil { - return nil, resp, err - } - - return variable, resp, nil -} - -// GetRepoVariable gets a single repository variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#get-a-repository-variable -// -//meta:operation GET /repos/{owner}/{repo}/actions/variables/{name} -func (s *ActionsService) GetRepoVariable(ctx context.Context, owner, repo, name string) (*ActionsVariable, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/variables/%v", owner, repo, name) - return s.getVariable(ctx, url) -} - -// GetOrgVariable gets a single organization variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#get-an-organization-variable -// -//meta:operation GET /orgs/{org}/actions/variables/{name} -func (s *ActionsService) GetOrgVariable(ctx context.Context, org, name string) (*ActionsVariable, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v", org, name) - return s.getVariable(ctx, url) -} - -// GetEnvVariable gets a single environment variable. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/variables#get-an-environment-variable -// -//meta:operation GET /repositories/{repository_id}/environments/{environment_name}/variables/{name} -func (s *ActionsService) GetEnvVariable(ctx context.Context, repoID int, env, variableName string) (*ActionsVariable, *Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/variables/%v", repoID, env, variableName) - return s.getVariable(ctx, url) -} - -func (s *ActionsService) postVariable(ctx context.Context, url string, variable *ActionsVariable) (*Response, error) { - req, err := s.client.NewRequest("POST", url, variable) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// CreateRepoVariable creates a repository variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#create-a-repository-variable -// -//meta:operation POST /repos/{owner}/{repo}/actions/variables -func (s *ActionsService) CreateRepoVariable(ctx context.Context, owner, repo string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/variables", owner, repo) - return s.postVariable(ctx, url, variable) -} - -// CreateOrgVariable creates an organization variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#create-an-organization-variable -// -//meta:operation POST /orgs/{org}/actions/variables -func (s *ActionsService) CreateOrgVariable(ctx context.Context, org string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables", org) - return s.postVariable(ctx, url, variable) -} - -// CreateEnvVariable creates an environment variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#create-an-environment-variable -// -//meta:operation POST /repositories/{repository_id}/environments/{environment_name}/variables -func (s *ActionsService) CreateEnvVariable(ctx context.Context, repoID int, env string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/variables", repoID, env) - return s.postVariable(ctx, url, variable) -} - -func (s *ActionsService) patchVariable(ctx context.Context, url string, variable *ActionsVariable) (*Response, error) { - req, err := s.client.NewRequest("PATCH", url, variable) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// UpdateRepoVariable updates a repository variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#update-a-repository-variable -// -//meta:operation PATCH /repos/{owner}/{repo}/actions/variables/{name} -func (s *ActionsService) UpdateRepoVariable(ctx context.Context, owner, repo string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/variables/%v", owner, repo, variable.Name) - return s.patchVariable(ctx, url, variable) -} - -// UpdateOrgVariable updates an organization variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#update-an-organization-variable -// -//meta:operation PATCH /orgs/{org}/actions/variables/{name} -func (s *ActionsService) UpdateOrgVariable(ctx context.Context, org string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v", org, variable.Name) - return s.patchVariable(ctx, url, variable) -} - -// UpdateEnvVariable updates an environment variable. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/variables#update-an-environment-variable -// -//meta:operation PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name} -func (s *ActionsService) UpdateEnvVariable(ctx context.Context, repoID int, env string, variable *ActionsVariable) (*Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/variables/%v", repoID, env, variable.Name) - return s.patchVariable(ctx, url, variable) -} - -func (s *ActionsService) deleteVariable(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteRepoVariable deletes a variable in a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#delete-a-repository-variable -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/variables/{name} -func (s *ActionsService) DeleteRepoVariable(ctx context.Context, owner, repo, name string) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/variables/%v", owner, repo, name) - return s.deleteVariable(ctx, url) -} - -// DeleteOrgVariable deletes a variable in an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#delete-an-organization-variable -// -//meta:operation DELETE /orgs/{org}/actions/variables/{name} -func (s *ActionsService) DeleteOrgVariable(ctx context.Context, org, name string) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v", org, name) - return s.deleteVariable(ctx, url) -} - -// DeleteEnvVariable deletes a variable in an environment. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#delete-an-environment-variable -// -//meta:operation DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name} -func (s *ActionsService) DeleteEnvVariable(ctx context.Context, repoID int, env, variableName string) (*Response, error) { - url := fmt.Sprintf("repositories/%v/environments/%v/variables/%v", repoID, env, variableName) - return s.deleteVariable(ctx, url) -} - -func (s *ActionsService) listSelectedReposForVariable(ctx context.Context, url string, opts *ListOptions) (*SelectedReposList, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - result := new(SelectedReposList) - resp, err := s.client.Do(ctx, req, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// ListSelectedReposForOrgVariable lists all repositories that have access to a variable. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/variables#list-selected-repositories-for-an-organization-variable -// -//meta:operation GET /orgs/{org}/actions/variables/{name}/repositories -func (s *ActionsService) ListSelectedReposForOrgVariable(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories", org, name) - return s.listSelectedReposForVariable(ctx, url, opts) -} - -func (s *ActionsService) setSelectedReposForVariable(ctx context.Context, url string, ids SelectedRepoIDs) (*Response, error) { - type repoIDs struct { - SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` - } - - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// SetSelectedReposForOrgVariable sets the repositories that have access to a variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#set-selected-repositories-for-an-organization-variable -// -//meta:operation PUT /orgs/{org}/actions/variables/{name}/repositories -func (s *ActionsService) SetSelectedReposForOrgVariable(ctx context.Context, org, name string, ids SelectedRepoIDs) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories", org, name) - return s.setSelectedReposForVariable(ctx, url, ids) -} - -func (s *ActionsService) addSelectedRepoToVariable(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddSelectedRepoToOrgVariable adds a repository to an organization variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#add-selected-repository-to-an-organization-variable -// -//meta:operation PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id} -func (s *ActionsService) AddSelectedRepoToOrgVariable(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories/%v", org, name, *repo.ID) - return s.addSelectedRepoToVariable(ctx, url) -} - -func (s *ActionsService) removeSelectedRepoFromVariable(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveSelectedRepoFromOrgVariable removes a repository from an organization variable. -// -// GitHub API docs: https://docs.github.com/rest/actions/variables#remove-selected-repository-from-an-organization-variable -// -//meta:operation DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id} -func (s *ActionsService) RemoveSelectedRepoFromOrgVariable(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories/%v", org, name, *repo.ID) - return s.removeSelectedRepoFromVariable(ctx, url) -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_workflow_jobs.go b/vendor/github.com/google/go-github/v57/github/actions_workflow_jobs.go deleted file mode 100644 index 0e0eb6e1..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_workflow_jobs.go +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" - "net/url" -) - -// TaskStep represents a single task step from a sequence of tasks of a job. -type TaskStep struct { - Name *string `json:"name,omitempty"` - Status *string `json:"status,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - Number *int64 `json:"number,omitempty"` - StartedAt *Timestamp `json:"started_at,omitempty"` - CompletedAt *Timestamp `json:"completed_at,omitempty"` -} - -// WorkflowJob represents a repository action workflow job. -type WorkflowJob struct { - ID *int64 `json:"id,omitempty"` - RunID *int64 `json:"run_id,omitempty"` - RunURL *string `json:"run_url,omitempty"` - NodeID *string `json:"node_id,omitempty"` - HeadBranch *string `json:"head_branch,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Status *string `json:"status,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - StartedAt *Timestamp `json:"started_at,omitempty"` - CompletedAt *Timestamp `json:"completed_at,omitempty"` - Name *string `json:"name,omitempty"` - Steps []*TaskStep `json:"steps,omitempty"` - CheckRunURL *string `json:"check_run_url,omitempty"` - // Labels represents runner labels from the `runs-on:` key from a GitHub Actions workflow. - Labels []string `json:"labels,omitempty"` - RunnerID *int64 `json:"runner_id,omitempty"` - RunnerName *string `json:"runner_name,omitempty"` - RunnerGroupID *int64 `json:"runner_group_id,omitempty"` - RunnerGroupName *string `json:"runner_group_name,omitempty"` - RunAttempt *int64 `json:"run_attempt,omitempty"` - WorkflowName *string `json:"workflow_name,omitempty"` -} - -// Jobs represents a slice of repository action workflow job. -type Jobs struct { - TotalCount *int `json:"total_count,omitempty"` - Jobs []*WorkflowJob `json:"jobs,omitempty"` -} - -// ListWorkflowJobsOptions specifies optional parameters to ListWorkflowJobs. -type ListWorkflowJobsOptions struct { - // Filter specifies how jobs should be filtered by their completed_at timestamp. - // Possible values are: - // latest - Returns jobs from the most recent execution of the workflow run - // all - Returns all jobs for a workflow run, including from old executions of the workflow run - // - // Default value is "latest". - Filter string `url:"filter,omitempty"` - ListOptions -} - -// ListWorkflowJobs lists all jobs for a workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-jobs#list-jobs-for-a-workflow-run -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs -func (s *ActionsService) ListWorkflowJobs(ctx context.Context, owner, repo string, runID int64, opts *ListWorkflowJobsOptions) (*Jobs, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/actions/runs/%v/jobs", owner, repo, runID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - jobs := new(Jobs) - resp, err := s.client.Do(ctx, req, &jobs) - if err != nil { - return nil, resp, err - } - - return jobs, resp, nil -} - -// GetWorkflowJobByID gets a specific job in a workflow run by ID. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-jobs#get-a-job-for-a-workflow-run -// -//meta:operation GET /repos/{owner}/{repo}/actions/jobs/{job_id} -func (s *ActionsService) GetWorkflowJobByID(ctx context.Context, owner, repo string, jobID int64) (*WorkflowJob, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/jobs/%v", owner, repo, jobID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - job := new(WorkflowJob) - resp, err := s.client.Do(ctx, req, job) - if err != nil { - return nil, resp, err - } - - return job, resp, nil -} - -// GetWorkflowJobLogs gets a redirect URL to download a plain text file of logs for a workflow job. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-jobs#download-job-logs-for-a-workflow-run -// -//meta:operation GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs -func (s *ActionsService) GetWorkflowJobLogs(ctx context.Context, owner, repo string, jobID int64, maxRedirects int) (*url.URL, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/jobs/%v/logs", owner, repo, jobID) - - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusFound { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - parsedURL, err := url.Parse(resp.Header.Get("Location")) - return parsedURL, newResponse(resp), err -} diff --git a/vendor/github.com/google/go-github/v57/github/actions_workflow_runs.go b/vendor/github.com/google/go-github/v57/github/actions_workflow_runs.go deleted file mode 100644 index bc7afe9e..00000000 --- a/vendor/github.com/google/go-github/v57/github/actions_workflow_runs.go +++ /dev/null @@ -1,410 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" - "net/url" -) - -// WorkflowRun represents a repository action workflow run. 
-type WorkflowRun struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - NodeID *string `json:"node_id,omitempty"` - HeadBranch *string `json:"head_branch,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` - RunNumber *int `json:"run_number,omitempty"` - RunAttempt *int `json:"run_attempt,omitempty"` - Event *string `json:"event,omitempty"` - DisplayTitle *string `json:"display_title,omitempty"` - Status *string `json:"status,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - WorkflowID *int64 `json:"workflow_id,omitempty"` - CheckSuiteID *int64 `json:"check_suite_id,omitempty"` - CheckSuiteNodeID *string `json:"check_suite_node_id,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - PullRequests []*PullRequest `json:"pull_requests,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - RunStartedAt *Timestamp `json:"run_started_at,omitempty"` - JobsURL *string `json:"jobs_url,omitempty"` - LogsURL *string `json:"logs_url,omitempty"` - CheckSuiteURL *string `json:"check_suite_url,omitempty"` - ArtifactsURL *string `json:"artifacts_url,omitempty"` - CancelURL *string `json:"cancel_url,omitempty"` - RerunURL *string `json:"rerun_url,omitempty"` - PreviousAttemptURL *string `json:"previous_attempt_url,omitempty"` - HeadCommit *HeadCommit `json:"head_commit,omitempty"` - WorkflowURL *string `json:"workflow_url,omitempty"` - Repository *Repository `json:"repository,omitempty"` - HeadRepository *Repository `json:"head_repository,omitempty"` - Actor *User `json:"actor,omitempty"` - TriggeringActor *User `json:"triggering_actor,omitempty"` - ReferencedWorkflows []*ReferencedWorkflow `json:"referenced_workflows,omitempty"` -} - -// WorkflowRuns represents a slice of repository action workflow run. -type WorkflowRuns struct { - TotalCount *int `json:"total_count,omitempty"` - WorkflowRuns []*WorkflowRun `json:"workflow_runs,omitempty"` -} - -// ListWorkflowRunsOptions specifies optional parameters to ListWorkflowRuns. -type ListWorkflowRunsOptions struct { - Actor string `url:"actor,omitempty"` - Branch string `url:"branch,omitempty"` - Event string `url:"event,omitempty"` - Status string `url:"status,omitempty"` - Created string `url:"created,omitempty"` - HeadSHA string `url:"head_sha,omitempty"` - ExcludePullRequests bool `url:"exclude_pull_requests,omitempty"` - CheckSuiteID int64 `url:"check_suite_id,omitempty"` - ListOptions -} - -// WorkflowRunUsage represents a usage of a specific workflow run. -type WorkflowRunUsage struct { - Billable *WorkflowRunBillMap `json:"billable,omitempty"` - RunDurationMS *int64 `json:"run_duration_ms,omitempty"` -} - -// WorkflowRunBillMap represents different runner environments available for a workflow run. -// Its key is the name of its environment, e.g. "UBUNTU", "MACOS", "WINDOWS", etc. -type WorkflowRunBillMap map[string]*WorkflowRunBill - -// WorkflowRunBill specifies billable time for a specific environment in a workflow run. -type WorkflowRunBill struct { - TotalMS *int64 `json:"total_ms,omitempty"` - Jobs *int `json:"jobs,omitempty"` - JobRuns []*WorkflowRunJobRun `json:"job_runs,omitempty"` -} - -// WorkflowRunJobRun represents a usage of individual jobs of a specific workflow run. -type WorkflowRunJobRun struct { - JobID *int `json:"job_id,omitempty"` - DurationMS *int64 `json:"duration_ms,omitempty"` -} - -// WorkflowRunAttemptOptions specifies optional parameters to GetWorkflowRunAttempt. 
-type WorkflowRunAttemptOptions struct { - ExcludePullRequests *bool `url:"exclude_pull_requests,omitempty"` -} - -// PendingDeploymentsRequest specifies body parameters to PendingDeployments. -type PendingDeploymentsRequest struct { - EnvironmentIDs []int64 `json:"environment_ids"` - // State can be one of: "approved", "rejected". - State string `json:"state"` - Comment string `json:"comment"` -} - -type ReferencedWorkflow struct { - Path *string `json:"path,omitempty"` - SHA *string `json:"sha,omitempty"` - Ref *string `json:"ref,omitempty"` -} - -func (s *ActionsService) listWorkflowRuns(ctx context.Context, endpoint string, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { - u, err := addOptions(endpoint, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runs := new(WorkflowRuns) - resp, err := s.client.Do(ctx, req, &runs) - if err != nil { - return nil, resp, err - } - - return runs, resp, nil -} - -// ListWorkflowRunsByID lists all workflow runs by workflow ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#list-workflow-runs-for-a-workflow -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs -func (s *ActionsService) ListWorkflowRunsByID(ctx context.Context, owner, repo string, workflowID int64, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/actions/workflows/%v/runs", owner, repo, workflowID) - return s.listWorkflowRuns(ctx, u, opts) -} - -// ListWorkflowRunsByFileName lists all workflow runs by workflow file name. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#list-workflow-runs-for-a-workflow -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs -func (s *ActionsService) ListWorkflowRunsByFileName(ctx context.Context, owner, repo, workflowFileName string, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/actions/workflows/%v/runs", owner, repo, workflowFileName) - return s.listWorkflowRuns(ctx, u, opts) -} - -// ListRepositoryWorkflowRuns lists all workflow runs for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#list-workflow-runs-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs -func (s *ActionsService) ListRepositoryWorkflowRuns(ctx context.Context, owner, repo string, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/actions/runs", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runs := new(WorkflowRuns) - resp, err := s.client.Do(ctx, req, &runs) - if err != nil { - return nil, resp, err - } - - return runs, resp, nil -} - -// GetWorkflowRunByID gets a specific workflow run by ID. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#get-a-workflow-run -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id} -func (s *ActionsService) GetWorkflowRunByID(ctx context.Context, owner, repo string, runID int64) (*WorkflowRun, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v", owner, repo, runID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - run := new(WorkflowRun) - resp, err := s.client.Do(ctx, req, run) - if err != nil { - return nil, resp, err - } - - return run, resp, nil -} - -// GetWorkflowRunAttempt gets a specific workflow run attempt. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#get-a-workflow-run-attempt -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number} -func (s *ActionsService) GetWorkflowRunAttempt(ctx context.Context, owner, repo string, runID int64, attemptNumber int, opts *WorkflowRunAttemptOptions) (*WorkflowRun, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/attempts/%v", owner, repo, runID, attemptNumber) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - run := new(WorkflowRun) - resp, err := s.client.Do(ctx, req, run) - if err != nil { - return nil, resp, err - } - - return run, resp, nil -} - -// GetWorkflowRunAttemptLogs gets a redirect URL to download a plain text file of logs for a workflow run for attempt number. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#download-workflow-run-attempt-logs -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs -func (s *ActionsService) GetWorkflowRunAttemptLogs(ctx context.Context, owner, repo string, runID int64, attemptNumber int, maxRedirects int) (*url.URL, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/attempts/%v/logs", owner, repo, runID, attemptNumber) - - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusFound { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - parsedURL, err := url.Parse(resp.Header.Get("Location")) - return parsedURL, newResponse(resp), err -} - -// RerunWorkflowByID re-runs a workflow by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#re-run-a-workflow -// -//meta:operation POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun -func (s *ActionsService) RerunWorkflowByID(ctx context.Context, owner, repo string, runID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/rerun", owner, repo, runID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RerunFailedJobsByID re-runs all of the failed jobs and their dependent jobs in a workflow run by ID. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#re-run-failed-jobs-from-a-workflow-run -// -//meta:operation POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs -func (s *ActionsService) RerunFailedJobsByID(ctx context.Context, owner, repo string, runID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/rerun-failed-jobs", owner, repo, runID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RerunJobByID re-runs a job and its dependent jobs in a workflow run by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#re-run-a-job-from-a-workflow-run -// -//meta:operation POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun -func (s *ActionsService) RerunJobByID(ctx context.Context, owner, repo string, jobID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/jobs/%v/rerun", owner, repo, jobID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CancelWorkflowRunByID cancels a workflow run by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#cancel-a-workflow-run -// -//meta:operation POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel -func (s *ActionsService) CancelWorkflowRunByID(ctx context.Context, owner, repo string, runID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/cancel", owner, repo, runID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetWorkflowRunLogs gets a redirect URL to download a plain text file of logs for a workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#download-workflow-run-logs -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs -func (s *ActionsService) GetWorkflowRunLogs(ctx context.Context, owner, repo string, runID int64, maxRedirects int) (*url.URL, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/logs", owner, repo, runID) - - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusFound { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - parsedURL, err := url.Parse(resp.Header.Get("Location")) - return parsedURL, newResponse(resp), err -} - -// DeleteWorkflowRun deletes a workflow run by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#delete-a-workflow-run -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/runs/{run_id} -func (s *ActionsService) DeleteWorkflowRun(ctx context.Context, owner, repo string, runID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v", owner, repo, runID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteWorkflowRunLogs deletes all logs for a workflow run. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#delete-workflow-run-logs -// -//meta:operation DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs -func (s *ActionsService) DeleteWorkflowRunLogs(ctx context.Context, owner, repo string, runID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/logs", owner, repo, runID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetWorkflowRunUsageByID gets a specific workflow usage run by run ID in the unit of billable milliseconds. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#get-workflow-run-usage -// -//meta:operation GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing -func (s *ActionsService) GetWorkflowRunUsageByID(ctx context.Context, owner, repo string, runID int64) (*WorkflowRunUsage, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/timing", owner, repo, runID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - workflowRunUsage := new(WorkflowRunUsage) - resp, err := s.client.Do(ctx, req, workflowRunUsage) - if err != nil { - return nil, resp, err - } - - return workflowRunUsage, resp, nil -} - -// PendingDeployments approve or reject pending deployments that are waiting on approval by a required reviewer. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflow-runs#review-pending-deployments-for-a-workflow-run -// -//meta:operation POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments -func (s *ActionsService) PendingDeployments(ctx context.Context, owner, repo string, runID int64, request *PendingDeploymentsRequest) ([]*Deployment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/pending_deployments", owner, repo, runID) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - var deployments []*Deployment - resp, err := s.client.Do(ctx, req, &deployments) - if err != nil { - return nil, resp, err - } - - return deployments, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/activity_notifications.go b/vendor/github.com/google/go-github/v57/github/activity_notifications.go deleted file mode 100644 index 47f22261..00000000 --- a/vendor/github.com/google/go-github/v57/github/activity_notifications.go +++ /dev/null @@ -1,241 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "time" -) - -// Notification identifies a GitHub notification for a user. -type Notification struct { - ID *string `json:"id,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Subject *NotificationSubject `json:"subject,omitempty"` - - // Reason identifies the event that triggered the notification. - // - // GitHub API docs: https://docs.github.com/rest/activity#notification-reasons - Reason *string `json:"reason,omitempty"` - - Unread *bool `json:"unread,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - LastReadAt *Timestamp `json:"last_read_at,omitempty"` - URL *string `json:"url,omitempty"` -} - -// NotificationSubject identifies the subject of a notification. 
-type NotificationSubject struct { - Title *string `json:"title,omitempty"` - URL *string `json:"url,omitempty"` - LatestCommentURL *string `json:"latest_comment_url,omitempty"` - Type *string `json:"type,omitempty"` -} - -// NotificationListOptions specifies the optional parameters to the -// ActivityService.ListNotifications method. -type NotificationListOptions struct { - All bool `url:"all,omitempty"` - Participating bool `url:"participating,omitempty"` - Since time.Time `url:"since,omitempty"` - Before time.Time `url:"before,omitempty"` - - ListOptions -} - -// ListNotifications lists all notifications for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#list-notifications-for-the-authenticated-user -// -//meta:operation GET /notifications -func (s *ActivityService) ListNotifications(ctx context.Context, opts *NotificationListOptions) ([]*Notification, *Response, error) { - u := "notifications" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var notifications []*Notification - resp, err := s.client.Do(ctx, req, ¬ifications) - if err != nil { - return nil, resp, err - } - - return notifications, resp, nil -} - -// ListRepositoryNotifications lists all notifications in a given repository -// for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#list-repository-notifications-for-the-authenticated-user -// -//meta:operation GET /repos/{owner}/{repo}/notifications -func (s *ActivityService) ListRepositoryNotifications(ctx context.Context, owner, repo string, opts *NotificationListOptions) ([]*Notification, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/notifications", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var notifications []*Notification - resp, err := s.client.Do(ctx, req, ¬ifications) - if err != nil { - return nil, resp, err - } - - return notifications, resp, nil -} - -type markReadOptions struct { - LastReadAt Timestamp `json:"last_read_at,omitempty"` -} - -// MarkNotificationsRead marks all notifications up to lastRead as read. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#mark-notifications-as-read -// -//meta:operation PUT /notifications -func (s *ActivityService) MarkNotificationsRead(ctx context.Context, lastRead Timestamp) (*Response, error) { - opts := &markReadOptions{ - LastReadAt: lastRead, - } - req, err := s.client.NewRequest("PUT", "notifications", opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// MarkRepositoryNotificationsRead marks all notifications up to lastRead in -// the specified repository as read. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#mark-repository-notifications-as-read -// -//meta:operation PUT /repos/{owner}/{repo}/notifications -func (s *ActivityService) MarkRepositoryNotificationsRead(ctx context.Context, owner, repo string, lastRead Timestamp) (*Response, error) { - opts := &markReadOptions{ - LastReadAt: lastRead, - } - u := fmt.Sprintf("repos/%v/%v/notifications", owner, repo) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetThread gets the specified notification thread. 
-// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#get-a-thread -// -//meta:operation GET /notifications/threads/{thread_id} -func (s *ActivityService) GetThread(ctx context.Context, id string) (*Notification, *Response, error) { - u := fmt.Sprintf("notifications/threads/%v", id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - notification := new(Notification) - resp, err := s.client.Do(ctx, req, notification) - if err != nil { - return nil, resp, err - } - - return notification, resp, nil -} - -// MarkThreadRead marks the specified thread as read. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#mark-a-thread-as-read -// -//meta:operation PATCH /notifications/threads/{thread_id} -func (s *ActivityService) MarkThreadRead(ctx context.Context, id string) (*Response, error) { - u := fmt.Sprintf("notifications/threads/%v", id) - - req, err := s.client.NewRequest("PATCH", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetThreadSubscription checks to see if the authenticated user is subscribed -// to a thread. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#get-a-thread-subscription-for-the-authenticated-user -// -//meta:operation GET /notifications/threads/{thread_id}/subscription -func (s *ActivityService) GetThreadSubscription(ctx context.Context, id string) (*Subscription, *Response, error) { - u := fmt.Sprintf("notifications/threads/%v/subscription", id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - sub := new(Subscription) - resp, err := s.client.Do(ctx, req, sub) - if err != nil { - return nil, resp, err - } - - return sub, resp, nil -} - -// SetThreadSubscription sets the subscription for the specified thread for the -// authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#set-a-thread-subscription -// -//meta:operation PUT /notifications/threads/{thread_id}/subscription -func (s *ActivityService) SetThreadSubscription(ctx context.Context, id string, subscription *Subscription) (*Subscription, *Response, error) { - u := fmt.Sprintf("notifications/threads/%v/subscription", id) - - req, err := s.client.NewRequest("PUT", u, subscription) - if err != nil { - return nil, nil, err - } - - sub := new(Subscription) - resp, err := s.client.Do(ctx, req, sub) - if err != nil { - return nil, resp, err - } - - return sub, resp, nil -} - -// DeleteThreadSubscription deletes the subscription for the specified thread -// for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/notifications#delete-a-thread-subscription -// -//meta:operation DELETE /notifications/threads/{thread_id}/subscription -func (s *ActivityService) DeleteThreadSubscription(ctx context.Context, id string) (*Response, error) { - u := fmt.Sprintf("notifications/threads/%v/subscription", id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/admin.go b/vendor/github.com/google/go-github/v57/github/admin.go deleted file mode 100644 index 8eee9854..00000000 --- a/vendor/github.com/google/go-github/v57/github/admin.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// AdminService handles communication with the admin related methods of the -// GitHub API. These API routes are normally only accessible for GitHub -// Enterprise installations. -// -// GitHub API docs: https://docs.github.com/rest/enterprise-admin -type AdminService service - -// TeamLDAPMapping represents the mapping between a GitHub team and an LDAP group. -type TeamLDAPMapping struct { - ID *int64 `json:"id,omitempty"` - LDAPDN *string `json:"ldap_dn,omitempty"` - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - Slug *string `json:"slug,omitempty"` - Description *string `json:"description,omitempty"` - Privacy *string `json:"privacy,omitempty"` - Permission *string `json:"permission,omitempty"` - - MembersURL *string `json:"members_url,omitempty"` - RepositoriesURL *string `json:"repositories_url,omitempty"` -} - -func (m TeamLDAPMapping) String() string { - return Stringify(m) -} - -// UserLDAPMapping represents the mapping between a GitHub user and an LDAP user. -type UserLDAPMapping struct { - ID *int64 `json:"id,omitempty"` - LDAPDN *string `json:"ldap_dn,omitempty"` - Login *string `json:"login,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - GravatarID *string `json:"gravatar_id,omitempty"` - Type *string `json:"type,omitempty"` - SiteAdmin *bool `json:"site_admin,omitempty"` - - URL *string `json:"url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - FollowingURL *string `json:"following_url,omitempty"` - FollowersURL *string `json:"followers_url,omitempty"` - GistsURL *string `json:"gists_url,omitempty"` - OrganizationsURL *string `json:"organizations_url,omitempty"` - ReceivedEventsURL *string `json:"received_events_url,omitempty"` - ReposURL *string `json:"repos_url,omitempty"` - StarredURL *string `json:"starred_url,omitempty"` - SubscriptionsURL *string `json:"subscriptions_url,omitempty"` -} - -func (m UserLDAPMapping) String() string { - return Stringify(m) -} - -// Enterprise represents the GitHub enterprise profile. -type Enterprise struct { - ID *int `json:"id,omitempty"` - Slug *string `json:"slug,omitempty"` - Name *string `json:"name,omitempty"` - NodeID *string `json:"node_id,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - Description *string `json:"description,omitempty"` - WebsiteURL *string `json:"website_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -func (m Enterprise) String() string { - return Stringify(m) -} - -// UpdateUserLDAPMapping updates the mapping between a GitHub user and an LDAP user. 
-// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/ldap#update-ldap-mapping-for-a-user -// -//meta:operation PATCH /admin/ldap/users/{username}/mapping -func (s *AdminService) UpdateUserLDAPMapping(ctx context.Context, user string, mapping *UserLDAPMapping) (*UserLDAPMapping, *Response, error) { - u := fmt.Sprintf("admin/ldap/users/%v/mapping", user) - req, err := s.client.NewRequest("PATCH", u, mapping) - if err != nil { - return nil, nil, err - } - - m := new(UserLDAPMapping) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// UpdateTeamLDAPMapping updates the mapping between a GitHub team and an LDAP group. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/ldap#update-ldap-mapping-for-a-team -// -//meta:operation PATCH /admin/ldap/teams/{team_id}/mapping -func (s *AdminService) UpdateTeamLDAPMapping(ctx context.Context, team int64, mapping *TeamLDAPMapping) (*TeamLDAPMapping, *Response, error) { - u := fmt.Sprintf("admin/ldap/teams/%v/mapping", team) - req, err := s.client.NewRequest("PATCH", u, mapping) - if err != nil { - return nil, nil, err - } - - m := new(TeamLDAPMapping) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/admin_orgs.go b/vendor/github.com/google/go-github/v57/github/admin_orgs.go deleted file mode 100644 index c734d4de..00000000 --- a/vendor/github.com/google/go-github/v57/github/admin_orgs.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2019 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// createOrgRequest is a subset of Organization and is used internally -// by CreateOrg to pass only the known fields for the endpoint. -type createOrgRequest struct { - Login *string `json:"login,omitempty"` - Admin *string `json:"admin,omitempty"` -} - -// CreateOrg creates a new organization in GitHub Enterprise. -// -// Note that only a subset of the org fields are used and org must -// not be nil. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/orgs#create-an-organization -// -//meta:operation POST /admin/organizations -func (s *AdminService) CreateOrg(ctx context.Context, org *Organization, admin string) (*Organization, *Response, error) { - u := "admin/organizations" - - orgReq := &createOrgRequest{ - Login: org.Login, - Admin: &admin, - } - - req, err := s.client.NewRequest("POST", u, orgReq) - if err != nil { - return nil, nil, err - } - - o := new(Organization) - resp, err := s.client.Do(ctx, req, o) - if err != nil { - return nil, resp, err - } - - return o, resp, nil -} - -// renameOrgRequest is a subset of Organization and is used internally -// by RenameOrg and RenameOrgByName to pass only the known fields for the endpoint. -type renameOrgRequest struct { - Login *string `json:"login,omitempty"` -} - -// RenameOrgResponse is the response given when renaming an Organization. -type RenameOrgResponse struct { - Message *string `json:"message,omitempty"` - URL *string `json:"url,omitempty"` -} - -// RenameOrg renames an organization in GitHub Enterprise. 
-// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/orgs#update-an-organization-name -// -//meta:operation PATCH /admin/organizations/{org} -func (s *AdminService) RenameOrg(ctx context.Context, org *Organization, newName string) (*RenameOrgResponse, *Response, error) { - return s.RenameOrgByName(ctx, *org.Login, newName) -} - -// RenameOrgByName renames an organization in GitHub Enterprise using its current name. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/orgs#update-an-organization-name -// -//meta:operation PATCH /admin/organizations/{org} -func (s *AdminService) RenameOrgByName(ctx context.Context, org, newName string) (*RenameOrgResponse, *Response, error) { - u := fmt.Sprintf("admin/organizations/%v", org) - - orgReq := &renameOrgRequest{ - Login: &newName, - } - - req, err := s.client.NewRequest("PATCH", u, orgReq) - if err != nil { - return nil, nil, err - } - - o := new(RenameOrgResponse) - resp, err := s.client.Do(ctx, req, o) - if err != nil { - return nil, resp, err - } - - return o, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/admin_stats.go b/vendor/github.com/google/go-github/v57/github/admin_stats.go deleted file mode 100644 index aa23f5d1..00000000 --- a/vendor/github.com/google/go-github/v57/github/admin_stats.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" -) - -// AdminStats represents a variety of stats of a GitHub Enterprise -// installation. -type AdminStats struct { - Issues *IssueStats `json:"issues,omitempty"` - Hooks *HookStats `json:"hooks,omitempty"` - Milestones *MilestoneStats `json:"milestones,omitempty"` - Orgs *OrgStats `json:"orgs,omitempty"` - Comments *CommentStats `json:"comments,omitempty"` - Pages *PageStats `json:"pages,omitempty"` - Users *UserStats `json:"users,omitempty"` - Gists *GistStats `json:"gists,omitempty"` - Pulls *PullStats `json:"pulls,omitempty"` - Repos *RepoStats `json:"repos,omitempty"` -} - -func (s AdminStats) String() string { - return Stringify(s) -} - -// IssueStats represents the number of total, open and closed issues. -type IssueStats struct { - TotalIssues *int `json:"total_issues,omitempty"` - OpenIssues *int `json:"open_issues,omitempty"` - ClosedIssues *int `json:"closed_issues,omitempty"` -} - -func (s IssueStats) String() string { - return Stringify(s) -} - -// HookStats represents the number of total, active and inactive hooks. -type HookStats struct { - TotalHooks *int `json:"total_hooks,omitempty"` - ActiveHooks *int `json:"active_hooks,omitempty"` - InactiveHooks *int `json:"inactive_hooks,omitempty"` -} - -func (s HookStats) String() string { - return Stringify(s) -} - -// MilestoneStats represents the number of total, open and close milestones. -type MilestoneStats struct { - TotalMilestones *int `json:"total_milestones,omitempty"` - OpenMilestones *int `json:"open_milestones,omitempty"` - ClosedMilestones *int `json:"closed_milestones,omitempty"` -} - -func (s MilestoneStats) String() string { - return Stringify(s) -} - -// OrgStats represents the number of total, disabled organizations and the team -// and team member count. 
-type OrgStats struct { - TotalOrgs *int `json:"total_orgs,omitempty"` - DisabledOrgs *int `json:"disabled_orgs,omitempty"` - TotalTeams *int `json:"total_teams,omitempty"` - TotalTeamMembers *int `json:"total_team_members,omitempty"` -} - -func (s OrgStats) String() string { - return Stringify(s) -} - -// CommentStats represents the number of total comments on commits, gists, issues -// and pull requests. -type CommentStats struct { - TotalCommitComments *int `json:"total_commit_comments,omitempty"` - TotalGistComments *int `json:"total_gist_comments,omitempty"` - TotalIssueComments *int `json:"total_issue_comments,omitempty"` - TotalPullRequestComments *int `json:"total_pull_request_comments,omitempty"` -} - -func (s CommentStats) String() string { - return Stringify(s) -} - -// PageStats represents the total number of github pages. -type PageStats struct { - TotalPages *int `json:"total_pages,omitempty"` -} - -func (s PageStats) String() string { - return Stringify(s) -} - -// UserStats represents the number of total, admin and suspended users. -type UserStats struct { - TotalUsers *int `json:"total_users,omitempty"` - AdminUsers *int `json:"admin_users,omitempty"` - SuspendedUsers *int `json:"suspended_users,omitempty"` -} - -func (s UserStats) String() string { - return Stringify(s) -} - -// GistStats represents the number of total, private and public gists. -type GistStats struct { - TotalGists *int `json:"total_gists,omitempty"` - PrivateGists *int `json:"private_gists,omitempty"` - PublicGists *int `json:"public_gists,omitempty"` -} - -func (s GistStats) String() string { - return Stringify(s) -} - -// PullStats represents the number of total, merged, mergable and unmergeable -// pull-requests. -type PullStats struct { - TotalPulls *int `json:"total_pulls,omitempty"` - MergedPulls *int `json:"merged_pulls,omitempty"` - MergablePulls *int `json:"mergeable_pulls,omitempty"` - UnmergablePulls *int `json:"unmergeable_pulls,omitempty"` -} - -func (s PullStats) String() string { - return Stringify(s) -} - -// RepoStats represents the number of total, root, fork, organization repositories -// together with the total number of pushes and wikis. -type RepoStats struct { - TotalRepos *int `json:"total_repos,omitempty"` - RootRepos *int `json:"root_repos,omitempty"` - ForkRepos *int `json:"fork_repos,omitempty"` - OrgRepos *int `json:"org_repos,omitempty"` - TotalPushes *int `json:"total_pushes,omitempty"` - TotalWikis *int `json:"total_wikis,omitempty"` -} - -func (s RepoStats) String() string { - return Stringify(s) -} - -// GetAdminStats returns a variety of metrics about a GitHub Enterprise -// installation. -// -// Please note that this is only available to site administrators, -// otherwise it will error with a 404 not found (instead of 401 or 403). 
-// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/admin-stats#get-all-statistics -// -//meta:operation GET /enterprise/stats/all -func (s *AdminService) GetAdminStats(ctx context.Context) (*AdminStats, *Response, error) { - u := "enterprise/stats/all" - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - m := new(AdminStats) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/admin_users.go b/vendor/github.com/google/go-github/v57/github/admin_users.go deleted file mode 100644 index 3916a470..00000000 --- a/vendor/github.com/google/go-github/v57/github/admin_users.go +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright 2019 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// createUserRequest is a subset of User and is used internally -// by CreateUser to pass only the known fields for the endpoint. -type createUserRequest struct { - Login *string `json:"login,omitempty"` - Email *string `json:"email,omitempty"` -} - -// CreateUser creates a new user in GitHub Enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#create-a-user -// -//meta:operation POST /admin/users -func (s *AdminService) CreateUser(ctx context.Context, login, email string) (*User, *Response, error) { - u := "admin/users" - - userReq := &createUserRequest{ - Login: &login, - Email: &email, - } - - req, err := s.client.NewRequest("POST", u, userReq) - if err != nil { - return nil, nil, err - } - - var user User - resp, err := s.client.Do(ctx, req, &user) - if err != nil { - return nil, resp, err - } - - return &user, resp, nil -} - -// DeleteUser deletes a user in GitHub Enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#delete-a-user -// -//meta:operation DELETE /admin/users/{username} -func (s *AdminService) DeleteUser(ctx context.Context, username string) (*Response, error) { - u := "admin/users/" + username - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// ImpersonateUserOptions represents the scoping for the OAuth token. -type ImpersonateUserOptions struct { - Scopes []string `json:"scopes,omitempty"` -} - -// OAuthAPP represents the GitHub Site Administrator OAuth app. -type OAuthAPP struct { - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - ClientID *string `json:"client_id,omitempty"` -} - -func (s OAuthAPP) String() string { - return Stringify(s) -} - -// UserAuthorization represents the impersonation response. 
-type UserAuthorization struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Scopes []string `json:"scopes,omitempty"` - Token *string `json:"token,omitempty"` - TokenLastEight *string `json:"token_last_eight,omitempty"` - HashedToken *string `json:"hashed_token,omitempty"` - App *OAuthAPP `json:"app,omitempty"` - Note *string `json:"note,omitempty"` - NoteURL *string `json:"note_url,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - Fingerprint *string `json:"fingerprint,omitempty"` -} - -// CreateUserImpersonation creates an impersonation OAuth token. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#create-an-impersonation-oauth-token -// -//meta:operation POST /admin/users/{username}/authorizations -func (s *AdminService) CreateUserImpersonation(ctx context.Context, username string, opts *ImpersonateUserOptions) (*UserAuthorization, *Response, error) { - u := fmt.Sprintf("admin/users/%s/authorizations", username) - - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - a := new(UserAuthorization) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} - -// DeleteUserImpersonation deletes an impersonation OAuth token. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#delete-an-impersonation-oauth-token -// -//meta:operation DELETE /admin/users/{username}/authorizations -func (s *AdminService) DeleteUserImpersonation(ctx context.Context, username string) (*Response, error) { - u := fmt.Sprintf("admin/users/%s/authorizations", username) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/apps.go b/vendor/github.com/google/go-github/v57/github/apps.go deleted file mode 100644 index f0392f2d..00000000 --- a/vendor/github.com/google/go-github/v57/github/apps.go +++ /dev/null @@ -1,420 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// AppsService provides access to the installation related functions -// in the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/apps/ -type AppsService service - -// App represents a GitHub App. -type App struct { - ID *int64 `json:"id,omitempty"` - Slug *string `json:"slug,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Owner *User `json:"owner,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - ExternalURL *string `json:"external_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Permissions *InstallationPermissions `json:"permissions,omitempty"` - Events []string `json:"events,omitempty"` - InstallationsCount *int `json:"installations_count,omitempty"` -} - -// InstallationToken represents an installation token. 
-type InstallationToken struct { - Token *string `json:"token,omitempty"` - ExpiresAt *Timestamp `json:"expires_at,omitempty"` - Permissions *InstallationPermissions `json:"permissions,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -// InstallationTokenOptions allow restricting a token's access to specific repositories. -type InstallationTokenOptions struct { - // The IDs of the repositories that the installation token can access. - // Providing repository IDs restricts the access of an installation token to specific repositories. - RepositoryIDs []int64 `json:"repository_ids,omitempty"` - - // The names of the repositories that the installation token can access. - // Providing repository names restricts the access of an installation token to specific repositories. - Repositories []string `json:"repositories,omitempty"` - - // The permissions granted to the access token. - // The permissions object includes the permission names and their access type. - Permissions *InstallationPermissions `json:"permissions,omitempty"` -} - -// InstallationPermissions lists the repository and organization permissions for an installation. -// -// Permission names taken from: -// -// https://docs.github.com/enterprise-server@3.0/rest/apps#create-an-installation-access-token-for-an-app -// https://docs.github.com/rest/apps#create-an-installation-access-token-for-an-app -type InstallationPermissions struct { - Actions *string `json:"actions,omitempty"` - Administration *string `json:"administration,omitempty"` - Blocking *string `json:"blocking,omitempty"` - Checks *string `json:"checks,omitempty"` - Contents *string `json:"contents,omitempty"` - ContentReferences *string `json:"content_references,omitempty"` - Deployments *string `json:"deployments,omitempty"` - Emails *string `json:"emails,omitempty"` - Environments *string `json:"environments,omitempty"` - Followers *string `json:"followers,omitempty"` - Issues *string `json:"issues,omitempty"` - Metadata *string `json:"metadata,omitempty"` - Members *string `json:"members,omitempty"` - OrganizationAdministration *string `json:"organization_administration,omitempty"` - OrganizationCustomRoles *string `json:"organization_custom_roles,omitempty"` - OrganizationHooks *string `json:"organization_hooks,omitempty"` - OrganizationPackages *string `json:"organization_packages,omitempty"` - OrganizationPlan *string `json:"organization_plan,omitempty"` - OrganizationPreReceiveHooks *string `json:"organization_pre_receive_hooks,omitempty"` - OrganizationProjects *string `json:"organization_projects,omitempty"` - OrganizationSecrets *string `json:"organization_secrets,omitempty"` - OrganizationSelfHostedRunners *string `json:"organization_self_hosted_runners,omitempty"` - OrganizationUserBlocking *string `json:"organization_user_blocking,omitempty"` - Packages *string `json:"packages,omitempty"` - Pages *string `json:"pages,omitempty"` - PullRequests *string `json:"pull_requests,omitempty"` - RepositoryHooks *string `json:"repository_hooks,omitempty"` - RepositoryProjects *string `json:"repository_projects,omitempty"` - RepositoryPreReceiveHooks *string `json:"repository_pre_receive_hooks,omitempty"` - Secrets *string `json:"secrets,omitempty"` - SecretScanningAlerts *string `json:"secret_scanning_alerts,omitempty"` - SecurityEvents *string `json:"security_events,omitempty"` - SingleFile *string `json:"single_file,omitempty"` - Statuses *string `json:"statuses,omitempty"` - TeamDiscussions *string `json:"team_discussions,omitempty"` - 
VulnerabilityAlerts *string `json:"vulnerability_alerts,omitempty"` - Workflows *string `json:"workflows,omitempty"` -} - -// InstallationRequest represents a pending GitHub App installation request. -type InstallationRequest struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Account *User `json:"account,omitempty"` - Requester *User `json:"requester,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` -} - -// Installation represents a GitHub Apps installation. -type Installation struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - AppID *int64 `json:"app_id,omitempty"` - AppSlug *string `json:"app_slug,omitempty"` - TargetID *int64 `json:"target_id,omitempty"` - Account *User `json:"account,omitempty"` - AccessTokensURL *string `json:"access_tokens_url,omitempty"` - RepositoriesURL *string `json:"repositories_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - TargetType *string `json:"target_type,omitempty"` - SingleFileName *string `json:"single_file_name,omitempty"` - RepositorySelection *string `json:"repository_selection,omitempty"` - Events []string `json:"events,omitempty"` - SingleFilePaths []string `json:"single_file_paths,omitempty"` - Permissions *InstallationPermissions `json:"permissions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - HasMultipleSingleFiles *bool `json:"has_multiple_single_files,omitempty"` - SuspendedBy *User `json:"suspended_by,omitempty"` - SuspendedAt *Timestamp `json:"suspended_at,omitempty"` -} - -// Attachment represents a GitHub Apps attachment. -type Attachment struct { - ID *int64 `json:"id,omitempty"` - Title *string `json:"title,omitempty"` - Body *string `json:"body,omitempty"` -} - -// ContentReference represents a reference to a URL in an issue or pull request. -type ContentReference struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Reference *string `json:"reference,omitempty"` -} - -func (i Installation) String() string { - return Stringify(i) -} - -// Get a single GitHub App. Passing the empty string will get -// the authenticated GitHub App. -// -// Note: appSlug is just the URL-friendly name of your GitHub App. -// You can find this on the settings page for your GitHub App -// (e.g., https://github.com/settings/apps/:app_slug). -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-an-app -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-the-authenticated-app -// -//meta:operation GET /app -//meta:operation GET /apps/{app_slug} -func (s *AppsService) Get(ctx context.Context, appSlug string) (*App, *Response, error) { - var u string - if appSlug != "" { - u = fmt.Sprintf("apps/%v", appSlug) - } else { - u = "app" - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - app := new(App) - resp, err := s.client.Do(ctx, req, app) - if err != nil { - return nil, resp, err - } - - return app, resp, nil -} - -// ListInstallationRequests lists the pending installation requests that the current GitHub App has. 
-// -// GitHub API docs: https://docs.github.com/rest/apps/apps#list-installation-requests-for-the-authenticated-app -// -//meta:operation GET /app/installation-requests -func (s *AppsService) ListInstallationRequests(ctx context.Context, opts *ListOptions) ([]*InstallationRequest, *Response, error) { - u, err := addOptions("app/installation-requests", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var i []*InstallationRequest - resp, err := s.client.Do(ctx, req, &i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// ListInstallations lists the installations that the current GitHub App has. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#list-installations-for-the-authenticated-app -// -//meta:operation GET /app/installations -func (s *AppsService) ListInstallations(ctx context.Context, opts *ListOptions) ([]*Installation, *Response, error) { - u, err := addOptions("app/installations", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var i []*Installation - resp, err := s.client.Do(ctx, req, &i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// GetInstallation returns the specified installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-an-installation-for-the-authenticated-app -// -//meta:operation GET /app/installations/{installation_id} -func (s *AppsService) GetInstallation(ctx context.Context, id int64) (*Installation, *Response, error) { - return s.getInstallation(ctx, fmt.Sprintf("app/installations/%v", id)) -} - -// ListUserInstallations lists installations that are accessible to the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#list-app-installations-accessible-to-the-user-access-token -// -//meta:operation GET /user/installations -func (s *AppsService) ListUserInstallations(ctx context.Context, opts *ListOptions) ([]*Installation, *Response, error) { - u, err := addOptions("user/installations", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var i struct { - Installations []*Installation `json:"installations"` - } - resp, err := s.client.Do(ctx, req, &i) - if err != nil { - return nil, resp, err - } - - return i.Installations, resp, nil -} - -// SuspendInstallation suspends the specified installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#suspend-an-app-installation -// -//meta:operation PUT /app/installations/{installation_id}/suspended -func (s *AppsService) SuspendInstallation(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("app/installations/%v/suspended", id) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// UnsuspendInstallation unsuspends the specified installation. 
-// -// GitHub API docs: https://docs.github.com/rest/apps/apps#unsuspend-an-app-installation -// -//meta:operation DELETE /app/installations/{installation_id}/suspended -func (s *AppsService) UnsuspendInstallation(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("app/installations/%v/suspended", id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteInstallation deletes the specified installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#delete-an-installation-for-the-authenticated-app -// -//meta:operation DELETE /app/installations/{installation_id} -func (s *AppsService) DeleteInstallation(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("app/installations/%v", id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CreateInstallationToken creates a new installation token. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#create-an-installation-access-token-for-an-app -// -//meta:operation POST /app/installations/{installation_id}/access_tokens -func (s *AppsService) CreateInstallationToken(ctx context.Context, id int64, opts *InstallationTokenOptions) (*InstallationToken, *Response, error) { - u := fmt.Sprintf("app/installations/%v/access_tokens", id) - - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - t := new(InstallationToken) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// CreateAttachment creates a new attachment on user comment containing a url. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.3/rest/reference/apps#create-a-content-attachment -// -//meta:operation POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments -func (s *AppsService) CreateAttachment(ctx context.Context, contentReferenceID int64, title, body string) (*Attachment, *Response, error) { - u := fmt.Sprintf("content_references/%v/attachments", contentReferenceID) - payload := &Attachment{Title: String(title), Body: String(body)} - req, err := s.client.NewRequest("POST", u, payload) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeContentAttachmentsPreview) - - m := &Attachment{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// FindOrganizationInstallation finds the organization's installation information. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-an-organization-installation-for-the-authenticated-app -// -//meta:operation GET /orgs/{org}/installation -func (s *AppsService) FindOrganizationInstallation(ctx context.Context, org string) (*Installation, *Response, error) { - return s.getInstallation(ctx, fmt.Sprintf("orgs/%v/installation", org)) -} - -// FindRepositoryInstallation finds the repository's installation information. 
-// -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-a-repository-installation-for-the-authenticated-app -// -//meta:operation GET /repos/{owner}/{repo}/installation -func (s *AppsService) FindRepositoryInstallation(ctx context.Context, owner, repo string) (*Installation, *Response, error) { - return s.getInstallation(ctx, fmt.Sprintf("repos/%v/%v/installation", owner, repo)) -} - -// FindRepositoryInstallationByID finds the repository's installation information. -// -// Note: FindRepositoryInstallationByID uses the undocumented GitHub API endpoint "GET /repositories/{repository_id}/installation". -// -//meta:operation GET /repositories/{repository_id}/installation -func (s *AppsService) FindRepositoryInstallationByID(ctx context.Context, id int64) (*Installation, *Response, error) { - return s.getInstallation(ctx, fmt.Sprintf("repositories/%d/installation", id)) -} - -// FindUserInstallation finds the user's installation information. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#get-a-user-installation-for-the-authenticated-app -// -//meta:operation GET /users/{username}/installation -func (s *AppsService) FindUserInstallation(ctx context.Context, user string) (*Installation, *Response, error) { - return s.getInstallation(ctx, fmt.Sprintf("users/%v/installation", user)) -} - -func (s *AppsService) getInstallation(ctx context.Context, url string) (*Installation, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - i := new(Installation) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/authorizations.go b/vendor/github.com/google/go-github/v57/github/authorizations.go deleted file mode 100644 index 7adc5323..00000000 --- a/vendor/github.com/google/go-github/v57/github/authorizations.go +++ /dev/null @@ -1,293 +0,0 @@ -// Copyright 2015 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Scope models a GitHub authorization scope. -// -// GitHub API docs: https://docs.github.com/rest/oauth/#scopes -type Scope string - -// This is the set of scopes for GitHub API V3 -const ( - ScopeNone Scope = "(no scope)" // REVISIT: is this actually returned, or just a documentation artifact? 
- ScopeUser Scope = "user" - ScopeUserEmail Scope = "user:email" - ScopeUserFollow Scope = "user:follow" - ScopePublicRepo Scope = "public_repo" - ScopeRepo Scope = "repo" - ScopeRepoDeployment Scope = "repo_deployment" - ScopeRepoStatus Scope = "repo:status" - ScopeDeleteRepo Scope = "delete_repo" - ScopeNotifications Scope = "notifications" - ScopeGist Scope = "gist" - ScopeReadRepoHook Scope = "read:repo_hook" - ScopeWriteRepoHook Scope = "write:repo_hook" - ScopeAdminRepoHook Scope = "admin:repo_hook" - ScopeAdminOrgHook Scope = "admin:org_hook" - ScopeReadOrg Scope = "read:org" - ScopeWriteOrg Scope = "write:org" - ScopeAdminOrg Scope = "admin:org" - ScopeReadPublicKey Scope = "read:public_key" - ScopeWritePublicKey Scope = "write:public_key" - ScopeAdminPublicKey Scope = "admin:public_key" - ScopeReadGPGKey Scope = "read:gpg_key" - ScopeWriteGPGKey Scope = "write:gpg_key" - ScopeAdminGPGKey Scope = "admin:gpg_key" - ScopeSecurityEvents Scope = "security_events" -) - -// AuthorizationsService handles communication with the authorization related -// methods of the GitHub API. -// -// This service requires HTTP Basic Authentication; it cannot be accessed using -// an OAuth token. -// -// GitHub API docs: https://docs.github.com/rest/oauth-authorizations -type AuthorizationsService service - -// Authorization represents an individual GitHub authorization. -type Authorization struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Scopes []Scope `json:"scopes,omitempty"` - Token *string `json:"token,omitempty"` - TokenLastEight *string `json:"token_last_eight,omitempty"` - HashedToken *string `json:"hashed_token,omitempty"` - App *AuthorizationApp `json:"app,omitempty"` - Note *string `json:"note,omitempty"` - NoteURL *string `json:"note_url,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - Fingerprint *string `json:"fingerprint,omitempty"` - - // User is only populated by the Check and Reset methods. - User *User `json:"user,omitempty"` -} - -func (a Authorization) String() string { - return Stringify(a) -} - -// AuthorizationApp represents an individual GitHub app (in the context of authorization). -type AuthorizationApp struct { - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - ClientID *string `json:"client_id,omitempty"` -} - -func (a AuthorizationApp) String() string { - return Stringify(a) -} - -// Grant represents an OAuth application that has been granted access to an account. -type Grant struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - App *AuthorizationApp `json:"app,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Scopes []string `json:"scopes,omitempty"` -} - -func (g Grant) String() string { - return Stringify(g) -} - -// AuthorizationRequest represents a request to create an authorization. -type AuthorizationRequest struct { - Scopes []Scope `json:"scopes,omitempty"` - Note *string `json:"note,omitempty"` - NoteURL *string `json:"note_url,omitempty"` - ClientID *string `json:"client_id,omitempty"` - ClientSecret *string `json:"client_secret,omitempty"` - Fingerprint *string `json:"fingerprint,omitempty"` -} - -func (a AuthorizationRequest) String() string { - return Stringify(a) -} - -// AuthorizationUpdateRequest represents a request to update an authorization. 
-// -// Note that for any one update, you must only provide one of the "scopes" -// fields. That is, you may provide only one of "Scopes", or "AddScopes", or -// "RemoveScopes". -// -// GitHub API docs: https://docs.github.com/rest/oauth-authorizations#update-an-existing-authorization -type AuthorizationUpdateRequest struct { - Scopes []string `json:"scopes,omitempty"` - AddScopes []string `json:"add_scopes,omitempty"` - RemoveScopes []string `json:"remove_scopes,omitempty"` - Note *string `json:"note,omitempty"` - NoteURL *string `json:"note_url,omitempty"` - Fingerprint *string `json:"fingerprint,omitempty"` -} - -func (a AuthorizationUpdateRequest) String() string { - return Stringify(a) -} - -// Check if an OAuth token is valid for a specific app. -// -// Note that this operation requires the use of BasicAuth, but where the -// username is the OAuth application clientID, and the password is its -// clientSecret. Invalid tokens will return a 404 Not Found. -// -// The returned Authorization.User field will be populated. -// -// GitHub API docs: https://docs.github.com/rest/apps/oauth-applications#check-a-token -// -//meta:operation POST /applications/{client_id}/token -func (s *AuthorizationsService) Check(ctx context.Context, clientID, accessToken string) (*Authorization, *Response, error) { - u := fmt.Sprintf("applications/%v/token", clientID) - - reqBody := &struct { - AccessToken string `json:"access_token"` - }{AccessToken: accessToken} - - req, err := s.client.NewRequest("POST", u, reqBody) - if err != nil { - return nil, nil, err - } - req.Header.Set("Accept", mediaTypeOAuthAppPreview) - - a := new(Authorization) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} - -// Reset is used to reset a valid OAuth token without end user involvement. -// Applications must save the "token" property in the response, because changes -// take effect immediately. -// -// Note that this operation requires the use of BasicAuth, but where the -// username is the OAuth application clientID, and the password is its -// clientSecret. Invalid tokens will return a 404 Not Found. -// -// The returned Authorization.User field will be populated. -// -// GitHub API docs: https://docs.github.com/rest/apps/oauth-applications#reset-a-token -// -//meta:operation PATCH /applications/{client_id}/token -func (s *AuthorizationsService) Reset(ctx context.Context, clientID, accessToken string) (*Authorization, *Response, error) { - u := fmt.Sprintf("applications/%v/token", clientID) - - reqBody := &struct { - AccessToken string `json:"access_token"` - }{AccessToken: accessToken} - - req, err := s.client.NewRequest("PATCH", u, reqBody) - if err != nil { - return nil, nil, err - } - req.Header.Set("Accept", mediaTypeOAuthAppPreview) - - a := new(Authorization) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} - -// Revoke an authorization for an application. -// -// Note that this operation requires the use of BasicAuth, but where the -// username is the OAuth application clientID, and the password is its -// clientSecret. Invalid tokens will return a 404 Not Found. 
-// -// GitHub API docs: https://docs.github.com/rest/apps/oauth-applications#delete-an-app-token -// -//meta:operation DELETE /applications/{client_id}/token -func (s *AuthorizationsService) Revoke(ctx context.Context, clientID, accessToken string) (*Response, error) { - u := fmt.Sprintf("applications/%v/token", clientID) - - reqBody := &struct { - AccessToken string `json:"access_token"` - }{AccessToken: accessToken} - - req, err := s.client.NewRequest("DELETE", u, reqBody) - if err != nil { - return nil, err - } - req.Header.Set("Accept", mediaTypeOAuthAppPreview) - - return s.client.Do(ctx, req, nil) -} - -// DeleteGrant deletes an OAuth application grant. Deleting an application's -// grant will also delete all OAuth tokens associated with the application for -// the user. -// -// GitHub API docs: https://docs.github.com/rest/apps/oauth-applications#delete-an-app-authorization -// -//meta:operation DELETE /applications/{client_id}/grant -func (s *AuthorizationsService) DeleteGrant(ctx context.Context, clientID, accessToken string) (*Response, error) { - u := fmt.Sprintf("applications/%v/grant", clientID) - - reqBody := &struct { - AccessToken string `json:"access_token"` - }{AccessToken: accessToken} - - req, err := s.client.NewRequest("DELETE", u, reqBody) - if err != nil { - return nil, err - } - req.Header.Set("Accept", mediaTypeOAuthAppPreview) - - return s.client.Do(ctx, req, nil) -} - -// CreateImpersonation creates an impersonation OAuth token. -// -// This requires admin permissions. With the returned Authorization.Token -// you can e.g. create or delete a user's public SSH key. NOTE: creating a -// new token automatically revokes an existing one. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#create-an-impersonation-oauth-token -// -//meta:operation POST /admin/users/{username}/authorizations -func (s *AuthorizationsService) CreateImpersonation(ctx context.Context, username string, authReq *AuthorizationRequest) (*Authorization, *Response, error) { - u := fmt.Sprintf("admin/users/%v/authorizations", username) - req, err := s.client.NewRequest("POST", u, authReq) - if err != nil { - return nil, nil, err - } - - a := new(Authorization) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - return a, resp, nil -} - -// DeleteImpersonation deletes an impersonation OAuth token. -// -// NOTE: there can be only one at a time. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#delete-an-impersonation-oauth-token -// -//meta:operation DELETE /admin/users/{username}/authorizations -func (s *AuthorizationsService) DeleteImpersonation(ctx context.Context, username string) (*Response, error) { - u := fmt.Sprintf("admin/users/%v/authorizations", username) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/billing.go b/vendor/github.com/google/go-github/v57/github/billing.go deleted file mode 100644 index 6d7579b8..00000000 --- a/vendor/github.com/google/go-github/v57/github/billing.go +++ /dev/null @@ -1,215 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// BillingService provides access to the billing related functions -// in the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/billing -type BillingService service - -// ActionBilling represents a GitHub Action billing. -type ActionBilling struct { - TotalMinutesUsed float64 `json:"total_minutes_used"` - TotalPaidMinutesUsed float64 `json:"total_paid_minutes_used"` - IncludedMinutes float64 `json:"included_minutes"` - MinutesUsedBreakdown MinutesUsedBreakdown `json:"minutes_used_breakdown"` -} - -// MinutesUsedBreakdown counts the actions minutes used by machine type (e.g. UBUNTU, WINDOWS, MACOS). -type MinutesUsedBreakdown = map[string]int - -// PackageBilling represents a GitHub Package billing. -type PackageBilling struct { - TotalGigabytesBandwidthUsed int `json:"total_gigabytes_bandwidth_used"` - TotalPaidGigabytesBandwidthUsed int `json:"total_paid_gigabytes_bandwidth_used"` - IncludedGigabytesBandwidth float64 `json:"included_gigabytes_bandwidth"` -} - -// StorageBilling represents a GitHub Storage billing. -type StorageBilling struct { - DaysLeftInBillingCycle int `json:"days_left_in_billing_cycle"` - EstimatedPaidStorageForMonth float64 `json:"estimated_paid_storage_for_month"` - EstimatedStorageForMonth float64 `json:"estimated_storage_for_month"` -} - -// ActiveCommitters represents the total active committers across all repositories in an Organization. -type ActiveCommitters struct { - TotalAdvancedSecurityCommitters int `json:"total_advanced_security_committers"` - Repositories []*RepositoryActiveCommitters `json:"repositories,omitempty"` -} - -// RepositoryActiveCommitters represents active committers on each repository. -type RepositoryActiveCommitters struct { - Name *string `json:"name,omitempty"` - AdvancedSecurityCommitters *int `json:"advanced_security_committers,omitempty"` - AdvancedSecurityCommittersBreakdown []*AdvancedSecurityCommittersBreakdown `json:"advanced_security_committers_breakdown,omitempty"` -} - -// AdvancedSecurityCommittersBreakdown represents the user activity breakdown for ActiveCommitters. -type AdvancedSecurityCommittersBreakdown struct { - UserLogin *string `json:"user_login,omitempty"` - LastPushedDate *string `json:"last_pushed_date,omitempty"` -} - -// GetActionsBillingOrg returns the summary of the free and paid GitHub Actions minutes used for an Org. -// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-github-actions-billing-for-an-organization -// -//meta:operation GET /orgs/{org}/settings/billing/actions -func (s *BillingService) GetActionsBillingOrg(ctx context.Context, org string) (*ActionBilling, *Response, error) { - u := fmt.Sprintf("orgs/%v/settings/billing/actions", org) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionsOrgBilling := new(ActionBilling) - resp, err := s.client.Do(ctx, req, actionsOrgBilling) - if err != nil { - return nil, resp, err - } - - return actionsOrgBilling, resp, nil -} - -// GetPackagesBillingOrg returns the free and paid storage used for GitHub Packages in gigabytes for an Org. 
-// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-github-packages-billing-for-an-organization -// -//meta:operation GET /orgs/{org}/settings/billing/packages -func (s *BillingService) GetPackagesBillingOrg(ctx context.Context, org string) (*PackageBilling, *Response, error) { - u := fmt.Sprintf("orgs/%v/settings/billing/packages", org) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - packagesOrgBilling := new(PackageBilling) - resp, err := s.client.Do(ctx, req, packagesOrgBilling) - if err != nil { - return nil, resp, err - } - - return packagesOrgBilling, resp, nil -} - -// GetStorageBillingOrg returns the estimated paid and estimated total storage used for GitHub Actions -// and GitHub Packages in gigabytes for an Org. -// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-shared-storage-billing-for-an-organization -// -//meta:operation GET /orgs/{org}/settings/billing/shared-storage -func (s *BillingService) GetStorageBillingOrg(ctx context.Context, org string) (*StorageBilling, *Response, error) { - u := fmt.Sprintf("orgs/%v/settings/billing/shared-storage", org) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - storageOrgBilling := new(StorageBilling) - resp, err := s.client.Do(ctx, req, storageOrgBilling) - if err != nil { - return nil, resp, err - } - - return storageOrgBilling, resp, nil -} - -// GetAdvancedSecurityActiveCommittersOrg returns the GitHub Advanced Security active committers for an organization per repository. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/billing/billing#get-github-advanced-security-active-committers-for-an-organization -// -//meta:operation GET /orgs/{org}/settings/billing/advanced-security -func (s *BillingService) GetAdvancedSecurityActiveCommittersOrg(ctx context.Context, org string, opts *ListOptions) (*ActiveCommitters, *Response, error) { - u := fmt.Sprintf("orgs/%v/settings/billing/advanced-security", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - activeOrgCommitters := new(ActiveCommitters) - resp, err := s.client.Do(ctx, req, activeOrgCommitters) - if err != nil { - return nil, resp, err - } - - return activeOrgCommitters, resp, nil -} - -// GetActionsBillingUser returns the summary of the free and paid GitHub Actions minutes used for a user. -// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-github-actions-billing-for-a-user -// -//meta:operation GET /users/{username}/settings/billing/actions -func (s *BillingService) GetActionsBillingUser(ctx context.Context, user string) (*ActionBilling, *Response, error) { - u := fmt.Sprintf("users/%v/settings/billing/actions", user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionsUserBilling := new(ActionBilling) - resp, err := s.client.Do(ctx, req, actionsUserBilling) - if err != nil { - return nil, resp, err - } - - return actionsUserBilling, resp, nil -} - -// GetPackagesBillingUser returns the free and paid storage used for GitHub Packages in gigabytes for a user. 
-// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-github-packages-billing-for-a-user -// -//meta:operation GET /users/{username}/settings/billing/packages -func (s *BillingService) GetPackagesBillingUser(ctx context.Context, user string) (*PackageBilling, *Response, error) { - u := fmt.Sprintf("users/%v/settings/billing/packages", user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - packagesUserBilling := new(PackageBilling) - resp, err := s.client.Do(ctx, req, packagesUserBilling) - if err != nil { - return nil, resp, err - } - - return packagesUserBilling, resp, nil -} - -// GetStorageBillingUser returns the estimated paid and estimated total storage used for GitHub Actions -// and GitHub Packages in gigabytes for a user. -// -// GitHub API docs: https://docs.github.com/rest/billing/billing#get-shared-storage-billing-for-a-user -// -//meta:operation GET /users/{username}/settings/billing/shared-storage -func (s *BillingService) GetStorageBillingUser(ctx context.Context, user string) (*StorageBilling, *Response, error) { - u := fmt.Sprintf("users/%v/settings/billing/shared-storage", user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - storageUserBilling := new(StorageBilling) - resp, err := s.client.Do(ctx, req, storageUserBilling) - if err != nil { - return nil, resp, err - } - - return storageUserBilling, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/checks.go b/vendor/github.com/google/go-github/v57/github/checks.go deleted file mode 100644 index a8618944..00000000 --- a/vendor/github.com/google/go-github/v57/github/checks.go +++ /dev/null @@ -1,475 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ChecksService provides access to the Checks API in the -// GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/checks/ -type ChecksService service - -// CheckRun represents a GitHub check run on a repository associated with a GitHub app. -type CheckRun struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` - ExternalID *string `json:"external_id,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - DetailsURL *string `json:"details_url,omitempty"` - Status *string `json:"status,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - StartedAt *Timestamp `json:"started_at,omitempty"` - CompletedAt *Timestamp `json:"completed_at,omitempty"` - Output *CheckRunOutput `json:"output,omitempty"` - Name *string `json:"name,omitempty"` - CheckSuite *CheckSuite `json:"check_suite,omitempty"` - App *App `json:"app,omitempty"` - PullRequests []*PullRequest `json:"pull_requests,omitempty"` -} - -// CheckRunOutput represents the output of a CheckRun. -type CheckRunOutput struct { - Title *string `json:"title,omitempty"` - Summary *string `json:"summary,omitempty"` - Text *string `json:"text,omitempty"` - AnnotationsCount *int `json:"annotations_count,omitempty"` - AnnotationsURL *string `json:"annotations_url,omitempty"` - Annotations []*CheckRunAnnotation `json:"annotations,omitempty"` - Images []*CheckRunImage `json:"images,omitempty"` -} - -// CheckRunAnnotation represents an annotation object for a CheckRun output. 
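// Editor's sketch (not part of the patch): the billing endpoints deleted above,
// using the field names from the removed ActionBilling struct. Assumes a
// configured *github.Client from go-github v57 plus imports of "context" and
// "fmt"; the org name is a placeholder.
func printOrgActionsBilling(ctx context.Context, client *github.Client, org string) error {
	ab, _, err := client.Billing.GetActionsBillingOrg(ctx, org)
	if err != nil {
		return err
	}
	fmt.Printf("%s used %.0f Actions minutes (%.0f included, %.0f paid)\n",
		org, ab.TotalMinutesUsed, ab.IncludedMinutes, ab.TotalPaidMinutesUsed)
	// MinutesUsedBreakdown is a map of runner OS to minutes, per the removed type alias.
	for os, minutes := range ab.MinutesUsedBreakdown {
		fmt.Printf("  %s: %d minutes\n", os, minutes)
	}
	return nil
}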
-type CheckRunAnnotation struct { - Path *string `json:"path,omitempty"` - StartLine *int `json:"start_line,omitempty"` - EndLine *int `json:"end_line,omitempty"` - StartColumn *int `json:"start_column,omitempty"` - EndColumn *int `json:"end_column,omitempty"` - AnnotationLevel *string `json:"annotation_level,omitempty"` - Message *string `json:"message,omitempty"` - Title *string `json:"title,omitempty"` - RawDetails *string `json:"raw_details,omitempty"` -} - -// CheckRunImage represents an image object for a CheckRun output. -type CheckRunImage struct { - Alt *string `json:"alt,omitempty"` - ImageURL *string `json:"image_url,omitempty"` - Caption *string `json:"caption,omitempty"` -} - -// CheckSuite represents a suite of check runs. -type CheckSuite struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - HeadBranch *string `json:"head_branch,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` - URL *string `json:"url,omitempty"` - BeforeSHA *string `json:"before,omitempty"` - AfterSHA *string `json:"after,omitempty"` - Status *string `json:"status,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - App *App `json:"app,omitempty"` - Repository *Repository `json:"repository,omitempty"` - PullRequests []*PullRequest `json:"pull_requests,omitempty"` - - // The following fields are only populated by Webhook events. - HeadCommit *Commit `json:"head_commit,omitempty"` -} - -func (c CheckRun) String() string { - return Stringify(c) -} - -func (c CheckSuite) String() string { - return Stringify(c) -} - -// GetCheckRun gets a check-run for a repository. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#get-a-check-run -// -//meta:operation GET /repos/{owner}/{repo}/check-runs/{check_run_id} -func (s *ChecksService) GetCheckRun(ctx context.Context, owner, repo string, checkRunID int64) (*CheckRun, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-runs/%v", owner, repo, checkRunID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - checkRun := new(CheckRun) - resp, err := s.client.Do(ctx, req, checkRun) - if err != nil { - return nil, resp, err - } - - return checkRun, resp, nil -} - -// GetCheckSuite gets a single check suite. -// -// GitHub API docs: https://docs.github.com/rest/checks/suites#get-a-check-suite -// -//meta:operation GET /repos/{owner}/{repo}/check-suites/{check_suite_id} -func (s *ChecksService) GetCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64) (*CheckSuite, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-suites/%v", owner, repo, checkSuiteID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - checkSuite := new(CheckSuite) - resp, err := s.client.Do(ctx, req, checkSuite) - if err != nil { - return nil, resp, err - } - - return checkSuite, resp, nil -} - -// CreateCheckRunOptions sets up parameters needed to create a CheckRun. -type CreateCheckRunOptions struct { - Name string `json:"name"` // The name of the check (e.g., "code-coverage"). (Required.) - HeadSHA string `json:"head_sha"` // The SHA of the commit. (Required.) - DetailsURL *string `json:"details_url,omitempty"` // The URL of the integrator's site that has the full details of the check. (Optional.) 
- ExternalID *string `json:"external_id,omitempty"` // A reference for the run on the integrator's system. (Optional.) - Status *string `json:"status,omitempty"` // The current status. Can be one of "queued", "in_progress", or "completed". Default: "queued". (Optional.) - Conclusion *string `json:"conclusion,omitempty"` // Can be one of "success", "failure", "neutral", "cancelled", "skipped", "timed_out", or "action_required". (Optional. Required if you provide a status of "completed".) - StartedAt *Timestamp `json:"started_at,omitempty"` // The time that the check run began. (Optional.) - CompletedAt *Timestamp `json:"completed_at,omitempty"` // The time the check completed. (Optional. Required if you provide conclusion.) - Output *CheckRunOutput `json:"output,omitempty"` // Provide descriptive details about the run. (Optional) - Actions []*CheckRunAction `json:"actions,omitempty"` // Possible further actions the integrator can perform, which a user may trigger. (Optional.) -} - -// CheckRunAction exposes further actions the integrator can perform, which a user may trigger. -type CheckRunAction struct { - Label string `json:"label"` // The text to be displayed on a button in the web UI. The maximum size is 20 characters. (Required.) - Description string `json:"description"` // A short explanation of what this action would do. The maximum size is 40 characters. (Required.) - Identifier string `json:"identifier"` // A reference for the action on the integrator's system. The maximum size is 20 characters. (Required.) -} - -// CreateCheckRun creates a check run for repository. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#create-a-check-run -// -//meta:operation POST /repos/{owner}/{repo}/check-runs -func (s *ChecksService) CreateCheckRun(ctx context.Context, owner, repo string, opts CreateCheckRunOptions) (*CheckRun, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-runs", owner, repo) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - checkRun := new(CheckRun) - resp, err := s.client.Do(ctx, req, checkRun) - if err != nil { - return nil, resp, err - } - - return checkRun, resp, nil -} - -// UpdateCheckRunOptions sets up parameters needed to update a CheckRun. -type UpdateCheckRunOptions struct { - Name string `json:"name"` // The name of the check (e.g., "code-coverage"). (Required.) - DetailsURL *string `json:"details_url,omitempty"` // The URL of the integrator's site that has the full details of the check. (Optional.) - ExternalID *string `json:"external_id,omitempty"` // A reference for the run on the integrator's system. (Optional.) - Status *string `json:"status,omitempty"` // The current status. Can be one of "queued", "in_progress", or "completed". Default: "queued". (Optional.) - Conclusion *string `json:"conclusion,omitempty"` // Can be one of "success", "failure", "neutral", "cancelled", "skipped", "timed_out", or "action_required". (Optional. Required if you provide a status of "completed".) - CompletedAt *Timestamp `json:"completed_at,omitempty"` // The time the check completed. (Optional. Required if you provide conclusion.) - Output *CheckRunOutput `json:"output,omitempty"` // Provide descriptive details about the run. (Optional) - Actions []*CheckRunAction `json:"actions,omitempty"` // Possible further actions the integrator can perform, which a user may trigger. (Optional.) 
-} - -// UpdateCheckRun updates a check run for a specific commit in a repository. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#update-a-check-run -// -//meta:operation PATCH /repos/{owner}/{repo}/check-runs/{check_run_id} -func (s *ChecksService) UpdateCheckRun(ctx context.Context, owner, repo string, checkRunID int64, opts UpdateCheckRunOptions) (*CheckRun, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-runs/%v", owner, repo, checkRunID) - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - checkRun := new(CheckRun) - resp, err := s.client.Do(ctx, req, checkRun) - if err != nil { - return nil, resp, err - } - - return checkRun, resp, nil -} - -// ListCheckRunAnnotations lists the annotations for a check run. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#list-check-run-annotations -// -//meta:operation GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations -func (s *ChecksService) ListCheckRunAnnotations(ctx context.Context, owner, repo string, checkRunID int64, opts *ListOptions) ([]*CheckRunAnnotation, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-runs/%v/annotations", owner, repo, checkRunID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - var checkRunAnnotations []*CheckRunAnnotation - resp, err := s.client.Do(ctx, req, &checkRunAnnotations) - if err != nil { - return nil, resp, err - } - - return checkRunAnnotations, resp, nil -} - -// ListCheckRunsOptions represents parameters to list check runs. -type ListCheckRunsOptions struct { - CheckName *string `url:"check_name,omitempty"` // Returns check runs with the specified name. - Status *string `url:"status,omitempty"` // Returns check runs with the specified status. Can be one of "queued", "in_progress", or "completed". - Filter *string `url:"filter,omitempty"` // Filters check runs by their completed_at timestamp. Can be one of "latest" (returning the most recent check runs) or "all". Default: "latest" - AppID *int64 `url:"app_id,omitempty"` // Filters check runs by GitHub App ID. - - ListOptions -} - -// ListCheckRunsResults represents the result of a check run list. -type ListCheckRunsResults struct { - Total *int `json:"total_count,omitempty"` - CheckRuns []*CheckRun `json:"check_runs,omitempty"` -} - -// ListCheckRunsForRef lists check runs for a specific ref. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#list-check-runs-for-a-git-reference -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref}/check-runs -func (s *ChecksService) ListCheckRunsForRef(ctx context.Context, owner, repo, ref string, opts *ListCheckRunsOptions) (*ListCheckRunsResults, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/check-runs", owner, repo, refURLEscape(ref)) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - var checkRunResults *ListCheckRunsResults - resp, err := s.client.Do(ctx, req, &checkRunResults) - if err != nil { - return nil, resp, err - } - - return checkRunResults, resp, nil -} - -// ListCheckRunsCheckSuite lists check runs for a check suite. 
-// -// GitHub API docs: https://docs.github.com/rest/checks/runs#list-check-runs-in-a-check-suite -// -//meta:operation GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs -func (s *ChecksService) ListCheckRunsCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64, opts *ListCheckRunsOptions) (*ListCheckRunsResults, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-suites/%v/check-runs", owner, repo, checkSuiteID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - var checkRunResults *ListCheckRunsResults - resp, err := s.client.Do(ctx, req, &checkRunResults) - if err != nil { - return nil, resp, err - } - - return checkRunResults, resp, nil -} - -// ReRequestCheckRun triggers GitHub to rerequest an existing check run. -// -// GitHub API docs: https://docs.github.com/rest/checks/runs#rerequest-a-check-run -// -//meta:operation POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest -func (s *ChecksService) ReRequestCheckRun(ctx context.Context, owner, repo string, checkRunID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-runs/%v/rerequest", owner, repo, checkRunID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - return s.client.Do(ctx, req, nil) -} - -// ListCheckSuiteOptions represents parameters to list check suites. -type ListCheckSuiteOptions struct { - CheckName *string `url:"check_name,omitempty"` // Filters checks suites by the name of the check run. - AppID *int `url:"app_id,omitempty"` // Filters check suites by GitHub App id. - - ListOptions -} - -// ListCheckSuiteResults represents the result of a check run list. -type ListCheckSuiteResults struct { - Total *int `json:"total_count,omitempty"` - CheckSuites []*CheckSuite `json:"check_suites,omitempty"` -} - -// ListCheckSuitesForRef lists check suite for a specific ref. -// -// GitHub API docs: https://docs.github.com/rest/checks/suites#list-check-suites-for-a-git-reference -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref}/check-suites -func (s *ChecksService) ListCheckSuitesForRef(ctx context.Context, owner, repo, ref string, opts *ListCheckSuiteOptions) (*ListCheckSuiteResults, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/check-suites", owner, repo, refURLEscape(ref)) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - var checkSuiteResults *ListCheckSuiteResults - resp, err := s.client.Do(ctx, req, &checkSuiteResults) - if err != nil { - return nil, resp, err - } - - return checkSuiteResults, resp, nil -} - -// AutoTriggerCheck enables or disables automatic creation of CheckSuite events upon pushes to the repository. -type AutoTriggerCheck struct { - AppID *int64 `json:"app_id,omitempty"` // The id of the GitHub App. (Required.) - Setting *bool `json:"setting,omitempty"` // Set to "true" to enable automatic creation of CheckSuite events upon pushes to the repository, or "false" to disable them. Default: "true" (Required.) -} - -// CheckSuitePreferenceOptions set options for check suite preferences for a repository. 
-type CheckSuitePreferenceOptions struct { - AutoTriggerChecks []*AutoTriggerCheck `json:"auto_trigger_checks,omitempty"` // A slice of auto trigger checks that can be set for a check suite in a repository. -} - -// CheckSuitePreferenceResults represents the results of the preference set operation. -type CheckSuitePreferenceResults struct { - Preferences *PreferenceList `json:"preferences,omitempty"` - Repository *Repository `json:"repository,omitempty"` -} - -// PreferenceList represents a list of auto trigger checks for repository -type PreferenceList struct { - AutoTriggerChecks []*AutoTriggerCheck `json:"auto_trigger_checks,omitempty"` // A slice of auto trigger checks that can be set for a check suite in a repository. -} - -// SetCheckSuitePreferences changes the default automatic flow when creating check suites. -// -// GitHub API docs: https://docs.github.com/rest/checks/suites#update-repository-preferences-for-check-suites -// -//meta:operation PATCH /repos/{owner}/{repo}/check-suites/preferences -func (s *ChecksService) SetCheckSuitePreferences(ctx context.Context, owner, repo string, opts CheckSuitePreferenceOptions) (*CheckSuitePreferenceResults, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-suites/preferences", owner, repo) - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - var checkSuitePrefResults *CheckSuitePreferenceResults - resp, err := s.client.Do(ctx, req, &checkSuitePrefResults) - if err != nil { - return nil, resp, err - } - - return checkSuitePrefResults, resp, nil -} - -// CreateCheckSuiteOptions sets up parameters to manually create a check suites -type CreateCheckSuiteOptions struct { - HeadSHA string `json:"head_sha"` // The sha of the head commit. (Required.) - HeadBranch *string `json:"head_branch,omitempty"` // The name of the head branch where the code changes are implemented. -} - -// CreateCheckSuite manually creates a check suite for a repository. -// -// GitHub API docs: https://docs.github.com/rest/checks/suites#create-a-check-suite -// -//meta:operation POST /repos/{owner}/{repo}/check-suites -func (s *ChecksService) CreateCheckSuite(ctx context.Context, owner, repo string, opts CreateCheckSuiteOptions) (*CheckSuite, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-suites", owner, repo) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - checkSuite := new(CheckSuite) - resp, err := s.client.Do(ctx, req, checkSuite) - if err != nil { - return nil, resp, err - } - - return checkSuite, resp, nil -} - -// ReRequestCheckSuite triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/checks/suites#rerequest-a-check-suite -// -//meta:operation POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest -func (s *ChecksService) ReRequestCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/check-suites/%v/rerequest", owner, repo, checkSuiteID) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - req.Header.Set("Accept", mediaTypeCheckRunsPreview) - - resp, err := s.client.Do(ctx, req, nil) - return resp, err -} diff --git a/vendor/github.com/google/go-github/v57/github/code-scanning.go b/vendor/github.com/google/go-github/v57/github/code-scanning.go deleted file mode 100644 index 74a7b6c9..00000000 --- a/vendor/github.com/google/go-github/v57/github/code-scanning.go +++ /dev/null @@ -1,652 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strconv" - "strings" -) - -// CodeScanningService handles communication with the code scanning related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type CodeScanningService service - -// Rule represents the complete details of GitHub Code Scanning alert type. -type Rule struct { - ID *string `json:"id,omitempty"` - Severity *string `json:"severity,omitempty"` - Description *string `json:"description,omitempty"` - Name *string `json:"name,omitempty"` - SecuritySeverityLevel *string `json:"security_severity_level,omitempty"` - FullDescription *string `json:"full_description,omitempty"` - Tags []string `json:"tags,omitempty"` - Help *string `json:"help,omitempty"` -} - -// Location represents the exact location of the GitHub Code Scanning Alert in the scanned project. -type Location struct { - Path *string `json:"path,omitempty"` - StartLine *int `json:"start_line,omitempty"` - EndLine *int `json:"end_line,omitempty"` - StartColumn *int `json:"start_column,omitempty"` - EndColumn *int `json:"end_column,omitempty"` -} - -// Message is a part of MostRecentInstance struct which provides the appropriate message when any action is performed on the analysis object. -type Message struct { - Text *string `json:"text,omitempty"` -} - -// MostRecentInstance provides details of the most recent instance of this alert for the default branch or for the specified Git reference. -type MostRecentInstance struct { - Ref *string `json:"ref,omitempty"` - AnalysisKey *string `json:"analysis_key,omitempty"` - Category *string `json:"category,omitempty"` - Environment *string `json:"environment,omitempty"` - State *string `json:"state,omitempty"` - CommitSHA *string `json:"commit_sha,omitempty"` - Message *Message `json:"message,omitempty"` - Location *Location `json:"location,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Classifications []string `json:"classifications,omitempty"` -} - -// Tool represents the tool used to generate a GitHub Code Scanning Alert. -type Tool struct { - Name *string `json:"name,omitempty"` - GUID *string `json:"guid,omitempty"` - Version *string `json:"version,omitempty"` -} - -// Alert represents an individual GitHub Code Scanning Alert on a single repository. 
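// Editor's sketch (not part of the patch): creating and completing a check run
// with the checks API removed above. Owner, repo and SHA are placeholders;
// assumes a configured *github.Client from go-github v57 and imports of
// "context" and "time". CompletedAt is set because the removed doc comment
// marks it as required when a conclusion is supplied.
func reportCheck(ctx context.Context, client *github.Client, owner, repo, sha string) error {
	run, _, err := client.Checks.CreateCheckRun(ctx, owner, repo, github.CreateCheckRunOptions{
		Name:    "code-coverage",
		HeadSHA: sha,
		Status:  github.String("in_progress"),
	})
	if err != nil {
		return err
	}

	// ... run the actual check here, then report the outcome.
	done := github.Timestamp{Time: time.Now()}
	_, _, err = client.Checks.UpdateCheckRun(ctx, owner, repo, run.GetID(), github.UpdateCheckRunOptions{
		Name:        "code-coverage",
		Status:      github.String("completed"),
		Conclusion:  github.String("success"),
		CompletedAt: &done,
	})
	return err
}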
-// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type Alert struct { - Number *int `json:"number,omitempty"` - Repository *Repository `json:"repository,omitempty"` - RuleID *string `json:"rule_id,omitempty"` - RuleSeverity *string `json:"rule_severity,omitempty"` - RuleDescription *string `json:"rule_description,omitempty"` - Rule *Rule `json:"rule,omitempty"` - Tool *Tool `json:"tool,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - FixedAt *Timestamp `json:"fixed_at,omitempty"` - State *string `json:"state,omitempty"` - ClosedBy *User `json:"closed_by,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - MostRecentInstance *MostRecentInstance `json:"most_recent_instance,omitempty"` - Instances []*MostRecentInstance `json:"instances,omitempty"` - DismissedBy *User `json:"dismissed_by,omitempty"` - DismissedAt *Timestamp `json:"dismissed_at,omitempty"` - DismissedReason *string `json:"dismissed_reason,omitempty"` - DismissedComment *string `json:"dismissed_comment,omitempty"` - InstancesURL *string `json:"instances_url,omitempty"` -} - -// ID returns the ID associated with an alert. It is the number at the end of the security alert's URL. -func (a *Alert) ID() int64 { - if a == nil { - return 0 - } - - s := a.GetHTMLURL() - - // Check for an ID to parse at the end of the url - if i := strings.LastIndex(s, "/"); i >= 0 { - s = s[i+1:] - } - - // Return the alert ID as a 64-bit integer. Unable to convert or out of range returns 0. - id, err := strconv.ParseInt(s, 10, 64) - if err != nil { - return 0 - } - - return id -} - -// AlertInstancesListOptions specifies optional parameters to the CodeScanningService.ListAlertInstances method. -type AlertInstancesListOptions struct { - // Return code scanning alert instances for a specific branch reference. - // The ref can be formatted as refs/heads/ or simply . To reference a pull request use refs/pull//merge - Ref string `url:"ref,omitempty"` - - ListOptions -} - -// AlertListOptions specifies optional parameters to the CodeScanningService.ListAlerts method. -type AlertListOptions struct { - // State of the code scanning alerts to list. Set to closed to list only closed code scanning alerts. Default: open - State string `url:"state,omitempty"` - - // Return code scanning alerts for a specific branch reference. - // The ref can be formatted as refs/heads/ or simply . To reference a pull request use refs/pull//merge - Ref string `url:"ref,omitempty"` - - // If specified, only code scanning alerts with this severity will be returned. Possible values are: critical, high, medium, low, warning, note, error. - Severity string `url:"severity,omitempty"` - - // The name of a code scanning tool. Only results by this tool will be listed. - ToolName string `url:"tool_name,omitempty"` - - ListCursorOptions - - // Add ListOptions so offset pagination with integer type "page" query parameter is accepted - // since ListCursorOptions accepts "page" as string only. - ListOptions -} - -// AnalysesListOptions specifies optional parameters to the CodeScanningService.ListAnalysesForRepo method. -type AnalysesListOptions struct { - // Return code scanning analyses belonging to the same SARIF upload. - SarifID *string `url:"sarif_id,omitempty"` - - // Return code scanning analyses for a specific branch reference. - // The ref can be formatted as refs/heads/ or simply . 
To reference a pull request use refs/pull//merge - Ref *string `url:"ref,omitempty"` - - ListOptions -} - -// CodeQLDatabase represents a metadata about the CodeQL database. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type CodeQLDatabase struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Language *string `json:"language,omitempty"` - Uploader *User `json:"uploader,omitempty"` - ContentType *string `json:"content_type,omitempty"` - Size *int64 `json:"size,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` -} - -// ScanningAnalysis represents an individual GitHub Code Scanning ScanningAnalysis on a single repository. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type ScanningAnalysis struct { - ID *int64 `json:"id,omitempty"` - Ref *string `json:"ref,omitempty"` - CommitSHA *string `json:"commit_sha,omitempty"` - AnalysisKey *string `json:"analysis_key,omitempty"` - Environment *string `json:"environment,omitempty"` - Error *string `json:"error,omitempty"` - Category *string `json:"category,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - ResultsCount *int `json:"results_count,omitempty"` - RulesCount *int `json:"rules_count,omitempty"` - URL *string `json:"url,omitempty"` - SarifID *string `json:"sarif_id,omitempty"` - Tool *Tool `json:"tool,omitempty"` - Deletable *bool `json:"deletable,omitempty"` - Warning *string `json:"warning,omitempty"` -} - -// SarifAnalysis specifies the results of a code scanning job. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type SarifAnalysis struct { - CommitSHA *string `json:"commit_sha,omitempty"` - Ref *string `json:"ref,omitempty"` - Sarif *string `json:"sarif,omitempty"` - CheckoutURI *string `json:"checkout_uri,omitempty"` - StartedAt *Timestamp `json:"started_at,omitempty"` - ToolName *string `json:"tool_name,omitempty"` -} - -// CodeScanningAlertState specifies the state of a code scanning alert. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type CodeScanningAlertState struct { - // State sets the state of the code scanning alert and is a required field. - // You must also provide DismissedReason when you set the state to "dismissed". - // State can be one of: "open", "dismissed". - State string `json:"state"` - // DismissedReason represents the reason for dismissing or closing the alert. - // It is required when the state is "dismissed". - // It can be one of: "false positive", "won't fix", "used in tests". - DismissedReason *string `json:"dismissed_reason,omitempty"` - // DismissedComment is associated with the dismissal of the alert. - DismissedComment *string `json:"dismissed_comment,omitempty"` -} - -// SarifID identifies a sarif analysis upload. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning -type SarifID struct { - ID *string `json:"id,omitempty"` - URL *string `json:"url,omitempty"` -} - -// ListAlertsForOrg lists code scanning alerts for an org. -// -// You must use an access token with the security_events scope to use this endpoint. GitHub Apps must have the security_events -// read permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#list-code-scanning-alerts-for-an-organization -// -//meta:operation GET /orgs/{org}/code-scanning/alerts -func (s *CodeScanningService) ListAlertsForOrg(ctx context.Context, org string, opts *AlertListOptions) ([]*Alert, *Response, error) { - u := fmt.Sprintf("orgs/%v/code-scanning/alerts", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*Alert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// ListAlertsForRepo lists code scanning alerts for a repository. -// -// Lists all open code scanning alerts for the default branch (usually master) and protected branches in a repository. -// You must use an access token with the security_events scope to use this endpoint. GitHub Apps must have the security_events -// read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#list-code-scanning-alerts-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/alerts -func (s *CodeScanningService) ListAlertsForRepo(ctx context.Context, owner, repo string, opts *AlertListOptions) ([]*Alert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*Alert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// GetAlert gets a single code scanning alert for a repository. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. -// -// The security alert_id is the number at the end of the security alert's URL. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#get-a-code-scanning-alert -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number} -func (s *CodeScanningService) GetAlert(ctx context.Context, owner, repo string, id int64) (*Alert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts/%v", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - a := new(Alert) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} - -// UpdateAlert updates the state of a single code scanning alert for a repository. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. -// -// The security alert_id is the number at the end of the security alert's URL. 
-// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#update-a-code-scanning-alert -// -//meta:operation PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number} -func (s *CodeScanningService) UpdateAlert(ctx context.Context, owner, repo string, id int64, stateInfo *CodeScanningAlertState) (*Alert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts/%v", owner, repo, id) - - req, err := s.client.NewRequest("PATCH", u, stateInfo) - if err != nil { - return nil, nil, err - } - - a := new(Alert) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} - -// ListAlertInstances lists instances of a code scanning alert. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#list-instances-of-a-code-scanning-alert -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances -func (s *CodeScanningService) ListAlertInstances(ctx context.Context, owner, repo string, id int64, opts *AlertInstancesListOptions) ([]*MostRecentInstance, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts/%v/instances", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alertInstances []*MostRecentInstance - resp, err := s.client.Do(ctx, req, &alertInstances) - if err != nil { - return nil, resp, err - } - - return alertInstances, resp, nil -} - -// UploadSarif uploads the result of code scanning job to GitHub. -// -// For the parameter sarif, you must first compress your SARIF file using gzip and then translate the contents of the file into a Base64 encoding string. -// You must use an access token with the security_events scope to use this endpoint. GitHub Apps must have the security_events -// write permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#upload-an-analysis-as-sarif-data -// -//meta:operation POST /repos/{owner}/{repo}/code-scanning/sarifs -func (s *CodeScanningService) UploadSarif(ctx context.Context, owner, repo string, sarif *SarifAnalysis) (*SarifID, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/sarifs", owner, repo) - - req, err := s.client.NewRequest("POST", u, sarif) - if err != nil { - return nil, nil, err - } - - sarifID := new(SarifID) - resp, err := s.client.Do(ctx, req, sarifID) - if err != nil { - return nil, resp, err - } - - return sarifID, resp, nil -} - -// SARIFUpload represents information about a SARIF upload. -type SARIFUpload struct { - // `pending` files have not yet been processed, while `complete` means results from the SARIF have been stored. - // `failed` files have either not been processed at all, or could only be partially processed. - ProcessingStatus *string `json:"processing_status,omitempty"` - // The REST API URL for getting the analyses associated with the upload. - AnalysesURL *string `json:"analyses_url,omitempty"` -} - -// GetSARIF gets information about a SARIF upload. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#get-information-about-a-sarif-upload -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id} -func (s *CodeScanningService) GetSARIF(ctx context.Context, owner, repo, sarifID string) (*SARIFUpload, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/sarifs/%v", owner, repo, sarifID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - sarifUpload := new(SARIFUpload) - resp, err := s.client.Do(ctx, req, sarifUpload) - if err != nil { - return nil, resp, err - } - - return sarifUpload, resp, nil -} - -// ListAnalysesForRepo lists code scanning analyses for a repository. -// -// Lists the details of all code scanning analyses for a repository, starting with the most recent. -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#list-code-scanning-analyses-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/analyses -func (s *CodeScanningService) ListAnalysesForRepo(ctx context.Context, owner, repo string, opts *AnalysesListOptions) ([]*ScanningAnalysis, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var analyses []*ScanningAnalysis - resp, err := s.client.Do(ctx, req, &analyses) - if err != nil { - return nil, resp, err - } - - return analyses, resp, nil -} - -// GetAnalysis gets a single code scanning analysis for a repository. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. -// -// The security analysis_id is the ID of the analysis, as returned from the ListAnalysesForRepo operation. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#get-a-code-scanning-analysis-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id} -func (s *CodeScanningService) GetAnalysis(ctx context.Context, owner, repo string, id int64) (*ScanningAnalysis, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses/%v", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - analysis := new(ScanningAnalysis) - resp, err := s.client.Do(ctx, req, analysis) - if err != nil { - return nil, resp, err - } - - return analysis, resp, nil -} - -// DeleteAnalysis represents a successful deletion of a code scanning analysis. -type DeleteAnalysis struct { - // Next deletable analysis in chain, without last analysis deletion confirmation - NextAnalysisURL *string `json:"next_analysis_url,omitempty"` - // Next deletable analysis in chain, with last analysis deletion confirmation - ConfirmDeleteURL *string `json:"confirm_delete_url,omitempty"` -} - -// DeleteAnalysis deletes a single code scanning analysis from a repository. -// -// You must use an access token with the repo scope to use this endpoint. -// GitHub Apps must have the security_events read permission to use this endpoint. 
-// -// The security analysis_id is the ID of the analysis, as returned from the ListAnalysesForRepo operation. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#delete-a-code-scanning-analysis-from-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id} -func (s *CodeScanningService) DeleteAnalysis(ctx context.Context, owner, repo string, id int64) (*DeleteAnalysis, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses/%v", owner, repo, id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, nil, err - } - - deleteAnalysis := new(DeleteAnalysis) - resp, err := s.client.Do(ctx, req, deleteAnalysis) - if err != nil { - return nil, resp, err - } - - return deleteAnalysis, resp, nil -} - -// ListCodeQLDatabases lists the CodeQL databases that are available in a repository. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the contents read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#list-codeql-databases-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/codeql/databases -func (s *CodeScanningService) ListCodeQLDatabases(ctx context.Context, owner, repo string) ([]*CodeQLDatabase, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/codeql/databases", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var codeqlDatabases []*CodeQLDatabase - resp, err := s.client.Do(ctx, req, &codeqlDatabases) - if err != nil { - return nil, resp, err - } - - return codeqlDatabases, resp, nil -} - -// GetCodeQLDatabase gets a CodeQL database for a language in a repository. -// -// You must use an access token with the security_events scope to use this endpoint. -// GitHub Apps must have the contents read permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#get-a-codeql-database-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language} -func (s *CodeScanningService) GetCodeQLDatabase(ctx context.Context, owner, repo, language string) (*CodeQLDatabase, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/code-scanning/codeql/databases/%v", owner, repo, language) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - codeqlDatabase := new(CodeQLDatabase) - resp, err := s.client.Do(ctx, req, codeqlDatabase) - if err != nil { - return nil, resp, err - } - - return codeqlDatabase, resp, nil -} - -// DefaultSetupConfiguration represents a code scanning default setup configuration. -type DefaultSetupConfiguration struct { - State *string `json:"state,omitempty"` - Languages []string `json:"languages,omitempty"` - QuerySuite *string `json:"query_suite,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -// GetDefaultSetupConfiguration gets a code scanning default setup configuration. -// -// You must use an access token with the repo scope to use this -// endpoint with private repos or the public_repo scope for public repos. GitHub Apps must have the repo write -// permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#get-a-code-scanning-default-setup-configuration -// -//meta:operation GET /repos/{owner}/{repo}/code-scanning/default-setup -func (s *CodeScanningService) GetDefaultSetupConfiguration(ctx context.Context, owner, repo string) (*DefaultSetupConfiguration, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/code-scanning/default-setup", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - cfg := new(DefaultSetupConfiguration) - resp, err := s.client.Do(ctx, req, cfg) - if err != nil { - return nil, resp, err - } - - return cfg, resp, nil -} - -// UpdateDefaultSetupConfigurationOptions specifies parameters to the CodeScanningService.UpdateDefaultSetupConfiguration -// method. -type UpdateDefaultSetupConfigurationOptions struct { - State string `json:"state"` - QuerySuite *string `json:"query_suite,omitempty"` - Languages []string `json:"languages,omitempty"` -} - -// UpdateDefaultSetupConfigurationResponse represents a response from updating a code scanning default setup configuration. -type UpdateDefaultSetupConfigurationResponse struct { - RunID *int64 `json:"run_id,omitempty"` - RunURL *string `json:"run_url,omitempty"` -} - -// UpdateDefaultSetupConfiguration updates a code scanning default setup configuration. -// -// You must use an access token with the repo scope to use this -// endpoint with private repos or the public_repo scope for public repos. GitHub Apps must have the repo write -// permission to use this endpoint. -// -// This method might return an AcceptedError and a status code of 202. This is because this is the status that GitHub -// returns to signify that it has now scheduled the update of the pull request branch in a background task. -// -// GitHub API docs: https://docs.github.com/rest/code-scanning/code-scanning#update-a-code-scanning-default-setup-configuration -// -//meta:operation PATCH /repos/{owner}/{repo}/code-scanning/default-setup -func (s *CodeScanningService) UpdateDefaultSetupConfiguration(ctx context.Context, owner, repo string, options *UpdateDefaultSetupConfigurationOptions) (*UpdateDefaultSetupConfigurationResponse, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/code-scanning/default-setup", owner, repo) - - req, err := s.client.NewRequest("PATCH", u, options) - if err != nil { - return nil, nil, err - } - - a := new(UpdateDefaultSetupConfigurationResponse) - resp, err := s.client.Do(ctx, req, a) - if err != nil { - return nil, resp, err - } - - return a, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/codesofconduct.go b/vendor/github.com/google/go-github/v57/github/codesofconduct.go deleted file mode 100644 index 7d7f9ef8..00000000 --- a/vendor/github.com/google/go-github/v57/github/codesofconduct.go +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// CodesOfConductService provides access to code-of-conduct-related functions in the GitHub API. -type CodesOfConductService service - -// CodeOfConduct represents a code of conduct. 
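// Editor's sketch (not part of the patch): listing open code scanning alerts on
// a branch with the API removed above. Assumes a configured *github.Client from
// go-github v57 and imports of "context" and "fmt"; the branch ref is a
// placeholder.
func listOpenCodeScanningAlerts(ctx context.Context, client *github.Client, owner, repo string) error {
	opts := &github.AlertListOptions{
		State: "open",
		Ref:   "refs/heads/main",
	}
	alerts, _, err := client.CodeScanning.ListAlertsForRepo(ctx, owner, repo, opts)
	if err != nil {
		return err
	}
	for _, a := range alerts {
		fmt.Printf("#%d [%s] %s\n", a.GetNumber(), a.GetState(), a.GetRule().GetDescription())
	}
	return nil
}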
-type CodeOfConduct struct { - Name *string `json:"name,omitempty"` - Key *string `json:"key,omitempty"` - URL *string `json:"url,omitempty"` - Body *string `json:"body,omitempty"` -} - -func (c *CodeOfConduct) String() string { - return Stringify(c) -} - -// List returns all codes of conduct. -// -// GitHub API docs: https://docs.github.com/rest/codes-of-conduct/codes-of-conduct#get-all-codes-of-conduct -// -//meta:operation GET /codes_of_conduct -func (s *CodesOfConductService) List(ctx context.Context) ([]*CodeOfConduct, *Response, error) { - req, err := s.client.NewRequest("GET", "codes_of_conduct", nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeCodesOfConductPreview) - - var cs []*CodeOfConduct - resp, err := s.client.Do(ctx, req, &cs) - if err != nil { - return nil, resp, err - } - - return cs, resp, nil -} - -// ListCodesOfConduct returns all codes of conduct. -// -// Deprecated: Use CodesOfConductService.List instead -func (c *Client) ListCodesOfConduct(ctx context.Context) ([]*CodeOfConduct, *Response, error) { - return c.CodesOfConduct.List(ctx) -} - -// Get returns an individual code of conduct. -// -// GitHub API docs: https://docs.github.com/rest/codes-of-conduct/codes-of-conduct#get-a-code-of-conduct -// -//meta:operation GET /codes_of_conduct/{key} -func (s *CodesOfConductService) Get(ctx context.Context, key string) (*CodeOfConduct, *Response, error) { - u := fmt.Sprintf("codes_of_conduct/%s", key) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeCodesOfConductPreview) - - coc := new(CodeOfConduct) - resp, err := s.client.Do(ctx, req, coc) - if err != nil { - return nil, resp, err - } - - return coc, resp, nil -} - -// GetCodeOfConduct returns an individual code of conduct. -// -// Deprecated: Use CodesOfConductService.Get instead -func (c *Client) GetCodeOfConduct(ctx context.Context, key string) (*CodeOfConduct, *Response, error) { - return c.CodesOfConduct.Get(ctx, key) -} diff --git a/vendor/github.com/google/go-github/v57/github/dependabot_alerts.go b/vendor/github.com/google/go-github/v57/github/dependabot_alerts.go deleted file mode 100644 index f1ed126c..00000000 --- a/vendor/github.com/google/go-github/v57/github/dependabot_alerts.go +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Dependency reprensents the vulnerable dependency. -type Dependency struct { - Package *VulnerabilityPackage `json:"package,omitempty"` - ManifestPath *string `json:"manifest_path,omitempty"` - Scope *string `json:"scope,omitempty"` -} - -// AdvisoryCVSS represents the advisory pertaining to the Common Vulnerability Scoring System. -type AdvisoryCVSS struct { - Score *float64 `json:"score,omitempty"` - VectorString *string `json:"vector_string,omitempty"` -} - -// AdvisoryCWEs reprensent the advisory pertaining to Common Weakness Enumeration. -type AdvisoryCWEs struct { - CWEID *string `json:"cwe_id,omitempty"` - Name *string `json:"name,omitempty"` -} - -// DependabotSecurityAdvisory represents the GitHub Security Advisory. 
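// Editor's sketch (not part of the patch): the codes of conduct endpoints from
// the removed codesofconduct.go. Assumes a configured *github.Client from
// go-github v57 and imports of "context" and "fmt".
func printCodesOfConduct(ctx context.Context, client *github.Client) error {
	codes, _, err := client.CodesOfConduct.List(ctx)
	if err != nil {
		return err
	}
	for _, c := range codes {
		fmt.Printf("%s: %s\n", c.GetKey(), c.GetName())
	}
	return nil
}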
-type DependabotSecurityAdvisory struct { - GHSAID *string `json:"ghsa_id,omitempty"` - CVEID *string `json:"cve_id,omitempty"` - Summary *string `json:"summary,omitempty"` - Description *string `json:"description,omitempty"` - Vulnerabilities []*AdvisoryVulnerability `json:"vulnerabilities,omitempty"` - Severity *string `json:"severity,omitempty"` - CVSS *AdvisoryCVSS `json:"cvss,omitempty"` - CWEs []*AdvisoryCWEs `json:"cwes,omitempty"` - Identifiers []*AdvisoryIdentifier `json:"identifiers,omitempty"` - References []*AdvisoryReference `json:"references,omitempty"` - PublishedAt *Timestamp `json:"published_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - WithdrawnAt *Timestamp `json:"withdrawn_at,omitempty"` -} - -// DependabotAlert represents a Dependabot alert. -type DependabotAlert struct { - Number *int `json:"number,omitempty"` - State *string `json:"state,omitempty"` - Dependency *Dependency `json:"dependency,omitempty"` - SecurityAdvisory *DependabotSecurityAdvisory `json:"security_advisory,omitempty"` - SecurityVulnerability *AdvisoryVulnerability `json:"security_vulnerability,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - DismissedAt *Timestamp `json:"dismissed_at,omitempty"` - DismissedBy *User `json:"dismissed_by,omitempty"` - DismissedReason *string `json:"dismissed_reason,omitempty"` - DismissedComment *string `json:"dismissed_comment,omitempty"` - FixedAt *Timestamp `json:"fixed_at,omitempty"` - AutoDismissedAt *Timestamp `json:"auto_dismissed_at,omitempty"` - // The repository is always empty for events - Repository *Repository `json:"repository,omitempty"` -} - -// DependabotAlertState represents the state of a Dependabot alert to update. -type DependabotAlertState struct { - // The state of the Dependabot alert. A dismissed_reason must be provided when setting the state to dismissed. - State string `json:"state"` - // Required when state is dismissed. A reason for dismissing the alert. - // Can be one of: fix_started, inaccurate, no_bandwidth, not_used, tolerable_risk - DismissedReason *string `json:"dismissed_reason,omitempty"` - // An optional comment associated with dismissing the alert. - DismissedComment *string `json:"dismissed_comment,omitempty"` -} - -// ListAlertsOptions specifies the optional parameters to the DependabotService.ListRepoAlerts -// and DependabotService.ListOrgAlerts methods. -type ListAlertsOptions struct { - State *string `url:"state,omitempty"` - Severity *string `url:"severity,omitempty"` - Ecosystem *string `url:"ecosystem,omitempty"` - Package *string `url:"package,omitempty"` - Scope *string `url:"scope,omitempty"` - Sort *string `url:"sort,omitempty"` - Direction *string `url:"direction,omitempty"` - - ListOptions - ListCursorOptions -} - -func (s *DependabotService) listAlerts(ctx context.Context, url string, opts *ListAlertsOptions) ([]*DependabotAlert, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*DependabotAlert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// ListRepoAlerts lists all Dependabot alerts of a repository. 
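To make the alert-state rules above concrete — a dismissed_reason is required when setting the state to "dismissed" — here is a minimal sketch against the vendored go-github v57 API; the owner/repo strings and alert number 42 are placeholders:

    import (
        "context"
        "fmt"

        "github.com/google/go-github/v57/github"
    )

    func dismissAlert(ctx context.Context, client *github.Client) error {
        // List open Dependabot alerts for a repository (owner/repo are placeholders).
        alerts, _, err := client.Dependabot.ListRepoAlerts(ctx, "owner", "repo", &github.ListAlertsOptions{
            State: github.String("open"),
        })
        if err != nil {
            return err
        }
        for _, a := range alerts {
            fmt.Printf("#%d %s\n", a.GetNumber(), a.GetSecurityAdvisory().GetSummary())
        }

        // Dismissing requires a dismissed_reason, e.g. "tolerable_risk".
        state := &github.DependabotAlertState{
            State:            "dismissed",
            DismissedReason:  github.String("tolerable_risk"),
            DismissedComment: github.String("Accepted risk for this release."),
        }
        _, _, err = client.Dependabot.UpdateAlert(ctx, "owner", "repo", 42, state)
        return err
    }
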
-// -// GitHub API docs: https://docs.github.com/rest/dependabot/alerts#list-dependabot-alerts-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/dependabot/alerts -func (s *DependabotService) ListRepoAlerts(ctx context.Context, owner, repo string, opts *ListAlertsOptions) ([]*DependabotAlert, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/alerts", owner, repo) - return s.listAlerts(ctx, url, opts) -} - -// ListOrgAlerts lists all Dependabot alerts of an organization. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/alerts#list-dependabot-alerts-for-an-organization -// -//meta:operation GET /orgs/{org}/dependabot/alerts -func (s *DependabotService) ListOrgAlerts(ctx context.Context, org string, opts *ListAlertsOptions) ([]*DependabotAlert, *Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/alerts", org) - return s.listAlerts(ctx, url, opts) -} - -// GetRepoAlert gets a single repository Dependabot alert. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/alerts#get-a-dependabot-alert -// -//meta:operation GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number} -func (s *DependabotService) GetRepoAlert(ctx context.Context, owner, repo string, number int) (*DependabotAlert, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/alerts/%v", owner, repo, number) - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - alert := new(DependabotAlert) - resp, err := s.client.Do(ctx, req, alert) - if err != nil { - return nil, resp, err - } - - return alert, resp, nil -} - -// UpdateAlert updates a Dependabot alert. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/alerts#update-a-dependabot-alert -// -//meta:operation PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number} -func (s *DependabotService) UpdateAlert(ctx context.Context, owner, repo string, number int, stateInfo *DependabotAlertState) (*DependabotAlert, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/alerts/%v", owner, repo, number) - req, err := s.client.NewRequest("PATCH", url, stateInfo) - if err != nil { - return nil, nil, err - } - - alert := new(DependabotAlert) - resp, err := s.client.Do(ctx, req, alert) - if err != nil { - return nil, resp, err - } - - return alert, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/doc.go b/vendor/github.com/google/go-github/v57/github/doc.go deleted file mode 100644 index ca00a4bd..00000000 --- a/vendor/github.com/google/go-github/v57/github/doc.go +++ /dev/null @@ -1,194 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package github provides a client for using the GitHub API. - -Usage: - - import "github.com/google/go-github/v57/github" // with go modules enabled (GO111MODULE=on or outside GOPATH) - import "github.com/google/go-github/github" // with go modules disabled - -Construct a new GitHub client, then use the various services on the client to -access different parts of the GitHub API. For example: - - client := github.NewClient(nil) - - // list all organizations for user "willnorris" - orgs, _, err := client.Organizations.List(ctx, "willnorris", nil) - -Some API methods have optional parameters that can be passed. 
For example: - - client := github.NewClient(nil) - - // list public repositories for org "github" - opt := &github.RepositoryListByOrgOptions{Type: "public"} - repos, _, err := client.Repositories.ListByOrg(ctx, "github", opt) - -The services of a client divide the API into logical chunks and correspond to -the structure of the GitHub API documentation at -https://docs.github.com/rest . - -NOTE: Using the https://godoc.org/context package, one can easily -pass cancelation signals and deadlines to various services of the client for -handling a request. In case there is no context available, then context.Background() -can be used as a starting point. - -For more sample code snippets, head over to the https://github.com/google/go-github/tree/master/example directory. - -# Authentication - -Use Client.WithAuthToken to configure your client to authenticate using an Oauth token -(for example, a personal access token). This is what is needed for a majority of use cases -aside from GitHub Apps. - - client := github.NewClient(nil).WithAuthToken("... your access token ...") - -Note that when using an authenticated Client, all calls made by the client will -include the specified OAuth token. Therefore, authenticated clients should -almost never be shared between different users. - -For API methods that require HTTP Basic Authentication, use the -BasicAuthTransport. - -GitHub Apps authentication can be provided by the -https://github.com/bradleyfalzon/ghinstallation package. -It supports both authentication as an installation, using an installation access token, -and as an app, using a JWT. - -To authenticate as an installation: - - import "github.com/bradleyfalzon/ghinstallation" - - func main() { - // Wrap the shared transport for use with the integration ID 1 authenticating with installation ID 99. - itr, err := ghinstallation.NewKeyFromFile(http.DefaultTransport, 1, 99, "2016-10-19.private-key.pem") - if err != nil { - // Handle error. - } - - // Use installation transport with client - client := github.NewClient(&http.Client{Transport: itr}) - - // Use client... - } - -To authenticate as an app, using a JWT: - - import "github.com/bradleyfalzon/ghinstallation" - - func main() { - // Wrap the shared transport for use with the application ID 1. - atr, err := ghinstallation.NewAppsTransportKeyFromFile(http.DefaultTransport, 1, "2016-10-19.private-key.pem") - if err != nil { - // Handle error. - } - - // Use app transport with client - client := github.NewClient(&http.Client{Transport: atr}) - - // Use client... - } - -# Rate Limiting - -GitHub imposes a rate limit on all API clients. Unauthenticated clients are -limited to 60 requests per hour, while authenticated clients can make up to -5,000 requests per hour. The Search API has a custom rate limit. Unauthenticated -clients are limited to 10 requests per minute, while authenticated clients -can make up to 30 requests per minute. To receive the higher rate limit when -making calls that are not issued on behalf of a user, -use UnauthenticatedRateLimitedTransport. - -The returned Response.Rate value contains the rate limit information -from the most recent API call. If a recent enough response isn't -available, you can use RateLimits to fetch the most up-to-date rate -limit data for the client. - -To detect an API rate limit error, you can check if its type is *github.RateLimitError. 
-For secondary rate limits, you can check if its type is *github.AbuseRateLimitError: - - repos, _, err := client.Repositories.List(ctx, "", nil) - if _, ok := err.(*github.RateLimitError); ok { - log.Println("hit rate limit") - } - if _, ok := err.(*github.AbuseRateLimitError); ok { - log.Println("hit secondary rate limit") - } - -Learn more about GitHub rate limiting at -https://docs.github.com/rest/rate-limit . - -# Accepted Status - -Some endpoints may return a 202 Accepted status code, meaning that the -information required is not yet ready and was scheduled to be gathered on -the GitHub side. Methods known to behave like this are documented specifying -this behavior. - -To detect this condition of error, you can check if its type is -*github.AcceptedError: - - stats, _, err := client.Repositories.ListContributorsStats(ctx, org, repo) - if _, ok := err.(*github.AcceptedError); ok { - log.Println("scheduled on GitHub side") - } - -# Conditional Requests - -The GitHub API has good support for conditional requests which will help -prevent you from burning through your rate limit, as well as help speed up your -application. go-github does not handle conditional requests directly, but is -instead designed to work with a caching http.Transport. We recommend using -https://github.com/gregjones/httpcache for that. - -Learn more about GitHub conditional requests at -https://docs.github.com/rest/overview/resources-in-the-rest-api#conditional-requests. - -# Creating and Updating Resources - -All structs for GitHub resources use pointer values for all non-repeated fields. -This allows distinguishing between unset fields and those set to a zero-value. -Helper functions have been provided to easily create these pointers for string, -bool, and int values. For example: - - // create a new private repository named "foo" - repo := &github.Repository{ - Name: github.String("foo"), - Private: github.Bool(true), - } - client.Repositories.Create(ctx, "", repo) - -Users who have worked with protocol buffers should find this pattern familiar. - -# Pagination - -All requests for resource collections (repos, pull requests, issues, etc.) -support pagination. Pagination options are described in the -github.ListOptions struct and passed to the list methods directly or as an -embedded type of a more specific list options struct (for example -github.PullRequestListOptions). Pages information is available via the -github.Response struct. - - client := github.NewClient(nil) - - opt := &github.RepositoryListByOrgOptions{ - ListOptions: github.ListOptions{PerPage: 10}, - } - // get all pages of results - var allRepos []*github.Repository - for { - repos, resp, err := client.Repositories.ListByOrg(ctx, "github", opt) - if err != nil { - return err - } - allRepos = append(allRepos, repos...) - if resp.NextPage == 0 { - break - } - opt.Page = resp.NextPage - } -*/ -package github diff --git a/vendor/github.com/google/go-github/v57/github/emojis.go b/vendor/github.com/google/go-github/v57/github/emojis.go deleted file mode 100644 index 93ef232f..00000000 --- a/vendor/github.com/google/go-github/v57/github/emojis.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" -) - -// EmojisService provides access to emoji-related functions in the GitHub API. 
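Following the conditional-requests recommendation above, a small sketch of wiring the suggested gregjones/httpcache transport into the client; the in-memory cache choice and the access token are illustrative assumptions rather than anything prescribed by this patch:

    import (
        "github.com/google/go-github/v57/github"
        "github.com/gregjones/httpcache"
    )

    func newCachingClient() *github.Client {
        // Cached responses let repeated calls be revalidated with conditional
        // requests, which helps avoid burning through the rate limit as
        // described above.
        cached := httpcache.NewMemoryCacheTransport().Client()
        return github.NewClient(cached).WithAuthToken("... your access token ...")
    }
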
-type EmojisService service - -// List returns the emojis available to use on GitHub. -// -// GitHub API docs: https://docs.github.com/rest/emojis/emojis#get-emojis -// -//meta:operation GET /emojis -func (s *EmojisService) List(ctx context.Context) (map[string]string, *Response, error) { - req, err := s.client.NewRequest("GET", "emojis", nil) - if err != nil { - return nil, nil, err - } - - var emoji map[string]string - resp, err := s.client.Do(ctx, req, &emoji) - if err != nil { - return nil, resp, err - } - - return emoji, resp, nil -} - -// ListEmojis returns the emojis available to use on GitHub. -// -// Deprecated: Use EmojisService.List instead -func (c *Client) ListEmojis(ctx context.Context) (map[string]string, *Response, error) { - return c.Emojis.List(ctx) -} diff --git a/vendor/github.com/google/go-github/v57/github/enterprise_actions_runners.go b/vendor/github.com/google/go-github/v57/github/enterprise_actions_runners.go deleted file mode 100644 index 4a6e6b52..00000000 --- a/vendor/github.com/google/go-github/v57/github/enterprise_actions_runners.go +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListRunnerApplicationDownloads lists self-hosted runner application binaries that can be downloaded and run. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runners#list-runner-applications-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runners/downloads -func (s *EnterpriseService) ListRunnerApplicationDownloads(ctx context.Context, enterprise string) ([]*RunnerApplicationDownload, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runners/downloads", enterprise) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rads []*RunnerApplicationDownload - resp, err := s.client.Do(ctx, req, &rads) - if err != nil { - return nil, resp, err - } - - return rads, resp, nil -} - -// GenerateEnterpriseJITConfig generates a just-in-time configuration for an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runners#create-configuration-for-a-just-in-time-runner-for-an-enterprise -// -//meta:operation POST /enterprises/{enterprise}/actions/runners/generate-jitconfig -func (s *EnterpriseService) GenerateEnterpriseJITConfig(ctx context.Context, enterprise string, request *GenerateJITConfigRequest) (*JITRunnerConfig, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runners/generate-jitconfig", enterprise) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - jitConfig := new(JITRunnerConfig) - resp, err := s.client.Do(ctx, req, jitConfig) - if err != nil { - return nil, resp, err - } - - return jitConfig, resp, nil -} - -// CreateRegistrationToken creates a token that can be used to add a self-hosted runner. 
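A short sketch of the enterprise self-hosted runner helpers defined in this file (registration token, runner listing, forced removal); the enterprise slug "my-enterprise", the page size and the runner ID are placeholders, and error handling is deliberately minimal:

    import (
        "context"
        "fmt"

        "github.com/google/go-github/v57/github"
    )

    func manageEnterpriseRunners(ctx context.Context, client *github.Client) error {
        // Create a registration token that can be used to add a runner.
        token, _, err := client.Enterprise.CreateRegistrationToken(ctx, "my-enterprise")
        if err != nil {
            return err
        }
        fmt.Println("registration token:", token.GetToken())

        // List currently registered runners, 50 per page.
        runners, _, err := client.Enterprise.ListRunners(ctx, "my-enterprise", &github.ListOptions{PerPage: 50})
        if err != nil {
            return err
        }
        for _, r := range runners.Runners {
            fmt.Printf("%d %s (%s)\n", r.GetID(), r.GetName(), r.GetStatus())
        }

        // Force-remove a runner by ID (placeholder ID).
        _, err = client.Enterprise.RemoveRunner(ctx, "my-enterprise", 1234)
        return err
    }
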
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runners#create-a-registration-token-for-an-enterprise -// -//meta:operation POST /enterprises/{enterprise}/actions/runners/registration-token -func (s *EnterpriseService) CreateRegistrationToken(ctx context.Context, enterprise string) (*RegistrationToken, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runners/registration-token", enterprise) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - registrationToken := new(RegistrationToken) - resp, err := s.client.Do(ctx, req, registrationToken) - if err != nil { - return nil, resp, err - } - - return registrationToken, resp, nil -} - -// ListRunners lists all the self-hosted runners for a enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runners#list-self-hosted-runners-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runners -func (s *EnterpriseService) ListRunners(ctx context.Context, enterprise string, opts *ListOptions) (*Runners, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runners", enterprise) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runners := &Runners{} - resp, err := s.client.Do(ctx, req, &runners) - if err != nil { - return nil, resp, err - } - - return runners, resp, nil -} - -// RemoveRunner forces the removal of a self-hosted runner from an enterprise using the runner id. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runners#delete-a-self-hosted-runner-from-an-enterprise -// -//meta:operation DELETE /enterprises/{enterprise}/actions/runners/{runner_id} -func (s *EnterpriseService) RemoveRunner(ctx context.Context, enterprise string, runnerID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runners/%v", enterprise, runnerID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/enterprise_code_security_and_analysis.go b/vendor/github.com/google/go-github/v57/github/enterprise_code_security_and_analysis.go deleted file mode 100644 index af8eb0ff..00000000 --- a/vendor/github.com/google/go-github/v57/github/enterprise_code_security_and_analysis.go +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// EnterpriseSecurityAnalysisSettings represents security analysis settings for an enterprise. -type EnterpriseSecurityAnalysisSettings struct { - AdvancedSecurityEnabledForNewRepositories *bool `json:"advanced_security_enabled_for_new_repositories,omitempty"` - SecretScanningEnabledForNewRepositories *bool `json:"secret_scanning_enabled_for_new_repositories,omitempty"` - SecretScanningPushProtectionEnabledForNewRepositories *bool `json:"secret_scanning_push_protection_enabled_for_new_repositories,omitempty"` - SecretScanningPushProtectionCustomLink *string `json:"secret_scanning_push_protection_custom_link,omitempty"` -} - -// GetCodeSecurityAndAnalysis gets code security and analysis features for an enterprise. 
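As a forward-looking sketch of the enterprise code security and analysis endpoints defined in the remainder of this file (see the definitions that follow), with "my-enterprise" as a placeholder slug and the security product/enablement strings taken from the documented valid values:

    import (
        "context"

        "github.com/google/go-github/v57/github"
    )

    func tightenEnterpriseSecurity(ctx context.Context, client *github.Client) error {
        // Turn on secret scanning for every existing repository in the enterprise.
        if _, err := client.Enterprise.EnableDisableSecurityFeature(ctx, "my-enterprise", "secret_scanning", "enable_all"); err != nil {
            return err
        }

        // Also enable secret scanning and push protection for new repositories.
        settings := &github.EnterpriseSecurityAnalysisSettings{
            SecretScanningEnabledForNewRepositories:               github.Bool(true),
            SecretScanningPushProtectionEnabledForNewRepositories: github.Bool(true),
        }
        _, err := client.Enterprise.UpdateCodeSecurityAndAnalysis(ctx, "my-enterprise", settings)
        return err
    }
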
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/enterprise-admin/code-security-and-analysis#get-code-security-and-analysis-features-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/code_security_and_analysis -func (s *EnterpriseService) GetCodeSecurityAndAnalysis(ctx context.Context, enterprise string) (*EnterpriseSecurityAnalysisSettings, *Response, error) { - u := fmt.Sprintf("enterprises/%v/code_security_and_analysis", enterprise) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - settings := new(EnterpriseSecurityAnalysisSettings) - resp, err := s.client.Do(ctx, req, settings) - if err != nil { - return nil, resp, err - } - - return settings, resp, nil -} - -// UpdateCodeSecurityAndAnalysis updates code security and analysis features for new repositories in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/enterprise-admin/code-security-and-analysis#update-code-security-and-analysis-features-for-an-enterprise -// -//meta:operation PATCH /enterprises/{enterprise}/code_security_and_analysis -func (s *EnterpriseService) UpdateCodeSecurityAndAnalysis(ctx context.Context, enterprise string, settings *EnterpriseSecurityAnalysisSettings) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/code_security_and_analysis", enterprise) - req, err := s.client.NewRequest("PATCH", u, settings) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// EnableDisableSecurityFeature enables or disables a security feature for all repositories in an enterprise. -// -// Valid values for securityProduct: "advanced_security", "secret_scanning", "secret_scanning_push_protection". -// Valid values for enablement: "enable_all", "disable_all". -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/enterprise-admin/code-security-and-analysis#enable-or-disable-a-security-feature -// -//meta:operation POST /enterprises/{enterprise}/{security_product}/{enablement} -func (s *EnterpriseService) EnableDisableSecurityFeature(ctx context.Context, enterprise, securityProduct, enablement string) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/%v/%v", enterprise, securityProduct, enablement) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/event_types.go b/vendor/github.com/google/go-github/v57/github/event_types.go deleted file mode 100644 index badd29b2..00000000 --- a/vendor/github.com/google/go-github/v57/github/event_types.go +++ /dev/null @@ -1,1795 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// These event types are shared between the Events API and used as Webhook payloads. - -package github - -import "encoding/json" - -// RequestedAction is included in a CheckRunEvent when a user has invoked an action, -// i.e. when the CheckRunEvent's Action field is "requested_action". -type RequestedAction struct { - Identifier string `json:"identifier"` // The integrator reference of the action requested by the user. -} - -// BranchProtectionRuleEvent triggered when a check suite is "created", "edited", or "deleted". 
-// The Webhook event name is "branch_protection_rule". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#branch_protection_rule -type BranchProtectionRuleEvent struct { - Action *string `json:"action,omitempty"` - Rule *BranchProtectionRule `json:"rule,omitempty"` - Changes *ProtectionChanges `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// CheckRunEvent is triggered when a check run is "created", "completed", or "rerequested". -// The Webhook event name is "check_run". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#check_run -type CheckRunEvent struct { - CheckRun *CheckRun `json:"check_run,omitempty"` - // The action performed. Possible values are: "created", "completed", "rerequested" or "requested_action". - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The action requested by the user. Populated when the Action is "requested_action". - RequestedAction *RequestedAction `json:"requested_action,omitempty"` // -} - -// CheckSuiteEvent is triggered when a check suite is "completed", "requested", or "rerequested". -// The Webhook event name is "check_suite". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#check_suite -type CheckSuiteEvent struct { - CheckSuite *CheckSuite `json:"check_suite,omitempty"` - // The action performed. Possible values are: "completed", "requested" or "rerequested". - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// CommitCommentEvent is triggered when a commit comment is created. -// The Webhook event name is "commit_comment". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#commit_comment -type CommitCommentEvent struct { - Comment *RepositoryComment `json:"comment,omitempty"` - - // The following fields are only populated by Webhook events. - Action *string `json:"action,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// ContentReferenceEvent is triggered when the body or comment of an issue or -// pull request includes a URL that matches a configured content reference -// domain. -// The Webhook event name is "content_reference". 
-// -// GitHub API docs: https://developer.github.com/webhooks/event-payloads/#content_reference -type ContentReferenceEvent struct { - Action *string `json:"action,omitempty"` - ContentReference *ContentReference `json:"content_reference,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// CreateEvent represents a created repository, branch, or tag. -// The Webhook event name is "create". -// -// Note: webhooks will not receive this event for created repositories. -// Additionally, webhooks will not receive this event for tags if more -// than three tags are pushed at once. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/github-event-types#createevent -type CreateEvent struct { - Ref *string `json:"ref,omitempty"` - // RefType is the object that was created. Possible values are: "repository", "branch", "tag". - RefType *string `json:"ref_type,omitempty"` - MasterBranch *string `json:"master_branch,omitempty"` - Description *string `json:"description,omitempty"` - PusherType *string `json:"pusher_type,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// DeleteEvent represents a deleted branch or tag. -// The Webhook event name is "delete". -// -// Note: webhooks will not receive this event for tags if more than three tags -// are deleted at once. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/github-event-types#deleteevent -type DeleteEvent struct { - Ref *string `json:"ref,omitempty"` - // RefType is the object that was deleted. Possible values are: "branch", "tag". - RefType *string `json:"ref_type,omitempty"` - - // The following fields are only populated by Webhook events. - PusherType *string `json:"pusher_type,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// DependabotAlertEvent is triggered when there is activity relating to Dependabot alerts. -// The Webhook event name is "dependabot_alert". -// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#dependabot_alert -type DependabotAlertEvent struct { - Action *string `json:"action,omitempty"` - Alert *DependabotAlert `json:"alert,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` - Enterprise *Enterprise `json:"enterprise,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` -} - -// DeployKeyEvent is triggered when a deploy key is added or removed from a repository. -// The Webhook event name is "deploy_key". 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#deploy_key -type DeployKeyEvent struct { - // Action is the action that was performed. Possible values are: - // "created" or "deleted". - Action *string `json:"action,omitempty"` - - // The deploy key resource. - Key *Key `json:"key,omitempty"` - - // The Repository where the event occurred - Repo *Repository `json:"repository,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` - - // The following fields are only populated by Webhook events. - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// DeploymentEvent represents a deployment. -// The Webhook event name is "deployment". -// -// Events of this type are not visible in timelines, they are only used to trigger hooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#deployment -type DeploymentEvent struct { - Deployment *Deployment `json:"deployment,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Workflow *Workflow `json:"workflow,omitempty"` - WorkflowRun *WorkflowRun `json:"workflow_run,omitempty"` - - // The following fields are only populated by Webhook events. - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// DeploymentProtectionRuleEvent represents a deployment protection rule event. -// The Webhook event name is "deployment_protection_rule". -// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#deployment_protection_rule -type DeploymentProtectionRuleEvent struct { - Action *string `json:"action,omitempty"` - Environment *string `json:"environment,omitempty"` - Event *string `json:"event,omitempty"` - - // The URL Github provides for a third-party to use in order to pass/fail a deployment gate - DeploymentCallbackURL *string `json:"deployment_callback_url,omitempty"` - Deployment *Deployment `json:"deployment,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Organization *Organization `json:"organization,omitempty"` - PullRequests []*PullRequest `json:"pull_requests,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// DeploymentStatusEvent represents a deployment status. -// The Webhook event name is "deployment_status". -// -// Events of this type are not visible in timelines, they are only used to trigger hooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#deployment_status -type DeploymentStatusEvent struct { - Deployment *Deployment `json:"deployment,omitempty"` - DeploymentStatus *DeploymentStatus `json:"deployment_status,omitempty"` - Repo *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. 
- Org *Organization `json:"organization,omitempty"` -} - -// DiscussionCommentEvent represents a webhook event for a comment on discussion. -// The Webhook event name is "discussion_comment". -// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#discussion_comment -type DiscussionCommentEvent struct { - // Action is the action that was performed on the comment. - // Possible values are: "created", "edited", "deleted". ** check what all can be added - Action *string `json:"action,omitempty"` - Discussion *Discussion `json:"discussion,omitempty"` - Comment *CommentDiscussion `json:"comment,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// CommentDiscussion represents a comment in a GitHub DiscussionCommentEvent. -type CommentDiscussion struct { - AuthorAssociation *string `json:"author_association,omitempty"` - Body *string `json:"body,omitempty"` - ChildCommentCount *int `json:"child_comment_count,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - DiscussionID *int64 `json:"discussion_id,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - ParentID *int64 `json:"parent_id,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - User *User `json:"user,omitempty"` -} - -// DiscussionEvent represents a webhook event for a discussion. -// The Webhook event name is "discussion". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#discussion -type DiscussionEvent struct { - // Action is the action that was performed. Possible values are: - // created, edited, deleted, pinned, unpinned, locked, unlocked, - // transferred, category_changed, answered, or unanswered. - Action *string `json:"action,omitempty"` - Discussion *Discussion `json:"discussion,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// Discussion represents a discussion in a GitHub DiscussionEvent. -type Discussion struct { - RepositoryURL *string `json:"repository_url,omitempty"` - DiscussionCategory *DiscussionCategory `json:"category,omitempty"` - AnswerHTMLURL *string `json:"answer_html_url,omitempty"` - AnswerChosenAt *Timestamp `json:"answer_chosen_at,omitempty"` - AnswerChosenBy *string `json:"answer_chosen_by,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Number *int `json:"number,omitempty"` - Title *string `json:"title,omitempty"` - User *User `json:"user,omitempty"` - State *string `json:"state,omitempty"` - Locked *bool `json:"locked,omitempty"` - Comments *int `json:"comments,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - AuthorAssociation *string `json:"author_association,omitempty"` - ActiveLockReason *string `json:"active_lock_reason,omitempty"` - Body *string `json:"body,omitempty"` -} - -// DiscussionCategory represents a discussion category in a GitHub DiscussionEvent. 
-type DiscussionCategory struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - Emoji *string `json:"emoji,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Slug *string `json:"slug,omitempty"` - IsAnswerable *bool `json:"is_answerable,omitempty"` -} - -// ForkEvent is triggered when a user forks a repository. -// The Webhook event name is "fork". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#fork -type ForkEvent struct { - // Forkee is the created repository. - Forkee *Repository `json:"forkee,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// GitHubAppAuthorizationEvent is triggered when a user's authorization for a -// GitHub Application is revoked. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#github_app_authorization -type GitHubAppAuthorizationEvent struct { - // The action performed. Possible value is: "revoked". - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// Page represents a single Wiki page. -type Page struct { - PageName *string `json:"page_name,omitempty"` - Title *string `json:"title,omitempty"` - Summary *string `json:"summary,omitempty"` - Action *string `json:"action,omitempty"` - SHA *string `json:"sha,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` -} - -// GollumEvent is triggered when a Wiki page is created or updated. -// The Webhook event name is "gollum". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#gollum -type GollumEvent struct { - Pages []*Page `json:"pages,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// EditChange represents the changes when an issue, pull request, comment, -// or repository has been edited. -type EditChange struct { - Title *EditTitle `json:"title,omitempty"` - Body *EditBody `json:"body,omitempty"` - Base *EditBase `json:"base,omitempty"` - Repo *EditRepo `json:"repository,omitempty"` - Owner *EditOwner `json:"owner,omitempty"` - DefaultBranch *EditDefaultBranch `json:"default_branch,omitempty"` -} - -// EditTitle represents a pull-request title change. -type EditTitle struct { - From *string `json:"from,omitempty"` -} - -// EditBody represents a change of pull-request body. -type EditBody struct { - From *string `json:"from,omitempty"` -} - -// EditBase represents the change of a pull-request base branch. -type EditBase struct { - Ref *EditRef `json:"ref,omitempty"` - SHA *EditSHA `json:"sha,omitempty"` -} - -// EditRef represents a ref change of a pull-request. 
-type EditRef struct { - From *string `json:"from,omitempty"` -} - -// EditRepo represents a change of repository name. -type EditRepo struct { - Name *RepoName `json:"name,omitempty"` -} - -// EditOwner represents a change of repository ownership. -type EditOwner struct { - OwnerInfo *OwnerInfo `json:"from,omitempty"` -} - -// OwnerInfo represents the account info of the owner of the repo (could be User or Organization but both are User structs). -type OwnerInfo struct { - User *User `json:"user,omitempty"` - Org *User `json:"organization,omitempty"` -} - -// RepoName represents a change of repository name. -type RepoName struct { - From *string `json:"from,omitempty"` -} - -// EditSHA represents a sha change of a pull-request. -type EditSHA struct { - From *string `json:"from,omitempty"` -} - -// EditDefaultBranch represents a change of repository's default branch name. -type EditDefaultBranch struct { - From *string `json:"from,omitempty"` -} - -// ProjectChange represents the changes when a project has been edited. -type ProjectChange struct { - Name *ProjectName `json:"name,omitempty"` - Body *ProjectBody `json:"body,omitempty"` -} - -// ProjectName represents a project name change. -type ProjectName struct { - From *string `json:"from,omitempty"` -} - -// ProjectBody represents a project body change. -type ProjectBody struct { - From *string `json:"from,omitempty"` -} - -// ProjectCardChange represents the changes when a project card has been edited. -type ProjectCardChange struct { - Note *ProjectCardNote `json:"note,omitempty"` -} - -// ProjectCardNote represents a change of a note of a project card. -type ProjectCardNote struct { - From *string `json:"from,omitempty"` -} - -// ProjectColumnChange represents the changes when a project column has been edited. -type ProjectColumnChange struct { - Name *ProjectColumnName `json:"name,omitempty"` -} - -// ProjectColumnName represents a project column name change. -type ProjectColumnName struct { - From *string `json:"from,omitempty"` -} - -// TeamChange represents the changes when a team has been edited. -type TeamChange struct { - Description *TeamDescription `json:"description,omitempty"` - Name *TeamName `json:"name,omitempty"` - Privacy *TeamPrivacy `json:"privacy,omitempty"` - Repository *TeamRepository `json:"repository,omitempty"` -} - -// TeamDescription represents a team description change. -type TeamDescription struct { - From *string `json:"from,omitempty"` -} - -// TeamName represents a team name change. -type TeamName struct { - From *string `json:"from,omitempty"` -} - -// TeamPrivacy represents a team privacy change. -type TeamPrivacy struct { - From *string `json:"from,omitempty"` -} - -// TeamRepository represents a team repository permission change. -type TeamRepository struct { - Permissions *TeamPermissions `json:"permissions,omitempty"` -} - -// TeamPermissions represents a team permission change. -type TeamPermissions struct { - From *TeamPermissionsFrom `json:"from,omitempty"` -} - -// TeamPermissionsFrom represents a team permission change. -type TeamPermissionsFrom struct { - Admin *bool `json:"admin,omitempty"` - Pull *bool `json:"pull,omitempty"` - Push *bool `json:"push,omitempty"` -} - -// InstallationEvent is triggered when a GitHub App has been installed, uninstalled, suspend, unsuspended -// or new permissions have been accepted. -// The Webhook event name is "installation". 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#installation -type InstallationEvent struct { - // The action that was performed. Can be either "created", "deleted", "suspend", "unsuspend" or "new_permissions_accepted". - Action *string `json:"action,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Requester *User `json:"requester,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// InstallationRepositoriesEvent is triggered when a repository is added or -// removed from an installation. The Webhook event name is "installation_repositories". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#installation_repositories -type InstallationRepositoriesEvent struct { - // The action that was performed. Can be either "added" or "removed". - Action *string `json:"action,omitempty"` - RepositoriesAdded []*Repository `json:"repositories_added,omitempty"` - RepositoriesRemoved []*Repository `json:"repositories_removed,omitempty"` - RepositorySelection *string `json:"repository_selection,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// InstallationLoginChange represents a change in login on an installation. -type InstallationLoginChange struct { - From *string `json:"from,omitempty"` -} - -// InstallationSlugChange represents a change in slug on an installation. -type InstallationSlugChange struct { - From *string `json:"from,omitempty"` -} - -// InstallationChanges represents a change in slug or login on an installation. -type InstallationChanges struct { - Login *InstallationLoginChange `json:"login,omitempty"` - Slug *InstallationSlugChange `json:"slug,omitempty"` -} - -// InstallationTargetEvent is triggered when there is activity on an installation from a user or organization account. -// The Webhook event name is "installation_target". -// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#installation_target -type InstallationTargetEvent struct { - Account *User `json:"account,omitempty"` - Action *string `json:"action,omitempty"` - Changes *InstallationChanges `json:"changes,omitempty"` - Enterprise *Enterprise `json:"enterprise,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - TargetType *string `json:"target_type,omitempty"` -} - -// IssueCommentEvent is triggered when an issue comment is created on an issue -// or pull request. -// The Webhook event name is "issue_comment". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#issue_comment -type IssueCommentEvent struct { - // Action is the action that was performed on the comment. - // Possible values are: "created", "edited", "deleted". 
- Action *string `json:"action,omitempty"` - Issue *Issue `json:"issue,omitempty"` - Comment *IssueComment `json:"comment,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` -} - -// IssuesEvent is triggered when an issue is opened, edited, deleted, transferred, -// pinned, unpinned, closed, reopened, assigned, unassigned, labeled, unlabeled, -// locked, unlocked, milestoned, or demilestoned. -// The Webhook event name is "issues". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#issues -type IssuesEvent struct { - // Action is the action that was performed. Possible values are: "opened", - // "edited", "deleted", "transferred", "pinned", "unpinned", "closed", "reopened", - // "assigned", "unassigned", "labeled", "unlabeled", "locked", "unlocked", - // "milestoned", or "demilestoned". - Action *string `json:"action,omitempty"` - Issue *Issue `json:"issue,omitempty"` - Assignee *User `json:"assignee,omitempty"` - Label *Label `json:"label,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// LabelEvent is triggered when a repository's label is created, edited, or deleted. -// The Webhook event name is "label" -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#label -type LabelEvent struct { - // Action is the action that was performed. Possible values are: - // "created", "edited", "deleted" - Action *string `json:"action,omitempty"` - Label *Label `json:"label,omitempty"` - Changes *EditChange `json:"changes,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// MarketplacePurchaseEvent is triggered when a user purchases, cancels, or changes -// their GitHub Marketplace plan. -// Webhook event name "marketplace_purchase". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#marketplace_purchase -type MarketplacePurchaseEvent struct { - // Action is the action that was performed. Possible values are: - // "purchased", "cancelled", "pending_change", "pending_change_cancelled", "changed". - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. 
- EffectiveDate *Timestamp `json:"effective_date,omitempty"` - MarketplacePurchase *MarketplacePurchase `json:"marketplace_purchase,omitempty"` - PreviousMarketplacePurchase *MarketplacePurchase `json:"previous_marketplace_purchase,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// MemberEvent is triggered when a user is added as a collaborator to a repository. -// The Webhook event name is "member". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#member -type MemberEvent struct { - // Action is the action that was performed. Possible value is: "added". - Action *string `json:"action,omitempty"` - Member *User `json:"member,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// MembershipEvent is triggered when a user is added or removed from a team. -// The Webhook event name is "membership". -// -// Events of this type are not visible in timelines, they are only used to -// trigger organization webhooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#membership -type MembershipEvent struct { - // Action is the action that was performed. Possible values are: "added", "removed". - Action *string `json:"action,omitempty"` - // Scope is the scope of the membership. Possible value is: "team". - Scope *string `json:"scope,omitempty"` - Member *User `json:"member,omitempty"` - Team *Team `json:"team,omitempty"` - - // The following fields are only populated by Webhook events. - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// MergeGroup represents the merge group in a merge queue. -type MergeGroup struct { - // The SHA of the merge group. - HeadSHA *string `json:"head_sha,omitempty"` - // The full ref of the merge group. - HeadRef *string `json:"head_ref,omitempty"` - // The SHA of the merge group's parent commit. - BaseSHA *string `json:"base_sha,omitempty"` - // The full ref of the branch the merge group will be merged into. - BaseRef *string `json:"base_ref,omitempty"` - // An expanded representation of the head_sha commit. - HeadCommit *Commit `json:"head_commit,omitempty"` -} - -// MergeGroupEvent represents activity related to merge groups in a merge queue. The type of activity is specified -// in the action property of the payload object. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#merge_group -type MergeGroupEvent struct { - // The action that was performed. Currently, can only be checks_requested. - Action *string `json:"action,omitempty"` - // The merge group. - MergeGroup *MergeGroup `json:"merge_group,omitempty"` - - // The following fields are only populated by Webhook events. 
- Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// MetaEvent is triggered when the webhook that this event is configured on is deleted. -// This event will only listen for changes to the particular hook the event is installed on. -// Therefore, it must be selected for each hook that you'd like to receive meta events for. -// The Webhook event name is "meta". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#meta -type MetaEvent struct { - // Action is the action that was performed. Possible value is: "deleted". - Action *string `json:"action,omitempty"` - // The ID of the modified webhook. - HookID *int64 `json:"hook_id,omitempty"` - // The modified webhook. - // This will contain different keys based on the type of webhook it is: repository, - // organization, business, app, or GitHub Marketplace. - Hook *Hook `json:"hook,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// MilestoneEvent is triggered when a milestone is created, closed, opened, edited, or deleted. -// The Webhook event name is "milestone". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#milestone -type MilestoneEvent struct { - // Action is the action that was performed. Possible values are: - // "created", "closed", "opened", "edited", "deleted" - Action *string `json:"action,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Org *Organization `json:"organization,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// OrganizationEvent is triggered when an organization is deleted and renamed, and when a user is added, -// removed, or invited to an organization. -// Events of this type are not visible in timelines. These events are only used to trigger organization hooks. -// Webhook event name is "organization". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#organization -type OrganizationEvent struct { - // Action is the action that was performed. - // Possible values are: "deleted", "renamed", "member_added", "member_removed", or "member_invited". - Action *string `json:"action,omitempty"` - - // Invitation is the invitation for the user or email if the action is "member_invited". - Invitation *Invitation `json:"invitation,omitempty"` - - // Membership is the membership between the user and the organization. - // Not present when the action is "member_invited". - Membership *Membership `json:"membership,omitempty"` - - Organization *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// OrgBlockEvent is triggered when an organization blocks or unblocks a user. -// The Webhook event name is "org_block". 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#org_block -type OrgBlockEvent struct { - // Action is the action that was performed. - // Can be "blocked" or "unblocked". - Action *string `json:"action,omitempty"` - BlockedUser *User `json:"blocked_user,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` -} - -// PackageEvent represents activity related to GitHub Packages. -// The Webhook event name is "package". -// -// This event is triggered when a GitHub Package is published or updated. -// -// GitHub API docs: https://developer.github.com/webhooks/event-payloads/#package -type PackageEvent struct { - // Action is the action that was performed. - // Can be "published" or "updated". - Action *string `json:"action,omitempty"` - Package *Package `json:"package,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` -} - -// PageBuildEvent represents an attempted build of a GitHub Pages site, whether -// successful or not. -// The Webhook event name is "page_build". -// -// This event is triggered on push to a GitHub Pages enabled branch (gh-pages -// for project pages, master for user and organization pages). -// -// Events of this type are not visible in timelines, they are only used to trigger hooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#page_build -type PageBuildEvent struct { - Build *PagesBuild `json:"build,omitempty"` - - // The following fields are only populated by Webhook events. - ID *int64 `json:"id,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// PersonalAccessTokenRequestEvent occurs when there is activity relating to a -// request for a fine-grained personal access token to access resources that -// belong to a resource owner that requires approval for token access. -// The webhook event name is "personal_access_token_request". -// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#personal_access_token_request -type PersonalAccessTokenRequestEvent struct { - // Action is the action that was performed. Possible values are: - // "approved", "cancelled", "created" or "denied" - Action *string `json:"action,omitempty"` - PersonalAccessTokenRequest *PersonalAccessTokenRequest `json:"personal_access_token_request,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// PersonalAccessTokenRequest contains the details of a PersonalAccessTokenRequestEvent. -type PersonalAccessTokenRequest struct { - // Unique identifier of the request for access via fine-grained personal - // access token. Used as the pat_request_id parameter in the list and review - // API calls. 
- ID *int64 `json:"id,omitempty"` - Owner *User `json:"owner,omitempty"` - - // New requested permissions, categorized by type of permission. - PermissionsAdded *PersonalAccessTokenPermissions `json:"permissions_added,omitempty"` - - // Requested permissions that elevate access for a previously approved - // request for access, categorized by type of permission. - PermissionsUpgraded *PersonalAccessTokenPermissions `json:"permissions_upgraded,omitempty"` - - // Permissions requested, categorized by type of permission. - // This field incorporates permissions_added and permissions_upgraded. - PermissionsResult *PersonalAccessTokenPermissions `json:"permissions_result,omitempty"` - - // Type of repository selection requested. Possible values are: - // "none", "all" or "subset" - RepositorySelection *string `json:"repository_selection,omitempty"` - - // The number of repositories the token is requesting access to. - // This field is only populated when repository_selection is subset. - RepositoryCount *int64 `json:"repository_count,omitempty"` - - // An array of repository objects the token is requesting access to. - // This field is only populated when repository_selection is subset. - Repositories []*Repository `json:"repositories,omitempty"` - - // Date and time when the request for access was created. - CreatedAt *Timestamp `json:"created_at,omitempty"` - - // Whether the associated fine-grained personal access token has expired. - TokenExpired *bool `json:"token_expired,omitempty"` - - // Date and time when the associated fine-grained personal access token expires. - TokenExpiresAt *Timestamp `json:"token_expires_at,omitempty"` - - // Date and time when the associated fine-grained personal access token was last used for authentication. - TokenLastUsedAt *Timestamp `json:"token_last_used_at,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// PersonalAccessTokenPermissions represents the original or newly requested -// scope of permissions for a fine-grained personal access token within a PersonalAccessTokenRequest. -type PersonalAccessTokenPermissions struct { - Org map[string]string `json:"organization,omitempty"` - Repo map[string]string `json:"repository,omitempty"` - Other map[string]string `json:"other,omitempty"` -} - -// PingEvent is triggered when a Webhook is added to GitHub. -// -// GitHub API docs: https://developer.github.com/webhooks/#ping-event -type PingEvent struct { - // Random string of GitHub zen. - Zen *string `json:"zen,omitempty"` - // The ID of the webhook that triggered the ping. - HookID *int64 `json:"hook_id,omitempty"` - // The webhook configuration. - Hook *Hook `json:"hook,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// ProjectEvent is triggered when project is created, modified or deleted. -// The webhook event name is "project". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#project -type ProjectEvent struct { - Action *string `json:"action,omitempty"` - Changes *ProjectChange `json:"changes,omitempty"` - Project *Project `json:"project,omitempty"` - - // The following fields are only populated by Webhook events. 
- Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// ProjectCardEvent is triggered when a project card is created, updated, moved, converted to an issue, or deleted. -// The webhook event name is "project_card". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#project_card -type ProjectCardEvent struct { - Action *string `json:"action,omitempty"` - Changes *ProjectCardChange `json:"changes,omitempty"` - AfterID *int64 `json:"after_id,omitempty"` - ProjectCard *ProjectCard `json:"project_card,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// ProjectColumnEvent is triggered when a project column is created, updated, moved, or deleted. -// The webhook event name is "project_column". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#project_column -type ProjectColumnEvent struct { - Action *string `json:"action,omitempty"` - Changes *ProjectColumnChange `json:"changes,omitempty"` - AfterID *int64 `json:"after_id,omitempty"` - ProjectColumn *ProjectColumn `json:"project_column,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// ProjectV2Event is triggered when there is activity relating to an organization-level project. -// The Webhook event name is "projects_v2". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#projects_v2 -type ProjectV2Event struct { - Action *string `json:"action,omitempty"` - ProjectsV2 *ProjectsV2 `json:"projects_v2,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// ProjectsV2 represents a projects v2 project. -type ProjectsV2 struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Owner *User `json:"owner,omitempty"` - Creator *User `json:"creator,omitempty"` - Title *string `json:"title,omitempty"` - Description *string `json:"description,omitempty"` - Public *bool `json:"public,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - DeletedAt *Timestamp `json:"deleted_at,omitempty"` - Number *int `json:"number,omitempty"` - ShortDescription *string `json:"short_description,omitempty"` - DeletedBy *User `json:"deleted_by,omitempty"` -} - -// ProjectV2ItemEvent is triggered when there is activity relating to an item on an organization-level project. -// The Webhook event name is "projects_v2_item". 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#projects_v2_item -type ProjectV2ItemEvent struct { - Action *string `json:"action,omitempty"` - Changes *ProjectV2ItemChange `json:"changes,omitempty"` - ProjectV2Item *ProjectV2Item `json:"projects_v2_item,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// ProjectV2ItemChange represents a project v2 item change. -type ProjectV2ItemChange struct { - ArchivedAt *ArchivedAt `json:"archived_at,omitempty"` -} - -// ArchivedAt represents an archiving date change. -type ArchivedAt struct { - From *Timestamp `json:"from,omitempty"` - To *Timestamp `json:"to,omitempty"` -} - -// ProjectV2Item represents an item belonging to a project. -type ProjectV2Item struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - ProjectNodeID *string `json:"project_node_id,omitempty"` - ContentNodeID *string `json:"content_node_id,omitempty"` - ContentType *string `json:"content_type,omitempty"` - Creator *User `json:"creator,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ArchivedAt *Timestamp `json:"archived_at,omitempty"` -} - -// PublicEvent is triggered when a private repository is open sourced. -// According to GitHub: "Without a doubt: the best GitHub event." -// The Webhook event name is "public". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#public -type PublicEvent struct { - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// PullRequestEvent is triggered when a pull request is assigned, unassigned, labeled, -// unlabeled, opened, edited, closed, reopened, synchronize, ready_for_review, -// locked, unlocked, a pull request review is requested, or a review request is removed. -// The Webhook event name is "pull_request". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/github-event-types#pullrequestevent -type PullRequestEvent struct { - // Action is the action that was performed. Possible values are: - // "assigned", "unassigned", "review_requested", "review_request_removed", "labeled", "unlabeled", - // "opened", "edited", "closed", "ready_for_review", "locked", "unlocked", or "reopened". - // If the action is "closed" and the "merged" key is "false", the pull request was closed with unmerged commits. - // If the action is "closed" and the "merged" key is "true", the pull request was merged. - // While webhooks are also triggered when a pull request is synchronized, Events API timelines - // don't include pull request events with the "synchronize" action. - Action *string `json:"action,omitempty"` - Assignee *User `json:"assignee,omitempty"` - Number *int `json:"number,omitempty"` - PullRequest *PullRequest `json:"pull_request,omitempty"` - - // The following fields are only populated by Webhook events. 
- Changes *EditChange `json:"changes,omitempty"` - // RequestedReviewer is populated in "review_requested", "review_request_removed" event deliveries. - // A request affecting multiple reviewers at once is split into multiple - // such event deliveries, each with a single, different RequestedReviewer. - RequestedReviewer *User `json:"requested_reviewer,omitempty"` - // In the event that a team is requested instead of a user, "requested_team" gets sent in place of - // "requested_user" with the same delivery behavior. - RequestedTeam *Team `json:"requested_team,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Label *Label `json:"label,omitempty"` // Populated in "labeled" event deliveries. - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` - - // The following fields are only populated when the Action is "synchronize". - Before *string `json:"before,omitempty"` - After *string `json:"after,omitempty"` - - // The following will be populated if the event was performed by an App - PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` -} - -// PullRequestReviewEvent is triggered when a review is submitted on a pull -// request. -// The Webhook event name is "pull_request_review". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#pull_request_review -type PullRequestReviewEvent struct { - // Action is always "submitted". - Action *string `json:"action,omitempty"` - Review *PullRequestReview `json:"review,omitempty"` - PullRequest *PullRequest `json:"pull_request,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` -} - -// PullRequestReviewCommentEvent is triggered when a comment is created on a -// portion of the unified diff of a pull request. -// The Webhook event name is "pull_request_review_comment". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#pull_request_review_comment -type PullRequestReviewCommentEvent struct { - // Action is the action that was performed on the comment. - // Possible values are: "created", "edited", "deleted". - Action *string `json:"action,omitempty"` - PullRequest *PullRequest `json:"pull_request,omitempty"` - Comment *PullRequestComment `json:"comment,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// PullRequestReviewThreadEvent is triggered when a comment made as part of a -// review of a pull request is marked resolved or unresolved. -// The Webhook event name is "pull_request_review_thread". 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#pull_request_review_thread -type PullRequestReviewThreadEvent struct { - // Action is the action that was performed on the comment. - // Possible values are: "resolved", "unresolved". - Action *string `json:"action,omitempty"` - Thread *PullRequestThread `json:"thread,omitempty"` - PullRequest *PullRequest `json:"pull_request,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// PullRequestTargetEvent is triggered when a pull request is assigned, unassigned, labeled, -// unlabeled, opened, edited, closed, reopened, synchronize, ready_for_review, -// locked, unlocked, a pull request review is requested, or a review request is removed. -// The Webhook event name is "pull_request_target". -// -// GitHub API docs: https://docs.github.com/actions/events-that-trigger-workflows#pull_request_target -type PullRequestTargetEvent struct { - // Action is the action that was performed. Possible values are: - // "assigned", "unassigned", "labeled", "unlabeled", "opened", "edited", "closed", "reopened", - // "ready_for_review", "locked", "unlocked", "review_requested" or "review_request_removed". - // If the action is "closed" and the "merged" key is "false", the pull request was closed with unmerged commits. - // If the action is "closed" and the "merged" key is "true", the pull request was merged. - // While webhooks are also triggered when a pull request is synchronized, Events API timelines - // don't include pull request events with the "synchronize" action. - Action *string `json:"action,omitempty"` - Assignee *User `json:"assignee,omitempty"` - Number *int `json:"number,omitempty"` - PullRequest *PullRequest `json:"pull_request,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - // RequestedReviewer is populated in "review_requested", "review_request_removed" event deliveries. - // A request affecting multiple reviewers at once is split into multiple - // such event deliveries, each with a single, different RequestedReviewer. - RequestedReviewer *User `json:"requested_reviewer,omitempty"` - // In the event that a team is requested instead of a user, "requested_team" gets sent in place of - // "requested_user" with the same delivery behavior. - RequestedTeam *Team `json:"requested_team,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Label *Label `json:"label,omitempty"` // Populated in "labeled" event deliveries. - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` - - // The following fields are only populated when the Action is "synchronize". 
- Before *string `json:"before,omitempty"` - After *string `json:"after,omitempty"` - - // The following will be populated if the event was performed by an App - PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` -} - -// PushEvent represents a git push to a GitHub repository. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#push -type PushEvent struct { - PushID *int64 `json:"push_id,omitempty"` - Head *string `json:"head,omitempty"` - Ref *string `json:"ref,omitempty"` - Size *int `json:"size,omitempty"` - Commits []*HeadCommit `json:"commits,omitempty"` - Before *string `json:"before,omitempty"` - DistinctSize *int `json:"distinct_size,omitempty"` - - // The following fields are only populated by Webhook events. - Action *string `json:"action,omitempty"` - After *string `json:"after,omitempty"` - Created *bool `json:"created,omitempty"` - Deleted *bool `json:"deleted,omitempty"` - Forced *bool `json:"forced,omitempty"` - BaseRef *string `json:"base_ref,omitempty"` - Compare *string `json:"compare,omitempty"` - Repo *PushEventRepository `json:"repository,omitempty"` - HeadCommit *HeadCommit `json:"head_commit,omitempty"` - Pusher *CommitAuthor `json:"pusher,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Organization *Organization `json:"organization,omitempty"` -} - -func (p PushEvent) String() string { - return Stringify(p) -} - -// HeadCommit represents a git commit in a GitHub PushEvent. -type HeadCommit struct { - Message *string `json:"message,omitempty"` - Author *CommitAuthor `json:"author,omitempty"` - URL *string `json:"url,omitempty"` - Distinct *bool `json:"distinct,omitempty"` - - // The following fields are only populated by Events API. - SHA *string `json:"sha,omitempty"` - - // The following fields are only populated by Webhook events. - ID *string `json:"id,omitempty"` - TreeID *string `json:"tree_id,omitempty"` - Timestamp *Timestamp `json:"timestamp,omitempty"` - Committer *CommitAuthor `json:"committer,omitempty"` - Added []string `json:"added,omitempty"` - Removed []string `json:"removed,omitempty"` - Modified []string `json:"modified,omitempty"` -} - -func (h HeadCommit) String() string { - return Stringify(h) -} - -// PushEventRepository represents the repo object in a PushEvent payload. 
-type PushEventRepository struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - FullName *string `json:"full_name,omitempty"` - Owner *User `json:"owner,omitempty"` - Private *bool `json:"private,omitempty"` - Description *string `json:"description,omitempty"` - Fork *bool `json:"fork,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - PushedAt *Timestamp `json:"pushed_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Homepage *string `json:"homepage,omitempty"` - PullsURL *string `json:"pulls_url,omitempty"` - Size *int `json:"size,omitempty"` - StargazersCount *int `json:"stargazers_count,omitempty"` - WatchersCount *int `json:"watchers_count,omitempty"` - Language *string `json:"language,omitempty"` - HasIssues *bool `json:"has_issues,omitempty"` - HasDownloads *bool `json:"has_downloads,omitempty"` - HasWiki *bool `json:"has_wiki,omitempty"` - HasPages *bool `json:"has_pages,omitempty"` - ForksCount *int `json:"forks_count,omitempty"` - Archived *bool `json:"archived,omitempty"` - Disabled *bool `json:"disabled,omitempty"` - OpenIssuesCount *int `json:"open_issues_count,omitempty"` - DefaultBranch *string `json:"default_branch,omitempty"` - MasterBranch *string `json:"master_branch,omitempty"` - Organization *string `json:"organization,omitempty"` - URL *string `json:"url,omitempty"` - ArchiveURL *string `json:"archive_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - StatusesURL *string `json:"statuses_url,omitempty"` - GitURL *string `json:"git_url,omitempty"` - SSHURL *string `json:"ssh_url,omitempty"` - CloneURL *string `json:"clone_url,omitempty"` - SVNURL *string `json:"svn_url,omitempty"` - Topics []string `json:"topics,omitempty"` -} - -// PushEventRepoOwner is a basic representation of user/org in a PushEvent payload. -type PushEventRepoOwner struct { - Name *string `json:"name,omitempty"` - Email *string `json:"email,omitempty"` -} - -// ReleaseEvent is triggered when a release is published, unpublished, created, -// edited, deleted, or prereleased. -// The Webhook event name is "release". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#release -type ReleaseEvent struct { - // Action is the action that was performed. Possible values are: "published", "unpublished", - // "created", "edited", "deleted", or "prereleased". - Action *string `json:"action,omitempty"` - Release *RepositoryRelease `json:"release,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// RepositoryEvent is triggered when a repository is created, archived, unarchived, -// renamed, edited, transferred, made public, or made private. Organization hooks are -// also trigerred when a repository is deleted. -// The Webhook event name is "repository". -// -// Events of this type are not visible in timelines, they are only used to -// trigger organization webhooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#repository -type RepositoryEvent struct { - // Action is the action that was performed. 
Possible values are: "created", - // "deleted" (organization hooks only), "archived", "unarchived", "edited", "renamed", - // "transferred", "publicized", or "privatized". - Action *string `json:"action,omitempty"` - Repo *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Changes *EditChange `json:"changes,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// RepositoryDispatchEvent is triggered when a client sends a POST request to the repository dispatch event endpoint. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#repository_dispatch -type RepositoryDispatchEvent struct { - // Action is the event_type that submitted with the repository dispatch payload. Value can be any string. - Action *string `json:"action,omitempty"` - Branch *string `json:"branch,omitempty"` - ClientPayload json.RawMessage `json:"client_payload,omitempty"` - Repo *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// RepositoryImportEvent represents the activity related to a repository being imported to GitHub. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#repository_import -type RepositoryImportEvent struct { - // Status represents the final state of the import. This can be one of "success", "cancelled", or "failure". - Status *string `json:"status,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// RepositoryVulnerabilityAlertEvent is triggered when a security alert is created, dismissed, or resolved. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#repository_vulnerability_alert -type RepositoryVulnerabilityAlertEvent struct { - // Action is the action that was performed. Possible values are: "create", "dismiss", "resolve". - Action *string `json:"action,omitempty"` - - // The security alert of the vulnerable dependency. - Alert *RepositoryVulnerabilityAlert `json:"alert,omitempty"` - - // The repository of the vulnerable dependency. - Repository *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` - - // The user that triggered the event. - Sender *User `json:"sender,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// RepositoryVulnerabilityAlert represents a repository security alert. 
-type RepositoryVulnerabilityAlert struct { - ID *int64 `json:"id,omitempty"` - AffectedRange *string `json:"affected_range,omitempty"` - AffectedPackageName *string `json:"affected_package_name,omitempty"` - ExternalReference *string `json:"external_reference,omitempty"` - ExternalIdentifier *string `json:"external_identifier,omitempty"` - GitHubSecurityAdvisoryID *string `json:"ghsa_id,omitempty"` - Severity *string `json:"severity,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - FixedIn *string `json:"fixed_in,omitempty"` - Dismisser *User `json:"dismisser,omitempty"` - DismissReason *string `json:"dismiss_reason,omitempty"` - DismissedAt *Timestamp `json:"dismissed_at,omitempty"` -} - -// SecretScanningAlertEvent is triggered when a secret scanning alert occurs in a repository. -// The Webhook name is secret_scanning_alert. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#secret_scanning_alert -type SecretScanningAlertEvent struct { - // Action is the action that was performed. Possible values are: "created", "resolved", or "reopened". - Action *string `json:"action,omitempty"` - - // Alert is the secret scanning alert involved in the event. - Alert *SecretScanningAlert `json:"alert,omitempty"` - - // Only populated by the "resolved" and "reopen" actions - Sender *User `json:"sender,omitempty"` - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Enterprise *Enterprise `json:"enterprise,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// SecurityAndAnalysisEvent is triggered when code security and analysis features -// are enabled or disabled for a repository. -// -// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#security_and_analysis -type SecurityAndAnalysisEvent struct { - Changes *SecurityAndAnalysisChange `json:"changes,omitempty"` - Enterprise *Enterprise `json:"enterprise,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// SecurityAndAnalysisChange represents the changes when security and analysis -// features are enabled or disabled for a repository. -type SecurityAndAnalysisChange struct { - From *SecurityAndAnalysisChangeFrom `json:"from,omitempty"` -} - -// SecurityAndAnalysisChangeFrom represents which change was made when security -// and analysis features are enabled or disabled for a repository. -type SecurityAndAnalysisChangeFrom struct { - SecurityAndAnalysis *SecurityAndAnalysis `json:"security_and_analysis,omitempty"` -} - -// StarEvent is triggered when a star is added or removed from a repository. -// The Webhook event name is "star". -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#star -type StarEvent struct { - // Action is the action that was performed. Possible values are: "created" or "deleted". - Action *string `json:"action,omitempty"` - - // StarredAt is the time the star was created. It will be null for the "deleted" action. - StarredAt *Timestamp `json:"starred_at,omitempty"` - - // The following fields are only populated by Webhook events. 
- Org *Organization `json:"organization,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// StatusEvent is triggered when the status of a Git commit changes. -// The Webhook event name is "status". -// -// Events of this type are not visible in timelines, they are only used to -// trigger hooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#status -type StatusEvent struct { - SHA *string `json:"sha,omitempty"` - // State is the new state. Possible values are: "pending", "success", "failure", "error". - State *string `json:"state,omitempty"` - Description *string `json:"description,omitempty"` - TargetURL *string `json:"target_url,omitempty"` - Branches []*Branch `json:"branches,omitempty"` - - // The following fields are only populated by Webhook events. - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Context *string `json:"context,omitempty"` - Commit *RepositoryCommit `json:"commit,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// TeamEvent is triggered when an organization's team is created, modified or deleted. -// The Webhook event name is "team". -// -// Events of this type are not visible in timelines. These events are only used -// to trigger hooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#team -type TeamEvent struct { - Action *string `json:"action,omitempty"` - Team *Team `json:"team,omitempty"` - Changes *TeamChange `json:"changes,omitempty"` - Repo *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// TeamAddEvent is triggered when a repository is added to a team. -// The Webhook event name is "team_add". -// -// Events of this type are not visible in timelines. These events are only used -// to trigger hooks. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#team_add -type TeamAddEvent struct { - Team *Team `json:"team,omitempty"` - Repo *Repository `json:"repository,omitempty"` - - // The following fields are only populated by Webhook events. - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// UserEvent is triggered when a user is created or deleted. -// The Webhook event name is "user". -// -// Only global webhooks can subscribe to this event type. -// -// GitHub API docs: https://developer.github.com/enterprise/v3/activity/events/types/#userevent-enterprise -type UserEvent struct { - User *User `json:"user,omitempty"` - // The action performed. Possible values are: "created" or "deleted". 
- Action *string `json:"action,omitempty"` - Enterprise *Enterprise `json:"enterprise,omitempty"` - Sender *User `json:"sender,omitempty"` - - // The following fields are only populated by Webhook events. - Installation *Installation `json:"installation,omitempty"` -} - -// WatchEvent is related to starring a repository, not watching. See this API -// blog post for an explanation: https://developer.github.com/changes/2012-09-05-watcher-api/ -// -// The event’s actor is the user who starred a repository, and the event’s -// repository is the repository that was starred. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#watch -type WatchEvent struct { - // Action is the action that was performed. Possible value is: "started". - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` - - // The following field is only present when the webhook is triggered on - // a repository belonging to an organization. - Org *Organization `json:"organization,omitempty"` -} - -// WorkflowDispatchEvent is triggered when someone triggers a workflow run on GitHub or -// sends a POST request to the create a workflow dispatch event endpoint. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch -type WorkflowDispatchEvent struct { - Inputs json.RawMessage `json:"inputs,omitempty"` - Ref *string `json:"ref,omitempty"` - Workflow *string `json:"workflow,omitempty"` - - // The following fields are only populated by Webhook events. - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// WorkflowJobEvent is triggered when a job is queued, started or completed. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_job -type WorkflowJobEvent struct { - WorkflowJob *WorkflowJob `json:"workflow_job,omitempty"` - - Action *string `json:"action,omitempty"` - - // The following fields are only populated by Webhook events. - - // Org is not nil when the webhook is configured for an organization or the event - // occurs from activity in a repository owned by an organization. - Org *Organization `json:"organization,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// WorkflowRunEvent is triggered when a GitHub Actions workflow run is requested or completed. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads#workflow_run -type WorkflowRunEvent struct { - Action *string `json:"action,omitempty"` - Workflow *Workflow `json:"workflow,omitempty"` - WorkflowRun *WorkflowRun `json:"workflow_run,omitempty"` - - // The following fields are only populated by Webhook events. - Org *Organization `json:"organization,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` - Installation *Installation `json:"installation,omitempty"` -} - -// SecurityAdvisory represents the advisory object in SecurityAdvisoryEvent payload. 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#security_advisory -type SecurityAdvisory struct { - CVSS *AdvisoryCVSS `json:"cvss,omitempty"` - CWEs []*AdvisoryCWEs `json:"cwes,omitempty"` - GHSAID *string `json:"ghsa_id,omitempty"` - Summary *string `json:"summary,omitempty"` - Description *string `json:"description,omitempty"` - Severity *string `json:"severity,omitempty"` - Identifiers []*AdvisoryIdentifier `json:"identifiers,omitempty"` - References []*AdvisoryReference `json:"references,omitempty"` - PublishedAt *Timestamp `json:"published_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - WithdrawnAt *Timestamp `json:"withdrawn_at,omitempty"` - Vulnerabilities []*AdvisoryVulnerability `json:"vulnerabilities,omitempty"` - CVEID *string `json:"cve_id,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Author *User `json:"author,omitempty"` - Publisher *User `json:"publisher,omitempty"` - State *string `json:"state,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - Submission *SecurityAdvisorySubmission `json:"submission,omitempty"` - CWEIDs []string `json:"cwe_ids,omitempty"` - Credits []*RepoAdvisoryCredit `json:"credits,omitempty"` - CreditsDetailed []*RepoAdvisoryCreditDetailed `json:"credits_detailed,omitempty"` - CollaboratingUsers []*User `json:"collaborating_users,omitempty"` - CollaboratingTeams []*Team `json:"collaborating_teams,omitempty"` - PrivateFork *Repository `json:"private_fork,omitempty"` -} - -// AdvisoryIdentifier represents the identifier for a Security Advisory. -type AdvisoryIdentifier struct { - Value *string `json:"value,omitempty"` - Type *string `json:"type,omitempty"` -} - -// AdvisoryReference represents the reference url for the security advisory. -type AdvisoryReference struct { - URL *string `json:"url,omitempty"` -} - -// AdvisoryVulnerability represents the vulnerability object for a Security Advisory. -type AdvisoryVulnerability struct { - Package *VulnerabilityPackage `json:"package,omitempty"` - Severity *string `json:"severity,omitempty"` - VulnerableVersionRange *string `json:"vulnerable_version_range,omitempty"` - FirstPatchedVersion *FirstPatchedVersion `json:"first_patched_version,omitempty"` - - // PatchedVersions and VulnerableFunctions are used in the following APIs: - // - https://docs.github.com/rest/security-advisories/repository-advisories#list-repository-security-advisories-for-an-organization - // - https://docs.github.com/rest/security-advisories/repository-advisories#list-repository-security-advisories - PatchedVersions *string `json:"patched_versions,omitempty"` - VulnerableFunctions []string `json:"vulnerable_functions,omitempty"` -} - -// VulnerabilityPackage represents the package object for an Advisory Vulnerability. -type VulnerabilityPackage struct { - Ecosystem *string `json:"ecosystem,omitempty"` - Name *string `json:"name,omitempty"` -} - -// FirstPatchedVersion represents the identifier for the first patched version of that vulnerability. -type FirstPatchedVersion struct { - Identifier *string `json:"identifier,omitempty"` -} - -// SecurityAdvisoryEvent is triggered when a security-related vulnerability is found in software on GitHub. 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#security_advisory -type SecurityAdvisoryEvent struct { - Action *string `json:"action,omitempty"` - SecurityAdvisory *SecurityAdvisory `json:"security_advisory,omitempty"` - - // The following fields are only populated by Webhook events. - Enterprise *Enterprise `json:"enterprise,omitempty"` - Installation *Installation `json:"installation,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Sender *User `json:"sender,omitempty"` -} - -// CodeScanningAlertEvent is triggered when a code scanning finds a potential vulnerability or error in your code. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#code_scanning_alert -type CodeScanningAlertEvent struct { - Action *string `json:"action,omitempty"` - Alert *Alert `json:"alert,omitempty"` - Ref *string `json:"ref,omitempty"` - // CommitOID is the commit SHA of the code scanning alert - CommitOID *string `json:"commit_oid,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Org *Organization `json:"organization,omitempty"` - Sender *User `json:"sender,omitempty"` - - Installation *Installation `json:"installation,omitempty"` -} diff --git a/vendor/github.com/google/go-github/v57/github/gists.go b/vendor/github.com/google/go-github/v57/github/gists.go deleted file mode 100644 index 08180c6d..00000000 --- a/vendor/github.com/google/go-github/v57/github/gists.go +++ /dev/null @@ -1,397 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "time" -) - -// GistsService handles communication with the Gist related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/gists -type GistsService service - -// Gist represents a GitHub's gist. -type Gist struct { - ID *string `json:"id,omitempty"` - Description *string `json:"description,omitempty"` - Public *bool `json:"public,omitempty"` - Owner *User `json:"owner,omitempty"` - Files map[GistFilename]GistFile `json:"files,omitempty"` - Comments *int `json:"comments,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - GitPullURL *string `json:"git_pull_url,omitempty"` - GitPushURL *string `json:"git_push_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (g Gist) String() string { - return Stringify(g) -} - -// GistFilename represents filename on a gist. -type GistFilename string - -// GistFile represents a file on a gist. -type GistFile struct { - Size *int `json:"size,omitempty"` - Filename *string `json:"filename,omitempty"` - Language *string `json:"language,omitempty"` - Type *string `json:"type,omitempty"` - RawURL *string `json:"raw_url,omitempty"` - Content *string `json:"content,omitempty"` -} - -func (g GistFile) String() string { - return Stringify(g) -} - -// GistCommit represents a commit on a gist. 
-type GistCommit struct { - URL *string `json:"url,omitempty"` - Version *string `json:"version,omitempty"` - User *User `json:"user,omitempty"` - ChangeStatus *CommitStats `json:"change_status,omitempty"` - CommittedAt *Timestamp `json:"committed_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (gc GistCommit) String() string { - return Stringify(gc) -} - -// GistFork represents a fork of a gist. -type GistFork struct { - URL *string `json:"url,omitempty"` - User *User `json:"user,omitempty"` - ID *string `json:"id,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (gf GistFork) String() string { - return Stringify(gf) -} - -// GistListOptions specifies the optional parameters to the -// GistsService.List, GistsService.ListAll, and GistsService.ListStarred methods. -type GistListOptions struct { - // Since filters Gists by time. - Since time.Time `url:"since,omitempty"` - - ListOptions -} - -// List gists for a user. Passing the empty string will list -// all public gists if called anonymously. However, if the call -// is authenticated, it will returns all gists for the authenticated -// user. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-gists-for-a-user -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-gists-for-the-authenticated-user -// -//meta:operation GET /gists -//meta:operation GET /users/{username}/gists -func (s *GistsService) List(ctx context.Context, user string, opts *GistListOptions) ([]*Gist, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/gists", user) - } else { - u = "gists" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var gists []*Gist - resp, err := s.client.Do(ctx, req, &gists) - if err != nil { - return nil, resp, err - } - - return gists, resp, nil -} - -// ListAll lists all public gists. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-public-gists -// -//meta:operation GET /gists/public -func (s *GistsService) ListAll(ctx context.Context, opts *GistListOptions) ([]*Gist, *Response, error) { - u, err := addOptions("gists/public", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var gists []*Gist - resp, err := s.client.Do(ctx, req, &gists) - if err != nil { - return nil, resp, err - } - - return gists, resp, nil -} - -// ListStarred lists starred gists of authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-starred-gists -// -//meta:operation GET /gists/starred -func (s *GistsService) ListStarred(ctx context.Context, opts *GistListOptions) ([]*Gist, *Response, error) { - u, err := addOptions("gists/starred", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var gists []*Gist - resp, err := s.client.Do(ctx, req, &gists) - if err != nil { - return nil, resp, err - } - - return gists, resp, nil -} - -// Get a single gist. 
-// -// GitHub API docs: https://docs.github.com/rest/gists/gists#get-a-gist -// -//meta:operation GET /gists/{gist_id} -func (s *GistsService) Get(ctx context.Context, id string) (*Gist, *Response, error) { - u := fmt.Sprintf("gists/%v", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - gist := new(Gist) - resp, err := s.client.Do(ctx, req, gist) - if err != nil { - return nil, resp, err - } - - return gist, resp, nil -} - -// GetRevision gets a specific revision of a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#get-a-gist-revision -// -//meta:operation GET /gists/{gist_id}/{sha} -func (s *GistsService) GetRevision(ctx context.Context, id, sha string) (*Gist, *Response, error) { - u := fmt.Sprintf("gists/%v/%v", id, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - gist := new(Gist) - resp, err := s.client.Do(ctx, req, gist) - if err != nil { - return nil, resp, err - } - - return gist, resp, nil -} - -// Create a gist for authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#create-a-gist -// -//meta:operation POST /gists -func (s *GistsService) Create(ctx context.Context, gist *Gist) (*Gist, *Response, error) { - u := "gists" - req, err := s.client.NewRequest("POST", u, gist) - if err != nil { - return nil, nil, err - } - - g := new(Gist) - resp, err := s.client.Do(ctx, req, g) - if err != nil { - return nil, resp, err - } - - return g, resp, nil -} - -// Edit a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#update-a-gist -// -//meta:operation PATCH /gists/{gist_id} -func (s *GistsService) Edit(ctx context.Context, id string, gist *Gist) (*Gist, *Response, error) { - u := fmt.Sprintf("gists/%v", id) - req, err := s.client.NewRequest("PATCH", u, gist) - if err != nil { - return nil, nil, err - } - - g := new(Gist) - resp, err := s.client.Do(ctx, req, g) - if err != nil { - return nil, resp, err - } - - return g, resp, nil -} - -// ListCommits lists commits of a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-gist-commits -// -//meta:operation GET /gists/{gist_id}/commits -func (s *GistsService) ListCommits(ctx context.Context, id string, opts *ListOptions) ([]*GistCommit, *Response, error) { - u := fmt.Sprintf("gists/%v/commits", id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var gistCommits []*GistCommit - resp, err := s.client.Do(ctx, req, &gistCommits) - if err != nil { - return nil, resp, err - } - - return gistCommits, resp, nil -} - -// Delete a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#delete-a-gist -// -//meta:operation DELETE /gists/{gist_id} -func (s *GistsService) Delete(ctx context.Context, id string) (*Response, error) { - u := fmt.Sprintf("gists/%v", id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Star a gist on behalf of authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/gists/gists#star-a-gist -// -//meta:operation PUT /gists/{gist_id}/star -func (s *GistsService) Star(ctx context.Context, id string) (*Response, error) { - u := fmt.Sprintf("gists/%v/star", id) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unstar a gist on a behalf of authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#unstar-a-gist -// -//meta:operation DELETE /gists/{gist_id}/star -func (s *GistsService) Unstar(ctx context.Context, id string) (*Response, error) { - u := fmt.Sprintf("gists/%v/star", id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// IsStarred checks if a gist is starred by authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#check-if-a-gist-is-starred -// -//meta:operation GET /gists/{gist_id}/star -func (s *GistsService) IsStarred(ctx context.Context, id string) (bool, *Response, error) { - u := fmt.Sprintf("gists/%v/star", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - starred, err := parseBoolResponse(err) - return starred, resp, err -} - -// Fork a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#fork-a-gist -// -//meta:operation POST /gists/{gist_id}/forks -func (s *GistsService) Fork(ctx context.Context, id string) (*Gist, *Response, error) { - u := fmt.Sprintf("gists/%v/forks", id) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - g := new(Gist) - resp, err := s.client.Do(ctx, req, g) - if err != nil { - return nil, resp, err - } - - return g, resp, nil -} - -// ListForks lists forks of a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/gists#list-gist-forks -// -//meta:operation GET /gists/{gist_id}/forks -func (s *GistsService) ListForks(ctx context.Context, id string, opts *ListOptions) ([]*GistFork, *Response, error) { - u := fmt.Sprintf("gists/%v/forks", id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var gistForks []*GistFork - resp, err := s.client.Do(ctx, req, &gistForks) - if err != nil { - return nil, resp, err - } - - return gistForks, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/git_commits.go b/vendor/github.com/google/go-github/v57/github/git_commits.go deleted file mode 100644 index 573d38be..00000000 --- a/vendor/github.com/google/go-github/v57/github/git_commits.go +++ /dev/null @@ -1,225 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" - "errors" - "fmt" - "io" - "strings" -) - -// SignatureVerification represents GPG signature verification. -type SignatureVerification struct { - Verified *bool `json:"verified,omitempty"` - Reason *string `json:"reason,omitempty"` - Signature *string `json:"signature,omitempty"` - Payload *string `json:"payload,omitempty"` -} - -// MessageSigner is used by GitService.CreateCommit to sign a commit. 
-// -// To create a MessageSigner that signs a commit with a [golang.org/x/crypto/openpgp.Entity], -// or [github.com/ProtonMail/go-crypto/openpgp.Entity], use: -// -// commit.Signer = github.MessageSignerFunc(func(w io.Writer, r io.Reader) error { -// return openpgp.ArmoredDetachSign(w, openpgpEntity, r, nil) -// }) -type MessageSigner interface { - Sign(w io.Writer, r io.Reader) error -} - -// MessageSignerFunc is a single function implementation of MessageSigner. -type MessageSignerFunc func(w io.Writer, r io.Reader) error - -func (f MessageSignerFunc) Sign(w io.Writer, r io.Reader) error { - return f(w, r) -} - -// Commit represents a GitHub commit. -type Commit struct { - SHA *string `json:"sha,omitempty"` - Author *CommitAuthor `json:"author,omitempty"` - Committer *CommitAuthor `json:"committer,omitempty"` - Message *string `json:"message,omitempty"` - Tree *Tree `json:"tree,omitempty"` - Parents []*Commit `json:"parents,omitempty"` - Stats *CommitStats `json:"stats,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - URL *string `json:"url,omitempty"` - Verification *SignatureVerification `json:"verification,omitempty"` - NodeID *string `json:"node_id,omitempty"` - - // CommentCount is the number of GitHub comments on the commit. This - // is only populated for requests that fetch GitHub data like - // Pulls.ListCommits, Repositories.ListCommits, etc. - CommentCount *int `json:"comment_count,omitempty"` -} - -func (c Commit) String() string { - return Stringify(c) -} - -// CommitAuthor represents the author or committer of a commit. The commit -// author may not correspond to a GitHub User. -type CommitAuthor struct { - Date *Timestamp `json:"date,omitempty"` - Name *string `json:"name,omitempty"` - Email *string `json:"email,omitempty"` - - // The following fields are only populated by Webhook events. - Login *string `json:"username,omitempty"` // Renamed for go-github consistency. -} - -func (c CommitAuthor) String() string { - return Stringify(c) -} - -// GetCommit fetches the Commit object for a given SHA. -// -// GitHub API docs: https://docs.github.com/rest/git/commits#get-a-commit-object -// -//meta:operation GET /repos/{owner}/{repo}/git/commits/{commit_sha} -func (s *GitService) GetCommit(ctx context.Context, owner string, repo string, sha string) (*Commit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/commits/%v", owner, repo, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - c := new(Commit) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// createCommit represents the body of a CreateCommit request. -type createCommit struct { - Author *CommitAuthor `json:"author,omitempty"` - Committer *CommitAuthor `json:"committer,omitempty"` - Message *string `json:"message,omitempty"` - Tree *string `json:"tree,omitempty"` - Parents []string `json:"parents,omitempty"` - Signature *string `json:"signature,omitempty"` -} - -type CreateCommitOptions struct { - // CreateCommit will sign the commit with this signer. See MessageSigner doc for more details. - // Ignored on commits where Verification.Signature is defined. - Signer MessageSigner -} - -// CreateCommit creates a new commit in a repository. -// commit must not be nil. -// -// The commit.Committer is optional and will be filled with the commit.Author -// data if omitted. If the commit.Author is omitted, it will be filled in with -// the authenticated user’s information and the current date. 
-// -// GitHub API docs: https://docs.github.com/rest/git/commits#create-a-commit -// -//meta:operation POST /repos/{owner}/{repo}/git/commits -func (s *GitService) CreateCommit(ctx context.Context, owner string, repo string, commit *Commit, opts *CreateCommitOptions) (*Commit, *Response, error) { - if commit == nil { - return nil, nil, fmt.Errorf("commit must be provided") - } - if opts == nil { - opts = &CreateCommitOptions{} - } - - u := fmt.Sprintf("repos/%v/%v/git/commits", owner, repo) - - parents := make([]string, len(commit.Parents)) - for i, parent := range commit.Parents { - parents[i] = *parent.SHA - } - - body := &createCommit{ - Author: commit.Author, - Committer: commit.Committer, - Message: commit.Message, - Parents: parents, - } - if commit.Tree != nil { - body.Tree = commit.Tree.SHA - } - switch { - case commit.Verification != nil: - body.Signature = commit.Verification.Signature - case opts.Signer != nil: - signature, err := createSignature(opts.Signer, body) - if err != nil { - return nil, nil, err - } - body.Signature = &signature - } - - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - c := new(Commit) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -func createSignature(signer MessageSigner, commit *createCommit) (string, error) { - if signer == nil { - return "", errors.New("createSignature: invalid parameters") - } - - message, err := createSignatureMessage(commit) - if err != nil { - return "", err - } - - var writer bytes.Buffer - err = signer.Sign(&writer, strings.NewReader(message)) - if err != nil { - return "", err - } - - return writer.String(), nil -} - -func createSignatureMessage(commit *createCommit) (string, error) { - if commit == nil || commit.Message == nil || *commit.Message == "" || commit.Author == nil { - return "", errors.New("createSignatureMessage: invalid parameters") - } - - var message []string - - if commit.Tree != nil { - message = append(message, fmt.Sprintf("tree %s", *commit.Tree)) - } - - for _, parent := range commit.Parents { - message = append(message, fmt.Sprintf("parent %s", parent)) - } - - message = append(message, fmt.Sprintf("author %s <%s> %d %s", commit.Author.GetName(), commit.Author.GetEmail(), commit.Author.GetDate().Unix(), commit.Author.GetDate().Format("-0700"))) - - committer := commit.Committer - if committer == nil { - committer = commit.Author - } - - // There needs to be a double newline after committer - message = append(message, fmt.Sprintf("committer %s <%s> %d %s\n", committer.GetName(), committer.GetEmail(), committer.GetDate().Unix(), committer.GetDate().Format("-0700"))) - message = append(message, *commit.Message) - - return strings.Join(message, "\n"), nil -} diff --git a/vendor/github.com/google/go-github/v57/github/git_refs.go b/vendor/github.com/google/go-github/v57/github/git_refs.go deleted file mode 100644 index ad7b10d7..00000000 --- a/vendor/github.com/google/go-github/v57/github/git_refs.go +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/url" - "strings" -) - -// Reference represents a GitHub reference. 
-type Reference struct { - Ref *string `json:"ref"` - URL *string `json:"url"` - Object *GitObject `json:"object"` - NodeID *string `json:"node_id,omitempty"` -} - -func (r Reference) String() string { - return Stringify(r) -} - -// GitObject represents a Git object. -type GitObject struct { - Type *string `json:"type"` - SHA *string `json:"sha"` - URL *string `json:"url"` -} - -func (o GitObject) String() string { - return Stringify(o) -} - -// createRefRequest represents the payload for creating a reference. -type createRefRequest struct { - Ref *string `json:"ref"` - SHA *string `json:"sha"` -} - -// updateRefRequest represents the payload for updating a reference. -type updateRefRequest struct { - SHA *string `json:"sha"` - Force *bool `json:"force"` -} - -// GetRef fetches a single reference in a repository. -// -// GitHub API docs: https://docs.github.com/rest/git/refs#get-a-reference -// -//meta:operation GET /repos/{owner}/{repo}/git/ref/{ref} -func (s *GitService) GetRef(ctx context.Context, owner string, repo string, ref string) (*Reference, *Response, error) { - ref = strings.TrimPrefix(ref, "refs/") - u := fmt.Sprintf("repos/%v/%v/git/ref/%v", owner, repo, refURLEscape(ref)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - r := new(Reference) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// refURLEscape escapes every path segment of the given ref. Those must -// not contain escaped "/" - as "%2F" - or github will not recognize it. -func refURLEscape(ref string) string { - parts := strings.Split(ref, "/") - for i, s := range parts { - parts[i] = url.PathEscape(s) - } - return strings.Join(parts, "/") -} - -// ReferenceListOptions specifies optional parameters to the -// GitService.ListMatchingRefs method. -type ReferenceListOptions struct { - Ref string `url:"-"` - - ListOptions -} - -// ListMatchingRefs lists references in a repository that match a supplied ref. -// Use an empty ref to list all references. -// -// GitHub API docs: https://docs.github.com/rest/git/refs#list-matching-references -// -//meta:operation GET /repos/{owner}/{repo}/git/matching-refs/{ref} -func (s *GitService) ListMatchingRefs(ctx context.Context, owner, repo string, opts *ReferenceListOptions) ([]*Reference, *Response, error) { - var ref string - if opts != nil { - ref = strings.TrimPrefix(opts.Ref, "refs/") - } - u := fmt.Sprintf("repos/%v/%v/git/matching-refs/%v", owner, repo, refURLEscape(ref)) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rs []*Reference - resp, err := s.client.Do(ctx, req, &rs) - if err != nil { - return nil, resp, err - } - - return rs, resp, nil -} - -// CreateRef creates a new ref in a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/git/refs#create-a-reference -// -//meta:operation POST /repos/{owner}/{repo}/git/refs -func (s *GitService) CreateRef(ctx context.Context, owner string, repo string, ref *Reference) (*Reference, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/refs", owner, repo) - req, err := s.client.NewRequest("POST", u, &createRefRequest{ - // back-compat with previous behavior that didn't require 'refs/' prefix - Ref: String("refs/" + strings.TrimPrefix(*ref.Ref, "refs/")), - SHA: ref.Object.SHA, - }) - if err != nil { - return nil, nil, err - } - - r := new(Reference) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// UpdateRef updates an existing ref in a repository. -// -// GitHub API docs: https://docs.github.com/rest/git/refs#update-a-reference -// -//meta:operation PATCH /repos/{owner}/{repo}/git/refs/{ref} -func (s *GitService) UpdateRef(ctx context.Context, owner string, repo string, ref *Reference, force bool) (*Reference, *Response, error) { - refPath := strings.TrimPrefix(*ref.Ref, "refs/") - u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, refURLEscape(refPath)) - req, err := s.client.NewRequest("PATCH", u, &updateRefRequest{ - SHA: ref.Object.SHA, - Force: &force, - }) - if err != nil { - return nil, nil, err - } - - r := new(Reference) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// DeleteRef deletes a ref from a repository. -// -// GitHub API docs: https://docs.github.com/rest/git/refs#delete-a-reference -// -//meta:operation DELETE /repos/{owner}/{repo}/git/refs/{ref} -func (s *GitService) DeleteRef(ctx context.Context, owner string, repo string, ref string) (*Response, error) { - ref = strings.TrimPrefix(ref, "refs/") - u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, refURLEscape(ref)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/github-accessors.go b/vendor/github.com/google/go-github/v57/github/github-accessors.go deleted file mode 100644 index e15eb102..00000000 --- a/vendor/github.com/google/go-github/v57/github/github-accessors.go +++ /dev/null @@ -1,25375 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by gen-accessors; DO NOT EDIT. -// Instead, please run "go generate ./..." as described here: -// https://github.com/google/go-github/blob/master/CONTRIBUTING.md#submitting-a-patch - -package github - -import ( - "encoding/json" - "time" -) - -// GetRetryAfter returns the RetryAfter field if it's non-nil, zero value otherwise. -func (a *AbuseRateLimitError) GetRetryAfter() time.Duration { - if a == nil || a.RetryAfter == nil { - return 0 - } - return *a.RetryAfter -} - -// GetGithubOwnedAllowed returns the GithubOwnedAllowed field if it's non-nil, zero value otherwise. -func (a *ActionsAllowed) GetGithubOwnedAllowed() bool { - if a == nil || a.GithubOwnedAllowed == nil { - return false - } - return *a.GithubOwnedAllowed -} - -// GetVerifiedAllowed returns the VerifiedAllowed field if it's non-nil, zero value otherwise. 
-func (a *ActionsAllowed) GetVerifiedAllowed() bool { - if a == nil || a.VerifiedAllowed == nil { - return false - } - return *a.VerifiedAllowed -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetKey() string { - if a == nil || a.Key == nil { - return "" - } - return *a.Key -} - -// GetLastAccessedAt returns the LastAccessedAt field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetLastAccessedAt() Timestamp { - if a == nil || a.LastAccessedAt == nil { - return Timestamp{} - } - return *a.LastAccessedAt -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetRef() string { - if a == nil || a.Ref == nil { - return "" - } - return *a.Ref -} - -// GetSizeInBytes returns the SizeInBytes field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetSizeInBytes() int64 { - if a == nil || a.SizeInBytes == nil { - return 0 - } - return *a.SizeInBytes -} - -// GetVersion returns the Version field if it's non-nil, zero value otherwise. -func (a *ActionsCache) GetVersion() string { - if a == nil || a.Version == nil { - return "" - } - return *a.Version -} - -// GetDirection returns the Direction field if it's non-nil, zero value otherwise. -func (a *ActionsCacheListOptions) GetDirection() string { - if a == nil || a.Direction == nil { - return "" - } - return *a.Direction -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (a *ActionsCacheListOptions) GetKey() string { - if a == nil || a.Key == nil { - return "" - } - return *a.Key -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (a *ActionsCacheListOptions) GetRef() string { - if a == nil || a.Ref == nil { - return "" - } - return *a.Ref -} - -// GetSort returns the Sort field if it's non-nil, zero value otherwise. -func (a *ActionsCacheListOptions) GetSort() string { - if a == nil || a.Sort == nil { - return "" - } - return *a.Sort -} - -// GetAllowedActions returns the AllowedActions field if it's non-nil, zero value otherwise. -func (a *ActionsPermissions) GetAllowedActions() string { - if a == nil || a.AllowedActions == nil { - return "" - } - return *a.AllowedActions -} - -// GetEnabledRepositories returns the EnabledRepositories field if it's non-nil, zero value otherwise. -func (a *ActionsPermissions) GetEnabledRepositories() string { - if a == nil || a.EnabledRepositories == nil { - return "" - } - return *a.EnabledRepositories -} - -// GetSelectedActionsURL returns the SelectedActionsURL field if it's non-nil, zero value otherwise. -func (a *ActionsPermissions) GetSelectedActionsURL() string { - if a == nil || a.SelectedActionsURL == nil { - return "" - } - return *a.SelectedActionsURL -} - -// GetAllowedActions returns the AllowedActions field if it's non-nil, zero value otherwise. -func (a *ActionsPermissionsEnterprise) GetAllowedActions() string { - if a == nil || a.AllowedActions == nil { - return "" - } - return *a.AllowedActions -} - -// GetEnabledOrganizations returns the EnabledOrganizations field if it's non-nil, zero value otherwise. 
-func (a *ActionsPermissionsEnterprise) GetEnabledOrganizations() string { - if a == nil || a.EnabledOrganizations == nil { - return "" - } - return *a.EnabledOrganizations -} - -// GetSelectedActionsURL returns the SelectedActionsURL field if it's non-nil, zero value otherwise. -func (a *ActionsPermissionsEnterprise) GetSelectedActionsURL() string { - if a == nil || a.SelectedActionsURL == nil { - return "" - } - return *a.SelectedActionsURL -} - -// GetAllowedActions returns the AllowedActions field if it's non-nil, zero value otherwise. -func (a *ActionsPermissionsRepository) GetAllowedActions() string { - if a == nil || a.AllowedActions == nil { - return "" - } - return *a.AllowedActions -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (a *ActionsPermissionsRepository) GetEnabled() bool { - if a == nil || a.Enabled == nil { - return false - } - return *a.Enabled -} - -// GetSelectedActionsURL returns the SelectedActionsURL field if it's non-nil, zero value otherwise. -func (a *ActionsPermissionsRepository) GetSelectedActionsURL() string { - if a == nil || a.SelectedActionsURL == nil { - return "" - } - return *a.SelectedActionsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *ActionsVariable) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetSelectedRepositoriesURL returns the SelectedRepositoriesURL field if it's non-nil, zero value otherwise. -func (a *ActionsVariable) GetSelectedRepositoriesURL() string { - if a == nil || a.SelectedRepositoriesURL == nil { - return "" - } - return *a.SelectedRepositoriesURL -} - -// GetSelectedRepositoryIDs returns the SelectedRepositoryIDs field. -func (a *ActionsVariable) GetSelectedRepositoryIDs() *SelectedRepoIDs { - if a == nil { - return nil - } - return a.SelectedRepositoryIDs -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (a *ActionsVariable) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (a *ActionsVariable) GetVisibility() string { - if a == nil || a.Visibility == nil { - return "" - } - return *a.Visibility -} - -// GetCountryCode returns the CountryCode field if it's non-nil, zero value otherwise. -func (a *ActorLocation) GetCountryCode() string { - if a == nil || a.CountryCode == nil { - return "" - } - return *a.CountryCode -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (a *AdminEnforcedChanges) GetFrom() bool { - if a == nil || a.From == nil { - return false - } - return *a.From -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (a *AdminEnforcement) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetComments returns the Comments field. -func (a *AdminStats) GetComments() *CommentStats { - if a == nil { - return nil - } - return a.Comments -} - -// GetGists returns the Gists field. -func (a *AdminStats) GetGists() *GistStats { - if a == nil { - return nil - } - return a.Gists -} - -// GetHooks returns the Hooks field. -func (a *AdminStats) GetHooks() *HookStats { - if a == nil { - return nil - } - return a.Hooks -} - -// GetIssues returns the Issues field. 
-func (a *AdminStats) GetIssues() *IssueStats { - if a == nil { - return nil - } - return a.Issues -} - -// GetMilestones returns the Milestones field. -func (a *AdminStats) GetMilestones() *MilestoneStats { - if a == nil { - return nil - } - return a.Milestones -} - -// GetOrgs returns the Orgs field. -func (a *AdminStats) GetOrgs() *OrgStats { - if a == nil { - return nil - } - return a.Orgs -} - -// GetPages returns the Pages field. -func (a *AdminStats) GetPages() *PageStats { - if a == nil { - return nil - } - return a.Pages -} - -// GetPulls returns the Pulls field. -func (a *AdminStats) GetPulls() *PullStats { - if a == nil { - return nil - } - return a.Pulls -} - -// GetRepos returns the Repos field. -func (a *AdminStats) GetRepos() *RepoStats { - if a == nil { - return nil - } - return a.Repos -} - -// GetUsers returns the Users field. -func (a *AdminStats) GetUsers() *UserStats { - if a == nil { - return nil - } - return a.Users -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (a *AdvancedSecurity) GetStatus() string { - if a == nil || a.Status == nil { - return "" - } - return *a.Status -} - -// GetLastPushedDate returns the LastPushedDate field if it's non-nil, zero value otherwise. -func (a *AdvancedSecurityCommittersBreakdown) GetLastPushedDate() string { - if a == nil || a.LastPushedDate == nil { - return "" - } - return *a.LastPushedDate -} - -// GetUserLogin returns the UserLogin field if it's non-nil, zero value otherwise. -func (a *AdvancedSecurityCommittersBreakdown) GetUserLogin() string { - if a == nil || a.UserLogin == nil { - return "" - } - return *a.UserLogin -} - -// GetScore returns the Score field. -func (a *AdvisoryCVSS) GetScore() *float64 { - if a == nil { - return nil - } - return a.Score -} - -// GetVectorString returns the VectorString field if it's non-nil, zero value otherwise. -func (a *AdvisoryCVSS) GetVectorString() string { - if a == nil || a.VectorString == nil { - return "" - } - return *a.VectorString -} - -// GetCWEID returns the CWEID field if it's non-nil, zero value otherwise. -func (a *AdvisoryCWEs) GetCWEID() string { - if a == nil || a.CWEID == nil { - return "" - } - return *a.CWEID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *AdvisoryCWEs) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (a *AdvisoryIdentifier) GetType() string { - if a == nil || a.Type == nil { - return "" - } - return *a.Type -} - -// GetValue returns the Value field if it's non-nil, zero value otherwise. -func (a *AdvisoryIdentifier) GetValue() string { - if a == nil || a.Value == nil { - return "" - } - return *a.Value -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (a *AdvisoryReference) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetFirstPatchedVersion returns the FirstPatchedVersion field. -func (a *AdvisoryVulnerability) GetFirstPatchedVersion() *FirstPatchedVersion { - if a == nil { - return nil - } - return a.FirstPatchedVersion -} - -// GetPackage returns the Package field. -func (a *AdvisoryVulnerability) GetPackage() *VulnerabilityPackage { - if a == nil { - return nil - } - return a.Package -} - -// GetPatchedVersions returns the PatchedVersions field if it's non-nil, zero value otherwise. 
-func (a *AdvisoryVulnerability) GetPatchedVersions() string { - if a == nil || a.PatchedVersions == nil { - return "" - } - return *a.PatchedVersions -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (a *AdvisoryVulnerability) GetSeverity() string { - if a == nil || a.Severity == nil { - return "" - } - return *a.Severity -} - -// GetVulnerableVersionRange returns the VulnerableVersionRange field if it's non-nil, zero value otherwise. -func (a *AdvisoryVulnerability) GetVulnerableVersionRange() string { - if a == nil || a.VulnerableVersionRange == nil { - return "" - } - return *a.VulnerableVersionRange -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (a *Alert) GetClosedAt() Timestamp { - if a == nil || a.ClosedAt == nil { - return Timestamp{} - } - return *a.ClosedAt -} - -// GetClosedBy returns the ClosedBy field. -func (a *Alert) GetClosedBy() *User { - if a == nil { - return nil - } - return a.ClosedBy -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *Alert) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetDismissedAt returns the DismissedAt field if it's non-nil, zero value otherwise. -func (a *Alert) GetDismissedAt() Timestamp { - if a == nil || a.DismissedAt == nil { - return Timestamp{} - } - return *a.DismissedAt -} - -// GetDismissedBy returns the DismissedBy field. -func (a *Alert) GetDismissedBy() *User { - if a == nil { - return nil - } - return a.DismissedBy -} - -// GetDismissedComment returns the DismissedComment field if it's non-nil, zero value otherwise. -func (a *Alert) GetDismissedComment() string { - if a == nil || a.DismissedComment == nil { - return "" - } - return *a.DismissedComment -} - -// GetDismissedReason returns the DismissedReason field if it's non-nil, zero value otherwise. -func (a *Alert) GetDismissedReason() string { - if a == nil || a.DismissedReason == nil { - return "" - } - return *a.DismissedReason -} - -// GetFixedAt returns the FixedAt field if it's non-nil, zero value otherwise. -func (a *Alert) GetFixedAt() Timestamp { - if a == nil || a.FixedAt == nil { - return Timestamp{} - } - return *a.FixedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (a *Alert) GetHTMLURL() string { - if a == nil || a.HTMLURL == nil { - return "" - } - return *a.HTMLURL -} - -// GetInstancesURL returns the InstancesURL field if it's non-nil, zero value otherwise. -func (a *Alert) GetInstancesURL() string { - if a == nil || a.InstancesURL == nil { - return "" - } - return *a.InstancesURL -} - -// GetMostRecentInstance returns the MostRecentInstance field. -func (a *Alert) GetMostRecentInstance() *MostRecentInstance { - if a == nil { - return nil - } - return a.MostRecentInstance -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (a *Alert) GetNumber() int { - if a == nil || a.Number == nil { - return 0 - } - return *a.Number -} - -// GetRepository returns the Repository field. -func (a *Alert) GetRepository() *Repository { - if a == nil { - return nil - } - return a.Repository -} - -// GetRule returns the Rule field. -func (a *Alert) GetRule() *Rule { - if a == nil { - return nil - } - return a.Rule -} - -// GetRuleDescription returns the RuleDescription field if it's non-nil, zero value otherwise. 
-func (a *Alert) GetRuleDescription() string { - if a == nil || a.RuleDescription == nil { - return "" - } - return *a.RuleDescription -} - -// GetRuleID returns the RuleID field if it's non-nil, zero value otherwise. -func (a *Alert) GetRuleID() string { - if a == nil || a.RuleID == nil { - return "" - } - return *a.RuleID -} - -// GetRuleSeverity returns the RuleSeverity field if it's non-nil, zero value otherwise. -func (a *Alert) GetRuleSeverity() string { - if a == nil || a.RuleSeverity == nil { - return "" - } - return *a.RuleSeverity -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (a *Alert) GetState() string { - if a == nil || a.State == nil { - return "" - } - return *a.State -} - -// GetTool returns the Tool field. -func (a *Alert) GetTool() *Tool { - if a == nil { - return nil - } - return a.Tool -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (a *Alert) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (a *Alert) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (a *AllowDeletionsEnforcementLevelChanges) GetFrom() string { - if a == nil || a.From == nil { - return "" - } - return *a.From -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (a *AllowForkSyncing) GetEnabled() bool { - if a == nil || a.Enabled == nil { - return false - } - return *a.Enabled -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (a *AnalysesListOptions) GetRef() string { - if a == nil || a.Ref == nil { - return "" - } - return *a.Ref -} - -// GetSarifID returns the SarifID field if it's non-nil, zero value otherwise. -func (a *AnalysesListOptions) GetSarifID() string { - if a == nil || a.SarifID == nil { - return "" - } - return *a.SarifID -} - -// GetSSHKeyFingerprints returns the SSHKeyFingerprints map if it's non-nil, an empty map otherwise. -func (a *APIMeta) GetSSHKeyFingerprints() map[string]string { - if a == nil || a.SSHKeyFingerprints == nil { - return map[string]string{} - } - return a.SSHKeyFingerprints -} - -// GetVerifiablePasswordAuthentication returns the VerifiablePasswordAuthentication field if it's non-nil, zero value otherwise. -func (a *APIMeta) GetVerifiablePasswordAuthentication() bool { - if a == nil || a.VerifiablePasswordAuthentication == nil { - return false - } - return *a.VerifiablePasswordAuthentication -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *App) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (a *App) GetDescription() string { - if a == nil || a.Description == nil { - return "" - } - return *a.Description -} - -// GetExternalURL returns the ExternalURL field if it's non-nil, zero value otherwise. -func (a *App) GetExternalURL() string { - if a == nil || a.ExternalURL == nil { - return "" - } - return *a.ExternalURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. 
-func (a *App) GetHTMLURL() string { - if a == nil || a.HTMLURL == nil { - return "" - } - return *a.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *App) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetInstallationsCount returns the InstallationsCount field if it's non-nil, zero value otherwise. -func (a *App) GetInstallationsCount() int { - if a == nil || a.InstallationsCount == nil { - return 0 - } - return *a.InstallationsCount -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *App) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (a *App) GetNodeID() string { - if a == nil || a.NodeID == nil { - return "" - } - return *a.NodeID -} - -// GetOwner returns the Owner field. -func (a *App) GetOwner() *User { - if a == nil { - return nil - } - return a.Owner -} - -// GetPermissions returns the Permissions field. -func (a *App) GetPermissions() *InstallationPermissions { - if a == nil { - return nil - } - return a.Permissions -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (a *App) GetSlug() string { - if a == nil || a.Slug == nil { - return "" - } - return *a.Slug -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (a *App) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetClientID() string { - if a == nil || a.ClientID == nil { - return "" - } - return *a.ClientID -} - -// GetClientSecret returns the ClientSecret field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetClientSecret() string { - if a == nil || a.ClientSecret == nil { - return "" - } - return *a.ClientSecret -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetDescription() string { - if a == nil || a.Description == nil { - return "" - } - return *a.Description -} - -// GetExternalURL returns the ExternalURL field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetExternalURL() string { - if a == nil || a.ExternalURL == nil { - return "" - } - return *a.ExternalURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetHTMLURL() string { - if a == nil || a.HTMLURL == nil { - return "" - } - return *a.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetNodeID() string { - if a == nil || a.NodeID == nil { - return "" - } - return *a.NodeID -} - -// GetOwner returns the Owner field. 
-func (a *AppConfig) GetOwner() *User { - if a == nil { - return nil - } - return a.Owner -} - -// GetPEM returns the PEM field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetPEM() string { - if a == nil || a.PEM == nil { - return "" - } - return *a.PEM -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetSlug() string { - if a == nil || a.Slug == nil { - return "" - } - return *a.Slug -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetWebhookSecret returns the WebhookSecret field if it's non-nil, zero value otherwise. -func (a *AppConfig) GetWebhookSecret() string { - if a == nil || a.WebhookSecret == nil { - return "" - } - return *a.WebhookSecret -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (a *ArchivedAt) GetFrom() Timestamp { - if a == nil || a.From == nil { - return Timestamp{} - } - return *a.From -} - -// GetTo returns the To field if it's non-nil, zero value otherwise. -func (a *ArchivedAt) GetTo() Timestamp { - if a == nil || a.To == nil { - return Timestamp{} - } - return *a.To -} - -// GetArchiveDownloadURL returns the ArchiveDownloadURL field if it's non-nil, zero value otherwise. -func (a *Artifact) GetArchiveDownloadURL() string { - if a == nil || a.ArchiveDownloadURL == nil { - return "" - } - return *a.ArchiveDownloadURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *Artifact) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetExpired returns the Expired field if it's non-nil, zero value otherwise. -func (a *Artifact) GetExpired() bool { - if a == nil || a.Expired == nil { - return false - } - return *a.Expired -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (a *Artifact) GetExpiresAt() Timestamp { - if a == nil || a.ExpiresAt == nil { - return Timestamp{} - } - return *a.ExpiresAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *Artifact) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *Artifact) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (a *Artifact) GetNodeID() string { - if a == nil || a.NodeID == nil { - return "" - } - return *a.NodeID -} - -// GetSizeInBytes returns the SizeInBytes field if it's non-nil, zero value otherwise. -func (a *Artifact) GetSizeInBytes() int64 { - if a == nil || a.SizeInBytes == nil { - return 0 - } - return *a.SizeInBytes -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (a *Artifact) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (a *Artifact) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetWorkflowRun returns the WorkflowRun field. 
-func (a *Artifact) GetWorkflowRun() *ArtifactWorkflowRun { - if a == nil { - return nil - } - return a.WorkflowRun -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (a *ArtifactList) GetTotalCount() int64 { - if a == nil || a.TotalCount == nil { - return 0 - } - return *a.TotalCount -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (a *ArtifactWorkflowRun) GetHeadBranch() string { - if a == nil || a.HeadBranch == nil { - return "" - } - return *a.HeadBranch -} - -// GetHeadRepositoryID returns the HeadRepositoryID field if it's non-nil, zero value otherwise. -func (a *ArtifactWorkflowRun) GetHeadRepositoryID() int64 { - if a == nil || a.HeadRepositoryID == nil { - return 0 - } - return *a.HeadRepositoryID -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (a *ArtifactWorkflowRun) GetHeadSHA() string { - if a == nil || a.HeadSHA == nil { - return "" - } - return *a.HeadSHA -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *ArtifactWorkflowRun) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (a *ArtifactWorkflowRun) GetRepositoryID() int64 { - if a == nil || a.RepositoryID == nil { - return 0 - } - return *a.RepositoryID -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (a *Attachment) GetBody() string { - if a == nil || a.Body == nil { - return "" - } - return *a.Body -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *Attachment) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (a *Attachment) GetTitle() string { - if a == nil || a.Title == nil { - return "" - } - return *a.Title -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetAction() string { - if a == nil || a.Action == nil { - return "" - } - return *a.Action -} - -// GetActive returns the Active field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetActive() bool { - if a == nil || a.Active == nil { - return false - } - return *a.Active -} - -// GetActiveWas returns the ActiveWas field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetActiveWas() bool { - if a == nil || a.ActiveWas == nil { - return false - } - return *a.ActiveWas -} - -// GetActor returns the Actor field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetActor() string { - if a == nil || a.Actor == nil { - return "" - } - return *a.Actor -} - -// GetActorIP returns the ActorIP field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetActorIP() string { - if a == nil || a.ActorIP == nil { - return "" - } - return *a.ActorIP -} - -// GetActorLocation returns the ActorLocation field. -func (a *AuditEntry) GetActorLocation() *ActorLocation { - if a == nil { - return nil - } - return a.ActorLocation -} - -// GetBlockedUser returns the BlockedUser field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetBlockedUser() string { - if a == nil || a.BlockedUser == nil { - return "" - } - return *a.BlockedUser -} - -// GetBusiness returns the Business field if it's non-nil, zero value otherwise. 
-func (a *AuditEntry) GetBusiness() string { - if a == nil || a.Business == nil { - return "" - } - return *a.Business -} - -// GetCancelledAt returns the CancelledAt field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetCancelledAt() Timestamp { - if a == nil || a.CancelledAt == nil { - return Timestamp{} - } - return *a.CancelledAt -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetCompletedAt() Timestamp { - if a == nil || a.CompletedAt == nil { - return Timestamp{} - } - return *a.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetConclusion() string { - if a == nil || a.Conclusion == nil { - return "" - } - return *a.Conclusion -} - -// GetConfig returns the Config field. -func (a *AuditEntry) GetConfig() *HookConfig { - if a == nil { - return nil - } - return a.Config -} - -// GetConfigWas returns the ConfigWas field. -func (a *AuditEntry) GetConfigWas() *HookConfig { - if a == nil { - return nil - } - return a.ConfigWas -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetContentType() string { - if a == nil || a.ContentType == nil { - return "" - } - return *a.ContentType -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetData returns the Data field. -func (a *AuditEntry) GetData() *AuditEntryData { - if a == nil { - return nil - } - return a.Data -} - -// GetDeployKeyFingerprint returns the DeployKeyFingerprint field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetDeployKeyFingerprint() string { - if a == nil || a.DeployKeyFingerprint == nil { - return "" - } - return *a.DeployKeyFingerprint -} - -// GetDocumentID returns the DocumentID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetDocumentID() string { - if a == nil || a.DocumentID == nil { - return "" - } - return *a.DocumentID -} - -// GetEmoji returns the Emoji field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetEmoji() string { - if a == nil || a.Emoji == nil { - return "" - } - return *a.Emoji -} - -// GetEnvironmentName returns the EnvironmentName field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetEnvironmentName() string { - if a == nil || a.EnvironmentName == nil { - return "" - } - return *a.EnvironmentName -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetEvent() string { - if a == nil || a.Event == nil { - return "" - } - return *a.Event -} - -// GetExplanation returns the Explanation field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetExplanation() string { - if a == nil || a.Explanation == nil { - return "" - } - return *a.Explanation -} - -// GetExternalIdentityNameID returns the ExternalIdentityNameID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetExternalIdentityNameID() string { - if a == nil || a.ExternalIdentityNameID == nil { - return "" - } - return *a.ExternalIdentityNameID -} - -// GetExternalIdentityUsername returns the ExternalIdentityUsername field if it's non-nil, zero value otherwise. 
-func (a *AuditEntry) GetExternalIdentityUsername() string { - if a == nil || a.ExternalIdentityUsername == nil { - return "" - } - return *a.ExternalIdentityUsername -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetFingerprint() string { - if a == nil || a.Fingerprint == nil { - return "" - } - return *a.Fingerprint -} - -// GetHashedToken returns the HashedToken field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetHashedToken() string { - if a == nil || a.HashedToken == nil { - return "" - } - return *a.HashedToken -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetHeadBranch() string { - if a == nil || a.HeadBranch == nil { - return "" - } - return *a.HeadBranch -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetHeadSHA() string { - if a == nil || a.HeadSHA == nil { - return "" - } - return *a.HeadSHA -} - -// GetHookID returns the HookID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetHookID() int64 { - if a == nil || a.HookID == nil { - return 0 - } - return *a.HookID -} - -// GetIsHostedRunner returns the IsHostedRunner field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetIsHostedRunner() bool { - if a == nil || a.IsHostedRunner == nil { - return false - } - return *a.IsHostedRunner -} - -// GetJobName returns the JobName field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetJobName() string { - if a == nil || a.JobName == nil { - return "" - } - return *a.JobName -} - -// GetJobWorkflowRef returns the JobWorkflowRef field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetJobWorkflowRef() string { - if a == nil || a.JobWorkflowRef == nil { - return "" - } - return *a.JobWorkflowRef -} - -// GetLimitedAvailability returns the LimitedAvailability field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetLimitedAvailability() bool { - if a == nil || a.LimitedAvailability == nil { - return false - } - return *a.LimitedAvailability -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetMessage() string { - if a == nil || a.Message == nil { - return "" - } - return *a.Message -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetOAuthApplicationID returns the OAuthApplicationID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOAuthApplicationID() int64 { - if a == nil || a.OAuthApplicationID == nil { - return 0 - } - return *a.OAuthApplicationID -} - -// GetOldPermission returns the OldPermission field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOldPermission() string { - if a == nil || a.OldPermission == nil { - return "" - } - return *a.OldPermission -} - -// GetOldUser returns the OldUser field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOldUser() string { - if a == nil || a.OldUser == nil { - return "" - } - return *a.OldUser -} - -// GetOpenSSHPublicKey returns the OpenSSHPublicKey field if it's non-nil, zero value otherwise. 
-func (a *AuditEntry) GetOpenSSHPublicKey() string { - if a == nil || a.OpenSSHPublicKey == nil { - return "" - } - return *a.OpenSSHPublicKey -} - -// GetOperationType returns the OperationType field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOperationType() string { - if a == nil || a.OperationType == nil { - return "" - } - return *a.OperationType -} - -// GetOrg returns the Org field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOrg() string { - if a == nil || a.Org == nil { - return "" - } - return *a.Org -} - -// GetOrgID returns the OrgID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetOrgID() int64 { - if a == nil || a.OrgID == nil { - return 0 - } - return *a.OrgID -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetPermission() string { - if a == nil || a.Permission == nil { - return "" - } - return *a.Permission -} - -// GetPreviousVisibility returns the PreviousVisibility field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetPreviousVisibility() string { - if a == nil || a.PreviousVisibility == nil { - return "" - } - return *a.PreviousVisibility -} - -// GetProgrammaticAccessType returns the ProgrammaticAccessType field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetProgrammaticAccessType() string { - if a == nil || a.ProgrammaticAccessType == nil { - return "" - } - return *a.ProgrammaticAccessType -} - -// GetPullRequestID returns the PullRequestID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetPullRequestID() int64 { - if a == nil || a.PullRequestID == nil { - return 0 - } - return *a.PullRequestID -} - -// GetPullRequestTitle returns the PullRequestTitle field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetPullRequestTitle() string { - if a == nil || a.PullRequestTitle == nil { - return "" - } - return *a.PullRequestTitle -} - -// GetPullRequestURL returns the PullRequestURL field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetPullRequestURL() string { - if a == nil || a.PullRequestURL == nil { - return "" - } - return *a.PullRequestURL -} - -// GetReadOnly returns the ReadOnly field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetReadOnly() string { - if a == nil || a.ReadOnly == nil { - return "" - } - return *a.ReadOnly -} - -// GetRepo returns the Repo field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRepo() string { - if a == nil || a.Repo == nil { - return "" - } - return *a.Repo -} - -// GetRepository returns the Repository field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRepository() string { - if a == nil || a.Repository == nil { - return "" - } - return *a.Repository -} - -// GetRepositoryPublic returns the RepositoryPublic field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRepositoryPublic() bool { - if a == nil || a.RepositoryPublic == nil { - return false - } - return *a.RepositoryPublic -} - -// GetRunAttempt returns the RunAttempt field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRunAttempt() int64 { - if a == nil || a.RunAttempt == nil { - return 0 - } - return *a.RunAttempt -} - -// GetRunnerGroupID returns the RunnerGroupID field if it's non-nil, zero value otherwise. 
-func (a *AuditEntry) GetRunnerGroupID() int64 { - if a == nil || a.RunnerGroupID == nil { - return 0 - } - return *a.RunnerGroupID -} - -// GetRunnerGroupName returns the RunnerGroupName field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRunnerGroupName() string { - if a == nil || a.RunnerGroupName == nil { - return "" - } - return *a.RunnerGroupName -} - -// GetRunnerID returns the RunnerID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRunnerID() int64 { - if a == nil || a.RunnerID == nil { - return 0 - } - return *a.RunnerID -} - -// GetRunnerName returns the RunnerName field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRunnerName() string { - if a == nil || a.RunnerName == nil { - return "" - } - return *a.RunnerName -} - -// GetRunNumber returns the RunNumber field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetRunNumber() int64 { - if a == nil || a.RunNumber == nil { - return 0 - } - return *a.RunNumber -} - -// GetSourceVersion returns the SourceVersion field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetSourceVersion() string { - if a == nil || a.SourceVersion == nil { - return "" - } - return *a.SourceVersion -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetStartedAt() Timestamp { - if a == nil || a.StartedAt == nil { - return Timestamp{} - } - return *a.StartedAt -} - -// GetTargetLogin returns the TargetLogin field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTargetLogin() string { - if a == nil || a.TargetLogin == nil { - return "" - } - return *a.TargetLogin -} - -// GetTargetVersion returns the TargetVersion field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTargetVersion() string { - if a == nil || a.TargetVersion == nil { - return "" - } - return *a.TargetVersion -} - -// GetTeam returns the Team field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTeam() string { - if a == nil || a.Team == nil { - return "" - } - return *a.Team -} - -// GetTimestamp returns the Timestamp field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTimestamp() Timestamp { - if a == nil || a.Timestamp == nil { - return Timestamp{} - } - return *a.Timestamp -} - -// GetTokenID returns the TokenID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTokenID() int64 { - if a == nil || a.TokenID == nil { - return 0 - } - return *a.TokenID -} - -// GetTokenScopes returns the TokenScopes field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTokenScopes() string { - if a == nil || a.TokenScopes == nil { - return "" - } - return *a.TokenScopes -} - -// GetTopic returns the Topic field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTopic() string { - if a == nil || a.Topic == nil { - return "" - } - return *a.Topic -} - -// GetTransportProtocol returns the TransportProtocol field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTransportProtocol() int { - if a == nil || a.TransportProtocol == nil { - return 0 - } - return *a.TransportProtocol -} - -// GetTransportProtocolName returns the TransportProtocolName field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetTransportProtocolName() string { - if a == nil || a.TransportProtocolName == nil { - return "" - } - return *a.TransportProtocolName -} - -// GetTriggerID returns the TriggerID field if it's non-nil, zero value otherwise. 
-func (a *AuditEntry) GetTriggerID() int64 { - if a == nil || a.TriggerID == nil { - return 0 - } - return *a.TriggerID -} - -// GetUser returns the User field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetUser() string { - if a == nil || a.User == nil { - return "" - } - return *a.User -} - -// GetUserAgent returns the UserAgent field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetUserAgent() string { - if a == nil || a.UserAgent == nil { - return "" - } - return *a.UserAgent -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetVisibility() string { - if a == nil || a.Visibility == nil { - return "" - } - return *a.Visibility -} - -// GetWorkflowID returns the WorkflowID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetWorkflowID() int64 { - if a == nil || a.WorkflowID == nil { - return 0 - } - return *a.WorkflowID -} - -// GetWorkflowRunID returns the WorkflowRunID field if it's non-nil, zero value otherwise. -func (a *AuditEntry) GetWorkflowRunID() int64 { - if a == nil || a.WorkflowRunID == nil { - return 0 - } - return *a.WorkflowRunID -} - -// GetOldLogin returns the OldLogin field if it's non-nil, zero value otherwise. -func (a *AuditEntryData) GetOldLogin() string { - if a == nil || a.OldLogin == nil { - return "" - } - return *a.OldLogin -} - -// GetOldName returns the OldName field if it's non-nil, zero value otherwise. -func (a *AuditEntryData) GetOldName() string { - if a == nil || a.OldName == nil { - return "" - } - return *a.OldName -} - -// GetApp returns the App field. -func (a *Authorization) GetApp() *AuthorizationApp { - if a == nil { - return nil - } - return a.App -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (a *Authorization) GetCreatedAt() Timestamp { - if a == nil || a.CreatedAt == nil { - return Timestamp{} - } - return *a.CreatedAt -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (a *Authorization) GetFingerprint() string { - if a == nil || a.Fingerprint == nil { - return "" - } - return *a.Fingerprint -} - -// GetHashedToken returns the HashedToken field if it's non-nil, zero value otherwise. -func (a *Authorization) GetHashedToken() string { - if a == nil || a.HashedToken == nil { - return "" - } - return *a.HashedToken -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *Authorization) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetNote returns the Note field if it's non-nil, zero value otherwise. -func (a *Authorization) GetNote() string { - if a == nil || a.Note == nil { - return "" - } - return *a.Note -} - -// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. -func (a *Authorization) GetNoteURL() string { - if a == nil || a.NoteURL == nil { - return "" - } - return *a.NoteURL -} - -// GetToken returns the Token field if it's non-nil, zero value otherwise. -func (a *Authorization) GetToken() string { - if a == nil || a.Token == nil { - return "" - } - return *a.Token -} - -// GetTokenLastEight returns the TokenLastEight field if it's non-nil, zero value otherwise. -func (a *Authorization) GetTokenLastEight() string { - if a == nil || a.TokenLastEight == nil { - return "" - } - return *a.TokenLastEight -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (a *Authorization) GetUpdatedAt() Timestamp { - if a == nil || a.UpdatedAt == nil { - return Timestamp{} - } - return *a.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (a *Authorization) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetUser returns the User field. -func (a *Authorization) GetUser() *User { - if a == nil { - return nil - } - return a.User -} - -// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. -func (a *AuthorizationApp) GetClientID() string { - if a == nil || a.ClientID == nil { - return "" - } - return *a.ClientID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (a *AuthorizationApp) GetName() string { - if a == nil || a.Name == nil { - return "" - } - return *a.Name -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (a *AuthorizationApp) GetURL() string { - if a == nil || a.URL == nil { - return "" - } - return *a.URL -} - -// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. -func (a *AuthorizationRequest) GetClientID() string { - if a == nil || a.ClientID == nil { - return "" - } - return *a.ClientID -} - -// GetClientSecret returns the ClientSecret field if it's non-nil, zero value otherwise. -func (a *AuthorizationRequest) GetClientSecret() string { - if a == nil || a.ClientSecret == nil { - return "" - } - return *a.ClientSecret -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (a *AuthorizationRequest) GetFingerprint() string { - if a == nil || a.Fingerprint == nil { - return "" - } - return *a.Fingerprint -} - -// GetNote returns the Note field if it's non-nil, zero value otherwise. -func (a *AuthorizationRequest) GetNote() string { - if a == nil || a.Note == nil { - return "" - } - return *a.Note -} - -// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. -func (a *AuthorizationRequest) GetNoteURL() string { - if a == nil || a.NoteURL == nil { - return "" - } - return *a.NoteURL -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (a *AuthorizationUpdateRequest) GetFingerprint() string { - if a == nil || a.Fingerprint == nil { - return "" - } - return *a.Fingerprint -} - -// GetNote returns the Note field if it's non-nil, zero value otherwise. -func (a *AuthorizationUpdateRequest) GetNote() string { - if a == nil || a.Note == nil { - return "" - } - return *a.Note -} - -// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. -func (a *AuthorizationUpdateRequest) GetNoteURL() string { - if a == nil || a.NoteURL == nil { - return "" - } - return *a.NoteURL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (a *AuthorizedActorsOnly) GetFrom() bool { - if a == nil || a.From == nil { - return false - } - return *a.From -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (a *AuthorizedDismissalActorsOnlyChanges) GetFrom() bool { - if a == nil || a.From == nil { - return false - } - return *a.From -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (a *Autolink) GetID() int64 { - if a == nil || a.ID == nil { - return 0 - } - return *a.ID -} - -// GetIsAlphanumeric returns the IsAlphanumeric field if it's non-nil, zero value otherwise. 
-func (a *Autolink) GetIsAlphanumeric() bool { - if a == nil || a.IsAlphanumeric == nil { - return false - } - return *a.IsAlphanumeric -} - -// GetKeyPrefix returns the KeyPrefix field if it's non-nil, zero value otherwise. -func (a *Autolink) GetKeyPrefix() string { - if a == nil || a.KeyPrefix == nil { - return "" - } - return *a.KeyPrefix -} - -// GetURLTemplate returns the URLTemplate field if it's non-nil, zero value otherwise. -func (a *Autolink) GetURLTemplate() string { - if a == nil || a.URLTemplate == nil { - return "" - } - return *a.URLTemplate -} - -// GetIsAlphanumeric returns the IsAlphanumeric field if it's non-nil, zero value otherwise. -func (a *AutolinkOptions) GetIsAlphanumeric() bool { - if a == nil || a.IsAlphanumeric == nil { - return false - } - return *a.IsAlphanumeric -} - -// GetKeyPrefix returns the KeyPrefix field if it's non-nil, zero value otherwise. -func (a *AutolinkOptions) GetKeyPrefix() string { - if a == nil || a.KeyPrefix == nil { - return "" - } - return *a.KeyPrefix -} - -// GetURLTemplate returns the URLTemplate field if it's non-nil, zero value otherwise. -func (a *AutolinkOptions) GetURLTemplate() string { - if a == nil || a.URLTemplate == nil { - return "" - } - return *a.URLTemplate -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (a *AutomatedSecurityFixes) GetEnabled() bool { - if a == nil || a.Enabled == nil { - return false - } - return *a.Enabled -} - -// GetPaused returns the Paused field if it's non-nil, zero value otherwise. -func (a *AutomatedSecurityFixes) GetPaused() bool { - if a == nil || a.Paused == nil { - return false - } - return *a.Paused -} - -// GetAppID returns the AppID field if it's non-nil, zero value otherwise. -func (a *AutoTriggerCheck) GetAppID() int64 { - if a == nil || a.AppID == nil { - return 0 - } - return *a.AppID -} - -// GetSetting returns the Setting field if it's non-nil, zero value otherwise. -func (a *AutoTriggerCheck) GetSetting() bool { - if a == nil || a.Setting == nil { - return false - } - return *a.Setting -} - -// GetContent returns the Content field if it's non-nil, zero value otherwise. -func (b *Blob) GetContent() string { - if b == nil || b.Content == nil { - return "" - } - return *b.Content -} - -// GetEncoding returns the Encoding field if it's non-nil, zero value otherwise. -func (b *Blob) GetEncoding() string { - if b == nil || b.Encoding == nil { - return "" - } - return *b.Encoding -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (b *Blob) GetNodeID() string { - if b == nil || b.NodeID == nil { - return "" - } - return *b.NodeID -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (b *Blob) GetSHA() string { - if b == nil || b.SHA == nil { - return "" - } - return *b.SHA -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (b *Blob) GetSize() int { - if b == nil || b.Size == nil { - return 0 - } - return *b.Size -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (b *Blob) GetURL() string { - if b == nil || b.URL == nil { - return "" - } - return *b.URL -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (b *BlockCreations) GetEnabled() bool { - if b == nil || b.Enabled == nil { - return false - } - return *b.Enabled -} - -// GetCommit returns the Commit field. 
-func (b *Branch) GetCommit() *RepositoryCommit { - if b == nil { - return nil - } - return b.Commit -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (b *Branch) GetName() string { - if b == nil || b.Name == nil { - return "" - } - return *b.Name -} - -// GetProtected returns the Protected field if it's non-nil, zero value otherwise. -func (b *Branch) GetProtected() bool { - if b == nil || b.Protected == nil { - return false - } - return *b.Protected -} - -// GetCommit returns the Commit field. -func (b *BranchCommit) GetCommit() *Commit { - if b == nil { - return nil - } - return b.Commit -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (b *BranchCommit) GetName() string { - if b == nil || b.Name == nil { - return "" - } - return *b.Name -} - -// GetProtected returns the Protected field if it's non-nil, zero value otherwise. -func (b *BranchCommit) GetProtected() bool { - if b == nil || b.Protected == nil { - return false - } - return *b.Protected -} - -// GetProtected returns the Protected field if it's non-nil, zero value otherwise. -func (b *BranchListOptions) GetProtected() bool { - if b == nil || b.Protected == nil { - return false - } - return *b.Protected -} - -// GetCustomBranchPolicies returns the CustomBranchPolicies field if it's non-nil, zero value otherwise. -func (b *BranchPolicy) GetCustomBranchPolicies() bool { - if b == nil || b.CustomBranchPolicies == nil { - return false - } - return *b.CustomBranchPolicies -} - -// GetProtectedBranches returns the ProtectedBranches field if it's non-nil, zero value otherwise. -func (b *BranchPolicy) GetProtectedBranches() bool { - if b == nil || b.ProtectedBranches == nil { - return false - } - return *b.ProtectedBranches -} - -// GetAdminEnforced returns the AdminEnforced field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetAdminEnforced() bool { - if b == nil || b.AdminEnforced == nil { - return false - } - return *b.AdminEnforced -} - -// GetAllowDeletionsEnforcementLevel returns the AllowDeletionsEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetAllowDeletionsEnforcementLevel() string { - if b == nil || b.AllowDeletionsEnforcementLevel == nil { - return "" - } - return *b.AllowDeletionsEnforcementLevel -} - -// GetAllowForcePushesEnforcementLevel returns the AllowForcePushesEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetAllowForcePushesEnforcementLevel() string { - if b == nil || b.AllowForcePushesEnforcementLevel == nil { - return "" - } - return *b.AllowForcePushesEnforcementLevel -} - -// GetAuthorizedActorsOnly returns the AuthorizedActorsOnly field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetAuthorizedActorsOnly() bool { - if b == nil || b.AuthorizedActorsOnly == nil { - return false - } - return *b.AuthorizedActorsOnly -} - -// GetAuthorizedDismissalActorsOnly returns the AuthorizedDismissalActorsOnly field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetAuthorizedDismissalActorsOnly() bool { - if b == nil || b.AuthorizedDismissalActorsOnly == nil { - return false - } - return *b.AuthorizedDismissalActorsOnly -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (b *BranchProtectionRule) GetCreatedAt() Timestamp { - if b == nil || b.CreatedAt == nil { - return Timestamp{} - } - return *b.CreatedAt -} - -// GetDismissStaleReviewsOnPush returns the DismissStaleReviewsOnPush field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetDismissStaleReviewsOnPush() bool { - if b == nil || b.DismissStaleReviewsOnPush == nil { - return false - } - return *b.DismissStaleReviewsOnPush -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetID() int64 { - if b == nil || b.ID == nil { - return 0 - } - return *b.ID -} - -// GetIgnoreApprovalsFromContributors returns the IgnoreApprovalsFromContributors field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetIgnoreApprovalsFromContributors() bool { - if b == nil || b.IgnoreApprovalsFromContributors == nil { - return false - } - return *b.IgnoreApprovalsFromContributors -} - -// GetLinearHistoryRequirementEnforcementLevel returns the LinearHistoryRequirementEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetLinearHistoryRequirementEnforcementLevel() string { - if b == nil || b.LinearHistoryRequirementEnforcementLevel == nil { - return "" - } - return *b.LinearHistoryRequirementEnforcementLevel -} - -// GetMergeQueueEnforcementLevel returns the MergeQueueEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetMergeQueueEnforcementLevel() string { - if b == nil || b.MergeQueueEnforcementLevel == nil { - return "" - } - return *b.MergeQueueEnforcementLevel -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetName() string { - if b == nil || b.Name == nil { - return "" - } - return *b.Name -} - -// GetPullRequestReviewsEnforcementLevel returns the PullRequestReviewsEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetPullRequestReviewsEnforcementLevel() string { - if b == nil || b.PullRequestReviewsEnforcementLevel == nil { - return "" - } - return *b.PullRequestReviewsEnforcementLevel -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetRepositoryID() int64 { - if b == nil || b.RepositoryID == nil { - return 0 - } - return *b.RepositoryID -} - -// GetRequireCodeOwnerReview returns the RequireCodeOwnerReview field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetRequireCodeOwnerReview() bool { - if b == nil || b.RequireCodeOwnerReview == nil { - return false - } - return *b.RequireCodeOwnerReview -} - -// GetRequiredApprovingReviewCount returns the RequiredApprovingReviewCount field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetRequiredApprovingReviewCount() int { - if b == nil || b.RequiredApprovingReviewCount == nil { - return 0 - } - return *b.RequiredApprovingReviewCount -} - -// GetRequiredConversationResolutionLevel returns the RequiredConversationResolutionLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetRequiredConversationResolutionLevel() string { - if b == nil || b.RequiredConversationResolutionLevel == nil { - return "" - } - return *b.RequiredConversationResolutionLevel -} - -// GetRequiredDeploymentsEnforcementLevel returns the RequiredDeploymentsEnforcementLevel field if it's non-nil, zero value otherwise. 
-func (b *BranchProtectionRule) GetRequiredDeploymentsEnforcementLevel() string { - if b == nil || b.RequiredDeploymentsEnforcementLevel == nil { - return "" - } - return *b.RequiredDeploymentsEnforcementLevel -} - -// GetRequiredStatusChecksEnforcementLevel returns the RequiredStatusChecksEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetRequiredStatusChecksEnforcementLevel() string { - if b == nil || b.RequiredStatusChecksEnforcementLevel == nil { - return "" - } - return *b.RequiredStatusChecksEnforcementLevel -} - -// GetSignatureRequirementEnforcementLevel returns the SignatureRequirementEnforcementLevel field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetSignatureRequirementEnforcementLevel() string { - if b == nil || b.SignatureRequirementEnforcementLevel == nil { - return "" - } - return *b.SignatureRequirementEnforcementLevel -} - -// GetStrictRequiredStatusChecksPolicy returns the StrictRequiredStatusChecksPolicy field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetStrictRequiredStatusChecksPolicy() bool { - if b == nil || b.StrictRequiredStatusChecksPolicy == nil { - return false - } - return *b.StrictRequiredStatusChecksPolicy -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRule) GetUpdatedAt() Timestamp { - if b == nil || b.UpdatedAt == nil { - return Timestamp{} - } - return *b.UpdatedAt -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (b *BranchProtectionRuleEvent) GetAction() string { - if b == nil || b.Action == nil { - return "" - } - return *b.Action -} - -// GetChanges returns the Changes field. -func (b *BranchProtectionRuleEvent) GetChanges() *ProtectionChanges { - if b == nil { - return nil - } - return b.Changes -} - -// GetInstallation returns the Installation field. -func (b *BranchProtectionRuleEvent) GetInstallation() *Installation { - if b == nil { - return nil - } - return b.Installation -} - -// GetOrg returns the Org field. -func (b *BranchProtectionRuleEvent) GetOrg() *Organization { - if b == nil { - return nil - } - return b.Org -} - -// GetRepo returns the Repo field. -func (b *BranchProtectionRuleEvent) GetRepo() *Repository { - if b == nil { - return nil - } - return b.Repo -} - -// GetRule returns the Rule field. -func (b *BranchProtectionRuleEvent) GetRule() *BranchProtectionRule { - if b == nil { - return nil - } - return b.Rule -} - -// GetSender returns the Sender field. -func (b *BranchProtectionRuleEvent) GetSender() *User { - if b == nil { - return nil - } - return b.Sender -} - -// GetActorID returns the ActorID field if it's non-nil, zero value otherwise. -func (b *BypassActor) GetActorID() int64 { - if b == nil || b.ActorID == nil { - return 0 - } - return *b.ActorID -} - -// GetActorType returns the ActorType field if it's non-nil, zero value otherwise. -func (b *BypassActor) GetActorType() string { - if b == nil || b.ActorType == nil { - return "" - } - return *b.ActorType -} - -// GetBypassMode returns the BypassMode field if it's non-nil, zero value otherwise. -func (b *BypassActor) GetBypassMode() string { - if b == nil || b.BypassMode == nil { - return "" - } - return *b.BypassMode -} - -// GetApp returns the App field. -func (c *CheckRun) GetApp() *App { - if c == nil { - return nil - } - return c.App -} - -// GetCheckSuite returns the CheckSuite field. 
-func (c *CheckRun) GetCheckSuite() *CheckSuite { - if c == nil { - return nil - } - return c.CheckSuite -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetCompletedAt() Timestamp { - if c == nil || c.CompletedAt == nil { - return Timestamp{} - } - return *c.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetConclusion() string { - if c == nil || c.Conclusion == nil { - return "" - } - return *c.Conclusion -} - -// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetDetailsURL() string { - if c == nil || c.DetailsURL == nil { - return "" - } - return *c.DetailsURL -} - -// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetExternalID() string { - if c == nil || c.ExternalID == nil { - return "" - } - return *c.ExternalID -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetHeadSHA() string { - if c == nil || c.HeadSHA == nil { - return "" - } - return *c.HeadSHA -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetOutput returns the Output field. -func (c *CheckRun) GetOutput() *CheckRunOutput { - if c == nil { - return nil - } - return c.Output -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetStartedAt() Timestamp { - if c == nil || c.StartedAt == nil { - return Timestamp{} - } - return *c.StartedAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetStatus() string { - if c == nil || c.Status == nil { - return "" - } - return *c.Status -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CheckRun) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetAnnotationLevel returns the AnnotationLevel field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetAnnotationLevel() string { - if c == nil || c.AnnotationLevel == nil { - return "" - } - return *c.AnnotationLevel -} - -// GetEndColumn returns the EndColumn field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetEndColumn() int { - if c == nil || c.EndColumn == nil { - return 0 - } - return *c.EndColumn -} - -// GetEndLine returns the EndLine field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetEndLine() int { - if c == nil || c.EndLine == nil { - return 0 - } - return *c.EndLine -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. 
-func (c *CheckRunAnnotation) GetMessage() string { - if c == nil || c.Message == nil { - return "" - } - return *c.Message -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetPath() string { - if c == nil || c.Path == nil { - return "" - } - return *c.Path -} - -// GetRawDetails returns the RawDetails field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetRawDetails() string { - if c == nil || c.RawDetails == nil { - return "" - } - return *c.RawDetails -} - -// GetStartColumn returns the StartColumn field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetStartColumn() int { - if c == nil || c.StartColumn == nil { - return 0 - } - return *c.StartColumn -} - -// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetStartLine() int { - if c == nil || c.StartLine == nil { - return 0 - } - return *c.StartLine -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (c *CheckRunAnnotation) GetTitle() string { - if c == nil || c.Title == nil { - return "" - } - return *c.Title -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (c *CheckRunEvent) GetAction() string { - if c == nil || c.Action == nil { - return "" - } - return *c.Action -} - -// GetCheckRun returns the CheckRun field. -func (c *CheckRunEvent) GetCheckRun() *CheckRun { - if c == nil { - return nil - } - return c.CheckRun -} - -// GetInstallation returns the Installation field. -func (c *CheckRunEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetOrg returns the Org field. -func (c *CheckRunEvent) GetOrg() *Organization { - if c == nil { - return nil - } - return c.Org -} - -// GetRepo returns the Repo field. -func (c *CheckRunEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetRequestedAction returns the RequestedAction field. -func (c *CheckRunEvent) GetRequestedAction() *RequestedAction { - if c == nil { - return nil - } - return c.RequestedAction -} - -// GetSender returns the Sender field. -func (c *CheckRunEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetAlt returns the Alt field if it's non-nil, zero value otherwise. -func (c *CheckRunImage) GetAlt() string { - if c == nil || c.Alt == nil { - return "" - } - return *c.Alt -} - -// GetCaption returns the Caption field if it's non-nil, zero value otherwise. -func (c *CheckRunImage) GetCaption() string { - if c == nil || c.Caption == nil { - return "" - } - return *c.Caption -} - -// GetImageURL returns the ImageURL field if it's non-nil, zero value otherwise. -func (c *CheckRunImage) GetImageURL() string { - if c == nil || c.ImageURL == nil { - return "" - } - return *c.ImageURL -} - -// GetAnnotationsCount returns the AnnotationsCount field if it's non-nil, zero value otherwise. -func (c *CheckRunOutput) GetAnnotationsCount() int { - if c == nil || c.AnnotationsCount == nil { - return 0 - } - return *c.AnnotationsCount -} - -// GetAnnotationsURL returns the AnnotationsURL field if it's non-nil, zero value otherwise. -func (c *CheckRunOutput) GetAnnotationsURL() string { - if c == nil || c.AnnotationsURL == nil { - return "" - } - return *c.AnnotationsURL -} - -// GetSummary returns the Summary field if it's non-nil, zero value otherwise. 
-func (c *CheckRunOutput) GetSummary() string { - if c == nil || c.Summary == nil { - return "" - } - return *c.Summary -} - -// GetText returns the Text field if it's non-nil, zero value otherwise. -func (c *CheckRunOutput) GetText() string { - if c == nil || c.Text == nil { - return "" - } - return *c.Text -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (c *CheckRunOutput) GetTitle() string { - if c == nil || c.Title == nil { - return "" - } - return *c.Title -} - -// GetAfterSHA returns the AfterSHA field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetAfterSHA() string { - if c == nil || c.AfterSHA == nil { - return "" - } - return *c.AfterSHA -} - -// GetApp returns the App field. -func (c *CheckSuite) GetApp() *App { - if c == nil { - return nil - } - return c.App -} - -// GetBeforeSHA returns the BeforeSHA field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetBeforeSHA() string { - if c == nil || c.BeforeSHA == nil { - return "" - } - return *c.BeforeSHA -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetConclusion() string { - if c == nil || c.Conclusion == nil { - return "" - } - return *c.Conclusion -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetHeadBranch() string { - if c == nil || c.HeadBranch == nil { - return "" - } - return *c.HeadBranch -} - -// GetHeadCommit returns the HeadCommit field. -func (c *CheckSuite) GetHeadCommit() *Commit { - if c == nil { - return nil - } - return c.HeadCommit -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetHeadSHA() string { - if c == nil || c.HeadSHA == nil { - return "" - } - return *c.HeadSHA -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetRepository returns the Repository field. -func (c *CheckSuite) GetRepository() *Repository { - if c == nil { - return nil - } - return c.Repository -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetStatus() string { - if c == nil || c.Status == nil { - return "" - } - return *c.Status -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetUpdatedAt() Timestamp { - if c == nil || c.UpdatedAt == nil { - return Timestamp{} - } - return *c.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CheckSuite) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (c *CheckSuiteEvent) GetAction() string { - if c == nil || c.Action == nil { - return "" - } - return *c.Action -} - -// GetCheckSuite returns the CheckSuite field. 
-func (c *CheckSuiteEvent) GetCheckSuite() *CheckSuite { - if c == nil { - return nil - } - return c.CheckSuite -} - -// GetInstallation returns the Installation field. -func (c *CheckSuiteEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetOrg returns the Org field. -func (c *CheckSuiteEvent) GetOrg() *Organization { - if c == nil { - return nil - } - return c.Org -} - -// GetRepo returns the Repo field. -func (c *CheckSuiteEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetSender returns the Sender field. -func (c *CheckSuiteEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetPreferences returns the Preferences field. -func (c *CheckSuitePreferenceResults) GetPreferences() *PreferenceList { - if c == nil { - return nil - } - return c.Preferences -} - -// GetRepository returns the Repository field. -func (c *CheckSuitePreferenceResults) GetRepository() *Repository { - if c == nil { - return nil - } - return c.Repository -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (c *CodeOfConduct) GetBody() string { - if c == nil || c.Body == nil { - return "" - } - return *c.Body -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (c *CodeOfConduct) GetKey() string { - if c == nil || c.Key == nil { - return "" - } - return *c.Key -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CodeOfConduct) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CodeOfConduct) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetSuggestion returns the Suggestion field if it's non-nil, zero value otherwise. -func (c *CodeownersError) GetSuggestion() string { - if c == nil || c.Suggestion == nil { - return "" - } - return *c.Suggestion -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetContentType() string { - if c == nil || c.ContentType == nil { - return "" - } - return *c.ContentType -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetLanguage returns the Language field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetLanguage() string { - if c == nil || c.Language == nil { - return "" - } - return *c.Language -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetSize() int64 { - if c == nil || c.Size == nil { - return 0 - } - return *c.Size -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetUpdatedAt() Timestamp { - if c == nil || c.UpdatedAt == nil { - return Timestamp{} - } - return *c.UpdatedAt -} - -// GetUploader returns the Uploader field. 
-func (c *CodeQLDatabase) GetUploader() *User { - if c == nil { - return nil - } - return c.Uploader -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CodeQLDatabase) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CodeResult) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CodeResult) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (c *CodeResult) GetPath() string { - if c == nil || c.Path == nil { - return "" - } - return *c.Path -} - -// GetRepository returns the Repository field. -func (c *CodeResult) GetRepository() *Repository { - if c == nil { - return nil - } - return c.Repository -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *CodeResult) GetSHA() string { - if c == nil || c.SHA == nil { - return "" - } - return *c.SHA -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (c *CodeScanningAlertEvent) GetAction() string { - if c == nil || c.Action == nil { - return "" - } - return *c.Action -} - -// GetAlert returns the Alert field. -func (c *CodeScanningAlertEvent) GetAlert() *Alert { - if c == nil { - return nil - } - return c.Alert -} - -// GetCommitOID returns the CommitOID field if it's non-nil, zero value otherwise. -func (c *CodeScanningAlertEvent) GetCommitOID() string { - if c == nil || c.CommitOID == nil { - return "" - } - return *c.CommitOID -} - -// GetInstallation returns the Installation field. -func (c *CodeScanningAlertEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetOrg returns the Org field. -func (c *CodeScanningAlertEvent) GetOrg() *Organization { - if c == nil { - return nil - } - return c.Org -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (c *CodeScanningAlertEvent) GetRef() string { - if c == nil || c.Ref == nil { - return "" - } - return *c.Ref -} - -// GetRepo returns the Repo field. -func (c *CodeScanningAlertEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetSender returns the Sender field. -func (c *CodeScanningAlertEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetDismissedComment returns the DismissedComment field if it's non-nil, zero value otherwise. -func (c *CodeScanningAlertState) GetDismissedComment() string { - if c == nil || c.DismissedComment == nil { - return "" - } - return *c.DismissedComment -} - -// GetDismissedReason returns the DismissedReason field if it's non-nil, zero value otherwise. -func (c *CodeScanningAlertState) GetDismissedReason() string { - if c == nil || c.DismissedReason == nil { - return "" - } - return *c.DismissedReason -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (c *CodeSearchResult) GetIncompleteResults() bool { - if c == nil || c.IncompleteResults == nil { - return false - } - return *c.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. 
-func (c *CodeSearchResult) GetTotal() int { - if c == nil || c.Total == nil { - return 0 - } - return *c.Total -} - -// GetBillableOwner returns the BillableOwner field. -func (c *Codespace) GetBillableOwner() *User { - if c == nil { - return nil - } - return c.BillableOwner -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *Codespace) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetDevcontainerPath returns the DevcontainerPath field if it's non-nil, zero value otherwise. -func (c *Codespace) GetDevcontainerPath() string { - if c == nil || c.DevcontainerPath == nil { - return "" - } - return *c.DevcontainerPath -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. -func (c *Codespace) GetDisplayName() string { - if c == nil || c.DisplayName == nil { - return "" - } - return *c.DisplayName -} - -// GetEnvironmentID returns the EnvironmentID field if it's non-nil, zero value otherwise. -func (c *Codespace) GetEnvironmentID() string { - if c == nil || c.EnvironmentID == nil { - return "" - } - return *c.EnvironmentID -} - -// GetGitStatus returns the GitStatus field. -func (c *Codespace) GetGitStatus() *CodespacesGitStatus { - if c == nil { - return nil - } - return c.GitStatus -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *Codespace) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetIdleTimeoutMinutes returns the IdleTimeoutMinutes field if it's non-nil, zero value otherwise. -func (c *Codespace) GetIdleTimeoutMinutes() int { - if c == nil || c.IdleTimeoutMinutes == nil { - return 0 - } - return *c.IdleTimeoutMinutes -} - -// GetIdleTimeoutNotice returns the IdleTimeoutNotice field if it's non-nil, zero value otherwise. -func (c *Codespace) GetIdleTimeoutNotice() string { - if c == nil || c.IdleTimeoutNotice == nil { - return "" - } - return *c.IdleTimeoutNotice -} - -// GetLastKnownStopNotice returns the LastKnownStopNotice field if it's non-nil, zero value otherwise. -func (c *Codespace) GetLastKnownStopNotice() string { - if c == nil || c.LastKnownStopNotice == nil { - return "" - } - return *c.LastKnownStopNotice -} - -// GetLastUsedAt returns the LastUsedAt field if it's non-nil, zero value otherwise. -func (c *Codespace) GetLastUsedAt() Timestamp { - if c == nil || c.LastUsedAt == nil { - return Timestamp{} - } - return *c.LastUsedAt -} - -// GetLocation returns the Location field if it's non-nil, zero value otherwise. -func (c *Codespace) GetLocation() string { - if c == nil || c.Location == nil { - return "" - } - return *c.Location -} - -// GetMachine returns the Machine field. -func (c *Codespace) GetMachine() *CodespacesMachine { - if c == nil { - return nil - } - return c.Machine -} - -// GetMachinesURL returns the MachinesURL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetMachinesURL() string { - if c == nil || c.MachinesURL == nil { - return "" - } - return *c.MachinesURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *Codespace) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetOwner returns the Owner field. -func (c *Codespace) GetOwner() *User { - if c == nil { - return nil - } - return c.Owner -} - -// GetPendingOperation returns the PendingOperation field if it's non-nil, zero value otherwise. 
-func (c *Codespace) GetPendingOperation() bool { - if c == nil || c.PendingOperation == nil { - return false - } - return *c.PendingOperation -} - -// GetPendingOperationDisabledReason returns the PendingOperationDisabledReason field if it's non-nil, zero value otherwise. -func (c *Codespace) GetPendingOperationDisabledReason() string { - if c == nil || c.PendingOperationDisabledReason == nil { - return "" - } - return *c.PendingOperationDisabledReason -} - -// GetPrebuild returns the Prebuild field if it's non-nil, zero value otherwise. -func (c *Codespace) GetPrebuild() bool { - if c == nil || c.Prebuild == nil { - return false - } - return *c.Prebuild -} - -// GetPullsURL returns the PullsURL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetPullsURL() string { - if c == nil || c.PullsURL == nil { - return "" - } - return *c.PullsURL -} - -// GetRepository returns the Repository field. -func (c *Codespace) GetRepository() *Repository { - if c == nil { - return nil - } - return c.Repository -} - -// GetRetentionExpiresAt returns the RetentionExpiresAt field if it's non-nil, zero value otherwise. -func (c *Codespace) GetRetentionExpiresAt() Timestamp { - if c == nil || c.RetentionExpiresAt == nil { - return Timestamp{} - } - return *c.RetentionExpiresAt -} - -// GetRetentionPeriodMinutes returns the RetentionPeriodMinutes field if it's non-nil, zero value otherwise. -func (c *Codespace) GetRetentionPeriodMinutes() int { - if c == nil || c.RetentionPeriodMinutes == nil { - return 0 - } - return *c.RetentionPeriodMinutes -} - -// GetRuntimeConstraints returns the RuntimeConstraints field. -func (c *Codespace) GetRuntimeConstraints() *CodespacesRuntimeConstraints { - if c == nil { - return nil - } - return c.RuntimeConstraints -} - -// GetStartURL returns the StartURL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetStartURL() string { - if c == nil || c.StartURL == nil { - return "" - } - return *c.StartURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (c *Codespace) GetState() string { - if c == nil || c.State == nil { - return "" - } - return *c.State -} - -// GetStopURL returns the StopURL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetStopURL() string { - if c == nil || c.StopURL == nil { - return "" - } - return *c.StopURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (c *Codespace) GetUpdatedAt() Timestamp { - if c == nil || c.UpdatedAt == nil { - return Timestamp{} - } - return *c.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetWebURL returns the WebURL field if it's non-nil, zero value otherwise. -func (c *Codespace) GetWebURL() string { - if c == nil || c.WebURL == nil { - return "" - } - return *c.WebURL -} - -// GetAhead returns the Ahead field if it's non-nil, zero value otherwise. -func (c *CodespacesGitStatus) GetAhead() int { - if c == nil || c.Ahead == nil { - return 0 - } - return *c.Ahead -} - -// GetBehind returns the Behind field if it's non-nil, zero value otherwise. -func (c *CodespacesGitStatus) GetBehind() int { - if c == nil || c.Behind == nil { - return 0 - } - return *c.Behind -} - -// GetHasUncommittedChanges returns the HasUncommittedChanges field if it's non-nil, zero value otherwise. 
-func (c *CodespacesGitStatus) GetHasUncommittedChanges() bool { - if c == nil || c.HasUncommittedChanges == nil { - return false - } - return *c.HasUncommittedChanges -} - -// GetHasUnpushedChanges returns the HasUnpushedChanges field if it's non-nil, zero value otherwise. -func (c *CodespacesGitStatus) GetHasUnpushedChanges() bool { - if c == nil || c.HasUnpushedChanges == nil { - return false - } - return *c.HasUnpushedChanges -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (c *CodespacesGitStatus) GetRef() string { - if c == nil || c.Ref == nil { - return "" - } - return *c.Ref -} - -// GetCPUs returns the CPUs field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetCPUs() int { - if c == nil || c.CPUs == nil { - return 0 - } - return *c.CPUs -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetDisplayName() string { - if c == nil || c.DisplayName == nil { - return "" - } - return *c.DisplayName -} - -// GetMemoryInBytes returns the MemoryInBytes field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetMemoryInBytes() int64 { - if c == nil || c.MemoryInBytes == nil { - return 0 - } - return *c.MemoryInBytes -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetOperatingSystem returns the OperatingSystem field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetOperatingSystem() string { - if c == nil || c.OperatingSystem == nil { - return "" - } - return *c.OperatingSystem -} - -// GetPrebuildAvailability returns the PrebuildAvailability field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetPrebuildAvailability() string { - if c == nil || c.PrebuildAvailability == nil { - return "" - } - return *c.PrebuildAvailability -} - -// GetStorageInBytes returns the StorageInBytes field if it's non-nil, zero value otherwise. -func (c *CodespacesMachine) GetStorageInBytes() int64 { - if c == nil || c.StorageInBytes == nil { - return 0 - } - return *c.StorageInBytes -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *CollaboratorInvitation) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CollaboratorInvitation) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CollaboratorInvitation) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetInvitee returns the Invitee field. -func (c *CollaboratorInvitation) GetInvitee() *User { - if c == nil { - return nil - } - return c.Invitee -} - -// GetInviter returns the Inviter field. -func (c *CollaboratorInvitation) GetInviter() *User { - if c == nil { - return nil - } - return c.Inviter -} - -// GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. -func (c *CollaboratorInvitation) GetPermissions() string { - if c == nil || c.Permissions == nil { - return "" - } - return *c.Permissions -} - -// GetRepo returns the Repo field. 
-func (c *CollaboratorInvitation) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CollaboratorInvitation) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetCommitURL returns the CommitURL field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetCommitURL() string { - if c == nil || c.CommitURL == nil { - return "" - } - return *c.CommitURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetRepositoryURL() string { - if c == nil || c.RepositoryURL == nil { - return "" - } - return *c.RepositoryURL -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetSHA() string { - if c == nil || c.SHA == nil { - return "" - } - return *c.SHA -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetState() string { - if c == nil || c.State == nil { - return "" - } - return *c.State -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (c *CombinedStatus) GetTotalCount() int { - if c == nil || c.TotalCount == nil { - return 0 - } - return *c.TotalCount -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *Comment) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetAuthorAssociation() string { - if c == nil || c.AuthorAssociation == nil { - return "" - } - return *c.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetBody() string { - if c == nil || c.Body == nil { - return "" - } - return *c.Body -} - -// GetChildCommentCount returns the ChildCommentCount field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetChildCommentCount() int { - if c == nil || c.ChildCommentCount == nil { - return 0 - } - return *c.ChildCommentCount -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetCreatedAt() Timestamp { - if c == nil || c.CreatedAt == nil { - return Timestamp{} - } - return *c.CreatedAt -} - -// GetDiscussionID returns the DiscussionID field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetDiscussionID() int64 { - if c == nil || c.DiscussionID == nil { - return 0 - } - return *c.DiscussionID -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
-func (c *CommentDiscussion) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetParentID returns the ParentID field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetParentID() int64 { - if c == nil || c.ParentID == nil { - return 0 - } - return *c.ParentID -} - -// GetReactions returns the Reactions field. -func (c *CommentDiscussion) GetReactions() *Reactions { - if c == nil { - return nil - } - return c.Reactions -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetRepositoryURL() string { - if c == nil || c.RepositoryURL == nil { - return "" - } - return *c.RepositoryURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (c *CommentDiscussion) GetUpdatedAt() Timestamp { - if c == nil || c.UpdatedAt == nil { - return Timestamp{} - } - return *c.UpdatedAt -} - -// GetUser returns the User field. -func (c *CommentDiscussion) GetUser() *User { - if c == nil { - return nil - } - return c.User -} - -// GetTotalCommitComments returns the TotalCommitComments field if it's non-nil, zero value otherwise. -func (c *CommentStats) GetTotalCommitComments() int { - if c == nil || c.TotalCommitComments == nil { - return 0 - } - return *c.TotalCommitComments -} - -// GetTotalGistComments returns the TotalGistComments field if it's non-nil, zero value otherwise. -func (c *CommentStats) GetTotalGistComments() int { - if c == nil || c.TotalGistComments == nil { - return 0 - } - return *c.TotalGistComments -} - -// GetTotalIssueComments returns the TotalIssueComments field if it's non-nil, zero value otherwise. -func (c *CommentStats) GetTotalIssueComments() int { - if c == nil || c.TotalIssueComments == nil { - return 0 - } - return *c.TotalIssueComments -} - -// GetTotalPullRequestComments returns the TotalPullRequestComments field if it's non-nil, zero value otherwise. -func (c *CommentStats) GetTotalPullRequestComments() int { - if c == nil || c.TotalPullRequestComments == nil { - return 0 - } - return *c.TotalPullRequestComments -} - -// GetAuthor returns the Author field. -func (c *Commit) GetAuthor() *CommitAuthor { - if c == nil { - return nil - } - return c.Author -} - -// GetCommentCount returns the CommentCount field if it's non-nil, zero value otherwise. -func (c *Commit) GetCommentCount() int { - if c == nil || c.CommentCount == nil { - return 0 - } - return *c.CommentCount -} - -// GetCommitter returns the Committer field. -func (c *Commit) GetCommitter() *CommitAuthor { - if c == nil { - return nil - } - return c.Committer -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *Commit) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (c *Commit) GetMessage() string { - if c == nil || c.Message == nil { - return "" - } - return *c.Message -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (c *Commit) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *Commit) GetSHA() string { - if c == nil || c.SHA == nil { - return "" - } - return *c.SHA -} - -// GetStats returns the Stats field. 
-func (c *Commit) GetStats() *CommitStats { - if c == nil { - return nil - } - return c.Stats -} - -// GetTree returns the Tree field. -func (c *Commit) GetTree() *Tree { - if c == nil { - return nil - } - return c.Tree -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *Commit) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetVerification returns the Verification field. -func (c *Commit) GetVerification() *SignatureVerification { - if c == nil { - return nil - } - return c.Verification -} - -// GetDate returns the Date field if it's non-nil, zero value otherwise. -func (c *CommitAuthor) GetDate() Timestamp { - if c == nil || c.Date == nil { - return Timestamp{} - } - return *c.Date -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (c *CommitAuthor) GetEmail() string { - if c == nil || c.Email == nil { - return "" - } - return *c.Email -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (c *CommitAuthor) GetLogin() string { - if c == nil || c.Login == nil { - return "" - } - return *c.Login -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CommitAuthor) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (c *CommitCommentEvent) GetAction() string { - if c == nil || c.Action == nil { - return "" - } - return *c.Action -} - -// GetComment returns the Comment field. -func (c *CommitCommentEvent) GetComment() *RepositoryComment { - if c == nil { - return nil - } - return c.Comment -} - -// GetInstallation returns the Installation field. -func (c *CommitCommentEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetOrg returns the Org field. -func (c *CommitCommentEvent) GetOrg() *Organization { - if c == nil { - return nil - } - return c.Org -} - -// GetRepo returns the Repo field. -func (c *CommitCommentEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetSender returns the Sender field. -func (c *CommitCommentEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetAdditions() int { - if c == nil || c.Additions == nil { - return 0 - } - return *c.Additions -} - -// GetBlobURL returns the BlobURL field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetBlobURL() string { - if c == nil || c.BlobURL == nil { - return "" - } - return *c.BlobURL -} - -// GetChanges returns the Changes field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetChanges() int { - if c == nil || c.Changes == nil { - return 0 - } - return *c.Changes -} - -// GetContentsURL returns the ContentsURL field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetContentsURL() string { - if c == nil || c.ContentsURL == nil { - return "" - } - return *c.ContentsURL -} - -// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetDeletions() int { - if c == nil || c.Deletions == nil { - return 0 - } - return *c.Deletions -} - -// GetFilename returns the Filename field if it's non-nil, zero value otherwise. 
-func (c *CommitFile) GetFilename() string { - if c == nil || c.Filename == nil { - return "" - } - return *c.Filename -} - -// GetPatch returns the Patch field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetPatch() string { - if c == nil || c.Patch == nil { - return "" - } - return *c.Patch -} - -// GetPreviousFilename returns the PreviousFilename field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetPreviousFilename() string { - if c == nil || c.PreviousFilename == nil { - return "" - } - return *c.PreviousFilename -} - -// GetRawURL returns the RawURL field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetRawURL() string { - if c == nil || c.RawURL == nil { - return "" - } - return *c.RawURL -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetSHA() string { - if c == nil || c.SHA == nil { - return "" - } - return *c.SHA -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (c *CommitFile) GetStatus() string { - if c == nil || c.Status == nil { - return "" - } - return *c.Status -} - -// GetAuthor returns the Author field. -func (c *CommitResult) GetAuthor() *User { - if c == nil { - return nil - } - return c.Author -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetCommentsURL() string { - if c == nil || c.CommentsURL == nil { - return "" - } - return *c.CommentsURL -} - -// GetCommit returns the Commit field. -func (c *CommitResult) GetCommit() *Commit { - if c == nil { - return nil - } - return c.Commit -} - -// GetCommitter returns the Committer field. -func (c *CommitResult) GetCommitter() *User { - if c == nil { - return nil - } - return c.Committer -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetRepository returns the Repository field. -func (c *CommitResult) GetRepository() *Repository { - if c == nil { - return nil - } - return c.Repository -} - -// GetScore returns the Score field. -func (c *CommitResult) GetScore() *float64 { - if c == nil { - return nil - } - return c.Score -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetSHA() string { - if c == nil || c.SHA == nil { - return "" - } - return *c.SHA -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CommitResult) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetAheadBy returns the AheadBy field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetAheadBy() int { - if c == nil || c.AheadBy == nil { - return 0 - } - return *c.AheadBy -} - -// GetBaseCommit returns the BaseCommit field. -func (c *CommitsComparison) GetBaseCommit() *RepositoryCommit { - if c == nil { - return nil - } - return c.BaseCommit -} - -// GetBehindBy returns the BehindBy field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetBehindBy() int { - if c == nil || c.BehindBy == nil { - return 0 - } - return *c.BehindBy -} - -// GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetDiffURL() string { - if c == nil || c.DiffURL == nil { - return "" - } - return *c.DiffURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. 
-func (c *CommitsComparison) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetMergeBaseCommit returns the MergeBaseCommit field. -func (c *CommitsComparison) GetMergeBaseCommit() *RepositoryCommit { - if c == nil { - return nil - } - return c.MergeBaseCommit -} - -// GetPatchURL returns the PatchURL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetPatchURL() string { - if c == nil || c.PatchURL == nil { - return "" - } - return *c.PatchURL -} - -// GetPermalinkURL returns the PermalinkURL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetPermalinkURL() string { - if c == nil || c.PermalinkURL == nil { - return "" - } - return *c.PermalinkURL -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetStatus() string { - if c == nil || c.Status == nil { - return "" - } - return *c.Status -} - -// GetTotalCommits returns the TotalCommits field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetTotalCommits() int { - if c == nil || c.TotalCommits == nil { - return 0 - } - return *c.TotalCommits -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *CommitsComparison) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (c *CommitsSearchResult) GetIncompleteResults() bool { - if c == nil || c.IncompleteResults == nil { - return false - } - return *c.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (c *CommitsSearchResult) GetTotal() int { - if c == nil || c.Total == nil { - return 0 - } - return *c.Total -} - -// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. -func (c *CommitStats) GetAdditions() int { - if c == nil || c.Additions == nil { - return 0 - } - return *c.Additions -} - -// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. -func (c *CommitStats) GetDeletions() int { - if c == nil || c.Deletions == nil { - return 0 - } - return *c.Deletions -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (c *CommitStats) GetTotal() int { - if c == nil || c.Total == nil { - return 0 - } - return *c.Total -} - -// GetCodeOfConduct returns the CodeOfConduct field. -func (c *CommunityHealthFiles) GetCodeOfConduct() *Metric { - if c == nil { - return nil - } - return c.CodeOfConduct -} - -// GetCodeOfConductFile returns the CodeOfConductFile field. -func (c *CommunityHealthFiles) GetCodeOfConductFile() *Metric { - if c == nil { - return nil - } - return c.CodeOfConductFile -} - -// GetContributing returns the Contributing field. -func (c *CommunityHealthFiles) GetContributing() *Metric { - if c == nil { - return nil - } - return c.Contributing -} - -// GetIssueTemplate returns the IssueTemplate field. -func (c *CommunityHealthFiles) GetIssueTemplate() *Metric { - if c == nil { - return nil - } - return c.IssueTemplate -} - -// GetLicense returns the License field. -func (c *CommunityHealthFiles) GetLicense() *Metric { - if c == nil { - return nil - } - return c.License -} - -// GetPullRequestTemplate returns the PullRequestTemplate field. 
-func (c *CommunityHealthFiles) GetPullRequestTemplate() *Metric { - if c == nil { - return nil - } - return c.PullRequestTemplate -} - -// GetReadme returns the Readme field. -func (c *CommunityHealthFiles) GetReadme() *Metric { - if c == nil { - return nil - } - return c.Readme -} - -// GetContentReportsEnabled returns the ContentReportsEnabled field if it's non-nil, zero value otherwise. -func (c *CommunityHealthMetrics) GetContentReportsEnabled() bool { - if c == nil || c.ContentReportsEnabled == nil { - return false - } - return *c.ContentReportsEnabled -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (c *CommunityHealthMetrics) GetDescription() string { - if c == nil || c.Description == nil { - return "" - } - return *c.Description -} - -// GetDocumentation returns the Documentation field if it's non-nil, zero value otherwise. -func (c *CommunityHealthMetrics) GetDocumentation() string { - if c == nil || c.Documentation == nil { - return "" - } - return *c.Documentation -} - -// GetFiles returns the Files field. -func (c *CommunityHealthMetrics) GetFiles() *CommunityHealthFiles { - if c == nil { - return nil - } - return c.Files -} - -// GetHealthPercentage returns the HealthPercentage field if it's non-nil, zero value otherwise. -func (c *CommunityHealthMetrics) GetHealthPercentage() int { - if c == nil || c.HealthPercentage == nil { - return 0 - } - return *c.HealthPercentage -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (c *CommunityHealthMetrics) GetUpdatedAt() Timestamp { - if c == nil || c.UpdatedAt == nil { - return Timestamp{} - } - return *c.UpdatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *ContentReference) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (c *ContentReference) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetReference returns the Reference field if it's non-nil, zero value otherwise. -func (c *ContentReference) GetReference() string { - if c == nil || c.Reference == nil { - return "" - } - return *c.Reference -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (c *ContentReferenceEvent) GetAction() string { - if c == nil || c.Action == nil { - return "" - } - return *c.Action -} - -// GetContentReference returns the ContentReference field. -func (c *ContentReferenceEvent) GetContentReference() *ContentReference { - if c == nil { - return nil - } - return c.ContentReference -} - -// GetInstallation returns the Installation field. -func (c *ContentReferenceEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetRepo returns the Repo field. -func (c *ContentReferenceEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetSender returns the Sender field. -func (c *ContentReferenceEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetAvatarURL() string { - if c == nil || c.AvatarURL == nil { - return "" - } - return *c.AvatarURL -} - -// GetContributions returns the Contributions field if it's non-nil, zero value otherwise. 
-func (c *Contributor) GetContributions() int { - if c == nil || c.Contributions == nil { - return 0 - } - return *c.Contributions -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (c *Contributor) GetEmail() string { - if c == nil || c.Email == nil { - return "" - } - return *c.Email -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetEventsURL() string { - if c == nil || c.EventsURL == nil { - return "" - } - return *c.EventsURL -} - -// GetFollowersURL returns the FollowersURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetFollowersURL() string { - if c == nil || c.FollowersURL == nil { - return "" - } - return *c.FollowersURL -} - -// GetFollowingURL returns the FollowingURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetFollowingURL() string { - if c == nil || c.FollowingURL == nil { - return "" - } - return *c.FollowingURL -} - -// GetGistsURL returns the GistsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetGistsURL() string { - if c == nil || c.GistsURL == nil { - return "" - } - return *c.GistsURL -} - -// GetGravatarID returns the GravatarID field if it's non-nil, zero value otherwise. -func (c *Contributor) GetGravatarID() string { - if c == nil || c.GravatarID == nil { - return "" - } - return *c.GravatarID -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetHTMLURL() string { - if c == nil || c.HTMLURL == nil { - return "" - } - return *c.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *Contributor) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (c *Contributor) GetLogin() string { - if c == nil || c.Login == nil { - return "" - } - return *c.Login -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *Contributor) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (c *Contributor) GetNodeID() string { - if c == nil || c.NodeID == nil { - return "" - } - return *c.NodeID -} - -// GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetOrganizationsURL() string { - if c == nil || c.OrganizationsURL == nil { - return "" - } - return *c.OrganizationsURL -} - -// GetReceivedEventsURL returns the ReceivedEventsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetReceivedEventsURL() string { - if c == nil || c.ReceivedEventsURL == nil { - return "" - } - return *c.ReceivedEventsURL -} - -// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetReposURL() string { - if c == nil || c.ReposURL == nil { - return "" - } - return *c.ReposURL -} - -// GetSiteAdmin returns the SiteAdmin field if it's non-nil, zero value otherwise. -func (c *Contributor) GetSiteAdmin() bool { - if c == nil || c.SiteAdmin == nil { - return false - } - return *c.SiteAdmin -} - -// GetStarredURL returns the StarredURL field if it's non-nil, zero value otherwise. 
-func (c *Contributor) GetStarredURL() string { - if c == nil || c.StarredURL == nil { - return "" - } - return *c.StarredURL -} - -// GetSubscriptionsURL returns the SubscriptionsURL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetSubscriptionsURL() string { - if c == nil || c.SubscriptionsURL == nil { - return "" - } - return *c.SubscriptionsURL -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (c *Contributor) GetType() string { - if c == nil || c.Type == nil { - return "" - } - return *c.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (c *Contributor) GetURL() string { - if c == nil || c.URL == nil { - return "" - } - return *c.URL -} - -// GetAuthor returns the Author field. -func (c *ContributorStats) GetAuthor() *Contributor { - if c == nil { - return nil - } - return c.Author -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (c *ContributorStats) GetTotal() int { - if c == nil || c.Total == nil { - return 0 - } - return *c.Total -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetCompletedAt() Timestamp { - if c == nil || c.CompletedAt == nil { - return Timestamp{} - } - return *c.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetConclusion() string { - if c == nil || c.Conclusion == nil { - return "" - } - return *c.Conclusion -} - -// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetDetailsURL() string { - if c == nil || c.DetailsURL == nil { - return "" - } - return *c.DetailsURL -} - -// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetExternalID() string { - if c == nil || c.ExternalID == nil { - return "" - } - return *c.ExternalID -} - -// GetOutput returns the Output field. -func (c *CreateCheckRunOptions) GetOutput() *CheckRunOutput { - if c == nil { - return nil - } - return c.Output -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetStartedAt() Timestamp { - if c == nil || c.StartedAt == nil { - return Timestamp{} - } - return *c.StartedAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (c *CreateCheckRunOptions) GetStatus() string { - if c == nil || c.Status == nil { - return "" - } - return *c.Status -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (c *CreateCheckSuiteOptions) GetHeadBranch() string { - if c == nil || c.HeadBranch == nil { - return "" - } - return *c.HeadBranch -} - -// GetClientIP returns the ClientIP field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetClientIP() string { - if c == nil || c.ClientIP == nil { - return "" - } - return *c.ClientIP -} - -// GetDevcontainerPath returns the DevcontainerPath field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetDevcontainerPath() string { - if c == nil || c.DevcontainerPath == nil { - return "" - } - return *c.DevcontainerPath -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. 
-func (c *CreateCodespaceOptions) GetDisplayName() string { - if c == nil || c.DisplayName == nil { - return "" - } - return *c.DisplayName -} - -// GetGeo returns the Geo field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetGeo() string { - if c == nil || c.Geo == nil { - return "" - } - return *c.Geo -} - -// GetIdleTimeoutMinutes returns the IdleTimeoutMinutes field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetIdleTimeoutMinutes() int { - if c == nil || c.IdleTimeoutMinutes == nil { - return 0 - } - return *c.IdleTimeoutMinutes -} - -// GetMachine returns the Machine field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetMachine() string { - if c == nil || c.Machine == nil { - return "" - } - return *c.Machine -} - -// GetMultiRepoPermissionsOptOut returns the MultiRepoPermissionsOptOut field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetMultiRepoPermissionsOptOut() bool { - if c == nil || c.MultiRepoPermissionsOptOut == nil { - return false - } - return *c.MultiRepoPermissionsOptOut -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetRef() string { - if c == nil || c.Ref == nil { - return "" - } - return *c.Ref -} - -// GetRetentionPeriodMinutes returns the RetentionPeriodMinutes field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetRetentionPeriodMinutes() int { - if c == nil || c.RetentionPeriodMinutes == nil { - return 0 - } - return *c.RetentionPeriodMinutes -} - -// GetWorkingDirectory returns the WorkingDirectory field if it's non-nil, zero value otherwise. -func (c *CreateCodespaceOptions) GetWorkingDirectory() string { - if c == nil || c.WorkingDirectory == nil { - return "" - } - return *c.WorkingDirectory -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. -func (c *CreateEnterpriseRunnerGroupRequest) GetAllowsPublicRepositories() bool { - if c == nil || c.AllowsPublicRepositories == nil { - return false - } - return *c.AllowsPublicRepositories -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CreateEnterpriseRunnerGroupRequest) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (c *CreateEnterpriseRunnerGroupRequest) GetRestrictedToWorkflows() bool { - if c == nil || c.RestrictedToWorkflows == nil { - return false - } - return *c.RestrictedToWorkflows -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (c *CreateEnterpriseRunnerGroupRequest) GetVisibility() string { - if c == nil || c.Visibility == nil { - return "" - } - return *c.Visibility -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetDescription() string { - if c == nil || c.Description == nil { - return "" - } - return *c.Description -} - -// GetInstallation returns the Installation field. -func (c *CreateEvent) GetInstallation() *Installation { - if c == nil { - return nil - } - return c.Installation -} - -// GetMasterBranch returns the MasterBranch field if it's non-nil, zero value otherwise. 
-func (c *CreateEvent) GetMasterBranch() string { - if c == nil || c.MasterBranch == nil { - return "" - } - return *c.MasterBranch -} - -// GetOrg returns the Org field. -func (c *CreateEvent) GetOrg() *Organization { - if c == nil { - return nil - } - return c.Org -} - -// GetPusherType returns the PusherType field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetPusherType() string { - if c == nil || c.PusherType == nil { - return "" - } - return *c.PusherType -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetRef() string { - if c == nil || c.Ref == nil { - return "" - } - return *c.Ref -} - -// GetRefType returns the RefType field if it's non-nil, zero value otherwise. -func (c *CreateEvent) GetRefType() string { - if c == nil || c.RefType == nil { - return "" - } - return *c.RefType -} - -// GetRepo returns the Repo field. -func (c *CreateEvent) GetRepo() *Repository { - if c == nil { - return nil - } - return c.Repo -} - -// GetSender returns the Sender field. -func (c *CreateEvent) GetSender() *User { - if c == nil { - return nil - } - return c.Sender -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (c *CreateOrgInvitationOptions) GetEmail() string { - if c == nil || c.Email == nil { - return "" - } - return *c.Email -} - -// GetInviteeID returns the InviteeID field if it's non-nil, zero value otherwise. -func (c *CreateOrgInvitationOptions) GetInviteeID() int64 { - if c == nil || c.InviteeID == nil { - return 0 - } - return *c.InviteeID -} - -// GetRole returns the Role field if it's non-nil, zero value otherwise. -func (c *CreateOrgInvitationOptions) GetRole() string { - if c == nil || c.Role == nil { - return "" - } - return *c.Role -} - -// GetBaseRole returns the BaseRole field if it's non-nil, zero value otherwise. -func (c *CreateOrUpdateCustomRoleOptions) GetBaseRole() string { - if c == nil || c.BaseRole == nil { - return "" - } - return *c.BaseRole -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (c *CreateOrUpdateCustomRoleOptions) GetDescription() string { - if c == nil || c.Description == nil { - return "" - } - return *c.Description -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CreateOrUpdateCustomRoleOptions) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (c *CreateProtectedChanges) GetFrom() bool { - if c == nil || c.From == nil { - return false - } - return *c.From -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. -func (c *CreateRunnerGroupRequest) GetAllowsPublicRepositories() bool { - if c == nil || c.AllowsPublicRepositories == nil { - return false - } - return *c.AllowsPublicRepositories -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CreateRunnerGroupRequest) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (c *CreateRunnerGroupRequest) GetRestrictedToWorkflows() bool { - if c == nil || c.RestrictedToWorkflows == nil { - return false - } - return *c.RestrictedToWorkflows -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. 
-func (c *CreateRunnerGroupRequest) GetVisibility() string { - if c == nil || c.Visibility == nil { - return "" - } - return *c.Visibility -} - -// GetCanAdminsBypass returns the CanAdminsBypass field if it's non-nil, zero value otherwise. -func (c *CreateUpdateEnvironment) GetCanAdminsBypass() bool { - if c == nil || c.CanAdminsBypass == nil { - return false - } - return *c.CanAdminsBypass -} - -// GetDeploymentBranchPolicy returns the DeploymentBranchPolicy field. -func (c *CreateUpdateEnvironment) GetDeploymentBranchPolicy() *BranchPolicy { - if c == nil { - return nil - } - return c.DeploymentBranchPolicy -} - -// GetPreventSelfReview returns the PreventSelfReview field if it's non-nil, zero value otherwise. -func (c *CreateUpdateEnvironment) GetPreventSelfReview() bool { - if c == nil || c.PreventSelfReview == nil { - return false - } - return *c.PreventSelfReview -} - -// GetWaitTimer returns the WaitTimer field if it's non-nil, zero value otherwise. -func (c *CreateUpdateEnvironment) GetWaitTimer() int { - if c == nil || c.WaitTimer == nil { - return 0 - } - return *c.WaitTimer -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (c *CreateUpdateRequiredWorkflowOptions) GetRepositoryID() int64 { - if c == nil || c.RepositoryID == nil { - return 0 - } - return *c.RepositoryID -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (c *CreateUpdateRequiredWorkflowOptions) GetScope() string { - if c == nil || c.Scope == nil { - return "" - } - return *c.Scope -} - -// GetSelectedRepositoryIDs returns the SelectedRepositoryIDs field. -func (c *CreateUpdateRequiredWorkflowOptions) GetSelectedRepositoryIDs() *SelectedRepoIDs { - if c == nil { - return nil - } - return c.SelectedRepositoryIDs -} - -// GetWorkflowFilePath returns the WorkflowFilePath field if it's non-nil, zero value otherwise. -func (c *CreateUpdateRequiredWorkflowOptions) GetWorkflowFilePath() string { - if c == nil || c.WorkflowFilePath == nil { - return "" - } - return *c.WorkflowFilePath -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (c *CreateUserProjectOptions) GetBody() string { - if c == nil || c.Body == nil { - return "" - } - return *c.Body -} - -// GetCreated returns the Created field if it's non-nil, zero value otherwise. -func (c *CreationInfo) GetCreated() Timestamp { - if c == nil || c.Created == nil { - return Timestamp{} - } - return *c.Created -} - -// GetAuthorizedCredentialExpiresAt returns the AuthorizedCredentialExpiresAt field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetAuthorizedCredentialExpiresAt() Timestamp { - if c == nil || c.AuthorizedCredentialExpiresAt == nil { - return Timestamp{} - } - return *c.AuthorizedCredentialExpiresAt -} - -// GetAuthorizedCredentialID returns the AuthorizedCredentialID field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetAuthorizedCredentialID() int64 { - if c == nil || c.AuthorizedCredentialID == nil { - return 0 - } - return *c.AuthorizedCredentialID -} - -// GetAuthorizedCredentialNote returns the AuthorizedCredentialNote field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetAuthorizedCredentialNote() string { - if c == nil || c.AuthorizedCredentialNote == nil { - return "" - } - return *c.AuthorizedCredentialNote -} - -// GetAuthorizedCredentialTitle returns the AuthorizedCredentialTitle field if it's non-nil, zero value otherwise. 
-func (c *CredentialAuthorization) GetAuthorizedCredentialTitle() string { - if c == nil || c.AuthorizedCredentialTitle == nil { - return "" - } - return *c.AuthorizedCredentialTitle -} - -// GetCredentialAccessedAt returns the CredentialAccessedAt field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetCredentialAccessedAt() Timestamp { - if c == nil || c.CredentialAccessedAt == nil { - return Timestamp{} - } - return *c.CredentialAccessedAt -} - -// GetCredentialAuthorizedAt returns the CredentialAuthorizedAt field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetCredentialAuthorizedAt() Timestamp { - if c == nil || c.CredentialAuthorizedAt == nil { - return Timestamp{} - } - return *c.CredentialAuthorizedAt -} - -// GetCredentialID returns the CredentialID field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetCredentialID() int64 { - if c == nil || c.CredentialID == nil { - return 0 - } - return *c.CredentialID -} - -// GetCredentialType returns the CredentialType field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetCredentialType() string { - if c == nil || c.CredentialType == nil { - return "" - } - return *c.CredentialType -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetFingerprint() string { - if c == nil || c.Fingerprint == nil { - return "" - } - return *c.Fingerprint -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetLogin() string { - if c == nil || c.Login == nil { - return "" - } - return *c.Login -} - -// GetTokenLastEight returns the TokenLastEight field if it's non-nil, zero value otherwise. -func (c *CredentialAuthorization) GetTokenLastEight() string { - if c == nil || c.TokenLastEight == nil { - return "" - } - return *c.TokenLastEight -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (c *Credit) GetType() string { - if c == nil || c.Type == nil { - return "" - } - return *c.Type -} - -// GetUser returns the User field. -func (c *Credit) GetUser() *User { - if c == nil { - return nil - } - return c.User -} - -// GetDefaultValue returns the DefaultValue field if it's non-nil, zero value otherwise. -func (c *CustomProperty) GetDefaultValue() string { - if c == nil || c.DefaultValue == nil { - return "" - } - return *c.DefaultValue -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (c *CustomProperty) GetDescription() string { - if c == nil || c.Description == nil { - return "" - } - return *c.Description -} - -// GetPropertyName returns the PropertyName field if it's non-nil, zero value otherwise. -func (c *CustomProperty) GetPropertyName() string { - if c == nil || c.PropertyName == nil { - return "" - } - return *c.PropertyName -} - -// GetRequired returns the Required field if it's non-nil, zero value otherwise. -func (c *CustomProperty) GetRequired() bool { - if c == nil || c.Required == nil { - return false - } - return *c.Required -} - -// GetValue returns the Value field if it's non-nil, zero value otherwise. -func (c *CustomPropertyValue) GetValue() string { - if c == nil || c.Value == nil { - return "" - } - return *c.Value -} - -// GetBaseRole returns the BaseRole field if it's non-nil, zero value otherwise. 
-func (c *CustomRepoRoles) GetBaseRole() string { - if c == nil || c.BaseRole == nil { - return "" - } - return *c.BaseRole -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (c *CustomRepoRoles) GetDescription() string { - if c == nil || c.Description == nil { - return "" - } - return *c.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (c *CustomRepoRoles) GetID() int64 { - if c == nil || c.ID == nil { - return 0 - } - return *c.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (c *CustomRepoRoles) GetName() string { - if c == nil || c.Name == nil { - return "" - } - return *c.Name -} - -// GetQuerySuite returns the QuerySuite field if it's non-nil, zero value otherwise. -func (d *DefaultSetupConfiguration) GetQuerySuite() string { - if d == nil || d.QuerySuite == nil { - return "" - } - return *d.QuerySuite -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DefaultSetupConfiguration) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DefaultSetupConfiguration) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetConfirmDeleteURL returns the ConfirmDeleteURL field if it's non-nil, zero value otherwise. -func (d *DeleteAnalysis) GetConfirmDeleteURL() string { - if d == nil || d.ConfirmDeleteURL == nil { - return "" - } - return *d.ConfirmDeleteURL -} - -// GetNextAnalysisURL returns the NextAnalysisURL field if it's non-nil, zero value otherwise. -func (d *DeleteAnalysis) GetNextAnalysisURL() string { - if d == nil || d.NextAnalysisURL == nil { - return "" - } - return *d.NextAnalysisURL -} - -// GetInstallation returns the Installation field. -func (d *DeleteEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrg returns the Org field. -func (d *DeleteEvent) GetOrg() *Organization { - if d == nil { - return nil - } - return d.Org -} - -// GetPusherType returns the PusherType field if it's non-nil, zero value otherwise. -func (d *DeleteEvent) GetPusherType() string { - if d == nil || d.PusherType == nil { - return "" - } - return *d.PusherType -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (d *DeleteEvent) GetRef() string { - if d == nil || d.Ref == nil { - return "" - } - return *d.Ref -} - -// GetRefType returns the RefType field if it's non-nil, zero value otherwise. -func (d *DeleteEvent) GetRefType() string { - if d == nil || d.RefType == nil { - return "" - } - return *d.RefType -} - -// GetRepo returns the Repo field. -func (d *DeleteEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DeleteEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetAutoDismissedAt returns the AutoDismissedAt field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetAutoDismissedAt() Timestamp { - if d == nil || d.AutoDismissedAt == nil { - return Timestamp{} - } - return *d.AutoDismissedAt -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (d *DependabotAlert) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetDependency returns the Dependency field. -func (d *DependabotAlert) GetDependency() *Dependency { - if d == nil { - return nil - } - return d.Dependency -} - -// GetDismissedAt returns the DismissedAt field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetDismissedAt() Timestamp { - if d == nil || d.DismissedAt == nil { - return Timestamp{} - } - return *d.DismissedAt -} - -// GetDismissedBy returns the DismissedBy field. -func (d *DependabotAlert) GetDismissedBy() *User { - if d == nil { - return nil - } - return d.DismissedBy -} - -// GetDismissedComment returns the DismissedComment field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetDismissedComment() string { - if d == nil || d.DismissedComment == nil { - return "" - } - return *d.DismissedComment -} - -// GetDismissedReason returns the DismissedReason field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetDismissedReason() string { - if d == nil || d.DismissedReason == nil { - return "" - } - return *d.DismissedReason -} - -// GetFixedAt returns the FixedAt field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetFixedAt() Timestamp { - if d == nil || d.FixedAt == nil { - return Timestamp{} - } - return *d.FixedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetHTMLURL() string { - if d == nil || d.HTMLURL == nil { - return "" - } - return *d.HTMLURL -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetNumber() int { - if d == nil || d.Number == nil { - return 0 - } - return *d.Number -} - -// GetRepository returns the Repository field. -func (d *DependabotAlert) GetRepository() *Repository { - if d == nil { - return nil - } - return d.Repository -} - -// GetSecurityAdvisory returns the SecurityAdvisory field. -func (d *DependabotAlert) GetSecurityAdvisory() *DependabotSecurityAdvisory { - if d == nil { - return nil - } - return d.SecurityAdvisory -} - -// GetSecurityVulnerability returns the SecurityVulnerability field. -func (d *DependabotAlert) GetSecurityVulnerability() *AdvisoryVulnerability { - if d == nil { - return nil - } - return d.SecurityVulnerability -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (d *DependabotAlert) GetURL() string { - if d == nil || d.URL == nil { - return "" - } - return *d.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (d *DependabotAlertEvent) GetAction() string { - if d == nil || d.Action == nil { - return "" - } - return *d.Action -} - -// GetAlert returns the Alert field. -func (d *DependabotAlertEvent) GetAlert() *DependabotAlert { - if d == nil { - return nil - } - return d.Alert -} - -// GetEnterprise returns the Enterprise field. 
-func (d *DependabotAlertEvent) GetEnterprise() *Enterprise { - if d == nil { - return nil - } - return d.Enterprise -} - -// GetInstallation returns the Installation field. -func (d *DependabotAlertEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrganization returns the Organization field. -func (d *DependabotAlertEvent) GetOrganization() *Organization { - if d == nil { - return nil - } - return d.Organization -} - -// GetRepo returns the Repo field. -func (d *DependabotAlertEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DependabotAlertEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetDismissedComment returns the DismissedComment field if it's non-nil, zero value otherwise. -func (d *DependabotAlertState) GetDismissedComment() string { - if d == nil || d.DismissedComment == nil { - return "" - } - return *d.DismissedComment -} - -// GetDismissedReason returns the DismissedReason field if it's non-nil, zero value otherwise. -func (d *DependabotAlertState) GetDismissedReason() string { - if d == nil || d.DismissedReason == nil { - return "" - } - return *d.DismissedReason -} - -// GetCVEID returns the CVEID field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetCVEID() string { - if d == nil || d.CVEID == nil { - return "" - } - return *d.CVEID -} - -// GetCVSS returns the CVSS field. -func (d *DependabotSecurityAdvisory) GetCVSS() *AdvisoryCVSS { - if d == nil { - return nil - } - return d.CVSS -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetGHSAID returns the GHSAID field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetGHSAID() string { - if d == nil || d.GHSAID == nil { - return "" - } - return *d.GHSAID -} - -// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetPublishedAt() Timestamp { - if d == nil || d.PublishedAt == nil { - return Timestamp{} - } - return *d.PublishedAt -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetSeverity() string { - if d == nil || d.Severity == nil { - return "" - } - return *d.Severity -} - -// GetSummary returns the Summary field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetSummary() string { - if d == nil || d.Summary == nil { - return "" - } - return *d.Summary -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetWithdrawnAt returns the WithdrawnAt field if it's non-nil, zero value otherwise. -func (d *DependabotSecurityAdvisory) GetWithdrawnAt() Timestamp { - if d == nil || d.WithdrawnAt == nil { - return Timestamp{} - } - return *d.WithdrawnAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. 
-func (d *DependabotSecurityUpdates) GetStatus() string { - if d == nil || d.Status == nil { - return "" - } - return *d.Status -} - -// GetManifestPath returns the ManifestPath field if it's non-nil, zero value otherwise. -func (d *Dependency) GetManifestPath() string { - if d == nil || d.ManifestPath == nil { - return "" - } - return *d.ManifestPath -} - -// GetPackage returns the Package field. -func (d *Dependency) GetPackage() *VulnerabilityPackage { - if d == nil { - return nil - } - return d.Package -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (d *Dependency) GetScope() string { - if d == nil || d.Scope == nil { - return "" - } - return *d.Scope -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (d *DeployKeyEvent) GetAction() string { - if d == nil || d.Action == nil { - return "" - } - return *d.Action -} - -// GetInstallation returns the Installation field. -func (d *DeployKeyEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetKey returns the Key field. -func (d *DeployKeyEvent) GetKey() *Key { - if d == nil { - return nil - } - return d.Key -} - -// GetOrganization returns the Organization field. -func (d *DeployKeyEvent) GetOrganization() *Organization { - if d == nil { - return nil - } - return d.Organization -} - -// GetRepo returns the Repo field. -func (d *DeployKeyEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DeployKeyEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (d *Deployment) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetCreator returns the Creator field. -func (d *Deployment) GetCreator() *User { - if d == nil { - return nil - } - return d.Creator -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *Deployment) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (d *Deployment) GetEnvironment() string { - if d == nil || d.Environment == nil { - return "" - } - return *d.Environment -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *Deployment) GetID() int64 { - if d == nil || d.ID == nil { - return 0 - } - return *d.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *Deployment) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (d *Deployment) GetRef() string { - if d == nil || d.Ref == nil { - return "" - } - return *d.Ref -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (d *Deployment) GetRepositoryURL() string { - if d == nil || d.RepositoryURL == nil { - return "" - } - return *d.RepositoryURL -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (d *Deployment) GetSHA() string { - if d == nil || d.SHA == nil { - return "" - } - return *d.SHA -} - -// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. 
-func (d *Deployment) GetStatusesURL() string { - if d == nil || d.StatusesURL == nil { - return "" - } - return *d.StatusesURL -} - -// GetTask returns the Task field if it's non-nil, zero value otherwise. -func (d *Deployment) GetTask() string { - if d == nil || d.Task == nil { - return "" - } - return *d.Task -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *Deployment) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (d *Deployment) GetURL() string { - if d == nil || d.URL == nil { - return "" - } - return *d.URL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicy) GetID() int64 { - if d == nil || d.ID == nil { - return 0 - } - return *d.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicy) GetName() string { - if d == nil || d.Name == nil { - return "" - } - return *d.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicy) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicy) GetType() string { - if d == nil || d.Type == nil { - return "" - } - return *d.Type -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicyRequest) GetName() string { - if d == nil || d.Name == nil { - return "" - } - return *d.Name -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicyRequest) GetType() string { - if d == nil || d.Type == nil { - return "" - } - return *d.Type -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (d *DeploymentBranchPolicyResponse) GetTotalCount() int { - if d == nil || d.TotalCount == nil { - return 0 - } - return *d.TotalCount -} - -// GetDeployment returns the Deployment field. -func (d *DeploymentEvent) GetDeployment() *Deployment { - if d == nil { - return nil - } - return d.Deployment -} - -// GetInstallation returns the Installation field. -func (d *DeploymentEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrg returns the Org field. -func (d *DeploymentEvent) GetOrg() *Organization { - if d == nil { - return nil - } - return d.Org -} - -// GetRepo returns the Repo field. -func (d *DeploymentEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DeploymentEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetWorkflow returns the Workflow field. -func (d *DeploymentEvent) GetWorkflow() *Workflow { - if d == nil { - return nil - } - return d.Workflow -} - -// GetWorkflowRun returns the WorkflowRun field. -func (d *DeploymentEvent) GetWorkflowRun() *WorkflowRun { - if d == nil { - return nil - } - return d.WorkflowRun -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (d *DeploymentProtectionRuleEvent) GetAction() string { - if d == nil || d.Action == nil { - return "" - } - return *d.Action -} - -// GetDeployment returns the Deployment field. 
-func (d *DeploymentProtectionRuleEvent) GetDeployment() *Deployment { - if d == nil { - return nil - } - return d.Deployment -} - -// GetDeploymentCallbackURL returns the DeploymentCallbackURL field if it's non-nil, zero value otherwise. -func (d *DeploymentProtectionRuleEvent) GetDeploymentCallbackURL() string { - if d == nil || d.DeploymentCallbackURL == nil { - return "" - } - return *d.DeploymentCallbackURL -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (d *DeploymentProtectionRuleEvent) GetEnvironment() string { - if d == nil || d.Environment == nil { - return "" - } - return *d.Environment -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (d *DeploymentProtectionRuleEvent) GetEvent() string { - if d == nil || d.Event == nil { - return "" - } - return *d.Event -} - -// GetInstallation returns the Installation field. -func (d *DeploymentProtectionRuleEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrganization returns the Organization field. -func (d *DeploymentProtectionRuleEvent) GetOrganization() *Organization { - if d == nil { - return nil - } - return d.Organization -} - -// GetRepo returns the Repo field. -func (d *DeploymentProtectionRuleEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DeploymentProtectionRuleEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetAutoMerge returns the AutoMerge field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetAutoMerge() bool { - if d == nil || d.AutoMerge == nil { - return false - } - return *d.AutoMerge -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetEnvironment() string { - if d == nil || d.Environment == nil { - return "" - } - return *d.Environment -} - -// GetProductionEnvironment returns the ProductionEnvironment field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetProductionEnvironment() bool { - if d == nil || d.ProductionEnvironment == nil { - return false - } - return *d.ProductionEnvironment -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetRef() string { - if d == nil || d.Ref == nil { - return "" - } - return *d.Ref -} - -// GetRequiredContexts returns the RequiredContexts field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetRequiredContexts() []string { - if d == nil || d.RequiredContexts == nil { - return nil - } - return *d.RequiredContexts -} - -// GetTask returns the Task field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetTask() string { - if d == nil || d.Task == nil { - return "" - } - return *d.Task -} - -// GetTransientEnvironment returns the TransientEnvironment field if it's non-nil, zero value otherwise. -func (d *DeploymentRequest) GetTransientEnvironment() bool { - if d == nil || d.TransientEnvironment == nil { - return false - } - return *d.TransientEnvironment -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (d *DeploymentStatus) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetCreator returns the Creator field. -func (d *DeploymentStatus) GetCreator() *User { - if d == nil { - return nil - } - return d.Creator -} - -// GetDeploymentURL returns the DeploymentURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetDeploymentURL() string { - if d == nil || d.DeploymentURL == nil { - return "" - } - return *d.DeploymentURL -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetEnvironment() string { - if d == nil || d.Environment == nil { - return "" - } - return *d.Environment -} - -// GetEnvironmentURL returns the EnvironmentURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetEnvironmentURL() string { - if d == nil || d.EnvironmentURL == nil { - return "" - } - return *d.EnvironmentURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetID() int64 { - if d == nil || d.ID == nil { - return 0 - } - return *d.ID -} - -// GetLogURL returns the LogURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetLogURL() string { - if d == nil || d.LogURL == nil { - return "" - } - return *d.LogURL -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetRepositoryURL() string { - if d == nil || d.RepositoryURL == nil { - return "" - } - return *d.RepositoryURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetTargetURL returns the TargetURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetTargetURL() string { - if d == nil || d.TargetURL == nil { - return "" - } - return *d.TargetURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatus) GetURL() string { - if d == nil || d.URL == nil { - return "" - } - return *d.URL -} - -// GetDeployment returns the Deployment field. -func (d *DeploymentStatusEvent) GetDeployment() *Deployment { - if d == nil { - return nil - } - return d.Deployment -} - -// GetDeploymentStatus returns the DeploymentStatus field. -func (d *DeploymentStatusEvent) GetDeploymentStatus() *DeploymentStatus { - if d == nil { - return nil - } - return d.DeploymentStatus -} - -// GetInstallation returns the Installation field. -func (d *DeploymentStatusEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrg returns the Org field. 
-func (d *DeploymentStatusEvent) GetOrg() *Organization { - if d == nil { - return nil - } - return d.Org -} - -// GetRepo returns the Repo field. -func (d *DeploymentStatusEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DeploymentStatusEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetAutoInactive returns the AutoInactive field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetAutoInactive() bool { - if d == nil || d.AutoInactive == nil { - return false - } - return *d.AutoInactive -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetEnvironment() string { - if d == nil || d.Environment == nil { - return "" - } - return *d.Environment -} - -// GetEnvironmentURL returns the EnvironmentURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetEnvironmentURL() string { - if d == nil || d.EnvironmentURL == nil { - return "" - } - return *d.EnvironmentURL -} - -// GetLogURL returns the LogURL field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetLogURL() string { - if d == nil || d.LogURL == nil { - return "" - } - return *d.LogURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DeploymentStatusRequest) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. -func (d *Discussion) GetActiveLockReason() string { - if d == nil || d.ActiveLockReason == nil { - return "" - } - return *d.ActiveLockReason -} - -// GetAnswerChosenAt returns the AnswerChosenAt field if it's non-nil, zero value otherwise. -func (d *Discussion) GetAnswerChosenAt() Timestamp { - if d == nil || d.AnswerChosenAt == nil { - return Timestamp{} - } - return *d.AnswerChosenAt -} - -// GetAnswerChosenBy returns the AnswerChosenBy field if it's non-nil, zero value otherwise. -func (d *Discussion) GetAnswerChosenBy() string { - if d == nil || d.AnswerChosenBy == nil { - return "" - } - return *d.AnswerChosenBy -} - -// GetAnswerHTMLURL returns the AnswerHTMLURL field if it's non-nil, zero value otherwise. -func (d *Discussion) GetAnswerHTMLURL() string { - if d == nil || d.AnswerHTMLURL == nil { - return "" - } - return *d.AnswerHTMLURL -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (d *Discussion) GetAuthorAssociation() string { - if d == nil || d.AuthorAssociation == nil { - return "" - } - return *d.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (d *Discussion) GetBody() string { - if d == nil || d.Body == nil { - return "" - } - return *d.Body -} - -// GetComments returns the Comments field if it's non-nil, zero value otherwise. -func (d *Discussion) GetComments() int { - if d == nil || d.Comments == nil { - return 0 - } - return *d.Comments -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (d *Discussion) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetDiscussionCategory returns the DiscussionCategory field. -func (d *Discussion) GetDiscussionCategory() *DiscussionCategory { - if d == nil { - return nil - } - return d.DiscussionCategory -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (d *Discussion) GetHTMLURL() string { - if d == nil || d.HTMLURL == nil { - return "" - } - return *d.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *Discussion) GetID() int64 { - if d == nil || d.ID == nil { - return 0 - } - return *d.ID -} - -// GetLocked returns the Locked field if it's non-nil, zero value otherwise. -func (d *Discussion) GetLocked() bool { - if d == nil || d.Locked == nil { - return false - } - return *d.Locked -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *Discussion) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (d *Discussion) GetNumber() int { - if d == nil || d.Number == nil { - return 0 - } - return *d.Number -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (d *Discussion) GetRepositoryURL() string { - if d == nil || d.RepositoryURL == nil { - return "" - } - return *d.RepositoryURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *Discussion) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (d *Discussion) GetTitle() string { - if d == nil || d.Title == nil { - return "" - } - return *d.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *Discussion) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetUser returns the User field. -func (d *Discussion) GetUser() *User { - if d == nil { - return nil - } - return d.User -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetDescription() string { - if d == nil || d.Description == nil { - return "" - } - return *d.Description -} - -// GetEmoji returns the Emoji field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetEmoji() string { - if d == nil || d.Emoji == nil { - return "" - } - return *d.Emoji -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetID() int64 { - if d == nil || d.ID == nil { - return 0 - } - return *d.ID -} - -// GetIsAnswerable returns the IsAnswerable field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetIsAnswerable() bool { - if d == nil || d.IsAnswerable == nil { - return false - } - return *d.IsAnswerable -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (d *DiscussionCategory) GetName() string { - if d == nil || d.Name == nil { - return "" - } - return *d.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetRepositoryID() int64 { - if d == nil || d.RepositoryID == nil { - return 0 - } - return *d.RepositoryID -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetSlug() string { - if d == nil || d.Slug == nil { - return "" - } - return *d.Slug -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DiscussionCategory) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetAuthor returns the Author field. -func (d *DiscussionComment) GetAuthor() *User { - if d == nil { - return nil - } - return d.Author -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetBody() string { - if d == nil || d.Body == nil { - return "" - } - return *d.Body -} - -// GetBodyHTML returns the BodyHTML field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetBodyHTML() string { - if d == nil || d.BodyHTML == nil { - return "" - } - return *d.BodyHTML -} - -// GetBodyVersion returns the BodyVersion field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetBodyVersion() string { - if d == nil || d.BodyVersion == nil { - return "" - } - return *d.BodyVersion -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetCreatedAt() Timestamp { - if d == nil || d.CreatedAt == nil { - return Timestamp{} - } - return *d.CreatedAt -} - -// GetDiscussionURL returns the DiscussionURL field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetDiscussionURL() string { - if d == nil || d.DiscussionURL == nil { - return "" - } - return *d.DiscussionURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetHTMLURL() string { - if d == nil || d.HTMLURL == nil { - return "" - } - return *d.HTMLURL -} - -// GetLastEditedAt returns the LastEditedAt field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetLastEditedAt() Timestamp { - if d == nil || d.LastEditedAt == nil { - return Timestamp{} - } - return *d.LastEditedAt -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetNodeID() string { - if d == nil || d.NodeID == nil { - return "" - } - return *d.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetNumber() int { - if d == nil || d.Number == nil { - return 0 - } - return *d.Number -} - -// GetReactions returns the Reactions field. -func (d *DiscussionComment) GetReactions() *Reactions { - if d == nil { - return nil - } - return d.Reactions -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (d *DiscussionComment) GetUpdatedAt() Timestamp { - if d == nil || d.UpdatedAt == nil { - return Timestamp{} - } - return *d.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (d *DiscussionComment) GetURL() string { - if d == nil || d.URL == nil { - return "" - } - return *d.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (d *DiscussionCommentEvent) GetAction() string { - if d == nil || d.Action == nil { - return "" - } - return *d.Action -} - -// GetComment returns the Comment field. -func (d *DiscussionCommentEvent) GetComment() *CommentDiscussion { - if d == nil { - return nil - } - return d.Comment -} - -// GetDiscussion returns the Discussion field. -func (d *DiscussionCommentEvent) GetDiscussion() *Discussion { - if d == nil { - return nil - } - return d.Discussion -} - -// GetInstallation returns the Installation field. -func (d *DiscussionCommentEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrg returns the Org field. -func (d *DiscussionCommentEvent) GetOrg() *Organization { - if d == nil { - return nil - } - return d.Org -} - -// GetRepo returns the Repo field. -func (d *DiscussionCommentEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DiscussionCommentEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (d *DiscussionEvent) GetAction() string { - if d == nil || d.Action == nil { - return "" - } - return *d.Action -} - -// GetDiscussion returns the Discussion field. -func (d *DiscussionEvent) GetDiscussion() *Discussion { - if d == nil { - return nil - } - return d.Discussion -} - -// GetInstallation returns the Installation field. -func (d *DiscussionEvent) GetInstallation() *Installation { - if d == nil { - return nil - } - return d.Installation -} - -// GetOrg returns the Org field. -func (d *DiscussionEvent) GetOrg() *Organization { - if d == nil { - return nil - } - return d.Org -} - -// GetRepo returns the Repo field. -func (d *DiscussionEvent) GetRepo() *Repository { - if d == nil { - return nil - } - return d.Repo -} - -// GetSender returns the Sender field. -func (d *DiscussionEvent) GetSender() *User { - if d == nil { - return nil - } - return d.Sender -} - -// GetApps returns the Apps field if it's non-nil, zero value otherwise. -func (d *DismissalRestrictionsRequest) GetApps() []string { - if d == nil || d.Apps == nil { - return nil - } - return *d.Apps -} - -// GetTeams returns the Teams field if it's non-nil, zero value otherwise. -func (d *DismissalRestrictionsRequest) GetTeams() []string { - if d == nil || d.Teams == nil { - return nil - } - return *d.Teams -} - -// GetUsers returns the Users field if it's non-nil, zero value otherwise. -func (d *DismissalRestrictionsRequest) GetUsers() []string { - if d == nil || d.Users == nil { - return nil - } - return *d.Users -} - -// GetDismissalCommitID returns the DismissalCommitID field if it's non-nil, zero value otherwise. -func (d *DismissedReview) GetDismissalCommitID() string { - if d == nil || d.DismissalCommitID == nil { - return "" - } - return *d.DismissalCommitID -} - -// GetDismissalMessage returns the DismissalMessage field if it's non-nil, zero value otherwise. -func (d *DismissedReview) GetDismissalMessage() string { - if d == nil || d.DismissalMessage == nil { - return "" - } - return *d.DismissalMessage -} - -// GetReviewID returns the ReviewID field if it's non-nil, zero value otherwise. 
-func (d *DismissedReview) GetReviewID() int64 { - if d == nil || d.ReviewID == nil { - return 0 - } - return *d.ReviewID -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (d *DismissedReview) GetState() string { - if d == nil || d.State == nil { - return "" - } - return *d.State -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (d *DismissStaleReviewsOnPushChanges) GetFrom() bool { - if d == nil || d.From == nil { - return false - } - return *d.From -} - -// GetClientPayload returns the ClientPayload field if it's non-nil, zero value otherwise. -func (d *DispatchRequestOptions) GetClientPayload() json.RawMessage { - if d == nil || d.ClientPayload == nil { - return json.RawMessage{} - } - return *d.ClientPayload -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetBody() string { - if d == nil || d.Body == nil { - return "" - } - return *d.Body -} - -// GetLine returns the Line field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetLine() int { - if d == nil || d.Line == nil { - return 0 - } - return *d.Line -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetPath() string { - if d == nil || d.Path == nil { - return "" - } - return *d.Path -} - -// GetPosition returns the Position field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetPosition() int { - if d == nil || d.Position == nil { - return 0 - } - return *d.Position -} - -// GetSide returns the Side field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetSide() string { - if d == nil || d.Side == nil { - return "" - } - return *d.Side -} - -// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetStartLine() int { - if d == nil || d.StartLine == nil { - return 0 - } - return *d.StartLine -} - -// GetStartSide returns the StartSide field if it's non-nil, zero value otherwise. -func (d *DraftReviewComment) GetStartSide() string { - if d == nil || d.StartSide == nil { - return "" - } - return *d.StartSide -} - -// GetRef returns the Ref field. -func (e *EditBase) GetRef() *EditRef { - if e == nil { - return nil - } - return e.Ref -} - -// GetSHA returns the SHA field. -func (e *EditBase) GetSHA() *EditSHA { - if e == nil { - return nil - } - return e.SHA -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (e *EditBody) GetFrom() string { - if e == nil || e.From == nil { - return "" - } - return *e.From -} - -// GetBase returns the Base field. -func (e *EditChange) GetBase() *EditBase { - if e == nil { - return nil - } - return e.Base -} - -// GetBody returns the Body field. -func (e *EditChange) GetBody() *EditBody { - if e == nil { - return nil - } - return e.Body -} - -// GetDefaultBranch returns the DefaultBranch field. -func (e *EditChange) GetDefaultBranch() *EditDefaultBranch { - if e == nil { - return nil - } - return e.DefaultBranch -} - -// GetOwner returns the Owner field. -func (e *EditChange) GetOwner() *EditOwner { - if e == nil { - return nil - } - return e.Owner -} - -// GetRepo returns the Repo field. -func (e *EditChange) GetRepo() *EditRepo { - if e == nil { - return nil - } - return e.Repo -} - -// GetTitle returns the Title field. 
-func (e *EditChange) GetTitle() *EditTitle { - if e == nil { - return nil - } - return e.Title -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (e *EditDefaultBranch) GetFrom() string { - if e == nil || e.From == nil { - return "" - } - return *e.From -} - -// GetOwnerInfo returns the OwnerInfo field. -func (e *EditOwner) GetOwnerInfo() *OwnerInfo { - if e == nil { - return nil - } - return e.OwnerInfo -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (e *EditRef) GetFrom() string { - if e == nil || e.From == nil { - return "" - } - return *e.From -} - -// GetName returns the Name field. -func (e *EditRepo) GetName() *RepoName { - if e == nil { - return nil - } - return e.Name -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (e *EditSHA) GetFrom() string { - if e == nil || e.From == nil { - return "" - } - return *e.From -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (e *EditTitle) GetFrom() string { - if e == nil || e.From == nil { - return "" - } - return *e.From -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetAvatarURL() string { - if e == nil || e.AvatarURL == nil { - return "" - } - return *e.AvatarURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetCreatedAt() Timestamp { - if e == nil || e.CreatedAt == nil { - return Timestamp{} - } - return *e.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetDescription() string { - if e == nil || e.Description == nil { - return "" - } - return *e.Description -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetHTMLURL() string { - if e == nil || e.HTMLURL == nil { - return "" - } - return *e.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetID() int { - if e == nil || e.ID == nil { - return 0 - } - return *e.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetName() string { - if e == nil || e.Name == nil { - return "" - } - return *e.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetNodeID() string { - if e == nil || e.NodeID == nil { - return "" - } - return *e.NodeID -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetSlug() string { - if e == nil || e.Slug == nil { - return "" - } - return *e.Slug -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetUpdatedAt() Timestamp { - if e == nil || e.UpdatedAt == nil { - return Timestamp{} - } - return *e.UpdatedAt -} - -// GetWebsiteURL returns the WebsiteURL field if it's non-nil, zero value otherwise. -func (e *Enterprise) GetWebsiteURL() string { - if e == nil || e.WebsiteURL == nil { - return "" - } - return *e.WebsiteURL -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetAllowsPublicRepositories() bool { - if e == nil || e.AllowsPublicRepositories == nil { - return false - } - return *e.AllowsPublicRepositories -} - -// GetDefault returns the Default field if it's non-nil, zero value otherwise. 
-func (e *EnterpriseRunnerGroup) GetDefault() bool { - if e == nil || e.Default == nil { - return false - } - return *e.Default -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetID() int64 { - if e == nil || e.ID == nil { - return 0 - } - return *e.ID -} - -// GetInherited returns the Inherited field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetInherited() bool { - if e == nil || e.Inherited == nil { - return false - } - return *e.Inherited -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetName() string { - if e == nil || e.Name == nil { - return "" - } - return *e.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetRestrictedToWorkflows() bool { - if e == nil || e.RestrictedToWorkflows == nil { - return false - } - return *e.RestrictedToWorkflows -} - -// GetRunnersURL returns the RunnersURL field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetRunnersURL() string { - if e == nil || e.RunnersURL == nil { - return "" - } - return *e.RunnersURL -} - -// GetSelectedOrganizationsURL returns the SelectedOrganizationsURL field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetSelectedOrganizationsURL() string { - if e == nil || e.SelectedOrganizationsURL == nil { - return "" - } - return *e.SelectedOrganizationsURL -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetVisibility() string { - if e == nil || e.Visibility == nil { - return "" - } - return *e.Visibility -} - -// GetWorkflowRestrictionsReadOnly returns the WorkflowRestrictionsReadOnly field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroup) GetWorkflowRestrictionsReadOnly() bool { - if e == nil || e.WorkflowRestrictionsReadOnly == nil { - return false - } - return *e.WorkflowRestrictionsReadOnly -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (e *EnterpriseRunnerGroups) GetTotalCount() int { - if e == nil || e.TotalCount == nil { - return 0 - } - return *e.TotalCount -} - -// GetAdvancedSecurityEnabledForNewRepositories returns the AdvancedSecurityEnabledForNewRepositories field if it's non-nil, zero value otherwise. -func (e *EnterpriseSecurityAnalysisSettings) GetAdvancedSecurityEnabledForNewRepositories() bool { - if e == nil || e.AdvancedSecurityEnabledForNewRepositories == nil { - return false - } - return *e.AdvancedSecurityEnabledForNewRepositories -} - -// GetSecretScanningEnabledForNewRepositories returns the SecretScanningEnabledForNewRepositories field if it's non-nil, zero value otherwise. -func (e *EnterpriseSecurityAnalysisSettings) GetSecretScanningEnabledForNewRepositories() bool { - if e == nil || e.SecretScanningEnabledForNewRepositories == nil { - return false - } - return *e.SecretScanningEnabledForNewRepositories -} - -// GetSecretScanningPushProtectionCustomLink returns the SecretScanningPushProtectionCustomLink field if it's non-nil, zero value otherwise. 
-func (e *EnterpriseSecurityAnalysisSettings) GetSecretScanningPushProtectionCustomLink() string { - if e == nil || e.SecretScanningPushProtectionCustomLink == nil { - return "" - } - return *e.SecretScanningPushProtectionCustomLink -} - -// GetSecretScanningPushProtectionEnabledForNewRepositories returns the SecretScanningPushProtectionEnabledForNewRepositories field if it's non-nil, zero value otherwise. -func (e *EnterpriseSecurityAnalysisSettings) GetSecretScanningPushProtectionEnabledForNewRepositories() bool { - if e == nil || e.SecretScanningPushProtectionEnabledForNewRepositories == nil { - return false - } - return *e.SecretScanningPushProtectionEnabledForNewRepositories -} - -// GetCanAdminsBypass returns the CanAdminsBypass field if it's non-nil, zero value otherwise. -func (e *Environment) GetCanAdminsBypass() bool { - if e == nil || e.CanAdminsBypass == nil { - return false - } - return *e.CanAdminsBypass -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (e *Environment) GetCreatedAt() Timestamp { - if e == nil || e.CreatedAt == nil { - return Timestamp{} - } - return *e.CreatedAt -} - -// GetDeploymentBranchPolicy returns the DeploymentBranchPolicy field. -func (e *Environment) GetDeploymentBranchPolicy() *BranchPolicy { - if e == nil { - return nil - } - return e.DeploymentBranchPolicy -} - -// GetEnvironmentName returns the EnvironmentName field if it's non-nil, zero value otherwise. -func (e *Environment) GetEnvironmentName() string { - if e == nil || e.EnvironmentName == nil { - return "" - } - return *e.EnvironmentName -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (e *Environment) GetHTMLURL() string { - if e == nil || e.HTMLURL == nil { - return "" - } - return *e.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (e *Environment) GetID() int64 { - if e == nil || e.ID == nil { - return 0 - } - return *e.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (e *Environment) GetName() string { - if e == nil || e.Name == nil { - return "" - } - return *e.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (e *Environment) GetNodeID() string { - if e == nil || e.NodeID == nil { - return "" - } - return *e.NodeID -} - -// GetOwner returns the Owner field if it's non-nil, zero value otherwise. -func (e *Environment) GetOwner() string { - if e == nil || e.Owner == nil { - return "" - } - return *e.Owner -} - -// GetRepo returns the Repo field if it's non-nil, zero value otherwise. -func (e *Environment) GetRepo() string { - if e == nil || e.Repo == nil { - return "" - } - return *e.Repo -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (e *Environment) GetUpdatedAt() Timestamp { - if e == nil || e.UpdatedAt == nil { - return Timestamp{} - } - return *e.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (e *Environment) GetURL() string { - if e == nil || e.URL == nil { - return "" - } - return *e.URL -} - -// GetWaitTimer returns the WaitTimer field if it's non-nil, zero value otherwise. -func (e *Environment) GetWaitTimer() int { - if e == nil || e.WaitTimer == nil { - return 0 - } - return *e.WaitTimer -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. 
-func (e *EnvResponse) GetTotalCount() int { - if e == nil || e.TotalCount == nil { - return 0 - } - return *e.TotalCount -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (e *EnvReviewers) GetID() int64 { - if e == nil || e.ID == nil { - return 0 - } - return *e.ID -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (e *EnvReviewers) GetType() string { - if e == nil || e.Type == nil { - return "" - } - return *e.Type -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (e *ErrorBlock) GetCreatedAt() Timestamp { - if e == nil || e.CreatedAt == nil { - return Timestamp{} - } - return *e.CreatedAt -} - -// GetBlock returns the Block field. -func (e *ErrorResponse) GetBlock() *ErrorBlock { - if e == nil { - return nil - } - return e.Block -} - -// GetActor returns the Actor field. -func (e *Event) GetActor() *User { - if e == nil { - return nil - } - return e.Actor -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (e *Event) GetCreatedAt() Timestamp { - if e == nil || e.CreatedAt == nil { - return Timestamp{} - } - return *e.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (e *Event) GetID() string { - if e == nil || e.ID == nil { - return "" - } - return *e.ID -} - -// GetOrg returns the Org field. -func (e *Event) GetOrg() *Organization { - if e == nil { - return nil - } - return e.Org -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. -func (e *Event) GetPublic() bool { - if e == nil || e.Public == nil { - return false - } - return *e.Public -} - -// GetRawPayload returns the RawPayload field if it's non-nil, zero value otherwise. -func (e *Event) GetRawPayload() json.RawMessage { - if e == nil || e.RawPayload == nil { - return json.RawMessage{} - } - return *e.RawPayload -} - -// GetRepo returns the Repo field. -func (e *Event) GetRepo() *Repository { - if e == nil { - return nil - } - return e.Repo -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (e *Event) GetType() string { - if e == nil || e.Type == nil { - return "" - } - return *e.Type -} - -// GetGroupID returns the GroupID field if it's non-nil, zero value otherwise. -func (e *ExternalGroup) GetGroupID() int64 { - if e == nil || e.GroupID == nil { - return 0 - } - return *e.GroupID -} - -// GetGroupName returns the GroupName field if it's non-nil, zero value otherwise. -func (e *ExternalGroup) GetGroupName() string { - if e == nil || e.GroupName == nil { - return "" - } - return *e.GroupName -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (e *ExternalGroup) GetUpdatedAt() Timestamp { - if e == nil || e.UpdatedAt == nil { - return Timestamp{} - } - return *e.UpdatedAt -} - -// GetMemberEmail returns the MemberEmail field if it's non-nil, zero value otherwise. -func (e *ExternalGroupMember) GetMemberEmail() string { - if e == nil || e.MemberEmail == nil { - return "" - } - return *e.MemberEmail -} - -// GetMemberID returns the MemberID field if it's non-nil, zero value otherwise. -func (e *ExternalGroupMember) GetMemberID() int64 { - if e == nil || e.MemberID == nil { - return 0 - } - return *e.MemberID -} - -// GetMemberLogin returns the MemberLogin field if it's non-nil, zero value otherwise. 
-func (e *ExternalGroupMember) GetMemberLogin() string { - if e == nil || e.MemberLogin == nil { - return "" - } - return *e.MemberLogin -} - -// GetMemberName returns the MemberName field if it's non-nil, zero value otherwise. -func (e *ExternalGroupMember) GetMemberName() string { - if e == nil || e.MemberName == nil { - return "" - } - return *e.MemberName -} - -// GetTeamID returns the TeamID field if it's non-nil, zero value otherwise. -func (e *ExternalGroupTeam) GetTeamID() int64 { - if e == nil || e.TeamID == nil { - return 0 - } - return *e.TeamID -} - -// GetTeamName returns the TeamName field if it's non-nil, zero value otherwise. -func (e *ExternalGroupTeam) GetTeamName() string { - if e == nil || e.TeamName == nil { - return "" - } - return *e.TeamName -} - -// GetHRef returns the HRef field if it's non-nil, zero value otherwise. -func (f *FeedLink) GetHRef() string { - if f == nil || f.HRef == nil { - return "" - } - return *f.HRef -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (f *FeedLink) GetType() string { - if f == nil || f.Type == nil { - return "" - } - return *f.Type -} - -// GetCurrentUser returns the CurrentUser field. -func (f *FeedLinks) GetCurrentUser() *FeedLink { - if f == nil { - return nil - } - return f.CurrentUser -} - -// GetCurrentUserActor returns the CurrentUserActor field. -func (f *FeedLinks) GetCurrentUserActor() *FeedLink { - if f == nil { - return nil - } - return f.CurrentUserActor -} - -// GetCurrentUserOrganization returns the CurrentUserOrganization field. -func (f *FeedLinks) GetCurrentUserOrganization() *FeedLink { - if f == nil { - return nil - } - return f.CurrentUserOrganization -} - -// GetCurrentUserPublic returns the CurrentUserPublic field. -func (f *FeedLinks) GetCurrentUserPublic() *FeedLink { - if f == nil { - return nil - } - return f.CurrentUserPublic -} - -// GetTimeline returns the Timeline field. -func (f *FeedLinks) GetTimeline() *FeedLink { - if f == nil { - return nil - } - return f.Timeline -} - -// GetUser returns the User field. -func (f *FeedLinks) GetUser() *FeedLink { - if f == nil { - return nil - } - return f.User -} - -// GetCurrentUserActorURL returns the CurrentUserActorURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserActorURL() string { - if f == nil || f.CurrentUserActorURL == nil { - return "" - } - return *f.CurrentUserActorURL -} - -// GetCurrentUserOrganizationURL returns the CurrentUserOrganizationURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserOrganizationURL() string { - if f == nil || f.CurrentUserOrganizationURL == nil { - return "" - } - return *f.CurrentUserOrganizationURL -} - -// GetCurrentUserPublicURL returns the CurrentUserPublicURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserPublicURL() string { - if f == nil || f.CurrentUserPublicURL == nil { - return "" - } - return *f.CurrentUserPublicURL -} - -// GetCurrentUserURL returns the CurrentUserURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetCurrentUserURL() string { - if f == nil || f.CurrentUserURL == nil { - return "" - } - return *f.CurrentUserURL -} - -// GetLinks returns the Links field. -func (f *Feeds) GetLinks() *FeedLinks { - if f == nil { - return nil - } - return f.Links -} - -// GetTimelineURL returns the TimelineURL field if it's non-nil, zero value otherwise. 
-func (f *Feeds) GetTimelineURL() string { - if f == nil || f.TimelineURL == nil { - return "" - } - return *f.TimelineURL -} - -// GetUserURL returns the UserURL field if it's non-nil, zero value otherwise. -func (f *Feeds) GetUserURL() string { - if f == nil || f.UserURL == nil { - return "" - } - return *f.UserURL -} - -// GetIdentifier returns the Identifier field if it's non-nil, zero value otherwise. -func (f *FirstPatchedVersion) GetIdentifier() string { - if f == nil || f.Identifier == nil { - return "" - } - return *f.Identifier -} - -// GetForkee returns the Forkee field. -func (f *ForkEvent) GetForkee() *Repository { - if f == nil { - return nil - } - return f.Forkee -} - -// GetInstallation returns the Installation field. -func (f *ForkEvent) GetInstallation() *Installation { - if f == nil { - return nil - } - return f.Installation -} - -// GetRepo returns the Repo field. -func (f *ForkEvent) GetRepo() *Repository { - if f == nil { - return nil - } - return f.Repo -} - -// GetSender returns the Sender field. -func (f *ForkEvent) GetSender() *User { - if f == nil { - return nil - } - return f.Sender -} - -// GetWorkFolder returns the WorkFolder field if it's non-nil, zero value otherwise. -func (g *GenerateJITConfigRequest) GetWorkFolder() string { - if g == nil || g.WorkFolder == nil { - return "" - } - return *g.WorkFolder -} - -// GetPreviousTagName returns the PreviousTagName field if it's non-nil, zero value otherwise. -func (g *GenerateNotesOptions) GetPreviousTagName() string { - if g == nil || g.PreviousTagName == nil { - return "" - } - return *g.PreviousTagName -} - -// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. -func (g *GenerateNotesOptions) GetTargetCommitish() string { - if g == nil || g.TargetCommitish == nil { - return "" - } - return *g.TargetCommitish -} - -// GetInclude returns the Include field if it's non-nil, zero value otherwise. -func (g *GetAuditLogOptions) GetInclude() string { - if g == nil || g.Include == nil { - return "" - } - return *g.Include -} - -// GetOrder returns the Order field if it's non-nil, zero value otherwise. -func (g *GetAuditLogOptions) GetOrder() string { - if g == nil || g.Order == nil { - return "" - } - return *g.Order -} - -// GetPhrase returns the Phrase field if it's non-nil, zero value otherwise. -func (g *GetAuditLogOptions) GetPhrase() string { - if g == nil || g.Phrase == nil { - return "" - } - return *g.Phrase -} - -// GetComments returns the Comments field if it's non-nil, zero value otherwise. -func (g *Gist) GetComments() int { - if g == nil || g.Comments == nil { - return 0 - } - return *g.Comments -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *Gist) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} - } - return *g.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (g *Gist) GetDescription() string { - if g == nil || g.Description == nil { - return "" - } - return *g.Description -} - -// GetFiles returns the Files map if it's non-nil, an empty map otherwise. -func (g *Gist) GetFiles() map[GistFilename]GistFile { - if g == nil || g.Files == nil { - return map[GistFilename]GistFile{} - } - return g.Files -} - -// GetGitPullURL returns the GitPullURL field if it's non-nil, zero value otherwise. 
-func (g *Gist) GetGitPullURL() string { - if g == nil || g.GitPullURL == nil { - return "" - } - return *g.GitPullURL -} - -// GetGitPushURL returns the GitPushURL field if it's non-nil, zero value otherwise. -func (g *Gist) GetGitPushURL() string { - if g == nil || g.GitPushURL == nil { - return "" - } - return *g.GitPushURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (g *Gist) GetHTMLURL() string { - if g == nil || g.HTMLURL == nil { - return "" - } - return *g.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *Gist) GetID() string { - if g == nil || g.ID == nil { - return "" - } - return *g.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (g *Gist) GetNodeID() string { - if g == nil || g.NodeID == nil { - return "" - } - return *g.NodeID -} - -// GetOwner returns the Owner field. -func (g *Gist) GetOwner() *User { - if g == nil { - return nil - } - return g.Owner -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. -func (g *Gist) GetPublic() bool { - if g == nil || g.Public == nil { - return false - } - return *g.Public -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (g *Gist) GetUpdatedAt() Timestamp { - if g == nil || g.UpdatedAt == nil { - return Timestamp{} - } - return *g.UpdatedAt -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (g *GistComment) GetBody() string { - if g == nil || g.Body == nil { - return "" - } - return *g.Body -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *GistComment) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} - } - return *g.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *GistComment) GetID() int64 { - if g == nil || g.ID == nil { - return 0 - } - return *g.ID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *GistComment) GetURL() string { - if g == nil || g.URL == nil { - return "" - } - return *g.URL -} - -// GetUser returns the User field. -func (g *GistComment) GetUser() *User { - if g == nil { - return nil - } - return g.User -} - -// GetChangeStatus returns the ChangeStatus field. -func (g *GistCommit) GetChangeStatus() *CommitStats { - if g == nil { - return nil - } - return g.ChangeStatus -} - -// GetCommittedAt returns the CommittedAt field if it's non-nil, zero value otherwise. -func (g *GistCommit) GetCommittedAt() Timestamp { - if g == nil || g.CommittedAt == nil { - return Timestamp{} - } - return *g.CommittedAt -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (g *GistCommit) GetNodeID() string { - if g == nil || g.NodeID == nil { - return "" - } - return *g.NodeID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *GistCommit) GetURL() string { - if g == nil || g.URL == nil { - return "" - } - return *g.URL -} - -// GetUser returns the User field. -func (g *GistCommit) GetUser() *User { - if g == nil { - return nil - } - return g.User -} - -// GetVersion returns the Version field if it's non-nil, zero value otherwise. -func (g *GistCommit) GetVersion() string { - if g == nil || g.Version == nil { - return "" - } - return *g.Version -} - -// GetContent returns the Content field if it's non-nil, zero value otherwise. 
-func (g *GistFile) GetContent() string { - if g == nil || g.Content == nil { - return "" - } - return *g.Content -} - -// GetFilename returns the Filename field if it's non-nil, zero value otherwise. -func (g *GistFile) GetFilename() string { - if g == nil || g.Filename == nil { - return "" - } - return *g.Filename -} - -// GetLanguage returns the Language field if it's non-nil, zero value otherwise. -func (g *GistFile) GetLanguage() string { - if g == nil || g.Language == nil { - return "" - } - return *g.Language -} - -// GetRawURL returns the RawURL field if it's non-nil, zero value otherwise. -func (g *GistFile) GetRawURL() string { - if g == nil || g.RawURL == nil { - return "" - } - return *g.RawURL -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (g *GistFile) GetSize() int { - if g == nil || g.Size == nil { - return 0 - } - return *g.Size -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (g *GistFile) GetType() string { - if g == nil || g.Type == nil { - return "" - } - return *g.Type -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *GistFork) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} - } - return *g.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *GistFork) GetID() string { - if g == nil || g.ID == nil { - return "" - } - return *g.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (g *GistFork) GetNodeID() string { - if g == nil || g.NodeID == nil { - return "" - } - return *g.NodeID -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (g *GistFork) GetUpdatedAt() Timestamp { - if g == nil || g.UpdatedAt == nil { - return Timestamp{} - } - return *g.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *GistFork) GetURL() string { - if g == nil || g.URL == nil { - return "" - } - return *g.URL -} - -// GetUser returns the User field. -func (g *GistFork) GetUser() *User { - if g == nil { - return nil - } - return g.User -} - -// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. -func (g *GistStats) GetPrivateGists() int { - if g == nil || g.PrivateGists == nil { - return 0 - } - return *g.PrivateGists -} - -// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. -func (g *GistStats) GetPublicGists() int { - if g == nil || g.PublicGists == nil { - return 0 - } - return *g.PublicGists -} - -// GetTotalGists returns the TotalGists field if it's non-nil, zero value otherwise. -func (g *GistStats) GetTotalGists() int { - if g == nil || g.TotalGists == nil { - return 0 - } - return *g.TotalGists -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (g *GitHubAppAuthorizationEvent) GetAction() string { - if g == nil || g.Action == nil { - return "" - } - return *g.Action -} - -// GetInstallation returns the Installation field. -func (g *GitHubAppAuthorizationEvent) GetInstallation() *Installation { - if g == nil { - return nil - } - return g.Installation -} - -// GetSender returns the Sender field. -func (g *GitHubAppAuthorizationEvent) GetSender() *User { - if g == nil { - return nil - } - return g.Sender -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (g *Gitignore) GetName() string { - if g == nil || g.Name == nil { - return "" - } - return *g.Name -} - -// GetSource returns the Source field if it's non-nil, zero value otherwise. -func (g *Gitignore) GetSource() string { - if g == nil || g.Source == nil { - return "" - } - return *g.Source -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (g *GitObject) GetSHA() string { - if g == nil || g.SHA == nil { - return "" - } - return *g.SHA -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (g *GitObject) GetType() string { - if g == nil || g.Type == nil { - return "" - } - return *g.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *GitObject) GetURL() string { - if g == nil || g.URL == nil { - return "" - } - return *g.URL -} - -// GetGithubReviewedAt returns the GithubReviewedAt field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetGithubReviewedAt() Timestamp { - if g == nil || g.GithubReviewedAt == nil { - return Timestamp{} - } - return *g.GithubReviewedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetID() int64 { - if g == nil || g.ID == nil { - return 0 - } - return *g.ID -} - -// GetNVDPublishedAt returns the NVDPublishedAt field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetNVDPublishedAt() Timestamp { - if g == nil || g.NVDPublishedAt == nil { - return Timestamp{} - } - return *g.NVDPublishedAt -} - -// GetRepositoryAdvisoryURL returns the RepositoryAdvisoryURL field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetRepositoryAdvisoryURL() string { - if g == nil || g.RepositoryAdvisoryURL == nil { - return "" - } - return *g.RepositoryAdvisoryURL -} - -// GetSourceCodeLocation returns the SourceCodeLocation field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetSourceCodeLocation() string { - if g == nil || g.SourceCodeLocation == nil { - return "" - } - return *g.SourceCodeLocation -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityAdvisory) GetType() string { - if g == nil || g.Type == nil { - return "" - } - return *g.Type -} - -// GetFirstPatchedVersion returns the FirstPatchedVersion field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityVulnerability) GetFirstPatchedVersion() string { - if g == nil || g.FirstPatchedVersion == nil { - return "" - } - return *g.FirstPatchedVersion -} - -// GetPackage returns the Package field. -func (g *GlobalSecurityVulnerability) GetPackage() *VulnerabilityPackage { - if g == nil { - return nil - } - return g.Package -} - -// GetVulnerableVersionRange returns the VulnerableVersionRange field if it's non-nil, zero value otherwise. -func (g *GlobalSecurityVulnerability) GetVulnerableVersionRange() string { - if g == nil || g.VulnerableVersionRange == nil { - return "" - } - return *g.VulnerableVersionRange -} - -// GetInstallation returns the Installation field. -func (g *GollumEvent) GetInstallation() *Installation { - if g == nil { - return nil - } - return g.Installation -} - -// GetOrg returns the Org field. -func (g *GollumEvent) GetOrg() *Organization { - if g == nil { - return nil - } - return g.Org -} - -// GetRepo returns the Repo field. -func (g *GollumEvent) GetRepo() *Repository { - if g == nil { - return nil - } - return g.Repo -} - -// GetSender returns the Sender field. 
-func (g *GollumEvent) GetSender() *User { - if g == nil { - return nil - } - return g.Sender -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (g *GPGEmail) GetEmail() string { - if g == nil || g.Email == nil { - return "" - } - return *g.Email -} - -// GetVerified returns the Verified field if it's non-nil, zero value otherwise. -func (g *GPGEmail) GetVerified() bool { - if g == nil || g.Verified == nil { - return false - } - return *g.Verified -} - -// GetCanCertify returns the CanCertify field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCanCertify() bool { - if g == nil || g.CanCertify == nil { - return false - } - return *g.CanCertify -} - -// GetCanEncryptComms returns the CanEncryptComms field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCanEncryptComms() bool { - if g == nil || g.CanEncryptComms == nil { - return false - } - return *g.CanEncryptComms -} - -// GetCanEncryptStorage returns the CanEncryptStorage field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCanEncryptStorage() bool { - if g == nil || g.CanEncryptStorage == nil { - return false - } - return *g.CanEncryptStorage -} - -// GetCanSign returns the CanSign field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCanSign() bool { - if g == nil || g.CanSign == nil { - return false - } - return *g.CanSign -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} - } - return *g.CreatedAt -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetExpiresAt() Timestamp { - if g == nil || g.ExpiresAt == nil { - return Timestamp{} - } - return *g.ExpiresAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetID() int64 { - if g == nil || g.ID == nil { - return 0 - } - return *g.ID -} - -// GetKeyID returns the KeyID field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetKeyID() string { - if g == nil || g.KeyID == nil { - return "" - } - return *g.KeyID -} - -// GetPrimaryKeyID returns the PrimaryKeyID field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetPrimaryKeyID() int64 { - if g == nil || g.PrimaryKeyID == nil { - return 0 - } - return *g.PrimaryKeyID -} - -// GetPublicKey returns the PublicKey field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetPublicKey() string { - if g == nil || g.PublicKey == nil { - return "" - } - return *g.PublicKey -} - -// GetRawKey returns the RawKey field if it's non-nil, zero value otherwise. -func (g *GPGKey) GetRawKey() string { - if g == nil || g.RawKey == nil { - return "" - } - return *g.RawKey -} - -// GetApp returns the App field. -func (g *Grant) GetApp() *AuthorizationApp { - if g == nil { - return nil - } - return g.App -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (g *Grant) GetCreatedAt() Timestamp { - if g == nil || g.CreatedAt == nil { - return Timestamp{} - } - return *g.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (g *Grant) GetID() int64 { - if g == nil || g.ID == nil { - return 0 - } - return *g.ID -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (g *Grant) GetUpdatedAt() Timestamp { - if g == nil || g.UpdatedAt == nil { - return Timestamp{} - } - return *g.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (g *Grant) GetURL() string { - if g == nil || g.URL == nil { - return "" - } - return *g.URL -} - -// GetAuthor returns the Author field. -func (h *HeadCommit) GetAuthor() *CommitAuthor { - if h == nil { - return nil - } - return h.Author -} - -// GetCommitter returns the Committer field. -func (h *HeadCommit) GetCommitter() *CommitAuthor { - if h == nil { - return nil - } - return h.Committer -} - -// GetDistinct returns the Distinct field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetDistinct() bool { - if h == nil || h.Distinct == nil { - return false - } - return *h.Distinct -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetID() string { - if h == nil || h.ID == nil { - return "" - } - return *h.ID -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetMessage() string { - if h == nil || h.Message == nil { - return "" - } - return *h.Message -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetSHA() string { - if h == nil || h.SHA == nil { - return "" - } - return *h.SHA -} - -// GetTimestamp returns the Timestamp field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetTimestamp() Timestamp { - if h == nil || h.Timestamp == nil { - return Timestamp{} - } - return *h.Timestamp -} - -// GetTreeID returns the TreeID field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetTreeID() string { - if h == nil || h.TreeID == nil { - return "" - } - return *h.TreeID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (h *HeadCommit) GetURL() string { - if h == nil || h.URL == nil { - return "" - } - return *h.URL -} - -// GetActive returns the Active field if it's non-nil, zero value otherwise. -func (h *Hook) GetActive() bool { - if h == nil || h.Active == nil { - return false - } - return *h.Active -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (h *Hook) GetCreatedAt() Timestamp { - if h == nil || h.CreatedAt == nil { - return Timestamp{} - } - return *h.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (h *Hook) GetID() int64 { - if h == nil || h.ID == nil { - return 0 - } - return *h.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (h *Hook) GetName() string { - if h == nil || h.Name == nil { - return "" - } - return *h.Name -} - -// GetPingURL returns the PingURL field if it's non-nil, zero value otherwise. -func (h *Hook) GetPingURL() string { - if h == nil || h.PingURL == nil { - return "" - } - return *h.PingURL -} - -// GetTestURL returns the TestURL field if it's non-nil, zero value otherwise. -func (h *Hook) GetTestURL() string { - if h == nil || h.TestURL == nil { - return "" - } - return *h.TestURL -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (h *Hook) GetType() string { - if h == nil || h.Type == nil { - return "" - } - return *h.Type -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (h *Hook) GetUpdatedAt() Timestamp { - if h == nil || h.UpdatedAt == nil { - return Timestamp{} - } - return *h.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (h *Hook) GetURL() string { - if h == nil || h.URL == nil { - return "" - } - return *h.URL -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. -func (h *HookConfig) GetContentType() string { - if h == nil || h.ContentType == nil { - return "" - } - return *h.ContentType -} - -// GetInsecureSSL returns the InsecureSSL field if it's non-nil, zero value otherwise. -func (h *HookConfig) GetInsecureSSL() string { - if h == nil || h.InsecureSSL == nil { - return "" - } - return *h.InsecureSSL -} - -// GetSecret returns the Secret field if it's non-nil, zero value otherwise. -func (h *HookConfig) GetSecret() string { - if h == nil || h.Secret == nil { - return "" - } - return *h.Secret -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (h *HookConfig) GetURL() string { - if h == nil || h.URL == nil { - return "" - } - return *h.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetAction() string { - if h == nil || h.Action == nil { - return "" - } - return *h.Action -} - -// GetDeliveredAt returns the DeliveredAt field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetDeliveredAt() Timestamp { - if h == nil || h.DeliveredAt == nil { - return Timestamp{} - } - return *h.DeliveredAt -} - -// GetDuration returns the Duration field. -func (h *HookDelivery) GetDuration() *float64 { - if h == nil { - return nil - } - return h.Duration -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetEvent() string { - if h == nil || h.Event == nil { - return "" - } - return *h.Event -} - -// GetGUID returns the GUID field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetGUID() string { - if h == nil || h.GUID == nil { - return "" - } - return *h.GUID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetID() int64 { - if h == nil || h.ID == nil { - return 0 - } - return *h.ID -} - -// GetInstallationID returns the InstallationID field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetInstallationID() int64 { - if h == nil || h.InstallationID == nil { - return 0 - } - return *h.InstallationID -} - -// GetRedelivery returns the Redelivery field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetRedelivery() bool { - if h == nil || h.Redelivery == nil { - return false - } - return *h.Redelivery -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetRepositoryID() int64 { - if h == nil || h.RepositoryID == nil { - return 0 - } - return *h.RepositoryID -} - -// GetRequest returns the Request field. -func (h *HookDelivery) GetRequest() *HookRequest { - if h == nil { - return nil - } - return h.Request -} - -// GetResponse returns the Response field. -func (h *HookDelivery) GetResponse() *HookResponse { - if h == nil { - return nil - } - return h.Response -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (h *HookDelivery) GetStatus() string { - if h == nil || h.Status == nil { - return "" - } - return *h.Status -} - -// GetStatusCode returns the StatusCode field if it's non-nil, zero value otherwise. 
-func (h *HookDelivery) GetStatusCode() int { - if h == nil || h.StatusCode == nil { - return 0 - } - return *h.StatusCode -} - -// GetHeaders returns the Headers map if it's non-nil, an empty map otherwise. -func (h *HookRequest) GetHeaders() map[string]string { - if h == nil || h.Headers == nil { - return map[string]string{} - } - return h.Headers -} - -// GetRawPayload returns the RawPayload field if it's non-nil, zero value otherwise. -func (h *HookRequest) GetRawPayload() json.RawMessage { - if h == nil || h.RawPayload == nil { - return json.RawMessage{} - } - return *h.RawPayload -} - -// GetHeaders returns the Headers map if it's non-nil, an empty map otherwise. -func (h *HookResponse) GetHeaders() map[string]string { - if h == nil || h.Headers == nil { - return map[string]string{} - } - return h.Headers -} - -// GetRawPayload returns the RawPayload field if it's non-nil, zero value otherwise. -func (h *HookResponse) GetRawPayload() json.RawMessage { - if h == nil || h.RawPayload == nil { - return json.RawMessage{} - } - return *h.RawPayload -} - -// GetActiveHooks returns the ActiveHooks field if it's non-nil, zero value otherwise. -func (h *HookStats) GetActiveHooks() int { - if h == nil || h.ActiveHooks == nil { - return 0 - } - return *h.ActiveHooks -} - -// GetInactiveHooks returns the InactiveHooks field if it's non-nil, zero value otherwise. -func (h *HookStats) GetInactiveHooks() int { - if h == nil || h.InactiveHooks == nil { - return 0 - } - return *h.InactiveHooks -} - -// GetTotalHooks returns the TotalHooks field if it's non-nil, zero value otherwise. -func (h *HookStats) GetTotalHooks() int { - if h == nil || h.TotalHooks == nil { - return 0 - } - return *h.TotalHooks -} - -// GetGroupDescription returns the GroupDescription field if it's non-nil, zero value otherwise. -func (i *IDPGroup) GetGroupDescription() string { - if i == nil || i.GroupDescription == nil { - return "" - } - return *i.GroupDescription -} - -// GetGroupID returns the GroupID field if it's non-nil, zero value otherwise. -func (i *IDPGroup) GetGroupID() string { - if i == nil || i.GroupID == nil { - return "" - } - return *i.GroupID -} - -// GetGroupName returns the GroupName field if it's non-nil, zero value otherwise. -func (i *IDPGroup) GetGroupName() string { - if i == nil || i.GroupName == nil { - return "" - } - return *i.GroupName -} - -// GetAuthorsCount returns the AuthorsCount field if it's non-nil, zero value otherwise. -func (i *Import) GetAuthorsCount() int { - if i == nil || i.AuthorsCount == nil { - return 0 - } - return *i.AuthorsCount -} - -// GetAuthorsURL returns the AuthorsURL field if it's non-nil, zero value otherwise. -func (i *Import) GetAuthorsURL() string { - if i == nil || i.AuthorsURL == nil { - return "" - } - return *i.AuthorsURL -} - -// GetCommitCount returns the CommitCount field if it's non-nil, zero value otherwise. -func (i *Import) GetCommitCount() int { - if i == nil || i.CommitCount == nil { - return 0 - } - return *i.CommitCount -} - -// GetFailedStep returns the FailedStep field if it's non-nil, zero value otherwise. -func (i *Import) GetFailedStep() string { - if i == nil || i.FailedStep == nil { - return "" - } - return *i.FailedStep -} - -// GetHasLargeFiles returns the HasLargeFiles field if it's non-nil, zero value otherwise. -func (i *Import) GetHasLargeFiles() bool { - if i == nil || i.HasLargeFiles == nil { - return false - } - return *i.HasLargeFiles -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. 
-func (i *Import) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { - return "" - } - return *i.HTMLURL -} - -// GetHumanName returns the HumanName field if it's non-nil, zero value otherwise. -func (i *Import) GetHumanName() string { - if i == nil || i.HumanName == nil { - return "" - } - return *i.HumanName -} - -// GetLargeFilesCount returns the LargeFilesCount field if it's non-nil, zero value otherwise. -func (i *Import) GetLargeFilesCount() int { - if i == nil || i.LargeFilesCount == nil { - return 0 - } - return *i.LargeFilesCount -} - -// GetLargeFilesSize returns the LargeFilesSize field if it's non-nil, zero value otherwise. -func (i *Import) GetLargeFilesSize() int { - if i == nil || i.LargeFilesSize == nil { - return 0 - } - return *i.LargeFilesSize -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (i *Import) GetMessage() string { - if i == nil || i.Message == nil { - return "" - } - return *i.Message -} - -// GetPercent returns the Percent field if it's non-nil, zero value otherwise. -func (i *Import) GetPercent() int { - if i == nil || i.Percent == nil { - return 0 - } - return *i.Percent -} - -// GetPushPercent returns the PushPercent field if it's non-nil, zero value otherwise. -func (i *Import) GetPushPercent() int { - if i == nil || i.PushPercent == nil { - return 0 - } - return *i.PushPercent -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (i *Import) GetRepositoryURL() string { - if i == nil || i.RepositoryURL == nil { - return "" - } - return *i.RepositoryURL -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (i *Import) GetStatus() string { - if i == nil || i.Status == nil { - return "" - } - return *i.Status -} - -// GetStatusText returns the StatusText field if it's non-nil, zero value otherwise. -func (i *Import) GetStatusText() string { - if i == nil || i.StatusText == nil { - return "" - } - return *i.StatusText -} - -// GetTFVCProject returns the TFVCProject field if it's non-nil, zero value otherwise. -func (i *Import) GetTFVCProject() string { - if i == nil || i.TFVCProject == nil { - return "" - } - return *i.TFVCProject -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *Import) GetURL() string { - if i == nil || i.URL == nil { - return "" - } - return *i.URL -} - -// GetUseLFS returns the UseLFS field if it's non-nil, zero value otherwise. -func (i *Import) GetUseLFS() string { - if i == nil || i.UseLFS == nil { - return "" - } - return *i.UseLFS -} - -// GetVCS returns the VCS field if it's non-nil, zero value otherwise. -func (i *Import) GetVCS() string { - if i == nil || i.VCS == nil { - return "" - } - return *i.VCS -} - -// GetVCSPassword returns the VCSPassword field if it's non-nil, zero value otherwise. -func (i *Import) GetVCSPassword() string { - if i == nil || i.VCSPassword == nil { - return "" - } - return *i.VCSPassword -} - -// GetVCSURL returns the VCSURL field if it's non-nil, zero value otherwise. -func (i *Import) GetVCSURL() string { - if i == nil || i.VCSURL == nil { - return "" - } - return *i.VCSURL -} - -// GetVCSUsername returns the VCSUsername field if it's non-nil, zero value otherwise. -func (i *Import) GetVCSUsername() string { - if i == nil || i.VCSUsername == nil { - return "" - } - return *i.VCSUsername -} - -// GetAccessTokensURL returns the AccessTokensURL field if it's non-nil, zero value otherwise. 
-func (i *Installation) GetAccessTokensURL() string { - if i == nil || i.AccessTokensURL == nil { - return "" - } - return *i.AccessTokensURL -} - -// GetAccount returns the Account field. -func (i *Installation) GetAccount() *User { - if i == nil { - return nil - } - return i.Account -} - -// GetAppID returns the AppID field if it's non-nil, zero value otherwise. -func (i *Installation) GetAppID() int64 { - if i == nil || i.AppID == nil { - return 0 - } - return *i.AppID -} - -// GetAppSlug returns the AppSlug field if it's non-nil, zero value otherwise. -func (i *Installation) GetAppSlug() string { - if i == nil || i.AppSlug == nil { - return "" - } - return *i.AppSlug -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *Installation) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetHasMultipleSingleFiles returns the HasMultipleSingleFiles field if it's non-nil, zero value otherwise. -func (i *Installation) GetHasMultipleSingleFiles() bool { - if i == nil || i.HasMultipleSingleFiles == nil { - return false - } - return *i.HasMultipleSingleFiles -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (i *Installation) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { - return "" - } - return *i.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *Installation) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (i *Installation) GetNodeID() string { - if i == nil || i.NodeID == nil { - return "" - } - return *i.NodeID -} - -// GetPermissions returns the Permissions field. -func (i *Installation) GetPermissions() *InstallationPermissions { - if i == nil { - return nil - } - return i.Permissions -} - -// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. -func (i *Installation) GetRepositoriesURL() string { - if i == nil || i.RepositoriesURL == nil { - return "" - } - return *i.RepositoriesURL -} - -// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. -func (i *Installation) GetRepositorySelection() string { - if i == nil || i.RepositorySelection == nil { - return "" - } - return *i.RepositorySelection -} - -// GetSingleFileName returns the SingleFileName field if it's non-nil, zero value otherwise. -func (i *Installation) GetSingleFileName() string { - if i == nil || i.SingleFileName == nil { - return "" - } - return *i.SingleFileName -} - -// GetSuspendedAt returns the SuspendedAt field if it's non-nil, zero value otherwise. -func (i *Installation) GetSuspendedAt() Timestamp { - if i == nil || i.SuspendedAt == nil { - return Timestamp{} - } - return *i.SuspendedAt -} - -// GetSuspendedBy returns the SuspendedBy field. -func (i *Installation) GetSuspendedBy() *User { - if i == nil { - return nil - } - return i.SuspendedBy -} - -// GetTargetID returns the TargetID field if it's non-nil, zero value otherwise. -func (i *Installation) GetTargetID() int64 { - if i == nil || i.TargetID == nil { - return 0 - } - return *i.TargetID -} - -// GetTargetType returns the TargetType field if it's non-nil, zero value otherwise. 
-func (i *Installation) GetTargetType() string { - if i == nil || i.TargetType == nil { - return "" - } - return *i.TargetType -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *Installation) GetUpdatedAt() Timestamp { - if i == nil || i.UpdatedAt == nil { - return Timestamp{} - } - return *i.UpdatedAt -} - -// GetLogin returns the Login field. -func (i *InstallationChanges) GetLogin() *InstallationLoginChange { - if i == nil { - return nil - } - return i.Login -} - -// GetSlug returns the Slug field. -func (i *InstallationChanges) GetSlug() *InstallationSlugChange { - if i == nil { - return nil - } - return i.Slug -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *InstallationEvent) GetAction() string { - if i == nil || i.Action == nil { - return "" - } - return *i.Action -} - -// GetInstallation returns the Installation field. -func (i *InstallationEvent) GetInstallation() *Installation { - if i == nil { - return nil - } - return i.Installation -} - -// GetOrg returns the Org field. -func (i *InstallationEvent) GetOrg() *Organization { - if i == nil { - return nil - } - return i.Org -} - -// GetRequester returns the Requester field. -func (i *InstallationEvent) GetRequester() *User { - if i == nil { - return nil - } - return i.Requester -} - -// GetSender returns the Sender field. -func (i *InstallationEvent) GetSender() *User { - if i == nil { - return nil - } - return i.Sender -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (i *InstallationLoginChange) GetFrom() string { - if i == nil || i.From == nil { - return "" - } - return *i.From -} - -// GetActions returns the Actions field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetActions() string { - if i == nil || i.Actions == nil { - return "" - } - return *i.Actions -} - -// GetAdministration returns the Administration field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetAdministration() string { - if i == nil || i.Administration == nil { - return "" - } - return *i.Administration -} - -// GetBlocking returns the Blocking field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetBlocking() string { - if i == nil || i.Blocking == nil { - return "" - } - return *i.Blocking -} - -// GetChecks returns the Checks field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetChecks() string { - if i == nil || i.Checks == nil { - return "" - } - return *i.Checks -} - -// GetContentReferences returns the ContentReferences field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetContentReferences() string { - if i == nil || i.ContentReferences == nil { - return "" - } - return *i.ContentReferences -} - -// GetContents returns the Contents field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetContents() string { - if i == nil || i.Contents == nil { - return "" - } - return *i.Contents -} - -// GetDeployments returns the Deployments field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetDeployments() string { - if i == nil || i.Deployments == nil { - return "" - } - return *i.Deployments -} - -// GetEmails returns the Emails field if it's non-nil, zero value otherwise. 
-func (i *InstallationPermissions) GetEmails() string { - if i == nil || i.Emails == nil { - return "" - } - return *i.Emails -} - -// GetEnvironments returns the Environments field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetEnvironments() string { - if i == nil || i.Environments == nil { - return "" - } - return *i.Environments -} - -// GetFollowers returns the Followers field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetFollowers() string { - if i == nil || i.Followers == nil { - return "" - } - return *i.Followers -} - -// GetIssues returns the Issues field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetIssues() string { - if i == nil || i.Issues == nil { - return "" - } - return *i.Issues -} - -// GetMembers returns the Members field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetMembers() string { - if i == nil || i.Members == nil { - return "" - } - return *i.Members -} - -// GetMetadata returns the Metadata field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetMetadata() string { - if i == nil || i.Metadata == nil { - return "" - } - return *i.Metadata -} - -// GetOrganizationAdministration returns the OrganizationAdministration field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationAdministration() string { - if i == nil || i.OrganizationAdministration == nil { - return "" - } - return *i.OrganizationAdministration -} - -// GetOrganizationCustomRoles returns the OrganizationCustomRoles field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationCustomRoles() string { - if i == nil || i.OrganizationCustomRoles == nil { - return "" - } - return *i.OrganizationCustomRoles -} - -// GetOrganizationHooks returns the OrganizationHooks field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationHooks() string { - if i == nil || i.OrganizationHooks == nil { - return "" - } - return *i.OrganizationHooks -} - -// GetOrganizationPackages returns the OrganizationPackages field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationPackages() string { - if i == nil || i.OrganizationPackages == nil { - return "" - } - return *i.OrganizationPackages -} - -// GetOrganizationPlan returns the OrganizationPlan field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationPlan() string { - if i == nil || i.OrganizationPlan == nil { - return "" - } - return *i.OrganizationPlan -} - -// GetOrganizationPreReceiveHooks returns the OrganizationPreReceiveHooks field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationPreReceiveHooks() string { - if i == nil || i.OrganizationPreReceiveHooks == nil { - return "" - } - return *i.OrganizationPreReceiveHooks -} - -// GetOrganizationProjects returns the OrganizationProjects field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationProjects() string { - if i == nil || i.OrganizationProjects == nil { - return "" - } - return *i.OrganizationProjects -} - -// GetOrganizationSecrets returns the OrganizationSecrets field if it's non-nil, zero value otherwise. 
-func (i *InstallationPermissions) GetOrganizationSecrets() string { - if i == nil || i.OrganizationSecrets == nil { - return "" - } - return *i.OrganizationSecrets -} - -// GetOrganizationSelfHostedRunners returns the OrganizationSelfHostedRunners field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationSelfHostedRunners() string { - if i == nil || i.OrganizationSelfHostedRunners == nil { - return "" - } - return *i.OrganizationSelfHostedRunners -} - -// GetOrganizationUserBlocking returns the OrganizationUserBlocking field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetOrganizationUserBlocking() string { - if i == nil || i.OrganizationUserBlocking == nil { - return "" - } - return *i.OrganizationUserBlocking -} - -// GetPackages returns the Packages field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetPackages() string { - if i == nil || i.Packages == nil { - return "" - } - return *i.Packages -} - -// GetPages returns the Pages field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetPages() string { - if i == nil || i.Pages == nil { - return "" - } - return *i.Pages -} - -// GetPullRequests returns the PullRequests field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetPullRequests() string { - if i == nil || i.PullRequests == nil { - return "" - } - return *i.PullRequests -} - -// GetRepositoryHooks returns the RepositoryHooks field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetRepositoryHooks() string { - if i == nil || i.RepositoryHooks == nil { - return "" - } - return *i.RepositoryHooks -} - -// GetRepositoryPreReceiveHooks returns the RepositoryPreReceiveHooks field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetRepositoryPreReceiveHooks() string { - if i == nil || i.RepositoryPreReceiveHooks == nil { - return "" - } - return *i.RepositoryPreReceiveHooks -} - -// GetRepositoryProjects returns the RepositoryProjects field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetRepositoryProjects() string { - if i == nil || i.RepositoryProjects == nil { - return "" - } - return *i.RepositoryProjects -} - -// GetSecrets returns the Secrets field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetSecrets() string { - if i == nil || i.Secrets == nil { - return "" - } - return *i.Secrets -} - -// GetSecretScanningAlerts returns the SecretScanningAlerts field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetSecretScanningAlerts() string { - if i == nil || i.SecretScanningAlerts == nil { - return "" - } - return *i.SecretScanningAlerts -} - -// GetSecurityEvents returns the SecurityEvents field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetSecurityEvents() string { - if i == nil || i.SecurityEvents == nil { - return "" - } - return *i.SecurityEvents -} - -// GetSingleFile returns the SingleFile field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetSingleFile() string { - if i == nil || i.SingleFile == nil { - return "" - } - return *i.SingleFile -} - -// GetStatuses returns the Statuses field if it's non-nil, zero value otherwise. 
-func (i *InstallationPermissions) GetStatuses() string { - if i == nil || i.Statuses == nil { - return "" - } - return *i.Statuses -} - -// GetTeamDiscussions returns the TeamDiscussions field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetTeamDiscussions() string { - if i == nil || i.TeamDiscussions == nil { - return "" - } - return *i.TeamDiscussions -} - -// GetVulnerabilityAlerts returns the VulnerabilityAlerts field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetVulnerabilityAlerts() string { - if i == nil || i.VulnerabilityAlerts == nil { - return "" - } - return *i.VulnerabilityAlerts -} - -// GetWorkflows returns the Workflows field if it's non-nil, zero value otherwise. -func (i *InstallationPermissions) GetWorkflows() string { - if i == nil || i.Workflows == nil { - return "" - } - return *i.Workflows -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *InstallationRepositoriesEvent) GetAction() string { - if i == nil || i.Action == nil { - return "" - } - return *i.Action -} - -// GetInstallation returns the Installation field. -func (i *InstallationRepositoriesEvent) GetInstallation() *Installation { - if i == nil { - return nil - } - return i.Installation -} - -// GetOrg returns the Org field. -func (i *InstallationRepositoriesEvent) GetOrg() *Organization { - if i == nil { - return nil - } - return i.Org -} - -// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. -func (i *InstallationRepositoriesEvent) GetRepositorySelection() string { - if i == nil || i.RepositorySelection == nil { - return "" - } - return *i.RepositorySelection -} - -// GetSender returns the Sender field. -func (i *InstallationRepositoriesEvent) GetSender() *User { - if i == nil { - return nil - } - return i.Sender -} - -// GetAccount returns the Account field. -func (i *InstallationRequest) GetAccount() *User { - if i == nil { - return nil - } - return i.Account -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *InstallationRequest) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *InstallationRequest) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (i *InstallationRequest) GetNodeID() string { - if i == nil || i.NodeID == nil { - return "" - } - return *i.NodeID -} - -// GetRequester returns the Requester field. -func (i *InstallationRequest) GetRequester() *User { - if i == nil { - return nil - } - return i.Requester -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (i *InstallationSlugChange) GetFrom() string { - if i == nil || i.From == nil { - return "" - } - return *i.From -} - -// GetAccount returns the Account field. -func (i *InstallationTargetEvent) GetAccount() *User { - if i == nil { - return nil - } - return i.Account -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *InstallationTargetEvent) GetAction() string { - if i == nil || i.Action == nil { - return "" - } - return *i.Action -} - -// GetChanges returns the Changes field. 
-func (i *InstallationTargetEvent) GetChanges() *InstallationChanges { - if i == nil { - return nil - } - return i.Changes -} - -// GetEnterprise returns the Enterprise field. -func (i *InstallationTargetEvent) GetEnterprise() *Enterprise { - if i == nil { - return nil - } - return i.Enterprise -} - -// GetInstallation returns the Installation field. -func (i *InstallationTargetEvent) GetInstallation() *Installation { - if i == nil { - return nil - } - return i.Installation -} - -// GetOrganization returns the Organization field. -func (i *InstallationTargetEvent) GetOrganization() *Organization { - if i == nil { - return nil - } - return i.Organization -} - -// GetRepository returns the Repository field. -func (i *InstallationTargetEvent) GetRepository() *Repository { - if i == nil { - return nil - } - return i.Repository -} - -// GetSender returns the Sender field. -func (i *InstallationTargetEvent) GetSender() *User { - if i == nil { - return nil - } - return i.Sender -} - -// GetTargetType returns the TargetType field if it's non-nil, zero value otherwise. -func (i *InstallationTargetEvent) GetTargetType() string { - if i == nil || i.TargetType == nil { - return "" - } - return *i.TargetType -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (i *InstallationToken) GetExpiresAt() Timestamp { - if i == nil || i.ExpiresAt == nil { - return Timestamp{} - } - return *i.ExpiresAt -} - -// GetPermissions returns the Permissions field. -func (i *InstallationToken) GetPermissions() *InstallationPermissions { - if i == nil { - return nil - } - return i.Permissions -} - -// GetToken returns the Token field if it's non-nil, zero value otherwise. -func (i *InstallationToken) GetToken() string { - if i == nil || i.Token == nil { - return "" - } - return *i.Token -} - -// GetPermissions returns the Permissions field. -func (i *InstallationTokenOptions) GetPermissions() *InstallationPermissions { - if i == nil { - return nil - } - return i.Permissions -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (i *InteractionRestriction) GetExpiresAt() Timestamp { - if i == nil || i.ExpiresAt == nil { - return Timestamp{} - } - return *i.ExpiresAt -} - -// GetLimit returns the Limit field if it's non-nil, zero value otherwise. -func (i *InteractionRestriction) GetLimit() string { - if i == nil || i.Limit == nil { - return "" - } - return *i.Limit -} - -// GetOrigin returns the Origin field if it's non-nil, zero value otherwise. -func (i *InteractionRestriction) GetOrigin() string { - if i == nil || i.Origin == nil { - return "" - } - return *i.Origin -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *Invitation) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (i *Invitation) GetEmail() string { - if i == nil || i.Email == nil { - return "" - } - return *i.Email -} - -// GetFailedAt returns the FailedAt field if it's non-nil, zero value otherwise. -func (i *Invitation) GetFailedAt() Timestamp { - if i == nil || i.FailedAt == nil { - return Timestamp{} - } - return *i.FailedAt -} - -// GetFailedReason returns the FailedReason field if it's non-nil, zero value otherwise. 
-func (i *Invitation) GetFailedReason() string { - if i == nil || i.FailedReason == nil { - return "" - } - return *i.FailedReason -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *Invitation) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetInvitationTeamURL returns the InvitationTeamURL field if it's non-nil, zero value otherwise. -func (i *Invitation) GetInvitationTeamURL() string { - if i == nil || i.InvitationTeamURL == nil { - return "" - } - return *i.InvitationTeamURL -} - -// GetInviter returns the Inviter field. -func (i *Invitation) GetInviter() *User { - if i == nil { - return nil - } - return i.Inviter -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (i *Invitation) GetLogin() string { - if i == nil || i.Login == nil { - return "" - } - return *i.Login -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (i *Invitation) GetNodeID() string { - if i == nil || i.NodeID == nil { - return "" - } - return *i.NodeID -} - -// GetRole returns the Role field if it's non-nil, zero value otherwise. -func (i *Invitation) GetRole() string { - if i == nil || i.Role == nil { - return "" - } - return *i.Role -} - -// GetTeamCount returns the TeamCount field if it's non-nil, zero value otherwise. -func (i *Invitation) GetTeamCount() int { - if i == nil || i.TeamCount == nil { - return 0 - } - return *i.TeamCount -} - -// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. -func (i *Issue) GetActiveLockReason() string { - if i == nil || i.ActiveLockReason == nil { - return "" - } - return *i.ActiveLockReason -} - -// GetAssignee returns the Assignee field. -func (i *Issue) GetAssignee() *User { - if i == nil { - return nil - } - return i.Assignee -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (i *Issue) GetAuthorAssociation() string { - if i == nil || i.AuthorAssociation == nil { - return "" - } - return *i.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (i *Issue) GetBody() string { - if i == nil || i.Body == nil { - return "" - } - return *i.Body -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (i *Issue) GetClosedAt() Timestamp { - if i == nil || i.ClosedAt == nil { - return Timestamp{} - } - return *i.ClosedAt -} - -// GetClosedBy returns the ClosedBy field. -func (i *Issue) GetClosedBy() *User { - if i == nil { - return nil - } - return i.ClosedBy -} - -// GetComments returns the Comments field if it's non-nil, zero value otherwise. -func (i *Issue) GetComments() int { - if i == nil || i.Comments == nil { - return 0 - } - return *i.Comments -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. -func (i *Issue) GetCommentsURL() string { - if i == nil || i.CommentsURL == nil { - return "" - } - return *i.CommentsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *Issue) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (i *Issue) GetDraft() bool { - if i == nil || i.Draft == nil { - return false - } - return *i.Draft -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. 
-func (i *Issue) GetEventsURL() string { - if i == nil || i.EventsURL == nil { - return "" - } - return *i.EventsURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (i *Issue) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { - return "" - } - return *i.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *Issue) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. -func (i *Issue) GetLabelsURL() string { - if i == nil || i.LabelsURL == nil { - return "" - } - return *i.LabelsURL -} - -// GetLocked returns the Locked field if it's non-nil, zero value otherwise. -func (i *Issue) GetLocked() bool { - if i == nil || i.Locked == nil { - return false - } - return *i.Locked -} - -// GetMilestone returns the Milestone field. -func (i *Issue) GetMilestone() *Milestone { - if i == nil { - return nil - } - return i.Milestone -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (i *Issue) GetNodeID() string { - if i == nil || i.NodeID == nil { - return "" - } - return *i.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (i *Issue) GetNumber() int { - if i == nil || i.Number == nil { - return 0 - } - return *i.Number -} - -// GetPullRequestLinks returns the PullRequestLinks field. -func (i *Issue) GetPullRequestLinks() *PullRequestLinks { - if i == nil { - return nil - } - return i.PullRequestLinks -} - -// GetReactions returns the Reactions field. -func (i *Issue) GetReactions() *Reactions { - if i == nil { - return nil - } - return i.Reactions -} - -// GetRepository returns the Repository field. -func (i *Issue) GetRepository() *Repository { - if i == nil { - return nil - } - return i.Repository -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (i *Issue) GetRepositoryURL() string { - if i == nil || i.RepositoryURL == nil { - return "" - } - return *i.RepositoryURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (i *Issue) GetState() string { - if i == nil || i.State == nil { - return "" - } - return *i.State -} - -// GetStateReason returns the StateReason field if it's non-nil, zero value otherwise. -func (i *Issue) GetStateReason() string { - if i == nil || i.StateReason == nil { - return "" - } - return *i.StateReason -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (i *Issue) GetTitle() string { - if i == nil || i.Title == nil { - return "" - } - return *i.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *Issue) GetUpdatedAt() Timestamp { - if i == nil || i.UpdatedAt == nil { - return Timestamp{} - } - return *i.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *Issue) GetURL() string { - if i == nil || i.URL == nil { - return "" - } - return *i.URL -} - -// GetUser returns the User field. -func (i *Issue) GetUser() *User { - if i == nil { - return nil - } - return i.User -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. 
-func (i *IssueComment) GetAuthorAssociation() string { - if i == nil || i.AuthorAssociation == nil { - return "" - } - return *i.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetBody() string { - if i == nil || i.Body == nil { - return "" - } - return *i.Body -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetHTMLURL() string { - if i == nil || i.HTMLURL == nil { - return "" - } - return *i.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetIssueURL returns the IssueURL field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetIssueURL() string { - if i == nil || i.IssueURL == nil { - return "" - } - return *i.IssueURL -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetNodeID() string { - if i == nil || i.NodeID == nil { - return "" - } - return *i.NodeID -} - -// GetReactions returns the Reactions field. -func (i *IssueComment) GetReactions() *Reactions { - if i == nil { - return nil - } - return i.Reactions -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetUpdatedAt() Timestamp { - if i == nil || i.UpdatedAt == nil { - return Timestamp{} - } - return *i.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *IssueComment) GetURL() string { - if i == nil || i.URL == nil { - return "" - } - return *i.URL -} - -// GetUser returns the User field. -func (i *IssueComment) GetUser() *User { - if i == nil { - return nil - } - return i.User -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *IssueCommentEvent) GetAction() string { - if i == nil || i.Action == nil { - return "" - } - return *i.Action -} - -// GetChanges returns the Changes field. -func (i *IssueCommentEvent) GetChanges() *EditChange { - if i == nil { - return nil - } - return i.Changes -} - -// GetComment returns the Comment field. -func (i *IssueCommentEvent) GetComment() *IssueComment { - if i == nil { - return nil - } - return i.Comment -} - -// GetInstallation returns the Installation field. -func (i *IssueCommentEvent) GetInstallation() *Installation { - if i == nil { - return nil - } - return i.Installation -} - -// GetIssue returns the Issue field. -func (i *IssueCommentEvent) GetIssue() *Issue { - if i == nil { - return nil - } - return i.Issue -} - -// GetOrganization returns the Organization field. -func (i *IssueCommentEvent) GetOrganization() *Organization { - if i == nil { - return nil - } - return i.Organization -} - -// GetRepo returns the Repo field. -func (i *IssueCommentEvent) GetRepo() *Repository { - if i == nil { - return nil - } - return i.Repo -} - -// GetSender returns the Sender field. -func (i *IssueCommentEvent) GetSender() *User { - if i == nil { - return nil - } - return i.Sender -} - -// GetActor returns the Actor field. -func (i *IssueEvent) GetActor() *User { - if i == nil { - return nil - } - return i.Actor -} - -// GetAssignee returns the Assignee field. 
-func (i *IssueEvent) GetAssignee() *User { - if i == nil { - return nil - } - return i.Assignee -} - -// GetAssigner returns the Assigner field. -func (i *IssueEvent) GetAssigner() *User { - if i == nil { - return nil - } - return i.Assigner -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetCommitID() string { - if i == nil || i.CommitID == nil { - return "" - } - return *i.CommitID -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetDismissedReview returns the DismissedReview field. -func (i *IssueEvent) GetDismissedReview() *DismissedReview { - if i == nil { - return nil - } - return i.DismissedReview -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetEvent() string { - if i == nil || i.Event == nil { - return "" - } - return *i.Event -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetID() int64 { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetIssue returns the Issue field. -func (i *IssueEvent) GetIssue() *Issue { - if i == nil { - return nil - } - return i.Issue -} - -// GetLabel returns the Label field. -func (i *IssueEvent) GetLabel() *Label { - if i == nil { - return nil - } - return i.Label -} - -// GetLockReason returns the LockReason field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetLockReason() string { - if i == nil || i.LockReason == nil { - return "" - } - return *i.LockReason -} - -// GetMilestone returns the Milestone field. -func (i *IssueEvent) GetMilestone() *Milestone { - if i == nil { - return nil - } - return i.Milestone -} - -// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. -func (i *IssueEvent) GetPerformedViaGithubApp() *App { - if i == nil { - return nil - } - return i.PerformedViaGithubApp -} - -// GetProjectCard returns the ProjectCard field. -func (i *IssueEvent) GetProjectCard() *ProjectCard { - if i == nil { - return nil - } - return i.ProjectCard -} - -// GetRename returns the Rename field. -func (i *IssueEvent) GetRename() *Rename { - if i == nil { - return nil - } - return i.Rename -} - -// GetRequestedReviewer returns the RequestedReviewer field. -func (i *IssueEvent) GetRequestedReviewer() *User { - if i == nil { - return nil - } - return i.RequestedReviewer -} - -// GetRequestedTeam returns the RequestedTeam field. -func (i *IssueEvent) GetRequestedTeam() *Team { - if i == nil { - return nil - } - return i.RequestedTeam -} - -// GetReviewRequester returns the ReviewRequester field. -func (i *IssueEvent) GetReviewRequester() *User { - if i == nil { - return nil - } - return i.ReviewRequester -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *IssueEvent) GetURL() string { - if i == nil || i.URL == nil { - return "" - } - return *i.URL -} - -// GetAssignee returns the Assignee field if it's non-nil, zero value otherwise. -func (i *IssueImport) GetAssignee() string { - if i == nil || i.Assignee == nil { - return "" - } - return *i.Assignee -} - -// GetClosed returns the Closed field if it's non-nil, zero value otherwise. 
-func (i *IssueImport) GetClosed() bool { - if i == nil || i.Closed == nil { - return false - } - return *i.Closed -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (i *IssueImport) GetClosedAt() Timestamp { - if i == nil || i.ClosedAt == nil { - return Timestamp{} - } - return *i.ClosedAt -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *IssueImport) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetMilestone returns the Milestone field if it's non-nil, zero value otherwise. -func (i *IssueImport) GetMilestone() int { - if i == nil || i.Milestone == nil { - return 0 - } - return *i.Milestone -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *IssueImport) GetUpdatedAt() Timestamp { - if i == nil || i.UpdatedAt == nil { - return Timestamp{} - } - return *i.UpdatedAt -} - -// GetCode returns the Code field if it's non-nil, zero value otherwise. -func (i *IssueImportError) GetCode() string { - if i == nil || i.Code == nil { - return "" - } - return *i.Code -} - -// GetField returns the Field field if it's non-nil, zero value otherwise. -func (i *IssueImportError) GetField() string { - if i == nil || i.Field == nil { - return "" - } - return *i.Field -} - -// GetLocation returns the Location field if it's non-nil, zero value otherwise. -func (i *IssueImportError) GetLocation() string { - if i == nil || i.Location == nil { - return "" - } - return *i.Location -} - -// GetResource returns the Resource field if it's non-nil, zero value otherwise. -func (i *IssueImportError) GetResource() string { - if i == nil || i.Resource == nil { - return "" - } - return *i.Resource -} - -// GetValue returns the Value field if it's non-nil, zero value otherwise. -func (i *IssueImportError) GetValue() string { - if i == nil || i.Value == nil { - return "" - } - return *i.Value -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetCreatedAt() Timestamp { - if i == nil || i.CreatedAt == nil { - return Timestamp{} - } - return *i.CreatedAt -} - -// GetDocumentationURL returns the DocumentationURL field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetDocumentationURL() string { - if i == nil || i.DocumentationURL == nil { - return "" - } - return *i.DocumentationURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetID() int { - if i == nil || i.ID == nil { - return 0 - } - return *i.ID -} - -// GetImportIssuesURL returns the ImportIssuesURL field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetImportIssuesURL() string { - if i == nil || i.ImportIssuesURL == nil { - return "" - } - return *i.ImportIssuesURL -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetMessage() string { - if i == nil || i.Message == nil { - return "" - } - return *i.Message -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetRepositoryURL() string { - if i == nil || i.RepositoryURL == nil { - return "" - } - return *i.RepositoryURL -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. 
-func (i *IssueImportResponse) GetStatus() string { - if i == nil || i.Status == nil { - return "" - } - return *i.Status -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetUpdatedAt() Timestamp { - if i == nil || i.UpdatedAt == nil { - return Timestamp{} - } - return *i.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (i *IssueImportResponse) GetURL() string { - if i == nil || i.URL == nil { - return "" - } - return *i.URL -} - -// GetDirection returns the Direction field if it's non-nil, zero value otherwise. -func (i *IssueListCommentsOptions) GetDirection() string { - if i == nil || i.Direction == nil { - return "" - } - return *i.Direction -} - -// GetSince returns the Since field if it's non-nil, zero value otherwise. -func (i *IssueListCommentsOptions) GetSince() time.Time { - if i == nil || i.Since == nil { - return time.Time{} - } - return *i.Since -} - -// GetSort returns the Sort field if it's non-nil, zero value otherwise. -func (i *IssueListCommentsOptions) GetSort() string { - if i == nil || i.Sort == nil { - return "" - } - return *i.Sort -} - -// GetAssignee returns the Assignee field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetAssignee() string { - if i == nil || i.Assignee == nil { - return "" - } - return *i.Assignee -} - -// GetAssignees returns the Assignees field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetAssignees() []string { - if i == nil || i.Assignees == nil { - return nil - } - return *i.Assignees -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetBody() string { - if i == nil || i.Body == nil { - return "" - } - return *i.Body -} - -// GetLabels returns the Labels field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetLabels() []string { - if i == nil || i.Labels == nil { - return nil - } - return *i.Labels -} - -// GetMilestone returns the Milestone field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetMilestone() int { - if i == nil || i.Milestone == nil { - return 0 - } - return *i.Milestone -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetState() string { - if i == nil || i.State == nil { - return "" - } - return *i.State -} - -// GetStateReason returns the StateReason field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetStateReason() string { - if i == nil || i.StateReason == nil { - return "" - } - return *i.StateReason -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (i *IssueRequest) GetTitle() string { - if i == nil || i.Title == nil { - return "" - } - return *i.Title -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (i *IssuesEvent) GetAction() string { - if i == nil || i.Action == nil { - return "" - } - return *i.Action -} - -// GetAssignee returns the Assignee field. -func (i *IssuesEvent) GetAssignee() *User { - if i == nil { - return nil - } - return i.Assignee -} - -// GetChanges returns the Changes field. -func (i *IssuesEvent) GetChanges() *EditChange { - if i == nil { - return nil - } - return i.Changes -} - -// GetInstallation returns the Installation field. -func (i *IssuesEvent) GetInstallation() *Installation { - if i == nil { - return nil - } - return i.Installation -} - -// GetIssue returns the Issue field. 
-func (i *IssuesEvent) GetIssue() *Issue { - if i == nil { - return nil - } - return i.Issue -} - -// GetLabel returns the Label field. -func (i *IssuesEvent) GetLabel() *Label { - if i == nil { - return nil - } - return i.Label -} - -// GetMilestone returns the Milestone field. -func (i *IssuesEvent) GetMilestone() *Milestone { - if i == nil { - return nil - } - return i.Milestone -} - -// GetOrg returns the Org field. -func (i *IssuesEvent) GetOrg() *Organization { - if i == nil { - return nil - } - return i.Org -} - -// GetRepo returns the Repo field. -func (i *IssuesEvent) GetRepo() *Repository { - if i == nil { - return nil - } - return i.Repo -} - -// GetSender returns the Sender field. -func (i *IssuesEvent) GetSender() *User { - if i == nil { - return nil - } - return i.Sender -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (i *IssuesSearchResult) GetIncompleteResults() bool { - if i == nil || i.IncompleteResults == nil { - return false - } - return *i.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (i *IssuesSearchResult) GetTotal() int { - if i == nil || i.Total == nil { - return 0 - } - return *i.Total -} - -// GetClosedIssues returns the ClosedIssues field if it's non-nil, zero value otherwise. -func (i *IssueStats) GetClosedIssues() int { - if i == nil || i.ClosedIssues == nil { - return 0 - } - return *i.ClosedIssues -} - -// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. -func (i *IssueStats) GetOpenIssues() int { - if i == nil || i.OpenIssues == nil { - return 0 - } - return *i.OpenIssues -} - -// GetTotalIssues returns the TotalIssues field if it's non-nil, zero value otherwise. -func (i *IssueStats) GetTotalIssues() int { - if i == nil || i.TotalIssues == nil { - return 0 - } - return *i.TotalIssues -} - -// GetEncodedJITConfig returns the EncodedJITConfig field if it's non-nil, zero value otherwise. -func (j *JITRunnerConfig) GetEncodedJITConfig() string { - if j == nil || j.EncodedJITConfig == nil { - return "" - } - return *j.EncodedJITConfig -} - -// GetRunner returns the Runner field. -func (j *JITRunnerConfig) GetRunner() *Runner { - if j == nil { - return nil - } - return j.Runner -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (j *Jobs) GetTotalCount() int { - if j == nil || j.TotalCount == nil { - return 0 - } - return *j.TotalCount -} - -// GetAddedBy returns the AddedBy field if it's non-nil, zero value otherwise. -func (k *Key) GetAddedBy() string { - if k == nil || k.AddedBy == nil { - return "" - } - return *k.AddedBy -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (k *Key) GetCreatedAt() Timestamp { - if k == nil || k.CreatedAt == nil { - return Timestamp{} - } - return *k.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (k *Key) GetID() int64 { - if k == nil || k.ID == nil { - return 0 - } - return *k.ID -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (k *Key) GetKey() string { - if k == nil || k.Key == nil { - return "" - } - return *k.Key -} - -// GetLastUsed returns the LastUsed field if it's non-nil, zero value otherwise. -func (k *Key) GetLastUsed() Timestamp { - if k == nil || k.LastUsed == nil { - return Timestamp{} - } - return *k.LastUsed -} - -// GetReadOnly returns the ReadOnly field if it's non-nil, zero value otherwise. 
-func (k *Key) GetReadOnly() bool { - if k == nil || k.ReadOnly == nil { - return false - } - return *k.ReadOnly -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (k *Key) GetTitle() string { - if k == nil || k.Title == nil { - return "" - } - return *k.Title -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (k *Key) GetURL() string { - if k == nil || k.URL == nil { - return "" - } - return *k.URL -} - -// GetVerified returns the Verified field if it's non-nil, zero value otherwise. -func (k *Key) GetVerified() bool { - if k == nil || k.Verified == nil { - return false - } - return *k.Verified -} - -// GetColor returns the Color field if it's non-nil, zero value otherwise. -func (l *Label) GetColor() string { - if l == nil || l.Color == nil { - return "" - } - return *l.Color -} - -// GetDefault returns the Default field if it's non-nil, zero value otherwise. -func (l *Label) GetDefault() bool { - if l == nil || l.Default == nil { - return false - } - return *l.Default -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (l *Label) GetDescription() string { - if l == nil || l.Description == nil { - return "" - } - return *l.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (l *Label) GetID() int64 { - if l == nil || l.ID == nil { - return 0 - } - return *l.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (l *Label) GetName() string { - if l == nil || l.Name == nil { - return "" - } - return *l.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (l *Label) GetNodeID() string { - if l == nil || l.NodeID == nil { - return "" - } - return *l.NodeID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (l *Label) GetURL() string { - if l == nil || l.URL == nil { - return "" - } - return *l.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (l *LabelEvent) GetAction() string { - if l == nil || l.Action == nil { - return "" - } - return *l.Action -} - -// GetChanges returns the Changes field. -func (l *LabelEvent) GetChanges() *EditChange { - if l == nil { - return nil - } - return l.Changes -} - -// GetInstallation returns the Installation field. -func (l *LabelEvent) GetInstallation() *Installation { - if l == nil { - return nil - } - return l.Installation -} - -// GetLabel returns the Label field. -func (l *LabelEvent) GetLabel() *Label { - if l == nil { - return nil - } - return l.Label -} - -// GetOrg returns the Org field. -func (l *LabelEvent) GetOrg() *Organization { - if l == nil { - return nil - } - return l.Org -} - -// GetRepo returns the Repo field. -func (l *LabelEvent) GetRepo() *Repository { - if l == nil { - return nil - } - return l.Repo -} - -// GetSender returns the Sender field. -func (l *LabelEvent) GetSender() *User { - if l == nil { - return nil - } - return l.Sender -} - -// GetColor returns the Color field if it's non-nil, zero value otherwise. -func (l *LabelResult) GetColor() string { - if l == nil || l.Color == nil { - return "" - } - return *l.Color -} - -// GetDefault returns the Default field if it's non-nil, zero value otherwise. -func (l *LabelResult) GetDefault() bool { - if l == nil || l.Default == nil { - return false - } - return *l.Default -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. 
-func (l *LabelResult) GetDescription() string { - if l == nil || l.Description == nil { - return "" - } - return *l.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (l *LabelResult) GetID() int64 { - if l == nil || l.ID == nil { - return 0 - } - return *l.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (l *LabelResult) GetName() string { - if l == nil || l.Name == nil { - return "" - } - return *l.Name -} - -// GetScore returns the Score field. -func (l *LabelResult) GetScore() *float64 { - if l == nil { - return nil - } - return l.Score -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (l *LabelResult) GetURL() string { - if l == nil || l.URL == nil { - return "" - } - return *l.URL -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (l *LabelsSearchResult) GetIncompleteResults() bool { - if l == nil || l.IncompleteResults == nil { - return false - } - return *l.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (l *LabelsSearchResult) GetTotal() int { - if l == nil || l.Total == nil { - return 0 - } - return *l.Total -} - -// GetOID returns the OID field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetOID() string { - if l == nil || l.OID == nil { - return "" - } - return *l.OID -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetPath() string { - if l == nil || l.Path == nil { - return "" - } - return *l.Path -} - -// GetRefName returns the RefName field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetRefName() string { - if l == nil || l.RefName == nil { - return "" - } - return *l.RefName -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (l *LargeFile) GetSize() int { - if l == nil || l.Size == nil { - return 0 - } - return *l.Size -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (l *License) GetBody() string { - if l == nil || l.Body == nil { - return "" - } - return *l.Body -} - -// GetConditions returns the Conditions field if it's non-nil, zero value otherwise. -func (l *License) GetConditions() []string { - if l == nil || l.Conditions == nil { - return nil - } - return *l.Conditions -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (l *License) GetDescription() string { - if l == nil || l.Description == nil { - return "" - } - return *l.Description -} - -// GetFeatured returns the Featured field if it's non-nil, zero value otherwise. -func (l *License) GetFeatured() bool { - if l == nil || l.Featured == nil { - return false - } - return *l.Featured -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (l *License) GetHTMLURL() string { - if l == nil || l.HTMLURL == nil { - return "" - } - return *l.HTMLURL -} - -// GetImplementation returns the Implementation field if it's non-nil, zero value otherwise. -func (l *License) GetImplementation() string { - if l == nil || l.Implementation == nil { - return "" - } - return *l.Implementation -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (l *License) GetKey() string { - if l == nil || l.Key == nil { - return "" - } - return *l.Key -} - -// GetLimitations returns the Limitations field if it's non-nil, zero value otherwise. 
-func (l *License) GetLimitations() []string { - if l == nil || l.Limitations == nil { - return nil - } - return *l.Limitations -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (l *License) GetName() string { - if l == nil || l.Name == nil { - return "" - } - return *l.Name -} - -// GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. -func (l *License) GetPermissions() []string { - if l == nil || l.Permissions == nil { - return nil - } - return *l.Permissions -} - -// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. -func (l *License) GetSPDXID() string { - if l == nil || l.SPDXID == nil { - return "" - } - return *l.SPDXID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (l *License) GetURL() string { - if l == nil || l.URL == nil { - return "" - } - return *l.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (l *LinearHistoryRequirementEnforcementLevelChanges) GetFrom() string { - if l == nil || l.From == nil { - return "" - } - return *l.From -} - -// GetDirection returns the Direction field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetDirection() string { - if l == nil || l.Direction == nil { - return "" - } - return *l.Direction -} - -// GetEcosystem returns the Ecosystem field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetEcosystem() string { - if l == nil || l.Ecosystem == nil { - return "" - } - return *l.Ecosystem -} - -// GetPackage returns the Package field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetPackage() string { - if l == nil || l.Package == nil { - return "" - } - return *l.Package -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetScope() string { - if l == nil || l.Scope == nil { - return "" - } - return *l.Scope -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetSeverity() string { - if l == nil || l.Severity == nil { - return "" - } - return *l.Severity -} - -// GetSort returns the Sort field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetSort() string { - if l == nil || l.Sort == nil { - return "" - } - return *l.Sort -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (l *ListAlertsOptions) GetState() string { - if l == nil || l.State == nil { - return "" - } - return *l.State -} - -// GetAppID returns the AppID field if it's non-nil, zero value otherwise. -func (l *ListCheckRunsOptions) GetAppID() int64 { - if l == nil || l.AppID == nil { - return 0 - } - return *l.AppID -} - -// GetCheckName returns the CheckName field if it's non-nil, zero value otherwise. -func (l *ListCheckRunsOptions) GetCheckName() string { - if l == nil || l.CheckName == nil { - return "" - } - return *l.CheckName -} - -// GetFilter returns the Filter field if it's non-nil, zero value otherwise. -func (l *ListCheckRunsOptions) GetFilter() string { - if l == nil || l.Filter == nil { - return "" - } - return *l.Filter -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (l *ListCheckRunsOptions) GetStatus() string { - if l == nil || l.Status == nil { - return "" - } - return *l.Status -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. 
-func (l *ListCheckRunsResults) GetTotal() int { - if l == nil || l.Total == nil { - return 0 - } - return *l.Total -} - -// GetAppID returns the AppID field if it's non-nil, zero value otherwise. -func (l *ListCheckSuiteOptions) GetAppID() int { - if l == nil || l.AppID == nil { - return 0 - } - return *l.AppID -} - -// GetCheckName returns the CheckName field if it's non-nil, zero value otherwise. -func (l *ListCheckSuiteOptions) GetCheckName() string { - if l == nil || l.CheckName == nil { - return "" - } - return *l.CheckName -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (l *ListCheckSuiteResults) GetTotal() int { - if l == nil || l.Total == nil { - return 0 - } - return *l.Total -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (l *ListCodespaces) GetTotalCount() int { - if l == nil || l.TotalCount == nil { - return 0 - } - return *l.TotalCount -} - -// GetAffiliation returns the Affiliation field if it's non-nil, zero value otherwise. -func (l *ListCollaboratorOptions) GetAffiliation() string { - if l == nil || l.Affiliation == nil { - return "" - } - return *l.Affiliation -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. -func (l *ListExternalGroupsOptions) GetDisplayName() string { - if l == nil || l.DisplayName == nil { - return "" - } - return *l.DisplayName -} - -// GetAffects returns the Affects field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetAffects() string { - if l == nil || l.Affects == nil { - return "" - } - return *l.Affects -} - -// GetCVEID returns the CVEID field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetCVEID() string { - if l == nil || l.CVEID == nil { - return "" - } - return *l.CVEID -} - -// GetEcosystem returns the Ecosystem field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetEcosystem() string { - if l == nil || l.Ecosystem == nil { - return "" - } - return *l.Ecosystem -} - -// GetGHSAID returns the GHSAID field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetGHSAID() string { - if l == nil || l.GHSAID == nil { - return "" - } - return *l.GHSAID -} - -// GetIsWithdrawn returns the IsWithdrawn field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetIsWithdrawn() bool { - if l == nil || l.IsWithdrawn == nil { - return false - } - return *l.IsWithdrawn -} - -// GetModified returns the Modified field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetModified() string { - if l == nil || l.Modified == nil { - return "" - } - return *l.Modified -} - -// GetPublished returns the Published field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetPublished() string { - if l == nil || l.Published == nil { - return "" - } - return *l.Published -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetSeverity() string { - if l == nil || l.Severity == nil { - return "" - } - return *l.Severity -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. 
-func (l *ListGlobalSecurityAdvisoriesOptions) GetType() string { - if l == nil || l.Type == nil { - return "" - } - return *l.Type -} - -// GetUpdated returns the Updated field if it's non-nil, zero value otherwise. -func (l *ListGlobalSecurityAdvisoriesOptions) GetUpdated() string { - if l == nil || l.Updated == nil { - return "" - } - return *l.Updated -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (l *ListOrganizations) GetTotalCount() int { - if l == nil || l.TotalCount == nil { - return 0 - } - return *l.TotalCount -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (l *ListRepositories) GetTotalCount() int { - if l == nil || l.TotalCount == nil { - return 0 - } - return *l.TotalCount -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. -func (l *ListSCIMProvisionedIdentitiesOptions) GetCount() int { - if l == nil || l.Count == nil { - return 0 - } - return *l.Count -} - -// GetFilter returns the Filter field if it's non-nil, zero value otherwise. -func (l *ListSCIMProvisionedIdentitiesOptions) GetFilter() string { - if l == nil || l.Filter == nil { - return "" - } - return *l.Filter -} - -// GetStartIndex returns the StartIndex field if it's non-nil, zero value otherwise. -func (l *ListSCIMProvisionedIdentitiesOptions) GetStartIndex() int { - if l == nil || l.StartIndex == nil { - return 0 - } - return *l.StartIndex -} - -// GetEndColumn returns the EndColumn field if it's non-nil, zero value otherwise. -func (l *Location) GetEndColumn() int { - if l == nil || l.EndColumn == nil { - return 0 - } - return *l.EndColumn -} - -// GetEndLine returns the EndLine field if it's non-nil, zero value otherwise. -func (l *Location) GetEndLine() int { - if l == nil || l.EndLine == nil { - return 0 - } - return *l.EndLine -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (l *Location) GetPath() string { - if l == nil || l.Path == nil { - return "" - } - return *l.Path -} - -// GetStartColumn returns the StartColumn field if it's non-nil, zero value otherwise. -func (l *Location) GetStartColumn() int { - if l == nil || l.StartColumn == nil { - return 0 - } - return *l.StartColumn -} - -// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. -func (l *Location) GetStartLine() int { - if l == nil || l.StartLine == nil { - return 0 - } - return *l.StartLine -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (l *LockBranch) GetEnabled() bool { - if l == nil || l.Enabled == nil { - return false - } - return *l.Enabled -} - -// GetEffectiveDate returns the EffectiveDate field if it's non-nil, zero value otherwise. -func (m *MarketplacePendingChange) GetEffectiveDate() Timestamp { - if m == nil || m.EffectiveDate == nil { - return Timestamp{} - } - return *m.EffectiveDate -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *MarketplacePendingChange) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetPlan returns the Plan field. -func (m *MarketplacePendingChange) GetPlan() *MarketplacePlan { - if m == nil { - return nil - } - return m.Plan -} - -// GetUnitCount returns the UnitCount field if it's non-nil, zero value otherwise. 
-func (m *MarketplacePendingChange) GetUnitCount() int { - if m == nil || m.UnitCount == nil { - return 0 - } - return *m.UnitCount -} - -// GetAccountsURL returns the AccountsURL field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetAccountsURL() string { - if m == nil || m.AccountsURL == nil { - return "" - } - return *m.AccountsURL -} - -// GetBullets returns the Bullets field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetBullets() []string { - if m == nil || m.Bullets == nil { - return nil - } - return *m.Bullets -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetDescription() string { - if m == nil || m.Description == nil { - return "" - } - return *m.Description -} - -// GetHasFreeTrial returns the HasFreeTrial field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetHasFreeTrial() bool { - if m == nil || m.HasFreeTrial == nil { - return false - } - return *m.HasFreeTrial -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetMonthlyPriceInCents returns the MonthlyPriceInCents field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetMonthlyPriceInCents() int { - if m == nil || m.MonthlyPriceInCents == nil { - return 0 - } - return *m.MonthlyPriceInCents -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetName() string { - if m == nil || m.Name == nil { - return "" - } - return *m.Name -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetNumber() int { - if m == nil || m.Number == nil { - return 0 - } - return *m.Number -} - -// GetPriceModel returns the PriceModel field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetPriceModel() string { - if m == nil || m.PriceModel == nil { - return "" - } - return *m.PriceModel -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetState() string { - if m == nil || m.State == nil { - return "" - } - return *m.State -} - -// GetUnitName returns the UnitName field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetUnitName() string { - if m == nil || m.UnitName == nil { - return "" - } - return *m.UnitName -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetYearlyPriceInCents returns the YearlyPriceInCents field if it's non-nil, zero value otherwise. -func (m *MarketplacePlan) GetYearlyPriceInCents() int { - if m == nil || m.YearlyPriceInCents == nil { - return 0 - } - return *m.YearlyPriceInCents -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *MarketplacePlanAccount) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (m *MarketplacePlanAccount) GetLogin() string { - if m == nil || m.Login == nil { - return "" - } - return *m.Login -} - -// GetMarketplacePendingChange returns the MarketplacePendingChange field. 
-func (m *MarketplacePlanAccount) GetMarketplacePendingChange() *MarketplacePendingChange { - if m == nil { - return nil - } - return m.MarketplacePendingChange -} - -// GetMarketplacePurchase returns the MarketplacePurchase field. -func (m *MarketplacePlanAccount) GetMarketplacePurchase() *MarketplacePurchase { - if m == nil { - return nil - } - return m.MarketplacePurchase -} - -// GetOrganizationBillingEmail returns the OrganizationBillingEmail field if it's non-nil, zero value otherwise. -func (m *MarketplacePlanAccount) GetOrganizationBillingEmail() string { - if m == nil || m.OrganizationBillingEmail == nil { - return "" - } - return *m.OrganizationBillingEmail -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (m *MarketplacePlanAccount) GetType() string { - if m == nil || m.Type == nil { - return "" - } - return *m.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *MarketplacePlanAccount) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetAccount returns the Account field. -func (m *MarketplacePurchase) GetAccount() *MarketplacePurchaseAccount { - if m == nil { - return nil - } - return m.Account -} - -// GetBillingCycle returns the BillingCycle field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetBillingCycle() string { - if m == nil || m.BillingCycle == nil { - return "" - } - return *m.BillingCycle -} - -// GetFreeTrialEndsOn returns the FreeTrialEndsOn field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetFreeTrialEndsOn() Timestamp { - if m == nil || m.FreeTrialEndsOn == nil { - return Timestamp{} - } - return *m.FreeTrialEndsOn -} - -// GetNextBillingDate returns the NextBillingDate field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetNextBillingDate() Timestamp { - if m == nil || m.NextBillingDate == nil { - return Timestamp{} - } - return *m.NextBillingDate -} - -// GetOnFreeTrial returns the OnFreeTrial field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetOnFreeTrial() bool { - if m == nil || m.OnFreeTrial == nil { - return false - } - return *m.OnFreeTrial -} - -// GetPlan returns the Plan field. -func (m *MarketplacePurchase) GetPlan() *MarketplacePlan { - if m == nil { - return nil - } - return m.Plan -} - -// GetUnitCount returns the UnitCount field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetUnitCount() int { - if m == nil || m.UnitCount == nil { - return 0 - } - return *m.UnitCount -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchase) GetUpdatedAt() Timestamp { - if m == nil || m.UpdatedAt == nil { - return Timestamp{} - } - return *m.UpdatedAt -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetEmail() string { - if m == nil || m.Email == nil { - return "" - } - return *m.Email -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetLogin() string { - if m == nil || m.Login == nil { - return "" - } - return *m.Login -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
-func (m *MarketplacePurchaseAccount) GetNodeID() string { - if m == nil || m.NodeID == nil { - return "" - } - return *m.NodeID -} - -// GetOrganizationBillingEmail returns the OrganizationBillingEmail field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetOrganizationBillingEmail() string { - if m == nil || m.OrganizationBillingEmail == nil { - return "" - } - return *m.OrganizationBillingEmail -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetType() string { - if m == nil || m.Type == nil { - return "" - } - return *m.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseAccount) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetEffectiveDate returns the EffectiveDate field if it's non-nil, zero value otherwise. -func (m *MarketplacePurchaseEvent) GetEffectiveDate() Timestamp { - if m == nil || m.EffectiveDate == nil { - return Timestamp{} - } - return *m.EffectiveDate -} - -// GetInstallation returns the Installation field. -func (m *MarketplacePurchaseEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetMarketplacePurchase returns the MarketplacePurchase field. -func (m *MarketplacePurchaseEvent) GetMarketplacePurchase() *MarketplacePurchase { - if m == nil { - return nil - } - return m.MarketplacePurchase -} - -// GetOrg returns the Org field. -func (m *MarketplacePurchaseEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetPreviousMarketplacePurchase returns the PreviousMarketplacePurchase field. -func (m *MarketplacePurchaseEvent) GetPreviousMarketplacePurchase() *MarketplacePurchase { - if m == nil { - return nil - } - return m.PreviousMarketplacePurchase -} - -// GetSender returns the Sender field. -func (m *MarketplacePurchaseEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetText returns the Text field if it's non-nil, zero value otherwise. -func (m *Match) GetText() string { - if m == nil || m.Text == nil { - return "" - } - return *m.Text -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MemberEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetInstallation returns the Installation field. -func (m *MemberEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetMember returns the Member field. -func (m *MemberEvent) GetMember() *User { - if m == nil { - return nil - } - return m.Member -} - -// GetOrg returns the Org field. -func (m *MemberEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetRepo returns the Repo field. -func (m *MemberEvent) GetRepo() *Repository { - if m == nil { - return nil - } - return m.Repo -} - -// GetSender returns the Sender field. -func (m *MemberEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetOrganization returns the Organization field. 
-func (m *Membership) GetOrganization() *Organization { - if m == nil { - return nil - } - return m.Organization -} - -// GetOrganizationURL returns the OrganizationURL field if it's non-nil, zero value otherwise. -func (m *Membership) GetOrganizationURL() string { - if m == nil || m.OrganizationURL == nil { - return "" - } - return *m.OrganizationURL -} - -// GetRole returns the Role field if it's non-nil, zero value otherwise. -func (m *Membership) GetRole() string { - if m == nil || m.Role == nil { - return "" - } - return *m.Role -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *Membership) GetState() string { - if m == nil || m.State == nil { - return "" - } - return *m.State -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *Membership) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetUser returns the User field. -func (m *Membership) GetUser() *User { - if m == nil { - return nil - } - return m.User -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MembershipEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetInstallation returns the Installation field. -func (m *MembershipEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetMember returns the Member field. -func (m *MembershipEvent) GetMember() *User { - if m == nil { - return nil - } - return m.Member -} - -// GetOrg returns the Org field. -func (m *MembershipEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (m *MembershipEvent) GetScope() string { - if m == nil || m.Scope == nil { - return "" - } - return *m.Scope -} - -// GetSender returns the Sender field. -func (m *MembershipEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetTeam returns the Team field. -func (m *MembershipEvent) GetTeam() *Team { - if m == nil { - return nil - } - return m.Team -} - -// GetBaseRef returns the BaseRef field if it's non-nil, zero value otherwise. -func (m *MergeGroup) GetBaseRef() string { - if m == nil || m.BaseRef == nil { - return "" - } - return *m.BaseRef -} - -// GetBaseSHA returns the BaseSHA field if it's non-nil, zero value otherwise. -func (m *MergeGroup) GetBaseSHA() string { - if m == nil || m.BaseSHA == nil { - return "" - } - return *m.BaseSHA -} - -// GetHeadCommit returns the HeadCommit field. -func (m *MergeGroup) GetHeadCommit() *Commit { - if m == nil { - return nil - } - return m.HeadCommit -} - -// GetHeadRef returns the HeadRef field if it's non-nil, zero value otherwise. -func (m *MergeGroup) GetHeadRef() string { - if m == nil || m.HeadRef == nil { - return "" - } - return *m.HeadRef -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (m *MergeGroup) GetHeadSHA() string { - if m == nil || m.HeadSHA == nil { - return "" - } - return *m.HeadSHA -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MergeGroupEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetInstallation returns the Installation field. 
-func (m *MergeGroupEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetMergeGroup returns the MergeGroup field. -func (m *MergeGroupEvent) GetMergeGroup() *MergeGroup { - if m == nil { - return nil - } - return m.MergeGroup -} - -// GetOrg returns the Org field. -func (m *MergeGroupEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetRepo returns the Repo field. -func (m *MergeGroupEvent) GetRepo() *Repository { - if m == nil { - return nil - } - return m.Repo -} - -// GetSender returns the Sender field. -func (m *MergeGroupEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetText returns the Text field if it's non-nil, zero value otherwise. -func (m *Message) GetText() string { - if m == nil || m.Text == nil { - return "" - } - return *m.Text -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MetaEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetHook returns the Hook field. -func (m *MetaEvent) GetHook() *Hook { - if m == nil { - return nil - } - return m.Hook -} - -// GetHookID returns the HookID field if it's non-nil, zero value otherwise. -func (m *MetaEvent) GetHookID() int64 { - if m == nil || m.HookID == nil { - return 0 - } - return *m.HookID -} - -// GetInstallation returns the Installation field. -func (m *MetaEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetOrg returns the Org field. -func (m *MetaEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetRepo returns the Repo field. -func (m *MetaEvent) GetRepo() *Repository { - if m == nil { - return nil - } - return m.Repo -} - -// GetSender returns the Sender field. -func (m *MetaEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (m *Metric) GetHTMLURL() string { - if m == nil || m.HTMLURL == nil { - return "" - } - return *m.HTMLURL -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (m *Metric) GetKey() string { - if m == nil || m.Key == nil { - return "" - } - return *m.Key -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (m *Metric) GetName() string { - if m == nil || m.Name == nil { - return "" - } - return *m.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (m *Metric) GetNodeID() string { - if m == nil || m.NodeID == nil { - return "" - } - return *m.NodeID -} - -// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. -func (m *Metric) GetSPDXID() string { - if m == nil || m.SPDXID == nil { - return "" - } - return *m.SPDXID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *Metric) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (m *Migration) GetCreatedAt() string { - if m == nil || m.CreatedAt == nil { - return "" - } - return *m.CreatedAt -} - -// GetExcludeAttachments returns the ExcludeAttachments field if it's non-nil, zero value otherwise. 
-func (m *Migration) GetExcludeAttachments() bool { - if m == nil || m.ExcludeAttachments == nil { - return false - } - return *m.ExcludeAttachments -} - -// GetGUID returns the GUID field if it's non-nil, zero value otherwise. -func (m *Migration) GetGUID() string { - if m == nil || m.GUID == nil { - return "" - } - return *m.GUID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *Migration) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetLockRepositories returns the LockRepositories field if it's non-nil, zero value otherwise. -func (m *Migration) GetLockRepositories() bool { - if m == nil || m.LockRepositories == nil { - return false - } - return *m.LockRepositories -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *Migration) GetState() string { - if m == nil || m.State == nil { - return "" - } - return *m.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (m *Migration) GetUpdatedAt() string { - if m == nil || m.UpdatedAt == nil { - return "" - } - return *m.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *Migration) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (m *Milestone) GetClosedAt() Timestamp { - if m == nil || m.ClosedAt == nil { - return Timestamp{} - } - return *m.ClosedAt -} - -// GetClosedIssues returns the ClosedIssues field if it's non-nil, zero value otherwise. -func (m *Milestone) GetClosedIssues() int { - if m == nil || m.ClosedIssues == nil { - return 0 - } - return *m.ClosedIssues -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (m *Milestone) GetCreatedAt() Timestamp { - if m == nil || m.CreatedAt == nil { - return Timestamp{} - } - return *m.CreatedAt -} - -// GetCreator returns the Creator field. -func (m *Milestone) GetCreator() *User { - if m == nil { - return nil - } - return m.Creator -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (m *Milestone) GetDescription() string { - if m == nil || m.Description == nil { - return "" - } - return *m.Description -} - -// GetDueOn returns the DueOn field if it's non-nil, zero value otherwise. -func (m *Milestone) GetDueOn() Timestamp { - if m == nil || m.DueOn == nil { - return Timestamp{} - } - return *m.DueOn -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (m *Milestone) GetHTMLURL() string { - if m == nil || m.HTMLURL == nil { - return "" - } - return *m.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (m *Milestone) GetID() int64 { - if m == nil || m.ID == nil { - return 0 - } - return *m.ID -} - -// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. -func (m *Milestone) GetLabelsURL() string { - if m == nil || m.LabelsURL == nil { - return "" - } - return *m.LabelsURL -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (m *Milestone) GetNodeID() string { - if m == nil || m.NodeID == nil { - return "" - } - return *m.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. 
-func (m *Milestone) GetNumber() int { - if m == nil || m.Number == nil { - return 0 - } - return *m.Number -} - -// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. -func (m *Milestone) GetOpenIssues() int { - if m == nil || m.OpenIssues == nil { - return 0 - } - return *m.OpenIssues -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *Milestone) GetState() string { - if m == nil || m.State == nil { - return "" - } - return *m.State -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (m *Milestone) GetTitle() string { - if m == nil || m.Title == nil { - return "" - } - return *m.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (m *Milestone) GetUpdatedAt() Timestamp { - if m == nil || m.UpdatedAt == nil { - return Timestamp{} - } - return *m.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (m *Milestone) GetURL() string { - if m == nil || m.URL == nil { - return "" - } - return *m.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (m *MilestoneEvent) GetAction() string { - if m == nil || m.Action == nil { - return "" - } - return *m.Action -} - -// GetChanges returns the Changes field. -func (m *MilestoneEvent) GetChanges() *EditChange { - if m == nil { - return nil - } - return m.Changes -} - -// GetInstallation returns the Installation field. -func (m *MilestoneEvent) GetInstallation() *Installation { - if m == nil { - return nil - } - return m.Installation -} - -// GetMilestone returns the Milestone field. -func (m *MilestoneEvent) GetMilestone() *Milestone { - if m == nil { - return nil - } - return m.Milestone -} - -// GetOrg returns the Org field. -func (m *MilestoneEvent) GetOrg() *Organization { - if m == nil { - return nil - } - return m.Org -} - -// GetRepo returns the Repo field. -func (m *MilestoneEvent) GetRepo() *Repository { - if m == nil { - return nil - } - return m.Repo -} - -// GetSender returns the Sender field. -func (m *MilestoneEvent) GetSender() *User { - if m == nil { - return nil - } - return m.Sender -} - -// GetClosedMilestones returns the ClosedMilestones field if it's non-nil, zero value otherwise. -func (m *MilestoneStats) GetClosedMilestones() int { - if m == nil || m.ClosedMilestones == nil { - return 0 - } - return *m.ClosedMilestones -} - -// GetOpenMilestones returns the OpenMilestones field if it's non-nil, zero value otherwise. -func (m *MilestoneStats) GetOpenMilestones() int { - if m == nil || m.OpenMilestones == nil { - return 0 - } - return *m.OpenMilestones -} - -// GetTotalMilestones returns the TotalMilestones field if it's non-nil, zero value otherwise. -func (m *MilestoneStats) GetTotalMilestones() int { - if m == nil || m.TotalMilestones == nil { - return 0 - } - return *m.TotalMilestones -} - -// GetAnalysisKey returns the AnalysisKey field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetAnalysisKey() string { - if m == nil || m.AnalysisKey == nil { - return "" - } - return *m.AnalysisKey -} - -// GetCategory returns the Category field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetCategory() string { - if m == nil || m.Category == nil { - return "" - } - return *m.Category -} - -// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. 
-func (m *MostRecentInstance) GetCommitSHA() string { - if m == nil || m.CommitSHA == nil { - return "" - } - return *m.CommitSHA -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetEnvironment() string { - if m == nil || m.Environment == nil { - return "" - } - return *m.Environment -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetHTMLURL() string { - if m == nil || m.HTMLURL == nil { - return "" - } - return *m.HTMLURL -} - -// GetLocation returns the Location field. -func (m *MostRecentInstance) GetLocation() *Location { - if m == nil { - return nil - } - return m.Location -} - -// GetMessage returns the Message field. -func (m *MostRecentInstance) GetMessage() *Message { - if m == nil { - return nil - } - return m.Message -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetRef() string { - if m == nil || m.Ref == nil { - return "" - } - return *m.Ref -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (m *MostRecentInstance) GetState() string { - if m == nil || m.State == nil { - return "" - } - return *m.State -} - -// GetBase returns the Base field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetBase() string { - if n == nil || n.Base == nil { - return "" - } - return *n.Base -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetBody() string { - if n == nil || n.Body == nil { - return "" - } - return *n.Body -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetDraft() bool { - if n == nil || n.Draft == nil { - return false - } - return *n.Draft -} - -// GetHead returns the Head field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetHead() string { - if n == nil || n.Head == nil { - return "" - } - return *n.Head -} - -// GetHeadRepo returns the HeadRepo field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetHeadRepo() string { - if n == nil || n.HeadRepo == nil { - return "" - } - return *n.HeadRepo -} - -// GetIssue returns the Issue field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetIssue() int { - if n == nil || n.Issue == nil { - return 0 - } - return *n.Issue -} - -// GetMaintainerCanModify returns the MaintainerCanModify field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetMaintainerCanModify() bool { - if n == nil || n.MaintainerCanModify == nil { - return false - } - return *n.MaintainerCanModify -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (n *NewPullRequest) GetTitle() string { - if n == nil || n.Title == nil { - return "" - } - return *n.Title -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (n *NewTeam) GetDescription() string { - if n == nil || n.Description == nil { - return "" - } - return *n.Description -} - -// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. -func (n *NewTeam) GetLDAPDN() string { - if n == nil || n.LDAPDN == nil { - return "" - } - return *n.LDAPDN -} - -// GetParentTeamID returns the ParentTeamID field if it's non-nil, zero value otherwise. 
-func (n *NewTeam) GetParentTeamID() int64 { - if n == nil || n.ParentTeamID == nil { - return 0 - } - return *n.ParentTeamID -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (n *NewTeam) GetPermission() string { - if n == nil || n.Permission == nil { - return "" - } - return *n.Permission -} - -// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. -func (n *NewTeam) GetPrivacy() string { - if n == nil || n.Privacy == nil { - return "" - } - return *n.Privacy -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (n *Notification) GetID() string { - if n == nil || n.ID == nil { - return "" - } - return *n.ID -} - -// GetLastReadAt returns the LastReadAt field if it's non-nil, zero value otherwise. -func (n *Notification) GetLastReadAt() Timestamp { - if n == nil || n.LastReadAt == nil { - return Timestamp{} - } - return *n.LastReadAt -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (n *Notification) GetReason() string { - if n == nil || n.Reason == nil { - return "" - } - return *n.Reason -} - -// GetRepository returns the Repository field. -func (n *Notification) GetRepository() *Repository { - if n == nil { - return nil - } - return n.Repository -} - -// GetSubject returns the Subject field. -func (n *Notification) GetSubject() *NotificationSubject { - if n == nil { - return nil - } - return n.Subject -} - -// GetUnread returns the Unread field if it's non-nil, zero value otherwise. -func (n *Notification) GetUnread() bool { - if n == nil || n.Unread == nil { - return false - } - return *n.Unread -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (n *Notification) GetUpdatedAt() Timestamp { - if n == nil || n.UpdatedAt == nil { - return Timestamp{} - } - return *n.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (n *Notification) GetURL() string { - if n == nil || n.URL == nil { - return "" - } - return *n.URL -} - -// GetLatestCommentURL returns the LatestCommentURL field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetLatestCommentURL() string { - if n == nil || n.LatestCommentURL == nil { - return "" - } - return *n.LatestCommentURL -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetTitle() string { - if n == nil || n.Title == nil { - return "" - } - return *n.Title -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetType() string { - if n == nil || n.Type == nil { - return "" - } - return *n.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (n *NotificationSubject) GetURL() string { - if n == nil || n.URL == nil { - return "" - } - return *n.URL -} - -// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. -func (o *OAuthAPP) GetClientID() string { - if o == nil || o.ClientID == nil { - return "" - } - return *o.ClientID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (o *OAuthAPP) GetName() string { - if o == nil || o.Name == nil { - return "" - } - return *o.Name -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (o *OAuthAPP) GetURL() string { - if o == nil || o.URL == nil { - return "" - } - return *o.URL -} - -// GetUseDefault returns the UseDefault field if it's non-nil, zero value otherwise. -func (o *OIDCSubjectClaimCustomTemplate) GetUseDefault() bool { - if o == nil || o.UseDefault == nil { - return false - } - return *o.UseDefault -} - -// GetAdvancedSecurityEnabledForNewRepos returns the AdvancedSecurityEnabledForNewRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetAdvancedSecurityEnabledForNewRepos() bool { - if o == nil || o.AdvancedSecurityEnabledForNewRepos == nil { - return false - } - return *o.AdvancedSecurityEnabledForNewRepos -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetAvatarURL() string { - if o == nil || o.AvatarURL == nil { - return "" - } - return *o.AvatarURL -} - -// GetBillingEmail returns the BillingEmail field if it's non-nil, zero value otherwise. -func (o *Organization) GetBillingEmail() string { - if o == nil || o.BillingEmail == nil { - return "" - } - return *o.BillingEmail -} - -// GetBlog returns the Blog field if it's non-nil, zero value otherwise. -func (o *Organization) GetBlog() string { - if o == nil || o.Blog == nil { - return "" - } - return *o.Blog -} - -// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. -func (o *Organization) GetCollaborators() int { - if o == nil || o.Collaborators == nil { - return 0 - } - return *o.Collaborators -} - -// GetCompany returns the Company field if it's non-nil, zero value otherwise. -func (o *Organization) GetCompany() string { - if o == nil || o.Company == nil { - return "" - } - return *o.Company -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (o *Organization) GetCreatedAt() Timestamp { - if o == nil || o.CreatedAt == nil { - return Timestamp{} - } - return *o.CreatedAt -} - -// GetDefaultRepoPermission returns the DefaultRepoPermission field if it's non-nil, zero value otherwise. -func (o *Organization) GetDefaultRepoPermission() string { - if o == nil || o.DefaultRepoPermission == nil { - return "" - } - return *o.DefaultRepoPermission -} - -// GetDefaultRepoSettings returns the DefaultRepoSettings field if it's non-nil, zero value otherwise. -func (o *Organization) GetDefaultRepoSettings() string { - if o == nil || o.DefaultRepoSettings == nil { - return "" - } - return *o.DefaultRepoSettings -} - -// GetDependabotAlertsEnabledForNewRepos returns the DependabotAlertsEnabledForNewRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetDependabotAlertsEnabledForNewRepos() bool { - if o == nil || o.DependabotAlertsEnabledForNewRepos == nil { - return false - } - return *o.DependabotAlertsEnabledForNewRepos -} - -// GetDependabotSecurityUpdatesEnabledForNewRepos returns the DependabotSecurityUpdatesEnabledForNewRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetDependabotSecurityUpdatesEnabledForNewRepos() bool { - if o == nil || o.DependabotSecurityUpdatesEnabledForNewRepos == nil { - return false - } - return *o.DependabotSecurityUpdatesEnabledForNewRepos -} - -// GetDependencyGraphEnabledForNewRepos returns the DependencyGraphEnabledForNewRepos field if it's non-nil, zero value otherwise. 
-func (o *Organization) GetDependencyGraphEnabledForNewRepos() bool { - if o == nil || o.DependencyGraphEnabledForNewRepos == nil { - return false - } - return *o.DependencyGraphEnabledForNewRepos -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (o *Organization) GetDescription() string { - if o == nil || o.Description == nil { - return "" - } - return *o.Description -} - -// GetDiskUsage returns the DiskUsage field if it's non-nil, zero value otherwise. -func (o *Organization) GetDiskUsage() int { - if o == nil || o.DiskUsage == nil { - return 0 - } - return *o.DiskUsage -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (o *Organization) GetEmail() string { - if o == nil || o.Email == nil { - return "" - } - return *o.Email -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetEventsURL() string { - if o == nil || o.EventsURL == nil { - return "" - } - return *o.EventsURL -} - -// GetFollowers returns the Followers field if it's non-nil, zero value otherwise. -func (o *Organization) GetFollowers() int { - if o == nil || o.Followers == nil { - return 0 - } - return *o.Followers -} - -// GetFollowing returns the Following field if it's non-nil, zero value otherwise. -func (o *Organization) GetFollowing() int { - if o == nil || o.Following == nil { - return 0 - } - return *o.Following -} - -// GetHasOrganizationProjects returns the HasOrganizationProjects field if it's non-nil, zero value otherwise. -func (o *Organization) GetHasOrganizationProjects() bool { - if o == nil || o.HasOrganizationProjects == nil { - return false - } - return *o.HasOrganizationProjects -} - -// GetHasRepositoryProjects returns the HasRepositoryProjects field if it's non-nil, zero value otherwise. -func (o *Organization) GetHasRepositoryProjects() bool { - if o == nil || o.HasRepositoryProjects == nil { - return false - } - return *o.HasRepositoryProjects -} - -// GetHooksURL returns the HooksURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetHooksURL() string { - if o == nil || o.HooksURL == nil { - return "" - } - return *o.HooksURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetHTMLURL() string { - if o == nil || o.HTMLURL == nil { - return "" - } - return *o.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (o *Organization) GetID() int64 { - if o == nil || o.ID == nil { - return 0 - } - return *o.ID -} - -// GetIssuesURL returns the IssuesURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetIssuesURL() string { - if o == nil || o.IssuesURL == nil { - return "" - } - return *o.IssuesURL -} - -// GetIsVerified returns the IsVerified field if it's non-nil, zero value otherwise. -func (o *Organization) GetIsVerified() bool { - if o == nil || o.IsVerified == nil { - return false - } - return *o.IsVerified -} - -// GetLocation returns the Location field if it's non-nil, zero value otherwise. -func (o *Organization) GetLocation() string { - if o == nil || o.Location == nil { - return "" - } - return *o.Location -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. 
-func (o *Organization) GetLogin() string { - if o == nil || o.Login == nil { - return "" - } - return *o.Login -} - -// GetMembersAllowedRepositoryCreationType returns the MembersAllowedRepositoryCreationType field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersAllowedRepositoryCreationType() string { - if o == nil || o.MembersAllowedRepositoryCreationType == nil { - return "" - } - return *o.MembersAllowedRepositoryCreationType -} - -// GetMembersCanCreateInternalRepos returns the MembersCanCreateInternalRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreateInternalRepos() bool { - if o == nil || o.MembersCanCreateInternalRepos == nil { - return false - } - return *o.MembersCanCreateInternalRepos -} - -// GetMembersCanCreatePages returns the MembersCanCreatePages field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreatePages() bool { - if o == nil || o.MembersCanCreatePages == nil { - return false - } - return *o.MembersCanCreatePages -} - -// GetMembersCanCreatePrivatePages returns the MembersCanCreatePrivatePages field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreatePrivatePages() bool { - if o == nil || o.MembersCanCreatePrivatePages == nil { - return false - } - return *o.MembersCanCreatePrivatePages -} - -// GetMembersCanCreatePrivateRepos returns the MembersCanCreatePrivateRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreatePrivateRepos() bool { - if o == nil || o.MembersCanCreatePrivateRepos == nil { - return false - } - return *o.MembersCanCreatePrivateRepos -} - -// GetMembersCanCreatePublicPages returns the MembersCanCreatePublicPages field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreatePublicPages() bool { - if o == nil || o.MembersCanCreatePublicPages == nil { - return false - } - return *o.MembersCanCreatePublicPages -} - -// GetMembersCanCreatePublicRepos returns the MembersCanCreatePublicRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreatePublicRepos() bool { - if o == nil || o.MembersCanCreatePublicRepos == nil { - return false - } - return *o.MembersCanCreatePublicRepos -} - -// GetMembersCanCreateRepos returns the MembersCanCreateRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanCreateRepos() bool { - if o == nil || o.MembersCanCreateRepos == nil { - return false - } - return *o.MembersCanCreateRepos -} - -// GetMembersCanForkPrivateRepos returns the MembersCanForkPrivateRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersCanForkPrivateRepos() bool { - if o == nil || o.MembersCanForkPrivateRepos == nil { - return false - } - return *o.MembersCanForkPrivateRepos -} - -// GetMembersURL returns the MembersURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetMembersURL() string { - if o == nil || o.MembersURL == nil { - return "" - } - return *o.MembersURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (o *Organization) GetName() string { - if o == nil || o.Name == nil { - return "" - } - return *o.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
-func (o *Organization) GetNodeID() string { - if o == nil || o.NodeID == nil { - return "" - } - return *o.NodeID -} - -// GetOwnedPrivateRepos returns the OwnedPrivateRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetOwnedPrivateRepos() int64 { - if o == nil || o.OwnedPrivateRepos == nil { - return 0 - } - return *o.OwnedPrivateRepos -} - -// GetPlan returns the Plan field. -func (o *Organization) GetPlan() *Plan { - if o == nil { - return nil - } - return o.Plan -} - -// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. -func (o *Organization) GetPrivateGists() int { - if o == nil || o.PrivateGists == nil { - return 0 - } - return *o.PrivateGists -} - -// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. -func (o *Organization) GetPublicGists() int { - if o == nil || o.PublicGists == nil { - return 0 - } - return *o.PublicGists -} - -// GetPublicMembersURL returns the PublicMembersURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetPublicMembersURL() string { - if o == nil || o.PublicMembersURL == nil { - return "" - } - return *o.PublicMembersURL -} - -// GetPublicRepos returns the PublicRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetPublicRepos() int { - if o == nil || o.PublicRepos == nil { - return 0 - } - return *o.PublicRepos -} - -// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. -func (o *Organization) GetReposURL() string { - if o == nil || o.ReposURL == nil { - return "" - } - return *o.ReposURL -} - -// GetSecretScanningEnabledForNewRepos returns the SecretScanningEnabledForNewRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetSecretScanningEnabledForNewRepos() bool { - if o == nil || o.SecretScanningEnabledForNewRepos == nil { - return false - } - return *o.SecretScanningEnabledForNewRepos -} - -// GetSecretScanningPushProtectionEnabledForNewRepos returns the SecretScanningPushProtectionEnabledForNewRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetSecretScanningPushProtectionEnabledForNewRepos() bool { - if o == nil || o.SecretScanningPushProtectionEnabledForNewRepos == nil { - return false - } - return *o.SecretScanningPushProtectionEnabledForNewRepos -} - -// GetTotalPrivateRepos returns the TotalPrivateRepos field if it's non-nil, zero value otherwise. -func (o *Organization) GetTotalPrivateRepos() int64 { - if o == nil || o.TotalPrivateRepos == nil { - return 0 - } - return *o.TotalPrivateRepos -} - -// GetTwitterUsername returns the TwitterUsername field if it's non-nil, zero value otherwise. -func (o *Organization) GetTwitterUsername() string { - if o == nil || o.TwitterUsername == nil { - return "" - } - return *o.TwitterUsername -} - -// GetTwoFactorRequirementEnabled returns the TwoFactorRequirementEnabled field if it's non-nil, zero value otherwise. -func (o *Organization) GetTwoFactorRequirementEnabled() bool { - if o == nil || o.TwoFactorRequirementEnabled == nil { - return false - } - return *o.TwoFactorRequirementEnabled -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (o *Organization) GetType() string { - if o == nil || o.Type == nil { - return "" - } - return *o.Type -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (o *Organization) GetUpdatedAt() Timestamp { - if o == nil || o.UpdatedAt == nil { - return Timestamp{} - } - return *o.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (o *Organization) GetURL() string { - if o == nil || o.URL == nil { - return "" - } - return *o.URL -} - -// GetWebCommitSignoffRequired returns the WebCommitSignoffRequired field if it's non-nil, zero value otherwise. -func (o *Organization) GetWebCommitSignoffRequired() bool { - if o == nil || o.WebCommitSignoffRequired == nil { - return false - } - return *o.WebCommitSignoffRequired -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (o *OrganizationCustomRepoRoles) GetTotalCount() int { - if o == nil || o.TotalCount == nil { - return 0 - } - return *o.TotalCount -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (o *OrganizationEvent) GetAction() string { - if o == nil || o.Action == nil { - return "" - } - return *o.Action -} - -// GetInstallation returns the Installation field. -func (o *OrganizationEvent) GetInstallation() *Installation { - if o == nil { - return nil - } - return o.Installation -} - -// GetInvitation returns the Invitation field. -func (o *OrganizationEvent) GetInvitation() *Invitation { - if o == nil { - return nil - } - return o.Invitation -} - -// GetMembership returns the Membership field. -func (o *OrganizationEvent) GetMembership() *Membership { - if o == nil { - return nil - } - return o.Membership -} - -// GetOrganization returns the Organization field. -func (o *OrganizationEvent) GetOrganization() *Organization { - if o == nil { - return nil - } - return o.Organization -} - -// GetSender returns the Sender field. -func (o *OrganizationEvent) GetSender() *User { - if o == nil { - return nil - } - return o.Sender -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (o *OrganizationInstallations) GetTotalCount() int { - if o == nil || o.TotalCount == nil { - return 0 - } - return *o.TotalCount -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (o *OrgBlockEvent) GetAction() string { - if o == nil || o.Action == nil { - return "" - } - return *o.Action -} - -// GetBlockedUser returns the BlockedUser field. -func (o *OrgBlockEvent) GetBlockedUser() *User { - if o == nil { - return nil - } - return o.BlockedUser -} - -// GetInstallation returns the Installation field. -func (o *OrgBlockEvent) GetInstallation() *Installation { - if o == nil { - return nil - } - return o.Installation -} - -// GetOrganization returns the Organization field. -func (o *OrgBlockEvent) GetOrganization() *Organization { - if o == nil { - return nil - } - return o.Organization -} - -// GetSender returns the Sender field. -func (o *OrgBlockEvent) GetSender() *User { - if o == nil { - return nil - } - return o.Sender -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetCreatedAt() Timestamp { - if o == nil || o.CreatedAt == nil { - return Timestamp{} - } - return *o.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetID() int64 { - if o == nil || o.ID == nil { - return 0 - } - return *o.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (o *OrgRequiredWorkflow) GetName() string { - if o == nil || o.Name == nil { - return "" - } - return *o.Name -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetPath() string { - if o == nil || o.Path == nil { - return "" - } - return *o.Path -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetRef() string { - if o == nil || o.Ref == nil { - return "" - } - return *o.Ref -} - -// GetRepository returns the Repository field. -func (o *OrgRequiredWorkflow) GetRepository() *Repository { - if o == nil { - return nil - } - return o.Repository -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetScope() string { - if o == nil || o.Scope == nil { - return "" - } - return *o.Scope -} - -// GetSelectedRepositoriesURL returns the SelectedRepositoriesURL field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetSelectedRepositoriesURL() string { - if o == nil || o.SelectedRepositoriesURL == nil { - return "" - } - return *o.SelectedRepositoriesURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetState() string { - if o == nil || o.State == nil { - return "" - } - return *o.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetUpdatedAt() Timestamp { - if o == nil || o.UpdatedAt == nil { - return Timestamp{} - } - return *o.UpdatedAt -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflows) GetTotalCount() int { - if o == nil || o.TotalCount == nil { - return 0 - } - return *o.TotalCount -} - -// GetDisabledOrgs returns the DisabledOrgs field if it's non-nil, zero value otherwise. -func (o *OrgStats) GetDisabledOrgs() int { - if o == nil || o.DisabledOrgs == nil { - return 0 - } - return *o.DisabledOrgs -} - -// GetTotalOrgs returns the TotalOrgs field if it's non-nil, zero value otherwise. -func (o *OrgStats) GetTotalOrgs() int { - if o == nil || o.TotalOrgs == nil { - return 0 - } - return *o.TotalOrgs -} - -// GetTotalTeamMembers returns the TotalTeamMembers field if it's non-nil, zero value otherwise. -func (o *OrgStats) GetTotalTeamMembers() int { - if o == nil || o.TotalTeamMembers == nil { - return 0 - } - return *o.TotalTeamMembers -} - -// GetTotalTeams returns the TotalTeams field if it's non-nil, zero value otherwise. -func (o *OrgStats) GetTotalTeams() int { - if o == nil || o.TotalTeams == nil { - return 0 - } - return *o.TotalTeams -} - -// GetOrg returns the Org field. -func (o *OwnerInfo) GetOrg() *User { - if o == nil { - return nil - } - return o.Org -} - -// GetUser returns the User field. -func (o *OwnerInfo) GetUser() *User { - if o == nil { - return nil - } - return o.User -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *Package) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *Package) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. 
-func (p *Package) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *Package) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetOwner returns the Owner field. -func (p *Package) GetOwner() *User { - if p == nil { - return nil - } - return p.Owner -} - -// GetPackageType returns the PackageType field if it's non-nil, zero value otherwise. -func (p *Package) GetPackageType() string { - if p == nil || p.PackageType == nil { - return "" - } - return *p.PackageType -} - -// GetPackageVersion returns the PackageVersion field. -func (p *Package) GetPackageVersion() *PackageVersion { - if p == nil { - return nil - } - return p.PackageVersion -} - -// GetRegistry returns the Registry field. -func (p *Package) GetRegistry() *PackageRegistry { - if p == nil { - return nil - } - return p.Registry -} - -// GetRepository returns the Repository field. -func (p *Package) GetRepository() *Repository { - if p == nil { - return nil - } - return p.Repository -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *Package) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *Package) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetVersionCount returns the VersionCount field if it's non-nil, zero value otherwise. -func (p *Package) GetVersionCount() int64 { - if p == nil || p.VersionCount == nil { - return 0 - } - return *p.VersionCount -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (p *Package) GetVisibility() string { - if p == nil || p.Visibility == nil { - return "" - } - return *p.Visibility -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PackageEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetInstallation returns the Installation field. -func (p *PackageEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PackageEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetPackage returns the Package field. -func (p *PackageEvent) GetPackage() *Package { - if p == nil { - return nil - } - return p.Package -} - -// GetRepo returns the Repo field. -func (p *PackageEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PackageEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetAuthor returns the Author field. -func (p *PackageFile) GetAuthor() *User { - if p == nil { - return nil - } - return p.Author -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetContentType() string { - if p == nil || p.ContentType == nil { - return "" - } - return *p.ContentType -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (p *PackageFile) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetDownloadURL() string { - if p == nil || p.DownloadURL == nil { - return "" - } - return *p.DownloadURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetMD5 returns the MD5 field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetMD5() string { - if p == nil || p.MD5 == nil { - return "" - } - return *p.MD5 -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetSHA1 returns the SHA1 field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetSHA1() string { - if p == nil || p.SHA1 == nil { - return "" - } - return *p.SHA1 -} - -// GetSHA256 returns the SHA256 field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetSHA256() string { - if p == nil || p.SHA256 == nil { - return "" - } - return *p.SHA256 -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetSize() int64 { - if p == nil || p.Size == nil { - return 0 - } - return *p.Size -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *PackageFile) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetPackageType returns the PackageType field if it's non-nil, zero value otherwise. -func (p *PackageListOptions) GetPackageType() string { - if p == nil || p.PackageType == nil { - return "" - } - return *p.PackageType -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *PackageListOptions) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (p *PackageListOptions) GetVisibility() string { - if p == nil || p.Visibility == nil { - return "" - } - return *p.Visibility -} - -// GetContainer returns the Container field. -func (p *PackageMetadata) GetContainer() *PackageContainerMetadata { - if p == nil { - return nil - } - return p.Container -} - -// GetPackageType returns the PackageType field if it's non-nil, zero value otherwise. -func (p *PackageMetadata) GetPackageType() string { - if p == nil || p.PackageType == nil { - return "" - } - return *p.PackageType -} - -// GetAboutURL returns the AboutURL field if it's non-nil, zero value otherwise. -func (p *PackageRegistry) GetAboutURL() string { - if p == nil || p.AboutURL == nil { - return "" - } - return *p.AboutURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PackageRegistry) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. 
-func (p *PackageRegistry) GetType() string { - if p == nil || p.Type == nil { - return "" - } - return *p.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PackageRegistry) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetVendor returns the Vendor field if it's non-nil, zero value otherwise. -func (p *PackageRegistry) GetVendor() string { - if p == nil || p.Vendor == nil { - return "" - } - return *p.Vendor -} - -// GetAuthor returns the Author field. -func (p *PackageRelease) GetAuthor() *User { - if p == nil { - return nil - } - return p.Author -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetDraft() bool { - if p == nil || p.Draft == nil { - return false - } - return *p.Draft -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetPrerelease returns the Prerelease field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetPrerelease() bool { - if p == nil || p.Prerelease == nil { - return false - } - return *p.Prerelease -} - -// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetPublishedAt() Timestamp { - if p == nil || p.PublishedAt == nil { - return Timestamp{} - } - return *p.PublishedAt -} - -// GetTagName returns the TagName field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetTagName() string { - if p == nil || p.TagName == nil { - return "" - } - return *p.TagName -} - -// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetTargetCommitish() string { - if p == nil || p.TargetCommitish == nil { - return "" - } - return *p.TargetCommitish -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PackageRelease) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetAuthor returns the Author field. -func (p *PackageVersion) GetAuthor() *User { - if p == nil { - return nil - } - return p.Author -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetBodyHTML returns the BodyHTML field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetBodyHTML() string { - if p == nil || p.BodyHTML == nil { - return "" - } - return *p.BodyHTML -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (p *PackageVersion) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetDraft() bool { - if p == nil || p.Draft == nil { - return false - } - return *p.Draft -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetInstallationCommand returns the InstallationCommand field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetInstallationCommand() string { - if p == nil || p.InstallationCommand == nil { - return "" - } - return *p.InstallationCommand -} - -// GetManifest returns the Manifest field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetManifest() string { - if p == nil || p.Manifest == nil { - return "" - } - return *p.Manifest -} - -// GetMetadata returns the Metadata field. -func (p *PackageVersion) GetMetadata() *PackageMetadata { - if p == nil { - return nil - } - return p.Metadata -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetPackageHTMLURL returns the PackageHTMLURL field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetPackageHTMLURL() string { - if p == nil || p.PackageHTMLURL == nil { - return "" - } - return *p.PackageHTMLURL -} - -// GetPrerelease returns the Prerelease field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetPrerelease() bool { - if p == nil || p.Prerelease == nil { - return false - } - return *p.Prerelease -} - -// GetRelease returns the Release field. -func (p *PackageVersion) GetRelease() *PackageRelease { - if p == nil { - return nil - } - return p.Release -} - -// GetSummary returns the Summary field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetSummary() string { - if p == nil || p.Summary == nil { - return "" - } - return *p.Summary -} - -// GetTagName returns the TagName field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetTagName() string { - if p == nil || p.TagName == nil { - return "" - } - return *p.TagName -} - -// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetTargetCommitish() string { - if p == nil || p.TargetCommitish == nil { - return "" - } - return *p.TargetCommitish -} - -// GetTargetOID returns the TargetOID field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetTargetOID() string { - if p == nil || p.TargetOID == nil { - return "" - } - return *p.TargetOID -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PackageVersion) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetVersion returns the Version field if it's non-nil, zero value otherwise. 
-func (p *PackageVersion) GetVersion() string { - if p == nil || p.Version == nil { - return "" - } - return *p.Version -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *Page) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *Page) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetPageName returns the PageName field if it's non-nil, zero value otherwise. -func (p *Page) GetPageName() string { - if p == nil || p.PageName == nil { - return "" - } - return *p.PageName -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (p *Page) GetSHA() string { - if p == nil || p.SHA == nil { - return "" - } - return *p.SHA -} - -// GetSummary returns the Summary field if it's non-nil, zero value otherwise. -func (p *Page) GetSummary() string { - if p == nil || p.Summary == nil { - return "" - } - return *p.Summary -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (p *Page) GetTitle() string { - if p == nil || p.Title == nil { - return "" - } - return *p.Title -} - -// GetBuild returns the Build field. -func (p *PageBuildEvent) GetBuild() *PagesBuild { - if p == nil { - return nil - } - return p.Build -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PageBuildEvent) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetInstallation returns the Installation field. -func (p *PageBuildEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PageBuildEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetRepo returns the Repo field. -func (p *PageBuildEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PageBuildEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetBuildType returns the BuildType field if it's non-nil, zero value otherwise. -func (p *Pages) GetBuildType() string { - if p == nil || p.BuildType == nil { - return "" - } - return *p.BuildType -} - -// GetCNAME returns the CNAME field if it's non-nil, zero value otherwise. -func (p *Pages) GetCNAME() string { - if p == nil || p.CNAME == nil { - return "" - } - return *p.CNAME -} - -// GetCustom404 returns the Custom404 field if it's non-nil, zero value otherwise. -func (p *Pages) GetCustom404() bool { - if p == nil || p.Custom404 == nil { - return false - } - return *p.Custom404 -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *Pages) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetHTTPSCertificate returns the HTTPSCertificate field. -func (p *Pages) GetHTTPSCertificate() *PagesHTTPSCertificate { - if p == nil { - return nil - } - return p.HTTPSCertificate -} - -// GetHTTPSEnforced returns the HTTPSEnforced field if it's non-nil, zero value otherwise. -func (p *Pages) GetHTTPSEnforced() bool { - if p == nil || p.HTTPSEnforced == nil { - return false - } - return *p.HTTPSEnforced -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. 
-func (p *Pages) GetPublic() bool { - if p == nil || p.Public == nil { - return false - } - return *p.Public -} - -// GetSource returns the Source field. -func (p *Pages) GetSource() *PagesSource { - if p == nil { - return nil - } - return p.Source -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (p *Pages) GetStatus() string { - if p == nil || p.Status == nil { - return "" - } - return *p.Status -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *Pages) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetCommit returns the Commit field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetCommit() string { - if p == nil || p.Commit == nil { - return "" - } - return *p.Commit -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDuration returns the Duration field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetDuration() int { - if p == nil || p.Duration == nil { - return 0 - } - return *p.Duration -} - -// GetError returns the Error field. -func (p *PagesBuild) GetError() *PagesError { - if p == nil { - return nil - } - return p.Error -} - -// GetPusher returns the Pusher field. -func (p *PagesBuild) GetPusher() *User { - if p == nil { - return nil - } - return p.Pusher -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetStatus() string { - if p == nil || p.Status == nil { - return "" - } - return *p.Status -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PagesBuild) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetCAAError returns the CAAError field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetCAAError() string { - if p == nil || p.CAAError == nil { - return "" - } - return *p.CAAError -} - -// GetDNSResolves returns the DNSResolves field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetDNSResolves() bool { - if p == nil || p.DNSResolves == nil { - return false - } - return *p.DNSResolves -} - -// GetEnforcesHTTPS returns the EnforcesHTTPS field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetEnforcesHTTPS() bool { - if p == nil || p.EnforcesHTTPS == nil { - return false - } - return *p.EnforcesHTTPS -} - -// GetHasCNAMERecord returns the HasCNAMERecord field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetHasCNAMERecord() bool { - if p == nil || p.HasCNAMERecord == nil { - return false - } - return *p.HasCNAMERecord -} - -// GetHasMXRecordsPresent returns the HasMXRecordsPresent field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetHasMXRecordsPresent() bool { - if p == nil || p.HasMXRecordsPresent == nil { - return false - } - return *p.HasMXRecordsPresent -} - -// GetHost returns the Host field if it's non-nil, zero value otherwise. 
-func (p *PagesDomain) GetHost() string { - if p == nil || p.Host == nil { - return "" - } - return *p.Host -} - -// GetHTTPSError returns the HTTPSError field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetHTTPSError() string { - if p == nil || p.HTTPSError == nil { - return "" - } - return *p.HTTPSError -} - -// GetIsApexDomain returns the IsApexDomain field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsApexDomain() bool { - if p == nil || p.IsApexDomain == nil { - return false - } - return *p.IsApexDomain -} - -// GetIsARecord returns the IsARecord field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsARecord() bool { - if p == nil || p.IsARecord == nil { - return false - } - return *p.IsARecord -} - -// GetIsCloudflareIP returns the IsCloudflareIP field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsCloudflareIP() bool { - if p == nil || p.IsCloudflareIP == nil { - return false - } - return *p.IsCloudflareIP -} - -// GetIsCNAMEToFastly returns the IsCNAMEToFastly field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsCNAMEToFastly() bool { - if p == nil || p.IsCNAMEToFastly == nil { - return false - } - return *p.IsCNAMEToFastly -} - -// GetIsCNAMEToGithubUserDomain returns the IsCNAMEToGithubUserDomain field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsCNAMEToGithubUserDomain() bool { - if p == nil || p.IsCNAMEToGithubUserDomain == nil { - return false - } - return *p.IsCNAMEToGithubUserDomain -} - -// GetIsCNAMEToPagesDotGithubDotCom returns the IsCNAMEToPagesDotGithubDotCom field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsCNAMEToPagesDotGithubDotCom() bool { - if p == nil || p.IsCNAMEToPagesDotGithubDotCom == nil { - return false - } - return *p.IsCNAMEToPagesDotGithubDotCom -} - -// GetIsFastlyIP returns the IsFastlyIP field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsFastlyIP() bool { - if p == nil || p.IsFastlyIP == nil { - return false - } - return *p.IsFastlyIP -} - -// GetIsHTTPSEligible returns the IsHTTPSEligible field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsHTTPSEligible() bool { - if p == nil || p.IsHTTPSEligible == nil { - return false - } - return *p.IsHTTPSEligible -} - -// GetIsNonGithubPagesIPPresent returns the IsNonGithubPagesIPPresent field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsNonGithubPagesIPPresent() bool { - if p == nil || p.IsNonGithubPagesIPPresent == nil { - return false - } - return *p.IsNonGithubPagesIPPresent -} - -// GetIsOldIPAddress returns the IsOldIPAddress field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsOldIPAddress() bool { - if p == nil || p.IsOldIPAddress == nil { - return false - } - return *p.IsOldIPAddress -} - -// GetIsPagesDomain returns the IsPagesDomain field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsPagesDomain() bool { - if p == nil || p.IsPagesDomain == nil { - return false - } - return *p.IsPagesDomain -} - -// GetIsPointedToGithubPagesIP returns the IsPointedToGithubPagesIP field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsPointedToGithubPagesIP() bool { - if p == nil || p.IsPointedToGithubPagesIP == nil { - return false - } - return *p.IsPointedToGithubPagesIP -} - -// GetIsProxied returns the IsProxied field if it's non-nil, zero value otherwise. 
-func (p *PagesDomain) GetIsProxied() bool { - if p == nil || p.IsProxied == nil { - return false - } - return *p.IsProxied -} - -// GetIsServedByPages returns the IsServedByPages field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsServedByPages() bool { - if p == nil || p.IsServedByPages == nil { - return false - } - return *p.IsServedByPages -} - -// GetIsValid returns the IsValid field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsValid() bool { - if p == nil || p.IsValid == nil { - return false - } - return *p.IsValid -} - -// GetIsValidDomain returns the IsValidDomain field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetIsValidDomain() bool { - if p == nil || p.IsValidDomain == nil { - return false - } - return *p.IsValidDomain -} - -// GetNameservers returns the Nameservers field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetNameservers() string { - if p == nil || p.Nameservers == nil { - return "" - } - return *p.Nameservers -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetReason() string { - if p == nil || p.Reason == nil { - return "" - } - return *p.Reason -} - -// GetRespondsToHTTPS returns the RespondsToHTTPS field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetRespondsToHTTPS() bool { - if p == nil || p.RespondsToHTTPS == nil { - return false - } - return *p.RespondsToHTTPS -} - -// GetShouldBeARecord returns the ShouldBeARecord field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetShouldBeARecord() bool { - if p == nil || p.ShouldBeARecord == nil { - return false - } - return *p.ShouldBeARecord -} - -// GetURI returns the URI field if it's non-nil, zero value otherwise. -func (p *PagesDomain) GetURI() string { - if p == nil || p.URI == nil { - return "" - } - return *p.URI -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (p *PagesError) GetMessage() string { - if p == nil || p.Message == nil { - return "" - } - return *p.Message -} - -// GetAltDomain returns the AltDomain field. -func (p *PagesHealthCheckResponse) GetAltDomain() *PagesDomain { - if p == nil { - return nil - } - return p.AltDomain -} - -// GetDomain returns the Domain field. -func (p *PagesHealthCheckResponse) GetDomain() *PagesDomain { - if p == nil { - return nil - } - return p.Domain -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (p *PagesHTTPSCertificate) GetDescription() string { - if p == nil || p.Description == nil { - return "" - } - return *p.Description -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (p *PagesHTTPSCertificate) GetExpiresAt() string { - if p == nil || p.ExpiresAt == nil { - return "" - } - return *p.ExpiresAt -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *PagesHTTPSCertificate) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetBranch returns the Branch field if it's non-nil, zero value otherwise. -func (p *PagesSource) GetBranch() string { - if p == nil || p.Branch == nil { - return "" - } - return *p.Branch -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (p *PagesSource) GetPath() string { - if p == nil || p.Path == nil { - return "" - } - return *p.Path -} - -// GetTotalPages returns the TotalPages field if it's non-nil, zero value otherwise. 
-func (p *PageStats) GetTotalPages() int { - if p == nil || p.TotalPages == nil { - return 0 - } - return *p.TotalPages -} - -// GetBuildType returns the BuildType field if it's non-nil, zero value otherwise. -func (p *PagesUpdate) GetBuildType() string { - if p == nil || p.BuildType == nil { - return "" - } - return *p.BuildType -} - -// GetCNAME returns the CNAME field if it's non-nil, zero value otherwise. -func (p *PagesUpdate) GetCNAME() string { - if p == nil || p.CNAME == nil { - return "" - } - return *p.CNAME -} - -// GetHTTPSEnforced returns the HTTPSEnforced field if it's non-nil, zero value otherwise. -func (p *PagesUpdate) GetHTTPSEnforced() bool { - if p == nil || p.HTTPSEnforced == nil { - return false - } - return *p.HTTPSEnforced -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. -func (p *PagesUpdate) GetPublic() bool { - if p == nil || p.Public == nil { - return false - } - return *p.Public -} - -// GetSource returns the Source field. -func (p *PagesUpdate) GetSource() *PagesSource { - if p == nil { - return nil - } - return p.Source -} - -// GetOrg returns the Org map if it's non-nil, an empty map otherwise. -func (p *PersonalAccessTokenPermissions) GetOrg() map[string]string { - if p == nil || p.Org == nil { - return map[string]string{} - } - return p.Org -} - -// GetOther returns the Other map if it's non-nil, an empty map otherwise. -func (p *PersonalAccessTokenPermissions) GetOther() map[string]string { - if p == nil || p.Other == nil { - return map[string]string{} - } - return p.Other -} - -// GetRepo returns the Repo map if it's non-nil, an empty map otherwise. -func (p *PersonalAccessTokenPermissions) GetRepo() map[string]string { - if p == nil || p.Repo == nil { - return map[string]string{} - } - return p.Repo -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetOrg returns the Org field. -func (p *PersonalAccessTokenRequest) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetOwner returns the Owner field. -func (p *PersonalAccessTokenRequest) GetOwner() *User { - if p == nil { - return nil - } - return p.Owner -} - -// GetPermissionsAdded returns the PermissionsAdded field. -func (p *PersonalAccessTokenRequest) GetPermissionsAdded() *PersonalAccessTokenPermissions { - if p == nil { - return nil - } - return p.PermissionsAdded -} - -// GetPermissionsResult returns the PermissionsResult field. -func (p *PersonalAccessTokenRequest) GetPermissionsResult() *PersonalAccessTokenPermissions { - if p == nil { - return nil - } - return p.PermissionsResult -} - -// GetPermissionsUpgraded returns the PermissionsUpgraded field. -func (p *PersonalAccessTokenRequest) GetPermissionsUpgraded() *PersonalAccessTokenPermissions { - if p == nil { - return nil - } - return p.PermissionsUpgraded -} - -// GetRepositoryCount returns the RepositoryCount field if it's non-nil, zero value otherwise. 
-func (p *PersonalAccessTokenRequest) GetRepositoryCount() int64 { - if p == nil || p.RepositoryCount == nil { - return 0 - } - return *p.RepositoryCount -} - -// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetRepositorySelection() string { - if p == nil || p.RepositorySelection == nil { - return "" - } - return *p.RepositorySelection -} - -// GetTokenExpired returns the TokenExpired field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetTokenExpired() bool { - if p == nil || p.TokenExpired == nil { - return false - } - return *p.TokenExpired -} - -// GetTokenExpiresAt returns the TokenExpiresAt field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetTokenExpiresAt() Timestamp { - if p == nil || p.TokenExpiresAt == nil { - return Timestamp{} - } - return *p.TokenExpiresAt -} - -// GetTokenLastUsedAt returns the TokenLastUsedAt field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequest) GetTokenLastUsedAt() Timestamp { - if p == nil || p.TokenLastUsedAt == nil { - return Timestamp{} - } - return *p.TokenLastUsedAt -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PersonalAccessTokenRequestEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetInstallation returns the Installation field. -func (p *PersonalAccessTokenRequestEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PersonalAccessTokenRequestEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetPersonalAccessTokenRequest returns the PersonalAccessTokenRequest field. -func (p *PersonalAccessTokenRequestEvent) GetPersonalAccessTokenRequest() *PersonalAccessTokenRequest { - if p == nil { - return nil - } - return p.PersonalAccessTokenRequest -} - -// GetSender returns the Sender field. -func (p *PersonalAccessTokenRequestEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetHook returns the Hook field. -func (p *PingEvent) GetHook() *Hook { - if p == nil { - return nil - } - return p.Hook -} - -// GetHookID returns the HookID field if it's non-nil, zero value otherwise. -func (p *PingEvent) GetHookID() int64 { - if p == nil || p.HookID == nil { - return 0 - } - return *p.HookID -} - -// GetInstallation returns the Installation field. -func (p *PingEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PingEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetRepo returns the Repo field. -func (p *PingEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PingEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetZen returns the Zen field if it's non-nil, zero value otherwise. -func (p *PingEvent) GetZen() string { - if p == nil || p.Zen == nil { - return "" - } - return *p.Zen -} - -// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. 
-func (p *Plan) GetCollaborators() int { - if p == nil || p.Collaborators == nil { - return 0 - } - return *p.Collaborators -} - -// GetFilledSeats returns the FilledSeats field if it's non-nil, zero value otherwise. -func (p *Plan) GetFilledSeats() int { - if p == nil || p.FilledSeats == nil { - return 0 - } - return *p.FilledSeats -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *Plan) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetPrivateRepos returns the PrivateRepos field if it's non-nil, zero value otherwise. -func (p *Plan) GetPrivateRepos() int64 { - if p == nil || p.PrivateRepos == nil { - return 0 - } - return *p.PrivateRepos -} - -// GetSeats returns the Seats field if it's non-nil, zero value otherwise. -func (p *Plan) GetSeats() int { - if p == nil || p.Seats == nil { - return 0 - } - return *p.Seats -} - -// GetSpace returns the Space field if it's non-nil, zero value otherwise. -func (p *Plan) GetSpace() int { - if p == nil || p.Space == nil { - return 0 - } - return *p.Space -} - -// GetCode returns the Code field if it's non-nil, zero value otherwise. -func (p *PolicyOverrideReason) GetCode() string { - if p == nil || p.Code == nil { - return "" - } - return *p.Code -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (p *PolicyOverrideReason) GetMessage() string { - if p == nil || p.Message == nil { - return "" - } - return *p.Message -} - -// GetConfigURL returns the ConfigURL field if it's non-nil, zero value otherwise. -func (p *PreReceiveHook) GetConfigURL() string { - if p == nil || p.ConfigURL == nil { - return "" - } - return *p.ConfigURL -} - -// GetEnforcement returns the Enforcement field if it's non-nil, zero value otherwise. -func (p *PreReceiveHook) GetEnforcement() string { - if p == nil || p.Enforcement == nil { - return "" - } - return *p.Enforcement -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PreReceiveHook) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PreReceiveHook) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetHRef returns the HRef field if it's non-nil, zero value otherwise. -func (p *PRLink) GetHRef() string { - if p == nil || p.HRef == nil { - return "" - } - return *p.HRef -} - -// GetComments returns the Comments field. -func (p *PRLinks) GetComments() *PRLink { - if p == nil { - return nil - } - return p.Comments -} - -// GetCommits returns the Commits field. -func (p *PRLinks) GetCommits() *PRLink { - if p == nil { - return nil - } - return p.Commits -} - -// GetHTML returns the HTML field. -func (p *PRLinks) GetHTML() *PRLink { - if p == nil { - return nil - } - return p.HTML -} - -// GetIssue returns the Issue field. -func (p *PRLinks) GetIssue() *PRLink { - if p == nil { - return nil - } - return p.Issue -} - -// GetReviewComment returns the ReviewComment field. -func (p *PRLinks) GetReviewComment() *PRLink { - if p == nil { - return nil - } - return p.ReviewComment -} - -// GetReviewComments returns the ReviewComments field. -func (p *PRLinks) GetReviewComments() *PRLink { - if p == nil { - return nil - } - return p.ReviewComments -} - -// GetSelf returns the Self field. -func (p *PRLinks) GetSelf() *PRLink { - if p == nil { - return nil - } - return p.Self -} - -// GetStatuses returns the Statuses field. 
-func (p *PRLinks) GetStatuses() *PRLink { - if p == nil { - return nil - } - return p.Statuses -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *Project) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetColumnsURL returns the ColumnsURL field if it's non-nil, zero value otherwise. -func (p *Project) GetColumnsURL() string { - if p == nil || p.ColumnsURL == nil { - return "" - } - return *p.ColumnsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *Project) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetCreator returns the Creator field. -func (p *Project) GetCreator() *User { - if p == nil { - return nil - } - return p.Creator -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *Project) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *Project) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *Project) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *Project) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (p *Project) GetNumber() int { - if p == nil || p.Number == nil { - return 0 - } - return *p.Number -} - -// GetOrganizationPermission returns the OrganizationPermission field if it's non-nil, zero value otherwise. -func (p *Project) GetOrganizationPermission() string { - if p == nil || p.OrganizationPermission == nil { - return "" - } - return *p.OrganizationPermission -} - -// GetOwnerURL returns the OwnerURL field if it's non-nil, zero value otherwise. -func (p *Project) GetOwnerURL() string { - if p == nil || p.OwnerURL == nil { - return "" - } - return *p.OwnerURL -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (p *Project) GetPrivate() bool { - if p == nil || p.Private == nil { - return false - } - return *p.Private -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *Project) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *Project) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *Project) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (p *ProjectBody) GetFrom() string { - if p == nil || p.From == nil { - return "" - } - return *p.From -} - -// GetArchived returns the Archived field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetArchived() bool { - if p == nil || p.Archived == nil { - return false - } - return *p.Archived -} - -// GetColumnID returns the ColumnID field if it's non-nil, zero value otherwise. 
-func (p *ProjectCard) GetColumnID() int64 { - if p == nil || p.ColumnID == nil { - return 0 - } - return *p.ColumnID -} - -// GetColumnName returns the ColumnName field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetColumnName() string { - if p == nil || p.ColumnName == nil { - return "" - } - return *p.ColumnName -} - -// GetColumnURL returns the ColumnURL field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetColumnURL() string { - if p == nil || p.ColumnURL == nil { - return "" - } - return *p.ColumnURL -} - -// GetContentURL returns the ContentURL field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetContentURL() string { - if p == nil || p.ContentURL == nil { - return "" - } - return *p.ContentURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetCreator returns the Creator field. -func (p *ProjectCard) GetCreator() *User { - if p == nil { - return nil - } - return p.Creator -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetNote returns the Note field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetNote() string { - if p == nil || p.Note == nil { - return "" - } - return *p.Note -} - -// GetPreviousColumnName returns the PreviousColumnName field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetPreviousColumnName() string { - if p == nil || p.PreviousColumnName == nil { - return "" - } - return *p.PreviousColumnName -} - -// GetProjectID returns the ProjectID field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetProjectID() int64 { - if p == nil || p.ProjectID == nil { - return 0 - } - return *p.ProjectID -} - -// GetProjectURL returns the ProjectURL field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetProjectURL() string { - if p == nil || p.ProjectURL == nil { - return "" - } - return *p.ProjectURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *ProjectCard) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetNote returns the Note field. -func (p *ProjectCardChange) GetNote() *ProjectCardNote { - if p == nil { - return nil - } - return p.Note -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectCardEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetAfterID returns the AfterID field if it's non-nil, zero value otherwise. -func (p *ProjectCardEvent) GetAfterID() int64 { - if p == nil || p.AfterID == nil { - return 0 - } - return *p.AfterID -} - -// GetChanges returns the Changes field. -func (p *ProjectCardEvent) GetChanges() *ProjectCardChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. 
-func (p *ProjectCardEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *ProjectCardEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetProjectCard returns the ProjectCard field. -func (p *ProjectCardEvent) GetProjectCard() *ProjectCard { - if p == nil { - return nil - } - return p.ProjectCard -} - -// GetRepo returns the Repo field. -func (p *ProjectCardEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *ProjectCardEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetArchivedState returns the ArchivedState field if it's non-nil, zero value otherwise. -func (p *ProjectCardListOptions) GetArchivedState() string { - if p == nil || p.ArchivedState == nil { - return "" - } - return *p.ArchivedState -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (p *ProjectCardNote) GetFrom() string { - if p == nil || p.From == nil { - return "" - } - return *p.From -} - -// GetArchived returns the Archived field if it's non-nil, zero value otherwise. -func (p *ProjectCardOptions) GetArchived() bool { - if p == nil || p.Archived == nil { - return false - } - return *p.Archived -} - -// GetBody returns the Body field. -func (p *ProjectChange) GetBody() *ProjectBody { - if p == nil { - return nil - } - return p.Body -} - -// GetName returns the Name field. -func (p *ProjectChange) GetName() *ProjectName { - if p == nil { - return nil - } - return p.Name -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (p *ProjectCollaboratorOptions) GetPermission() string { - if p == nil || p.Permission == nil { - return "" - } - return *p.Permission -} - -// GetCardsURL returns the CardsURL field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetCardsURL() string { - if p == nil || p.CardsURL == nil { - return "" - } - return *p.CardsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetProjectURL returns the ProjectURL field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetProjectURL() string { - if p == nil || p.ProjectURL == nil { - return "" - } - return *p.ProjectURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *ProjectColumn) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetName returns the Name field. 
-func (p *ProjectColumnChange) GetName() *ProjectColumnName { - if p == nil { - return nil - } - return p.Name -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectColumnEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetAfterID returns the AfterID field if it's non-nil, zero value otherwise. -func (p *ProjectColumnEvent) GetAfterID() int64 { - if p == nil || p.AfterID == nil { - return 0 - } - return *p.AfterID -} - -// GetChanges returns the Changes field. -func (p *ProjectColumnEvent) GetChanges() *ProjectColumnChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. -func (p *ProjectColumnEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *ProjectColumnEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetProjectColumn returns the ProjectColumn field. -func (p *ProjectColumnEvent) GetProjectColumn() *ProjectColumn { - if p == nil { - return nil - } - return p.ProjectColumn -} - -// GetRepo returns the Repo field. -func (p *ProjectColumnEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *ProjectColumnEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (p *ProjectColumnName) GetFrom() string { - if p == nil || p.From == nil { - return "" - } - return *p.From -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetChanges returns the Changes field. -func (p *ProjectEvent) GetChanges() *ProjectChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. -func (p *ProjectEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *ProjectEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetProject returns the Project field. -func (p *ProjectEvent) GetProject() *Project { - if p == nil { - return nil - } - return p.Project -} - -// GetRepo returns the Repo field. -func (p *ProjectEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *ProjectEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (p *ProjectName) GetFrom() string { - if p == nil || p.From == nil { - return "" - } - return *p.From -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *ProjectOptions) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *ProjectOptions) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetOrganizationPermission returns the OrganizationPermission field if it's non-nil, zero value otherwise. 
-func (p *ProjectOptions) GetOrganizationPermission() string { - if p == nil || p.OrganizationPermission == nil { - return "" - } - return *p.OrganizationPermission -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (p *ProjectOptions) GetPrivate() bool { - if p == nil || p.Private == nil { - return false - } - return *p.Private -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *ProjectOptions) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (p *ProjectPermissionLevel) GetPermission() string { - if p == nil || p.Permission == nil { - return "" - } - return *p.Permission -} - -// GetUser returns the User field. -func (p *ProjectPermissionLevel) GetUser() *User { - if p == nil { - return nil - } - return p.User -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetClosedAt() Timestamp { - if p == nil || p.ClosedAt == nil { - return Timestamp{} - } - return *p.ClosedAt -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetCreator returns the Creator field. -func (p *ProjectsV2) GetCreator() *User { - if p == nil { - return nil - } - return p.Creator -} - -// GetDeletedAt returns the DeletedAt field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetDeletedAt() Timestamp { - if p == nil || p.DeletedAt == nil { - return Timestamp{} - } - return *p.DeletedAt -} - -// GetDeletedBy returns the DeletedBy field. -func (p *ProjectsV2) GetDeletedBy() *User { - if p == nil { - return nil - } - return p.DeletedBy -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetDescription() string { - if p == nil || p.Description == nil { - return "" - } - return *p.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetNumber() int { - if p == nil || p.Number == nil { - return 0 - } - return *p.Number -} - -// GetOwner returns the Owner field. -func (p *ProjectsV2) GetOwner() *User { - if p == nil { - return nil - } - return p.Owner -} - -// GetPublic returns the Public field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetPublic() bool { - if p == nil || p.Public == nil { - return false - } - return *p.Public -} - -// GetShortDescription returns the ShortDescription field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetShortDescription() string { - if p == nil || p.ShortDescription == nil { - return "" - } - return *p.ShortDescription -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (p *ProjectsV2) GetTitle() string { - if p == nil || p.Title == nil { - return "" - } - return *p.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (p *ProjectsV2) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectV2Event) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetInstallation returns the Installation field. -func (p *ProjectV2Event) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *ProjectV2Event) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetProjectsV2 returns the ProjectsV2 field. -func (p *ProjectV2Event) GetProjectsV2() *ProjectsV2 { - if p == nil { - return nil - } - return p.ProjectsV2 -} - -// GetSender returns the Sender field. -func (p *ProjectV2Event) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetArchivedAt returns the ArchivedAt field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetArchivedAt() Timestamp { - if p == nil || p.ArchivedAt == nil { - return Timestamp{} - } - return *p.ArchivedAt -} - -// GetContentNodeID returns the ContentNodeID field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetContentNodeID() string { - if p == nil || p.ContentNodeID == nil { - return "" - } - return *p.ContentNodeID -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetContentType() string { - if p == nil || p.ContentType == nil { - return "" - } - return *p.ContentType -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetCreator returns the Creator field. -func (p *ProjectV2Item) GetCreator() *User { - if p == nil { - return nil - } - return p.Creator -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetProjectNodeID returns the ProjectNodeID field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetProjectNodeID() string { - if p == nil || p.ProjectNodeID == nil { - return "" - } - return *p.ProjectNodeID -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *ProjectV2Item) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetArchivedAt returns the ArchivedAt field. -func (p *ProjectV2ItemChange) GetArchivedAt() *ArchivedAt { - if p == nil { - return nil - } - return p.ArchivedAt -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *ProjectV2ItemEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetChanges returns the Changes field. -func (p *ProjectV2ItemEvent) GetChanges() *ProjectV2ItemChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. 
-func (p *ProjectV2ItemEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *ProjectV2ItemEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetProjectV2Item returns the ProjectV2Item field. -func (p *ProjectV2ItemEvent) GetProjectV2Item() *ProjectV2Item { - if p == nil { - return nil - } - return p.ProjectV2Item -} - -// GetSender returns the Sender field. -func (p *ProjectV2ItemEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetAllowDeletions returns the AllowDeletions field. -func (p *Protection) GetAllowDeletions() *AllowDeletions { - if p == nil { - return nil - } - return p.AllowDeletions -} - -// GetAllowForcePushes returns the AllowForcePushes field. -func (p *Protection) GetAllowForcePushes() *AllowForcePushes { - if p == nil { - return nil - } - return p.AllowForcePushes -} - -// GetAllowForkSyncing returns the AllowForkSyncing field. -func (p *Protection) GetAllowForkSyncing() *AllowForkSyncing { - if p == nil { - return nil - } - return p.AllowForkSyncing -} - -// GetBlockCreations returns the BlockCreations field. -func (p *Protection) GetBlockCreations() *BlockCreations { - if p == nil { - return nil - } - return p.BlockCreations -} - -// GetEnforceAdmins returns the EnforceAdmins field. -func (p *Protection) GetEnforceAdmins() *AdminEnforcement { - if p == nil { - return nil - } - return p.EnforceAdmins -} - -// GetLockBranch returns the LockBranch field. -func (p *Protection) GetLockBranch() *LockBranch { - if p == nil { - return nil - } - return p.LockBranch -} - -// GetRequiredConversationResolution returns the RequiredConversationResolution field. -func (p *Protection) GetRequiredConversationResolution() *RequiredConversationResolution { - if p == nil { - return nil - } - return p.RequiredConversationResolution -} - -// GetRequiredPullRequestReviews returns the RequiredPullRequestReviews field. -func (p *Protection) GetRequiredPullRequestReviews() *PullRequestReviewsEnforcement { - if p == nil { - return nil - } - return p.RequiredPullRequestReviews -} - -// GetRequiredSignatures returns the RequiredSignatures field. -func (p *Protection) GetRequiredSignatures() *SignaturesProtectedBranch { - if p == nil { - return nil - } - return p.RequiredSignatures -} - -// GetRequiredStatusChecks returns the RequiredStatusChecks field. -func (p *Protection) GetRequiredStatusChecks() *RequiredStatusChecks { - if p == nil { - return nil - } - return p.RequiredStatusChecks -} - -// GetRequireLinearHistory returns the RequireLinearHistory field. -func (p *Protection) GetRequireLinearHistory() *RequireLinearHistory { - if p == nil { - return nil - } - return p.RequireLinearHistory -} - -// GetRestrictions returns the Restrictions field. -func (p *Protection) GetRestrictions() *BranchRestrictions { - if p == nil { - return nil - } - return p.Restrictions -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *Protection) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetAdminEnforced returns the AdminEnforced field. -func (p *ProtectionChanges) GetAdminEnforced() *AdminEnforcedChanges { - if p == nil { - return nil - } - return p.AdminEnforced -} - -// GetAllowDeletionsEnforcementLevel returns the AllowDeletionsEnforcementLevel field. 
-func (p *ProtectionChanges) GetAllowDeletionsEnforcementLevel() *AllowDeletionsEnforcementLevelChanges { - if p == nil { - return nil - } - return p.AllowDeletionsEnforcementLevel -} - -// GetAuthorizedActorNames returns the AuthorizedActorNames field. -func (p *ProtectionChanges) GetAuthorizedActorNames() *AuthorizedActorNames { - if p == nil { - return nil - } - return p.AuthorizedActorNames -} - -// GetAuthorizedActorsOnly returns the AuthorizedActorsOnly field. -func (p *ProtectionChanges) GetAuthorizedActorsOnly() *AuthorizedActorsOnly { - if p == nil { - return nil - } - return p.AuthorizedActorsOnly -} - -// GetAuthorizedDismissalActorsOnly returns the AuthorizedDismissalActorsOnly field. -func (p *ProtectionChanges) GetAuthorizedDismissalActorsOnly() *AuthorizedDismissalActorsOnlyChanges { - if p == nil { - return nil - } - return p.AuthorizedDismissalActorsOnly -} - -// GetCreateProtected returns the CreateProtected field. -func (p *ProtectionChanges) GetCreateProtected() *CreateProtectedChanges { - if p == nil { - return nil - } - return p.CreateProtected -} - -// GetDismissStaleReviewsOnPush returns the DismissStaleReviewsOnPush field. -func (p *ProtectionChanges) GetDismissStaleReviewsOnPush() *DismissStaleReviewsOnPushChanges { - if p == nil { - return nil - } - return p.DismissStaleReviewsOnPush -} - -// GetLinearHistoryRequirementEnforcementLevel returns the LinearHistoryRequirementEnforcementLevel field. -func (p *ProtectionChanges) GetLinearHistoryRequirementEnforcementLevel() *LinearHistoryRequirementEnforcementLevelChanges { - if p == nil { - return nil - } - return p.LinearHistoryRequirementEnforcementLevel -} - -// GetPullRequestReviewsEnforcementLevel returns the PullRequestReviewsEnforcementLevel field. -func (p *ProtectionChanges) GetPullRequestReviewsEnforcementLevel() *PullRequestReviewsEnforcementLevelChanges { - if p == nil { - return nil - } - return p.PullRequestReviewsEnforcementLevel -} - -// GetRequireCodeOwnerReview returns the RequireCodeOwnerReview field. -func (p *ProtectionChanges) GetRequireCodeOwnerReview() *RequireCodeOwnerReviewChanges { - if p == nil { - return nil - } - return p.RequireCodeOwnerReview -} - -// GetRequiredConversationResolutionLevel returns the RequiredConversationResolutionLevel field. -func (p *ProtectionChanges) GetRequiredConversationResolutionLevel() *RequiredConversationResolutionLevelChanges { - if p == nil { - return nil - } - return p.RequiredConversationResolutionLevel -} - -// GetRequiredDeploymentsEnforcementLevel returns the RequiredDeploymentsEnforcementLevel field. -func (p *ProtectionChanges) GetRequiredDeploymentsEnforcementLevel() *RequiredDeploymentsEnforcementLevelChanges { - if p == nil { - return nil - } - return p.RequiredDeploymentsEnforcementLevel -} - -// GetRequiredStatusChecks returns the RequiredStatusChecks field. -func (p *ProtectionChanges) GetRequiredStatusChecks() *RequiredStatusChecksChanges { - if p == nil { - return nil - } - return p.RequiredStatusChecks -} - -// GetRequiredStatusChecksEnforcementLevel returns the RequiredStatusChecksEnforcementLevel field. -func (p *ProtectionChanges) GetRequiredStatusChecksEnforcementLevel() *RequiredStatusChecksEnforcementLevelChanges { - if p == nil { - return nil - } - return p.RequiredStatusChecksEnforcementLevel -} - -// GetSignatureRequirementEnforcementLevel returns the SignatureRequirementEnforcementLevel field. 
-func (p *ProtectionChanges) GetSignatureRequirementEnforcementLevel() *SignatureRequirementEnforcementLevelChanges { - if p == nil { - return nil - } - return p.SignatureRequirementEnforcementLevel -} - -// GetAllowDeletions returns the AllowDeletions field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetAllowDeletions() bool { - if p == nil || p.AllowDeletions == nil { - return false - } - return *p.AllowDeletions -} - -// GetAllowForcePushes returns the AllowForcePushes field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetAllowForcePushes() bool { - if p == nil || p.AllowForcePushes == nil { - return false - } - return *p.AllowForcePushes -} - -// GetAllowForkSyncing returns the AllowForkSyncing field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetAllowForkSyncing() bool { - if p == nil || p.AllowForkSyncing == nil { - return false - } - return *p.AllowForkSyncing -} - -// GetBlockCreations returns the BlockCreations field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetBlockCreations() bool { - if p == nil || p.BlockCreations == nil { - return false - } - return *p.BlockCreations -} - -// GetLockBranch returns the LockBranch field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetLockBranch() bool { - if p == nil || p.LockBranch == nil { - return false - } - return *p.LockBranch -} - -// GetRequiredConversationResolution returns the RequiredConversationResolution field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetRequiredConversationResolution() bool { - if p == nil || p.RequiredConversationResolution == nil { - return false - } - return *p.RequiredConversationResolution -} - -// GetRequiredPullRequestReviews returns the RequiredPullRequestReviews field. -func (p *ProtectionRequest) GetRequiredPullRequestReviews() *PullRequestReviewsEnforcementRequest { - if p == nil { - return nil - } - return p.RequiredPullRequestReviews -} - -// GetRequiredStatusChecks returns the RequiredStatusChecks field. -func (p *ProtectionRequest) GetRequiredStatusChecks() *RequiredStatusChecks { - if p == nil { - return nil - } - return p.RequiredStatusChecks -} - -// GetRequireLinearHistory returns the RequireLinearHistory field if it's non-nil, zero value otherwise. -func (p *ProtectionRequest) GetRequireLinearHistory() bool { - if p == nil || p.RequireLinearHistory == nil { - return false - } - return *p.RequireLinearHistory -} - -// GetRestrictions returns the Restrictions field. -func (p *ProtectionRequest) GetRestrictions() *BranchRestrictionsRequest { - if p == nil { - return nil - } - return p.Restrictions -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *ProtectionRule) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *ProtectionRule) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetPreventSelfReview returns the PreventSelfReview field if it's non-nil, zero value otherwise. -func (p *ProtectionRule) GetPreventSelfReview() bool { - if p == nil || p.PreventSelfReview == nil { - return false - } - return *p.PreventSelfReview -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. 
-func (p *ProtectionRule) GetType() string { - if p == nil || p.Type == nil { - return "" - } - return *p.Type -} - -// GetWaitTimer returns the WaitTimer field if it's non-nil, zero value otherwise. -func (p *ProtectionRule) GetWaitTimer() int { - if p == nil || p.WaitTimer == nil { - return 0 - } - return *p.WaitTimer -} - -// GetInstallation returns the Installation field. -func (p *PublicEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PublicEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetRepo returns the Repo field. -func (p *PublicEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PublicEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (p *PublicKey) GetKey() string { - if p == nil || p.Key == nil { - return "" - } - return *p.Key -} - -// GetKeyID returns the KeyID field if it's non-nil, zero value otherwise. -func (p *PublicKey) GetKeyID() string { - if p == nil || p.KeyID == nil { - return "" - } - return *p.KeyID -} - -// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetActiveLockReason() string { - if p == nil || p.ActiveLockReason == nil { - return "" - } - return *p.ActiveLockReason -} - -// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetAdditions() int { - if p == nil || p.Additions == nil { - return 0 - } - return *p.Additions -} - -// GetAssignee returns the Assignee field. -func (p *PullRequest) GetAssignee() *User { - if p == nil { - return nil - } - return p.Assignee -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetAuthorAssociation() string { - if p == nil || p.AuthorAssociation == nil { - return "" - } - return *p.AuthorAssociation -} - -// GetAutoMerge returns the AutoMerge field. -func (p *PullRequest) GetAutoMerge() *PullRequestAutoMerge { - if p == nil { - return nil - } - return p.AutoMerge -} - -// GetBase returns the Base field. -func (p *PullRequest) GetBase() *PullRequestBranch { - if p == nil { - return nil - } - return p.Base -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetChangedFiles returns the ChangedFiles field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetChangedFiles() int { - if p == nil || p.ChangedFiles == nil { - return 0 - } - return *p.ChangedFiles -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetClosedAt() Timestamp { - if p == nil || p.ClosedAt == nil { - return Timestamp{} - } - return *p.ClosedAt -} - -// GetComments returns the Comments field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetComments() int { - if p == nil || p.Comments == nil { - return 0 - } - return *p.Comments -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. 
-func (p *PullRequest) GetCommentsURL() string { - if p == nil || p.CommentsURL == nil { - return "" - } - return *p.CommentsURL -} - -// GetCommits returns the Commits field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetCommits() int { - if p == nil || p.Commits == nil { - return 0 - } - return *p.Commits -} - -// GetCommitsURL returns the CommitsURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetCommitsURL() string { - if p == nil || p.CommitsURL == nil { - return "" - } - return *p.CommitsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetDeletions() int { - if p == nil || p.Deletions == nil { - return 0 - } - return *p.Deletions -} - -// GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetDiffURL() string { - if p == nil || p.DiffURL == nil { - return "" - } - return *p.DiffURL -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetDraft() bool { - if p == nil || p.Draft == nil { - return false - } - return *p.Draft -} - -// GetHead returns the Head field. -func (p *PullRequest) GetHead() *PullRequestBranch { - if p == nil { - return nil - } - return p.Head -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetIssueURL returns the IssueURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetIssueURL() string { - if p == nil || p.IssueURL == nil { - return "" - } - return *p.IssueURL -} - -// GetLinks returns the Links field. -func (p *PullRequest) GetLinks() *PRLinks { - if p == nil { - return nil - } - return p.Links -} - -// GetLocked returns the Locked field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetLocked() bool { - if p == nil || p.Locked == nil { - return false - } - return *p.Locked -} - -// GetMaintainerCanModify returns the MaintainerCanModify field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetMaintainerCanModify() bool { - if p == nil || p.MaintainerCanModify == nil { - return false - } - return *p.MaintainerCanModify -} - -// GetMergeable returns the Mergeable field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetMergeable() bool { - if p == nil || p.Mergeable == nil { - return false - } - return *p.Mergeable -} - -// GetMergeableState returns the MergeableState field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetMergeableState() string { - if p == nil || p.MergeableState == nil { - return "" - } - return *p.MergeableState -} - -// GetMergeCommitSHA returns the MergeCommitSHA field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetMergeCommitSHA() string { - if p == nil || p.MergeCommitSHA == nil { - return "" - } - return *p.MergeCommitSHA -} - -// GetMerged returns the Merged field if it's non-nil, zero value otherwise. 
-func (p *PullRequest) GetMerged() bool { - if p == nil || p.Merged == nil { - return false - } - return *p.Merged -} - -// GetMergedAt returns the MergedAt field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetMergedAt() Timestamp { - if p == nil || p.MergedAt == nil { - return Timestamp{} - } - return *p.MergedAt -} - -// GetMergedBy returns the MergedBy field. -func (p *PullRequest) GetMergedBy() *User { - if p == nil { - return nil - } - return p.MergedBy -} - -// GetMilestone returns the Milestone field. -func (p *PullRequest) GetMilestone() *Milestone { - if p == nil { - return nil - } - return p.Milestone -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetNumber() int { - if p == nil || p.Number == nil { - return 0 - } - return *p.Number -} - -// GetPatchURL returns the PatchURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetPatchURL() string { - if p == nil || p.PatchURL == nil { - return "" - } - return *p.PatchURL -} - -// GetRebaseable returns the Rebaseable field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetRebaseable() bool { - if p == nil || p.Rebaseable == nil { - return false - } - return *p.Rebaseable -} - -// GetReviewComments returns the ReviewComments field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetReviewComments() int { - if p == nil || p.ReviewComments == nil { - return 0 - } - return *p.ReviewComments -} - -// GetReviewCommentsURL returns the ReviewCommentsURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetReviewCommentsURL() string { - if p == nil || p.ReviewCommentsURL == nil { - return "" - } - return *p.ReviewCommentsURL -} - -// GetReviewCommentURL returns the ReviewCommentURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetReviewCommentURL() string { - if p == nil || p.ReviewCommentURL == nil { - return "" - } - return *p.ReviewCommentURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetStatusesURL() string { - if p == nil || p.StatusesURL == nil { - return "" - } - return *p.StatusesURL -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetTitle() string { - if p == nil || p.Title == nil { - return "" - } - return *p.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PullRequest) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetUser returns the User field. -func (p *PullRequest) GetUser() *User { - if p == nil { - return nil - } - return p.User -} - -// GetCommitMessage returns the CommitMessage field if it's non-nil, zero value otherwise. 
-func (p *PullRequestAutoMerge) GetCommitMessage() string { - if p == nil || p.CommitMessage == nil { - return "" - } - return *p.CommitMessage -} - -// GetCommitTitle returns the CommitTitle field if it's non-nil, zero value otherwise. -func (p *PullRequestAutoMerge) GetCommitTitle() string { - if p == nil || p.CommitTitle == nil { - return "" - } - return *p.CommitTitle -} - -// GetEnabledBy returns the EnabledBy field. -func (p *PullRequestAutoMerge) GetEnabledBy() *User { - if p == nil { - return nil - } - return p.EnabledBy -} - -// GetMergeMethod returns the MergeMethod field if it's non-nil, zero value otherwise. -func (p *PullRequestAutoMerge) GetMergeMethod() string { - if p == nil || p.MergeMethod == nil { - return "" - } - return *p.MergeMethod -} - -// GetLabel returns the Label field if it's non-nil, zero value otherwise. -func (p *PullRequestBranch) GetLabel() string { - if p == nil || p.Label == nil { - return "" - } - return *p.Label -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (p *PullRequestBranch) GetRef() string { - if p == nil || p.Ref == nil { - return "" - } - return *p.Ref -} - -// GetRepo returns the Repo field. -func (p *PullRequestBranch) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (p *PullRequestBranch) GetSHA() string { - if p == nil || p.SHA == nil { - return "" - } - return *p.SHA -} - -// GetUser returns the User field. -func (p *PullRequestBranch) GetUser() *User { - if p == nil { - return nil - } - return p.User -} - -// GetExpectedHeadSHA returns the ExpectedHeadSHA field if it's non-nil, zero value otherwise. -func (p *PullRequestBranchUpdateOptions) GetExpectedHeadSHA() string { - if p == nil || p.ExpectedHeadSHA == nil { - return "" - } - return *p.ExpectedHeadSHA -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (p *PullRequestBranchUpdateResponse) GetMessage() string { - if p == nil || p.Message == nil { - return "" - } - return *p.Message -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PullRequestBranchUpdateResponse) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetAuthorAssociation() string { - if p == nil || p.AuthorAssociation == nil { - return "" - } - return *p.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetCommitID() string { - if p == nil || p.CommitID == nil { - return "" - } - return *p.CommitID -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDiffHunk returns the DiffHunk field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetDiffHunk() string { - if p == nil || p.DiffHunk == nil { - return "" - } - return *p.DiffHunk -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. 
-func (p *PullRequestComment) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetInReplyTo returns the InReplyTo field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetInReplyTo() int64 { - if p == nil || p.InReplyTo == nil { - return 0 - } - return *p.InReplyTo -} - -// GetLine returns the Line field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetLine() int { - if p == nil || p.Line == nil { - return 0 - } - return *p.Line -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetOriginalCommitID returns the OriginalCommitID field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetOriginalCommitID() string { - if p == nil || p.OriginalCommitID == nil { - return "" - } - return *p.OriginalCommitID -} - -// GetOriginalLine returns the OriginalLine field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetOriginalLine() int { - if p == nil || p.OriginalLine == nil { - return 0 - } - return *p.OriginalLine -} - -// GetOriginalPosition returns the OriginalPosition field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetOriginalPosition() int { - if p == nil || p.OriginalPosition == nil { - return 0 - } - return *p.OriginalPosition -} - -// GetOriginalStartLine returns the OriginalStartLine field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetOriginalStartLine() int { - if p == nil || p.OriginalStartLine == nil { - return 0 - } - return *p.OriginalStartLine -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetPath() string { - if p == nil || p.Path == nil { - return "" - } - return *p.Path -} - -// GetPosition returns the Position field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetPosition() int { - if p == nil || p.Position == nil { - return 0 - } - return *p.Position -} - -// GetPullRequestReviewID returns the PullRequestReviewID field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetPullRequestReviewID() int64 { - if p == nil || p.PullRequestReviewID == nil { - return 0 - } - return *p.PullRequestReviewID -} - -// GetPullRequestURL returns the PullRequestURL field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetPullRequestURL() string { - if p == nil || p.PullRequestURL == nil { - return "" - } - return *p.PullRequestURL -} - -// GetReactions returns the Reactions field. -func (p *PullRequestComment) GetReactions() *Reactions { - if p == nil { - return nil - } - return p.Reactions -} - -// GetSide returns the Side field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetSide() string { - if p == nil || p.Side == nil { - return "" - } - return *p.Side -} - -// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetStartLine() int { - if p == nil || p.StartLine == nil { - return 0 - } - return *p.StartLine -} - -// GetStartSide returns the StartSide field if it's non-nil, zero value otherwise. 
-func (p *PullRequestComment) GetStartSide() string { - if p == nil || p.StartSide == nil { - return "" - } - return *p.StartSide -} - -// GetSubjectType returns the SubjectType field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetSubjectType() string { - if p == nil || p.SubjectType == nil { - return "" - } - return *p.SubjectType -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PullRequestComment) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetUser returns the User field. -func (p *PullRequestComment) GetUser() *User { - if p == nil { - return nil - } - return p.User -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PullRequestEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetAfter returns the After field if it's non-nil, zero value otherwise. -func (p *PullRequestEvent) GetAfter() string { - if p == nil || p.After == nil { - return "" - } - return *p.After -} - -// GetAssignee returns the Assignee field. -func (p *PullRequestEvent) GetAssignee() *User { - if p == nil { - return nil - } - return p.Assignee -} - -// GetBefore returns the Before field if it's non-nil, zero value otherwise. -func (p *PullRequestEvent) GetBefore() string { - if p == nil || p.Before == nil { - return "" - } - return *p.Before -} - -// GetChanges returns the Changes field. -func (p *PullRequestEvent) GetChanges() *EditChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. -func (p *PullRequestEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetLabel returns the Label field. -func (p *PullRequestEvent) GetLabel() *Label { - if p == nil { - return nil - } - return p.Label -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (p *PullRequestEvent) GetNumber() int { - if p == nil || p.Number == nil { - return 0 - } - return *p.Number -} - -// GetOrganization returns the Organization field. -func (p *PullRequestEvent) GetOrganization() *Organization { - if p == nil { - return nil - } - return p.Organization -} - -// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. -func (p *PullRequestEvent) GetPerformedViaGithubApp() *App { - if p == nil { - return nil - } - return p.PerformedViaGithubApp -} - -// GetPullRequest returns the PullRequest field. -func (p *PullRequestEvent) GetPullRequest() *PullRequest { - if p == nil { - return nil - } - return p.PullRequest -} - -// GetRepo returns the Repo field. -func (p *PullRequestEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetRequestedReviewer returns the RequestedReviewer field. -func (p *PullRequestEvent) GetRequestedReviewer() *User { - if p == nil { - return nil - } - return p.RequestedReviewer -} - -// GetRequestedTeam returns the RequestedTeam field. -func (p *PullRequestEvent) GetRequestedTeam() *Team { - if p == nil { - return nil - } - return p.RequestedTeam -} - -// GetSender returns the Sender field. 
-func (p *PullRequestEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. -func (p *PullRequestLinks) GetDiffURL() string { - if p == nil || p.DiffURL == nil { - return "" - } - return *p.DiffURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PullRequestLinks) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetPatchURL returns the PatchURL field if it's non-nil, zero value otherwise. -func (p *PullRequestLinks) GetPatchURL() string { - if p == nil || p.PatchURL == nil { - return "" - } - return *p.PatchURL -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PullRequestLinks) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetMerged returns the Merged field if it's non-nil, zero value otherwise. -func (p *PullRequestMergeResult) GetMerged() bool { - if p == nil || p.Merged == nil { - return false - } - return *p.Merged -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (p *PullRequestMergeResult) GetMessage() string { - if p == nil || p.Message == nil { - return "" - } - return *p.Message -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (p *PullRequestMergeResult) GetSHA() string { - if p == nil || p.SHA == nil { - return "" - } - return *p.SHA -} - -// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetAuthorAssociation() string { - if p == nil || p.AuthorAssociation == nil { - return "" - } - return *p.AuthorAssociation -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetCommitID() string { - if p == nil || p.CommitID == nil { - return "" - } - return *p.CommitID -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetPullRequestURL returns the PullRequestURL field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetPullRequestURL() string { - if p == nil || p.PullRequestURL == nil { - return "" - } - return *p.PullRequestURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetState() string { - if p == nil || p.State == nil { - return "" - } - return *p.State -} - -// GetSubmittedAt returns the SubmittedAt field if it's non-nil, zero value otherwise. -func (p *PullRequestReview) GetSubmittedAt() Timestamp { - if p == nil || p.SubmittedAt == nil { - return Timestamp{} - } - return *p.SubmittedAt -} - -// GetUser returns the User field. 
-func (p *PullRequestReview) GetUser() *User { - if p == nil { - return nil - } - return p.User -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewCommentEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetChanges returns the Changes field. -func (p *PullRequestReviewCommentEvent) GetChanges() *EditChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetComment returns the Comment field. -func (p *PullRequestReviewCommentEvent) GetComment() *PullRequestComment { - if p == nil { - return nil - } - return p.Comment -} - -// GetInstallation returns the Installation field. -func (p *PullRequestReviewCommentEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PullRequestReviewCommentEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetPullRequest returns the PullRequest field. -func (p *PullRequestReviewCommentEvent) GetPullRequest() *PullRequest { - if p == nil { - return nil - } - return p.PullRequest -} - -// GetRepo returns the Repo field. -func (p *PullRequestReviewCommentEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PullRequestReviewCommentEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewDismissalRequest) GetMessage() string { - if p == nil || p.Message == nil { - return "" - } - return *p.Message -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetInstallation returns the Installation field. -func (p *PullRequestReviewEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrganization returns the Organization field. -func (p *PullRequestReviewEvent) GetOrganization() *Organization { - if p == nil { - return nil - } - return p.Organization -} - -// GetPullRequest returns the PullRequest field. -func (p *PullRequestReviewEvent) GetPullRequest() *PullRequest { - if p == nil { - return nil - } - return p.PullRequest -} - -// GetRepo returns the Repo field. -func (p *PullRequestReviewEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetReview returns the Review field. -func (p *PullRequestReviewEvent) GetReview() *PullRequestReview { - if p == nil { - return nil - } - return p.Review -} - -// GetSender returns the Sender field. -func (p *PullRequestReviewEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewRequest) GetBody() string { - if p == nil || p.Body == nil { - return "" - } - return *p.Body -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewRequest) GetCommitID() string { - if p == nil || p.CommitID == nil { - return "" - } - return *p.CommitID -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. 
-func (p *PullRequestReviewRequest) GetEvent() string { - if p == nil || p.Event == nil { - return "" - } - return *p.Event -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewRequest) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetBypassPullRequestAllowances returns the BypassPullRequestAllowances field. -func (p *PullRequestReviewsEnforcement) GetBypassPullRequestAllowances() *BypassPullRequestAllowances { - if p == nil { - return nil - } - return p.BypassPullRequestAllowances -} - -// GetDismissalRestrictions returns the DismissalRestrictions field. -func (p *PullRequestReviewsEnforcement) GetDismissalRestrictions() *DismissalRestrictions { - if p == nil { - return nil - } - return p.DismissalRestrictions -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewsEnforcementLevelChanges) GetFrom() string { - if p == nil || p.From == nil { - return "" - } - return *p.From -} - -// GetBypassPullRequestAllowancesRequest returns the BypassPullRequestAllowancesRequest field. -func (p *PullRequestReviewsEnforcementRequest) GetBypassPullRequestAllowancesRequest() *BypassPullRequestAllowancesRequest { - if p == nil { - return nil - } - return p.BypassPullRequestAllowancesRequest -} - -// GetDismissalRestrictionsRequest returns the DismissalRestrictionsRequest field. -func (p *PullRequestReviewsEnforcementRequest) GetDismissalRestrictionsRequest() *DismissalRestrictionsRequest { - if p == nil { - return nil - } - return p.DismissalRestrictionsRequest -} - -// GetRequireLastPushApproval returns the RequireLastPushApproval field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewsEnforcementRequest) GetRequireLastPushApproval() bool { - if p == nil || p.RequireLastPushApproval == nil { - return false - } - return *p.RequireLastPushApproval -} - -// GetBypassPullRequestAllowancesRequest returns the BypassPullRequestAllowancesRequest field. -func (p *PullRequestReviewsEnforcementUpdate) GetBypassPullRequestAllowancesRequest() *BypassPullRequestAllowancesRequest { - if p == nil { - return nil - } - return p.BypassPullRequestAllowancesRequest -} - -// GetDismissalRestrictionsRequest returns the DismissalRestrictionsRequest field. -func (p *PullRequestReviewsEnforcementUpdate) GetDismissalRestrictionsRequest() *DismissalRestrictionsRequest { - if p == nil { - return nil - } - return p.DismissalRestrictionsRequest -} - -// GetDismissStaleReviews returns the DismissStaleReviews field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewsEnforcementUpdate) GetDismissStaleReviews() bool { - if p == nil || p.DismissStaleReviews == nil { - return false - } - return *p.DismissStaleReviews -} - -// GetRequireCodeOwnerReviews returns the RequireCodeOwnerReviews field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewsEnforcementUpdate) GetRequireCodeOwnerReviews() bool { - if p == nil || p.RequireCodeOwnerReviews == nil { - return false - } - return *p.RequireCodeOwnerReviews -} - -// GetRequireLastPushApproval returns the RequireLastPushApproval field if it's non-nil, zero value otherwise. -func (p *PullRequestReviewsEnforcementUpdate) GetRequireLastPushApproval() bool { - if p == nil || p.RequireLastPushApproval == nil { - return false - } - return *p.RequireLastPushApproval -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. 
-func (p *PullRequestReviewThreadEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetInstallation returns the Installation field. -func (p *PullRequestReviewThreadEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrg returns the Org field. -func (p *PullRequestReviewThreadEvent) GetOrg() *Organization { - if p == nil { - return nil - } - return p.Org -} - -// GetPullRequest returns the PullRequest field. -func (p *PullRequestReviewThreadEvent) GetPullRequest() *PullRequest { - if p == nil { - return nil - } - return p.PullRequest -} - -// GetRepo returns the Repo field. -func (p *PullRequestReviewThreadEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PullRequestReviewThreadEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetThread returns the Thread field. -func (p *PullRequestReviewThreadEvent) GetThread() *PullRequestThread { - if p == nil { - return nil - } - return p.Thread -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PullRequestTargetEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetAfter returns the After field if it's non-nil, zero value otherwise. -func (p *PullRequestTargetEvent) GetAfter() string { - if p == nil || p.After == nil { - return "" - } - return *p.After -} - -// GetAssignee returns the Assignee field. -func (p *PullRequestTargetEvent) GetAssignee() *User { - if p == nil { - return nil - } - return p.Assignee -} - -// GetBefore returns the Before field if it's non-nil, zero value otherwise. -func (p *PullRequestTargetEvent) GetBefore() string { - if p == nil || p.Before == nil { - return "" - } - return *p.Before -} - -// GetChanges returns the Changes field. -func (p *PullRequestTargetEvent) GetChanges() *EditChange { - if p == nil { - return nil - } - return p.Changes -} - -// GetInstallation returns the Installation field. -func (p *PullRequestTargetEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetLabel returns the Label field. -func (p *PullRequestTargetEvent) GetLabel() *Label { - if p == nil { - return nil - } - return p.Label -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (p *PullRequestTargetEvent) GetNumber() int { - if p == nil || p.Number == nil { - return 0 - } - return *p.Number -} - -// GetOrganization returns the Organization field. -func (p *PullRequestTargetEvent) GetOrganization() *Organization { - if p == nil { - return nil - } - return p.Organization -} - -// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. -func (p *PullRequestTargetEvent) GetPerformedViaGithubApp() *App { - if p == nil { - return nil - } - return p.PerformedViaGithubApp -} - -// GetPullRequest returns the PullRequest field. -func (p *PullRequestTargetEvent) GetPullRequest() *PullRequest { - if p == nil { - return nil - } - return p.PullRequest -} - -// GetRepo returns the Repo field. -func (p *PullRequestTargetEvent) GetRepo() *Repository { - if p == nil { - return nil - } - return p.Repo -} - -// GetRequestedReviewer returns the RequestedReviewer field. 
-func (p *PullRequestTargetEvent) GetRequestedReviewer() *User { - if p == nil { - return nil - } - return p.RequestedReviewer -} - -// GetRequestedTeam returns the RequestedTeam field. -func (p *PullRequestTargetEvent) GetRequestedTeam() *Team { - if p == nil { - return nil - } - return p.RequestedTeam -} - -// GetSender returns the Sender field. -func (p *PullRequestTargetEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PullRequestThread) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PullRequestThread) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetMergablePulls returns the MergablePulls field if it's non-nil, zero value otherwise. -func (p *PullStats) GetMergablePulls() int { - if p == nil || p.MergablePulls == nil { - return 0 - } - return *p.MergablePulls -} - -// GetMergedPulls returns the MergedPulls field if it's non-nil, zero value otherwise. -func (p *PullStats) GetMergedPulls() int { - if p == nil || p.MergedPulls == nil { - return 0 - } - return *p.MergedPulls -} - -// GetTotalPulls returns the TotalPulls field if it's non-nil, zero value otherwise. -func (p *PullStats) GetTotalPulls() int { - if p == nil || p.TotalPulls == nil { - return 0 - } - return *p.TotalPulls -} - -// GetUnmergablePulls returns the UnmergablePulls field if it's non-nil, zero value otherwise. -func (p *PullStats) GetUnmergablePulls() int { - if p == nil || p.UnmergablePulls == nil { - return 0 - } - return *p.UnmergablePulls -} - -// GetCommits returns the Commits field if it's non-nil, zero value otherwise. -func (p *PunchCard) GetCommits() int { - if p == nil || p.Commits == nil { - return 0 - } - return *p.Commits -} - -// GetDay returns the Day field if it's non-nil, zero value otherwise. -func (p *PunchCard) GetDay() int { - if p == nil || p.Day == nil { - return 0 - } - return *p.Day -} - -// GetHour returns the Hour field if it's non-nil, zero value otherwise. -func (p *PunchCard) GetHour() int { - if p == nil || p.Hour == nil { - return 0 - } - return *p.Hour -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetAction() string { - if p == nil || p.Action == nil { - return "" - } - return *p.Action -} - -// GetAfter returns the After field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetAfter() string { - if p == nil || p.After == nil { - return "" - } - return *p.After -} - -// GetBaseRef returns the BaseRef field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetBaseRef() string { - if p == nil || p.BaseRef == nil { - return "" - } - return *p.BaseRef -} - -// GetBefore returns the Before field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetBefore() string { - if p == nil || p.Before == nil { - return "" - } - return *p.Before -} - -// GetCommits returns the Commits slice if it's non-nil, nil otherwise. -func (p *PushEvent) GetCommits() []*HeadCommit { - if p == nil || p.Commits == nil { - return nil - } - return p.Commits -} - -// GetCompare returns the Compare field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetCompare() string { - if p == nil || p.Compare == nil { - return "" - } - return *p.Compare -} - -// GetCreated returns the Created field if it's non-nil, zero value otherwise. 
-func (p *PushEvent) GetCreated() bool { - if p == nil || p.Created == nil { - return false - } - return *p.Created -} - -// GetDeleted returns the Deleted field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetDeleted() bool { - if p == nil || p.Deleted == nil { - return false - } - return *p.Deleted -} - -// GetDistinctSize returns the DistinctSize field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetDistinctSize() int { - if p == nil || p.DistinctSize == nil { - return 0 - } - return *p.DistinctSize -} - -// GetForced returns the Forced field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetForced() bool { - if p == nil || p.Forced == nil { - return false - } - return *p.Forced -} - -// GetHead returns the Head field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetHead() string { - if p == nil || p.Head == nil { - return "" - } - return *p.Head -} - -// GetHeadCommit returns the HeadCommit field. -func (p *PushEvent) GetHeadCommit() *HeadCommit { - if p == nil { - return nil - } - return p.HeadCommit -} - -// GetInstallation returns the Installation field. -func (p *PushEvent) GetInstallation() *Installation { - if p == nil { - return nil - } - return p.Installation -} - -// GetOrganization returns the Organization field. -func (p *PushEvent) GetOrganization() *Organization { - if p == nil { - return nil - } - return p.Organization -} - -// GetPusher returns the Pusher field. -func (p *PushEvent) GetPusher() *CommitAuthor { - if p == nil { - return nil - } - return p.Pusher -} - -// GetPushID returns the PushID field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetPushID() int64 { - if p == nil || p.PushID == nil { - return 0 - } - return *p.PushID -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetRef() string { - if p == nil || p.Ref == nil { - return "" - } - return *p.Ref -} - -// GetRepo returns the Repo field. -func (p *PushEvent) GetRepo() *PushEventRepository { - if p == nil { - return nil - } - return p.Repo -} - -// GetSender returns the Sender field. -func (p *PushEvent) GetSender() *User { - if p == nil { - return nil - } - return p.Sender -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (p *PushEvent) GetSize() int { - if p == nil || p.Size == nil { - return 0 - } - return *p.Size -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (p *PushEventRepoOwner) GetEmail() string { - if p == nil || p.Email == nil { - return "" - } - return *p.Email -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PushEventRepoOwner) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetArchived returns the Archived field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetArchived() bool { - if p == nil || p.Archived == nil { - return false - } - return *p.Archived -} - -// GetArchiveURL returns the ArchiveURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetArchiveURL() string { - if p == nil || p.ArchiveURL == nil { - return "" - } - return *p.ArchiveURL -} - -// GetCloneURL returns the CloneURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetCloneURL() string { - if p == nil || p.CloneURL == nil { - return "" - } - return *p.CloneURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
-func (p *PushEventRepository) GetCreatedAt() Timestamp { - if p == nil || p.CreatedAt == nil { - return Timestamp{} - } - return *p.CreatedAt -} - -// GetDefaultBranch returns the DefaultBranch field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetDefaultBranch() string { - if p == nil || p.DefaultBranch == nil { - return "" - } - return *p.DefaultBranch -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetDescription() string { - if p == nil || p.Description == nil { - return "" - } - return *p.Description -} - -// GetDisabled returns the Disabled field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetDisabled() bool { - if p == nil || p.Disabled == nil { - return false - } - return *p.Disabled -} - -// GetFork returns the Fork field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetFork() bool { - if p == nil || p.Fork == nil { - return false - } - return *p.Fork -} - -// GetForksCount returns the ForksCount field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetForksCount() int { - if p == nil || p.ForksCount == nil { - return 0 - } - return *p.ForksCount -} - -// GetFullName returns the FullName field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetFullName() string { - if p == nil || p.FullName == nil { - return "" - } - return *p.FullName -} - -// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetGitURL() string { - if p == nil || p.GitURL == nil { - return "" - } - return *p.GitURL -} - -// GetHasDownloads returns the HasDownloads field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetHasDownloads() bool { - if p == nil || p.HasDownloads == nil { - return false - } - return *p.HasDownloads -} - -// GetHasIssues returns the HasIssues field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetHasIssues() bool { - if p == nil || p.HasIssues == nil { - return false - } - return *p.HasIssues -} - -// GetHasPages returns the HasPages field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetHasPages() bool { - if p == nil || p.HasPages == nil { - return false - } - return *p.HasPages -} - -// GetHasWiki returns the HasWiki field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetHasWiki() bool { - if p == nil || p.HasWiki == nil { - return false - } - return *p.HasWiki -} - -// GetHomepage returns the Homepage field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetHomepage() string { - if p == nil || p.Homepage == nil { - return "" - } - return *p.Homepage -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetHTMLURL() string { - if p == nil || p.HTMLURL == nil { - return "" - } - return *p.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetID() int64 { - if p == nil || p.ID == nil { - return 0 - } - return *p.ID -} - -// GetLanguage returns the Language field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetLanguage() string { - if p == nil || p.Language == nil { - return "" - } - return *p.Language -} - -// GetMasterBranch returns the MasterBranch field if it's non-nil, zero value otherwise. 
-func (p *PushEventRepository) GetMasterBranch() string { - if p == nil || p.MasterBranch == nil { - return "" - } - return *p.MasterBranch -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetName() string { - if p == nil || p.Name == nil { - return "" - } - return *p.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetNodeID() string { - if p == nil || p.NodeID == nil { - return "" - } - return *p.NodeID -} - -// GetOpenIssuesCount returns the OpenIssuesCount field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetOpenIssuesCount() int { - if p == nil || p.OpenIssuesCount == nil { - return 0 - } - return *p.OpenIssuesCount -} - -// GetOrganization returns the Organization field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetOrganization() string { - if p == nil || p.Organization == nil { - return "" - } - return *p.Organization -} - -// GetOwner returns the Owner field. -func (p *PushEventRepository) GetOwner() *User { - if p == nil { - return nil - } - return p.Owner -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetPrivate() bool { - if p == nil || p.Private == nil { - return false - } - return *p.Private -} - -// GetPullsURL returns the PullsURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetPullsURL() string { - if p == nil || p.PullsURL == nil { - return "" - } - return *p.PullsURL -} - -// GetPushedAt returns the PushedAt field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetPushedAt() Timestamp { - if p == nil || p.PushedAt == nil { - return Timestamp{} - } - return *p.PushedAt -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetSize() int { - if p == nil || p.Size == nil { - return 0 - } - return *p.Size -} - -// GetSSHURL returns the SSHURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetSSHURL() string { - if p == nil || p.SSHURL == nil { - return "" - } - return *p.SSHURL -} - -// GetStargazersCount returns the StargazersCount field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetStargazersCount() int { - if p == nil || p.StargazersCount == nil { - return 0 - } - return *p.StargazersCount -} - -// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetStatusesURL() string { - if p == nil || p.StatusesURL == nil { - return "" - } - return *p.StatusesURL -} - -// GetSVNURL returns the SVNURL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetSVNURL() string { - if p == nil || p.SVNURL == nil { - return "" - } - return *p.SVNURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetUpdatedAt() Timestamp { - if p == nil || p.UpdatedAt == nil { - return Timestamp{} - } - return *p.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (p *PushEventRepository) GetURL() string { - if p == nil || p.URL == nil { - return "" - } - return *p.URL -} - -// GetWatchersCount returns the WatchersCount field if it's non-nil, zero value otherwise. 
-func (p *PushEventRepository) GetWatchersCount() int { - if p == nil || p.WatchersCount == nil { - return 0 - } - return *p.WatchersCount -} - -// GetActionsRunnerRegistration returns the ActionsRunnerRegistration field. -func (r *RateLimits) GetActionsRunnerRegistration() *Rate { - if r == nil { - return nil - } - return r.ActionsRunnerRegistration -} - -// GetCodeScanningUpload returns the CodeScanningUpload field. -func (r *RateLimits) GetCodeScanningUpload() *Rate { - if r == nil { - return nil - } - return r.CodeScanningUpload -} - -// GetCore returns the Core field. -func (r *RateLimits) GetCore() *Rate { - if r == nil { - return nil - } - return r.Core -} - -// GetGraphQL returns the GraphQL field. -func (r *RateLimits) GetGraphQL() *Rate { - if r == nil { - return nil - } - return r.GraphQL -} - -// GetIntegrationManifest returns the IntegrationManifest field. -func (r *RateLimits) GetIntegrationManifest() *Rate { - if r == nil { - return nil - } - return r.IntegrationManifest -} - -// GetSCIM returns the SCIM field. -func (r *RateLimits) GetSCIM() *Rate { - if r == nil { - return nil - } - return r.SCIM -} - -// GetSearch returns the Search field. -func (r *RateLimits) GetSearch() *Rate { - if r == nil { - return nil - } - return r.Search -} - -// GetSourceImport returns the SourceImport field. -func (r *RateLimits) GetSourceImport() *Rate { - if r == nil { - return nil - } - return r.SourceImport -} - -// GetContent returns the Content field if it's non-nil, zero value otherwise. -func (r *Reaction) GetContent() string { - if r == nil || r.Content == nil { - return "" - } - return *r.Content -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *Reaction) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *Reaction) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetUser returns the User field. -func (r *Reaction) GetUser() *User { - if r == nil { - return nil - } - return r.User -} - -// GetConfused returns the Confused field if it's non-nil, zero value otherwise. -func (r *Reactions) GetConfused() int { - if r == nil || r.Confused == nil { - return 0 - } - return *r.Confused -} - -// GetEyes returns the Eyes field if it's non-nil, zero value otherwise. -func (r *Reactions) GetEyes() int { - if r == nil || r.Eyes == nil { - return 0 - } - return *r.Eyes -} - -// GetHeart returns the Heart field if it's non-nil, zero value otherwise. -func (r *Reactions) GetHeart() int { - if r == nil || r.Heart == nil { - return 0 - } - return *r.Heart -} - -// GetHooray returns the Hooray field if it's non-nil, zero value otherwise. -func (r *Reactions) GetHooray() int { - if r == nil || r.Hooray == nil { - return 0 - } - return *r.Hooray -} - -// GetLaugh returns the Laugh field if it's non-nil, zero value otherwise. -func (r *Reactions) GetLaugh() int { - if r == nil || r.Laugh == nil { - return 0 - } - return *r.Laugh -} - -// GetMinusOne returns the MinusOne field if it's non-nil, zero value otherwise. -func (r *Reactions) GetMinusOne() int { - if r == nil || r.MinusOne == nil { - return 0 - } - return *r.MinusOne -} - -// GetPlusOne returns the PlusOne field if it's non-nil, zero value otherwise. -func (r *Reactions) GetPlusOne() int { - if r == nil || r.PlusOne == nil { - return 0 - } - return *r.PlusOne -} - -// GetRocket returns the Rocket field if it's non-nil, zero value otherwise. 
-func (r *Reactions) GetRocket() int { - if r == nil || r.Rocket == nil { - return 0 - } - return *r.Rocket -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (r *Reactions) GetTotalCount() int { - if r == nil || r.TotalCount == nil { - return 0 - } - return *r.TotalCount -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *Reactions) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *Reference) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetObject returns the Object field. -func (r *Reference) GetObject() *GitObject { - if r == nil { - return nil - } - return r.Object -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (r *Reference) GetRef() string { - if r == nil || r.Ref == nil { - return "" - } - return *r.Ref -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *Reference) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *ReferencedWorkflow) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (r *ReferencedWorkflow) GetRef() string { - if r == nil || r.Ref == nil { - return "" - } - return *r.Ref -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (r *ReferencedWorkflow) GetSHA() string { - if r == nil || r.SHA == nil { - return "" - } - return *r.SHA -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (r *RegistrationToken) GetExpiresAt() Timestamp { - if r == nil || r.ExpiresAt == nil { - return Timestamp{} - } - return *r.ExpiresAt -} - -// GetToken returns the Token field if it's non-nil, zero value otherwise. -func (r *RegistrationToken) GetToken() string { - if r == nil || r.Token == nil { - return "" - } - return *r.Token -} - -// GetBrowserDownloadURL returns the BrowserDownloadURL field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetBrowserDownloadURL() string { - if r == nil || r.BrowserDownloadURL == nil { - return "" - } - return *r.BrowserDownloadURL -} - -// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetContentType() string { - if r == nil || r.ContentType == nil { - return "" - } - return *r.ContentType -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetDownloadCount returns the DownloadCount field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetDownloadCount() int { - if r == nil || r.DownloadCount == nil { - return 0 - } - return *r.DownloadCount -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetLabel returns the Label field if it's non-nil, zero value otherwise. 
-func (r *ReleaseAsset) GetLabel() string { - if r == nil || r.Label == nil { - return "" - } - return *r.Label -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetSize() int { - if r == nil || r.Size == nil { - return 0 - } - return *r.Size -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetState() string { - if r == nil || r.State == nil { - return "" - } - return *r.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetUploader returns the Uploader field. -func (r *ReleaseAsset) GetUploader() *User { - if r == nil { - return nil - } - return r.Uploader -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *ReleaseAsset) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (r *ReleaseEvent) GetAction() string { - if r == nil || r.Action == nil { - return "" - } - return *r.Action -} - -// GetInstallation returns the Installation field. -func (r *ReleaseEvent) GetInstallation() *Installation { - if r == nil { - return nil - } - return r.Installation -} - -// GetOrg returns the Org field. -func (r *ReleaseEvent) GetOrg() *Organization { - if r == nil { - return nil - } - return r.Org -} - -// GetRelease returns the Release field. -func (r *ReleaseEvent) GetRelease() *RepositoryRelease { - if r == nil { - return nil - } - return r.Release -} - -// GetRepo returns the Repo field. -func (r *ReleaseEvent) GetRepo() *Repository { - if r == nil { - return nil - } - return r.Repo -} - -// GetSender returns the Sender field. -func (r *ReleaseEvent) GetSender() *User { - if r == nil { - return nil - } - return r.Sender -} - -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (r *RemoveToken) GetExpiresAt() Timestamp { - if r == nil || r.ExpiresAt == nil { - return Timestamp{} - } - return *r.ExpiresAt -} - -// GetToken returns the Token field if it's non-nil, zero value otherwise. -func (r *RemoveToken) GetToken() string { - if r == nil || r.Token == nil { - return "" - } - return *r.Token -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *Rename) GetFrom() string { - if r == nil || r.From == nil { - return "" - } - return *r.From -} - -// GetTo returns the To field if it's non-nil, zero value otherwise. -func (r *Rename) GetTo() string { - if r == nil || r.To == nil { - return "" - } - return *r.To -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (r *RenameOrgResponse) GetMessage() string { - if r == nil || r.Message == nil { - return "" - } - return *r.Message -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (r *RenameOrgResponse) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (r *RepoAdvisoryCredit) GetLogin() string { - if r == nil || r.Login == nil { - return "" - } - return *r.Login -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (r *RepoAdvisoryCredit) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (r *RepoAdvisoryCreditDetailed) GetState() string { - if r == nil || r.State == nil { - return "" - } - return *r.State -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (r *RepoAdvisoryCreditDetailed) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetUser returns the User field. -func (r *RepoAdvisoryCreditDetailed) GetUser() *User { - if r == nil { - return nil - } - return r.User -} - -// GetDownloadLocation returns the DownloadLocation field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetDownloadLocation() string { - if r == nil || r.DownloadLocation == nil { - return "" - } - return *r.DownloadLocation -} - -// GetFilesAnalyzed returns the FilesAnalyzed field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetFilesAnalyzed() bool { - if r == nil || r.FilesAnalyzed == nil { - return false - } - return *r.FilesAnalyzed -} - -// GetLicenseConcluded returns the LicenseConcluded field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetLicenseConcluded() string { - if r == nil || r.LicenseConcluded == nil { - return "" - } - return *r.LicenseConcluded -} - -// GetLicenseDeclared returns the LicenseDeclared field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetLicenseDeclared() string { - if r == nil || r.LicenseDeclared == nil { - return "" - } - return *r.LicenseDeclared -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetSPDXID() string { - if r == nil || r.SPDXID == nil { - return "" - } - return *r.SPDXID -} - -// GetVersionInfo returns the VersionInfo field if it's non-nil, zero value otherwise. -func (r *RepoDependencies) GetVersionInfo() string { - if r == nil || r.VersionInfo == nil { - return "" - } - return *r.VersionInfo -} - -// GetBranch returns the Branch field if it's non-nil, zero value otherwise. -func (r *RepoMergeUpstreamRequest) GetBranch() string { - if r == nil || r.Branch == nil { - return "" - } - return *r.Branch -} - -// GetBaseBranch returns the BaseBranch field if it's non-nil, zero value otherwise. -func (r *RepoMergeUpstreamResult) GetBaseBranch() string { - if r == nil || r.BaseBranch == nil { - return "" - } - return *r.BaseBranch -} - -// GetMergeType returns the MergeType field if it's non-nil, zero value otherwise. -func (r *RepoMergeUpstreamResult) GetMergeType() string { - if r == nil || r.MergeType == nil { - return "" - } - return *r.MergeType -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. 
-func (r *RepoMergeUpstreamResult) GetMessage() string { - if r == nil || r.Message == nil { - return "" - } - return *r.Message -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *RepoName) GetFrom() string { - if r == nil || r.From == nil { - return "" - } - return *r.From -} - -// GetBadgeURL returns the BadgeURL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetBadgeURL() string { - if r == nil || r.BadgeURL == nil { - return "" - } - return *r.BadgeURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetSourceRepository returns the SourceRepository field. -func (r *RepoRequiredWorkflow) GetSourceRepository() *Repository { - if r == nil { - return nil - } - return r.SourceRepository -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetState() string { - if r == nil || r.State == nil { - return "" - } - return *r.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflows) GetTotalCount() int { - if r == nil || r.TotalCount == nil { - return 0 - } - return *r.TotalCount -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (r *RepositoriesSearchResult) GetIncompleteResults() bool { - if r == nil || r.IncompleteResults == nil { - return false - } - return *r.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (r *RepositoriesSearchResult) GetTotal() int { - if r == nil || r.Total == nil { - return 0 - } - return *r.Total -} - -// GetAllowAutoMerge returns the AllowAutoMerge field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetAllowAutoMerge() bool { - if r == nil || r.AllowAutoMerge == nil { - return false - } - return *r.AllowAutoMerge -} - -// GetAllowForking returns the AllowForking field if it's non-nil, zero value otherwise. -func (r *Repository) GetAllowForking() bool { - if r == nil || r.AllowForking == nil { - return false - } - return *r.AllowForking -} - -// GetAllowMergeCommit returns the AllowMergeCommit field if it's non-nil, zero value otherwise. -func (r *Repository) GetAllowMergeCommit() bool { - if r == nil || r.AllowMergeCommit == nil { - return false - } - return *r.AllowMergeCommit -} - -// GetAllowRebaseMerge returns the AllowRebaseMerge field if it's non-nil, zero value otherwise. -func (r *Repository) GetAllowRebaseMerge() bool { - if r == nil || r.AllowRebaseMerge == nil { - return false - } - return *r.AllowRebaseMerge -} - -// GetAllowSquashMerge returns the AllowSquashMerge field if it's non-nil, zero value otherwise. -func (r *Repository) GetAllowSquashMerge() bool { - if r == nil || r.AllowSquashMerge == nil { - return false - } - return *r.AllowSquashMerge -} - -// GetAllowUpdateBranch returns the AllowUpdateBranch field if it's non-nil, zero value otherwise. -func (r *Repository) GetAllowUpdateBranch() bool { - if r == nil || r.AllowUpdateBranch == nil { - return false - } - return *r.AllowUpdateBranch -} - -// GetArchived returns the Archived field if it's non-nil, zero value otherwise. -func (r *Repository) GetArchived() bool { - if r == nil || r.Archived == nil { - return false - } - return *r.Archived -} - -// GetArchiveURL returns the ArchiveURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetArchiveURL() string { - if r == nil || r.ArchiveURL == nil { - return "" - } - return *r.ArchiveURL -} - -// GetAssigneesURL returns the AssigneesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetAssigneesURL() string { - if r == nil || r.AssigneesURL == nil { - return "" - } - return *r.AssigneesURL -} - -// GetAutoInit returns the AutoInit field if it's non-nil, zero value otherwise. -func (r *Repository) GetAutoInit() bool { - if r == nil || r.AutoInit == nil { - return false - } - return *r.AutoInit -} - -// GetBlobsURL returns the BlobsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetBlobsURL() string { - if r == nil || r.BlobsURL == nil { - return "" - } - return *r.BlobsURL -} - -// GetBranchesURL returns the BranchesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetBranchesURL() string { - if r == nil || r.BranchesURL == nil { - return "" - } - return *r.BranchesURL -} - -// GetCloneURL returns the CloneURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetCloneURL() string { - if r == nil || r.CloneURL == nil { - return "" - } - return *r.CloneURL -} - -// GetCodeOfConduct returns the CodeOfConduct field. -func (r *Repository) GetCodeOfConduct() *CodeOfConduct { - if r == nil { - return nil - } - return r.CodeOfConduct -} - -// GetCollaboratorsURL returns the CollaboratorsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetCollaboratorsURL() string { - if r == nil || r.CollaboratorsURL == nil { - return "" - } - return *r.CollaboratorsURL -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetCommentsURL() string { - if r == nil || r.CommentsURL == nil { - return "" - } - return *r.CommentsURL -} - -// GetCommitsURL returns the CommitsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetCommitsURL() string { - if r == nil || r.CommitsURL == nil { - return "" - } - return *r.CommitsURL -} - -// GetCompareURL returns the CompareURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetCompareURL() string { - if r == nil || r.CompareURL == nil { - return "" - } - return *r.CompareURL -} - -// GetContentsURL returns the ContentsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetContentsURL() string { - if r == nil || r.ContentsURL == nil { - return "" - } - return *r.ContentsURL -} - -// GetContributorsURL returns the ContributorsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetContributorsURL() string { - if r == nil || r.ContributorsURL == nil { - return "" - } - return *r.ContributorsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *Repository) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetDefaultBranch returns the DefaultBranch field if it's non-nil, zero value otherwise. -func (r *Repository) GetDefaultBranch() string { - if r == nil || r.DefaultBranch == nil { - return "" - } - return *r.DefaultBranch -} - -// GetDeleteBranchOnMerge returns the DeleteBranchOnMerge field if it's non-nil, zero value otherwise. -func (r *Repository) GetDeleteBranchOnMerge() bool { - if r == nil || r.DeleteBranchOnMerge == nil { - return false - } - return *r.DeleteBranchOnMerge -} - -// GetDeploymentsURL returns the DeploymentsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetDeploymentsURL() string { - if r == nil || r.DeploymentsURL == nil { - return "" - } - return *r.DeploymentsURL -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (r *Repository) GetDescription() string { - if r == nil || r.Description == nil { - return "" - } - return *r.Description -} - -// GetDisabled returns the Disabled field if it's non-nil, zero value otherwise. -func (r *Repository) GetDisabled() bool { - if r == nil || r.Disabled == nil { - return false - } - return *r.Disabled -} - -// GetDownloadsURL returns the DownloadsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetDownloadsURL() string { - if r == nil || r.DownloadsURL == nil { - return "" - } - return *r.DownloadsURL -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetEventsURL() string { - if r == nil || r.EventsURL == nil { - return "" - } - return *r.EventsURL -} - -// GetFork returns the Fork field if it's non-nil, zero value otherwise. -func (r *Repository) GetFork() bool { - if r == nil || r.Fork == nil { - return false - } - return *r.Fork -} - -// GetForksCount returns the ForksCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetForksCount() int { - if r == nil || r.ForksCount == nil { - return 0 - } - return *r.ForksCount -} - -// GetForksURL returns the ForksURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetForksURL() string { - if r == nil || r.ForksURL == nil { - return "" - } - return *r.ForksURL -} - -// GetFullName returns the FullName field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetFullName() string { - if r == nil || r.FullName == nil { - return "" - } - return *r.FullName -} - -// GetGitCommitsURL returns the GitCommitsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetGitCommitsURL() string { - if r == nil || r.GitCommitsURL == nil { - return "" - } - return *r.GitCommitsURL -} - -// GetGitignoreTemplate returns the GitignoreTemplate field if it's non-nil, zero value otherwise. -func (r *Repository) GetGitignoreTemplate() string { - if r == nil || r.GitignoreTemplate == nil { - return "" - } - return *r.GitignoreTemplate -} - -// GetGitRefsURL returns the GitRefsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetGitRefsURL() string { - if r == nil || r.GitRefsURL == nil { - return "" - } - return *r.GitRefsURL -} - -// GetGitTagsURL returns the GitTagsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetGitTagsURL() string { - if r == nil || r.GitTagsURL == nil { - return "" - } - return *r.GitTagsURL -} - -// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetGitURL() string { - if r == nil || r.GitURL == nil { - return "" - } - return *r.GitURL -} - -// GetHasDiscussions returns the HasDiscussions field if it's non-nil, zero value otherwise. -func (r *Repository) GetHasDiscussions() bool { - if r == nil || r.HasDiscussions == nil { - return false - } - return *r.HasDiscussions -} - -// GetHasDownloads returns the HasDownloads field if it's non-nil, zero value otherwise. -func (r *Repository) GetHasDownloads() bool { - if r == nil || r.HasDownloads == nil { - return false - } - return *r.HasDownloads -} - -// GetHasIssues returns the HasIssues field if it's non-nil, zero value otherwise. -func (r *Repository) GetHasIssues() bool { - if r == nil || r.HasIssues == nil { - return false - } - return *r.HasIssues -} - -// GetHasPages returns the HasPages field if it's non-nil, zero value otherwise. -func (r *Repository) GetHasPages() bool { - if r == nil || r.HasPages == nil { - return false - } - return *r.HasPages -} - -// GetHasProjects returns the HasProjects field if it's non-nil, zero value otherwise. -func (r *Repository) GetHasProjects() bool { - if r == nil || r.HasProjects == nil { - return false - } - return *r.HasProjects -} - -// GetHasWiki returns the HasWiki field if it's non-nil, zero value otherwise. -func (r *Repository) GetHasWiki() bool { - if r == nil || r.HasWiki == nil { - return false - } - return *r.HasWiki -} - -// GetHomepage returns the Homepage field if it's non-nil, zero value otherwise. -func (r *Repository) GetHomepage() string { - if r == nil || r.Homepage == nil { - return "" - } - return *r.Homepage -} - -// GetHooksURL returns the HooksURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetHooksURL() string { - if r == nil || r.HooksURL == nil { - return "" - } - return *r.HooksURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *Repository) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetIssueCommentURL returns the IssueCommentURL field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetIssueCommentURL() string { - if r == nil || r.IssueCommentURL == nil { - return "" - } - return *r.IssueCommentURL -} - -// GetIssueEventsURL returns the IssueEventsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetIssueEventsURL() string { - if r == nil || r.IssueEventsURL == nil { - return "" - } - return *r.IssueEventsURL -} - -// GetIssuesURL returns the IssuesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetIssuesURL() string { - if r == nil || r.IssuesURL == nil { - return "" - } - return *r.IssuesURL -} - -// GetIsTemplate returns the IsTemplate field if it's non-nil, zero value otherwise. -func (r *Repository) GetIsTemplate() bool { - if r == nil || r.IsTemplate == nil { - return false - } - return *r.IsTemplate -} - -// GetKeysURL returns the KeysURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetKeysURL() string { - if r == nil || r.KeysURL == nil { - return "" - } - return *r.KeysURL -} - -// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetLabelsURL() string { - if r == nil || r.LabelsURL == nil { - return "" - } - return *r.LabelsURL -} - -// GetLanguage returns the Language field if it's non-nil, zero value otherwise. -func (r *Repository) GetLanguage() string { - if r == nil || r.Language == nil { - return "" - } - return *r.Language -} - -// GetLanguagesURL returns the LanguagesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetLanguagesURL() string { - if r == nil || r.LanguagesURL == nil { - return "" - } - return *r.LanguagesURL -} - -// GetLicense returns the License field. -func (r *Repository) GetLicense() *License { - if r == nil { - return nil - } - return r.License -} - -// GetLicenseTemplate returns the LicenseTemplate field if it's non-nil, zero value otherwise. -func (r *Repository) GetLicenseTemplate() string { - if r == nil || r.LicenseTemplate == nil { - return "" - } - return *r.LicenseTemplate -} - -// GetMasterBranch returns the MasterBranch field if it's non-nil, zero value otherwise. -func (r *Repository) GetMasterBranch() string { - if r == nil || r.MasterBranch == nil { - return "" - } - return *r.MasterBranch -} - -// GetMergeCommitMessage returns the MergeCommitMessage field if it's non-nil, zero value otherwise. -func (r *Repository) GetMergeCommitMessage() string { - if r == nil || r.MergeCommitMessage == nil { - return "" - } - return *r.MergeCommitMessage -} - -// GetMergeCommitTitle returns the MergeCommitTitle field if it's non-nil, zero value otherwise. -func (r *Repository) GetMergeCommitTitle() string { - if r == nil || r.MergeCommitTitle == nil { - return "" - } - return *r.MergeCommitTitle -} - -// GetMergesURL returns the MergesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetMergesURL() string { - if r == nil || r.MergesURL == nil { - return "" - } - return *r.MergesURL -} - -// GetMilestonesURL returns the MilestonesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetMilestonesURL() string { - if r == nil || r.MilestonesURL == nil { - return "" - } - return *r.MilestonesURL -} - -// GetMirrorURL returns the MirrorURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetMirrorURL() string { - if r == nil || r.MirrorURL == nil { - return "" - } - return *r.MirrorURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNetworkCount returns the NetworkCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetNetworkCount() int { - if r == nil || r.NetworkCount == nil { - return 0 - } - return *r.NetworkCount -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *Repository) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetNotificationsURL returns the NotificationsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetNotificationsURL() string { - if r == nil || r.NotificationsURL == nil { - return "" - } - return *r.NotificationsURL -} - -// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. -func (r *Repository) GetOpenIssues() int { - if r == nil || r.OpenIssues == nil { - return 0 - } - return *r.OpenIssues -} - -// GetOpenIssuesCount returns the OpenIssuesCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetOpenIssuesCount() int { - if r == nil || r.OpenIssuesCount == nil { - return 0 - } - return *r.OpenIssuesCount -} - -// GetOrganization returns the Organization field. -func (r *Repository) GetOrganization() *Organization { - if r == nil { - return nil - } - return r.Organization -} - -// GetOwner returns the Owner field. -func (r *Repository) GetOwner() *User { - if r == nil { - return nil - } - return r.Owner -} - -// GetParent returns the Parent field. -func (r *Repository) GetParent() *Repository { - if r == nil { - return nil - } - return r.Parent -} - -// GetPermissions returns the Permissions map if it's non-nil, an empty map otherwise. -func (r *Repository) GetPermissions() map[string]bool { - if r == nil || r.Permissions == nil { - return map[string]bool{} - } - return r.Permissions -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (r *Repository) GetPrivate() bool { - if r == nil || r.Private == nil { - return false - } - return *r.Private -} - -// GetPullsURL returns the PullsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetPullsURL() string { - if r == nil || r.PullsURL == nil { - return "" - } - return *r.PullsURL -} - -// GetPushedAt returns the PushedAt field if it's non-nil, zero value otherwise. -func (r *Repository) GetPushedAt() Timestamp { - if r == nil || r.PushedAt == nil { - return Timestamp{} - } - return *r.PushedAt -} - -// GetReleasesURL returns the ReleasesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetReleasesURL() string { - if r == nil || r.ReleasesURL == nil { - return "" - } - return *r.ReleasesURL -} - -// GetRoleName returns the RoleName field if it's non-nil, zero value otherwise. -func (r *Repository) GetRoleName() string { - if r == nil || r.RoleName == nil { - return "" - } - return *r.RoleName -} - -// GetSecurityAndAnalysis returns the SecurityAndAnalysis field. -func (r *Repository) GetSecurityAndAnalysis() *SecurityAndAnalysis { - if r == nil { - return nil - } - return r.SecurityAndAnalysis -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (r *Repository) GetSize() int { - if r == nil || r.Size == nil { - return 0 - } - return *r.Size -} - -// GetSource returns the Source field. 
-func (r *Repository) GetSource() *Repository { - if r == nil { - return nil - } - return r.Source -} - -// GetSquashMergeCommitMessage returns the SquashMergeCommitMessage field if it's non-nil, zero value otherwise. -func (r *Repository) GetSquashMergeCommitMessage() string { - if r == nil || r.SquashMergeCommitMessage == nil { - return "" - } - return *r.SquashMergeCommitMessage -} - -// GetSquashMergeCommitTitle returns the SquashMergeCommitTitle field if it's non-nil, zero value otherwise. -func (r *Repository) GetSquashMergeCommitTitle() string { - if r == nil || r.SquashMergeCommitTitle == nil { - return "" - } - return *r.SquashMergeCommitTitle -} - -// GetSSHURL returns the SSHURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetSSHURL() string { - if r == nil || r.SSHURL == nil { - return "" - } - return *r.SSHURL -} - -// GetStargazersCount returns the StargazersCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetStargazersCount() int { - if r == nil || r.StargazersCount == nil { - return 0 - } - return *r.StargazersCount -} - -// GetStargazersURL returns the StargazersURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetStargazersURL() string { - if r == nil || r.StargazersURL == nil { - return "" - } - return *r.StargazersURL -} - -// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetStatusesURL() string { - if r == nil || r.StatusesURL == nil { - return "" - } - return *r.StatusesURL -} - -// GetSubscribersCount returns the SubscribersCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetSubscribersCount() int { - if r == nil || r.SubscribersCount == nil { - return 0 - } - return *r.SubscribersCount -} - -// GetSubscribersURL returns the SubscribersURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetSubscribersURL() string { - if r == nil || r.SubscribersURL == nil { - return "" - } - return *r.SubscribersURL -} - -// GetSubscriptionURL returns the SubscriptionURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetSubscriptionURL() string { - if r == nil || r.SubscriptionURL == nil { - return "" - } - return *r.SubscriptionURL -} - -// GetSVNURL returns the SVNURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetSVNURL() string { - if r == nil || r.SVNURL == nil { - return "" - } - return *r.SVNURL -} - -// GetTagsURL returns the TagsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetTagsURL() string { - if r == nil || r.TagsURL == nil { - return "" - } - return *r.TagsURL -} - -// GetTeamID returns the TeamID field if it's non-nil, zero value otherwise. -func (r *Repository) GetTeamID() int64 { - if r == nil || r.TeamID == nil { - return 0 - } - return *r.TeamID -} - -// GetTeamsURL returns the TeamsURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetTeamsURL() string { - if r == nil || r.TeamsURL == nil { - return "" - } - return *r.TeamsURL -} - -// GetTemplateRepository returns the TemplateRepository field. -func (r *Repository) GetTemplateRepository() *Repository { - if r == nil { - return nil - } - return r.TemplateRepository -} - -// GetTreesURL returns the TreesURL field if it's non-nil, zero value otherwise. -func (r *Repository) GetTreesURL() string { - if r == nil || r.TreesURL == nil { - return "" - } - return *r.TreesURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (r *Repository) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *Repository) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetUseSquashPRTitleAsDefault returns the UseSquashPRTitleAsDefault field if it's non-nil, zero value otherwise. -func (r *Repository) GetUseSquashPRTitleAsDefault() bool { - if r == nil || r.UseSquashPRTitleAsDefault == nil { - return false - } - return *r.UseSquashPRTitleAsDefault -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (r *Repository) GetVisibility() string { - if r == nil || r.Visibility == nil { - return "" - } - return *r.Visibility -} - -// GetWatchers returns the Watchers field if it's non-nil, zero value otherwise. -func (r *Repository) GetWatchers() int { - if r == nil || r.Watchers == nil { - return 0 - } - return *r.Watchers -} - -// GetWatchersCount returns the WatchersCount field if it's non-nil, zero value otherwise. -func (r *Repository) GetWatchersCount() int { - if r == nil || r.WatchersCount == nil { - return 0 - } - return *r.WatchersCount -} - -// GetWebCommitSignoffRequired returns the WebCommitSignoffRequired field if it's non-nil, zero value otherwise. -func (r *Repository) GetWebCommitSignoffRequired() bool { - if r == nil || r.WebCommitSignoffRequired == nil { - return false - } - return *r.WebCommitSignoffRequired -} - -// GetAccessLevel returns the AccessLevel field if it's non-nil, zero value otherwise. -func (r *RepositoryActionsAccessLevel) GetAccessLevel() string { - if r == nil || r.AccessLevel == nil { - return "" - } - return *r.AccessLevel -} - -// GetAdvancedSecurityCommitters returns the AdvancedSecurityCommitters field if it's non-nil, zero value otherwise. -func (r *RepositoryActiveCommitters) GetAdvancedSecurityCommitters() int { - if r == nil || r.AdvancedSecurityCommitters == nil { - return 0 - } - return *r.AdvancedSecurityCommitters -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepositoryActiveCommitters) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetBody() string { - if r == nil || r.Body == nil { - return "" - } - return *r.Body -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetCommitID() string { - if r == nil || r.CommitID == nil { - return "" - } - return *r.CommitID -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
-func (r *RepositoryComment) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetPosition returns the Position field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetPosition() int { - if r == nil || r.Position == nil { - return 0 - } - return *r.Position -} - -// GetReactions returns the Reactions field. -func (r *RepositoryComment) GetReactions() *Reactions { - if r == nil { - return nil - } - return r.Reactions -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryComment) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetUser returns the User field. -func (r *RepositoryComment) GetUser() *User { - if r == nil { - return nil - } - return r.User -} - -// GetAuthor returns the Author field. -func (r *RepositoryCommit) GetAuthor() *User { - if r == nil { - return nil - } - return r.Author -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. -func (r *RepositoryCommit) GetCommentsURL() string { - if r == nil || r.CommentsURL == nil { - return "" - } - return *r.CommentsURL -} - -// GetCommit returns the Commit field. -func (r *RepositoryCommit) GetCommit() *Commit { - if r == nil { - return nil - } - return r.Commit -} - -// GetCommitter returns the Committer field. -func (r *RepositoryCommit) GetCommitter() *User { - if r == nil { - return nil - } - return r.Committer -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepositoryCommit) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *RepositoryCommit) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (r *RepositoryCommit) GetSHA() string { - if r == nil || r.SHA == nil { - return "" - } - return *r.SHA -} - -// GetStats returns the Stats field. -func (r *RepositoryCommit) GetStats() *CommitStats { - if r == nil { - return nil - } - return r.Stats -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryCommit) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetDownloadURL() string { - if r == nil || r.DownloadURL == nil { - return "" - } - return *r.DownloadURL -} - -// GetEncoding returns the Encoding field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetEncoding() string { - if r == nil || r.Encoding == nil { - return "" - } - return *r.Encoding -} - -// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. 
-func (r *RepositoryContent) GetGitURL() string { - if r == nil || r.GitURL == nil { - return "" - } - return *r.GitURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetSHA() string { - if r == nil || r.SHA == nil { - return "" - } - return *r.SHA -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetSize() int { - if r == nil || r.Size == nil { - return 0 - } - return *r.Size -} - -// GetSubmoduleGitURL returns the SubmoduleGitURL field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetSubmoduleGitURL() string { - if r == nil || r.SubmoduleGitURL == nil { - return "" - } - return *r.SubmoduleGitURL -} - -// GetTarget returns the Target field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetTarget() string { - if r == nil || r.Target == nil { - return "" - } - return *r.Target -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryContent) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetAuthor returns the Author field. -func (r *RepositoryContentFileOptions) GetAuthor() *CommitAuthor { - if r == nil { - return nil - } - return r.Author -} - -// GetBranch returns the Branch field if it's non-nil, zero value otherwise. -func (r *RepositoryContentFileOptions) GetBranch() string { - if r == nil || r.Branch == nil { - return "" - } - return *r.Branch -} - -// GetCommitter returns the Committer field. -func (r *RepositoryContentFileOptions) GetCommitter() *CommitAuthor { - if r == nil { - return nil - } - return r.Committer -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (r *RepositoryContentFileOptions) GetMessage() string { - if r == nil || r.Message == nil { - return "" - } - return *r.Message -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (r *RepositoryContentFileOptions) GetSHA() string { - if r == nil || r.SHA == nil { - return "" - } - return *r.SHA -} - -// GetContent returns the Content field. -func (r *RepositoryContentResponse) GetContent() *RepositoryContent { - if r == nil { - return nil - } - return r.Content -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (r *RepositoryDispatchEvent) GetAction() string { - if r == nil || r.Action == nil { - return "" - } - return *r.Action -} - -// GetBranch returns the Branch field if it's non-nil, zero value otherwise. -func (r *RepositoryDispatchEvent) GetBranch() string { - if r == nil || r.Branch == nil { - return "" - } - return *r.Branch -} - -// GetInstallation returns the Installation field. 
-func (r *RepositoryDispatchEvent) GetInstallation() *Installation { - if r == nil { - return nil - } - return r.Installation -} - -// GetOrg returns the Org field. -func (r *RepositoryDispatchEvent) GetOrg() *Organization { - if r == nil { - return nil - } - return r.Org -} - -// GetRepo returns the Repo field. -func (r *RepositoryDispatchEvent) GetRepo() *Repository { - if r == nil { - return nil - } - return r.Repo -} - -// GetSender returns the Sender field. -func (r *RepositoryDispatchEvent) GetSender() *User { - if r == nil { - return nil - } - return r.Sender -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (r *RepositoryEvent) GetAction() string { - if r == nil || r.Action == nil { - return "" - } - return *r.Action -} - -// GetChanges returns the Changes field. -func (r *RepositoryEvent) GetChanges() *EditChange { - if r == nil { - return nil - } - return r.Changes -} - -// GetInstallation returns the Installation field. -func (r *RepositoryEvent) GetInstallation() *Installation { - if r == nil { - return nil - } - return r.Installation -} - -// GetOrg returns the Org field. -func (r *RepositoryEvent) GetOrg() *Organization { - if r == nil { - return nil - } - return r.Org -} - -// GetRepo returns the Repo field. -func (r *RepositoryEvent) GetRepo() *Repository { - if r == nil { - return nil - } - return r.Repo -} - -// GetSender returns the Sender field. -func (r *RepositoryEvent) GetSender() *User { - if r == nil { - return nil - } - return r.Sender -} - -// GetOrg returns the Org field. -func (r *RepositoryImportEvent) GetOrg() *Organization { - if r == nil { - return nil - } - return r.Org -} - -// GetRepo returns the Repo field. -func (r *RepositoryImportEvent) GetRepo() *Repository { - if r == nil { - return nil - } - return r.Repo -} - -// GetSender returns the Sender field. -func (r *RepositoryImportEvent) GetSender() *User { - if r == nil { - return nil - } - return r.Sender -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (r *RepositoryImportEvent) GetStatus() string { - if r == nil || r.Status == nil { - return "" - } - return *r.Status -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryInvitation) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepositoryInvitation) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepositoryInvitation) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetInvitee returns the Invitee field. -func (r *RepositoryInvitation) GetInvitee() *User { - if r == nil { - return nil - } - return r.Invitee -} - -// GetInviter returns the Inviter field. -func (r *RepositoryInvitation) GetInviter() *User { - if r == nil { - return nil - } - return r.Inviter -} - -// GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. -func (r *RepositoryInvitation) GetPermissions() string { - if r == nil || r.Permissions == nil { - return "" - } - return *r.Permissions -} - -// GetRepo returns the Repo field. 
-func (r *RepositoryInvitation) GetRepo() *Repository { - if r == nil { - return nil - } - return r.Repo -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryInvitation) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetContent returns the Content field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetContent() string { - if r == nil || r.Content == nil { - return "" - } - return *r.Content -} - -// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetDownloadURL() string { - if r == nil || r.DownloadURL == nil { - return "" - } - return *r.DownloadURL -} - -// GetEncoding returns the Encoding field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetEncoding() string { - if r == nil || r.Encoding == nil { - return "" - } - return *r.Encoding -} - -// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetGitURL() string { - if r == nil || r.GitURL == nil { - return "" - } - return *r.GitURL -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetLicense returns the License field. -func (r *RepositoryLicense) GetLicense() *License { - if r == nil { - return nil - } - return r.License -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetSHA() string { - if r == nil || r.SHA == nil { - return "" - } - return *r.SHA -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetSize() int { - if r == nil || r.Size == nil { - return 0 - } - return *r.Size -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryLicense) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetBase returns the Base field if it's non-nil, zero value otherwise. -func (r *RepositoryMergeRequest) GetBase() string { - if r == nil || r.Base == nil { - return "" - } - return *r.Base -} - -// GetCommitMessage returns the CommitMessage field if it's non-nil, zero value otherwise. -func (r *RepositoryMergeRequest) GetCommitMessage() string { - if r == nil || r.CommitMessage == nil { - return "" - } - return *r.CommitMessage -} - -// GetHead returns the Head field if it's non-nil, zero value otherwise. -func (r *RepositoryMergeRequest) GetHead() string { - if r == nil || r.Head == nil { - return "" - } - return *r.Head -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. 
-func (r *RepositoryPermissionLevel) GetPermission() string { - if r == nil || r.Permission == nil { - return "" - } - return *r.Permission -} - -// GetUser returns the User field. -func (r *RepositoryPermissionLevel) GetUser() *User { - if r == nil { - return nil - } - return r.User -} - -// GetAssetsURL returns the AssetsURL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetAssetsURL() string { - if r == nil || r.AssetsURL == nil { - return "" - } - return *r.AssetsURL -} - -// GetAuthor returns the Author field. -func (r *RepositoryRelease) GetAuthor() *User { - if r == nil { - return nil - } - return r.Author -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetBody() string { - if r == nil || r.Body == nil { - return "" - } - return *r.Body -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetDiscussionCategoryName returns the DiscussionCategoryName field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetDiscussionCategoryName() string { - if r == nil || r.DiscussionCategoryName == nil { - return "" - } - return *r.DiscussionCategoryName -} - -// GetDraft returns the Draft field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetDraft() bool { - if r == nil || r.Draft == nil { - return false - } - return *r.Draft -} - -// GetGenerateReleaseNotes returns the GenerateReleaseNotes field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetGenerateReleaseNotes() bool { - if r == nil || r.GenerateReleaseNotes == nil { - return false - } - return *r.GenerateReleaseNotes -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetMakeLatest returns the MakeLatest field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetMakeLatest() string { - if r == nil || r.MakeLatest == nil { - return "" - } - return *r.MakeLatest -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetPrerelease returns the Prerelease field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetPrerelease() bool { - if r == nil || r.Prerelease == nil { - return false - } - return *r.Prerelease -} - -// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetPublishedAt() Timestamp { - if r == nil || r.PublishedAt == nil { - return Timestamp{} - } - return *r.PublishedAt -} - -// GetTagName returns the TagName field if it's non-nil, zero value otherwise. 
-func (r *RepositoryRelease) GetTagName() string { - if r == nil || r.TagName == nil { - return "" - } - return *r.TagName -} - -// GetTarballURL returns the TarballURL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetTarballURL() string { - if r == nil || r.TarballURL == nil { - return "" - } - return *r.TarballURL -} - -// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetTargetCommitish() string { - if r == nil || r.TargetCommitish == nil { - return "" - } - return *r.TargetCommitish -} - -// GetUploadURL returns the UploadURL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetUploadURL() string { - if r == nil || r.UploadURL == nil { - return "" - } - return *r.UploadURL -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetZipballURL returns the ZipballURL field if it's non-nil, zero value otherwise. -func (r *RepositoryRelease) GetZipballURL() string { - if r == nil || r.ZipballURL == nil { - return "" - } - return *r.ZipballURL -} - -// GetParameters returns the Parameters field if it's non-nil, zero value otherwise. -func (r *RepositoryRule) GetParameters() json.RawMessage { - if r == nil || r.Parameters == nil { - return json.RawMessage{} - } - return *r.Parameters -} - -// GetCommit returns the Commit field. -func (r *RepositoryTag) GetCommit() *Commit { - if r == nil { - return nil - } - return r.Commit -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepositoryTag) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetTarballURL returns the TarballURL field if it's non-nil, zero value otherwise. -func (r *RepositoryTag) GetTarballURL() string { - if r == nil || r.TarballURL == nil { - return "" - } - return *r.TarballURL -} - -// GetZipballURL returns the ZipballURL field if it's non-nil, zero value otherwise. -func (r *RepositoryTag) GetZipballURL() string { - if r == nil || r.ZipballURL == nil { - return "" - } - return *r.ZipballURL -} - -// GetAffectedPackageName returns the AffectedPackageName field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetAffectedPackageName() string { - if r == nil || r.AffectedPackageName == nil { - return "" - } - return *r.AffectedPackageName -} - -// GetAffectedRange returns the AffectedRange field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetAffectedRange() string { - if r == nil || r.AffectedRange == nil { - return "" - } - return *r.AffectedRange -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetDismissedAt returns the DismissedAt field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetDismissedAt() Timestamp { - if r == nil || r.DismissedAt == nil { - return Timestamp{} - } - return *r.DismissedAt -} - -// GetDismisser returns the Dismisser field. -func (r *RepositoryVulnerabilityAlert) GetDismisser() *User { - if r == nil { - return nil - } - return r.Dismisser -} - -// GetDismissReason returns the DismissReason field if it's non-nil, zero value otherwise. 
-func (r *RepositoryVulnerabilityAlert) GetDismissReason() string { - if r == nil || r.DismissReason == nil { - return "" - } - return *r.DismissReason -} - -// GetExternalIdentifier returns the ExternalIdentifier field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetExternalIdentifier() string { - if r == nil || r.ExternalIdentifier == nil { - return "" - } - return *r.ExternalIdentifier -} - -// GetExternalReference returns the ExternalReference field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetExternalReference() string { - if r == nil || r.ExternalReference == nil { - return "" - } - return *r.ExternalReference -} - -// GetFixedIn returns the FixedIn field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetFixedIn() string { - if r == nil || r.FixedIn == nil { - return "" - } - return *r.FixedIn -} - -// GetGitHubSecurityAdvisoryID returns the GitHubSecurityAdvisoryID field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetGitHubSecurityAdvisoryID() string { - if r == nil || r.GitHubSecurityAdvisoryID == nil { - return "" - } - return *r.GitHubSecurityAdvisoryID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlert) GetSeverity() string { - if r == nil || r.Severity == nil { - return "" - } - return *r.Severity -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (r *RepositoryVulnerabilityAlertEvent) GetAction() string { - if r == nil || r.Action == nil { - return "" - } - return *r.Action -} - -// GetAlert returns the Alert field. -func (r *RepositoryVulnerabilityAlertEvent) GetAlert() *RepositoryVulnerabilityAlert { - if r == nil { - return nil - } - return r.Alert -} - -// GetInstallation returns the Installation field. -func (r *RepositoryVulnerabilityAlertEvent) GetInstallation() *Installation { - if r == nil { - return nil - } - return r.Installation -} - -// GetOrg returns the Org field. -func (r *RepositoryVulnerabilityAlertEvent) GetOrg() *Organization { - if r == nil { - return nil - } - return r.Org -} - -// GetRepository returns the Repository field. -func (r *RepositoryVulnerabilityAlertEvent) GetRepository() *Repository { - if r == nil { - return nil - } - return r.Repository -} - -// GetSender returns the Sender field. -func (r *RepositoryVulnerabilityAlertEvent) GetSender() *User { - if r == nil { - return nil - } - return r.Sender -} - -// GetForkRepos returns the ForkRepos field if it's non-nil, zero value otherwise. -func (r *RepoStats) GetForkRepos() int { - if r == nil || r.ForkRepos == nil { - return 0 - } - return *r.ForkRepos -} - -// GetOrgRepos returns the OrgRepos field if it's non-nil, zero value otherwise. -func (r *RepoStats) GetOrgRepos() int { - if r == nil || r.OrgRepos == nil { - return 0 - } - return *r.OrgRepos -} - -// GetRootRepos returns the RootRepos field if it's non-nil, zero value otherwise. -func (r *RepoStats) GetRootRepos() int { - if r == nil || r.RootRepos == nil { - return 0 - } - return *r.RootRepos -} - -// GetTotalPushes returns the TotalPushes field if it's non-nil, zero value otherwise. 
-func (r *RepoStats) GetTotalPushes() int { - if r == nil || r.TotalPushes == nil { - return 0 - } - return *r.TotalPushes -} - -// GetTotalRepos returns the TotalRepos field if it's non-nil, zero value otherwise. -func (r *RepoStats) GetTotalRepos() int { - if r == nil || r.TotalRepos == nil { - return 0 - } - return *r.TotalRepos -} - -// GetTotalWikis returns the TotalWikis field if it's non-nil, zero value otherwise. -func (r *RepoStats) GetTotalWikis() int { - if r == nil || r.TotalWikis == nil { - return 0 - } - return *r.TotalWikis -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetAvatarURL() string { - if r == nil || r.AvatarURL == nil { - return "" - } - return *r.AvatarURL -} - -// GetContext returns the Context field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetContext() string { - if r == nil || r.Context == nil { - return "" - } - return *r.Context -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetCreator returns the Creator field. -func (r *RepoStatus) GetCreator() *User { - if r == nil { - return nil - } - return r.Creator -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetDescription() string { - if r == nil || r.Description == nil { - return "" - } - return *r.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetState() string { - if r == nil || r.State == nil { - return "" - } - return *r.State -} - -// GetTargetURL returns the TargetURL field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetTargetURL() string { - if r == nil || r.TargetURL == nil { - return "" - } - return *r.TargetURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepoStatus) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *RequireCodeOwnerReviewChanges) GetFrom() bool { - if r == nil || r.From == nil { - return false - } - return *r.From -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *RequiredConversationResolutionLevelChanges) GetFrom() string { - if r == nil || r.From == nil { - return "" - } - return *r.From -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *RequiredDeploymentsEnforcementLevelChanges) GetFrom() string { - if r == nil || r.From == nil { - return "" - } - return *r.From -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. 
-func (r *RequiredReviewer) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetAppID returns the AppID field if it's non-nil, zero value otherwise. -func (r *RequiredStatusCheck) GetAppID() int64 { - if r == nil || r.AppID == nil { - return 0 - } - return *r.AppID -} - -// GetContextsURL returns the ContextsURL field if it's non-nil, zero value otherwise. -func (r *RequiredStatusChecks) GetContextsURL() string { - if r == nil || r.ContextsURL == nil { - return "" - } - return *r.ContextsURL -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RequiredStatusChecks) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (r *RequiredStatusChecksEnforcementLevelChanges) GetFrom() string { - if r == nil || r.From == nil { - return "" - } - return *r.From -} - -// GetStrict returns the Strict field if it's non-nil, zero value otherwise. -func (r *RequiredStatusChecksRequest) GetStrict() bool { - if r == nil || r.Strict == nil { - return false - } - return *r.Strict -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (r *RequiredWorkflowSelectedRepos) GetTotalCount() int { - if r == nil || r.TotalCount == nil { - return 0 - } - return *r.TotalCount -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *ReviewersRequest) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (r *ReviewPersonalAccessTokenRequestOptions) GetReason() string { - if r == nil || r.Reason == nil { - return "" - } - return *r.Reason -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (r *Rule) GetDescription() string { - if r == nil || r.Description == nil { - return "" - } - return *r.Description -} - -// GetFullDescription returns the FullDescription field if it's non-nil, zero value otherwise. -func (r *Rule) GetFullDescription() string { - if r == nil || r.FullDescription == nil { - return "" - } - return *r.FullDescription -} - -// GetHelp returns the Help field if it's non-nil, zero value otherwise. -func (r *Rule) GetHelp() string { - if r == nil || r.Help == nil { - return "" - } - return *r.Help -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *Rule) GetID() string { - if r == nil || r.ID == nil { - return "" - } - return *r.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *Rule) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetSecuritySeverityLevel returns the SecuritySeverityLevel field if it's non-nil, zero value otherwise. -func (r *Rule) GetSecuritySeverityLevel() string { - if r == nil || r.SecuritySeverityLevel == nil { - return "" - } - return *r.SecuritySeverityLevel -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (r *Rule) GetSeverity() string { - if r == nil || r.Severity == nil { - return "" - } - return *r.Severity -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RulePatternParameters) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNegate returns the Negate field if it's non-nil, zero value otherwise. 
-func (r *RulePatternParameters) GetNegate() bool { - if r == nil || r.Negate == nil { - return false - } - return *r.Negate -} - -// GetIntegrationID returns the IntegrationID field if it's non-nil, zero value otherwise. -func (r *RuleRequiredStatusChecks) GetIntegrationID() int64 { - if r == nil || r.IntegrationID == nil { - return 0 - } - return *r.IntegrationID -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (r *RuleRequiredWorkflow) GetRef() string { - if r == nil || r.Ref == nil { - return "" - } - return *r.Ref -} - -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (r *RuleRequiredWorkflow) GetRepositoryID() int64 { - if r == nil || r.RepositoryID == nil { - return 0 - } - return *r.RepositoryID -} - -// GetSha returns the Sha field if it's non-nil, zero value otherwise. -func (r *RuleRequiredWorkflow) GetSha() string { - if r == nil || r.Sha == nil { - return "" - } - return *r.Sha -} - -// GetConditions returns the Conditions field. -func (r *Ruleset) GetConditions() *RulesetConditions { - if r == nil { - return nil - } - return r.Conditions -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *Ruleset) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetLinks returns the Links field. -func (r *Ruleset) GetLinks() *RulesetLinks { - if r == nil { - return nil - } - return r.Links -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (r *Ruleset) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetSourceType returns the SourceType field if it's non-nil, zero value otherwise. -func (r *Ruleset) GetSourceType() string { - if r == nil || r.SourceType == nil { - return "" - } - return *r.SourceType -} - -// GetTarget returns the Target field if it's non-nil, zero value otherwise. -func (r *Ruleset) GetTarget() string { - if r == nil || r.Target == nil { - return "" - } - return *r.Target -} - -// GetRefName returns the RefName field. -func (r *RulesetConditions) GetRefName() *RulesetRefConditionParameters { - if r == nil { - return nil - } - return r.RefName -} - -// GetRepositoryID returns the RepositoryID field. -func (r *RulesetConditions) GetRepositoryID() *RulesetRepositoryIDsConditionParameters { - if r == nil { - return nil - } - return r.RepositoryID -} - -// GetRepositoryName returns the RepositoryName field. -func (r *RulesetConditions) GetRepositoryName() *RulesetRepositoryNamesConditionParameters { - if r == nil { - return nil - } - return r.RepositoryName -} - -// GetHRef returns the HRef field if it's non-nil, zero value otherwise. -func (r *RulesetLink) GetHRef() string { - if r == nil || r.HRef == nil { - return "" - } - return *r.HRef -} - -// GetSelf returns the Self field. -func (r *RulesetLinks) GetSelf() *RulesetLink { - if r == nil { - return nil - } - return r.Self -} - -// GetProtected returns the Protected field if it's non-nil, zero value otherwise. -func (r *RulesetRepositoryNamesConditionParameters) GetProtected() bool { - if r == nil || r.Protected == nil { - return false - } - return *r.Protected -} - -// GetBusy returns the Busy field if it's non-nil, zero value otherwise. -func (r *Runner) GetBusy() bool { - if r == nil || r.Busy == nil { - return false - } - return *r.Busy -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. 
-func (r *Runner) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *Runner) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetOS returns the OS field if it's non-nil, zero value otherwise. -func (r *Runner) GetOS() string { - if r == nil || r.OS == nil { - return "" - } - return *r.OS -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (r *Runner) GetStatus() string { - if r == nil || r.Status == nil { - return "" - } - return *r.Status -} - -// GetArchitecture returns the Architecture field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetArchitecture() string { - if r == nil || r.Architecture == nil { - return "" - } - return *r.Architecture -} - -// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetDownloadURL() string { - if r == nil || r.DownloadURL == nil { - return "" - } - return *r.DownloadURL -} - -// GetFilename returns the Filename field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetFilename() string { - if r == nil || r.Filename == nil { - return "" - } - return *r.Filename -} - -// GetOS returns the OS field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetOS() string { - if r == nil || r.OS == nil { - return "" - } - return *r.OS -} - -// GetSHA256Checksum returns the SHA256Checksum field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetSHA256Checksum() string { - if r == nil || r.SHA256Checksum == nil { - return "" - } - return *r.SHA256Checksum -} - -// GetTempDownloadToken returns the TempDownloadToken field if it's non-nil, zero value otherwise. -func (r *RunnerApplicationDownload) GetTempDownloadToken() string { - if r == nil || r.TempDownloadToken == nil { - return "" - } - return *r.TempDownloadToken -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetAllowsPublicRepositories() bool { - if r == nil || r.AllowsPublicRepositories == nil { - return false - } - return *r.AllowsPublicRepositories -} - -// GetDefault returns the Default field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetDefault() bool { - if r == nil || r.Default == nil { - return false - } - return *r.Default -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetInherited returns the Inherited field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetInherited() bool { - if r == nil || r.Inherited == nil { - return false - } - return *r.Inherited -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetRestrictedToWorkflows() bool { - if r == nil || r.RestrictedToWorkflows == nil { - return false - } - return *r.RestrictedToWorkflows -} - -// GetRunnersURL returns the RunnersURL field if it's non-nil, zero value otherwise. 
-func (r *RunnerGroup) GetRunnersURL() string { - if r == nil || r.RunnersURL == nil { - return "" - } - return *r.RunnersURL -} - -// GetSelectedRepositoriesURL returns the SelectedRepositoriesURL field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetSelectedRepositoriesURL() string { - if r == nil || r.SelectedRepositoriesURL == nil { - return "" - } - return *r.SelectedRepositoriesURL -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetVisibility() string { - if r == nil || r.Visibility == nil { - return "" - } - return *r.Visibility -} - -// GetWorkflowRestrictionsReadOnly returns the WorkflowRestrictionsReadOnly field if it's non-nil, zero value otherwise. -func (r *RunnerGroup) GetWorkflowRestrictionsReadOnly() bool { - if r == nil || r.WorkflowRestrictionsReadOnly == nil { - return false - } - return *r.WorkflowRestrictionsReadOnly -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RunnerLabels) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RunnerLabels) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (r *RunnerLabels) GetType() string { - if r == nil || r.Type == nil { - return "" - } - return *r.Type -} - -// GetCheckoutURI returns the CheckoutURI field if it's non-nil, zero value otherwise. -func (s *SarifAnalysis) GetCheckoutURI() string { - if s == nil || s.CheckoutURI == nil { - return "" - } - return *s.CheckoutURI -} - -// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. -func (s *SarifAnalysis) GetCommitSHA() string { - if s == nil || s.CommitSHA == nil { - return "" - } - return *s.CommitSHA -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (s *SarifAnalysis) GetRef() string { - if s == nil || s.Ref == nil { - return "" - } - return *s.Ref -} - -// GetSarif returns the Sarif field if it's non-nil, zero value otherwise. -func (s *SarifAnalysis) GetSarif() string { - if s == nil || s.Sarif == nil { - return "" - } - return *s.Sarif -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. -func (s *SarifAnalysis) GetStartedAt() Timestamp { - if s == nil || s.StartedAt == nil { - return Timestamp{} - } - return *s.StartedAt -} - -// GetToolName returns the ToolName field if it's non-nil, zero value otherwise. -func (s *SarifAnalysis) GetToolName() string { - if s == nil || s.ToolName == nil { - return "" - } - return *s.ToolName -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *SarifID) GetID() string { - if s == nil || s.ID == nil { - return "" - } - return *s.ID -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *SarifID) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetAnalysesURL returns the AnalysesURL field if it's non-nil, zero value otherwise. -func (s *SARIFUpload) GetAnalysesURL() string { - if s == nil || s.AnalysesURL == nil { - return "" - } - return *s.AnalysesURL -} - -// GetProcessingStatus returns the ProcessingStatus field if it's non-nil, zero value otherwise. 
-func (s *SARIFUpload) GetProcessingStatus() string { - if s == nil || s.ProcessingStatus == nil { - return "" - } - return *s.ProcessingStatus -} - -// GetSBOM returns the SBOM field. -func (s *SBOM) GetSBOM() *SBOMInfo { - if s == nil { - return nil - } - return s.SBOM -} - -// GetCreationInfo returns the CreationInfo field. -func (s *SBOMInfo) GetCreationInfo() *CreationInfo { - if s == nil { - return nil - } - return s.CreationInfo -} - -// GetDataLicense returns the DataLicense field if it's non-nil, zero value otherwise. -func (s *SBOMInfo) GetDataLicense() string { - if s == nil || s.DataLicense == nil { - return "" - } - return *s.DataLicense -} - -// GetDocumentNamespace returns the DocumentNamespace field if it's non-nil, zero value otherwise. -func (s *SBOMInfo) GetDocumentNamespace() string { - if s == nil || s.DocumentNamespace == nil { - return "" - } - return *s.DocumentNamespace -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (s *SBOMInfo) GetName() string { - if s == nil || s.Name == nil { - return "" - } - return *s.Name -} - -// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. -func (s *SBOMInfo) GetSPDXID() string { - if s == nil || s.SPDXID == nil { - return "" - } - return *s.SPDXID -} - -// GetSPDXVersion returns the SPDXVersion field if it's non-nil, zero value otherwise. -func (s *SBOMInfo) GetSPDXVersion() string { - if s == nil || s.SPDXVersion == nil { - return "" - } - return *s.SPDXVersion -} - -// GetAnalysisKey returns the AnalysisKey field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetAnalysisKey() string { - if s == nil || s.AnalysisKey == nil { - return "" - } - return *s.AnalysisKey -} - -// GetCategory returns the Category field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetCategory() string { - if s == nil || s.Category == nil { - return "" - } - return *s.Category -} - -// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetCommitSHA() string { - if s == nil || s.CommitSHA == nil { - return "" - } - return *s.CommitSHA -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetDeletable returns the Deletable field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetDeletable() bool { - if s == nil || s.Deletable == nil { - return false - } - return *s.Deletable -} - -// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetEnvironment() string { - if s == nil || s.Environment == nil { - return "" - } - return *s.Environment -} - -// GetError returns the Error field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetError() string { - if s == nil || s.Error == nil { - return "" - } - return *s.Error -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetID() int64 { - if s == nil || s.ID == nil { - return 0 - } - return *s.ID -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetRef() string { - if s == nil || s.Ref == nil { - return "" - } - return *s.Ref -} - -// GetResultsCount returns the ResultsCount field if it's non-nil, zero value otherwise. 
-func (s *ScanningAnalysis) GetResultsCount() int { - if s == nil || s.ResultsCount == nil { - return 0 - } - return *s.ResultsCount -} - -// GetRulesCount returns the RulesCount field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetRulesCount() int { - if s == nil || s.RulesCount == nil { - return 0 - } - return *s.RulesCount -} - -// GetSarifID returns the SarifID field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetSarifID() string { - if s == nil || s.SarifID == nil { - return "" - } - return *s.SarifID -} - -// GetTool returns the Tool field. -func (s *ScanningAnalysis) GetTool() *Tool { - if s == nil { - return nil - } - return s.Tool -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetWarning returns the Warning field if it's non-nil, zero value otherwise. -func (s *ScanningAnalysis) GetWarning() string { - if s == nil || s.Warning == nil { - return "" - } - return *s.Warning -} - -// GetCreated returns the Created field if it's non-nil, zero value otherwise. -func (s *SCIMMeta) GetCreated() Timestamp { - if s == nil || s.Created == nil { - return Timestamp{} - } - return *s.Created -} - -// GetLastModified returns the LastModified field if it's non-nil, zero value otherwise. -func (s *SCIMMeta) GetLastModified() Timestamp { - if s == nil || s.LastModified == nil { - return Timestamp{} - } - return *s.LastModified -} - -// GetLocation returns the Location field if it's non-nil, zero value otherwise. -func (s *SCIMMeta) GetLocation() string { - if s == nil || s.Location == nil { - return "" - } - return *s.Location -} - -// GetResourceType returns the ResourceType field if it's non-nil, zero value otherwise. -func (s *SCIMMeta) GetResourceType() string { - if s == nil || s.ResourceType == nil { - return "" - } - return *s.ResourceType -} - -// GetItemsPerPage returns the ItemsPerPage field if it's non-nil, zero value otherwise. -func (s *SCIMProvisionedIdentities) GetItemsPerPage() int { - if s == nil || s.ItemsPerPage == nil { - return 0 - } - return *s.ItemsPerPage -} - -// GetStartIndex returns the StartIndex field if it's non-nil, zero value otherwise. -func (s *SCIMProvisionedIdentities) GetStartIndex() int { - if s == nil || s.StartIndex == nil { - return 0 - } - return *s.StartIndex -} - -// GetTotalResults returns the TotalResults field if it's non-nil, zero value otherwise. -func (s *SCIMProvisionedIdentities) GetTotalResults() int { - if s == nil || s.TotalResults == nil { - return 0 - } - return *s.TotalResults -} - -// GetActive returns the Active field if it's non-nil, zero value otherwise. -func (s *SCIMUserAttributes) GetActive() bool { - if s == nil || s.Active == nil { - return false - } - return *s.Active -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. -func (s *SCIMUserAttributes) GetDisplayName() string { - if s == nil || s.DisplayName == nil { - return "" - } - return *s.DisplayName -} - -// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. -func (s *SCIMUserAttributes) GetExternalID() string { - if s == nil || s.ExternalID == nil { - return "" - } - return *s.ExternalID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *SCIMUserAttributes) GetID() string { - if s == nil || s.ID == nil { - return "" - } - return *s.ID -} - -// GetMeta returns the Meta field. 
-func (s *SCIMUserAttributes) GetMeta() *SCIMMeta { - if s == nil { - return nil - } - return s.Meta -} - -// GetPrimary returns the Primary field if it's non-nil, zero value otherwise. -func (s *SCIMUserEmail) GetPrimary() bool { - if s == nil || s.Primary == nil { - return false - } - return *s.Primary -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (s *SCIMUserEmail) GetType() string { - if s == nil || s.Type == nil { - return "" - } - return *s.Type -} - -// GetFormatted returns the Formatted field if it's non-nil, zero value otherwise. -func (s *SCIMUserName) GetFormatted() string { - if s == nil || s.Formatted == nil { - return "" - } - return *s.Formatted -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (s *SecretScanning) GetStatus() string { - if s == nil || s.Status == nil { - return "" - } - return *s.Status -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetHTMLURL() string { - if s == nil || s.HTMLURL == nil { - return "" - } - return *s.HTMLURL -} - -// GetLocationsURL returns the LocationsURL field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetLocationsURL() string { - if s == nil || s.LocationsURL == nil { - return "" - } - return *s.LocationsURL -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetNumber() int { - if s == nil || s.Number == nil { - return 0 - } - return *s.Number -} - -// GetPushProtectionBypassed returns the PushProtectionBypassed field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetPushProtectionBypassed() bool { - if s == nil || s.PushProtectionBypassed == nil { - return false - } - return *s.PushProtectionBypassed -} - -// GetPushProtectionBypassedAt returns the PushProtectionBypassedAt field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetPushProtectionBypassedAt() Timestamp { - if s == nil || s.PushProtectionBypassedAt == nil { - return Timestamp{} - } - return *s.PushProtectionBypassedAt -} - -// GetPushProtectionBypassedBy returns the PushProtectionBypassedBy field. -func (s *SecretScanningAlert) GetPushProtectionBypassedBy() *User { - if s == nil { - return nil - } - return s.PushProtectionBypassedBy -} - -// GetRepository returns the Repository field. -func (s *SecretScanningAlert) GetRepository() *Repository { - if s == nil { - return nil - } - return s.Repository -} - -// GetResolution returns the Resolution field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetResolution() string { - if s == nil || s.Resolution == nil { - return "" - } - return *s.Resolution -} - -// GetResolutionComment returns the ResolutionComment field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetResolutionComment() string { - if s == nil || s.ResolutionComment == nil { - return "" - } - return *s.ResolutionComment -} - -// GetResolvedAt returns the ResolvedAt field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetResolvedAt() Timestamp { - if s == nil || s.ResolvedAt == nil { - return Timestamp{} - } - return *s.ResolvedAt -} - -// GetResolvedBy returns the ResolvedBy field. 
-func (s *SecretScanningAlert) GetResolvedBy() *User { - if s == nil { - return nil - } - return s.ResolvedBy -} - -// GetSecret returns the Secret field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetSecret() string { - if s == nil || s.Secret == nil { - return "" - } - return *s.Secret -} - -// GetSecretType returns the SecretType field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetSecretType() string { - if s == nil || s.SecretType == nil { - return "" - } - return *s.SecretType -} - -// GetSecretTypeDisplayName returns the SecretTypeDisplayName field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetSecretTypeDisplayName() string { - if s == nil || s.SecretTypeDisplayName == nil { - return "" - } - return *s.SecretTypeDisplayName -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetState() string { - if s == nil || s.State == nil { - return "" - } - return *s.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetUpdatedAt() Timestamp { - if s == nil || s.UpdatedAt == nil { - return Timestamp{} - } - return *s.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlert) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertEvent) GetAction() string { - if s == nil || s.Action == nil { - return "" - } - return *s.Action -} - -// GetAlert returns the Alert field. -func (s *SecretScanningAlertEvent) GetAlert() *SecretScanningAlert { - if s == nil { - return nil - } - return s.Alert -} - -// GetEnterprise returns the Enterprise field. -func (s *SecretScanningAlertEvent) GetEnterprise() *Enterprise { - if s == nil { - return nil - } - return s.Enterprise -} - -// GetInstallation returns the Installation field. -func (s *SecretScanningAlertEvent) GetInstallation() *Installation { - if s == nil { - return nil - } - return s.Installation -} - -// GetOrganization returns the Organization field. -func (s *SecretScanningAlertEvent) GetOrganization() *Organization { - if s == nil { - return nil - } - return s.Organization -} - -// GetRepo returns the Repo field. -func (s *SecretScanningAlertEvent) GetRepo() *Repository { - if s == nil { - return nil - } - return s.Repo -} - -// GetSender returns the Sender field. -func (s *SecretScanningAlertEvent) GetSender() *User { - if s == nil { - return nil - } - return s.Sender -} - -// GetDetails returns the Details field. -func (s *SecretScanningAlertLocation) GetDetails() *SecretScanningAlertLocationDetails { - if s == nil { - return nil - } - return s.Details -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocation) GetType() string { - if s == nil || s.Type == nil { - return "" - } - return *s.Type -} - -// GetBlobSHA returns the BlobSHA field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetBlobSHA() string { - if s == nil || s.BlobSHA == nil { - return "" - } - return *s.BlobSHA -} - -// GetBlobURL returns the BlobURL field if it's non-nil, zero value otherwise. 
-func (s *SecretScanningAlertLocationDetails) GetBlobURL() string { - if s == nil || s.BlobURL == nil { - return "" - } - return *s.BlobURL -} - -// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetCommitSHA() string { - if s == nil || s.CommitSHA == nil { - return "" - } - return *s.CommitSHA -} - -// GetCommitURL returns the CommitURL field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetCommitURL() string { - if s == nil || s.CommitURL == nil { - return "" - } - return *s.CommitURL -} - -// GetEndColumn returns the EndColumn field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetEndColumn() int { - if s == nil || s.EndColumn == nil { - return 0 - } - return *s.EndColumn -} - -// GetEndLine returns the EndLine field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetEndLine() int { - if s == nil || s.EndLine == nil { - return 0 - } - return *s.EndLine -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetPath() string { - if s == nil || s.Path == nil { - return "" - } - return *s.Path -} - -// GetStartColumn returns the StartColumn field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetStartColumn() int { - if s == nil || s.StartColumn == nil { - return 0 - } - return *s.StartColumn -} - -// GetStartline returns the Startline field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertLocationDetails) GetStartline() int { - if s == nil || s.Startline == nil { - return 0 - } - return *s.Startline -} - -// GetResolution returns the Resolution field if it's non-nil, zero value otherwise. -func (s *SecretScanningAlertUpdateOptions) GetResolution() string { - if s == nil || s.Resolution == nil { - return "" - } - return *s.Resolution -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (s *SecretScanningPushProtection) GetStatus() string { - if s == nil || s.Status == nil { - return "" - } - return *s.Status -} - -// GetAuthor returns the Author field. -func (s *SecurityAdvisory) GetAuthor() *User { - if s == nil { - return nil - } - return s.Author -} - -// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetClosedAt() Timestamp { - if s == nil || s.ClosedAt == nil { - return Timestamp{} - } - return *s.ClosedAt -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetCVEID returns the CVEID field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetCVEID() string { - if s == nil || s.CVEID == nil { - return "" - } - return *s.CVEID -} - -// GetCVSS returns the CVSS field. -func (s *SecurityAdvisory) GetCVSS() *AdvisoryCVSS { - if s == nil { - return nil - } - return s.CVSS -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetDescription() string { - if s == nil || s.Description == nil { - return "" - } - return *s.Description -} - -// GetGHSAID returns the GHSAID field if it's non-nil, zero value otherwise. 
-func (s *SecurityAdvisory) GetGHSAID() string { - if s == nil || s.GHSAID == nil { - return "" - } - return *s.GHSAID -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetHTMLURL() string { - if s == nil || s.HTMLURL == nil { - return "" - } - return *s.HTMLURL -} - -// GetPrivateFork returns the PrivateFork field. -func (s *SecurityAdvisory) GetPrivateFork() *Repository { - if s == nil { - return nil - } - return s.PrivateFork -} - -// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetPublishedAt() Timestamp { - if s == nil || s.PublishedAt == nil { - return Timestamp{} - } - return *s.PublishedAt -} - -// GetPublisher returns the Publisher field. -func (s *SecurityAdvisory) GetPublisher() *User { - if s == nil { - return nil - } - return s.Publisher -} - -// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetSeverity() string { - if s == nil || s.Severity == nil { - return "" - } - return *s.Severity -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetState() string { - if s == nil || s.State == nil { - return "" - } - return *s.State -} - -// GetSubmission returns the Submission field. -func (s *SecurityAdvisory) GetSubmission() *SecurityAdvisorySubmission { - if s == nil { - return nil - } - return s.Submission -} - -// GetSummary returns the Summary field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetSummary() string { - if s == nil || s.Summary == nil { - return "" - } - return *s.Summary -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetUpdatedAt() Timestamp { - if s == nil || s.UpdatedAt == nil { - return Timestamp{} - } - return *s.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetWithdrawnAt returns the WithdrawnAt field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisory) GetWithdrawnAt() Timestamp { - if s == nil || s.WithdrawnAt == nil { - return Timestamp{} - } - return *s.WithdrawnAt -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisoryEvent) GetAction() string { - if s == nil || s.Action == nil { - return "" - } - return *s.Action -} - -// GetEnterprise returns the Enterprise field. -func (s *SecurityAdvisoryEvent) GetEnterprise() *Enterprise { - if s == nil { - return nil - } - return s.Enterprise -} - -// GetInstallation returns the Installation field. -func (s *SecurityAdvisoryEvent) GetInstallation() *Installation { - if s == nil { - return nil - } - return s.Installation -} - -// GetOrganization returns the Organization field. -func (s *SecurityAdvisoryEvent) GetOrganization() *Organization { - if s == nil { - return nil - } - return s.Organization -} - -// GetRepository returns the Repository field. -func (s *SecurityAdvisoryEvent) GetRepository() *Repository { - if s == nil { - return nil - } - return s.Repository -} - -// GetSecurityAdvisory returns the SecurityAdvisory field. -func (s *SecurityAdvisoryEvent) GetSecurityAdvisory() *SecurityAdvisory { - if s == nil { - return nil - } - return s.SecurityAdvisory -} - -// GetSender returns the Sender field. 
-func (s *SecurityAdvisoryEvent) GetSender() *User { - if s == nil { - return nil - } - return s.Sender -} - -// GetAccepted returns the Accepted field if it's non-nil, zero value otherwise. -func (s *SecurityAdvisorySubmission) GetAccepted() bool { - if s == nil || s.Accepted == nil { - return false - } - return *s.Accepted -} - -// GetAdvancedSecurity returns the AdvancedSecurity field. -func (s *SecurityAndAnalysis) GetAdvancedSecurity() *AdvancedSecurity { - if s == nil { - return nil - } - return s.AdvancedSecurity -} - -// GetDependabotSecurityUpdates returns the DependabotSecurityUpdates field. -func (s *SecurityAndAnalysis) GetDependabotSecurityUpdates() *DependabotSecurityUpdates { - if s == nil { - return nil - } - return s.DependabotSecurityUpdates -} - -// GetSecretScanning returns the SecretScanning field. -func (s *SecurityAndAnalysis) GetSecretScanning() *SecretScanning { - if s == nil { - return nil - } - return s.SecretScanning -} - -// GetSecretScanningPushProtection returns the SecretScanningPushProtection field. -func (s *SecurityAndAnalysis) GetSecretScanningPushProtection() *SecretScanningPushProtection { - if s == nil { - return nil - } - return s.SecretScanningPushProtection -} - -// GetFrom returns the From field. -func (s *SecurityAndAnalysisChange) GetFrom() *SecurityAndAnalysisChangeFrom { - if s == nil { - return nil - } - return s.From -} - -// GetSecurityAndAnalysis returns the SecurityAndAnalysis field. -func (s *SecurityAndAnalysisChangeFrom) GetSecurityAndAnalysis() *SecurityAndAnalysis { - if s == nil { - return nil - } - return s.SecurityAndAnalysis -} - -// GetChanges returns the Changes field. -func (s *SecurityAndAnalysisEvent) GetChanges() *SecurityAndAnalysisChange { - if s == nil { - return nil - } - return s.Changes -} - -// GetEnterprise returns the Enterprise field. -func (s *SecurityAndAnalysisEvent) GetEnterprise() *Enterprise { - if s == nil { - return nil - } - return s.Enterprise -} - -// GetInstallation returns the Installation field. -func (s *SecurityAndAnalysisEvent) GetInstallation() *Installation { - if s == nil { - return nil - } - return s.Installation -} - -// GetOrganization returns the Organization field. -func (s *SecurityAndAnalysisEvent) GetOrganization() *Organization { - if s == nil { - return nil - } - return s.Organization -} - -// GetRepository returns the Repository field. -func (s *SecurityAndAnalysisEvent) GetRepository() *Repository { - if s == nil { - return nil - } - return s.Repository -} - -// GetSender returns the Sender field. -func (s *SecurityAndAnalysisEvent) GetSender() *User { - if s == nil { - return nil - } - return s.Sender -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (s *SelectedReposList) GetTotalCount() int { - if s == nil || s.TotalCount == nil { - return 0 - } - return *s.TotalCount -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (s *SignatureRequirementEnforcementLevelChanges) GetFrom() string { - if s == nil || s.From == nil { - return "" - } - return *s.From -} - -// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. -func (s *SignaturesProtectedBranch) GetEnabled() bool { - if s == nil || s.Enabled == nil { - return false - } - return *s.Enabled -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (s *SignaturesProtectedBranch) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetPayload returns the Payload field if it's non-nil, zero value otherwise. -func (s *SignatureVerification) GetPayload() string { - if s == nil || s.Payload == nil { - return "" - } - return *s.Payload -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (s *SignatureVerification) GetReason() string { - if s == nil || s.Reason == nil { - return "" - } - return *s.Reason -} - -// GetSignature returns the Signature field if it's non-nil, zero value otherwise. -func (s *SignatureVerification) GetSignature() string { - if s == nil || s.Signature == nil { - return "" - } - return *s.Signature -} - -// GetVerified returns the Verified field if it's non-nil, zero value otherwise. -func (s *SignatureVerification) GetVerified() bool { - if s == nil || s.Verified == nil { - return false - } - return *s.Verified -} - -// GetActor returns the Actor field. -func (s *Source) GetActor() *User { - if s == nil { - return nil - } - return s.Actor -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *Source) GetID() int64 { - if s == nil || s.ID == nil { - return 0 - } - return *s.ID -} - -// GetIssue returns the Issue field. -func (s *Source) GetIssue() *Issue { - if s == nil { - return nil - } - return s.Issue -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (s *Source) GetType() string { - if s == nil || s.Type == nil { - return "" - } - return *s.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *Source) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetEmail() string { - if s == nil || s.Email == nil { - return "" - } - return *s.Email -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetID() int64 { - if s == nil || s.ID == nil { - return 0 - } - return *s.ID -} - -// GetImportURL returns the ImportURL field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetImportURL() string { - if s == nil || s.ImportURL == nil { - return "" - } - return *s.ImportURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetName() string { - if s == nil || s.Name == nil { - return "" - } - return *s.Name -} - -// GetRemoteID returns the RemoteID field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetRemoteID() string { - if s == nil || s.RemoteID == nil { - return "" - } - return *s.RemoteID -} - -// GetRemoteName returns the RemoteName field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetRemoteName() string { - if s == nil || s.RemoteName == nil { - return "" - } - return *s.RemoteName -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *SourceImportAuthor) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (s *SSHSigningKey) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. 
-func (s *SSHSigningKey) GetID() int64 { - if s == nil || s.ID == nil { - return 0 - } - return *s.ID -} - -// GetKey returns the Key field if it's non-nil, zero value otherwise. -func (s *SSHSigningKey) GetKey() string { - if s == nil || s.Key == nil { - return "" - } - return *s.Key -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (s *SSHSigningKey) GetTitle() string { - if s == nil || s.Title == nil { - return "" - } - return *s.Title -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (s *StarEvent) GetAction() string { - if s == nil || s.Action == nil { - return "" - } - return *s.Action -} - -// GetInstallation returns the Installation field. -func (s *StarEvent) GetInstallation() *Installation { - if s == nil { - return nil - } - return s.Installation -} - -// GetOrg returns the Org field. -func (s *StarEvent) GetOrg() *Organization { - if s == nil { - return nil - } - return s.Org -} - -// GetRepo returns the Repo field. -func (s *StarEvent) GetRepo() *Repository { - if s == nil { - return nil - } - return s.Repo -} - -// GetSender returns the Sender field. -func (s *StarEvent) GetSender() *User { - if s == nil { - return nil - } - return s.Sender -} - -// GetStarredAt returns the StarredAt field if it's non-nil, zero value otherwise. -func (s *StarEvent) GetStarredAt() Timestamp { - if s == nil || s.StarredAt == nil { - return Timestamp{} - } - return *s.StarredAt -} - -// GetStarredAt returns the StarredAt field if it's non-nil, zero value otherwise. -func (s *Stargazer) GetStarredAt() Timestamp { - if s == nil || s.StarredAt == nil { - return Timestamp{} - } - return *s.StarredAt -} - -// GetUser returns the User field. -func (s *Stargazer) GetUser() *User { - if s == nil { - return nil - } - return s.User -} - -// GetRepository returns the Repository field. -func (s *StarredRepository) GetRepository() *Repository { - if s == nil { - return nil - } - return s.Repository -} - -// GetStarredAt returns the StarredAt field if it's non-nil, zero value otherwise. -func (s *StarredRepository) GetStarredAt() Timestamp { - if s == nil || s.StarredAt == nil { - return Timestamp{} - } - return *s.StarredAt -} - -// GetCommit returns the Commit field. -func (s *StatusEvent) GetCommit() *RepositoryCommit { - if s == nil { - return nil - } - return s.Commit -} - -// GetContext returns the Context field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetContext() string { - if s == nil || s.Context == nil { - return "" - } - return *s.Context -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetDescription() string { - if s == nil || s.Description == nil { - return "" - } - return *s.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetID() int64 { - if s == nil || s.ID == nil { - return 0 - } - return *s.ID -} - -// GetInstallation returns the Installation field. -func (s *StatusEvent) GetInstallation() *Installation { - if s == nil { - return nil - } - return s.Installation -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (s *StatusEvent) GetName() string { - if s == nil || s.Name == nil { - return "" - } - return *s.Name -} - -// GetOrg returns the Org field. -func (s *StatusEvent) GetOrg() *Organization { - if s == nil { - return nil - } - return s.Org -} - -// GetRepo returns the Repo field. -func (s *StatusEvent) GetRepo() *Repository { - if s == nil { - return nil - } - return s.Repo -} - -// GetSender returns the Sender field. -func (s *StatusEvent) GetSender() *User { - if s == nil { - return nil - } - return s.Sender -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetSHA() string { - if s == nil || s.SHA == nil { - return "" - } - return *s.SHA -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetState() string { - if s == nil || s.State == nil { - return "" - } - return *s.State -} - -// GetTargetURL returns the TargetURL field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetTargetURL() string { - if s == nil || s.TargetURL == nil { - return "" - } - return *s.TargetURL -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (s *StatusEvent) GetUpdatedAt() Timestamp { - if s == nil || s.UpdatedAt == nil { - return Timestamp{} - } - return *s.UpdatedAt -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (s *Subscription) GetCreatedAt() Timestamp { - if s == nil || s.CreatedAt == nil { - return Timestamp{} - } - return *s.CreatedAt -} - -// GetIgnored returns the Ignored field if it's non-nil, zero value otherwise. -func (s *Subscription) GetIgnored() bool { - if s == nil || s.Ignored == nil { - return false - } - return *s.Ignored -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (s *Subscription) GetReason() string { - if s == nil || s.Reason == nil { - return "" - } - return *s.Reason -} - -// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. -func (s *Subscription) GetRepositoryURL() string { - if s == nil || s.RepositoryURL == nil { - return "" - } - return *s.RepositoryURL -} - -// GetSubscribed returns the Subscribed field if it's non-nil, zero value otherwise. -func (s *Subscription) GetSubscribed() bool { - if s == nil || s.Subscribed == nil { - return false - } - return *s.Subscribed -} - -// GetThreadURL returns the ThreadURL field if it's non-nil, zero value otherwise. -func (s *Subscription) GetThreadURL() string { - if s == nil || s.ThreadURL == nil { - return "" - } - return *s.ThreadURL -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (s *Subscription) GetURL() string { - if s == nil || s.URL == nil { - return "" - } - return *s.URL -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (t *Tag) GetMessage() string { - if t == nil || t.Message == nil { - return "" - } - return *t.Message -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (t *Tag) GetNodeID() string { - if t == nil || t.NodeID == nil { - return "" - } - return *t.NodeID -} - -// GetObject returns the Object field. -func (t *Tag) GetObject() *GitObject { - if t == nil { - return nil - } - return t.Object -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. 
-func (t *Tag) GetSHA() string { - if t == nil || t.SHA == nil { - return "" - } - return *t.SHA -} - -// GetTag returns the Tag field if it's non-nil, zero value otherwise. -func (t *Tag) GetTag() string { - if t == nil || t.Tag == nil { - return "" - } - return *t.Tag -} - -// GetTagger returns the Tagger field. -func (t *Tag) GetTagger() *CommitAuthor { - if t == nil { - return nil - } - return t.Tagger -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (t *Tag) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetVerification returns the Verification field. -func (t *Tag) GetVerification() *SignatureVerification { - if t == nil { - return nil - } - return t.Verification -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (t *TagProtection) GetID() int64 { - if t == nil || t.ID == nil { - return 0 - } - return *t.ID -} - -// GetPattern returns the Pattern field if it's non-nil, zero value otherwise. -func (t *TagProtection) GetPattern() string { - if t == nil || t.Pattern == nil { - return "" - } - return *t.Pattern -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. -func (t *TaskStep) GetCompletedAt() Timestamp { - if t == nil || t.CompletedAt == nil { - return Timestamp{} - } - return *t.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (t *TaskStep) GetConclusion() string { - if t == nil || t.Conclusion == nil { - return "" - } - return *t.Conclusion -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *TaskStep) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (t *TaskStep) GetNumber() int64 { - if t == nil || t.Number == nil { - return 0 - } - return *t.Number -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. -func (t *TaskStep) GetStartedAt() Timestamp { - if t == nil || t.StartedAt == nil { - return Timestamp{} - } - return *t.StartedAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (t *TaskStep) GetStatus() string { - if t == nil || t.Status == nil { - return "" - } - return *t.Status -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (t *Team) GetDescription() string { - if t == nil || t.Description == nil { - return "" - } - return *t.Description -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (t *Team) GetHTMLURL() string { - if t == nil || t.HTMLURL == nil { - return "" - } - return *t.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (t *Team) GetID() int64 { - if t == nil || t.ID == nil { - return 0 - } - return *t.ID -} - -// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. -func (t *Team) GetLDAPDN() string { - if t == nil || t.LDAPDN == nil { - return "" - } - return *t.LDAPDN -} - -// GetMembersCount returns the MembersCount field if it's non-nil, zero value otherwise. -func (t *Team) GetMembersCount() int { - if t == nil || t.MembersCount == nil { - return 0 - } - return *t.MembersCount -} - -// GetMembersURL returns the MembersURL field if it's non-nil, zero value otherwise. 
-func (t *Team) GetMembersURL() string { - if t == nil || t.MembersURL == nil { - return "" - } - return *t.MembersURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *Team) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (t *Team) GetNodeID() string { - if t == nil || t.NodeID == nil { - return "" - } - return *t.NodeID -} - -// GetOrganization returns the Organization field. -func (t *Team) GetOrganization() *Organization { - if t == nil { - return nil - } - return t.Organization -} - -// GetParent returns the Parent field. -func (t *Team) GetParent() *Team { - if t == nil { - return nil - } - return t.Parent -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (t *Team) GetPermission() string { - if t == nil || t.Permission == nil { - return "" - } - return *t.Permission -} - -// GetPermissions returns the Permissions map if it's non-nil, an empty map otherwise. -func (t *Team) GetPermissions() map[string]bool { - if t == nil || t.Permissions == nil { - return map[string]bool{} - } - return t.Permissions -} - -// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. -func (t *Team) GetPrivacy() string { - if t == nil || t.Privacy == nil { - return "" - } - return *t.Privacy -} - -// GetReposCount returns the ReposCount field if it's non-nil, zero value otherwise. -func (t *Team) GetReposCount() int { - if t == nil || t.ReposCount == nil { - return 0 - } - return *t.ReposCount -} - -// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. -func (t *Team) GetRepositoriesURL() string { - if t == nil || t.RepositoriesURL == nil { - return "" - } - return *t.RepositoriesURL -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (t *Team) GetSlug() string { - if t == nil || t.Slug == nil { - return "" - } - return *t.Slug -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (t *Team) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetInstallation returns the Installation field. -func (t *TeamAddEvent) GetInstallation() *Installation { - if t == nil { - return nil - } - return t.Installation -} - -// GetOrg returns the Org field. -func (t *TeamAddEvent) GetOrg() *Organization { - if t == nil { - return nil - } - return t.Org -} - -// GetRepo returns the Repo field. -func (t *TeamAddEvent) GetRepo() *Repository { - if t == nil { - return nil - } - return t.Repo -} - -// GetSender returns the Sender field. -func (t *TeamAddEvent) GetSender() *User { - if t == nil { - return nil - } - return t.Sender -} - -// GetTeam returns the Team field. -func (t *TeamAddEvent) GetTeam() *Team { - if t == nil { - return nil - } - return t.Team -} - -// GetDescription returns the Description field. -func (t *TeamChange) GetDescription() *TeamDescription { - if t == nil { - return nil - } - return t.Description -} - -// GetName returns the Name field. -func (t *TeamChange) GetName() *TeamName { - if t == nil { - return nil - } - return t.Name -} - -// GetPrivacy returns the Privacy field. -func (t *TeamChange) GetPrivacy() *TeamPrivacy { - if t == nil { - return nil - } - return t.Privacy -} - -// GetRepository returns the Repository field. 
-func (t *TeamChange) GetRepository() *TeamRepository { - if t == nil { - return nil - } - return t.Repository -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (t *TeamDescription) GetFrom() string { - if t == nil || t.From == nil { - return "" - } - return *t.From -} - -// GetAuthor returns the Author field. -func (t *TeamDiscussion) GetAuthor() *User { - if t == nil { - return nil - } - return t.Author -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetBody() string { - if t == nil || t.Body == nil { - return "" - } - return *t.Body -} - -// GetBodyHTML returns the BodyHTML field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetBodyHTML() string { - if t == nil || t.BodyHTML == nil { - return "" - } - return *t.BodyHTML -} - -// GetBodyVersion returns the BodyVersion field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetBodyVersion() string { - if t == nil || t.BodyVersion == nil { - return "" - } - return *t.BodyVersion -} - -// GetCommentsCount returns the CommentsCount field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetCommentsCount() int { - if t == nil || t.CommentsCount == nil { - return 0 - } - return *t.CommentsCount -} - -// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetCommentsURL() string { - if t == nil || t.CommentsURL == nil { - return "" - } - return *t.CommentsURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetCreatedAt() Timestamp { - if t == nil || t.CreatedAt == nil { - return Timestamp{} - } - return *t.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetHTMLURL() string { - if t == nil || t.HTMLURL == nil { - return "" - } - return *t.HTMLURL -} - -// GetLastEditedAt returns the LastEditedAt field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetLastEditedAt() Timestamp { - if t == nil || t.LastEditedAt == nil { - return Timestamp{} - } - return *t.LastEditedAt -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetNodeID() string { - if t == nil || t.NodeID == nil { - return "" - } - return *t.NodeID -} - -// GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetNumber() int { - if t == nil || t.Number == nil { - return 0 - } - return *t.Number -} - -// GetPinned returns the Pinned field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetPinned() bool { - if t == nil || t.Pinned == nil { - return false - } - return *t.Pinned -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetPrivate() bool { - if t == nil || t.Private == nil { - return false - } - return *t.Private -} - -// GetReactions returns the Reactions field. -func (t *TeamDiscussion) GetReactions() *Reactions { - if t == nil { - return nil - } - return t.Reactions -} - -// GetTeamURL returns the TeamURL field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetTeamURL() string { - if t == nil || t.TeamURL == nil { - return "" - } - return *t.TeamURL -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. 
-func (t *TeamDiscussion) GetTitle() string { - if t == nil || t.Title == nil { - return "" - } - return *t.Title -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetUpdatedAt() Timestamp { - if t == nil || t.UpdatedAt == nil { - return Timestamp{} - } - return *t.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (t *TeamEvent) GetAction() string { - if t == nil || t.Action == nil { - return "" - } - return *t.Action -} - -// GetChanges returns the Changes field. -func (t *TeamEvent) GetChanges() *TeamChange { - if t == nil { - return nil - } - return t.Changes -} - -// GetInstallation returns the Installation field. -func (t *TeamEvent) GetInstallation() *Installation { - if t == nil { - return nil - } - return t.Installation -} - -// GetOrg returns the Org field. -func (t *TeamEvent) GetOrg() *Organization { - if t == nil { - return nil - } - return t.Org -} - -// GetRepo returns the Repo field. -func (t *TeamEvent) GetRepo() *Repository { - if t == nil { - return nil - } - return t.Repo -} - -// GetSender returns the Sender field. -func (t *TeamEvent) GetSender() *User { - if t == nil { - return nil - } - return t.Sender -} - -// GetTeam returns the Team field. -func (t *TeamEvent) GetTeam() *Team { - if t == nil { - return nil - } - return t.Team -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetDescription() string { - if t == nil || t.Description == nil { - return "" - } - return *t.Description -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetID() int64 { - if t == nil || t.ID == nil { - return 0 - } - return *t.ID -} - -// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetLDAPDN() string { - if t == nil || t.LDAPDN == nil { - return "" - } - return *t.LDAPDN -} - -// GetMembersURL returns the MembersURL field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetMembersURL() string { - if t == nil || t.MembersURL == nil { - return "" - } - return *t.MembersURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetPermission() string { - if t == nil || t.Permission == nil { - return "" - } - return *t.Permission -} - -// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetPrivacy() string { - if t == nil || t.Privacy == nil { - return "" - } - return *t.Privacy -} - -// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetRepositoriesURL() string { - if t == nil || t.RepositoriesURL == nil { - return "" - } - return *t.RepositoriesURL -} - -// GetSlug returns the Slug field if it's non-nil, zero value otherwise. -func (t *TeamLDAPMapping) GetSlug() string { - if t == nil || t.Slug == nil { - return "" - } - return *t.Slug -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (t *TeamLDAPMapping) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (t *TeamName) GetFrom() string { - if t == nil || t.From == nil { - return "" - } - return *t.From -} - -// GetFrom returns the From field. -func (t *TeamPermissions) GetFrom() *TeamPermissionsFrom { - if t == nil { - return nil - } - return t.From -} - -// GetAdmin returns the Admin field if it's non-nil, zero value otherwise. -func (t *TeamPermissionsFrom) GetAdmin() bool { - if t == nil || t.Admin == nil { - return false - } - return *t.Admin -} - -// GetPull returns the Pull field if it's non-nil, zero value otherwise. -func (t *TeamPermissionsFrom) GetPull() bool { - if t == nil || t.Pull == nil { - return false - } - return *t.Pull -} - -// GetPush returns the Push field if it's non-nil, zero value otherwise. -func (t *TeamPermissionsFrom) GetPush() bool { - if t == nil || t.Push == nil { - return false - } - return *t.Push -} - -// GetFrom returns the From field if it's non-nil, zero value otherwise. -func (t *TeamPrivacy) GetFrom() string { - if t == nil || t.From == nil { - return "" - } - return *t.From -} - -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (t *TeamProjectOptions) GetPermission() string { - if t == nil || t.Permission == nil { - return "" - } - return *t.Permission -} - -// GetPermissions returns the Permissions field. -func (t *TeamRepository) GetPermissions() *TeamPermissions { - if t == nil { - return nil - } - return t.Permissions -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (t *TemplateRepoRequest) GetDescription() string { - if t == nil || t.Description == nil { - return "" - } - return *t.Description -} - -// GetIncludeAllBranches returns the IncludeAllBranches field if it's non-nil, zero value otherwise. -func (t *TemplateRepoRequest) GetIncludeAllBranches() bool { - if t == nil || t.IncludeAllBranches == nil { - return false - } - return *t.IncludeAllBranches -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *TemplateRepoRequest) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetOwner returns the Owner field if it's non-nil, zero value otherwise. -func (t *TemplateRepoRequest) GetOwner() string { - if t == nil || t.Owner == nil { - return "" - } - return *t.Owner -} - -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (t *TemplateRepoRequest) GetPrivate() bool { - if t == nil || t.Private == nil { - return false - } - return *t.Private -} - -// GetFragment returns the Fragment field if it's non-nil, zero value otherwise. -func (t *TextMatch) GetFragment() string { - if t == nil || t.Fragment == nil { - return "" - } - return *t.Fragment -} - -// GetObjectType returns the ObjectType field if it's non-nil, zero value otherwise. -func (t *TextMatch) GetObjectType() string { - if t == nil || t.ObjectType == nil { - return "" - } - return *t.ObjectType -} - -// GetObjectURL returns the ObjectURL field if it's non-nil, zero value otherwise. -func (t *TextMatch) GetObjectURL() string { - if t == nil || t.ObjectURL == nil { - return "" - } - return *t.ObjectURL -} - -// GetProperty returns the Property field if it's non-nil, zero value otherwise. 
-func (t *TextMatch) GetProperty() string { - if t == nil || t.Property == nil { - return "" - } - return *t.Property -} - -// GetActor returns the Actor field. -func (t *Timeline) GetActor() *User { - if t == nil { - return nil - } - return t.Actor -} - -// GetAssignee returns the Assignee field. -func (t *Timeline) GetAssignee() *User { - if t == nil { - return nil - } - return t.Assignee -} - -// GetAssigner returns the Assigner field. -func (t *Timeline) GetAssigner() *User { - if t == nil { - return nil - } - return t.Assigner -} - -// GetAuthor returns the Author field. -func (t *Timeline) GetAuthor() *CommitAuthor { - if t == nil { - return nil - } - return t.Author -} - -// GetBody returns the Body field if it's non-nil, zero value otherwise. -func (t *Timeline) GetBody() string { - if t == nil || t.Body == nil { - return "" - } - return *t.Body -} - -// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. -func (t *Timeline) GetCommitID() string { - if t == nil || t.CommitID == nil { - return "" - } - return *t.CommitID -} - -// GetCommitter returns the Committer field. -func (t *Timeline) GetCommitter() *CommitAuthor { - if t == nil { - return nil - } - return t.Committer -} - -// GetCommitURL returns the CommitURL field if it's non-nil, zero value otherwise. -func (t *Timeline) GetCommitURL() string { - if t == nil || t.CommitURL == nil { - return "" - } - return *t.CommitURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (t *Timeline) GetCreatedAt() Timestamp { - if t == nil || t.CreatedAt == nil { - return Timestamp{} - } - return *t.CreatedAt -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (t *Timeline) GetEvent() string { - if t == nil || t.Event == nil { - return "" - } - return *t.Event -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (t *Timeline) GetID() int64 { - if t == nil || t.ID == nil { - return 0 - } - return *t.ID -} - -// GetLabel returns the Label field. -func (t *Timeline) GetLabel() *Label { - if t == nil { - return nil - } - return t.Label -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (t *Timeline) GetMessage() string { - if t == nil || t.Message == nil { - return "" - } - return *t.Message -} - -// GetMilestone returns the Milestone field. -func (t *Timeline) GetMilestone() *Milestone { - if t == nil { - return nil - } - return t.Milestone -} - -// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. -func (t *Timeline) GetPerformedViaGithubApp() *App { - if t == nil { - return nil - } - return t.PerformedViaGithubApp -} - -// GetProjectCard returns the ProjectCard field. -func (t *Timeline) GetProjectCard() *ProjectCard { - if t == nil { - return nil - } - return t.ProjectCard -} - -// GetRename returns the Rename field. -func (t *Timeline) GetRename() *Rename { - if t == nil { - return nil - } - return t.Rename -} - -// GetRequestedTeam returns the RequestedTeam field. -func (t *Timeline) GetRequestedTeam() *Team { - if t == nil { - return nil - } - return t.RequestedTeam -} - -// GetRequester returns the Requester field. -func (t *Timeline) GetRequester() *User { - if t == nil { - return nil - } - return t.Requester -} - -// GetReviewer returns the Reviewer field. -func (t *Timeline) GetReviewer() *User { - if t == nil { - return nil - } - return t.Reviewer -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. 
-func (t *Timeline) GetSHA() string { - if t == nil || t.SHA == nil { - return "" - } - return *t.SHA -} - -// GetSource returns the Source field. -func (t *Timeline) GetSource() *Source { - if t == nil { - return nil - } - return t.Source -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (t *Timeline) GetState() string { - if t == nil || t.State == nil { - return "" - } - return *t.State -} - -// GetSubmittedAt returns the SubmittedAt field if it's non-nil, zero value otherwise. -func (t *Timeline) GetSubmittedAt() Timestamp { - if t == nil || t.SubmittedAt == nil { - return Timestamp{} - } - return *t.SubmittedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (t *Timeline) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetUser returns the User field. -func (t *Timeline) GetUser() *User { - if t == nil { - return nil - } - return t.User -} - -// GetGUID returns the GUID field if it's non-nil, zero value otherwise. -func (t *Tool) GetGUID() string { - if t == nil || t.GUID == nil { - return "" - } - return *t.GUID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *Tool) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetVersion returns the Version field if it's non-nil, zero value otherwise. -func (t *Tool) GetVersion() string { - if t == nil || t.Version == nil { - return "" - } - return *t.Version -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetCreatedAt() Timestamp { - if t == nil || t.CreatedAt == nil { - return Timestamp{} - } - return *t.CreatedAt -} - -// GetCreatedBy returns the CreatedBy field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetCreatedBy() string { - if t == nil || t.CreatedBy == nil { - return "" - } - return *t.CreatedBy -} - -// GetCurated returns the Curated field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetCurated() bool { - if t == nil || t.Curated == nil { - return false - } - return *t.Curated -} - -// GetDescription returns the Description field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetDescription() string { - if t == nil || t.Description == nil { - return "" - } - return *t.Description -} - -// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetDisplayName() string { - if t == nil || t.DisplayName == nil { - return "" - } - return *t.DisplayName -} - -// GetFeatured returns the Featured field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetFeatured() bool { - if t == nil || t.Featured == nil { - return false - } - return *t.Featured -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetName() string { - if t == nil || t.Name == nil { - return "" - } - return *t.Name -} - -// GetScore returns the Score field. -func (t *TopicResult) GetScore() *float64 { - if t == nil { - return nil - } - return t.Score -} - -// GetShortDescription returns the ShortDescription field if it's non-nil, zero value otherwise. -func (t *TopicResult) GetShortDescription() string { - if t == nil || t.ShortDescription == nil { - return "" - } - return *t.ShortDescription -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
-func (t *TopicResult) GetUpdatedAt() string { - if t == nil || t.UpdatedAt == nil { - return "" - } - return *t.UpdatedAt -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (t *TopicsSearchResult) GetIncompleteResults() bool { - if t == nil || t.IncompleteResults == nil { - return false - } - return *t.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (t *TopicsSearchResult) GetTotal() int { - if t == nil || t.Total == nil { - return 0 - } - return *t.Total -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. -func (t *TrafficClones) GetCount() int { - if t == nil || t.Count == nil { - return 0 - } - return *t.Count -} - -// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. -func (t *TrafficClones) GetUniques() int { - if t == nil || t.Uniques == nil { - return 0 - } - return *t.Uniques -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. -func (t *TrafficData) GetCount() int { - if t == nil || t.Count == nil { - return 0 - } - return *t.Count -} - -// GetTimestamp returns the Timestamp field if it's non-nil, zero value otherwise. -func (t *TrafficData) GetTimestamp() Timestamp { - if t == nil || t.Timestamp == nil { - return Timestamp{} - } - return *t.Timestamp -} - -// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. -func (t *TrafficData) GetUniques() int { - if t == nil || t.Uniques == nil { - return 0 - } - return *t.Uniques -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. -func (t *TrafficPath) GetCount() int { - if t == nil || t.Count == nil { - return 0 - } - return *t.Count -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (t *TrafficPath) GetPath() string { - if t == nil || t.Path == nil { - return "" - } - return *t.Path -} - -// GetTitle returns the Title field if it's non-nil, zero value otherwise. -func (t *TrafficPath) GetTitle() string { - if t == nil || t.Title == nil { - return "" - } - return *t.Title -} - -// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. -func (t *TrafficPath) GetUniques() int { - if t == nil || t.Uniques == nil { - return 0 - } - return *t.Uniques -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. -func (t *TrafficReferrer) GetCount() int { - if t == nil || t.Count == nil { - return 0 - } - return *t.Count -} - -// GetReferrer returns the Referrer field if it's non-nil, zero value otherwise. -func (t *TrafficReferrer) GetReferrer() string { - if t == nil || t.Referrer == nil { - return "" - } - return *t.Referrer -} - -// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. -func (t *TrafficReferrer) GetUniques() int { - if t == nil || t.Uniques == nil { - return 0 - } - return *t.Uniques -} - -// GetCount returns the Count field if it's non-nil, zero value otherwise. -func (t *TrafficViews) GetCount() int { - if t == nil || t.Count == nil { - return 0 - } - return *t.Count -} - -// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. -func (t *TrafficViews) GetUniques() int { - if t == nil || t.Uniques == nil { - return 0 - } - return *t.Uniques -} - -// GetNewName returns the NewName field if it's non-nil, zero value otherwise. 
-func (t *TransferRequest) GetNewName() string { - if t == nil || t.NewName == nil { - return "" - } - return *t.NewName -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (t *Tree) GetSHA() string { - if t == nil || t.SHA == nil { - return "" - } - return *t.SHA -} - -// GetTruncated returns the Truncated field if it's non-nil, zero value otherwise. -func (t *Tree) GetTruncated() bool { - if t == nil || t.Truncated == nil { - return false - } - return *t.Truncated -} - -// GetContent returns the Content field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetContent() string { - if t == nil || t.Content == nil { - return "" - } - return *t.Content -} - -// GetMode returns the Mode field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetMode() string { - if t == nil || t.Mode == nil { - return "" - } - return *t.Mode -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetPath() string { - if t == nil || t.Path == nil { - return "" - } - return *t.Path -} - -// GetSHA returns the SHA field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetSHA() string { - if t == nil || t.SHA == nil { - return "" - } - return *t.SHA -} - -// GetSize returns the Size field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetSize() int { - if t == nil || t.Size == nil { - return 0 - } - return *t.Size -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetType() string { - if t == nil || t.Type == nil { - return "" - } - return *t.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (t *TreeEntry) GetURL() string { - if t == nil || t.URL == nil { - return "" - } - return *t.URL -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (u *UpdateAttributeForSCIMUserOperations) GetPath() string { - if u == nil || u.Path == nil { - return "" - } - return *u.Path -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. -func (u *UpdateCheckRunOptions) GetCompletedAt() Timestamp { - if u == nil || u.CompletedAt == nil { - return Timestamp{} - } - return *u.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (u *UpdateCheckRunOptions) GetConclusion() string { - if u == nil || u.Conclusion == nil { - return "" - } - return *u.Conclusion -} - -// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. -func (u *UpdateCheckRunOptions) GetDetailsURL() string { - if u == nil || u.DetailsURL == nil { - return "" - } - return *u.DetailsURL -} - -// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. -func (u *UpdateCheckRunOptions) GetExternalID() string { - if u == nil || u.ExternalID == nil { - return "" - } - return *u.ExternalID -} - -// GetOutput returns the Output field. -func (u *UpdateCheckRunOptions) GetOutput() *CheckRunOutput { - if u == nil { - return nil - } - return u.Output -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (u *UpdateCheckRunOptions) GetStatus() string { - if u == nil || u.Status == nil { - return "" - } - return *u.Status -} - -// GetQuerySuite returns the QuerySuite field if it's non-nil, zero value otherwise. 
-func (u *UpdateDefaultSetupConfigurationOptions) GetQuerySuite() string { - if u == nil || u.QuerySuite == nil { - return "" - } - return *u.QuerySuite -} - -// GetRunID returns the RunID field if it's non-nil, zero value otherwise. -func (u *UpdateDefaultSetupConfigurationResponse) GetRunID() int64 { - if u == nil || u.RunID == nil { - return 0 - } - return *u.RunID -} - -// GetRunURL returns the RunURL field if it's non-nil, zero value otherwise. -func (u *UpdateDefaultSetupConfigurationResponse) GetRunURL() string { - if u == nil || u.RunURL == nil { - return "" - } - return *u.RunURL -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. -func (u *UpdateEnterpriseRunnerGroupRequest) GetAllowsPublicRepositories() bool { - if u == nil || u.AllowsPublicRepositories == nil { - return false - } - return *u.AllowsPublicRepositories -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (u *UpdateEnterpriseRunnerGroupRequest) GetName() string { - if u == nil || u.Name == nil { - return "" - } - return *u.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (u *UpdateEnterpriseRunnerGroupRequest) GetRestrictedToWorkflows() bool { - if u == nil || u.RestrictedToWorkflows == nil { - return false - } - return *u.RestrictedToWorkflows -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (u *UpdateEnterpriseRunnerGroupRequest) GetVisibility() string { - if u == nil || u.Visibility == nil { - return "" - } - return *u.Visibility -} - -// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. -func (u *UpdateRunnerGroupRequest) GetAllowsPublicRepositories() bool { - if u == nil || u.AllowsPublicRepositories == nil { - return false - } - return *u.AllowsPublicRepositories -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (u *UpdateRunnerGroupRequest) GetName() string { - if u == nil || u.Name == nil { - return "" - } - return *u.Name -} - -// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. -func (u *UpdateRunnerGroupRequest) GetRestrictedToWorkflows() bool { - if u == nil || u.RestrictedToWorkflows == nil { - return false - } - return *u.RestrictedToWorkflows -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (u *UpdateRunnerGroupRequest) GetVisibility() string { - if u == nil || u.Visibility == nil { - return "" - } - return *u.Visibility -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (u *User) GetAvatarURL() string { - if u == nil || u.AvatarURL == nil { - return "" - } - return *u.AvatarURL -} - -// GetBio returns the Bio field if it's non-nil, zero value otherwise. -func (u *User) GetBio() string { - if u == nil || u.Bio == nil { - return "" - } - return *u.Bio -} - -// GetBlog returns the Blog field if it's non-nil, zero value otherwise. -func (u *User) GetBlog() string { - if u == nil || u.Blog == nil { - return "" - } - return *u.Blog -} - -// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. -func (u *User) GetCollaborators() int { - if u == nil || u.Collaborators == nil { - return 0 - } - return *u.Collaborators -} - -// GetCompany returns the Company field if it's non-nil, zero value otherwise. 
-func (u *User) GetCompany() string { - if u == nil || u.Company == nil { - return "" - } - return *u.Company -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (u *User) GetCreatedAt() Timestamp { - if u == nil || u.CreatedAt == nil { - return Timestamp{} - } - return *u.CreatedAt -} - -// GetDiskUsage returns the DiskUsage field if it's non-nil, zero value otherwise. -func (u *User) GetDiskUsage() int { - if u == nil || u.DiskUsage == nil { - return 0 - } - return *u.DiskUsage -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (u *User) GetEmail() string { - if u == nil || u.Email == nil { - return "" - } - return *u.Email -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (u *User) GetEventsURL() string { - if u == nil || u.EventsURL == nil { - return "" - } - return *u.EventsURL -} - -// GetFollowers returns the Followers field if it's non-nil, zero value otherwise. -func (u *User) GetFollowers() int { - if u == nil || u.Followers == nil { - return 0 - } - return *u.Followers -} - -// GetFollowersURL returns the FollowersURL field if it's non-nil, zero value otherwise. -func (u *User) GetFollowersURL() string { - if u == nil || u.FollowersURL == nil { - return "" - } - return *u.FollowersURL -} - -// GetFollowing returns the Following field if it's non-nil, zero value otherwise. -func (u *User) GetFollowing() int { - if u == nil || u.Following == nil { - return 0 - } - return *u.Following -} - -// GetFollowingURL returns the FollowingURL field if it's non-nil, zero value otherwise. -func (u *User) GetFollowingURL() string { - if u == nil || u.FollowingURL == nil { - return "" - } - return *u.FollowingURL -} - -// GetGistsURL returns the GistsURL field if it's non-nil, zero value otherwise. -func (u *User) GetGistsURL() string { - if u == nil || u.GistsURL == nil { - return "" - } - return *u.GistsURL -} - -// GetGravatarID returns the GravatarID field if it's non-nil, zero value otherwise. -func (u *User) GetGravatarID() string { - if u == nil || u.GravatarID == nil { - return "" - } - return *u.GravatarID -} - -// GetHireable returns the Hireable field if it's non-nil, zero value otherwise. -func (u *User) GetHireable() bool { - if u == nil || u.Hireable == nil { - return false - } - return *u.Hireable -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (u *User) GetHTMLURL() string { - if u == nil || u.HTMLURL == nil { - return "" - } - return *u.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (u *User) GetID() int64 { - if u == nil || u.ID == nil { - return 0 - } - return *u.ID -} - -// GetLdapDn returns the LdapDn field if it's non-nil, zero value otherwise. -func (u *User) GetLdapDn() string { - if u == nil || u.LdapDn == nil { - return "" - } - return *u.LdapDn -} - -// GetLocation returns the Location field if it's non-nil, zero value otherwise. -func (u *User) GetLocation() string { - if u == nil || u.Location == nil { - return "" - } - return *u.Location -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (u *User) GetLogin() string { - if u == nil || u.Login == nil { - return "" - } - return *u.Login -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. 
-func (u *User) GetName() string { - if u == nil || u.Name == nil { - return "" - } - return *u.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (u *User) GetNodeID() string { - if u == nil || u.NodeID == nil { - return "" - } - return *u.NodeID -} - -// GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. -func (u *User) GetOrganizationsURL() string { - if u == nil || u.OrganizationsURL == nil { - return "" - } - return *u.OrganizationsURL -} - -// GetOwnedPrivateRepos returns the OwnedPrivateRepos field if it's non-nil, zero value otherwise. -func (u *User) GetOwnedPrivateRepos() int64 { - if u == nil || u.OwnedPrivateRepos == nil { - return 0 - } - return *u.OwnedPrivateRepos -} - -// GetPermissions returns the Permissions map if it's non-nil, an empty map otherwise. -func (u *User) GetPermissions() map[string]bool { - if u == nil || u.Permissions == nil { - return map[string]bool{} - } - return u.Permissions -} - -// GetPlan returns the Plan field. -func (u *User) GetPlan() *Plan { - if u == nil { - return nil - } - return u.Plan -} - -// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. -func (u *User) GetPrivateGists() int { - if u == nil || u.PrivateGists == nil { - return 0 - } - return *u.PrivateGists -} - -// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. -func (u *User) GetPublicGists() int { - if u == nil || u.PublicGists == nil { - return 0 - } - return *u.PublicGists -} - -// GetPublicRepos returns the PublicRepos field if it's non-nil, zero value otherwise. -func (u *User) GetPublicRepos() int { - if u == nil || u.PublicRepos == nil { - return 0 - } - return *u.PublicRepos -} - -// GetReceivedEventsURL returns the ReceivedEventsURL field if it's non-nil, zero value otherwise. -func (u *User) GetReceivedEventsURL() string { - if u == nil || u.ReceivedEventsURL == nil { - return "" - } - return *u.ReceivedEventsURL -} - -// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. -func (u *User) GetReposURL() string { - if u == nil || u.ReposURL == nil { - return "" - } - return *u.ReposURL -} - -// GetRoleName returns the RoleName field if it's non-nil, zero value otherwise. -func (u *User) GetRoleName() string { - if u == nil || u.RoleName == nil { - return "" - } - return *u.RoleName -} - -// GetSiteAdmin returns the SiteAdmin field if it's non-nil, zero value otherwise. -func (u *User) GetSiteAdmin() bool { - if u == nil || u.SiteAdmin == nil { - return false - } - return *u.SiteAdmin -} - -// GetStarredURL returns the StarredURL field if it's non-nil, zero value otherwise. -func (u *User) GetStarredURL() string { - if u == nil || u.StarredURL == nil { - return "" - } - return *u.StarredURL -} - -// GetSubscriptionsURL returns the SubscriptionsURL field if it's non-nil, zero value otherwise. -func (u *User) GetSubscriptionsURL() string { - if u == nil || u.SubscriptionsURL == nil { - return "" - } - return *u.SubscriptionsURL -} - -// GetSuspendedAt returns the SuspendedAt field if it's non-nil, zero value otherwise. -func (u *User) GetSuspendedAt() Timestamp { - if u == nil || u.SuspendedAt == nil { - return Timestamp{} - } - return *u.SuspendedAt -} - -// GetTotalPrivateRepos returns the TotalPrivateRepos field if it's non-nil, zero value otherwise. 
-func (u *User) GetTotalPrivateRepos() int64 { - if u == nil || u.TotalPrivateRepos == nil { - return 0 - } - return *u.TotalPrivateRepos -} - -// GetTwitterUsername returns the TwitterUsername field if it's non-nil, zero value otherwise. -func (u *User) GetTwitterUsername() string { - if u == nil || u.TwitterUsername == nil { - return "" - } - return *u.TwitterUsername -} - -// GetTwoFactorAuthentication returns the TwoFactorAuthentication field if it's non-nil, zero value otherwise. -func (u *User) GetTwoFactorAuthentication() bool { - if u == nil || u.TwoFactorAuthentication == nil { - return false - } - return *u.TwoFactorAuthentication -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (u *User) GetType() string { - if u == nil || u.Type == nil { - return "" - } - return *u.Type -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (u *User) GetUpdatedAt() Timestamp { - if u == nil || u.UpdatedAt == nil { - return Timestamp{} - } - return *u.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (u *User) GetURL() string { - if u == nil || u.URL == nil { - return "" - } - return *u.URL -} - -// GetApp returns the App field. -func (u *UserAuthorization) GetApp() *OAuthAPP { - if u == nil { - return nil - } - return u.App -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetCreatedAt() Timestamp { - if u == nil || u.CreatedAt == nil { - return Timestamp{} - } - return *u.CreatedAt -} - -// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetFingerprint() string { - if u == nil || u.Fingerprint == nil { - return "" - } - return *u.Fingerprint -} - -// GetHashedToken returns the HashedToken field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetHashedToken() string { - if u == nil || u.HashedToken == nil { - return "" - } - return *u.HashedToken -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetID() int64 { - if u == nil || u.ID == nil { - return 0 - } - return *u.ID -} - -// GetNote returns the Note field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetNote() string { - if u == nil || u.Note == nil { - return "" - } - return *u.Note -} - -// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetNoteURL() string { - if u == nil || u.NoteURL == nil { - return "" - } - return *u.NoteURL -} - -// GetToken returns the Token field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetToken() string { - if u == nil || u.Token == nil { - return "" - } - return *u.Token -} - -// GetTokenLastEight returns the TokenLastEight field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetTokenLastEight() string { - if u == nil || u.TokenLastEight == nil { - return "" - } - return *u.TokenLastEight -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (u *UserAuthorization) GetUpdatedAt() Timestamp { - if u == nil || u.UpdatedAt == nil { - return Timestamp{} - } - return *u.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. 
-func (u *UserAuthorization) GetURL() string { - if u == nil || u.URL == nil { - return "" - } - return *u.URL -} - -// GetMessage returns the Message field if it's non-nil, zero value otherwise. -func (u *UserContext) GetMessage() string { - if u == nil || u.Message == nil { - return "" - } - return *u.Message -} - -// GetOcticon returns the Octicon field if it's non-nil, zero value otherwise. -func (u *UserContext) GetOcticon() string { - if u == nil || u.Octicon == nil { - return "" - } - return *u.Octicon -} - -// GetEmail returns the Email field if it's non-nil, zero value otherwise. -func (u *UserEmail) GetEmail() string { - if u == nil || u.Email == nil { - return "" - } - return *u.Email -} - -// GetPrimary returns the Primary field if it's non-nil, zero value otherwise. -func (u *UserEmail) GetPrimary() bool { - if u == nil || u.Primary == nil { - return false - } - return *u.Primary -} - -// GetVerified returns the Verified field if it's non-nil, zero value otherwise. -func (u *UserEmail) GetVerified() bool { - if u == nil || u.Verified == nil { - return false - } - return *u.Verified -} - -// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. -func (u *UserEmail) GetVisibility() string { - if u == nil || u.Visibility == nil { - return "" - } - return *u.Visibility -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (u *UserEvent) GetAction() string { - if u == nil || u.Action == nil { - return "" - } - return *u.Action -} - -// GetEnterprise returns the Enterprise field. -func (u *UserEvent) GetEnterprise() *Enterprise { - if u == nil { - return nil - } - return u.Enterprise -} - -// GetInstallation returns the Installation field. -func (u *UserEvent) GetInstallation() *Installation { - if u == nil { - return nil - } - return u.Installation -} - -// GetSender returns the Sender field. -func (u *UserEvent) GetSender() *User { - if u == nil { - return nil - } - return u.Sender -} - -// GetUser returns the User field. -func (u *UserEvent) GetUser() *User { - if u == nil { - return nil - } - return u.User -} - -// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetAvatarURL() string { - if u == nil || u.AvatarURL == nil { - return "" - } - return *u.AvatarURL -} - -// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetEventsURL() string { - if u == nil || u.EventsURL == nil { - return "" - } - return *u.EventsURL -} - -// GetFollowersURL returns the FollowersURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetFollowersURL() string { - if u == nil || u.FollowersURL == nil { - return "" - } - return *u.FollowersURL -} - -// GetFollowingURL returns the FollowingURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetFollowingURL() string { - if u == nil || u.FollowingURL == nil { - return "" - } - return *u.FollowingURL -} - -// GetGistsURL returns the GistsURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetGistsURL() string { - if u == nil || u.GistsURL == nil { - return "" - } - return *u.GistsURL -} - -// GetGravatarID returns the GravatarID field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetGravatarID() string { - if u == nil || u.GravatarID == nil { - return "" - } - return *u.GravatarID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. 
-func (u *UserLDAPMapping) GetID() int64 { - if u == nil || u.ID == nil { - return 0 - } - return *u.ID -} - -// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetLDAPDN() string { - if u == nil || u.LDAPDN == nil { - return "" - } - return *u.LDAPDN -} - -// GetLogin returns the Login field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetLogin() string { - if u == nil || u.Login == nil { - return "" - } - return *u.Login -} - -// GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetOrganizationsURL() string { - if u == nil || u.OrganizationsURL == nil { - return "" - } - return *u.OrganizationsURL -} - -// GetReceivedEventsURL returns the ReceivedEventsURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetReceivedEventsURL() string { - if u == nil || u.ReceivedEventsURL == nil { - return "" - } - return *u.ReceivedEventsURL -} - -// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetReposURL() string { - if u == nil || u.ReposURL == nil { - return "" - } - return *u.ReposURL -} - -// GetSiteAdmin returns the SiteAdmin field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetSiteAdmin() bool { - if u == nil || u.SiteAdmin == nil { - return false - } - return *u.SiteAdmin -} - -// GetStarredURL returns the StarredURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetStarredURL() string { - if u == nil || u.StarredURL == nil { - return "" - } - return *u.StarredURL -} - -// GetSubscriptionsURL returns the SubscriptionsURL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetSubscriptionsURL() string { - if u == nil || u.SubscriptionsURL == nil { - return "" - } - return *u.SubscriptionsURL -} - -// GetType returns the Type field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetType() string { - if u == nil || u.Type == nil { - return "" - } - return *u.Type -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (u *UserLDAPMapping) GetURL() string { - if u == nil || u.URL == nil { - return "" - } - return *u.URL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetCreatedAt() string { - if u == nil || u.CreatedAt == nil { - return "" - } - return *u.CreatedAt -} - -// GetExcludeAttachments returns the ExcludeAttachments field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetExcludeAttachments() bool { - if u == nil || u.ExcludeAttachments == nil { - return false - } - return *u.ExcludeAttachments -} - -// GetGUID returns the GUID field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetGUID() string { - if u == nil || u.GUID == nil { - return "" - } - return *u.GUID -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetID() int64 { - if u == nil || u.ID == nil { - return 0 - } - return *u.ID -} - -// GetLockRepositories returns the LockRepositories field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetLockRepositories() bool { - if u == nil || u.LockRepositories == nil { - return false - } - return *u.LockRepositories -} - -// GetState returns the State field if it's non-nil, zero value otherwise. 
-func (u *UserMigration) GetState() string { - if u == nil || u.State == nil { - return "" - } - return *u.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetUpdatedAt() string { - if u == nil || u.UpdatedAt == nil { - return "" - } - return *u.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (u *UserMigration) GetURL() string { - if u == nil || u.URL == nil { - return "" - } - return *u.URL -} - -// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. -func (u *UsersSearchResult) GetIncompleteResults() bool { - if u == nil || u.IncompleteResults == nil { - return false - } - return *u.IncompleteResults -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (u *UsersSearchResult) GetTotal() int { - if u == nil || u.Total == nil { - return 0 - } - return *u.Total -} - -// GetAdminUsers returns the AdminUsers field if it's non-nil, zero value otherwise. -func (u *UserStats) GetAdminUsers() int { - if u == nil || u.AdminUsers == nil { - return 0 - } - return *u.AdminUsers -} - -// GetSuspendedUsers returns the SuspendedUsers field if it's non-nil, zero value otherwise. -func (u *UserStats) GetSuspendedUsers() int { - if u == nil || u.SuspendedUsers == nil { - return 0 - } - return *u.SuspendedUsers -} - -// GetTotalUsers returns the TotalUsers field if it's non-nil, zero value otherwise. -func (u *UserStats) GetTotalUsers() int { - if u == nil || u.TotalUsers == nil { - return 0 - } - return *u.TotalUsers -} - -// GetReason returns the Reason field if it's non-nil, zero value otherwise. -func (u *UserSuspendOptions) GetReason() string { - if u == nil || u.Reason == nil { - return "" - } - return *u.Reason -} - -// GetEcosystem returns the Ecosystem field if it's non-nil, zero value otherwise. -func (v *VulnerabilityPackage) GetEcosystem() string { - if v == nil || v.Ecosystem == nil { - return "" - } - return *v.Ecosystem -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (v *VulnerabilityPackage) GetName() string { - if v == nil || v.Name == nil { - return "" - } - return *v.Name -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (w *WatchEvent) GetAction() string { - if w == nil || w.Action == nil { - return "" - } - return *w.Action -} - -// GetInstallation returns the Installation field. -func (w *WatchEvent) GetInstallation() *Installation { - if w == nil { - return nil - } - return w.Installation -} - -// GetOrg returns the Org field. -func (w *WatchEvent) GetOrg() *Organization { - if w == nil { - return nil - } - return w.Org -} - -// GetRepo returns the Repo field. -func (w *WatchEvent) GetRepo() *Repository { - if w == nil { - return nil - } - return w.Repo -} - -// GetSender returns the Sender field. -func (w *WatchEvent) GetSender() *User { - if w == nil { - return nil - } - return w.Sender -} - -// GetTotal returns the Total field if it's non-nil, zero value otherwise. -func (w *WeeklyCommitActivity) GetTotal() int { - if w == nil || w.Total == nil { - return 0 - } - return *w.Total -} - -// GetWeek returns the Week field if it's non-nil, zero value otherwise. -func (w *WeeklyCommitActivity) GetWeek() Timestamp { - if w == nil || w.Week == nil { - return Timestamp{} - } - return *w.Week -} - -// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. 
-func (w *WeeklyStats) GetAdditions() int { - if w == nil || w.Additions == nil { - return 0 - } - return *w.Additions -} - -// GetCommits returns the Commits field if it's non-nil, zero value otherwise. -func (w *WeeklyStats) GetCommits() int { - if w == nil || w.Commits == nil { - return 0 - } - return *w.Commits -} - -// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. -func (w *WeeklyStats) GetDeletions() int { - if w == nil || w.Deletions == nil { - return 0 - } - return *w.Deletions -} - -// GetWeek returns the Week field if it's non-nil, zero value otherwise. -func (w *WeeklyStats) GetWeek() Timestamp { - if w == nil || w.Week == nil { - return Timestamp{} - } - return *w.Week -} - -// GetBadgeURL returns the BadgeURL field if it's non-nil, zero value otherwise. -func (w *Workflow) GetBadgeURL() string { - if w == nil || w.BadgeURL == nil { - return "" - } - return *w.BadgeURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (w *Workflow) GetCreatedAt() Timestamp { - if w == nil || w.CreatedAt == nil { - return Timestamp{} - } - return *w.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (w *Workflow) GetHTMLURL() string { - if w == nil || w.HTMLURL == nil { - return "" - } - return *w.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (w *Workflow) GetID() int64 { - if w == nil || w.ID == nil { - return 0 - } - return *w.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (w *Workflow) GetName() string { - if w == nil || w.Name == nil { - return "" - } - return *w.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (w *Workflow) GetNodeID() string { - if w == nil || w.NodeID == nil { - return "" - } - return *w.NodeID -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (w *Workflow) GetPath() string { - if w == nil || w.Path == nil { - return "" - } - return *w.Path -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (w *Workflow) GetState() string { - if w == nil || w.State == nil { - return "" - } - return *w.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (w *Workflow) GetUpdatedAt() Timestamp { - if w == nil || w.UpdatedAt == nil { - return Timestamp{} - } - return *w.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (w *Workflow) GetURL() string { - if w == nil || w.URL == nil { - return "" - } - return *w.URL -} - -// GetTotalMS returns the TotalMS field if it's non-nil, zero value otherwise. -func (w *WorkflowBill) GetTotalMS() int64 { - if w == nil || w.TotalMS == nil { - return 0 - } - return *w.TotalMS -} - -// GetInstallation returns the Installation field. -func (w *WorkflowDispatchEvent) GetInstallation() *Installation { - if w == nil { - return nil - } - return w.Installation -} - -// GetOrg returns the Org field. -func (w *WorkflowDispatchEvent) GetOrg() *Organization { - if w == nil { - return nil - } - return w.Org -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (w *WorkflowDispatchEvent) GetRef() string { - if w == nil || w.Ref == nil { - return "" - } - return *w.Ref -} - -// GetRepo returns the Repo field. 
-func (w *WorkflowDispatchEvent) GetRepo() *Repository { - if w == nil { - return nil - } - return w.Repo -} - -// GetSender returns the Sender field. -func (w *WorkflowDispatchEvent) GetSender() *User { - if w == nil { - return nil - } - return w.Sender -} - -// GetWorkflow returns the Workflow field if it's non-nil, zero value otherwise. -func (w *WorkflowDispatchEvent) GetWorkflow() string { - if w == nil || w.Workflow == nil { - return "" - } - return *w.Workflow -} - -// GetCheckRunURL returns the CheckRunURL field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetCheckRunURL() string { - if w == nil || w.CheckRunURL == nil { - return "" - } - return *w.CheckRunURL -} - -// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetCompletedAt() Timestamp { - if w == nil || w.CompletedAt == nil { - return Timestamp{} - } - return *w.CompletedAt -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetConclusion() string { - if w == nil || w.Conclusion == nil { - return "" - } - return *w.Conclusion -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetCreatedAt() Timestamp { - if w == nil || w.CreatedAt == nil { - return Timestamp{} - } - return *w.CreatedAt -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetHeadBranch() string { - if w == nil || w.HeadBranch == nil { - return "" - } - return *w.HeadBranch -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetHeadSHA() string { - if w == nil || w.HeadSHA == nil { - return "" - } - return *w.HeadSHA -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetHTMLURL() string { - if w == nil || w.HTMLURL == nil { - return "" - } - return *w.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetID() int64 { - if w == nil || w.ID == nil { - return 0 - } - return *w.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetName() string { - if w == nil || w.Name == nil { - return "" - } - return *w.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetNodeID() string { - if w == nil || w.NodeID == nil { - return "" - } - return *w.NodeID -} - -// GetRunAttempt returns the RunAttempt field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunAttempt() int64 { - if w == nil || w.RunAttempt == nil { - return 0 - } - return *w.RunAttempt -} - -// GetRunID returns the RunID field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunID() int64 { - if w == nil || w.RunID == nil { - return 0 - } - return *w.RunID -} - -// GetRunnerGroupID returns the RunnerGroupID field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunnerGroupID() int64 { - if w == nil || w.RunnerGroupID == nil { - return 0 - } - return *w.RunnerGroupID -} - -// GetRunnerGroupName returns the RunnerGroupName field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunnerGroupName() string { - if w == nil || w.RunnerGroupName == nil { - return "" - } - return *w.RunnerGroupName -} - -// GetRunnerID returns the RunnerID field if it's non-nil, zero value otherwise. 
-func (w *WorkflowJob) GetRunnerID() int64 { - if w == nil || w.RunnerID == nil { - return 0 - } - return *w.RunnerID -} - -// GetRunnerName returns the RunnerName field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunnerName() string { - if w == nil || w.RunnerName == nil { - return "" - } - return *w.RunnerName -} - -// GetRunURL returns the RunURL field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetRunURL() string { - if w == nil || w.RunURL == nil { - return "" - } - return *w.RunURL -} - -// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetStartedAt() Timestamp { - if w == nil || w.StartedAt == nil { - return Timestamp{} - } - return *w.StartedAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetStatus() string { - if w == nil || w.Status == nil { - return "" - } - return *w.Status -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetURL() string { - if w == nil || w.URL == nil { - return "" - } - return *w.URL -} - -// GetWorkflowName returns the WorkflowName field if it's non-nil, zero value otherwise. -func (w *WorkflowJob) GetWorkflowName() string { - if w == nil || w.WorkflowName == nil { - return "" - } - return *w.WorkflowName -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (w *WorkflowJobEvent) GetAction() string { - if w == nil || w.Action == nil { - return "" - } - return *w.Action -} - -// GetInstallation returns the Installation field. -func (w *WorkflowJobEvent) GetInstallation() *Installation { - if w == nil { - return nil - } - return w.Installation -} - -// GetOrg returns the Org field. -func (w *WorkflowJobEvent) GetOrg() *Organization { - if w == nil { - return nil - } - return w.Org -} - -// GetRepo returns the Repo field. -func (w *WorkflowJobEvent) GetRepo() *Repository { - if w == nil { - return nil - } - return w.Repo -} - -// GetSender returns the Sender field. -func (w *WorkflowJobEvent) GetSender() *User { - if w == nil { - return nil - } - return w.Sender -} - -// GetWorkflowJob returns the WorkflowJob field. -func (w *WorkflowJobEvent) GetWorkflowJob() *WorkflowJob { - if w == nil { - return nil - } - return w.WorkflowJob -} - -// GetActor returns the Actor field. -func (w *WorkflowRun) GetActor() *User { - if w == nil { - return nil - } - return w.Actor -} - -// GetArtifactsURL returns the ArtifactsURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetArtifactsURL() string { - if w == nil || w.ArtifactsURL == nil { - return "" - } - return *w.ArtifactsURL -} - -// GetCancelURL returns the CancelURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetCancelURL() string { - if w == nil || w.CancelURL == nil { - return "" - } - return *w.CancelURL -} - -// GetCheckSuiteID returns the CheckSuiteID field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetCheckSuiteID() int64 { - if w == nil || w.CheckSuiteID == nil { - return 0 - } - return *w.CheckSuiteID -} - -// GetCheckSuiteNodeID returns the CheckSuiteNodeID field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetCheckSuiteNodeID() string { - if w == nil || w.CheckSuiteNodeID == nil { - return "" - } - return *w.CheckSuiteNodeID -} - -// GetCheckSuiteURL returns the CheckSuiteURL field if it's non-nil, zero value otherwise. 
-func (w *WorkflowRun) GetCheckSuiteURL() string { - if w == nil || w.CheckSuiteURL == nil { - return "" - } - return *w.CheckSuiteURL -} - -// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetConclusion() string { - if w == nil || w.Conclusion == nil { - return "" - } - return *w.Conclusion -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetCreatedAt() Timestamp { - if w == nil || w.CreatedAt == nil { - return Timestamp{} - } - return *w.CreatedAt -} - -// GetDisplayTitle returns the DisplayTitle field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetDisplayTitle() string { - if w == nil || w.DisplayTitle == nil { - return "" - } - return *w.DisplayTitle -} - -// GetEvent returns the Event field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetEvent() string { - if w == nil || w.Event == nil { - return "" - } - return *w.Event -} - -// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetHeadBranch() string { - if w == nil || w.HeadBranch == nil { - return "" - } - return *w.HeadBranch -} - -// GetHeadCommit returns the HeadCommit field. -func (w *WorkflowRun) GetHeadCommit() *HeadCommit { - if w == nil { - return nil - } - return w.HeadCommit -} - -// GetHeadRepository returns the HeadRepository field. -func (w *WorkflowRun) GetHeadRepository() *Repository { - if w == nil { - return nil - } - return w.HeadRepository -} - -// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetHeadSHA() string { - if w == nil || w.HeadSHA == nil { - return "" - } - return *w.HeadSHA -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetHTMLURL() string { - if w == nil || w.HTMLURL == nil { - return "" - } - return *w.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetID() int64 { - if w == nil || w.ID == nil { - return 0 - } - return *w.ID -} - -// GetJobsURL returns the JobsURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetJobsURL() string { - if w == nil || w.JobsURL == nil { - return "" - } - return *w.JobsURL -} - -// GetLogsURL returns the LogsURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetLogsURL() string { - if w == nil || w.LogsURL == nil { - return "" - } - return *w.LogsURL -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetName() string { - if w == nil || w.Name == nil { - return "" - } - return *w.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetNodeID() string { - if w == nil || w.NodeID == nil { - return "" - } - return *w.NodeID -} - -// GetPreviousAttemptURL returns the PreviousAttemptURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetPreviousAttemptURL() string { - if w == nil || w.PreviousAttemptURL == nil { - return "" - } - return *w.PreviousAttemptURL -} - -// GetRepository returns the Repository field. -func (w *WorkflowRun) GetRepository() *Repository { - if w == nil { - return nil - } - return w.Repository -} - -// GetRerunURL returns the RerunURL field if it's non-nil, zero value otherwise. 
-func (w *WorkflowRun) GetRerunURL() string { - if w == nil || w.RerunURL == nil { - return "" - } - return *w.RerunURL -} - -// GetRunAttempt returns the RunAttempt field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetRunAttempt() int { - if w == nil || w.RunAttempt == nil { - return 0 - } - return *w.RunAttempt -} - -// GetRunNumber returns the RunNumber field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetRunNumber() int { - if w == nil || w.RunNumber == nil { - return 0 - } - return *w.RunNumber -} - -// GetRunStartedAt returns the RunStartedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetRunStartedAt() Timestamp { - if w == nil || w.RunStartedAt == nil { - return Timestamp{} - } - return *w.RunStartedAt -} - -// GetStatus returns the Status field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetStatus() string { - if w == nil || w.Status == nil { - return "" - } - return *w.Status -} - -// GetTriggeringActor returns the TriggeringActor field. -func (w *WorkflowRun) GetTriggeringActor() *User { - if w == nil { - return nil - } - return w.TriggeringActor -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetUpdatedAt() Timestamp { - if w == nil || w.UpdatedAt == nil { - return Timestamp{} - } - return *w.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetURL() string { - if w == nil || w.URL == nil { - return "" - } - return *w.URL -} - -// GetWorkflowID returns the WorkflowID field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetWorkflowID() int64 { - if w == nil || w.WorkflowID == nil { - return 0 - } - return *w.WorkflowID -} - -// GetWorkflowURL returns the WorkflowURL field if it's non-nil, zero value otherwise. -func (w *WorkflowRun) GetWorkflowURL() string { - if w == nil || w.WorkflowURL == nil { - return "" - } - return *w.WorkflowURL -} - -// GetExcludePullRequests returns the ExcludePullRequests field if it's non-nil, zero value otherwise. -func (w *WorkflowRunAttemptOptions) GetExcludePullRequests() bool { - if w == nil || w.ExcludePullRequests == nil { - return false - } - return *w.ExcludePullRequests -} - -// GetJobs returns the Jobs field if it's non-nil, zero value otherwise. -func (w *WorkflowRunBill) GetJobs() int { - if w == nil || w.Jobs == nil { - return 0 - } - return *w.Jobs -} - -// GetTotalMS returns the TotalMS field if it's non-nil, zero value otherwise. -func (w *WorkflowRunBill) GetTotalMS() int64 { - if w == nil || w.TotalMS == nil { - return 0 - } - return *w.TotalMS -} - -// GetAction returns the Action field if it's non-nil, zero value otherwise. -func (w *WorkflowRunEvent) GetAction() string { - if w == nil || w.Action == nil { - return "" - } - return *w.Action -} - -// GetInstallation returns the Installation field. -func (w *WorkflowRunEvent) GetInstallation() *Installation { - if w == nil { - return nil - } - return w.Installation -} - -// GetOrg returns the Org field. -func (w *WorkflowRunEvent) GetOrg() *Organization { - if w == nil { - return nil - } - return w.Org -} - -// GetRepo returns the Repo field. -func (w *WorkflowRunEvent) GetRepo() *Repository { - if w == nil { - return nil - } - return w.Repo -} - -// GetSender returns the Sender field. -func (w *WorkflowRunEvent) GetSender() *User { - if w == nil { - return nil - } - return w.Sender -} - -// GetWorkflow returns the Workflow field. 
-func (w *WorkflowRunEvent) GetWorkflow() *Workflow { - if w == nil { - return nil - } - return w.Workflow -} - -// GetWorkflowRun returns the WorkflowRun field. -func (w *WorkflowRunEvent) GetWorkflowRun() *WorkflowRun { - if w == nil { - return nil - } - return w.WorkflowRun -} - -// GetDurationMS returns the DurationMS field if it's non-nil, zero value otherwise. -func (w *WorkflowRunJobRun) GetDurationMS() int64 { - if w == nil || w.DurationMS == nil { - return 0 - } - return *w.DurationMS -} - -// GetJobID returns the JobID field if it's non-nil, zero value otherwise. -func (w *WorkflowRunJobRun) GetJobID() int { - if w == nil || w.JobID == nil { - return 0 - } - return *w.JobID -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (w *WorkflowRuns) GetTotalCount() int { - if w == nil || w.TotalCount == nil { - return 0 - } - return *w.TotalCount -} - -// GetBillable returns the Billable field. -func (w *WorkflowRunUsage) GetBillable() *WorkflowRunBillMap { - if w == nil { - return nil - } - return w.Billable -} - -// GetRunDurationMS returns the RunDurationMS field if it's non-nil, zero value otherwise. -func (w *WorkflowRunUsage) GetRunDurationMS() int64 { - if w == nil || w.RunDurationMS == nil { - return 0 - } - return *w.RunDurationMS -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (w *Workflows) GetTotalCount() int { - if w == nil || w.TotalCount == nil { - return 0 - } - return *w.TotalCount -} - -// GetBillable returns the Billable field. -func (w *WorkflowUsage) GetBillable() *WorkflowBillMap { - if w == nil { - return nil - } - return w.Billable -} diff --git a/vendor/github.com/google/go-github/v57/github/github.go b/vendor/github.com/google/go-github/v57/github/github.go deleted file mode 100644 index c248b256..00000000 --- a/vendor/github.com/google/go-github/v57/github/github.go +++ /dev/null @@ -1,1537 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
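// A minimal sketch, not taken from this patch, of how the nil-safe Get*
// accessors above are typically consumed when handling a workflow_job
// webhook; the route, port and secret below are placeholders.
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/google/go-github/v57/github"
)

func handleWorkflowJob(w http.ResponseWriter, r *http.Request) {
	payload, err := github.ValidatePayload(r, []byte("placeholder-webhook-secret"))
	if err != nil {
		http.Error(w, "invalid signature", http.StatusForbidden)
		return
	}
	event, err := github.ParseWebHook(github.WebHookType(r), payload)
	if err != nil {
		http.Error(w, "unparsable payload", http.StatusBadRequest)
		return
	}
	if e, ok := event.(*github.WorkflowJobEvent); ok {
		// Each accessor returns a zero value when the field (or receiver) is nil,
		// so no per-pointer guards are needed here.
		job := e.GetWorkflowJob()
		fmt.Printf("action=%s job=%d status=%s runner=%s repo=%s\n",
			e.GetAction(), job.GetID(), job.GetStatus(), job.GetRunnerName(),
			e.GetRepo().GetFullName())
	}
	w.WriteHeader(http.StatusOK)
}

func main() {
	http.HandleFunc("/webhooks", handleWorkflowJob)
	log.Fatal(http.ListenAndServe(":8080", nil))
}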
- -//go:generate go run gen-accessors.go -//go:generate go run gen-stringify-test.go -//go:generate ../script/metadata.sh update-go - -package github - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "net/url" - "reflect" - "strconv" - "strings" - "sync" - "time" - - "github.com/google/go-querystring/query" -) - -const ( - Version = "v57.0.0" - - defaultAPIVersion = "2022-11-28" - defaultBaseURL = "https://api.github.com/" - defaultUserAgent = "go-github" + "/" + Version - uploadBaseURL = "https://uploads.github.com/" - - headerAPIVersion = "X-GitHub-Api-Version" - headerRateLimit = "X-RateLimit-Limit" - headerRateRemaining = "X-RateLimit-Remaining" - headerRateReset = "X-RateLimit-Reset" - headerOTP = "X-GitHub-OTP" - headerRetryAfter = "Retry-After" - - headerTokenExpiration = "GitHub-Authentication-Token-Expiration" - - mediaTypeV3 = "application/vnd.github.v3+json" - defaultMediaType = "application/octet-stream" - mediaTypeV3SHA = "application/vnd.github.v3.sha" - mediaTypeV3Diff = "application/vnd.github.v3.diff" - mediaTypeV3Patch = "application/vnd.github.v3.patch" - mediaTypeOrgPermissionRepo = "application/vnd.github.v3.repository+json" - mediaTypeIssueImportAPI = "application/vnd.github.golden-comet-preview+json" - - // Media Type values to access preview APIs - // These media types will be added to the API request as headers - // and used to enable particular features on GitHub API that are still in preview. - // After some time, specific media types will be promoted (to a "stable" state). - // From then on, the preview headers are not required anymore to activate the additional - // feature on GitHub.com's API. However, this API header might still be needed for users - // to run a GitHub Enterprise Server on-premise. - // It's not uncommon for GitHub Enterprise Server customers to run older versions which - // would probably rely on the preview headers for some time. - // While the header promotion is going out for GitHub.com, it may be some time before it - // even arrives in GitHub Enterprise Server. - // We keep those preview headers around to avoid breaking older GitHub Enterprise Server - // versions. Additionally, non-functional (preview) headers don't create any side effects - // on GitHub Cloud version. - // - // See https://github.com/google/go-github/pull/2125 for full context. 
- - // https://developer.github.com/changes/2014-12-09-new-attributes-for-stars-api/ - mediaTypeStarringPreview = "application/vnd.github.v3.star+json" - - // https://help.github.com/enterprise/2.4/admin/guides/migrations/exporting-the-github-com-organization-s-repositories/ - mediaTypeMigrationsPreview = "application/vnd.github.wyandotte-preview+json" - - // https://developer.github.com/changes/2016-04-06-deployment-and-deployment-status-enhancements/ - mediaTypeDeploymentStatusPreview = "application/vnd.github.ant-man-preview+json" - - // https://developer.github.com/changes/2018-10-16-deployments-environments-states-and-auto-inactive-updates/ - mediaTypeExpandDeploymentStatusPreview = "application/vnd.github.flash-preview+json" - - // https://developer.github.com/changes/2016-05-12-reactions-api-preview/ - mediaTypeReactionsPreview = "application/vnd.github.squirrel-girl-preview" - - // https://developer.github.com/changes/2016-05-23-timeline-preview-api/ - mediaTypeTimelinePreview = "application/vnd.github.mockingbird-preview+json" - - // https://developer.github.com/changes/2016-09-14-projects-api/ - mediaTypeProjectsPreview = "application/vnd.github.inertia-preview+json" - - // https://developer.github.com/changes/2017-01-05-commit-search-api/ - mediaTypeCommitSearchPreview = "application/vnd.github.cloak-preview+json" - - // https://developer.github.com/changes/2017-02-28-user-blocking-apis-and-webhook/ - mediaTypeBlockUsersPreview = "application/vnd.github.giant-sentry-fist-preview+json" - - // https://developer.github.com/changes/2017-05-23-coc-api/ - mediaTypeCodesOfConductPreview = "application/vnd.github.scarlet-witch-preview+json" - - // https://developer.github.com/changes/2017-07-17-update-topics-on-repositories/ - mediaTypeTopicsPreview = "application/vnd.github.mercy-preview+json" - - // https://developer.github.com/changes/2018-03-16-protected-branches-required-approving-reviews/ - mediaTypeRequiredApprovingReviewsPreview = "application/vnd.github.luke-cage-preview+json" - - // https://developer.github.com/changes/2018-05-07-new-checks-api-public-beta/ - mediaTypeCheckRunsPreview = "application/vnd.github.antiope-preview+json" - - // https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/ - mediaTypePreReceiveHooksPreview = "application/vnd.github.eye-scream-preview" - - // https://developer.github.com/changes/2018-02-22-protected-branches-required-signatures/ - mediaTypeSignaturePreview = "application/vnd.github.zzzax-preview+json" - - // https://developer.github.com/changes/2018-09-05-project-card-events/ - mediaTypeProjectCardDetailsPreview = "application/vnd.github.starfox-preview+json" - - // https://developer.github.com/changes/2018-12-18-interactions-preview/ - mediaTypeInteractionRestrictionsPreview = "application/vnd.github.sombra-preview+json" - - // https://developer.github.com/changes/2019-03-14-enabling-disabling-pages/ - mediaTypeEnablePagesAPIPreview = "application/vnd.github.switcheroo-preview+json" - - // https://developer.github.com/changes/2019-04-24-vulnerability-alerts/ - mediaTypeRequiredVulnerabilityAlertsPreview = "application/vnd.github.dorian-preview+json" - - // https://developer.github.com/changes/2019-05-29-update-branch-api/ - mediaTypeUpdatePullRequestBranchPreview = "application/vnd.github.lydian-preview+json" - - // https://developer.github.com/changes/2019-04-11-pulls-branches-for-commit/ - mediaTypeListPullsOrBranchesForCommitPreview = "application/vnd.github.groot-preview+json" - - // 
https://docs.github.com/rest/previews/#repository-creation-permissions - mediaTypeMemberAllowedRepoCreationTypePreview = "application/vnd.github.surtur-preview+json" - - // https://docs.github.com/rest/previews/#create-and-use-repository-templates - mediaTypeRepositoryTemplatePreview = "application/vnd.github.baptiste-preview+json" - - // https://developer.github.com/changes/2019-10-03-multi-line-comments/ - mediaTypeMultiLineCommentsPreview = "application/vnd.github.comfort-fade-preview+json" - - // https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api/ - mediaTypeOAuthAppPreview = "application/vnd.github.doctor-strange-preview+json" - - // https://developer.github.com/changes/2019-12-03-internal-visibility-changes/ - mediaTypeRepositoryVisibilityPreview = "application/vnd.github.nebula-preview+json" - - // https://developer.github.com/changes/2018-12-10-content-attachments-api/ - mediaTypeContentAttachmentsPreview = "application/vnd.github.corsair-preview+json" -) - -var errNonNilContext = errors.New("context must be non-nil") - -// A Client manages communication with the GitHub API. -type Client struct { - clientMu sync.Mutex // clientMu protects the client during calls that modify the CheckRedirect func. - client *http.Client // HTTP client used to communicate with the API. - - // Base URL for API requests. Defaults to the public GitHub API, but can be - // set to a domain endpoint to use with GitHub Enterprise. BaseURL should - // always be specified with a trailing slash. - BaseURL *url.URL - - // Base URL for uploading files. - UploadURL *url.URL - - // User agent used when communicating with the GitHub API. - UserAgent string - - rateMu sync.Mutex - rateLimits [categories]Rate // Rate limits for the client as determined by the most recent API calls. - secondaryRateLimitReset time.Time // Secondary rate limit reset for the client as determined by the most recent API calls. - - common service // Reuse a single struct instead of allocating one for each service on the heap. - - // Services used for talking to different parts of the GitHub API. - Actions *ActionsService - Activity *ActivityService - Admin *AdminService - Apps *AppsService - Authorizations *AuthorizationsService - Billing *BillingService - Checks *ChecksService - CodeScanning *CodeScanningService - CodesOfConduct *CodesOfConductService - Codespaces *CodespacesService - Dependabot *DependabotService - DependencyGraph *DependencyGraphService - Emojis *EmojisService - Enterprise *EnterpriseService - Gists *GistsService - Git *GitService - Gitignores *GitignoresService - Interactions *InteractionsService - IssueImport *IssueImportService - Issues *IssuesService - Licenses *LicensesService - Markdown *MarkdownService - Marketplace *MarketplaceService - Meta *MetaService - Migrations *MigrationService - Organizations *OrganizationsService - Projects *ProjectsService - PullRequests *PullRequestsService - RateLimit *RateLimitService - Reactions *ReactionsService - Repositories *RepositoriesService - SCIM *SCIMService - Search *SearchService - SecretScanning *SecretScanningService - SecurityAdvisories *SecurityAdvisoriesService - Teams *TeamsService - Users *UsersService -} - -type service struct { - client *Client -} - -// Client returns the http.Client used by this GitHub client. 
-func (c *Client) Client() *http.Client { - c.clientMu.Lock() - defer c.clientMu.Unlock() - clientCopy := *c.client - return &clientCopy -} - -// ListOptions specifies the optional parameters to various List methods that -// support offset pagination. -type ListOptions struct { - // For paginated result sets, page of results to retrieve. - Page int `url:"page,omitempty"` - - // For paginated result sets, the number of results to include per page. - PerPage int `url:"per_page,omitempty"` -} - -// ListCursorOptions specifies the optional parameters to various List methods that -// support cursor pagination. -type ListCursorOptions struct { - // For paginated result sets, page of results to retrieve. - Page string `url:"page,omitempty"` - - // For paginated result sets, the number of results to include per page. - PerPage int `url:"per_page,omitempty"` - - // For paginated result sets, the number of results per page (max 100), starting from the first matching result. - // This parameter must not be used in combination with last. - First int `url:"first,omitempty"` - - // For paginated result sets, the number of results per page (max 100), starting from the last matching result. - // This parameter must not be used in combination with first. - Last int `url:"last,omitempty"` - - // A cursor, as given in the Link header. If specified, the query only searches for events after this cursor. - After string `url:"after,omitempty"` - - // A cursor, as given in the Link header. If specified, the query only searches for events before this cursor. - Before string `url:"before,omitempty"` - - // A cursor, as given in the Link header. If specified, the query continues the search using this cursor. - Cursor string `url:"cursor,omitempty"` -} - -// UploadOptions specifies the parameters to methods that support uploads. -type UploadOptions struct { - Name string `url:"name,omitempty"` - Label string `url:"label,omitempty"` - MediaType string `url:"-"` -} - -// RawType represents type of raw format of a request instead of JSON. -type RawType uint8 - -const ( - // Diff format. - Diff RawType = 1 + iota - // Patch format. - Patch -) - -// RawOptions specifies parameters when user wants to get raw format of -// a response instead of JSON. -type RawOptions struct { - Type RawType -} - -// addOptions adds the parameters in opts as URL query parameters to s. opts -// must be a struct whose fields may contain "url" tags. -func addOptions(s string, opts interface{}) (string, error) { - v := reflect.ValueOf(opts) - if v.Kind() == reflect.Ptr && v.IsNil() { - return s, nil - } - - u, err := url.Parse(s) - if err != nil { - return s, err - } - - qs, err := query.Values(opts) - if err != nil { - return s, err - } - - u.RawQuery = qs.Encode() - return u.String(), nil -} - -// NewClient returns a new GitHub API client. If a nil httpClient is -// provided, a new http.Client will be used. To use API methods which require -// authentication, either use Client.WithAuthToken or provide NewClient with -// an http.Client that will perform the authentication for you (such as that -// provided by the golang.org/x/oauth2 library). -func NewClient(httpClient *http.Client) *Client { - c := &Client{client: httpClient} - c.initialize() - return c -} - -// WithAuthToken returns a copy of the client configured to use the provided token for the Authorization header. 
-func (c *Client) WithAuthToken(token string) *Client { - c2 := c.copy() - defer c2.initialize() - transport := c2.client.Transport - if transport == nil { - transport = http.DefaultTransport - } - c2.client.Transport = roundTripperFunc( - func(req *http.Request) (*http.Response, error) { - req = req.Clone(req.Context()) - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token)) - return transport.RoundTrip(req) - }, - ) - return c2 -} - -// WithEnterpriseURLs returns a copy of the client configured to use the provided base and -// upload URLs. If the base URL does not have the suffix "/api/v3/", it will be added -// automatically. If the upload URL does not have the suffix "/api/uploads", it will be -// added automatically. -// -// Note that WithEnterpriseURLs is a convenience helper only; -// its behavior is equivalent to setting the BaseURL and UploadURL fields. -// -// Another important thing is that by default, the GitHub Enterprise URL format -// should be http(s)://[hostname]/api/v3/ or you will always receive the 406 status code. -// The upload URL format should be http(s)://[hostname]/api/uploads/. -func (c *Client) WithEnterpriseURLs(baseURL, uploadURL string) (*Client, error) { - c2 := c.copy() - defer c2.initialize() - var err error - c2.BaseURL, err = url.Parse(baseURL) - if err != nil { - return nil, err - } - - if !strings.HasSuffix(c2.BaseURL.Path, "/") { - c2.BaseURL.Path += "/" - } - if !strings.HasSuffix(c2.BaseURL.Path, "/api/v3/") && - !strings.HasPrefix(c2.BaseURL.Host, "api.") && - !strings.Contains(c2.BaseURL.Host, ".api.") { - c2.BaseURL.Path += "api/v3/" - } - - c2.UploadURL, err = url.Parse(uploadURL) - if err != nil { - return nil, err - } - - if !strings.HasSuffix(c2.UploadURL.Path, "/") { - c2.UploadURL.Path += "/" - } - if !strings.HasSuffix(c2.UploadURL.Path, "/api/uploads/") && - !strings.HasPrefix(c2.UploadURL.Host, "api.") && - !strings.Contains(c2.UploadURL.Host, ".api.") { - c2.UploadURL.Path += "api/uploads/" - } - return c2, nil -} - -// initialize sets default values and initializes services. 
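// A minimal sketch of the constructors documented above (NewClient,
// WithAuthToken, WithEnterpriseURLs); the token and GHES URLs are placeholders.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()

	// Public GitHub: bare client plus a bearer token.
	gh := github.NewClient(nil).WithAuthToken("ghp_placeholder_token")

	// GitHub Enterprise Server: the /api/v3/ and /api/uploads/ suffixes are
	// appended automatically when they are missing from the supplied URLs.
	ghes, err := gh.WithEnterpriseURLs("https://ghes.example.com", "https://ghes.example.com")
	if err != nil {
		log.Fatal(err)
	}

	user, _, err := ghes.Users.Get(ctx, "")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("authenticated as", user.GetLogin())
}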
-func (c *Client) initialize() { - if c.client == nil { - c.client = &http.Client{} - } - if c.BaseURL == nil { - c.BaseURL, _ = url.Parse(defaultBaseURL) - } - if c.UploadURL == nil { - c.UploadURL, _ = url.Parse(uploadBaseURL) - } - if c.UserAgent == "" { - c.UserAgent = defaultUserAgent - } - c.common.client = c - c.Actions = (*ActionsService)(&c.common) - c.Activity = (*ActivityService)(&c.common) - c.Admin = (*AdminService)(&c.common) - c.Apps = (*AppsService)(&c.common) - c.Authorizations = (*AuthorizationsService)(&c.common) - c.Billing = (*BillingService)(&c.common) - c.Checks = (*ChecksService)(&c.common) - c.CodeScanning = (*CodeScanningService)(&c.common) - c.Codespaces = (*CodespacesService)(&c.common) - c.CodesOfConduct = (*CodesOfConductService)(&c.common) - c.Dependabot = (*DependabotService)(&c.common) - c.DependencyGraph = (*DependencyGraphService)(&c.common) - c.Emojis = (*EmojisService)(&c.common) - c.Enterprise = (*EnterpriseService)(&c.common) - c.Gists = (*GistsService)(&c.common) - c.Git = (*GitService)(&c.common) - c.Gitignores = (*GitignoresService)(&c.common) - c.Interactions = (*InteractionsService)(&c.common) - c.IssueImport = (*IssueImportService)(&c.common) - c.Issues = (*IssuesService)(&c.common) - c.Licenses = (*LicensesService)(&c.common) - c.Markdown = (*MarkdownService)(&c.common) - c.Marketplace = &MarketplaceService{client: c} - c.Meta = (*MetaService)(&c.common) - c.Migrations = (*MigrationService)(&c.common) - c.Organizations = (*OrganizationsService)(&c.common) - c.Projects = (*ProjectsService)(&c.common) - c.PullRequests = (*PullRequestsService)(&c.common) - c.RateLimit = (*RateLimitService)(&c.common) - c.Reactions = (*ReactionsService)(&c.common) - c.Repositories = (*RepositoriesService)(&c.common) - c.SCIM = (*SCIMService)(&c.common) - c.Search = (*SearchService)(&c.common) - c.SecretScanning = (*SecretScanningService)(&c.common) - c.SecurityAdvisories = (*SecurityAdvisoriesService)(&c.common) - c.Teams = (*TeamsService)(&c.common) - c.Users = (*UsersService)(&c.common) -} - -// copy returns a copy of the current client. It must be initialized before use. -func (c *Client) copy() *Client { - c.clientMu.Lock() - // can't use *c here because that would copy mutexes by value. - clone := Client{ - client: c.client, - UserAgent: c.UserAgent, - BaseURL: c.BaseURL, - UploadURL: c.UploadURL, - secondaryRateLimitReset: c.secondaryRateLimitReset, - } - c.clientMu.Unlock() - if clone.client == nil { - clone.client = &http.Client{} - } - c.rateMu.Lock() - copy(clone.rateLimits[:], c.rateLimits[:]) - c.rateMu.Unlock() - return &clone -} - -// NewClientWithEnvProxy enhances NewClient with the HttpProxy env. -func NewClientWithEnvProxy() *Client { - return NewClient(&http.Client{Transport: &http.Transport{Proxy: http.ProxyFromEnvironment}}) -} - -// NewTokenClient returns a new GitHub API client authenticated with the provided token. -// Deprecated: Use NewClient(nil).WithAuthToken(token) instead. -func NewTokenClient(_ context.Context, token string) *Client { - // This always returns a nil error. - return NewClient(nil).WithAuthToken(token) -} - -// NewEnterpriseClient returns a new GitHub API client with provided -// base URL and upload URL (often is your GitHub Enterprise hostname). -// -// Deprecated: Use NewClient(httpClient).WithEnterpriseURLs(baseURL, uploadURL) instead. 
-func NewEnterpriseClient(baseURL, uploadURL string, httpClient *http.Client) (*Client, error) { - return NewClient(httpClient).WithEnterpriseURLs(baseURL, uploadURL) -} - -// RequestOption represents an option that can modify an http.Request. -type RequestOption func(req *http.Request) - -// WithVersion overrides the GitHub v3 API version for this individual request. -// For more information, see: -// https://github.blog/2022-11-28-to-infinity-and-beyond-enabling-the-future-of-githubs-rest-api-with-api-versioning/ -func WithVersion(version string) RequestOption { - return func(req *http.Request) { - req.Header.Set(headerAPIVersion, version) - } -} - -// NewRequest creates an API request. A relative URL can be provided in urlStr, -// in which case it is resolved relative to the BaseURL of the Client. -// Relative URLs should always be specified without a preceding slash. If -// specified, the value pointed to by body is JSON encoded and included as the -// request body. -func (c *Client) NewRequest(method, urlStr string, body interface{}, opts ...RequestOption) (*http.Request, error) { - if !strings.HasSuffix(c.BaseURL.Path, "/") { - return nil, fmt.Errorf("BaseURL must have a trailing slash, but %q does not", c.BaseURL) - } - - u, err := c.BaseURL.Parse(urlStr) - if err != nil { - return nil, err - } - - var buf io.ReadWriter - if body != nil { - buf = &bytes.Buffer{} - enc := json.NewEncoder(buf) - enc.SetEscapeHTML(false) - err := enc.Encode(body) - if err != nil { - return nil, err - } - } - - req, err := http.NewRequest(method, u.String(), buf) - if err != nil { - return nil, err - } - - if body != nil { - req.Header.Set("Content-Type", "application/json") - } - req.Header.Set("Accept", mediaTypeV3) - if c.UserAgent != "" { - req.Header.Set("User-Agent", c.UserAgent) - } - req.Header.Set(headerAPIVersion, defaultAPIVersion) - - for _, opt := range opts { - opt(req) - } - - return req, nil -} - -// NewFormRequest creates an API request. A relative URL can be provided in urlStr, -// in which case it is resolved relative to the BaseURL of the Client. -// Relative URLs should always be specified without a preceding slash. -// Body is sent with Content-Type: application/x-www-form-urlencoded. -func (c *Client) NewFormRequest(urlStr string, body io.Reader, opts ...RequestOption) (*http.Request, error) { - if !strings.HasSuffix(c.BaseURL.Path, "/") { - return nil, fmt.Errorf("BaseURL must have a trailing slash, but %q does not", c.BaseURL) - } - - u, err := c.BaseURL.Parse(urlStr) - if err != nil { - return nil, err - } - - req, err := http.NewRequest(http.MethodPost, u.String(), body) - if err != nil { - return nil, err - } - - req.Header.Set("Content-Type", "application/x-www-form-urlencoded") - req.Header.Set("Accept", mediaTypeV3) - if c.UserAgent != "" { - req.Header.Set("User-Agent", c.UserAgent) - } - req.Header.Set(headerAPIVersion, defaultAPIVersion) - - for _, opt := range opts { - opt(req) - } - - return req, nil -} - -// NewUploadRequest creates an upload request. A relative URL can be provided in -// urlStr, in which case it is resolved relative to the UploadURL of the Client. -// Relative URLs should always be specified without a preceding slash. 
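// A minimal sketch of NewRequest/Do for calling an endpoint directly; the
// inline response struct is abbreviated, not the full rate_limit schema.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil)

	// Relative URL without a leading slash, resolved against client.BaseURL.
	req, err := client.NewRequest("GET", "rate_limit", nil)
	if err != nil {
		log.Fatal(err)
	}

	var out struct {
		Resources map[string]struct {
			Limit     int `json:"limit"`
			Remaining int `json:"remaining"`
		} `json:"resources"`
	}
	if _, err := client.Do(ctx, req, &out); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("core: %d/%d requests remaining\n",
		out.Resources["core"].Remaining, out.Resources["core"].Limit)
}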
-func (c *Client) NewUploadRequest(urlStr string, reader io.Reader, size int64, mediaType string, opts ...RequestOption) (*http.Request, error) { - if !strings.HasSuffix(c.UploadURL.Path, "/") { - return nil, fmt.Errorf("UploadURL must have a trailing slash, but %q does not", c.UploadURL) - } - u, err := c.UploadURL.Parse(urlStr) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("POST", u.String(), reader) - if err != nil { - return nil, err - } - - req.ContentLength = size - - if mediaType == "" { - mediaType = defaultMediaType - } - req.Header.Set("Content-Type", mediaType) - req.Header.Set("Accept", mediaTypeV3) - req.Header.Set("User-Agent", c.UserAgent) - req.Header.Set(headerAPIVersion, defaultAPIVersion) - - for _, opt := range opts { - opt(req) - } - - return req, nil -} - -// Response is a GitHub API response. This wraps the standard http.Response -// returned from GitHub and provides convenient access to things like -// pagination links. -type Response struct { - *http.Response - - // These fields provide the page values for paginating through a set of - // results. Any or all of these may be set to the zero value for - // responses that are not part of a paginated set, or for which there - // are no additional pages. - // - // These fields support what is called "offset pagination" and should - // be used with the ListOptions struct. - NextPage int - PrevPage int - FirstPage int - LastPage int - - // Additionally, some APIs support "cursor pagination" instead of offset. - // This means that a token points directly to the next record which - // can lead to O(1) performance compared to O(n) performance provided - // by offset pagination. - // - // For APIs that support cursor pagination (such as - // TeamsService.ListIDPGroupsInOrganization), the following field - // will be populated to point to the next page. - // - // To use this token, set ListCursorOptions.Page to this value before - // calling the endpoint again. - NextPageToken string - - // For APIs that support cursor pagination, such as RepositoriesService.ListHookDeliveries, - // the following field will be populated to point to the next page. - // Set ListCursorOptions.Cursor to this value when calling the endpoint again. - Cursor string - - // For APIs that support before/after pagination, such as OrganizationsService.AuditLog. - Before string - After string - - // Explicitly specify the Rate type so Rate's String() receiver doesn't - // propagate to Response. - Rate Rate - - // token's expiration date. Timestamp is 0001-01-01 when token doesn't expire. - // So it is valid for TokenExpiration.Equal(Timestamp{}) or TokenExpiration.Time.After(time.Now()) - TokenExpiration Timestamp -} - -// newResponse creates a new Response for the provided http.Response. -// r must not be nil. -func newResponse(r *http.Response) *Response { - response := &Response{Response: r} - response.populatePageValues() - response.Rate = parseRate(r) - response.TokenExpiration = parseTokenExpiration(r) - return response -} - -// populatePageValues parses the HTTP Link response headers and populates the -// various pagination link values in the Response. 
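// A minimal sketch of offset pagination with ListOptions and Response.NextPage
// as documented above; the token, org and repo names are placeholders.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken("ghp_placeholder_token")

	opts := &github.ListOptions{PerPage: 100}
	for {
		runners, resp, err := client.Actions.ListRunners(ctx, "example-org", "example-repo", opts)
		if err != nil {
			log.Fatal(err)
		}
		for _, r := range runners.Runners {
			fmt.Println(r.GetID(), r.GetName(), r.GetStatus())
		}
		// NextPage is zero once the last page has been returned.
		if resp.NextPage == 0 {
			break
		}
		opts.Page = resp.NextPage
	}
}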
-func (r *Response) populatePageValues() { - if links, ok := r.Response.Header["Link"]; ok && len(links) > 0 { - for _, link := range strings.Split(links[0], ",") { - segments := strings.Split(strings.TrimSpace(link), ";") - - // link must at least have href and rel - if len(segments) < 2 { - continue - } - - // ensure href is properly formatted - if !strings.HasPrefix(segments[0], "<") || !strings.HasSuffix(segments[0], ">") { - continue - } - - // try to pull out page parameter - url, err := url.Parse(segments[0][1 : len(segments[0])-1]) - if err != nil { - continue - } - - q := url.Query() - - if cursor := q.Get("cursor"); cursor != "" { - for _, segment := range segments[1:] { - switch strings.TrimSpace(segment) { - case `rel="next"`: - r.Cursor = cursor - } - } - - continue - } - - page := q.Get("page") - since := q.Get("since") - before := q.Get("before") - after := q.Get("after") - - if page == "" && before == "" && after == "" && since == "" { - continue - } - - if since != "" && page == "" { - page = since - } - - for _, segment := range segments[1:] { - switch strings.TrimSpace(segment) { - case `rel="next"`: - if r.NextPage, err = strconv.Atoi(page); err != nil { - r.NextPageToken = page - } - r.After = after - case `rel="prev"`: - r.PrevPage, _ = strconv.Atoi(page) - r.Before = before - case `rel="first"`: - r.FirstPage, _ = strconv.Atoi(page) - case `rel="last"`: - r.LastPage, _ = strconv.Atoi(page) - } - } - } - } -} - -// parseRate parses the rate related headers. -func parseRate(r *http.Response) Rate { - var rate Rate - if limit := r.Header.Get(headerRateLimit); limit != "" { - rate.Limit, _ = strconv.Atoi(limit) - } - if remaining := r.Header.Get(headerRateRemaining); remaining != "" { - rate.Remaining, _ = strconv.Atoi(remaining) - } - if reset := r.Header.Get(headerRateReset); reset != "" { - if v, _ := strconv.ParseInt(reset, 10, 64); v != 0 { - rate.Reset = Timestamp{time.Unix(v, 0)} - } - } - return rate -} - -// parseSecondaryRate parses the secondary rate related headers, -// and returns the time to retry after. -func parseSecondaryRate(r *http.Response) *time.Duration { - // According to GitHub support, the "Retry-After" header value will be - // an integer which represents the number of seconds that one should - // wait before resuming making requests. - if v := r.Header.Get(headerRetryAfter); v != "" { - retryAfterSeconds, _ := strconv.ParseInt(v, 10, 64) // Error handling is noop. - retryAfter := time.Duration(retryAfterSeconds) * time.Second - return &retryAfter - } - - // According to GitHub support, endpoints might return x-ratelimit-reset instead, - // as an integer which represents the number of seconds since epoch UTC, - // represting the time to resume making requests. - if v := r.Header.Get(headerRateReset); v != "" { - secondsSinceEpoch, _ := strconv.ParseInt(v, 10, 64) // Error handling is noop. - retryAfter := time.Until(time.Unix(secondsSinceEpoch, 0)) - return &retryAfter - } - - return nil -} - -// parseTokenExpiration parses the TokenExpiration related headers. -// Returns 0001-01-01 if the header is not defined or could not be parsed. -func parseTokenExpiration(r *http.Response) Timestamp { - if v := r.Header.Get(headerTokenExpiration); v != "" { - if t, err := time.Parse("2006-01-02 15:04:05 MST", v); err == nil { - return Timestamp{t.Local()} - } - // Some tokens include the timezone offset instead of the timezone. 
- // https://github.com/google/go-github/issues/2649 - if t, err := time.Parse("2006-01-02 15:04:05 -0700", v); err == nil { - return Timestamp{t.Local()} - } - } - return Timestamp{} // 0001-01-01 00:00:00 -} - -type requestContext uint8 - -const ( - bypassRateLimitCheck requestContext = iota -) - -// BareDo sends an API request and lets you handle the api response. If an error -// or API Error occurs, the error will contain more information. Otherwise you -// are supposed to read and close the response's Body. If rate limit is exceeded -// and reset time is in the future, BareDo returns *RateLimitError immediately -// without making a network API call. -// -// The provided ctx must be non-nil, if it is nil an error is returned. If it is -// canceled or times out, ctx.Err() will be returned. -func (c *Client) BareDo(ctx context.Context, req *http.Request) (*Response, error) { - if ctx == nil { - return nil, errNonNilContext - } - - req = withContext(ctx, req) - - rateLimitCategory := category(req.Method, req.URL.Path) - - if bypass := ctx.Value(bypassRateLimitCheck); bypass == nil { - // If we've hit rate limit, don't make further requests before Reset time. - if err := c.checkRateLimitBeforeDo(req, rateLimitCategory); err != nil { - return &Response{ - Response: err.Response, - Rate: err.Rate, - }, err - } - // If we've hit a secondary rate limit, don't make further requests before Retry After. - if err := c.checkSecondaryRateLimitBeforeDo(req); err != nil { - return &Response{ - Response: err.Response, - }, err - } - } - - resp, err := c.client.Do(req) - if err != nil { - // If we got an error, and the context has been canceled, - // the context's error is probably more useful. - select { - case <-ctx.Done(): - return nil, ctx.Err() - default: - } - - // If the error type is *url.Error, sanitize its URL before returning. - if e, ok := err.(*url.Error); ok { - if url, err := url.Parse(e.URL); err == nil { - e.URL = sanitizeURL(url).String() - return nil, e - } - } - - return nil, err - } - - response := newResponse(resp) - - // Don't update the rate limits if this was a cached response. - // X-From-Cache is set by https://github.com/gregjones/httpcache - if response.Header.Get("X-From-Cache") == "" { - c.rateMu.Lock() - c.rateLimits[rateLimitCategory] = response.Rate - c.rateMu.Unlock() - } - - err = CheckResponse(resp) - if err != nil { - defer resp.Body.Close() - // Special case for AcceptedErrors. If an AcceptedError - // has been encountered, the response's payload will be - // added to the AcceptedError and returned. - // - // Issue #1022 - aerr, ok := err.(*AcceptedError) - if ok { - b, readErr := io.ReadAll(resp.Body) - if readErr != nil { - return response, readErr - } - - aerr.Raw = b - err = aerr - } - - // Update the secondary rate limit if we hit it. - rerr, ok := err.(*AbuseRateLimitError) - if ok && rerr.RetryAfter != nil { - c.rateMu.Lock() - c.secondaryRateLimitReset = time.Now().Add(*rerr.RetryAfter) - c.rateMu.Unlock() - } - } - return response, err -} - -// Do sends an API request and returns the API response. The API response is -// JSON decoded and stored in the value pointed to by v, or returned as an -// error if an API error has occurred. If v implements the io.Writer interface, -// the raw response body will be written to v, without attempting to first -// decode it. If v is nil, and no error hapens, the response is returned as is. 
-// If rate limit is exceeded and reset time is in the future, Do returns -// *RateLimitError immediately without making a network API call. -// -// The provided ctx must be non-nil, if it is nil an error is returned. If it -// is canceled or times out, ctx.Err() will be returned. -func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) (*Response, error) { - resp, err := c.BareDo(ctx, req) - if err != nil { - return resp, err - } - defer resp.Body.Close() - - switch v := v.(type) { - case nil: - case io.Writer: - _, err = io.Copy(v, resp.Body) - default: - decErr := json.NewDecoder(resp.Body).Decode(v) - if decErr == io.EOF { - decErr = nil // ignore EOF errors caused by empty response body - } - if decErr != nil { - err = decErr - } - } - return resp, err -} - -// checkRateLimitBeforeDo does not make any network calls, but uses existing knowledge from -// current client state in order to quickly check if *RateLimitError can be immediately returned -// from Client.Do, and if so, returns it so that Client.Do can skip making a network API call unnecessarily. -// Otherwise it returns nil, and Client.Do should proceed normally. -func (c *Client) checkRateLimitBeforeDo(req *http.Request, rateLimitCategory rateLimitCategory) *RateLimitError { - c.rateMu.Lock() - rate := c.rateLimits[rateLimitCategory] - c.rateMu.Unlock() - if !rate.Reset.Time.IsZero() && rate.Remaining == 0 && time.Now().Before(rate.Reset.Time) { - // Create a fake response. - resp := &http.Response{ - Status: http.StatusText(http.StatusForbidden), - StatusCode: http.StatusForbidden, - Request: req, - Header: make(http.Header), - Body: io.NopCloser(strings.NewReader("")), - } - return &RateLimitError{ - Rate: rate, - Response: resp, - Message: fmt.Sprintf("API rate limit of %v still exceeded until %v, not making remote request.", rate.Limit, rate.Reset.Time), - } - } - - return nil -} - -// checkSecondaryRateLimitBeforeDo does not make any network calls, but uses existing knowledge from -// current client state in order to quickly check if *AbuseRateLimitError can be immediately returned -// from Client.Do, and if so, returns it so that Client.Do can skip making a network API call unnecessarily. -// Otherwise it returns nil, and Client.Do should proceed normally. -func (c *Client) checkSecondaryRateLimitBeforeDo(req *http.Request) *AbuseRateLimitError { - c.rateMu.Lock() - secondary := c.secondaryRateLimitReset - c.rateMu.Unlock() - if !secondary.IsZero() && time.Now().Before(secondary) { - // Create a fake response. - resp := &http.Response{ - Status: http.StatusText(http.StatusForbidden), - StatusCode: http.StatusForbidden, - Request: req, - Header: make(http.Header), - Body: io.NopCloser(strings.NewReader("")), - } - - retryAfter := time.Until(secondary) - return &AbuseRateLimitError{ - Response: resp, - Message: fmt.Sprintf("API secondary rate limit exceeded until %v, not making remote request.", secondary), - RetryAfter: &retryAfter, - } - } - - return nil -} - -// compareHTTPResponse returns whether two http.Response objects are equal or not. -// Currently, only StatusCode is checked. This function is used when implementing the -// Is(error) bool interface for the custom error types in this package. -func compareHTTPResponse(r1, r2 *http.Response) bool { - if r1 == nil && r2 == nil { - return true - } - - if r1 != nil && r2 != nil { - return r1.StatusCode == r2.StatusCode - } - return false -} - -/* -An ErrorResponse reports one or more errors caused by an API request. 
- -GitHub API docs: https://docs.github.com/rest/#client-errors -*/ -type ErrorResponse struct { - Response *http.Response `json:"-"` // HTTP response that caused this error - Message string `json:"message"` // error message - Errors []Error `json:"errors"` // more detail on individual errors - // Block is only populated on certain types of errors such as code 451. - Block *ErrorBlock `json:"block,omitempty"` - // Most errors will also include a documentation_url field pointing - // to some content that might help you resolve the error, see - // https://docs.github.com/rest/#client-errors - DocumentationURL string `json:"documentation_url,omitempty"` -} - -// ErrorBlock contains a further explanation for the reason of an error. -// See https://developer.github.com/changes/2016-03-17-the-451-status-code-is-now-supported/ -// for more information. -type ErrorBlock struct { - Reason string `json:"reason,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` -} - -func (r *ErrorResponse) Error() string { - if r.Response != nil && r.Response.Request != nil { - return fmt.Sprintf("%v %v: %d %v %+v", - r.Response.Request.Method, sanitizeURL(r.Response.Request.URL), - r.Response.StatusCode, r.Message, r.Errors) - } - - if r.Response != nil { - return fmt.Sprintf("%d %v %+v", r.Response.StatusCode, r.Message, r.Errors) - } - - return fmt.Sprintf("%v %+v", r.Message, r.Errors) -} - -// Is returns whether the provided error equals this error. -func (r *ErrorResponse) Is(target error) bool { - v, ok := target.(*ErrorResponse) - if !ok { - return false - } - - if r.Message != v.Message || (r.DocumentationURL != v.DocumentationURL) || - !compareHTTPResponse(r.Response, v.Response) { - return false - } - - // Compare Errors. - if len(r.Errors) != len(v.Errors) { - return false - } - for idx := range r.Errors { - if r.Errors[idx] != v.Errors[idx] { - return false - } - } - - // Compare Block. - if (r.Block != nil && v.Block == nil) || (r.Block == nil && v.Block != nil) { - return false - } - if r.Block != nil && v.Block != nil { - if r.Block.Reason != v.Block.Reason { - return false - } - if (r.Block.CreatedAt != nil && v.Block.CreatedAt == nil) || (r.Block.CreatedAt == - nil && v.Block.CreatedAt != nil) { - return false - } - if r.Block.CreatedAt != nil && v.Block.CreatedAt != nil { - if *(r.Block.CreatedAt) != *(v.Block.CreatedAt) { - return false - } - } - } - - return true -} - -// TwoFactorAuthError occurs when using HTTP Basic Authentication for a user -// that has two-factor authentication enabled. The request can be reattempted -// by providing a one-time password in the request. -type TwoFactorAuthError ErrorResponse - -func (r *TwoFactorAuthError) Error() string { return (*ErrorResponse)(r).Error() } - -// RateLimitError occurs when GitHub returns 403 Forbidden response with a rate limit -// remaining value of 0. -type RateLimitError struct { - Rate Rate // Rate specifies last known rate limit for the client - Response *http.Response // HTTP response that caused this error - Message string `json:"message"` // error message -} - -func (r *RateLimitError) Error() string { - return fmt.Sprintf("%v %v: %d %v %v", - r.Response.Request.Method, sanitizeURL(r.Response.Request.URL), - r.Response.StatusCode, r.Message, formatRateReset(time.Until(r.Rate.Reset.Time))) -} - -// Is returns whether the provided error equals this error. 
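// A minimal sketch of handling the client's typed rate-limit errors
// (RateLimitError above, AbuseRateLimitError just below); the fallback
// wait of one minute is illustrative only.
package main

import (
	"context"
	"errors"
	"fmt"
	"log"
	"time"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil)

	_, _, err := client.Repositories.Get(ctx, "example-org", "example-repo")

	var rateErr *github.RateLimitError
	var abuseErr *github.AbuseRateLimitError
	switch {
	case errors.As(err, &rateErr):
		fmt.Println("primary rate limit hit, resets at", rateErr.Rate.Reset.Time)
	case errors.As(err, &abuseErr):
		wait := time.Minute
		if abuseErr.RetryAfter != nil {
			wait = *abuseErr.RetryAfter
		}
		fmt.Println("secondary rate limit hit, retry after", wait)
	case err != nil:
		log.Fatal(err)
	default:
		fmt.Println("repository fetched")
	}
}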
-func (r *RateLimitError) Is(target error) bool { - v, ok := target.(*RateLimitError) - if !ok { - return false - } - - return r.Rate == v.Rate && - r.Message == v.Message && - compareHTTPResponse(r.Response, v.Response) -} - -// AcceptedError occurs when GitHub returns 202 Accepted response with an -// empty body, which means a job was scheduled on the GitHub side to process -// the information needed and cache it. -// Technically, 202 Accepted is not a real error, it's just used to -// indicate that results are not ready yet, but should be available soon. -// The request can be repeated after some time. -type AcceptedError struct { - // Raw contains the response body. - Raw []byte -} - -func (*AcceptedError) Error() string { - return "job scheduled on GitHub side; try again later" -} - -// Is returns whether the provided error equals this error. -func (ae *AcceptedError) Is(target error) bool { - v, ok := target.(*AcceptedError) - if !ok { - return false - } - return bytes.Equal(ae.Raw, v.Raw) -} - -// AbuseRateLimitError occurs when GitHub returns 403 Forbidden response with the -// "documentation_url" field value equal to "https://docs.github.com/rest/overview/rate-limits-for-the-rest-api#about-secondary-rate-limits". -type AbuseRateLimitError struct { - Response *http.Response // HTTP response that caused this error - Message string `json:"message"` // error message - - // RetryAfter is provided with some abuse rate limit errors. If present, - // it is the amount of time that the client should wait before retrying. - // Otherwise, the client should try again later (after an unspecified amount of time). - RetryAfter *time.Duration -} - -func (r *AbuseRateLimitError) Error() string { - return fmt.Sprintf("%v %v: %d %v", - r.Response.Request.Method, sanitizeURL(r.Response.Request.URL), - r.Response.StatusCode, r.Message) -} - -// Is returns whether the provided error equals this error. -func (r *AbuseRateLimitError) Is(target error) bool { - v, ok := target.(*AbuseRateLimitError) - if !ok { - return false - } - - return r.Message == v.Message && - r.RetryAfter == v.RetryAfter && - compareHTTPResponse(r.Response, v.Response) -} - -// sanitizeURL redacts the client_secret parameter from the URL which may be -// exposed to the user. -func sanitizeURL(uri *url.URL) *url.URL { - if uri == nil { - return nil - } - params := uri.Query() - if len(params.Get("client_secret")) > 0 { - params.Set("client_secret", "REDACTED") - uri.RawQuery = params.Encode() - } - return uri -} - -/* -An Error reports more details on an individual error in an ErrorResponse. -These are the possible validation error codes: - - missing: - resource does not exist - missing_field: - a required field on a resource has not been set - invalid: - the formatting of a field is invalid - already_exists: - another resource has the same valid as this field - custom: - some resources return this (e.g. github.User.CreateKey()), additional - information is set in the Message field of the Error - -GitHub error responses structure are often undocumented and inconsistent. -Sometimes error is just a simple string (Issue #540). -In such cases, Message represents an error message as a workaround. 
- -GitHub API docs: https://docs.github.com/rest/#client-errors -*/ -type Error struct { - Resource string `json:"resource"` // resource on which the error occurred - Field string `json:"field"` // field on which the error occurred - Code string `json:"code"` // validation error code - Message string `json:"message"` // Message describing the error. Errors with Code == "custom" will always have this set. -} - -func (e *Error) Error() string { - return fmt.Sprintf("%v error caused by %v field on %v resource", - e.Code, e.Field, e.Resource) -} - -func (e *Error) UnmarshalJSON(data []byte) error { - type aliasError Error // avoid infinite recursion by using type alias. - if err := json.Unmarshal(data, (*aliasError)(e)); err != nil { - return json.Unmarshal(data, &e.Message) // data can be json string. - } - return nil -} - -// CheckResponse checks the API response for errors, and returns them if -// present. A response is considered an error if it has a status code outside -// the 200 range or equal to 202 Accepted. -// API error responses are expected to have response -// body, and a JSON response body that maps to ErrorResponse. -// -// The error type will be *RateLimitError for rate limit exceeded errors, -// *AcceptedError for 202 Accepted status codes, -// and *TwoFactorAuthError for two-factor authentication errors. -func CheckResponse(r *http.Response) error { - if r.StatusCode == http.StatusAccepted { - return &AcceptedError{} - } - if c := r.StatusCode; 200 <= c && c <= 299 { - return nil - } - - errorResponse := &ErrorResponse{Response: r} - data, err := io.ReadAll(r.Body) - if err == nil && data != nil { - err = json.Unmarshal(data, errorResponse) - if err != nil { - // reset the response as if this never happened - errorResponse = &ErrorResponse{Response: r} - } - } - // Re-populate error response body because GitHub error responses are often - // undocumented and inconsistent. - // Issue #1136, #540. - r.Body = io.NopCloser(bytes.NewBuffer(data)) - switch { - case r.StatusCode == http.StatusUnauthorized && strings.HasPrefix(r.Header.Get(headerOTP), "required"): - return (*TwoFactorAuthError)(errorResponse) - case r.StatusCode == http.StatusForbidden && r.Header.Get(headerRateRemaining) == "0": - return &RateLimitError{ - Rate: parseRate(r), - Response: errorResponse.Response, - Message: errorResponse.Message, - } - case r.StatusCode == http.StatusForbidden && - (strings.HasSuffix(errorResponse.DocumentationURL, "#abuse-rate-limits") || - strings.HasSuffix(errorResponse.DocumentationURL, "secondary-rate-limits")): - abuseRateLimitError := &AbuseRateLimitError{ - Response: errorResponse.Response, - Message: errorResponse.Message, - } - if retryAfter := parseSecondaryRate(r); retryAfter != nil { - abuseRateLimitError.RetryAfter = retryAfter - } - return abuseRateLimitError - default: - return errorResponse - } -} - -// parseBoolResponse determines the boolean result from a GitHub API response. -// Several GitHub API methods return boolean responses indicated by the HTTP -// status code in the response (true indicated by a 204, false indicated by a -// 404). This helper function will determine that result and hide the 404 -// error if present. Any other error will be returned through as-is. -func parseBoolResponse(err error) (bool, error) { - if err == nil { - return true, nil - } - - if err, ok := err.(*ErrorResponse); ok && err.Response.StatusCode == http.StatusNotFound { - // Simply false. In this one case, we do not pass the error through. 
- return false, nil - } - - // some other real error occurred - return false, err -} - -type rateLimitCategory uint8 - -const ( - coreCategory rateLimitCategory = iota - searchCategory - graphqlCategory - integrationManifestCategory - sourceImportCategory - codeScanningUploadCategory - actionsRunnerRegistrationCategory - scimCategory - - categories // An array of this length will be able to contain all rate limit categories. -) - -// category returns the rate limit category of the endpoint, determined by HTTP method and Request.URL.Path. -func category(method, path string) rateLimitCategory { - switch { - // https://docs.github.com/rest/rate-limit#about-rate-limits - default: - // NOTE: coreCategory is returned for actionsRunnerRegistrationCategory too, - // because no API found for this category. - return coreCategory - case strings.HasPrefix(path, "/search/"): - return searchCategory - case path == "/graphql": - return graphqlCategory - case strings.HasPrefix(path, "/app-manifests/") && - strings.HasSuffix(path, "/conversions") && - method == http.MethodPost: - return integrationManifestCategory - - // https://docs.github.com/rest/migrations/source-imports#start-an-import - case strings.HasPrefix(path, "/repos/") && - strings.HasSuffix(path, "/import") && - method == http.MethodPut: - return sourceImportCategory - - // https://docs.github.com/rest/code-scanning#upload-an-analysis-as-sarif-data - case strings.HasSuffix(path, "/code-scanning/sarifs"): - return codeScanningUploadCategory - - // https://docs.github.com/enterprise-cloud@latest/rest/scim - case strings.HasPrefix(path, "/scim/"): - return scimCategory - } -} - -// RateLimits returns the rate limits for the current client. -// -// Deprecated: Use RateLimitService.Get instead. -func (c *Client) RateLimits(ctx context.Context) (*RateLimits, *Response, error) { - return c.RateLimit.Get(ctx) -} - -func setCredentialsAsHeaders(req *http.Request, id, secret string) *http.Request { - // To set extra headers, we must make a copy of the Request so - // that we don't modify the Request we were given. This is required by the - // specification of http.RoundTripper. - // - // Since we are going to modify only req.Header here, we only need a deep copy - // of req.Header. - convertedRequest := new(http.Request) - *convertedRequest = *req - convertedRequest.Header = make(http.Header, len(req.Header)) - - for k, s := range req.Header { - convertedRequest.Header[k] = append([]string(nil), s...) - } - convertedRequest.SetBasicAuth(id, secret) - return convertedRequest -} - -/* -UnauthenticatedRateLimitedTransport allows you to make unauthenticated calls -that need to use a higher rate limit associated with your OAuth application. - - t := &github.UnauthenticatedRateLimitedTransport{ - ClientID: "your app's client ID", - ClientSecret: "your app's client secret", - } - client := github.NewClient(t.Client()) - -This will add the client id and secret as a base64-encoded string in the format -ClientID:ClientSecret and apply it as an "Authorization": "Basic" header. - -See https://docs.github.com/rest/#unauthenticated-rate-limited-requests for -more information. -*/ -type UnauthenticatedRateLimitedTransport struct { - // ClientID is the GitHub OAuth client ID of the current application, which - // can be found by selecting its entry in the list at - // https://github.com/settings/applications. - ClientID string - - // ClientSecret is the GitHub OAuth client secret of the current - // application. 
- ClientSecret string - - // Transport is the underlying HTTP transport to use when making requests. - // It will default to http.DefaultTransport if nil. - Transport http.RoundTripper -} - -// RoundTrip implements the RoundTripper interface. -func (t *UnauthenticatedRateLimitedTransport) RoundTrip(req *http.Request) (*http.Response, error) { - if t.ClientID == "" { - return nil, errors.New("t.ClientID is empty") - } - if t.ClientSecret == "" { - return nil, errors.New("t.ClientSecret is empty") - } - - req2 := setCredentialsAsHeaders(req, t.ClientID, t.ClientSecret) - // Make the HTTP request. - return t.transport().RoundTrip(req2) -} - -// Client returns an *http.Client that makes requests which are subject to the -// rate limit of your OAuth application. -func (t *UnauthenticatedRateLimitedTransport) Client() *http.Client { - return &http.Client{Transport: t} -} - -func (t *UnauthenticatedRateLimitedTransport) transport() http.RoundTripper { - if t.Transport != nil { - return t.Transport - } - return http.DefaultTransport -} - -// BasicAuthTransport is an http.RoundTripper that authenticates all requests -// using HTTP Basic Authentication with the provided username and password. It -// additionally supports users who have two-factor authentication enabled on -// their GitHub account. -type BasicAuthTransport struct { - Username string // GitHub username - Password string // GitHub password - OTP string // one-time password for users with two-factor auth enabled - - // Transport is the underlying HTTP transport to use when making requests. - // It will default to http.DefaultTransport if nil. - Transport http.RoundTripper -} - -// RoundTrip implements the RoundTripper interface. -func (t *BasicAuthTransport) RoundTrip(req *http.Request) (*http.Response, error) { - req2 := setCredentialsAsHeaders(req, t.Username, t.Password) - if t.OTP != "" { - req2.Header.Set(headerOTP, t.OTP) - } - return t.transport().RoundTrip(req2) -} - -// Client returns an *http.Client that makes requests that are authenticated -// using HTTP Basic Authentication. -func (t *BasicAuthTransport) Client() *http.Client { - return &http.Client{Transport: t} -} - -func (t *BasicAuthTransport) transport() http.RoundTripper { - if t.Transport != nil { - return t.Transport - } - return http.DefaultTransport -} - -// formatRateReset formats d to look like "[rate reset in 2s]" or -// "[rate reset in 87m02s]" for the positive durations. And like "[rate limit was reset 87m02s ago]" -// for the negative cases. -func formatRateReset(d time.Duration) string { - isNegative := d < 0 - if isNegative { - d *= -1 - } - secondsTotal := int(0.5 + d.Seconds()) - minutes := secondsTotal / 60 - seconds := secondsTotal - minutes*60 - - var timeString string - if minutes > 0 { - timeString = fmt.Sprintf("%dm%02ds", minutes, seconds) - } else { - timeString = fmt.Sprintf("%ds", seconds) - } - - if isNegative { - return fmt.Sprintf("[rate limit was reset %v ago]", timeString) - } - return fmt.Sprintf("[rate reset in %v]", timeString) -} - -// When using roundTripWithOptionalFollowRedirect, note that it -// is the responsibility of the caller to close the response body. -func (c *Client) roundTripWithOptionalFollowRedirect(ctx context.Context, u string, maxRedirects int, opts ...RequestOption) (*http.Response, error) { - req, err := c.NewRequest("GET", u, nil, opts...) 
- if err != nil { - return nil, err - } - - var resp *http.Response - // Use http.DefaultTransport if no custom Transport is configured - req = withContext(ctx, req) - if c.client.Transport == nil { - resp, err = http.DefaultTransport.RoundTrip(req) - } else { - resp, err = c.client.Transport.RoundTrip(req) - } - if err != nil { - return nil, err - } - - // If redirect response is returned, follow it - if maxRedirects > 0 && resp.StatusCode == http.StatusMovedPermanently { - _ = resp.Body.Close() - u = resp.Header.Get("Location") - resp, err = c.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects-1, opts...) - } - return resp, err -} - -// Bool is a helper routine that allocates a new bool value -// to store v and returns a pointer to it. -func Bool(v bool) *bool { return &v } - -// Int is a helper routine that allocates a new int value -// to store v and returns a pointer to it. -func Int(v int) *int { return &v } - -// Int64 is a helper routine that allocates a new int64 value -// to store v and returns a pointer to it. -func Int64(v int64) *int64 { return &v } - -// String is a helper routine that allocates a new string value -// to store v and returns a pointer to it. -func String(v string) *string { return &v } - -// roundTripperFunc creates a RoundTripper (transport) -type roundTripperFunc func(*http.Request) (*http.Response, error) - -func (fn roundTripperFunc) RoundTrip(r *http.Request) (*http.Response, error) { - return fn(r) -} diff --git a/vendor/github.com/google/go-github/v57/github/interactions_orgs.go b/vendor/github.com/google/go-github/v57/github/interactions_orgs.go deleted file mode 100644 index f0ba0b15..00000000 --- a/vendor/github.com/google/go-github/v57/github/interactions_orgs.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2019 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetRestrictionsForOrg fetches the interaction restrictions for an organization. -// -// GitHub API docs: https://docs.github.com/rest/interactions/orgs#get-interaction-restrictions-for-an-organization -// -//meta:operation GET /orgs/{org}/interaction-limits -func (s *InteractionsService) GetRestrictionsForOrg(ctx context.Context, organization string) (*InteractionRestriction, *Response, error) { - u := fmt.Sprintf("orgs/%v/interaction-limits", organization) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - organizationInteractions := new(InteractionRestriction) - - resp, err := s.client.Do(ctx, req, organizationInteractions) - if err != nil { - return nil, resp, err - } - - return organizationInteractions, resp, nil -} - -// UpdateRestrictionsForOrg adds or updates the interaction restrictions for an organization. -// -// limit specifies the group of GitHub users who can comment, open issues, or create pull requests -// in public repositories for the given organization. -// Possible values are: "existing_users", "contributors_only", "collaborators_only". 
-// -// GitHub API docs: https://docs.github.com/rest/interactions/orgs#set-interaction-restrictions-for-an-organization -// -//meta:operation PUT /orgs/{org}/interaction-limits -func (s *InteractionsService) UpdateRestrictionsForOrg(ctx context.Context, organization, limit string) (*InteractionRestriction, *Response, error) { - u := fmt.Sprintf("orgs/%v/interaction-limits", organization) - - interaction := &InteractionRestriction{Limit: String(limit)} - - req, err := s.client.NewRequest("PUT", u, interaction) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - organizationInteractions := new(InteractionRestriction) - - resp, err := s.client.Do(ctx, req, organizationInteractions) - if err != nil { - return nil, resp, err - } - - return organizationInteractions, resp, nil -} - -// RemoveRestrictionsFromOrg removes the interaction restrictions for an organization. -// -// GitHub API docs: https://docs.github.com/rest/interactions/orgs#remove-interaction-restrictions-for-an-organization -// -//meta:operation DELETE /orgs/{org}/interaction-limits -func (s *InteractionsService) RemoveRestrictionsFromOrg(ctx context.Context, organization string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/interaction-limits", organization) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/interactions_repos.go b/vendor/github.com/google/go-github/v57/github/interactions_repos.go deleted file mode 100644 index 9c044bad..00000000 --- a/vendor/github.com/google/go-github/v57/github/interactions_repos.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetRestrictionsForRepo fetches the interaction restrictions for a repository. -// -// GitHub API docs: https://docs.github.com/rest/interactions/repos#get-interaction-restrictions-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/interaction-limits -func (s *InteractionsService) GetRestrictionsForRepo(ctx context.Context, owner, repo string) (*InteractionRestriction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - repositoryInteractions := new(InteractionRestriction) - - resp, err := s.client.Do(ctx, req, repositoryInteractions) - if err != nil { - return nil, resp, err - } - - return repositoryInteractions, resp, nil -} - -// UpdateRestrictionsForRepo adds or updates the interaction restrictions for a repository. -// -// limit specifies the group of GitHub users who can comment, open issues, or create pull requests -// for the given repository. -// Possible values are: "existing_users", "contributors_only", "collaborators_only". 
-// -// GitHub API docs: https://docs.github.com/rest/interactions/repos#set-interaction-restrictions-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/interaction-limits -func (s *InteractionsService) UpdateRestrictionsForRepo(ctx context.Context, owner, repo, limit string) (*InteractionRestriction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) - - interaction := &InteractionRestriction{Limit: String(limit)} - - req, err := s.client.NewRequest("PUT", u, interaction) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - repositoryInteractions := new(InteractionRestriction) - - resp, err := s.client.Do(ctx, req, repositoryInteractions) - if err != nil { - return nil, resp, err - } - - return repositoryInteractions, resp, nil -} - -// RemoveRestrictionsFromRepo removes the interaction restrictions for a repository. -// -// GitHub API docs: https://docs.github.com/rest/interactions/repos#remove-interaction-restrictions-for-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/interaction-limits -func (s *InteractionsService) RemoveRestrictionsFromRepo(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/issues.go b/vendor/github.com/google/go-github/v57/github/issues.go deleted file mode 100644 index 1c07fef8..00000000 --- a/vendor/github.com/google/go-github/v57/github/issues.go +++ /dev/null @@ -1,382 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "time" -) - -// IssuesService handles communication with the issue related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/issues/ -type IssuesService service - -// Issue represents a GitHub issue on a repository. -// -// Note: As far as the GitHub API is concerned, every pull request is an issue, -// but not every issue is a pull request. Some endpoints, events, and webhooks -// may also return pull requests via this struct. If PullRequestLinks is nil, -// this is an issue, and if PullRequestLinks is not nil, this is a pull request. -// The IsPullRequest helper method can be used to check that. -type Issue struct { - ID *int64 `json:"id,omitempty"` - Number *int `json:"number,omitempty"` - State *string `json:"state,omitempty"` - // StateReason can be one of: "completed", "not_planned", "reopened". 
- StateReason *string `json:"state_reason,omitempty"` - Locked *bool `json:"locked,omitempty"` - Title *string `json:"title,omitempty"` - Body *string `json:"body,omitempty"` - AuthorAssociation *string `json:"author_association,omitempty"` - User *User `json:"user,omitempty"` - Labels []*Label `json:"labels,omitempty"` - Assignee *User `json:"assignee,omitempty"` - Comments *int `json:"comments,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ClosedBy *User `json:"closed_by,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - LabelsURL *string `json:"labels_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - PullRequestLinks *PullRequestLinks `json:"pull_request,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` - Assignees []*User `json:"assignees,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Draft *bool `json:"draft,omitempty"` - - // TextMatches is only populated from search results that request text matches - // See: search.go and https://docs.github.com/rest/search/#text-match-metadata - TextMatches []*TextMatch `json:"text_matches,omitempty"` - - // ActiveLockReason is populated only when LockReason is provided while locking the issue. - // Possible values are: "off-topic", "too heated", "resolved", and "spam". - ActiveLockReason *string `json:"active_lock_reason,omitempty"` -} - -func (i Issue) String() string { - return Stringify(i) -} - -// IsPullRequest reports whether the issue is also a pull request. It uses the -// method recommended by GitHub's API documentation, which is to check whether -// PullRequestLinks is non-nil. -func (i Issue) IsPullRequest() bool { - return i.PullRequestLinks != nil -} - -// IssueRequest represents a request to create/edit an issue. -// It is separate from Issue above because otherwise Labels -// and Assignee fail to serialize to the correct JSON. -type IssueRequest struct { - Title *string `json:"title,omitempty"` - Body *string `json:"body,omitempty"` - Labels *[]string `json:"labels,omitempty"` - Assignee *string `json:"assignee,omitempty"` - State *string `json:"state,omitempty"` - // StateReason can be 'completed' or 'not_planned'. - StateReason *string `json:"state_reason,omitempty"` - Milestone *int `json:"milestone,omitempty"` - Assignees *[]string `json:"assignees,omitempty"` -} - -// IssueListOptions specifies the optional parameters to the IssuesService.List -// and IssuesService.ListByOrg methods. -type IssueListOptions struct { - // Filter specifies which issues to list. Possible values are: assigned, - // created, mentioned, subscribed, all. Default is "assigned". - Filter string `url:"filter,omitempty"` - - // State filters issues based on their state. Possible values are: open, - // closed, all. Default is "open". - State string `url:"state,omitempty"` - - // Labels filters issues based on their label. - Labels []string `url:"labels,comma,omitempty"` - - // Sort specifies how to sort issues. Possible values are: created, updated, - // and comments. Default value is "created". - Sort string `url:"sort,omitempty"` - - // Direction in which to sort issues. Possible values are: asc, desc. - // Default is "desc". 
- Direction string `url:"direction,omitempty"` - - // Since filters issues by time. - Since time.Time `url:"since,omitempty"` - - ListOptions -} - -// PullRequestLinks object is added to the Issue object when it's an issue included -// in the IssueCommentEvent webhook payload, if the webhook is fired by a comment on a PR. -type PullRequestLinks struct { - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - DiffURL *string `json:"diff_url,omitempty"` - PatchURL *string `json:"patch_url,omitempty"` -} - -// List the issues for the authenticated user. If all is true, list issues -// across all the user's visible repositories including owned, member, and -// organization repositories; if false, list only owned and member -// repositories. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#list-issues-assigned-to-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/issues/issues#list-user-account-issues-assigned-to-the-authenticated-user -// -//meta:operation GET /issues -//meta:operation GET /user/issues -func (s *IssuesService) List(ctx context.Context, all bool, opts *IssueListOptions) ([]*Issue, *Response, error) { - var u string - if all { - u = "issues" - } else { - u = "user/issues" - } - return s.listIssues(ctx, u, opts) -} - -// ListByOrg fetches the issues in the specified organization for the -// authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#list-organization-issues-assigned-to-the-authenticated-user -// -//meta:operation GET /orgs/{org}/issues -func (s *IssuesService) ListByOrg(ctx context.Context, org string, opts *IssueListOptions) ([]*Issue, *Response, error) { - u := fmt.Sprintf("orgs/%v/issues", org) - return s.listIssues(ctx, u, opts) -} - -func (s *IssuesService) listIssues(ctx context.Context, u string, opts *IssueListOptions) ([]*Issue, *Response, error) { - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var issues []*Issue - resp, err := s.client.Do(ctx, req, &issues) - if err != nil { - return nil, resp, err - } - - return issues, resp, nil -} - -// IssueListByRepoOptions specifies the optional parameters to the -// IssuesService.ListByRepo method. -type IssueListByRepoOptions struct { - // Milestone limits issues for the specified milestone. Possible values are - // a milestone number, "none" for issues with no milestone, "*" for issues - // with any milestone. - Milestone string `url:"milestone,omitempty"` - - // State filters issues based on their state. Possible values are: open, - // closed, all. Default is "open". - State string `url:"state,omitempty"` - - // Assignee filters issues based on their assignee. Possible values are a - // user name, "none" for issues that are not assigned, "*" for issues with - // any assigned user. - Assignee string `url:"assignee,omitempty"` - - // Creator filters issues based on their creator. - Creator string `url:"creator,omitempty"` - - // Mentioned filters issues to those mentioned a specific user. - Mentioned string `url:"mentioned,omitempty"` - - // Labels filters issues based on their label. - Labels []string `url:"labels,omitempty,comma"` - - // Sort specifies how to sort issues. Possible values are: created, updated, - // and comments. Default value is "created". 
- Sort string `url:"sort,omitempty"` - - // Direction in which to sort issues. Possible values are: asc, desc. - // Default is "desc". - Direction string `url:"direction,omitempty"` - - // Since filters issues by time. - Since time.Time `url:"since,omitempty"` - - ListOptions -} - -// ListByRepo lists the issues for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#list-repository-issues -// -//meta:operation GET /repos/{owner}/{repo}/issues -func (s *IssuesService) ListByRepo(ctx context.Context, owner string, repo string, opts *IssueListByRepoOptions) ([]*Issue, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var issues []*Issue - resp, err := s.client.Do(ctx, req, &issues) - if err != nil { - return nil, resp, err - } - - return issues, resp, nil -} - -// Get a single issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#get-an-issue -// -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number} -func (s *IssuesService) Get(ctx context.Context, owner string, repo string, number int) (*Issue, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d", owner, repo, number) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - issue := new(Issue) - resp, err := s.client.Do(ctx, req, issue) - if err != nil { - return nil, resp, err - } - - return issue, resp, nil -} - -// Create a new issue on the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#create-an-issue -// -//meta:operation POST /repos/{owner}/{repo}/issues -func (s *IssuesService) Create(ctx context.Context, owner string, repo string, issue *IssueRequest) (*Issue, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues", owner, repo) - req, err := s.client.NewRequest("POST", u, issue) - if err != nil { - return nil, nil, err - } - - i := new(Issue) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// Edit (update) an issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#update-an-issue -// -//meta:operation PATCH /repos/{owner}/{repo}/issues/{issue_number} -func (s *IssuesService) Edit(ctx context.Context, owner string, repo string, number int, issue *IssueRequest) (*Issue, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d", owner, repo, number) - req, err := s.client.NewRequest("PATCH", u, issue) - if err != nil { - return nil, nil, err - } - - i := new(Issue) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// RemoveMilestone removes a milestone from an issue. -// -// This is a helper method to explicitly update an issue with a `null` milestone, thereby removing it. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/issues#update-an-issue -// -//meta:operation PATCH /repos/{owner}/{repo}/issues/{issue_number} -func (s *IssuesService) RemoveMilestone(ctx context.Context, owner, repo string, issueNumber int) (*Issue, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%v", owner, repo, issueNumber) - req, err := s.client.NewRequest("PATCH", u, &struct { - Milestone *Milestone `json:"milestone"` - }{}) - if err != nil { - return nil, nil, err - } - - i := new(Issue) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// LockIssueOptions specifies the optional parameters to the -// IssuesService.Lock method. -type LockIssueOptions struct { - // LockReason specifies the reason to lock this issue. - // Providing a lock reason can help make it clearer to contributors why an issue - // was locked. Possible values are: "off-topic", "too heated", "resolved", and "spam". - LockReason string `json:"lock_reason,omitempty"` -} - -// Lock an issue's conversation. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#lock-an-issue -// -//meta:operation PUT /repos/{owner}/{repo}/issues/{issue_number}/lock -func (s *IssuesService) Lock(ctx context.Context, owner string, repo string, number int, opts *LockIssueOptions) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/lock", owner, repo, number) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unlock an issue's conversation. -// -// GitHub API docs: https://docs.github.com/rest/issues/issues#unlock-an-issue -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock -func (s *IssuesService) Unlock(ctx context.Context, owner string, repo string, number int) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/lock", owner, repo, number) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/issues_events.go b/vendor/github.com/google/go-github/v57/github/issues_events.go deleted file mode 100644 index 23a16bcd..00000000 --- a/vendor/github.com/google/go-github/v57/github/issues_events.go +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// IssueEvent represents an event that occurred around an Issue or Pull Request. -type IssueEvent struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - - // The User that generated this event. - Actor *User `json:"actor,omitempty"` - - // Event identifies the actual type of Event that occurred. Possible - // values are: - // - // closed - // The Actor closed the issue. - // If the issue was closed by commit message, CommitID holds the SHA1 hash of the commit. - // - // merged - // The Actor merged into master a branch containing a commit mentioning the issue. - // CommitID holds the SHA1 of the merge commit. - // - // referenced - // The Actor committed to master a commit mentioning the issue in its commit message. - // CommitID holds the SHA1 of the commit. - // - // reopened, unlocked - // The Actor did that to the issue. - // - // locked - // The Actor locked the issue. 
- // LockReason holds the reason of locking the issue (if provided while locking). - // - // renamed - // The Actor changed the issue title from Rename.From to Rename.To. - // - // mentioned - // Someone unspecified @mentioned the Actor [sic] in an issue comment body. - // - // assigned, unassigned - // The Assigner assigned the issue to or removed the assignment from the Assignee. - // - // labeled, unlabeled - // The Actor added or removed the Label from the issue. - // - // milestoned, demilestoned - // The Actor added or removed the issue from the Milestone. - // - // subscribed, unsubscribed - // The Actor subscribed to or unsubscribed from notifications for an issue. - // - // head_ref_deleted, head_ref_restored - // The pull request’s branch was deleted or restored. - // - // review_dismissed - // The review was dismissed and `DismissedReview` will be populated below. - // - // review_requested, review_request_removed - // The Actor requested or removed the request for a review. - // RequestedReviewer or RequestedTeam, and ReviewRequester will be populated below. - // - Event *string `json:"event,omitempty"` - - CreatedAt *Timestamp `json:"created_at,omitempty"` - Issue *Issue `json:"issue,omitempty"` - - // Only present on certain events; see above. - Assignee *User `json:"assignee,omitempty"` - Assigner *User `json:"assigner,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - Label *Label `json:"label,omitempty"` - Rename *Rename `json:"rename,omitempty"` - LockReason *string `json:"lock_reason,omitempty"` - ProjectCard *ProjectCard `json:"project_card,omitempty"` - DismissedReview *DismissedReview `json:"dismissed_review,omitempty"` - RequestedReviewer *User `json:"requested_reviewer,omitempty"` - RequestedTeam *Team `json:"requested_team,omitempty"` - ReviewRequester *User `json:"review_requester,omitempty"` - PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` -} - -// DismissedReview represents details for 'dismissed_review' events. -type DismissedReview struct { - // State represents the state of the dismissed review. - // Possible values are: "commented", "approved", and "changes_requested". - State *string `json:"state,omitempty"` - ReviewID *int64 `json:"review_id,omitempty"` - DismissalMessage *string `json:"dismissal_message,omitempty"` - DismissalCommitID *string `json:"dismissal_commit_id,omitempty"` -} - -// ListIssueEvents lists events for the specified issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/events#list-issue-events -// -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number}/events -func (s *IssuesService) ListIssueEvents(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*IssueEvent, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%v/events", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeProjectCardDetailsPreview) - - var events []*IssueEvent - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListRepositoryEvents lists events for the specified repository. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/events#list-issue-events-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/issues/events -func (s *IssuesService) ListRepositoryEvents(ctx context.Context, owner, repo string, opts *ListOptions) ([]*IssueEvent, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/events", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*IssueEvent - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// GetEvent returns the specified issue event. -// -// GitHub API docs: https://docs.github.com/rest/issues/events#get-an-issue-event -// -//meta:operation GET /repos/{owner}/{repo}/issues/events/{event_id} -func (s *IssuesService) GetEvent(ctx context.Context, owner, repo string, id int64) (*IssueEvent, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/events/%v", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - event := new(IssueEvent) - resp, err := s.client.Do(ctx, req, event) - if err != nil { - return nil, resp, err - } - - return event, resp, nil -} - -// Rename contains details for 'renamed' events. -type Rename struct { - From *string `json:"from,omitempty"` - To *string `json:"to,omitempty"` -} - -func (r Rename) String() string { - return Stringify(r) -} diff --git a/vendor/github.com/google/go-github/v57/github/issues_labels.go b/vendor/github.com/google/go-github/v57/github/issues_labels.go deleted file mode 100644 index 51e7fe6a..00000000 --- a/vendor/github.com/google/go-github/v57/github/issues_labels.go +++ /dev/null @@ -1,253 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Label represents a GitHub label on an Issue -type Label struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - Color *string `json:"color,omitempty"` - Description *string `json:"description,omitempty"` - Default *bool `json:"default,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (l Label) String() string { - return Stringify(l) -} - -// ListLabels lists all labels for a repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#list-labels-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/labels -func (s *IssuesService) ListLabels(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/labels", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var labels []*Label - resp, err := s.client.Do(ctx, req, &labels) - if err != nil { - return nil, resp, err - } - - return labels, resp, nil -} - -// GetLabel gets a single label. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/labels#get-a-label -// -//meta:operation GET /repos/{owner}/{repo}/labels/{name} -func (s *IssuesService) GetLabel(ctx context.Context, owner string, repo string, name string) (*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/labels/%v", owner, repo, name) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - label := new(Label) - resp, err := s.client.Do(ctx, req, label) - if err != nil { - return nil, resp, err - } - - return label, resp, nil -} - -// CreateLabel creates a new label on the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#create-a-label -// -//meta:operation POST /repos/{owner}/{repo}/labels -func (s *IssuesService) CreateLabel(ctx context.Context, owner string, repo string, label *Label) (*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/labels", owner, repo) - req, err := s.client.NewRequest("POST", u, label) - if err != nil { - return nil, nil, err - } - - l := new(Label) - resp, err := s.client.Do(ctx, req, l) - if err != nil { - return nil, resp, err - } - - return l, resp, nil -} - -// EditLabel edits a label. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#update-a-label -// -//meta:operation PATCH /repos/{owner}/{repo}/labels/{name} -func (s *IssuesService) EditLabel(ctx context.Context, owner string, repo string, name string, label *Label) (*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/labels/%v", owner, repo, name) - req, err := s.client.NewRequest("PATCH", u, label) - if err != nil { - return nil, nil, err - } - - l := new(Label) - resp, err := s.client.Do(ctx, req, l) - if err != nil { - return nil, resp, err - } - - return l, resp, nil -} - -// DeleteLabel deletes a label. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#delete-a-label -// -//meta:operation DELETE /repos/{owner}/{repo}/labels/{name} -func (s *IssuesService) DeleteLabel(ctx context.Context, owner string, repo string, name string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/labels/%v", owner, repo, name) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListLabelsByIssue lists all labels for an issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#list-labels-for-an-issue -// -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number}/labels -func (s *IssuesService) ListLabelsByIssue(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var labels []*Label - resp, err := s.client.Do(ctx, req, &labels) - if err != nil { - return nil, resp, err - } - - return labels, resp, nil -} - -// AddLabelsToIssue adds labels to an issue. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/labels#add-labels-to-an-issue -// -//meta:operation POST /repos/{owner}/{repo}/issues/{issue_number}/labels -func (s *IssuesService) AddLabelsToIssue(ctx context.Context, owner string, repo string, number int, labels []string) ([]*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) - req, err := s.client.NewRequest("POST", u, labels) - if err != nil { - return nil, nil, err - } - - var l []*Label - resp, err := s.client.Do(ctx, req, &l) - if err != nil { - return nil, resp, err - } - - return l, resp, nil -} - -// RemoveLabelForIssue removes a label for an issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#remove-a-label-from-an-issue -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name} -func (s *IssuesService) RemoveLabelForIssue(ctx context.Context, owner string, repo string, number int, label string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/labels/%v", owner, repo, number, label) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ReplaceLabelsForIssue replaces all labels for an issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#set-labels-for-an-issue -// -//meta:operation PUT /repos/{owner}/{repo}/issues/{issue_number}/labels -func (s *IssuesService) ReplaceLabelsForIssue(ctx context.Context, owner string, repo string, number int, labels []string) ([]*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) - req, err := s.client.NewRequest("PUT", u, labels) - if err != nil { - return nil, nil, err - } - - var l []*Label - resp, err := s.client.Do(ctx, req, &l) - if err != nil { - return nil, resp, err - } - - return l, resp, nil -} - -// RemoveLabelsForIssue removes all labels for an issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/labels#remove-all-labels-from-an-issue -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels -func (s *IssuesService) RemoveLabelsForIssue(ctx context.Context, owner string, repo string, number int) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListLabelsForMilestone lists labels for every issue in a milestone. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/labels#list-labels-for-issues-in-a-milestone -// -//meta:operation GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels -func (s *IssuesService) ListLabelsForMilestone(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*Label, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones/%d/labels", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var labels []*Label - resp, err := s.client.Do(ctx, req, &labels) - if err != nil { - return nil, resp, err - } - - return labels, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/issues_timeline.go b/vendor/github.com/google/go-github/v57/github/issues_timeline.go deleted file mode 100644 index 0aa589af..00000000 --- a/vendor/github.com/google/go-github/v57/github/issues_timeline.go +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strings" -) - -// Timeline represents an event that occurred around an Issue or Pull Request. -// -// It is similar to an IssueEvent but may contain more information. -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/events/issue-event-types -type Timeline struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - CommitURL *string `json:"commit_url,omitempty"` - - // The User object that generated the event. - Actor *User `json:"actor,omitempty"` - - // The person who commented on the issue. - User *User `json:"user,omitempty"` - - // The person who authored the commit. - Author *CommitAuthor `json:"author,omitempty"` - // The person who committed the commit on behalf of the author. - Committer *CommitAuthor `json:"committer,omitempty"` - // The SHA of the commit in the pull request. - SHA *string `json:"sha,omitempty"` - // The commit message. - Message *string `json:"message,omitempty"` - // A list of parent commits. - Parents []*Commit `json:"parents,omitempty"` - - // Event identifies the actual type of Event that occurred. Possible values - // are: - // - // assigned - // The issue was assigned to the assignee. - // - // closed - // The issue was closed by the actor. When the commit_id is present, it - // identifies the commit that closed the issue using "closes / fixes #NN" - // syntax. - // - // commented - // A comment was added to the issue. - // - // committed - // A commit was added to the pull request's 'HEAD' branch. Only provided - // for pull requests. - // - // cross-referenced - // The issue was referenced from another issue. The 'source' attribute - // contains the 'id', 'actor', and 'url' of the reference's source. - // - // demilestoned - // The issue was removed from a milestone. - // - // head_ref_deleted - // The pull request's branch was deleted. - // - // head_ref_restored - // The pull request's branch was restored. - // - // labeled - // A label was added to the issue. - // - // locked - // The issue was locked by the actor. - // - // mentioned - // The actor was @mentioned in an issue body. - // - // merged - // The issue was merged by the actor. The 'commit_id' attribute is the - // SHA1 of the HEAD commit that was merged. 
- // - // milestoned - // The issue was added to a milestone. - // - // referenced - // The issue was referenced from a commit message. The 'commit_id' - // attribute is the commit SHA1 of where that happened. - // - // renamed - // The issue title was changed. - // - // reopened - // The issue was reopened by the actor. - // - // reviewed - // The pull request was reviewed. - // - // subscribed - // The actor subscribed to receive notifications for an issue. - // - // unassigned - // The assignee was unassigned from the issue. - // - // unlabeled - // A label was removed from the issue. - // - // unlocked - // The issue was unlocked by the actor. - // - // unsubscribed - // The actor unsubscribed to stop receiving notifications for an issue. - // - Event *string `json:"event,omitempty"` - - // The string SHA of a commit that referenced this Issue or Pull Request. - CommitID *string `json:"commit_id,omitempty"` - // The timestamp indicating when the event occurred. - CreatedAt *Timestamp `json:"created_at,omitempty"` - // The Label object including `name` and `color` attributes. Only provided for - // 'labeled' and 'unlabeled' events. - Label *Label `json:"label,omitempty"` - // The User object which was assigned to (or unassigned from) this Issue or - // Pull Request. Only provided for 'assigned' and 'unassigned' events. - Assignee *User `json:"assignee,omitempty"` - Assigner *User `json:"assigner,omitempty"` - - // The Milestone object including a 'title' attribute. - // Only provided for 'milestoned' and 'demilestoned' events. - Milestone *Milestone `json:"milestone,omitempty"` - // The 'id', 'actor', and 'url' for the source of a reference from another issue. - // Only provided for 'cross-referenced' events. - Source *Source `json:"source,omitempty"` - // An object containing rename details including 'from' and 'to' attributes. - // Only provided for 'renamed' events. - Rename *Rename `json:"rename,omitempty"` - ProjectCard *ProjectCard `json:"project_card,omitempty"` - // The state of a submitted review. Can be one of: 'commented', - // 'changes_requested' or 'approved'. - // Only provided for 'reviewed' events. - State *string `json:"state,omitempty"` - - // The person requested to review the pull request. - Reviewer *User `json:"requested_reviewer,omitempty"` - // RequestedTeam contains the team requested to review the pull request. - RequestedTeam *Team `json:"requested_team,omitempty"` - // The person who requested a review. - Requester *User `json:"review_requester,omitempty"` - - // The review summary text. - Body *string `json:"body,omitempty"` - SubmittedAt *Timestamp `json:"submitted_at,omitempty"` - - PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` -} - -// Source represents a reference's source. -type Source struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Actor *User `json:"actor,omitempty"` - Type *string `json:"type,omitempty"` - Issue *Issue `json:"issue,omitempty"` -} - -// ListIssueTimeline lists events for the specified issue. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/timeline#list-timeline-events-for-an-issue -// -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number}/timeline -func (s *IssuesService) ListIssueTimeline(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*Timeline, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%v/timeline", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeTimelinePreview, mediaTypeProjectCardDetailsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var events []*Timeline - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/markdown.go b/vendor/github.com/google/go-github/v57/github/markdown.go deleted file mode 100644 index fe3b3112..00000000 --- a/vendor/github.com/google/go-github/v57/github/markdown.go +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" -) - -// MarkdownService provides access to markdown-related functions in the GitHub API. -type MarkdownService service - -// MarkdownOptions specifies optional parameters to the Render method. -type MarkdownOptions struct { - // Mode identifies the rendering mode. Possible values are: - // markdown - render a document as plain Render, just like - // README files are rendered. - // - // gfm - to render a document as user-content, e.g. like user - // comments or issues are rendered. In GFM mode, hard line breaks are - // always taken into account, and issue and user mentions are linked - // accordingly. - // - // Default is "markdown". - Mode string - - // Context identifies the repository context. Only taken into account - // when rendering as "gfm". - Context string -} - -type markdownRenderRequest struct { - Text *string `json:"text,omitempty"` - Mode *string `json:"mode,omitempty"` - Context *string `json:"context,omitempty"` -} - -// Render renders an arbitrary Render document. -// -// GitHub API docs: https://docs.github.com/rest/markdown/markdown#render-a-markdown-document -// -//meta:operation POST /markdown -func (s *MarkdownService) Render(ctx context.Context, text string, opts *MarkdownOptions) (string, *Response, error) { - request := &markdownRenderRequest{Text: String(text)} - if opts != nil { - if opts.Mode != "" { - request.Mode = String(opts.Mode) - } - if opts.Context != "" { - request.Context = String(opts.Context) - } - } - - req, err := s.client.NewRequest("POST", "markdown", request) - if err != nil { - return "", nil, err - } - - buf := new(bytes.Buffer) - resp, err := s.client.Do(ctx, req, buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/messages.go b/vendor/github.com/google/go-github/v57/github/messages.go deleted file mode 100644 index 72edbd9f..00000000 --- a/vendor/github.com/google/go-github/v57/github/messages.go +++ /dev/null @@ -1,352 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file provides functions for validating payloads from GitHub Webhooks. -// GitHub API docs: https://developer.github.com/webhooks/securing/#validating-payloads-from-github - -package github - -import ( - "crypto/hmac" - "crypto/sha1" - "crypto/sha256" - "crypto/sha512" - "encoding/hex" - "encoding/json" - "errors" - "fmt" - "hash" - "io" - "mime" - "net/http" - "net/url" - "reflect" - "sort" - "strings" -) - -const ( - // sha1Prefix is the prefix used by GitHub before the HMAC hexdigest. - sha1Prefix = "sha1" - // sha256Prefix and sha512Prefix are provided for future compatibility. - sha256Prefix = "sha256" - sha512Prefix = "sha512" - // SHA1SignatureHeader is the GitHub header key used to pass the HMAC-SHA1 hexdigest. - SHA1SignatureHeader = "X-Hub-Signature" - // SHA256SignatureHeader is the GitHub header key used to pass the HMAC-SHA256 hexdigest. - SHA256SignatureHeader = "X-Hub-Signature-256" - // EventTypeHeader is the GitHub header key used to pass the event type. - EventTypeHeader = "X-Github-Event" - // DeliveryIDHeader is the GitHub header key used to pass the unique ID for the webhook event. - DeliveryIDHeader = "X-Github-Delivery" -) - -var ( - // eventTypeMapping maps webhooks types to their corresponding go-github struct types. - eventTypeMapping = map[string]interface{}{ - "branch_protection_rule": &BranchProtectionRuleEvent{}, - "check_run": &CheckRunEvent{}, - "check_suite": &CheckSuiteEvent{}, - "code_scanning_alert": &CodeScanningAlertEvent{}, - "commit_comment": &CommitCommentEvent{}, - "content_reference": &ContentReferenceEvent{}, - "create": &CreateEvent{}, - "delete": &DeleteEvent{}, - "dependabot_alert": &DependabotAlertEvent{}, - "deploy_key": &DeployKeyEvent{}, - "deployment": &DeploymentEvent{}, - "deployment_status": &DeploymentStatusEvent{}, - "deployment_protection_rule": &DeploymentProtectionRuleEvent{}, - "discussion": &DiscussionEvent{}, - "discussion_comment": &DiscussionCommentEvent{}, - "fork": &ForkEvent{}, - "github_app_authorization": &GitHubAppAuthorizationEvent{}, - "gollum": &GollumEvent{}, - "installation": &InstallationEvent{}, - "installation_repositories": &InstallationRepositoriesEvent{}, - "installation_target": &InstallationTargetEvent{}, - "issue_comment": &IssueCommentEvent{}, - "issues": &IssuesEvent{}, - "label": &LabelEvent{}, - "marketplace_purchase": &MarketplacePurchaseEvent{}, - "member": &MemberEvent{}, - "membership": &MembershipEvent{}, - "merge_group": &MergeGroupEvent{}, - "meta": &MetaEvent{}, - "milestone": &MilestoneEvent{}, - "organization": &OrganizationEvent{}, - "org_block": &OrgBlockEvent{}, - "package": &PackageEvent{}, - "page_build": &PageBuildEvent{}, - "personal_access_token_request": &PersonalAccessTokenRequestEvent{}, - "ping": &PingEvent{}, - "project": &ProjectEvent{}, - "project_card": &ProjectCardEvent{}, - "project_column": &ProjectColumnEvent{}, - "projects_v2": &ProjectV2Event{}, - "projects_v2_item": &ProjectV2ItemEvent{}, - "public": &PublicEvent{}, - "pull_request": &PullRequestEvent{}, - "pull_request_review": &PullRequestReviewEvent{}, - "pull_request_review_comment": &PullRequestReviewCommentEvent{}, - "pull_request_review_thread": &PullRequestReviewThreadEvent{}, - "pull_request_target": &PullRequestTargetEvent{}, - "push": &PushEvent{}, - "repository": &RepositoryEvent{}, - "repository_dispatch": &RepositoryDispatchEvent{}, - "repository_import": &RepositoryImportEvent{}, - 
"repository_vulnerability_alert": &RepositoryVulnerabilityAlertEvent{}, - "release": &ReleaseEvent{}, - "secret_scanning_alert": &SecretScanningAlertEvent{}, - "security_advisory": &SecurityAdvisoryEvent{}, - "security_and_analysis": &SecurityAndAnalysisEvent{}, - "star": &StarEvent{}, - "status": &StatusEvent{}, - "team": &TeamEvent{}, - "team_add": &TeamAddEvent{}, - "user": &UserEvent{}, - "watch": &WatchEvent{}, - "workflow_dispatch": &WorkflowDispatchEvent{}, - "workflow_job": &WorkflowJobEvent{}, - "workflow_run": &WorkflowRunEvent{}, - } - // forward mapping of event types to the string names of the structs - messageToTypeName = make(map[string]string, len(eventTypeMapping)) - // Inverse map of the above - typeToMessageMapping = make(map[string]string, len(eventTypeMapping)) -) - -func init() { - for k, v := range eventTypeMapping { - typename := reflect.TypeOf(v).Elem().Name() - messageToTypeName[k] = typename - typeToMessageMapping[typename] = k - } -} - -// genMAC generates the HMAC signature for a message provided the secret key -// and hashFunc. -func genMAC(message, key []byte, hashFunc func() hash.Hash) []byte { - mac := hmac.New(hashFunc, key) - mac.Write(message) - return mac.Sum(nil) -} - -// checkMAC reports whether messageMAC is a valid HMAC tag for message. -func checkMAC(message, messageMAC, key []byte, hashFunc func() hash.Hash) bool { - expectedMAC := genMAC(message, key, hashFunc) - return hmac.Equal(messageMAC, expectedMAC) -} - -// messageMAC returns the hex-decoded HMAC tag from the signature and its -// corresponding hash function. -func messageMAC(signature string) ([]byte, func() hash.Hash, error) { - if signature == "" { - return nil, nil, errors.New("missing signature") - } - sigParts := strings.SplitN(signature, "=", 2) - if len(sigParts) != 2 { - return nil, nil, fmt.Errorf("error parsing signature %q", signature) - } - - var hashFunc func() hash.Hash - switch sigParts[0] { - case sha1Prefix: - hashFunc = sha1.New - case sha256Prefix: - hashFunc = sha256.New - case sha512Prefix: - hashFunc = sha512.New - default: - return nil, nil, fmt.Errorf("unknown hash type prefix: %q", sigParts[0]) - } - - buf, err := hex.DecodeString(sigParts[1]) - if err != nil { - return nil, nil, fmt.Errorf("error decoding signature %q: %v", signature, err) - } - return buf, hashFunc, nil -} - -// ValidatePayloadFromBody validates an incoming GitHub Webhook event request body -// and returns the (JSON) payload. -// The Content-Type header of the payload can be "application/json" or "application/x-www-form-urlencoded". -// If the Content-Type is neither then an error is returned. -// secretToken is the GitHub Webhook secret token. -// If your webhook does not contain a secret token, you can pass an empty secretToken. -// Webhooks without a secret token are not secure and should be avoided. -// -// Example usage: -// -// func (s *GitHubEventMonitor) ServeHTTP(w http.ResponseWriter, r *http.Request) { -// // read signature from request -// signature := "" -// payload, err := github.ValidatePayloadFromBody(r.Header.Get("Content-Type"), r.Body, signature, s.webhookSecretKey) -// if err != nil { ... } -// // Process payload... -// } -func ValidatePayloadFromBody(contentType string, readable io.Reader, signature string, secretToken []byte) (payload []byte, err error) { - var body []byte // Raw body that GitHub uses to calculate the signature. 
- - switch contentType { - case "application/json": - var err error - if body, err = io.ReadAll(readable); err != nil { - return nil, err - } - - // If the content type is application/json, - // the JSON payload is just the original body. - payload = body - - case "application/x-www-form-urlencoded": - // payloadFormParam is the name of the form parameter that the JSON payload - // will be in if a webhook has its content type set to application/x-www-form-urlencoded. - const payloadFormParam = "payload" - - var err error - if body, err = io.ReadAll(readable); err != nil { - return nil, err - } - - // If the content type is application/x-www-form-urlencoded, - // the JSON payload will be under the "payload" form param. - form, err := url.ParseQuery(string(body)) - if err != nil { - return nil, err - } - payload = []byte(form.Get(payloadFormParam)) - - default: - return nil, fmt.Errorf("webhook request has unsupported Content-Type %q", contentType) - } - - // Validate the signature if present or if one is expected (secretToken is non-empty). - if len(secretToken) > 0 || len(signature) > 0 { - if err := ValidateSignature(signature, body, secretToken); err != nil { - return nil, err - } - } - - return payload, nil -} - -// ValidatePayload validates an incoming GitHub Webhook event request -// and returns the (JSON) payload. -// The Content-Type header of the payload can be "application/json" or "application/x-www-form-urlencoded". -// If the Content-Type is neither then an error is returned. -// secretToken is the GitHub Webhook secret token. -// If your webhook does not contain a secret token, you can pass nil or an empty slice. -// This is intended for local development purposes only and all webhooks should ideally set up a secret token. -// -// Example usage: -// -// func (s *GitHubEventMonitor) ServeHTTP(w http.ResponseWriter, r *http.Request) { -// payload, err := github.ValidatePayload(r, s.webhookSecretKey) -// if err != nil { ... } -// // Process payload... -// } -func ValidatePayload(r *http.Request, secretToken []byte) (payload []byte, err error) { - signature := r.Header.Get(SHA256SignatureHeader) - if signature == "" { - signature = r.Header.Get(SHA1SignatureHeader) - } - - contentType, _, err := mime.ParseMediaType(r.Header.Get("Content-Type")) - if err != nil { - return nil, err - } - - return ValidatePayloadFromBody(contentType, r.Body, signature, secretToken) -} - -// ValidateSignature validates the signature for the given payload. -// signature is the GitHub hash signature delivered in the X-Hub-Signature header. -// payload is the JSON payload sent by GitHub Webhooks. -// secretToken is the GitHub Webhook secret token. -// -// GitHub API docs: https://developer.github.com/webhooks/securing/#validating-payloads-from-github -func ValidateSignature(signature string, payload, secretToken []byte) error { - messageMAC, hashFunc, err := messageMAC(signature) - if err != nil { - return err - } - if !checkMAC(payload, messageMAC, secretToken, hashFunc) { - return errors.New("payload signature check failed") - } - return nil -} - -// WebHookType returns the event type of webhook request r. -// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/events/github-event-types -func WebHookType(r *http.Request) string { - return r.Header.Get(EventTypeHeader) -} - -// DeliveryID returns the unique delivery ID of webhook request r. 
-// -// GitHub API docs: https://docs.github.com/developers/webhooks-and-events/events/github-event-types -func DeliveryID(r *http.Request) string { - return r.Header.Get(DeliveryIDHeader) -} - -// ParseWebHook parses the event payload. For recognized event types, a -// value of the corresponding struct type will be returned (as returned -// by Event.ParsePayload()). An error will be returned for unrecognized event -// types. -// -// Example usage: -// -// func (s *GitHubEventMonitor) ServeHTTP(w http.ResponseWriter, r *http.Request) { -// payload, err := github.ValidatePayload(r, s.webhookSecretKey) -// if err != nil { ... } -// event, err := github.ParseWebHook(github.WebHookType(r), payload) -// if err != nil { ... } -// switch event := event.(type) { -// case *github.CommitCommentEvent: -// processCommitCommentEvent(event) -// case *github.CreateEvent: -// processCreateEvent(event) -// ... -// } -// } -func ParseWebHook(messageType string, payload []byte) (interface{}, error) { - eventType, ok := messageToTypeName[messageType] - if !ok { - return nil, fmt.Errorf("unknown X-Github-Event in message: %v", messageType) - } - - event := Event{ - Type: &eventType, - RawPayload: (*json.RawMessage)(&payload), - } - return event.ParsePayload() -} - -// MessageTypes returns a sorted list of all the known GitHub event type strings -// supported by go-github. -func MessageTypes() []string { - types := make([]string, 0, len(eventTypeMapping)) - for t := range eventTypeMapping { - types = append(types, t) - } - sort.Strings(types) - return types -} - -// EventForType returns an empty struct matching the specified GitHub event type. -// If messageType does not match any known event types, it returns nil. -func EventForType(messageType string) interface{} { - prototype := eventTypeMapping[messageType] - if prototype == nil { - return nil - } - // return a _copy_ of the pointed-to-object. Unfortunately, for this we - // need to use reflection. If we store the actual objects in the map, - // we still need to use reflection to convert from `any` to the actual - // type, so this was deemed the lesser of two evils. (#2865) - return reflect.New(reflect.TypeOf(prototype).Elem()).Interface() -} diff --git a/vendor/github.com/google/go-github/v57/github/meta.go b/vendor/github.com/google/go-github/v57/github/meta.go deleted file mode 100644 index 1da8fcf1..00000000 --- a/vendor/github.com/google/go-github/v57/github/meta.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" - "fmt" - "net/url" -) - -// MetaService provides access to functions in the GitHub API that GitHub categorizes as "meta". -type MetaService service - -// APIMeta represents metadata about the GitHub API. -type APIMeta struct { - // An Array of IP addresses in CIDR format specifying the addresses - // that incoming service hooks will originate from on GitHub.com. - Hooks []string `json:"hooks,omitempty"` - - // An Array of IP addresses in CIDR format specifying the Git servers - // for GitHub.com. - Git []string `json:"git,omitempty"` - - // Whether authentication with username and password is supported. - // (GitHub Enterprise instances using CAS or OAuth for authentication - // will return false. 
Features like Basic Authentication with a - // username and password, sudo mode, and two-factor authentication are - // not supported on these servers.) - VerifiablePasswordAuthentication *bool `json:"verifiable_password_authentication,omitempty"` - - // An array of IP addresses in CIDR format specifying the addresses - // which serve GitHub Packages. - Packages []string `json:"packages,omitempty"` - - // An array of IP addresses in CIDR format specifying the addresses - // which serve GitHub Pages websites. - Pages []string `json:"pages,omitempty"` - - // An Array of IP addresses specifying the addresses that source imports - // will originate from on GitHub.com. - Importer []string `json:"importer,omitempty"` - - // An array of IP addresses in CIDR format specifying the IP addresses - // GitHub Actions will originate from. - Actions []string `json:"actions,omitempty"` - - // An array of IP addresses in CIDR format specifying the IP addresses - // Dependabot will originate from. - Dependabot []string `json:"dependabot,omitempty"` - - // A map of algorithms to SSH key fingerprints. - SSHKeyFingerprints map[string]string `json:"ssh_key_fingerprints,omitempty"` - - // An array of SSH keys. - SSHKeys []string `json:"ssh_keys,omitempty"` - - // An array of IP addresses in CIDR format specifying the addresses - // which serve GitHub websites. - Web []string `json:"web,omitempty"` - - // An array of IP addresses in CIDR format specifying the addresses - // which serve GitHub APIs. - API []string `json:"api,omitempty"` -} - -// Get returns information about GitHub.com, the service. Or, if you access -// this endpoint on your organization’s GitHub Enterprise installation, this -// endpoint provides information about that installation. -// -// GitHub API docs: https://docs.github.com/rest/meta/meta#get-github-meta-information -// -//meta:operation GET /meta -func (s *MetaService) Get(ctx context.Context) (*APIMeta, *Response, error) { - req, err := s.client.NewRequest("GET", "meta", nil) - if err != nil { - return nil, nil, err - } - - meta := new(APIMeta) - resp, err := s.client.Do(ctx, req, meta) - if err != nil { - return nil, resp, err - } - - return meta, resp, nil -} - -// APIMeta returns information about GitHub.com. -// -// Deprecated: Use MetaService.Get instead. -func (c *Client) APIMeta(ctx context.Context) (*APIMeta, *Response, error) { - return c.Meta.Get(ctx) -} - -// Octocat returns an ASCII art octocat with the specified message in a speech -// bubble. If message is empty, a random zen phrase is used. -// -// GitHub API docs: https://docs.github.com/rest/meta/meta#get-octocat -// -//meta:operation GET /octocat -func (s *MetaService) Octocat(ctx context.Context, message string) (string, *Response, error) { - u := "octocat" - if message != "" { - u = fmt.Sprintf("%s?s=%s", u, url.QueryEscape(message)) - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", nil, err - } - - buf := new(bytes.Buffer) - resp, err := s.client.Do(ctx, req, buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// Octocat returns an ASCII art octocat with the specified message in a speech -// bubble. If message is empty, a random zen phrase is used. -// -// Deprecated: Use MetaService.Octocat instead. -func (c *Client) Octocat(ctx context.Context, message string) (string, *Response, error) { - return c.Meta.Octocat(ctx, message) -} - -// Zen returns a random line from The Zen of GitHub. 
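The meta endpoints above are typically consumed with an unauthenticated client; the following is a rough sketch, and printing the hook/Actions CIDR lists is purely illustrative.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // /meta does not require authentication on github.com

	meta, _, err := client.Meta.Get(ctx)
	if err != nil {
		log.Fatal(err)
	}
	// Hooks and Actions are CIDR ranges that webhook deliveries and Actions
	// traffic originate from; useful when building firewall allow-lists.
	fmt.Println("hook sources:", meta.Hooks)
	fmt.Println("actions sources:", meta.Actions)

	// Zen is a trivial endpoint, handy as a connectivity check.
	zen, _, err := client.Meta.Zen(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(zen)
}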
-// -// See also: http://warpspire.com/posts/taste/ -// -// GitHub API docs: https://docs.github.com/rest/meta/meta#get-the-zen-of-github -// -//meta:operation GET /zen -func (s *MetaService) Zen(ctx context.Context) (string, *Response, error) { - req, err := s.client.NewRequest("GET", "zen", nil) - if err != nil { - return "", nil, err - } - - buf := new(bytes.Buffer) - resp, err := s.client.Do(ctx, req, buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// Zen returns a random line from The Zen of GitHub. -// -// Deprecated: Use MetaService.Zen instead. -func (c *Client) Zen(ctx context.Context) (string, *Response, error) { - return c.Meta.Zen(ctx) -} diff --git a/vendor/github.com/google/go-github/v57/github/migrations.go b/vendor/github.com/google/go-github/v57/github/migrations.go deleted file mode 100644 index 5af88170..00000000 --- a/vendor/github.com/google/go-github/v57/github/migrations.go +++ /dev/null @@ -1,240 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "errors" - "fmt" - "net/http" - "strings" -) - -// MigrationService provides access to the migration related functions -// in the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/migration/ -type MigrationService service - -// Migration represents a GitHub migration (archival). -type Migration struct { - ID *int64 `json:"id,omitempty"` - GUID *string `json:"guid,omitempty"` - // State is the current state of a migration. - // Possible values are: - // "pending" which means the migration hasn't started yet, - // "exporting" which means the migration is in progress, - // "exported" which means the migration finished successfully, or - // "failed" which means the migration failed. - State *string `json:"state,omitempty"` - // LockRepositories indicates whether repositories are locked (to prevent - // manipulation) while migrating data. - LockRepositories *bool `json:"lock_repositories,omitempty"` - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` - URL *string `json:"url,omitempty"` - CreatedAt *string `json:"created_at,omitempty"` - UpdatedAt *string `json:"updated_at,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -func (m Migration) String() string { - return Stringify(m) -} - -// MigrationOptions specifies the optional parameters to Migration methods. -type MigrationOptions struct { - // LockRepositories indicates whether repositories should be locked (to prevent - // manipulation) while migrating data. - LockRepositories bool - - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments bool -} - -// startMigration represents the body of a StartMigration request. -type startMigration struct { - // Repositories is a slice of repository names to migrate. - Repositories []string `json:"repositories,omitempty"` - - // LockRepositories indicates whether repositories should be locked (to prevent - // manipulation) while migrating data. 
- LockRepositories *bool `json:"lock_repositories,omitempty"` - - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` -} - -// StartMigration starts the generation of a migration archive. -// repos is a slice of repository names to migrate. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#start-an-organization-migration -// -//meta:operation POST /orgs/{org}/migrations -func (s *MigrationService) StartMigration(ctx context.Context, org string, repos []string, opts *MigrationOptions) (*Migration, *Response, error) { - u := fmt.Sprintf("orgs/%v/migrations", org) - - body := &startMigration{Repositories: repos} - if opts != nil { - body.LockRepositories = Bool(opts.LockRepositories) - body.ExcludeAttachments = Bool(opts.ExcludeAttachments) - } - - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - m := &Migration{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// ListMigrations lists the most recent migrations. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#list-organization-migrations -// -//meta:operation GET /orgs/{org}/migrations -func (s *MigrationService) ListMigrations(ctx context.Context, org string, opts *ListOptions) ([]*Migration, *Response, error) { - u := fmt.Sprintf("orgs/%v/migrations", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - var m []*Migration - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// MigrationStatus gets the status of a specific migration archive. -// id is the migration ID. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#get-an-organization-migration-status -// -//meta:operation GET /orgs/{org}/migrations/{migration_id} -func (s *MigrationService) MigrationStatus(ctx context.Context, org string, id int64) (*Migration, *Response, error) { - u := fmt.Sprintf("orgs/%v/migrations/%v", org, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - m := &Migration{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// MigrationArchiveURL fetches a migration archive URL. -// id is the migration ID. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#download-an-organization-migration-archive -// -//meta:operation GET /orgs/{org}/migrations/{migration_id}/archive -func (s *MigrationService) MigrationArchiveURL(ctx context.Context, org string, id int64) (url string, err error) { - u := fmt.Sprintf("orgs/%v/migrations/%v/archive", org, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", err - } - - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeMigrationsPreview) - - s.client.clientMu.Lock() - defer s.client.clientMu.Unlock() - - // Disable the redirect mechanism because AWS fails if the GitHub auth token is provided. - var loc string - saveRedirect := s.client.client.CheckRedirect - s.client.client.CheckRedirect = func(req *http.Request, via []*http.Request) error { - loc = req.URL.String() - return errors.New("disable redirect") - } - defer func() { s.client.client.CheckRedirect = saveRedirect }() - - _, err = s.client.Do(ctx, req, nil) // expect error from disable redirect - if err == nil { - return "", errors.New("expected redirect, none provided") - } - if !strings.Contains(err.Error(), "disable redirect") { - return "", err - } - return loc, nil -} - -// DeleteMigration deletes a previous migration archive. -// id is the migration ID. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#delete-an-organization-migration-archive -// -//meta:operation DELETE /orgs/{org}/migrations/{migration_id}/archive -func (s *MigrationService) DeleteMigration(ctx context.Context, org string, id int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/migrations/%v/archive", org, id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - return s.client.Do(ctx, req, nil) -} - -// UnlockRepo unlocks a repository that was locked for migration. -// id is the migration ID. -// You should unlock each migrated repository and delete them when the migration -// is complete and you no longer need the source data. -// -// GitHub API docs: https://docs.github.com/rest/migrations/orgs#unlock-an-organization-repository -// -//meta:operation DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock -func (s *MigrationService) UnlockRepo(ctx context.Context, org string, id int64, repo string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/migrations/%v/repos/%v/lock", org, id, repo) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/migrations_source_import.go b/vendor/github.com/google/go-github/v57/github/migrations_source_import.go deleted file mode 100644 index 3b161232..00000000 --- a/vendor/github.com/google/go-github/v57/github/migrations_source_import.go +++ /dev/null @@ -1,321 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Import represents a repository import request. -type Import struct { - // The URL of the originating repository. - VCSURL *string `json:"vcs_url,omitempty"` - // The originating VCS type. Can be one of 'subversion', 'git', - // 'mercurial', or 'tfvc'. Without this parameter, the import job will - // take additional time to detect the VCS type before beginning the - // import. This detection step will be reflected in the response. - VCS *string `json:"vcs,omitempty"` - // VCSUsername and VCSPassword are only used for StartImport calls that - // are importing a password-protected repository. 
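Taken together, the MigrationService methods above implement an export/poll/download flow; the sketch below assumes placeholder organization and repository names, a GITHUB_TOKEN environment variable, and an arbitrary polling interval.

package main

import (
	"context"
	"fmt"
	"log"
	"os"
	"time"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	// Start an archive of one repository, locking it while data is exported.
	m, _, err := client.Migrations.StartMigration(ctx, "example-org", []string{"example-repo"},
		&github.MigrationOptions{LockRepositories: true})
	if err != nil {
		log.Fatal(err)
	}

	// Poll until the archive is exported (states: pending, exporting, exported, failed).
	for m.GetState() != "exported" {
		if m.GetState() == "failed" {
			log.Fatal("migration failed")
		}
		time.Sleep(30 * time.Second)
		if m, _, err = client.Migrations.MigrationStatus(ctx, "example-org", m.GetID()); err != nil {
			log.Fatal(err)
		}
	}

	// MigrationArchiveURL captures the short-lived download location from the
	// redirect response (see the CheckRedirect handling above).
	url, err := client.Migrations.MigrationArchiveURL(ctx, "example-org", m.GetID())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("archive available at:", url)
}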
- VCSUsername *string `json:"vcs_username,omitempty"` - VCSPassword *string `json:"vcs_password,omitempty"` - // For a tfvc import, the name of the project that is being imported. - TFVCProject *string `json:"tfvc_project,omitempty"` - - // LFS related fields that may be preset in the Import Progress response - - // Describes whether the import has been opted in or out of using Git - // LFS. The value can be 'opt_in', 'opt_out', or 'undecided' if no - // action has been taken. - UseLFS *string `json:"use_lfs,omitempty"` - // Describes whether files larger than 100MB were found during the - // importing step. - HasLargeFiles *bool `json:"has_large_files,omitempty"` - // The total size in gigabytes of files larger than 100MB found in the - // originating repository. - LargeFilesSize *int `json:"large_files_size,omitempty"` - // The total number of files larger than 100MB found in the originating - // repository. To see a list of these files, call LargeFiles. - LargeFilesCount *int `json:"large_files_count,omitempty"` - - // Identifies the current status of an import. An import that does not - // have errors will progress through these steps: - // - // detecting - the "detection" step of the import is in progress - // because the request did not include a VCS parameter. The - // import is identifying the type of source control present at - // the URL. - // importing - the "raw" step of the import is in progress. This is - // where commit data is fetched from the original repository. - // The import progress response will include CommitCount (the - // total number of raw commits that will be imported) and - // Percent (0 - 100, the current progress through the import). - // mapping - the "rewrite" step of the import is in progress. This - // is where SVN branches are converted to Git branches, and - // where author updates are applied. The import progress - // response does not include progress information. - // pushing - the "push" step of the import is in progress. This is - // where the importer updates the repository on GitHub. The - // import progress response will include PushPercent, which is - // the percent value reported by git push when it is "Writing - // objects". - // complete - the import is complete, and the repository is ready - // on GitHub. - // - // If there are problems, you will see one of these in the status field: - // - // auth_failed - the import requires authentication in order to - // connect to the original repository. Make an UpdateImport - // request, and include VCSUsername and VCSPassword. - // error - the import encountered an error. The import progress - // response will include the FailedStep and an error message. - // Contact GitHub support for more information. - // detection_needs_auth - the importer requires authentication for - // the originating repository to continue detection. Make an - // UpdatImport request, and include VCSUsername and - // VCSPassword. - // detection_found_nothing - the importer didn't recognize any - // source control at the URL. - // detection_found_multiple - the importer found several projects - // or repositories at the provided URL. When this is the case, - // the Import Progress response will also include a - // ProjectChoices field with the possible project choices as - // values. Make an UpdateImport request, and include VCS and - // (if applicable) TFVCProject. 
- Status *string `json:"status,omitempty"` - CommitCount *int `json:"commit_count,omitempty"` - StatusText *string `json:"status_text,omitempty"` - AuthorsCount *int `json:"authors_count,omitempty"` - Percent *int `json:"percent,omitempty"` - PushPercent *int `json:"push_percent,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - AuthorsURL *string `json:"authors_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - Message *string `json:"message,omitempty"` - FailedStep *string `json:"failed_step,omitempty"` - - // Human readable display name, provided when the Import appears as - // part of ProjectChoices. - HumanName *string `json:"human_name,omitempty"` - - // When the importer finds several projects or repositories at the - // provided URLs, this will identify the available choices. Call - // UpdateImport with the selected Import value. - ProjectChoices []*Import `json:"project_choices,omitempty"` -} - -func (i Import) String() string { - return Stringify(i) -} - -// SourceImportAuthor identifies an author imported from a source repository. -// -// GitHub API docs: https://docs.github.com/rest/migration/source_imports/#get-commit-authors -type SourceImportAuthor struct { - ID *int64 `json:"id,omitempty"` - RemoteID *string `json:"remote_id,omitempty"` - RemoteName *string `json:"remote_name,omitempty"` - Email *string `json:"email,omitempty"` - Name *string `json:"name,omitempty"` - URL *string `json:"url,omitempty"` - ImportURL *string `json:"import_url,omitempty"` -} - -func (a SourceImportAuthor) String() string { - return Stringify(a) -} - -// LargeFile identifies a file larger than 100MB found during a repository import. -// -// GitHub API docs: https://docs.github.com/rest/migration/source_imports/#get-large-files -type LargeFile struct { - RefName *string `json:"ref_name,omitempty"` - Path *string `json:"path,omitempty"` - OID *string `json:"oid,omitempty"` - Size *int `json:"size,omitempty"` -} - -func (f LargeFile) String() string { - return Stringify(f) -} - -// StartImport initiates a repository import. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#start-an-import -// -//meta:operation PUT /repos/{owner}/{repo}/import -func (s *MigrationService) StartImport(ctx context.Context, owner, repo string, in *Import) (*Import, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import", owner, repo) - req, err := s.client.NewRequest("PUT", u, in) - if err != nil { - return nil, nil, err - } - - out := new(Import) - resp, err := s.client.Do(ctx, req, out) - if err != nil { - return nil, resp, err - } - - return out, resp, nil -} - -// ImportProgress queries for the status and progress of an ongoing repository import. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#get-an-import-status -// -//meta:operation GET /repos/{owner}/{repo}/import -func (s *MigrationService) ImportProgress(ctx context.Context, owner, repo string) (*Import, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - out := new(Import) - resp, err := s.client.Do(ctx, req, out) - if err != nil { - return nil, resp, err - } - - return out, resp, nil -} - -// UpdateImport initiates a repository import. 
-// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#update-an-import -// -//meta:operation PATCH /repos/{owner}/{repo}/import -func (s *MigrationService) UpdateImport(ctx context.Context, owner, repo string, in *Import) (*Import, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import", owner, repo) - req, err := s.client.NewRequest("PATCH", u, in) - if err != nil { - return nil, nil, err - } - - out := new(Import) - resp, err := s.client.Do(ctx, req, out) - if err != nil { - return nil, resp, err - } - - return out, resp, nil -} - -// CommitAuthors gets the authors mapped from the original repository. -// -// Each type of source control system represents authors in a different way. -// For example, a Git commit author has a display name and an email address, -// but a Subversion commit author just has a username. The GitHub Importer will -// make the author information valid, but the author might not be correct. For -// example, it will change the bare Subversion username "hubot" into something -// like "hubot ". -// -// This method and MapCommitAuthor allow you to provide correct Git author -// information. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#get-commit-authors -// -//meta:operation GET /repos/{owner}/{repo}/import/authors -func (s *MigrationService) CommitAuthors(ctx context.Context, owner, repo string) ([]*SourceImportAuthor, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/authors", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var authors []*SourceImportAuthor - resp, err := s.client.Do(ctx, req, &authors) - if err != nil { - return nil, resp, err - } - - return authors, resp, nil -} - -// MapCommitAuthor updates an author's identity for the import. Your -// application can continue updating authors any time before you push new -// commits to the repository. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#map-a-commit-author -// -//meta:operation PATCH /repos/{owner}/{repo}/import/authors/{author_id} -func (s *MigrationService) MapCommitAuthor(ctx context.Context, owner, repo string, id int64, author *SourceImportAuthor) (*SourceImportAuthor, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/authors/%v", owner, repo, id) - req, err := s.client.NewRequest("PATCH", u, author) - if err != nil { - return nil, nil, err - } - - out := new(SourceImportAuthor) - resp, err := s.client.Do(ctx, req, out) - if err != nil { - return nil, resp, err - } - - return out, resp, nil -} - -// SetLFSPreference sets whether imported repositories should use Git LFS for -// files larger than 100MB. Only the UseLFS field on the provided Import is -// used. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference -// -//meta:operation PATCH /repos/{owner}/{repo}/import/lfs -func (s *MigrationService) SetLFSPreference(ctx context.Context, owner, repo string, in *Import) (*Import, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/lfs", owner, repo) - req, err := s.client.NewRequest("PATCH", u, in) - if err != nil { - return nil, nil, err - } - - out := new(Import) - resp, err := s.client.Do(ctx, req, out) - if err != nil { - return nil, resp, err - } - - return out, resp, nil -} - -// LargeFiles lists files larger than 100MB found during the import. 
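The source-import endpoints above are usually driven as "start the import, then fix up author identities"; in the sketch below the owner, repository, originating VCS URL and corrected author are all placeholder values.

package main

import (
	"context"
	"log"
	"os"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	// Kick off an import from an external repository.
	_, _, err := client.Migrations.StartImport(ctx, "example-owner", "example-repo", &github.Import{
		VCSURL: github.String("https://svn.example.com/project/trunk"),
		VCS:    github.String("subversion"),
	})
	if err != nil {
		log.Fatal(err)
	}

	// The importer guesses author identities; correct them before pushing.
	authors, _, err := client.Migrations.CommitAuthors(ctx, "example-owner", "example-repo")
	if err != nil {
		log.Fatal(err)
	}
	for _, a := range authors {
		if a.GetRemoteName() == "hubot" {
			_, _, err := client.Migrations.MapCommitAuthor(ctx, "example-owner", "example-repo", a.GetID(),
				&github.SourceImportAuthor{
					Name:  github.String("Hubot"),
					Email: github.String("hubot@example.com"),
				})
			if err != nil {
				log.Fatal(err)
			}
		}
	}
}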
-// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#get-large-files -// -//meta:operation GET /repos/{owner}/{repo}/import/large_files -func (s *MigrationService) LargeFiles(ctx context.Context, owner, repo string) ([]*LargeFile, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/large_files", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var files []*LargeFile - resp, err := s.client.Do(ctx, req, &files) - if err != nil { - return nil, resp, err - } - - return files, resp, nil -} - -// CancelImport stops an import for a repository. -// -// GitHub API docs: https://docs.github.com/rest/migrations/source-imports#cancel-an-import -// -//meta:operation DELETE /repos/{owner}/{repo}/import -func (s *MigrationService) CancelImport(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/import", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/migrations_user.go b/vendor/github.com/google/go-github/v57/github/migrations_user.go deleted file mode 100644 index 1f907cd4..00000000 --- a/vendor/github.com/google/go-github/v57/github/migrations_user.go +++ /dev/null @@ -1,230 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "errors" - "fmt" - "net/http" -) - -// UserMigration represents a GitHub migration (archival). -type UserMigration struct { - ID *int64 `json:"id,omitempty"` - GUID *string `json:"guid,omitempty"` - // State is the current state of a migration. - // Possible values are: - // "pending" which means the migration hasn't started yet, - // "exporting" which means the migration is in progress, - // "exported" which means the migration finished successfully, or - // "failed" which means the migration failed. - State *string `json:"state,omitempty"` - // LockRepositories indicates whether repositories are locked (to prevent - // manipulation) while migrating data. - LockRepositories *bool `json:"lock_repositories,omitempty"` - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` - URL *string `json:"url,omitempty"` - CreatedAt *string `json:"created_at,omitempty"` - UpdatedAt *string `json:"updated_at,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -func (m UserMigration) String() string { - return Stringify(m) -} - -// UserMigrationOptions specifies the optional parameters to Migration methods. -type UserMigrationOptions struct { - // LockRepositories indicates whether repositories should be locked (to prevent - // manipulation) while migrating data. - LockRepositories bool - - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments bool -} - -// startUserMigration represents the body of a StartMigration request. -type startUserMigration struct { - // Repositories is a slice of repository names to migrate. 
- Repositories []string `json:"repositories,omitempty"` - - // LockRepositories indicates whether repositories should be locked (to prevent - // manipulation) while migrating data. - LockRepositories *bool `json:"lock_repositories,omitempty"` - - // ExcludeAttachments indicates whether attachments should be excluded from - // the migration (to reduce migration archive file size). - ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` -} - -// StartUserMigration starts the generation of a migration archive. -// repos is a slice of repository names to migrate. -// -// GitHub API docs: https://docs.github.com/rest/migrations/users#start-a-user-migration -// -//meta:operation POST /user/migrations -func (s *MigrationService) StartUserMigration(ctx context.Context, repos []string, opts *UserMigrationOptions) (*UserMigration, *Response, error) { - u := "user/migrations" - - body := &startUserMigration{Repositories: repos} - if opts != nil { - body.LockRepositories = Bool(opts.LockRepositories) - body.ExcludeAttachments = Bool(opts.ExcludeAttachments) - } - - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - m := &UserMigration{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// ListUserMigrations lists the most recent migrations. -// -// GitHub API docs: https://docs.github.com/rest/migrations/users#list-user-migrations -// -//meta:operation GET /user/migrations -func (s *MigrationService) ListUserMigrations(ctx context.Context, opts *ListOptions) ([]*UserMigration, *Response, error) { - u := "user/migrations" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - var m []*UserMigration - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// UserMigrationStatus gets the status of a specific migration archive. -// id is the migration ID. -// -// GitHub API docs: https://docs.github.com/rest/migrations/users#get-a-user-migration-status -// -//meta:operation GET /user/migrations/{migration_id} -func (s *MigrationService) UserMigrationStatus(ctx context.Context, id int64) (*UserMigration, *Response, error) { - u := fmt.Sprintf("user/migrations/%v", id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - m := &UserMigration{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// UserMigrationArchiveURL gets the URL for a specific migration archive. -// id is the migration ID. 
-// -// GitHub API docs: https://docs.github.com/rest/migrations/users#download-a-user-migration-archive -// -//meta:operation GET /user/migrations/{migration_id}/archive -func (s *MigrationService) UserMigrationArchiveURL(ctx context.Context, id int64) (string, error) { - url := fmt.Sprintf("user/migrations/%v/archive", id) - - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return "", err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - m := &UserMigration{} - - var loc string - originalRedirect := s.client.client.CheckRedirect - s.client.client.CheckRedirect = func(req *http.Request, via []*http.Request) error { - loc = req.URL.String() - return http.ErrUseLastResponse - } - defer func() { - s.client.client.CheckRedirect = originalRedirect - }() - resp, err := s.client.Do(ctx, req, m) - if err == nil { - return "", errors.New("expected redirect, none provided") - } - loc = resp.Header.Get("Location") - return loc, nil -} - -// DeleteUserMigration will delete a previous migration archive. -// id is the migration ID. -// -// GitHub API docs: https://docs.github.com/rest/migrations/users#delete-a-user-migration-archive -// -//meta:operation DELETE /user/migrations/{migration_id}/archive -func (s *MigrationService) DeleteUserMigration(ctx context.Context, id int64) (*Response, error) { - url := fmt.Sprintf("user/migrations/%v/archive", id) - - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - return s.client.Do(ctx, req, nil) -} - -// UnlockUserRepo will unlock a repo that was locked for migration. -// id is migration ID. -// You should unlock each migrated repository and delete them when the migration -// is complete and you no longer need the source data. -// -// GitHub API docs: https://docs.github.com/rest/migrations/users#unlock-a-user-repository -// -//meta:operation DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock -func (s *MigrationService) UnlockUserRepo(ctx context.Context, id int64, repo string) (*Response, error) { - url := fmt.Sprintf("user/migrations/%v/repos/%v/lock", id, repo) - - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMigrationsPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs.go b/vendor/github.com/google/go-github/v57/github/orgs.go deleted file mode 100644 index 4d346527..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs.go +++ /dev/null @@ -1,316 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// OrganizationsService provides access to the organization related functions -// in the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/orgs/ -type OrganizationsService service - -// Organization represents a GitHub organization account. 
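The user-migration variants follow the same start/poll/download shape as the organization flow; a minimal sketch with placeholder repository names and token handling.

package main

import (
	"context"
	"fmt"
	"log"
	"os"
	"time"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	m, _, err := client.Migrations.StartUserMigration(ctx, []string{"example-user/example-repo"},
		&github.UserMigrationOptions{ExcludeAttachments: true})
	if err != nil {
		log.Fatal(err)
	}

	for m.GetState() != "exported" {
		time.Sleep(30 * time.Second)
		if m, _, err = client.Migrations.UserMigrationStatus(ctx, m.GetID()); err != nil {
			log.Fatal(err)
		}
	}

	url, err := client.Migrations.UserMigrationArchiveURL(ctx, m.GetID())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("archive available at:", url)

	// Clean up the archive once it has been downloaded.
	if _, err := client.Migrations.DeleteUserMigration(ctx, m.GetID()); err != nil {
		log.Fatal(err)
	}
}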
-type Organization struct { - Login *string `json:"login,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Name *string `json:"name,omitempty"` - Company *string `json:"company,omitempty"` - Blog *string `json:"blog,omitempty"` - Location *string `json:"location,omitempty"` - Email *string `json:"email,omitempty"` - TwitterUsername *string `json:"twitter_username,omitempty"` - Description *string `json:"description,omitempty"` - PublicRepos *int `json:"public_repos,omitempty"` - PublicGists *int `json:"public_gists,omitempty"` - Followers *int `json:"followers,omitempty"` - Following *int `json:"following,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - TotalPrivateRepos *int64 `json:"total_private_repos,omitempty"` - OwnedPrivateRepos *int64 `json:"owned_private_repos,omitempty"` - PrivateGists *int `json:"private_gists,omitempty"` - DiskUsage *int `json:"disk_usage,omitempty"` - Collaborators *int `json:"collaborators,omitempty"` - BillingEmail *string `json:"billing_email,omitempty"` - Type *string `json:"type,omitempty"` - Plan *Plan `json:"plan,omitempty"` - TwoFactorRequirementEnabled *bool `json:"two_factor_requirement_enabled,omitempty"` - IsVerified *bool `json:"is_verified,omitempty"` - HasOrganizationProjects *bool `json:"has_organization_projects,omitempty"` - HasRepositoryProjects *bool `json:"has_repository_projects,omitempty"` - - // DefaultRepoPermission can be one of: "read", "write", "admin", or "none". (Default: "read"). - // It is only used in OrganizationsService.Edit. - DefaultRepoPermission *string `json:"default_repository_permission,omitempty"` - // DefaultRepoSettings can be one of: "read", "write", "admin", or "none". (Default: "read"). - // It is only used in OrganizationsService.Get. - DefaultRepoSettings *string `json:"default_repository_settings,omitempty"` - - // MembersCanCreateRepos default value is true and is only used in Organizations.Edit. - MembersCanCreateRepos *bool `json:"members_can_create_repositories,omitempty"` - - // https://developer.github.com/changes/2019-12-03-internal-visibility-changes/#rest-v3-api - MembersCanCreatePublicRepos *bool `json:"members_can_create_public_repositories,omitempty"` - MembersCanCreatePrivateRepos *bool `json:"members_can_create_private_repositories,omitempty"` - MembersCanCreateInternalRepos *bool `json:"members_can_create_internal_repositories,omitempty"` - - // MembersCanForkPrivateRepos toggles whether organization members can fork private organization repositories. - MembersCanForkPrivateRepos *bool `json:"members_can_fork_private_repositories,omitempty"` - - // MembersAllowedRepositoryCreationType denotes if organization members can create repositories - // and the type of repositories they can create. Possible values are: "all", "private", or "none". - // - // Deprecated: Use MembersCanCreatePublicRepos, MembersCanCreatePrivateRepos, MembersCanCreateInternalRepos - // instead. The new fields overrides the existing MembersAllowedRepositoryCreationType during 'edit' - // operation and does not consider 'internal' repositories during 'get' operation - MembersAllowedRepositoryCreationType *string `json:"members_allowed_repository_creation_type,omitempty"` - - // MembersCanCreatePages toggles whether organization members can create GitHub Pages sites. 
- MembersCanCreatePages *bool `json:"members_can_create_pages,omitempty"` - // MembersCanCreatePublicPages toggles whether organization members can create public GitHub Pages sites. - MembersCanCreatePublicPages *bool `json:"members_can_create_public_pages,omitempty"` - // MembersCanCreatePrivatePages toggles whether organization members can create private GitHub Pages sites. - MembersCanCreatePrivatePages *bool `json:"members_can_create_private_pages,omitempty"` - // WebCommitSignoffRequire toggles - WebCommitSignoffRequired *bool `json:"web_commit_signoff_required,omitempty"` - // AdvancedSecurityAuditLogEnabled toggles whether the advanced security audit log is enabled. - AdvancedSecurityEnabledForNewRepos *bool `json:"advanced_security_enabled_for_new_repositories,omitempty"` - // DependabotAlertsEnabled toggles whether dependabot alerts are enabled. - DependabotAlertsEnabledForNewRepos *bool `json:"dependabot_alerts_enabled_for_new_repositories,omitempty"` - // DependabotSecurityUpdatesEnabled toggles whether dependabot security updates are enabled. - DependabotSecurityUpdatesEnabledForNewRepos *bool `json:"dependabot_security_updates_enabled_for_new_repositories,omitempty"` - // DependabotGraphEnabledForNewRepos toggles whether dependabot graph is enabled on new repositories. - DependencyGraphEnabledForNewRepos *bool `json:"dependency_graph_enabled_for_new_repositories,omitempty"` - // SecretScanningEnabled toggles whether secret scanning is enabled on new repositories. - SecretScanningEnabledForNewRepos *bool `json:"secret_scanning_enabled_for_new_repositories,omitempty"` - // SecretScanningPushProtectionEnabledForNewRepos toggles whether secret scanning push protection is enabled on new repositories. - SecretScanningPushProtectionEnabledForNewRepos *bool `json:"secret_scanning_push_protection_enabled_for_new_repositories,omitempty"` - - // API URLs - URL *string `json:"url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - HooksURL *string `json:"hooks_url,omitempty"` - IssuesURL *string `json:"issues_url,omitempty"` - MembersURL *string `json:"members_url,omitempty"` - PublicMembersURL *string `json:"public_members_url,omitempty"` - ReposURL *string `json:"repos_url,omitempty"` -} - -// OrganizationInstallations represents GitHub app installations for an organization. -type OrganizationInstallations struct { - TotalCount *int `json:"total_count,omitempty"` - Installations []*Installation `json:"installations,omitempty"` -} - -func (o Organization) String() string { - return Stringify(o) -} - -// Plan represents the payment plan for an account. See plans at https://github.com/plans. -type Plan struct { - Name *string `json:"name,omitempty"` - Space *int `json:"space,omitempty"` - Collaborators *int `json:"collaborators,omitempty"` - PrivateRepos *int64 `json:"private_repos,omitempty"` - FilledSeats *int `json:"filled_seats,omitempty"` - Seats *int `json:"seats,omitempty"` -} - -func (p Plan) String() string { - return Stringify(p) -} - -// OrganizationsListOptions specifies the optional parameters to the -// OrganizationsService.ListAll method. -type OrganizationsListOptions struct { - // Since filters Organizations by ID. - Since int64 `url:"since,omitempty"` - - // Note: Pagination is powered exclusively by the Since parameter, - // ListOptions.Page has no effect. - // ListOptions.PerPage controls an undocumented GitHub API parameter. - ListOptions -} - -// ListAll lists all organizations, in the order that they were created on GitHub. 
-// -// Note: Pagination is powered exclusively by the since parameter. To continue -// listing the next set of organizations, use the ID of the last-returned organization -// as the opts.Since parameter for the next call. -// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#list-organizations -// -//meta:operation GET /organizations -func (s *OrganizationsService) ListAll(ctx context.Context, opts *OrganizationsListOptions) ([]*Organization, *Response, error) { - u, err := addOptions("organizations", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - orgs := []*Organization{} - resp, err := s.client.Do(ctx, req, &orgs) - if err != nil { - return nil, resp, err - } - return orgs, resp, nil -} - -// List the organizations for a user. Passing the empty string will list -// organizations for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#list-organizations-for-a-user -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#list-organizations-for-the-authenticated-user -// -//meta:operation GET /user/orgs -//meta:operation GET /users/{username}/orgs -func (s *OrganizationsService) List(ctx context.Context, user string, opts *ListOptions) ([]*Organization, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/orgs", user) - } else { - u = "user/orgs" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var orgs []*Organization - resp, err := s.client.Do(ctx, req, &orgs) - if err != nil { - return nil, resp, err - } - - return orgs, resp, nil -} - -// Get fetches an organization by name. -// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#get-an-organization -// -//meta:operation GET /orgs/{org} -func (s *OrganizationsService) Get(ctx context.Context, org string) (*Organization, *Response, error) { - u := fmt.Sprintf("orgs/%v", org) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMemberAllowedRepoCreationTypePreview) - - organization := new(Organization) - resp, err := s.client.Do(ctx, req, organization) - if err != nil { - return nil, resp, err - } - - return organization, resp, nil -} - -// GetByID fetches an organization. -// -// Note: GetByID uses the undocumented GitHub API endpoint "GET /organizations/{organization_id}". -// -//meta:operation GET /organizations/{organization_id} -func (s *OrganizationsService) GetByID(ctx context.Context, id int64) (*Organization, *Response, error) { - u := fmt.Sprintf("organizations/%d", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - organization := new(Organization) - resp, err := s.client.Do(ctx, req, organization) - if err != nil { - return nil, resp, err - } - - return organization, resp, nil -} - -// Edit an organization. 
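A short sketch of typical OrganizationsService usage; the organization name and the edited default permission are arbitrary examples, not a recommendation.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	// List the organizations the authenticated user belongs to.
	orgs, _, err := client.Organizations.List(ctx, "", nil)
	if err != nil {
		log.Fatal(err)
	}
	for _, o := range orgs {
		fmt.Println(o.GetLogin())
	}

	// Fetch one organization and tighten its default repository permission.
	org, _, err := client.Organizations.Get(ctx, "example-org")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("current default permission:", org.GetDefaultRepoSettings())

	_, _, err = client.Organizations.Edit(ctx, "example-org", &github.Organization{
		DefaultRepoPermission: github.String("read"),
	})
	if err != nil {
		log.Fatal(err)
	}
}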
-// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#update-an-organization -// -//meta:operation PATCH /orgs/{org} -func (s *OrganizationsService) Edit(ctx context.Context, name string, org *Organization) (*Organization, *Response, error) { - u := fmt.Sprintf("orgs/%v", name) - req, err := s.client.NewRequest("PATCH", u, org) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeMemberAllowedRepoCreationTypePreview) - - o := new(Organization) - resp, err := s.client.Do(ctx, req, o) - if err != nil { - return nil, resp, err - } - - return o, resp, nil -} - -// Delete an organization by name. -// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#delete-an-organization -// -//meta:operation DELETE /orgs/{org} -func (s *OrganizationsService) Delete(ctx context.Context, org string) (*Response, error) { - u := fmt.Sprintf("orgs/%v", org) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListInstallations lists installations for an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/orgs#list-app-installations-for-an-organization -// -//meta:operation GET /orgs/{org}/installations -func (s *OrganizationsService) ListInstallations(ctx context.Context, org string, opts *ListOptions) (*OrganizationInstallations, *Response, error) { - u := fmt.Sprintf("orgs/%v/installations", org) - - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - result := new(OrganizationInstallations) - resp, err := s.client.Do(ctx, req, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_audit_log.go b/vendor/github.com/google/go-github/v57/github/orgs_audit_log.go deleted file mode 100644 index e3afd311..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_audit_log.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetAuditLogOptions sets up optional parameters to query audit-log endpoint. -type GetAuditLogOptions struct { - Phrase *string `url:"phrase,omitempty"` // A search phrase. (Optional.) - Include *string `url:"include,omitempty"` // Event type includes. Can be one of "web", "git", "all". Default: "web". (Optional.) - Order *string `url:"order,omitempty"` // The order of audit log events. Can be one of "asc" or "desc". Default: "desc". (Optional.) - - ListCursorOptions -} - -// HookConfig describes metadata about a webhook configuration. -type HookConfig struct { - ContentType *string `json:"content_type,omitempty"` - InsecureSSL *string `json:"insecure_ssl,omitempty"` - URL *string `json:"url,omitempty"` - - // Secret is returned obfuscated by GitHub, but it can be set for outgoing requests. - Secret *string `json:"secret,omitempty"` -} - -// ActorLocation contains information about reported location for an actor. -type ActorLocation struct { - CountryCode *string `json:"country_code,omitempty"` -} - -// PolicyOverrideReason contains user-supplied information about why a policy was overridden. 
-type PolicyOverrideReason struct { - Code *string `json:"code,omitempty"` - Message *string `json:"message,omitempty"` -} - -// AuditEntry describes the fields that may be represented by various audit-log "action" entries. -// For a list of actions see - https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization#audit-log-actions -type AuditEntry struct { - ActorIP *string `json:"actor_ip,omitempty"` - Action *string `json:"action,omitempty"` // The name of the action that was performed, for example `user.login` or `repo.create`. - Active *bool `json:"active,omitempty"` - ActiveWas *bool `json:"active_was,omitempty"` - Actor *string `json:"actor,omitempty"` // The actor who performed the action. - ActorLocation *ActorLocation `json:"actor_location,omitempty"` - BlockedUser *string `json:"blocked_user,omitempty"` - Business *string `json:"business,omitempty"` - CancelledAt *Timestamp `json:"cancelled_at,omitempty"` - CompletedAt *Timestamp `json:"completed_at,omitempty"` - Conclusion *string `json:"conclusion,omitempty"` - Config *HookConfig `json:"config,omitempty"` - ConfigWas *HookConfig `json:"config_was,omitempty"` - ContentType *string `json:"content_type,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - DeployKeyFingerprint *string `json:"deploy_key_fingerprint,omitempty"` - DocumentID *string `json:"_document_id,omitempty"` - Emoji *string `json:"emoji,omitempty"` - EnvironmentName *string `json:"environment_name,omitempty"` - Event *string `json:"event,omitempty"` - Events []string `json:"events,omitempty"` - EventsWere []string `json:"events_were,omitempty"` - Explanation *string `json:"explanation,omitempty"` - ExternalIdentityNameID *string `json:"external_identity_nameid,omitempty"` - ExternalIdentityUsername *string `json:"external_identity_username,omitempty"` - Fingerprint *string `json:"fingerprint,omitempty"` - HashedToken *string `json:"hashed_token,omitempty"` - HeadBranch *string `json:"head_branch,omitempty"` - HeadSHA *string `json:"head_sha,omitempty"` - HookID *int64 `json:"hook_id,omitempty"` - IsHostedRunner *bool `json:"is_hosted_runner,omitempty"` - JobName *string `json:"job_name,omitempty"` - JobWorkflowRef *string `json:"job_workflow_ref,omitempty"` - LimitedAvailability *bool `json:"limited_availability,omitempty"` - Message *string `json:"message,omitempty"` - Name *string `json:"name,omitempty"` - OAuthApplicationID *int64 `json:"oauth_application_id,omitempty"` - OldUser *string `json:"old_user,omitempty"` - OldPermission *string `json:"old_permission,omitempty"` // The permission level for membership changes, for example `admin` or `read`. - OpenSSHPublicKey *string `json:"openssh_public_key,omitempty"` - OperationType *string `json:"operation_type,omitempty"` - Org *string `json:"org,omitempty"` - OrgID *int64 `json:"org_id,omitempty"` - OverriddenCodes []string `json:"overridden_codes,omitempty"` - Permission *string `json:"permission,omitempty"` // The permission level for membership changes, for example `admin` or `read`. 
- PreviousVisibility *string `json:"previous_visibility,omitempty"` - ProgrammaticAccessType *string `json:"programmatic_access_type,omitempty"` - PullRequestID *int64 `json:"pull_request_id,omitempty"` - PullRequestTitle *string `json:"pull_request_title,omitempty"` - PullRequestURL *string `json:"pull_request_url,omitempty"` - ReadOnly *string `json:"read_only,omitempty"` - Reasons []*PolicyOverrideReason `json:"reasons,omitempty"` - Repo *string `json:"repo,omitempty"` - Repository *string `json:"repository,omitempty"` - RepositoryPublic *bool `json:"repository_public,omitempty"` - RunAttempt *int64 `json:"run_attempt,omitempty"` - RunnerGroupID *int64 `json:"runner_group_id,omitempty"` - RunnerGroupName *string `json:"runner_group_name,omitempty"` - RunnerID *int64 `json:"runner_id,omitempty"` - RunnerLabels []string `json:"runner_labels,omitempty"` - RunnerName *string `json:"runner_name,omitempty"` - RunNumber *int64 `json:"run_number,omitempty"` - SecretsPassed []string `json:"secrets_passed,omitempty"` - SourceVersion *string `json:"source_version,omitempty"` - StartedAt *Timestamp `json:"started_at,omitempty"` - TargetLogin *string `json:"target_login,omitempty"` - TargetVersion *string `json:"target_version,omitempty"` - Team *string `json:"team,omitempty"` - Timestamp *Timestamp `json:"@timestamp,omitempty"` // The time the audit log event occurred, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). - TokenID *int64 `json:"token_id,omitempty"` - TokenScopes *string `json:"token_scopes,omitempty"` - Topic *string `json:"topic,omitempty"` - TransportProtocolName *string `json:"transport_protocol_name,omitempty"` // A human readable name for the protocol (for example, HTTP or SSH) used to transfer Git data. - TransportProtocol *int `json:"transport_protocol,omitempty"` // The type of protocol (for example, HTTP=1 or SSH=2) used to transfer Git data. - TriggerID *int64 `json:"trigger_id,omitempty"` - User *string `json:"user,omitempty"` // The user that was affected by the action performed (if available). - UserAgent *string `json:"user_agent,omitempty"` - Visibility *string `json:"visibility,omitempty"` // The repository visibility, for example `public` or `private`. - WorkflowID *int64 `json:"workflow_id,omitempty"` - WorkflowRunID *int64 `json:"workflow_run_id,omitempty"` - - Data *AuditEntryData `json:"data,omitempty"` -} - -// AuditEntryData represents additional information stuffed into a `data` field. -type AuditEntryData struct { - OldName *string `json:"old_name,omitempty"` // The previous name of the repository, for a name change - OldLogin *string `json:"old_login,omitempty"` // The previous name of the organization, for a name change -} - -// GetAuditLog gets the audit-log entries for an organization. 
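Querying the audit log defined above usually looks like the sketch below; the organization, search phrase and ordering are placeholders, and cursor pagination via ListCursorOptions is omitted for brevity.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	entries, _, err := client.Organizations.GetAuditLog(ctx, "example-org", &github.GetAuditLogOptions{
		Phrase:  github.String("action:repo.create"),
		Include: github.String("web"),
		Order:   github.String("desc"),
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, e := range entries {
		fmt.Printf("%s by %s on %s\n", e.GetAction(), e.GetActor(), e.GetRepo())
	}
}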
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/orgs#get-the-audit-log-for-an-organization -// -//meta:operation GET /orgs/{org}/audit-log -func (s *OrganizationsService) GetAuditLog(ctx context.Context, org string, opts *GetAuditLogOptions) ([]*AuditEntry, *Response, error) { - u := fmt.Sprintf("orgs/%v/audit-log", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var auditEntries []*AuditEntry - resp, err := s.client.Do(ctx, req, &auditEntries) - if err != nil { - return nil, resp, err - } - - return auditEntries, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_credential_authorizations.go b/vendor/github.com/google/go-github/v57/github/orgs_credential_authorizations.go deleted file mode 100644 index eed0f0c6..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_credential_authorizations.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" -) - -// CredentialAuthorization represents a credential authorized through SAML SSO. -type CredentialAuthorization struct { - // User login that owns the underlying credential. - Login *string `json:"login,omitempty"` - - // Unique identifier for the credential. - CredentialID *int64 `json:"credential_id,omitempty"` - - // Human-readable description of the credential type. - CredentialType *string `json:"credential_type,omitempty"` - - // Last eight characters of the credential. - // Only included in responses with credential_type of personal access token. - TokenLastEight *string `json:"token_last_eight,omitempty"` - - // Date when the credential was authorized for use. - CredentialAuthorizedAt *Timestamp `json:"credential_authorized_at,omitempty"` - - // Date when the credential was last accessed. - // May be null if it was never accessed. - CredentialAccessedAt *Timestamp `json:"credential_accessed_at,omitempty"` - - // List of oauth scopes the token has been granted. - Scopes []string `json:"scopes,omitempty"` - - // Unique string to distinguish the credential. - // Only included in responses with credential_type of SSH Key. - Fingerprint *string `json:"fingerprint,omitempty"` - - AuthorizedCredentialID *int64 `json:"authorized_credential_id,omitempty"` - - // The title given to the ssh key. - // This will only be present when the credential is an ssh key. - AuthorizedCredentialTitle *string `json:"authorized_credential_title,omitempty"` - - // The note given to the token. - // This will only be present when the credential is a token. - AuthorizedCredentialNote *string `json:"authorized_credential_note,omitempty"` - - // The expiry for the token. - // This will only be present when the credential is a token. - AuthorizedCredentialExpiresAt *Timestamp `json:"authorized_credential_expires_at,omitempty"` -} - -// ListCredentialAuthorizations lists credentials authorized through SAML SSO -// for a given organization. Only available with GitHub Enterprise Cloud. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/orgs#list-saml-sso-authorizations-for-an-organization -// -//meta:operation GET /orgs/{org}/credential-authorizations -func (s *OrganizationsService) ListCredentialAuthorizations(ctx context.Context, org string, opts *ListOptions) ([]*CredentialAuthorization, *Response, error) { - u := fmt.Sprintf("orgs/%v/credential-authorizations", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest(http.MethodGet, u, nil) - if err != nil { - return nil, nil, err - } - - var creds []*CredentialAuthorization - resp, err := s.client.Do(ctx, req, &creds) - if err != nil { - return nil, resp, err - } - - return creds, resp, nil -} - -// RemoveCredentialAuthorization revokes the SAML SSO authorization for a given -// credential within an organization. Only available with GitHub Enterprise Cloud. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/orgs#remove-a-saml-sso-authorization-for-an-organization -// -//meta:operation DELETE /orgs/{org}/credential-authorizations/{credential_id} -func (s *OrganizationsService) RemoveCredentialAuthorization(ctx context.Context, org string, credentialID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/credential-authorizations/%v", org, credentialID) - req, err := s.client.NewRequest(http.MethodDelete, u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_custom_roles.go b/vendor/github.com/google/go-github/v57/github/orgs_custom_roles.go deleted file mode 100644 index 45de896a..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_custom_roles.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// OrganizationCustomRepoRoles represents custom repository roles available in specified organization. -type OrganizationCustomRepoRoles struct { - TotalCount *int `json:"total_count,omitempty"` - CustomRepoRoles []*CustomRepoRoles `json:"custom_roles,omitempty"` -} - -// CustomRepoRoles represents custom repository roles for an organization. -// See https://docs.github.com/enterprise-cloud@latest/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization -// for more information. -type CustomRepoRoles struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - BaseRole *string `json:"base_role,omitempty"` - Permissions []string `json:"permissions,omitempty"` -} - -// ListCustomRepoRoles lists the custom repository roles available in this organization. -// In order to see custom repository roles in an organization, the authenticated user must be an organization owner. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/custom-roles#list-custom-repository-roles-in-an-organization -// -//meta:operation GET /orgs/{org}/custom-repository-roles -func (s *OrganizationsService) ListCustomRepoRoles(ctx context.Context, org string) (*OrganizationCustomRepoRoles, *Response, error) { - u := fmt.Sprintf("orgs/%v/custom-repository-roles", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - customRepoRoles := new(OrganizationCustomRepoRoles) - resp, err := s.client.Do(ctx, req, customRepoRoles) - if err != nil { - return nil, resp, err - } - - return customRepoRoles, resp, nil -} - -// CreateOrUpdateCustomRoleOptions represents options required to create or update a custom repository role. -type CreateOrUpdateCustomRoleOptions struct { - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - BaseRole *string `json:"base_role,omitempty"` - Permissions []string `json:"permissions,omitempty"` -} - -// CreateCustomRepoRole creates a custom repository role in this organization. -// In order to create custom repository roles in an organization, the authenticated user must be an organization owner. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/custom-roles#create-a-custom-repository-role -// -//meta:operation POST /orgs/{org}/custom-repository-roles -func (s *OrganizationsService) CreateCustomRepoRole(ctx context.Context, org string, opts *CreateOrUpdateCustomRoleOptions) (*CustomRepoRoles, *Response, error) { - u := fmt.Sprintf("orgs/%v/custom-repository-roles", org) - - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - resultingRole := new(CustomRepoRoles) - resp, err := s.client.Do(ctx, req, resultingRole) - if err != nil { - return nil, resp, err - } - - return resultingRole, resp, err -} - -// UpdateCustomRepoRole updates a custom repository role in this organization. -// In order to update custom repository roles in an organization, the authenticated user must be an organization owner. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/custom-roles#update-a-custom-repository-role -// -//meta:operation PATCH /orgs/{org}/custom-repository-roles/{role_id} -func (s *OrganizationsService) UpdateCustomRepoRole(ctx context.Context, org, roleID string, opts *CreateOrUpdateCustomRoleOptions) (*CustomRepoRoles, *Response, error) { - u := fmt.Sprintf("orgs/%v/custom-repository-roles/%v", org, roleID) - - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - resultingRole := new(CustomRepoRoles) - resp, err := s.client.Do(ctx, req, resultingRole) - if err != nil { - return nil, resp, err - } - - return resultingRole, resp, err -} - -// DeleteCustomRepoRole deletes an existing custom repository role in this organization. -// In order to delete custom repository roles in an organization, the authenticated user must be an organization owner. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/orgs/custom-roles#delete-a-custom-repository-role -// -//meta:operation DELETE /orgs/{org}/custom-repository-roles/{role_id} -func (s *OrganizationsService) DeleteCustomRepoRole(ctx context.Context, org, roleID string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/custom-repository-roles/%v", org, roleID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resultingRole := new(CustomRepoRoles) - resp, err := s.client.Do(ctx, req, resultingRole) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_members.go b/vendor/github.com/google/go-github/v57/github/orgs_members.go deleted file mode 100644 index 5bc23657..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_members.go +++ /dev/null @@ -1,422 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Membership represents the status of a user's membership in an organization or team. -type Membership struct { - URL *string `json:"url,omitempty"` - - // State is the user's status within the organization or team. - // Possible values are: "active", "pending" - State *string `json:"state,omitempty"` - - // Role identifies the user's role within the organization or team. - // Possible values for organization membership: - // member - non-owner organization member - // admin - organization owner - // - // Possible values for team membership are: - // member - a normal member of the team - // maintainer - a team maintainer. Able to add/remove other team - // members, promote other team members to team - // maintainer, and edit the team’s name and description - Role *string `json:"role,omitempty"` - - // For organization membership, the API URL of the organization. - OrganizationURL *string `json:"organization_url,omitempty"` - - // For organization membership, the organization the membership is for. - Organization *Organization `json:"organization,omitempty"` - - // For organization membership, the user the membership is for. - User *User `json:"user,omitempty"` -} - -func (m Membership) String() string { - return Stringify(m) -} - -// ListMembersOptions specifies optional parameters to the -// OrganizationsService.ListMembers method. -type ListMembersOptions struct { - // If true (or if the authenticated user is not an owner of the - // organization), list only publicly visible members. - PublicOnly bool `url:"-"` - - // Filter members returned in the list. Possible values are: - // 2fa_disabled, all. Default is "all". - Filter string `url:"filter,omitempty"` - - // Role filters members returned by their role in the organization. - // Possible values are: - // all - all members of the organization, regardless of role - // admin - organization owners - // member - non-owner organization members - // - // Default is "all". - Role string `url:"role,omitempty"` - - ListOptions -} - -// ListMembers lists the members for an organization. If the authenticated -// user is an owner of the organization, this will return both concealed and -// public members, otherwise it will only return public members. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-organization-members -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-public-organization-members -// -//meta:operation GET /orgs/{org}/members -//meta:operation GET /orgs/{org}/public_members -func (s *OrganizationsService) ListMembers(ctx context.Context, org string, opts *ListMembersOptions) ([]*User, *Response, error) { - var u string - if opts != nil && opts.PublicOnly { - u = fmt.Sprintf("orgs/%v/public_members", org) - } else { - u = fmt.Sprintf("orgs/%v/members", org) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var members []*User - resp, err := s.client.Do(ctx, req, &members) - if err != nil { - return nil, resp, err - } - - return members, resp, nil -} - -// IsMember checks if a user is a member of an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#check-organization-membership-for-a-user -// -//meta:operation GET /orgs/{org}/members/{username} -func (s *OrganizationsService) IsMember(ctx context.Context, org, user string) (bool, *Response, error) { - u := fmt.Sprintf("orgs/%v/members/%v", org, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - member, err := parseBoolResponse(err) - return member, resp, err -} - -// IsPublicMember checks if a user is a public member of an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#check-public-organization-membership-for-a-user -// -//meta:operation GET /orgs/{org}/public_members/{username} -func (s *OrganizationsService) IsPublicMember(ctx context.Context, org, user string) (bool, *Response, error) { - u := fmt.Sprintf("orgs/%v/public_members/%v", org, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - member, err := parseBoolResponse(err) - return member, resp, err -} - -// RemoveMember removes a user from all teams of an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#remove-an-organization-member -// -//meta:operation DELETE /orgs/{org}/members/{username} -func (s *OrganizationsService) RemoveMember(ctx context.Context, org, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/members/%v", org, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// PublicizeMembership publicizes a user's membership in an organization. (A -// user cannot publicize the membership for another user.) -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#set-public-organization-membership-for-the-authenticated-user -// -//meta:operation PUT /orgs/{org}/public_members/{username} -func (s *OrganizationsService) PublicizeMembership(ctx context.Context, org, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/public_members/%v", org, user) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ConcealMembership conceals a user's membership in an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/members#remove-public-organization-membership-for-the-authenticated-user -// -//meta:operation DELETE /orgs/{org}/public_members/{username} -func (s *OrganizationsService) ConcealMembership(ctx context.Context, org, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/public_members/%v", org, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListOrgMembershipsOptions specifies optional parameters to the -// OrganizationsService.ListOrgMemberships method. -type ListOrgMembershipsOptions struct { - // Filter memberships to include only those with the specified state. - // Possible values are: "active", "pending". - State string `url:"state,omitempty"` - - ListOptions -} - -// ListOrgMemberships lists the organization memberships for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-organization-memberships-for-the-authenticated-user -// -//meta:operation GET /user/memberships/orgs -func (s *OrganizationsService) ListOrgMemberships(ctx context.Context, opts *ListOrgMembershipsOptions) ([]*Membership, *Response, error) { - u := "user/memberships/orgs" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var memberships []*Membership - resp, err := s.client.Do(ctx, req, &memberships) - if err != nil { - return nil, resp, err - } - - return memberships, resp, nil -} - -// GetOrgMembership gets the membership for a user in a specified organization. -// Passing an empty string for user will get the membership for the -// authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#get-an-organization-membership-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/orgs/members#get-organization-membership-for-a-user -// -//meta:operation GET /orgs/{org}/memberships/{username} -//meta:operation GET /user/memberships/orgs/{org} -func (s *OrganizationsService) GetOrgMembership(ctx context.Context, user, org string) (*Membership, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("orgs/%v/memberships/%v", org, user) - } else { - u = fmt.Sprintf("user/memberships/orgs/%v", org) - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - membership := new(Membership) - resp, err := s.client.Do(ctx, req, membership) - if err != nil { - return nil, resp, err - } - - return membership, resp, nil -} - -// EditOrgMembership edits the membership for user in specified organization. -// Passing an empty string for user will edit the membership for the -// authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/members#set-organization-membership-for-a-user -// GitHub API docs: https://docs.github.com/rest/orgs/members#update-an-organization-membership-for-the-authenticated-user -// -//meta:operation PUT /orgs/{org}/memberships/{username} -//meta:operation PATCH /user/memberships/orgs/{org} -func (s *OrganizationsService) EditOrgMembership(ctx context.Context, user, org string, membership *Membership) (*Membership, *Response, error) { - var u, method string - if user != "" { - u = fmt.Sprintf("orgs/%v/memberships/%v", org, user) - method = "PUT" - } else { - u = fmt.Sprintf("user/memberships/orgs/%v", org) - method = "PATCH" - } - - req, err := s.client.NewRequest(method, u, membership) - if err != nil { - return nil, nil, err - } - - m := new(Membership) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// RemoveOrgMembership removes user from the specified organization. If the -// user has been invited to the organization, this will cancel their invitation. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#remove-organization-membership-for-a-user -// -//meta:operation DELETE /orgs/{org}/memberships/{username} -func (s *OrganizationsService) RemoveOrgMembership(ctx context.Context, user, org string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/memberships/%v", org, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListPendingOrgInvitations returns a list of pending invitations. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-pending-organization-invitations -// -//meta:operation GET /orgs/{org}/invitations -func (s *OrganizationsService) ListPendingOrgInvitations(ctx context.Context, org string, opts *ListOptions) ([]*Invitation, *Response, error) { - u := fmt.Sprintf("orgs/%v/invitations", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pendingInvitations []*Invitation - resp, err := s.client.Do(ctx, req, &pendingInvitations) - if err != nil { - return nil, resp, err - } - - return pendingInvitations, resp, nil -} - -// CreateOrgInvitationOptions specifies the parameters to the OrganizationService.Invite -// method. -type CreateOrgInvitationOptions struct { - // GitHub user ID for the person you are inviting. Not required if you provide Email. - InviteeID *int64 `json:"invitee_id,omitempty"` - // Email address of the person you are inviting, which can be an existing GitHub user. - // Not required if you provide InviteeID - Email *string `json:"email,omitempty"` - // Specify role for new member. Can be one of: - // * admin - Organization owners with full administrative rights to the - // organization and complete access to all repositories and teams. - // * direct_member - Non-owner organization members with ability to see - // other members and join teams by invitation. - // * billing_manager - Non-owner organization members with ability to - // manage the billing settings of your organization. - // Default is "direct_member". - Role *string `json:"role,omitempty"` - TeamID []int64 `json:"team_ids,omitempty"` -} - -// CreateOrgInvitation invites people to an organization by using their GitHub user ID or their email address. 
-// In order to create invitations in an organization, -// the authenticated user must be an organization owner. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#create-an-organization-invitation -// -//meta:operation POST /orgs/{org}/invitations -func (s *OrganizationsService) CreateOrgInvitation(ctx context.Context, org string, opts *CreateOrgInvitationOptions) (*Invitation, *Response, error) { - u := fmt.Sprintf("orgs/%v/invitations", org) - - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - var invitation *Invitation - resp, err := s.client.Do(ctx, req, &invitation) - if err != nil { - return nil, resp, err - } - - return invitation, resp, nil -} - -// ListOrgInvitationTeams lists all teams associated with an invitation. In order to see invitations in an organization, -// the authenticated user must be an organization owner. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-organization-invitation-teams -// -//meta:operation GET /orgs/{org}/invitations/{invitation_id}/teams -func (s *OrganizationsService) ListOrgInvitationTeams(ctx context.Context, org, invitationID string, opts *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/invitations/%v/teams", org, invitationID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var orgInvitationTeams []*Team - resp, err := s.client.Do(ctx, req, &orgInvitationTeams) - if err != nil { - return nil, resp, err - } - - return orgInvitationTeams, resp, nil -} - -// ListFailedOrgInvitations returns a list of failed inviatations. -// -// GitHub API docs: https://docs.github.com/rest/orgs/members#list-failed-organization-invitations -// -//meta:operation GET /orgs/{org}/failed_invitations -func (s *OrganizationsService) ListFailedOrgInvitations(ctx context.Context, org string, opts *ListOptions) ([]*Invitation, *Response, error) { - u := fmt.Sprintf("orgs/%v/failed_invitations", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var failedInvitations []*Invitation - resp, err := s.client.Do(ctx, req, &failedInvitations) - if err != nil { - return nil, resp, err - } - - return failedInvitations, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_packages.go b/vendor/github.com/google/go-github/v57/github/orgs_packages.go deleted file mode 100644 index 4fb9a63b..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_packages.go +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListPackages lists the packages for an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-packages-for-an-organization -// -//meta:operation GET /orgs/{org}/packages -func (s *OrganizationsService) ListPackages(ctx context.Context, org string, opts *PackageListOptions) ([]*Package, *Response, error) { - u := fmt.Sprintf("orgs/%v/packages", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var packages []*Package - resp, err := s.client.Do(ctx, req, &packages) - if err != nil { - return nil, resp, err - } - - return packages, resp, nil -} - -// GetPackage gets a package by name from an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-for-an-organization -// -//meta:operation GET /orgs/{org}/packages/{package_type}/{package_name} -func (s *OrganizationsService) GetPackage(ctx context.Context, org, packageType, packageName string) (*Package, *Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v", org, packageType, packageName) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pack *Package - resp, err := s.client.Do(ctx, req, &pack) - if err != nil { - return nil, resp, err - } - - return pack, resp, nil -} - -// DeletePackage deletes a package from an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-a-package-for-an-organization -// -//meta:operation DELETE /orgs/{org}/packages/{package_type}/{package_name} -func (s *OrganizationsService) DeletePackage(ctx context.Context, org, packageType, packageName string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v", org, packageType, packageName) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RestorePackage restores a package to an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-a-package-for-an-organization -// -//meta:operation POST /orgs/{org}/packages/{package_type}/{package_name}/restore -func (s *OrganizationsService) RestorePackage(ctx context.Context, org, packageType, packageName string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v/restore", org, packageType, packageName) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// PackageGetAllVersions gets all versions of a package in an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-package-versions-for-a-package-owned-by-an-organization -// -//meta:operation GET /orgs/{org}/packages/{package_type}/{package_name}/versions -func (s *OrganizationsService) PackageGetAllVersions(ctx context.Context, org, packageType, packageName string, opts *PackageListOptions) ([]*PackageVersion, *Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions", org, packageType, packageName) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var versions []*PackageVersion - resp, err := s.client.Do(ctx, req, &versions) - if err != nil { - return nil, resp, err - } - - return versions, resp, nil -} - -// PackageGetVersion gets a specific version of a package in an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-version-for-an-organization -// -//meta:operation GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id} -func (s *OrganizationsService) PackageGetVersion(ctx context.Context, org, packageType, packageName string, packageVersionID int64) (*PackageVersion, *Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions/%v", org, packageType, packageName, packageVersionID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var version *PackageVersion - resp, err := s.client.Do(ctx, req, &version) - if err != nil { - return nil, resp, err - } - - return version, resp, nil -} - -// PackageDeleteVersion deletes a package version from an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-package-version-for-an-organization -// -//meta:operation DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id} -func (s *OrganizationsService) PackageDeleteVersion(ctx context.Context, org, packageType, packageName string, packageVersionID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions/%v", org, packageType, packageName, packageVersionID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// PackageRestoreVersion restores a package version to an organization. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-package-version-for-an-organization -// -//meta:operation POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore -func (s *OrganizationsService) PackageRestoreVersion(ctx context.Context, org, packageType, packageName string, packageVersionID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions/%v/restore", org, packageType, packageName, packageVersionID) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_personal_access_tokens.go b/vendor/github.com/google/go-github/v57/github/orgs_personal_access_tokens.go deleted file mode 100644 index 0d786114..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_personal_access_tokens.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" -) - -// ReviewPersonalAccessTokenRequestOptions specifies the parameters to the ReviewPersonalAccessTokenRequest method. -type ReviewPersonalAccessTokenRequestOptions struct { - Action string `json:"action"` - Reason *string `json:"reason,omitempty"` -} - -// ReviewPersonalAccessTokenRequest approves or denies a pending request to access organization resources via a fine-grained personal access token. -// Only GitHub Apps can call this API, using the `organization_personal_access_token_requests: write` permission. -// `action` can be one of `approve` or `deny`. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/personal-access-tokens#review-a-request-to-access-organization-resources-with-a-fine-grained-personal-access-token -// -//meta:operation POST /orgs/{org}/personal-access-token-requests/{pat_request_id} -func (s *OrganizationsService) ReviewPersonalAccessTokenRequest(ctx context.Context, org string, requestID int64, opts ReviewPersonalAccessTokenRequestOptions) (*Response, error) { - u := fmt.Sprintf("orgs/%v/personal-access-token-requests/%v", org, requestID) - - req, err := s.client.NewRequest(http.MethodPost, u, &opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_projects.go b/vendor/github.com/google/go-github/v57/github/orgs_projects.go deleted file mode 100644 index 454d8cf1..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_projects.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListProjects lists the projects for an organization. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#list-organization-projects -// -//meta:operation GET /orgs/{org}/projects -func (s *OrganizationsService) ListProjects(ctx context.Context, org string, opts *ProjectListOptions) ([]*Project, *Response, error) { - u := fmt.Sprintf("orgs/%v/projects", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - var projects []*Project - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// CreateProject creates a GitHub Project for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#create-an-organization-project -// -//meta:operation POST /orgs/{org}/projects -func (s *OrganizationsService) CreateProject(ctx context.Context, org string, opts *ProjectOptions) (*Project, *Response, error) { - u := fmt.Sprintf("orgs/%v/projects", org) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - project := &Project{} - resp, err := s.client.Do(ctx, req, project) - if err != nil { - return nil, resp, err - } - - return project, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_properties.go b/vendor/github.com/google/go-github/v57/github/orgs_properties.go deleted file mode 100644 index 1daac811..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_properties.go +++ /dev/null @@ -1,198 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// CustomProperty represents an organization custom property object. 
-type CustomProperty struct { - // PropertyName is required for most endpoints except when calling CreateOrUpdateCustomProperty; - // where this is sent in the path and thus can be omitted. - PropertyName *string `json:"property_name,omitempty"` - // Possible values for ValueType are: string, single_select - ValueType string `json:"value_type"` - Required *bool `json:"required,omitempty"` - DefaultValue *string `json:"default_value,omitempty"` - Description *string `json:"description,omitempty"` - AllowedValues []string `json:"allowed_values,omitempty"` -} - -// RepoCustomPropertyValue represents a repository custom property value. -type RepoCustomPropertyValue struct { - RepositoryID int64 `json:"repository_id"` - RepositoryName string `json:"repository_name"` - RepositoryFullName string `json:"repository_full_name"` - Properties []*CustomPropertyValue `json:"properties"` -} - -// CustomPropertyValue represents a custom property value. -type CustomPropertyValue struct { - PropertyName string `json:"property_name"` - Value *string `json:"value,omitempty"` -} - -// GetAllCustomProperties gets all custom properties that are defined for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#get-all-custom-properties-for-an-organization -// -//meta:operation GET /orgs/{org}/properties/schema -func (s *OrganizationsService) GetAllCustomProperties(ctx context.Context, org string) ([]*CustomProperty, *Response, error) { - u := fmt.Sprintf("orgs/%v/properties/schema", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var customProperties []*CustomProperty - resp, err := s.client.Do(ctx, req, &customProperties) - if err != nil { - return nil, resp, err - } - - return customProperties, resp, nil -} - -// CreateOrUpdateCustomProperties creates new or updates existing custom properties that are defined for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#create-or-update-custom-properties-for-an-organization -// -//meta:operation PATCH /orgs/{org}/properties/schema -func (s *OrganizationsService) CreateOrUpdateCustomProperties(ctx context.Context, org string, properties []*CustomProperty) ([]*CustomProperty, *Response, error) { - u := fmt.Sprintf("orgs/%v/properties/schema", org) - - params := struct { - Properties []*CustomProperty `json:"properties"` - }{ - Properties: properties, - } - - req, err := s.client.NewRequest("PATCH", u, params) - if err != nil { - return nil, nil, err - } - - var customProperties []*CustomProperty - resp, err := s.client.Do(ctx, req, &customProperties) - if err != nil { - return nil, resp, err - } - - return customProperties, resp, nil -} - -// GetCustomProperty gets a custom property that is defined for the specified organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#get-a-custom-property-for-an-organization -// -//meta:operation GET /orgs/{org}/properties/schema/{custom_property_name} -func (s *OrganizationsService) GetCustomProperty(ctx context.Context, org, name string) (*CustomProperty, *Response, error) { - u := fmt.Sprintf("orgs/%v/properties/schema/%v", org, name) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var customProperty *CustomProperty - resp, err := s.client.Do(ctx, req, &customProperty) - if err != nil { - return nil, resp, err - } - - return customProperty, resp, nil -} - -// CreateOrUpdateCustomProperty creates a new or updates an existing custom property that is defined for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#create-or-update-a-custom-property-for-an-organization -// -//meta:operation PUT /orgs/{org}/properties/schema/{custom_property_name} -func (s *OrganizationsService) CreateOrUpdateCustomProperty(ctx context.Context, org, customPropertyName string, property *CustomProperty) (*CustomProperty, *Response, error) { - u := fmt.Sprintf("orgs/%v/properties/schema/%v", org, customPropertyName) - - req, err := s.client.NewRequest("PUT", u, property) - if err != nil { - return nil, nil, err - } - - var customProperty *CustomProperty - resp, err := s.client.Do(ctx, req, &customProperty) - if err != nil { - return nil, resp, err - } - - return customProperty, resp, nil -} - -// RemoveCustomProperty removes a custom property that is defined for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#remove-a-custom-property-for-an-organization -// -//meta:operation DELETE /orgs/{org}/properties/schema/{custom_property_name} -func (s *OrganizationsService) RemoveCustomProperty(ctx context.Context, org, customPropertyName string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/properties/schema/%v", org, customPropertyName) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListCustomPropertyValues lists all custom property values for repositories in the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#list-custom-property-values-for-organization-repositories -// -//meta:operation GET /orgs/{org}/properties/values -func (s *OrganizationsService) ListCustomPropertyValues(ctx context.Context, org string, opts *ListOptions) ([]*RepoCustomPropertyValue, *Response, error) { - u := fmt.Sprintf("orgs/%v/properties/values", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var repoCustomPropertyValues []*RepoCustomPropertyValue - resp, err := s.client.Do(ctx, req, &repoCustomPropertyValues) - if err != nil { - return nil, resp, err - } - - return repoCustomPropertyValues, resp, nil -} - -// CreateOrUpdateRepoCustomPropertyValues creates new or updates existing custom property values across multiple repositories for the specified organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/properties#create-or-update-custom-property-values-for-organization-repositories -// -//meta:operation PATCH /orgs/{org}/properties/values -func (s *OrganizationsService) CreateOrUpdateRepoCustomPropertyValues(ctx context.Context, org string, repoNames []string, properties []*CustomProperty) (*Response, error) { - u := fmt.Sprintf("orgs/%v/properties/values", org) - - params := struct { - RepositoryNames []string `json:"repository_names"` - Properties []*CustomProperty `json:"properties"` - }{ - RepositoryNames: repoNames, - Properties: properties, - } - - req, err := s.client.NewRequest("PATCH", u, params) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_rules.go b/vendor/github.com/google/go-github/v57/github/orgs_rules.go deleted file mode 100644 index 37c06a73..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_rules.go +++ /dev/null @@ -1,115 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetAllOrganizationRulesets gets all the rulesets for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/rules#get-all-organization-repository-rulesets -// -//meta:operation GET /orgs/{org}/rulesets -func (s *OrganizationsService) GetAllOrganizationRulesets(ctx context.Context, org string) ([]*Ruleset, *Response, error) { - u := fmt.Sprintf("orgs/%v/rulesets", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rulesets []*Ruleset - resp, err := s.client.Do(ctx, req, &rulesets) - if err != nil { - return nil, resp, err - } - - return rulesets, resp, nil -} - -// CreateOrganizationRuleset creates a ruleset for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/rules#create-an-organization-repository-ruleset -// -//meta:operation POST /orgs/{org}/rulesets -func (s *OrganizationsService) CreateOrganizationRuleset(ctx context.Context, org string, rs *Ruleset) (*Ruleset, *Response, error) { - u := fmt.Sprintf("orgs/%v/rulesets", org) - - req, err := s.client.NewRequest("POST", u, rs) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// GetOrganizationRuleset gets a ruleset from the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/rules#get-an-organization-repository-ruleset -// -//meta:operation GET /orgs/{org}/rulesets/{ruleset_id} -func (s *OrganizationsService) GetOrganizationRuleset(ctx context.Context, org string, rulesetID int64) (*Ruleset, *Response, error) { - u := fmt.Sprintf("orgs/%v/rulesets/%v", org, rulesetID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// UpdateOrganizationRuleset updates a ruleset from the specified organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/rules#update-an-organization-repository-ruleset -// -//meta:operation PUT /orgs/{org}/rulesets/{ruleset_id} -func (s *OrganizationsService) UpdateOrganizationRuleset(ctx context.Context, org string, rulesetID int64, rs *Ruleset) (*Ruleset, *Response, error) { - u := fmt.Sprintf("orgs/%v/rulesets/%v", org, rulesetID) - - req, err := s.client.NewRequest("PUT", u, rs) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// DeleteOrganizationRuleset deletes a ruleset from the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/rules#delete-an-organization-repository-ruleset -// -//meta:operation DELETE /orgs/{org}/rulesets/{ruleset_id} -func (s *OrganizationsService) DeleteOrganizationRuleset(ctx context.Context, org string, rulesetID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/rulesets/%v", org, rulesetID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/orgs_security_managers.go b/vendor/github.com/google/go-github/v57/github/orgs_security_managers.go deleted file mode 100644 index 08037727..00000000 --- a/vendor/github.com/google/go-github/v57/github/orgs_security_managers.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListSecurityManagerTeams lists all security manager teams for an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/security-managers#list-security-manager-teams -// -//meta:operation GET /orgs/{org}/security-managers -func (s *OrganizationsService) ListSecurityManagerTeams(ctx context.Context, org string) ([]*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/security-managers", org) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// AddSecurityManagerTeam adds a team to the list of security managers for an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/security-managers#add-a-security-manager-team -// -//meta:operation PUT /orgs/{org}/security-managers/teams/{team_slug} -func (s *OrganizationsService) AddSecurityManagerTeam(ctx context.Context, org, team string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/security-managers/teams/%v", org, team) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveSecurityManagerTeam removes a team from the list of security managers for an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/security-managers#remove-a-security-manager-team -// -//meta:operation DELETE /orgs/{org}/security-managers/teams/{team_slug} -func (s *OrganizationsService) RemoveSecurityManagerTeam(ctx context.Context, org, team string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/security-managers/teams/%v", org, team) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/packages.go b/vendor/github.com/google/go-github/v57/github/packages.go deleted file mode 100644 index ef7df074..00000000 --- a/vendor/github.com/google/go-github/v57/github/packages.go +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// Package represents a GitHub package. -type Package struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - PackageType *string `json:"package_type,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Owner *User `json:"owner,omitempty"` - PackageVersion *PackageVersion `json:"package_version,omitempty"` - Registry *PackageRegistry `json:"registry,omitempty"` - URL *string `json:"url,omitempty"` - VersionCount *int64 `json:"version_count,omitempty"` - Visibility *string `json:"visibility,omitempty"` - Repository *Repository `json:"repository,omitempty"` -} - -func (p Package) String() string { - return Stringify(p) -} - -// PackageVersion represents a GitHub package version. -type PackageVersion struct { - ID *int64 `json:"id,omitempty"` - Version *string `json:"version,omitempty"` - Summary *string `json:"summary,omitempty"` - Body *string `json:"body,omitempty"` - BodyHTML *string `json:"body_html,omitempty"` - Release *PackageRelease `json:"release,omitempty"` - Manifest *string `json:"manifest,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - TagName *string `json:"tag_name,omitempty"` - TargetCommitish *string `json:"target_commitish,omitempty"` - TargetOID *string `json:"target_oid,omitempty"` - Draft *bool `json:"draft,omitempty"` - Prerelease *bool `json:"prerelease,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - PackageFiles []*PackageFile `json:"package_files,omitempty"` - Author *User `json:"author,omitempty"` - InstallationCommand *string `json:"installation_command,omitempty"` - Metadata *PackageMetadata `json:"metadata,omitempty"` - PackageHTMLURL *string `json:"package_html_url,omitempty"` - Name *string `json:"name,omitempty"` - URL *string `json:"url,omitempty"` -} - -func (pv PackageVersion) String() string { - return Stringify(pv) -} - -// PackageRelease represents a GitHub package version release. 
-type PackageRelease struct { - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - ID *int64 `json:"id,omitempty"` - TagName *string `json:"tag_name,omitempty"` - TargetCommitish *string `json:"target_commitish,omitempty"` - Name *string `json:"name,omitempty"` - Draft *bool `json:"draft,omitempty"` - Author *User `json:"author,omitempty"` - Prerelease *bool `json:"prerelease,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - PublishedAt *Timestamp `json:"published_at,omitempty"` -} - -func (r PackageRelease) String() string { - return Stringify(r) -} - -// PackageFile represents a GitHub package version release file. -type PackageFile struct { - DownloadURL *string `json:"download_url,omitempty"` - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - SHA256 *string `json:"sha256,omitempty"` - SHA1 *string `json:"sha1,omitempty"` - MD5 *string `json:"md5,omitempty"` - ContentType *string `json:"content_type,omitempty"` - State *string `json:"state,omitempty"` - Author *User `json:"author,omitempty"` - Size *int64 `json:"size,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -func (pf PackageFile) String() string { - return Stringify(pf) -} - -// PackageRegistry represents a GitHub package registry. -type PackageRegistry struct { - AboutURL *string `json:"about_url,omitempty"` - Name *string `json:"name,omitempty"` - Type *string `json:"type,omitempty"` - URL *string `json:"url,omitempty"` - Vendor *string `json:"vendor,omitempty"` -} - -func (r PackageRegistry) String() string { - return Stringify(r) -} - -// PackageListOptions represents the optional list options for a package. -type PackageListOptions struct { - // Visibility of packages "public", "internal" or "private". - Visibility *string `url:"visibility,omitempty"` - - // PackageType represents the type of package. - // It can be one of "npm", "maven", "rubygems", "nuget", "docker", or "container". - PackageType *string `url:"package_type,omitempty"` - - // State of package either "active" or "deleted". - State *string `url:"state,omitempty"` - - ListOptions -} - -// PackageMetadata represents metadata from a package. -type PackageMetadata struct { - PackageType *string `json:"package_type,omitempty"` - Container *PackageContainerMetadata `json:"container,omitempty"` -} - -func (r PackageMetadata) String() string { - return Stringify(r) -} - -// PackageContainerMetadata represents container metadata for docker container packages. -type PackageContainerMetadata struct { - Tags []string `json:"tags,omitempty"` -} - -func (r PackageContainerMetadata) String() string { - return Stringify(r) -} diff --git a/vendor/github.com/google/go-github/v57/github/projects.go b/vendor/github.com/google/go-github/v57/github/projects.go deleted file mode 100644 index c5c42f89..00000000 --- a/vendor/github.com/google/go-github/v57/github/projects.go +++ /dev/null @@ -1,634 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ProjectsService provides access to the projects functions in the -// GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/projects -type ProjectsService service - -// Project represents a GitHub Project. 
-type Project struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - ColumnsURL *string `json:"columns_url,omitempty"` - OwnerURL *string `json:"owner_url,omitempty"` - Name *string `json:"name,omitempty"` - Body *string `json:"body,omitempty"` - Number *int `json:"number,omitempty"` - State *string `json:"state,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` - OrganizationPermission *string `json:"organization_permission,omitempty"` - Private *bool `json:"private,omitempty"` - - // The User object that generated the project. - Creator *User `json:"creator,omitempty"` -} - -func (p Project) String() string { - return Stringify(p) -} - -// GetProject gets a GitHub Project for a repo. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#get-a-project -// -//meta:operation GET /projects/{project_id} -func (s *ProjectsService) GetProject(ctx context.Context, id int64) (*Project, *Response, error) { - u := fmt.Sprintf("projects/%v", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - project := &Project{} - resp, err := s.client.Do(ctx, req, project) - if err != nil { - return nil, resp, err - } - - return project, resp, nil -} - -// ProjectOptions specifies the parameters to the -// RepositoriesService.CreateProject and -// ProjectsService.UpdateProject methods. -type ProjectOptions struct { - // The name of the project. (Required for creation; optional for update.) - Name *string `json:"name,omitempty"` - // The body of the project. (Optional.) - Body *string `json:"body,omitempty"` - - // The following field(s) are only applicable for update. - // They should be left with zero values for creation. - - // State of the project. Either "open" or "closed". (Optional.) - State *string `json:"state,omitempty"` - // The permission level that all members of the project's organization - // will have on this project. - // Setting the organization permission is only available - // for organization projects. (Optional.) - OrganizationPermission *string `json:"organization_permission,omitempty"` - // Sets visibility of the project within the organization. - // Setting visibility is only available - // for organization projects.(Optional.) - Private *bool `json:"private,omitempty"` -} - -// UpdateProject updates a repository project. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#update-a-project -// -//meta:operation PATCH /projects/{project_id} -func (s *ProjectsService) UpdateProject(ctx context.Context, id int64, opts *ProjectOptions) (*Project, *Response, error) { - u := fmt.Sprintf("projects/%v", id) - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - project := &Project{} - resp, err := s.client.Do(ctx, req, project) - if err != nil { - return nil, resp, err - } - - return project, resp, nil -} - -// DeleteProject deletes a GitHub Project from a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/projects/projects#delete-a-project -// -//meta:operation DELETE /projects/{project_id} -func (s *ProjectsService) DeleteProject(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("projects/%v", id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - return s.client.Do(ctx, req, nil) -} - -// ProjectColumn represents a column of a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/repos/projects/ -type ProjectColumn struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - URL *string `json:"url,omitempty"` - ProjectURL *string `json:"project_url,omitempty"` - CardsURL *string `json:"cards_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -// ListProjectColumns lists the columns of a GitHub Project for a repo. -// -// GitHub API docs: https://docs.github.com/rest/projects/columns#list-project-columns -// -//meta:operation GET /projects/{project_id}/columns -func (s *ProjectsService) ListProjectColumns(ctx context.Context, projectID int64, opts *ListOptions) ([]*ProjectColumn, *Response, error) { - u := fmt.Sprintf("projects/%v/columns", projectID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - columns := []*ProjectColumn{} - resp, err := s.client.Do(ctx, req, &columns) - if err != nil { - return nil, resp, err - } - - return columns, resp, nil -} - -// GetProjectColumn gets a column of a GitHub Project for a repo. -// -// GitHub API docs: https://docs.github.com/rest/projects/columns#get-a-project-column -// -//meta:operation GET /projects/columns/{column_id} -func (s *ProjectsService) GetProjectColumn(ctx context.Context, id int64) (*ProjectColumn, *Response, error) { - u := fmt.Sprintf("projects/columns/%v", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - column := &ProjectColumn{} - resp, err := s.client.Do(ctx, req, column) - if err != nil { - return nil, resp, err - } - - return column, resp, nil -} - -// ProjectColumnOptions specifies the parameters to the -// ProjectsService.CreateProjectColumn and -// ProjectsService.UpdateProjectColumn methods. -type ProjectColumnOptions struct { - // The name of the project column. (Required for creation and update.) - Name string `json:"name"` -} - -// CreateProjectColumn creates a column for the specified (by number) project. 
-// -// GitHub API docs: https://docs.github.com/rest/projects/columns#create-a-project-column -// -//meta:operation POST /projects/{project_id}/columns -func (s *ProjectsService) CreateProjectColumn(ctx context.Context, projectID int64, opts *ProjectColumnOptions) (*ProjectColumn, *Response, error) { - u := fmt.Sprintf("projects/%v/columns", projectID) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - column := &ProjectColumn{} - resp, err := s.client.Do(ctx, req, column) - if err != nil { - return nil, resp, err - } - - return column, resp, nil -} - -// UpdateProjectColumn updates a column of a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/projects/columns#update-an-existing-project-column -// -//meta:operation PATCH /projects/columns/{column_id} -func (s *ProjectsService) UpdateProjectColumn(ctx context.Context, columnID int64, opts *ProjectColumnOptions) (*ProjectColumn, *Response, error) { - u := fmt.Sprintf("projects/columns/%v", columnID) - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - column := &ProjectColumn{} - resp, err := s.client.Do(ctx, req, column) - if err != nil { - return nil, resp, err - } - - return column, resp, nil -} - -// DeleteProjectColumn deletes a column from a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/projects/columns#delete-a-project-column -// -//meta:operation DELETE /projects/columns/{column_id} -func (s *ProjectsService) DeleteProjectColumn(ctx context.Context, columnID int64) (*Response, error) { - u := fmt.Sprintf("projects/columns/%v", columnID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - return s.client.Do(ctx, req, nil) -} - -// ProjectColumnMoveOptions specifies the parameters to the -// ProjectsService.MoveProjectColumn method. -type ProjectColumnMoveOptions struct { - // Position can be one of "first", "last", or "after:", where - // is the ID of a column in the same project. (Required.) - Position string `json:"position"` -} - -// MoveProjectColumn moves a column within a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/projects/columns#move-a-project-column -// -//meta:operation POST /projects/columns/{column_id}/moves -func (s *ProjectsService) MoveProjectColumn(ctx context.Context, columnID int64, opts *ProjectColumnMoveOptions) (*Response, error) { - u := fmt.Sprintf("projects/columns/%v/moves", columnID) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - return s.client.Do(ctx, req, nil) -} - -// ProjectCard represents a card in a column of a GitHub Project. 
-// -// GitHub API docs: https://docs.github.com/rest/projects/cards/#get-a-project-card -type ProjectCard struct { - URL *string `json:"url,omitempty"` - ColumnURL *string `json:"column_url,omitempty"` - ContentURL *string `json:"content_url,omitempty"` - ID *int64 `json:"id,omitempty"` - Note *string `json:"note,omitempty"` - Creator *User `json:"creator,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Archived *bool `json:"archived,omitempty"` - - // The following fields are only populated by Webhook events. - ColumnID *int64 `json:"column_id,omitempty"` - - // The following fields are only populated by Events API. - ProjectID *int64 `json:"project_id,omitempty"` - ProjectURL *string `json:"project_url,omitempty"` - ColumnName *string `json:"column_name,omitempty"` - PreviousColumnName *string `json:"previous_column_name,omitempty"` // Populated in "moved_columns_in_project" event deliveries. -} - -// ProjectCardListOptions specifies the optional parameters to the -// ProjectsService.ListProjectCards method. -type ProjectCardListOptions struct { - // ArchivedState is used to list all, archived, or not_archived project cards. - // Defaults to not_archived when you omit this parameter. - ArchivedState *string `url:"archived_state,omitempty"` - - ListOptions -} - -// ListProjectCards lists the cards in a column of a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/projects/cards#list-project-cards -// -//meta:operation GET /projects/columns/{column_id}/cards -func (s *ProjectsService) ListProjectCards(ctx context.Context, columnID int64, opts *ProjectCardListOptions) ([]*ProjectCard, *Response, error) { - u := fmt.Sprintf("projects/columns/%v/cards", columnID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - cards := []*ProjectCard{} - resp, err := s.client.Do(ctx, req, &cards) - if err != nil { - return nil, resp, err - } - - return cards, resp, nil -} - -// GetProjectCard gets a card in a column of a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/projects/cards#get-a-project-card -// -//meta:operation GET /projects/columns/cards/{card_id} -func (s *ProjectsService) GetProjectCard(ctx context.Context, cardID int64) (*ProjectCard, *Response, error) { - u := fmt.Sprintf("projects/columns/cards/%v", cardID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - card := &ProjectCard{} - resp, err := s.client.Do(ctx, req, card) - if err != nil { - return nil, resp, err - } - - return card, resp, nil -} - -// ProjectCardOptions specifies the parameters to the -// ProjectsService.CreateProjectCard and -// ProjectsService.UpdateProjectCard methods. -type ProjectCardOptions struct { - // The note of the card. Note and ContentID are mutually exclusive. - Note string `json:"note,omitempty"` - // The ID (not Number) of the Issue to associate with this card. - // Note and ContentID are mutually exclusive. - ContentID int64 `json:"content_id,omitempty"` - // The type of content to associate with this card. 
Possible values are: "Issue" and "PullRequest". - ContentType string `json:"content_type,omitempty"` - // Use true to archive a project card. - // Specify false if you need to restore a previously archived project card. - Archived *bool `json:"archived,omitempty"` -} - -// CreateProjectCard creates a card in the specified column of a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/projects/cards#create-a-project-card -// -//meta:operation POST /projects/columns/{column_id}/cards -func (s *ProjectsService) CreateProjectCard(ctx context.Context, columnID int64, opts *ProjectCardOptions) (*ProjectCard, *Response, error) { - u := fmt.Sprintf("projects/columns/%v/cards", columnID) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - card := &ProjectCard{} - resp, err := s.client.Do(ctx, req, card) - if err != nil { - return nil, resp, err - } - - return card, resp, nil -} - -// UpdateProjectCard updates a card of a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/projects/cards#update-an-existing-project-card -// -//meta:operation PATCH /projects/columns/cards/{card_id} -func (s *ProjectsService) UpdateProjectCard(ctx context.Context, cardID int64, opts *ProjectCardOptions) (*ProjectCard, *Response, error) { - u := fmt.Sprintf("projects/columns/cards/%v", cardID) - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - card := &ProjectCard{} - resp, err := s.client.Do(ctx, req, card) - if err != nil { - return nil, resp, err - } - - return card, resp, nil -} - -// DeleteProjectCard deletes a card from a GitHub Project. -// -// GitHub API docs: https://docs.github.com/rest/projects/cards#delete-a-project-card -// -//meta:operation DELETE /projects/columns/cards/{card_id} -func (s *ProjectsService) DeleteProjectCard(ctx context.Context, cardID int64) (*Response, error) { - u := fmt.Sprintf("projects/columns/cards/%v", cardID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - return s.client.Do(ctx, req, nil) -} - -// ProjectCardMoveOptions specifies the parameters to the -// ProjectsService.MoveProjectCard method. -type ProjectCardMoveOptions struct { - // Position can be one of "top", "bottom", or "after:", where - // is the ID of a card in the same project. - Position string `json:"position"` - // ColumnID is the ID of a column in the same project. Note that ColumnID - // is required when using Position "after:" when that card is in - // another column; otherwise it is optional. - ColumnID int64 `json:"column_id,omitempty"` -} - -// MoveProjectCard moves a card within a GitHub Project. 
-// -// GitHub API docs: https://docs.github.com/rest/projects/cards#move-a-project-card -// -//meta:operation POST /projects/columns/cards/{card_id}/moves -func (s *ProjectsService) MoveProjectCard(ctx context.Context, cardID int64, opts *ProjectCardMoveOptions) (*Response, error) { - u := fmt.Sprintf("projects/columns/cards/%v/moves", cardID) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - return s.client.Do(ctx, req, nil) -} - -// ProjectCollaboratorOptions specifies the optional parameters to the -// ProjectsService.AddProjectCollaborator method. -type ProjectCollaboratorOptions struct { - // Permission specifies the permission to grant to the collaborator. - // Possible values are: - // "read" - can read, but not write to or administer this project. - // "write" - can read and write, but not administer this project. - // "admin" - can read, write and administer this project. - // - // Default value is "write" - Permission *string `json:"permission,omitempty"` -} - -// AddProjectCollaborator adds a collaborator to an organization project and sets -// their permission level. You must be an organization owner or a project admin to add a collaborator. -// -// GitHub API docs: https://docs.github.com/rest/projects/collaborators#add-project-collaborator -// -//meta:operation PUT /projects/{project_id}/collaborators/{username} -func (s *ProjectsService) AddProjectCollaborator(ctx context.Context, id int64, username string, opts *ProjectCollaboratorOptions) (*Response, error) { - u := fmt.Sprintf("projects/%v/collaborators/%v", id, username) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - return s.client.Do(ctx, req, nil) -} - -// RemoveProjectCollaborator removes a collaborator from an organization project. -// You must be an organization owner or a project admin to remove a collaborator. -// -// GitHub API docs: https://docs.github.com/rest/projects/collaborators#remove-user-as-a-collaborator -// -//meta:operation DELETE /projects/{project_id}/collaborators/{username} -func (s *ProjectsService) RemoveProjectCollaborator(ctx context.Context, id int64, username string) (*Response, error) { - u := fmt.Sprintf("projects/%v/collaborators/%v", id, username) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - return s.client.Do(ctx, req, nil) -} - -// ListCollaboratorOptions specifies the optional parameters to the -// ProjectsService.ListProjectCollaborators method. -type ListCollaboratorOptions struct { - // Affiliation specifies how collaborators should be filtered by their affiliation. - // Possible values are: - // "outside" - All outside collaborators of an organization-owned repository - // "direct" - All collaborators with permissions to an organization-owned repository, - // regardless of organization membership status - // "all" - All collaborators the authenticated user can see - // - // Default value is "all". - Affiliation *string `url:"affiliation,omitempty"` - - ListOptions -} - -// ListProjectCollaborators lists the collaborators for an organization project. 
For a project, -// the list of collaborators includes outside collaborators, organization members that are direct -// collaborators, organization members with access through team memberships, organization members -// with access through default organization permissions, and organization owners. You must be an -// organization owner or a project admin to list collaborators. -// -// GitHub API docs: https://docs.github.com/rest/projects/collaborators#list-project-collaborators -// -//meta:operation GET /projects/{project_id}/collaborators -func (s *ProjectsService) ListProjectCollaborators(ctx context.Context, id int64, opts *ListCollaboratorOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("projects/%v/collaborators", id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// ProjectPermissionLevel represents the permission level an organization -// member has for a given project. -type ProjectPermissionLevel struct { - // Possible values: "admin", "write", "read", "none" - Permission *string `json:"permission,omitempty"` - - User *User `json:"user,omitempty"` -} - -// ReviewProjectCollaboratorPermission returns the collaborator's permission level for an organization -// project. Possible values for the permission key: "admin", "write", "read", "none". -// You must be an organization owner or a project admin to review a user's permission level. -// -// GitHub API docs: https://docs.github.com/rest/projects/collaborators#get-project-permission-for-a-user -// -//meta:operation GET /projects/{project_id}/collaborators/{username}/permission -func (s *ProjectsService) ReviewProjectCollaboratorPermission(ctx context.Context, id int64, username string) (*ProjectPermissionLevel, *Response, error) { - u := fmt.Sprintf("projects/%v/collaborators/%v/permission", id, username) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - ppl := new(ProjectPermissionLevel) - resp, err := s.client.Do(ctx, req, ppl) - if err != nil { - return nil, resp, err - } - return ppl, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/pulls.go b/vendor/github.com/google/go-github/v57/github/pulls.go deleted file mode 100644 index 80df9fa6..00000000 --- a/vendor/github.com/google/go-github/v57/github/pulls.go +++ /dev/null @@ -1,508 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" - "fmt" -) - -// PullRequestsService handles communication with the pull request related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/pulls/ -type PullRequestsService service - -// PullRequestAutoMerge represents the "auto_merge" response for a PullRequest. 
-type PullRequestAutoMerge struct { - EnabledBy *User `json:"enabled_by,omitempty"` - MergeMethod *string `json:"merge_method,omitempty"` - CommitTitle *string `json:"commit_title,omitempty"` - CommitMessage *string `json:"commit_message,omitempty"` -} - -// PullRequest represents a GitHub pull request on a repository. -type PullRequest struct { - ID *int64 `json:"id,omitempty"` - Number *int `json:"number,omitempty"` - State *string `json:"state,omitempty"` - Locked *bool `json:"locked,omitempty"` - Title *string `json:"title,omitempty"` - Body *string `json:"body,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - MergedAt *Timestamp `json:"merged_at,omitempty"` - Labels []*Label `json:"labels,omitempty"` - User *User `json:"user,omitempty"` - Draft *bool `json:"draft,omitempty"` - Merged *bool `json:"merged,omitempty"` - Mergeable *bool `json:"mergeable,omitempty"` - MergeableState *string `json:"mergeable_state,omitempty"` - MergedBy *User `json:"merged_by,omitempty"` - MergeCommitSHA *string `json:"merge_commit_sha,omitempty"` - Rebaseable *bool `json:"rebaseable,omitempty"` - Comments *int `json:"comments,omitempty"` - Commits *int `json:"commits,omitempty"` - Additions *int `json:"additions,omitempty"` - Deletions *int `json:"deletions,omitempty"` - ChangedFiles *int `json:"changed_files,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - IssueURL *string `json:"issue_url,omitempty"` - StatusesURL *string `json:"statuses_url,omitempty"` - DiffURL *string `json:"diff_url,omitempty"` - PatchURL *string `json:"patch_url,omitempty"` - CommitsURL *string `json:"commits_url,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - ReviewCommentsURL *string `json:"review_comments_url,omitempty"` - ReviewCommentURL *string `json:"review_comment_url,omitempty"` - ReviewComments *int `json:"review_comments,omitempty"` - Assignee *User `json:"assignee,omitempty"` - Assignees []*User `json:"assignees,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - MaintainerCanModify *bool `json:"maintainer_can_modify,omitempty"` - AuthorAssociation *string `json:"author_association,omitempty"` - NodeID *string `json:"node_id,omitempty"` - RequestedReviewers []*User `json:"requested_reviewers,omitempty"` - AutoMerge *PullRequestAutoMerge `json:"auto_merge,omitempty"` - - // RequestedTeams is populated as part of the PullRequestEvent. - // See, https://docs.github.com/developers/webhooks-and-events/github-event-types#pullrequestevent for an example. - RequestedTeams []*Team `json:"requested_teams,omitempty"` - - Links *PRLinks `json:"_links,omitempty"` - Head *PullRequestBranch `json:"head,omitempty"` - Base *PullRequestBranch `json:"base,omitempty"` - - // ActiveLockReason is populated only when LockReason is provided while locking the pull request. - // Possible values are: "off-topic", "too heated", "resolved", and "spam". - ActiveLockReason *string `json:"active_lock_reason,omitempty"` -} - -func (p PullRequest) String() string { - return Stringify(p) -} - -// PRLink represents a single link object from GitHub pull request _links. -type PRLink struct { - HRef *string `json:"href,omitempty"` -} - -// PRLinks represents the "_links" object in a GitHub pull request. 
-type PRLinks struct { - Self *PRLink `json:"self,omitempty"` - HTML *PRLink `json:"html,omitempty"` - Issue *PRLink `json:"issue,omitempty"` - Comments *PRLink `json:"comments,omitempty"` - ReviewComments *PRLink `json:"review_comments,omitempty"` - ReviewComment *PRLink `json:"review_comment,omitempty"` - Commits *PRLink `json:"commits,omitempty"` - Statuses *PRLink `json:"statuses,omitempty"` -} - -// PullRequestBranch represents a base or head branch in a GitHub pull request. -type PullRequestBranch struct { - Label *string `json:"label,omitempty"` - Ref *string `json:"ref,omitempty"` - SHA *string `json:"sha,omitempty"` - Repo *Repository `json:"repo,omitempty"` - User *User `json:"user,omitempty"` -} - -// PullRequestListOptions specifies the optional parameters to the -// PullRequestsService.List method. -type PullRequestListOptions struct { - // State filters pull requests based on their state. Possible values are: - // open, closed, all. Default is "open". - State string `url:"state,omitempty"` - - // Head filters pull requests by head user and branch name in the format of: - // "user:ref-name". - Head string `url:"head,omitempty"` - - // Base filters pull requests by base branch name. - Base string `url:"base,omitempty"` - - // Sort specifies how to sort pull requests. Possible values are: created, - // updated, popularity, long-running. Default is "created". - Sort string `url:"sort,omitempty"` - - // Direction in which to sort pull requests. Possible values are: asc, desc. - // If Sort is "created" or not specified, Default is "desc", otherwise Default - // is "asc" - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// List the pull requests for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#list-pull-requests -// -//meta:operation GET /repos/{owner}/{repo}/pulls -func (s *PullRequestsService) List(ctx context.Context, owner string, repo string, opts *PullRequestListOptions) ([]*PullRequest, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pulls []*PullRequest - resp, err := s.client.Do(ctx, req, &pulls) - if err != nil { - return nil, resp, err - } - - return pulls, resp, nil -} - -// ListPullRequestsWithCommit returns pull requests associated with a commit SHA. -// -// The results may include open and closed pull requests. -// By default, the PullRequestListOptions State filters for "open". -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#list-pull-requests-associated-with-a-commit -// -//meta:operation GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls -func (s *PullRequestsService) ListPullRequestsWithCommit(ctx context.Context, owner, repo, sha string, opts *ListOptions) ([]*PullRequest, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/pulls", owner, repo, sha) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeListPullsOrBranchesForCommitPreview) - var pulls []*PullRequest - resp, err := s.client.Do(ctx, req, &pulls) - if err != nil { - return nil, resp, err - } - - return pulls, resp, nil -} - -// Get a single pull request. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#get-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number} -func (s *PullRequestsService) Get(ctx context.Context, owner string, repo string, number int) (*PullRequest, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d", owner, repo, number) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - pull := new(PullRequest) - resp, err := s.client.Do(ctx, req, pull) - if err != nil { - return nil, resp, err - } - - return pull, resp, nil -} - -// GetRaw gets a single pull request in raw (diff or patch) format. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#get-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number} -func (s *PullRequestsService) GetRaw(ctx context.Context, owner string, repo string, number int, opts RawOptions) (string, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d", owner, repo, number) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", nil, err - } - - switch opts.Type { - case Diff: - req.Header.Set("Accept", mediaTypeV3Diff) - case Patch: - req.Header.Set("Accept", mediaTypeV3Patch) - default: - return "", nil, fmt.Errorf("unsupported raw type %d", opts.Type) - } - - var buf bytes.Buffer - resp, err := s.client.Do(ctx, req, &buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// NewPullRequest represents a new pull request to be created. -type NewPullRequest struct { - Title *string `json:"title,omitempty"` - Head *string `json:"head,omitempty"` - HeadRepo *string `json:"head_repo,omitempty"` - Base *string `json:"base,omitempty"` - Body *string `json:"body,omitempty"` - Issue *int `json:"issue,omitempty"` - MaintainerCanModify *bool `json:"maintainer_can_modify,omitempty"` - Draft *bool `json:"draft,omitempty"` -} - -// Create a new pull request on the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#create-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls -func (s *PullRequestsService) Create(ctx context.Context, owner string, repo string, pull *NewPullRequest) (*PullRequest, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls", owner, repo) - req, err := s.client.NewRequest("POST", u, pull) - if err != nil { - return nil, nil, err - } - - p := new(PullRequest) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// PullRequestBranchUpdateOptions specifies the optional parameters to the -// PullRequestsService.UpdateBranch method. -type PullRequestBranchUpdateOptions struct { - // ExpectedHeadSHA specifies the most recent commit on the pull request's branch. - // Default value is the SHA of the pull request's current HEAD ref. - ExpectedHeadSHA *string `json:"expected_head_sha,omitempty"` -} - -// PullRequestBranchUpdateResponse specifies the response of pull request branch update. -type PullRequestBranchUpdateResponse struct { - Message *string `json:"message,omitempty"` - URL *string `json:"url,omitempty"` -} - -// UpdateBranch updates the pull request branch with latest upstream changes. -// -// This method might return an AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it has now scheduled the update of the pull request branch in a background task. 
-// A follow up request, after a delay of a second or so, should result -// in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#update-a-pull-request-branch -// -//meta:operation PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch -func (s *PullRequestsService) UpdateBranch(ctx context.Context, owner, repo string, number int, opts *PullRequestBranchUpdateOptions) (*PullRequestBranchUpdateResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/update-branch", owner, repo, number) - - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeUpdatePullRequestBranchPreview) - - p := new(PullRequestBranchUpdateResponse) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -type pullRequestUpdate struct { - Title *string `json:"title,omitempty"` - Body *string `json:"body,omitempty"` - State *string `json:"state,omitempty"` - Base *string `json:"base,omitempty"` - MaintainerCanModify *bool `json:"maintainer_can_modify,omitempty"` -} - -// Edit a pull request. -// pull must not be nil. -// -// The following fields are editable: Title, Body, State, Base.Ref and MaintainerCanModify. -// Base.Ref updates the base branch of the pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#update-a-pull-request -// -//meta:operation PATCH /repos/{owner}/{repo}/pulls/{pull_number} -func (s *PullRequestsService) Edit(ctx context.Context, owner string, repo string, number int, pull *PullRequest) (*PullRequest, *Response, error) { - if pull == nil { - return nil, nil, fmt.Errorf("pull must be provided") - } - - u := fmt.Sprintf("repos/%v/%v/pulls/%d", owner, repo, number) - - update := &pullRequestUpdate{ - Title: pull.Title, - Body: pull.Body, - State: pull.State, - MaintainerCanModify: pull.MaintainerCanModify, - } - // avoid updating the base branch when closing the Pull Request - // - otherwise the GitHub API server returns a "Validation Failed" error: - // "Cannot change base branch of closed pull request". - if pull.Base != nil && pull.GetState() != "closed" { - update.Base = pull.Base.Ref - } - - req, err := s.client.NewRequest("PATCH", u, update) - if err != nil { - return nil, nil, err - } - - p := new(PullRequest) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// ListCommits lists the commits in a pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#list-commits-on-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/commits -func (s *PullRequestsService) ListCommits(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*RepositoryCommit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/commits", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var commits []*RepositoryCommit - resp, err := s.client.Do(ctx, req, &commits) - if err != nil { - return nil, resp, err - } - - return commits, resp, nil -} - -// ListFiles lists the files in a pull request. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#list-pull-requests-files -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/files -func (s *PullRequestsService) ListFiles(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*CommitFile, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/files", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var commitFiles []*CommitFile - resp, err := s.client.Do(ctx, req, &commitFiles) - if err != nil { - return nil, resp, err - } - - return commitFiles, resp, nil -} - -// IsMerged checks if a pull request has been merged. -// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#check-if-a-pull-request-has-been-merged -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/merge -func (s *PullRequestsService) IsMerged(ctx context.Context, owner string, repo string, number int) (bool, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/merge", owner, repo, number) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - merged, err := parseBoolResponse(err) - return merged, resp, err -} - -// PullRequestMergeResult represents the result of merging a pull request. -type PullRequestMergeResult struct { - SHA *string `json:"sha,omitempty"` - Merged *bool `json:"merged,omitempty"` - Message *string `json:"message,omitempty"` -} - -// PullRequestOptions lets you define how a pull request will be merged. -type PullRequestOptions struct { - CommitTitle string // Title for the automatic commit message. (Optional.) - SHA string // SHA that pull request head must match to allow merge. (Optional.) - - // The merge method to use. Possible values include: "merge", "squash", and "rebase" with the default being merge. (Optional.) - MergeMethod string - - // If false, an empty string commit message will use the default commit message. If true, an empty string commit message will be used. - DontDefaultIfBlank bool -} - -type pullRequestMergeRequest struct { - CommitMessage *string `json:"commit_message,omitempty"` - CommitTitle string `json:"commit_title,omitempty"` - MergeMethod string `json:"merge_method,omitempty"` - SHA string `json:"sha,omitempty"` -} - -// Merge a pull request. -// commitMessage is an extra detail to append to automatic commit message. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/pulls#merge-a-pull-request -// -//meta:operation PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge -func (s *PullRequestsService) Merge(ctx context.Context, owner string, repo string, number int, commitMessage string, options *PullRequestOptions) (*PullRequestMergeResult, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/merge", owner, repo, number) - - pullRequestBody := &pullRequestMergeRequest{} - if commitMessage != "" { - pullRequestBody.CommitMessage = &commitMessage - } - if options != nil { - pullRequestBody.CommitTitle = options.CommitTitle - pullRequestBody.MergeMethod = options.MergeMethod - pullRequestBody.SHA = options.SHA - if options.DontDefaultIfBlank && commitMessage == "" { - pullRequestBody.CommitMessage = &commitMessage - } - } - req, err := s.client.NewRequest("PUT", u, pullRequestBody) - if err != nil { - return nil, nil, err - } - - mergeResult := new(PullRequestMergeResult) - resp, err := s.client.Do(ctx, req, mergeResult) - if err != nil { - return nil, resp, err - } - - return mergeResult, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/pulls_reviewers.go b/vendor/github.com/google/go-github/v57/github/pulls_reviewers.go deleted file mode 100644 index 3f0c50b7..00000000 --- a/vendor/github.com/google/go-github/v57/github/pulls_reviewers.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ReviewersRequest specifies users and teams for a pull request review request. -type ReviewersRequest struct { - NodeID *string `json:"node_id,omitempty"` - Reviewers []string `json:"reviewers,omitempty"` - TeamReviewers []string `json:"team_reviewers,omitempty"` -} - -// Reviewers represents reviewers of a pull request. -type Reviewers struct { - Users []*User `json:"users,omitempty"` - Teams []*Team `json:"teams,omitempty"` -} - -// RequestReviewers creates a review request for the provided reviewers for the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/review-requests#request-reviewers-for-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers -func (s *PullRequestsService) RequestReviewers(ctx context.Context, owner, repo string, number int, reviewers ReviewersRequest) (*PullRequest, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, number) - req, err := s.client.NewRequest("POST", u, &reviewers) - if err != nil { - return nil, nil, err - } - - r := new(PullRequest) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// ListReviewers lists reviewers whose reviews have been requested on the specified pull request. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/review-requests#get-all-requested-reviewers-for-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers -func (s *PullRequestsService) ListReviewers(ctx context.Context, owner, repo string, number int, opts *ListOptions) (*Reviewers, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/requested_reviewers", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - reviewers := new(Reviewers) - resp, err := s.client.Do(ctx, req, reviewers) - if err != nil { - return nil, resp, err - } - - return reviewers, resp, nil -} - -// RemoveReviewers removes the review request for the provided reviewers for the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/review-requests#remove-requested-reviewers-from-a-pull-request -// -//meta:operation DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers -func (s *PullRequestsService) RemoveReviewers(ctx context.Context, owner, repo string, number int, reviewers ReviewersRequest) (*Response, error) { - u := fmt.Sprintf("repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, number) - req, err := s.client.NewRequest("DELETE", u, &reviewers) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/rate_limit.go b/vendor/github.com/google/go-github/v57/github/rate_limit.go deleted file mode 100644 index 0fc15f81..00000000 --- a/vendor/github.com/google/go-github/v57/github/rate_limit.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import "context" - -// RateLimitService provides access to rate limit functions in the GitHub API. -type RateLimitService service - -// Rate represents the rate limit for the current client. -type Rate struct { - // The number of requests per hour the client is currently limited to. - Limit int `json:"limit"` - - // The number of remaining requests the client can make this hour. - Remaining int `json:"remaining"` - - // The time at which the current rate limit will reset. - Reset Timestamp `json:"reset"` -} - -func (r Rate) String() string { - return Stringify(r) -} - -// RateLimits represents the rate limits for the current client. -type RateLimits struct { - // The rate limit for non-search API requests. Unauthenticated - // requests are limited to 60 per hour. Authenticated requests are - // limited to 5,000 per hour. - // - // GitHub API docs: https://docs.github.com/en/rest/overview/resources-in-the-rest-api#rate-limiting - Core *Rate `json:"core"` - - // The rate limit for search API requests. Unauthenticated requests - // are limited to 10 requests per minutes. Authenticated requests are - // limited to 30 per minute. 
- // - // GitHub API docs: https://docs.github.com/en/rest/search#rate-limit - Search *Rate `json:"search"` - - // GitHub API docs: https://docs.github.com/en/graphql/overview/resource-limitations#rate-limit - GraphQL *Rate `json:"graphql"` - - // GitHub API dos: https://docs.github.com/en/rest/rate-limit - IntegrationManifest *Rate `json:"integration_manifest"` - - SourceImport *Rate `json:"source_import"` - CodeScanningUpload *Rate `json:"code_scanning_upload"` - ActionsRunnerRegistration *Rate `json:"actions_runner_registration"` - SCIM *Rate `json:"scim"` -} - -func (r RateLimits) String() string { - return Stringify(r) -} - -// Get returns the rate limits for the current client. -// -// GitHub API docs: https://docs.github.com/rest/rate-limit/rate-limit#get-rate-limit-status-for-the-authenticated-user -// -//meta:operation GET /rate_limit -func (s *RateLimitService) Get(ctx context.Context) (*RateLimits, *Response, error) { - req, err := s.client.NewRequest("GET", "rate_limit", nil) - if err != nil { - return nil, nil, err - } - - response := new(struct { - Resources *RateLimits `json:"resources"` - }) - - // This resource is not subject to rate limits. - ctx = context.WithValue(ctx, bypassRateLimitCheck, true) - resp, err := s.client.Do(ctx, req, response) - if err != nil { - return nil, resp, err - } - - if response.Resources != nil { - s.client.rateMu.Lock() - if response.Resources.Core != nil { - s.client.rateLimits[coreCategory] = *response.Resources.Core - } - if response.Resources.Search != nil { - s.client.rateLimits[searchCategory] = *response.Resources.Search - } - if response.Resources.GraphQL != nil { - s.client.rateLimits[graphqlCategory] = *response.Resources.GraphQL - } - if response.Resources.IntegrationManifest != nil { - s.client.rateLimits[integrationManifestCategory] = *response.Resources.IntegrationManifest - } - if response.Resources.SourceImport != nil { - s.client.rateLimits[sourceImportCategory] = *response.Resources.SourceImport - } - if response.Resources.CodeScanningUpload != nil { - s.client.rateLimits[codeScanningUploadCategory] = *response.Resources.CodeScanningUpload - } - if response.Resources.ActionsRunnerRegistration != nil { - s.client.rateLimits[actionsRunnerRegistrationCategory] = *response.Resources.ActionsRunnerRegistration - } - if response.Resources.SCIM != nil { - s.client.rateLimits[scimCategory] = *response.Resources.SCIM - } - s.client.rateMu.Unlock() - } - - return response.Resources, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/reactions.go b/vendor/github.com/google/go-github/v57/github/reactions.go deleted file mode 100644 index 1aa7ac38..00000000 --- a/vendor/github.com/google/go-github/v57/github/reactions.go +++ /dev/null @@ -1,570 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" -) - -// ReactionsService provides access to the reactions-related functions in the -// GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/reactions -type ReactionsService service - -// Reaction represents a GitHub reaction. -type Reaction struct { - // ID is the Reaction ID. - ID *int64 `json:"id,omitempty"` - User *User `json:"user,omitempty"` - NodeID *string `json:"node_id,omitempty"` - // Content is the type of reaction. 
- // Possible values are: - // "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". - Content *string `json:"content,omitempty"` -} - -// Reactions represents a summary of GitHub reactions. -type Reactions struct { - TotalCount *int `json:"total_count,omitempty"` - PlusOne *int `json:"+1,omitempty"` - MinusOne *int `json:"-1,omitempty"` - Laugh *int `json:"laugh,omitempty"` - Confused *int `json:"confused,omitempty"` - Heart *int `json:"heart,omitempty"` - Hooray *int `json:"hooray,omitempty"` - Rocket *int `json:"rocket,omitempty"` - Eyes *int `json:"eyes,omitempty"` - URL *string `json:"url,omitempty"` -} - -func (r Reaction) String() string { - return Stringify(r) -} - -// ListCommentReactionOptions specifies the optional parameters to the -// ReactionsService.ListCommentReactions method. -type ListCommentReactionOptions struct { - // Content restricts the returned comment reactions to only those with the given type. - // Omit this parameter to list all reactions to a commit comment. - // Possible values are: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". - Content string `url:"content,omitempty"` - - ListOptions -} - -// ListCommentReactions lists the reactions for a commit comment. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-a-commit-comment -// -//meta:operation GET /repos/{owner}/{repo}/comments/{comment_id}/reactions -func (s *ReactionsService) ListCommentReactions(ctx context.Context, owner, repo string, id int64, opts *ListCommentReactionOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v/reactions", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// CreateCommentReaction creates a reaction for a commit comment. -// Note that if you have already created a reaction of type content, the -// previously created reaction will be returned with Status: 200 OK. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-commit-comment -// -//meta:operation POST /repos/{owner}/{repo}/comments/{comment_id}/reactions -func (s *ReactionsService) CreateCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v/reactions", owner, repo, id) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteCommentReaction deletes the reaction for a commit comment. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-a-commit-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeleteCommentReaction(ctx context.Context, owner, repo string, commentID, reactionID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v/reactions/%v", owner, repo, commentID, reactionID) - - return s.deleteReaction(ctx, u) -} - -// DeleteCommentReactionByID deletes the reaction for a commit comment by repository ID. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-a-commit-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeleteCommentReactionByID(ctx context.Context, repoID, commentID, reactionID int64) (*Response, error) { - u := fmt.Sprintf("repositories/%v/comments/%v/reactions/%v", repoID, commentID, reactionID) - - return s.deleteReaction(ctx, u) -} - -// ListIssueReactions lists the reactions for an issue. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-an-issue -// -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number}/reactions -func (s *ReactionsService) ListIssueReactions(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%v/reactions", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// CreateIssueReaction creates a reaction for an issue. -// Note that if you have already created a reaction of type content, the -// previously created reaction will be returned with Status: 200 OK. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-an-issue -// -//meta:operation POST /repos/{owner}/{repo}/issues/{issue_number}/reactions -func (s *ReactionsService) CreateIssueReaction(ctx context.Context, owner, repo string, number int, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%v/reactions", owner, repo, number) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteIssueReaction deletes the reaction to an issue. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-an-issue-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id} -func (s *ReactionsService) DeleteIssueReaction(ctx context.Context, owner, repo string, issueNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/issues/%v/reactions/%v", owner, repo, issueNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -// DeleteIssueReactionByID deletes the reaction to an issue by repository ID. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-an-issue-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id} -func (s *ReactionsService) DeleteIssueReactionByID(ctx context.Context, repoID, issueNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repositories/%v/issues/%v/reactions/%v", repoID, issueNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -// ListIssueCommentReactions lists the reactions for an issue comment. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-an-issue-comment -// -//meta:operation GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions -func (s *ReactionsService) ListIssueCommentReactions(ctx context.Context, owner, repo string, id int64, opts *ListOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%v/reactions", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// CreateIssueCommentReaction creates a reaction for an issue comment. -// Note that if you have already created a reaction of type content, the -// previously created reaction will be returned with Status: 200 OK. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-an-issue-comment -// -//meta:operation POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions -func (s *ReactionsService) CreateIssueCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%v/reactions", owner, repo, id) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteIssueCommentReaction deletes the reaction to an issue comment. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-an-issue-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeleteIssueCommentReaction(ctx context.Context, owner, repo string, commentID, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/issues/comments/%v/reactions/%v", owner, repo, commentID, reactionID) - - return s.deleteReaction(ctx, url) -} - -// DeleteIssueCommentReactionByID deletes the reaction to an issue comment by repository ID. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-an-issue-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeleteIssueCommentReactionByID(ctx context.Context, repoID, commentID, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repositories/%v/issues/comments/%v/reactions/%v", repoID, commentID, reactionID) - - return s.deleteReaction(ctx, url) -} - -// ListPullRequestCommentReactions lists the reactions for a pull request review comment. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-a-pull-request-review-comment -// -//meta:operation GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions -func (s *ReactionsService) ListPullRequestCommentReactions(ctx context.Context, owner, repo string, id int64, opts *ListOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%v/reactions", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// CreatePullRequestCommentReaction creates a reaction for a pull request review comment. -// Note that if you have already created a reaction of type content, the -// previously created reaction will be returned with Status: 200 OK. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-pull-request-review-comment -// -//meta:operation POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions -func (s *ReactionsService) CreatePullRequestCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%v/reactions", owner, repo, id) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeletePullRequestCommentReaction deletes the reaction to a pull request review comment. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-a-pull-request-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeletePullRequestCommentReaction(ctx context.Context, owner, repo string, commentID, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/pulls/comments/%v/reactions/%v", owner, repo, commentID, reactionID) - - return s.deleteReaction(ctx, url) -} - -// DeletePullRequestCommentReactionByID deletes the reaction to a pull request review comment by repository ID. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-a-pull-request-comment-reaction -// -//meta:operation DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id} -func (s *ReactionsService) DeletePullRequestCommentReactionByID(ctx context.Context, repoID, commentID, reactionID int64) (*Response, error) { - url := fmt.Sprintf("repositories/%v/pulls/comments/%v/reactions/%v", repoID, commentID, reactionID) - - return s.deleteReaction(ctx, url) -} - -// ListTeamDiscussionReactions lists the reactions for a team discussion. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-a-team-discussion-legacy -// -//meta:operation GET /teams/{team_id}/discussions/{discussion_number}/reactions -func (s *ReactionsService) ListTeamDiscussionReactions(ctx context.Context, teamID int64, discussionNumber int, opts *ListOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("teams/%v/discussions/%v/reactions", teamID, discussionNumber) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// CreateTeamDiscussionReaction creates a reaction for a team discussion. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-team-discussion-legacy -// -//meta:operation POST /teams/{team_id}/discussions/{discussion_number}/reactions -func (s *ReactionsService) CreateTeamDiscussionReaction(ctx context.Context, teamID int64, discussionNumber int, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("teams/%v/discussions/%v/reactions", teamID, discussionNumber) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteTeamDiscussionReaction deletes the reaction to a team discussion. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-team-discussion-reaction -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id} -func (s *ReactionsService) DeleteTeamDiscussionReaction(ctx context.Context, org, teamSlug string, discussionNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/reactions/%v", org, teamSlug, discussionNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -// DeleteTeamDiscussionReactionByOrgIDAndTeamID deletes the reaction to a team discussion by organization ID and team ID. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-team-discussion -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions -func (s *ReactionsService) DeleteTeamDiscussionReactionByOrgIDAndTeamID(ctx context.Context, orgID, teamID, discussionNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/reactions/%v", orgID, teamID, discussionNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -// ListTeamDiscussionCommentReactions lists the reactions for a team discussion comment. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#list-reactions-for-a-team-discussion-comment-legacy -// -//meta:operation GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions -func (s *ReactionsService) ListTeamDiscussionCommentReactions(ctx context.Context, teamID int64, discussionNumber, commentNumber int, opts *ListOptions) ([]*Reaction, *Response, error) { - u := fmt.Sprintf("teams/%v/discussions/%v/comments/%v/reactions", teamID, discussionNumber, commentNumber) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var m []*Reaction - resp, err := s.client.Do(ctx, req, &m) - if err != nil { - return nil, resp, err - } - return m, resp, nil -} - -// CreateTeamDiscussionCommentReaction creates a reaction for a team discussion comment. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-team-discussion-comment-legacy -// -//meta:operation POST /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions -func (s *ReactionsService) CreateTeamDiscussionCommentReaction(ctx context.Context, teamID int64, discussionNumber, commentNumber int, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("teams/%v/discussions/%v/comments/%v/reactions", teamID, discussionNumber, commentNumber) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteTeamDiscussionCommentReaction deletes the reaction to a team discussion comment. 
-// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#delete-team-discussion-comment-reaction -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id} -func (s *ReactionsService) DeleteTeamDiscussionCommentReaction(ctx context.Context, org, teamSlug string, discussionNumber, commentNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v/reactions/%v", org, teamSlug, discussionNumber, commentNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -// DeleteTeamDiscussionCommentReactionByOrgIDAndTeamID deletes the reaction to a team discussion comment by organization ID and team ID. -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-team-discussion-comment -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions -func (s *ReactionsService) DeleteTeamDiscussionCommentReactionByOrgIDAndTeamID(ctx context.Context, orgID, teamID, discussionNumber, commentNumber int, reactionID int64) (*Response, error) { - url := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v/reactions/%v", orgID, teamID, discussionNumber, commentNumber, reactionID) - - return s.deleteReaction(ctx, url) -} - -func (s *ReactionsService) deleteReaction(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest(http.MethodDelete, url, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - return s.client.Do(ctx, req, nil) -} - -// CreateReleaseReaction creates a reaction to a release. -// Note that a response with a Status: 200 OK means that you already -// added the reaction type to this release. -// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". -// -// GitHub API docs: https://docs.github.com/rest/reactions/reactions#create-reaction-for-a-release -// -//meta:operation POST /repos/{owner}/{repo}/releases/{release_id}/reactions -func (s *ReactionsService) CreateReleaseReaction(ctx context.Context, owner, repo string, releaseID int64, content string) (*Reaction, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/releases/%v/reactions", owner, repo, releaseID) - - body := &Reaction{Content: String(content)} - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeReactionsPreview) - - m := &Reaction{} - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos.go b/vendor/github.com/google/go-github/v57/github/repos.go deleted file mode 100644 index 5fcf219b..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos.go +++ /dev/null @@ -1,2387 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "net/http" - "net/url" - "strings" -) - -const githubBranchNotProtected string = "Branch not protected" - -var ErrBranchNotProtected = errors.New("branch is not protected") - -// RepositoriesService handles communication with the repository related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/repos/ -type RepositoriesService service - -// Repository represents a GitHub repository. -type Repository struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Owner *User `json:"owner,omitempty"` - Name *string `json:"name,omitempty"` - FullName *string `json:"full_name,omitempty"` - Description *string `json:"description,omitempty"` - Homepage *string `json:"homepage,omitempty"` - CodeOfConduct *CodeOfConduct `json:"code_of_conduct,omitempty"` - DefaultBranch *string `json:"default_branch,omitempty"` - MasterBranch *string `json:"master_branch,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - PushedAt *Timestamp `json:"pushed_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CloneURL *string `json:"clone_url,omitempty"` - GitURL *string `json:"git_url,omitempty"` - MirrorURL *string `json:"mirror_url,omitempty"` - SSHURL *string `json:"ssh_url,omitempty"` - SVNURL *string `json:"svn_url,omitempty"` - Language *string `json:"language,omitempty"` - Fork *bool `json:"fork,omitempty"` - ForksCount *int `json:"forks_count,omitempty"` - NetworkCount *int `json:"network_count,omitempty"` - OpenIssuesCount *int `json:"open_issues_count,omitempty"` - OpenIssues *int `json:"open_issues,omitempty"` // Deprecated: Replaced by OpenIssuesCount. For backward compatibility OpenIssues is still populated. - StargazersCount *int `json:"stargazers_count,omitempty"` - SubscribersCount *int `json:"subscribers_count,omitempty"` - WatchersCount *int `json:"watchers_count,omitempty"` // Deprecated: Replaced by StargazersCount. For backward compatibility WatchersCount is still populated. - Watchers *int `json:"watchers,omitempty"` // Deprecated: Replaced by StargazersCount. For backward compatibility Watchers is still populated. 
- Size *int `json:"size,omitempty"` - AutoInit *bool `json:"auto_init,omitempty"` - Parent *Repository `json:"parent,omitempty"` - Source *Repository `json:"source,omitempty"` - TemplateRepository *Repository `json:"template_repository,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Permissions map[string]bool `json:"permissions,omitempty"` - AllowRebaseMerge *bool `json:"allow_rebase_merge,omitempty"` - AllowUpdateBranch *bool `json:"allow_update_branch,omitempty"` - AllowSquashMerge *bool `json:"allow_squash_merge,omitempty"` - AllowMergeCommit *bool `json:"allow_merge_commit,omitempty"` - AllowAutoMerge *bool `json:"allow_auto_merge,omitempty"` - AllowForking *bool `json:"allow_forking,omitempty"` - WebCommitSignoffRequired *bool `json:"web_commit_signoff_required,omitempty"` - DeleteBranchOnMerge *bool `json:"delete_branch_on_merge,omitempty"` - UseSquashPRTitleAsDefault *bool `json:"use_squash_pr_title_as_default,omitempty"` - SquashMergeCommitTitle *string `json:"squash_merge_commit_title,omitempty"` // Can be one of: "PR_TITLE", "COMMIT_OR_PR_TITLE" - SquashMergeCommitMessage *string `json:"squash_merge_commit_message,omitempty"` // Can be one of: "PR_BODY", "COMMIT_MESSAGES", "BLANK" - MergeCommitTitle *string `json:"merge_commit_title,omitempty"` // Can be one of: "PR_TITLE", "MERGE_MESSAGE" - MergeCommitMessage *string `json:"merge_commit_message,omitempty"` // Can be one of: "PR_BODY", "PR_TITLE", "BLANK" - Topics []string `json:"topics,omitempty"` - Archived *bool `json:"archived,omitempty"` - Disabled *bool `json:"disabled,omitempty"` - - // Only provided when using RepositoriesService.Get while in preview - License *License `json:"license,omitempty"` - - // Additional mutable fields when creating and editing a repository - Private *bool `json:"private,omitempty"` - HasIssues *bool `json:"has_issues,omitempty"` - HasWiki *bool `json:"has_wiki,omitempty"` - HasPages *bool `json:"has_pages,omitempty"` - HasProjects *bool `json:"has_projects,omitempty"` - HasDownloads *bool `json:"has_downloads,omitempty"` - HasDiscussions *bool `json:"has_discussions,omitempty"` - IsTemplate *bool `json:"is_template,omitempty"` - LicenseTemplate *string `json:"license_template,omitempty"` - GitignoreTemplate *string `json:"gitignore_template,omitempty"` - - // Options for configuring Advanced Security and Secret Scanning - SecurityAndAnalysis *SecurityAndAnalysis `json:"security_and_analysis,omitempty"` - - // Creating an organization repository. Required for non-owners. 
- TeamID *int64 `json:"team_id,omitempty"` - - // API URLs - URL *string `json:"url,omitempty"` - ArchiveURL *string `json:"archive_url,omitempty"` - AssigneesURL *string `json:"assignees_url,omitempty"` - BlobsURL *string `json:"blobs_url,omitempty"` - BranchesURL *string `json:"branches_url,omitempty"` - CollaboratorsURL *string `json:"collaborators_url,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - CommitsURL *string `json:"commits_url,omitempty"` - CompareURL *string `json:"compare_url,omitempty"` - ContentsURL *string `json:"contents_url,omitempty"` - ContributorsURL *string `json:"contributors_url,omitempty"` - DeploymentsURL *string `json:"deployments_url,omitempty"` - DownloadsURL *string `json:"downloads_url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - ForksURL *string `json:"forks_url,omitempty"` - GitCommitsURL *string `json:"git_commits_url,omitempty"` - GitRefsURL *string `json:"git_refs_url,omitempty"` - GitTagsURL *string `json:"git_tags_url,omitempty"` - HooksURL *string `json:"hooks_url,omitempty"` - IssueCommentURL *string `json:"issue_comment_url,omitempty"` - IssueEventsURL *string `json:"issue_events_url,omitempty"` - IssuesURL *string `json:"issues_url,omitempty"` - KeysURL *string `json:"keys_url,omitempty"` - LabelsURL *string `json:"labels_url,omitempty"` - LanguagesURL *string `json:"languages_url,omitempty"` - MergesURL *string `json:"merges_url,omitempty"` - MilestonesURL *string `json:"milestones_url,omitempty"` - NotificationsURL *string `json:"notifications_url,omitempty"` - PullsURL *string `json:"pulls_url,omitempty"` - ReleasesURL *string `json:"releases_url,omitempty"` - StargazersURL *string `json:"stargazers_url,omitempty"` - StatusesURL *string `json:"statuses_url,omitempty"` - SubscribersURL *string `json:"subscribers_url,omitempty"` - SubscriptionURL *string `json:"subscription_url,omitempty"` - TagsURL *string `json:"tags_url,omitempty"` - TreesURL *string `json:"trees_url,omitempty"` - TeamsURL *string `json:"teams_url,omitempty"` - - // TextMatches is only populated from search results that request text matches - // See: search.go and https://docs.github.com/rest/search/#text-match-metadata - TextMatches []*TextMatch `json:"text_matches,omitempty"` - - // Visibility is only used for Create and Edit endpoints. The visibility field - // overrides the field parameter when both are used. - // Can be one of public, private or internal. - Visibility *string `json:"visibility,omitempty"` - - // RoleName is only returned by the API 'check team permissions for a repository'. - // See: teams.go (IsTeamRepoByID) https://docs.github.com/rest/teams/teams#check-team-permissions-for-a-repository - RoleName *string `json:"role_name,omitempty"` -} - -func (r Repository) String() string { - return Stringify(r) -} - -// BranchListOptions specifies the optional parameters to the -// RepositoriesService.ListBranches method. -type BranchListOptions struct { - // Setting to true returns only protected branches. - // When set to false, only unprotected branches are returned. - // Omitting this parameter returns all branches. - // Default: nil - Protected *bool `url:"protected,omitempty"` - - ListOptions -} - -// RepositoryListOptions specifies the optional parameters to the -// RepositoriesService.List method. 
-type RepositoryListOptions struct { - // See RepositoryListByAuthenticatedUserOptions.Visibility - Visibility string `url:"visibility,omitempty"` - - // See RepositoryListByAuthenticatedUserOptions.Affiliation - Affiliation string `url:"affiliation,omitempty"` - - // See RepositoryListByUserOptions.Type or RepositoryListByAuthenticatedUserOptions.Type - Type string `url:"type,omitempty"` - - // See RepositoryListByUserOptions.Sort or RepositoryListByAuthenticatedUserOptions.Sort - Sort string `url:"sort,omitempty"` - - // See RepositoryListByUserOptions.Direction or RepositoryListByAuthenticatedUserOptions.Direction - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// SecurityAndAnalysis specifies the optional advanced security features -// that are enabled on a given repository. -type SecurityAndAnalysis struct { - AdvancedSecurity *AdvancedSecurity `json:"advanced_security,omitempty"` - SecretScanning *SecretScanning `json:"secret_scanning,omitempty"` - SecretScanningPushProtection *SecretScanningPushProtection `json:"secret_scanning_push_protection,omitempty"` - DependabotSecurityUpdates *DependabotSecurityUpdates `json:"dependabot_security_updates,omitempty"` -} - -func (s SecurityAndAnalysis) String() string { - return Stringify(s) -} - -// AdvancedSecurity specifies the state of advanced security on a repository. -// -// GitHub API docs: https://docs.github.com/github/getting-started-with-github/learning-about-github/about-github-advanced-security -type AdvancedSecurity struct { - Status *string `json:"status,omitempty"` -} - -func (a AdvancedSecurity) String() string { - return Stringify(a) -} - -// SecretScanning specifies the state of secret scanning on a repository. -// -// GitHub API docs: https://docs.github.com/code-security/secret-security/about-secret-scanning -type SecretScanning struct { - Status *string `json:"status,omitempty"` -} - -func (s SecretScanning) String() string { - return Stringify(s) -} - -// SecretScanningPushProtection specifies the state of secret scanning push protection on a repository. -// -// GitHub API docs: https://docs.github.com/code-security/secret-scanning/about-secret-scanning#about-secret-scanning-for-partner-patterns -type SecretScanningPushProtection struct { - Status *string `json:"status,omitempty"` -} - -func (s SecretScanningPushProtection) String() string { - return Stringify(s) -} - -// DependabotSecurityUpdates specifies the state of Dependabot security updates on a repository. -// -// GitHub API docs: https://docs.github.com/code-security/dependabot/dependabot-security-updates/about-dependabot-security-updates -type DependabotSecurityUpdates struct { - Status *string `json:"status,omitempty"` -} - -func (d DependabotSecurityUpdates) String() string { - return Stringify(d) -} - -// List calls either RepositoriesService.ListByUser or RepositoriesService.ListByAuthenticatedUser -// depending on whether user is empty. -// -// Deprecated: Use RepositoriesService.ListByUser or RepositoriesService.ListByAuthenticatedUser instead. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repositories-for-a-user -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repositories-for-the-authenticated-user -// -//meta:operation GET /user/repos -//meta:operation GET /users/{username}/repos -func (s *RepositoriesService) List(ctx context.Context, user string, opts *RepositoryListOptions) ([]*Repository, *Response, error) { - if opts == nil { - opts = &RepositoryListOptions{} - } - if user != "" { - return s.ListByUser(ctx, user, &RepositoryListByUserOptions{ - Type: opts.Type, - Sort: opts.Sort, - Direction: opts.Direction, - ListOptions: opts.ListOptions, - }) - } - return s.ListByAuthenticatedUser(ctx, &RepositoryListByAuthenticatedUserOptions{ - Visibility: opts.Visibility, - Affiliation: opts.Affiliation, - Type: opts.Type, - Sort: opts.Sort, - Direction: opts.Direction, - ListOptions: opts.ListOptions, - }) -} - -// RepositoryListByUserOptions specifies the optional parameters to the -// RepositoriesService.ListByUser method. -type RepositoryListByUserOptions struct { - // Limit results to repositories of the specified type. - // Default: owner - // Can be one of: all, owner, member - Type string `url:"type,omitempty"` - - // The property to sort the results by. - // Default: full_name - // Can be one of: created, updated, pushed, full_name - Sort string `url:"sort,omitempty"` - - // The order to sort by. - // Default: asc when using full_name, otherwise desc. - // Can be one of: asc, desc - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListByUser lists public repositories for the specified user. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repositories-for-a-user -// -//meta:operation GET /users/{username}/repos -func (s *RepositoriesService) ListByUser(ctx context.Context, user string, opts *RepositoryListByUserOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("users/%v/repos", user) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// RepositoryListByAuthenticatedUserOptions specifies the optional parameters to the -// RepositoriesService.ListByAuthenticatedUser method. -type RepositoryListByAuthenticatedUserOptions struct { - // Limit results to repositories with the specified visibility. - // Default: all - // Can be one of: all, public, private - Visibility string `url:"visibility,omitempty"` - - // List repos of given affiliation[s]. - // Comma-separated list of values. Can include: - // * owner: Repositories that are owned by the authenticated user. - // * collaborator: Repositories that the user has been added to as a - // collaborator. - // * organization_member: Repositories that the user has access to through - // being a member of an organization. This includes every repository on - // every team that the user is on. - // Default: owner,collaborator,organization_member - Affiliation string `url:"affiliation,omitempty"` - - // Limit results to repositories of the specified type. Will cause a 422 error if - // used in the same request as visibility or affiliation. - // Default: all - // Can be one of: all, owner, public, private, member - Type string `url:"type,omitempty"` - - // The property to sort the results by. 
- // Default: full_name - // Can be one of: created, updated, pushed, full_name - Sort string `url:"sort,omitempty"` - - // Direction in which to sort repositories. Can be one of asc or desc. - // Default: when using full_name: asc; otherwise desc - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListByAuthenticatedUser lists repositories for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repositories-for-the-authenticated-user -// -//meta:operation GET /user/repos -func (s *RepositoriesService) ListByAuthenticatedUser(ctx context.Context, opts *RepositoryListByAuthenticatedUserOptions) ([]*Repository, *Response, error) { - u := "user/repos" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// RepositoryListByOrgOptions specifies the optional parameters to the -// RepositoriesService.ListByOrg method. -type RepositoryListByOrgOptions struct { - // Type of repositories to list. Possible values are: all, public, private, - // forks, sources, member. Default is "all". - Type string `url:"type,omitempty"` - - // How to sort the repository list. Can be one of created, updated, pushed, - // full_name. Default is "created". - Sort string `url:"sort,omitempty"` - - // Direction in which to sort repositories. Can be one of asc or desc. - // Default when using full_name: asc; otherwise desc. - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListByOrg lists the repositories for an organization. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-organization-repositories -// -//meta:operation GET /orgs/{org}/repos -func (s *RepositoriesService) ListByOrg(ctx context.Context, org string, opts *RepositoryListByOrgOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("orgs/%v/repos", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeTopicsPreview, mediaTypeRepositoryVisibilityPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// RepositoryListAllOptions specifies the optional parameters to the -// RepositoriesService.ListAll method. -type RepositoryListAllOptions struct { - // ID of the last repository seen - Since int64 `url:"since,omitempty"` -} - -// ListAll lists all GitHub repositories in the order that they were created. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-public-repositories -// -//meta:operation GET /repositories -func (s *RepositoriesService) ListAll(ctx context.Context, opts *RepositoryListAllOptions) ([]*Repository, *Response, error) { - u, err := addOptions("repositories", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// createRepoRequest is a subset of Repository and is used internally -// by Create to pass only the known fields for the endpoint. -// -// See https://github.com/google/go-github/issues/1014 for more -// information. -type createRepoRequest struct { - // Name is required when creating a repo. - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - Homepage *string `json:"homepage,omitempty"` - - Private *bool `json:"private,omitempty"` - Visibility *string `json:"visibility,omitempty"` - HasIssues *bool `json:"has_issues,omitempty"` - HasProjects *bool `json:"has_projects,omitempty"` - HasWiki *bool `json:"has_wiki,omitempty"` - HasDiscussions *bool `json:"has_discussions,omitempty"` - IsTemplate *bool `json:"is_template,omitempty"` - - // Creating an organization repository. Required for non-owners. - TeamID *int64 `json:"team_id,omitempty"` - - AutoInit *bool `json:"auto_init,omitempty"` - GitignoreTemplate *string `json:"gitignore_template,omitempty"` - LicenseTemplate *string `json:"license_template,omitempty"` - AllowSquashMerge *bool `json:"allow_squash_merge,omitempty"` - AllowMergeCommit *bool `json:"allow_merge_commit,omitempty"` - AllowRebaseMerge *bool `json:"allow_rebase_merge,omitempty"` - AllowUpdateBranch *bool `json:"allow_update_branch,omitempty"` - AllowAutoMerge *bool `json:"allow_auto_merge,omitempty"` - AllowForking *bool `json:"allow_forking,omitempty"` - DeleteBranchOnMerge *bool `json:"delete_branch_on_merge,omitempty"` - UseSquashPRTitleAsDefault *bool `json:"use_squash_pr_title_as_default,omitempty"` - SquashMergeCommitTitle *string `json:"squash_merge_commit_title,omitempty"` - SquashMergeCommitMessage *string `json:"squash_merge_commit_message,omitempty"` - MergeCommitTitle *string `json:"merge_commit_title,omitempty"` - MergeCommitMessage *string `json:"merge_commit_message,omitempty"` -} - -// Create a new repository. If an organization is specified, the new -// repository will be created under that org. If the empty string is -// specified, it will be created for the authenticated user. -// -// Note that only a subset of the repo fields are used and repo must -// not be nil. -// -// Also note that this method will return the response without actually -// waiting for GitHub to finish creating the repository and letting the -// changes propagate throughout its servers. You may set up a loop with -// exponential back-off to verify repository's creation. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#create-a-repository-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/repos/repos#create-an-organization-repository -// -//meta:operation POST /orgs/{org}/repos -//meta:operation POST /user/repos -func (s *RepositoriesService) Create(ctx context.Context, org string, repo *Repository) (*Repository, *Response, error) { - var u string - if org != "" { - u = fmt.Sprintf("orgs/%v/repos", org) - } else { - u = "user/repos" - } - - repoReq := &createRepoRequest{ - Name: repo.Name, - Description: repo.Description, - Homepage: repo.Homepage, - Private: repo.Private, - Visibility: repo.Visibility, - HasIssues: repo.HasIssues, - HasProjects: repo.HasProjects, - HasWiki: repo.HasWiki, - HasDiscussions: repo.HasDiscussions, - IsTemplate: repo.IsTemplate, - TeamID: repo.TeamID, - AutoInit: repo.AutoInit, - GitignoreTemplate: repo.GitignoreTemplate, - LicenseTemplate: repo.LicenseTemplate, - AllowSquashMerge: repo.AllowSquashMerge, - AllowMergeCommit: repo.AllowMergeCommit, - AllowRebaseMerge: repo.AllowRebaseMerge, - AllowUpdateBranch: repo.AllowUpdateBranch, - AllowAutoMerge: repo.AllowAutoMerge, - AllowForking: repo.AllowForking, - DeleteBranchOnMerge: repo.DeleteBranchOnMerge, - UseSquashPRTitleAsDefault: repo.UseSquashPRTitleAsDefault, - SquashMergeCommitTitle: repo.SquashMergeCommitTitle, - SquashMergeCommitMessage: repo.SquashMergeCommitMessage, - MergeCommitTitle: repo.MergeCommitTitle, - MergeCommitMessage: repo.MergeCommitMessage, - } - - req, err := s.client.NewRequest("POST", u, repoReq) - if err != nil { - return nil, nil, err - } - - acceptHeaders := []string{mediaTypeRepositoryTemplatePreview, mediaTypeRepositoryVisibilityPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// TemplateRepoRequest represents a request to create a repository from a template. -type TemplateRepoRequest struct { - // Name is required when creating a repo. - Name *string `json:"name,omitempty"` - Owner *string `json:"owner,omitempty"` - Description *string `json:"description,omitempty"` - - IncludeAllBranches *bool `json:"include_all_branches,omitempty"` - Private *bool `json:"private,omitempty"` -} - -// CreateFromTemplate generates a repository from a template. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#create-a-repository-using-a-template -// -//meta:operation POST /repos/{template_owner}/{template_repo}/generate -func (s *RepositoriesService) CreateFromTemplate(ctx context.Context, templateOwner, templateRepo string, templateRepoReq *TemplateRepoRequest) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/generate", templateOwner, templateRepo) - - req, err := s.client.NewRequest("POST", u, templateRepoReq) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeRepositoryTemplatePreview) - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// Get fetches a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#get-a-repository -// -//meta:operation GET /repos/{owner}/{repo} -func (s *RepositoriesService) Get(ctx context.Context, owner, repo string) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when the license support fully launches - // https://docs.github.com/rest/licenses/#get-a-repositorys-license - acceptHeaders := []string{ - mediaTypeCodesOfConductPreview, - mediaTypeTopicsPreview, - mediaTypeRepositoryTemplatePreview, - mediaTypeRepositoryVisibilityPreview, - } - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - repository := new(Repository) - resp, err := s.client.Do(ctx, req, repository) - if err != nil { - return nil, resp, err - } - - return repository, resp, nil -} - -// GetCodeOfConduct gets the contents of a repository's code of conduct. -// Note that https://docs.github.com/rest/codes-of-conduct#about-the-codes-of-conduct-api -// says to use the GET /repos/{owner}/{repo} endpoint. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#get-a-repository -// -//meta:operation GET /repos/{owner}/{repo} -func (s *RepositoriesService) GetCodeOfConduct(ctx context.Context, owner, repo string) (*CodeOfConduct, *Response, error) { - u := fmt.Sprintf("repos/%v/%v", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeCodesOfConductPreview) - - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r.GetCodeOfConduct(), resp, nil -} - -// GetByID fetches a repository. -// -// Note: GetByID uses the undocumented GitHub API endpoint "GET /repositories/{repository_id}". -// -//meta:operation GET /repositories/{repository_id} -func (s *RepositoriesService) GetByID(ctx context.Context, id int64) (*Repository, *Response, error) { - u := fmt.Sprintf("repositories/%d", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - repository := new(Repository) - resp, err := s.client.Do(ctx, req, repository) - if err != nil { - return nil, resp, err - } - - return repository, resp, nil -} - -// Edit updates a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#update-a-repository -// -//meta:operation PATCH /repos/{owner}/{repo} -func (s *RepositoriesService) Edit(ctx context.Context, owner, repo string, repository *Repository) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v", owner, repo) - req, err := s.client.NewRequest("PATCH", u, repository) - if err != nil { - return nil, nil, err - } - - acceptHeaders := []string{mediaTypeRepositoryTemplatePreview, mediaTypeRepositoryVisibilityPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// Delete a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#delete-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo} -func (s *RepositoriesService) Delete(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Contributor represents a repository contributor -type Contributor struct { - Login *string `json:"login,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - GravatarID *string `json:"gravatar_id,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - FollowersURL *string `json:"followers_url,omitempty"` - FollowingURL *string `json:"following_url,omitempty"` - GistsURL *string `json:"gists_url,omitempty"` - StarredURL *string `json:"starred_url,omitempty"` - SubscriptionsURL *string `json:"subscriptions_url,omitempty"` - OrganizationsURL *string `json:"organizations_url,omitempty"` - ReposURL *string `json:"repos_url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - ReceivedEventsURL *string `json:"received_events_url,omitempty"` - Type *string `json:"type,omitempty"` - SiteAdmin *bool `json:"site_admin,omitempty"` - Contributions *int `json:"contributions,omitempty"` - Name *string `json:"name,omitempty"` - Email *string `json:"email,omitempty"` -} - -// ListContributorsOptions specifies the optional parameters to the -// RepositoriesService.ListContributors method. -type ListContributorsOptions struct { - // Include anonymous contributors in results or not - Anon string `url:"anon,omitempty"` - - ListOptions -} - -// GetVulnerabilityAlerts checks if vulnerability alerts are enabled for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#check-if-vulnerability-alerts-are-enabled-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/vulnerability-alerts -func (s *RepositoriesService) GetVulnerabilityAlerts(ctx context.Context, owner, repository string) (bool, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/vulnerability-alerts", owner, repository) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredVulnerabilityAlertsPreview) - - resp, err := s.client.Do(ctx, req, nil) - vulnerabilityAlertsEnabled, err := parseBoolResponse(err) - return vulnerabilityAlertsEnabled, resp, err -} - -// EnableVulnerabilityAlerts enables vulnerability alerts and the dependency graph for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#enable-vulnerability-alerts -// -//meta:operation PUT /repos/{owner}/{repo}/vulnerability-alerts -func (s *RepositoriesService) EnableVulnerabilityAlerts(ctx context.Context, owner, repository string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/vulnerability-alerts", owner, repository) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredVulnerabilityAlertsPreview) - - return s.client.Do(ctx, req, nil) -} - -// DisableVulnerabilityAlerts disables vulnerability alerts and the dependency graph for a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#disable-vulnerability-alerts -// -//meta:operation DELETE /repos/{owner}/{repo}/vulnerability-alerts -func (s *RepositoriesService) DisableVulnerabilityAlerts(ctx context.Context, owner, repository string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/vulnerability-alerts", owner, repository) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredVulnerabilityAlertsPreview) - - return s.client.Do(ctx, req, nil) -} - -// GetAutomatedSecurityFixes checks if the automated security fixes for a repository are enabled. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#check-if-automated-security-fixes-are-enabled-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/automated-security-fixes -func (s *RepositoriesService) GetAutomatedSecurityFixes(ctx context.Context, owner, repository string) (*AutomatedSecurityFixes, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/automated-security-fixes", owner, repository) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - p := new(AutomatedSecurityFixes) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - return p, resp, nil -} - -// EnableAutomatedSecurityFixes enables the automated security fixes for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#enable-automated-security-fixes -// -//meta:operation PUT /repos/{owner}/{repo}/automated-security-fixes -func (s *RepositoriesService) EnableAutomatedSecurityFixes(ctx context.Context, owner, repository string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/automated-security-fixes", owner, repository) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DisableAutomatedSecurityFixes disables vulnerability alerts and the dependency graph for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#disable-automated-security-fixes -// -//meta:operation DELETE /repos/{owner}/{repo}/automated-security-fixes -func (s *RepositoriesService) DisableAutomatedSecurityFixes(ctx context.Context, owner, repository string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/automated-security-fixes", owner, repository) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListContributors lists contributors for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repository-contributors -// -//meta:operation GET /repos/{owner}/{repo}/contributors -func (s *RepositoriesService) ListContributors(ctx context.Context, owner string, repository string, opts *ListContributorsOptions) ([]*Contributor, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/contributors", owner, repository) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var contributor []*Contributor - resp, err := s.client.Do(ctx, req, &contributor) - if err != nil { - return nil, resp, err - } - - return contributor, resp, nil -} - -// ListLanguages lists languages for the specified repository. 
The returned map -// specifies the languages and the number of bytes of code written in that -// language. For example: -// -// { -// "C": 78769, -// "Python": 7769 -// } -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repository-languages -// -//meta:operation GET /repos/{owner}/{repo}/languages -func (s *RepositoriesService) ListLanguages(ctx context.Context, owner string, repo string) (map[string]int, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/languages", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - languages := make(map[string]int) - resp, err := s.client.Do(ctx, req, &languages) - if err != nil { - return nil, resp, err - } - - return languages, resp, nil -} - -// ListTeams lists the teams for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repository-teams -// -//meta:operation GET /repos/{owner}/{repo}/teams -func (s *RepositoriesService) ListTeams(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/teams", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// RepositoryTag represents a repository tag. -type RepositoryTag struct { - Name *string `json:"name,omitempty"` - Commit *Commit `json:"commit,omitempty"` - ZipballURL *string `json:"zipball_url,omitempty"` - TarballURL *string `json:"tarball_url,omitempty"` -} - -// ListTags lists tags for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-repository-tags -// -//meta:operation GET /repos/{owner}/{repo}/tags -func (s *RepositoriesService) ListTags(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*RepositoryTag, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/tags", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var tags []*RepositoryTag - resp, err := s.client.Do(ctx, req, &tags) - if err != nil { - return nil, resp, err - } - - return tags, resp, nil -} - -// Branch represents a repository branch -type Branch struct { - Name *string `json:"name,omitempty"` - Commit *RepositoryCommit `json:"commit,omitempty"` - Protected *bool `json:"protected,omitempty"` -} - -// Protection represents a repository branch's protection. 
-type Protection struct { - RequiredStatusChecks *RequiredStatusChecks `json:"required_status_checks"` - RequiredPullRequestReviews *PullRequestReviewsEnforcement `json:"required_pull_request_reviews"` - EnforceAdmins *AdminEnforcement `json:"enforce_admins"` - Restrictions *BranchRestrictions `json:"restrictions"` - RequireLinearHistory *RequireLinearHistory `json:"required_linear_history"` - AllowForcePushes *AllowForcePushes `json:"allow_force_pushes"` - AllowDeletions *AllowDeletions `json:"allow_deletions"` - RequiredConversationResolution *RequiredConversationResolution `json:"required_conversation_resolution"` - BlockCreations *BlockCreations `json:"block_creations,omitempty"` - LockBranch *LockBranch `json:"lock_branch,omitempty"` - AllowForkSyncing *AllowForkSyncing `json:"allow_fork_syncing,omitempty"` - RequiredSignatures *SignaturesProtectedBranch `json:"required_signatures,omitempty"` - URL *string `json:"url,omitempty"` -} - -// BlockCreations represents whether users can push changes that create branches. If this is true, this -// setting blocks pushes that create new branches, unless the push is initiated by a user, team, or app -// which has the ability to push. -type BlockCreations struct { - Enabled *bool `json:"enabled,omitempty"` -} - -// LockBranch represents if the branch is marked as read-only. If this is true, users will not be able to push to the branch. -type LockBranch struct { - Enabled *bool `json:"enabled,omitempty"` -} - -// AllowForkSyncing represents whether users can pull changes from upstream when the branch is locked. -type AllowForkSyncing struct { - Enabled *bool `json:"enabled,omitempty"` -} - -// BranchProtectionRule represents the rule applied to a repositories branch. -type BranchProtectionRule struct { - ID *int64 `json:"id,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - Name *string `json:"name,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - PullRequestReviewsEnforcementLevel *string `json:"pull_request_reviews_enforcement_level,omitempty"` - RequiredApprovingReviewCount *int `json:"required_approving_review_count,omitempty"` - DismissStaleReviewsOnPush *bool `json:"dismiss_stale_reviews_on_push,omitempty"` - AuthorizedDismissalActorsOnly *bool `json:"authorized_dismissal_actors_only,omitempty"` - IgnoreApprovalsFromContributors *bool `json:"ignore_approvals_from_contributors,omitempty"` - RequireCodeOwnerReview *bool `json:"require_code_owner_review,omitempty"` - RequiredStatusChecks []string `json:"required_status_checks,omitempty"` - RequiredStatusChecksEnforcementLevel *string `json:"required_status_checks_enforcement_level,omitempty"` - StrictRequiredStatusChecksPolicy *bool `json:"strict_required_status_checks_policy,omitempty"` - SignatureRequirementEnforcementLevel *string `json:"signature_requirement_enforcement_level,omitempty"` - LinearHistoryRequirementEnforcementLevel *string `json:"linear_history_requirement_enforcement_level,omitempty"` - AdminEnforced *bool `json:"admin_enforced,omitempty"` - AllowForcePushesEnforcementLevel *string `json:"allow_force_pushes_enforcement_level,omitempty"` - AllowDeletionsEnforcementLevel *string `json:"allow_deletions_enforcement_level,omitempty"` - MergeQueueEnforcementLevel *string `json:"merge_queue_enforcement_level,omitempty"` - RequiredDeploymentsEnforcementLevel *string `json:"required_deployments_enforcement_level,omitempty"` - RequiredConversationResolutionLevel *string 
`json:"required_conversation_resolution_level,omitempty"` - AuthorizedActorsOnly *bool `json:"authorized_actors_only,omitempty"` - AuthorizedActorNames []string `json:"authorized_actor_names,omitempty"` -} - -// ProtectionChanges represents the changes to the rule if the BranchProtection was edited. -type ProtectionChanges struct { - AdminEnforced *AdminEnforcedChanges `json:"admin_enforced,omitempty"` - AllowDeletionsEnforcementLevel *AllowDeletionsEnforcementLevelChanges `json:"allow_deletions_enforcement_level,omitempty"` - AuthorizedActorNames *AuthorizedActorNames `json:"authorized_actor_names,omitempty"` - AuthorizedActorsOnly *AuthorizedActorsOnly `json:"authorized_actors_only,omitempty"` - AuthorizedDismissalActorsOnly *AuthorizedDismissalActorsOnlyChanges `json:"authorized_dismissal_actors_only,omitempty"` - CreateProtected *CreateProtectedChanges `json:"create_protected,omitempty"` - DismissStaleReviewsOnPush *DismissStaleReviewsOnPushChanges `json:"dismiss_stale_reviews_on_push,omitempty"` - LinearHistoryRequirementEnforcementLevel *LinearHistoryRequirementEnforcementLevelChanges `json:"linear_history_requirement_enforcement_level,omitempty"` - PullRequestReviewsEnforcementLevel *PullRequestReviewsEnforcementLevelChanges `json:"pull_request_reviews_enforcement_level,omitempty"` - RequireCodeOwnerReview *RequireCodeOwnerReviewChanges `json:"require_code_owner_review,omitempty"` - RequiredConversationResolutionLevel *RequiredConversationResolutionLevelChanges `json:"required_conversation_resolution_level,omitempty"` - RequiredDeploymentsEnforcementLevel *RequiredDeploymentsEnforcementLevelChanges `json:"required_deployments_enforcement_level,omitempty"` - RequiredStatusChecks *RequiredStatusChecksChanges `json:"required_status_checks,omitempty"` - RequiredStatusChecksEnforcementLevel *RequiredStatusChecksEnforcementLevelChanges `json:"required_status_checks_enforcement_level,omitempty"` - SignatureRequirementEnforcementLevel *SignatureRequirementEnforcementLevelChanges `json:"signature_requirement_enforcement_level,omitempty"` -} - -// AdminEnforcedChanges represents the changes made to the AdminEnforced policy. -type AdminEnforcedChanges struct { - From *bool `json:"from,omitempty"` -} - -// AllowDeletionsEnforcementLevelChanges represents the changes made to the AllowDeletionsEnforcementLevel policy. -type AllowDeletionsEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// AuthorizedActorNames represents who are authorized to edit the branch protection rules. -type AuthorizedActorNames struct { - From []string `json:"from,omitempty"` -} - -// AuthorizedActorsOnly represents if the branch rule can be edited by authorized actors only. -type AuthorizedActorsOnly struct { - From *bool `json:"from,omitempty"` -} - -// AuthorizedDismissalActorsOnlyChanges represents the changes made to the AuthorizedDismissalActorsOnly policy. -type AuthorizedDismissalActorsOnlyChanges struct { - From *bool `json:"from,omitempty"` -} - -// CreateProtectedChanges represents the changes made to the CreateProtected policy. -type CreateProtectedChanges struct { - From *bool `json:"from,omitempty"` -} - -// DismissStaleReviewsOnPushChanges represents the changes made to the DismissStaleReviewsOnPushChanges policy. -type DismissStaleReviewsOnPushChanges struct { - From *bool `json:"from,omitempty"` -} - -// LinearHistoryRequirementEnforcementLevelChanges represents the changes made to the LinearHistoryRequirementEnforcementLevel policy. 
-type LinearHistoryRequirementEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// PullRequestReviewsEnforcementLevelChanges represents the changes made to the PullRequestReviewsEnforcementLevel policy. -type PullRequestReviewsEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// RequireCodeOwnerReviewChanges represents the changes made to the RequireCodeOwnerReview policy. -type RequireCodeOwnerReviewChanges struct { - From *bool `json:"from,omitempty"` -} - -// RequiredConversationResolutionLevelChanges represents the changes made to the RequiredConversationResolutionLevel policy. -type RequiredConversationResolutionLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// RequiredDeploymentsEnforcementLevelChanges represents the changes made to the RequiredDeploymentsEnforcementLevel policy. -type RequiredDeploymentsEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// RequiredStatusChecksChanges represents the changes made to the RequiredStatusChecks policy. -type RequiredStatusChecksChanges struct { - From []string `json:"from,omitempty"` -} - -// RequiredStatusChecksEnforcementLevelChanges represents the changes made to the RequiredStatusChecksEnforcementLevel policy. -type RequiredStatusChecksEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// SignatureRequirementEnforcementLevelChanges represents the changes made to the SignatureRequirementEnforcementLevel policy. -type SignatureRequirementEnforcementLevelChanges struct { - From *string `json:"from,omitempty"` -} - -// ProtectionRequest represents a request to create/edit a branch's protection. -type ProtectionRequest struct { - RequiredStatusChecks *RequiredStatusChecks `json:"required_status_checks"` - RequiredPullRequestReviews *PullRequestReviewsEnforcementRequest `json:"required_pull_request_reviews"` - EnforceAdmins bool `json:"enforce_admins"` - Restrictions *BranchRestrictionsRequest `json:"restrictions"` - // Enforces a linear commit Git history, which prevents anyone from pushing merge commits to a branch. - RequireLinearHistory *bool `json:"required_linear_history,omitempty"` - // Permits force pushes to the protected branch by anyone with write access to the repository. - AllowForcePushes *bool `json:"allow_force_pushes,omitempty"` - // Allows deletion of the protected branch by anyone with write access to the repository. - AllowDeletions *bool `json:"allow_deletions,omitempty"` - // RequiredConversationResolution, if set to true, requires all comments - // on the pull request to be resolved before it can be merged to a protected branch. - RequiredConversationResolution *bool `json:"required_conversation_resolution,omitempty"` - // BlockCreations, if set to true, will cause the restrictions setting to also block pushes - // which create new branches, unless initiated by a user, team, app with the ability to push. - BlockCreations *bool `json:"block_creations,omitempty"` - // LockBranch, if set to true, will prevent users from pushing to the branch. - LockBranch *bool `json:"lock_branch,omitempty"` - // AllowForkSyncing, if set to true, will allow users to pull changes from upstream - // when the branch is locked. - AllowForkSyncing *bool `json:"allow_fork_syncing,omitempty"` -} - -// RequiredStatusChecks represents the protection status of a individual branch. -type RequiredStatusChecks struct { - // Require branches to be up to date before merging. (Required.) 
- Strict bool `json:"strict"` - // The list of status checks to require in order to merge into this - // branch. (Deprecated. Note: only one of Contexts/Checks can be populated, - // but at least one must be populated). - Contexts []string `json:"contexts,omitempty"` - // The list of status checks to require in order to merge into this - // branch. - Checks []*RequiredStatusCheck `json:"checks,omitempty"` - ContextsURL *string `json:"contexts_url,omitempty"` - URL *string `json:"url,omitempty"` -} - -// RequiredStatusChecksRequest represents a request to edit a protected branch's status checks. -type RequiredStatusChecksRequest struct { - Strict *bool `json:"strict,omitempty"` - // Note: if both Contexts and Checks are populated, - // the GitHub API will only use Checks. - Contexts []string `json:"contexts,omitempty"` - Checks []*RequiredStatusCheck `json:"checks,omitempty"` -} - -// RequiredStatusCheck represents a status check of a protected branch. -type RequiredStatusCheck struct { - // The name of the required check. - Context string `json:"context"` - // The ID of the GitHub App that must provide this check. - // Omit this field to automatically select the GitHub App - // that has recently provided this check, - // or any app if it was not set by a GitHub App. - // Pass -1 to explicitly allow any app to set the status. - AppID *int64 `json:"app_id,omitempty"` -} - -// PullRequestReviewsEnforcement represents the pull request reviews enforcement of a protected branch. -type PullRequestReviewsEnforcement struct { - // Allow specific users, teams, or apps to bypass pull request requirements. - BypassPullRequestAllowances *BypassPullRequestAllowances `json:"bypass_pull_request_allowances,omitempty"` - // Specifies which users, teams and apps can dismiss pull request reviews. - DismissalRestrictions *DismissalRestrictions `json:"dismissal_restrictions,omitempty"` - // Specifies if approved reviews are dismissed automatically, when a new commit is pushed. - DismissStaleReviews bool `json:"dismiss_stale_reviews"` - // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. - RequireCodeOwnerReviews bool `json:"require_code_owner_reviews"` - // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. - // Valid values are 1-6. - RequiredApprovingReviewCount int `json:"required_approving_review_count"` - // RequireLastPushApproval specifies whether the last pusher to a pull request branch can approve it. - RequireLastPushApproval bool `json:"require_last_push_approval"` -} - -// PullRequestReviewsEnforcementRequest represents request to set the pull request review -// enforcement of a protected branch. It is separate from PullRequestReviewsEnforcement above -// because the request structure is different from the response structure. -type PullRequestReviewsEnforcementRequest struct { - // Allow specific users, teams, or apps to bypass pull request requirements. - BypassPullRequestAllowancesRequest *BypassPullRequestAllowancesRequest `json:"bypass_pull_request_allowances,omitempty"` - // Specifies which users, teams and apps should be allowed to dismiss pull request reviews. - // User, team and app dismissal restrictions are only available for - // organization-owned repositories. Must be nil for personal repositories. 
- DismissalRestrictionsRequest *DismissalRestrictionsRequest `json:"dismissal_restrictions,omitempty"` - // Specifies if approved reviews can be dismissed automatically, when a new commit is pushed. (Required) - DismissStaleReviews bool `json:"dismiss_stale_reviews"` - // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. - RequireCodeOwnerReviews bool `json:"require_code_owner_reviews"` - // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. - // Valid values are 1-6. - RequiredApprovingReviewCount int `json:"required_approving_review_count"` - // RequireLastPushApproval specifies whether the last pusher to a pull request branch can approve it. - RequireLastPushApproval *bool `json:"require_last_push_approval,omitempty"` -} - -// PullRequestReviewsEnforcementUpdate represents request to patch the pull request review -// enforcement of a protected branch. It is separate from PullRequestReviewsEnforcementRequest above -// because the patch request does not require all fields to be initialized. -type PullRequestReviewsEnforcementUpdate struct { - // Allow specific users, teams, or apps to bypass pull request requirements. - BypassPullRequestAllowancesRequest *BypassPullRequestAllowancesRequest `json:"bypass_pull_request_allowances,omitempty"` - // Specifies which users, teams and apps can dismiss pull request reviews. Can be omitted. - DismissalRestrictionsRequest *DismissalRestrictionsRequest `json:"dismissal_restrictions,omitempty"` - // Specifies if approved reviews can be dismissed automatically, when a new commit is pushed. Can be omitted. - DismissStaleReviews *bool `json:"dismiss_stale_reviews,omitempty"` - // RequireCodeOwnerReviews specifies if merging pull requests is blocked until code owners have reviewed. - RequireCodeOwnerReviews *bool `json:"require_code_owner_reviews,omitempty"` - // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. - // Valid values are 1 - 6 or 0 to not require reviewers. - RequiredApprovingReviewCount int `json:"required_approving_review_count"` - // RequireLastPushApproval specifies whether the last pusher to a pull request branch can approve it. - RequireLastPushApproval *bool `json:"require_last_push_approval,omitempty"` -} - -// RequireLinearHistory represents the configuration to enforce branches with no merge commit. -type RequireLinearHistory struct { - Enabled bool `json:"enabled"` -} - -// AllowDeletions represents the configuration to accept deletion of protected branches. -type AllowDeletions struct { - Enabled bool `json:"enabled"` -} - -// AllowForcePushes represents the configuration to accept forced pushes on protected branches. -type AllowForcePushes struct { - Enabled bool `json:"enabled"` -} - -// RequiredConversationResolution requires all comments on the pull request to be resolved before it can be -// merged to a protected branch when enabled. -type RequiredConversationResolution struct { - Enabled bool `json:"enabled"` -} - -// AdminEnforcement represents the configuration to enforce required status checks for repository administrators. -type AdminEnforcement struct { - URL *string `json:"url,omitempty"` - Enabled bool `json:"enabled"` -} - -// BranchRestrictions represents the restriction that only certain users or -// teams may push to a branch. -type BranchRestrictions struct { - // The list of user logins with push access. 
- Users []*User `json:"users"` - // The list of team slugs with push access. - Teams []*Team `json:"teams"` - // The list of app slugs with push access. - Apps []*App `json:"apps"` -} - -// BranchRestrictionsRequest represents the request to create/edit the -// restriction that only certain users or teams may push to a branch. It is -// separate from BranchRestrictions above because the request structure is -// different from the response structure. -type BranchRestrictionsRequest struct { - // The list of user logins with push access. (Required; use []string{} instead of nil for empty list.) - Users []string `json:"users"` - // The list of team slugs with push access. (Required; use []string{} instead of nil for empty list.) - Teams []string `json:"teams"` - // The list of app slugs with push access. - Apps []string `json:"apps"` -} - -// BypassPullRequestAllowances represents the people, teams, or apps who are allowed to bypass required pull requests. -type BypassPullRequestAllowances struct { - // The list of users allowed to bypass pull request requirements. - Users []*User `json:"users"` - // The list of teams allowed to bypass pull request requirements. - Teams []*Team `json:"teams"` - // The list of apps allowed to bypass pull request requirements. - Apps []*App `json:"apps"` -} - -// BypassPullRequestAllowancesRequest represents the people, teams, or apps who are -// allowed to bypass required pull requests. -// It is separate from BypassPullRequestAllowances above because the request structure is -// different from the response structure. -type BypassPullRequestAllowancesRequest struct { - // The list of user logins allowed to bypass pull request requirements. - Users []string `json:"users"` - // The list of team slugs allowed to bypass pull request requirements. - Teams []string `json:"teams"` - // The list of app slugs allowed to bypass pull request requirements. - Apps []string `json:"apps"` -} - -// DismissalRestrictions specifies which users and teams can dismiss pull request reviews. -type DismissalRestrictions struct { - // The list of users who can dimiss pull request reviews. - Users []*User `json:"users"` - // The list of teams which can dismiss pull request reviews. - Teams []*Team `json:"teams"` - // The list of apps which can dismiss pull request reviews. - Apps []*App `json:"apps"` -} - -// DismissalRestrictionsRequest represents the request to create/edit the -// restriction to allows only specific users, teams or apps to dimiss pull request reviews. It is -// separate from DismissalRestrictions above because the request structure is -// different from the response structure. -// Note: Both Users and Teams must be nil, or both must be non-nil. -type DismissalRestrictionsRequest struct { - // The list of user logins who can dismiss pull request reviews. (Required; use nil to disable dismissal_restrictions or &[]string{} otherwise.) - Users *[]string `json:"users,omitempty"` - // The list of team slugs which can dismiss pull request reviews. (Required; use nil to disable dismissal_restrictions or &[]string{} otherwise.) - Teams *[]string `json:"teams,omitempty"` - // The list of app slugs which can dismiss pull request reviews. (Required; use nil to disable dismissal_restrictions or &[]string{} otherwise.) - Apps *[]string `json:"apps,omitempty"` -} - -// SignaturesProtectedBranch represents the protection status of an individual branch. 
-type SignaturesProtectedBranch struct { - URL *string `json:"url,omitempty"` - // Commits pushed to matching branches must have verified signatures. - Enabled *bool `json:"enabled,omitempty"` -} - -// AutomatedSecurityFixes represents their status. -type AutomatedSecurityFixes struct { - Enabled *bool `json:"enabled"` - Paused *bool `json:"paused"` -} - -// ListBranches lists branches for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/branches/branches#list-branches -// -//meta:operation GET /repos/{owner}/{repo}/branches -func (s *RepositoriesService) ListBranches(ctx context.Context, owner string, repo string, opts *BranchListOptions) ([]*Branch, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var branches []*Branch - resp, err := s.client.Do(ctx, req, &branches) - if err != nil { - return nil, resp, err - } - - return branches, resp, nil -} - -// GetBranch gets the specified branch for a repository. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branches#get-a-branch -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch} -func (s *RepositoriesService) GetBranch(ctx context.Context, owner, repo, branch string, maxRedirects int) (*Branch, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v", owner, repo, url.PathEscape(branch)) - - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - b := new(Branch) - err = json.NewDecoder(resp.Body).Decode(b) - return b, newResponse(resp), err -} - -// renameBranchRequest represents a request to rename a branch. -type renameBranchRequest struct { - NewName string `json:"new_name"` -} - -// RenameBranch renames a branch in a repository. -// -// To rename a non-default branch: Users must have push access. GitHub Apps must have the `contents:write` repository permission. -// To rename the default branch: Users must have admin or owner permissions. GitHub Apps must have the `administration:write` repository permission. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branches#rename-a-branch -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/rename -func (s *RepositoriesService) RenameBranch(ctx context.Context, owner, repo, branch, newName string) (*Branch, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/rename", owner, repo, url.PathEscape(branch)) - r := &renameBranchRequest{NewName: newName} - req, err := s.client.NewRequest("POST", u, r) - if err != nil { - return nil, nil, err - } - - b := new(Branch) - resp, err := s.client.Do(ctx, req, b) - if err != nil { - return nil, resp, err - } - - return b, resp, nil -} - -// GetBranchProtection gets the protection of a given branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
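For reference, a minimal sketch (not part of this patch or the vendored file) of how a caller pages through the ListBranches wrapper removed above; the octocat/hello-world repository is a placeholder and BranchListOptions is assumed to embed the usual ListOptions:

package main

import (
	"context"
	"fmt"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // unauthenticated is enough for public repositories

	opts := &github.BranchListOptions{ListOptions: github.ListOptions{PerPage: 50}}
	for {
		branches, resp, err := client.Repositories.ListBranches(ctx, "octocat", "hello-world", opts)
		if err != nil {
			fmt.Println("listing branches failed:", err)
			return
		}
		for _, b := range branches {
			fmt.Println(b.GetName())
		}
		if resp.NextPage == 0 {
			break
		}
		opts.Page = resp.NextPage
	}
}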
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-branch-protection -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection -func (s *RepositoriesService) GetBranchProtection(ctx context.Context, owner, repo, branch string) (*Protection, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) - - p := new(Protection) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - if isBranchNotProtected(err) { - err = ErrBranchNotProtected - } - return nil, resp, err - } - - return p, resp, nil -} - -// GetRequiredStatusChecks gets the required status checks for a given protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-status-checks-protection -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks -func (s *RepositoriesService) GetRequiredStatusChecks(ctx context.Context, owner, repo, branch string) (*RequiredStatusChecks, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - p := new(RequiredStatusChecks) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - if isBranchNotProtected(err) { - err = ErrBranchNotProtected - } - return nil, resp, err - } - - return p, resp, nil -} - -// ListRequiredStatusChecksContexts lists the required status checks contexts for a given protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-all-status-check-contexts -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts -func (s *RepositoriesService) ListRequiredStatusChecksContexts(ctx context.Context, owner, repo, branch string) (contexts []string, resp *Response, err error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks/contexts", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - resp, err = s.client.Do(ctx, req, &contexts) - if err != nil { - if isBranchNotProtected(err) { - err = ErrBranchNotProtected - } - return nil, resp, err - } - - return contexts, resp, nil -} - -// UpdateBranchProtection updates the protection of a given branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#update-branch-protection -// -//meta:operation PUT /repos/{owner}/{repo}/branches/{branch}/protection -func (s *RepositoriesService) UpdateBranchProtection(ctx context.Context, owner, repo, branch string, preq *ProtectionRequest) (*Protection, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PUT", u, preq) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) - - p := new(Protection) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// RemoveBranchProtection removes the protection of a given branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#delete-branch-protection -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection -func (s *RepositoriesService) RemoveBranchProtection(ctx context.Context, owner, repo, branch string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetSignaturesProtectedBranch gets required signatures of protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-commit-signature-protection -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures -func (s *RepositoriesService) GetSignaturesProtectedBranch(ctx context.Context, owner, repo, branch string) (*SignaturesProtectedBranch, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeSignaturePreview) - - p := new(SignaturesProtectedBranch) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} - -// RequireSignaturesOnProtectedBranch makes signed commits required on a protected branch. -// It requires admin access and branch protection to be enabled. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
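A hedged sketch (not taken from the vendored file) of driving the UpdateBranchProtection call above with the request types declared earlier in this file; the repository, branch and token are placeholders and admin access on the repository is assumed:

package main

import (
	"context"
	"fmt"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken("<token>") // assumes a token with admin rights

	preq := &github.ProtectionRequest{
		RequiredStatusChecks: &github.RequiredStatusChecks{
			Strict:   true,
			Contexts: []string{"ci/build"},
		},
		RequiredPullRequestReviews: &github.PullRequestReviewsEnforcementRequest{
			DismissStaleReviews:          true,
			RequiredApprovingReviewCount: 1,
		},
		EnforceAdmins: true,
		Restrictions:  nil, // serialized as null: no push restrictions
	}

	if _, _, err := client.Repositories.UpdateBranchProtection(ctx, "octocat", "hello-world", "main", preq); err != nil {
		fmt.Println("updating branch protection failed:", err)
		return
	}
	fmt.Println("branch protection updated")
}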
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#create-commit-signature-protection -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures -func (s *RepositoriesService) RequireSignaturesOnProtectedBranch(ctx context.Context, owner, repo, branch string) (*SignaturesProtectedBranch, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeSignaturePreview) - - r := new(SignaturesProtectedBranch) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// OptionalSignaturesOnProtectedBranch removes required signed commits on a given branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#delete-commit-signature-protection -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures -func (s *RepositoriesService) OptionalSignaturesOnProtectedBranch(ctx context.Context, owner, repo, branch string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeSignaturePreview) - - return s.client.Do(ctx, req, nil) -} - -// UpdateRequiredStatusChecks updates the required status checks for a given protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#update-status-check-protection -// -//meta:operation PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks -func (s *RepositoriesService) UpdateRequiredStatusChecks(ctx context.Context, owner, repo, branch string, sreq *RequiredStatusChecksRequest) (*RequiredStatusChecks, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PATCH", u, sreq) - if err != nil { - return nil, nil, err - } - - sc := new(RequiredStatusChecks) - resp, err := s.client.Do(ctx, req, sc) - if err != nil { - return nil, resp, err - } - - return sc, resp, nil -} - -// RemoveRequiredStatusChecks removes the required status checks for a given protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#remove-status-check-protection -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks -func (s *RepositoriesService) RemoveRequiredStatusChecks(ctx context.Context, owner, repo, branch string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// License gets the contents of a repository's license if one is detected. -// -// GitHub API docs: https://docs.github.com/rest/licenses/licenses#get-the-license-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/license -func (s *RepositoriesService) License(ctx context.Context, owner, repo string) (*RepositoryLicense, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/license", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - r := &RepositoryLicense{} - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// GetPullRequestReviewEnforcement gets pull request review enforcement of a protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-pull-request-review-protection -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews -func (s *RepositoriesService) GetPullRequestReviewEnforcement(ctx context.Context, owner, repo, branch string) (*PullRequestReviewsEnforcement, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) - - r := new(PullRequestReviewsEnforcement) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// UpdatePullRequestReviewEnforcement patches pull request review enforcement of a protected branch. -// It requires admin access and branch protection to be enabled. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#update-pull-request-review-protection -// -//meta:operation PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews -func (s *RepositoriesService) UpdatePullRequestReviewEnforcement(ctx context.Context, owner, repo, branch string, patch *PullRequestReviewsEnforcementUpdate) (*PullRequestReviewsEnforcement, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PATCH", u, patch) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) - - r := new(PullRequestReviewsEnforcement) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// DisableDismissalRestrictions disables dismissal restrictions of a protected branch. -// It requires admin access and branch protection to be enabled. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#update-pull-request-review-protection -// -//meta:operation PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews -func (s *RepositoriesService) DisableDismissalRestrictions(ctx context.Context, owner, repo, branch string) (*PullRequestReviewsEnforcement, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) - - data := new(struct { - DismissalRestrictionsRequest `json:"dismissal_restrictions"` - }) - - req, err := s.client.NewRequest("PATCH", u, data) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) - - r := new(PullRequestReviewsEnforcement) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// RemovePullRequestReviewEnforcement removes pull request enforcement of a protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#delete-pull-request-review-protection -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews -func (s *RepositoriesService) RemovePullRequestReviewEnforcement(ctx context.Context, owner, repo, branch string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetAdminEnforcement gets admin enforcement information of a protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
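An illustrative sketch (placeholders throughout, organization-owned repository assumed) of patching review enforcement with the PullRequestReviewsEnforcementUpdate and DismissalRestrictionsRequest types defined earlier, following the nil-vs-empty-slice rules spelled out in their comments:

package main

import (
	"context"
	"fmt"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken("<token>")

	patch := &github.PullRequestReviewsEnforcementUpdate{
		DismissStaleReviews:          github.Bool(true),
		RequiredApprovingReviewCount: 2,
		// Users and Teams must both be non-nil (or both nil); empty slices keep
		// dismissal restrictions enabled while granting them to nobody in that list.
		DismissalRestrictionsRequest: &github.DismissalRestrictionsRequest{
			Users: &[]string{"octocat"},
			Teams: &[]string{},
			Apps:  &[]string{},
		},
	}

	enforcement, _, err := client.Repositories.UpdatePullRequestReviewEnforcement(ctx, "octo-org", "hello-world", "main", patch)
	if err != nil {
		fmt.Println("patching review enforcement failed:", err)
		return
	}
	fmt.Println("required approvals now:", enforcement.RequiredApprovingReviewCount)
}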
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-admin-branch-protection -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins -func (s *RepositoriesService) GetAdminEnforcement(ctx context.Context, owner, repo, branch string) (*AdminEnforcement, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/enforce_admins", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - r := new(AdminEnforcement) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// AddAdminEnforcement adds admin enforcement to a protected branch. -// It requires admin access and branch protection to be enabled. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#set-admin-branch-protection -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins -func (s *RepositoriesService) AddAdminEnforcement(ctx context.Context, owner, repo, branch string) (*AdminEnforcement, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/enforce_admins", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - r := new(AdminEnforcement) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// RemoveAdminEnforcement removes admin enforcement from a protected branch. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#delete-admin-branch-protection -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins -func (s *RepositoriesService) RemoveAdminEnforcement(ctx context.Context, owner, repo, branch string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/enforce_admins", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// repositoryTopics represents a collection of repository topics. -type repositoryTopics struct { - Names []string `json:"names"` -} - -// ListAllTopics lists topics for a repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#get-all-repository-topics -// -//meta:operation GET /repos/{owner}/{repo}/topics -func (s *RepositoriesService) ListAllTopics(ctx context.Context, owner, repo string) ([]string, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/topics", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeTopicsPreview) - - topics := new(repositoryTopics) - resp, err := s.client.Do(ctx, req, topics) - if err != nil { - return nil, resp, err - } - - return topics.Names, resp, nil -} - -// ReplaceAllTopics replaces all repository topics. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#replace-all-repository-topics -// -//meta:operation PUT /repos/{owner}/{repo}/topics -func (s *RepositoriesService) ReplaceAllTopics(ctx context.Context, owner, repo string, topics []string) ([]string, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/topics", owner, repo) - t := &repositoryTopics{ - Names: topics, - } - if t.Names == nil { - t.Names = []string{} - } - req, err := s.client.NewRequest("PUT", u, t) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeTopicsPreview) - - t = new(repositoryTopics) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t.Names, resp, nil -} - -// ListApps lists the GitHub apps that have push access to a given protected branch. -// It requires the GitHub apps to have `write` access to the `content` permission. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// Deprecated: Please use ListAppRestrictions instead. -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-apps-with-access-to-the-protected-branch -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps -func (s *RepositoriesService) ListApps(ctx context.Context, owner, repo, branch string) ([]*App, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var apps []*App - resp, err := s.client.Do(ctx, req, &apps) - if err != nil { - return nil, resp, err - } - - return apps, resp, nil -} - -// ListAppRestrictions lists the GitHub apps that have push access to a given protected branch. -// It requires the GitHub apps to have `write` access to the `content` permission. -// -// Note: This is a wrapper around ListApps so a naming convention with ListUserRestrictions and ListTeamRestrictions is preserved. -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-apps-with-access-to-the-protected-branch -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps -func (s *RepositoriesService) ListAppRestrictions(ctx context.Context, owner, repo, branch string) ([]*App, *Response, error) { - return s.ListApps(ctx, owner, repo, branch) -} - -// ReplaceAppRestrictions replaces the apps that have push access to a given protected branch. -// It removes all apps that previously had push access and grants push access to the new list of apps. -// It requires the GitHub apps to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
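For reference, a short sketch (names are placeholders) of the ReplaceAllTopics helper removed above; note the wrapper itself converts a nil slice to []string{}, which clears every topic:

package main

import (
	"context"
	"fmt"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken("<token>")

	topics, _, err := client.Repositories.ReplaceAllTopics(ctx, "octocat", "hello-world", []string{"golang", "self-hosted-runners"})
	if err != nil {
		fmt.Println("replacing topics failed:", err)
		return
	}
	fmt.Println("topics now:", topics)
}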
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#set-app-access-restrictions -// -//meta:operation PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps -func (s *RepositoriesService) ReplaceAppRestrictions(ctx context.Context, owner, repo, branch string, apps []string) ([]*App, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PUT", u, apps) - if err != nil { - return nil, nil, err - } - - var newApps []*App - resp, err := s.client.Do(ctx, req, &newApps) - if err != nil { - return nil, resp, err - } - - return newApps, resp, nil -} - -// AddAppRestrictions grants the specified apps push access to a given protected branch. -// It requires the GitHub apps to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#add-app-access-restrictions -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps -func (s *RepositoriesService) AddAppRestrictions(ctx context.Context, owner, repo, branch string, apps []string) ([]*App, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("POST", u, apps) - if err != nil { - return nil, nil, err - } - - var newApps []*App - resp, err := s.client.Do(ctx, req, &newApps) - if err != nil { - return nil, resp, err - } - - return newApps, resp, nil -} - -// RemoveAppRestrictions removes the restrictions of an app from pushing to this branch. -// It requires the GitHub apps to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#remove-app-access-restrictions -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps -func (s *RepositoriesService) RemoveAppRestrictions(ctx context.Context, owner, repo, branch string, apps []string) ([]*App, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, apps) - if err != nil { - return nil, nil, err - } - - var newApps []*App - resp, err := s.client.Do(ctx, req, &newApps) - if err != nil { - return nil, resp, err - } - - return newApps, resp, nil -} - -// ListTeamRestrictions lists the GitHub teams that have push access to a given protected branch. -// It requires the GitHub teams to have `write` access to the `content` permission. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-teams-with-access-to-the-protected-branch -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams -func (s *RepositoriesService) ListTeamRestrictions(ctx context.Context, owner, repo, branch string) ([]*Team, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// ReplaceTeamRestrictions replaces the team that have push access to a given protected branch. -// This removes all teams that previously had push access and grants push access to the new list of teams. -// It requires the GitHub teams to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#set-team-access-restrictions -// -//meta:operation PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams -func (s *RepositoriesService) ReplaceTeamRestrictions(ctx context.Context, owner, repo, branch string, teams []string) ([]*Team, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PUT", u, teams) - if err != nil { - return nil, nil, err - } - - var newTeams []*Team - resp, err := s.client.Do(ctx, req, &newTeams) - if err != nil { - return nil, resp, err - } - - return newTeams, resp, nil -} - -// AddTeamRestrictions grants the specified teams push access to a given protected branch. -// It requires the GitHub teams to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#add-team-access-restrictions -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams -func (s *RepositoriesService) AddTeamRestrictions(ctx context.Context, owner, repo, branch string, teams []string) ([]*Team, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("POST", u, teams) - if err != nil { - return nil, nil, err - } - - var newTeams []*Team - resp, err := s.client.Do(ctx, req, &newTeams) - if err != nil { - return nil, resp, err - } - - return newTeams, resp, nil -} - -// RemoveTeamRestrictions removes the restrictions of a team from pushing to this branch. -// It requires the GitHub teams to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#remove-team-access-restrictions -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams -func (s *RepositoriesService) RemoveTeamRestrictions(ctx context.Context, owner, repo, branch string, teams []string) ([]*Team, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, teams) - if err != nil { - return nil, nil, err - } - - var newTeams []*Team - resp, err := s.client.Do(ctx, req, &newTeams) - if err != nil { - return nil, resp, err - } - - return newTeams, resp, nil -} - -// ListUserRestrictions lists the GitHub users that have push access to a given protected branch. -// It requires the GitHub users to have `write` access to the `content` permission. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#get-users-with-access-to-the-protected-branch -// -//meta:operation GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users -func (s *RepositoriesService) ListUserRestrictions(ctx context.Context, owner, repo, branch string) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// ReplaceUserRestrictions replaces the user that have push access to a given protected branch. -// It removes all users that previously had push access and grants push access to the new list of users. -// It requires the GitHub users to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#set-user-access-restrictions -// -//meta:operation PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users -func (s *RepositoriesService) ReplaceUserRestrictions(ctx context.Context, owner, repo, branch string, users []string) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("PUT", u, users) - if err != nil { - return nil, nil, err - } - - var newUsers []*User - resp, err := s.client.Do(ctx, req, &newUsers) - if err != nil { - return nil, resp, err - } - - return newUsers, resp, nil -} - -// AddUserRestrictions grants the specified users push access to a given protected branch. -// It requires the GitHub users to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
-// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#add-user-access-restrictions -// -//meta:operation POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users -func (s *RepositoriesService) AddUserRestrictions(ctx context.Context, owner, repo, branch string, users []string) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("POST", u, users) - if err != nil { - return nil, nil, err - } - - var newUsers []*User - resp, err := s.client.Do(ctx, req, &newUsers) - if err != nil { - return nil, resp, err - } - - return newUsers, resp, nil -} - -// RemoveUserRestrictions removes the restrictions of a user from pushing to this branch. -// It requires the GitHub users to have `write` access to the `content` permission. -// -// Note: The list of users, apps, and teams in total is limited to 100 items. -// -// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . -// -// GitHub API docs: https://docs.github.com/rest/branches/branch-protection#remove-user-access-restrictions -// -//meta:operation DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users -func (s *RepositoriesService) RemoveUserRestrictions(ctx context.Context, owner, repo, branch string, users []string) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) - req, err := s.client.NewRequest("DELETE", u, users) - if err != nil { - return nil, nil, err - } - - var newUsers []*User - resp, err := s.client.Do(ctx, req, &newUsers) - if err != nil { - return nil, resp, err - } - - return newUsers, resp, nil -} - -// TransferRequest represents a request to transfer a repository. -type TransferRequest struct { - NewOwner string `json:"new_owner"` - NewName *string `json:"new_name,omitempty"` - TeamID []int64 `json:"team_ids,omitempty"` -} - -// Transfer transfers a repository from one account or organization to another. -// -// This method might return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it has now scheduled the transfer of the repository in a background task. -// A follow up request, after a delay of a second or so, should result -// in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#transfer-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/transfer -func (s *RepositoriesService) Transfer(ctx context.Context, owner, repo string, transfer TransferRequest) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/transfer", owner, repo) - - req, err := s.client.NewRequest("POST", u, &transfer) - if err != nil { - return nil, nil, err - } - - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// DispatchRequestOptions represents a request to trigger a repository_dispatch event. -type DispatchRequestOptions struct { - // EventType is a custom webhook event name. (Required.) - EventType string `json:"event_type"` - // ClientPayload is a custom JSON payload with extra information about the webhook event. - // Defaults to an empty JSON object. - ClientPayload *json.RawMessage `json:"client_payload,omitempty"` -} - -// Dispatch triggers a repository_dispatch event in a GitHub Actions workflow. 
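A hedged sketch (owner, repository and team ID are placeholders) of the Transfer call defined above, showing the 202/*AcceptedError flow its comment describes:

package main

import (
	"context"
	"errors"
	"fmt"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken("<token>")

	req := github.TransferRequest{NewOwner: "octo-org", TeamID: []int64{12345}}
	_, _, err := client.Repositories.Transfer(ctx, "octocat", "hello-world", req)

	var accepted *github.AcceptedError
	if errors.As(err, &accepted) {
		// GitHub queued the transfer in the background; retrying the lookup a
		// second or so later should show the repository under its new owner.
		fmt.Println("transfer scheduled")
		return
	}
	if err != nil {
		fmt.Println("transfer failed:", err)
		return
	}
	fmt.Println("transfer completed")
}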
-// -// GitHub API docs: https://docs.github.com/rest/repos/repos#create-a-repository-dispatch-event -// -//meta:operation POST /repos/{owner}/{repo}/dispatches -func (s *RepositoriesService) Dispatch(ctx context.Context, owner, repo string, opts DispatchRequestOptions) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/dispatches", owner, repo) - - req, err := s.client.NewRequest("POST", u, &opts) - if err != nil { - return nil, nil, err - } - - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// isBranchNotProtected determines whether a branch is not protected -// based on the error message returned by GitHub API. -func isBranchNotProtected(err error) bool { - errorResponse, ok := err.(*ErrorResponse) - return ok && errorResponse.Message == githubBranchNotProtected -} - -// EnablePrivateReporting enables private reporting of vulnerabilities for a -// repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#enable-private-vulnerability-reporting-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/private-vulnerability-reporting -func (s *RepositoriesService) EnablePrivateReporting(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/private-vulnerability-reporting", owner, repo) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// DisablePrivateReporting disables private reporting of vulnerabilities for a -// repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#disable-private-vulnerability-reporting-for-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/private-vulnerability-reporting -func (s *RepositoriesService) DisablePrivateReporting(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/private-vulnerability-reporting", owner, repo) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_actions_permissions.go b/vendor/github.com/google/go-github/v57/github/repos_actions_permissions.go deleted file mode 100644 index 2dcc367d..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_actions_permissions.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ActionsPermissionsRepository represents a policy for repositories and allowed actions in a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions -type ActionsPermissionsRepository struct { - Enabled *bool `json:"enabled,omitempty"` - AllowedActions *string `json:"allowed_actions,omitempty"` - SelectedActionsURL *string `json:"selected_actions_url,omitempty"` -} - -func (a ActionsPermissionsRepository) String() string { - return Stringify(a) -} - -// GetActionsPermissions gets the GitHub Actions permissions policy for repositories and allowed actions in a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-github-actions-permissions-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/permissions -func (s *RepositoriesService) GetActionsPermissions(ctx context.Context, owner, repo string) (*ActionsPermissionsRepository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - permissions := new(ActionsPermissionsRepository) - resp, err := s.client.Do(ctx, req, permissions) - if err != nil { - return nil, resp, err - } - - return permissions, resp, nil -} - -// EditActionsPermissions sets the permissions policy for repositories and allowed actions in a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-github-actions-permissions-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/actions/permissions -func (s *RepositoriesService) EditActionsPermissions(ctx context.Context, owner, repo string, actionsPermissionsRepository ActionsPermissionsRepository) (*ActionsPermissionsRepository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions", owner, repo) - req, err := s.client.NewRequest("PUT", u, actionsPermissionsRepository) - if err != nil { - return nil, nil, err - } - - permissions := new(ActionsPermissionsRepository) - resp, err := s.client.Do(ctx, req, permissions) - if err != nil { - return nil, resp, err - } - - return permissions, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_autolinks.go b/vendor/github.com/google/go-github/v57/github/repos_autolinks.go deleted file mode 100644 index 200605aa..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_autolinks.go +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// AutolinkOptions specifies parameters for RepositoriesService.AddAutolink method. -type AutolinkOptions struct { - KeyPrefix *string `json:"key_prefix,omitempty"` - URLTemplate *string `json:"url_template,omitempty"` - IsAlphanumeric *bool `json:"is_alphanumeric,omitempty"` -} - -// Autolink represents autolinks to external resources like JIRA issues and Zendesk tickets. -type Autolink struct { - ID *int64 `json:"id,omitempty"` - KeyPrefix *string `json:"key_prefix,omitempty"` - URLTemplate *string `json:"url_template,omitempty"` - IsAlphanumeric *bool `json:"is_alphanumeric,omitempty"` -} - -// ListAutolinks returns a list of autolinks configured for the given repository. -// Information about autolinks are only available to repository administrators. 
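An illustrative sketch (repository and token are placeholders, and "selected" is assumed to be an accepted allowed_actions value) of the EditActionsPermissions call removed just above:

package main

import (
	"context"
	"fmt"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken("<token>")

	perms := github.ActionsPermissionsRepository{
		Enabled:        github.Bool(true),
		AllowedActions: github.String("selected"),
	}
	updated, _, err := client.Repositories.EditActionsPermissions(ctx, "octocat", "hello-world", perms)
	if err != nil {
		fmt.Println("setting Actions permissions failed:", err)
		return
	}
	fmt.Println("allowed actions:", updated.GetAllowedActions())
}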
-// -// GitHub API docs: https://docs.github.com/rest/repos/autolinks#list-all-autolinks-of-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/autolinks -func (s *RepositoriesService) ListAutolinks(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Autolink, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/autolinks", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var autolinks []*Autolink - resp, err := s.client.Do(ctx, req, &autolinks) - if err != nil { - return nil, resp, err - } - - return autolinks, resp, nil -} - -// AddAutolink creates an autolink reference for a repository. -// Users with admin access to the repository can create an autolink. -// -// GitHub API docs: https://docs.github.com/rest/repos/autolinks#create-an-autolink-reference-for-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/autolinks -func (s *RepositoriesService) AddAutolink(ctx context.Context, owner, repo string, opts *AutolinkOptions) (*Autolink, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/autolinks", owner, repo) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - al := new(Autolink) - resp, err := s.client.Do(ctx, req, al) - if err != nil { - return nil, resp, err - } - return al, resp, nil -} - -// GetAutolink returns a single autolink reference by ID that was configured for the given repository. -// Information about autolinks are only available to repository administrators. -// -// GitHub API docs: https://docs.github.com/rest/repos/autolinks#get-an-autolink-reference-of-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/autolinks/{autolink_id} -func (s *RepositoriesService) GetAutolink(ctx context.Context, owner, repo string, id int64) (*Autolink, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/autolinks/%v", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var autolink *Autolink - resp, err := s.client.Do(ctx, req, &autolink) - if err != nil { - return nil, resp, err - } - - return autolink, resp, nil -} - -// DeleteAutolink deletes a single autolink reference by ID that was configured for the given repository. -// Information about autolinks are only available to repository administrators. -// -// GitHub API docs: https://docs.github.com/rest/repos/autolinks#delete-an-autolink-reference-from-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/autolinks/{autolink_id} -func (s *RepositoriesService) DeleteAutolink(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/autolinks/%v", owner, repo, id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_collaborators.go b/vendor/github.com/google/go-github/v57/github/repos_collaborators.go deleted file mode 100644 index 15a4e77a..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_collaborators.go +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// ListCollaboratorsOptions specifies the optional parameters to the -// RepositoriesService.ListCollaborators method. -type ListCollaboratorsOptions struct { - // Affiliation specifies how collaborators should be filtered by their affiliation. - // Possible values are: - // outside - All outside collaborators of an organization-owned repository - // direct - All collaborators with permissions to an organization-owned repository, - // regardless of organization membership status - // all - All collaborators the authenticated user can see - // - // Default value is "all". - Affiliation string `url:"affiliation,omitempty"` - - // Permission specifies how collaborators should be filtered by the permissions they have on the repository. - // Possible values are: - // "pull", "triage", "push", "maintain", "admin" - // - // If not specified, all collaborators will be returned. - Permission string `url:"permission,omitempty"` - - ListOptions -} - -// CollaboratorInvitation represents an invitation created when adding a collaborator. -// GitHub API docs: https://docs.github.com/rest/repos/collaborators/#response-when-a-new-invitation-is-created -type CollaboratorInvitation struct { - ID *int64 `json:"id,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Invitee *User `json:"invitee,omitempty"` - Inviter *User `json:"inviter,omitempty"` - Permissions *string `json:"permissions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` -} - -// ListCollaborators lists the GitHub users that have access to the repository. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/collaborators#list-repository-collaborators -// -//meta:operation GET /repos/{owner}/{repo}/collaborators -func (s *RepositoriesService) ListCollaborators(ctx context.Context, owner, repo string, opts *ListCollaboratorsOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/collaborators", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// IsCollaborator checks whether the specified GitHub user has collaborator -// access to the given repo. -// Note: This will return false if the user is not a collaborator OR the user -// is not a GitHub user. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/collaborators#check-if-a-user-is-a-repository-collaborator -// -//meta:operation GET /repos/{owner}/{repo}/collaborators/{username} -func (s *RepositoriesService) IsCollaborator(ctx context.Context, owner, repo, user string) (bool, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/collaborators/%v", owner, repo, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - isCollab, err := parseBoolResponse(err) - return isCollab, resp, err -} - -// RepositoryPermissionLevel represents the permission level an organization -// member has for a given repository. 
-type RepositoryPermissionLevel struct { - // Possible values: "admin", "write", "read", "none" - Permission *string `json:"permission,omitempty"` - - User *User `json:"user,omitempty"` -} - -// GetPermissionLevel retrieves the specific permission level a collaborator has for a given repository. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/collaborators#get-repository-permissions-for-a-user -// -//meta:operation GET /repos/{owner}/{repo}/collaborators/{username}/permission -func (s *RepositoriesService) GetPermissionLevel(ctx context.Context, owner, repo, user string) (*RepositoryPermissionLevel, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/collaborators/%v/permission", owner, repo, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - rpl := new(RepositoryPermissionLevel) - resp, err := s.client.Do(ctx, req, rpl) - if err != nil { - return nil, resp, err - } - - return rpl, resp, nil -} - -// RepositoryAddCollaboratorOptions specifies the optional parameters to the -// RepositoriesService.AddCollaborator method. -type RepositoryAddCollaboratorOptions struct { - // Permission specifies the permission to grant the user on this repository. - // Possible values are: - // pull - team members can pull, but not push to or administer this repository - // push - team members can pull and push, but not administer this repository - // admin - team members can pull, push and administer this repository - // maintain - team members can manage the repository without access to sensitive or destructive actions. - // triage - team members can proactively manage issues and pull requests without write access. - // - // Default value is "push". This option is only valid for organization-owned repositories. - Permission string `json:"permission,omitempty"` -} - -// AddCollaborator sends an invitation to the specified GitHub user -// to become a collaborator to the given repo. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/collaborators#add-a-repository-collaborator -// -//meta:operation PUT /repos/{owner}/{repo}/collaborators/{username} -func (s *RepositoriesService) AddCollaborator(ctx context.Context, owner, repo, user string, opts *RepositoryAddCollaboratorOptions) (*CollaboratorInvitation, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/collaborators/%v", owner, repo, user) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - acr := new(CollaboratorInvitation) - resp, err := s.client.Do(ctx, req, acr) - if err != nil { - return nil, resp, err - } - - return acr, resp, nil -} - -// RemoveCollaborator removes the specified GitHub user as collaborator from the given repo. -// Note: Does not return error if a valid user that is not a collaborator is removed. 
-// -// GitHub API docs: https://docs.github.com/rest/collaborators/collaborators#remove-a-repository-collaborator -// -//meta:operation DELETE /repos/{owner}/{repo}/collaborators/{username} -func (s *RepositoriesService) RemoveCollaborator(ctx context.Context, owner, repo, user string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/collaborators/%v", owner, repo, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_contents.go b/vendor/github.com/google/go-github/v57/github/repos_contents.go deleted file mode 100644 index 9539a5c4..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_contents.go +++ /dev/null @@ -1,359 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Repository contents API methods. -// GitHub API docs: https://docs.github.com/rest/repos/contents/ - -package github - -import ( - "context" - "encoding/base64" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "net/url" - "path" - "strings" -) - -var ErrPathForbidden = errors.New("path must not contain '..' due to auth vulnerability issue") - -// RepositoryContent represents a file or directory in a github repository. -type RepositoryContent struct { - Type *string `json:"type,omitempty"` - // Target is only set if the type is "symlink" and the target is not a normal file. - // If Target is set, Path will be the symlink path. - Target *string `json:"target,omitempty"` - Encoding *string `json:"encoding,omitempty"` - Size *int `json:"size,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - // Content contains the actual file content, which may be encoded. - // Callers should call GetContent which will decode the content if - // necessary. - Content *string `json:"content,omitempty"` - SHA *string `json:"sha,omitempty"` - URL *string `json:"url,omitempty"` - GitURL *string `json:"git_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - DownloadURL *string `json:"download_url,omitempty"` - SubmoduleGitURL *string `json:"submodule_git_url,omitempty"` -} - -// RepositoryContentResponse holds the parsed response from CreateFile, UpdateFile, and DeleteFile. -type RepositoryContentResponse struct { - Content *RepositoryContent `json:"content,omitempty"` - Commit `json:"commit,omitempty"` -} - -// RepositoryContentFileOptions specifies optional parameters for CreateFile, UpdateFile, and DeleteFile. -type RepositoryContentFileOptions struct { - Message *string `json:"message,omitempty"` - Content []byte `json:"content"` // unencoded - SHA *string `json:"sha,omitempty"` - Branch *string `json:"branch,omitempty"` - Author *CommitAuthor `json:"author,omitempty"` - Committer *CommitAuthor `json:"committer,omitempty"` -} - -// RepositoryContentGetOptions represents an optional ref parameter, which can be a SHA, -// branch, or tag -type RepositoryContentGetOptions struct { - Ref string `url:"ref,omitempty"` -} - -// String converts RepositoryContent to a string. It's primarily for testing. -func (r RepositoryContent) String() string { - return Stringify(r) -} - -// GetContent returns the content of r, decoding it if necessary. 
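Similarly, a brief sketch of the collaborator flow provided by the methods deleted above (AddCollaborator, GetPermissionLevel); "my-org", "my-repo" and "some-user" are placeholders, and the client is assumed to have admin access to the repository.

package main

import (
	"context"
	"fmt"

	"github.com/google/go-github/v57/github"
)

func inviteCollaborator(ctx context.Context, client *github.Client) error {
	// Send an invitation with "push" permission; the returned value is the pending
	// invitation (it may be empty if the user is already a collaborator).
	inv, _, err := client.Repositories.AddCollaborator(ctx, "my-org", "my-repo", "some-user",
		&github.RepositoryAddCollaboratorOptions{Permission: "push"})
	if err != nil {
		return err
	}
	fmt.Println("invitation id:", inv.GetID())

	// Once accepted, the effective permission level can be queried.
	lvl, _, err := client.Repositories.GetPermissionLevel(ctx, "my-org", "my-repo", "some-user")
	if err != nil {
		return err
	}
	fmt.Println("permission:", lvl.GetPermission())
	return nil
}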
-func (r *RepositoryContent) GetContent() (string, error) { - var encoding string - if r.Encoding != nil { - encoding = *r.Encoding - } - - switch encoding { - case "base64": - if r.Content == nil { - return "", errors.New("malformed response: base64 encoding of null content") - } - c, err := base64.StdEncoding.DecodeString(*r.Content) - return string(c), err - case "": - if r.Content == nil { - return "", nil - } - return *r.Content, nil - case "none": - return "", errors.New("unsupported content encoding: none, this may occur when file size > 1 MB, if that is the case consider using DownloadContents") - default: - return "", fmt.Errorf("unsupported content encoding: %v", encoding) - } -} - -// GetReadme gets the Readme file for the repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/contents#get-a-repository-readme -// -//meta:operation GET /repos/{owner}/{repo}/readme -func (s *RepositoriesService) GetReadme(ctx context.Context, owner, repo string, opts *RepositoryContentGetOptions) (*RepositoryContent, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/readme", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - readme := new(RepositoryContent) - resp, err := s.client.Do(ctx, req, readme) - if err != nil { - return nil, resp, err - } - - return readme, resp, nil -} - -// DownloadContents returns an io.ReadCloser that reads the contents of the -// specified file. This function will work with files of any size, as opposed -// to GetContents which is limited to 1 Mb files. It is the caller's -// responsibility to close the ReadCloser. -// -// It is possible for the download to result in a failed response when the -// returned error is nil. Callers should check the returned Response status -// code to verify the content is from a successful response. -// -// GitHub API docs: https://docs.github.com/rest/repos/contents#get-repository-content -// -//meta:operation GET /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) DownloadContents(ctx context.Context, owner, repo, filepath string, opts *RepositoryContentGetOptions) (io.ReadCloser, *Response, error) { - dir := path.Dir(filepath) - filename := path.Base(filepath) - _, dirContents, resp, err := s.GetContents(ctx, owner, repo, dir, opts) - if err != nil { - return nil, resp, err - } - - for _, contents := range dirContents { - if *contents.Name == filename { - if contents.DownloadURL == nil || *contents.DownloadURL == "" { - return nil, resp, fmt.Errorf("no download link found for %s", filepath) - } - - dlResp, err := s.client.client.Get(*contents.DownloadURL) - if err != nil { - return nil, &Response{Response: dlResp}, err - } - - return dlResp.Body, &Response{Response: dlResp}, nil - } - } - - return nil, resp, fmt.Errorf("no file named %s found in %s", filename, dir) -} - -// DownloadContentsWithMeta is identical to DownloadContents but additionally -// returns the RepositoryContent of the requested file. This additional data -// is useful for future operations involving the requested file. For merely -// reading the content of a file, DownloadContents is perfectly adequate. -// -// It is possible for the download to result in a failed response when the -// returned error is nil. Callers should check the returned Response status -// code to verify the content is from a successful response. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/contents#get-repository-content -// -//meta:operation GET /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) DownloadContentsWithMeta(ctx context.Context, owner, repo, filepath string, opts *RepositoryContentGetOptions) (io.ReadCloser, *RepositoryContent, *Response, error) { - dir := path.Dir(filepath) - filename := path.Base(filepath) - _, dirContents, resp, err := s.GetContents(ctx, owner, repo, dir, opts) - if err != nil { - return nil, nil, resp, err - } - - for _, contents := range dirContents { - if *contents.Name == filename { - if contents.DownloadURL == nil || *contents.DownloadURL == "" { - return nil, contents, resp, fmt.Errorf("no download link found for %s", filepath) - } - - dlResp, err := s.client.client.Get(*contents.DownloadURL) - if err != nil { - return nil, contents, &Response{Response: dlResp}, err - } - - return dlResp.Body, contents, &Response{Response: dlResp}, nil - } - } - - return nil, nil, resp, fmt.Errorf("no file named %s found in %s", filename, dir) -} - -// GetContents can return either the metadata and content of a single file -// (when path references a file) or the metadata of all the files and/or -// subdirectories of a directory (when path references a directory). To make it -// easy to distinguish between both result types and to mimic the API as much -// as possible, both result types will be returned but only one will contain a -// value and the other will be nil. -// -// Due to an auth vulnerability issue in the GitHub v3 API, ".." is not allowed -// to appear anywhere in the "path" or this method will return an error. -// -// GitHub API docs: https://docs.github.com/rest/repos/contents#get-repository-content -// -//meta:operation GET /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) GetContents(ctx context.Context, owner, repo, path string, opts *RepositoryContentGetOptions) (fileContent *RepositoryContent, directoryContent []*RepositoryContent, resp *Response, err error) { - if strings.Contains(path, "..") { - return nil, nil, nil, ErrPathForbidden - } - - escapedPath := (&url.URL{Path: strings.TrimSuffix(path, "/")}).String() - u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, escapedPath) - u, err = addOptions(u, opts) - if err != nil { - return nil, nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, nil, err - } - - var rawJSON json.RawMessage - resp, err = s.client.Do(ctx, req, &rawJSON) - if err != nil { - return nil, nil, resp, err - } - - fileUnmarshalError := json.Unmarshal(rawJSON, &fileContent) - if fileUnmarshalError == nil { - return fileContent, nil, resp, nil - } - - directoryUnmarshalError := json.Unmarshal(rawJSON, &directoryContent) - if directoryUnmarshalError == nil { - return nil, directoryContent, resp, nil - } - - return nil, nil, resp, fmt.Errorf("unmarshalling failed for both file and directory content: %s and %s", fileUnmarshalError, directoryUnmarshalError) -} - -// CreateFile creates a new file in a repository at the given path and returns -// the commit and file metadata. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/contents#create-or-update-file-contents -// -//meta:operation PUT /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) CreateFile(ctx context.Context, owner, repo, path string, opts *RepositoryContentFileOptions) (*RepositoryContentResponse, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, path) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - createResponse := new(RepositoryContentResponse) - resp, err := s.client.Do(ctx, req, createResponse) - if err != nil { - return nil, resp, err - } - - return createResponse, resp, nil -} - -// UpdateFile updates a file in a repository at the given path and returns the -// commit and file metadata. Requires the blob SHA of the file being updated. -// -// GitHub API docs: https://docs.github.com/rest/repos/contents#create-or-update-file-contents -// -//meta:operation PUT /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) UpdateFile(ctx context.Context, owner, repo, path string, opts *RepositoryContentFileOptions) (*RepositoryContentResponse, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, path) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - updateResponse := new(RepositoryContentResponse) - resp, err := s.client.Do(ctx, req, updateResponse) - if err != nil { - return nil, resp, err - } - - return updateResponse, resp, nil -} - -// DeleteFile deletes a file from a repository and returns the commit. -// Requires the blob SHA of the file to be deleted. -// -// GitHub API docs: https://docs.github.com/rest/repos/contents#delete-a-file -// -//meta:operation DELETE /repos/{owner}/{repo}/contents/{path} -func (s *RepositoriesService) DeleteFile(ctx context.Context, owner, repo, path string, opts *RepositoryContentFileOptions) (*RepositoryContentResponse, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, path) - req, err := s.client.NewRequest("DELETE", u, opts) - if err != nil { - return nil, nil, err - } - - deleteResponse := new(RepositoryContentResponse) - resp, err := s.client.Do(ctx, req, deleteResponse) - if err != nil { - return nil, resp, err - } - - return deleteResponse, resp, nil -} - -// ArchiveFormat is used to define the archive type when calling GetArchiveLink. -type ArchiveFormat string - -const ( - // Tarball specifies an archive in gzipped tar format. - Tarball ArchiveFormat = "tarball" - - // Zipball specifies an archive in zip format. - Zipball ArchiveFormat = "zipball" -) - -// GetArchiveLink returns an URL to download a tarball or zipball archive for a -// repository. The archiveFormat can be specified by either the github.Tarball -// or github.Zipball constant. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/contents#download-a-repository-archive-tar -// GitHub API docs: https://docs.github.com/rest/repos/contents#download-a-repository-archive-zip -// -//meta:operation GET /repos/{owner}/{repo}/tarball/{ref} -//meta:operation GET /repos/{owner}/{repo}/zipball/{ref} -func (s *RepositoriesService) GetArchiveLink(ctx context.Context, owner, repo string, archiveformat ArchiveFormat, opts *RepositoryContentGetOptions, maxRedirects int) (*url.URL, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/%s", owner, repo, archiveformat) - if opts != nil && opts.Ref != "" { - u += fmt.Sprintf("/%s", opts.Ref) - } - resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) - if err != nil { - return nil, nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusFound { - return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) - } - - parsedURL, err := url.Parse(resp.Header.Get("Location")) - if err != nil { - return nil, newResponse(resp), err - } - - return parsedURL, newResponse(resp), nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_deployments.go b/vendor/github.com/google/go-github/v57/github/repos_deployments.go deleted file mode 100644 index d8c0b632..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_deployments.go +++ /dev/null @@ -1,264 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" - "strings" -) - -// Deployment represents a deployment in a repo -type Deployment struct { - URL *string `json:"url,omitempty"` - ID *int64 `json:"id,omitempty"` - SHA *string `json:"sha,omitempty"` - Ref *string `json:"ref,omitempty"` - Task *string `json:"task,omitempty"` - Payload json.RawMessage `json:"payload,omitempty"` - Environment *string `json:"environment,omitempty"` - Description *string `json:"description,omitempty"` - Creator *User `json:"creator,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - StatusesURL *string `json:"statuses_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -// DeploymentRequest represents a deployment request -type DeploymentRequest struct { - Ref *string `json:"ref,omitempty"` - Task *string `json:"task,omitempty"` - AutoMerge *bool `json:"auto_merge,omitempty"` - RequiredContexts *[]string `json:"required_contexts,omitempty"` - Payload interface{} `json:"payload,omitempty"` - Environment *string `json:"environment,omitempty"` - Description *string `json:"description,omitempty"` - TransientEnvironment *bool `json:"transient_environment,omitempty"` - ProductionEnvironment *bool `json:"production_environment,omitempty"` -} - -// DeploymentsListOptions specifies the optional parameters to the -// RepositoriesService.ListDeployments method. -type DeploymentsListOptions struct { - // SHA of the Deployment. - SHA string `url:"sha,omitempty"` - - // List deployments for a given ref. - Ref string `url:"ref,omitempty"` - - // List deployments for a given task. - Task string `url:"task,omitempty"` - - // List deployments for a given environment. - Environment string `url:"environment,omitempty"` - - ListOptions -} - -// ListDeployments lists the deployments of a repository. 
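A short sketch of reading a single file through the contents API removed above: GetContents fetches the metadata and RepositoryContent.GetContent performs the base64 decoding. The path and ref shown are placeholder values.

package main

import (
	"context"
	"fmt"

	"github.com/google/go-github/v57/github"
)

func readFileAtRef(ctx context.Context, client *github.Client) error {
	// Fetch a single file at a given ref; the directory slice is nil when path is a file.
	file, _, _, err := client.Repositories.GetContents(ctx, "my-org", "my-repo", "README.md",
		&github.RepositoryContentGetOptions{Ref: "main"})
	if err != nil {
		return err
	}

	// GetContent transparently base64-decodes the payload when needed.
	body, err := file.GetContent()
	if err != nil {
		return err
	}
	fmt.Println(len(body), "bytes")
	return nil
}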
-// -// GitHub API docs: https://docs.github.com/rest/deployments/deployments#list-deployments -// -//meta:operation GET /repos/{owner}/{repo}/deployments -func (s *RepositoriesService) ListDeployments(ctx context.Context, owner, repo string, opts *DeploymentsListOptions) ([]*Deployment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var deployments []*Deployment - resp, err := s.client.Do(ctx, req, &deployments) - if err != nil { - return nil, resp, err - } - - return deployments, resp, nil -} - -// GetDeployment returns a single deployment of a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/deployments#get-a-deployment -// -//meta:operation GET /repos/{owner}/{repo}/deployments/{deployment_id} -func (s *RepositoriesService) GetDeployment(ctx context.Context, owner, repo string, deploymentID int64) (*Deployment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments/%v", owner, repo, deploymentID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - deployment := new(Deployment) - resp, err := s.client.Do(ctx, req, deployment) - if err != nil { - return nil, resp, err - } - - return deployment, resp, nil -} - -// CreateDeployment creates a new deployment for a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/deployments#create-a-deployment -// -//meta:operation POST /repos/{owner}/{repo}/deployments -func (s *RepositoriesService) CreateDeployment(ctx context.Context, owner, repo string, request *DeploymentRequest) (*Deployment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments", owner, repo) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - d := new(Deployment) - resp, err := s.client.Do(ctx, req, d) - if err != nil { - return nil, resp, err - } - - return d, resp, nil -} - -// DeleteDeployment deletes an existing deployment for a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/deployments#delete-a-deployment -// -//meta:operation DELETE /repos/{owner}/{repo}/deployments/{deployment_id} -func (s *RepositoriesService) DeleteDeployment(ctx context.Context, owner, repo string, deploymentID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments/%v", owner, repo, deploymentID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// DeploymentStatus represents the status of a -// particular deployment. -type DeploymentStatus struct { - ID *int64 `json:"id,omitempty"` - // State is the deployment state. - // Possible values are: "pending", "success", "failure", "error", - // "inactive", "in_progress", "queued". 
- State *string `json:"state,omitempty"` - Creator *User `json:"creator,omitempty"` - Description *string `json:"description,omitempty"` - Environment *string `json:"environment,omitempty"` - NodeID *string `json:"node_id,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - TargetURL *string `json:"target_url,omitempty"` - DeploymentURL *string `json:"deployment_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - EnvironmentURL *string `json:"environment_url,omitempty"` - LogURL *string `json:"log_url,omitempty"` - URL *string `json:"url,omitempty"` -} - -// DeploymentStatusRequest represents a deployment request -type DeploymentStatusRequest struct { - State *string `json:"state,omitempty"` - LogURL *string `json:"log_url,omitempty"` - Description *string `json:"description,omitempty"` - Environment *string `json:"environment,omitempty"` - EnvironmentURL *string `json:"environment_url,omitempty"` - AutoInactive *bool `json:"auto_inactive,omitempty"` -} - -// ListDeploymentStatuses lists the statuses of a given deployment of a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/statuses#list-deployment-statuses -// -//meta:operation GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses -func (s *RepositoriesService) ListDeploymentStatuses(ctx context.Context, owner, repo string, deployment int64, opts *ListOptions) ([]*DeploymentStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses", owner, repo, deployment) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var statuses []*DeploymentStatus - resp, err := s.client.Do(ctx, req, &statuses) - if err != nil { - return nil, resp, err - } - - return statuses, resp, nil -} - -// GetDeploymentStatus returns a single deployment status of a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/statuses#get-a-deployment-status -// -//meta:operation GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id} -func (s *RepositoriesService) GetDeploymentStatus(ctx context.Context, owner, repo string, deploymentID, deploymentStatusID int64) (*DeploymentStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses/%v", owner, repo, deploymentID, deploymentStatusID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - d := new(DeploymentStatus) - resp, err := s.client.Do(ctx, req, d) - if err != nil { - return nil, resp, err - } - - return d, resp, nil -} - -// CreateDeploymentStatus creates a new status for a deployment. 
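A sketch of the deployment flow exposed by the methods removed above: create a deployment for a ref, then record its status once the rollout completes. The ref, environment and descriptions are illustrative values only.

package main

import (
	"context"

	"github.com/google/go-github/v57/github"
)

func markDeployment(ctx context.Context, client *github.Client) error {
	// Create a deployment for a ref in the "production" environment.
	d, _, err := client.Repositories.CreateDeployment(ctx, "my-org", "my-repo", &github.DeploymentRequest{
		Ref:         github.String("main"),
		Environment: github.String("production"),
		Description: github.String("illustrative deployment"),
	})
	if err != nil {
		return err
	}

	// Report the final state of that deployment.
	_, _, err = client.Repositories.CreateDeploymentStatus(ctx, "my-org", "my-repo", d.GetID(),
		&github.DeploymentStatusRequest{
			State:       github.String("success"),
			Description: github.String("rollout finished"),
		})
	return err
}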
-// -// GitHub API docs: https://docs.github.com/rest/deployments/statuses#create-a-deployment-status -// -//meta:operation POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses -func (s *RepositoriesService) CreateDeploymentStatus(ctx context.Context, owner, repo string, deployment int64, request *DeploymentStatusRequest) (*DeploymentStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses", owner, repo, deployment) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - d := new(DeploymentStatus) - resp, err := s.client.Do(ctx, req, d) - if err != nil { - return nil, resp, err - } - - return d, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_environments.go b/vendor/github.com/google/go-github/v57/github/repos_environments.go deleted file mode 100644 index ed81e3a1..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_environments.go +++ /dev/null @@ -1,252 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" - "net/http" -) - -// Environment represents a single environment in a repository. -type Environment struct { - Owner *string `json:"owner,omitempty"` - Repo *string `json:"repo,omitempty"` - EnvironmentName *string `json:"environment_name,omitempty"` - WaitTimer *int `json:"wait_timer,omitempty"` - Reviewers []*EnvReviewers `json:"reviewers,omitempty"` - DeploymentBranchPolicy *BranchPolicy `json:"deployment_branch_policy,omitempty"` - // Return/response only values - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - CanAdminsBypass *bool `json:"can_admins_bypass,omitempty"` - ProtectionRules []*ProtectionRule `json:"protection_rules,omitempty"` -} - -// EnvReviewers represents a single environment reviewer entry. -type EnvReviewers struct { - Type *string `json:"type,omitempty"` - ID *int64 `json:"id,omitempty"` -} - -// BranchPolicy represents the options for whether a branch deployment policy is applied to this environment. -type BranchPolicy struct { - ProtectedBranches *bool `json:"protected_branches,omitempty"` - CustomBranchPolicies *bool `json:"custom_branch_policies,omitempty"` -} - -// EnvResponse represents the slightly different format of response that comes back when you list an environment. -type EnvResponse struct { - TotalCount *int `json:"total_count,omitempty"` - Environments []*Environment `json:"environments,omitempty"` -} - -// ProtectionRule represents a single protection rule applied to the environment. 
-type ProtectionRule struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - PreventSelfReview *bool `json:"prevent_self_review,omitempty"` - Type *string `json:"type,omitempty"` - WaitTimer *int `json:"wait_timer,omitempty"` - Reviewers []*RequiredReviewer `json:"reviewers,omitempty"` -} - -// RequiredReviewer represents a required reviewer. -type RequiredReviewer struct { - Type *string `json:"type,omitempty"` - Reviewer interface{} `json:"reviewer,omitempty"` -} - -// EnvironmentListOptions specifies the optional parameters to the -// RepositoriesService.ListEnvironments method. -type EnvironmentListOptions struct { - ListOptions -} - -// UnmarshalJSON implements the json.Unmarshaler interface. -// This helps us handle the fact that RequiredReviewer can have either a User or Team type reviewer field. -func (r *RequiredReviewer) UnmarshalJSON(data []byte) error { - type aliasReviewer RequiredReviewer - var reviewer aliasReviewer - if err := json.Unmarshal(data, &reviewer); err != nil { - return err - } - - r.Type = reviewer.Type - - switch *reviewer.Type { - case "User": - reviewer.Reviewer = &User{} - if err := json.Unmarshal(data, &reviewer); err != nil { - return err - } - r.Reviewer = reviewer.Reviewer - case "Team": - reviewer.Reviewer = &Team{} - if err := json.Unmarshal(data, &reviewer); err != nil { - return err - } - r.Reviewer = reviewer.Reviewer - default: - r.Type = nil - r.Reviewer = nil - return fmt.Errorf("reviewer.Type is %T, not a string of 'User' or 'Team', unable to unmarshal", reviewer.Type) - } - - return nil -} - -// ListEnvironments lists all environments for a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/environments#list-environments -// -//meta:operation GET /repos/{owner}/{repo}/environments -func (s *RepositoriesService) ListEnvironments(ctx context.Context, owner, repo string, opts *EnvironmentListOptions) (*EnvResponse, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/environments", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var list *EnvResponse - resp, err := s.client.Do(ctx, req, &list) - if err != nil { - return nil, resp, err - } - return list, resp, nil -} - -// GetEnvironment get a single environment for a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/environments#get-an-environment -// -//meta:operation GET /repos/{owner}/{repo}/environments/{environment_name} -func (s *RepositoriesService) GetEnvironment(ctx context.Context, owner, repo, name string) (*Environment, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/environments/%s", owner, repo, name) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var env *Environment - resp, err := s.client.Do(ctx, req, &env) - if err != nil { - return nil, resp, err - } - return env, resp, nil -} - -// MarshalJSON implements the json.Marshaler interface. -// As the only way to clear a WaitTimer is to set it to 0, a missing WaitTimer object should default to 0, not null. -// As the default value for CanAdminsBypass is true, a nil value here marshals to true. 
-func (c *CreateUpdateEnvironment) MarshalJSON() ([]byte, error) { - type Alias CreateUpdateEnvironment - if c.WaitTimer == nil { - c.WaitTimer = Int(0) - } - if c.CanAdminsBypass == nil { - c.CanAdminsBypass = Bool(true) - } - return json.Marshal(&struct { - *Alias - }{ - Alias: (*Alias)(c), - }) -} - -// CreateUpdateEnvironment represents the fields required for the create/update operation -// following the Create/Update release example. -// See https://github.com/google/go-github/issues/992 for more information. -// Removed omitempty here as the API expects null values for reviewers and deployment_branch_policy to clear them. -type CreateUpdateEnvironment struct { - WaitTimer *int `json:"wait_timer"` - Reviewers []*EnvReviewers `json:"reviewers"` - CanAdminsBypass *bool `json:"can_admins_bypass"` - DeploymentBranchPolicy *BranchPolicy `json:"deployment_branch_policy"` - PreventSelfReview *bool `json:"prevent_self_review,omitempty"` -} - -// createUpdateEnvironmentNoEnterprise represents the fields accepted for Pro/Teams private repos. -// Ref: https://docs.github.com/actions/deployment/targeting-different-environments/using-environments-for-deployment -// See https://github.com/google/go-github/issues/2602 for more information. -type createUpdateEnvironmentNoEnterprise struct { - DeploymentBranchPolicy *BranchPolicy `json:"deployment_branch_policy"` -} - -// CreateUpdateEnvironment create or update a new environment for a repository. -// -// GitHub API docs: https://docs.github.com/rest/deployments/environments#create-or-update-an-environment -// -//meta:operation PUT /repos/{owner}/{repo}/environments/{environment_name} -func (s *RepositoriesService) CreateUpdateEnvironment(ctx context.Context, owner, repo, name string, environment *CreateUpdateEnvironment) (*Environment, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/environments/%s", owner, repo, name) - - req, err := s.client.NewRequest("PUT", u, environment) - if err != nil { - return nil, nil, err - } - - e := new(Environment) - resp, err := s.client.Do(ctx, req, e) - if err != nil { - // The API returns 422 when the pricing plan doesn't support all the fields sent. - // This path will be executed for Pro/Teams private repos. - // For public repos, regardless of the pricing plan, all fields supported. - // For Free plan private repos the returned error code is 404. - // We are checking that the user didn't try to send a value for unsupported fields, - // and return an error if they did. - if resp != nil && resp.StatusCode == http.StatusUnprocessableEntity && environment != nil && len(environment.Reviewers) == 0 && environment.GetWaitTimer() == 0 { - return s.createNewEnvNoEnterprise(ctx, u, environment) - } - return nil, resp, err - } - return e, resp, nil -} - -// createNewEnvNoEnterprise is an internal function for cases where the original call returned 422. -// Currently only the `deployment_branch_policy` parameter is supported for Pro/Team private repos. -func (s *RepositoriesService) createNewEnvNoEnterprise(ctx context.Context, u string, environment *CreateUpdateEnvironment) (*Environment, *Response, error) { - req, err := s.client.NewRequest("PUT", u, &createUpdateEnvironmentNoEnterprise{ - DeploymentBranchPolicy: environment.DeploymentBranchPolicy, - }) - if err != nil { - return nil, nil, err - } - - e := new(Environment) - resp, err := s.client.Do(ctx, req, e) - if err != nil { - return nil, resp, err - } - return e, resp, nil -} - -// DeleteEnvironment delete an environment from a repository. 
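A small sketch of CreateUpdateEnvironment as removed above, configuring a wait timer and a protected-branches deployment policy; note that the deleted code transparently retries with a reduced payload when GitHub answers 422 on plans that do not support every field.

package main

import (
	"context"

	"github.com/google/go-github/v57/github"
)

func protectEnvironment(ctx context.Context, client *github.Client) error {
	// Create (or update) a "production" environment with a 30 minute wait timer
	// and deployments restricted to protected branches.
	_, _, err := client.Repositories.CreateUpdateEnvironment(ctx, "my-org", "my-repo", "production",
		&github.CreateUpdateEnvironment{
			WaitTimer: github.Int(30),
			DeploymentBranchPolicy: &github.BranchPolicy{
				ProtectedBranches:    github.Bool(true),
				CustomBranchPolicies: github.Bool(false),
			},
		})
	return err
}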
-// -// GitHub API docs: https://docs.github.com/rest/deployments/environments#delete-an-environment -// -//meta:operation DELETE /repos/{owner}/{repo}/environments/{environment_name} -func (s *RepositoriesService) DeleteEnvironment(ctx context.Context, owner, repo, name string) (*Response, error) { - u := fmt.Sprintf("repos/%s/%s/environments/%s", owner, repo, name) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_hooks.go b/vendor/github.com/google/go-github/v57/github/repos_hooks.go deleted file mode 100644 index 8768d603..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_hooks.go +++ /dev/null @@ -1,271 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" - "net/url" - "strings" -) - -// WebHookPayload represents the data that is received from GitHub when a push -// event hook is triggered. The format of these payloads pre-date most of the -// GitHub v3 API, so there are lots of minor incompatibilities with the types -// defined in the rest of the API. Therefore, several types are duplicated -// here to account for these differences. -// -// GitHub API docs: https://help.github.com/articles/post-receive-hooks -// -// Deprecated: Please use PushEvent instead. -type WebHookPayload = PushEvent - -// WebHookCommit represents the commit variant we receive from GitHub in a -// WebHookPayload. -// -// Deprecated: Please use HeadCommit instead. -type WebHookCommit = HeadCommit - -// WebHookAuthor represents the author or committer of a commit, as specified -// in a WebHookCommit. The commit author may not correspond to a GitHub User. -// -// Deprecated: Please use CommitAuthor instead. -// NOTE Breaking API change: the `Username` field is now called `Login`. -type WebHookAuthor = CommitAuthor - -// Hook represents a GitHub (web and service) hook for a repository. -type Hook struct { - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` - ID *int64 `json:"id,omitempty"` - Type *string `json:"type,omitempty"` - Name *string `json:"name,omitempty"` - TestURL *string `json:"test_url,omitempty"` - PingURL *string `json:"ping_url,omitempty"` - LastResponse map[string]interface{} `json:"last_response,omitempty"` - - // Only the following fields are used when creating a hook. - // Config is required. - Config map[string]interface{} `json:"config,omitempty"` - Events []string `json:"events,omitempty"` - Active *bool `json:"active,omitempty"` -} - -func (h Hook) String() string { - return Stringify(h) -} - -// createHookRequest is a subset of Hook and is used internally -// by CreateHook to pass only the known fields for the endpoint. -// -// See https://github.com/google/go-github/issues/1015 for more -// information. -type createHookRequest struct { - // Config is required. - Name string `json:"name"` - Config map[string]interface{} `json:"config,omitempty"` - Events []string `json:"events,omitempty"` - Active *bool `json:"active,omitempty"` -} - -// CreateHook creates a Hook for the specified repository. -// Config is a required field. -// -// Note that only a subset of the hook fields are used and hook must -// not be nil. 
-// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#create-a-repository-webhook -// -//meta:operation POST /repos/{owner}/{repo}/hooks -func (s *RepositoriesService) CreateHook(ctx context.Context, owner, repo string, hook *Hook) (*Hook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks", owner, repo) - - hookReq := &createHookRequest{ - Name: "web", - Events: hook.Events, - Active: hook.Active, - Config: hook.Config, - } - - req, err := s.client.NewRequest("POST", u, hookReq) - if err != nil { - return nil, nil, err - } - - h := new(Hook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// ListHooks lists all Hooks for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#list-repository-webhooks -// -//meta:operation GET /repos/{owner}/{repo}/hooks -func (s *RepositoriesService) ListHooks(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Hook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var hooks []*Hook - resp, err := s.client.Do(ctx, req, &hooks) - if err != nil { - return nil, resp, err - } - - return hooks, resp, nil -} - -// GetHook returns a single specified Hook. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#get-a-repository-webhook -// -//meta:operation GET /repos/{owner}/{repo}/hooks/{hook_id} -func (s *RepositoriesService) GetHook(ctx context.Context, owner, repo string, id int64) (*Hook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - h := new(Hook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// EditHook updates a specified Hook. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#update-a-repository-webhook -// -//meta:operation PATCH /repos/{owner}/{repo}/hooks/{hook_id} -func (s *RepositoriesService) EditHook(ctx context.Context, owner, repo string, id int64, hook *Hook) (*Hook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("PATCH", u, hook) - if err != nil { - return nil, nil, err - } - h := new(Hook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// DeleteHook deletes a specified Hook. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#delete-a-repository-webhook -// -//meta:operation DELETE /repos/{owner}/{repo}/hooks/{hook_id} -func (s *RepositoriesService) DeleteHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// PingHook triggers a 'ping' event to be sent to the Hook. 
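A minimal sketch of the webhook methods removed above (CreateHook, PingHook); the callback URL, shared secret and the workflow_job event are placeholder assumptions, and only the createHookRequest subset of Hook fields is actually transmitted.

package main

import (
	"context"

	"github.com/google/go-github/v57/github"
)

func installWebhook(ctx context.Context, client *github.Client) error {
	// Create the hook; the hook name is forced to "web" internally by CreateHook.
	hook, _, err := client.Repositories.CreateHook(ctx, "my-org", "my-repo", &github.Hook{
		Events: []string{"workflow_job"},
		Active: github.Bool(true),
		Config: map[string]interface{}{
			"url":          "https://garm.example.com/webhooks",
			"content_type": "json",
			"secret":       "some-shared-secret",
		},
	})
	if err != nil {
		return err
	}

	// Ask GitHub to send a ping event so the endpoint can be verified end to end.
	_, err = client.Repositories.PingHook(ctx, "my-org", "my-repo", hook.GetID())
	return err
}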
-// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#ping-a-repository-webhook -// -//meta:operation POST /repos/{owner}/{repo}/hooks/{hook_id}/pings -func (s *RepositoriesService) PingHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%d/pings", owner, repo, id) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// TestHook triggers a test Hook by github. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repos#test-the-push-repository-webhook -// -//meta:operation POST /repos/{owner}/{repo}/hooks/{hook_id}/tests -func (s *RepositoriesService) TestHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%d/tests", owner, repo, id) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// Subscribe lets servers register to receive updates when a topic is updated. -// -// GitHub API docs: https://docs.github.com/webhooks/about-webhooks-for-repositories#pubsubhubbub -// -//meta:operation POST /hub -func (s *RepositoriesService) Subscribe(ctx context.Context, owner, repo, event, callback string, secret []byte) (*Response, error) { - req, err := s.createWebSubRequest("subscribe", owner, repo, event, callback, secret) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unsubscribe lets servers unregister to no longer receive updates when a topic is updated. -// -// GitHub API docs: https://docs.github.com/webhooks/about-webhooks-for-repositories#pubsubhubbub -// -//meta:operation POST /hub -func (s *RepositoriesService) Unsubscribe(ctx context.Context, owner, repo, event, callback string, secret []byte) (*Response, error) { - req, err := s.createWebSubRequest("unsubscribe", owner, repo, event, callback, secret) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// createWebSubRequest returns a subscribe/unsubscribe request that implements -// the WebSub (formerly PubSubHubbub) protocol. -// -// See: https://www.w3.org/TR/websub/#subscriber-sends-subscription-request -func (s *RepositoriesService) createWebSubRequest(hubMode, owner, repo, event, callback string, secret []byte) (*http.Request, error) { - topic := fmt.Sprintf( - "https://github.com/%s/%s/events/%s", - owner, - repo, - event, - ) - form := url.Values{} - form.Add("hub.mode", hubMode) - form.Add("hub.topic", topic) - form.Add("hub.callback", callback) - if secret != nil { - form.Add("hub.secret", string(secret)) - } - body := strings.NewReader(form.Encode()) - - req, err := s.client.NewFormRequest("hub", body) - if err != nil { - return nil, err - } - - return req, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_hooks_configuration.go b/vendor/github.com/google/go-github/v57/github/repos_hooks_configuration.go deleted file mode 100644 index 2203d761..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_hooks_configuration.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetHookConfiguration returns the configuration for the specified repository webhook. 
-// -// GitHub API docs: https://docs.github.com/rest/webhooks/repo-config#get-a-webhook-configuration-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/hooks/{hook_id}/config -func (s *RepositoriesService) GetHookConfiguration(ctx context.Context, owner, repo string, id int64) (*HookConfig, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%v/config", owner, repo, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - config := new(HookConfig) - resp, err := s.client.Do(ctx, req, config) - if err != nil { - return nil, resp, err - } - - return config, resp, nil -} - -// EditHookConfiguration updates the configuration for the specified repository webhook. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repo-config#update-a-webhook-configuration-for-a-repository -// -//meta:operation PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config -func (s *RepositoriesService) EditHookConfiguration(ctx context.Context, owner, repo string, id int64, config *HookConfig) (*HookConfig, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%v/config", owner, repo, id) - req, err := s.client.NewRequest("PATCH", u, config) - if err != nil { - return nil, nil, err - } - - c := new(HookConfig) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_hooks_deliveries.go b/vendor/github.com/google/go-github/v57/github/repos_hooks_deliveries.go deleted file mode 100644 index 6e1fd86f..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_hooks_deliveries.go +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" -) - -// HookDelivery represents the data that is received from GitHub's Webhook Delivery API -// -// GitHub API docs: -// - https://docs.github.com/rest/webhooks/repo-deliveries#list-deliveries-for-a-repository-webhook -// - https://docs.github.com/rest/webhooks/repo-deliveries#get-a-delivery-for-a-repository-webhook -type HookDelivery struct { - ID *int64 `json:"id,omitempty"` - GUID *string `json:"guid,omitempty"` - DeliveredAt *Timestamp `json:"delivered_at,omitempty"` - Redelivery *bool `json:"redelivery,omitempty"` - Duration *float64 `json:"duration,omitempty"` - Status *string `json:"status,omitempty"` - StatusCode *int `json:"status_code,omitempty"` - Event *string `json:"event,omitempty"` - Action *string `json:"action,omitempty"` - InstallationID *int64 `json:"installation_id,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - - // Request is populated by GetHookDelivery. - Request *HookRequest `json:"request,omitempty"` - // Response is populated by GetHookDelivery. - Response *HookResponse `json:"response,omitempty"` -} - -func (d HookDelivery) String() string { - return Stringify(d) -} - -// HookRequest is a part of HookDelivery that contains -// the HTTP headers and the JSON payload of the webhook request. -type HookRequest struct { - Headers map[string]string `json:"headers,omitempty"` - RawPayload *json.RawMessage `json:"payload,omitempty"` -} - -func (r HookRequest) String() string { - return Stringify(r) -} - -// HookResponse is a part of HookDelivery that contains -// the HTTP headers and the response body served by the webhook endpoint. 
-type HookResponse struct { - Headers map[string]string `json:"headers,omitempty"` - RawPayload *json.RawMessage `json:"payload,omitempty"` -} - -func (r HookResponse) String() string { - return Stringify(r) -} - -// ListHookDeliveries lists webhook deliveries for a webhook configured in a repository. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repo-deliveries#list-deliveries-for-a-repository-webhook -// -//meta:operation GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries -func (s *RepositoriesService) ListHookDeliveries(ctx context.Context, owner, repo string, id int64, opts *ListCursorOptions) ([]*HookDelivery, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%v/deliveries", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - deliveries := []*HookDelivery{} - resp, err := s.client.Do(ctx, req, &deliveries) - if err != nil { - return nil, resp, err - } - - return deliveries, resp, nil -} - -// GetHookDelivery returns a delivery for a webhook configured in a repository. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repo-deliveries#get-a-delivery-for-a-repository-webhook -// -//meta:operation GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id} -func (s *RepositoriesService) GetHookDelivery(ctx context.Context, owner, repo string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%v/deliveries/%v", owner, repo, hookID, deliveryID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// RedeliverHookDelivery redelivers a delivery for a webhook configured in a repository. -// -// GitHub API docs: https://docs.github.com/rest/webhooks/repo-deliveries#redeliver-a-delivery-for-a-repository-webhook -// -//meta:operation POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts -func (s *RepositoriesService) RedeliverHookDelivery(ctx context.Context, owner, repo string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/hooks/%v/deliveries/%v/attempts", owner, repo, hookID, deliveryID) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// ParseRequestPayload parses the request payload. For recognized event types, -// a value of the corresponding struct type will be returned. -func (d *HookDelivery) ParseRequestPayload() (interface{}, error) { - eType, ok := messageToTypeName[d.GetEvent()] - if !ok { - return nil, fmt.Errorf("unsupported event type %q", d.GetEvent()) - } - - e := &Event{Type: &eType, RawPayload: d.Request.RawPayload} - return e.ParsePayload() -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_invitations.go b/vendor/github.com/google/go-github/v57/github/repos_invitations.go deleted file mode 100644 index 4922e0b2..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_invitations.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. 
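A sketch of the delivery-inspection helpers removed above (ListHookDeliveries, RedeliverHookDelivery), retrying any delivery that did not return a 2xx status; the owner, repo and status check are illustrative.

package main

import (
	"context"

	"github.com/google/go-github/v57/github"
)

func redeliverFailed(ctx context.Context, client *github.Client, hookID int64) error {
	// Walk the most recent deliveries for the hook and redeliver anything that failed.
	deliveries, _, err := client.Repositories.ListHookDeliveries(ctx, "my-org", "my-repo", hookID, nil)
	if err != nil {
		return err
	}
	for _, d := range deliveries {
		if d.GetStatusCode() >= 300 {
			if _, _, err := client.Repositories.RedeliverHookDelivery(ctx, "my-org", "my-repo", hookID, d.GetID()); err != nil {
				return err
			}
		}
	}
	return nil
}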
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RepositoryInvitation represents an invitation to collaborate on a repo. -type RepositoryInvitation struct { - ID *int64 `json:"id,omitempty"` - Repo *Repository `json:"repository,omitempty"` - Invitee *User `json:"invitee,omitempty"` - Inviter *User `json:"inviter,omitempty"` - - // Permissions represents the permissions that the associated user will have - // on the repository. Possible values are: "read", "write", "admin". - Permissions *string `json:"permissions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` -} - -// ListInvitations lists all currently-open repository invitations. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#list-repository-invitations -// -//meta:operation GET /repos/{owner}/{repo}/invitations -func (s *RepositoriesService) ListInvitations(ctx context.Context, owner, repo string, opts *ListOptions) ([]*RepositoryInvitation, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/invitations", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - invites := []*RepositoryInvitation{} - resp, err := s.client.Do(ctx, req, &invites) - if err != nil { - return nil, resp, err - } - - return invites, resp, nil -} - -// DeleteInvitation deletes a repository invitation. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#delete-a-repository-invitation -// -//meta:operation DELETE /repos/{owner}/{repo}/invitations/{invitation_id} -func (s *RepositoriesService) DeleteInvitation(ctx context.Context, owner, repo string, invitationID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/invitations/%v", owner, repo, invitationID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// UpdateInvitation updates the permissions associated with a repository -// invitation. -// -// permissions represents the permissions that the associated user will have -// on the repository. Possible values are: "read", "write", "admin". -// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#update-a-repository-invitation -// -//meta:operation PATCH /repos/{owner}/{repo}/invitations/{invitation_id} -func (s *RepositoriesService) UpdateInvitation(ctx context.Context, owner, repo string, invitationID int64, permissions string) (*RepositoryInvitation, *Response, error) { - opts := &struct { - Permissions string `json:"permissions"` - }{Permissions: permissions} - u := fmt.Sprintf("repos/%v/%v/invitations/%v", owner, repo, invitationID) - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - invite := &RepositoryInvitation{} - resp, err := s.client.Do(ctx, req, invite) - if err != nil { - return nil, resp, err - } - - return invite, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_pages.go b/vendor/github.com/google/go-github/v57/github/repos_pages.go deleted file mode 100644 index 6b9ba76e..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_pages.go +++ /dev/null @@ -1,324 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Pages represents a GitHub Pages site configuration. -type Pages struct { - URL *string `json:"url,omitempty"` - Status *string `json:"status,omitempty"` - CNAME *string `json:"cname,omitempty"` - Custom404 *bool `json:"custom_404,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - BuildType *string `json:"build_type,omitempty"` - Source *PagesSource `json:"source,omitempty"` - Public *bool `json:"public,omitempty"` - HTTPSCertificate *PagesHTTPSCertificate `json:"https_certificate,omitempty"` - HTTPSEnforced *bool `json:"https_enforced,omitempty"` -} - -// PagesSource represents a GitHub page's source. -type PagesSource struct { - Branch *string `json:"branch,omitempty"` - Path *string `json:"path,omitempty"` -} - -// PagesError represents a build error for a GitHub Pages site. -type PagesError struct { - Message *string `json:"message,omitempty"` -} - -// PagesBuild represents the build information for a GitHub Pages site. -type PagesBuild struct { - URL *string `json:"url,omitempty"` - Status *string `json:"status,omitempty"` - Error *PagesError `json:"error,omitempty"` - Pusher *User `json:"pusher,omitempty"` - Commit *string `json:"commit,omitempty"` - Duration *int `json:"duration,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -// PagesDomain represents a domain associated with a GitHub Pages site. -type PagesDomain struct { - Host *string `json:"host,omitempty"` - URI *string `json:"uri,omitempty"` - Nameservers *string `json:"nameservers,omitempty"` - DNSResolves *bool `json:"dns_resolves,omitempty"` - IsProxied *bool `json:"is_proxied,omitempty"` - IsCloudflareIP *bool `json:"is_cloudflare_ip,omitempty"` - IsFastlyIP *bool `json:"is_fastly_ip,omitempty"` - IsOldIPAddress *bool `json:"is_old_ip_address,omitempty"` - IsARecord *bool `json:"is_a_record,omitempty"` - HasCNAMERecord *bool `json:"has_cname_record,omitempty"` - HasMXRecordsPresent *bool `json:"has_mx_records_present,omitempty"` - IsValidDomain *bool `json:"is_valid_domain,omitempty"` - IsApexDomain *bool `json:"is_apex_domain,omitempty"` - ShouldBeARecord *bool `json:"should_be_a_record,omitempty"` - IsCNAMEToGithubUserDomain *bool `json:"is_cname_to_github_user_domain,omitempty"` - IsCNAMEToPagesDotGithubDotCom *bool `json:"is_cname_to_pages_dot_github_dot_com,omitempty"` - IsCNAMEToFastly *bool `json:"is_cname_to_fastly,omitempty"` - IsPointedToGithubPagesIP *bool `json:"is_pointed_to_github_pages_ip,omitempty"` - IsNonGithubPagesIPPresent *bool `json:"is_non_github_pages_ip_present,omitempty"` - IsPagesDomain *bool `json:"is_pages_domain,omitempty"` - IsServedByPages *bool `json:"is_served_by_pages,omitempty"` - IsValid *bool `json:"is_valid,omitempty"` - Reason *string `json:"reason,omitempty"` - RespondsToHTTPS *bool `json:"responds_to_https,omitempty"` - EnforcesHTTPS *bool `json:"enforces_https,omitempty"` - HTTPSError *string `json:"https_error,omitempty"` - IsHTTPSEligible *bool `json:"is_https_eligible,omitempty"` - CAAError *string `json:"caa_error,omitempty"` -} - -// PagesHealthCheckResponse represents the response given for the health check of a GitHub Pages site. 
-type PagesHealthCheckResponse struct { - Domain *PagesDomain `json:"domain,omitempty"` - AltDomain *PagesDomain `json:"alt_domain,omitempty"` -} - -// PagesHTTPSCertificate represents the HTTPS Certificate information for a GitHub Pages site. -type PagesHTTPSCertificate struct { - State *string `json:"state,omitempty"` - Description *string `json:"description,omitempty"` - Domains []string `json:"domains,omitempty"` - // GitHub's API doesn't return a standard Timestamp, rather it returns a YYYY-MM-DD string. - ExpiresAt *string `json:"expires_at,omitempty"` -} - -// createPagesRequest is a subset of Pages and is used internally -// by EnablePages to pass only the known fields for the endpoint. -type createPagesRequest struct { - BuildType *string `json:"build_type,omitempty"` - Source *PagesSource `json:"source,omitempty"` -} - -// EnablePages enables GitHub Pages for the named repo. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#create-a-github-pages-site -// -//meta:operation POST /repos/{owner}/{repo}/pages -func (s *RepositoriesService) EnablePages(ctx context.Context, owner, repo string, pages *Pages) (*Pages, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) - - pagesReq := &createPagesRequest{ - BuildType: pages.BuildType, - Source: pages.Source, - } - - req, err := s.client.NewRequest("POST", u, pagesReq) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeEnablePagesAPIPreview) - - enable := new(Pages) - resp, err := s.client.Do(ctx, req, enable) - if err != nil { - return nil, resp, err - } - - return enable, resp, nil -} - -// PagesUpdate sets up parameters needed to update a GitHub Pages site. -type PagesUpdate struct { - // CNAME represents a custom domain for the repository. - // Leaving CNAME empty will remove the custom domain. - CNAME *string `json:"cname"` - // BuildType is optional and can either be "legacy" or "workflow". - // "workflow" - You are using a github workflow to build your pages. - // "legacy" - You are deploying from a branch. - BuildType *string `json:"build_type,omitempty"` - // Source must include the branch name, and may optionally specify the subdirectory "/docs". - // Possible values for Source.Branch are usually "gh-pages", "main", and "master", - // or any other existing branch name. - // Possible values for Source.Path are: "/", and "/docs". - Source *PagesSource `json:"source,omitempty"` - // Public configures access controls for the site. - // If "true", the site will be accessible to anyone on the internet. If "false", - // the site will be accessible to anyone with read access to the repository that - // published the site. - Public *bool `json:"public,omitempty"` - // HTTPSEnforced specifies whether HTTPS should be enforced for the repository. - HTTPSEnforced *bool `json:"https_enforced,omitempty"` -} - -// UpdatePages updates GitHub Pages for the named repo. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#update-information-about-a-github-pages-site -// -//meta:operation PUT /repos/{owner}/{repo}/pages -func (s *RepositoriesService) UpdatePages(ctx context.Context, owner, repo string, opts *PagesUpdate) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) - - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// DisablePages disables GitHub Pages for the named repo. 
-// -// GitHub API docs: https://docs.github.com/rest/pages/pages#delete-a-github-pages-site -// -//meta:operation DELETE /repos/{owner}/{repo}/pages -func (s *RepositoriesService) DisablePages(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeEnablePagesAPIPreview) - - return s.client.Do(ctx, req, nil) -} - -// GetPagesInfo fetches information about a GitHub Pages site. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#get-a-github-pages-site -// -//meta:operation GET /repos/{owner}/{repo}/pages -func (s *RepositoriesService) GetPagesInfo(ctx context.Context, owner, repo string) (*Pages, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - site := new(Pages) - resp, err := s.client.Do(ctx, req, site) - if err != nil { - return nil, resp, err - } - - return site, resp, nil -} - -// ListPagesBuilds lists the builds for a GitHub Pages site. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#list-github-pages-builds -// -//meta:operation GET /repos/{owner}/{repo}/pages/builds -func (s *RepositoriesService) ListPagesBuilds(ctx context.Context, owner, repo string, opts *ListOptions) ([]*PagesBuild, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages/builds", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pages []*PagesBuild - resp, err := s.client.Do(ctx, req, &pages) - if err != nil { - return nil, resp, err - } - - return pages, resp, nil -} - -// GetLatestPagesBuild fetches the latest build information for a GitHub pages site. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#get-latest-pages-build -// -//meta:operation GET /repos/{owner}/{repo}/pages/builds/latest -func (s *RepositoriesService) GetLatestPagesBuild(ctx context.Context, owner, repo string) (*PagesBuild, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages/builds/latest", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - build := new(PagesBuild) - resp, err := s.client.Do(ctx, req, build) - if err != nil { - return nil, resp, err - } - - return build, resp, nil -} - -// GetPageBuild fetches the specific build information for a GitHub pages site. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#get-github-pages-build -// -//meta:operation GET /repos/{owner}/{repo}/pages/builds/{build_id} -func (s *RepositoriesService) GetPageBuild(ctx context.Context, owner, repo string, id int64) (*PagesBuild, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages/builds/%v", owner, repo, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - build := new(PagesBuild) - resp, err := s.client.Do(ctx, req, build) - if err != nil { - return nil, resp, err - } - - return build, resp, nil -} - -// RequestPageBuild requests a build of a GitHub Pages site without needing to push new commit. 
-// -// GitHub API docs: https://docs.github.com/rest/pages/pages#request-a-github-pages-build -// -//meta:operation POST /repos/{owner}/{repo}/pages/builds -func (s *RepositoriesService) RequestPageBuild(ctx context.Context, owner, repo string) (*PagesBuild, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages/builds", owner, repo) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - build := new(PagesBuild) - resp, err := s.client.Do(ctx, req, build) - if err != nil { - return nil, resp, err - } - - return build, resp, nil -} - -// GetPageHealthCheck gets a DNS health check for the CNAME record configured for a repository's GitHub Pages. -// -// GitHub API docs: https://docs.github.com/rest/pages/pages#get-a-dns-health-check-for-github-pages -// -//meta:operation GET /repos/{owner}/{repo}/pages/health -func (s *RepositoriesService) GetPageHealthCheck(ctx context.Context, owner, repo string) (*PagesHealthCheckResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pages/health", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - healthCheckResponse := new(PagesHealthCheckResponse) - resp, err := s.client.Do(ctx, req, healthCheckResponse) - if err != nil { - return nil, resp, err - } - - return healthCheckResponse, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_prereceive_hooks.go b/vendor/github.com/google/go-github/v57/github/repos_prereceive_hooks.go deleted file mode 100644 index e8361383..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_prereceive_hooks.go +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// PreReceiveHook represents a GitHub pre-receive hook for a repository. -type PreReceiveHook struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Enforcement *string `json:"enforcement,omitempty"` - ConfigURL *string `json:"configuration_url,omitempty"` -} - -func (p PreReceiveHook) String() string { - return Stringify(p) -} - -// ListPreReceiveHooks lists all pre-receive hooks for the specified repository. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/repo-pre-receive-hooks#list-pre-receive-hooks-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/pre-receive-hooks -func (s *RepositoriesService) ListPreReceiveHooks(ctx context.Context, owner, repo string, opts *ListOptions) ([]*PreReceiveHook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) - - var hooks []*PreReceiveHook - resp, err := s.client.Do(ctx, req, &hooks) - if err != nil { - return nil, resp, err - } - - return hooks, resp, nil -} - -// GetPreReceiveHook returns a single specified pre-receive hook. 
-// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/repo-pre-receive-hooks#get-a-pre-receive-hook-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/pre-receive-hooks/{pre_receive_hook_id} -func (s *RepositoriesService) GetPreReceiveHook(ctx context.Context, owner, repo string, id int64) (*PreReceiveHook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) - - h := new(PreReceiveHook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// UpdatePreReceiveHook updates a specified pre-receive hook. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/repo-pre-receive-hooks#update-pre-receive-hook-enforcement-for-a-repository -// -//meta:operation PATCH /repos/{owner}/{repo}/pre-receive-hooks/{pre_receive_hook_id} -func (s *RepositoriesService) UpdatePreReceiveHook(ctx context.Context, owner, repo string, id int64, hook *PreReceiveHook) (*PreReceiveHook, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("PATCH", u, hook) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) - - h := new(PreReceiveHook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// DeletePreReceiveHook deletes a specified pre-receive hook. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/repo-pre-receive-hooks#remove-pre-receive-hook-enforcement-for-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/pre-receive-hooks/{pre_receive_hook_id} -func (s *RepositoriesService) DeletePreReceiveHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_projects.go b/vendor/github.com/google/go-github/v57/github/repos_projects.go deleted file mode 100644 index 9269d4e9..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_projects.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ProjectListOptions specifies the optional parameters to the -// OrganizationsService.ListProjects and RepositoriesService.ListProjects methods. -type ProjectListOptions struct { - // Indicates the state of the projects to return. Can be either open, closed, or all. Default: open - State string `url:"state,omitempty"` - - ListOptions -} - -// ListProjects lists the projects for a repo. 
-// -// GitHub API docs: https://docs.github.com/rest/projects/projects#list-repository-projects -// -//meta:operation GET /repos/{owner}/{repo}/projects -func (s *RepositoriesService) ListProjects(ctx context.Context, owner, repo string, opts *ProjectListOptions) ([]*Project, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/projects", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - var projects []*Project - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// CreateProject creates a GitHub Project for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#create-a-repository-project -// -//meta:operation POST /repos/{owner}/{repo}/projects -func (s *RepositoriesService) CreateProject(ctx context.Context, owner, repo string, opts *ProjectOptions) (*Project, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/projects", owner, repo) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - project := &Project{} - resp, err := s.client.Do(ctx, req, project) - if err != nil { - return nil, resp, err - } - - return project, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_releases.go b/vendor/github.com/google/go-github/v57/github/repos_releases.go deleted file mode 100644 index 7231db6d..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_releases.go +++ /dev/null @@ -1,475 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "errors" - "fmt" - "io" - "mime" - "net/http" - "os" - "path/filepath" - "strings" -) - -// RepositoryRelease represents a GitHub release in a repository. -type RepositoryRelease struct { - TagName *string `json:"tag_name,omitempty"` - TargetCommitish *string `json:"target_commitish,omitempty"` - Name *string `json:"name,omitempty"` - Body *string `json:"body,omitempty"` - Draft *bool `json:"draft,omitempty"` - Prerelease *bool `json:"prerelease,omitempty"` - // MakeLatest can be one of: "true", "false", or "legacy". 
- MakeLatest *string `json:"make_latest,omitempty"` - DiscussionCategoryName *string `json:"discussion_category_name,omitempty"` - - // The following fields are not used in EditRelease: - GenerateReleaseNotes *bool `json:"generate_release_notes,omitempty"` - - // The following fields are not used in CreateRelease or EditRelease: - ID *int64 `json:"id,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - PublishedAt *Timestamp `json:"published_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - AssetsURL *string `json:"assets_url,omitempty"` - Assets []*ReleaseAsset `json:"assets,omitempty"` - UploadURL *string `json:"upload_url,omitempty"` - ZipballURL *string `json:"zipball_url,omitempty"` - TarballURL *string `json:"tarball_url,omitempty"` - Author *User `json:"author,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (r RepositoryRelease) String() string { - return Stringify(r) -} - -// RepositoryReleaseNotes represents a GitHub-generated release notes. -type RepositoryReleaseNotes struct { - Name string `json:"name"` - Body string `json:"body"` -} - -// GenerateNotesOptions represents the options to generate release notes. -type GenerateNotesOptions struct { - TagName string `json:"tag_name"` - PreviousTagName *string `json:"previous_tag_name,omitempty"` - TargetCommitish *string `json:"target_commitish,omitempty"` -} - -// ReleaseAsset represents a GitHub release asset in a repository. -type ReleaseAsset struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - Label *string `json:"label,omitempty"` - State *string `json:"state,omitempty"` - ContentType *string `json:"content_type,omitempty"` - Size *int `json:"size,omitempty"` - DownloadCount *int `json:"download_count,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - BrowserDownloadURL *string `json:"browser_download_url,omitempty"` - Uploader *User `json:"uploader,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (r ReleaseAsset) String() string { - return Stringify(r) -} - -// ListReleases lists the releases for a repository. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#list-releases -// -//meta:operation GET /repos/{owner}/{repo}/releases -func (s *RepositoriesService) ListReleases(ctx context.Context, owner, repo string, opts *ListOptions) ([]*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var releases []*RepositoryRelease - resp, err := s.client.Do(ctx, req, &releases) - if err != nil { - return nil, resp, err - } - return releases, resp, nil -} - -// GetRelease fetches a single release. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#get-a-release -// -//meta:operation GET /repos/{owner}/{repo}/releases/{release_id} -func (s *RepositoriesService) GetRelease(ctx context.Context, owner, repo string, id int64) (*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) - return s.getSingleRelease(ctx, u) -} - -// GetLatestRelease fetches the latest published release for the repository. 
-// -// GitHub API docs: https://docs.github.com/rest/releases/releases#get-the-latest-release -// -//meta:operation GET /repos/{owner}/{repo}/releases/latest -func (s *RepositoriesService) GetLatestRelease(ctx context.Context, owner, repo string) (*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/latest", owner, repo) - return s.getSingleRelease(ctx, u) -} - -// GetReleaseByTag fetches a release with the specified tag. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#get-a-release-by-tag-name -// -//meta:operation GET /repos/{owner}/{repo}/releases/tags/{tag} -func (s *RepositoriesService) GetReleaseByTag(ctx context.Context, owner, repo, tag string) (*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/tags/%s", owner, repo, tag) - return s.getSingleRelease(ctx, u) -} - -// GenerateReleaseNotes generates the release notes for the given tag. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#generate-release-notes-content-for-a-release -// -//meta:operation POST /repos/{owner}/{repo}/releases/generate-notes -func (s *RepositoriesService) GenerateReleaseNotes(ctx context.Context, owner, repo string, opts *GenerateNotesOptions) (*RepositoryReleaseNotes, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/generate-notes", owner, repo) - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - r := new(RepositoryReleaseNotes) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -func (s *RepositoriesService) getSingleRelease(ctx context.Context, url string) (*RepositoryRelease, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - release := new(RepositoryRelease) - resp, err := s.client.Do(ctx, req, release) - if err != nil { - return nil, resp, err - } - return release, resp, nil -} - -// repositoryReleaseRequest is a subset of RepositoryRelease and -// is used internally by CreateRelease and EditRelease to pass -// only the known fields for these endpoints. -// -// See https://github.com/google/go-github/issues/992 for more -// information. -type repositoryReleaseRequest struct { - TagName *string `json:"tag_name,omitempty"` - TargetCommitish *string `json:"target_commitish,omitempty"` - Name *string `json:"name,omitempty"` - Body *string `json:"body,omitempty"` - Draft *bool `json:"draft,omitempty"` - Prerelease *bool `json:"prerelease,omitempty"` - MakeLatest *string `json:"make_latest,omitempty"` - GenerateReleaseNotes *bool `json:"generate_release_notes,omitempty"` - DiscussionCategoryName *string `json:"discussion_category_name,omitempty"` -} - -// CreateRelease adds a new release for a repository. -// -// Note that only a subset of the release fields are used. -// See RepositoryRelease for more information. 
-// -// GitHub API docs: https://docs.github.com/rest/releases/releases#create-a-release -// -//meta:operation POST /repos/{owner}/{repo}/releases -func (s *RepositoriesService) CreateRelease(ctx context.Context, owner, repo string, release *RepositoryRelease) (*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases", owner, repo) - - releaseReq := &repositoryReleaseRequest{ - TagName: release.TagName, - TargetCommitish: release.TargetCommitish, - Name: release.Name, - Body: release.Body, - Draft: release.Draft, - Prerelease: release.Prerelease, - MakeLatest: release.MakeLatest, - DiscussionCategoryName: release.DiscussionCategoryName, - GenerateReleaseNotes: release.GenerateReleaseNotes, - } - - req, err := s.client.NewRequest("POST", u, releaseReq) - if err != nil { - return nil, nil, err - } - - r := new(RepositoryRelease) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - return r, resp, nil -} - -// EditRelease edits a repository release. -// -// Note that only a subset of the release fields are used. -// See RepositoryRelease for more information. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#update-a-release -// -//meta:operation PATCH /repos/{owner}/{repo}/releases/{release_id} -func (s *RepositoriesService) EditRelease(ctx context.Context, owner, repo string, id int64, release *RepositoryRelease) (*RepositoryRelease, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) - - releaseReq := &repositoryReleaseRequest{ - TagName: release.TagName, - TargetCommitish: release.TargetCommitish, - Name: release.Name, - Body: release.Body, - Draft: release.Draft, - Prerelease: release.Prerelease, - MakeLatest: release.MakeLatest, - DiscussionCategoryName: release.DiscussionCategoryName, - } - - req, err := s.client.NewRequest("PATCH", u, releaseReq) - if err != nil { - return nil, nil, err - } - - r := new(RepositoryRelease) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - return r, resp, nil -} - -// DeleteRelease delete a single release from a repository. -// -// GitHub API docs: https://docs.github.com/rest/releases/releases#delete-a-release -// -//meta:operation DELETE /repos/{owner}/{repo}/releases/{release_id} -func (s *RepositoriesService) DeleteRelease(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListReleaseAssets lists the release's assets. -// -// GitHub API docs: https://docs.github.com/rest/releases/assets#list-release-assets -// -//meta:operation GET /repos/{owner}/{repo}/releases/{release_id}/assets -func (s *RepositoriesService) ListReleaseAssets(ctx context.Context, owner, repo string, id int64, opts *ListOptions) ([]*ReleaseAsset, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/%d/assets", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var assets []*ReleaseAsset - resp, err := s.client.Do(ctx, req, &assets) - if err != nil { - return nil, resp, err - } - return assets, resp, nil -} - -// GetReleaseAsset fetches a single release asset. 
-// -// GitHub API docs: https://docs.github.com/rest/releases/assets#get-a-release-asset -// -//meta:operation GET /repos/{owner}/{repo}/releases/assets/{asset_id} -func (s *RepositoriesService) GetReleaseAsset(ctx context.Context, owner, repo string, id int64) (*ReleaseAsset, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - asset := new(ReleaseAsset) - resp, err := s.client.Do(ctx, req, asset) - if err != nil { - return nil, resp, err - } - return asset, resp, nil -} - -// DownloadReleaseAsset downloads a release asset or returns a redirect URL. -// -// DownloadReleaseAsset returns an io.ReadCloser that reads the contents of the -// specified release asset. It is the caller's responsibility to close the ReadCloser. -// If a redirect is returned, the redirect URL will be returned as a string instead -// of the io.ReadCloser. Exactly one of rc and redirectURL will be zero. -// -// followRedirectsClient can be passed to download the asset from a redirected -// location. Passing http.DefaultClient is recommended unless special circumstances -// exist, but it's possible to pass any http.Client. If nil is passed the -// redirectURL will be returned instead. -// -// GitHub API docs: https://docs.github.com/rest/releases/assets#get-a-release-asset -// -//meta:operation GET /repos/{owner}/{repo}/releases/assets/{asset_id} -func (s *RepositoriesService) DownloadReleaseAsset(ctx context.Context, owner, repo string, id int64, followRedirectsClient *http.Client) (rc io.ReadCloser, redirectURL string, err error) { - u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, "", err - } - req.Header.Set("Accept", defaultMediaType) - - s.client.clientMu.Lock() - defer s.client.clientMu.Unlock() - - var loc string - saveRedirect := s.client.client.CheckRedirect - s.client.client.CheckRedirect = func(req *http.Request, via []*http.Request) error { - loc = req.URL.String() - return errors.New("disable redirect") - } - defer func() { s.client.client.CheckRedirect = saveRedirect }() - - req = withContext(ctx, req) - resp, err := s.client.client.Do(req) - if err != nil { - if !strings.Contains(err.Error(), "disable redirect") { - return nil, "", err - } - if followRedirectsClient != nil { - rc, err := s.downloadReleaseAssetFromURL(ctx, followRedirectsClient, loc) - return rc, "", err - } - return nil, loc, nil // Intentionally return no error with valid redirect URL. - } - - if err := CheckResponse(resp); err != nil { - _ = resp.Body.Close() - return nil, "", err - } - - return resp.Body, "", nil -} - -func (s *RepositoriesService) downloadReleaseAssetFromURL(ctx context.Context, followRedirectsClient *http.Client, url string) (rc io.ReadCloser, err error) { - req, err := http.NewRequest("GET", url, nil) - if err != nil { - return nil, err - } - req = withContext(ctx, req) - req.Header.Set("Accept", "*/*") - resp, err := followRedirectsClient.Do(req) - if err != nil { - return nil, err - } - if err := CheckResponse(resp); err != nil { - _ = resp.Body.Close() - return nil, err - } - return resp.Body, nil -} - -// EditReleaseAsset edits a repository release asset. 
-// -// GitHub API docs: https://docs.github.com/rest/releases/assets#update-a-release-asset -// -//meta:operation PATCH /repos/{owner}/{repo}/releases/assets/{asset_id} -func (s *RepositoriesService) EditReleaseAsset(ctx context.Context, owner, repo string, id int64, release *ReleaseAsset) (*ReleaseAsset, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) - - req, err := s.client.NewRequest("PATCH", u, release) - if err != nil { - return nil, nil, err - } - - asset := new(ReleaseAsset) - resp, err := s.client.Do(ctx, req, asset) - if err != nil { - return nil, resp, err - } - return asset, resp, nil -} - -// DeleteReleaseAsset delete a single release asset from a repository. -// -// GitHub API docs: https://docs.github.com/rest/releases/assets#delete-a-release-asset -// -//meta:operation DELETE /repos/{owner}/{repo}/releases/assets/{asset_id} -func (s *RepositoriesService) DeleteReleaseAsset(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// UploadReleaseAsset creates an asset by uploading a file into a release repository. -// To upload assets that cannot be represented by an os.File, call NewUploadRequest directly. -// -// GitHub API docs: https://docs.github.com/rest/releases/assets#upload-a-release-asset -// -//meta:operation POST /repos/{owner}/{repo}/releases/{release_id}/assets -func (s *RepositoriesService) UploadReleaseAsset(ctx context.Context, owner, repo string, id int64, opts *UploadOptions, file *os.File) (*ReleaseAsset, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/releases/%d/assets", owner, repo, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - stat, err := file.Stat() - if err != nil { - return nil, nil, err - } - if stat.IsDir() { - return nil, nil, errors.New("the asset to upload can't be a directory") - } - - mediaType := mime.TypeByExtension(filepath.Ext(file.Name())) - if opts.MediaType != "" { - mediaType = opts.MediaType - } - - req, err := s.client.NewUploadRequest(u, file, stat.Size(), mediaType) - if err != nil { - return nil, nil, err - } - - asset := new(ReleaseAsset) - resp, err := s.client.Do(ctx, req, asset) - if err != nil { - return nil, resp, err - } - return asset, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_rules.go b/vendor/github.com/google/go-github/v57/github/repos_rules.go deleted file mode 100644 index 479806c2..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_rules.go +++ /dev/null @@ -1,511 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" -) - -// BypassActor represents the bypass actors from a ruleset. -type BypassActor struct { - ActorID *int64 `json:"actor_id,omitempty"` - // Possible values for ActorType are: RepositoryRole, Team, Integration, OrganizationAdmin - ActorType *string `json:"actor_type,omitempty"` - // Possible values for BypassMode are: always, pull_request - BypassMode *string `json:"bypass_mode,omitempty"` -} - -// RulesetLink represents a single link object from GitHub ruleset request _links. 
-type RulesetLink struct { - HRef *string `json:"href,omitempty"` -} - -// RulesetLinks represents the "_links" object in a Ruleset. -type RulesetLinks struct { - Self *RulesetLink `json:"self,omitempty"` -} - -// RulesetRefConditionParameters represents the conditions object for ref_names. -type RulesetRefConditionParameters struct { - Include []string `json:"include"` - Exclude []string `json:"exclude"` -} - -// RulesetRepositoryNamesConditionParameters represents the conditions object for repository_names. -type RulesetRepositoryNamesConditionParameters struct { - Include []string `json:"include"` - Exclude []string `json:"exclude"` - Protected *bool `json:"protected,omitempty"` -} - -// RulesetRepositoryIDsConditionParameters represents the conditions object for repository_ids. -type RulesetRepositoryIDsConditionParameters struct { - RepositoryIDs []int64 `json:"repository_ids,omitempty"` -} - -// RulesetConditions represents the conditions object in a ruleset. -// Set either RepositoryName or RepositoryID, not both. -type RulesetConditions struct { - RefName *RulesetRefConditionParameters `json:"ref_name,omitempty"` - RepositoryName *RulesetRepositoryNamesConditionParameters `json:"repository_name,omitempty"` - RepositoryID *RulesetRepositoryIDsConditionParameters `json:"repository_id,omitempty"` -} - -// RulePatternParameters represents the rule pattern parameters. -type RulePatternParameters struct { - Name *string `json:"name,omitempty"` - // If Negate is true, the rule will fail if the pattern matches. - Negate *bool `json:"negate,omitempty"` - // Possible values for Operator are: starts_with, ends_with, contains, regex - Operator string `json:"operator"` - Pattern string `json:"pattern"` -} - -// UpdateAllowsFetchAndMergeRuleParameters represents the update rule parameters. -type UpdateAllowsFetchAndMergeRuleParameters struct { - UpdateAllowsFetchAndMerge bool `json:"update_allows_fetch_and_merge"` -} - -// RequiredDeploymentEnvironmentsRuleParameters represents the required_deployments rule parameters. -type RequiredDeploymentEnvironmentsRuleParameters struct { - RequiredDeploymentEnvironments []string `json:"required_deployment_environments"` -} - -// PullRequestRuleParameters represents the pull_request rule parameters. -type PullRequestRuleParameters struct { - DismissStaleReviewsOnPush bool `json:"dismiss_stale_reviews_on_push"` - RequireCodeOwnerReview bool `json:"require_code_owner_review"` - RequireLastPushApproval bool `json:"require_last_push_approval"` - RequiredApprovingReviewCount int `json:"required_approving_review_count"` - RequiredReviewThreadResolution bool `json:"required_review_thread_resolution"` -} - -// RuleRequiredStatusChecks represents the RequiredStatusChecks for the RequiredStatusChecksRuleParameters object. -type RuleRequiredStatusChecks struct { - Context string `json:"context"` - IntegrationID *int64 `json:"integration_id,omitempty"` -} - -// RequiredStatusChecksRuleParameters represents the required_status_checks rule parameters. -type RequiredStatusChecksRuleParameters struct { - RequiredStatusChecks []RuleRequiredStatusChecks `json:"required_status_checks"` - StrictRequiredStatusChecksPolicy bool `json:"strict_required_status_checks_policy"` -} - -// RuleRequiredWorkflow represents the Workflow for the RequireWorkflowsRuleParameters object. 
-type RuleRequiredWorkflow struct {
-	Path         string  `json:"path"`
-	Ref          *string `json:"ref,omitempty"`
-	RepositoryID *int64  `json:"repository_id,omitempty"`
-	Sha          *string `json:"sha,omitempty"`
-}
-
-// RequiredWorkflowsRuleParameters represents the workflows rule parameters.
-type RequiredWorkflowsRuleParameters struct {
-	RequiredWorkflows []*RuleRequiredWorkflow `json:"workflows"`
-}
-
-// RepositoryRule represents a GitHub Rule.
-type RepositoryRule struct {
-	Type       string           `json:"type"`
-	Parameters *json.RawMessage `json:"parameters,omitempty"`
-}
-
-// UnmarshalJSON implements the json.Unmarshaler interface.
-// This helps us handle the fact that RepositoryRule parameter field can be of numerous types.
-func (r *RepositoryRule) UnmarshalJSON(data []byte) error {
-	type rule RepositoryRule
-	var RepositoryRule rule
-	if err := json.Unmarshal(data, &RepositoryRule); err != nil {
-		return err
-	}
-
-	r.Type = RepositoryRule.Type
-
-	switch RepositoryRule.Type {
-	case "creation", "deletion", "required_linear_history", "required_signatures", "non_fast_forward":
-		r.Parameters = nil
-	case "update":
-		if RepositoryRule.Parameters == nil {
-			r.Parameters = nil
-			return nil
-		}
-		params := UpdateAllowsFetchAndMergeRuleParameters{}
-		if err := json.Unmarshal(*RepositoryRule.Parameters, &params); err != nil {
-			return err
-		}
-
-		bytes, _ := json.Marshal(params)
-		rawParams := json.RawMessage(bytes)
-
-		r.Parameters = &rawParams
-
-	case "required_deployments":
-		params := RequiredDeploymentEnvironmentsRuleParameters{}
-		if err := json.Unmarshal(*RepositoryRule.Parameters, &params); err != nil {
-			return err
-		}
-
-		bytes, _ := json.Marshal(params)
-		rawParams := json.RawMessage(bytes)
-
-		r.Parameters = &rawParams
-	case "commit_message_pattern", "commit_author_email_pattern", "committer_email_pattern", "branch_name_pattern", "tag_name_pattern":
-		params := RulePatternParameters{}
-		if err := json.Unmarshal(*RepositoryRule.Parameters, &params); err != nil {
-			return err
-		}
-
-		bytes, _ := json.Marshal(params)
-		rawParams := json.RawMessage(bytes)
-
-		r.Parameters = &rawParams
-	case "pull_request":
-		params := PullRequestRuleParameters{}
-		if err := json.Unmarshal(*RepositoryRule.Parameters, &params); err != nil {
-			return err
-		}
-
-		bytes, _ := json.Marshal(params)
-		rawParams := json.RawMessage(bytes)
-
-		r.Parameters = &rawParams
-	case "required_status_checks":
-		params := RequiredStatusChecksRuleParameters{}
-		if err := json.Unmarshal(*RepositoryRule.Parameters, &params); err != nil {
-			return err
-		}
-
-		bytes, _ := json.Marshal(params)
-		rawParams := json.RawMessage(bytes)
-
-		r.Parameters = &rawParams
-	case "workflows":
-		params := RequiredWorkflowsRuleParameters{}
-		if err := json.Unmarshal(*RepositoryRule.Parameters, &params); err != nil {
-			return err
-		}
-
-		bytes, _ := json.Marshal(params)
-		rawParams := json.RawMessage(bytes)
-
-		r.Parameters = &rawParams
-	default:
-		r.Type = ""
-		r.Parameters = nil
-		return fmt.Errorf("RepositoryRule.Type %T is not yet implemented, unable to unmarshal", RepositoryRule.Type)
-	}
-
-	return nil
-}
-
-// NewCreationRule creates a rule to only allow users with bypass permission to create matching refs.
-func NewCreationRule() (rule *RepositoryRule) {
-	return &RepositoryRule{
-		Type: "creation",
-	}
-}
-
-// NewUpdateRule creates a rule to only allow users with bypass permission to update matching refs.
-func NewUpdateRule(params *UpdateAllowsFetchAndMergeRuleParameters) (rule *RepositoryRule) { - if params != nil { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "update", - Parameters: &rawParams, - } - } - return &RepositoryRule{ - Type: "update", - } -} - -// NewDeletionRule creates a rule to only allow users with bypass permissions to delete matching refs. -func NewDeletionRule() (rule *RepositoryRule) { - return &RepositoryRule{ - Type: "deletion", - } -} - -// NewRequiredLinearHistoryRule creates a rule to prevent merge commits from being pushed to matching branches. -func NewRequiredLinearHistoryRule() (rule *RepositoryRule) { - return &RepositoryRule{ - Type: "required_linear_history", - } -} - -// NewRequiredDeploymentsRule creates a rule to require environments to be successfully deployed before they can be merged into the matching branches. -func NewRequiredDeploymentsRule(params *RequiredDeploymentEnvironmentsRuleParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "required_deployments", - Parameters: &rawParams, - } -} - -// NewRequiredSignaturesRule creates a rule a to require commits pushed to matching branches to have verified signatures. -func NewRequiredSignaturesRule() (rule *RepositoryRule) { - return &RepositoryRule{ - Type: "required_signatures", - } -} - -// NewPullRequestRule creates a rule to require all commits be made to a non-target branch and submitted via a pull request before they can be merged. -func NewPullRequestRule(params *PullRequestRuleParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "pull_request", - Parameters: &rawParams, - } -} - -// NewRequiredStatusChecksRule creates a rule to require which status checks must pass before branches can be merged into a branch rule. -func NewRequiredStatusChecksRule(params *RequiredStatusChecksRuleParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "required_status_checks", - Parameters: &rawParams, - } -} - -// NewNonFastForwardRule creates a rule as part to prevent users with push access from force pushing to matching branches. -func NewNonFastForwardRule() (rule *RepositoryRule) { - return &RepositoryRule{ - Type: "non_fast_forward", - } -} - -// NewCommitMessagePatternRule creates a rule to restrict commit message patterns being pushed to matching branches. -func NewCommitMessagePatternRule(params *RulePatternParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "commit_message_pattern", - Parameters: &rawParams, - } -} - -// NewCommitAuthorEmailPatternRule creates a rule to restrict commits with author email patterns being merged into matching branches. -func NewCommitAuthorEmailPatternRule(params *RulePatternParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "commit_author_email_pattern", - Parameters: &rawParams, - } -} - -// NewCommitterEmailPatternRule creates a rule to restrict commits with committer email patterns being merged into matching branches. 
-func NewCommitterEmailPatternRule(params *RulePatternParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "committer_email_pattern", - Parameters: &rawParams, - } -} - -// NewBranchNamePatternRule creates a rule to restrict branch patterns from being merged into matching branches. -func NewBranchNamePatternRule(params *RulePatternParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "branch_name_pattern", - Parameters: &rawParams, - } -} - -// NewTagNamePatternRule creates a rule to restrict tag patterns contained in non-target branches from being merged into matching branches. -func NewTagNamePatternRule(params *RulePatternParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "tag_name_pattern", - Parameters: &rawParams, - } -} - -// NewRequiredWorkflowsRule creates a rule to require which status checks must pass before branches can be merged into a branch rule. -func NewRequiredWorkflowsRule(params *RequiredWorkflowsRuleParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) - - rawParams := json.RawMessage(bytes) - - return &RepositoryRule{ - Type: "workflows", - Parameters: &rawParams, - } -} - -// Ruleset represents a GitHub ruleset object. -type Ruleset struct { - ID *int64 `json:"id,omitempty"` - Name string `json:"name"` - // Possible values for Target are branch, tag - Target *string `json:"target,omitempty"` - // Possible values for SourceType are: Repository, Organization - SourceType *string `json:"source_type,omitempty"` - Source string `json:"source"` - // Possible values for Enforcement are: disabled, active, evaluate - Enforcement string `json:"enforcement"` - BypassActors []*BypassActor `json:"bypass_actors,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Links *RulesetLinks `json:"_links,omitempty"` - Conditions *RulesetConditions `json:"conditions,omitempty"` - Rules []*RepositoryRule `json:"rules,omitempty"` -} - -// GetRulesForBranch gets all the rules that apply to the specified branch. -// -// GitHub API docs: https://docs.github.com/rest/repos/rules#get-rules-for-a-branch -// -//meta:operation GET /repos/{owner}/{repo}/rules/branches/{branch} -func (s *RepositoriesService) GetRulesForBranch(ctx context.Context, owner, repo, branch string) ([]*RepositoryRule, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rules/branches/%v", owner, repo, branch) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var rules []*RepositoryRule - resp, err := s.client.Do(ctx, req, &rules) - if err != nil { - return nil, resp, err - } - - return rules, resp, nil -} - -// GetAllRulesets gets all the rules that apply to the specified repository. -// If includesParents is true, rulesets configured at the organization level that apply to the repository will be returned. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/rules#get-all-repository-rulesets -// -//meta:operation GET /repos/{owner}/{repo}/rulesets -func (s *RepositoriesService) GetAllRulesets(ctx context.Context, owner, repo string, includesParents bool) ([]*Ruleset, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets?includes_parents=%v", owner, repo, includesParents) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var ruleset []*Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// CreateRuleset creates a ruleset for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/rules#create-a-repository-ruleset -// -//meta:operation POST /repos/{owner}/{repo}/rulesets -func (s *RepositoriesService) CreateRuleset(ctx context.Context, owner, repo string, rs *Ruleset) (*Ruleset, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets", owner, repo) - - req, err := s.client.NewRequest("POST", u, rs) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// GetRuleset gets a ruleset for the specified repository. -// If includesParents is true, rulesets configured at the organization level that apply to the repository will be returned. -// -// GitHub API docs: https://docs.github.com/rest/repos/rules#get-a-repository-ruleset -// -//meta:operation GET /repos/{owner}/{repo}/rulesets/{ruleset_id} -func (s *RepositoriesService) GetRuleset(ctx context.Context, owner, repo string, rulesetID int64, includesParents bool) (*Ruleset, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets/%v?includes_parents=%v", owner, repo, rulesetID, includesParents) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// UpdateRuleset updates a ruleset for the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/rules#update-a-repository-ruleset -// -//meta:operation PUT /repos/{owner}/{repo}/rulesets/{ruleset_id} -func (s *RepositoriesService) UpdateRuleset(ctx context.Context, owner, repo string, rulesetID int64, rs *Ruleset) (*Ruleset, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets/%v", owner, repo, rulesetID) - - req, err := s.client.NewRequest("PUT", u, rs) - if err != nil { - return nil, nil, err - } - - var ruleset *Ruleset - resp, err := s.client.Do(ctx, req, &ruleset) - if err != nil { - return nil, resp, err - } - - return ruleset, resp, nil -} - -// DeleteRuleset deletes a ruleset for the specified repository. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/rules#delete-a-repository-ruleset -// -//meta:operation DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id} -func (s *RepositoriesService) DeleteRuleset(ctx context.Context, owner, repo string, rulesetID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets/%v", owner, repo, rulesetID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_stats.go b/vendor/github.com/google/go-github/v57/github/repos_stats.go deleted file mode 100644 index 898693f7..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_stats.go +++ /dev/null @@ -1,242 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "time" -) - -// ContributorStats represents a contributor to a repository and their -// weekly contributions to a given repo. -type ContributorStats struct { - Author *Contributor `json:"author,omitempty"` - Total *int `json:"total,omitempty"` - Weeks []*WeeklyStats `json:"weeks,omitempty"` -} - -func (c ContributorStats) String() string { - return Stringify(c) -} - -// WeeklyStats represents the number of additions, deletions and commits -// a Contributor made in a given week. -type WeeklyStats struct { - Week *Timestamp `json:"w,omitempty"` - Additions *int `json:"a,omitempty"` - Deletions *int `json:"d,omitempty"` - Commits *int `json:"c,omitempty"` -} - -func (w WeeklyStats) String() string { - return Stringify(w) -} - -// ListContributorsStats gets a repo's contributor list with additions, -// deletions and commit counts. -// -// If this is the first time these statistics are requested for the given -// repository, this method will return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it is now computing the requested statistics. A follow up request, after a -// delay of a second or so, should result in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/metrics/statistics#get-all-contributor-commit-activity -// -//meta:operation GET /repos/{owner}/{repo}/stats/contributors -func (s *RepositoriesService) ListContributorsStats(ctx context.Context, owner, repo string) ([]*ContributorStats, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/stats/contributors", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var contributorStats []*ContributorStats - resp, err := s.client.Do(ctx, req, &contributorStats) - if err != nil { - return nil, resp, err - } - - return contributorStats, resp, nil -} - -// WeeklyCommitActivity represents the weekly commit activity for a repository. -// The days array is a group of commits per day, starting on Sunday. -type WeeklyCommitActivity struct { - Days []int `json:"days,omitempty"` - Total *int `json:"total,omitempty"` - Week *Timestamp `json:"week,omitempty"` -} - -func (w WeeklyCommitActivity) String() string { - return Stringify(w) -} - -// ListCommitActivity returns the last year of commit activity -// grouped by week. The days array is a group of commits per day, -// starting on Sunday. 
-// -// If this is the first time these statistics are requested for the given -// repository, this method will return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it is now computing the requested statistics. A follow up request, after a -// delay of a second or so, should result in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/metrics/statistics#get-the-last-year-of-commit-activity -// -//meta:operation GET /repos/{owner}/{repo}/stats/commit_activity -func (s *RepositoriesService) ListCommitActivity(ctx context.Context, owner, repo string) ([]*WeeklyCommitActivity, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/stats/commit_activity", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var weeklyCommitActivity []*WeeklyCommitActivity - resp, err := s.client.Do(ctx, req, &weeklyCommitActivity) - if err != nil { - return nil, resp, err - } - - return weeklyCommitActivity, resp, nil -} - -// ListCodeFrequency returns a weekly aggregate of the number of additions and -// deletions pushed to a repository. Returned WeeklyStats will contain -// additions and deletions, but not total commits. -// -// If this is the first time these statistics are requested for the given -// repository, this method will return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it is now computing the requested statistics. A follow up request, after a -// delay of a second or so, should result in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/metrics/statistics#get-the-weekly-commit-activity -// -//meta:operation GET /repos/{owner}/{repo}/stats/code_frequency -func (s *RepositoriesService) ListCodeFrequency(ctx context.Context, owner, repo string) ([]*WeeklyStats, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/stats/code_frequency", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var weeks [][]int - resp, err := s.client.Do(ctx, req, &weeks) - if err != nil { - return nil, resp, err - } - - // convert int slices into WeeklyStats - var stats []*WeeklyStats - for _, week := range weeks { - if len(week) != 3 { - continue - } - stat := &WeeklyStats{ - Week: &Timestamp{time.Unix(int64(week[0]), 0)}, - Additions: Int(week[1]), - Deletions: Int(week[2]), - } - stats = append(stats, stat) - } - - return stats, resp, nil -} - -// RepositoryParticipation is the number of commits by everyone -// who has contributed to the repository (including the owner) -// as well as the number of commits by the owner themself. -type RepositoryParticipation struct { - All []int `json:"all,omitempty"` - Owner []int `json:"owner,omitempty"` -} - -func (r RepositoryParticipation) String() string { - return Stringify(r) -} - -// ListParticipation returns the total commit counts for the 'owner' -// and total commit counts in 'all'. 'all' is everyone combined, -// including the 'owner' in the last 52 weeks. If you’d like to get -// the commit counts for non-owners, you can subtract 'all' from 'owner'. -// -// The array order is oldest week (index 0) to most recent week. -// -// If this is the first time these statistics are requested for the given -// repository, this method will return an *AcceptedError and a status code of -// 202. 
This is because this is the status that GitHub returns to signify that -// it is now computing the requested statistics. A follow up request, after a -// delay of a second or so, should result in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/metrics/statistics#get-the-weekly-commit-count -// -//meta:operation GET /repos/{owner}/{repo}/stats/participation -func (s *RepositoriesService) ListParticipation(ctx context.Context, owner, repo string) (*RepositoryParticipation, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/stats/participation", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - participation := new(RepositoryParticipation) - resp, err := s.client.Do(ctx, req, participation) - if err != nil { - return nil, resp, err - } - - return participation, resp, nil -} - -// PunchCard represents the number of commits made during a given hour of a -// day of the week. -type PunchCard struct { - Day *int // Day of the week (0-6: =Sunday - Saturday). - Hour *int // Hour of day (0-23). - Commits *int // Number of commits. -} - -// ListPunchCard returns the number of commits per hour in each day. -// -// If this is the first time these statistics are requested for the given -// repository, this method will return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it is now computing the requested statistics. A follow up request, after a -// delay of a second or so, should result in a successful request. -// -// GitHub API docs: https://docs.github.com/rest/metrics/statistics#get-the-hourly-commit-count-for-each-day -// -//meta:operation GET /repos/{owner}/{repo}/stats/punch_card -func (s *RepositoriesService) ListPunchCard(ctx context.Context, owner, repo string) ([]*PunchCard, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/stats/punch_card", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var results [][]int - resp, err := s.client.Do(ctx, req, &results) - if err != nil { - return nil, resp, err - } - - // convert int slices into Punchcards - var cards []*PunchCard - for _, result := range results { - if len(result) != 3 { - continue - } - card := &PunchCard{ - Day: Int(result[0]), - Hour: Int(result[1]), - Commits: Int(result[2]), - } - cards = append(cards, card) - } - - return cards, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/repos_tags.go b/vendor/github.com/google/go-github/v57/github/repos_tags.go deleted file mode 100644 index 93164dd1..00000000 --- a/vendor/github.com/google/go-github/v57/github/repos_tags.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// TagProtection represents a repository tag protection. -type TagProtection struct { - ID *int64 `json:"id"` - Pattern *string `json:"pattern"` -} - -// tagProtectionRequest represents a request to create tag protection. -type tagProtectionRequest struct { - // An optional glob pattern to match against when enforcing tag protection. - Pattern string `json:"pattern"` -} - -// ListTagProtection lists tag protection of the specified repository. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/tags#list-tag-protection-states-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/tags/protection -func (s *RepositoriesService) ListTagProtection(ctx context.Context, owner, repo string) ([]*TagProtection, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/tags/protection", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var tagProtections []*TagProtection - resp, err := s.client.Do(ctx, req, &tagProtections) - if err != nil { - return nil, resp, err - } - - return tagProtections, resp, nil -} - -// CreateTagProtection creates the tag protection of the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/tags#create-a-tag-protection-state-for-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/tags/protection -func (s *RepositoriesService) CreateTagProtection(ctx context.Context, owner, repo, pattern string) (*TagProtection, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/tags/protection", owner, repo) - r := &tagProtectionRequest{Pattern: pattern} - req, err := s.client.NewRequest("POST", u, r) - if err != nil { - return nil, nil, err - } - - tagProtection := new(TagProtection) - resp, err := s.client.Do(ctx, req, tagProtection) - if err != nil { - return nil, resp, err - } - - return tagProtection, resp, nil -} - -// DeleteTagProtection deletes a tag protection from the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/tags#delete-a-tag-protection-state-for-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id} -func (s *RepositoriesService) DeleteTagProtection(ctx context.Context, owner, repo string, tagProtectionID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/tags/protection/%v", owner, repo, tagProtectionID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/scim.go b/vendor/github.com/google/go-github/v57/github/scim.go deleted file mode 100644 index 02136d7e..00000000 --- a/vendor/github.com/google/go-github/v57/github/scim.go +++ /dev/null @@ -1,217 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" -) - -// SCIMService provides access to SCIM related functions in the -// GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/scim -type SCIMService service - -// SCIMUserAttributes represents supported SCIM User attributes. -// -// GitHub API docs: https://docs.github.com/rest/scim#supported-scim-user-attributes -type SCIMUserAttributes struct { - UserName string `json:"userName"` // Configured by the admin. Could be an email, login, or username. (Required.) - Name SCIMUserName `json:"name"` // (Required.) - DisplayName *string `json:"displayName,omitempty"` // The name of the user, suitable for display to end-users. (Optional.) - Emails []*SCIMUserEmail `json:"emails"` // User emails. (Required.) - Schemas []string `json:"schemas,omitempty"` // (Optional.) - ExternalID *string `json:"externalId,omitempty"` // (Optional.) - Groups []string `json:"groups,omitempty"` // (Optional.) - Active *bool `json:"active,omitempty"` // (Optional.) 
- // Only populated as a result of calling ListSCIMProvisionedIdentitiesOptions or GetSCIMProvisioningInfoForUser: - ID *string `json:"id,omitempty"` - Meta *SCIMMeta `json:"meta,omitempty"` -} - -// SCIMUserName represents SCIM user information. -type SCIMUserName struct { - GivenName string `json:"givenName"` // The first name of the user. (Required.) - FamilyName string `json:"familyName"` // The family name of the user. (Required.) - Formatted *string `json:"formatted,omitempty"` // (Optional.) -} - -// SCIMUserEmail represents SCIM user email. -type SCIMUserEmail struct { - Value string `json:"value"` // (Required.) - Primary *bool `json:"primary,omitempty"` // (Optional.) - Type *string `json:"type,omitempty"` // (Optional.) -} - -// SCIMMeta represents metadata about the SCIM resource. -type SCIMMeta struct { - ResourceType *string `json:"resourceType,omitempty"` - Created *Timestamp `json:"created,omitempty"` - LastModified *Timestamp `json:"lastModified,omitempty"` - Location *string `json:"location,omitempty"` -} - -// SCIMProvisionedIdentities represents the result of calling ListSCIMProvisionedIdentities. -type SCIMProvisionedIdentities struct { - Schemas []string `json:"schemas,omitempty"` - TotalResults *int `json:"totalResults,omitempty"` - ItemsPerPage *int `json:"itemsPerPage,omitempty"` - StartIndex *int `json:"startIndex,omitempty"` - Resources []*SCIMUserAttributes `json:"Resources,omitempty"` -} - -// ListSCIMProvisionedIdentitiesOptions represents options for ListSCIMProvisionedIdentities. -// -// GitHub API docs: https://docs.github.com/rest/scim#list-scim-provisioned-identities--parameters -type ListSCIMProvisionedIdentitiesOptions struct { - StartIndex *int `url:"startIndex,omitempty"` // Used for pagination: the index of the first result to return. (Optional.) - Count *int `url:"count,omitempty"` // Used for pagination: the number of results to return. (Optional.) - // Filter results using the equals query parameter operator (eq). - // You can filter results that are equal to id, userName, emails, and external_id. - // For example, to search for an identity with the userName Octocat, you would use this query: ?filter=userName%20eq%20\"Octocat\". - // To filter results for the identity with the email octocat@github.com, you would use this query: ?filter=emails%20eq%20\"octocat@github.com\". - // (Optional.) - Filter *string `url:"filter,omitempty"` -} - -// ListSCIMProvisionedIdentities lists SCIM provisioned identities. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#list-scim-provisioned-identities -// -//meta:operation GET /scim/v2/organizations/{org}/Users -func (s *SCIMService) ListSCIMProvisionedIdentities(ctx context.Context, org string, opts *ListSCIMProvisionedIdentitiesOptions) (*SCIMProvisionedIdentities, *Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - identities := new(SCIMProvisionedIdentities) - resp, err := s.client.Do(ctx, req, identities) - if err != nil { - return nil, resp, err - } - - return identities, resp, nil -} - -// ProvisionAndInviteSCIMUser provisions organization membership for a user, and sends an activation email to the email address. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#provision-and-invite-a-scim-user -// -//meta:operation POST /scim/v2/organizations/{org}/Users -func (s *SCIMService) ProvisionAndInviteSCIMUser(ctx context.Context, org string, opts *SCIMUserAttributes) (*Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, err - } - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// GetSCIMProvisioningInfoForUser returns SCIM provisioning information for a user. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#get-scim-provisioning-information-for-a-user -// -//meta:operation GET /scim/v2/organizations/{org}/Users/{scim_user_id} -func (s *SCIMService) GetSCIMProvisioningInfoForUser(ctx context.Context, org, scimUserID string) (*SCIMUserAttributes, *Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - user := new(SCIMUserAttributes) - resp, err := s.client.Do(ctx, req, &user) - if err != nil { - return nil, resp, err - } - - return user, resp, nil -} - -// UpdateProvisionedOrgMembership updates a provisioned organization membership. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#update-a-provisioned-organization-membership -// -//meta:operation PUT /scim/v2/organizations/{org}/Users/{scim_user_id} -func (s *SCIMService) UpdateProvisionedOrgMembership(ctx context.Context, org, scimUserID string, opts *SCIMUserAttributes) (*Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) - u, err := addOptions(u, opts) - if err != nil { - return nil, err - } - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// UpdateAttributeForSCIMUserOptions represents options for UpdateAttributeForSCIMUser. -// -// GitHub API docs: https://docs.github.com/rest/scim#update-an-attribute-for-a-scim-user--parameters -type UpdateAttributeForSCIMUserOptions struct { - Schemas []string `json:"schemas,omitempty"` // (Optional.) - Operations UpdateAttributeForSCIMUserOperations `json:"operations"` // Set of operations to be performed. (Required.) -} - -// UpdateAttributeForSCIMUserOperations represents operations for UpdateAttributeForSCIMUser. -type UpdateAttributeForSCIMUserOperations struct { - Op string `json:"op"` // (Required.) - Path *string `json:"path,omitempty"` // (Optional.) - Value json.RawMessage `json:"value,omitempty"` // (Optional.) -} - -// UpdateAttributeForSCIMUser updates an attribute for an SCIM user. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#update-an-attribute-for-a-scim-user -// -//meta:operation PATCH /scim/v2/organizations/{org}/Users/{scim_user_id} -func (s *SCIMService) UpdateAttributeForSCIMUser(ctx context.Context, org, scimUserID string, opts *UpdateAttributeForSCIMUserOptions) (*Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) - u, err := addOptions(u, opts) - if err != nil { - return nil, err - } - - req, err := s.client.NewRequest("PATCH", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteSCIMUserFromOrg deletes SCIM user from an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/scim/scim#delete-a-scim-user-from-an-organization -// -//meta:operation DELETE /scim/v2/organizations/{org}/Users/{scim_user_id} -func (s *SCIMService) DeleteSCIMUserFromOrg(ctx context.Context, org, scimUserID string) (*Response, error) { - u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/search.go b/vendor/github.com/google/go-github/v57/github/search.go deleted file mode 100644 index 71efe87a..00000000 --- a/vendor/github.com/google/go-github/v57/github/search.go +++ /dev/null @@ -1,347 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strconv" - "strings" - - qs "github.com/google/go-querystring/query" -) - -// SearchService provides access to the search related functions -// in the GitHub API. -// -// Each method takes a query string defining the search keywords and any search qualifiers. -// For example, when searching issues, the query "gopher is:issue language:go" will search -// for issues containing the word "gopher" in Go repositories. The method call -// -// opts := &github.SearchOptions{Sort: "created", Order: "asc"} -// cl.Search.Issues(ctx, "gopher is:issue language:go", opts) -// -// will search for such issues, sorting by creation date in ascending order -// (i.e., oldest first). -// -// If query includes multiple conditions, it MUST NOT include "+" as the condition separator. -// You have to use " " as the separator instead. -// For example, querying with "language:c++" and "leveldb", then query should be -// "language:c++ leveldb" but not "language:c+++leveldb". -// -// GitHub API docs: https://docs.github.com/rest/search/ -type SearchService service - -// SearchOptions specifies optional parameters to the SearchService methods. -type SearchOptions struct { - // How to sort the search results. Possible values are: - // - for repositories: stars, fork, updated - // - for commits: author-date, committer-date - // - for code: indexed - // - for issues: comments, created, updated - // - for users: followers, repositories, joined - // - // Default is to sort by best match. - Sort string `url:"sort,omitempty"` - - // Sort order if sort parameter is provided. Possible values are: asc, - // desc. Default is desc. - Order string `url:"order,omitempty"` - - // Whether to retrieve text match metadata with a query - TextMatch bool `url:"-"` - - ListOptions -} - -// Common search parameters. 
-type searchParameters struct { - Query string - RepositoryID *int64 // Sent if non-nil. -} - -// RepositoriesSearchResult represents the result of a repositories search. -type RepositoriesSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Repositories []*Repository `json:"items,omitempty"` -} - -// Repositories searches repositories via various criteria. -// -// GitHub API docs: https://docs.github.com/rest/search/search#search-repositories -// -//meta:operation GET /search/repositories -func (s *SearchService) Repositories(ctx context.Context, query string, opts *SearchOptions) (*RepositoriesSearchResult, *Response, error) { - result := new(RepositoriesSearchResult) - resp, err := s.search(ctx, "repositories", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// TopicsSearchResult represents the result of a topics search. -type TopicsSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Topics []*TopicResult `json:"items,omitempty"` -} - -type TopicResult struct { - Name *string `json:"name,omitempty"` - DisplayName *string `json:"display_name,omitempty"` - ShortDescription *string `json:"short_description,omitempty"` - Description *string `json:"description,omitempty"` - CreatedBy *string `json:"created_by,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *string `json:"updated_at,omitempty"` - Featured *bool `json:"featured,omitempty"` - Curated *bool `json:"curated,omitempty"` - Score *float64 `json:"score,omitempty"` -} - -// Topics finds topics via various criteria. Results are sorted by best match. -// Please see https://help.github.com/articles/searching-topics for more -// information about search qualifiers. -// -// GitHub API docs: https://docs.github.com/rest/search/search#search-topics -// -//meta:operation GET /search/topics -func (s *SearchService) Topics(ctx context.Context, query string, opts *SearchOptions) (*TopicsSearchResult, *Response, error) { - result := new(TopicsSearchResult) - resp, err := s.search(ctx, "topics", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// CommitsSearchResult represents the result of a commits search. -type CommitsSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Commits []*CommitResult `json:"items,omitempty"` -} - -// CommitResult represents a commit object as returned in commit search endpoint response. -type CommitResult struct { - SHA *string `json:"sha,omitempty"` - Commit *Commit `json:"commit,omitempty"` - Author *User `json:"author,omitempty"` - Committer *User `json:"committer,omitempty"` - Parents []*Commit `json:"parents,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - URL *string `json:"url,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - - Repository *Repository `json:"repository,omitempty"` - Score *float64 `json:"score,omitempty"` -} - -// Commits searches commits via various criteria. 
-// -// GitHub API docs: https://docs.github.com/rest/search/search#search-commits -// -//meta:operation GET /search/commits -func (s *SearchService) Commits(ctx context.Context, query string, opts *SearchOptions) (*CommitsSearchResult, *Response, error) { - result := new(CommitsSearchResult) - resp, err := s.search(ctx, "commits", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// IssuesSearchResult represents the result of an issues search. -type IssuesSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Issues []*Issue `json:"items,omitempty"` -} - -// Issues searches issues via various criteria. -// -// GitHub API docs: https://docs.github.com/rest/search/search#search-issues-and-pull-requests -// -//meta:operation GET /search/issues -func (s *SearchService) Issues(ctx context.Context, query string, opts *SearchOptions) (*IssuesSearchResult, *Response, error) { - result := new(IssuesSearchResult) - resp, err := s.search(ctx, "issues", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// UsersSearchResult represents the result of a users search. -type UsersSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Users []*User `json:"items,omitempty"` -} - -// Users searches users via various criteria. -// -// GitHub API docs: https://docs.github.com/rest/search/search#search-users -// -//meta:operation GET /search/users -func (s *SearchService) Users(ctx context.Context, query string, opts *SearchOptions) (*UsersSearchResult, *Response, error) { - result := new(UsersSearchResult) - resp, err := s.search(ctx, "users", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// Match represents a single text match. -type Match struct { - Text *string `json:"text,omitempty"` - Indices []int `json:"indices,omitempty"` -} - -// TextMatch represents a text match for a SearchResult -type TextMatch struct { - ObjectURL *string `json:"object_url,omitempty"` - ObjectType *string `json:"object_type,omitempty"` - Property *string `json:"property,omitempty"` - Fragment *string `json:"fragment,omitempty"` - Matches []*Match `json:"matches,omitempty"` -} - -func (tm TextMatch) String() string { - return Stringify(tm) -} - -// CodeSearchResult represents the result of a code search. -type CodeSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - CodeResults []*CodeResult `json:"items,omitempty"` -} - -// CodeResult represents a single search result. -type CodeResult struct { - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - SHA *string `json:"sha,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Repository *Repository `json:"repository,omitempty"` - TextMatches []*TextMatch `json:"text_matches,omitempty"` -} - -func (c CodeResult) String() string { - return Stringify(c) -} - -// Code searches code via various criteria. 
-// -// GitHub API docs: https://docs.github.com/rest/search/search#search-code -// -//meta:operation GET /search/code -func (s *SearchService) Code(ctx context.Context, query string, opts *SearchOptions) (*CodeSearchResult, *Response, error) { - result := new(CodeSearchResult) - resp, err := s.search(ctx, "code", &searchParameters{Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// LabelsSearchResult represents the result of a code search. -type LabelsSearchResult struct { - Total *int `json:"total_count,omitempty"` - IncompleteResults *bool `json:"incomplete_results,omitempty"` - Labels []*LabelResult `json:"items,omitempty"` -} - -// LabelResult represents a single search result. -type LabelResult struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Name *string `json:"name,omitempty"` - Color *string `json:"color,omitempty"` - Default *bool `json:"default,omitempty"` - Description *string `json:"description,omitempty"` - Score *float64 `json:"score,omitempty"` -} - -func (l LabelResult) String() string { - return Stringify(l) -} - -// Labels searches labels in the repository with ID repoID via various criteria. -// -// GitHub API docs: https://docs.github.com/rest/search/search#search-labels -// -//meta:operation GET /search/labels -func (s *SearchService) Labels(ctx context.Context, repoID int64, query string, opts *SearchOptions) (*LabelsSearchResult, *Response, error) { - result := new(LabelsSearchResult) - resp, err := s.search(ctx, "labels", &searchParameters{RepositoryID: &repoID, Query: query}, opts, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// Helper function that executes search queries against different -// GitHub search types (repositories, commits, code, issues, users, labels) -// -// If searchParameters.Query includes multiple condition, it MUST NOT include "+" as condition separator. -// For example, querying with "language:c++" and "leveldb", then searchParameters.Query should be "language:c++ leveldb" but not "language:c+++leveldb". -func (s *SearchService) search(ctx context.Context, searchType string, parameters *searchParameters, opts *SearchOptions, result interface{}) (*Response, error) { - params, err := qs.Values(opts) - if err != nil { - return nil, err - } - - if parameters.RepositoryID != nil { - params.Set("repository_id", strconv.FormatInt(*parameters.RepositoryID, 10)) - } - params.Set("q", parameters.Query) - u := fmt.Sprintf("search/%s?%s", searchType, params.Encode()) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, err - } - var acceptHeaders []string - switch { - case searchType == "commits": - // Accept header for search commits preview endpoint - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders = append(acceptHeaders, mediaTypeCommitSearchPreview) - case searchType == "topics": - // Accept header for search repositories based on topics preview endpoint - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders = append(acceptHeaders, mediaTypeTopicsPreview) - case searchType == "repositories": - // Accept header for search repositories based on topics preview endpoint - // TODO: remove custom Accept header when this API fully launches. 
- acceptHeaders = append(acceptHeaders, mediaTypeTopicsPreview) - case searchType == "issues": - // Accept header for search issues based on reactions preview endpoint - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders = append(acceptHeaders, mediaTypeReactionsPreview) - } - // https://docs.github.com/rest/search#search-repositories - // Accept header defaults to "application/vnd.github.v3+json" - // We change it here to fetch back text-match metadata - if opts != nil && opts.TextMatch { - acceptHeaders = append(acceptHeaders, "application/vnd.github.v3.text-match+json") - } - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - return s.client.Do(ctx, req, result) -} diff --git a/vendor/github.com/google/go-github/v57/github/secret_scanning.go b/vendor/github.com/google/go-github/v57/github/secret_scanning.go deleted file mode 100644 index 9b2ad8cd..00000000 --- a/vendor/github.com/google/go-github/v57/github/secret_scanning.go +++ /dev/null @@ -1,257 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// SecretScanningService handles communication with the secret scanning related -// methods of the GitHub API. -type SecretScanningService service - -// SecretScanningAlert represents a GitHub secret scanning alert. -type SecretScanningAlert struct { - Number *int `json:"number,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - LocationsURL *string `json:"locations_url,omitempty"` - State *string `json:"state,omitempty"` - Resolution *string `json:"resolution,omitempty"` - ResolvedAt *Timestamp `json:"resolved_at,omitempty"` - ResolvedBy *User `json:"resolved_by,omitempty"` - SecretType *string `json:"secret_type,omitempty"` - SecretTypeDisplayName *string `json:"secret_type_display_name,omitempty"` - Secret *string `json:"secret,omitempty"` - Repository *Repository `json:"repository,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - PushProtectionBypassed *bool `json:"push_protection_bypassed,omitempty"` - PushProtectionBypassedBy *User `json:"push_protection_bypassed_by,omitempty"` - PushProtectionBypassedAt *Timestamp `json:"push_protection_bypassed_at,omitempty"` - ResolutionComment *string `json:"resolution_comment,omitempty"` -} - -// SecretScanningAlertLocation represents the location for a secret scanning alert. -type SecretScanningAlertLocation struct { - Type *string `json:"type,omitempty"` - Details *SecretScanningAlertLocationDetails `json:"details,omitempty"` -} - -// SecretScanningAlertLocationDetails represents the location details for a secret scanning alert. -type SecretScanningAlertLocationDetails struct { - Path *string `json:"path,omitempty"` - Startline *int `json:"start_line,omitempty"` - EndLine *int `json:"end_line,omitempty"` - StartColumn *int `json:"start_column,omitempty"` - EndColumn *int `json:"end_column,omitempty"` - BlobSHA *string `json:"blob_sha,omitempty"` - BlobURL *string `json:"blob_url,omitempty"` - CommitSHA *string `json:"commit_sha,omitempty"` - CommitURL *string `json:"commit_url,omitempty"` -} - -// SecretScanningAlertListOptions specifies optional parameters to the SecretScanningService.ListAlertsForEnterprise method. 
-type SecretScanningAlertListOptions struct { - // State of the secret scanning alerts to list. Set to open or resolved to only list secret scanning alerts in a specific state. - State string `url:"state,omitempty"` - - // A comma-separated list of secret types to return. By default all secret types are returned. - SecretType string `url:"secret_type,omitempty"` - - // A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. - // Valid resolutions are false_positive, wont_fix, revoked, pattern_edited, pattern_deleted or used_in_tests. - Resolution string `url:"resolution,omitempty"` - - ListCursorOptions - - // List options can vary on the Enterprise type. - // On Enterprise Cloud, Secret Scan alerts support requesting by page number - // along with providing a cursor for an "after" param. - // See: https://docs.github.com/enterprise-cloud@latest/rest/secret-scanning#list-secret-scanning-alerts-for-an-organization - // Whereas on Enterprise Server, pagination is by index. - // See: https://docs.github.com/enterprise-server@3.6/rest/secret-scanning#list-secret-scanning-alerts-for-an-organization - ListOptions -} - -// SecretScanningAlertUpdateOptions specifies optional parameters to the SecretScanningService.UpdateAlert method. -type SecretScanningAlertUpdateOptions struct { - // State is required and sets the state of the secret scanning alert. - // Can be either "open" or "resolved". - // You must provide resolution when you set the state to "resolved". - State string `json:"state"` - - // Required when the state is "resolved" and represents the reason for resolving the alert. - // Can be one of: "false_positive", "wont_fix", "revoked", or "used_in_tests". - Resolution *string `json:"resolution,omitempty"` -} - -// ListAlertsForEnterprise lists secret scanning alerts for eligible repositories in an enterprise, from newest to oldest. -// -// To use this endpoint, you must be a member of the enterprise, and you must use an access token with the repo scope or -// security_events scope. Alerts are only returned for organizations in the enterprise for which you are an organization owner or a security manager. -// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#list-secret-scanning-alerts-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/secret-scanning/alerts -func (s *SecretScanningService) ListAlertsForEnterprise(ctx context.Context, enterprise string, opts *SecretScanningAlertListOptions) ([]*SecretScanningAlert, *Response, error) { - u := fmt.Sprintf("enterprises/%v/secret-scanning/alerts", enterprise) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*SecretScanningAlert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// ListAlertsForOrg lists secret scanning alerts for eligible repositories in an organization, from newest to oldest. -// -// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with -// the repo scope or security_events scope. 
-// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#list-secret-scanning-alerts-for-an-organization -// -//meta:operation GET /orgs/{org}/secret-scanning/alerts -func (s *SecretScanningService) ListAlertsForOrg(ctx context.Context, org string, opts *SecretScanningAlertListOptions) ([]*SecretScanningAlert, *Response, error) { - u := fmt.Sprintf("orgs/%v/secret-scanning/alerts", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*SecretScanningAlert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// ListAlertsForRepo lists secret scanning alerts for a private repository, from newest to oldest. -// -// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with -// the repo scope or security_events scope. -// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#list-secret-scanning-alerts-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/secret-scanning/alerts -func (s *SecretScanningService) ListAlertsForRepo(ctx context.Context, owner, repo string, opts *SecretScanningAlertListOptions) ([]*SecretScanningAlert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alerts []*SecretScanningAlert - resp, err := s.client.Do(ctx, req, &alerts) - if err != nil { - return nil, resp, err - } - - return alerts, resp, nil -} - -// GetAlert gets a single secret scanning alert detected in a private repository. -// -// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with -// the repo scope or security_events scope. -// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#get-a-secret-scanning-alert -// -//meta:operation GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number} -func (s *SecretScanningService) GetAlert(ctx context.Context, owner, repo string, number int64) (*SecretScanningAlert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts/%v", owner, repo, number) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var alert *SecretScanningAlert - resp, err := s.client.Do(ctx, req, &alert) - if err != nil { - return nil, resp, err - } - - return alert, resp, nil -} - -// UpdateAlert updates the status of a secret scanning alert in a private repository. -// -// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with -// the repo scope or security_events scope. 
-// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#update-a-secret-scanning-alert -// -//meta:operation PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number} -func (s *SecretScanningService) UpdateAlert(ctx context.Context, owner, repo string, number int64, opts *SecretScanningAlertUpdateOptions) (*SecretScanningAlert, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts/%v", owner, repo, number) - - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - var alert *SecretScanningAlert - resp, err := s.client.Do(ctx, req, &alert) - if err != nil { - return nil, resp, err - } - - return alert, resp, nil -} - -// ListLocationsForAlert lists all locations for a given secret scanning alert for a private repository. -// -// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with -// the repo scope or security_events scope. -// -// GitHub API docs: https://docs.github.com/rest/secret-scanning/secret-scanning#list-locations-for-a-secret-scanning-alert -// -//meta:operation GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations -func (s *SecretScanningService) ListLocationsForAlert(ctx context.Context, owner, repo string, number int64, opts *ListOptions) ([]*SecretScanningAlertLocation, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts/%v/locations", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var locations []*SecretScanningAlertLocation - resp, err := s.client.Do(ctx, req, &locations) - if err != nil { - return nil, resp, err - } - - return locations, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/security_advisories.go b/vendor/github.com/google/go-github/v57/github/security_advisories.go deleted file mode 100644 index 63526374..00000000 --- a/vendor/github.com/google/go-github/v57/github/security_advisories.go +++ /dev/null @@ -1,248 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -type SecurityAdvisoriesService service - -// SecurityAdvisorySubmission represents the Security Advisory Submission. -type SecurityAdvisorySubmission struct { - // Accepted represents whether a private vulnerability report was accepted by the repository's administrators. - Accepted *bool `json:"accepted,omitempty"` -} - -// RepoAdvisoryCredit represents the credit object for a repository Security Advisory. -type RepoAdvisoryCredit struct { - Login *string `json:"login,omitempty"` - Type *string `json:"type,omitempty"` -} - -// RepoAdvisoryCreditDetailed represents a credit given to a user for a repository Security Advisory. -type RepoAdvisoryCreditDetailed struct { - User *User `json:"user,omitempty"` - Type *string `json:"type,omitempty"` - State *string `json:"state,omitempty"` -} - -// ListRepositorySecurityAdvisoriesOptions specifies the optional parameters to list the repository security advisories. -type ListRepositorySecurityAdvisoriesOptions struct { - ListCursorOptions - - // Direction in which to sort advisories. Possible values are: asc, desc. - // Default is "asc". 
- Direction string `url:"direction,omitempty"` - - // Sort specifies how to sort advisories. Possible values are: created, updated, - // and published. Default value is "created". - Sort string `url:"sort,omitempty"` - - // State filters advisories based on their state. Possible values are: triage, draft, published, closed. - State string `url:"state,omitempty"` -} - -// ListGlobalSecurityAdvisoriesOptions specifies the optional parameters to list the global security advisories. -type ListGlobalSecurityAdvisoriesOptions struct { - ListCursorOptions - - // If specified, only advisories with this GHSA (GitHub Security Advisory) identifier will be returned. - GHSAID *string `url:"ghsa_id,omitempty"` - - // If specified, only advisories of this type will be returned. - // By default, a request with no other parameters defined will only return reviewed advisories that are not malware. - // Default: reviewed - // Can be one of: reviewed, malware, unreviewed - Type *string `url:"type,omitempty"` - - // If specified, only advisories with this CVE (Common Vulnerabilities and Exposures) identifier will be returned. - CVEID *string `url:"cve_id,omitempty"` - - // If specified, only advisories for these ecosystems will be returned. - // Can be one of: actions, composer, erlang, go, maven, npm, nuget, other, pip, pub, rubygems, rust - Ecosystem *string `url:"ecosystem,omitempty"` - - // If specified, only advisories with these severities will be returned. - // Can be one of: unknown, low, medium, high, critical - Severity *string `url:"severity,omitempty"` - - // If specified, only advisories with these Common Weakness Enumerations (CWEs) will be returned. - // Example: cwes=79,284,22 or cwes[]=79&cwes[]=284&cwes[]=22 - CWEs []string `url:"cwes,omitempty"` - - // Whether to only return advisories that have been withdrawn. - IsWithdrawn *bool `url:"is_withdrawn,omitempty"` - - // If specified, only return advisories that affect any of package or package@version. - // A maximum of 1000 packages can be specified. If the query parameter causes - // the URL to exceed the maximum URL length supported by your client, you must specify fewer packages. - // Example: affects=package1,package2@1.0.0,package3@^2.0.0 or affects[]=package1&affects[]=package2@1.0.0 - Affects *string `url:"affects,omitempty"` - - // If specified, only return advisories that were published on a date or date range. - Published *string `url:"published,omitempty"` - - // If specified, only return advisories that were updated on a date or date range. - Updated *string `url:"updated,omitempty"` - - // If specified, only show advisories that were updated or published on a date or date range. - Modified *string `url:"modified,omitempty"` -} - -// GlobalSecurityAdvisory represents the global security advisory object response. -type GlobalSecurityAdvisory struct { - SecurityAdvisory - ID *int64 `json:"id,omitempty"` - RepositoryAdvisoryURL *string `json:"repository_advisory_url,omitempty"` - Type *string `json:"type,omitempty"` - SourceCodeLocation *string `json:"source_code_location,omitempty"` - References []string `json:"references,omitempty"` - Vulnerabilities []*GlobalSecurityVulnerability `json:"vulnerabilities,omitempty"` - GithubReviewedAt *Timestamp `json:"github_reviewed_at,omitempty"` - NVDPublishedAt *Timestamp `json:"nvd_published_at,omitempty"` - Credits []*Credit `json:"credits,omitempty"` -} - -// GlobalSecurityVulnerability represents a vulnerability for a global security advisory. 
-type GlobalSecurityVulnerability struct { - Package *VulnerabilityPackage `json:"package,omitempty"` - FirstPatchedVersion *string `json:"first_patched_version,omitempty"` - VulnerableVersionRange *string `json:"vulnerable_version_range,omitempty"` - VulnerableFunctions []string `json:"vulnerable_functions,omitempty"` -} - -// Credit represents the credit object for a global security advisory. -type Credit struct { - User *User `json:"user,omitempty"` - Type *string `json:"type,omitempty"` -} - -// RequestCVE requests a Common Vulnerabilities and Exposures (CVE) for a repository security advisory. -// The ghsaID is the GitHub Security Advisory identifier of the advisory. -// -// GitHub API docs: https://docs.github.com/rest/security-advisories/repository-advisories#request-a-cve-for-a-repository-security-advisory -// -//meta:operation POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve -func (s *SecurityAdvisoriesService) RequestCVE(ctx context.Context, owner, repo, ghsaID string) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/security-advisories/%v/cve", owner, repo, ghsaID) - - req, err := s.client.NewRequest("POST", url, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - if _, ok := err.(*AcceptedError); ok { - return resp, nil - } - - return resp, err - } - - return resp, nil -} - -// ListRepositorySecurityAdvisoriesForOrg lists the repository security advisories for an organization. -// -// GitHub API docs: https://docs.github.com/rest/security-advisories/repository-advisories#list-repository-security-advisories-for-an-organization -// -//meta:operation GET /orgs/{org}/security-advisories -func (s *SecurityAdvisoriesService) ListRepositorySecurityAdvisoriesForOrg(ctx context.Context, org string, opt *ListRepositorySecurityAdvisoriesOptions) ([]*SecurityAdvisory, *Response, error) { - url := fmt.Sprintf("orgs/%v/security-advisories", org) - url, err := addOptions(url, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var advisories []*SecurityAdvisory - resp, err := s.client.Do(ctx, req, &advisories) - if err != nil { - return nil, resp, err - } - - return advisories, resp, nil -} - -// ListRepositorySecurityAdvisories lists the security advisories in a repository. -// -// GitHub API docs: https://docs.github.com/rest/security-advisories/repository-advisories#list-repository-security-advisories -// -//meta:operation GET /repos/{owner}/{repo}/security-advisories -func (s *SecurityAdvisoriesService) ListRepositorySecurityAdvisories(ctx context.Context, owner, repo string, opt *ListRepositorySecurityAdvisoriesOptions) ([]*SecurityAdvisory, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/security-advisories", owner, repo) - url, err := addOptions(url, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var advisories []*SecurityAdvisory - resp, err := s.client.Do(ctx, req, &advisories) - if err != nil { - return nil, resp, err - } - - return advisories, resp, nil -} - -// ListGlobalSecurityAdvisories lists all global security advisories. 
-// -// GitHub API docs: https://docs.github.com/rest/security-advisories/global-advisories#list-global-security-advisories -// -//meta:operation GET /advisories -func (s *SecurityAdvisoriesService) ListGlobalSecurityAdvisories(ctx context.Context, opts *ListGlobalSecurityAdvisoriesOptions) ([]*GlobalSecurityAdvisory, *Response, error) { - url := "advisories" - url, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var advisories []*GlobalSecurityAdvisory - resp, err := s.client.Do(ctx, req, &advisories) - if err != nil { - return nil, resp, err - } - - return advisories, resp, nil -} - -// GetGlobalSecurityAdvisories gets a global security advisory using its GitHub Security Advisory (GHSA) identifier. -// -// GitHub API docs: https://docs.github.com/rest/security-advisories/global-advisories#get-a-global-security-advisory -// -//meta:operation GET /advisories/{ghsa_id} -func (s *SecurityAdvisoriesService) GetGlobalSecurityAdvisories(ctx context.Context, ghsaID string) (*GlobalSecurityAdvisory, *Response, error) { - url := fmt.Sprintf("advisories/%s", ghsaID) - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var advisory *GlobalSecurityAdvisory - resp, err := s.client.Do(ctx, req, &advisory) - if err != nil { - return nil, resp, err - } - - return advisory, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/strings.go b/vendor/github.com/google/go-github/v57/github/strings.go deleted file mode 100644 index 147c515e..00000000 --- a/vendor/github.com/google/go-github/v57/github/strings.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "fmt" - "reflect" -) - -var timestampType = reflect.TypeOf(Timestamp{}) - -// Stringify attempts to create a reasonable string representation of types in -// the GitHub library. It does things like resolve pointers to their values -// and omits struct fields with nil values. -func Stringify(message interface{}) string { - var buf bytes.Buffer - v := reflect.ValueOf(message) - stringifyValue(&buf, v) - return buf.String() -} - -// stringifyValue was heavily inspired by the goprotobuf library. 
- -func stringifyValue(w *bytes.Buffer, val reflect.Value) { - if val.Kind() == reflect.Ptr && val.IsNil() { - w.Write([]byte("")) - return - } - - v := reflect.Indirect(val) - - switch v.Kind() { - case reflect.String: - fmt.Fprintf(w, `"%s"`, v) - case reflect.Slice: - w.Write([]byte{'['}) - for i := 0; i < v.Len(); i++ { - if i > 0 { - w.Write([]byte{' '}) - } - - stringifyValue(w, v.Index(i)) - } - - w.Write([]byte{']'}) - return - case reflect.Struct: - if v.Type().Name() != "" { - w.Write([]byte(v.Type().String())) - } - - // special handling of Timestamp values - if v.Type() == timestampType { - fmt.Fprintf(w, "{%s}", v.Interface()) - return - } - - w.Write([]byte{'{'}) - - var sep bool - for i := 0; i < v.NumField(); i++ { - fv := v.Field(i) - if fv.Kind() == reflect.Ptr && fv.IsNil() { - continue - } - if fv.Kind() == reflect.Slice && fv.IsNil() { - continue - } - if fv.Kind() == reflect.Map && fv.IsNil() { - continue - } - - if sep { - w.Write([]byte(", ")) - } else { - sep = true - } - - w.Write([]byte(v.Type().Field(i).Name)) - w.Write([]byte{':'}) - stringifyValue(w, fv) - } - - w.Write([]byte{'}'}) - default: - if v.CanInterface() { - fmt.Fprint(w, v.Interface()) - } - } -} diff --git a/vendor/github.com/google/go-github/v57/github/teams.go b/vendor/github.com/google/go-github/v57/github/teams.go deleted file mode 100644 index fd22b792..00000000 --- a/vendor/github.com/google/go-github/v57/github/teams.go +++ /dev/null @@ -1,1067 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "net/http" - "strings" -) - -// TeamsService provides access to the team-related functions -// in the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/teams/ -type TeamsService service - -// Team represents a team within a GitHub organization. Teams are used to -// manage access to an organization's repositories. -type Team struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - URL *string `json:"url,omitempty"` - Slug *string `json:"slug,omitempty"` - - // Permission specifies the default permission for repositories owned by the team. - Permission *string `json:"permission,omitempty"` - - // Permissions identifies the permissions that a team has on a given - // repository. This is only populated when calling Repositories.ListTeams. - Permissions map[string]bool `json:"permissions,omitempty"` - - // Privacy identifies the level of privacy this team should have. - // Possible values are: - // secret - only visible to organization owners and members of this team - // closed - visible to all members of this organization - // Default is "secret". - Privacy *string `json:"privacy,omitempty"` - - MembersCount *int `json:"members_count,omitempty"` - ReposCount *int `json:"repos_count,omitempty"` - Organization *Organization `json:"organization,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - MembersURL *string `json:"members_url,omitempty"` - RepositoriesURL *string `json:"repositories_url,omitempty"` - Parent *Team `json:"parent,omitempty"` - - // LDAPDN is only available in GitHub Enterprise and when the team - // membership is synchronized with LDAP. 
- LDAPDN *string `json:"ldap_dn,omitempty"` -} - -func (t Team) String() string { - return Stringify(t) -} - -// Invitation represents a team member's invitation status. -type Invitation struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Login *string `json:"login,omitempty"` - Email *string `json:"email,omitempty"` - // Role can be one of the values - 'direct_member', 'admin', 'billing_manager', 'hiring_manager', or 'reinstate'. - Role *string `json:"role,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - Inviter *User `json:"inviter,omitempty"` - TeamCount *int `json:"team_count,omitempty"` - InvitationTeamURL *string `json:"invitation_team_url,omitempty"` - FailedAt *Timestamp `json:"failed_at,omitempty"` - FailedReason *string `json:"failed_reason,omitempty"` -} - -func (i Invitation) String() string { - return Stringify(i) -} - -// ListTeams lists all of the teams for an organization. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-teams -// -//meta:operation GET /orgs/{org}/teams -func (s *TeamsService) ListTeams(ctx context.Context, org string, opts *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// GetTeamByID fetches a team, given a specified organization ID, by ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#get-a-team-by-name -// -//meta:operation GET /orgs/{org}/teams/{team_slug} -func (s *TeamsService) GetTeamByID(ctx context.Context, orgID, teamID int64) (*Team, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v", orgID, teamID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// GetTeamBySlug fetches a team, given a specified organization name, by slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#get-a-team-by-name -// -//meta:operation GET /orgs/{org}/teams/{team_slug} -func (s *TeamsService) GetTeamBySlug(ctx context.Context, org, slug string) (*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v", org, slug) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// NewTeam represents a team to be created or modified. -type NewTeam struct { - Name string `json:"name"` // Name of the team. (Required.) - Description *string `json:"description,omitempty"` - Maintainers []string `json:"maintainers,omitempty"` - RepoNames []string `json:"repo_names,omitempty"` - ParentTeamID *int64 `json:"parent_team_id,omitempty"` - - // Deprecated: Permission is deprecated when creating or editing a team in an org - // using the new GitHub permission model. It no longer identifies the - // permission a team has on its repos, but only specifies the default - // permission a repo is initially added with. Avoid confusion by - // specifying a permission value when calling AddTeamRepo. 
- Permission *string `json:"permission,omitempty"` - - // Privacy identifies the level of privacy this team should have. - // Possible values are: - // secret - only visible to organization owners and members of this team - // closed - visible to all members of this organization - // Default is "secret". - Privacy *string `json:"privacy,omitempty"` - - // LDAPDN may be used in GitHub Enterprise when the team membership - // is synchronized with LDAP. - LDAPDN *string `json:"ldap_dn,omitempty"` -} - -func (s NewTeam) String() string { - return Stringify(s) -} - -// CreateTeam creates a new team within an organization. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#create-a-team -// -//meta:operation POST /orgs/{org}/teams -func (s *TeamsService) CreateTeam(ctx context.Context, org string, team NewTeam) (*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams", org) - req, err := s.client.NewRequest("POST", u, team) - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// newTeamNoParent is the same as NewTeam but ensures that the -// "parent_team_id" field will be null. It is for internal use -// only and should not be exported. -type newTeamNoParent struct { - Name string `json:"name"` - Description *string `json:"description,omitempty"` - Maintainers []string `json:"maintainers,omitempty"` - RepoNames []string `json:"repo_names,omitempty"` - ParentTeamID *int64 `json:"parent_team_id"` // This will be "null" - Privacy *string `json:"privacy,omitempty"` - LDAPDN *string `json:"ldap_dn,omitempty"` -} - -// copyNewTeamWithoutParent is used to set the "parent_team_id" -// field to "null" after copying the other fields from a NewTeam. -// It is for internal use only and should not be exported. -func copyNewTeamWithoutParent(team *NewTeam) *newTeamNoParent { - return &newTeamNoParent{ - Name: team.Name, - Description: team.Description, - Maintainers: team.Maintainers, - RepoNames: team.RepoNames, - Privacy: team.Privacy, - LDAPDN: team.LDAPDN, - } -} - -// EditTeamByID edits a team, given an organization ID, selected by ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#update-a-team -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug} -func (s *TeamsService) EditTeamByID(ctx context.Context, orgID, teamID int64, team NewTeam, removeParent bool) (*Team, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v", orgID, teamID) - - var req *http.Request - var err error - if removeParent { - teamRemoveParent := copyNewTeamWithoutParent(&team) - req, err = s.client.NewRequest("PATCH", u, teamRemoveParent) - } else { - req, err = s.client.NewRequest("PATCH", u, team) - } - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// EditTeamBySlug edits a team, given an organization name, by slug. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/teams#update-a-team -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug} -func (s *TeamsService) EditTeamBySlug(ctx context.Context, org, slug string, team NewTeam, removeParent bool) (*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v", org, slug) - - var req *http.Request - var err error - if removeParent { - teamRemoveParent := copyNewTeamWithoutParent(&team) - req, err = s.client.NewRequest("PATCH", u, teamRemoveParent) - } else { - req, err = s.client.NewRequest("PATCH", u, team) - } - if err != nil { - return nil, nil, err - } - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// DeleteTeamByID deletes a team referenced by ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#delete-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug} -func (s *TeamsService) DeleteTeamByID(ctx context.Context, orgID, teamID int64) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v", orgID, teamID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteTeamBySlug deletes a team reference by slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#delete-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug} -func (s *TeamsService) DeleteTeamBySlug(ctx context.Context, org, slug string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v", org, slug) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListChildTeamsByParentID lists child teams for a parent team given parent ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-child-teams -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/teams -func (s *TeamsService) ListChildTeamsByParentID(ctx context.Context, orgID, teamID int64, opts *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/teams", orgID, teamID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// ListChildTeamsByParentSlug lists child teams for a parent team given parent slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-child-teams -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/teams -func (s *TeamsService) ListChildTeamsByParentSlug(ctx context.Context, org, slug string, opts *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/teams", org, slug) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// ListTeamReposByID lists the repositories given a team ID that the specified team has access to. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-team-repositories -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/repos -func (s *TeamsService) ListTeamReposByID(ctx context.Context, orgID, teamID int64, opts *ListOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/repos", orgID, teamID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when topics API fully launches. - headers := []string{mediaTypeTopicsPreview} - req.Header.Set("Accept", strings.Join(headers, ", ")) - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// ListTeamReposBySlug lists the repositories given a team slug that the specified team has access to. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-team-repositories -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/repos -func (s *TeamsService) ListTeamReposBySlug(ctx context.Context, org, slug string, opts *ListOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/repos", org, slug) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when topics API fully launches. - headers := []string{mediaTypeTopicsPreview} - req.Header.Set("Accept", strings.Join(headers, ", ")) - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// IsTeamRepoByID checks if a team, given its ID, manages the specified repository. If the -// repository is managed by team, a Repository is returned which includes the -// permissions team has for that repo. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#check-team-permissions-for-a-repository -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) IsTeamRepoByID(ctx context.Context, orgID, teamID int64, owner, repo string) (*Repository, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/repos/%v/%v", orgID, teamID, owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - headers := []string{mediaTypeOrgPermissionRepo} - req.Header.Set("Accept", strings.Join(headers, ", ")) - - repository := new(Repository) - resp, err := s.client.Do(ctx, req, repository) - if err != nil { - return nil, resp, err - } - - return repository, resp, nil -} - -// IsTeamRepoBySlug checks if a team, given its slug, manages the specified repository. If the -// repository is managed by team, a Repository is returned which includes the -// permissions team has for that repo. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/teams#check-team-permissions-for-a-repository -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) IsTeamRepoBySlug(ctx context.Context, org, slug, owner, repo string) (*Repository, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/repos/%v/%v", org, slug, owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - headers := []string{mediaTypeOrgPermissionRepo} - req.Header.Set("Accept", strings.Join(headers, ", ")) - - repository := new(Repository) - resp, err := s.client.Do(ctx, req, repository) - if err != nil { - return nil, resp, err - } - - return repository, resp, nil -} - -// TeamAddTeamRepoOptions specifies the optional parameters to the -// TeamsService.AddTeamRepoByID and TeamsService.AddTeamRepoBySlug methods. -type TeamAddTeamRepoOptions struct { - // Permission specifies the permission to grant the team on this repository. - // Possible values are: - // pull - team members can pull, but not push to or administer this repository - // push - team members can pull and push, but not administer this repository - // admin - team members can pull, push and administer this repository - // maintain - team members can manage the repository without access to sensitive or destructive actions. - // triage - team members can proactively manage issues and pull requests without write access. - // - // If not specified, the team's permission attribute will be used. - Permission string `json:"permission,omitempty"` -} - -// AddTeamRepoByID adds a repository to be managed by the specified team given the team ID. -// The specified repository must be owned by the organization to which the team -// belongs, or a direct fork of a repository owned by the organization. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#add-or-update-team-repository-permissions -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) AddTeamRepoByID(ctx context.Context, orgID, teamID int64, owner, repo string, opts *TeamAddTeamRepoOptions) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/repos/%v/%v", orgID, teamID, owner, repo) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddTeamRepoBySlug adds a repository to be managed by the specified team given the team slug. -// The specified repository must be owned by the organization to which the team -// belongs, or a direct fork of a repository owned by the organization. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#add-or-update-team-repository-permissions -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) AddTeamRepoBySlug(ctx context.Context, org, slug, owner, repo string, opts *TeamAddTeamRepoOptions) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/repos/%v/%v", org, slug, owner, repo) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamRepoByID removes a repository from being managed by the specified -// team given the team ID. Note that this does not delete the repository, it -// just removes it from the team. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/teams#remove-a-repository-from-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) RemoveTeamRepoByID(ctx context.Context, orgID, teamID int64, owner, repo string) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/repos/%v/%v", orgID, teamID, owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamRepoBySlug removes a repository from being managed by the specified -// team given the team slug. Note that this does not delete the repository, it -// just removes it from the team. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#remove-a-repository-from-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo} -func (s *TeamsService) RemoveTeamRepoBySlug(ctx context.Context, org, slug, owner, repo string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/repos/%v/%v", org, slug, owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListUserTeams lists a user's teams -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-teams-for-the-authenticated-user -// -//meta:operation GET /user/teams -func (s *TeamsService) ListUserTeams(ctx context.Context, opts *ListOptions) ([]*Team, *Response, error) { - u := "user/teams" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// ListTeamProjectsByID lists the organization projects for a team given the team ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-team-projects -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/projects -func (s *TeamsService) ListTeamProjectsByID(ctx context.Context, orgID, teamID int64) ([]*Project, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/projects", orgID, teamID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var projects []*Project - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// ListTeamProjectsBySlug lists the organization projects for a team given the team slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#list-team-projects -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/projects -func (s *TeamsService) ListTeamProjectsBySlug(ctx context.Context, org, slug string) ([]*Project, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/projects", org, slug) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var projects []*Project - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// ReviewTeamProjectsByID checks whether a team, given its ID, has read, write, or admin -// permissions for an organization project. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#check-team-permissions-for-a-project -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) ReviewTeamProjectsByID(ctx context.Context, orgID, teamID, projectID int64) (*Project, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/projects/%v", orgID, teamID, projectID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - projects := &Project{} - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// ReviewTeamProjectsBySlug checks whether a team, given its slug, has read, write, or admin -// permissions for an organization project. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#check-team-permissions-for-a-project -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) ReviewTeamProjectsBySlug(ctx context.Context, org, slug string, projectID int64) (*Project, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/projects/%v", org, slug, projectID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - projects := &Project{} - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// TeamProjectOptions specifies the optional parameters to the -// TeamsService.AddTeamProject method. -type TeamProjectOptions struct { - // Permission specifies the permission to grant to the team for this project. - // Possible values are: - // "read" - team members can read, but not write to or administer this project. - // "write" - team members can read and write, but not administer this project. - // "admin" - team members can read, write and administer this project. - // - Permission *string `json:"permission,omitempty"` -} - -// AddTeamProjectByID adds an organization project to a team given the team ID. -// To add a project to a team or update the team's permission on a project, the -// authenticated user must have admin permissions for the project. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#add-or-update-team-project-permissions -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) AddTeamProjectByID(ctx context.Context, orgID, teamID, projectID int64, opts *TeamProjectOptions) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/projects/%v", orgID, teamID, projectID) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - return s.client.Do(ctx, req, nil) -} - -// AddTeamProjectBySlug adds an organization project to a team given the team slug. -// To add a project to a team or update the team's permission on a project, the -// authenticated user must have admin permissions for the project. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#add-or-update-team-project-permissions -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) AddTeamProjectBySlug(ctx context.Context, org, slug string, projectID int64, opts *TeamProjectOptions) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/projects/%v", org, slug, projectID) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamProjectByID removes an organization project from a team given team ID. -// An organization owner or a team maintainer can remove any project from the team. -// To remove a project from a team as an organization member, the authenticated user -// must have "read" access to both the team and project, or "admin" access to the team -// or project. -// Note: This endpoint removes the project from the team, but does not delete it. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#remove-a-project-from-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) RemoveTeamProjectByID(ctx context.Context, orgID, teamID, projectID int64) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/projects/%v", orgID, teamID, projectID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamProjectBySlug removes an organization project from a team given team slug. -// An organization owner or a team maintainer can remove any project from the team. -// To remove a project from a team as an organization member, the authenticated user -// must have "read" access to both the team and project, or "admin" access to the team -// or project. -// Note: This endpoint removes the project from the team, but does not delete it. -// -// GitHub API docs: https://docs.github.com/rest/teams/teams#remove-a-project-from-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id} -func (s *TeamsService) RemoveTeamProjectBySlug(ctx context.Context, org, slug string, projectID int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/projects/%v", org, slug, projectID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeProjectsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - return s.client.Do(ctx, req, nil) -} - -// IDPGroupList represents a list of external identity provider (IDP) groups. 
-type IDPGroupList struct { - Groups []*IDPGroup `json:"groups"` -} - -// IDPGroup represents an external identity provider (IDP) group. -type IDPGroup struct { - GroupID *string `json:"group_id,omitempty"` - GroupName *string `json:"group_name,omitempty"` - GroupDescription *string `json:"group_description,omitempty"` -} - -// ListIDPGroupsInOrganization lists IDP groups available in an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/team-sync#list-idp-groups-for-an-organization -// -//meta:operation GET /orgs/{org}/team-sync/groups -func (s *TeamsService) ListIDPGroupsInOrganization(ctx context.Context, org string, opts *ListCursorOptions) (*IDPGroupList, *Response, error) { - u := fmt.Sprintf("orgs/%v/team-sync/groups", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - groups := new(IDPGroupList) - resp, err := s.client.Do(ctx, req, groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// ListIDPGroupsForTeamByID lists IDP groups connected to a team on GitHub -// given organization and team IDs. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/team-sync#list-idp-groups-for-a-team -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings -func (s *TeamsService) ListIDPGroupsForTeamByID(ctx context.Context, orgID, teamID int64) (*IDPGroupList, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/team-sync/group-mappings", orgID, teamID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - groups := new(IDPGroupList) - resp, err := s.client.Do(ctx, req, groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// ListIDPGroupsForTeamBySlug lists IDP groups connected to a team on GitHub -// given organization name and team slug. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/team-sync#list-idp-groups-for-a-team -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings -func (s *TeamsService) ListIDPGroupsForTeamBySlug(ctx context.Context, org, slug string) (*IDPGroupList, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/team-sync/group-mappings", org, slug) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - groups := new(IDPGroupList) - resp, err := s.client.Do(ctx, req, groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// CreateOrUpdateIDPGroupConnectionsByID creates, updates, or removes a connection -// between a team and an IDP group given organization and team IDs. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/team-sync#create-or-update-idp-group-connections -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/team-sync/group-mappings -func (s *TeamsService) CreateOrUpdateIDPGroupConnectionsByID(ctx context.Context, orgID, teamID int64, opts IDPGroupList) (*IDPGroupList, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/team-sync/group-mappings", orgID, teamID) - - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - groups := new(IDPGroupList) - resp, err := s.client.Do(ctx, req, groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// CreateOrUpdateIDPGroupConnectionsBySlug creates, updates, or removes a connection -// between a team and an IDP group given organization name and team slug. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/team-sync#create-or-update-idp-group-connections -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/team-sync/group-mappings -func (s *TeamsService) CreateOrUpdateIDPGroupConnectionsBySlug(ctx context.Context, org, slug string, opts IDPGroupList) (*IDPGroupList, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/team-sync/group-mappings", org, slug) - - req, err := s.client.NewRequest("PATCH", u, opts) - if err != nil { - return nil, nil, err - } - - groups := new(IDPGroupList) - resp, err := s.client.Do(ctx, req, groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// ExternalGroupMember represents a member of an external group. -type ExternalGroupMember struct { - MemberID *int64 `json:"member_id,omitempty"` - MemberLogin *string `json:"member_login,omitempty"` - MemberName *string `json:"member_name,omitempty"` - MemberEmail *string `json:"member_email,omitempty"` -} - -// ExternalGroupTeam represents a team connected to an external group. -type ExternalGroupTeam struct { - TeamID *int64 `json:"team_id,omitempty"` - TeamName *string `json:"team_name,omitempty"` -} - -// ExternalGroup represents an external group. -type ExternalGroup struct { - GroupID *int64 `json:"group_id,omitempty"` - GroupName *string `json:"group_name,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Teams []*ExternalGroupTeam `json:"teams,omitempty"` - Members []*ExternalGroupMember `json:"members,omitempty"` -} - -// ExternalGroupList represents a list of external groups. -type ExternalGroupList struct { - Groups []*ExternalGroup `json:"groups"` -} - -// GetExternalGroup fetches an external group. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/external-groups#get-an-external-group -// -//meta:operation GET /orgs/{org}/external-group/{group_id} -func (s *TeamsService) GetExternalGroup(ctx context.Context, org string, groupID int64) (*ExternalGroup, *Response, error) { - u := fmt.Sprintf("orgs/%v/external-group/%v", org, groupID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - externalGroup := new(ExternalGroup) - resp, err := s.client.Do(ctx, req, externalGroup) - if err != nil { - return nil, resp, err - } - - return externalGroup, resp, nil -} - -// ListExternalGroupsOptions specifies the optional parameters to the -// TeamsService.ListExternalGroups method. 
-type ListExternalGroupsOptions struct { - DisplayName *string `url:"display_name,omitempty"` - - ListOptions -} - -// ListExternalGroups lists external groups in an organization on GitHub. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/external-groups#list-external-groups-in-an-organization -// -//meta:operation GET /orgs/{org}/external-groups -func (s *TeamsService) ListExternalGroups(ctx context.Context, org string, opts *ListExternalGroupsOptions) (*ExternalGroupList, *Response, error) { - u := fmt.Sprintf("orgs/%v/external-groups", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - externalGroups := new(ExternalGroupList) - resp, err := s.client.Do(ctx, req, externalGroups) - if err != nil { - return nil, resp, err - } - - return externalGroups, resp, nil -} - -// ListExternalGroupsForTeamBySlug lists external groups connected to a team on GitHub. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/external-groups#list-a-connection-between-an-external-group-and-a-team -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/external-groups -func (s *TeamsService) ListExternalGroupsForTeamBySlug(ctx context.Context, org, slug string) (*ExternalGroupList, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/external-groups", org, slug) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - externalGroups := new(ExternalGroupList) - resp, err := s.client.Do(ctx, req, externalGroups) - if err != nil { - return nil, resp, err - } - - return externalGroups, resp, nil -} - -// UpdateConnectedExternalGroup updates the connection between an external group and a team. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/external-groups#update-the-connection-between-an-external-group-and-a-team -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/external-groups -func (s *TeamsService) UpdateConnectedExternalGroup(ctx context.Context, org, slug string, eg *ExternalGroup) (*ExternalGroup, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/external-groups", org, slug) - - req, err := s.client.NewRequest("PATCH", u, eg) - if err != nil { - return nil, nil, err - } - - externalGroup := new(ExternalGroup) - resp, err := s.client.Do(ctx, req, externalGroup) - if err != nil { - return nil, resp, err - } - - return externalGroup, resp, nil -} - -// RemoveConnectedExternalGroup removes the connection between an external group and a team. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/teams/external-groups#remove-the-connection-between-an-external-group-and-a-team -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/external-groups -func (s *TeamsService) RemoveConnectedExternalGroup(ctx context.Context, org, slug string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/external-groups", org, slug) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/teams_discussion_comments.go b/vendor/github.com/google/go-github/v57/github/teams_discussion_comments.go deleted file mode 100644 index ad3818c1..00000000 --- a/vendor/github.com/google/go-github/v57/github/teams_discussion_comments.go +++ /dev/null @@ -1,262 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// DiscussionComment represents a GitHub dicussion in a team. -type DiscussionComment struct { - Author *User `json:"author,omitempty"` - Body *string `json:"body,omitempty"` - BodyHTML *string `json:"body_html,omitempty"` - BodyVersion *string `json:"body_version,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - LastEditedAt *Timestamp `json:"last_edited_at,omitempty"` - DiscussionURL *string `json:"discussion_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Number *int `json:"number,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` -} - -func (c DiscussionComment) String() string { - return Stringify(c) -} - -// DiscussionCommentListOptions specifies optional parameters to the -// TeamServices.ListComments method. -type DiscussionCommentListOptions struct { - // Sorts the discussion comments by the date they were created. - // Accepted values are asc and desc. Default is desc. - Direction string `url:"direction,omitempty"` - ListOptions -} - -// ListCommentsByID lists all comments on a team discussion by team ID. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#list-discussion-comments -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments -func (s *TeamsService) ListCommentsByID(ctx context.Context, orgID, teamID int64, discussionNumber int, options *DiscussionCommentListOptions) ([]*DiscussionComment, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments", orgID, teamID, discussionNumber) - u, err := addOptions(u, options) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var comments []*DiscussionComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// ListCommentsBySlug lists all comments on a team discussion by team slug. -// Authenticated user must grant read:discussion scope. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#list-discussion-comments -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments -func (s *TeamsService) ListCommentsBySlug(ctx context.Context, org, slug string, discussionNumber int, options *DiscussionCommentListOptions) ([]*DiscussionComment, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments", org, slug, discussionNumber) - u, err := addOptions(u, options) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var comments []*DiscussionComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// GetCommentByID gets a specific comment on a team discussion by team ID. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#get-a-discussion-comment -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) GetCommentByID(ctx context.Context, orgID, teamID int64, discussionNumber, commentNumber int) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v", orgID, teamID, discussionNumber, commentNumber) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// GetCommentBySlug gets a specific comment on a team discussion by team slug. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#get-a-discussion-comment -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) GetCommentBySlug(ctx context.Context, org, slug string, discussionNumber, commentNumber int) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v", org, slug, discussionNumber, commentNumber) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// CreateCommentByID creates a new comment on a team discussion by team ID. -// Authenticated user must grant write:discussion scope. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#create-a-discussion-comment -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments -func (s *TeamsService) CreateCommentByID(ctx context.Context, orgID, teamID int64, discsusionNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments", orgID, teamID, discsusionNumber) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// CreateCommentBySlug creates a new comment on a team discussion by team slug. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#create-a-discussion-comment -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments -func (s *TeamsService) CreateCommentBySlug(ctx context.Context, org, slug string, discsusionNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments", org, slug, discsusionNumber) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// EditCommentByID edits the body text of a discussion comment by team ID. -// Authenticated user must grant write:discussion scope. -// User is allowed to edit body of a comment only. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#update-a-discussion-comment -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) EditCommentByID(ctx context.Context, orgID, teamID int64, discussionNumber, commentNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v", orgID, teamID, discussionNumber, commentNumber) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// EditCommentBySlug edits the body text of a discussion comment by team slug. -// Authenticated user must grant write:discussion scope. -// User is allowed to edit body of a comment only. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#update-a-discussion-comment -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) EditCommentBySlug(ctx context.Context, org, slug string, discussionNumber, commentNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v", org, slug, discussionNumber, commentNumber) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - - discussionComment := &DiscussionComment{} - resp, err := s.client.Do(ctx, req, discussionComment) - if err != nil { - return nil, resp, err - } - - return discussionComment, resp, nil -} - -// DeleteCommentByID deletes a comment on a team discussion by team ID. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#delete-a-discussion-comment -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) DeleteCommentByID(ctx context.Context, orgID, teamID int64, discussionNumber, commentNumber int) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v", orgID, teamID, discussionNumber, commentNumber) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteCommentBySlug deletes a comment on a team discussion by team slug. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussion-comments#delete-a-discussion-comment -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number} -func (s *TeamsService) DeleteCommentBySlug(ctx context.Context, org, slug string, discussionNumber, commentNumber int) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v", org, slug, discussionNumber, commentNumber) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/teams_discussions.go b/vendor/github.com/google/go-github/v57/github/teams_discussions.go deleted file mode 100644 index ee78c032..00000000 --- a/vendor/github.com/google/go-github/v57/github/teams_discussions.go +++ /dev/null @@ -1,267 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// TeamDiscussion represents a GitHub dicussion in a team. 
-type TeamDiscussion struct { - Author *User `json:"author,omitempty"` - Body *string `json:"body,omitempty"` - BodyHTML *string `json:"body_html,omitempty"` - BodyVersion *string `json:"body_version,omitempty"` - CommentsCount *int `json:"comments_count,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - LastEditedAt *Timestamp `json:"last_edited_at,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Number *int `json:"number,omitempty"` - Pinned *bool `json:"pinned,omitempty"` - Private *bool `json:"private,omitempty"` - TeamURL *string `json:"team_url,omitempty"` - Title *string `json:"title,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` -} - -func (d TeamDiscussion) String() string { - return Stringify(d) -} - -// DiscussionListOptions specifies optional parameters to the -// TeamServices.ListDiscussions method. -type DiscussionListOptions struct { - // Sorts the discussion by the date they were created. - // Accepted values are asc and desc. Default is desc. - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListDiscussionsByID lists all discussions on team's page given Organization and Team ID. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#list-discussions -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions -func (s *TeamsService) ListDiscussionsByID(ctx context.Context, orgID, teamID int64, opts *DiscussionListOptions) ([]*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions", orgID, teamID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teamDiscussions []*TeamDiscussion - resp, err := s.client.Do(ctx, req, &teamDiscussions) - if err != nil { - return nil, resp, err - } - - return teamDiscussions, resp, nil -} - -// ListDiscussionsBySlug lists all discussions on team's page given Organization name and Team's slug. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#list-discussions -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions -func (s *TeamsService) ListDiscussionsBySlug(ctx context.Context, org, slug string, opts *DiscussionListOptions) ([]*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions", org, slug) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var teamDiscussions []*TeamDiscussion - resp, err := s.client.Do(ctx, req, &teamDiscussions) - if err != nil { - return nil, resp, err - } - - return teamDiscussions, resp, nil -} - -// GetDiscussionByID gets a specific discussion on a team's page given Organization and Team ID. -// Authenticated user must grant read:discussion scope. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#get-a-discussion -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) GetDiscussionByID(ctx context.Context, orgID, teamID int64, discussionNumber int) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v", orgID, teamID, discussionNumber) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// GetDiscussionBySlug gets a specific discussion on a team's page given Organization name and Team's slug. -// Authenticated user must grant read:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#get-a-discussion -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) GetDiscussionBySlug(ctx context.Context, org, slug string, discussionNumber int) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v", org, slug, discussionNumber) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// CreateDiscussionByID creates a new discussion post on a team's page given Organization and Team ID. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#create-a-discussion -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions -func (s *TeamsService) CreateDiscussionByID(ctx context.Context, orgID, teamID int64, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions", orgID, teamID) - req, err := s.client.NewRequest("POST", u, discussion) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// CreateDiscussionBySlug creates a new discussion post on a team's page given Organization name and Team's slug. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#create-a-discussion -// -//meta:operation POST /orgs/{org}/teams/{team_slug}/discussions -func (s *TeamsService) CreateDiscussionBySlug(ctx context.Context, org, slug string, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions", org, slug) - req, err := s.client.NewRequest("POST", u, discussion) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// EditDiscussionByID edits the title and body text of a discussion post given Organization and Team ID. -// Authenticated user must grant write:discussion scope. -// User is allowed to change Title and Body of a discussion only. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#update-a-discussion -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) EditDiscussionByID(ctx context.Context, orgID, teamID int64, discussionNumber int, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v", orgID, teamID, discussionNumber) - req, err := s.client.NewRequest("PATCH", u, discussion) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// EditDiscussionBySlug edits the title and body text of a discussion post given Organization name and Team's slug. -// Authenticated user must grant write:discussion scope. -// User is allowed to change Title and Body of a discussion only. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#update-a-discussion -// -//meta:operation PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) EditDiscussionBySlug(ctx context.Context, org, slug string, discussionNumber int, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v", org, slug, discussionNumber) - req, err := s.client.NewRequest("PATCH", u, discussion) - if err != nil { - return nil, nil, err - } - - teamDiscussion := &TeamDiscussion{} - resp, err := s.client.Do(ctx, req, teamDiscussion) - if err != nil { - return nil, resp, err - } - - return teamDiscussion, resp, nil -} - -// DeleteDiscussionByID deletes a discussion from team's page given Organization and Team ID. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#delete-a-discussion -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) DeleteDiscussionByID(ctx context.Context, orgID, teamID int64, discussionNumber int) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v", orgID, teamID, discussionNumber) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteDiscussionBySlug deletes a discussion from team's page given Organization name and Team's slug. -// Authenticated user must grant write:discussion scope. -// -// GitHub API docs: https://docs.github.com/rest/teams/discussions#delete-a-discussion -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number} -func (s *TeamsService) DeleteDiscussionBySlug(ctx context.Context, org, slug string, discussionNumber int) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v", org, slug, discussionNumber) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/timestamp.go b/vendor/github.com/google/go-github/v57/github/timestamp.go deleted file mode 100644 index 00c1235e..00000000 --- a/vendor/github.com/google/go-github/v57/github/timestamp.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "strconv" - "time" -) - -// Timestamp represents a time that can be unmarshalled from a JSON string -// formatted as either an RFC3339 or Unix timestamp. This is necessary for some -// fields since the GitHub API is inconsistent in how it represents times. All -// exported methods of time.Time can be called on Timestamp. -type Timestamp struct { - time.Time -} - -func (t Timestamp) String() string { - return t.Time.String() -} - -// GetTime returns std time.Time. -func (t *Timestamp) GetTime() *time.Time { - if t == nil { - return nil - } - return &t.Time -} - -// UnmarshalJSON implements the json.Unmarshaler interface. -// Time is expected in RFC3339 or Unix format. -func (t *Timestamp) UnmarshalJSON(data []byte) (err error) { - str := string(data) - i, err := strconv.ParseInt(str, 10, 64) - if err == nil { - t.Time = time.Unix(i, 0) - if t.Time.Year() > 3000 { - t.Time = time.Unix(0, i*1e6) - } - } else { - t.Time, err = time.Parse(`"`+time.RFC3339+`"`, str) - } - return -} - -// Equal reports whether t and u are equal based on time.Equal -func (t Timestamp) Equal(u Timestamp) bool { - return t.Time.Equal(u.Time) -} diff --git a/vendor/github.com/google/go-github/v57/github/users.go b/vendor/github.com/google/go-github/v57/github/users.go deleted file mode 100644 index 51b2b219..00000000 --- a/vendor/github.com/google/go-github/v57/github/users.go +++ /dev/null @@ -1,294 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// UsersService handles communication with the user related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/users/ -type UsersService service - -// User represents a GitHub user. 
-type User struct { - Login *string `json:"login,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - AvatarURL *string `json:"avatar_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - GravatarID *string `json:"gravatar_id,omitempty"` - Name *string `json:"name,omitempty"` - Company *string `json:"company,omitempty"` - Blog *string `json:"blog,omitempty"` - Location *string `json:"location,omitempty"` - Email *string `json:"email,omitempty"` - Hireable *bool `json:"hireable,omitempty"` - Bio *string `json:"bio,omitempty"` - TwitterUsername *string `json:"twitter_username,omitempty"` - PublicRepos *int `json:"public_repos,omitempty"` - PublicGists *int `json:"public_gists,omitempty"` - Followers *int `json:"followers,omitempty"` - Following *int `json:"following,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - SuspendedAt *Timestamp `json:"suspended_at,omitempty"` - Type *string `json:"type,omitempty"` - SiteAdmin *bool `json:"site_admin,omitempty"` - TotalPrivateRepos *int64 `json:"total_private_repos,omitempty"` - OwnedPrivateRepos *int64 `json:"owned_private_repos,omitempty"` - PrivateGists *int `json:"private_gists,omitempty"` - DiskUsage *int `json:"disk_usage,omitempty"` - Collaborators *int `json:"collaborators,omitempty"` - TwoFactorAuthentication *bool `json:"two_factor_authentication,omitempty"` - Plan *Plan `json:"plan,omitempty"` - LdapDn *string `json:"ldap_dn,omitempty"` - - // API URLs - URL *string `json:"url,omitempty"` - EventsURL *string `json:"events_url,omitempty"` - FollowingURL *string `json:"following_url,omitempty"` - FollowersURL *string `json:"followers_url,omitempty"` - GistsURL *string `json:"gists_url,omitempty"` - OrganizationsURL *string `json:"organizations_url,omitempty"` - ReceivedEventsURL *string `json:"received_events_url,omitempty"` - ReposURL *string `json:"repos_url,omitempty"` - StarredURL *string `json:"starred_url,omitempty"` - SubscriptionsURL *string `json:"subscriptions_url,omitempty"` - - // TextMatches is only populated from search results that request text matches - // See: search.go and https://docs.github.com/rest/search/#text-match-metadata - TextMatches []*TextMatch `json:"text_matches,omitempty"` - - // Permissions and RoleName identify the permissions and role that a user has on a given - // repository. These are only populated when calling Repositories.ListCollaborators. - Permissions map[string]bool `json:"permissions,omitempty"` - RoleName *string `json:"role_name,omitempty"` -} - -func (u User) String() string { - return Stringify(u) -} - -// Get fetches a user. Passing the empty string will fetch the authenticated -// user. -// -// GitHub API docs: https://docs.github.com/rest/users/users#get-a-user -// GitHub API docs: https://docs.github.com/rest/users/users#get-the-authenticated-user -// -//meta:operation GET /user -//meta:operation GET /users/{username} -func (s *UsersService) Get(ctx context.Context, user string) (*User, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v", user) - } else { - u = "user" - } - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - uResp := new(User) - resp, err := s.client.Do(ctx, req, uResp) - if err != nil { - return nil, resp, err - } - - return uResp, resp, nil -} - -// GetByID fetches a user. -// -// Note: GetByID uses the undocumented GitHub API endpoint "GET /user/{user_id}". 
-// -//meta:operation GET /user/{user_id} -func (s *UsersService) GetByID(ctx context.Context, id int64) (*User, *Response, error) { - u := fmt.Sprintf("user/%d", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - user := new(User) - resp, err := s.client.Do(ctx, req, user) - if err != nil { - return nil, resp, err - } - - return user, resp, nil -} - -// Edit the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/users#update-the-authenticated-user -// -//meta:operation PATCH /user -func (s *UsersService) Edit(ctx context.Context, user *User) (*User, *Response, error) { - u := "user" - req, err := s.client.NewRequest("PATCH", u, user) - if err != nil { - return nil, nil, err - } - - uResp := new(User) - resp, err := s.client.Do(ctx, req, uResp) - if err != nil { - return nil, resp, err - } - - return uResp, resp, nil -} - -// HovercardOptions specifies optional parameters to the UsersService.GetHovercard -// method. -type HovercardOptions struct { - // SubjectType specifies the additional information to be received about the hovercard. - // Possible values are: organization, repository, issue, pull_request. (Required when using subject_id.) - SubjectType string `url:"subject_type"` - - // SubjectID specifies the ID for the SubjectType. (Required when using subject_type.) - SubjectID string `url:"subject_id"` -} - -// Hovercard represents hovercard information about a user. -type Hovercard struct { - Contexts []*UserContext `json:"contexts,omitempty"` -} - -// UserContext represents the contextual information about user. -type UserContext struct { - Message *string `json:"message,omitempty"` - Octicon *string `json:"octicon,omitempty"` -} - -// GetHovercard fetches contextual information about user. It requires authentication -// via Basic Auth or via OAuth with the repo scope. -// -// GitHub API docs: https://docs.github.com/rest/users/users#get-contextual-information-for-a-user -// -//meta:operation GET /users/{username}/hovercard -func (s *UsersService) GetHovercard(ctx context.Context, user string, opts *HovercardOptions) (*Hovercard, *Response, error) { - u := fmt.Sprintf("users/%v/hovercard", user) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - hc := new(Hovercard) - resp, err := s.client.Do(ctx, req, hc) - if err != nil { - return nil, resp, err - } - - return hc, resp, nil -} - -// UserListOptions specifies optional parameters to the UsersService.ListAll -// method. -type UserListOptions struct { - // ID of the last user seen - Since int64 `url:"since,omitempty"` - - // Note: Pagination is powered exclusively by the Since parameter, - // ListOptions.Page has no effect. - // ListOptions.PerPage controls an undocumented GitHub API parameter. - ListOptions -} - -// ListAll lists all GitHub users. -// -// To paginate through all users, populate 'Since' with the ID of the last user. 
-// -// GitHub API docs: https://docs.github.com/rest/users/users#list-users -// -//meta:operation GET /users -func (s *UsersService) ListAll(ctx context.Context, opts *UserListOptions) ([]*User, *Response, error) { - u, err := addOptions("users", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// ListInvitations lists all currently-open repository invitations for the -// authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#list-repository-invitations-for-the-authenticated-user -// -//meta:operation GET /user/repository_invitations -func (s *UsersService) ListInvitations(ctx context.Context, opts *ListOptions) ([]*RepositoryInvitation, *Response, error) { - u, err := addOptions("user/repository_invitations", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - invites := []*RepositoryInvitation{} - resp, err := s.client.Do(ctx, req, &invites) - if err != nil { - return nil, resp, err - } - - return invites, resp, nil -} - -// AcceptInvitation accepts the currently-open repository invitation for the -// authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#accept-a-repository-invitation -// -//meta:operation PATCH /user/repository_invitations/{invitation_id} -func (s *UsersService) AcceptInvitation(ctx context.Context, invitationID int64) (*Response, error) { - u := fmt.Sprintf("user/repository_invitations/%v", invitationID) - req, err := s.client.NewRequest("PATCH", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeclineInvitation declines the currently-open repository invitation for the -// authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/collaborators/invitations#decline-a-repository-invitation -// -//meta:operation DELETE /user/repository_invitations/{invitation_id} -func (s *UsersService) DeclineInvitation(ctx context.Context, invitationID int64) (*Response, error) { - u := fmt.Sprintf("user/repository_invitations/%v", invitationID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/users_administration.go b/vendor/github.com/google/go-github/v57/github/users_administration.go deleted file mode 100644 index 02cb894b..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_administration.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// PromoteSiteAdmin promotes a user to a site administrator of a GitHub Enterprise instance. 
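// Illustrative sketch only, not part of this patch: how a caller would use the
// vendored go-github UsersService whose source is removed above. The import path
// mirrors the vendored v57 module; the token environment variable and the login
// "octocat" are hypothetical example values, not GARM code.
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v57/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	// Passing the empty string fetches the authenticated user (GET /user).
	me, _, err := client.Users.Get(ctx, "")
	if err != nil {
		log.Fatalf("fetching authenticated user: %v", err)
	}
	fmt.Printf("authenticated as %s (ID %d)\n", me.GetLogin(), me.GetID())

	// A non-empty login fetches that user instead (GET /users/{username}).
	user, _, err := client.Users.Get(ctx, "octocat")
	if err != nil {
		log.Fatalf("fetching user: %v", err)
	}
	fmt.Println(user.GetName())
}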
-// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#promote-a-user-to-be-a-site-administrator -// -//meta:operation PUT /users/{username}/site_admin -func (s *UsersService) PromoteSiteAdmin(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("users/%v/site_admin", user) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DemoteSiteAdmin demotes a user from site administrator of a GitHub Enterprise instance. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#demote-a-site-administrator -// -//meta:operation DELETE /users/{username}/site_admin -func (s *UsersService) DemoteSiteAdmin(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("users/%v/site_admin", user) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// UserSuspendOptions represents the reason a user is being suspended. -type UserSuspendOptions struct { - Reason *string `json:"reason,omitempty"` -} - -// Suspend a user on a GitHub Enterprise instance. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#suspend-a-user -// -//meta:operation PUT /users/{username}/suspended -func (s *UsersService) Suspend(ctx context.Context, user string, opts *UserSuspendOptions) (*Response, error) { - u := fmt.Sprintf("users/%v/suspended", user) - - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unsuspend a user on a GitHub Enterprise instance. -// -// GitHub API docs: https://docs.github.com/enterprise-server@3.10/rest/enterprise-admin/users#unsuspend-a-user -// -//meta:operation DELETE /users/{username}/suspended -func (s *UsersService) Unsuspend(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("users/%v/suspended", user) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/users_emails.go b/vendor/github.com/google/go-github/v57/github/users_emails.go deleted file mode 100644 index 8386de25..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_emails.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import "context" - -// UserEmail represents user's email address -type UserEmail struct { - Email *string `json:"email,omitempty"` - Primary *bool `json:"primary,omitempty"` - Verified *bool `json:"verified,omitempty"` - Visibility *string `json:"visibility,omitempty"` -} - -// ListEmails lists all email addresses for the authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/users/emails#list-email-addresses-for-the-authenticated-user -// -//meta:operation GET /user/emails -func (s *UsersService) ListEmails(ctx context.Context, opts *ListOptions) ([]*UserEmail, *Response, error) { - u := "user/emails" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var emails []*UserEmail - resp, err := s.client.Do(ctx, req, &emails) - if err != nil { - return nil, resp, err - } - - return emails, resp, nil -} - -// AddEmails adds email addresses of the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/emails#add-an-email-address-for-the-authenticated-user -// -//meta:operation POST /user/emails -func (s *UsersService) AddEmails(ctx context.Context, emails []string) ([]*UserEmail, *Response, error) { - u := "user/emails" - req, err := s.client.NewRequest("POST", u, emails) - if err != nil { - return nil, nil, err - } - - var e []*UserEmail - resp, err := s.client.Do(ctx, req, &e) - if err != nil { - return nil, resp, err - } - - return e, resp, nil -} - -// DeleteEmails deletes email addresses from authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/emails#delete-an-email-address-for-the-authenticated-user -// -//meta:operation DELETE /user/emails -func (s *UsersService) DeleteEmails(ctx context.Context, emails []string) (*Response, error) { - u := "user/emails" - req, err := s.client.NewRequest("DELETE", u, emails) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// SetEmailVisibility sets the visibility for the primary email address of the authenticated user. -// `visibility` can be "private" or "public". -// -// GitHub API docs: https://docs.github.com/rest/users/emails#set-primary-email-visibility-for-the-authenticated-user -// -//meta:operation PATCH /user/email/visibility -func (s *UsersService) SetEmailVisibility(ctx context.Context, visibility string) ([]*UserEmail, *Response, error) { - u := "user/email/visibility" - - updateVisiblilityReq := &UserEmail{ - Visibility: &visibility, - } - - req, err := s.client.NewRequest("PATCH", u, updateVisiblilityReq) - if err != nil { - return nil, nil, err - } - - var e []*UserEmail - resp, err := s.client.Do(ctx, req, &e) - if err != nil { - return nil, resp, err - } - - return e, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/users_gpg_keys.go b/vendor/github.com/google/go-github/v57/github/users_gpg_keys.go deleted file mode 100644 index de7caaf1..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_gpg_keys.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GPGKey represents a GitHub user's public GPG key used to verify GPG signed commits and tags. 
-// -// https://developer.github.com/changes/2016-04-04-git-signing-api-preview/ -type GPGKey struct { - ID *int64 `json:"id,omitempty"` - PrimaryKeyID *int64 `json:"primary_key_id,omitempty"` - KeyID *string `json:"key_id,omitempty"` - RawKey *string `json:"raw_key,omitempty"` - PublicKey *string `json:"public_key,omitempty"` - Emails []*GPGEmail `json:"emails,omitempty"` - Subkeys []*GPGKey `json:"subkeys,omitempty"` - CanSign *bool `json:"can_sign,omitempty"` - CanEncryptComms *bool `json:"can_encrypt_comms,omitempty"` - CanEncryptStorage *bool `json:"can_encrypt_storage,omitempty"` - CanCertify *bool `json:"can_certify,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - ExpiresAt *Timestamp `json:"expires_at,omitempty"` -} - -// String stringifies a GPGKey. -func (k GPGKey) String() string { - return Stringify(k) -} - -// GPGEmail represents an email address associated to a GPG key. -type GPGEmail struct { - Email *string `json:"email,omitempty"` - Verified *bool `json:"verified,omitempty"` -} - -// ListGPGKeys lists the public GPG keys for a user. Passing the empty -// string will fetch keys for the authenticated user. It requires authentication -// via Basic Auth or via OAuth with at least read:gpg_key scope. -// -// GitHub API docs: https://docs.github.com/rest/users/gpg-keys#list-gpg-keys-for-a-user -// GitHub API docs: https://docs.github.com/rest/users/gpg-keys#list-gpg-keys-for-the-authenticated-user -// -//meta:operation GET /user/gpg_keys -//meta:operation GET /users/{username}/gpg_keys -func (s *UsersService) ListGPGKeys(ctx context.Context, user string, opts *ListOptions) ([]*GPGKey, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/gpg_keys", user) - } else { - u = "user/gpg_keys" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var keys []*GPGKey - resp, err := s.client.Do(ctx, req, &keys) - if err != nil { - return nil, resp, err - } - - return keys, resp, nil -} - -// GetGPGKey gets extended details for a single GPG key. It requires authentication -// via Basic Auth or via OAuth with at least read:gpg_key scope. -// -// GitHub API docs: https://docs.github.com/rest/users/gpg-keys#get-a-gpg-key-for-the-authenticated-user -// -//meta:operation GET /user/gpg_keys/{gpg_key_id} -func (s *UsersService) GetGPGKey(ctx context.Context, id int64) (*GPGKey, *Response, error) { - u := fmt.Sprintf("user/gpg_keys/%v", id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - key := &GPGKey{} - resp, err := s.client.Do(ctx, req, key) - if err != nil { - return nil, resp, err - } - - return key, resp, nil -} - -// CreateGPGKey creates a GPG key. It requires authenticatation via Basic Auth -// or OAuth with at least write:gpg_key scope. 
-// -// GitHub API docs: https://docs.github.com/rest/users/gpg-keys#create-a-gpg-key-for-the-authenticated-user -// -//meta:operation POST /user/gpg_keys -func (s *UsersService) CreateGPGKey(ctx context.Context, armoredPublicKey string) (*GPGKey, *Response, error) { - gpgKey := &struct { - ArmoredPublicKey string `json:"armored_public_key"` - }{ArmoredPublicKey: armoredPublicKey} - req, err := s.client.NewRequest("POST", "user/gpg_keys", gpgKey) - if err != nil { - return nil, nil, err - } - - key := &GPGKey{} - resp, err := s.client.Do(ctx, req, key) - if err != nil { - return nil, resp, err - } - - return key, resp, nil -} - -// DeleteGPGKey deletes a GPG key. It requires authentication via Basic Auth or -// via OAuth with at least admin:gpg_key scope. -// -// GitHub API docs: https://docs.github.com/rest/users/gpg-keys#delete-a-gpg-key-for-the-authenticated-user -// -//meta:operation DELETE /user/gpg_keys/{gpg_key_id} -func (s *UsersService) DeleteGPGKey(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("user/gpg_keys/%v", id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/users_packages.go b/vendor/github.com/google/go-github/v57/github/users_packages.go deleted file mode 100644 index 3ccf68a1..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_packages.go +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListPackages lists the packages for a user. Passing the empty string for "user" will -// list packages for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-packages-for-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-packages-for-the-authenticated-users-namespace -// -//meta:operation GET /user/packages -//meta:operation GET /users/{username}/packages -func (s *UsersService) ListPackages(ctx context.Context, user string, opts *PackageListOptions) ([]*Package, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages", user) - } else { - u = "user/packages" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var packages []*Package - resp, err := s.client.Do(ctx, req, &packages) - if err != nil { - return nil, resp, err - } - - return packages, resp, nil -} - -// GetPackage gets a package by name for a user. Passing the empty string for "user" will -// get the package for the authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-for-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-for-the-authenticated-user -// -//meta:operation GET /user/packages/{package_type}/{package_name} -//meta:operation GET /users/{username}/packages/{package_type}/{package_name} -func (s *UsersService) GetPackage(ctx context.Context, user, packageType, packageName string) (*Package, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v", user, packageType, packageName) - } else { - u = fmt.Sprintf("user/packages/%v/%v", packageType, packageName) - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pack *Package - resp, err := s.client.Do(ctx, req, &pack) - if err != nil { - return nil, resp, err - } - - return pack, resp, nil -} - -// DeletePackage deletes a package from a user. Passing the empty string for "user" will -// delete the package for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-a-package-for-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-a-package-for-the-authenticated-user -// -//meta:operation DELETE /user/packages/{package_type}/{package_name} -//meta:operation DELETE /users/{username}/packages/{package_type}/{package_name} -func (s *UsersService) DeletePackage(ctx context.Context, user, packageType, packageName string) (*Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v", user, packageType, packageName) - } else { - u = fmt.Sprintf("user/packages/%v/%v", packageType, packageName) - } - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RestorePackage restores a package to a user. Passing the empty string for "user" will -// restore the package for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-a-package-for-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-a-package-for-the-authenticated-user -// -//meta:operation POST /user/packages/{package_type}/{package_name}/restore -//meta:operation POST /users/{username}/packages/{package_type}/{package_name}/restore -func (s *UsersService) RestorePackage(ctx context.Context, user, packageType, packageName string) (*Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v/restore", user, packageType, packageName) - } else { - u = fmt.Sprintf("user/packages/%v/%v/restore", packageType, packageName) - } - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// PackageGetAllVersions gets all versions of a package for a user. Passing the empty string for "user" will -// get versions for the authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-package-versions-for-a-package-owned-by-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#list-package-versions-for-a-package-owned-by-the-authenticated-user -// -//meta:operation GET /user/packages/{package_type}/{package_name}/versions -//meta:operation GET /users/{username}/packages/{package_type}/{package_name}/versions -func (s *UsersService) PackageGetAllVersions(ctx context.Context, user, packageType, packageName string, opts *PackageListOptions) ([]*PackageVersion, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v/versions", user, packageType, packageName) - } else { - u = fmt.Sprintf("user/packages/%v/%v/versions", packageType, packageName) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var versions []*PackageVersion - resp, err := s.client.Do(ctx, req, &versions) - if err != nil { - return nil, resp, err - } - - return versions, resp, nil -} - -// PackageGetVersion gets a specific version of a package for a user. Passing the empty string for "user" will -// get the version for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-version-for-a-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#get-a-package-version-for-the-authenticated-user -// -//meta:operation GET /user/packages/{package_type}/{package_name}/versions/{package_version_id} -//meta:operation GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id} -func (s *UsersService) PackageGetVersion(ctx context.Context, user, packageType, packageName string, packageVersionID int64) (*PackageVersion, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v/versions/%v", user, packageType, packageName, packageVersionID) - } else { - u = fmt.Sprintf("user/packages/%v/%v/versions/%v", packageType, packageName, packageVersionID) - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var version *PackageVersion - resp, err := s.client.Do(ctx, req, &version) - if err != nil { - return nil, resp, err - } - - return version, resp, nil -} - -// PackageDeleteVersion deletes a package version for a user. Passing the empty string for "user" will -// delete the version for the authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-a-package-version-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#delete-package-version-for-a-user -// -//meta:operation DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id} -//meta:operation DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id} -func (s *UsersService) PackageDeleteVersion(ctx context.Context, user, packageType, packageName string, packageVersionID int64) (*Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v/versions/%v", user, packageType, packageName, packageVersionID) - } else { - u = fmt.Sprintf("user/packages/%v/%v/versions/%v", packageType, packageName, packageVersionID) - } - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// PackageRestoreVersion restores a package version to a user. Passing the empty string for "user" will -// restore the version for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-a-package-version-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/packages/packages#restore-package-version-for-a-user -// -//meta:operation POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore -//meta:operation POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore -func (s *UsersService) PackageRestoreVersion(ctx context.Context, user, packageType, packageName string, packageVersionID int64) (*Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/packages/%v/%v/versions/%v/restore", user, packageType, packageName, packageVersionID) - } else { - u = fmt.Sprintf("user/packages/%v/%v/versions/%v/restore", packageType, packageName, packageVersionID) - } - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v57/github/users_projects.go b/vendor/github.com/google/go-github/v57/github/users_projects.go deleted file mode 100644 index 0ab57e5c..00000000 --- a/vendor/github.com/google/go-github/v57/github/users_projects.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2019 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListProjects lists the projects for the specified user. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#list-user-projects -// -//meta:operation GET /users/{username}/projects -func (s *UsersService) ListProjects(ctx context.Context, user string, opts *ProjectListOptions) ([]*Project, *Response, error) { - u := fmt.Sprintf("users/%v/projects", user) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeProjectsPreview) - - var projects []*Project - resp, err := s.client.Do(ctx, req, &projects) - if err != nil { - return nil, resp, err - } - - return projects, resp, nil -} - -// CreateUserProjectOptions specifies the parameters to the UsersService.CreateProject method. -type CreateUserProjectOptions struct { - // The name of the project. (Required.) - Name string `json:"name"` - // The description of the project. (Optional.) - Body *string `json:"body,omitempty"` -} - -// CreateProject creates a GitHub Project for the current user. -// -// GitHub API docs: https://docs.github.com/rest/projects/projects#create-a-user-project -// -//meta:operation POST /user/projects -func (s *UsersService) CreateProject(ctx context.Context, opts *CreateUserProjectOptions) (*Project, *Response, error) { - u := "user/projects" - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeProjectsPreview) - - project := &Project{} - resp, err := s.client.Do(ctx, req, project) - if err != nil { - return nil, resp, err - } - - return project, resp, nil -} diff --git a/vendor/github.com/google/go-github/v57/github/with_appengine.go b/vendor/github.com/google/go-github/v57/github/with_appengine.go deleted file mode 100644 index 9053ce10..00000000 --- a/vendor/github.com/google/go-github/v57/github/with_appengine.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build appengine -// +build appengine - -// This file provides glue for making github work on App Engine. - -package github - -import ( - "context" - "net/http" -) - -func withContext(ctx context.Context, req *http.Request) *http.Request { - // No-op because App Engine adds context to a request differently. - return req -} diff --git a/vendor/github.com/google/go-github/v57/github/without_appengine.go b/vendor/github.com/google/go-github/v57/github/without_appengine.go deleted file mode 100644 index 0024ae41..00000000 --- a/vendor/github.com/google/go-github/v57/github/without_appengine.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !appengine -// +build !appengine - -// This file provides glue for making github work without App Engine. - -package github - -import ( - "context" - "net/http" -) - -func withContext(ctx context.Context, req *http.Request) *http.Request { - return req.WithContext(ctx) -} diff --git a/vendor/github.com/google/go-github/v71/LICENSE b/vendor/github.com/google/go-github/v71/LICENSE deleted file mode 100644 index 28b6486f..00000000 --- a/vendor/github.com/google/go-github/v71/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2013 The go-github AUTHORS. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. 
- * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/google/go-github/v71/github/actions.go b/vendor/github.com/google/go-github/v71/github/actions.go deleted file mode 100644 index 4b88a1e1..00000000 --- a/vendor/github.com/google/go-github/v71/github/actions.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// ActionsService handles communication with the actions related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/actions/ -type ActionsService service diff --git a/vendor/github.com/google/go-github/v71/github/actions_oidc.go b/vendor/github.com/google/go-github/v71/github/actions_oidc.go deleted file mode 100644 index 596aa9d9..00000000 --- a/vendor/github.com/google/go-github/v71/github/actions_oidc.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// OIDCSubjectClaimCustomTemplate represents an OIDC subject claim customization template. -type OIDCSubjectClaimCustomTemplate struct { - UseDefault *bool `json:"use_default,omitempty"` - IncludeClaimKeys []string `json:"include_claim_keys,omitempty"` -} - -// GetOrgOIDCSubjectClaimCustomTemplate gets the subject claim customization template for an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/oidc#get-the-customization-template-for-an-oidc-subject-claim-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/oidc/customization/sub -func (s *ActionsService) GetOrgOIDCSubjectClaimCustomTemplate(ctx context.Context, org string) (*OIDCSubjectClaimCustomTemplate, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/oidc/customization/sub", org) - return s.getOIDCSubjectClaimCustomTemplate(ctx, u) -} - -// GetRepoOIDCSubjectClaimCustomTemplate gets the subject claim customization template for a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/oidc#get-the-customization-template-for-an-oidc-subject-claim-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/oidc/customization/sub -func (s *ActionsService) GetRepoOIDCSubjectClaimCustomTemplate(ctx context.Context, owner, repo string) (*OIDCSubjectClaimCustomTemplate, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/oidc/customization/sub", owner, repo) - return s.getOIDCSubjectClaimCustomTemplate(ctx, u) -} - -func (s *ActionsService) getOIDCSubjectClaimCustomTemplate(ctx context.Context, url string) (*OIDCSubjectClaimCustomTemplate, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - tmpl := new(OIDCSubjectClaimCustomTemplate) - resp, err := s.client.Do(ctx, req, tmpl) - if err != nil { - return nil, resp, err - } - - return tmpl, resp, nil -} - -// SetOrgOIDCSubjectClaimCustomTemplate sets the subject claim customization for an organization. -// -// GitHub API docs: https://docs.github.com/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/oidc/customization/sub -func (s *ActionsService) SetOrgOIDCSubjectClaimCustomTemplate(ctx context.Context, org string, template *OIDCSubjectClaimCustomTemplate) (*Response, error) { - u := fmt.Sprintf("orgs/%v/actions/oidc/customization/sub", org) - return s.setOIDCSubjectClaimCustomTemplate(ctx, u, template) -} - -// SetRepoOIDCSubjectClaimCustomTemplate sets the subject claim customization for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/actions/oidc/customization/sub -func (s *ActionsService) SetRepoOIDCSubjectClaimCustomTemplate(ctx context.Context, owner, repo string, template *OIDCSubjectClaimCustomTemplate) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/oidc/customization/sub", owner, repo) - return s.setOIDCSubjectClaimCustomTemplate(ctx, u, template) -} - -func (s *ActionsService) setOIDCSubjectClaimCustomTemplate(ctx context.Context, url string, template *OIDCSubjectClaimCustomTemplate) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, template) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/actions_required_workflows.go b/vendor/github.com/google/go-github/v71/github/actions_required_workflows.go deleted file mode 100644 index b89741a8..00000000 --- a/vendor/github.com/google/go-github/v71/github/actions_required_workflows.go +++ /dev/null @@ -1,267 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// OrgRequiredWorkflow represents a required workflow object at the org level. 
-type OrgRequiredWorkflow struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - Scope *string `json:"scope,omitempty"` - Ref *string `json:"ref,omitempty"` - State *string `json:"state,omitempty"` - SelectedRepositoriesURL *string `json:"selected_repositories_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Repository *Repository `json:"repository,omitempty"` -} - -// OrgRequiredWorkflows represents the required workflows for the org. -type OrgRequiredWorkflows struct { - TotalCount *int `json:"total_count,omitempty"` - RequiredWorkflows []*OrgRequiredWorkflow `json:"required_workflows,omitempty"` -} - -// CreateUpdateRequiredWorkflowOptions represents the input object used to create or update required workflows. -type CreateUpdateRequiredWorkflowOptions struct { - WorkflowFilePath *string `json:"workflow_file_path,omitempty"` - RepositoryID *int64 `json:"repository_id,omitempty"` - Scope *string `json:"scope,omitempty"` - SelectedRepositoryIDs *SelectedRepoIDs `json:"selected_repository_ids,omitempty"` -} - -// RequiredWorkflowSelectedRepos represents the repos that a required workflow is applied to. -type RequiredWorkflowSelectedRepos struct { - TotalCount *int `json:"total_count,omitempty"` - Repositories []*Repository `json:"repositories,omitempty"` -} - -// RepoRequiredWorkflow represents a required workflow object at the repo level. -type RepoRequiredWorkflow struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - State *string `json:"state,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - BadgeURL *string `json:"badge_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - SourceRepository *Repository `json:"source_repository,omitempty"` -} - -// RepoRequiredWorkflows represents the required workflows for a repo. -type RepoRequiredWorkflows struct { - TotalCount *int `json:"total_count,omitempty"` - RequiredWorkflows []*RepoRequiredWorkflow `json:"required_workflows,omitempty"` -} - -// ListOrgRequiredWorkflows lists the RequiredWorkflows for an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows -func (s *ActionsService) ListOrgRequiredWorkflows(ctx context.Context, org string, opts *ListOptions) (*OrgRequiredWorkflows, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows", org) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflows := new(OrgRequiredWorkflows) - resp, err := s.client.Do(ctx, req, &requiredWorkflows) - if err != nil { - return nil, resp, err - } - - return requiredWorkflows, resp, nil -} - -// CreateRequiredWorkflow creates the required workflow in an org. 
-// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation POST /orgs/{org}/actions/required_workflows -func (s *ActionsService) CreateRequiredWorkflow(ctx context.Context, org string, createRequiredWorkflowOptions *CreateUpdateRequiredWorkflowOptions) (*OrgRequiredWorkflow, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows", org) - req, err := s.client.NewRequest("POST", url, createRequiredWorkflowOptions) - if err != nil { - return nil, nil, err - } - - orgRequiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, orgRequiredWorkflow) - if err != nil { - return nil, resp, err - } - - return orgRequiredWorkflow, resp, nil -} - -// GetRequiredWorkflowByID get the RequiredWorkflows for an org by its ID. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) GetRequiredWorkflowByID(ctx context.Context, owner string, requiredWorkflowID int64) (*OrgRequiredWorkflow, *Response, error) { - u := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", owner, requiredWorkflowID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, &requiredWorkflow) - if err != nil { - return nil, resp, err - } - - return requiredWorkflow, resp, nil -} - -// UpdateRequiredWorkflow updates a required workflow in an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PATCH /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) UpdateRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID int64, updateRequiredWorkflowOptions *CreateUpdateRequiredWorkflowOptions) (*OrgRequiredWorkflow, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", org, requiredWorkflowID) - req, err := s.client.NewRequest("PATCH", url, updateRequiredWorkflowOptions) - if err != nil { - return nil, nil, err - } - - orgRequiredWorkflow := new(OrgRequiredWorkflow) - resp, err := s.client.Do(ctx, req, orgRequiredWorkflow) - if err != nil { - return nil, resp, err - } - - return orgRequiredWorkflow, resp, nil -} - -// DeleteRequiredWorkflow deletes a required workflow in an org. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation DELETE /orgs/{org}/actions/required_workflows/{workflow_id} -func (s *ActionsService) DeleteRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", org, requiredWorkflowID) - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListRequiredWorkflowSelectedRepos lists the Repositories selected for a workflow. 
-// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /orgs/{org}/actions/required_workflows/{workflow_id}/repositories -func (s *ActionsService) ListRequiredWorkflowSelectedRepos(ctx context.Context, org string, requiredWorkflowID int64, opts *ListOptions) (*RequiredWorkflowSelectedRepos, *Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories", org, requiredWorkflowID) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflowRepos := new(RequiredWorkflowSelectedRepos) - resp, err := s.client.Do(ctx, req, &requiredWorkflowRepos) - if err != nil { - return nil, resp, err - } - - return requiredWorkflowRepos, resp, nil -} - -// SetRequiredWorkflowSelectedRepos sets the Repositories selected for a workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PUT /orgs/{org}/actions/required_workflows/{workflow_id}/repositories -func (s *ActionsService) SetRequiredWorkflowSelectedRepos(ctx context.Context, org string, requiredWorkflowID int64, ids SelectedRepoIDs) (*Response, error) { - type repoIDs struct { - SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` - } - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories", org, requiredWorkflowID) - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddRepoToRequiredWorkflow adds the Repository to a required workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation PUT /orgs/{org}/actions/required_workflows/{workflow_id}/repositories/{repository_id} -func (s *ActionsService) AddRepoToRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID, repoID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories/%v", org, requiredWorkflowID, repoID) - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// RemoveRepoFromRequiredWorkflow removes the Repository from a required workflow. -// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation DELETE /orgs/{org}/actions/required_workflows/{workflow_id}/repositories/{repository_id} -func (s *ActionsService) RemoveRepoFromRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID, repoID int64) (*Response, error) { - url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories/%v", org, requiredWorkflowID, repoID) - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} - -// ListRepoRequiredWorkflows lists the RequiredWorkflows for a repo. 
-// -// GitHub API docs: https://docs.github.com/actions/using-workflows/required-workflows -// -//meta:operation GET /repos/{owner}/{repo}/actions/required_workflows -func (s *ActionsService) ListRepoRequiredWorkflows(ctx context.Context, owner, repo string, opts *ListOptions) (*RepoRequiredWorkflows, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/actions/required_workflows", owner, repo) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - requiredWorkflows := new(RepoRequiredWorkflows) - resp, err := s.client.Do(ctx, req, &requiredWorkflows) - if err != nil { - return nil, resp, err - } - - return requiredWorkflows, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/actions_workflows.go b/vendor/github.com/google/go-github/v71/github/actions_workflows.go deleted file mode 100644 index 0214e6ab..00000000 --- a/vendor/github.com/google/go-github/v71/github/actions_workflows.go +++ /dev/null @@ -1,237 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Workflow represents a repository action workflow. -type Workflow struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - State *string `json:"state,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - BadgeURL *string `json:"badge_url,omitempty"` -} - -// Workflows represents a slice of repository action workflows. -type Workflows struct { - TotalCount *int `json:"total_count,omitempty"` - Workflows []*Workflow `json:"workflows,omitempty"` -} - -// WorkflowUsage represents a usage of a specific workflow. -type WorkflowUsage struct { - Billable *WorkflowBillMap `json:"billable,omitempty"` -} - -// WorkflowBillMap represents different runner environments available for a workflow. -// Its key is the name of its environment, e.g. "UBUNTU", "MACOS", "WINDOWS", etc. -type WorkflowBillMap map[string]*WorkflowBill - -// WorkflowBill specifies billable time for a specific environment in a workflow. -type WorkflowBill struct { - TotalMS *int64 `json:"total_ms,omitempty"` -} - -// CreateWorkflowDispatchEventRequest represents a request to create a workflow dispatch event. -type CreateWorkflowDispatchEventRequest struct { - // Ref represents the reference of the workflow run. - // The reference can be a branch or a tag. - // Ref is required when creating a workflow dispatch event. - Ref string `json:"ref"` - // Inputs represents input keys and values configured in the workflow file. - // The maximum number of properties is 10. - // Default: Any default properties configured in the workflow file will be used when `inputs` are omitted. - Inputs map[string]interface{} `json:"inputs,omitempty"` -} - -// ListWorkflows lists all workflows in a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#list-repository-workflows -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows -func (s *ActionsService) ListWorkflows(ctx context.Context, owner, repo string, opts *ListOptions) (*Workflows, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/actions/workflows", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - workflows := new(Workflows) - resp, err := s.client.Do(ctx, req, &workflows) - if err != nil { - return nil, resp, err - } - - return workflows, resp, nil -} - -// GetWorkflowByID gets a specific workflow by ID. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#get-a-workflow -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id} -func (s *ActionsService) GetWorkflowByID(ctx context.Context, owner, repo string, workflowID int64) (*Workflow, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v", owner, repo, workflowID) - - return s.getWorkflow(ctx, u) -} - -// GetWorkflowByFileName gets a specific workflow by file name. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#get-a-workflow -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id} -func (s *ActionsService) GetWorkflowByFileName(ctx context.Context, owner, repo, workflowFileName string) (*Workflow, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v", owner, repo, workflowFileName) - - return s.getWorkflow(ctx, u) -} - -func (s *ActionsService) getWorkflow(ctx context.Context, url string) (*Workflow, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - workflow := new(Workflow) - resp, err := s.client.Do(ctx, req, workflow) - if err != nil { - return nil, resp, err - } - - return workflow, resp, nil -} - -// GetWorkflowUsageByID gets a specific workflow usage by ID in the unit of billable milliseconds. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#get-workflow-usage -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing -func (s *ActionsService) GetWorkflowUsageByID(ctx context.Context, owner, repo string, workflowID int64) (*WorkflowUsage, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/timing", owner, repo, workflowID) - - return s.getWorkflowUsage(ctx, u) -} - -// GetWorkflowUsageByFileName gets a specific workflow usage by file name in the unit of billable milliseconds. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#get-workflow-usage -// -//meta:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing -func (s *ActionsService) GetWorkflowUsageByFileName(ctx context.Context, owner, repo, workflowFileName string) (*WorkflowUsage, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/timing", owner, repo, workflowFileName) - - return s.getWorkflowUsage(ctx, u) -} - -func (s *ActionsService) getWorkflowUsage(ctx context.Context, url string) (*WorkflowUsage, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - workflowUsage := new(WorkflowUsage) - resp, err := s.client.Do(ctx, req, workflowUsage) - if err != nil { - return nil, resp, err - } - - return workflowUsage, resp, nil -} - -// CreateWorkflowDispatchEventByID manually triggers a GitHub Actions workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#create-a-workflow-dispatch-event -// -//meta:operation POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches -func (s *ActionsService) CreateWorkflowDispatchEventByID(ctx context.Context, owner, repo string, workflowID int64, event CreateWorkflowDispatchEventRequest) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/dispatches", owner, repo, workflowID) - - return s.createWorkflowDispatchEvent(ctx, u, &event) -} - -// CreateWorkflowDispatchEventByFileName manually triggers a GitHub Actions workflow run. -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#create-a-workflow-dispatch-event -// -//meta:operation POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches -func (s *ActionsService) CreateWorkflowDispatchEventByFileName(ctx context.Context, owner, repo, workflowFileName string, event CreateWorkflowDispatchEventRequest) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/dispatches", owner, repo, workflowFileName) - - return s.createWorkflowDispatchEvent(ctx, u, &event) -} - -func (s *ActionsService) createWorkflowDispatchEvent(ctx context.Context, url string, event *CreateWorkflowDispatchEventRequest) (*Response, error) { - req, err := s.client.NewRequest("POST", url, event) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// EnableWorkflowByID enables a workflow and sets the state of the workflow to "active". -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#enable-a-workflow -// -//meta:operation PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable -func (s *ActionsService) EnableWorkflowByID(ctx context.Context, owner, repo string, workflowID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/enable", owner, repo, workflowID) - return s.doNewPutRequest(ctx, u) -} - -// EnableWorkflowByFileName enables a workflow and sets the state of the workflow to "active". -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#enable-a-workflow -// -//meta:operation PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable -func (s *ActionsService) EnableWorkflowByFileName(ctx context.Context, owner, repo, workflowFileName string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/enable", owner, repo, workflowFileName) - return s.doNewPutRequest(ctx, u) -} - -// DisableWorkflowByID disables a workflow and sets the state of the workflow to "disabled_manually". 
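// Illustrative sketch only, not part of this patch: triggering and listing
// workflows with the ActionsService methods removed above. The import path
// mirrors the vendored v71 module; owner, repo, workflow file name, branch and
// inputs are hypothetical example values.
package main

import (
	"context"
	"log"
	"os"

	"github.com/google/go-github/v71/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	// Dispatch .github/workflows/ci.yaml on the main branch
	// (POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches).
	event := github.CreateWorkflowDispatchEventRequest{
		Ref:    "main",
		Inputs: map[string]interface{}{"log_level": "debug"},
	}
	if _, err := client.Actions.CreateWorkflowDispatchEventByFileName(ctx, "my-org", "my-repo", "ci.yaml", event); err != nil {
		log.Fatalf("dispatching workflow: %v", err)
	}

	// List the repository's workflows to confirm the file is registered.
	workflows, _, err := client.Actions.ListWorkflows(ctx, "my-org", "my-repo", &github.ListOptions{PerPage: 30})
	if err != nil {
		log.Fatalf("listing workflows: %v", err)
	}
	for _, wf := range workflows.Workflows {
		log.Printf("workflow %q state=%s", wf.GetName(), wf.GetState())
	}
}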
-// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#disable-a-workflow -// -//meta:operation PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable -func (s *ActionsService) DisableWorkflowByID(ctx context.Context, owner, repo string, workflowID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/disable", owner, repo, workflowID) - return s.doNewPutRequest(ctx, u) -} - -// DisableWorkflowByFileName disables a workflow and sets the state of the workflow to "disabled_manually". -// -// GitHub API docs: https://docs.github.com/rest/actions/workflows#disable-a-workflow -// -//meta:operation PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable -func (s *ActionsService) DisableWorkflowByFileName(ctx context.Context, owner, repo, workflowFileName string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/disable", owner, repo, workflowFileName) - return s.doNewPutRequest(ctx, u) -} - -func (s *ActionsService) doNewPutRequest(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/activity.go b/vendor/github.com/google/go-github/v71/github/activity.go deleted file mode 100644 index edf8cc43..00000000 --- a/vendor/github.com/google/go-github/v71/github/activity.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import "context" - -// ActivityService handles communication with the activity related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/activity/ -type ActivityService service - -// FeedLink represents a link to a related resource. -type FeedLink struct { - HRef *string `json:"href,omitempty"` - Type *string `json:"type,omitempty"` -} - -// Feeds represents timeline resources in Atom format. -type Feeds struct { - TimelineURL *string `json:"timeline_url,omitempty"` - UserURL *string `json:"user_url,omitempty"` - CurrentUserPublicURL *string `json:"current_user_public_url,omitempty"` - CurrentUserURL *string `json:"current_user_url,omitempty"` - CurrentUserActorURL *string `json:"current_user_actor_url,omitempty"` - CurrentUserOrganizationURL *string `json:"current_user_organization_url,omitempty"` - CurrentUserOrganizationURLs []string `json:"current_user_organization_urls,omitempty"` - Links *FeedLinks `json:"_links,omitempty"` -} - -// FeedLinks represents the links in a Feed. -type FeedLinks struct { - Timeline *FeedLink `json:"timeline,omitempty"` - User *FeedLink `json:"user,omitempty"` - CurrentUserPublic *FeedLink `json:"current_user_public,omitempty"` - CurrentUser *FeedLink `json:"current_user,omitempty"` - CurrentUserActor *FeedLink `json:"current_user_actor,omitempty"` - CurrentUserOrganization *FeedLink `json:"current_user_organization,omitempty"` - CurrentUserOrganizations []*FeedLink `json:"current_user_organizations,omitempty"` -} - -// ListFeeds lists all the feeds available to the authenticated user. 
-// -// GitHub provides several timeline resources in Atom format: -// -// Timeline: The GitHub global public timeline -// User: The public timeline for any user, using URI template -// Current user public: The public timeline for the authenticated user -// Current user: The private timeline for the authenticated user -// Current user actor: The private timeline for activity created by the -// authenticated user -// Current user organizations: The private timeline for the organizations -// the authenticated user is a member of. -// -// Note: Private feeds are only returned when authenticating via Basic Auth -// since current feed URIs use the older, non revocable auth tokens. -// -// GitHub API docs: https://docs.github.com/rest/activity/feeds#get-feeds -// -//meta:operation GET /feeds -func (s *ActivityService) ListFeeds(ctx context.Context) (*Feeds, *Response, error) { - req, err := s.client.NewRequest("GET", "feeds", nil) - if err != nil { - return nil, nil, err - } - - f := &Feeds{} - resp, err := s.client.Do(ctx, req, f) - if err != nil { - return nil, resp, err - } - - return f, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/activity_events.go b/vendor/github.com/google/go-github/v71/github/activity_events.go deleted file mode 100644 index b12baa99..00000000 --- a/vendor/github.com/google/go-github/v71/github/activity_events.go +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListEvents drinks from the firehose of all public events across GitHub. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-public-events -// -//meta:operation GET /events -func (s *ActivityService) ListEvents(ctx context.Context, opts *ListOptions) ([]*Event, *Response, error) { - u, err := addOptions("events", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListRepositoryEvents lists events for a repository. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-repository-events -// -//meta:operation GET /repos/{owner}/{repo}/events -func (s *ActivityService) ListRepositoryEvents(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Event, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/events", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListIssueEventsForRepository lists issue events for a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/events#list-issue-events-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/issues/events -func (s *ActivityService) ListIssueEventsForRepository(ctx context.Context, owner, repo string, opts *ListOptions) ([]*IssueEvent, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/events", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*IssueEvent - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListEventsForRepoNetwork lists public events for a network of repositories. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-public-events-for-a-network-of-repositories -// -//meta:operation GET /networks/{owner}/{repo}/events -func (s *ActivityService) ListEventsForRepoNetwork(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Event, *Response, error) { - u := fmt.Sprintf("networks/%v/%v/events", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListEventsForOrganization lists public events for an organization. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-public-organization-events -// -//meta:operation GET /orgs/{org}/events -func (s *ActivityService) ListEventsForOrganization(ctx context.Context, org string, opts *ListOptions) ([]*Event, *Response, error) { - u := fmt.Sprintf("orgs/%v/events", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListEventsPerformedByUser lists the events performed by a user. If publicOnly is -// true, only public events will be returned. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-events-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/activity/events#list-public-events-for-a-user -// -//meta:operation GET /users/{username}/events -//meta:operation GET /users/{username}/events/public -func (s *ActivityService) ListEventsPerformedByUser(ctx context.Context, user string, publicOnly bool, opts *ListOptions) ([]*Event, *Response, error) { - var u string - if publicOnly { - u = fmt.Sprintf("users/%v/events/public", user) - } else { - u = fmt.Sprintf("users/%v/events", user) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListEventsReceivedByUser lists the events received by a user. If publicOnly is -// true, only public events will be returned. 
-// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-events-received-by-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/activity/events#list-public-events-received-by-a-user -// -//meta:operation GET /users/{username}/received_events -//meta:operation GET /users/{username}/received_events/public -func (s *ActivityService) ListEventsReceivedByUser(ctx context.Context, user string, publicOnly bool, opts *ListOptions) ([]*Event, *Response, error) { - var u string - if publicOnly { - u = fmt.Sprintf("users/%v/received_events/public", user) - } else { - u = fmt.Sprintf("users/%v/received_events", user) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} - -// ListUserEventsForOrganization provides the user’s organization dashboard. You -// must be authenticated as the user to view this. -// -// GitHub API docs: https://docs.github.com/rest/activity/events#list-organization-events-for-the-authenticated-user -// -//meta:operation GET /users/{username}/events/orgs/{org} -func (s *ActivityService) ListUserEventsForOrganization(ctx context.Context, org, user string, opts *ListOptions) ([]*Event, *Response, error) { - u := fmt.Sprintf("users/%v/events/orgs/%v", user, org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var events []*Event - resp, err := s.client.Do(ctx, req, &events) - if err != nil { - return nil, resp, err - } - - return events, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/activity_star.go b/vendor/github.com/google/go-github/v71/github/activity_star.go deleted file mode 100644 index cebdacf7..00000000 --- a/vendor/github.com/google/go-github/v71/github/activity_star.go +++ /dev/null @@ -1,152 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strings" -) - -// StarredRepository is returned by ListStarred. -type StarredRepository struct { - StarredAt *Timestamp `json:"starred_at,omitempty"` - Repository *Repository `json:"repo,omitempty"` -} - -// Stargazer represents a user that has starred a repository. -type Stargazer struct { - StarredAt *Timestamp `json:"starred_at,omitempty"` - User *User `json:"user,omitempty"` -} - -// ListStargazers lists people who have starred the specified repo. 
-// -// GitHub API docs: https://docs.github.com/rest/activity/starring#list-stargazers -// -//meta:operation GET /repos/{owner}/{repo}/stargazers -func (s *ActivityService) ListStargazers(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Stargazer, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/stargazers", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeStarringPreview) - - var stargazers []*Stargazer - resp, err := s.client.Do(ctx, req, &stargazers) - if err != nil { - return nil, resp, err - } - - return stargazers, resp, nil -} - -// ActivityListStarredOptions specifies the optional parameters to the -// ActivityService.ListStarred method. -type ActivityListStarredOptions struct { - // How to sort the repository list. Possible values are: created, updated, - // pushed, full_name. Default is "full_name". - Sort string `url:"sort,omitempty"` - - // Direction in which to sort repositories. Possible values are: asc, desc. - // Default is "asc" when sort is "full_name", otherwise default is "desc". - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListStarred lists all the repos starred by a user. Passing the empty string -// will list the starred repositories for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/starring#list-repositories-starred-by-a-user -// GitHub API docs: https://docs.github.com/rest/activity/starring#list-repositories-starred-by-the-authenticated-user -// -//meta:operation GET /user/starred -//meta:operation GET /users/{username}/starred -func (s *ActivityService) ListStarred(ctx context.Context, user string, opts *ActivityListStarredOptions) ([]*StarredRepository, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/starred", user) - } else { - u = "user/starred" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when APIs fully launch - acceptHeaders := []string{mediaTypeStarringPreview, mediaTypeTopicsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var repos []*StarredRepository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// IsStarred checks if a repository is starred by authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/starring#check-if-a-repository-is-starred-by-the-authenticated-user -// -//meta:operation GET /user/starred/{owner}/{repo} -func (s *ActivityService) IsStarred(ctx context.Context, owner, repo string) (bool, *Response, error) { - u := fmt.Sprintf("user/starred/%v/%v", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - starred, err := parseBoolResponse(err) - return starred, resp, err -} - -// Star a repository as the authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/activity/starring#star-a-repository-for-the-authenticated-user -// -//meta:operation PUT /user/starred/{owner}/{repo} -func (s *ActivityService) Star(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("user/starred/%v/%v", owner, repo) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unstar a repository as the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/activity/starring#unstar-a-repository-for-the-authenticated-user -// -//meta:operation DELETE /user/starred/{owner}/{repo} -func (s *ActivityService) Unstar(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("user/starred/%v/%v", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/activity_watching.go b/vendor/github.com/google/go-github/v71/github/activity_watching.go deleted file mode 100644 index 34859005..00000000 --- a/vendor/github.com/google/go-github/v71/github/activity_watching.go +++ /dev/null @@ -1,158 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Subscription identifies a repository or thread subscription. -type Subscription struct { - Subscribed *bool `json:"subscribed,omitempty"` - Ignored *bool `json:"ignored,omitempty"` - Reason *string `json:"reason,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - URL *string `json:"url,omitempty"` - - // only populated for repository subscriptions - RepositoryURL *string `json:"repository_url,omitempty"` - - // only populated for thread subscriptions - ThreadURL *string `json:"thread_url,omitempty"` -} - -// ListWatchers lists watchers of a particular repo. -// -// GitHub API docs: https://docs.github.com/rest/activity/watching#list-watchers -// -//meta:operation GET /repos/{owner}/{repo}/subscribers -func (s *ActivityService) ListWatchers(ctx context.Context, owner, repo string, opts *ListOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/subscribers", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var watchers []*User - resp, err := s.client.Do(ctx, req, &watchers) - if err != nil { - return nil, resp, err - } - - return watchers, resp, nil -} - -// ListWatched lists the repositories the specified user is watching. Passing -// the empty string will fetch watched repos for the authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/activity/watching#list-repositories-watched-by-a-user -// GitHub API docs: https://docs.github.com/rest/activity/watching#list-repositories-watched-by-the-authenticated-user -// -//meta:operation GET /user/subscriptions -//meta:operation GET /users/{username}/subscriptions -func (s *ActivityService) ListWatched(ctx context.Context, user string, opts *ListOptions) ([]*Repository, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/subscriptions", user) - } else { - u = "user/subscriptions" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var watched []*Repository - resp, err := s.client.Do(ctx, req, &watched) - if err != nil { - return nil, resp, err - } - - return watched, resp, nil -} - -// GetRepositorySubscription returns the subscription for the specified -// repository for the authenticated user. If the authenticated user is not -// watching the repository, a nil Subscription is returned. -// -// GitHub API docs: https://docs.github.com/rest/activity/watching#get-a-repository-subscription -// -//meta:operation GET /repos/{owner}/{repo}/subscription -func (s *ActivityService) GetRepositorySubscription(ctx context.Context, owner, repo string) (*Subscription, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/subscription", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - sub := new(Subscription) - resp, err := s.client.Do(ctx, req, sub) - if err != nil { - // if it's just a 404, don't return that as an error - _, err = parseBoolResponse(err) - return nil, resp, err - } - - return sub, resp, nil -} - -// SetRepositorySubscription sets the subscription for the specified repository -// for the authenticated user. -// -// To watch a repository, set subscription.Subscribed to true. -// To ignore notifications made within a repository, set subscription.Ignored to true. -// To stop watching a repository, use DeleteRepositorySubscription. -// -// GitHub API docs: https://docs.github.com/rest/activity/watching#set-a-repository-subscription -// -//meta:operation PUT /repos/{owner}/{repo}/subscription -func (s *ActivityService) SetRepositorySubscription(ctx context.Context, owner, repo string, subscription *Subscription) (*Subscription, *Response, error) { - u := fmt.Sprintf("repos/%s/%s/subscription", owner, repo) - - req, err := s.client.NewRequest("PUT", u, subscription) - if err != nil { - return nil, nil, err - } - - sub := new(Subscription) - resp, err := s.client.Do(ctx, req, sub) - if err != nil { - return nil, resp, err - } - - return sub, resp, nil -} - -// DeleteRepositorySubscription deletes the subscription for the specified -// repository for the authenticated user. -// -// This is used to stop watching a repository. To control whether or not to -// receive notifications from a repository, use SetRepositorySubscription. 
-// -// GitHub API docs: https://docs.github.com/rest/activity/watching#delete-a-repository-subscription -// -//meta:operation DELETE /repos/{owner}/{repo}/subscription -func (s *ActivityService) DeleteRepositorySubscription(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%s/%s/subscription", owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/apps_hooks.go b/vendor/github.com/google/go-github/v71/github/apps_hooks.go deleted file mode 100644 index 6046827e..00000000 --- a/vendor/github.com/google/go-github/v71/github/apps_hooks.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" -) - -// GetHookConfig returns the webhook configuration for a GitHub App. -// The underlying transport must be authenticated as an app. -// -// GitHub API docs: https://docs.github.com/rest/apps/webhooks#get-a-webhook-configuration-for-an-app -// -//meta:operation GET /app/hook/config -func (s *AppsService) GetHookConfig(ctx context.Context) (*HookConfig, *Response, error) { - req, err := s.client.NewRequest("GET", "app/hook/config", nil) - if err != nil { - return nil, nil, err - } - - config := new(HookConfig) - resp, err := s.client.Do(ctx, req, &config) - if err != nil { - return nil, resp, err - } - - return config, resp, nil -} - -// UpdateHookConfig updates the webhook configuration for a GitHub App. -// The underlying transport must be authenticated as an app. -// -// GitHub API docs: https://docs.github.com/rest/apps/webhooks#update-a-webhook-configuration-for-an-app -// -//meta:operation PATCH /app/hook/config -func (s *AppsService) UpdateHookConfig(ctx context.Context, config *HookConfig) (*HookConfig, *Response, error) { - req, err := s.client.NewRequest("PATCH", "app/hook/config", config) - if err != nil { - return nil, nil, err - } - - c := new(HookConfig) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/apps_hooks_deliveries.go b/vendor/github.com/google/go-github/v71/github/apps_hooks_deliveries.go deleted file mode 100644 index 59800a0a..00000000 --- a/vendor/github.com/google/go-github/v71/github/apps_hooks_deliveries.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListHookDeliveries lists deliveries of an App webhook. 
-// -// GitHub API docs: https://docs.github.com/rest/apps/webhooks#list-deliveries-for-an-app-webhook -// -//meta:operation GET /app/hook/deliveries -func (s *AppsService) ListHookDeliveries(ctx context.Context, opts *ListCursorOptions) ([]*HookDelivery, *Response, error) { - u, err := addOptions("app/hook/deliveries", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - deliveries := []*HookDelivery{} - resp, err := s.client.Do(ctx, req, &deliveries) - if err != nil { - return nil, resp, err - } - - return deliveries, resp, nil -} - -// GetHookDelivery returns the App webhook delivery with the specified ID. -// -// GitHub API docs: https://docs.github.com/rest/apps/webhooks#get-a-delivery-for-an-app-webhook -// -//meta:operation GET /app/hook/deliveries/{delivery_id} -func (s *AppsService) GetHookDelivery(ctx context.Context, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("app/hook/deliveries/%v", deliveryID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// RedeliverHookDelivery redelivers a delivery for an App webhook. -// -// GitHub API docs: https://docs.github.com/rest/apps/webhooks#redeliver-a-delivery-for-an-app-webhook -// -//meta:operation POST /app/hook/deliveries/{delivery_id}/attempts -func (s *AppsService) RedeliverHookDelivery(ctx context.Context, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("app/hook/deliveries/%v/attempts", deliveryID) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/apps_installation.go b/vendor/github.com/google/go-github/v71/github/apps_installation.go deleted file mode 100644 index d430511d..00000000 --- a/vendor/github.com/google/go-github/v71/github/apps_installation.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strings" -) - -// ListRepositories represents the response from the list repos endpoints. -type ListRepositories struct { - TotalCount *int `json:"total_count,omitempty"` - Repositories []*Repository `json:"repositories"` -} - -// ListRepos lists the repositories that are accessible to the authenticated installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#list-repositories-accessible-to-the-app-installation -// -//meta:operation GET /installation/repositories -func (s *AppsService) ListRepos(ctx context.Context, opts *ListOptions) (*ListRepositories, *Response, error) { - u, err := addOptions("installation/repositories", opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. 
- acceptHeaders := []string{ - mediaTypeTopicsPreview, - mediaTypeRepositoryVisibilityPreview, - mediaTypeRepositoryTemplatePreview, - } - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var r *ListRepositories - - resp, err := s.client.Do(ctx, req, &r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// ListUserRepos lists repositories that are accessible -// to the authenticated user for an installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#list-repositories-accessible-to-the-user-access-token -// -//meta:operation GET /user/installations/{installation_id}/repositories -func (s *AppsService) ListUserRepos(ctx context.Context, id int64, opts *ListOptions) (*ListRepositories, *Response, error) { - u := fmt.Sprintf("user/installations/%v/repositories", id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{ - mediaTypeTopicsPreview, - mediaTypeRepositoryVisibilityPreview, - mediaTypeRepositoryTemplatePreview, - } - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var r *ListRepositories - resp, err := s.client.Do(ctx, req, &r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// AddRepository adds a single repository to an installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#add-a-repository-to-an-app-installation -// -//meta:operation PUT /user/installations/{installation_id}/repositories/{repository_id} -func (s *AppsService) AddRepository(ctx context.Context, instID, repoID int64) (*Repository, *Response, error) { - u := fmt.Sprintf("user/installations/%v/repositories/%v", instID, repoID) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, nil, err - } - - r := new(Repository) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// RemoveRepository removes a single repository from an installation. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#remove-a-repository-from-an-app-installation -// -//meta:operation DELETE /user/installations/{installation_id}/repositories/{repository_id} -func (s *AppsService) RemoveRepository(ctx context.Context, instID, repoID int64) (*Response, error) { - u := fmt.Sprintf("user/installations/%v/repositories/%v", instID, repoID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RevokeInstallationToken revokes an installation token. -// -// GitHub API docs: https://docs.github.com/rest/apps/installations#revoke-an-installation-access-token -// -//meta:operation DELETE /installation/token -func (s *AppsService) RevokeInstallationToken(ctx context.Context) (*Response, error) { - u := "installation/token" - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/apps_manifest.go b/vendor/github.com/google/go-github/v71/github/apps_manifest.go deleted file mode 100644 index 5b6ff9af..00000000 --- a/vendor/github.com/google/go-github/v71/github/apps_manifest.go +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2019 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// AppConfig describes the configuration of a GitHub App. -type AppConfig struct { - ID *int64 `json:"id,omitempty"` - Slug *string `json:"slug,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Owner *User `json:"owner,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - ExternalURL *string `json:"external_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ClientID *string `json:"client_id,omitempty"` - ClientSecret *string `json:"client_secret,omitempty"` - WebhookSecret *string `json:"webhook_secret,omitempty"` - PEM *string `json:"pem,omitempty"` -} - -// CompleteAppManifest completes the App manifest handshake flow for the given -// code. -// -// GitHub API docs: https://docs.github.com/rest/apps/apps#create-a-github-app-from-a-manifest -// -//meta:operation POST /app-manifests/{code}/conversions -func (s *AppsService) CompleteAppManifest(ctx context.Context, code string) (*AppConfig, *Response, error) { - u := fmt.Sprintf("app-manifests/%s/conversions", code) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - cfg := new(AppConfig) - resp, err := s.client.Do(ctx, req, cfg) - if err != nil { - return nil, resp, err - } - - return cfg, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/apps_marketplace.go b/vendor/github.com/google/go-github/v71/github/apps_marketplace.go deleted file mode 100644 index 976775a7..00000000 --- a/vendor/github.com/google/go-github/v71/github/apps_marketplace.go +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// MarketplaceService handles communication with the marketplace related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/apps#marketplace -type MarketplaceService struct { - client *Client - // Stubbed controls whether endpoints that return stubbed data are used - // instead of production endpoints. Stubbed data is fake data that's useful - // for testing your GitHub Apps. Stubbed data is hard-coded and will not - // change based on actual subscriptions. - // - // GitHub API docs: https://docs.github.com/rest/apps#testing-with-stubbed-endpoints - Stubbed bool -} - -// MarketplacePlan represents a GitHub Apps Marketplace Listing Plan. -type MarketplacePlan struct { - URL *string `json:"url,omitempty"` - AccountsURL *string `json:"accounts_url,omitempty"` - ID *int64 `json:"id,omitempty"` - Number *int `json:"number,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - MonthlyPriceInCents *int `json:"monthly_price_in_cents,omitempty"` - YearlyPriceInCents *int `json:"yearly_price_in_cents,omitempty"` - // The pricing model for this listing. Can be one of "flat-rate", "per-unit", or "free". - PriceModel *string `json:"price_model,omitempty"` - UnitName *string `json:"unit_name,omitempty"` - Bullets *[]string `json:"bullets,omitempty"` - // State can be one of the values "draft" or "published". 
- State *string `json:"state,omitempty"` - HasFreeTrial *bool `json:"has_free_trial,omitempty"` -} - -// MarketplacePurchase represents a GitHub Apps Marketplace Purchase. -type MarketplacePurchase struct { - Account *MarketplacePurchaseAccount `json:"account,omitempty"` - // BillingCycle can be one of the values "yearly", "monthly" or nil. - BillingCycle *string `json:"billing_cycle,omitempty"` - NextBillingDate *Timestamp `json:"next_billing_date,omitempty"` - UnitCount *int `json:"unit_count,omitempty"` - Plan *MarketplacePlan `json:"plan,omitempty"` - OnFreeTrial *bool `json:"on_free_trial,omitempty"` - FreeTrialEndsOn *Timestamp `json:"free_trial_ends_on,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -// MarketplacePendingChange represents a pending change to a GitHub Apps Marketplace Plan. -type MarketplacePendingChange struct { - EffectiveDate *Timestamp `json:"effective_date,omitempty"` - UnitCount *int `json:"unit_count,omitempty"` - ID *int64 `json:"id,omitempty"` - Plan *MarketplacePlan `json:"plan,omitempty"` -} - -// MarketplacePlanAccount represents a GitHub Account (user or organization) on a specific plan. -type MarketplacePlanAccount struct { - URL *string `json:"url,omitempty"` - Type *string `json:"type,omitempty"` - ID *int64 `json:"id,omitempty"` - Login *string `json:"login,omitempty"` - OrganizationBillingEmail *string `json:"organization_billing_email,omitempty"` - MarketplacePurchase *MarketplacePurchase `json:"marketplace_purchase,omitempty"` - MarketplacePendingChange *MarketplacePendingChange `json:"marketplace_pending_change,omitempty"` -} - -// MarketplacePurchaseAccount represents a GitHub Account (user or organization) for a Purchase. -type MarketplacePurchaseAccount struct { - URL *string `json:"url,omitempty"` - Type *string `json:"type,omitempty"` - ID *int64 `json:"id,omitempty"` - Login *string `json:"login,omitempty"` - OrganizationBillingEmail *string `json:"organization_billing_email,omitempty"` - Email *string `json:"email,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -// ListPlans lists all plans for your Marketplace listing. -// -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-plans -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-plans-stubbed -// -//meta:operation GET /marketplace_listing/plans -//meta:operation GET /marketplace_listing/stubbed/plans -func (s *MarketplaceService) ListPlans(ctx context.Context, opts *ListOptions) ([]*MarketplacePlan, *Response, error) { - uri := s.marketplaceURI("plans") - u, err := addOptions(uri, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var plans []*MarketplacePlan - resp, err := s.client.Do(ctx, req, &plans) - if err != nil { - return nil, resp, err - } - - return plans, resp, nil -} - -// ListPlanAccountsForPlan lists all GitHub accounts (user or organization) on a specific plan. 
-// -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-accounts-for-a-plan -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-accounts-for-a-plan-stubbed -// -//meta:operation GET /marketplace_listing/plans/{plan_id}/accounts -//meta:operation GET /marketplace_listing/stubbed/plans/{plan_id}/accounts -func (s *MarketplaceService) ListPlanAccountsForPlan(ctx context.Context, planID int64, opts *ListOptions) ([]*MarketplacePlanAccount, *Response, error) { - uri := s.marketplaceURI(fmt.Sprintf("plans/%v/accounts", planID)) - u, err := addOptions(uri, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var accounts []*MarketplacePlanAccount - resp, err := s.client.Do(ctx, req, &accounts) - if err != nil { - return nil, resp, err - } - - return accounts, resp, nil -} - -// GetPlanAccountForAccount get GitHub account (user or organization) associated with an account. -// -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#get-a-subscription-plan-for-an-account -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#get-a-subscription-plan-for-an-account-stubbed -// -//meta:operation GET /marketplace_listing/accounts/{account_id} -//meta:operation GET /marketplace_listing/stubbed/accounts/{account_id} -func (s *MarketplaceService) GetPlanAccountForAccount(ctx context.Context, accountID int64) (*MarketplacePlanAccount, *Response, error) { - uri := s.marketplaceURI(fmt.Sprintf("accounts/%v", accountID)) - - req, err := s.client.NewRequest("GET", uri, nil) - if err != nil { - return nil, nil, err - } - - var account *MarketplacePlanAccount - resp, err := s.client.Do(ctx, req, &account) - if err != nil { - return nil, resp, err - } - - return account, resp, nil -} - -// ListMarketplacePurchasesForUser lists all GitHub marketplace purchases made by a user. -// -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-subscriptions-for-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/apps/marketplace#list-subscriptions-for-the-authenticated-user-stubbed -// -//meta:operation GET /user/marketplace_purchases -//meta:operation GET /user/marketplace_purchases/stubbed -func (s *MarketplaceService) ListMarketplacePurchasesForUser(ctx context.Context, opts *ListOptions) ([]*MarketplacePurchase, *Response, error) { - uri := "user/marketplace_purchases" - if s.Stubbed { - uri = "user/marketplace_purchases/stubbed" - } - - u, err := addOptions(uri, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var purchases []*MarketplacePurchase - resp, err := s.client.Do(ctx, req, &purchases) - if err != nil { - return nil, resp, err - } - return purchases, resp, nil -} - -func (s *MarketplaceService) marketplaceURI(endpoint string) string { - url := "marketplace_listing" - if s.Stubbed { - url = "marketplace_listing/stubbed" - } - return url + "/" + endpoint -} diff --git a/vendor/github.com/google/go-github/v71/github/codespaces.go b/vendor/github.com/google/go-github/v71/github/codespaces.go deleted file mode 100644 index 60837050..00000000 --- a/vendor/github.com/google/go-github/v71/github/codespaces.go +++ /dev/null @@ -1,266 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// CodespacesService handles communication with the Codespaces related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/ -type CodespacesService service - -// Codespace represents a codespace. -// -// GitHub API docs: https://docs.github.com/rest/codespaces -type Codespace struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - DisplayName *string `json:"display_name,omitempty"` - EnvironmentID *string `json:"environment_id,omitempty"` - Owner *User `json:"owner,omitempty"` - BillableOwner *User `json:"billable_owner,omitempty"` - Repository *Repository `json:"repository,omitempty"` - Machine *CodespacesMachine `json:"machine,omitempty"` - DevcontainerPath *string `json:"devcontainer_path,omitempty"` - Prebuild *bool `json:"prebuild,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - LastUsedAt *Timestamp `json:"last_used_at,omitempty"` - State *string `json:"state,omitempty"` - URL *string `json:"url,omitempty"` - GitStatus *CodespacesGitStatus `json:"git_status,omitempty"` - Location *string `json:"location,omitempty"` - IdleTimeoutMinutes *int `json:"idle_timeout_minutes,omitempty"` - WebURL *string `json:"web_url,omitempty"` - MachinesURL *string `json:"machines_url,omitempty"` - StartURL *string `json:"start_url,omitempty"` - StopURL *string `json:"stop_url,omitempty"` - PullsURL *string `json:"pulls_url,omitempty"` - RecentFolders []string `json:"recent_folders,omitempty"` - RuntimeConstraints *CodespacesRuntimeConstraints `json:"runtime_constraints,omitempty"` - PendingOperation *bool `json:"pending_operation,omitempty"` - PendingOperationDisabledReason *string `json:"pending_operation_disabled_reason,omitempty"` - IdleTimeoutNotice *string `json:"idle_timeout_notice,omitempty"` - RetentionPeriodMinutes *int `json:"retention_period_minutes,omitempty"` - RetentionExpiresAt *Timestamp `json:"retention_expires_at,omitempty"` - LastKnownStopNotice *string `json:"last_known_stop_notice,omitempty"` -} - -// CodespacesGitStatus represents the git status of a codespace. -type CodespacesGitStatus struct { - Ahead *int `json:"ahead,omitempty"` - Behind *int `json:"behind,omitempty"` - HasUnpushedChanges *bool `json:"has_unpushed_changes,omitempty"` - HasUncommittedChanges *bool `json:"has_uncommitted_changes,omitempty"` - Ref *string `json:"ref,omitempty"` -} - -// CodespacesMachine represents the machine type of a codespace. -type CodespacesMachine struct { - Name *string `json:"name,omitempty"` - DisplayName *string `json:"display_name,omitempty"` - OperatingSystem *string `json:"operating_system,omitempty"` - StorageInBytes *int64 `json:"storage_in_bytes,omitempty"` - MemoryInBytes *int64 `json:"memory_in_bytes,omitempty"` - CPUs *int `json:"cpus,omitempty"` - PrebuildAvailability *string `json:"prebuild_availability,omitempty"` -} - -// CodespacesRuntimeConstraints represents the runtime constraints of a codespace. -type CodespacesRuntimeConstraints struct { - AllowedPortPrivacySettings []string `json:"allowed_port_privacy_settings,omitempty"` -} - -// ListCodespaces represents the response from the list codespaces endpoints. -type ListCodespaces struct { - TotalCount *int `json:"total_count,omitempty"` - Codespaces []*Codespace `json:"codespaces"` -} - -// ListInRepo lists codespaces for a user in a repository. 
-// -// Lists the codespaces associated with a specified repository and the authenticated user. -// You must authenticate using an access token with the codespace scope to use this endpoint. -// GitHub Apps must have read access to the codespaces repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#list-codespaces-in-a-repository-for-the-authenticated-user -// -//meta:operation GET /repos/{owner}/{repo}/codespaces -func (s *CodespacesService) ListInRepo(ctx context.Context, owner, repo string, opts *ListOptions) (*ListCodespaces, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var codespaces *ListCodespaces - resp, err := s.client.Do(ctx, req, &codespaces) - if err != nil { - return nil, resp, err - } - - return codespaces, resp, nil -} - -// ListCodespacesOptions represents the options for listing codespaces for a user. -type ListCodespacesOptions struct { - ListOptions - RepositoryID int64 `url:"repository_id,omitempty"` -} - -// List lists codespaces for an authenticated user. -// -// Lists the authenticated user's codespaces. -// You must authenticate using an access token with the codespace scope to use this endpoint. -// GitHub Apps must have read access to the codespaces repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#list-codespaces-for-the-authenticated-user -// -//meta:operation GET /user/codespaces -func (s *CodespacesService) List(ctx context.Context, opts *ListCodespacesOptions) (*ListCodespaces, *Response, error) { - u := "user/codespaces" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var codespaces *ListCodespaces - resp, err := s.client.Do(ctx, req, &codespaces) - if err != nil { - return nil, resp, err - } - - return codespaces, resp, nil -} - -// CreateCodespaceOptions represents options for the creation of a codespace in a repository. -type CreateCodespaceOptions struct { - Ref *string `json:"ref,omitempty"` - // Geo represents the geographic area for this codespace. - // If not specified, the value is assigned by IP. - // This property replaces location, which is being deprecated. - // Geo can be one of: `EuropeWest`, `SoutheastAsia`, `UsEast`, `UsWest`. - Geo *string `json:"geo,omitempty"` - ClientIP *string `json:"client_ip,omitempty"` - Machine *string `json:"machine,omitempty"` - DevcontainerPath *string `json:"devcontainer_path,omitempty"` - MultiRepoPermissionsOptOut *bool `json:"multi_repo_permissions_opt_out,omitempty"` - WorkingDirectory *string `json:"working_directory,omitempty"` - IdleTimeoutMinutes *int `json:"idle_timeout_minutes,omitempty"` - DisplayName *string `json:"display_name,omitempty"` - // RetentionPeriodMinutes represents the duration in minutes after codespace has gone idle in which it will be deleted. - // Must be integer minutes between 0 and 43200 (30 days). - RetentionPeriodMinutes *int `json:"retention_period_minutes,omitempty"` -} - -// CreateInRepo creates a codespace in a repository. -// -// Creates a codespace owned by the authenticated user in the specified repository. -// You must authenticate using an access token with the codespace scope to use this endpoint. 
-// GitHub Apps must have write access to the codespaces repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#create-a-codespace-in-a-repository -// -//meta:operation POST /repos/{owner}/{repo}/codespaces -func (s *CodespacesService) CreateInRepo(ctx context.Context, owner, repo string, request *CreateCodespaceOptions) (*Codespace, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces", owner, repo) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - var codespace *Codespace - resp, err := s.client.Do(ctx, req, &codespace) - if err != nil { - return nil, resp, err - } - - return codespace, resp, nil -} - -// Start starts a codespace. -// -// You must authenticate using an access token with the codespace scope to use this endpoint. -// GitHub Apps must have write access to the codespaces_lifecycle_admin repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#start-a-codespace-for-the-authenticated-user -// -//meta:operation POST /user/codespaces/{codespace_name}/start -func (s *CodespacesService) Start(ctx context.Context, codespaceName string) (*Codespace, *Response, error) { - u := fmt.Sprintf("user/codespaces/%v/start", codespaceName) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - var codespace *Codespace - resp, err := s.client.Do(ctx, req, &codespace) - if err != nil { - return nil, resp, err - } - - return codespace, resp, nil -} - -// Stop stops a codespace. -// -// You must authenticate using an access token with the codespace scope to use this endpoint. -// GitHub Apps must have write access to the codespaces_lifecycle_admin repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#stop-a-codespace-for-the-authenticated-user -// -//meta:operation POST /user/codespaces/{codespace_name}/stop -func (s *CodespacesService) Stop(ctx context.Context, codespaceName string) (*Codespace, *Response, error) { - u := fmt.Sprintf("user/codespaces/%v/stop", codespaceName) - - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - var codespace *Codespace - resp, err := s.client.Do(ctx, req, &codespace) - if err != nil { - return nil, resp, err - } - - return codespace, resp, nil -} - -// Delete deletes a codespace. -// -// You must authenticate using an access token with the codespace scope to use this endpoint. -// GitHub Apps must have write access to the codespaces repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/codespaces#delete-a-codespace-for-the-authenticated-user -// -//meta:operation DELETE /user/codespaces/{codespace_name} -func (s *CodespacesService) Delete(ctx context.Context, codespaceName string) (*Response, error) { - u := fmt.Sprintf("user/codespaces/%v", codespaceName) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/codespaces_secrets.go b/vendor/github.com/google/go-github/v71/github/codespaces_secrets.go deleted file mode 100644 index 438c27f8..00000000 --- a/vendor/github.com/google/go-github/v71/github/codespaces_secrets.go +++ /dev/null @@ -1,451 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListUserSecrets list all secrets available for a users codespace -// -// Lists all secrets available for a user's Codespaces without revealing their encrypted values -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint -// GitHub Apps must have read access to the codespaces_user_secrets user permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#list-secrets-for-the-authenticated-user -// -//meta:operation GET /user/codespaces/secrets -func (s *CodespacesService) ListUserSecrets(ctx context.Context, opts *ListOptions) (*Secrets, *Response, error) { - u, err := addOptions("user/codespaces/secrets", opts) - if err != nil { - return nil, nil, err - } - return s.listSecrets(ctx, u) -} - -// ListOrgSecrets list all secrets available to an org -// -// Lists all Codespaces secrets available at the organization-level without revealing their encrypted values. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#list-organization-secrets -// -//meta:operation GET /orgs/{org}/codespaces/secrets -func (s *CodespacesService) ListOrgSecrets(ctx context.Context, org string, opts *ListOptions) (*Secrets, *Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - return s.listSecrets(ctx, u) -} - -// ListRepoSecrets list all secrets available to a repo -// -// Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/repository-secrets#list-repository-secrets -// -//meta:operation GET /repos/{owner}/{repo}/codespaces/secrets -func (s *CodespacesService) ListRepoSecrets(ctx context.Context, owner, repo string, opts *ListOptions) (*Secrets, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces/secrets", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - return s.listSecrets(ctx, u) -} - -func (s *CodespacesService) listSecrets(ctx context.Context, url string) (*Secrets, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var secrets *Secrets - resp, err := s.client.Do(ctx, req, &secrets) - if err != nil { - return nil, resp, err - } - - return secrets, resp, nil -} - -// GetUserPublicKey gets the users public key for encrypting codespace secrets -// -// Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. -// GitHub Apps must have read access to the codespaces_user_secrets user permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#get-public-key-for-the-authenticated-user -// -//meta:operation GET /user/codespaces/secrets/public-key -func (s *CodespacesService) GetUserPublicKey(ctx context.Context) (*PublicKey, *Response, error) { - return s.getPublicKey(ctx, "user/codespaces/secrets/public-key") -} - -// GetOrgPublicKey gets the org public key for encrypting codespace secrets -// -// Gets a public key for an organization, which is required in order to encrypt secrets. You need to encrypt the value of a secret before you can create or update secrets. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#get-an-organization-public-key -// -//meta:operation GET /orgs/{org}/codespaces/secrets/public-key -func (s *CodespacesService) GetOrgPublicKey(ctx context.Context, org string) (*PublicKey, *Response, error) { - return s.getPublicKey(ctx, fmt.Sprintf("orgs/%v/codespaces/secrets/public-key", org)) -} - -// GetRepoPublicKey gets the repo public key for encrypting codespace secrets -// -// Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the repo scope. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/repository-secrets#get-a-repository-public-key -// -//meta:operation GET /repos/{owner}/{repo}/codespaces/secrets/public-key -func (s *CodespacesService) GetRepoPublicKey(ctx context.Context, owner, repo string) (*PublicKey, *Response, error) { - return s.getPublicKey(ctx, fmt.Sprintf("repos/%v/%v/codespaces/secrets/public-key", owner, repo)) -} - -func (s *CodespacesService) getPublicKey(ctx context.Context, url string) (*PublicKey, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var publicKey *PublicKey - resp, err := s.client.Do(ctx, req, &publicKey) - if err != nil { - return nil, resp, err - } - - return publicKey, resp, nil -} - -// GetUserSecret gets a users codespace secret -// -// Gets a secret available to a user's codespaces without revealing its encrypted value. -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. -// GitHub Apps must have read access to the codespaces_user_secrets user permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#get-a-secret-for-the-authenticated-user -// -//meta:operation GET /user/codespaces/secrets/{secret_name} -func (s *CodespacesService) GetUserSecret(ctx context.Context, name string) (*Secret, *Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v", name) - return s.getSecret(ctx, u) -} - -// GetOrgSecret gets an org codespace secret -// -// Gets an organization secret without revealing its encrypted value. You must authenticate using an access token with the admin:org scope to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#get-an-organization-secret -// -//meta:operation GET /orgs/{org}/codespaces/secrets/{secret_name} -func (s *CodespacesService) GetOrgSecret(ctx context.Context, org, name string) (*Secret, *Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v", org, name) - return s.getSecret(ctx, u) -} - -// GetRepoSecret gets a repo codespace secret -// -// Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/repository-secrets#get-a-repository-secret -// -//meta:operation GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name} -func (s *CodespacesService) GetRepoSecret(ctx context.Context, owner, repo, name string) (*Secret, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces/secrets/%v", owner, repo, name) - return s.getSecret(ctx, u) -} - -func (s *CodespacesService) getSecret(ctx context.Context, url string) (*Secret, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var secret *Secret - resp, err := s.client.Do(ctx, req, &secret) - if err != nil { - return nil, resp, err - } - - return secret, resp, nil -} - -// CreateOrUpdateUserSecret creates or updates a users codespace secret -// -// Creates or updates a secret for a user's codespace with an encrypted value. Encrypt your secret using LibSodium. -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must also have Codespaces access to use this endpoint. -// GitHub Apps must have write access to the codespaces_user_secrets user permission and codespaces_secrets repository permission on all referenced repositories to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#create-or-update-a-secret-for-the-authenticated-user -// -//meta:operation PUT /user/codespaces/secrets/{secret_name} -func (s *CodespacesService) CreateOrUpdateUserSecret(ctx context.Context, eSecret *EncryptedSecret) (*Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v", eSecret.Name) - return s.createOrUpdateSecret(ctx, u, eSecret) -} - -// CreateOrUpdateOrgSecret creates or updates an orgs codespace secret -// -// Creates or updates an organization secret with an encrypted value. Encrypt your secret using LibSodium. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#create-or-update-an-organization-secret -// -//meta:operation PUT /orgs/{org}/codespaces/secrets/{secret_name} -func (s *CodespacesService) CreateOrUpdateOrgSecret(ctx context.Context, org string, eSecret *EncryptedSecret) (*Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v", org, eSecret.Name) - return s.createOrUpdateSecret(ctx, u, eSecret) -} - -// CreateOrUpdateRepoSecret creates or updates a repos codespace secret -// -// Creates or updates a repository secret with an encrypted value. Encrypt your secret using LibSodium. You must authenticate using an access token with the repo scope to use this endpoint. 
GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/repository-secrets#create-or-update-a-repository-secret -// -//meta:operation PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name} -func (s *CodespacesService) CreateOrUpdateRepoSecret(ctx context.Context, owner, repo string, eSecret *EncryptedSecret) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces/secrets/%v", owner, repo, eSecret.Name) - return s.createOrUpdateSecret(ctx, u, eSecret) -} - -func (s *CodespacesService) createOrUpdateSecret(ctx context.Context, url string, eSecret *EncryptedSecret) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, eSecret) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// DeleteUserSecret deletes a users codespace secret -// -// Deletes a secret from a user's codespaces using the secret name. Deleting the secret will remove access from all codespaces that were allowed to access the secret. -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. -// GitHub Apps must have write access to the codespaces_user_secrets user permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#delete-a-secret-for-the-authenticated-user -// -//meta:operation DELETE /user/codespaces/secrets/{secret_name} -func (s *CodespacesService) DeleteUserSecret(ctx context.Context, name string) (*Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v", name) - return s.deleteSecret(ctx, u) -} - -// DeleteOrgSecret deletes an orgs codespace secret -// -// Deletes an organization secret using the secret name. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#delete-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/codespaces/secrets/{secret_name} -func (s *CodespacesService) DeleteOrgSecret(ctx context.Context, org, name string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v", org, name) - return s.deleteSecret(ctx, u) -} - -// DeleteRepoSecret deletes a repos codespace secret -// -// Deletes a secret in a repository using the secret name. You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/codespaces/repository-secrets#delete-a-repository-secret -// -//meta:operation DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name} -func (s *CodespacesService) DeleteRepoSecret(ctx context.Context, owner, repo, name string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/codespaces/secrets/%v", owner, repo, name) - return s.deleteSecret(ctx, u) -} - -func (s *CodespacesService) deleteSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// ListSelectedReposForUserSecret lists the repositories that have been granted the ability to use a user's codespace secret. -// -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. -// GitHub Apps must have read access to the codespaces_user_secrets user permission and write access to the codespaces_secrets repository permission on all referenced repositories to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#list-selected-repositories-for-a-user-secret -// -//meta:operation GET /user/codespaces/secrets/{secret_name}/repositories -func (s *CodespacesService) ListSelectedReposForUserSecret(ctx context.Context, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v/repositories", name) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - return s.listSelectedReposForSecret(ctx, u) -} - -// ListSelectedReposForOrgSecret lists the repositories that have been granted the ability to use an organization's codespace secret. -// -// Lists all repositories that have been selected when the visibility for repository access to a secret is set to selected. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#list-selected-repositories-for-an-organization-secret -// -//meta:operation GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories -func (s *CodespacesService) ListSelectedReposForOrgSecret(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories", org, name) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - return s.listSelectedReposForSecret(ctx, u) -} - -func (s *CodespacesService) listSelectedReposForSecret(ctx context.Context, url string) (*SelectedReposList, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - var repositories *SelectedReposList - resp, err := s.client.Do(ctx, req, &repositories) - if err != nil { - return nil, resp, err - } - - return repositories, resp, nil -} - -// SetSelectedReposForUserSecret sets the repositories that have been granted the ability to use a user's codespace secret. -// -// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. 
-// GitHub Apps must have write access to the codespaces_user_secrets user permission and write access to the codespaces_secrets repository permission on all referenced repositories to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#set-selected-repositories-for-a-user-secret -// -//meta:operation PUT /user/codespaces/secrets/{secret_name}/repositories -func (s *CodespacesService) SetSelectedReposForUserSecret(ctx context.Context, name string, ids SelectedRepoIDs) (*Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v/repositories", name) - return s.setSelectedRepoForSecret(ctx, u, ids) -} - -// SetSelectedReposForOrgSecret sets the repositories that have been granted the ability to use a user's codespace secret. -// -// Replaces all repositories for an organization secret when the visibility for repository access is set to selected. The visibility is set when you Create or update an organization secret. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#set-selected-repositories-for-an-organization-secret -// -//meta:operation PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories -func (s *CodespacesService) SetSelectedReposForOrgSecret(ctx context.Context, org, name string, ids SelectedRepoIDs) (*Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories", org, name) - return s.setSelectedRepoForSecret(ctx, u, ids) -} - -func (s *CodespacesService) setSelectedRepoForSecret(ctx context.Context, url string, ids SelectedRepoIDs) (*Response, error) { - type repoIDs struct { - SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` - } - - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// AddSelectedRepoToUserSecret adds a repository to the list of repositories that have been granted the ability to use a user's codespace secret. -// -// Adds a repository to the selected repositories for a user's codespace secret. You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. GitHub Apps must have write access to the codespaces_user_secrets user permission and write access to the codespaces_secrets repository permission on the referenced repository to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#add-a-selected-repository-to-a-user-secret -// -//meta:operation PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id} -func (s *CodespacesService) AddSelectedRepoToUserSecret(ctx context.Context, name string, repo *Repository) (*Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v/repositories/%v", name, *repo.ID) - return s.addSelectedRepoToSecret(ctx, u) -} - -// AddSelectedRepoToOrgSecret adds a repository to the list of repositories that have been granted the ability to use an organization's codespace secret. -// -// Adds a repository to an organization secret when the visibility for repository access is set to selected. The visibility is set when you Create or update an organization secret. You must authenticate using an access token with the admin:org scope to use this endpoint. 
-// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#add-selected-repository-to-an-organization-secret -// -//meta:operation PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id} -func (s *CodespacesService) AddSelectedRepoToOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories/%v", org, name, *repo.ID) - return s.addSelectedRepoToSecret(ctx, u) -} - -func (s *CodespacesService) addSelectedRepoToSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// RemoveSelectedRepoFromUserSecret removes a repository from the list of repositories that have been granted the ability to use a user's codespace secret. -// -// Removes a repository from the selected repositories for a user's codespace secret. You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. GitHub Apps must have write access to the codespaces_user_secrets user permission to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/secrets#remove-a-selected-repository-from-a-user-secret -// -//meta:operation DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id} -func (s *CodespacesService) RemoveSelectedRepoFromUserSecret(ctx context.Context, name string, repo *Repository) (*Response, error) { - u := fmt.Sprintf("user/codespaces/secrets/%v/repositories/%v", name, *repo.ID) - return s.removeSelectedRepoFromSecret(ctx, u) -} - -// RemoveSelectedRepoFromOrgSecret removes a repository from the list of repositories that have been granted the ability to use an organization's codespace secret. -// -// Removes a repository from an organization secret when the visibility for repository access is set to selected. The visibility is set when you Create or update an organization secret. You must authenticate using an access token with the admin:org scope to use this endpoint. -// -// GitHub API docs: https://docs.github.com/rest/codespaces/organization-secrets#remove-selected-repository-from-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id} -func (s *CodespacesService) RemoveSelectedRepoFromOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories/%v", org, name, *repo.ID) - return s.removeSelectedRepoFromSecret(ctx, u) -} - -func (s *CodespacesService) removeSelectedRepoFromSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/dependabot.go b/vendor/github.com/google/go-github/v71/github/dependabot.go deleted file mode 100644 index 2a11a9c9..00000000 --- a/vendor/github.com/google/go-github/v71/github/dependabot.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// DependabotService handles communication with the Dependabot related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/ -type DependabotService service diff --git a/vendor/github.com/google/go-github/v71/github/dependabot_secrets.go b/vendor/github.com/google/go-github/v71/github/dependabot_secrets.go deleted file mode 100644 index e85c805a..00000000 --- a/vendor/github.com/google/go-github/v71/github/dependabot_secrets.go +++ /dev/null @@ -1,289 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -func (s *DependabotService) getPublicKey(ctx context.Context, url string) (*PublicKey, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - pubKey := new(PublicKey) - resp, err := s.client.Do(ctx, req, pubKey) - if err != nil { - return nil, resp, err - } - - return pubKey, resp, nil -} - -// GetRepoPublicKey gets a public key that should be used for Dependabot secret encryption. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#get-a-repository-public-key -// -//meta:operation GET /repos/{owner}/{repo}/dependabot/secrets/public-key -func (s *DependabotService) GetRepoPublicKey(ctx context.Context, owner, repo string) (*PublicKey, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/public-key", owner, repo) - return s.getPublicKey(ctx, url) -} - -// GetOrgPublicKey gets a public key that should be used for Dependabot secret encryption. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#get-an-organization-public-key -// -//meta:operation GET /orgs/{org}/dependabot/secrets/public-key -func (s *DependabotService) GetOrgPublicKey(ctx context.Context, org string) (*PublicKey, *Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/public-key", org) - return s.getPublicKey(ctx, url) -} - -func (s *DependabotService) listSecrets(ctx context.Context, url string, opts *ListOptions) (*Secrets, *Response, error) { - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - secrets := new(Secrets) - resp, err := s.client.Do(ctx, req, &secrets) - if err != nil { - return nil, resp, err - } - - return secrets, resp, nil -} - -// ListRepoSecrets lists all Dependabot secrets available in a repository -// without revealing their encrypted values. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#list-repository-secrets -// -//meta:operation GET /repos/{owner}/{repo}/dependabot/secrets -func (s *DependabotService) ListRepoSecrets(ctx context.Context, owner, repo string, opts *ListOptions) (*Secrets, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/secrets", owner, repo) - return s.listSecrets(ctx, url, opts) -} - -// ListOrgSecrets lists all Dependabot secrets available in an organization -// without revealing their encrypted values. 
-// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#list-organization-secrets -// -//meta:operation GET /orgs/{org}/dependabot/secrets -func (s *DependabotService) ListOrgSecrets(ctx context.Context, org string, opts *ListOptions) (*Secrets, *Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets", org) - return s.listSecrets(ctx, url, opts) -} - -func (s *DependabotService) getSecret(ctx context.Context, url string) (*Secret, *Response, error) { - req, err := s.client.NewRequest("GET", url, nil) - if err != nil { - return nil, nil, err - } - - secret := new(Secret) - resp, err := s.client.Do(ctx, req, secret) - if err != nil { - return nil, resp, err - } - - return secret, resp, nil -} - -// GetRepoSecret gets a single repository Dependabot secret without revealing its encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#get-a-repository-secret -// -//meta:operation GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name} -func (s *DependabotService) GetRepoSecret(ctx context.Context, owner, repo, name string) (*Secret, *Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/%v", owner, repo, name) - return s.getSecret(ctx, url) -} - -// GetOrgSecret gets a single organization Dependabot secret without revealing its encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#get-an-organization-secret -// -//meta:operation GET /orgs/{org}/dependabot/secrets/{secret_name} -func (s *DependabotService) GetOrgSecret(ctx context.Context, org, name string) (*Secret, *Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v", org, name) - return s.getSecret(ctx, url) -} - -// DependabotEncryptedSecret represents a secret that is encrypted using a public key for Dependabot. -// -// The value of EncryptedValue must be your secret, encrypted with -// LibSodium (see documentation here: https://libsodium.gitbook.io/doc/bindings_for_other_languages) -// using the public key retrieved using the GetPublicKey method. -type DependabotEncryptedSecret struct { - Name string `json:"-"` - KeyID string `json:"key_id"` - EncryptedValue string `json:"encrypted_value"` - Visibility string `json:"visibility,omitempty"` - SelectedRepositoryIDs DependabotSecretsSelectedRepoIDs `json:"selected_repository_ids,omitempty"` -} - -func (s *DependabotService) putSecret(ctx context.Context, url string, eSecret *DependabotEncryptedSecret) (*Response, error) { - req, err := s.client.NewRequest("PUT", url, eSecret) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CreateOrUpdateRepoSecret creates or updates a repository Dependabot secret with an encrypted value. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#create-or-update-a-repository-secret -// -//meta:operation PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name} -func (s *DependabotService) CreateOrUpdateRepoSecret(ctx context.Context, owner, repo string, eSecret *DependabotEncryptedSecret) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/%v", owner, repo, eSecret.Name) - return s.putSecret(ctx, url, eSecret) -} - -// CreateOrUpdateOrgSecret creates or updates an organization Dependabot secret with an encrypted value. 
-// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#create-or-update-an-organization-secret -// -//meta:operation PUT /orgs/{org}/dependabot/secrets/{secret_name} -func (s *DependabotService) CreateOrUpdateOrgSecret(ctx context.Context, org string, eSecret *DependabotEncryptedSecret) (*Response, error) { - repoIDs := make([]string, len(eSecret.SelectedRepositoryIDs)) - for i, secret := range eSecret.SelectedRepositoryIDs { - repoIDs[i] = fmt.Sprintf("%v", secret) - } - params := struct { - *DependabotEncryptedSecret - SelectedRepositoryIDs []string `json:"selected_repository_ids,omitempty"` - }{ - DependabotEncryptedSecret: eSecret, - SelectedRepositoryIDs: repoIDs, - } - - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v", org, eSecret.Name) - req, err := s.client.NewRequest("PUT", url, params) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -func (s *DependabotService) deleteSecret(ctx context.Context, url string) (*Response, error) { - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteRepoSecret deletes a Dependabot secret in a repository using the secret name. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#delete-a-repository-secret -// -//meta:operation DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name} -func (s *DependabotService) DeleteRepoSecret(ctx context.Context, owner, repo, name string) (*Response, error) { - url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/%v", owner, repo, name) - return s.deleteSecret(ctx, url) -} - -// DeleteOrgSecret deletes a Dependabot secret in an organization using the secret name. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#delete-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/dependabot/secrets/{secret_name} -func (s *DependabotService) DeleteOrgSecret(ctx context.Context, org, name string) (*Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v", org, name) - return s.deleteSecret(ctx, url) -} - -// ListSelectedReposForOrgSecret lists all repositories that have access to a Dependabot secret. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#list-selected-repositories-for-an-organization-secret -// -//meta:operation GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories -func (s *DependabotService) ListSelectedReposForOrgSecret(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories", org, name) - u, err := addOptions(url, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - result := new(SelectedReposList) - resp, err := s.client.Do(ctx, req, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} - -// DependabotSecretsSelectedRepoIDs are the repository IDs that have access to the dependabot secrets. -type DependabotSecretsSelectedRepoIDs []int64 - -// SetSelectedReposForOrgSecret sets the repositories that have access to a Dependabot secret. 
-// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#set-selected-repositories-for-an-organization-secret -// -//meta:operation PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories -func (s *DependabotService) SetSelectedReposForOrgSecret(ctx context.Context, org, name string, ids DependabotSecretsSelectedRepoIDs) (*Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories", org, name) - type repoIDs struct { - SelectedIDs DependabotSecretsSelectedRepoIDs `json:"selected_repository_ids"` - } - - req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddSelectedRepoToOrgSecret adds a repository to an organization Dependabot secret. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#add-selected-repository-to-an-organization-secret -// -//meta:operation PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id} -func (s *DependabotService) AddSelectedRepoToOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories/%v", org, name, *repo.ID) - req, err := s.client.NewRequest("PUT", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveSelectedRepoFromOrgSecret removes a repository from an organization Dependabot secret. -// -// GitHub API docs: https://docs.github.com/rest/dependabot/secrets#remove-selected-repository-from-an-organization-secret -// -//meta:operation DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id} -func (s *DependabotService) RemoveSelectedRepoFromOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { - url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories/%v", org, name, *repo.ID) - req, err := s.client.NewRequest("DELETE", url, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/dependency_graph.go b/vendor/github.com/google/go-github/v71/github/dependency_graph.go deleted file mode 100644 index 86a1fe48..00000000 --- a/vendor/github.com/google/go-github/v71/github/dependency_graph.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -type DependencyGraphService service - -// SBOM represents a software bill of materials, which describes the -// packages/libraries that a repository depends on. -type SBOM struct { - SBOM *SBOMInfo `json:"sbom,omitempty"` -} - -// CreationInfo represents when the SBOM was created and who created it. -type CreationInfo struct { - Created *Timestamp `json:"created,omitempty"` - Creators []string `json:"creators,omitempty"` -} - -// RepoDependencies represents the dependencies of a repo. 
-type RepoDependencies struct { - SPDXID *string `json:"SPDXID,omitempty"` - // Package name - Name *string `json:"name,omitempty"` - VersionInfo *string `json:"versionInfo,omitempty"` - DownloadLocation *string `json:"downloadLocation,omitempty"` - FilesAnalyzed *bool `json:"filesAnalyzed,omitempty"` - LicenseConcluded *string `json:"licenseConcluded,omitempty"` - LicenseDeclared *string `json:"licenseDeclared,omitempty"` -} - -// SBOMInfo represents a software bill of materials (SBOM) using SPDX. -// SPDX is an open standard for SBOMs that -// identifies and catalogs components, licenses, copyrights, security -// references, and other metadata relating to software. -type SBOMInfo struct { - SPDXID *string `json:"SPDXID,omitempty"` - SPDXVersion *string `json:"spdxVersion,omitempty"` - CreationInfo *CreationInfo `json:"creationInfo,omitempty"` - - // Repo name - Name *string `json:"name,omitempty"` - DataLicense *string `json:"dataLicense,omitempty"` - DocumentDescribes []string `json:"documentDescribes,omitempty"` - DocumentNamespace *string `json:"documentNamespace,omitempty"` - - // List of packages dependencies - Packages []*RepoDependencies `json:"packages,omitempty"` -} - -func (s SBOM) String() string { - return Stringify(s) -} - -// GetSBOM fetches the software bill of materials for a repository. -// -// GitHub API docs: https://docs.github.com/rest/dependency-graph/sboms#export-a-software-bill-of-materials-sbom-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/dependency-graph/sbom -func (s *DependencyGraphService) GetSBOM(ctx context.Context, owner, repo string) (*SBOM, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/dependency-graph/sbom", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var sbom *SBOM - resp, err := s.client.Do(ctx, req, &sbom) - if err != nil { - return nil, resp, err - } - - return sbom, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/enterprise.go b/vendor/github.com/google/go-github/v71/github/enterprise.go deleted file mode 100644 index 2036f8bc..00000000 --- a/vendor/github.com/google/go-github/v71/github/enterprise.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// EnterpriseService provides access to the enterprise related functions -// in the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/enterprise-admin/ -type EnterpriseService service diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_actions_runner_groups.go b/vendor/github.com/google/go-github/v71/github/enterprise_actions_runner_groups.go deleted file mode 100644 index f171df75..00000000 --- a/vendor/github.com/google/go-github/v71/github/enterprise_actions_runner_groups.go +++ /dev/null @@ -1,336 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListOrganizations represents the response from the list orgs endpoints. -type ListOrganizations struct { - TotalCount *int `json:"total_count,omitempty"` - Organizations []*Organization `json:"organizations"` -} - -// EnterpriseRunnerGroup represents a self-hosted runner group configured in an enterprise. 
-type EnterpriseRunnerGroup struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - Default *bool `json:"default,omitempty"` - SelectedOrganizationsURL *string `json:"selected_organizations_url,omitempty"` - RunnersURL *string `json:"runners_url,omitempty"` - Inherited *bool `json:"inherited,omitempty"` - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - SelectedWorkflows []string `json:"selected_workflows,omitempty"` - WorkflowRestrictionsReadOnly *bool `json:"workflow_restrictions_read_only,omitempty"` -} - -// EnterpriseRunnerGroups represents a collection of self-hosted runner groups configured for an enterprise. -type EnterpriseRunnerGroups struct { - TotalCount *int `json:"total_count,omitempty"` - RunnerGroups []*EnterpriseRunnerGroup `json:"runner_groups"` -} - -// CreateEnterpriseRunnerGroupRequest represents a request to create a Runner group for an enterprise. -type CreateEnterpriseRunnerGroupRequest struct { - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - // List of organization IDs that can access the runner group. - SelectedOrganizationIDs []int64 `json:"selected_organization_ids,omitempty"` - // Runners represent a list of runner IDs to add to the runner group. - Runners []int64 `json:"runners,omitempty"` - // If set to True, public repos can use this runner group - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - // If true, the runner group will be restricted to running only the workflows specified in the SelectedWorkflows slice. - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - // List of workflows the runner group should be allowed to run. This setting will be ignored unless RestrictedToWorkflows is set to true. - SelectedWorkflows []string `json:"selected_workflows,omitempty"` -} - -// UpdateEnterpriseRunnerGroupRequest represents a request to update a Runner group for an enterprise. -type UpdateEnterpriseRunnerGroupRequest struct { - Name *string `json:"name,omitempty"` - Visibility *string `json:"visibility,omitempty"` - AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` - RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` - SelectedWorkflows []string `json:"selected_workflows,omitempty"` -} - -// SetOrgAccessRunnerGroupRequest represents a request to replace the list of organizations -// that can access a self-hosted runner group configured in an enterprise. -type SetOrgAccessRunnerGroupRequest struct { - // Updated list of organization IDs that should be given access to the runner group. - SelectedOrganizationIDs []int64 `json:"selected_organization_ids"` -} - -// ListEnterpriseRunnerGroupOptions extend ListOptions to have the optional parameters VisibleToOrganization. -type ListEnterpriseRunnerGroupOptions struct { - ListOptions - - // Only return runner groups that are allowed to be used by this organization. - VisibleToOrganization string `url:"visible_to_organization,omitempty"` -} - -// ListRunnerGroups lists all self-hosted runner groups configured in an enterprise. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-self-hosted-runner-groups-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runner-groups -func (s *EnterpriseService) ListRunnerGroups(ctx context.Context, enterprise string, opts *ListEnterpriseRunnerGroupOptions) (*EnterpriseRunnerGroups, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups", enterprise) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - groups := &EnterpriseRunnerGroups{} - resp, err := s.client.Do(ctx, req, &groups) - if err != nil { - return nil, resp, err - } - - return groups, resp, nil -} - -// GetEnterpriseRunnerGroup gets a specific self-hosted runner group for an enterprise using its RunnerGroup ID. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#get-a-self-hosted-runner-group-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id} -func (s *EnterpriseService) GetEnterpriseRunnerGroup(ctx context.Context, enterprise string, groupID int64) (*EnterpriseRunnerGroup, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v", enterprise, groupID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(EnterpriseRunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// DeleteEnterpriseRunnerGroup deletes a self-hosted runner group from an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#delete-a-self-hosted-runner-group-from-an-enterprise -// -//meta:operation DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id} -func (s *EnterpriseService) DeleteEnterpriseRunnerGroup(ctx context.Context, enterprise string, groupID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v", enterprise, groupID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// CreateEnterpriseRunnerGroup creates a new self-hosted runner group for an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#create-a-self-hosted-runner-group-for-an-enterprise -// -//meta:operation POST /enterprises/{enterprise}/actions/runner-groups -func (s *EnterpriseService) CreateEnterpriseRunnerGroup(ctx context.Context, enterprise string, createReq CreateEnterpriseRunnerGroupRequest) (*EnterpriseRunnerGroup, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups", enterprise) - req, err := s.client.NewRequest("POST", u, createReq) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(EnterpriseRunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// UpdateEnterpriseRunnerGroup updates a self-hosted runner group for an enterprise. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#update-a-self-hosted-runner-group-for-an-enterprise -// -//meta:operation PATCH /enterprises/{enterprise}/actions/runner-groups/{runner_group_id} -func (s *EnterpriseService) UpdateEnterpriseRunnerGroup(ctx context.Context, enterprise string, groupID int64, updateReq UpdateEnterpriseRunnerGroupRequest) (*EnterpriseRunnerGroup, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v", enterprise, groupID) - req, err := s.client.NewRequest("PATCH", u, updateReq) - if err != nil { - return nil, nil, err - } - - runnerGroup := new(EnterpriseRunnerGroup) - resp, err := s.client.Do(ctx, req, runnerGroup) - if err != nil { - return nil, resp, err - } - - return runnerGroup, resp, nil -} - -// ListOrganizationAccessRunnerGroup lists the organizations with access to a self-hosted runner group configured in an enterprise. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-organization-access-to-a-self-hosted-runner-group-in-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations -func (s *EnterpriseService) ListOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID int64, opts *ListOptions) (*ListOrganizations, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations", enterprise, groupID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - orgs := &ListOrganizations{} - resp, err := s.client.Do(ctx, req, &orgs) - if err != nil { - return nil, resp, err - } - - return orgs, resp, nil -} - -// SetOrganizationAccessRunnerGroup replaces the list of organizations that have access to a self-hosted runner group configured in an enterprise -// with a new List of organizations. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#set-organization-access-for-a-self-hosted-runner-group-in-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations -func (s *EnterpriseService) SetOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID int64, ids SetOrgAccessRunnerGroupRequest) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations", enterprise, groupID) - - req, err := s.client.NewRequest("PUT", u, ids) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddOrganizationAccessRunnerGroup adds an organization to the list of selected organizations that can access a self-hosted runner group. -// The runner group must have visibility set to 'selected'. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#add-organization-access-to-a-self-hosted-runner-group-in-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id} -func (s *EnterpriseService) AddOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID, orgID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations/%v", enterprise, groupID, orgID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveOrganizationAccessRunnerGroup removes an organization from the list of selected organizations that can access a self-hosted runner group. -// The runner group must have visibility set to 'selected'. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#remove-organization-access-to-a-self-hosted-runner-group-in-an-enterprise -// -//meta:operation DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id} -func (s *EnterpriseService) RemoveOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID, orgID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations/%v", enterprise, groupID, orgID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListRunnerGroupRunners lists self-hosted runners that are in a specific enterprise group. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#list-self-hosted-runners-in-a-group-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners -func (s *EnterpriseService) ListRunnerGroupRunners(ctx context.Context, enterprise string, groupID int64, opts *ListOptions) (*Runners, *Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners", enterprise, groupID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - runners := &Runners{} - resp, err := s.client.Do(ctx, req, &runners) - if err != nil { - return nil, resp, err - } - - return runners, resp, nil -} - -// SetRunnerGroupRunners replaces the list of self-hosted runners that are part of an enterprise runner group -// with a new list of runners. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#set-self-hosted-runners-in-a-group-for-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners -func (s *EnterpriseService) SetRunnerGroupRunners(ctx context.Context, enterprise string, groupID int64, ids SetRunnerGroupRunnersRequest) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners", enterprise, groupID) - - req, err := s.client.NewRequest("PUT", u, ids) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// AddRunnerGroupRunners adds a self-hosted runner to a runner group configured in an enterprise. 
-// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#add-a-self-hosted-runner-to-a-group-for-an-enterprise -// -//meta:operation PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id} -func (s *EnterpriseService) AddRunnerGroupRunners(ctx context.Context, enterprise string, groupID, runnerID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners/%v", enterprise, groupID, runnerID) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveRunnerGroupRunners removes a self-hosted runner from a group configured in an enterprise. -// The runner is then returned to the default group. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups#remove-a-self-hosted-runner-from-a-group-for-an-enterprise -// -//meta:operation DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id} -func (s *EnterpriseService) RemoveRunnerGroupRunners(ctx context.Context, enterprise string, groupID, runnerID int64) (*Response, error) { - u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners/%v", enterprise, groupID, runnerID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_audit_log.go b/vendor/github.com/google/go-github/v71/github/enterprise_audit_log.go deleted file mode 100644 index 058a7d17..00000000 --- a/vendor/github.com/google/go-github/v71/github/enterprise_audit_log.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetAuditLog gets the audit-log entries for an organization. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/enterprise-admin/audit-log#get-the-audit-log-for-an-enterprise -// -//meta:operation GET /enterprises/{enterprise}/audit-log -func (s *EnterpriseService) GetAuditLog(ctx context.Context, enterprise string, opts *GetAuditLogOptions) ([]*AuditEntry, *Response, error) { - u := fmt.Sprintf("enterprises/%v/audit-log", enterprise) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var auditEntries []*AuditEntry - resp, err := s.client.Do(ctx, req, &auditEntries) - if err != nil { - return nil, resp, err - } - - return auditEntries, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/event.go b/vendor/github.com/google/go-github/v71/github/event.go deleted file mode 100644 index e98606bc..00000000 --- a/vendor/github.com/google/go-github/v71/github/event.go +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "encoding/json" -) - -// Event represents a GitHub event. 
-type Event struct { - Type *string `json:"type,omitempty"` - Public *bool `json:"public,omitempty"` - RawPayload *json.RawMessage `json:"payload,omitempty"` - Repo *Repository `json:"repo,omitempty"` - Actor *User `json:"actor,omitempty"` - Org *Organization `json:"org,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - ID *string `json:"id,omitempty"` -} - -func (e Event) String() string { - return Stringify(e) -} - -// ParsePayload parses the event payload. For recognized event types, -// a value of the corresponding struct type will be returned. -func (e *Event) ParsePayload() (interface{}, error) { - // It would be nice if e.Type were the snake_case name of the event, - // but the existing interface uses the struct name instead. - payload := EventForType(typeToMessageMapping[e.GetType()]) - - if err := json.Unmarshal(e.GetRawPayload(), &payload); err != nil { - return nil, err - } - - return payload, nil -} - -// Payload returns the parsed event payload. For recognized event types, -// a value of the corresponding struct type will be returned. -// -// Deprecated: Use ParsePayload instead, which returns an error -// rather than panics if JSON unmarshaling raw payload fails. -func (e *Event) Payload() (payload interface{}) { - var err error - payload, err = e.ParsePayload() - if err != nil { - panic(err) - } - return payload -} diff --git a/vendor/github.com/google/go-github/v71/github/gists_comments.go b/vendor/github.com/google/go-github/v71/github/gists_comments.go deleted file mode 100644 index 5e061423..00000000 --- a/vendor/github.com/google/go-github/v71/github/gists_comments.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GistComment represents a Gist comment. -type GistComment struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - Body *string `json:"body,omitempty"` - User *User `json:"user,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` -} - -func (g GistComment) String() string { - return Stringify(g) -} - -// ListComments lists all comments for a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/comments#list-gist-comments -// -//meta:operation GET /gists/{gist_id}/comments -func (s *GistsService) ListComments(ctx context.Context, gistID string, opts *ListOptions) ([]*GistComment, *Response, error) { - u := fmt.Sprintf("gists/%v/comments", gistID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var comments []*GistComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// GetComment retrieves a single comment from a gist. 
-// -// GitHub API docs: https://docs.github.com/rest/gists/comments#get-a-gist-comment -// -//meta:operation GET /gists/{gist_id}/comments/{comment_id} -func (s *GistsService) GetComment(ctx context.Context, gistID string, commentID int64) (*GistComment, *Response, error) { - u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - c := new(GistComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// CreateComment creates a comment for a gist. -// -// GitHub API docs: https://docs.github.com/rest/gists/comments#create-a-gist-comment -// -//meta:operation POST /gists/{gist_id}/comments -func (s *GistsService) CreateComment(ctx context.Context, gistID string, comment *GistComment) (*GistComment, *Response, error) { - u := fmt.Sprintf("gists/%v/comments", gistID) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(GistComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// EditComment edits an existing gist comment. -// -// GitHub API docs: https://docs.github.com/rest/gists/comments#update-a-gist-comment -// -//meta:operation PATCH /gists/{gist_id}/comments/{comment_id} -func (s *GistsService) EditComment(ctx context.Context, gistID string, commentID int64, comment *GistComment) (*GistComment, *Response, error) { - u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(GistComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// DeleteComment deletes a gist comment. -// -// GitHub API docs: https://docs.github.com/rest/gists/comments#delete-a-gist-comment -// -//meta:operation DELETE /gists/{gist_id}/comments/{comment_id} -func (s *GistsService) DeleteComment(ctx context.Context, gistID string, commentID int64) (*Response, error) { - u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/git.go b/vendor/github.com/google/go-github/v71/github/git.go deleted file mode 100644 index 2ca835e1..00000000 --- a/vendor/github.com/google/go-github/v71/github/git.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// GitService handles communication with the git data related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/git/ -type GitService service diff --git a/vendor/github.com/google/go-github/v71/github/git_blobs.go b/vendor/github.com/google/go-github/v71/github/git_blobs.go deleted file mode 100644 index d8904288..00000000 --- a/vendor/github.com/google/go-github/v71/github/git_blobs.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" - "fmt" -) - -// Blob represents a blob object. 
-type Blob struct { - Content *string `json:"content,omitempty"` - Encoding *string `json:"encoding,omitempty"` - SHA *string `json:"sha,omitempty"` - Size *int `json:"size,omitempty"` - URL *string `json:"url,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -// GetBlob fetches a blob from a repo given a SHA. -// -// GitHub API docs: https://docs.github.com/rest/git/blobs#get-a-blob -// -//meta:operation GET /repos/{owner}/{repo}/git/blobs/{file_sha} -func (s *GitService) GetBlob(ctx context.Context, owner string, repo string, sha string) (*Blob, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/blobs/%v", owner, repo, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - blob := new(Blob) - resp, err := s.client.Do(ctx, req, blob) - if err != nil { - return nil, resp, err - } - - return blob, resp, nil -} - -// GetBlobRaw fetches a blob's contents from a repo. -// Unlike GetBlob, it returns the raw bytes rather than the base64-encoded data. -// -// GitHub API docs: https://docs.github.com/rest/git/blobs#get-a-blob -// -//meta:operation GET /repos/{owner}/{repo}/git/blobs/{file_sha} -func (s *GitService) GetBlobRaw(ctx context.Context, owner, repo, sha string) ([]byte, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/blobs/%v", owner, repo, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", "application/vnd.github.v3.raw") - - var buf bytes.Buffer - resp, err := s.client.Do(ctx, req, &buf) - if err != nil { - return nil, resp, err - } - - return buf.Bytes(), resp, nil -} - -// CreateBlob creates a blob object. -// -// GitHub API docs: https://docs.github.com/rest/git/blobs#create-a-blob -// -//meta:operation POST /repos/{owner}/{repo}/git/blobs -func (s *GitService) CreateBlob(ctx context.Context, owner string, repo string, blob *Blob) (*Blob, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/blobs", owner, repo) - req, err := s.client.NewRequest("POST", u, blob) - if err != nil { - return nil, nil, err - } - - t := new(Blob) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/git_tags.go b/vendor/github.com/google/go-github/v71/github/git_tags.go deleted file mode 100644 index 67321566..00000000 --- a/vendor/github.com/google/go-github/v71/github/git_tags.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Tag represents a tag object. -type Tag struct { - Tag *string `json:"tag,omitempty"` - SHA *string `json:"sha,omitempty"` - URL *string `json:"url,omitempty"` - Message *string `json:"message,omitempty"` - Tagger *CommitAuthor `json:"tagger,omitempty"` - Object *GitObject `json:"object,omitempty"` - Verification *SignatureVerification `json:"verification,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -// createTagRequest represents the body of a CreateTag request. This is mostly -// identical to Tag with the exception that the object SHA and Type are -// top-level fields, rather than being nested inside a JSON object. 
-type createTagRequest struct { - Tag *string `json:"tag,omitempty"` - Message *string `json:"message,omitempty"` - Object *string `json:"object,omitempty"` - Type *string `json:"type,omitempty"` - Tagger *CommitAuthor `json:"tagger,omitempty"` -} - -// GetTag fetches a tag from a repo given a SHA. -// -// GitHub API docs: https://docs.github.com/rest/git/tags#get-a-tag -// -//meta:operation GET /repos/{owner}/{repo}/git/tags/{tag_sha} -func (s *GitService) GetTag(ctx context.Context, owner string, repo string, sha string) (*Tag, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/tags/%v", owner, repo, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - tag := new(Tag) - resp, err := s.client.Do(ctx, req, tag) - if err != nil { - return nil, resp, err - } - - return tag, resp, nil -} - -// CreateTag creates a tag object. -// -// GitHub API docs: https://docs.github.com/rest/git/tags#create-a-tag-object -// -//meta:operation POST /repos/{owner}/{repo}/git/tags -func (s *GitService) CreateTag(ctx context.Context, owner string, repo string, tag *Tag) (*Tag, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/tags", owner, repo) - - // convert Tag into a createTagRequest - tagRequest := &createTagRequest{ - Tag: tag.Tag, - Message: tag.Message, - Tagger: tag.Tagger, - } - if tag.Object != nil { - tagRequest.Object = tag.Object.SHA - tagRequest.Type = tag.Object.Type - } - - req, err := s.client.NewRequest("POST", u, tagRequest) - if err != nil { - return nil, nil, err - } - - t := new(Tag) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/git_trees.go b/vendor/github.com/google/go-github/v71/github/git_trees.go deleted file mode 100644 index b8eed58e..00000000 --- a/vendor/github.com/google/go-github/v71/github/git_trees.go +++ /dev/null @@ -1,166 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" -) - -// Tree represents a GitHub tree. -type Tree struct { - SHA *string `json:"sha,omitempty"` - Entries []*TreeEntry `json:"tree,omitempty"` - - // Truncated is true if the number of items in the tree - // exceeded GitHub's maximum limit and the Entries were truncated - // in the response. Only populated for requests that fetch - // trees like Git.GetTree. - Truncated *bool `json:"truncated,omitempty"` -} - -func (t Tree) String() string { - return Stringify(t) -} - -// TreeEntry represents the contents of a tree structure. TreeEntry can -// represent either a blob, a commit (in the case of a submodule), or another -// tree. -type TreeEntry struct { - SHA *string `json:"sha,omitempty"` - Path *string `json:"path,omitempty"` - Mode *string `json:"mode,omitempty"` - Type *string `json:"type,omitempty"` - Size *int `json:"size,omitempty"` - Content *string `json:"content,omitempty"` - URL *string `json:"url,omitempty"` -} - -func (t TreeEntry) String() string { - return Stringify(t) -} - -// treeEntryWithFileDelete is used internally to delete a file whose -// Content and SHA fields are empty. It does this by removing the "omitempty" -// tag modifier on the SHA field which causes the GitHub API to receive -// {"sha":null} and thereby delete the file. 
-type treeEntryWithFileDelete struct { - SHA *string `json:"sha"` - Path *string `json:"path,omitempty"` - Mode *string `json:"mode,omitempty"` - Type *string `json:"type,omitempty"` - Size *int `json:"size,omitempty"` - Content *string `json:"content,omitempty"` - URL *string `json:"url,omitempty"` -} - -func (t *TreeEntry) MarshalJSON() ([]byte, error) { - if t.SHA == nil && t.Content == nil { - return json.Marshal(struct { - SHA *string `json:"sha"` - Path *string `json:"path,omitempty"` - Mode *string `json:"mode,omitempty"` - Type *string `json:"type,omitempty"` - }{ - nil, - t.Path, - t.Mode, - t.Type, - }) - } - return json.Marshal(struct { - SHA *string `json:"sha,omitempty"` - Path *string `json:"path,omitempty"` - Mode *string `json:"mode,omitempty"` - Type *string `json:"type,omitempty"` - Size *int `json:"size,omitempty"` - Content *string `json:"content,omitempty"` - URL *string `json:"url,omitempty"` - }{ - SHA: t.SHA, - Path: t.Path, - Mode: t.Mode, - Type: t.Type, - Size: t.Size, - Content: t.Content, - URL: t.URL, - }) -} - -// GetTree fetches the Tree object for a given sha hash from a repository. -// -// GitHub API docs: https://docs.github.com/rest/git/trees#get-a-tree -// -//meta:operation GET /repos/{owner}/{repo}/git/trees/{tree_sha} -func (s *GitService) GetTree(ctx context.Context, owner string, repo string, sha string, recursive bool) (*Tree, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/trees/%v", owner, repo, sha) - if recursive { - u += "?recursive=1" - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Tree) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// createTree represents the body of a CreateTree request. -type createTree struct { - BaseTree string `json:"base_tree,omitempty"` - Entries []interface{} `json:"tree"` -} - -// CreateTree creates a new tree in a repository. If both a tree and a nested -// path modifying that tree are specified, it will overwrite the contents of -// that tree with the new path contents and write a new tree out. -// -// GitHub API docs: https://docs.github.com/rest/git/trees#create-a-tree -// -//meta:operation POST /repos/{owner}/{repo}/git/trees -func (s *GitService) CreateTree(ctx context.Context, owner string, repo string, baseTree string, entries []*TreeEntry) (*Tree, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/git/trees", owner, repo) - - newEntries := make([]interface{}, 0, len(entries)) - for _, entry := range entries { - if entry.Content == nil && entry.SHA == nil { - newEntries = append(newEntries, treeEntryWithFileDelete{ - Path: entry.Path, - Mode: entry.Mode, - Type: entry.Type, - Size: entry.Size, - URL: entry.URL, - }) - continue - } - newEntries = append(newEntries, entry) - } - - body := &createTree{ - BaseTree: baseTree, - Entries: newEntries, - } - req, err := s.client.NewRequest("POST", u, body) - if err != nil { - return nil, nil, err - } - - t := new(Tree) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/gitignore.go b/vendor/github.com/google/go-github/v71/github/gitignore.go deleted file mode 100644 index 34cf285e..00000000 --- a/vendor/github.com/google/go-github/v71/github/gitignore.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GitignoresService provides access to the gitignore related functions in the -// GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/gitignore/ -type GitignoresService service - -// Gitignore represents a .gitignore file as returned by the GitHub API. -type Gitignore struct { - Name *string `json:"name,omitempty"` - Source *string `json:"source,omitempty"` -} - -func (g Gitignore) String() string { - return Stringify(g) -} - -// List all available Gitignore templates. -// -// GitHub API docs: https://docs.github.com/rest/gitignore/gitignore#get-all-gitignore-templates -// -//meta:operation GET /gitignore/templates -func (s *GitignoresService) List(ctx context.Context) ([]string, *Response, error) { - req, err := s.client.NewRequest("GET", "gitignore/templates", nil) - if err != nil { - return nil, nil, err - } - - var availableTemplates []string - resp, err := s.client.Do(ctx, req, &availableTemplates) - if err != nil { - return nil, resp, err - } - - return availableTemplates, resp, nil -} - -// Get a Gitignore by name. -// -// GitHub API docs: https://docs.github.com/rest/gitignore/gitignore#get-a-gitignore-template -// -//meta:operation GET /gitignore/templates/{name} -func (s *GitignoresService) Get(ctx context.Context, name string) (*Gitignore, *Response, error) { - u := fmt.Sprintf("gitignore/templates/%v", name) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - gitignore := new(Gitignore) - resp, err := s.client.Do(ctx, req, gitignore) - if err != nil { - return nil, resp, err - } - - return gitignore, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/interactions.go b/vendor/github.com/google/go-github/v71/github/interactions.go deleted file mode 100644 index 2268273d..00000000 --- a/vendor/github.com/google/go-github/v71/github/interactions.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// InteractionsService handles communication with the repository and organization related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/interactions/ -type InteractionsService service - -// InteractionRestriction represents the interaction restrictions for repository and organization. -type InteractionRestriction struct { - // Specifies the group of GitHub users who can - // comment, open issues, or create pull requests for the given repository. - // Possible values are: "existing_users", "contributors_only" and "collaborators_only". - Limit *string `json:"limit,omitempty"` - - // Origin specifies the type of the resource to interact with. - // Possible values are: "repository" and "organization". - Origin *string `json:"origin,omitempty"` - - // ExpiresAt specifies the time after which the interaction restrictions expire. - // The default expiry time is 24 hours from the time restriction is created. 
- ExpiresAt *Timestamp `json:"expires_at,omitempty"` -} diff --git a/vendor/github.com/google/go-github/v71/github/issue_import.go b/vendor/github.com/google/go-github/v71/github/issue_import.go deleted file mode 100644 index 4f063710..00000000 --- a/vendor/github.com/google/go-github/v71/github/issue_import.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2020 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" - "encoding/json" - "fmt" -) - -// IssueImportService handles communication with the issue import related -// methods of the Issue Import GitHub API. -type IssueImportService service - -// IssueImportRequest represents a request to create an issue. -// -// https://gist.github.com/jonmagic/5282384165e0f86ef105#supported-issue-and-comment-fields -type IssueImportRequest struct { - IssueImport IssueImport `json:"issue"` - Comments []*Comment `json:"comments,omitempty"` -} - -// IssueImport represents body of issue to import. -type IssueImport struct { - Title string `json:"title"` - Body string `json:"body"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Assignee *string `json:"assignee,omitempty"` - Milestone *int `json:"milestone,omitempty"` - Closed *bool `json:"closed,omitempty"` - Labels []string `json:"labels,omitempty"` -} - -// Comment represents comments of issue to import. -type Comment struct { - CreatedAt *Timestamp `json:"created_at,omitempty"` - Body string `json:"body"` -} - -// IssueImportResponse represents the response of an issue import create request. -// -// https://gist.github.com/jonmagic/5282384165e0f86ef105#import-issue-response -type IssueImportResponse struct { - ID *int `json:"id,omitempty"` - Status *string `json:"status,omitempty"` - URL *string `json:"url,omitempty"` - ImportIssuesURL *string `json:"import_issues_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Message *string `json:"message,omitempty"` - DocumentationURL *string `json:"documentation_url,omitempty"` - Errors []*IssueImportError `json:"errors,omitempty"` -} - -// IssueImportError represents errors of an issue import create request. -type IssueImportError struct { - Location *string `json:"location,omitempty"` - Resource *string `json:"resource,omitempty"` - Field *string `json:"field,omitempty"` - Value *string `json:"value,omitempty"` - Code *string `json:"code,omitempty"` -} - -// Create a new imported issue on the specified repository. -// -// GitHub API docs: https://gist.github.com/jonmagic/5282384165e0f86ef105#start-an-issue-import -// -//meta:operation POST /repos/{owner}/{repo}/import/issues -func (s *IssueImportService) Create(ctx context.Context, owner, repo string, issue *IssueImportRequest) (*IssueImportResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/issues", owner, repo) - req, err := s.client.NewRequest("POST", u, issue) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. 
- req.Header.Set("Accept", mediaTypeIssueImportAPI) - - i := new(IssueImportResponse) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - aerr, ok := err.(*AcceptedError) - if ok { - if err := json.Unmarshal(aerr.Raw, i); err != nil { - return i, resp, err - } - return i, resp, err - } - return nil, resp, err - } - - return i, resp, nil -} - -// CheckStatus checks the status of an imported issue. -// -// GitHub API docs: https://gist.github.com/jonmagic/5282384165e0f86ef105#import-status-request -// -//meta:operation GET /repos/{owner}/{repo}/import/issues/{issue_number} -func (s *IssueImportService) CheckStatus(ctx context.Context, owner, repo string, issueID int64) (*IssueImportResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/issues/%v", owner, repo, issueID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeIssueImportAPI) - - i := new(IssueImportResponse) - resp, err := s.client.Do(ctx, req, i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} - -// CheckStatusSince checks the status of multiple imported issues since a given date. -// -// GitHub API docs: https://gist.github.com/jonmagic/5282384165e0f86ef105#check-status-of-multiple-issues -// -//meta:operation GET /repos/{owner}/{repo}/import/issues -func (s *IssueImportService) CheckStatusSince(ctx context.Context, owner, repo string, since Timestamp) ([]*IssueImportResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/import/issues?since=%v", owner, repo, since.Format("2006-01-02")) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept headers when APIs fully launch. - req.Header.Set("Accept", mediaTypeIssueImportAPI) - - var b bytes.Buffer - resp, err := s.client.Do(ctx, req, &b) - if err != nil { - return nil, resp, err - } - - var i []*IssueImportResponse - err = json.Unmarshal(b.Bytes(), &i) - if err != nil { - return nil, resp, err - } - - return i, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/issues_assignees.go b/vendor/github.com/google/go-github/v71/github/issues_assignees.go deleted file mode 100644 index fd065771..00000000 --- a/vendor/github.com/google/go-github/v71/github/issues_assignees.go +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListAssignees fetches all available assignees (owners and collaborators) to -// which issues may be assigned. -// -// GitHub API docs: https://docs.github.com/rest/issues/assignees#list-assignees -// -//meta:operation GET /repos/{owner}/{repo}/assignees -func (s *IssuesService) ListAssignees(ctx context.Context, owner, repo string, opts *ListOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/assignees", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var assignees []*User - resp, err := s.client.Do(ctx, req, &assignees) - if err != nil { - return nil, resp, err - } - - return assignees, resp, nil -} - -// IsAssignee checks if a user is an assignee for the specified repository. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/assignees#check-if-a-user-can-be-assigned -// -//meta:operation GET /repos/{owner}/{repo}/assignees/{assignee} -func (s *IssuesService) IsAssignee(ctx context.Context, owner, repo, user string) (bool, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/assignees/%v", owner, repo, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - assignee, err := parseBoolResponse(err) - return assignee, resp, err -} - -// AddAssignees adds the provided GitHub users as assignees to the issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/assignees#add-assignees-to-an-issue -// -//meta:operation POST /repos/{owner}/{repo}/issues/{issue_number}/assignees -func (s *IssuesService) AddAssignees(ctx context.Context, owner, repo string, number int, assignees []string) (*Issue, *Response, error) { - users := &struct { - Assignees []string `json:"assignees,omitempty"` - }{Assignees: assignees} - u := fmt.Sprintf("repos/%v/%v/issues/%v/assignees", owner, repo, number) - req, err := s.client.NewRequest("POST", u, users) - if err != nil { - return nil, nil, err - } - - issue := &Issue{} - resp, err := s.client.Do(ctx, req, issue) - if err != nil { - return nil, resp, err - } - - return issue, resp, nil -} - -// RemoveAssignees removes the provided GitHub users as assignees from the issue. -// -// GitHub API docs: https://docs.github.com/rest/issues/assignees#remove-assignees-from-an-issue -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees -func (s *IssuesService) RemoveAssignees(ctx context.Context, owner, repo string, number int, assignees []string) (*Issue, *Response, error) { - users := &struct { - Assignees []string `json:"assignees,omitempty"` - }{Assignees: assignees} - u := fmt.Sprintf("repos/%v/%v/issues/%v/assignees", owner, repo, number) - req, err := s.client.NewRequest("DELETE", u, users) - if err != nil { - return nil, nil, err - } - - issue := &Issue{} - resp, err := s.client.Do(ctx, req, issue) - if err != nil { - return nil, resp, err - } - - return issue, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/issues_comments.go b/vendor/github.com/google/go-github/v71/github/issues_comments.go deleted file mode 100644 index 74a4e60f..00000000 --- a/vendor/github.com/google/go-github/v71/github/issues_comments.go +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "time" -) - -// IssueComment represents a comment left on an issue. -type IssueComment struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Body *string `json:"body,omitempty"` - User *User `json:"user,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - // AuthorAssociation is the comment author's relationship to the issue's repository. - // Possible values are "COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MEMBER", "OWNER", or "NONE". 
- AuthorAssociation *string `json:"author_association,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - IssueURL *string `json:"issue_url,omitempty"` -} - -func (i IssueComment) String() string { - return Stringify(i) -} - -// IssueListCommentsOptions specifies the optional parameters to the -// IssuesService.ListComments method. -type IssueListCommentsOptions struct { - // Sort specifies how to sort comments. Possible values are: created, updated. - Sort *string `url:"sort,omitempty"` - - // Direction in which to sort comments. Possible values are: asc, desc. - Direction *string `url:"direction,omitempty"` - - // Since filters comments by time. - Since *time.Time `url:"since,omitempty"` - - ListOptions -} - -// ListComments lists all comments on the specified issue. Specifying an issue -// number of 0 will return all comments on all issues for the repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/comments#list-issue-comments -// GitHub API docs: https://docs.github.com/rest/issues/comments#list-issue-comments-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/issues/comments -//meta:operation GET /repos/{owner}/{repo}/issues/{issue_number}/comments -func (s *IssuesService) ListComments(ctx context.Context, owner string, repo string, number int, opts *IssueListCommentsOptions) ([]*IssueComment, *Response, error) { - var u string - if number == 0 { - u = fmt.Sprintf("repos/%v/%v/issues/comments", owner, repo) - } else { - u = fmt.Sprintf("repos/%v/%v/issues/%d/comments", owner, repo, number) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var comments []*IssueComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// GetComment fetches the specified issue comment. -// -// GitHub API docs: https://docs.github.com/rest/issues/comments#get-an-issue-comment -// -//meta:operation GET /repos/{owner}/{repo}/issues/comments/{comment_id} -func (s *IssuesService) GetComment(ctx context.Context, owner string, repo string, commentID int64) (*IssueComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - comment := new(IssueComment) - resp, err := s.client.Do(ctx, req, comment) - if err != nil { - return nil, resp, err - } - - return comment, resp, nil -} - -// CreateComment creates a new comment on the specified issue. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/comments#create-an-issue-comment -// -//meta:operation POST /repos/{owner}/{repo}/issues/{issue_number}/comments -func (s *IssuesService) CreateComment(ctx context.Context, owner string, repo string, number int, comment *IssueComment) (*IssueComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/%d/comments", owner, repo, number) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - c := new(IssueComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// EditComment updates an issue comment. -// A non-nil comment.Body must be provided. Other comment fields should be left nil. -// -// GitHub API docs: https://docs.github.com/rest/issues/comments#update-an-issue-comment -// -//meta:operation PATCH /repos/{owner}/{repo}/issues/comments/{comment_id} -func (s *IssuesService) EditComment(ctx context.Context, owner string, repo string, commentID int64, comment *IssueComment) (*IssueComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - c := new(IssueComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// DeleteComment deletes an issue comment. -// -// GitHub API docs: https://docs.github.com/rest/issues/comments#delete-an-issue-comment -// -//meta:operation DELETE /repos/{owner}/{repo}/issues/comments/{comment_id} -func (s *IssuesService) DeleteComment(ctx context.Context, owner string, repo string, commentID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/issues_milestones.go b/vendor/github.com/google/go-github/v71/github/issues_milestones.go deleted file mode 100644 index 6c31bcd0..00000000 --- a/vendor/github.com/google/go-github/v71/github/issues_milestones.go +++ /dev/null @@ -1,157 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Milestone represents a GitHub repository milestone. -type Milestone struct { - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - LabelsURL *string `json:"labels_url,omitempty"` - ID *int64 `json:"id,omitempty"` - Number *int `json:"number,omitempty"` - State *string `json:"state,omitempty"` - Title *string `json:"title,omitempty"` - Description *string `json:"description,omitempty"` - Creator *User `json:"creator,omitempty"` - OpenIssues *int `json:"open_issues,omitempty"` - ClosedIssues *int `json:"closed_issues,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - ClosedAt *Timestamp `json:"closed_at,omitempty"` - DueOn *Timestamp `json:"due_on,omitempty"` - NodeID *string `json:"node_id,omitempty"` -} - -func (m Milestone) String() string { - return Stringify(m) -} - -// MilestoneListOptions specifies the optional parameters to the -// IssuesService.ListMilestones method. 
-type MilestoneListOptions struct { - // State filters milestones based on their state. Possible values are: - // open, closed, all. Default is "open". - State string `url:"state,omitempty"` - - // Sort specifies how to sort milestones. Possible values are: due_on, completeness. - // Default value is "due_on". - Sort string `url:"sort,omitempty"` - - // Direction in which to sort milestones. Possible values are: asc, desc. - // Default is "asc". - Direction string `url:"direction,omitempty"` - - ListOptions -} - -// ListMilestones lists all milestones for a repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/milestones#list-milestones -// -//meta:operation GET /repos/{owner}/{repo}/milestones -func (s *IssuesService) ListMilestones(ctx context.Context, owner string, repo string, opts *MilestoneListOptions) ([]*Milestone, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var milestones []*Milestone - resp, err := s.client.Do(ctx, req, &milestones) - if err != nil { - return nil, resp, err - } - - return milestones, resp, nil -} - -// GetMilestone gets a single milestone. -// -// GitHub API docs: https://docs.github.com/rest/issues/milestones#get-a-milestone -// -//meta:operation GET /repos/{owner}/{repo}/milestones/{milestone_number} -func (s *IssuesService) GetMilestone(ctx context.Context, owner string, repo string, number int) (*Milestone, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones/%d", owner, repo, number) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - milestone := new(Milestone) - resp, err := s.client.Do(ctx, req, milestone) - if err != nil { - return nil, resp, err - } - - return milestone, resp, nil -} - -// CreateMilestone creates a new milestone on the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/issues/milestones#create-a-milestone -// -//meta:operation POST /repos/{owner}/{repo}/milestones -func (s *IssuesService) CreateMilestone(ctx context.Context, owner string, repo string, milestone *Milestone) (*Milestone, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones", owner, repo) - req, err := s.client.NewRequest("POST", u, milestone) - if err != nil { - return nil, nil, err - } - - m := new(Milestone) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// EditMilestone edits a milestone. -// -// GitHub API docs: https://docs.github.com/rest/issues/milestones#update-a-milestone -// -//meta:operation PATCH /repos/{owner}/{repo}/milestones/{milestone_number} -func (s *IssuesService) EditMilestone(ctx context.Context, owner string, repo string, number int, milestone *Milestone) (*Milestone, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones/%d", owner, repo, number) - req, err := s.client.NewRequest("PATCH", u, milestone) - if err != nil { - return nil, nil, err - } - - m := new(Milestone) - resp, err := s.client.Do(ctx, req, m) - if err != nil { - return nil, resp, err - } - - return m, resp, nil -} - -// DeleteMilestone deletes a milestone. 
-// -// GitHub API docs: https://docs.github.com/rest/issues/milestones#delete-a-milestone -// -//meta:operation DELETE /repos/{owner}/{repo}/milestones/{milestone_number} -func (s *IssuesService) DeleteMilestone(ctx context.Context, owner string, repo string, number int) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/milestones/%d", owner, repo, number) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/licenses.go b/vendor/github.com/google/go-github/v71/github/licenses.go deleted file mode 100644 index 34b8a3d8..00000000 --- a/vendor/github.com/google/go-github/v71/github/licenses.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// LicensesService handles communication with the license related -// methods of the GitHub API. -// -// GitHub API docs: https://docs.github.com/rest/licenses/ -type LicensesService service - -// RepositoryLicense represents the license for a repository. -type RepositoryLicense struct { - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` - - SHA *string `json:"sha,omitempty"` - Size *int `json:"size,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - GitURL *string `json:"git_url,omitempty"` - DownloadURL *string `json:"download_url,omitempty"` - Type *string `json:"type,omitempty"` - Content *string `json:"content,omitempty"` - Encoding *string `json:"encoding,omitempty"` - License *License `json:"license,omitempty"` -} - -func (l RepositoryLicense) String() string { - return Stringify(l) -} - -// License represents an open source license. -type License struct { - Key *string `json:"key,omitempty"` - Name *string `json:"name,omitempty"` - URL *string `json:"url,omitempty"` - - SPDXID *string `json:"spdx_id,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - Featured *bool `json:"featured,omitempty"` - Description *string `json:"description,omitempty"` - Implementation *string `json:"implementation,omitempty"` - Permissions *[]string `json:"permissions,omitempty"` - Conditions *[]string `json:"conditions,omitempty"` - Limitations *[]string `json:"limitations,omitempty"` - Body *string `json:"body,omitempty"` -} - -func (l License) String() string { - return Stringify(l) -} - -// List popular open source licenses. -// -// GitHub API docs: https://docs.github.com/rest/licenses/licenses#get-all-commonly-used-licenses -// -//meta:operation GET /licenses -func (s *LicensesService) List(ctx context.Context) ([]*License, *Response, error) { - req, err := s.client.NewRequest("GET", "licenses", nil) - if err != nil { - return nil, nil, err - } - - var licenses []*License - resp, err := s.client.Do(ctx, req, &licenses) - if err != nil { - return nil, resp, err - } - - return licenses, resp, nil -} - -// Get extended metadata for one license. 
-// -// GitHub API docs: https://docs.github.com/rest/licenses/licenses#get-a-license -// -//meta:operation GET /licenses/{license} -func (s *LicensesService) Get(ctx context.Context, licenseName string) (*License, *Response, error) { - u := fmt.Sprintf("licenses/%s", licenseName) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - license := new(License) - resp, err := s.client.Do(ctx, req, license) - if err != nil { - return nil, resp, err - } - - return license, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/orgs_actions_allowed.go b/vendor/github.com/google/go-github/v71/github/orgs_actions_allowed.go deleted file mode 100644 index b115e094..00000000 --- a/vendor/github.com/google/go-github/v71/github/orgs_actions_allowed.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" -) - -// GetActionsAllowed gets the actions that are allowed in an organization. -// -// Deprecated: please use `client.Actions.GetActionsAllowed` instead. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/permissions/selected-actions -func (s *OrganizationsService) GetActionsAllowed(ctx context.Context, org string) (*ActionsAllowed, *Response, error) { - s2 := (*ActionsService)(s) - return s2.GetActionsAllowed(ctx, org) -} - -// EditActionsAllowed sets the actions that are allowed in an organization. -// -// Deprecated: please use `client.Actions.EditActionsAllowed` instead. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions/selected-actions -func (s *OrganizationsService) EditActionsAllowed(ctx context.Context, org string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { - s2 := (*ActionsService)(s) - return s2.EditActionsAllowed(ctx, org, actionsAllowed) -} diff --git a/vendor/github.com/google/go-github/v71/github/orgs_actions_permissions.go b/vendor/github.com/google/go-github/v71/github/orgs_actions_permissions.go deleted file mode 100644 index 97df1c96..00000000 --- a/vendor/github.com/google/go-github/v71/github/orgs_actions_permissions.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" -) - -// GetActionsPermissions gets the GitHub Actions permissions policy for repositories and allowed actions in an organization. -// -// Deprecated: please use `client.Actions.GetActionsPermissions` instead. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-github-actions-permissions-for-an-organization -// -//meta:operation GET /orgs/{org}/actions/permissions -func (s *OrganizationsService) GetActionsPermissions(ctx context.Context, org string) (*ActionsPermissions, *Response, error) { - s2 := (*ActionsService)(s) - return s2.GetActionsPermissions(ctx, org) -} - -// EditActionsPermissions sets the permissions policy for repositories and allowed actions in an organization. 
-// -// Deprecated: please use `client.Actions.EditActionsPermissions` instead. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-github-actions-permissions-for-an-organization -// -//meta:operation PUT /orgs/{org}/actions/permissions -func (s *OrganizationsService) EditActionsPermissions(ctx context.Context, org string, actionsPermissions ActionsPermissions) (*ActionsPermissions, *Response, error) { - s2 := (*ActionsService)(s) - return s2.EditActionsPermissions(ctx, org, actionsPermissions) -} diff --git a/vendor/github.com/google/go-github/v71/github/orgs_hooks.go b/vendor/github.com/google/go-github/v71/github/orgs_hooks.go deleted file mode 100644 index c2eef77c..00000000 --- a/vendor/github.com/google/go-github/v71/github/orgs_hooks.go +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2015 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListHooks lists all Hooks for the specified organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#list-organization-webhooks -// -//meta:operation GET /orgs/{org}/hooks -func (s *OrganizationsService) ListHooks(ctx context.Context, org string, opts *ListOptions) ([]*Hook, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var hooks []*Hook - resp, err := s.client.Do(ctx, req, &hooks) - if err != nil { - return nil, resp, err - } - - return hooks, resp, nil -} - -// GetHook returns a single specified Hook. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#get-an-organization-webhook -// -//meta:operation GET /orgs/{org}/hooks/{hook_id} -func (s *OrganizationsService) GetHook(ctx context.Context, org string, id int64) (*Hook, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - hook := new(Hook) - resp, err := s.client.Do(ctx, req, hook) - if err != nil { - return nil, resp, err - } - - return hook, resp, nil -} - -// CreateHook creates a Hook for the specified org. -// Config is a required field. -// -// Note that only a subset of the hook fields are used and hook must -// not be nil. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#create-an-organization-webhook -// -//meta:operation POST /orgs/{org}/hooks -func (s *OrganizationsService) CreateHook(ctx context.Context, org string, hook *Hook) (*Hook, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks", org) - - hookReq := &createHookRequest{ - Name: "web", - Events: hook.Events, - Active: hook.Active, - Config: hook.Config, - } - - req, err := s.client.NewRequest("POST", u, hookReq) - if err != nil { - return nil, nil, err - } - - h := new(Hook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// EditHook updates a specified Hook. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#update-an-organization-webhook -// -//meta:operation PATCH /orgs/{org}/hooks/{hook_id} -func (s *OrganizationsService) EditHook(ctx context.Context, org string, id int64, hook *Hook) (*Hook, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) - req, err := s.client.NewRequest("PATCH", u, hook) - if err != nil { - return nil, nil, err - } - - h := new(Hook) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// PingHook triggers a 'ping' event to be sent to the Hook. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#ping-an-organization-webhook -// -//meta:operation POST /orgs/{org}/hooks/{hook_id}/pings -func (s *OrganizationsService) PingHook(ctx context.Context, org string, id int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%d/pings", org, id) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// DeleteHook deletes a specified Hook. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#delete-an-organization-webhook -// -//meta:operation DELETE /orgs/{org}/hooks/{hook_id} -func (s *OrganizationsService) DeleteHook(ctx context.Context, org string, id int64) (*Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/orgs_hooks_configuration.go b/vendor/github.com/google/go-github/v71/github/orgs_hooks_configuration.go deleted file mode 100644 index aeb616fc..00000000 --- a/vendor/github.com/google/go-github/v71/github/orgs_hooks_configuration.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetHookConfiguration returns the configuration for the specified organization webhook. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#get-a-webhook-configuration-for-an-organization -// -//meta:operation GET /orgs/{org}/hooks/{hook_id}/config -func (s *OrganizationsService) GetHookConfiguration(ctx context.Context, org string, id int64) (*HookConfig, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%v/config", org, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - config := new(HookConfig) - resp, err := s.client.Do(ctx, req, config) - if err != nil { - return nil, resp, err - } - - return config, resp, nil -} - -// EditHookConfiguration updates the configuration for the specified organization webhook. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#update-a-webhook-configuration-for-an-organization -// -//meta:operation PATCH /orgs/{org}/hooks/{hook_id}/config -func (s *OrganizationsService) EditHookConfiguration(ctx context.Context, org string, id int64, config *HookConfig) (*HookConfig, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%v/config", org, id) - req, err := s.client.NewRequest("PATCH", u, config) - if err != nil { - return nil, nil, err - } - - c := new(HookConfig) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/orgs_hooks_deliveries.go b/vendor/github.com/google/go-github/v71/github/orgs_hooks_deliveries.go deleted file mode 100644 index c1c30124..00000000 --- a/vendor/github.com/google/go-github/v71/github/orgs_hooks_deliveries.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2021 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListHookDeliveries lists webhook deliveries for a webhook configured in an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#list-deliveries-for-an-organization-webhook -// -//meta:operation GET /orgs/{org}/hooks/{hook_id}/deliveries -func (s *OrganizationsService) ListHookDeliveries(ctx context.Context, org string, id int64, opts *ListCursorOptions) ([]*HookDelivery, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%v/deliveries", org, id) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - deliveries := []*HookDelivery{} - resp, err := s.client.Do(ctx, req, &deliveries) - if err != nil { - return nil, resp, err - } - - return deliveries, resp, nil -} - -// GetHookDelivery returns a delivery for a webhook configured in an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#get-a-webhook-delivery-for-an-organization-webhook -// -//meta:operation GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id} -func (s *OrganizationsService) GetHookDelivery(ctx context.Context, owner string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%v/deliveries/%v", owner, hookID, deliveryID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} - -// RedeliverHookDelivery redelivers a delivery for a webhook configured in an organization. 
-// -// GitHub API docs: https://docs.github.com/rest/orgs/webhooks#redeliver-a-delivery-for-an-organization-webhook -// -//meta:operation POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts -func (s *OrganizationsService) RedeliverHookDelivery(ctx context.Context, owner string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { - u := fmt.Sprintf("orgs/%v/hooks/%v/deliveries/%v/attempts", owner, hookID, deliveryID) - req, err := s.client.NewRequest("POST", u, nil) - if err != nil { - return nil, nil, err - } - - h := new(HookDelivery) - resp, err := s.client.Do(ctx, req, h) - if err != nil { - return nil, resp, err - } - - return h, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/orgs_outside_collaborators.go b/vendor/github.com/google/go-github/v71/github/orgs_outside_collaborators.go deleted file mode 100644 index 56034d72..00000000 --- a/vendor/github.com/google/go-github/v71/github/orgs_outside_collaborators.go +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListOutsideCollaboratorsOptions specifies optional parameters to the -// OrganizationsService.ListOutsideCollaborators method. -type ListOutsideCollaboratorsOptions struct { - // Filter outside collaborators returned in the list. Possible values are: - // 2fa_disabled, all. Default is "all". - Filter string `url:"filter,omitempty"` - - ListOptions -} - -// ListOutsideCollaborators lists outside collaborators of organization's repositories. -// This will only work if the authenticated -// user is an owner of the organization. -// -// Warning: The API may change without advance notice during the preview period. -// Preview features are not supported for production use. -// -// GitHub API docs: https://docs.github.com/rest/orgs/outside-collaborators#list-outside-collaborators-for-an-organization -// -//meta:operation GET /orgs/{org}/outside_collaborators -func (s *OrganizationsService) ListOutsideCollaborators(ctx context.Context, org string, opts *ListOutsideCollaboratorsOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("orgs/%v/outside_collaborators", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var members []*User - resp, err := s.client.Do(ctx, req, &members) - if err != nil { - return nil, resp, err - } - - return members, resp, nil -} - -// RemoveOutsideCollaborator removes a user from the list of outside collaborators; -// consequently, removing them from all the organization's repositories. -// -// GitHub API docs: https://docs.github.com/rest/orgs/outside-collaborators#remove-outside-collaborator-from-an-organization -// -//meta:operation DELETE /orgs/{org}/outside_collaborators/{username} -func (s *OrganizationsService) RemoveOutsideCollaborator(ctx context.Context, org string, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/outside_collaborators/%v", org, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ConvertMemberToOutsideCollaborator reduces the permission level of a member of the -// organization to that of an outside collaborator. 
Therefore, they will only -// have access to the repositories that their current team membership allows. -// Responses for converting a non-member or the last owner to an outside collaborator -// are listed in GitHub API docs. -// -// GitHub API docs: https://docs.github.com/rest/orgs/outside-collaborators#convert-an-organization-member-to-outside-collaborator -// -//meta:operation PUT /orgs/{org}/outside_collaborators/{username} -func (s *OrganizationsService) ConvertMemberToOutsideCollaborator(ctx context.Context, org string, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/outside_collaborators/%v", org, user) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/orgs_users_blocking.go b/vendor/github.com/google/go-github/v71/github/orgs_users_blocking.go deleted file mode 100644 index 62bd9116..00000000 --- a/vendor/github.com/google/go-github/v71/github/orgs_users_blocking.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListBlockedUsers lists all the users blocked by an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/blocking#list-users-blocked-by-an-organization -// -//meta:operation GET /orgs/{org}/blocks -func (s *OrganizationsService) ListBlockedUsers(ctx context.Context, org string, opts *ListOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("orgs/%v/blocks", org) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - var blockedUsers []*User - resp, err := s.client.Do(ctx, req, &blockedUsers) - if err != nil { - return nil, resp, err - } - - return blockedUsers, resp, nil -} - -// IsBlocked reports whether specified user is blocked from an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/blocking#check-if-a-user-is-blocked-by-an-organization -// -//meta:operation GET /orgs/{org}/blocks/{username} -func (s *OrganizationsService) IsBlocked(ctx context.Context, org string, user string) (bool, *Response, error) { - u := fmt.Sprintf("orgs/%v/blocks/%v", org, user) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - resp, err := s.client.Do(ctx, req, nil) - isBlocked, err := parseBoolResponse(err) - return isBlocked, resp, err -} - -// BlockUser blocks specified user from an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/blocking#block-a-user-from-an-organization -// -//meta:operation PUT /orgs/{org}/blocks/{username} -func (s *OrganizationsService) BlockUser(ctx context.Context, org string, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/blocks/%v", org, user) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - return s.client.Do(ctx, req, nil) -} - -// UnblockUser unblocks specified user from an organization. -// -// GitHub API docs: https://docs.github.com/rest/orgs/blocking#unblock-a-user-from-an-organization -// -//meta:operation DELETE /orgs/{org}/blocks/{username} -func (s *OrganizationsService) UnblockUser(ctx context.Context, org string, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/blocks/%v", org, user) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/pulls_comments.go b/vendor/github.com/google/go-github/v71/github/pulls_comments.go deleted file mode 100644 index a9ffe8d7..00000000 --- a/vendor/github.com/google/go-github/v71/github/pulls_comments.go +++ /dev/null @@ -1,217 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strings" - "time" -) - -// PullRequestComment represents a comment left on a pull request. -type PullRequestComment struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - InReplyTo *int64 `json:"in_reply_to_id,omitempty"` - Body *string `json:"body,omitempty"` - Path *string `json:"path,omitempty"` - DiffHunk *string `json:"diff_hunk,omitempty"` - PullRequestReviewID *int64 `json:"pull_request_review_id,omitempty"` - Position *int `json:"position,omitempty"` - OriginalPosition *int `json:"original_position,omitempty"` - StartLine *int `json:"start_line,omitempty"` - Line *int `json:"line,omitempty"` - OriginalLine *int `json:"original_line,omitempty"` - OriginalStartLine *int `json:"original_start_line,omitempty"` - Side *string `json:"side,omitempty"` - StartSide *string `json:"start_side,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - OriginalCommitID *string `json:"original_commit_id,omitempty"` - User *User `json:"user,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - // AuthorAssociation is the comment author's relationship to the pull request's repository. - // Possible values are "COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MEMBER", "OWNER", or "NONE". - AuthorAssociation *string `json:"author_association,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - PullRequestURL *string `json:"pull_request_url,omitempty"` - // Can be one of: LINE, FILE from https://docs.github.com/rest/pulls/comments#create-a-review-comment-for-a-pull-request - SubjectType *string `json:"subject_type,omitempty"` -} - -func (p PullRequestComment) String() string { - return Stringify(p) -} - -// PullRequestListCommentsOptions specifies the optional parameters to the -// PullRequestsService.ListComments method. -type PullRequestListCommentsOptions struct { - // Sort specifies how to sort comments. Possible values are: created, updated. - Sort string `url:"sort,omitempty"` - - // Direction in which to sort comments. Possible values are: asc, desc. 
- Direction string `url:"direction,omitempty"` - - // Since filters comments by time. - Since time.Time `url:"since,omitempty"` - - ListOptions -} - -// ListComments lists all comments on the specified pull request. Specifying a -// pull request number of 0 will return all comments on all pull requests for -// the repository. -// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#list-review-comments-in-a-repository -// GitHub API docs: https://docs.github.com/rest/pulls/comments#list-review-comments-on-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/comments -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/comments -func (s *PullRequestsService) ListComments(ctx context.Context, owner, repo string, number int, opts *PullRequestListCommentsOptions) ([]*PullRequestComment, *Response, error) { - var u string - if number == 0 { - u = fmt.Sprintf("repos/%v/%v/pulls/comments", owner, repo) - } else { - u = fmt.Sprintf("repos/%v/%v/pulls/%d/comments", owner, repo, number) - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeMultiLineCommentsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - var comments []*PullRequestComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// GetComment fetches the specified pull request comment. -// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#get-a-review-comment-for-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/comments/{comment_id} -func (s *PullRequestsService) GetComment(ctx context.Context, owner, repo string, commentID int64) (*PullRequestComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeMultiLineCommentsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - comment := new(PullRequestComment) - resp, err := s.client.Do(ctx, req, comment) - if err != nil { - return nil, resp, err - } - - return comment, resp, nil -} - -// CreateComment creates a new comment on the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#create-a-review-comment-for-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls/{pull_number}/comments -func (s *PullRequestsService) CreateComment(ctx context.Context, owner, repo string, number int, comment *PullRequestComment) (*PullRequestComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/comments", owner, repo, number) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - // TODO: remove custom Accept headers when their respective API fully launches. 
- acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeMultiLineCommentsPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) - - c := new(PullRequestComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// CreateCommentInReplyTo creates a new comment as a reply to an existing pull request comment. -// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#create-a-review-comment-for-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls/{pull_number}/comments -func (s *PullRequestsService) CreateCommentInReplyTo(ctx context.Context, owner, repo string, number int, body string, commentID int64) (*PullRequestComment, *Response, error) { - comment := &struct { - Body string `json:"body,omitempty"` - InReplyTo int64 `json:"in_reply_to,omitempty"` - }{ - Body: body, - InReplyTo: commentID, - } - u := fmt.Sprintf("repos/%v/%v/pulls/%d/comments", owner, repo, number) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(PullRequestComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// EditComment updates a pull request comment. -// A non-nil comment.Body must be provided. Other comment fields should be left nil. -// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#update-a-review-comment-for-a-pull-request -// -//meta:operation PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id} -func (s *PullRequestsService) EditComment(ctx context.Context, owner, repo string, commentID int64, comment *PullRequestComment) (*PullRequestComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(PullRequestComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// DeleteComment deletes a pull request comment. -// -// GitHub API docs: https://docs.github.com/rest/pulls/comments#delete-a-review-comment-for-a-pull-request -// -//meta:operation DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id} -func (s *PullRequestsService) DeleteComment(ctx context.Context, owner, repo string, commentID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/pulls_reviews.go b/vendor/github.com/google/go-github/v71/github/pulls_reviews.go deleted file mode 100644 index 27b8dc37..00000000 --- a/vendor/github.com/google/go-github/v71/github/pulls_reviews.go +++ /dev/null @@ -1,329 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "errors" - "fmt" -) - -var ErrMixedCommentStyles = errors.New("cannot use both position and side/line form comments") - -// PullRequestReview represents a review of a pull request. 
-type PullRequestReview struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - User *User `json:"user,omitempty"` - Body *string `json:"body,omitempty"` - SubmittedAt *Timestamp `json:"submitted_at,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - PullRequestURL *string `json:"pull_request_url,omitempty"` - State *string `json:"state,omitempty"` - // AuthorAssociation is the comment author's relationship to the issue's repository. - // Possible values are "COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MEMBER", "OWNER", or "NONE". - AuthorAssociation *string `json:"author_association,omitempty"` -} - -func (p PullRequestReview) String() string { - return Stringify(p) -} - -// DraftReviewComment represents a comment part of the review. -type DraftReviewComment struct { - Path *string `json:"path,omitempty"` - Position *int `json:"position,omitempty"` - Body *string `json:"body,omitempty"` - - // The new comfort-fade-preview fields - StartSide *string `json:"start_side,omitempty"` - Side *string `json:"side,omitempty"` - StartLine *int `json:"start_line,omitempty"` - Line *int `json:"line,omitempty"` -} - -func (c DraftReviewComment) String() string { - return Stringify(c) -} - -// PullRequestReviewRequest represents a request to create a review. -type PullRequestReviewRequest struct { - NodeID *string `json:"node_id,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - Body *string `json:"body,omitempty"` - Event *string `json:"event,omitempty"` - Comments []*DraftReviewComment `json:"comments,omitempty"` -} - -func (r PullRequestReviewRequest) String() string { - return Stringify(r) -} - -func (r *PullRequestReviewRequest) isComfortFadePreview() (bool, error) { - var isCF *bool - for _, comment := range r.Comments { - if comment == nil { - continue - } - hasPos := comment.Position != nil - hasComfortFade := (comment.StartSide != nil) || (comment.Side != nil) || - (comment.StartLine != nil) || (comment.Line != nil) - - switch { - case hasPos && hasComfortFade: - return false, ErrMixedCommentStyles - case hasPos && isCF != nil && *isCF: - return false, ErrMixedCommentStyles - case hasComfortFade && isCF != nil && !*isCF: - return false, ErrMixedCommentStyles - } - isCF = &hasComfortFade - } - if isCF != nil { - return *isCF, nil - } - return false, nil -} - -// PullRequestReviewDismissalRequest represents a request to dismiss a review. -type PullRequestReviewDismissalRequest struct { - Message *string `json:"message,omitempty"` -} - -func (r PullRequestReviewDismissalRequest) String() string { - return Stringify(r) -} - -// ListReviews lists all reviews on the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#list-reviews-for-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews -func (s *PullRequestsService) ListReviews(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews", owner, repo, number) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var reviews []*PullRequestReview - resp, err := s.client.Do(ctx, req, &reviews) - if err != nil { - return nil, resp, err - } - - return reviews, resp, nil -} - -// GetReview fetches the specified pull request review. 
-// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#get-a-review-for-a-pull-request -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id} -func (s *PullRequestsService) GetReview(ctx context.Context, owner, repo string, number int, reviewID int64) (*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d", owner, repo, number, reviewID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - review := new(PullRequestReview) - resp, err := s.client.Do(ctx, req, review) - if err != nil { - return nil, resp, err - } - - return review, resp, nil -} - -// DeletePendingReview deletes the specified pull request pending review. -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#delete-a-pending-review-for-a-pull-request -// -//meta:operation DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id} -func (s *PullRequestsService) DeletePendingReview(ctx context.Context, owner, repo string, number int, reviewID int64) (*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d", owner, repo, number, reviewID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, nil, err - } - - review := new(PullRequestReview) - resp, err := s.client.Do(ctx, req, review) - if err != nil { - return nil, resp, err - } - - return review, resp, nil -} - -// ListReviewComments lists all the comments for the specified review. -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#list-comments-for-a-pull-request-review -// -//meta:operation GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments -func (s *PullRequestsService) ListReviewComments(ctx context.Context, owner, repo string, number int, reviewID int64, opts *ListOptions) ([]*PullRequestComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/comments", owner, repo, number, reviewID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var comments []*PullRequestComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// CreateReview creates a new review on the specified pull request. -// -// In order to use multi-line comments, you must use the "comfort fade" preview. -// This replaces the use of the "Position" field in comments with 4 new fields: -// -// [Start]Side, and [Start]Line. -// -// These new fields must be used for ALL comments (including single-line), -// with the following restrictions (empirically observed, so subject to change). -// -// For single-line "comfort fade" comments, you must use: -// -// Path: &path, // as before -// Body: &body, // as before -// Side: &"RIGHT" (or "LEFT") -// Line: &123, // NOT THE SAME AS POSITION, this is an actual line number. -// -// If StartSide or StartLine is used with single-line comments, a 422 is returned. 
-// -// For multi-line "comfort fade" comments, you must use: -// -// Path: &path, // as before -// Body: &body, // as before -// StartSide: &"RIGHT" (or "LEFT") -// Side: &"RIGHT" (or "LEFT") -// StartLine: &120, -// Line: &125, -// -// Suggested edits are made by commenting on the lines to replace, and including the -// suggested edit in a block like this (it may be surrounded in non-suggestion markdown): -// -// ```suggestion -// Use this instead. -// It is waaaaaay better. -// ``` -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#create-a-review-for-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews -func (s *PullRequestsService) CreateReview(ctx context.Context, owner, repo string, number int, review *PullRequestReviewRequest) (*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews", owner, repo, number) - - req, err := s.client.NewRequest("POST", u, review) - if err != nil { - return nil, nil, err - } - - // Detect which style of review comment is being used. - if isCF, err := review.isComfortFadePreview(); err != nil { - return nil, nil, err - } else if isCF { - // If the review comments are using the comfort fade preview fields, - // then pass the comfort fade header. - req.Header.Set("Accept", mediaTypeMultiLineCommentsPreview) - } - - r := new(PullRequestReview) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// UpdateReview updates the review summary on the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#update-a-review-for-a-pull-request -// -//meta:operation PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id} -func (s *PullRequestsService) UpdateReview(ctx context.Context, owner, repo string, number int, reviewID int64, body string) (*PullRequestReview, *Response, error) { - opts := &struct { - Body string `json:"body"` - }{Body: body} - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d", owner, repo, number, reviewID) - - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - review := &PullRequestReview{} - resp, err := s.client.Do(ctx, req, review) - if err != nil { - return nil, resp, err - } - - return review, resp, nil -} - -// SubmitReview submits a specified review on the specified pull request. -// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#submit-a-review-for-a-pull-request -// -//meta:operation POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events -func (s *PullRequestsService) SubmitReview(ctx context.Context, owner, repo string, number int, reviewID int64, review *PullRequestReviewRequest) (*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/events", owner, repo, number, reviewID) - - req, err := s.client.NewRequest("POST", u, review) - if err != nil { - return nil, nil, err - } - - r := new(PullRequestReview) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} - -// DismissReview dismisses a specified review on the specified pull request. 
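Following the comfort-fade rules documented above, a sketch of creating a multi-line review comment; the file path, line range and review body are placeholders, and the request uses only the [Start]Side/[Start]Line fields as the restrictions describe.

package example

import (
	"context"

	"github.com/google/go-github/v71/github"
)

// requestChangesOnRange leaves a multi-line "comfort fade" review comment
// spanning lines 120-125 on the right side of the diff.
func requestChangesOnRange(ctx context.Context, client *github.Client, owner, repo string, number int) (*github.PullRequestReview, error) {
	review := &github.PullRequestReviewRequest{
		Event: github.String("REQUEST_CHANGES"),
		Body:  github.String("Please tighten up this block."),
		Comments: []*github.DraftReviewComment{
			{
				Path:      github.String("main.go"),
				Body:      github.String("Consider extracting this into a helper."),
				StartSide: github.String("RIGHT"),
				Side:      github.String("RIGHT"),
				StartLine: github.Int(120),
				Line:      github.Int(125),
			},
		},
	}
	created, _, err := client.PullRequests.CreateReview(ctx, owner, repo, number, review)
	return created, err
}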
-// -// GitHub API docs: https://docs.github.com/rest/pulls/reviews#dismiss-a-review-for-a-pull-request -// -//meta:operation PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals -func (s *PullRequestsService) DismissReview(ctx context.Context, owner, repo string, number int, reviewID int64, review *PullRequestReviewDismissalRequest) (*PullRequestReview, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/dismissals", owner, repo, number, reviewID) - - req, err := s.client.NewRequest("PUT", u, review) - if err != nil { - return nil, nil, err - } - - r := new(PullRequestReview) - resp, err := s.client.Do(ctx, req, r) - if err != nil { - return nil, resp, err - } - - return r, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/pulls_threads.go b/vendor/github.com/google/go-github/v71/github/pulls_threads.go deleted file mode 100644 index 23e924d8..00000000 --- a/vendor/github.com/google/go-github/v71/github/pulls_threads.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -// PullRequestThread represents a thread of comments on a pull request. -type PullRequestThread struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Comments []*PullRequestComment `json:"comments,omitempty"` -} - -func (p PullRequestThread) String() string { - return Stringify(p) -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_actions_access.go b/vendor/github.com/google/go-github/v71/github/repos_actions_access.go deleted file mode 100644 index 2da1f01c..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_actions_access.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RepositoryActionsAccessLevel represents the repository actions access level. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-the-level-of-access-for-workflows-outside-of-the-repository -type RepositoryActionsAccessLevel struct { - // AccessLevel specifies the level of access that workflows outside of the repository have - // to actions and reusable workflows within the repository. - // Possible values are: "none", "organization" "enterprise". - AccessLevel *string `json:"access_level,omitempty"` -} - -// GetActionsAccessLevel gets the level of access that workflows outside of the repository have -// to actions and reusable workflows in the repository. 
-// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-the-level-of-access-for-workflows-outside-of-the-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/permissions/access -func (s *RepositoriesService) GetActionsAccessLevel(ctx context.Context, owner, repo string) (*RepositoryActionsAccessLevel, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions/access", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - raal := new(RepositoryActionsAccessLevel) - resp, err := s.client.Do(ctx, req, raal) - if err != nil { - return nil, resp, err - } - - return raal, resp, nil -} - -// EditActionsAccessLevel sets the level of access that workflows outside of the repository have -// to actions and reusable workflows in the repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-the-level-of-access-for-workflows-outside-of-the-repository -// -//meta:operation PUT /repos/{owner}/{repo}/actions/permissions/access -func (s *RepositoriesService) EditActionsAccessLevel(ctx context.Context, owner, repo string, repositoryActionsAccessLevel RepositoryActionsAccessLevel) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions/access", owner, repo) - req, err := s.client.NewRequest("PUT", u, repositoryActionsAccessLevel) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_actions_allowed.go b/vendor/github.com/google/go-github/v71/github/repos_actions_allowed.go deleted file mode 100644 index e9ebff1d..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_actions_allowed.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetActionsAllowed gets the allowed actions and reusable workflows for a repository. -// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/actions/permissions/selected-actions -func (s *RepositoriesService) GetActionsAllowed(ctx context.Context, org, repo string) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions/selected-actions", org, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - actionsAllowed := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, actionsAllowed) - if err != nil { - return nil, resp, err - } - - return actionsAllowed, resp, nil -} - -// EditActionsAllowed sets the allowed actions and reusable workflows for a repository. 
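A sketch of reading and then widening the workflow access level with the two RepositoriesService methods above; "organization" comes from the documented value set ("none", "organization", "enterprise"), and owner/repo are placeholders.

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v71/github"
)

// widenActionsAccess prints the current access level and then allows
// workflows in other repositories of the organization to use this
// repository's actions and reusable workflows.
func widenActionsAccess(ctx context.Context, client *github.Client, owner, repo string) error {
	current, _, err := client.Repositories.GetActionsAccessLevel(ctx, owner, repo)
	if err != nil {
		return err
	}
	fmt.Println("current access level:", current.GetAccessLevel())
	_, err = client.Repositories.EditActionsAccessLevel(ctx, owner, repo, github.RepositoryActionsAccessLevel{
		AccessLevel: github.String("organization"),
	})
	return err
}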
-// -// GitHub API docs: https://docs.github.com/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/actions/permissions/selected-actions -func (s *RepositoriesService) EditActionsAllowed(ctx context.Context, org, repo string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/actions/permissions/selected-actions", org, repo) - req, err := s.client.NewRequest("PUT", u, actionsAllowed) - if err != nil { - return nil, nil, err - } - - p := new(ActionsAllowed) - resp, err := s.client.Do(ctx, req, p) - if err != nil { - return nil, resp, err - } - - return p, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_codeowners.go b/vendor/github.com/google/go-github/v71/github/repos_codeowners.go deleted file mode 100644 index 93eeae09..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_codeowners.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// GetCodeownersErrorsOptions specifies the optional parameters to the -// RepositoriesService.GetCodeownersErrors method. -type GetCodeownersErrorsOptions struct { - // A branch, tag or commit name used to determine which version of the CODEOWNERS file to use. - // Default: the repository's default branch (e.g. main). - Ref string `url:"ref,omitempty"` -} - -// CodeownersErrors represents a list of syntax errors detected in the CODEOWNERS file. -type CodeownersErrors struct { - Errors []*CodeownersError `json:"errors"` -} - -// CodeownersError represents a syntax error detected in the CODEOWNERS file. -type CodeownersError struct { - Line int `json:"line"` - Column int `json:"column"` - Kind string `json:"kind"` - Source string `json:"source"` - Suggestion *string `json:"suggestion,omitempty"` - Message string `json:"message"` - Path string `json:"path"` -} - -// GetCodeownersErrors lists any syntax errors that are detected in the CODEOWNERS file. -// -// GitHub API docs: https://docs.github.com/rest/repos/repos#list-codeowners-errors -// -//meta:operation GET /repos/{owner}/{repo}/codeowners/errors -func (s *RepositoriesService) GetCodeownersErrors(ctx context.Context, owner, repo string, opts *GetCodeownersErrorsOptions) (*CodeownersErrors, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/codeowners/errors", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - codeownersErrors := &CodeownersErrors{} - resp, err := s.client.Do(ctx, req, codeownersErrors) - if err != nil { - return nil, resp, err - } - - return codeownersErrors, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_comments.go b/vendor/github.com/google/go-github/v71/github/repos_comments.go deleted file mode 100644 index 766a614c..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_comments.go +++ /dev/null @@ -1,173 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RepositoryComment represents a comment for a commit, file, or line in a repository. 
-type RepositoryComment struct { - HTMLURL *string `json:"html_url,omitempty"` - URL *string `json:"url,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - User *User `json:"user,omitempty"` - Reactions *Reactions `json:"reactions,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - - // User-mutable fields - Body *string `json:"body"` - // User-initialized fields - Path *string `json:"path,omitempty"` - Position *int `json:"position,omitempty"` -} - -func (r RepositoryComment) String() string { - return Stringify(r) -} - -// ListComments lists all the comments for the repository. -// -// GitHub API docs: https://docs.github.com/rest/commits/comments#list-commit-comments-for-a-repository -// -//meta:operation GET /repos/{owner}/{repo}/comments -func (s *RepositoriesService) ListComments(ctx context.Context, owner, repo string, opts *ListOptions) ([]*RepositoryComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var comments []*RepositoryComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// ListCommitComments lists all the comments for a given commit SHA. -// -// GitHub API docs: https://docs.github.com/rest/commits/comments#list-commit-comments -// -//meta:operation GET /repos/{owner}/{repo}/commits/{commit_sha}/comments -func (s *RepositoriesService) ListCommitComments(ctx context.Context, owner, repo, sha string, opts *ListOptions) ([]*RepositoryComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/comments", owner, repo, sha) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - var comments []*RepositoryComment - resp, err := s.client.Do(ctx, req, &comments) - if err != nil { - return nil, resp, err - } - - return comments, resp, nil -} - -// CreateComment creates a comment for the given commit. -// Note: GitHub allows for comments to be created for non-existing files and positions. -// -// GitHub API docs: https://docs.github.com/rest/commits/comments#create-a-commit-comment -// -//meta:operation POST /repos/{owner}/{repo}/commits/{commit_sha}/comments -func (s *RepositoriesService) CreateComment(ctx context.Context, owner, repo, sha string, comment *RepositoryComment) (*RepositoryComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/comments", owner, repo, sha) - req, err := s.client.NewRequest("POST", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(RepositoryComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// GetComment gets a single comment from a repository. 
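A sketch of attaching a commit comment with the CreateComment call above; the commit SHA, file path and position are placeholders (as noted, GitHub accepts comments on non-existing files and positions).

package example

import (
	"context"

	"github.com/google/go-github/v71/github"
)

// commentOnCommit leaves a comment on a specific position of a file
// touched by the given commit.
func commentOnCommit(ctx context.Context, client *github.Client, owner, repo, sha string) (*github.RepositoryComment, error) {
	comment := &github.RepositoryComment{
		Body:     github.String("This change looks good."),
		Path:     github.String("README.md"),
		Position: github.Int(4),
	}
	created, _, err := client.Repositories.CreateComment(ctx, owner, repo, sha, comment)
	return created, err
}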
-// -// GitHub API docs: https://docs.github.com/rest/commits/comments#get-a-commit-comment -// -//meta:operation GET /repos/{owner}/{repo}/comments/{comment_id} -func (s *RepositoriesService) GetComment(ctx context.Context, owner, repo string, id int64) (*RepositoryComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeReactionsPreview) - - c := new(RepositoryComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// UpdateComment updates the body of a single comment. -// -// GitHub API docs: https://docs.github.com/rest/commits/comments#update-a-commit-comment -// -//meta:operation PATCH /repos/{owner}/{repo}/comments/{comment_id} -func (s *RepositoriesService) UpdateComment(ctx context.Context, owner, repo string, id int64, comment *RepositoryComment) (*RepositoryComment, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) - req, err := s.client.NewRequest("PATCH", u, comment) - if err != nil { - return nil, nil, err - } - - c := new(RepositoryComment) - resp, err := s.client.Do(ctx, req, c) - if err != nil { - return nil, resp, err - } - - return c, resp, nil -} - -// DeleteComment deletes a single comment from a repository. -// -// GitHub API docs: https://docs.github.com/rest/commits/comments#delete-a-commit-comment -// -//meta:operation DELETE /repos/{owner}/{repo}/comments/{comment_id} -func (s *RepositoriesService) DeleteComment(ctx context.Context, owner, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_commits.go b/vendor/github.com/google/go-github/v71/github/repos_commits.go deleted file mode 100644 index cca7430c..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_commits.go +++ /dev/null @@ -1,325 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "bytes" - "context" - "fmt" - "net/url" - "time" -) - -// RepositoryCommit represents a commit in a repo. -// Note that it's wrapping a Commit, so author/committer information is in two places, -// but contain different details about them: in RepositoryCommit "github details", in Commit - "git details". -type RepositoryCommit struct { - NodeID *string `json:"node_id,omitempty"` - SHA *string `json:"sha,omitempty"` - Commit *Commit `json:"commit,omitempty"` - Author *User `json:"author,omitempty"` - Committer *User `json:"committer,omitempty"` - Parents []*Commit `json:"parents,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - URL *string `json:"url,omitempty"` - CommentsURL *string `json:"comments_url,omitempty"` - - // Details about how many changes were made in this commit. Only filled in during GetCommit! - Stats *CommitStats `json:"stats,omitempty"` - // Details about which files, and how this commit touched. Only filled in during GetCommit! 
- Files []*CommitFile `json:"files,omitempty"` -} - -func (r RepositoryCommit) String() string { - return Stringify(r) -} - -// CommitStats represents the number of additions / deletions from a file in a given RepositoryCommit or GistCommit. -type CommitStats struct { - Additions *int `json:"additions,omitempty"` - Deletions *int `json:"deletions,omitempty"` - Total *int `json:"total,omitempty"` -} - -func (c CommitStats) String() string { - return Stringify(c) -} - -// CommitFile represents a file modified in a commit. -type CommitFile struct { - SHA *string `json:"sha,omitempty"` - Filename *string `json:"filename,omitempty"` - Additions *int `json:"additions,omitempty"` - Deletions *int `json:"deletions,omitempty"` - Changes *int `json:"changes,omitempty"` - Status *string `json:"status,omitempty"` - Patch *string `json:"patch,omitempty"` - BlobURL *string `json:"blob_url,omitempty"` - RawURL *string `json:"raw_url,omitempty"` - ContentsURL *string `json:"contents_url,omitempty"` - PreviousFilename *string `json:"previous_filename,omitempty"` -} - -func (c CommitFile) String() string { - return Stringify(c) -} - -// CommitsComparison is the result of comparing two commits. -// See CompareCommits() for details. -type CommitsComparison struct { - BaseCommit *RepositoryCommit `json:"base_commit,omitempty"` - MergeBaseCommit *RepositoryCommit `json:"merge_base_commit,omitempty"` - - // Head can be 'behind' or 'ahead' - Status *string `json:"status,omitempty"` - AheadBy *int `json:"ahead_by,omitempty"` - BehindBy *int `json:"behind_by,omitempty"` - TotalCommits *int `json:"total_commits,omitempty"` - - Commits []*RepositoryCommit `json:"commits,omitempty"` - - Files []*CommitFile `json:"files,omitempty"` - - HTMLURL *string `json:"html_url,omitempty"` - PermalinkURL *string `json:"permalink_url,omitempty"` - DiffURL *string `json:"diff_url,omitempty"` - PatchURL *string `json:"patch_url,omitempty"` - URL *string `json:"url,omitempty"` // API URL. -} - -func (c CommitsComparison) String() string { - return Stringify(c) -} - -// CommitsListOptions specifies the optional parameters to the -// RepositoriesService.ListCommits method. -type CommitsListOptions struct { - // SHA or branch to start listing Commits from. - SHA string `url:"sha,omitempty"` - - // Path that should be touched by the returned Commits. - Path string `url:"path,omitempty"` - - // Author of by which to filter Commits. - Author string `url:"author,omitempty"` - - // Since when should Commits be included in the response. - Since time.Time `url:"since,omitempty"` - - // Until when should Commits be included in the response. - Until time.Time `url:"until,omitempty"` - - ListOptions -} - -// BranchCommit is the result of listing branches with commit SHA. -type BranchCommit struct { - Name *string `json:"name,omitempty"` - Commit *Commit `json:"commit,omitempty"` - Protected *bool `json:"protected,omitempty"` -} - -// ListCommits lists the commits of a repository. 
-// -// GitHub API docs: https://docs.github.com/rest/commits/commits#list-commits -// -//meta:operation GET /repos/{owner}/{repo}/commits -func (s *RepositoriesService) ListCommits(ctx context.Context, owner, repo string, opts *CommitsListOptions) ([]*RepositoryCommit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var commits []*RepositoryCommit - resp, err := s.client.Do(ctx, req, &commits) - if err != nil { - return nil, resp, err - } - - return commits, resp, nil -} - -// GetCommit fetches the specified commit, including all details about it. -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#get-a-commit -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref} -func (s *RepositoriesService) GetCommit(ctx context.Context, owner, repo, sha string, opts *ListOptions) (*RepositoryCommit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, sha) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - commit := new(RepositoryCommit) - resp, err := s.client.Do(ctx, req, commit) - if err != nil { - return nil, resp, err - } - - return commit, resp, nil -} - -// GetCommitRaw fetches the specified commit in raw (diff or patch) format. -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#get-a-commit -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref} -func (s *RepositoriesService) GetCommitRaw(ctx context.Context, owner string, repo string, sha string, opts RawOptions) (string, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, sha) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", nil, err - } - - switch opts.Type { - case Diff: - req.Header.Set("Accept", mediaTypeV3Diff) - case Patch: - req.Header.Set("Accept", mediaTypeV3Patch) - default: - return "", nil, fmt.Errorf("unsupported raw type %d", opts.Type) - } - - var buf bytes.Buffer - resp, err := s.client.Do(ctx, req, &buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// GetCommitSHA1 gets the SHA-1 of a commit reference. If a last-known SHA1 is -// supplied and no new commits have occurred, a 304 Unmodified response is returned. -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#get-a-commit -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref} -func (s *RepositoriesService) GetCommitSHA1(ctx context.Context, owner, repo, ref, lastSHA string) (string, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, refURLEscape(ref)) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", nil, err - } - if lastSHA != "" { - req.Header.Set("If-None-Match", `"`+lastSHA+`"`) - } - - req.Header.Set("Accept", mediaTypeV3SHA) - - var buf bytes.Buffer - resp, err := s.client.Do(ctx, req, &buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// CompareCommits compares a range of commits with each other. 
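A sketch of listing recent commits using the CommitsListOptions fields defined above; the author login, path filter and 30-day window are placeholder values.

package example

import (
	"context"
	"time"

	"github.com/google/go-github/v71/github"
)

// recentDocCommits lists commits from the last 30 days that touched the
// docs path, filtered by author.
func recentDocCommits(ctx context.Context, client *github.Client, owner, repo string) ([]*github.RepositoryCommit, error) {
	opts := &github.CommitsListOptions{
		Author:      "octocat",
		Path:        "docs",
		Since:       time.Now().AddDate(0, 0, -30),
		ListOptions: github.ListOptions{PerPage: 50},
	}
	commits, _, err := client.Repositories.ListCommits(ctx, owner, repo, opts)
	return commits, err
}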
-// -// GitHub API docs: https://docs.github.com/rest/commits/commits#compare-two-commits -// -//meta:operation GET /repos/{owner}/{repo}/compare/{basehead} -func (s *RepositoriesService) CompareCommits(ctx context.Context, owner, repo string, base, head string, opts *ListOptions) (*CommitsComparison, *Response, error) { - escapedBase := url.QueryEscape(base) - escapedHead := url.QueryEscape(head) - - u := fmt.Sprintf("repos/%v/%v/compare/%v...%v", owner, repo, escapedBase, escapedHead) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - comp := new(CommitsComparison) - resp, err := s.client.Do(ctx, req, comp) - if err != nil { - return nil, resp, err - } - - return comp, resp, nil -} - -// CompareCommitsRaw compares a range of commits with each other in raw (diff or patch) format. -// -// Both "base" and "head" must be branch names in "repo". -// To compare branches across other repositories in the same network as "repo", -// use the format ":branch". -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#compare-two-commits -// -//meta:operation GET /repos/{owner}/{repo}/compare/{basehead} -func (s *RepositoriesService) CompareCommitsRaw(ctx context.Context, owner, repo, base, head string, opts RawOptions) (string, *Response, error) { - escapedBase := url.QueryEscape(base) - escapedHead := url.QueryEscape(head) - - u := fmt.Sprintf("repos/%v/%v/compare/%v...%v", owner, repo, escapedBase, escapedHead) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return "", nil, err - } - - switch opts.Type { - case Diff: - req.Header.Set("Accept", mediaTypeV3Diff) - case Patch: - req.Header.Set("Accept", mediaTypeV3Patch) - default: - return "", nil, fmt.Errorf("unsupported raw type %d", opts.Type) - } - - var buf bytes.Buffer - resp, err := s.client.Do(ctx, req, &buf) - if err != nil { - return "", resp, err - } - - return buf.String(), resp, nil -} - -// ListBranchesHeadCommit gets all branches where the given commit SHA is the HEAD, -// or latest commit for the branch. -// -// GitHub API docs: https://docs.github.com/rest/commits/commits#list-branches-for-head-commit -// -//meta:operation GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head -func (s *RepositoriesService) ListBranchesHeadCommit(ctx context.Context, owner, repo, sha string) ([]*BranchCommit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/branches-where-head", owner, repo, sha) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeListPullsOrBranchesForCommitPreview) - var branchCommits []*BranchCommit - resp, err := s.client.Do(ctx, req, &branchCommits) - if err != nil { - return nil, resp, err - } - - return branchCommits, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_community_health.go b/vendor/github.com/google/go-github/v71/github/repos_community_health.go deleted file mode 100644 index 54d1b414..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_community_health.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// Metric represents the different fields for one file in community health files. -type Metric struct { - Name *string `json:"name"` - Key *string `json:"key"` - SPDXID *string `json:"spdx_id"` - URL *string `json:"url"` - HTMLURL *string `json:"html_url"` - NodeID *string `json:"node_id"` -} - -// CommunityHealthFiles represents the different files in the community health metrics response. -type CommunityHealthFiles struct { - CodeOfConduct *Metric `json:"code_of_conduct"` - CodeOfConductFile *Metric `json:"code_of_conduct_file"` - Contributing *Metric `json:"contributing"` - IssueTemplate *Metric `json:"issue_template"` - PullRequestTemplate *Metric `json:"pull_request_template"` - License *Metric `json:"license"` - Readme *Metric `json:"readme"` -} - -// CommunityHealthMetrics represents a response containing the community metrics of a repository. -type CommunityHealthMetrics struct { - HealthPercentage *int `json:"health_percentage"` - Description *string `json:"description"` - Documentation *string `json:"documentation"` - Files *CommunityHealthFiles `json:"files"` - UpdatedAt *Timestamp `json:"updated_at"` - ContentReportsEnabled *bool `json:"content_reports_enabled"` -} - -// GetCommunityHealthMetrics retrieves all the community health metrics for a repository. -// -// GitHub API docs: https://docs.github.com/rest/metrics/community#get-community-profile-metrics -// -//meta:operation GET /repos/{owner}/{repo}/community/profile -func (s *RepositoriesService) GetCommunityHealthMetrics(ctx context.Context, owner, repo string) (*CommunityHealthMetrics, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/community/profile", owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - metrics := &CommunityHealthMetrics{} - resp, err := s.client.Do(ctx, req, metrics) - if err != nil { - return nil, resp, err - } - - return metrics, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_deployment_branch_policies.go b/vendor/github.com/google/go-github/v71/github/repos_deployment_branch_policies.go deleted file mode 100644 index 77ac73e4..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_deployment_branch_policies.go +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2023 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// DeploymentBranchPolicy represents a single deployment branch policy for an environment. -type DeploymentBranchPolicy struct { - Name *string `json:"name,omitempty"` - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - Type *string `json:"type,omitempty"` -} - -// DeploymentBranchPolicyResponse represents the slightly different format of response that comes back when you list deployment branch policies. -type DeploymentBranchPolicyResponse struct { - TotalCount *int `json:"total_count,omitempty"` - BranchPolicies []*DeploymentBranchPolicy `json:"branch_policies,omitempty"` -} - -// DeploymentBranchPolicyRequest represents a deployment branch policy request. -type DeploymentBranchPolicyRequest struct { - Name *string `json:"name,omitempty"` - Type *string `json:"type,omitempty"` -} - -// ListDeploymentBranchPolicies lists the deployment branch policies for an environment. 
-// -// GitHub API docs: https://docs.github.com/rest/deployments/branch-policies#list-deployment-branch-policies -// -//meta:operation GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies -func (s *RepositoriesService) ListDeploymentBranchPolicies(ctx context.Context, owner, repo, environment string) (*DeploymentBranchPolicyResponse, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies", owner, repo, environment) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var list *DeploymentBranchPolicyResponse - resp, err := s.client.Do(ctx, req, &list) - if err != nil { - return nil, resp, err - } - - return list, resp, nil -} - -// GetDeploymentBranchPolicy gets a deployment branch policy for an environment. -// -// GitHub API docs: https://docs.github.com/rest/deployments/branch-policies#get-a-deployment-branch-policy -// -//meta:operation GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id} -func (s *RepositoriesService) GetDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, branchPolicyID int64) (*DeploymentBranchPolicy, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies/%v", owner, repo, environment, branchPolicyID) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var policy *DeploymentBranchPolicy - resp, err := s.client.Do(ctx, req, &policy) - if err != nil { - return nil, resp, err - } - - return policy, resp, nil -} - -// CreateDeploymentBranchPolicy creates a deployment branch policy for an environment. -// -// GitHub API docs: https://docs.github.com/rest/deployments/branch-policies#create-a-deployment-branch-policy -// -//meta:operation POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies -func (s *RepositoriesService) CreateDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, request *DeploymentBranchPolicyRequest) (*DeploymentBranchPolicy, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies", owner, repo, environment) - - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - var policy *DeploymentBranchPolicy - resp, err := s.client.Do(ctx, req, &policy) - if err != nil { - return nil, resp, err - } - - return policy, resp, nil -} - -// UpdateDeploymentBranchPolicy updates a deployment branch policy for an environment. -// -// GitHub API docs: https://docs.github.com/rest/deployments/branch-policies#update-a-deployment-branch-policy -// -//meta:operation PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id} -func (s *RepositoriesService) UpdateDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, branchPolicyID int64, request *DeploymentBranchPolicyRequest) (*DeploymentBranchPolicy, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies/%v", owner, repo, environment, branchPolicyID) - - req, err := s.client.NewRequest("PUT", u, request) - if err != nil { - return nil, nil, err - } - - var policy *DeploymentBranchPolicy - resp, err := s.client.Do(ctx, req, &policy) - if err != nil { - return nil, resp, err - } - - return policy, resp, nil -} - -// DeleteDeploymentBranchPolicy deletes a deployment branch policy for an environment. 
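A sketch of creating a branch-pattern deployment policy with the calls above and then listing what is in place; the environment name and the release/* pattern are placeholders.

package example

import (
	"context"

	"github.com/google/go-github/v71/github"
)

// allowReleaseBranches lets only release/* branches deploy to the given
// environment, then returns the policies now configured for it.
func allowReleaseBranches(ctx context.Context, client *github.Client, owner, repo, environment string) (*github.DeploymentBranchPolicyResponse, error) {
	_, _, err := client.Repositories.CreateDeploymentBranchPolicy(ctx, owner, repo, environment, &github.DeploymentBranchPolicyRequest{
		Name: github.String("release/*"),
		Type: github.String("branch"),
	})
	if err != nil {
		return nil, err
	}
	policies, _, err := client.Repositories.ListDeploymentBranchPolicies(ctx, owner, repo, environment)
	return policies, err
}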
-// -// GitHub API docs: https://docs.github.com/rest/deployments/branch-policies#delete-a-deployment-branch-policy -// -//meta:operation DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id} -func (s *RepositoriesService) DeleteDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, branchPolicyID int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies/%v", owner, repo, environment, branchPolicyID) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_forks.go b/vendor/github.com/google/go-github/v71/github/repos_forks.go deleted file mode 100644 index 60fb49da..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_forks.go +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "encoding/json" - "fmt" -) - -// RepositoryListForksOptions specifies the optional parameters to the -// RepositoriesService.ListForks method. -type RepositoryListForksOptions struct { - // How to sort the forks list. Possible values are: newest, oldest, - // watchers. Default is "newest". - Sort string `url:"sort,omitempty"` - - ListOptions -} - -// ListForks lists the forks of the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/repos/forks#list-forks -// -//meta:operation GET /repos/{owner}/{repo}/forks -func (s *RepositoriesService) ListForks(ctx context.Context, owner, repo string, opts *RepositoryListForksOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/forks", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when topics API fully launches. - req.Header.Set("Accept", mediaTypeTopicsPreview) - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// RepositoryCreateForkOptions specifies the optional parameters to the -// RepositoriesService.CreateFork method. -type RepositoryCreateForkOptions struct { - // The organization to fork the repository into. - Organization string `json:"organization,omitempty"` - Name string `json:"name,omitempty"` - DefaultBranchOnly bool `json:"default_branch_only,omitempty"` -} - -// CreateFork creates a fork of the specified repository. -// -// This method might return an *AcceptedError and a status code of -// 202. This is because this is the status that GitHub returns to signify that -// it is now computing creating the fork in a background task. In this event, -// the Repository value will be returned, which includes the details about the pending fork. -// A follow up request, after a delay of a second or so, should result -// in a successful request. 
-// -// GitHub API docs: https://docs.github.com/rest/repos/forks#create-a-fork -// -//meta:operation POST /repos/{owner}/{repo}/forks -func (s *RepositoriesService) CreateFork(ctx context.Context, owner, repo string, opts *RepositoryCreateForkOptions) (*Repository, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/forks", owner, repo) - - req, err := s.client.NewRequest("POST", u, opts) - if err != nil { - return nil, nil, err - } - - fork := new(Repository) - resp, err := s.client.Do(ctx, req, fork) - if err != nil { - // Persist AcceptedError's metadata to the Repository object. - if aerr, ok := err.(*AcceptedError); ok { - if err := json.Unmarshal(aerr.Raw, fork); err != nil { - return fork, resp, err - } - - return fork, resp, err - } - return nil, resp, err - } - - return fork, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_keys.go b/vendor/github.com/google/go-github/v71/github/repos_keys.go deleted file mode 100644 index cc86f8bb..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_keys.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// The Key type is defined in users_keys.go - -// ListKeys lists the deploy keys for a repository. -// -// GitHub API docs: https://docs.github.com/rest/deploy-keys/deploy-keys#list-deploy-keys -// -//meta:operation GET /repos/{owner}/{repo}/keys -func (s *RepositoriesService) ListKeys(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*Key, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/keys", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var keys []*Key - resp, err := s.client.Do(ctx, req, &keys) - if err != nil { - return nil, resp, err - } - - return keys, resp, nil -} - -// GetKey fetches a single deploy key. -// -// GitHub API docs: https://docs.github.com/rest/deploy-keys/deploy-keys#get-a-deploy-key -// -//meta:operation GET /repos/{owner}/{repo}/keys/{key_id} -func (s *RepositoriesService) GetKey(ctx context.Context, owner string, repo string, id int64) (*Key, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/keys/%v", owner, repo, id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - key := new(Key) - resp, err := s.client.Do(ctx, req, key) - if err != nil { - return nil, resp, err - } - - return key, resp, nil -} - -// CreateKey adds a deploy key for a repository. -// -// GitHub API docs: https://docs.github.com/rest/deploy-keys/deploy-keys#create-a-deploy-key -// -//meta:operation POST /repos/{owner}/{repo}/keys -func (s *RepositoriesService) CreateKey(ctx context.Context, owner string, repo string, key *Key) (*Key, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/keys", owner, repo) - - req, err := s.client.NewRequest("POST", u, key) - if err != nil { - return nil, nil, err - } - - k := new(Key) - resp, err := s.client.Do(ctx, req, k) - if err != nil { - return nil, resp, err - } - - return k, resp, nil -} - -// DeleteKey deletes a deploy key. 
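Given the 202/AcceptedError behaviour documented for CreateFork above, a sketch of forking into an organization while tolerating the pending state; the organization name is a placeholder.

package example

import (
	"context"
	"errors"

	"github.com/google/go-github/v71/github"
)

// forkIntoOrg forks a repository into an organization. A *github.AcceptedError
// means GitHub is still creating the fork in the background; the returned
// Repository already describes the pending fork.
func forkIntoOrg(ctx context.Context, client *github.Client, owner, repo, org string) (*github.Repository, error) {
	fork, _, err := client.Repositories.CreateFork(ctx, owner, repo, &github.RepositoryCreateForkOptions{
		Organization: org,
	})
	var accepted *github.AcceptedError
	if errors.As(err, &accepted) {
		return fork, nil // fork creation is queued; poll again after a short delay if needed
	}
	return fork, err
}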
-// -// GitHub API docs: https://docs.github.com/rest/deploy-keys/deploy-keys#delete-a-deploy-key -// -//meta:operation DELETE /repos/{owner}/{repo}/keys/{key_id} -func (s *RepositoriesService) DeleteKey(ctx context.Context, owner string, repo string, id int64) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/keys/%v", owner, repo, id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_lfs.go b/vendor/github.com/google/go-github/v71/github/repos_lfs.go deleted file mode 100644 index f0153c08..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_lfs.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// EnableLFS turns the LFS (Large File Storage) feature ON for the selected repo. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/repos/lfs#enable-git-lfs-for-a-repository -// -//meta:operation PUT /repos/{owner}/{repo}/lfs -func (s *RepositoriesService) EnableLFS(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/lfs", owner, repo) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} - -// DisableLFS turns the LFS (Large File Storage) feature OFF for the selected repo. -// -// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/repos/lfs#disable-git-lfs-for-a-repository -// -//meta:operation DELETE /repos/{owner}/{repo}/lfs -func (s *RepositoriesService) DisableLFS(ctx context.Context, owner, repo string) (*Response, error) { - u := fmt.Sprintf("repos/%v/%v/lfs", owner, repo) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - if err != nil { - return resp, err - } - - return resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_merging.go b/vendor/github.com/google/go-github/v71/github/repos_merging.go deleted file mode 100644 index b26e5da1..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_merging.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2014 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RepositoryMergeRequest represents a request to merge a branch in a -// repository. -type RepositoryMergeRequest struct { - Base *string `json:"base,omitempty"` - Head *string `json:"head,omitempty"` - CommitMessage *string `json:"commit_message,omitempty"` -} - -// RepoMergeUpstreamRequest represents a request to sync a branch of -// a forked repository to keep it up-to-date with the upstream repository. -type RepoMergeUpstreamRequest struct { - Branch *string `json:"branch,omitempty"` -} - -// RepoMergeUpstreamResult represents the result of syncing a branch of -// a forked repository with the upstream repository. 
-type RepoMergeUpstreamResult struct { - Message *string `json:"message,omitempty"` - MergeType *string `json:"merge_type,omitempty"` - BaseBranch *string `json:"base_branch,omitempty"` -} - -// Merge a branch in the specified repository. -// -// GitHub API docs: https://docs.github.com/rest/branches/branches#merge-a-branch -// -//meta:operation POST /repos/{owner}/{repo}/merges -func (s *RepositoriesService) Merge(ctx context.Context, owner, repo string, request *RepositoryMergeRequest) (*RepositoryCommit, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/merges", owner, repo) - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - commit := new(RepositoryCommit) - resp, err := s.client.Do(ctx, req, commit) - if err != nil { - return nil, resp, err - } - - return commit, resp, nil -} - -// MergeUpstream syncs a branch of a forked repository to keep it up-to-date -// with the upstream repository. -// -// GitHub API docs: https://docs.github.com/rest/branches/branches#sync-a-fork-branch-with-the-upstream-repository -// -//meta:operation POST /repos/{owner}/{repo}/merge-upstream -func (s *RepositoriesService) MergeUpstream(ctx context.Context, owner, repo string, request *RepoMergeUpstreamRequest) (*RepoMergeUpstreamResult, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/merge-upstream", owner, repo) - req, err := s.client.NewRequest("POST", u, request) - if err != nil { - return nil, nil, err - } - - result := new(RepoMergeUpstreamResult) - resp, err := s.client.Do(ctx, req, result) - if err != nil { - return nil, resp, err - } - - return result, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_statuses.go b/vendor/github.com/google/go-github/v71/github/repos_statuses.go deleted file mode 100644 index e7b03047..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_statuses.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// RepoStatus represents the status of a repository at a particular reference. -type RepoStatus struct { - ID *int64 `json:"id,omitempty"` - NodeID *string `json:"node_id,omitempty"` - URL *string `json:"url,omitempty"` - - // State is the current state of the repository. Possible values are: - // pending, success, error, or failure. - State *string `json:"state,omitempty"` - - // TargetURL is the URL of the page representing this status. It will be - // linked from the GitHub UI to allow users to see the source of the status. - TargetURL *string `json:"target_url,omitempty"` - - // Description is a short high level summary of the status. - Description *string `json:"description,omitempty"` - - // A string label to differentiate this status from the statuses of other systems. - Context *string `json:"context,omitempty"` - - // AvatarURL is the URL of the avatar of this status. - AvatarURL *string `json:"avatar_url,omitempty"` - - Creator *User `json:"creator,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` -} - -func (r RepoStatus) String() string { - return Stringify(r) -} - -// ListStatuses lists the statuses of a repository at the specified -// reference. ref can be a SHA, a branch name, or a tag name. 
-// -// GitHub API docs: https://docs.github.com/rest/commits/statuses#list-commit-statuses-for-a-reference -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref}/statuses -func (s *RepositoriesService) ListStatuses(ctx context.Context, owner, repo, ref string, opts *ListOptions) ([]*RepoStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/statuses", owner, repo, refURLEscape(ref)) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var statuses []*RepoStatus - resp, err := s.client.Do(ctx, req, &statuses) - if err != nil { - return nil, resp, err - } - - return statuses, resp, nil -} - -// CreateStatus creates a new status for a repository at the specified -// reference. Ref can be a SHA, a branch name, or a tag name. -// -// GitHub API docs: https://docs.github.com/rest/commits/statuses#create-a-commit-status -// -//meta:operation POST /repos/{owner}/{repo}/statuses/{sha} -func (s *RepositoriesService) CreateStatus(ctx context.Context, owner, repo, ref string, status *RepoStatus) (*RepoStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/statuses/%v", owner, repo, refURLEscape(ref)) - req, err := s.client.NewRequest("POST", u, status) - if err != nil { - return nil, nil, err - } - - repoStatus := new(RepoStatus) - resp, err := s.client.Do(ctx, req, repoStatus) - if err != nil { - return nil, resp, err - } - - return repoStatus, resp, nil -} - -// CombinedStatus represents the combined status of a repository at a particular reference. -type CombinedStatus struct { - // State is the combined state of the repository. Possible values are: - // failure, pending, or success. - State *string `json:"state,omitempty"` - - Name *string `json:"name,omitempty"` - SHA *string `json:"sha,omitempty"` - TotalCount *int `json:"total_count,omitempty"` - Statuses []*RepoStatus `json:"statuses,omitempty"` - - CommitURL *string `json:"commit_url,omitempty"` - RepositoryURL *string `json:"repository_url,omitempty"` -} - -func (s CombinedStatus) String() string { - return Stringify(s) -} - -// GetCombinedStatus returns the combined status of a repository at the specified -// reference. ref can be a SHA, a branch name, or a tag name. -// -// GitHub API docs: https://docs.github.com/rest/commits/statuses#get-the-combined-status-for-a-specific-reference -// -//meta:operation GET /repos/{owner}/{repo}/commits/{ref}/status -func (s *RepositoriesService) GetCombinedStatus(ctx context.Context, owner, repo, ref string, opts *ListOptions) (*CombinedStatus, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/commits/%v/status", owner, repo, refURLEscape(ref)) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - status := new(CombinedStatus) - resp, err := s.client.Do(ctx, req, status) - if err != nil { - return nil, resp, err - } - - return status, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/repos_traffic.go b/vendor/github.com/google/go-github/v71/github/repos_traffic.go deleted file mode 100644 index ae2f1a86..00000000 --- a/vendor/github.com/google/go-github/v71/github/repos_traffic.go +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright 2016 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
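A sketch of publishing a commit status and reading back the combined state with the CreateStatus and GetCombinedStatus calls above; the status context, description, target URL and ref are placeholders.

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v71/github"
)

// reportBuildStatus marks a commit as successful for a CI context and prints
// the combined state across all status contexts on that ref.
func reportBuildStatus(ctx context.Context, client *github.Client, owner, repo, ref string) error {
	_, _, err := client.Repositories.CreateStatus(ctx, owner, repo, ref, &github.RepoStatus{
		State:       github.String("success"),
		Context:     github.String("ci/build"),
		Description: github.String("build passed"),
		TargetURL:   github.String("https://ci.example.com/builds/42"),
	})
	if err != nil {
		return err
	}
	combined, _, err := client.Repositories.GetCombinedStatus(ctx, owner, repo, ref, nil)
	if err != nil {
		return err
	}
	fmt.Println("combined state:", combined.GetState())
	return nil
}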
- -package github - -import ( - "context" - "fmt" -) - -// TrafficReferrer represent information about traffic from a referrer . -type TrafficReferrer struct { - Referrer *string `json:"referrer,omitempty"` - Count *int `json:"count,omitempty"` - Uniques *int `json:"uniques,omitempty"` -} - -// TrafficPath represent information about the traffic on a path of the repo. -type TrafficPath struct { - Path *string `json:"path,omitempty"` - Title *string `json:"title,omitempty"` - Count *int `json:"count,omitempty"` - Uniques *int `json:"uniques,omitempty"` -} - -// TrafficData represent information about a specific timestamp in views or clones list. -type TrafficData struct { - Timestamp *Timestamp `json:"timestamp,omitempty"` - Count *int `json:"count,omitempty"` - Uniques *int `json:"uniques,omitempty"` -} - -// TrafficViews represent information about the number of views in the last 14 days. -type TrafficViews struct { - Views []*TrafficData `json:"views,omitempty"` - Count *int `json:"count,omitempty"` - Uniques *int `json:"uniques,omitempty"` -} - -// TrafficClones represent information about the number of clones in the last 14 days. -type TrafficClones struct { - Clones []*TrafficData `json:"clones,omitempty"` - Count *int `json:"count,omitempty"` - Uniques *int `json:"uniques,omitempty"` -} - -// TrafficBreakdownOptions specifies the parameters to methods that support breakdown per day or week. -// Can be one of: day, week. Default: day. -type TrafficBreakdownOptions struct { - Per string `url:"per,omitempty"` -} - -// ListTrafficReferrers list the top 10 referrers over the last 14 days. -// -// GitHub API docs: https://docs.github.com/rest/metrics/traffic#get-top-referral-sources -// -//meta:operation GET /repos/{owner}/{repo}/traffic/popular/referrers -func (s *RepositoriesService) ListTrafficReferrers(ctx context.Context, owner, repo string) ([]*TrafficReferrer, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/traffic/popular/referrers", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var trafficReferrers []*TrafficReferrer - resp, err := s.client.Do(ctx, req, &trafficReferrers) - if err != nil { - return nil, resp, err - } - - return trafficReferrers, resp, nil -} - -// ListTrafficPaths list the top 10 popular content over the last 14 days. -// -// GitHub API docs: https://docs.github.com/rest/metrics/traffic#get-top-referral-paths -// -//meta:operation GET /repos/{owner}/{repo}/traffic/popular/paths -func (s *RepositoriesService) ListTrafficPaths(ctx context.Context, owner, repo string) ([]*TrafficPath, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/traffic/popular/paths", owner, repo) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var paths []*TrafficPath - resp, err := s.client.Do(ctx, req, &paths) - if err != nil { - return nil, resp, err - } - - return paths, resp, nil -} - -// ListTrafficViews get total number of views for the last 14 days and breaks it down either per day or week. 
-// -// GitHub API docs: https://docs.github.com/rest/metrics/traffic#get-page-views -// -//meta:operation GET /repos/{owner}/{repo}/traffic/views -func (s *RepositoriesService) ListTrafficViews(ctx context.Context, owner, repo string, opts *TrafficBreakdownOptions) (*TrafficViews, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/traffic/views", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - trafficViews := new(TrafficViews) - resp, err := s.client.Do(ctx, req, &trafficViews) - if err != nil { - return nil, resp, err - } - - return trafficViews, resp, nil -} - -// ListTrafficClones get total number of clones for the last 14 days and breaks it down either per day or week for the last 14 days. -// -// GitHub API docs: https://docs.github.com/rest/metrics/traffic#get-repository-clones -// -//meta:operation GET /repos/{owner}/{repo}/traffic/clones -func (s *RepositoriesService) ListTrafficClones(ctx context.Context, owner, repo string, opts *TrafficBreakdownOptions) (*TrafficClones, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/traffic/clones", owner, repo) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - trafficClones := new(TrafficClones) - resp, err := s.client.Do(ctx, req, &trafficClones) - if err != nil { - return nil, resp, err - } - - return trafficClones, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/teams_members.go b/vendor/github.com/google/go-github/v71/github/teams_members.go deleted file mode 100644 index 059d993a..00000000 --- a/vendor/github.com/google/go-github/v71/github/teams_members.go +++ /dev/null @@ -1,263 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// TeamListTeamMembersOptions specifies the optional parameters to the -// TeamsService.ListTeamMembers method. -type TeamListTeamMembersOptions struct { - // Role filters members returned by their role in the team. Possible - // values are "all", "member", "maintainer". Default is "all". - Role string `url:"role,omitempty"` - - ListOptions -} - -// ListTeamMembersByID lists all of the users who are members of a team, given a specified -// organization ID, by team ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#list-team-members -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/members -func (s *TeamsService) ListTeamMembersByID(ctx context.Context, orgID, teamID int64, opts *TeamListTeamMembersOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/members", orgID, teamID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var members []*User - resp, err := s.client.Do(ctx, req, &members) - if err != nil { - return nil, resp, err - } - - return members, resp, nil -} - -// ListTeamMembersBySlug lists all of the users who are members of a team, given a specified -// organization name, by team slug. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/members#list-team-members -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/members -func (s *TeamsService) ListTeamMembersBySlug(ctx context.Context, org, slug string, opts *TeamListTeamMembersOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/members", org, slug) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var members []*User - resp, err := s.client.Do(ctx, req, &members) - if err != nil { - return nil, resp, err - } - - return members, resp, nil -} - -// GetTeamMembershipByID returns the membership status for a user in a team, given a specified -// organization ID, by team ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#list-team-members -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/members -func (s *TeamsService) GetTeamMembershipByID(ctx context.Context, orgID, teamID int64, user string) (*Membership, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/memberships/%v", orgID, teamID, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// GetTeamMembershipBySlug returns the membership status for a user in a team, given a specified -// organization name, by team slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#get-team-membership-for-a-user -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/memberships/{username} -func (s *TeamsService) GetTeamMembershipBySlug(ctx context.Context, org, slug, user string) (*Membership, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/memberships/%v", org, slug, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// TeamAddTeamMembershipOptions specifies the optional -// parameters to the TeamsService.AddTeamMembership method. -type TeamAddTeamMembershipOptions struct { - // Role specifies the role the user should have in the team. Possible - // values are: - // member - a normal member of the team - // maintainer - a team maintainer. Able to add/remove other team - // members, promote other team members to team - // maintainer, and edit the team’s name and description - // - // Default value is "member". - Role string `json:"role,omitempty"` -} - -// AddTeamMembershipByID adds or invites a user to a team, given a specified -// organization ID, by team ID. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/members#add-or-update-team-membership-for-a-user -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/memberships/{username} -func (s *TeamsService) AddTeamMembershipByID(ctx context.Context, orgID, teamID int64, user string, opts *TeamAddTeamMembershipOptions) (*Membership, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/memberships/%v", orgID, teamID, user) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// AddTeamMembershipBySlug adds or invites a user to a team, given a specified -// organization name, by team slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#add-or-update-team-membership-for-a-user -// -//meta:operation PUT /orgs/{org}/teams/{team_slug}/memberships/{username} -func (s *TeamsService) AddTeamMembershipBySlug(ctx context.Context, org, slug, user string, opts *TeamAddTeamMembershipOptions) (*Membership, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/memberships/%v", org, slug, user) - req, err := s.client.NewRequest("PUT", u, opts) - if err != nil { - return nil, nil, err - } - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// RemoveTeamMembershipByID removes a user from a team, given a specified -// organization ID, by team ID. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#remove-team-membership-for-a-user -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/memberships/{username} -func (s *TeamsService) RemoveTeamMembershipByID(ctx context.Context, orgID, teamID int64, user string) (*Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/memberships/%v", orgID, teamID, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamMembershipBySlug removes a user from a team, given a specified -// organization name, by team slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#remove-team-membership-for-a-user -// -//meta:operation DELETE /orgs/{org}/teams/{team_slug}/memberships/{username} -func (s *TeamsService) RemoveTeamMembershipBySlug(ctx context.Context, org, slug, user string) (*Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/memberships/%v", org, slug, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListPendingTeamInvitationsByID gets pending invitation list of a team, given a specified -// organization ID, by team ID. 
-// -// GitHub API docs: https://docs.github.com/rest/teams/members#list-pending-team-invitations -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/invitations -func (s *TeamsService) ListPendingTeamInvitationsByID(ctx context.Context, orgID, teamID int64, opts *ListOptions) ([]*Invitation, *Response, error) { - u := fmt.Sprintf("organizations/%v/team/%v/invitations", orgID, teamID) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pendingInvitations []*Invitation - resp, err := s.client.Do(ctx, req, &pendingInvitations) - if err != nil { - return nil, resp, err - } - - return pendingInvitations, resp, nil -} - -// ListPendingTeamInvitationsBySlug get pending invitation list of a team, given a specified -// organization name, by team slug. -// -// GitHub API docs: https://docs.github.com/rest/teams/members#list-pending-team-invitations -// -//meta:operation GET /orgs/{org}/teams/{team_slug}/invitations -func (s *TeamsService) ListPendingTeamInvitationsBySlug(ctx context.Context, org, slug string, opts *ListOptions) ([]*Invitation, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams/%v/invitations", org, slug) - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pendingInvitations []*Invitation - resp, err := s.client.Do(ctx, req, &pendingInvitations) - if err != nil { - return nil, resp, err - } - - return pendingInvitations, resp, nil -} diff --git a/vendor/github.com/google/go-github/v71/github/users_blocking.go b/vendor/github.com/google/go-github/v71/github/users_blocking.go deleted file mode 100644 index 3f2af38f..00000000 --- a/vendor/github.com/google/go-github/v71/github/users_blocking.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListBlockedUsers lists all the blocked users by the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/blocking#list-users-blocked-by-the-authenticated-user -// -//meta:operation GET /user/blocks -func (s *UsersService) ListBlockedUsers(ctx context.Context, opts *ListOptions) ([]*User, *Response, error) { - u := "user/blocks" - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - var blockedUsers []*User - resp, err := s.client.Do(ctx, req, &blockedUsers) - if err != nil { - return nil, resp, err - } - - return blockedUsers, resp, nil -} - -// IsBlocked reports whether specified user is blocked by the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/blocking#check-if-a-user-is-blocked-by-the-authenticated-user -// -//meta:operation GET /user/blocks/{username} -func (s *UsersService) IsBlocked(ctx context.Context, user string) (bool, *Response, error) { - u := fmt.Sprintf("user/blocks/%v", user) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - resp, err := s.client.Do(ctx, req, nil) - isBlocked, err := parseBoolResponse(err) - return isBlocked, resp, err -} - -// BlockUser blocks specified user for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/blocking#block-a-user -// -//meta:operation PUT /user/blocks/{username} -func (s *UsersService) BlockUser(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("user/blocks/%v", user) - - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - return s.client.Do(ctx, req, nil) -} - -// UnblockUser unblocks specified user for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/blocking#unblock-a-user -// -//meta:operation DELETE /user/blocks/{username} -func (s *UsersService) UnblockUser(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("user/blocks/%v", user) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeBlockUsersPreview) - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/users_followers.go b/vendor/github.com/google/go-github/v71/github/users_followers.go deleted file mode 100644 index ec6f531e..00000000 --- a/vendor/github.com/google/go-github/v71/github/users_followers.go +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// ListFollowers lists the followers for a user. Passing the empty string will -// fetch followers for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/followers#list-followers-of-a-user -// GitHub API docs: https://docs.github.com/rest/users/followers#list-followers-of-the-authenticated-user -// -//meta:operation GET /user/followers -//meta:operation GET /users/{username}/followers -func (s *UsersService) ListFollowers(ctx context.Context, user string, opts *ListOptions) ([]*User, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/followers", user) - } else { - u = "user/followers" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// ListFollowing lists the people that a user is following. Passing the empty -// string will list people the authenticated user is following. 
-// -// GitHub API docs: https://docs.github.com/rest/users/followers#list-the-people-a-user-follows -// GitHub API docs: https://docs.github.com/rest/users/followers#list-the-people-the-authenticated-user-follows -// -//meta:operation GET /user/following -//meta:operation GET /users/{username}/following -func (s *UsersService) ListFollowing(ctx context.Context, user string, opts *ListOptions) ([]*User, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/following", user) - } else { - u = "user/following" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var users []*User - resp, err := s.client.Do(ctx, req, &users) - if err != nil { - return nil, resp, err - } - - return users, resp, nil -} - -// IsFollowing checks if "user" is following "target". Passing the empty -// string for "user" will check if the authenticated user is following "target". -// -// GitHub API docs: https://docs.github.com/rest/users/followers#check-if-a-person-is-followed-by-the-authenticated-user -// GitHub API docs: https://docs.github.com/rest/users/followers#check-if-a-user-follows-another-user -// -//meta:operation GET /user/following/{username} -//meta:operation GET /users/{username}/following/{target_user} -func (s *UsersService) IsFollowing(ctx context.Context, user, target string) (bool, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/following/%v", user, target) - } else { - u = fmt.Sprintf("user/following/%v", target) - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - following, err := parseBoolResponse(err) - return following, resp, err -} - -// Follow will cause the authenticated user to follow the specified user. -// -// GitHub API docs: https://docs.github.com/rest/users/followers#follow-a-user -// -//meta:operation PUT /user/following/{username} -func (s *UsersService) Follow(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("user/following/%v", user) - req, err := s.client.NewRequest("PUT", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// Unfollow will cause the authenticated user to unfollow the specified user. -// -// GitHub API docs: https://docs.github.com/rest/users/followers#unfollow-a-user -// -//meta:operation DELETE /user/following/{username} -func (s *UsersService) Unfollow(ctx context.Context, user string) (*Response, error) { - u := fmt.Sprintf("user/following/%v", user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/users_keys.go b/vendor/github.com/google/go-github/v71/github/users_keys.go deleted file mode 100644 index 4d42986e..00000000 --- a/vendor/github.com/google/go-github/v71/github/users_keys.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" -) - -// Key represents a public SSH key used to authenticate a user or deploy script. 
-type Key struct { - ID *int64 `json:"id,omitempty"` - Key *string `json:"key,omitempty"` - URL *string `json:"url,omitempty"` - Title *string `json:"title,omitempty"` - ReadOnly *bool `json:"read_only,omitempty"` - Verified *bool `json:"verified,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - AddedBy *string `json:"added_by,omitempty"` - LastUsed *Timestamp `json:"last_used,omitempty"` -} - -func (k Key) String() string { - return Stringify(k) -} - -// ListKeys lists the verified public keys for a user. Passing the empty -// string will fetch keys for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/keys#list-public-keys-for-a-user -// GitHub API docs: https://docs.github.com/rest/users/keys#list-public-ssh-keys-for-the-authenticated-user -// -//meta:operation GET /user/keys -//meta:operation GET /users/{username}/keys -func (s *UsersService) ListKeys(ctx context.Context, user string, opts *ListOptions) ([]*Key, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/keys", user) - } else { - u = "user/keys" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var keys []*Key - resp, err := s.client.Do(ctx, req, &keys) - if err != nil { - return nil, resp, err - } - - return keys, resp, nil -} - -// GetKey fetches a single public key. -// -// GitHub API docs: https://docs.github.com/rest/users/keys#get-a-public-ssh-key-for-the-authenticated-user -// -//meta:operation GET /user/keys/{key_id} -func (s *UsersService) GetKey(ctx context.Context, id int64) (*Key, *Response, error) { - u := fmt.Sprintf("user/keys/%v", id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - key := new(Key) - resp, err := s.client.Do(ctx, req, key) - if err != nil { - return nil, resp, err - } - - return key, resp, nil -} - -// CreateKey adds a public key for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/keys#create-a-public-ssh-key-for-the-authenticated-user -// -//meta:operation POST /user/keys -func (s *UsersService) CreateKey(ctx context.Context, key *Key) (*Key, *Response, error) { - u := "user/keys" - - req, err := s.client.NewRequest("POST", u, key) - if err != nil { - return nil, nil, err - } - - k := new(Key) - resp, err := s.client.Do(ctx, req, k) - if err != nil { - return nil, resp, err - } - - return k, resp, nil -} - -// DeleteKey deletes a public key. -// -// GitHub API docs: https://docs.github.com/rest/users/keys#delete-a-public-ssh-key-for-the-authenticated-user -// -//meta:operation DELETE /user/keys/{key_id} -func (s *UsersService) DeleteKey(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("user/keys/%v", id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/github/users_ssh_signing_keys.go b/vendor/github.com/google/go-github/v71/github/users_ssh_signing_keys.go deleted file mode 100644 index fcc930be..00000000 --- a/vendor/github.com/google/go-github/v71/github/users_ssh_signing_keys.go +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright 2022 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package github - -import ( - "context" - "fmt" -) - -// SSHSigningKey represents a public SSH key used to sign git commits. -type SSHSigningKey struct { - ID *int64 `json:"id,omitempty"` - Key *string `json:"key,omitempty"` - Title *string `json:"title,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` -} - -func (k SSHSigningKey) String() string { - return Stringify(k) -} - -// ListSSHSigningKeys lists the SSH signing keys for a user. Passing an empty -// username string will fetch SSH signing keys for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/ssh-signing-keys#list-ssh-signing-keys-for-a-user -// GitHub API docs: https://docs.github.com/rest/users/ssh-signing-keys#list-ssh-signing-keys-for-the-authenticated-user -// -//meta:operation GET /user/ssh_signing_keys -//meta:operation GET /users/{username}/ssh_signing_keys -func (s *UsersService) ListSSHSigningKeys(ctx context.Context, user string, opts *ListOptions) ([]*SSHSigningKey, *Response, error) { - var u string - if user != "" { - u = fmt.Sprintf("users/%v/ssh_signing_keys", user) - } else { - u = "user/ssh_signing_keys" - } - u, err := addOptions(u, opts) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var keys []*SSHSigningKey - resp, err := s.client.Do(ctx, req, &keys) - if err != nil { - return nil, resp, err - } - - return keys, resp, nil -} - -// GetSSHSigningKey fetches a single SSH signing key for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/ssh-signing-keys#get-an-ssh-signing-key-for-the-authenticated-user -// -//meta:operation GET /user/ssh_signing_keys/{ssh_signing_key_id} -func (s *UsersService) GetSSHSigningKey(ctx context.Context, id int64) (*SSHSigningKey, *Response, error) { - u := fmt.Sprintf("user/ssh_signing_keys/%v", id) - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - key := new(SSHSigningKey) - resp, err := s.client.Do(ctx, req, key) - if err != nil { - return nil, resp, err - } - - return key, resp, nil -} - -// CreateSSHSigningKey adds a SSH signing key for the authenticated user. -// -// GitHub API docs: https://docs.github.com/rest/users/ssh-signing-keys#create-a-ssh-signing-key-for-the-authenticated-user -// -//meta:operation POST /user/ssh_signing_keys -func (s *UsersService) CreateSSHSigningKey(ctx context.Context, key *Key) (*SSHSigningKey, *Response, error) { - u := "user/ssh_signing_keys" - - req, err := s.client.NewRequest("POST", u, key) - if err != nil { - return nil, nil, err - } - - k := new(SSHSigningKey) - resp, err := s.client.Do(ctx, req, k) - if err != nil { - return nil, resp, err - } - - return k, resp, nil -} - -// DeleteSSHSigningKey deletes a SSH signing key for the authenticated user. 
-// -// GitHub API docs: https://docs.github.com/rest/users/ssh-signing-keys#delete-an-ssh-signing-key-for-the-authenticated-user -// -//meta:operation DELETE /user/ssh_signing_keys/{ssh_signing_key_id} -func (s *UsersService) DeleteSSHSigningKey(ctx context.Context, id int64) (*Response, error) { - u := fmt.Sprintf("user/ssh_signing_keys/%v", id) - - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} diff --git a/vendor/github.com/google/go-github/v71/AUTHORS b/vendor/github.com/google/go-github/v72/AUTHORS similarity index 100% rename from vendor/github.com/google/go-github/v71/AUTHORS rename to vendor/github.com/google/go-github/v72/AUTHORS diff --git a/vendor/github.com/google/go-github/v57/LICENSE b/vendor/github.com/google/go-github/v72/LICENSE similarity index 100% rename from vendor/github.com/google/go-github/v57/LICENSE rename to vendor/github.com/google/go-github/v72/LICENSE diff --git a/vendor/github.com/google/go-github/v57/github/actions.go b/vendor/github.com/google/go-github/v72/github/actions.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/actions.go rename to vendor/github.com/google/go-github/v72/github/actions.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_artifacts.go b/vendor/github.com/google/go-github/v72/github/actions_artifacts.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_artifacts.go rename to vendor/github.com/google/go-github/v72/github/actions_artifacts.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_cache.go b/vendor/github.com/google/go-github/v72/github/actions_cache.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_cache.go rename to vendor/github.com/google/go-github/v72/github/actions_cache.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_hosted_runners.go b/vendor/github.com/google/go-github/v72/github/actions_hosted_runners.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_hosted_runners.go rename to vendor/github.com/google/go-github/v72/github/actions_hosted_runners.go diff --git a/vendor/github.com/google/go-github/v57/github/actions_oidc.go b/vendor/github.com/google/go-github/v72/github/actions_oidc.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/actions_oidc.go rename to vendor/github.com/google/go-github/v72/github/actions_oidc.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_permissions_enterprise.go b/vendor/github.com/google/go-github/v72/github/actions_permissions_enterprise.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_permissions_enterprise.go rename to vendor/github.com/google/go-github/v72/github/actions_permissions_enterprise.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_permissions_orgs.go b/vendor/github.com/google/go-github/v72/github/actions_permissions_orgs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_permissions_orgs.go rename to vendor/github.com/google/go-github/v72/github/actions_permissions_orgs.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_runner_groups.go b/vendor/github.com/google/go-github/v72/github/actions_runner_groups.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_runner_groups.go 
rename to vendor/github.com/google/go-github/v72/github/actions_runner_groups.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_runners.go b/vendor/github.com/google/go-github/v72/github/actions_runners.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_runners.go rename to vendor/github.com/google/go-github/v72/github/actions_runners.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_secrets.go b/vendor/github.com/google/go-github/v72/github/actions_secrets.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_secrets.go rename to vendor/github.com/google/go-github/v72/github/actions_secrets.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_variables.go b/vendor/github.com/google/go-github/v72/github/actions_variables.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_variables.go rename to vendor/github.com/google/go-github/v72/github/actions_variables.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_workflow_jobs.go b/vendor/github.com/google/go-github/v72/github/actions_workflow_jobs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_workflow_jobs.go rename to vendor/github.com/google/go-github/v72/github/actions_workflow_jobs.go diff --git a/vendor/github.com/google/go-github/v71/github/actions_workflow_runs.go b/vendor/github.com/google/go-github/v72/github/actions_workflow_runs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/actions_workflow_runs.go rename to vendor/github.com/google/go-github/v72/github/actions_workflow_runs.go diff --git a/vendor/github.com/google/go-github/v57/github/actions_workflows.go b/vendor/github.com/google/go-github/v72/github/actions_workflows.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/actions_workflows.go rename to vendor/github.com/google/go-github/v72/github/actions_workflows.go diff --git a/vendor/github.com/google/go-github/v57/github/activity.go b/vendor/github.com/google/go-github/v72/github/activity.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/activity.go rename to vendor/github.com/google/go-github/v72/github/activity.go diff --git a/vendor/github.com/google/go-github/v57/github/activity_events.go b/vendor/github.com/google/go-github/v72/github/activity_events.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/activity_events.go rename to vendor/github.com/google/go-github/v72/github/activity_events.go diff --git a/vendor/github.com/google/go-github/v71/github/activity_notifications.go b/vendor/github.com/google/go-github/v72/github/activity_notifications.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/activity_notifications.go rename to vendor/github.com/google/go-github/v72/github/activity_notifications.go diff --git a/vendor/github.com/google/go-github/v57/github/activity_star.go b/vendor/github.com/google/go-github/v72/github/activity_star.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/activity_star.go rename to vendor/github.com/google/go-github/v72/github/activity_star.go diff --git a/vendor/github.com/google/go-github/v57/github/activity_watching.go b/vendor/github.com/google/go-github/v72/github/activity_watching.go similarity index 100% rename from 
vendor/github.com/google/go-github/v57/github/activity_watching.go rename to vendor/github.com/google/go-github/v72/github/activity_watching.go diff --git a/vendor/github.com/google/go-github/v71/github/admin.go b/vendor/github.com/google/go-github/v72/github/admin.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/admin.go rename to vendor/github.com/google/go-github/v72/github/admin.go diff --git a/vendor/github.com/google/go-github/v71/github/admin_orgs.go b/vendor/github.com/google/go-github/v72/github/admin_orgs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/admin_orgs.go rename to vendor/github.com/google/go-github/v72/github/admin_orgs.go diff --git a/vendor/github.com/google/go-github/v71/github/admin_stats.go b/vendor/github.com/google/go-github/v72/github/admin_stats.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/admin_stats.go rename to vendor/github.com/google/go-github/v72/github/admin_stats.go diff --git a/vendor/github.com/google/go-github/v71/github/admin_users.go b/vendor/github.com/google/go-github/v72/github/admin_users.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/admin_users.go rename to vendor/github.com/google/go-github/v72/github/admin_users.go diff --git a/vendor/github.com/google/go-github/v71/github/apps.go b/vendor/github.com/google/go-github/v72/github/apps.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/apps.go rename to vendor/github.com/google/go-github/v72/github/apps.go diff --git a/vendor/github.com/google/go-github/v57/github/apps_hooks.go b/vendor/github.com/google/go-github/v72/github/apps_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/apps_hooks.go rename to vendor/github.com/google/go-github/v72/github/apps_hooks.go diff --git a/vendor/github.com/google/go-github/v57/github/apps_hooks_deliveries.go b/vendor/github.com/google/go-github/v72/github/apps_hooks_deliveries.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/apps_hooks_deliveries.go rename to vendor/github.com/google/go-github/v72/github/apps_hooks_deliveries.go diff --git a/vendor/github.com/google/go-github/v57/github/apps_installation.go b/vendor/github.com/google/go-github/v72/github/apps_installation.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/apps_installation.go rename to vendor/github.com/google/go-github/v72/github/apps_installation.go diff --git a/vendor/github.com/google/go-github/v57/github/apps_manifest.go b/vendor/github.com/google/go-github/v72/github/apps_manifest.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/apps_manifest.go rename to vendor/github.com/google/go-github/v72/github/apps_manifest.go diff --git a/vendor/github.com/google/go-github/v57/github/apps_marketplace.go b/vendor/github.com/google/go-github/v72/github/apps_marketplace.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/apps_marketplace.go rename to vendor/github.com/google/go-github/v72/github/apps_marketplace.go diff --git a/vendor/github.com/google/go-github/v71/github/attestations.go b/vendor/github.com/google/go-github/v72/github/attestations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/attestations.go rename to vendor/github.com/google/go-github/v72/github/attestations.go diff --git 
a/vendor/github.com/google/go-github/v71/github/authorizations.go b/vendor/github.com/google/go-github/v72/github/authorizations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/authorizations.go rename to vendor/github.com/google/go-github/v72/github/authorizations.go diff --git a/vendor/github.com/google/go-github/v71/github/billing.go b/vendor/github.com/google/go-github/v72/github/billing.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/billing.go rename to vendor/github.com/google/go-github/v72/github/billing.go diff --git a/vendor/github.com/google/go-github/v71/github/checks.go b/vendor/github.com/google/go-github/v72/github/checks.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/checks.go rename to vendor/github.com/google/go-github/v72/github/checks.go diff --git a/vendor/github.com/google/go-github/v71/github/code_scanning.go b/vendor/github.com/google/go-github/v72/github/code_scanning.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/code_scanning.go rename to vendor/github.com/google/go-github/v72/github/code_scanning.go diff --git a/vendor/github.com/google/go-github/v71/github/codesofconduct.go b/vendor/github.com/google/go-github/v72/github/codesofconduct.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/codesofconduct.go rename to vendor/github.com/google/go-github/v72/github/codesofconduct.go diff --git a/vendor/github.com/google/go-github/v57/github/codespaces.go b/vendor/github.com/google/go-github/v72/github/codespaces.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/codespaces.go rename to vendor/github.com/google/go-github/v72/github/codespaces.go diff --git a/vendor/github.com/google/go-github/v57/github/codespaces_secrets.go b/vendor/github.com/google/go-github/v72/github/codespaces_secrets.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/codespaces_secrets.go rename to vendor/github.com/google/go-github/v72/github/codespaces_secrets.go diff --git a/vendor/github.com/google/go-github/v71/github/copilot.go b/vendor/github.com/google/go-github/v72/github/copilot.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/copilot.go rename to vendor/github.com/google/go-github/v72/github/copilot.go diff --git a/vendor/github.com/google/go-github/v57/github/dependabot.go b/vendor/github.com/google/go-github/v72/github/dependabot.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/dependabot.go rename to vendor/github.com/google/go-github/v72/github/dependabot.go diff --git a/vendor/github.com/google/go-github/v71/github/dependabot_alerts.go b/vendor/github.com/google/go-github/v72/github/dependabot_alerts.go similarity index 94% rename from vendor/github.com/google/go-github/v71/github/dependabot_alerts.go rename to vendor/github.com/google/go-github/v72/github/dependabot_alerts.go index c274f07b..67e624c9 100644 --- a/vendor/github.com/google/go-github/v71/github/dependabot_alerts.go +++ b/vendor/github.com/google/go-github/v72/github/dependabot_alerts.go @@ -29,6 +29,15 @@ type AdvisoryCWEs struct { Name *string `json:"name,omitempty"` } +// AdvisoryEPSS represents the advisory pertaining to the Exploit Prediction Scoring System. 
+// +// For more information, see: +// https://github.blog/changelog/2024-10-10-epss-scores-in-the-github-advisory-database/ +type AdvisoryEPSS struct { + Percentage float64 `json:"percentage"` + Percentile float64 `json:"percentile"` +} + // DependabotSecurityAdvisory represents the GitHub Security Advisory. type DependabotSecurityAdvisory struct { GHSAID *string `json:"ghsa_id,omitempty"` @@ -39,6 +48,7 @@ type DependabotSecurityAdvisory struct { Severity *string `json:"severity,omitempty"` CVSS *AdvisoryCVSS `json:"cvss,omitempty"` CWEs []*AdvisoryCWEs `json:"cwes,omitempty"` + EPSS *AdvisoryEPSS `json:"epss,omitempty"` Identifiers []*AdvisoryIdentifier `json:"identifiers,omitempty"` References []*AdvisoryReference `json:"references,omitempty"` PublishedAt *Timestamp `json:"published_at,omitempty"` diff --git a/vendor/github.com/google/go-github/v57/github/dependabot_secrets.go b/vendor/github.com/google/go-github/v72/github/dependabot_secrets.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/dependabot_secrets.go rename to vendor/github.com/google/go-github/v72/github/dependabot_secrets.go diff --git a/vendor/github.com/google/go-github/v57/github/dependency_graph.go b/vendor/github.com/google/go-github/v72/github/dependency_graph.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/dependency_graph.go rename to vendor/github.com/google/go-github/v72/github/dependency_graph.go diff --git a/vendor/github.com/google/go-github/v71/github/dependency_graph_snapshots.go b/vendor/github.com/google/go-github/v72/github/dependency_graph_snapshots.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/dependency_graph_snapshots.go rename to vendor/github.com/google/go-github/v72/github/dependency_graph_snapshots.go diff --git a/vendor/github.com/google/go-github/v71/github/doc.go b/vendor/github.com/google/go-github/v72/github/doc.go similarity index 99% rename from vendor/github.com/google/go-github/v71/github/doc.go rename to vendor/github.com/google/go-github/v72/github/doc.go index b963051f..8a6112fe 100644 --- a/vendor/github.com/google/go-github/v71/github/doc.go +++ b/vendor/github.com/google/go-github/v72/github/doc.go @@ -8,7 +8,7 @@ Package github provides a client for using the GitHub API. 
Usage: - import "github.com/google/go-github/v71/github" // with go modules enabled (GO111MODULE=on or outside GOPATH) + import "github.com/google/go-github/v72/github" // with go modules enabled (GO111MODULE=on or outside GOPATH) import "github.com/google/go-github/github" // with go modules disabled Construct a new GitHub client, then use the various services on the client to diff --git a/vendor/github.com/google/go-github/v71/github/emojis.go b/vendor/github.com/google/go-github/v72/github/emojis.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/emojis.go rename to vendor/github.com/google/go-github/v72/github/emojis.go diff --git a/vendor/github.com/google/go-github/v57/github/enterprise.go b/vendor/github.com/google/go-github/v72/github/enterprise.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/enterprise.go rename to vendor/github.com/google/go-github/v72/github/enterprise.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_actions_hosted_runners.go b/vendor/github.com/google/go-github/v72/github/enterprise_actions_hosted_runners.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_actions_hosted_runners.go rename to vendor/github.com/google/go-github/v72/github/enterprise_actions_hosted_runners.go diff --git a/vendor/github.com/google/go-github/v57/github/enterprise_actions_runner_groups.go b/vendor/github.com/google/go-github/v72/github/enterprise_actions_runner_groups.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/enterprise_actions_runner_groups.go rename to vendor/github.com/google/go-github/v72/github/enterprise_actions_runner_groups.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_actions_runners.go b/vendor/github.com/google/go-github/v72/github/enterprise_actions_runners.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_actions_runners.go rename to vendor/github.com/google/go-github/v72/github/enterprise_actions_runners.go diff --git a/vendor/github.com/google/go-github/v57/github/enterprise_audit_log.go b/vendor/github.com/google/go-github/v72/github/enterprise_audit_log.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/enterprise_audit_log.go rename to vendor/github.com/google/go-github/v72/github/enterprise_audit_log.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_code_security_and_analysis.go b/vendor/github.com/google/go-github/v72/github/enterprise_code_security_and_analysis.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_code_security_and_analysis.go rename to vendor/github.com/google/go-github/v72/github/enterprise_code_security_and_analysis.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes.go b/vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes.go rename to vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_config.go b/vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_config.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_config.go rename to 
vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_config.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_maintenance.go b/vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_maintenance.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_maintenance.go rename to vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_maintenance.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_ssh.go b/vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_ssh.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_manage_ghes_ssh.go rename to vendor/github.com/google/go-github/v72/github/enterprise_manage_ghes_ssh.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_network_configurations.go b/vendor/github.com/google/go-github/v72/github/enterprise_network_configurations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_network_configurations.go rename to vendor/github.com/google/go-github/v72/github/enterprise_network_configurations.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_properties.go b/vendor/github.com/google/go-github/v72/github/enterprise_properties.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_properties.go rename to vendor/github.com/google/go-github/v72/github/enterprise_properties.go diff --git a/vendor/github.com/google/go-github/v71/github/enterprise_rules.go b/vendor/github.com/google/go-github/v72/github/enterprise_rules.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/enterprise_rules.go rename to vendor/github.com/google/go-github/v72/github/enterprise_rules.go diff --git a/vendor/github.com/google/go-github/v57/github/event.go b/vendor/github.com/google/go-github/v72/github/event.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/event.go rename to vendor/github.com/google/go-github/v72/github/event.go diff --git a/vendor/github.com/google/go-github/v71/github/event_types.go b/vendor/github.com/google/go-github/v72/github/event_types.go similarity index 99% rename from vendor/github.com/google/go-github/v71/github/event_types.go rename to vendor/github.com/google/go-github/v72/github/event_types.go index 6a6caf19..b5369865 100644 --- a/vendor/github.com/google/go-github/v71/github/event_types.go +++ b/vendor/github.com/google/go-github/v72/github/event_types.go @@ -1446,6 +1446,26 @@ type PushEventRepoOwner struct { Email *string `json:"email,omitempty"` } +// RegistryPackageEvent represents activity related to GitHub Packages. +// The Webhook event name is "registry_package". +// +// This event is triggered when a GitHub Package is published or updated. +// +// GitHub API docs: https://docs.github.com/en/webhooks/webhook-events-and-payloads#registry_package +type RegistryPackageEvent struct { + // Action is the action that was performed. + // Can be "published" or "updated". + Action *string `json:"action,omitempty"` + RegistryPackage *Package `json:"registry_package,omitempty"` + Repository *Repository `json:"repository,omitempty"` + Organization *Organization `json:"organization,omitempty"` + Enterprise *Enterprise `json:"enterprise,omitempty"` + Sender *User `json:"sender,omitempty"` + + // The following fields are only populated by Webhook events. 
+ Installation *Installation `json:"installation,omitempty"` +} + // ReleaseEvent is triggered when a release is published, unpublished, created, // edited, deleted, or prereleased. // The Webhook event name is "release". diff --git a/vendor/github.com/google/go-github/v71/github/gists.go b/vendor/github.com/google/go-github/v72/github/gists.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/gists.go rename to vendor/github.com/google/go-github/v72/github/gists.go diff --git a/vendor/github.com/google/go-github/v57/github/gists_comments.go b/vendor/github.com/google/go-github/v72/github/gists_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/gists_comments.go rename to vendor/github.com/google/go-github/v72/github/gists_comments.go diff --git a/vendor/github.com/google/go-github/v57/github/git.go b/vendor/github.com/google/go-github/v72/github/git.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/git.go rename to vendor/github.com/google/go-github/v72/github/git.go diff --git a/vendor/github.com/google/go-github/v57/github/git_blobs.go b/vendor/github.com/google/go-github/v72/github/git_blobs.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/git_blobs.go rename to vendor/github.com/google/go-github/v72/github/git_blobs.go diff --git a/vendor/github.com/google/go-github/v71/github/git_commits.go b/vendor/github.com/google/go-github/v72/github/git_commits.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/git_commits.go rename to vendor/github.com/google/go-github/v72/github/git_commits.go diff --git a/vendor/github.com/google/go-github/v71/github/git_refs.go b/vendor/github.com/google/go-github/v72/github/git_refs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/git_refs.go rename to vendor/github.com/google/go-github/v72/github/git_refs.go diff --git a/vendor/github.com/google/go-github/v57/github/git_tags.go b/vendor/github.com/google/go-github/v72/github/git_tags.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/git_tags.go rename to vendor/github.com/google/go-github/v72/github/git_tags.go diff --git a/vendor/github.com/google/go-github/v57/github/git_trees.go b/vendor/github.com/google/go-github/v72/github/git_trees.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/git_trees.go rename to vendor/github.com/google/go-github/v72/github/git_trees.go diff --git a/vendor/github.com/google/go-github/v71/github/github-accessors.go b/vendor/github.com/google/go-github/v72/github/github-accessors.go similarity index 99% rename from vendor/github.com/google/go-github/v71/github/github-accessors.go rename to vendor/github.com/google/go-github/v72/github/github-accessors.go index 45fa9075..6378b40b 100644 --- a/vendor/github.com/google/go-github/v71/github/github-accessors.go +++ b/vendor/github.com/google/go-github/v72/github/github-accessors.go @@ -6022,38 +6022,6 @@ func (c *CreateUpdateEnvironment) GetWaitTimer() int { return *c.WaitTimer } -// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. -func (c *CreateUpdateRequiredWorkflowOptions) GetRepositoryID() int64 { - if c == nil || c.RepositoryID == nil { - return 0 - } - return *c.RepositoryID -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. 
-func (c *CreateUpdateRequiredWorkflowOptions) GetScope() string { - if c == nil || c.Scope == nil { - return "" - } - return *c.Scope -} - -// GetSelectedRepositoryIDs returns the SelectedRepositoryIDs field. -func (c *CreateUpdateRequiredWorkflowOptions) GetSelectedRepositoryIDs() *SelectedRepoIDs { - if c == nil { - return nil - } - return c.SelectedRepositoryIDs -} - -// GetWorkflowFilePath returns the WorkflowFilePath field if it's non-nil, zero value otherwise. -func (c *CreateUpdateRequiredWorkflowOptions) GetWorkflowFilePath() string { - if c == nil || c.WorkflowFilePath == nil { - return "" - } - return *c.WorkflowFilePath -} - // GetEmail returns the Email field if it's non-nil, zero value otherwise. func (c *CreateUserRequest) GetEmail() string { if c == nil || c.Email == nil { @@ -6886,6 +6854,14 @@ func (d *DependabotSecurityAdvisory) GetDescription() string { return *d.Description } +// GetEPSS returns the EPSS field. +func (d *DependabotSecurityAdvisory) GetEPSS() *AdvisoryEPSS { + if d == nil { + return nil + } + return d.EPSS +} + // GetGHSAID returns the GHSAID field if it's non-nil, zero value otherwise. func (d *DependabotSecurityAdvisory) GetGHSAID() string { if d == nil || d.GHSAID == nil { @@ -12430,6 +12406,14 @@ func (i *IssueRequest) GetTitle() string { return *i.Title } +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetType() string { + if i == nil || i.Type == nil { + return "" + } + return *i.Type +} + // GetAction returns the Action field if it's non-nil, zero value otherwise. func (i *IssuesEvent) GetAction() string { if i == nil || i.Action == nil { @@ -15694,94 +15678,6 @@ func (o *OrgBlockEvent) GetSender() *User { return o.Sender } -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetCreatedAt() Timestamp { - if o == nil || o.CreatedAt == nil { - return Timestamp{} - } - return *o.CreatedAt -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetID() int64 { - if o == nil || o.ID == nil { - return 0 - } - return *o.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetName() string { - if o == nil || o.Name == nil { - return "" - } - return *o.Name -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetPath() string { - if o == nil || o.Path == nil { - return "" - } - return *o.Path -} - -// GetRef returns the Ref field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetRef() string { - if o == nil || o.Ref == nil { - return "" - } - return *o.Ref -} - -// GetRepository returns the Repository field. -func (o *OrgRequiredWorkflow) GetRepository() *Repository { - if o == nil { - return nil - } - return o.Repository -} - -// GetScope returns the Scope field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetScope() string { - if o == nil || o.Scope == nil { - return "" - } - return *o.Scope -} - -// GetSelectedRepositoriesURL returns the SelectedRepositoriesURL field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetSelectedRepositoriesURL() string { - if o == nil || o.SelectedRepositoriesURL == nil { - return "" - } - return *o.SelectedRepositoriesURL -} - -// GetState returns the State field if it's non-nil, zero value otherwise. 
-func (o *OrgRequiredWorkflow) GetState() string { - if o == nil || o.State == nil { - return "" - } - return *o.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflow) GetUpdatedAt() Timestamp { - if o == nil || o.UpdatedAt == nil { - return Timestamp{} - } - return *o.UpdatedAt -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (o *OrgRequiredWorkflows) GetTotalCount() int { - if o == nil || o.TotalCount == nil { - return 0 - } - return *o.TotalCount -} - // GetDisabledOrgs returns the DisabledOrgs field if it's non-nil, zero value otherwise. func (o *OrgStats) GetDisabledOrgs() int { if o == nil || o.DisabledOrgs == nil { @@ -20982,6 +20878,62 @@ func (r *RegistrationToken) GetToken() string { return *r.Token } +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (r *RegistryPackageEvent) GetAction() string { + if r == nil || r.Action == nil { + return "" + } + return *r.Action +} + +// GetEnterprise returns the Enterprise field. +func (r *RegistryPackageEvent) GetEnterprise() *Enterprise { + if r == nil { + return nil + } + return r.Enterprise +} + +// GetInstallation returns the Installation field. +func (r *RegistryPackageEvent) GetInstallation() *Installation { + if r == nil { + return nil + } + return r.Installation +} + +// GetOrganization returns the Organization field. +func (r *RegistryPackageEvent) GetOrganization() *Organization { + if r == nil { + return nil + } + return r.Organization +} + +// GetRegistryPackage returns the RegistryPackage field. +func (r *RegistryPackageEvent) GetRegistryPackage() *Package { + if r == nil { + return nil + } + return r.RegistryPackage +} + +// GetRepository returns the Repository field. +func (r *RegistryPackageEvent) GetRepository() *Repository { + if r == nil { + return nil + } + return r.Repository +} + +// GetSender returns the Sender field. +func (r *RegistryPackageEvent) GetSender() *User { + if r == nil { + return nil + } + return r.Sender +} + // GetBrowserDownloadURL returns the BrowserDownloadURL field if it's non-nil, zero value otherwise. func (r *ReleaseAsset) GetBrowserDownloadURL() string { if r == nil || r.BrowserDownloadURL == nil { @@ -21350,102 +21302,6 @@ func (r *RepoName) GetFrom() string { return *r.From } -// GetBadgeURL returns the BadgeURL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetBadgeURL() string { - if r == nil || r.BadgeURL == nil { - return "" - } - return *r.BadgeURL -} - -// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetCreatedAt() Timestamp { - if r == nil || r.CreatedAt == nil { - return Timestamp{} - } - return *r.CreatedAt -} - -// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetHTMLURL() string { - if r == nil || r.HTMLURL == nil { - return "" - } - return *r.HTMLURL -} - -// GetID returns the ID field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetID() int64 { - if r == nil || r.ID == nil { - return 0 - } - return *r.ID -} - -// GetName returns the Name field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetName() string { - if r == nil || r.Name == nil { - return "" - } - return *r.Name -} - -// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
-func (r *RepoRequiredWorkflow) GetNodeID() string { - if r == nil || r.NodeID == nil { - return "" - } - return *r.NodeID -} - -// GetPath returns the Path field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetPath() string { - if r == nil || r.Path == nil { - return "" - } - return *r.Path -} - -// GetSourceRepository returns the SourceRepository field. -func (r *RepoRequiredWorkflow) GetSourceRepository() *Repository { - if r == nil { - return nil - } - return r.SourceRepository -} - -// GetState returns the State field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetState() string { - if r == nil || r.State == nil { - return "" - } - return *r.State -} - -// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetUpdatedAt() Timestamp { - if r == nil || r.UpdatedAt == nil { - return Timestamp{} - } - return *r.UpdatedAt -} - -// GetURL returns the URL field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflow) GetURL() string { - if r == nil || r.URL == nil { - return "" - } - return *r.URL -} - -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (r *RepoRequiredWorkflows) GetTotalCount() int { - if r == nil || r.TotalCount == nil { - return 0 - } - return *r.TotalCount -} - // GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. func (r *RepositoriesSearchResult) GetIncompleteResults() bool { if r == nil || r.IncompleteResults == nil { @@ -22934,6 +22790,14 @@ func (r *RepositoryLicense) GetURL() string { return *r.URL } +// GetIncludesParents returns the IncludesParents field if it's non-nil, zero value otherwise. +func (r *RepositoryListRulesetsOptions) GetIncludesParents() bool { + if r == nil || r.IncludesParents == nil { + return false + } + return *r.IncludesParents +} + // GetBase returns the Base field if it's non-nil, zero value otherwise. func (r *RepositoryMergeRequest) GetBase() string { if r == nil || r.Base == nil { @@ -24078,14 +23942,6 @@ func (r *RequiredStatusChecksRuleParameters) GetDoNotEnforceOnCreate() bool { return *r.DoNotEnforceOnCreate } -// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. -func (r *RequiredWorkflowSelectedRepos) GetTotalCount() int { - if r == nil || r.TotalCount == nil { - return 0 - } - return *r.TotalCount -} - // GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. func (r *ReviewersRequest) GetNodeID() string { if r == nil || r.NodeID == nil { @@ -24846,6 +24702,14 @@ func (s *SecretScanningAlert) GetHTMLURL() string { return *s.HTMLURL } +// GetIsBase64Encoded returns the IsBase64Encoded field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetIsBase64Encoded() bool { + if s == nil || s.IsBase64Encoded == nil { + return false + } + return *s.IsBase64Encoded +} + // GetLocationsURL returns the LocationsURL field if it's non-nil, zero value otherwise. func (s *SecretScanningAlert) GetLocationsURL() string { if s == nil || s.LocationsURL == nil { @@ -24854,6 +24718,14 @@ func (s *SecretScanningAlert) GetLocationsURL() string { return *s.LocationsURL } +// GetMultiRepo returns the MultiRepo field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetMultiRepo() bool { + if s == nil || s.MultiRepo == nil { + return false + } + return *s.MultiRepo +} + // GetNumber returns the Number field if it's non-nil, zero value otherwise. 
func (s *SecretScanningAlert) GetNumber() int { if s == nil || s.Number == nil { @@ -24862,6 +24734,14 @@ func (s *SecretScanningAlert) GetNumber() int { return *s.Number } +// GetPubliclyLeaked returns the PubliclyLeaked field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetPubliclyLeaked() bool { + if s == nil || s.PubliclyLeaked == nil { + return false + } + return *s.PubliclyLeaked +} + // GetPushProtectionBypassed returns the PushProtectionBypassed field if it's non-nil, zero value otherwise. func (s *SecretScanningAlert) GetPushProtectionBypassed() bool { if s == nil || s.PushProtectionBypassed == nil { @@ -24886,6 +24766,38 @@ func (s *SecretScanningAlert) GetPushProtectionBypassedBy() *User { return s.PushProtectionBypassedBy } +// GetPushProtectionBypassRequestComment returns the PushProtectionBypassRequestComment field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetPushProtectionBypassRequestComment() string { + if s == nil || s.PushProtectionBypassRequestComment == nil { + return "" + } + return *s.PushProtectionBypassRequestComment +} + +// GetPushProtectionBypassRequestHTMLURL returns the PushProtectionBypassRequestHTMLURL field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetPushProtectionBypassRequestHTMLURL() string { + if s == nil || s.PushProtectionBypassRequestHTMLURL == nil { + return "" + } + return *s.PushProtectionBypassRequestHTMLURL +} + +// GetPushProtectionBypassRequestReviewer returns the PushProtectionBypassRequestReviewer field. +func (s *SecretScanningAlert) GetPushProtectionBypassRequestReviewer() *User { + if s == nil { + return nil + } + return s.PushProtectionBypassRequestReviewer +} + +// GetPushProtectionBypassRequestReviewerComment returns the PushProtectionBypassRequestReviewerComment field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetPushProtectionBypassRequestReviewerComment() string { + if s == nil || s.PushProtectionBypassRequestReviewerComment == nil { + return "" + } + return *s.PushProtectionBypassRequestReviewerComment +} + // GetRepository returns the Repository field. func (s *SecretScanningAlert) GetRepository() *Repository { if s == nil { @@ -24974,6 +24886,14 @@ func (s *SecretScanningAlert) GetURL() string { return *s.URL } +// GetValidity returns the Validity field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetValidity() string { + if s == nil || s.Validity == nil { + return "" + } + return *s.Validity +} + // GetAction returns the Action field if it's non-nil, zero value otherwise. 
func (s *SecretScanningAlertEvent) GetAction() string { if s == nil || s.Action == nil { diff --git a/vendor/github.com/google/go-github/v71/github/github.go b/vendor/github.com/google/go-github/v72/github/github.go similarity index 99% rename from vendor/github.com/google/go-github/v71/github/github.go rename to vendor/github.com/google/go-github/v72/github/github.go index ddafffb2..a3b1941e 100644 --- a/vendor/github.com/google/go-github/v71/github/github.go +++ b/vendor/github.com/google/go-github/v72/github/github.go @@ -29,7 +29,7 @@ import ( ) const ( - Version = "v71.0.0" + Version = "v72.0.0" defaultAPIVersion = "2022-11-28" defaultBaseURL = "https://api.github.com/" diff --git a/vendor/github.com/google/go-github/v57/github/gitignore.go b/vendor/github.com/google/go-github/v72/github/gitignore.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/gitignore.go rename to vendor/github.com/google/go-github/v72/github/gitignore.go diff --git a/vendor/github.com/google/go-github/v57/github/interactions.go b/vendor/github.com/google/go-github/v72/github/interactions.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/interactions.go rename to vendor/github.com/google/go-github/v72/github/interactions.go diff --git a/vendor/github.com/google/go-github/v71/github/interactions_orgs.go b/vendor/github.com/google/go-github/v72/github/interactions_orgs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/interactions_orgs.go rename to vendor/github.com/google/go-github/v72/github/interactions_orgs.go diff --git a/vendor/github.com/google/go-github/v71/github/interactions_repos.go b/vendor/github.com/google/go-github/v72/github/interactions_repos.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/interactions_repos.go rename to vendor/github.com/google/go-github/v72/github/interactions_repos.go diff --git a/vendor/github.com/google/go-github/v57/github/issue_import.go b/vendor/github.com/google/go-github/v72/github/issue_import.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/issue_import.go rename to vendor/github.com/google/go-github/v72/github/issue_import.go diff --git a/vendor/github.com/google/go-github/v71/github/issues.go b/vendor/github.com/google/go-github/v72/github/issues.go similarity index 97% rename from vendor/github.com/google/go-github/v71/github/issues.go rename to vendor/github.com/google/go-github/v72/github/issues.go index 6d3a6b15..395d64cf 100644 --- a/vendor/github.com/google/go-github/v71/github/issues.go +++ b/vendor/github.com/google/go-github/v72/github/issues.go @@ -90,6 +90,7 @@ type IssueRequest struct { StateReason *string `json:"state_reason,omitempty"` Milestone *int `json:"milestone,omitempty"` Assignees *[]string `json:"assignees,omitempty"` + Type *string `json:"type,omitempty"` } // IssueListOptions specifies the optional parameters to the IssuesService.List @@ -117,6 +118,10 @@ type IssueListOptions struct { // Since filters issues by time. Since time.Time `url:"since,omitempty"` + ListCursorOptions + + // Add ListOptions so offset pagination with integer type "page" query parameter is accepted + // since ListCursorOptions accepts "page" as string only. ListOptions } @@ -233,6 +238,10 @@ type IssueListByRepoOptions struct { // Since filters issues by time. 
Since time.Time `url:"since,omitempty"` + ListCursorOptions + + // Add ListOptions so offset pagination with integer type "page" query parameter is accepted + // since ListCursorOptions accepts "page" as string only. ListOptions } diff --git a/vendor/github.com/google/go-github/v57/github/issues_assignees.go b/vendor/github.com/google/go-github/v72/github/issues_assignees.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/issues_assignees.go rename to vendor/github.com/google/go-github/v72/github/issues_assignees.go diff --git a/vendor/github.com/google/go-github/v57/github/issues_comments.go b/vendor/github.com/google/go-github/v72/github/issues_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/issues_comments.go rename to vendor/github.com/google/go-github/v72/github/issues_comments.go diff --git a/vendor/github.com/google/go-github/v71/github/issues_events.go b/vendor/github.com/google/go-github/v72/github/issues_events.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/issues_events.go rename to vendor/github.com/google/go-github/v72/github/issues_events.go diff --git a/vendor/github.com/google/go-github/v71/github/issues_labels.go b/vendor/github.com/google/go-github/v72/github/issues_labels.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/issues_labels.go rename to vendor/github.com/google/go-github/v72/github/issues_labels.go diff --git a/vendor/github.com/google/go-github/v57/github/issues_milestones.go b/vendor/github.com/google/go-github/v72/github/issues_milestones.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/issues_milestones.go rename to vendor/github.com/google/go-github/v72/github/issues_milestones.go diff --git a/vendor/github.com/google/go-github/v71/github/issues_timeline.go b/vendor/github.com/google/go-github/v72/github/issues_timeline.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/issues_timeline.go rename to vendor/github.com/google/go-github/v72/github/issues_timeline.go diff --git a/vendor/github.com/google/go-github/v57/github/licenses.go b/vendor/github.com/google/go-github/v72/github/licenses.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/licenses.go rename to vendor/github.com/google/go-github/v72/github/licenses.go diff --git a/vendor/github.com/google/go-github/v71/github/markdown.go b/vendor/github.com/google/go-github/v72/github/markdown.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/markdown.go rename to vendor/github.com/google/go-github/v72/github/markdown.go diff --git a/vendor/github.com/google/go-github/v71/github/messages.go b/vendor/github.com/google/go-github/v72/github/messages.go similarity index 99% rename from vendor/github.com/google/go-github/v71/github/messages.go rename to vendor/github.com/google/go-github/v72/github/messages.go index 59b214b3..2b5cce75 100644 --- a/vendor/github.com/google/go-github/v71/github/messages.go +++ b/vendor/github.com/google/go-github/v72/github/messages.go @@ -95,6 +95,7 @@ var ( "pull_request_review_thread": &PullRequestReviewThreadEvent{}, "pull_request_target": &PullRequestTargetEvent{}, "push": &PushEvent{}, + "registry_package": &RegistryPackageEvent{}, "repository": &RepositoryEvent{}, "repository_dispatch": &RepositoryDispatchEvent{}, "repository_import": &RepositoryImportEvent{}, diff --git 
a/vendor/github.com/google/go-github/v71/github/meta.go b/vendor/github.com/google/go-github/v72/github/meta.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/meta.go rename to vendor/github.com/google/go-github/v72/github/meta.go diff --git a/vendor/github.com/google/go-github/v71/github/migrations.go b/vendor/github.com/google/go-github/v72/github/migrations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/migrations.go rename to vendor/github.com/google/go-github/v72/github/migrations.go diff --git a/vendor/github.com/google/go-github/v71/github/migrations_source_import.go b/vendor/github.com/google/go-github/v72/github/migrations_source_import.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/migrations_source_import.go rename to vendor/github.com/google/go-github/v72/github/migrations_source_import.go diff --git a/vendor/github.com/google/go-github/v71/github/migrations_user.go b/vendor/github.com/google/go-github/v72/github/migrations_user.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/migrations_user.go rename to vendor/github.com/google/go-github/v72/github/migrations_user.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs.go b/vendor/github.com/google/go-github/v72/github/orgs.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs.go rename to vendor/github.com/google/go-github/v72/github/orgs.go diff --git a/vendor/github.com/google/go-github/v57/github/orgs_actions_allowed.go b/vendor/github.com/google/go-github/v72/github/orgs_actions_allowed.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/orgs_actions_allowed.go rename to vendor/github.com/google/go-github/v72/github/orgs_actions_allowed.go diff --git a/vendor/github.com/google/go-github/v57/github/orgs_actions_permissions.go b/vendor/github.com/google/go-github/v72/github/orgs_actions_permissions.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/orgs_actions_permissions.go rename to vendor/github.com/google/go-github/v72/github/orgs_actions_permissions.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_attestations.go b/vendor/github.com/google/go-github/v72/github/orgs_attestations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_attestations.go rename to vendor/github.com/google/go-github/v72/github/orgs_attestations.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_audit_log.go b/vendor/github.com/google/go-github/v72/github/orgs_audit_log.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_audit_log.go rename to vendor/github.com/google/go-github/v72/github/orgs_audit_log.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_codesecurity_configurations.go b/vendor/github.com/google/go-github/v72/github/orgs_codesecurity_configurations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_codesecurity_configurations.go rename to vendor/github.com/google/go-github/v72/github/orgs_codesecurity_configurations.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_credential_authorizations.go b/vendor/github.com/google/go-github/v72/github/orgs_credential_authorizations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_credential_authorizations.go rename to 
vendor/github.com/google/go-github/v72/github/orgs_credential_authorizations.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_custom_repository_roles.go b/vendor/github.com/google/go-github/v72/github/orgs_custom_repository_roles.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_custom_repository_roles.go rename to vendor/github.com/google/go-github/v72/github/orgs_custom_repository_roles.go diff --git a/vendor/github.com/google/go-github/v57/github/orgs_hooks.go b/vendor/github.com/google/go-github/v72/github/orgs_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/orgs_hooks.go rename to vendor/github.com/google/go-github/v72/github/orgs_hooks.go diff --git a/vendor/github.com/google/go-github/v57/github/orgs_hooks_configuration.go b/vendor/github.com/google/go-github/v72/github/orgs_hooks_configuration.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/orgs_hooks_configuration.go rename to vendor/github.com/google/go-github/v72/github/orgs_hooks_configuration.go diff --git a/vendor/github.com/google/go-github/v57/github/orgs_hooks_deliveries.go b/vendor/github.com/google/go-github/v72/github/orgs_hooks_deliveries.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/orgs_hooks_deliveries.go rename to vendor/github.com/google/go-github/v72/github/orgs_hooks_deliveries.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_issue_types.go b/vendor/github.com/google/go-github/v72/github/orgs_issue_types.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_issue_types.go rename to vendor/github.com/google/go-github/v72/github/orgs_issue_types.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_members.go b/vendor/github.com/google/go-github/v72/github/orgs_members.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_members.go rename to vendor/github.com/google/go-github/v72/github/orgs_members.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_network_configurations.go b/vendor/github.com/google/go-github/v72/github/orgs_network_configurations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_network_configurations.go rename to vendor/github.com/google/go-github/v72/github/orgs_network_configurations.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_organization_roles.go b/vendor/github.com/google/go-github/v72/github/orgs_organization_roles.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_organization_roles.go rename to vendor/github.com/google/go-github/v72/github/orgs_organization_roles.go diff --git a/vendor/github.com/google/go-github/v57/github/orgs_outside_collaborators.go b/vendor/github.com/google/go-github/v72/github/orgs_outside_collaborators.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/orgs_outside_collaborators.go rename to vendor/github.com/google/go-github/v72/github/orgs_outside_collaborators.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_packages.go b/vendor/github.com/google/go-github/v72/github/orgs_packages.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_packages.go rename to vendor/github.com/google/go-github/v72/github/orgs_packages.go diff --git 
a/vendor/github.com/google/go-github/v71/github/orgs_personal_access_tokens.go b/vendor/github.com/google/go-github/v72/github/orgs_personal_access_tokens.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_personal_access_tokens.go rename to vendor/github.com/google/go-github/v72/github/orgs_personal_access_tokens.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_properties.go b/vendor/github.com/google/go-github/v72/github/orgs_properties.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_properties.go rename to vendor/github.com/google/go-github/v72/github/orgs_properties.go diff --git a/vendor/github.com/google/go-github/v71/github/orgs_rules.go b/vendor/github.com/google/go-github/v72/github/orgs_rules.go similarity index 96% rename from vendor/github.com/google/go-github/v71/github/orgs_rules.go rename to vendor/github.com/google/go-github/v72/github/orgs_rules.go index 8cb2e5d1..357eb8ce 100644 --- a/vendor/github.com/google/go-github/v71/github/orgs_rules.go +++ b/vendor/github.com/google/go-github/v72/github/orgs_rules.go @@ -15,9 +15,14 @@ import ( // GitHub API docs: https://docs.github.com/rest/orgs/rules#get-all-organization-repository-rulesets // //meta:operation GET /orgs/{org}/rulesets -func (s *OrganizationsService) GetAllRepositoryRulesets(ctx context.Context, org string) ([]*RepositoryRuleset, *Response, error) { +func (s *OrganizationsService) GetAllRepositoryRulesets(ctx context.Context, org string, opts *ListOptions) ([]*RepositoryRuleset, *Response, error) { u := fmt.Sprintf("orgs/%v/rulesets", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err diff --git a/vendor/github.com/google/go-github/v71/github/orgs_security_managers.go b/vendor/github.com/google/go-github/v72/github/orgs_security_managers.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/orgs_security_managers.go rename to vendor/github.com/google/go-github/v72/github/orgs_security_managers.go diff --git a/vendor/github.com/google/go-github/v57/github/orgs_users_blocking.go b/vendor/github.com/google/go-github/v72/github/orgs_users_blocking.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/orgs_users_blocking.go rename to vendor/github.com/google/go-github/v72/github/orgs_users_blocking.go diff --git a/vendor/github.com/google/go-github/v71/github/packages.go b/vendor/github.com/google/go-github/v72/github/packages.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/packages.go rename to vendor/github.com/google/go-github/v72/github/packages.go diff --git a/vendor/github.com/google/go-github/v71/github/pulls.go b/vendor/github.com/google/go-github/v72/github/pulls.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/pulls.go rename to vendor/github.com/google/go-github/v72/github/pulls.go diff --git a/vendor/github.com/google/go-github/v57/github/pulls_comments.go b/vendor/github.com/google/go-github/v72/github/pulls_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/pulls_comments.go rename to vendor/github.com/google/go-github/v72/github/pulls_comments.go diff --git a/vendor/github.com/google/go-github/v71/github/pulls_reviewers.go b/vendor/github.com/google/go-github/v72/github/pulls_reviewers.go similarity index 100% rename from 
vendor/github.com/google/go-github/v71/github/pulls_reviewers.go rename to vendor/github.com/google/go-github/v72/github/pulls_reviewers.go diff --git a/vendor/github.com/google/go-github/v57/github/pulls_reviews.go b/vendor/github.com/google/go-github/v72/github/pulls_reviews.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/pulls_reviews.go rename to vendor/github.com/google/go-github/v72/github/pulls_reviews.go diff --git a/vendor/github.com/google/go-github/v57/github/pulls_threads.go b/vendor/github.com/google/go-github/v72/github/pulls_threads.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/pulls_threads.go rename to vendor/github.com/google/go-github/v72/github/pulls_threads.go diff --git a/vendor/github.com/google/go-github/v71/github/rate_limit.go b/vendor/github.com/google/go-github/v72/github/rate_limit.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/rate_limit.go rename to vendor/github.com/google/go-github/v72/github/rate_limit.go diff --git a/vendor/github.com/google/go-github/v71/github/reactions.go b/vendor/github.com/google/go-github/v72/github/reactions.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/reactions.go rename to vendor/github.com/google/go-github/v72/github/reactions.go diff --git a/vendor/github.com/google/go-github/v71/github/repos.go b/vendor/github.com/google/go-github/v72/github/repos.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos.go rename to vendor/github.com/google/go-github/v72/github/repos.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_actions_access.go b/vendor/github.com/google/go-github/v72/github/repos_actions_access.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_actions_access.go rename to vendor/github.com/google/go-github/v72/github/repos_actions_access.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_actions_allowed.go b/vendor/github.com/google/go-github/v72/github/repos_actions_allowed.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_actions_allowed.go rename to vendor/github.com/google/go-github/v72/github/repos_actions_allowed.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_actions_permissions.go b/vendor/github.com/google/go-github/v72/github/repos_actions_permissions.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_actions_permissions.go rename to vendor/github.com/google/go-github/v72/github/repos_actions_permissions.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_attestations.go b/vendor/github.com/google/go-github/v72/github/repos_attestations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_attestations.go rename to vendor/github.com/google/go-github/v72/github/repos_attestations.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_autolinks.go b/vendor/github.com/google/go-github/v72/github/repos_autolinks.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_autolinks.go rename to vendor/github.com/google/go-github/v72/github/repos_autolinks.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_codeowners.go b/vendor/github.com/google/go-github/v72/github/repos_codeowners.go similarity index 100% rename from 
vendor/github.com/google/go-github/v57/github/repos_codeowners.go rename to vendor/github.com/google/go-github/v72/github/repos_codeowners.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_collaborators.go b/vendor/github.com/google/go-github/v72/github/repos_collaborators.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_collaborators.go rename to vendor/github.com/google/go-github/v72/github/repos_collaborators.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_comments.go b/vendor/github.com/google/go-github/v72/github/repos_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_comments.go rename to vendor/github.com/google/go-github/v72/github/repos_comments.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_commits.go b/vendor/github.com/google/go-github/v72/github/repos_commits.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_commits.go rename to vendor/github.com/google/go-github/v72/github/repos_commits.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_community_health.go b/vendor/github.com/google/go-github/v72/github/repos_community_health.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_community_health.go rename to vendor/github.com/google/go-github/v72/github/repos_community_health.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_contents.go b/vendor/github.com/google/go-github/v72/github/repos_contents.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_contents.go rename to vendor/github.com/google/go-github/v72/github/repos_contents.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_deployment_branch_policies.go b/vendor/github.com/google/go-github/v72/github/repos_deployment_branch_policies.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_deployment_branch_policies.go rename to vendor/github.com/google/go-github/v72/github/repos_deployment_branch_policies.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_deployment_protection_rules.go b/vendor/github.com/google/go-github/v72/github/repos_deployment_protection_rules.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_deployment_protection_rules.go rename to vendor/github.com/google/go-github/v72/github/repos_deployment_protection_rules.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_deployments.go b/vendor/github.com/google/go-github/v72/github/repos_deployments.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_deployments.go rename to vendor/github.com/google/go-github/v72/github/repos_deployments.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_environments.go b/vendor/github.com/google/go-github/v72/github/repos_environments.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_environments.go rename to vendor/github.com/google/go-github/v72/github/repos_environments.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_forks.go b/vendor/github.com/google/go-github/v72/github/repos_forks.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_forks.go rename to vendor/github.com/google/go-github/v72/github/repos_forks.go diff --git 
a/vendor/github.com/google/go-github/v71/github/repos_hooks.go b/vendor/github.com/google/go-github/v72/github/repos_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_hooks.go rename to vendor/github.com/google/go-github/v72/github/repos_hooks.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_hooks_configuration.go b/vendor/github.com/google/go-github/v72/github/repos_hooks_configuration.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_hooks_configuration.go rename to vendor/github.com/google/go-github/v72/github/repos_hooks_configuration.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_hooks_deliveries.go b/vendor/github.com/google/go-github/v72/github/repos_hooks_deliveries.go similarity index 89% rename from vendor/github.com/google/go-github/v71/github/repos_hooks_deliveries.go rename to vendor/github.com/google/go-github/v72/github/repos_hooks_deliveries.go index c8029f62..bcd4b336 100644 --- a/vendor/github.com/google/go-github/v71/github/repos_hooks_deliveries.go +++ b/vendor/github.com/google/go-github/v72/github/repos_hooks_deliveries.go @@ -9,6 +9,7 @@ import ( "context" "encoding/json" "fmt" + "strings" ) // HookDelivery represents the data that is received from GitHub's Webhook Delivery API @@ -39,6 +40,16 @@ func (d HookDelivery) String() string { return Stringify(d) } +// getHeader common function for GetHeader funcs of HookRequest & HookResponse. +func getHeader(headers map[string]string, key string) string { + for k, v := range headers { + if strings.EqualFold(k, key) { + return v + } + } + return "" +} + // HookRequest is a part of HookDelivery that contains // the HTTP headers and the JSON payload of the webhook request. type HookRequest struct { @@ -46,6 +57,11 @@ type HookRequest struct { RawPayload *json.RawMessage `json:"payload,omitempty"` } +// GetHeader gets the value associated with the given key (ignoring key case). +func (r *HookRequest) GetHeader(key string) string { + return getHeader(r.Headers, key) +} + func (r HookRequest) String() string { return Stringify(r) } @@ -57,6 +73,11 @@ type HookResponse struct { RawPayload *json.RawMessage `json:"payload,omitempty"` } +// GetHeader gets the value associated with the given key (ignoring key case). 
+func (r *HookResponse) GetHeader(key string) string { + return getHeader(r.Headers, key) +} + func (r HookResponse) String() string { return Stringify(r) } diff --git a/vendor/github.com/google/go-github/v71/github/repos_invitations.go b/vendor/github.com/google/go-github/v72/github/repos_invitations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_invitations.go rename to vendor/github.com/google/go-github/v72/github/repos_invitations.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_keys.go b/vendor/github.com/google/go-github/v72/github/repos_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_keys.go rename to vendor/github.com/google/go-github/v72/github/repos_keys.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_lfs.go b/vendor/github.com/google/go-github/v72/github/repos_lfs.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_lfs.go rename to vendor/github.com/google/go-github/v72/github/repos_lfs.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_merging.go b/vendor/github.com/google/go-github/v72/github/repos_merging.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_merging.go rename to vendor/github.com/google/go-github/v72/github/repos_merging.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_pages.go b/vendor/github.com/google/go-github/v72/github/repos_pages.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_pages.go rename to vendor/github.com/google/go-github/v72/github/repos_pages.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_prereceive_hooks.go b/vendor/github.com/google/go-github/v72/github/repos_prereceive_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_prereceive_hooks.go rename to vendor/github.com/google/go-github/v72/github/repos_prereceive_hooks.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_properties.go b/vendor/github.com/google/go-github/v72/github/repos_properties.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_properties.go rename to vendor/github.com/google/go-github/v72/github/repos_properties.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_releases.go b/vendor/github.com/google/go-github/v72/github/repos_releases.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_releases.go rename to vendor/github.com/google/go-github/v72/github/repos_releases.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_rules.go b/vendor/github.com/google/go-github/v72/github/repos_rules.go similarity index 88% rename from vendor/github.com/google/go-github/v71/github/repos_rules.go rename to vendor/github.com/google/go-github/v72/github/repos_rules.go index d38e35cd..038cefd7 100644 --- a/vendor/github.com/google/go-github/v71/github/repos_rules.go +++ b/vendor/github.com/google/go-github/v72/github/repos_rules.go @@ -38,9 +38,14 @@ type rulesetClearBypassActors struct { // GitHub API docs: https://docs.github.com/rest/repos/rules#get-rules-for-a-branch // //meta:operation GET /repos/{owner}/{repo}/rules/branches/{branch} -func (s *RepositoriesService) GetRulesForBranch(ctx context.Context, owner, repo, branch string) (*BranchRules, *Response, error) { +func (s *RepositoriesService) GetRulesForBranch(ctx 
context.Context, owner, repo, branch string, opts *ListOptions) (*BranchRules, *Response, error) { u := fmt.Sprintf("repos/%v/%v/rules/branches/%v", owner, repo, branch) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err @@ -55,14 +60,28 @@ func (s *RepositoriesService) GetRulesForBranch(ctx context.Context, owner, repo return rules, resp, nil } +// RepositoryListRulesetsOptions specifies optional parameters to the +// RepositoriesService.GetAllRulesets method. +type RepositoryListRulesetsOptions struct { + // IncludesParents indicates whether to include rulesets configured at the organization or enterprise level that apply to the repository. + IncludesParents *bool `url:"includes_parents,omitempty"` + ListOptions +} + // GetAllRulesets gets all the repository rulesets for the specified repository. -// If includesParents is true, rulesets configured at the organization or enterprise level that apply to the repository will be returned. +// By default, this endpoint will include rulesets configured at the organization or enterprise level that apply to the repository. +// To exclude those rulesets, set the `RepositoryListRulesetsOptions.IncludesParents` parameter to `false`. // // GitHub API docs: https://docs.github.com/rest/repos/rules#get-all-repository-rulesets // //meta:operation GET /repos/{owner}/{repo}/rulesets -func (s *RepositoriesService) GetAllRulesets(ctx context.Context, owner, repo string, includesParents bool) ([]*RepositoryRuleset, *Response, error) { - u := fmt.Sprintf("repos/%v/%v/rulesets?includes_parents=%v", owner, repo, includesParents) +func (s *RepositoriesService) GetAllRulesets(ctx context.Context, owner, repo string, opts *RepositoryListRulesetsOptions) ([]*RepositoryRuleset, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/rulesets", owner, repo) + + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } req, err := s.client.NewRequest("GET", u, nil) if err != nil { diff --git a/vendor/github.com/google/go-github/v71/github/repos_stats.go b/vendor/github.com/google/go-github/v72/github/repos_stats.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_stats.go rename to vendor/github.com/google/go-github/v72/github/repos_stats.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_statuses.go b/vendor/github.com/google/go-github/v72/github/repos_statuses.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_statuses.go rename to vendor/github.com/google/go-github/v72/github/repos_statuses.go diff --git a/vendor/github.com/google/go-github/v71/github/repos_tags.go b/vendor/github.com/google/go-github/v72/github/repos_tags.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/repos_tags.go rename to vendor/github.com/google/go-github/v72/github/repos_tags.go diff --git a/vendor/github.com/google/go-github/v57/github/repos_traffic.go b/vendor/github.com/google/go-github/v72/github/repos_traffic.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/repos_traffic.go rename to vendor/github.com/google/go-github/v72/github/repos_traffic.go diff --git a/vendor/github.com/google/go-github/v71/github/rules.go b/vendor/github.com/google/go-github/v72/github/rules.go similarity index 95% rename from vendor/github.com/google/go-github/v71/github/rules.go rename to 
vendor/github.com/google/go-github/v72/github/rules.go index 985f0aac..4def2d19 100644 --- a/vendor/github.com/google/go-github/v71/github/rules.go +++ b/vendor/github.com/google/go-github/v72/github/rules.go @@ -99,14 +99,34 @@ const ( MergeGroupingStrategyHeadGreen MergeGroupingStrategy = "HEADGREEN" ) -// MergeMethod models a GitHub merge method. -type MergeMethod string +// PullRequestMergeMethod is used in PullRequestRuleParameters, +// where the GitHub API expects lowercase merge method values: "merge", "rebase", "squash". +// +// NOTE: GitHub's API inconsistently uses different casing for the same logical values +// across different rules. +// +// TODO: Unify with MergeQueueMergeMethod once the GitHub API uses consistent casing. +type PullRequestMergeMethod string -// This is the set of GitHub merge methods. const ( - MergeMethodMerge MergeMethod = "merge" - MergeMethodRebase MergeMethod = "rebase" - MergeMethodSquash MergeMethod = "squash" + PullRequestMergeMethodMerge PullRequestMergeMethod = "merge" + PullRequestMergeMethodRebase PullRequestMergeMethod = "rebase" + PullRequestMergeMethodSquash PullRequestMergeMethod = "squash" +) + +// MergeQueueMergeMethod is used in MergeQueueRuleParameters, +// where the GitHub API expects uppercase merge method values: "MERGE", "REBASE", "SQUASH". +// +// NOTE: This type exists alongside PullRequestMergeMethod solely due to API casing inconsistencies. +// It enforces the correct usage by API context. +// +// TODO: Unify with PullRequestMergeMethod once the GitHub API uses consistent casing. +type MergeQueueMergeMethod string + +const ( + MergeQueueMergeMethodMerge MergeQueueMergeMethod = "MERGE" + MergeQueueMergeMethodRebase MergeQueueMergeMethod = "REBASE" + MergeQueueMergeMethodSquash MergeQueueMergeMethod = "SQUASH" ) // PatternRuleOperator models a GitHub pattern rule operator. @@ -383,7 +403,7 @@ type MergeQueueRuleParameters struct { GroupingStrategy MergeGroupingStrategy `json:"grouping_strategy"` MaxEntriesToBuild int `json:"max_entries_to_build"` MaxEntriesToMerge int `json:"max_entries_to_merge"` - MergeMethod MergeMethod `json:"merge_method"` + MergeMethod MergeQueueMergeMethod `json:"merge_method"` MinEntriesToMerge int `json:"min_entries_to_merge"` MinEntriesToMergeWaitMinutes int `json:"min_entries_to_merge_wait_minutes"` } @@ -395,13 +415,13 @@ type RequiredDeploymentsRuleParameters struct { // PullRequestRuleParameters represents the pull_request rule parameters. 
type PullRequestRuleParameters struct { - AllowedMergeMethods []MergeMethod `json:"allowed_merge_methods"` - AutomaticCopilotCodeReviewEnabled *bool `json:"automatic_copilot_code_review_enabled,omitempty"` - DismissStaleReviewsOnPush bool `json:"dismiss_stale_reviews_on_push"` - RequireCodeOwnerReview bool `json:"require_code_owner_review"` - RequireLastPushApproval bool `json:"require_last_push_approval"` - RequiredApprovingReviewCount int `json:"required_approving_review_count"` - RequiredReviewThreadResolution bool `json:"required_review_thread_resolution"` + AllowedMergeMethods []PullRequestMergeMethod `json:"allowed_merge_methods"` + AutomaticCopilotCodeReviewEnabled *bool `json:"automatic_copilot_code_review_enabled,omitempty"` + DismissStaleReviewsOnPush bool `json:"dismiss_stale_reviews_on_push"` + RequireCodeOwnerReview bool `json:"require_code_owner_review"` + RequireLastPushApproval bool `json:"require_last_push_approval"` + RequiredApprovingReviewCount int `json:"required_approving_review_count"` + RequiredReviewThreadResolution bool `json:"required_review_thread_resolution"` } // RequiredStatusChecksRuleParameters represents the required status checks rule parameters. diff --git a/vendor/github.com/google/go-github/v71/github/scim.go b/vendor/github.com/google/go-github/v72/github/scim.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/scim.go rename to vendor/github.com/google/go-github/v72/github/scim.go diff --git a/vendor/github.com/google/go-github/v71/github/search.go b/vendor/github.com/google/go-github/v72/github/search.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/search.go rename to vendor/github.com/google/go-github/v72/github/search.go diff --git a/vendor/github.com/google/go-github/v71/github/secret_scanning.go b/vendor/github.com/google/go-github/v72/github/secret_scanning.go similarity index 78% rename from vendor/github.com/google/go-github/v71/github/secret_scanning.go rename to vendor/github.com/google/go-github/v72/github/secret_scanning.go index 4eeeba3d..ad2312d0 100644 --- a/vendor/github.com/google/go-github/v71/github/secret_scanning.go +++ b/vendor/github.com/google/go-github/v72/github/secret_scanning.go @@ -16,24 +16,32 @@ type SecretScanningService service // SecretScanningAlert represents a GitHub secret scanning alert. 
type SecretScanningAlert struct { - Number *int `json:"number,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - LocationsURL *string `json:"locations_url,omitempty"` - State *string `json:"state,omitempty"` - Resolution *string `json:"resolution,omitempty"` - ResolvedAt *Timestamp `json:"resolved_at,omitempty"` - ResolvedBy *User `json:"resolved_by,omitempty"` - SecretType *string `json:"secret_type,omitempty"` - SecretTypeDisplayName *string `json:"secret_type_display_name,omitempty"` - Secret *string `json:"secret,omitempty"` - Repository *Repository `json:"repository,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - PushProtectionBypassed *bool `json:"push_protection_bypassed,omitempty"` - PushProtectionBypassedBy *User `json:"push_protection_bypassed_by,omitempty"` - PushProtectionBypassedAt *Timestamp `json:"push_protection_bypassed_at,omitempty"` - ResolutionComment *string `json:"resolution_comment,omitempty"` + Number *int `json:"number,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + LocationsURL *string `json:"locations_url,omitempty"` + State *string `json:"state,omitempty"` + Resolution *string `json:"resolution,omitempty"` + ResolvedAt *Timestamp `json:"resolved_at,omitempty"` + ResolvedBy *User `json:"resolved_by,omitempty"` + SecretType *string `json:"secret_type,omitempty"` + SecretTypeDisplayName *string `json:"secret_type_display_name,omitempty"` + Secret *string `json:"secret,omitempty"` + Repository *Repository `json:"repository,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + IsBase64Encoded *bool `json:"is_base64_encoded,omitempty"` + MultiRepo *bool `json:"multi_repo,omitempty"` + PubliclyLeaked *bool `json:"publicly_leaked,omitempty"` + PushProtectionBypassed *bool `json:"push_protection_bypassed,omitempty"` + PushProtectionBypassedBy *User `json:"push_protection_bypassed_by,omitempty"` + PushProtectionBypassedAt *Timestamp `json:"push_protection_bypassed_at,omitempty"` + ResolutionComment *string `json:"resolution_comment,omitempty"` + PushProtectionBypassRequestComment *string `json:"push_protection_bypass_request_comment,omitempty"` + PushProtectionBypassRequestHTMLURL *string `json:"push_protection_bypass_request_html_url,omitempty"` + PushProtectionBypassRequestReviewer *User `json:"push_protection_bypass_request_reviewer,omitempty"` + PushProtectionBypassRequestReviewerComment *string `json:"push_protection_bypass_request_reviewer_comment,omitempty"` + Validity *string `json:"validity,omitempty"` } // SecretScanningAlertLocation represents the location for a secret scanning alert. @@ -72,6 +80,12 @@ type SecretScanningAlertListOptions struct { // Valid options are active, inactive, and unknown. Validity string `url:"validity,omitempty"` + // A boolean value representing whether or not to filter alerts by the publicly-leaked tag being present. Default: false. + IsPubliclyLeaked bool `url:"is_publicly_leaked,omitempty"` + + // A boolean value representing whether or not to filter alerts by the multi-repo tag being present. Default: false. + IsMultiRepo bool `url:"is_multi_repo,omitempty"` + // The direction to sort the results by. Possible values are: asc, desc. Default: desc. 
Direction string `url:"direction,omitempty"` diff --git a/vendor/github.com/google/go-github/v71/github/security_advisories.go b/vendor/github.com/google/go-github/v72/github/security_advisories.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/security_advisories.go rename to vendor/github.com/google/go-github/v72/github/security_advisories.go diff --git a/vendor/github.com/google/go-github/v71/github/strings.go b/vendor/github.com/google/go-github/v72/github/strings.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/strings.go rename to vendor/github.com/google/go-github/v72/github/strings.go diff --git a/vendor/github.com/google/go-github/v71/github/teams.go b/vendor/github.com/google/go-github/v72/github/teams.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/teams.go rename to vendor/github.com/google/go-github/v72/github/teams.go diff --git a/vendor/github.com/google/go-github/v71/github/teams_discussion_comments.go b/vendor/github.com/google/go-github/v72/github/teams_discussion_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/teams_discussion_comments.go rename to vendor/github.com/google/go-github/v72/github/teams_discussion_comments.go diff --git a/vendor/github.com/google/go-github/v71/github/teams_discussions.go b/vendor/github.com/google/go-github/v72/github/teams_discussions.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/teams_discussions.go rename to vendor/github.com/google/go-github/v72/github/teams_discussions.go diff --git a/vendor/github.com/google/go-github/v57/github/teams_members.go b/vendor/github.com/google/go-github/v72/github/teams_members.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/teams_members.go rename to vendor/github.com/google/go-github/v72/github/teams_members.go diff --git a/vendor/github.com/google/go-github/v71/github/timestamp.go b/vendor/github.com/google/go-github/v72/github/timestamp.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/timestamp.go rename to vendor/github.com/google/go-github/v72/github/timestamp.go diff --git a/vendor/github.com/google/go-github/v71/github/users.go b/vendor/github.com/google/go-github/v72/github/users.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users.go rename to vendor/github.com/google/go-github/v72/github/users.go diff --git a/vendor/github.com/google/go-github/v71/github/users_administration.go b/vendor/github.com/google/go-github/v72/github/users_administration.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_administration.go rename to vendor/github.com/google/go-github/v72/github/users_administration.go diff --git a/vendor/github.com/google/go-github/v71/github/users_attestations.go b/vendor/github.com/google/go-github/v72/github/users_attestations.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_attestations.go rename to vendor/github.com/google/go-github/v72/github/users_attestations.go diff --git a/vendor/github.com/google/go-github/v57/github/users_blocking.go b/vendor/github.com/google/go-github/v72/github/users_blocking.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/users_blocking.go rename to vendor/github.com/google/go-github/v72/github/users_blocking.go diff --git 
a/vendor/github.com/google/go-github/v71/github/users_emails.go b/vendor/github.com/google/go-github/v72/github/users_emails.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_emails.go rename to vendor/github.com/google/go-github/v72/github/users_emails.go diff --git a/vendor/github.com/google/go-github/v57/github/users_followers.go b/vendor/github.com/google/go-github/v72/github/users_followers.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/users_followers.go rename to vendor/github.com/google/go-github/v72/github/users_followers.go diff --git a/vendor/github.com/google/go-github/v71/github/users_gpg_keys.go b/vendor/github.com/google/go-github/v72/github/users_gpg_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_gpg_keys.go rename to vendor/github.com/google/go-github/v72/github/users_gpg_keys.go diff --git a/vendor/github.com/google/go-github/v57/github/users_keys.go b/vendor/github.com/google/go-github/v72/github/users_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/users_keys.go rename to vendor/github.com/google/go-github/v72/github/users_keys.go diff --git a/vendor/github.com/google/go-github/v71/github/users_packages.go b/vendor/github.com/google/go-github/v72/github/users_packages.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/users_packages.go rename to vendor/github.com/google/go-github/v72/github/users_packages.go diff --git a/vendor/github.com/google/go-github/v57/github/users_ssh_signing_keys.go b/vendor/github.com/google/go-github/v72/github/users_ssh_signing_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v57/github/users_ssh_signing_keys.go rename to vendor/github.com/google/go-github/v72/github/users_ssh_signing_keys.go diff --git a/vendor/github.com/google/go-github/v71/github/with_appengine.go b/vendor/github.com/google/go-github/v72/github/with_appengine.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/with_appengine.go rename to vendor/github.com/google/go-github/v72/github/with_appengine.go diff --git a/vendor/github.com/google/go-github/v71/github/without_appengine.go b/vendor/github.com/google/go-github/v72/github/without_appengine.go similarity index 100% rename from vendor/github.com/google/go-github/v71/github/without_appengine.go rename to vendor/github.com/google/go-github/v72/github/without_appengine.go diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go index 57781ff9..86a29357 100644 --- a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go +++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go @@ -468,12 +468,13 @@ func (ejvw *extJSONValueWriter) WriteRegex(pattern string, options string) error return err } + options = sortStringAlphebeticAscending(options) var buf bytes.Buffer buf.WriteString(`{"$regularExpression":{"pattern":`) writeStringWithEscapes(pattern, &buf, ejvw.escapeHTML) - buf.WriteString(`,"options":"`) - buf.WriteString(sortStringAlphebeticAscending(options)) - buf.WriteString(`"}},`) + buf.WriteString(`,"options":`) + writeStringWithEscapes(options, &buf, ejvw.escapeHTML) + buf.WriteString(`}},`) ejvw.buf = append(ejvw.buf, buf.Bytes()...) 
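A minimal usage sketch of the go-github v72 signature changes vendored above, for callers that need to adapt: GetAllRulesets now takes a *RepositoryListRulesetsOptions (parent org/enterprise rulesets are included unless IncludesParents is set to false), GetRulesForBranch and GetAllRepositoryRulesets gained a *ListOptions parameter, and the webhook parser now recognizes the "registry_package" event. The owner/repo values, environment variable names, helper names and HTTP wiring below are illustrative assumptions, not code from this patch.

package main

import (
	"context"
	"fmt"
	"log"
	"net/http"
	"os"

	"github.com/google/go-github/v72/github"
)

// listRulesets sketches the new options structs accepted by the rulesets
// endpoints in go-github v72. Owner/repo are placeholder values.
func listRulesets(ctx context.Context, client *github.Client, owner, repo string) error {
	// GetAllRulesets now includes org/enterprise level rulesets by default;
	// set IncludesParents to false to restore the behaviour of the old bool flag.
	opts := &github.RepositoryListRulesetsOptions{
		IncludesParents: github.Bool(false),
		ListOptions:     github.ListOptions{PerPage: 50},
	}
	rulesets, _, err := client.Repositories.GetAllRulesets(ctx, owner, repo, opts)
	if err != nil {
		return err
	}
	fmt.Printf("found %d repository rulesets\n", len(rulesets))

	// GetRulesForBranch gained a *ListOptions parameter; nil keeps the previous behaviour.
	_, _, err = client.Repositories.GetRulesForBranch(ctx, owner, repo, "main", nil)
	return err
}

// handleWebhook sketches consumption of the newly registered "registry_package" event.
func handleWebhook(w http.ResponseWriter, r *http.Request) {
	payload, err := github.ValidatePayload(r, []byte(os.Getenv("WEBHOOK_SECRET")))
	if err != nil {
		http.Error(w, "invalid signature", http.StatusForbidden)
		return
	}
	event, err := github.ParseWebHook(github.WebHookType(r), payload)
	if err != nil {
		http.Error(w, "unknown event type", http.StatusBadRequest)
		return
	}
	if pkg, ok := event.(*github.RegistryPackageEvent); ok {
		fmt.Println("registry_package action:", pkg.GetAction())
	}
	w.WriteHeader(http.StatusOK)
}

func main() {
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	if err := listRulesets(context.Background(), client, "cloudbase", "garm"); err != nil {
		log.Fatal(err)
	}
	http.HandleFunc("/webhook", handleWebhook)
	log.Fatal(http.ListenAndServe(":8080", nil))
}

Note also that the former MergeMethod type is split in v72: PullRequestRuleParameters.AllowedMergeMethods now takes PullRequestMergeMethod values (lowercase "merge", "rebase", "squash"), while MergeQueueRuleParameters.MergeMethod takes MergeQueueMergeMethod values (uppercase "MERGE", "REBASE", "SQUASH"), so code referencing the old MergeMethodMerge constants must switch to the corresponding renamed ones.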
diff --git a/vendor/golang.org/x/crypto/bcrypt/bcrypt.go b/vendor/golang.org/x/crypto/bcrypt/bcrypt.go index dc931187..3e7f8df8 100644 --- a/vendor/golang.org/x/crypto/bcrypt/bcrypt.go +++ b/vendor/golang.org/x/crypto/bcrypt/bcrypt.go @@ -50,7 +50,7 @@ func (ih InvalidHashPrefixError) Error() string { type InvalidCostError int func (ic InvalidCostError) Error() string { - return fmt.Sprintf("crypto/bcrypt: cost %d is outside allowed range (%d,%d)", int(ic), MinCost, MaxCost) + return fmt.Sprintf("crypto/bcrypt: cost %d is outside allowed inclusive range %d..%d", int(ic), MinCost, MaxCost) } const ( diff --git a/vendor/golang.org/x/sync/errgroup/errgroup.go b/vendor/golang.org/x/sync/errgroup/errgroup.go index cfafed5b..cb6bb9ad 100644 --- a/vendor/golang.org/x/sync/errgroup/errgroup.go +++ b/vendor/golang.org/x/sync/errgroup/errgroup.go @@ -76,10 +76,8 @@ func (g *Group) Wait() error { } // Go calls the given function in a new goroutine. -// The first call to Go must happen before a Wait. -// It blocks until the new goroutine can be added without the number of -// active goroutines in the group exceeding the configured limit. // +// The first call to Go must happen before a Wait. // It blocks until the new goroutine can be added without the number of // goroutines in the group exceeding the configured limit. // @@ -185,8 +183,9 @@ type PanicError struct { } func (p PanicError) Error() string { - // A Go Error method conventionally does not include a stack dump, so omit it - // here. (Callers who care can extract it from the Stack field.) + if len(p.Stack) > 0 { + return fmt.Sprintf("recovered from errgroup.Group: %v\n%s", p.Recovered, p.Stack) + } return fmt.Sprintf("recovered from errgroup.Group: %v", p.Recovered) } diff --git a/vendor/gorm.io/driver/mysql/migrator.go b/vendor/gorm.io/driver/mysql/migrator.go index e266cfdc..018368d1 100644 --- a/vendor/gorm.io/driver/mysql/migrator.go +++ b/vendor/gorm.io/driver/mysql/migrator.go @@ -109,7 +109,7 @@ func (m Migrator) MigrateColumnUnique(value interface{}, field *schema.Field, co return err } } - if field.UniqueIndex != "" { + if field.UniqueIndex != "" && !queryTx.Migrator().HasIndex(value, field.UniqueIndex) { if err := execTx.Migrator().CreateIndex(value, field.UniqueIndex); err != nil { return err } diff --git a/vendor/gorm.io/driver/mysql/mysql.go b/vendor/gorm.io/driver/mysql/mysql.go index 8b6ef3db..d203577f 100644 --- a/vendor/gorm.io/driver/mysql/mysql.go +++ b/vendor/gorm.io/driver/mysql/mysql.go @@ -185,7 +185,9 @@ func (dialector Dialector) Initialize(db *gorm.DB) (err error) { callbacks.RegisterDefaultCallbacks(db, callbackConfig) for k, v := range dialector.ClauseBuilders() { - db.ClauseBuilders[k] = v + if _, ok := db.ClauseBuilders[k]; !ok { + db.ClauseBuilders[k] = v + } } return } diff --git a/vendor/gorm.io/driver/sqlite/ddlmod.go b/vendor/gorm.io/driver/sqlite/ddlmod.go index c839cd79..e7e58597 100644 --- a/vendor/gorm.io/driver/sqlite/ddlmod.go +++ b/vendor/gorm.io/driver/sqlite/ddlmod.go @@ -209,8 +209,12 @@ func (d *ddl) renameTable(dst, src string) error { return nil } +func compileConstraintRegexp(name string) *regexp.Regexp { + return regexp.MustCompile("^(?i:CONSTRAINT)\\s+[\"`]?" + regexp.QuoteMeta(name) + "[\"`\\s]") +} + func (d *ddl) addConstraint(name string, sql string) { - reg := regexp.MustCompile("^CONSTRAINT [\"`]?" 
+ regexp.QuoteMeta(name) + "[\"` ]") + reg := compileConstraintRegexp(name) for i := 0; i < len(d.fields); i++ { if reg.MatchString(d.fields[i]) { @@ -223,7 +227,7 @@ func (d *ddl) addConstraint(name string, sql string) { } func (d *ddl) removeConstraint(name string) bool { - reg := regexp.MustCompile("^CONSTRAINT [\"`]?" + regexp.QuoteMeta(name) + "[\"` ]") + reg := compileConstraintRegexp(name) for i := 0; i < len(d.fields); i++ { if reg.MatchString(d.fields[i]) { @@ -235,7 +239,7 @@ func (d *ddl) removeConstraint(name string) bool { } func (d *ddl) hasConstraint(name string) bool { - reg := regexp.MustCompile("^CONSTRAINT [\"`]?" + regexp.QuoteMeta(name) + "[\"` ]") + reg := compileConstraintRegexp(name) for _, f := range d.fields { if reg.MatchString(f) { diff --git a/vendor/gorm.io/driver/sqlite/sqlite.go b/vendor/gorm.io/driver/sqlite/sqlite.go index 0543d0b1..418842aa 100644 --- a/vendor/gorm.io/driver/sqlite/sqlite.go +++ b/vendor/gorm.io/driver/sqlite/sqlite.go @@ -76,7 +76,9 @@ func (dialector Dialector) Initialize(db *gorm.DB) (err error) { } for k, v := range dialector.ClauseBuilders() { - db.ClauseBuilders[k] = v + if _, ok := db.ClauseBuilders[k]; !ok { + db.ClauseBuilders[k] = v + } } return } diff --git a/vendor/gorm.io/gorm/callbacks/create.go b/vendor/gorm.io/gorm/callbacks/create.go index 8b7846b6..d8701f51 100644 --- a/vendor/gorm.io/gorm/callbacks/create.go +++ b/vendor/gorm.io/gorm/callbacks/create.go @@ -89,6 +89,10 @@ func Create(config *Config) func(db *gorm.DB) { db.AddError(rows.Close()) }() gorm.Scan(rows, db, mode) + + if db.Statement.Result != nil { + db.Statement.Result.RowsAffected = db.RowsAffected + } } return @@ -103,6 +107,12 @@ func Create(config *Config) func(db *gorm.DB) { } db.RowsAffected, _ = result.RowsAffected() + + if db.Statement.Result != nil { + db.Statement.Result.Result = result + db.Statement.Result.RowsAffected = db.RowsAffected + } + if db.RowsAffected == 0 { return } diff --git a/vendor/gorm.io/gorm/callbacks/delete.go b/vendor/gorm.io/gorm/callbacks/delete.go index 84f446a3..07ed6fee 100644 --- a/vendor/gorm.io/gorm/callbacks/delete.go +++ b/vendor/gorm.io/gorm/callbacks/delete.go @@ -157,8 +157,14 @@ func Delete(config *Config) func(db *gorm.DB) { ok, mode := hasReturning(db, supportReturning) if !ok { result, err := db.Statement.ConnPool.ExecContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...) 
+ if db.AddError(err) == nil { db.RowsAffected, _ = result.RowsAffected() + + if db.Statement.Result != nil { + db.Statement.Result.Result = result + db.Statement.Result.RowsAffected = db.RowsAffected + } } return @@ -166,6 +172,10 @@ func Delete(config *Config) func(db *gorm.DB) { if rows, err := db.Statement.ConnPool.QueryContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...); db.AddError(err) == nil { gorm.Scan(rows, db, mode) + + if db.Statement.Result != nil { + db.Statement.Result.RowsAffected = db.RowsAffected + } db.AddError(rows.Close()) } } diff --git a/vendor/gorm.io/gorm/callbacks/preload.go b/vendor/gorm.io/gorm/callbacks/preload.go index fd8214bb..225cda28 100644 --- a/vendor/gorm.io/gorm/callbacks/preload.go +++ b/vendor/gorm.io/gorm/callbacks/preload.go @@ -103,11 +103,11 @@ func preloadEntryPoint(db *gorm.DB, joins []string, relationships *schema.Relati joined = true continue } - joinNames := strings.SplitN(join, ".", 2) - if len(joinNames) == 2 { - if _, ok := relationships.Relations[joinNames[0]]; ok && name == joinNames[0] { + join0, join1, cut := strings.Cut(join, ".") + if cut { + if _, ok := relationships.Relations[join0]; ok && name == join0 { joined = true - nestedJoins = append(nestedJoins, joinNames[1]) + nestedJoins = append(nestedJoins, join1) } } } @@ -275,6 +275,8 @@ func preload(tx *gorm.DB, rel *schema.Relationship, conds []interface{}, preload column, values := schema.ToQueryValues(clause.CurrentTable, relForeignKeys, foreignValues) if len(values) != 0 { + tx = tx.Model(reflectResults.Addr().Interface()).Where(clause.IN{Column: column, Values: values}) + for _, cond := range conds { if fc, ok := cond.(func(*gorm.DB) *gorm.DB); ok { tx = fc(tx) @@ -283,7 +285,11 @@ func preload(tx *gorm.DB, rel *schema.Relationship, conds []interface{}, preload } } - if err := tx.Where(clause.IN{Column: column, Values: values}).Find(reflectResults.Addr().Interface(), inlineConds...).Error; err != nil { + if len(inlineConds) > 0 { + tx = tx.Where(inlineConds[0], inlineConds[1:]...) 
+ } + + if err := tx.Find(reflectResults.Addr().Interface()).Error; err != nil { return err } } diff --git a/vendor/gorm.io/gorm/callbacks/query.go b/vendor/gorm.io/gorm/callbacks/query.go index bbf238a9..548bf709 100644 --- a/vendor/gorm.io/gorm/callbacks/query.go +++ b/vendor/gorm.io/gorm/callbacks/query.go @@ -25,6 +25,10 @@ func Query(db *gorm.DB) { db.AddError(rows.Close()) }() gorm.Scan(rows, db, 0) + + if db.Statement.Result != nil { + db.Statement.Result.RowsAffected = db.RowsAffected + } } } } @@ -110,7 +114,7 @@ func BuildQuerySQL(db *gorm.DB) { } } - specifiedRelationsName := make(map[string]interface{}) + specifiedRelationsName := map[string]string{clause.CurrentTable: clause.CurrentTable} for _, join := range db.Statement.Joins { if db.Statement.Schema != nil { var isRelations bool // is relations or raw sql @@ -124,12 +128,12 @@ func BuildQuerySQL(db *gorm.DB) { nestedJoinNames := strings.Split(join.Name, ".") if len(nestedJoinNames) > 1 { isNestedJoin := true - gussNestedRelations := make([]*schema.Relationship, 0, len(nestedJoinNames)) + guessNestedRelations := make([]*schema.Relationship, 0, len(nestedJoinNames)) currentRelations := db.Statement.Schema.Relationships.Relations for _, relname := range nestedJoinNames { // incomplete match, only treated as raw sql if relation, ok = currentRelations[relname]; ok { - gussNestedRelations = append(gussNestedRelations, relation) + guessNestedRelations = append(guessNestedRelations, relation) currentRelations = relation.FieldSchema.Relationships.Relations } else { isNestedJoin = false @@ -139,18 +143,13 @@ func BuildQuerySQL(db *gorm.DB) { if isNestedJoin { isRelations = true - relations = gussNestedRelations + relations = guessNestedRelations } } } if isRelations { - genJoinClause := func(joinType clause.JoinType, parentTableName string, relation *schema.Relationship) clause.Join { - tableAliasName := relation.Name - if parentTableName != clause.CurrentTable { - tableAliasName = utils.NestedRelationName(parentTableName, tableAliasName) - } - + genJoinClause := func(joinType clause.JoinType, tableAliasName string, parentTableName string, relation *schema.Relationship) clause.Join { columnStmt := gorm.Statement{ Table: tableAliasName, DB: db, Schema: relation.FieldSchema, Selects: join.Selects, Omits: join.Omits, @@ -167,6 +166,13 @@ func BuildQuerySQL(db *gorm.DB) { } } + if join.Expression != nil { + return clause.Join{ + Type: join.JoinType, + Expression: join.Expression, + } + } + exprs := make([]clause.Expression, len(relation.References)) for idx, ref := range relation.References { if ref.OwnPrimaryKey { @@ -226,19 +232,24 @@ func BuildQuerySQL(db *gorm.DB) { } parentTableName := clause.CurrentTable - for _, rel := range relations { + for idx, rel := range relations { // joins table alias like "Manager, Company, Manager__Company" - nestedAlias := utils.NestedRelationName(parentTableName, rel.Name) - if _, ok := specifiedRelationsName[nestedAlias]; !ok { - fromClause.Joins = append(fromClause.Joins, genJoinClause(join.JoinType, parentTableName, rel)) - specifiedRelationsName[nestedAlias] = nil + curAliasName := rel.Name + if parentTableName != clause.CurrentTable { + curAliasName = utils.NestedRelationName(parentTableName, curAliasName) } - if parentTableName != clause.CurrentTable { - parentTableName = utils.NestedRelationName(parentTableName, rel.Name) - } else { - parentTableName = rel.Name + if _, ok := specifiedRelationsName[curAliasName]; !ok { + aliasName := curAliasName + if idx == len(relations)-1 && join.Alias != "" { 
+ aliasName = join.Alias + } + + fromClause.Joins = append(fromClause.Joins, genJoinClause(join.JoinType, aliasName, specifiedRelationsName[parentTableName], rel)) + specifiedRelationsName[curAliasName] = aliasName } + + parentTableName = curAliasName } } else { fromClause.Joins = append(fromClause.Joins, clause.Join{ diff --git a/vendor/gorm.io/gorm/callbacks/raw.go b/vendor/gorm.io/gorm/callbacks/raw.go index 013e638c..3bb647c4 100644 --- a/vendor/gorm.io/gorm/callbacks/raw.go +++ b/vendor/gorm.io/gorm/callbacks/raw.go @@ -13,5 +13,10 @@ func RawExec(db *gorm.DB) { } db.RowsAffected, _ = result.RowsAffected() + + if db.Statement.Result != nil { + db.Statement.Result.Result = result + db.Statement.Result.RowsAffected = db.RowsAffected + } } } diff --git a/vendor/gorm.io/gorm/callbacks/update.go b/vendor/gorm.io/gorm/callbacks/update.go index 7cde7f61..8e2782e1 100644 --- a/vendor/gorm.io/gorm/callbacks/update.go +++ b/vendor/gorm.io/gorm/callbacks/update.go @@ -92,6 +92,10 @@ func Update(config *Config) func(db *gorm.DB) { gorm.Scan(rows, db, mode) db.Statement.Dest = dest db.AddError(rows.Close()) + + if db.Statement.Result != nil { + db.Statement.Result.RowsAffected = db.RowsAffected + } } } else { result, err := db.Statement.ConnPool.ExecContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...) @@ -99,6 +103,11 @@ func Update(config *Config) func(db *gorm.DB) { if db.AddError(err) == nil { db.RowsAffected, _ = result.RowsAffected() } + + if db.Statement.Result != nil { + db.Statement.Result.Result = result + db.Statement.Result.RowsAffected = db.RowsAffected + } } } } diff --git a/vendor/gorm.io/gorm/chainable_api.go b/vendor/gorm.io/gorm/chainable_api.go index 8953413d..8a6aea34 100644 --- a/vendor/gorm.io/gorm/chainable_api.go +++ b/vendor/gorm.io/gorm/chainable_api.go @@ -448,9 +448,10 @@ func (db *DB) Assign(attrs ...interface{}) (tx *DB) { // Unscoped allows queries to include records marked as deleted, // overriding the soft deletion behavior. // Example: -// var users []User -// db.Unscoped().Find(&users) -// // Retrieves all users, including deleted ones. +// +// var users []User +// db.Unscoped().Find(&users) +// // Retrieves all users, including deleted ones. 
func (db *DB) Unscoped() (tx *DB) { tx = db.getInstance() tx.Statement.Unscoped = true diff --git a/vendor/gorm.io/gorm/clause/joins.go b/vendor/gorm.io/gorm/clause/joins.go index 879892be..a6f13e55 100644 --- a/vendor/gorm.io/gorm/clause/joins.go +++ b/vendor/gorm.io/gorm/clause/joins.go @@ -1,5 +1,7 @@ package clause +import "gorm.io/gorm/utils" + type JoinType string const ( @@ -9,6 +11,30 @@ const ( RightJoin JoinType = "RIGHT" ) +type JoinTarget struct { + Type JoinType + Association string + Subquery Expression + Table string +} + +func Has(name string) JoinTarget { + return JoinTarget{Type: InnerJoin, Association: name} +} + +func (jt JoinType) Association(name string) JoinTarget { + return JoinTarget{Type: jt, Association: name} +} + +func (jt JoinType) AssociationFrom(name string, subquery Expression) JoinTarget { + return JoinTarget{Type: jt, Association: name, Subquery: subquery} +} + +func (jt JoinTarget) As(name string) JoinTarget { + jt.Table = name + return jt +} + // Join clause for from type Join struct { Type JoinType @@ -18,6 +44,12 @@ type Join struct { Expression Expression } +func JoinTable(names ...string) Table { + return Table{ + Name: utils.JoinNestedRelationNames(names), + } +} + func (join Join) Build(builder Builder) { if join.Expression != nil { join.Expression.Build(builder) diff --git a/vendor/gorm.io/gorm/finisher_api.go b/vendor/gorm.io/gorm/finisher_api.go index 6802945c..57809d17 100644 --- a/vendor/gorm.io/gorm/finisher_api.go +++ b/vendor/gorm.io/gorm/finisher_api.go @@ -1,6 +1,7 @@ package gorm import ( + "context" "database/sql" "errors" "fmt" @@ -673,11 +674,18 @@ func (db *DB) Begin(opts ...*sql.TxOptions) *DB { opt = opts[0] } + ctx := tx.Statement.Context + if _, ok := ctx.Deadline(); !ok { + if db.Config.DefaultTransactionTimeout > 0 { + ctx, _ = context.WithTimeout(ctx, db.Config.DefaultTransactionTimeout) + } + } + switch beginner := tx.Statement.ConnPool.(type) { case TxBeginner: - tx.Statement.ConnPool, err = beginner.BeginTx(tx.Statement.Context, opt) + tx.Statement.ConnPool, err = beginner.BeginTx(ctx, opt) case ConnPoolBeginner: - tx.Statement.ConnPool, err = beginner.BeginTx(tx.Statement.Context, opt) + tx.Statement.ConnPool, err = beginner.BeginTx(ctx, opt) default: err = ErrInvalidTransaction } diff --git a/vendor/gorm.io/gorm/generics.go b/vendor/gorm.io/gorm/generics.go new file mode 100644 index 00000000..ad2d063f --- /dev/null +++ b/vendor/gorm.io/gorm/generics.go @@ -0,0 +1,605 @@ +package gorm + +import ( + "context" + "database/sql" + "fmt" + "sort" + "strings" + + "gorm.io/gorm/clause" + "gorm.io/gorm/logger" +) + +type result struct { + Result sql.Result + RowsAffected int64 +} + +func (info *result) ModifyStatement(stmt *Statement) { + stmt.Result = info +} + +// Build implements clause.Expression interface +func (result) Build(clause.Builder) { +} + +func WithResult() *result { + return &result{} +} + +type Interface[T any] interface { + Raw(sql string, values ...interface{}) ExecInterface[T] + Exec(ctx context.Context, sql string, values ...interface{}) error + CreateInterface[T] +} + +type CreateInterface[T any] interface { + ChainInterface[T] + Table(name string, args ...interface{}) CreateInterface[T] + Create(ctx context.Context, r *T) error + CreateInBatches(ctx context.Context, r *[]T, batchSize int) error +} + +type ChainInterface[T any] interface { + ExecInterface[T] + Scopes(scopes ...func(db *Statement)) ChainInterface[T] + Where(query interface{}, args ...interface{}) ChainInterface[T] + Not(query interface{}, 
args ...interface{}) ChainInterface[T] + Or(query interface{}, args ...interface{}) ChainInterface[T] + Limit(offset int) ChainInterface[T] + Offset(offset int) ChainInterface[T] + Joins(query clause.JoinTarget, on func(db JoinBuilder, joinTable clause.Table, curTable clause.Table) error) ChainInterface[T] + Preload(association string, query func(db PreloadBuilder) error) ChainInterface[T] + Select(query string, args ...interface{}) ChainInterface[T] + Omit(columns ...string) ChainInterface[T] + MapColumns(m map[string]string) ChainInterface[T] + Distinct(args ...interface{}) ChainInterface[T] + Group(name string) ChainInterface[T] + Having(query interface{}, args ...interface{}) ChainInterface[T] + Order(value interface{}) ChainInterface[T] + + Build(builder clause.Builder) + + Delete(ctx context.Context) (rowsAffected int, err error) + Update(ctx context.Context, name string, value any) (rowsAffected int, err error) + Updates(ctx context.Context, t T) (rowsAffected int, err error) + Count(ctx context.Context, column string) (result int64, err error) +} + +type ExecInterface[T any] interface { + Scan(ctx context.Context, r interface{}) error + First(context.Context) (T, error) + Last(ctx context.Context) (T, error) + Take(context.Context) (T, error) + Find(ctx context.Context) ([]T, error) + FindInBatches(ctx context.Context, batchSize int, fc func(data []T, batch int) error) error + Row(ctx context.Context) *sql.Row + Rows(ctx context.Context) (*sql.Rows, error) +} + +type JoinBuilder interface { + Select(...string) JoinBuilder + Omit(...string) JoinBuilder + Where(query interface{}, args ...interface{}) JoinBuilder + Not(query interface{}, args ...interface{}) JoinBuilder + Or(query interface{}, args ...interface{}) JoinBuilder +} + +type PreloadBuilder interface { + Select(...string) PreloadBuilder + Omit(...string) PreloadBuilder + Where(query interface{}, args ...interface{}) PreloadBuilder + Not(query interface{}, args ...interface{}) PreloadBuilder + Or(query interface{}, args ...interface{}) PreloadBuilder + Limit(offset int) PreloadBuilder + Offset(offset int) PreloadBuilder + Order(value interface{}) PreloadBuilder + LimitPerRecord(num int) PreloadBuilder +} + +type op func(*DB) *DB + +func G[T any](db *DB, opts ...clause.Expression) Interface[T] { + v := &g[T]{ + db: db, + ops: make([]op, 0, 5), + } + + if len(opts) > 0 { + v.ops = append(v.ops, func(db *DB) *DB { + return db.Clauses(opts...) + }) + } + + v.createG = &createG[T]{ + chainG: chainG[T]{ + execG: execG[T]{g: v}, + }, + } + return v +} + +type g[T any] struct { + *createG[T] + db *DB + ops []op +} + +func (g *g[T]) apply(ctx context.Context) *DB { + db := g.db + if !db.DryRun { + db = db.Session(&Session{NewDB: true, Context: ctx}).getInstance() + } + + for _, op := range g.ops { + db = op(db) + } + return db +} + +func (c *g[T]) Raw(sql string, values ...interface{}) ExecInterface[T] { + return execG[T]{g: &g[T]{ + db: c.db, + ops: append(c.ops, func(db *DB) *DB { + return db.Raw(sql, values...) + }), + }} +} + +func (c *g[T]) Exec(ctx context.Context, sql string, values ...interface{}) error { + return c.apply(ctx).Exec(sql, values...).Error +} + +type createG[T any] struct { + chainG[T] +} + +func (c createG[T]) Table(name string, args ...interface{}) CreateInterface[T] { + return createG[T]{c.with(func(db *DB) *DB { + return db.Table(name, args...) 
+ })} +} + +func (c createG[T]) Create(ctx context.Context, r *T) error { + return c.g.apply(ctx).Create(r).Error +} + +func (c createG[T]) CreateInBatches(ctx context.Context, r *[]T, batchSize int) error { + return c.g.apply(ctx).CreateInBatches(r, batchSize).Error +} + +type chainG[T any] struct { + execG[T] +} + +func (c chainG[T]) getInstance() *DB { + var r T + return c.g.apply(context.Background()).Model(r).getInstance() +} + +func (c chainG[T]) with(v op) chainG[T] { + return chainG[T]{ + execG: execG[T]{g: &g[T]{ + db: c.g.db, + ops: append(append([]op(nil), c.g.ops...), v), + }}, + } +} + +func (c chainG[T]) Scopes(scopes ...func(db *Statement)) ChainInterface[T] { + return c.with(func(db *DB) *DB { + for _, fc := range scopes { + fc(db.Statement) + } + return db + }) +} + +func (c chainG[T]) Table(name string, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Table(name, args...) + }) +} + +func (c chainG[T]) Where(query interface{}, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Where(query, args...) + }) +} + +func (c chainG[T]) Not(query interface{}, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Not(query, args...) + }) +} + +func (c chainG[T]) Or(query interface{}, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Or(query, args...) + }) +} + +func (c chainG[T]) Limit(offset int) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Limit(offset) + }) +} + +func (c chainG[T]) Offset(offset int) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Offset(offset) + }) +} + +type joinBuilder struct { + db *DB +} + +func (q *joinBuilder) Where(query interface{}, args ...interface{}) JoinBuilder { + q.db.Where(query, args...) + return q +} + +func (q *joinBuilder) Or(query interface{}, args ...interface{}) JoinBuilder { + q.db.Where(query, args...) + return q +} + +func (q *joinBuilder) Not(query interface{}, args ...interface{}) JoinBuilder { + q.db.Where(query, args...) + return q +} + +func (q *joinBuilder) Select(columns ...string) JoinBuilder { + q.db.Select(columns) + return q +} + +func (q *joinBuilder) Omit(columns ...string) JoinBuilder { + q.db.Omit(columns...) + return q +} + +type preloadBuilder struct { + limitPerRecord int + db *DB +} + +func (q *preloadBuilder) Where(query interface{}, args ...interface{}) PreloadBuilder { + q.db.Where(query, args...) + return q +} + +func (q *preloadBuilder) Or(query interface{}, args ...interface{}) PreloadBuilder { + q.db.Where(query, args...) + return q +} + +func (q *preloadBuilder) Not(query interface{}, args ...interface{}) PreloadBuilder { + q.db.Where(query, args...) + return q +} + +func (q *preloadBuilder) Select(columns ...string) PreloadBuilder { + q.db.Select(columns) + return q +} + +func (q *preloadBuilder) Omit(columns ...string) PreloadBuilder { + q.db.Omit(columns...) 
+ return q +} + +func (q *preloadBuilder) Limit(limit int) PreloadBuilder { + q.db.Limit(limit) + return q +} + +func (q *preloadBuilder) Offset(offset int) PreloadBuilder { + q.db.Offset(offset) + return q +} + +func (q *preloadBuilder) Order(value interface{}) PreloadBuilder { + q.db.Order(value) + return q +} + +func (q *preloadBuilder) LimitPerRecord(num int) PreloadBuilder { + q.limitPerRecord = num + return q +} + +func (c chainG[T]) Joins(jt clause.JoinTarget, on func(db JoinBuilder, joinTable clause.Table, curTable clause.Table) error) ChainInterface[T] { + return c.with(func(db *DB) *DB { + if jt.Table == "" { + jt.Table = clause.JoinTable(strings.Split(jt.Association, ".")...).Name + } + + q := joinBuilder{db: db.Session(&Session{NewDB: true, Initialized: true}).Table(jt.Table)} + if on != nil { + if err := on(&q, clause.Table{Name: jt.Table}, clause.Table{Name: clause.CurrentTable}); err != nil { + db.AddError(err) + } + } + + j := join{ + Name: jt.Association, + Alias: jt.Table, + Selects: q.db.Statement.Selects, + Omits: q.db.Statement.Omits, + JoinType: jt.Type, + } + + if where, ok := q.db.Statement.Clauses["WHERE"].Expression.(clause.Where); ok { + j.On = &where + } + + if jt.Subquery != nil { + joinType := j.JoinType + if joinType == "" { + joinType = clause.LeftJoin + } + + if db, ok := jt.Subquery.(interface{ getInstance() *DB }); ok { + stmt := db.getInstance().Statement + if len(j.Selects) == 0 { + j.Selects = stmt.Selects + } + if len(j.Omits) == 0 { + j.Omits = stmt.Omits + } + } + + expr := clause.NamedExpr{SQL: fmt.Sprintf("%s JOIN (?) AS ?", joinType), Vars: []interface{}{jt.Subquery, clause.Table{Name: j.Alias}}} + + if j.On != nil { + expr.SQL += " ON ?" + expr.Vars = append(expr.Vars, clause.AndConditions{Exprs: j.On.Exprs}) + } + + j.Expression = expr + } + + db.Statement.Joins = append(db.Statement.Joins, j) + sort.Slice(db.Statement.Joins, func(i, j int) bool { + return db.Statement.Joins[i].Name < db.Statement.Joins[j].Name + }) + return db + }) +} + +func (c chainG[T]) Select(query string, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Select(query, args...) + }) +} + +func (c chainG[T]) Omit(columns ...string) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Omit(columns...) + }) +} + +func (c chainG[T]) MapColumns(m map[string]string) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.MapColumns(m) + }) +} + +func (c chainG[T]) Distinct(args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Distinct(args...) + }) +} + +func (c chainG[T]) Group(name string) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Group(name) + }) +} + +func (c chainG[T]) Having(query interface{}, args ...interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Having(query, args...) 
+ }) +} + +func (c chainG[T]) Order(value interface{}) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Order(value) + }) +} + +func (c chainG[T]) Preload(association string, query func(db PreloadBuilder) error) ChainInterface[T] { + return c.with(func(db *DB) *DB { + return db.Preload(association, func(tx *DB) *DB { + q := preloadBuilder{db: tx.getInstance()} + if query != nil { + if err := query(&q); err != nil { + db.AddError(err) + } + } + + relation, ok := db.Statement.Schema.Relationships.Relations[association] + if !ok { + if preloadFields := strings.Split(association, "."); len(preloadFields) > 1 { + relationships := db.Statement.Schema.Relationships + for _, field := range preloadFields { + var ok bool + relation, ok = relationships.Relations[field] + if ok { + relationships = relation.FieldSchema.Relationships + } else { + db.AddError(fmt.Errorf("relation %s not found", association)) + return nil + } + } + } else { + db.AddError(fmt.Errorf("relation %s not found", association)) + return nil + } + } + + if q.limitPerRecord > 0 { + if relation.JoinTable != nil { + tx.AddError(fmt.Errorf("many2many relation %s don't support LimitPerRecord", association)) + return tx + } + + refColumns := []clause.Column{} + for _, rel := range relation.References { + if rel.OwnPrimaryKey { + refColumns = append(refColumns, clause.Column{Name: rel.ForeignKey.DBName}) + } + } + + if len(refColumns) != 0 { + selectExpr := clause.CommaExpression{} + for _, column := range q.db.Statement.Selects { + selectExpr.Exprs = append(selectExpr.Exprs, clause.Expr{SQL: "?", Vars: []interface{}{clause.Column{Name: column}}}) + } + + if len(selectExpr.Exprs) == 0 { + selectExpr.Exprs = []clause.Expression{clause.Expr{SQL: "*", Vars: []interface{}{}}} + } + + partitionBy := clause.CommaExpression{} + for _, column := range refColumns { + partitionBy.Exprs = append(partitionBy.Exprs, clause.Expr{SQL: "?", Vars: []interface{}{clause.Column{Name: column.Name}}}) + } + + rnnColumn := clause.Column{Name: "gorm_preload_rnn"} + sql := "ROW_NUMBER() OVER (PARTITION BY ? ?)" + vars := []interface{}{partitionBy} + if orderBy, ok := q.db.Statement.Clauses["ORDER BY"]; ok { + vars = append(vars, orderBy) + } else { + vars = append(vars, clause.Clause{Name: "ORDER BY", Expression: clause.OrderBy{ + Columns: []clause.OrderByColumn{{Column: clause.PrimaryColumn, Desc: true}}, + }}) + } + vars = append(vars, rnnColumn) + + selectExpr.Exprs = append(selectExpr.Exprs, clause.Expr{SQL: sql + " AS ?", Vars: vars}) + + q.db.Clauses(clause.Select{Expression: selectExpr}) + + return q.db.Session(&Session{NewDB: true}).Unscoped().Table("(?) t", q.db).Where("? 
<= ?", rnnColumn, q.limitPerRecord) + } + } + + return q.db + }) + }) +} + +func (c chainG[T]) Delete(ctx context.Context) (rowsAffected int, err error) { + r := new(T) + res := c.g.apply(ctx).Delete(r) + return int(res.RowsAffected), res.Error +} + +func (c chainG[T]) Update(ctx context.Context, name string, value any) (rowsAffected int, err error) { + var r T + res := c.g.apply(ctx).Model(r).Update(name, value) + return int(res.RowsAffected), res.Error +} + +func (c chainG[T]) Updates(ctx context.Context, t T) (rowsAffected int, err error) { + res := c.g.apply(ctx).Updates(t) + return int(res.RowsAffected), res.Error +} + +func (c chainG[T]) Count(ctx context.Context, column string) (result int64, err error) { + var r T + err = c.g.apply(ctx).Model(r).Select(column).Count(&result).Error + return +} + +func (c chainG[T]) Build(builder clause.Builder) { + subdb := c.getInstance() + subdb.Logger = logger.Discard + subdb.DryRun = true + + if stmt, ok := builder.(*Statement); ok { + if subdb.Statement.SQL.Len() > 0 { + var ( + vars = subdb.Statement.Vars + sql = subdb.Statement.SQL.String() + ) + + subdb.Statement.Vars = make([]interface{}, 0, len(vars)) + for _, vv := range vars { + subdb.Statement.Vars = append(subdb.Statement.Vars, vv) + bindvar := strings.Builder{} + subdb.BindVarTo(&bindvar, subdb.Statement, vv) + sql = strings.Replace(sql, bindvar.String(), "?", 1) + } + + subdb.Statement.SQL.Reset() + subdb.Statement.Vars = stmt.Vars + if strings.Contains(sql, "@") { + clause.NamedExpr{SQL: sql, Vars: vars}.Build(subdb.Statement) + } else { + clause.Expr{SQL: sql, Vars: vars}.Build(subdb.Statement) + } + } else { + subdb.Statement.Vars = append(stmt.Vars, subdb.Statement.Vars...) + subdb.callbacks.Query().Execute(subdb) + } + + builder.WriteString(subdb.Statement.SQL.String()) + stmt.Vars = subdb.Statement.Vars + } +} + +type execG[T any] struct { + g *g[T] +} + +func (g execG[T]) First(ctx context.Context) (T, error) { + var r T + err := g.g.apply(ctx).First(&r).Error + return r, err +} + +func (g execG[T]) Scan(ctx context.Context, result interface{}) error { + var r T + err := g.g.apply(ctx).Model(r).Find(&result).Error + return err +} + +func (g execG[T]) Last(ctx context.Context) (T, error) { + var r T + err := g.g.apply(ctx).Last(&r).Error + return r, err +} + +func (g execG[T]) Take(ctx context.Context) (T, error) { + var r T + err := g.g.apply(ctx).Take(&r).Error + return r, err +} + +func (g execG[T]) Find(ctx context.Context) ([]T, error) { + var r []T + err := g.g.apply(ctx).Find(&r).Error + return r, err +} + +func (g execG[T]) FindInBatches(ctx context.Context, batchSize int, fc func(data []T, batch int) error) error { + var data []T + return g.g.apply(ctx).FindInBatches(&data, batchSize, func(tx *DB, batch int) error { + return fc(data, batch) + }).Error +} + +func (g execG[T]) Row(ctx context.Context) *sql.Row { + return g.g.apply(ctx).Row() +} + +func (g execG[T]) Rows(ctx context.Context) (*sql.Rows, error) { + return g.g.apply(ctx).Rows() +} diff --git a/vendor/gorm.io/gorm/gorm.go b/vendor/gorm.io/gorm/gorm.go index 63a28b37..67889262 100644 --- a/vendor/gorm.io/gorm/gorm.go +++ b/vendor/gorm.io/gorm/gorm.go @@ -21,7 +21,9 @@ const preparedStmtDBKey = "preparedStmt" type Config struct { // GORM perform single create, update, delete operations in transactions by default to ensure database data integrity // You can disable it by setting `SkipDefaultTransaction` to true - SkipDefaultTransaction bool + SkipDefaultTransaction bool + DefaultTransactionTimeout time.Duration 
+ // NamingStrategy tables, columns naming strategy NamingStrategy schema.Namer // FullSaveAssociations full save associations @@ -135,12 +137,16 @@ func Open(dialector Dialector, opts ...Option) (db *DB, err error) { return isConfig && !isConfig2 }) + var skipAfterInitialize bool for _, opt := range opts { if opt != nil { if applyErr := opt.Apply(config); applyErr != nil { return nil, applyErr } defer func(opt Option) { + if skipAfterInitialize { + return + } if errr := opt.AfterInitialize(db); errr != nil { err = errr } @@ -192,6 +198,10 @@ func Open(dialector Dialector, opts ...Option) (db *DB, err error) { if db, _ := db.DB(); db != nil { _ = db.Close() } + + // DB is not initialized, so we skip AfterInitialize + skipAfterInitialize = true + return } if config.TranslateError { @@ -519,7 +529,7 @@ func (db *DB) Use(plugin Plugin) error { // .First(&User{}) // }) func (db *DB) ToSQL(queryFn func(tx *DB) *DB) string { - tx := queryFn(db.Session(&Session{DryRun: true, SkipDefaultTransaction: true})) + tx := queryFn(db.Session(&Session{DryRun: true, SkipDefaultTransaction: true}).getInstance()) stmt := tx.Statement return db.Dialector.Explain(stmt.SQL.String(), stmt.Vars...) diff --git a/vendor/gorm.io/gorm/scan.go b/vendor/gorm.io/gorm/scan.go index 6dc55f62..9a99d024 100644 --- a/vendor/gorm.io/gorm/scan.go +++ b/vendor/gorm.io/gorm/scan.go @@ -4,6 +4,7 @@ import ( "database/sql" "database/sql/driver" "reflect" + "strings" "time" "gorm.io/gorm/schema" @@ -244,6 +245,14 @@ func Scan(rows Rows, db *DB, mode ScanMode) { matchedFieldCount[column] = 1 } } else if names := utils.SplitNestedRelationName(column); len(names) > 1 { // has nested relation + aliasName := utils.JoinNestedRelationNames(names[0 : len(names)-1]) + for _, join := range db.Statement.Joins { + if join.Alias == aliasName { + names = append(strings.Split(join.Name, "."), names[len(names)-1]) + break + } + } + if rel, ok := sch.Relationships.Relations[names[0]]; ok { subNameCount := len(names) // nested relation fields diff --git a/vendor/gorm.io/gorm/schema/field.go b/vendor/gorm.io/gorm/schema/field.go index d1a633ce..a6ff1a72 100644 --- a/vendor/gorm.io/gorm/schema/field.go +++ b/vendor/gorm.io/gorm/schema/field.go @@ -318,9 +318,10 @@ func (schema *Schema) ParseField(fieldStruct reflect.StructField) *Field { } if val, ok := field.TagSettings["TYPE"]; ok { - switch DataType(strings.ToLower(val)) { + lowerVal := DataType(strings.ToLower(val)) + switch lowerVal { case Bool, Int, Uint, Float, String, Time, Bytes: - field.DataType = DataType(strings.ToLower(val)) + field.DataType = lowerVal default: field.DataType = DataType(val) } diff --git a/vendor/gorm.io/gorm/schema/index.go b/vendor/gorm.io/gorm/schema/index.go index a1cdc639..2690a0cb 100644 --- a/vendor/gorm.io/gorm/schema/index.go +++ b/vendor/gorm.io/gorm/schema/index.go @@ -105,7 +105,7 @@ func parseFieldIndexes(field *Field) (indexes []Index, err error) { var ( name string tag = strings.Join(v[1:], ":") - idx = strings.Index(tag, ",") + idx = strings.IndexByte(tag, ',') tagSetting = strings.Join(strings.Split(tag, ",")[1:], ",") settings = ParseTagSetting(tagSetting, ",") length, _ = strconv.Atoi(settings["LENGTH"]) diff --git a/vendor/gorm.io/gorm/schema/relationship.go b/vendor/gorm.io/gorm/schema/relationship.go index def4a595..f1ace924 100644 --- a/vendor/gorm.io/gorm/schema/relationship.go +++ b/vendor/gorm.io/gorm/schema/relationship.go @@ -78,7 +78,7 @@ func (schema *Schema) parseRelation(field *Field) *Relationship { cacheStore := schema.cacheStore if 
relation.FieldSchema, err = getOrParse(fieldValue, cacheStore, schema.namer); err != nil { - schema.err = err + schema.err = fmt.Errorf("failed to parse field: %s, error: %w", field.Name, err) return nil } @@ -663,6 +663,7 @@ func (rel *Relationship) ParseConstraint() *Constraint { if !(rel.References[idx].PrimaryKey == ref.PrimaryKey && rel.References[idx].ForeignKey == ref.ForeignKey && rel.References[idx].PrimaryValue == ref.PrimaryValue) { matched = false + break } } @@ -675,7 +676,7 @@ func (rel *Relationship) ParseConstraint() *Constraint { var ( name string - idx = strings.Index(str, ",") + idx = strings.IndexByte(str, ',') settings = ParseTagSetting(str, ",") ) @@ -762,8 +763,9 @@ func (rel *Relationship) ToQueryConditions(ctx context.Context, reflectValue ref } func copyableDataType(str DataType) bool { + lowerStr := strings.ToLower(string(str)) for _, s := range []string{"auto_increment", "primary key"} { - if strings.Contains(strings.ToLower(string(str)), s) { + if strings.Contains(lowerStr, s) { return false } } diff --git a/vendor/gorm.io/gorm/statement.go b/vendor/gorm.io/gorm/statement.go index 39e05d09..c6183724 100644 --- a/vendor/gorm.io/gorm/statement.go +++ b/vendor/gorm.io/gorm/statement.go @@ -47,15 +47,18 @@ type Statement struct { attrs []interface{} assigns []interface{} scopes []func(*DB) *DB + Result *result } type join struct { - Name string - Conds []interface{} - On *clause.Where - Selects []string - Omits []string - JoinType clause.JoinType + Name string + Alias string + Conds []interface{} + On *clause.Where + Selects []string + Omits []string + Expression clause.Expression + JoinType clause.JoinType } // StatementModifier statement modifier interface @@ -205,19 +208,21 @@ func (stmt *Statement) AddVar(writer clause.Writer, vars ...interface{}) { } else { writer.WriteString("(NULL)") } - case *DB: - subdb := v.Session(&Session{Logger: logger.Discard, DryRun: true}).getInstance() - if v.Statement.SQL.Len() > 0 { + case interface{ getInstance() *DB }: + cv := v.getInstance() + + subdb := cv.Session(&Session{Logger: logger.Discard, DryRun: true}).getInstance() + if cv.Statement.SQL.Len() > 0 { var ( vars = subdb.Statement.Vars - sql = v.Statement.SQL.String() + sql = cv.Statement.SQL.String() ) subdb.Statement.Vars = make([]interface{}, 0, len(vars)) for _, vv := range vars { subdb.Statement.Vars = append(subdb.Statement.Vars, vv) bindvar := strings.Builder{} - v.Dialector.BindVarTo(&bindvar, subdb.Statement, vv) + cv.BindVarTo(&bindvar, subdb.Statement, vv) sql = strings.Replace(sql, bindvar.String(), "?", 1) } @@ -321,6 +326,11 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] arg, _ = valuer.Value() } + curTable := stmt.Table + if curTable == "" { + curTable = clause.CurrentTable + } + switch v := arg.(type) { case clause.Expression: conds = append(conds, v) @@ -351,7 +361,8 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] sort.Strings(keys) for _, key := range keys { - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) + column := clause.Column{Name: key, Table: curTable} + conds = append(conds, clause.Eq{Column: column, Value: v[key]}) } case map[string]interface{}: keys := make([]string, 0, len(v)) @@ -362,12 +373,13 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] for _, key := range keys { reflectValue := reflect.Indirect(reflect.ValueOf(v[key])) + column := clause.Column{Name: key, Table: curTable} switch reflectValue.Kind() { case 
reflect.Slice, reflect.Array: if _, ok := v[key].(driver.Valuer); ok { - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) + conds = append(conds, clause.Eq{Column: column, Value: v[key]}) } else if _, ok := v[key].(Valuer); ok { - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) + conds = append(conds, clause.Eq{Column: column, Value: v[key]}) } else { // optimize reflect value length valueLen := reflectValue.Len() @@ -376,10 +388,10 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] values[i] = reflectValue.Index(i).Interface() } - conds = append(conds, clause.IN{Column: key, Values: values}) + conds = append(conds, clause.IN{Column: column, Values: values}) } default: - conds = append(conds, clause.Eq{Column: key, Value: v[key]}) + conds = append(conds, clause.Eq{Column: column, Value: v[key]}) } } default: @@ -406,9 +418,9 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] if selected || (!restricted && field.Readable) { if v, isZero := field.ValueOf(stmt.Context, reflectValue); !isZero || selected { if field.DBName != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.DBName}, Value: v}) + conds = append(conds, clause.Eq{Column: clause.Column{Table: curTable, Name: field.DBName}, Value: v}) } else if field.DataType != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.Name}, Value: v}) + conds = append(conds, clause.Eq{Column: clause.Column{Table: curTable, Name: field.Name}, Value: v}) } } } @@ -420,9 +432,9 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] if selected || (!restricted && field.Readable) { if v, isZero := field.ValueOf(stmt.Context, reflectValue.Index(i)); !isZero || selected { if field.DBName != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.DBName}, Value: v}) + conds = append(conds, clause.Eq{Column: clause.Column{Table: curTable, Name: field.DBName}, Value: v}) } else if field.DataType != "" { - conds = append(conds, clause.Eq{Column: clause.Column{Table: clause.CurrentTable, Name: field.Name}, Value: v}) + conds = append(conds, clause.Eq{Column: clause.Column{Table: curTable, Name: field.Name}, Value: v}) } } } @@ -447,14 +459,14 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] } if len(values) > 0 { - conds = append(conds, clause.IN{Column: clause.PrimaryColumn, Values: values}) + conds = append(conds, clause.IN{Column: clause.Column{Table: curTable, Name: clause.PrimaryKey}, Values: values}) return []clause.Expression{clause.And(conds...)} } return nil } } - conds = append(conds, clause.IN{Column: clause.PrimaryColumn, Values: args}) + conds = append(conds, clause.IN{Column: clause.Column{Table: curTable, Name: clause.PrimaryKey}, Values: args}) } } } @@ -521,6 +533,7 @@ func (stmt *Statement) clone() *Statement { Context: stmt.Context, RaiseErrorOnNotFound: stmt.RaiseErrorOnNotFound, SkipHooks: stmt.SkipHooks, + Result: stmt.Result, } if stmt.SQL.Len() > 0 { diff --git a/vendor/modules.txt b/vendor/modules.txt index ec6c70d9..9dc5ecdc 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -12,7 +12,7 @@ github.com/asaskevich/govalidator # github.com/beorn7/perks v1.0.1 ## explicit; go 1.11 github.com/beorn7/perks/quantile -# github.com/bradleyfalzon/ghinstallation/v2 v2.15.0 +# github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 ## explicit; go 
1.23.0 github.com/bradleyfalzon/ghinstallation/v2 # github.com/cespare/xxhash/v2 v2.3.0 @@ -21,8 +21,8 @@ github.com/cespare/xxhash/v2 # github.com/chzyer/readline v1.5.1 ## explicit; go 1.15 github.com/chzyer/readline -# github.com/cloudbase/garm-provider-common v0.1.4 -## explicit; go 1.22 +# github.com/cloudbase/garm-provider-common v0.1.5 +## explicit; go 1.23.0 github.com/cloudbase/garm-provider-common/defaults github.com/cloudbase/garm-provider-common/errors github.com/cloudbase/garm-provider-common/execution/common @@ -36,7 +36,7 @@ github.com/davecgh/go-spew/spew # github.com/felixge/httpsnoop v1.0.4 ## explicit; go 1.13 github.com/felixge/httpsnoop -# github.com/go-logr/logr v1.4.2 +# github.com/go-logr/logr v1.4.3 ## explicit; go 1.18 github.com/go-logr/logr github.com/go-logr/logr/funcr @@ -88,7 +88,7 @@ github.com/go-openapi/swag # github.com/go-openapi/validate v0.24.0 ## explicit; go 1.20 github.com/go-openapi/validate -# github.com/go-sql-driver/mysql v1.9.2 +# github.com/go-sql-driver/mysql v1.9.3 ## explicit; go 1.21.0 github.com/go-sql-driver/mysql # github.com/golang-jwt/jwt/v4 v4.5.2 @@ -97,12 +97,9 @@ github.com/golang-jwt/jwt/v4 # github.com/golang-jwt/jwt/v5 v5.2.2 ## explicit; go 1.18 github.com/golang-jwt/jwt/v5 -# github.com/google/go-github/v57 v57.0.0 -## explicit; go 1.17 -github.com/google/go-github/v57/github -# github.com/google/go-github/v71 v71.0.0 +# github.com/google/go-github/v72 v72.0.0 ## explicit; go 1.23.0 -github.com/google/go-github/v71/github +github.com/google/go-github/v72/github # github.com/google/go-querystring v1.1.0 ## explicit; go 1.10 github.com/google/go-querystring/query @@ -242,7 +239,7 @@ github.com/stretchr/testify/suite # github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 ## explicit; go 1.18 github.com/teris-io/shortid -# go.mongodb.org/mongo-driver v1.17.3 +# go.mongodb.org/mongo-driver v1.17.4 ## explicit; go 1.18 go.mongodb.org/mongo-driver/bson go.mongodb.org/mongo-driver/bson/bsoncodec @@ -279,7 +276,7 @@ go.opentelemetry.io/otel/trace go.opentelemetry.io/otel/trace/embedded go.opentelemetry.io/otel/trace/internal/telemetry go.opentelemetry.io/otel/trace/noop -# golang.org/x/crypto v0.38.0 +# golang.org/x/crypto v0.39.0 ## explicit; go 1.23.0 golang.org/x/crypto/bcrypt golang.org/x/crypto/blowfish @@ -288,7 +285,7 @@ golang.org/x/crypto/chacha20poly1305 golang.org/x/crypto/hkdf golang.org/x/crypto/internal/alias golang.org/x/crypto/internal/poly1305 -# golang.org/x/net v0.40.0 +# golang.org/x/net v0.41.0 ## explicit; go 1.23.0 golang.org/x/net/internal/socks golang.org/x/net/proxy @@ -296,7 +293,7 @@ golang.org/x/net/proxy ## explicit; go 1.23.0 golang.org/x/oauth2 golang.org/x/oauth2/internal -# golang.org/x/sync v0.14.0 +# golang.org/x/sync v0.15.0 ## explicit; go 1.23.0 golang.org/x/sync/errgroup # golang.org/x/sys v0.33.0 @@ -304,7 +301,7 @@ golang.org/x/sync/errgroup golang.org/x/sys/cpu golang.org/x/sys/unix golang.org/x/sys/windows -# golang.org/x/text v0.25.0 +# golang.org/x/text v0.26.0 ## explicit; go 1.23.0 golang.org/x/text/cases golang.org/x/text/internal @@ -358,13 +355,13 @@ gopkg.in/yaml.v3 # gorm.io/datatypes v1.2.5 ## explicit; go 1.19 gorm.io/datatypes -# gorm.io/driver/mysql v1.5.7 -## explicit; go 1.14 +# gorm.io/driver/mysql v1.6.0 +## explicit; go 1.18 gorm.io/driver/mysql -# gorm.io/driver/sqlite v1.5.7 +# gorm.io/driver/sqlite v1.6.0 ## explicit; go 1.20 gorm.io/driver/sqlite -# gorm.io/gorm v1.26.1 +# gorm.io/gorm v1.30.0 ## explicit; go 1.18 gorm.io/gorm gorm.io/gorm/callbacks From 
e07befbea576e5fd81308ab4b2201173e21eb90e Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 19 Jun 2025 21:13:15 +0000 Subject: [PATCH 108/179] Fix show webhooks. Signed-off-by: Gabriel Adrian Samfira --- runner/pool/common.go | 4 ++- test/integration/organizations_test.go | 36 +++++++++++++------------- test/integration/repositories_test.go | 34 ++++++++++++------------ 3 files changed, 38 insertions(+), 36 deletions(-) diff --git a/runner/pool/common.go b/runner/pool/common.go index 5a15659f..29c86e27 100644 --- a/runner/pool/common.go +++ b/runner/pool/common.go @@ -39,14 +39,16 @@ func validateHookRequest(controllerID, baseURL string, allHooks []*github.Hook, func hookToParamsHookInfo(hook *github.Hook) params.HookInfo { var insecureSSL bool + var hookURL string config := hook.GetConfig() if config != nil { insecureSSL = config.GetInsecureSSL() == "1" + hookURL = config.GetURL() } return params.HookInfo{ ID: *hook.ID, - URL: hook.GetURL(), + URL: hookURL, Events: hook.Events, Active: *hook.Active, InsecureSSL: insecureSSL, diff --git a/test/integration/organizations_test.go b/test/integration/organizations_test.go index ed8f7f02..46264d87 100644 --- a/test/integration/organizations_test.go +++ b/test/integration/organizations_test.go @@ -57,7 +57,7 @@ func (suite *GarmSuite) CreateOrg(orgName, credentialsName, orgWebhookSecret str WebhookSecret: orgWebhookSecret, } org, err := createOrg(suite.cli, suite.authToken, orgParams) - suite.NoError(err, "error creating organization") + suite.Require().NoError(err, "error creating organization") return org } @@ -68,7 +68,7 @@ func (suite *GarmSuite) UpdateOrg(id, credentialsName string) *params.Organizati CredentialsName: credentialsName, } org, err := updateOrg(suite.cli, suite.authToken, id, updateParams) - suite.NoError(err, "error updating organization") + suite.Require().NoError(err, "error updating organization") return org } @@ -79,15 +79,15 @@ func (suite *GarmSuite) InstallOrgWebhook(id string) *params.HookInfo { WebhookEndpointType: params.WebhookEndpointDirect, } _, err := installOrgWebhook(suite.cli, suite.authToken, id, webhookParams) - suite.NoError(err, "error installing organization webhook") + suite.Require().NoError(err, "error installing organization webhook") webhookInfo, err := getOrgWebhook(suite.cli, suite.authToken, id) - suite.NoError(err, "error getting organization webhook") + suite.Require().NoError(err, "error getting organization webhook") return webhookInfo } func (suite *GarmSuite) ValidateOrgWebhookInstalled(ghToken, url, orgName string) { hook, err := getGhOrgWebhook(url, ghToken, orgName) - suite.NoError(err, "error getting github webhook") + suite.Require().NoError(err, "error getting github webhook") suite.NotNil(hook, "github webhook with url %s, for org %s was not properly installed", url, orgName) } @@ -99,7 +99,7 @@ func getGhOrgWebhook(url, ghToken, orgName string) (*github.Hook, error) { } for _, hook := range ghOrgHooks { - hookURL := hook.GetURL() + hookURL := hook.Config.GetURL() if hookURL == url { return hook, nil } @@ -112,12 +112,12 @@ func (suite *GarmSuite) UninstallOrgWebhook(id string) { t := suite.T() t.Logf("Uninstall org webhook with org_id %s", id) err := uninstallOrgWebhook(suite.cli, suite.authToken, id) - suite.NoError(err, "error uninstalling organization webhook") + suite.Require().NoError(err, "error uninstalling organization webhook") } func (suite *GarmSuite) ValidateOrgWebhookUninstalled(ghToken, url, orgName string) { hook, err := getGhOrgWebhook(url, ghToken, 
orgName) - suite.NoError(err, "error getting github webhook") + suite.Require().NoError(err, "error getting github webhook") suite.Nil(hook, "github webhook with url %s, for org %s was not properly uninstalled", url, orgName) } @@ -125,7 +125,7 @@ func (suite *GarmSuite) CreateOrgPool(orgID string, poolParams params.CreatePool t := suite.T() t.Logf("Create org pool with org_id %s", orgID) pool, err := createOrgPool(suite.cli, suite.authToken, orgID, poolParams) - suite.NoError(err, "error creating organization pool") + suite.Require().NoError(err, "error creating organization pool") return pool } @@ -133,7 +133,7 @@ func (suite *GarmSuite) GetOrgPool(orgID, orgPoolID string) *params.Pool { t := suite.T() t.Logf("Get org pool with org_id %s and pool_id %s", orgID, orgPoolID) pool, err := getOrgPool(suite.cli, suite.authToken, orgID, orgPoolID) - suite.NoError(err, "error getting organization pool") + suite.Require().NoError(err, "error getting organization pool") return pool } @@ -141,7 +141,7 @@ func (suite *GarmSuite) DeleteOrgPool(orgID, orgPoolID string) { t := suite.T() t.Logf("Delete org pool with org_id %s and pool_id %s", orgID, orgPoolID) err := deleteOrgPool(suite.cli, suite.authToken, orgID, orgPoolID) - suite.NoError(err, "error deleting organization pool") + suite.Require().NoError(err, "error deleting organization pool") } func (suite *GarmSuite) UpdateOrgPool(orgID, orgPoolID string, maxRunners, minIdleRunners uint) *params.Pool { @@ -152,14 +152,14 @@ func (suite *GarmSuite) UpdateOrgPool(orgID, orgPoolID string, maxRunners, minId MaxRunners: &maxRunners, } pool, err := updateOrgPool(suite.cli, suite.authToken, orgID, orgPoolID, poolParams) - suite.NoError(err, "error updating organization pool") + suite.Require().NoError(err, "error updating organization pool") return pool } func (suite *GarmSuite) WaitOrgRunningIdleInstances(orgID string, timeout time.Duration) { t := suite.T() orgPools, err := listOrgPools(suite.cli, suite.authToken, orgID) - suite.NoError(err, "error listing organization pools") + suite.Require().NoError(err, "error listing organization pools") for _, pool := range orgPools { err := suite.WaitPoolInstances(pool.ID, commonParams.InstanceRunning, params.RunnerIdle, timeout) if err != nil { @@ -174,19 +174,19 @@ func (suite *GarmSuite) dumpOrgInstancesDetails(orgID string) { // print org details t.Logf("Dumping org details with org_id %s", orgID) org, err := getOrg(suite.cli, suite.authToken, orgID) - suite.NoError(err, "error getting organization") + suite.Require().NoError(err, "error getting organization") err = printJSONResponse(org) - suite.NoError(err, "error printing organization") + suite.Require().NoError(err, "error printing organization") // print org instances details t.Logf("Dumping org instances details for org %s", orgID) instances, err := listOrgInstances(suite.cli, suite.authToken, orgID) - suite.NoError(err, "error listing organization instances") + suite.Require().NoError(err, "error listing organization instances") for _, instance := range instances { instance, err := getInstance(suite.cli, suite.authToken, instance.Name) - suite.NoError(err, "error getting instance") + suite.Require().NoError(err, "error getting instance") t.Logf("Instance info for instace %s", instance.Name) err = printJSONResponse(instance) - suite.NoError(err, "error printing instance") + suite.Require().NoError(err, "error printing instance") } } diff --git a/test/integration/repositories_test.go b/test/integration/repositories_test.go index 36887664..2936cef8 
100644 --- a/test/integration/repositories_test.go +++ b/test/integration/repositories_test.go @@ -41,7 +41,7 @@ func (suite *GarmSuite) TestRepositories() { CredentialsName: fmt.Sprintf("%s-clone", suite.credentialsName), } repo, err := updateRepo(suite.cli, suite.authToken, suite.repo.ID, updateParams) - suite.NoError(err, "error updating repository") + suite.Require().NoError(err, "error updating repository") suite.Equal(fmt.Sprintf("%s-clone", suite.credentialsName), repo.CredentialsName, "credentials name mismatch") suite.repo = repo @@ -88,16 +88,16 @@ func (suite *GarmSuite) InstallRepoWebhook(id string) *params.HookInfo { WebhookEndpointType: params.WebhookEndpointDirect, } _, err := installRepoWebhook(suite.cli, suite.authToken, id, webhookParams) - suite.NoError(err, "error installing repository webhook") + suite.Require().NoError(err, "error installing repository webhook") webhookInfo, err := getRepoWebhook(suite.cli, suite.authToken, id) - suite.NoError(err, "error getting repository webhook") + suite.Require().NoError(err, "error getting repository webhook") return webhookInfo } func (suite *GarmSuite) ValidateRepoWebhookInstalled(ghToken, url, orgName, repoName string) { hook, err := getGhRepoWebhook(url, ghToken, orgName, repoName) - suite.NoError(err, "error getting github webhook") + suite.Require().NoError(err, "error getting github webhook") suite.NotNil(hook, "github webhook with url %s, for repo %s/%s was not properly installed", url, orgName, repoName) } @@ -109,7 +109,7 @@ func getGhRepoWebhook(url, ghToken, orgName, repoName string) (*github.Hook, err } for _, hook := range ghRepoHooks { - hookURL := hook.GetURL() + hookURL := hook.Config.GetURL() if hookURL == url { return hook, nil } @@ -128,12 +128,12 @@ func (suite *GarmSuite) UninstallRepoWebhook(id string) { t := suite.T() t.Logf("Uninstall repo webhook with repo_id %s", id) err := uninstallRepoWebhook(suite.cli, suite.authToken, id) - suite.NoError(err, "error uninstalling repository webhook") + suite.Require().NoError(err, "error uninstalling repository webhook") } func (suite *GarmSuite) ValidateRepoWebhookUninstalled(ghToken, url, orgName, repoName string) { hook, err := getGhRepoWebhook(url, ghToken, orgName, repoName) - suite.NoError(err, "error getting github webhook") + suite.Require().NoError(err, "error getting github webhook") suite.Nil(hook, "github webhook with url %s, for repo %s/%s was not properly uninstalled", url, orgName, repoName) } @@ -141,7 +141,7 @@ func (suite *GarmSuite) CreateRepoPool(repoID string, poolParams params.CreatePo t := suite.T() t.Logf("Create repo pool with repo_id %s and pool_params %+v", repoID, poolParams) pool, err := createRepoPool(suite.cli, suite.authToken, repoID, poolParams) - suite.NoError(err, "error creating repository pool") + suite.Require().NoError(err, "error creating repository pool") return pool } @@ -149,7 +149,7 @@ func (suite *GarmSuite) GetRepoPool(repoID, repoPoolID string) *params.Pool { t := suite.T() t.Logf("Get repo pool repo_id %s and pool_id %s", repoID, repoPoolID) pool, err := getRepoPool(suite.cli, suite.authToken, repoID, repoPoolID) - suite.NoError(err, "error getting repository pool") + suite.Require().NoError(err, "error getting repository pool") return pool } @@ -157,7 +157,7 @@ func (suite *GarmSuite) DeleteRepoPool(repoID, repoPoolID string) { t := suite.T() t.Logf("Delete repo pool with repo_id %s and pool_id %s", repoID, repoPoolID) err := deleteRepoPool(suite.cli, suite.authToken, repoID, repoPoolID) - suite.NoError(err, "error 
deleting repository pool") + suite.Require().NoError(err, "error deleting repository pool") } func (suite *GarmSuite) UpdateRepoPool(repoID, repoPoolID string, maxRunners, minIdleRunners uint) *params.Pool { @@ -168,14 +168,14 @@ func (suite *GarmSuite) UpdateRepoPool(repoID, repoPoolID string, maxRunners, mi MaxRunners: &maxRunners, } pool, err := updateRepoPool(suite.cli, suite.authToken, repoID, repoPoolID, poolParams) - suite.NoError(err, "error updating repository pool") + suite.Require().NoError(err, "error updating repository pool") return pool } func (suite *GarmSuite) WaitRepoRunningIdleInstances(repoID string, timeout time.Duration) { t := suite.T() repoPools, err := listRepoPools(suite.cli, suite.authToken, repoID) - suite.NoError(err, "error listing repo pools") + suite.Require().NoError(err, "error listing repo pools") for _, pool := range repoPools { err := suite.WaitPoolInstances(pool.ID, commonParams.InstanceRunning, params.RunnerIdle, timeout) if err != nil { @@ -190,19 +190,19 @@ func (suite *GarmSuite) dumpRepoInstancesDetails(repoID string) { // print repo details t.Logf("Dumping repo details for repo %s", repoID) repo, err := getRepo(suite.cli, suite.authToken, repoID) - suite.NoError(err, "error getting repo") + suite.Require().NoError(err, "error getting repo") err = printJSONResponse(repo) - suite.NoError(err, "error printing repo") + suite.Require().NoError(err, "error printing repo") // print repo instances details t.Logf("Dumping repo instances details for repo %s", repoID) instances, err := listRepoInstances(suite.cli, suite.authToken, repoID) - suite.NoError(err, "error listing repo instances") + suite.Require().NoError(err, "error listing repo instances") for _, instance := range instances { instance, err := getInstance(suite.cli, suite.authToken, instance.Name) - suite.NoError(err, "error getting instance") + suite.Require().NoError(err, "error getting instance") t.Logf("Instance info for instance %s", instance.Name) err = printJSONResponse(instance) - suite.NoError(err, "error printing instance") + suite.Require().NoError(err, "error printing instance") } } From ed924e8034a3ba89a74f657cf9f6ad75852e1bfd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 20 Jun 2025 07:08:08 +0000 Subject: [PATCH 109/179] Bump github.com/cloudbase/garm-provider-common Bumps [github.com/cloudbase/garm-provider-common](https://github.com/cloudbase/garm-provider-common) from 0.1.5-0.20250417155201-8ef03502d06e to 0.1.5. - [Release notes](https://github.com/cloudbase/garm-provider-common/releases) - [Commits](https://github.com/cloudbase/garm-provider-common/commits/v0.1.5) --- updated-dependencies: - dependency-name: github.com/cloudbase/garm-provider-common dependency-version: 0.1.5 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 ++-- vendor/modules.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/go.mod b/go.mod index ad7cdb66..978d0aa3 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ toolchain go1.23.6 require ( github.com/BurntSushi/toml v1.5.0 github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 - github.com/cloudbase/garm-provider-common v0.1.5-0.20250417155201-8ef03502d06e + github.com/cloudbase/garm-provider-common v0.1.5 github.com/felixge/httpsnoop v1.0.4 github.com/go-openapi/errors v0.22.1 github.com/go-openapi/runtime v0.28.0 diff --git a/go.sum b/go.sum index 99587a2c..769ec641 100644 --- a/go.sum +++ b/go.sum @@ -19,8 +19,8 @@ github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObk github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04= github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= -github.com/cloudbase/garm-provider-common v0.1.5-0.20250417155201-8ef03502d06e h1:giq2Prk9I/ez1dc4/r9jivf2jbhjX9apZ41TWQ5g3qE= -github.com/cloudbase/garm-provider-common v0.1.5-0.20250417155201-8ef03502d06e/go.mod h1:sSrTBtTc0q72MZdmS9EuLLdDhkmXZAqAwRIgEK0TqUo= +github.com/cloudbase/garm-provider-common v0.1.5 h1:aJL646l+VnZceQ2grbDYhWfxYpaQR2/QsUSD76kSZVs= +github.com/cloudbase/garm-provider-common v0.1.5/go.mod h1:2O51WbcfqRx5fDHyyJgIFq7KdTZZnefsM+aoOchyleU= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= diff --git a/vendor/modules.txt b/vendor/modules.txt index b269cd6f..5cd69fbb 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -21,7 +21,7 @@ github.com/cespare/xxhash/v2 # github.com/chzyer/readline v1.5.1 ## explicit; go 1.15 github.com/chzyer/readline -# github.com/cloudbase/garm-provider-common v0.1.5-0.20250417155201-8ef03502d06e +# github.com/cloudbase/garm-provider-common v0.1.5 ## explicit; go 1.23.0 github.com/cloudbase/garm-provider-common/defaults github.com/cloudbase/garm-provider-common/errors From 808af82e0d6185d543792daf1c7dec82f5c0223d Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sat, 21 Jun 2025 16:53:41 +0000 Subject: [PATCH 110/179] Add endpoint option to all relevant commands In case of ambiguity when using the name of a repo, org or enterprise, an --endpoint flag can be used to uniquely identify an entity against an endpoint. 
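As a usage illustration only (the subcommand spelling and the entity and endpoint names below are assumptions, not part of this change), disambiguating a repository that exists under more than one endpoint could look like:

    garm-cli repository show myorg/myrepo --endpoint github.com

When a name matches entities on multiple endpoints and --endpoint is not given, name resolution now fails with a hint to use the entity ID or the new flag.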
Signed-off-by: Gabriel Adrian Samfira --- cmd/garm-cli/cmd/enterprise.go | 10 +++++++--- cmd/garm-cli/cmd/organization.go | 22 ++++++++++++++++------ cmd/garm-cli/cmd/pool.go | 16 ++++++++++------ cmd/garm-cli/cmd/repository.go | 21 +++++++++++++++------ cmd/garm-cli/cmd/scalesets.go | 14 ++++++++------ cmd/garm-cli/cmd/util.go | 22 +++++++++++++++------- 6 files changed, 71 insertions(+), 34 deletions(-) diff --git a/cmd/garm-cli/cmd/enterprise.go b/cmd/garm-cli/cmd/enterprise.go index 0f688fe5..5c937b81 100644 --- a/cmd/garm-cli/cmd/enterprise.go +++ b/cmd/garm-cli/cmd/enterprise.go @@ -113,7 +113,7 @@ var enterpriseShowCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - enterpriseID, err := resolveEnterprise(args[0]) + enterpriseID, err := resolveEnterprise(args[0], enterpriseEndpoint) if err != nil { return err } @@ -146,7 +146,7 @@ var enterpriseDeleteCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - enterpriseID, err := resolveEnterprise(args[0]) + enterpriseID, err := resolveEnterprise(args[0], enterpriseEndpoint) if err != nil { return err } @@ -177,7 +177,7 @@ var enterpriseUpdateCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } - enterpriseID, err := resolveEnterprise(args[0]) + enterpriseID, err := resolveEnterprise(args[0], enterpriseEndpoint) if err != nil { return err } @@ -213,6 +213,10 @@ func init() { enterpriseUpdateCmd.Flags().StringVar(&enterpriseWebhookSecret, "webhook-secret", "", "The webhook secret for this enterprise") enterpriseUpdateCmd.Flags().StringVar(&enterpriseCreds, "credentials", "", "Credentials name. See credentials list.") enterpriseUpdateCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", "", "The balancing strategy to use when creating runners in pools matching requested labels.") + enterpriseUpdateCmd.Flags().StringVar(&enterpriseEndpoint, "endpoint", "", "When using the name of the enterprise, the endpoint must be specified when multiple enterprises with the same name exist.") + + enterpriseDeleteCmd.Flags().StringVar(&enterpriseEndpoint, "endpoint", "", "When using the name of the enterprise, the endpoint must be specified when multiple enterprises with the same name exist.") + enterpriseShowCmd.Flags().StringVar(&enterpriseEndpoint, "endpoint", "", "When using the name of the enterprise, the endpoint must be specified when multiple enterprises with the same name exist.") enterpriseCmd.AddCommand( enterpriseListCmd, diff --git a/cmd/garm-cli/cmd/organization.go b/cmd/garm-cli/cmd/organization.go index 4cb7222f..b16812fa 100644 --- a/cmd/garm-cli/cmd/organization.go +++ b/cmd/garm-cli/cmd/organization.go @@ -76,7 +76,7 @@ var orgWebhookInstallCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - orgID, err := resolveOrganization(args[0]) + orgID, err := resolveOrganization(args[0], orgEndpoint) if err != nil { return err } @@ -110,7 +110,7 @@ var orgHookInfoShowCmd = &cobra.Command{ if len(args) > 1 { return fmt.Errorf("too many arguments") } - orgID, err := resolveOrganization(args[0]) + orgID, err := resolveOrganization(args[0], orgEndpoint) if err != nil { return err } @@ -142,7 +142,7 @@ var orgWebhookUninstallCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - orgID, err := resolveOrganization(args[0]) + orgID, err := resolveOrganization(args[0], orgEndpoint) if err != nil { return err } @@ -230,7 +230,7 @@ var orgUpdateCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - orgID, err := resolveOrganization(args[0]) + orgID, err := 
resolveOrganization(args[0], orgEndpoint) if err != nil { return err } @@ -290,7 +290,7 @@ var orgShowCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - orgID, err := resolveOrganization(args[0]) + orgID, err := resolveOrganization(args[0], orgEndpoint) if err != nil { return err } @@ -323,7 +323,7 @@ var orgDeleteCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - orgID, err := resolveOrganization(args[0]) + orgID, err := resolveOrganization(args[0], orgEndpoint) if err != nil { return err } @@ -357,12 +357,22 @@ func init() { orgAddCmd.MarkFlagRequired("name") //nolint orgDeleteCmd.Flags().BoolVar(&keepOrgWebhook, "keep-webhook", false, "Do not delete any existing webhook when removing the organization from GARM.") + orgDeleteCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.") + + orgShowCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.") orgUpdateCmd.Flags().StringVar(&orgWebhookSecret, "webhook-secret", "", "The webhook secret for this organization") orgUpdateCmd.Flags().StringVar(&orgCreds, "credentials", "", "Credentials name. See credentials list.") orgUpdateCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", "", "The balancing strategy to use when creating runners in pools matching requested labels.") + orgUpdateCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.") orgWebhookInstallCmd.Flags().BoolVar(&insecureOrgWebhook, "insecure", false, "Ignore self signed certificate errors.") + orgWebhookInstallCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.") + + orgWebhookUninstallCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.") + + orgHookInfoShowCmd.Flags().StringVar(&orgEndpoint, "endpoint", "", "When using the name of the org, the endpoint must be specified when multiple organizations with the same name exist.") + orgWebhookCmd.AddCommand( orgWebhookInstallCmd, orgWebhookUninstallCmd, diff --git a/cmd/garm-cli/cmd/pool.go b/cmd/garm-cli/cmd/pool.go index 096210fa..0c667c4a 100644 --- a/cmd/garm-cli/cmd/pool.go +++ b/cmd/garm-cli/cmd/pool.go @@ -105,7 +105,7 @@ Example: switch len(args) { case 0: if cmd.Flags().Changed("repo") { - poolRepository, err = resolveRepository(poolRepository) + poolRepository, err = resolveRepository(poolRepository, endpointName) if err != nil { return err } @@ -113,7 +113,7 @@ Example: listRepoPoolsReq.RepoID = poolRepository response, err = apiCli.Repositories.ListRepoPools(listRepoPoolsReq, authToken) } else if cmd.Flags().Changed("org") { - poolOrganization, err = resolveOrganization(poolOrganization) + poolOrganization, err = resolveOrganization(poolOrganization, endpointName) if err != nil { return err } @@ -121,7 +121,7 @@ Example: listOrgPoolsReq.OrgID = poolOrganization response, err = apiCli.Organizations.ListOrgPools(listOrgPoolsReq, authToken) } else if cmd.Flags().Changed("enterprise") { - poolEnterprise, err = resolveEnterprise(poolEnterprise) + poolEnterprise, err = resolveEnterprise(poolEnterprise, endpointName) if 
err != nil { return err } @@ -262,7 +262,7 @@ var poolAddCmd = &cobra.Command{ var err error var response poolPayloadGetter if cmd.Flags().Changed("repo") { - poolRepository, err = resolveRepository(poolRepository) + poolRepository, err = resolveRepository(poolRepository, endpointName) if err != nil { return err } @@ -271,7 +271,7 @@ var poolAddCmd = &cobra.Command{ newRepoPoolReq.Body = newPoolParams response, err = apiCli.Repositories.CreateRepoPool(newRepoPoolReq, authToken) } else if cmd.Flags().Changed("org") { - poolOrganization, err = resolveOrganization(poolOrganization) + poolOrganization, err = resolveOrganization(poolOrganization, endpointName) if err != nil { return err } @@ -280,7 +280,7 @@ var poolAddCmd = &cobra.Command{ newOrgPoolReq.Body = newPoolParams response, err = apiCli.Organizations.CreateOrgPool(newOrgPoolReq, authToken) } else if cmd.Flags().Changed("enterprise") { - poolEnterprise, err = resolveEnterprise(poolEnterprise) + poolEnterprise, err = resolveEnterprise(poolEnterprise, endpointName) if err != nil { return err } @@ -411,6 +411,8 @@ func init() { poolListCmd.Flags().StringVarP(&poolEnterprise, "enterprise", "e", "", "List all pools within this enterprise.") poolListCmd.Flags().BoolVarP(&poolAll, "all", "a", false, "List all pools, regardless of org or repo.") poolListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.") + poolListCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.") + poolListCmd.MarkFlagsMutuallyExclusive("repo", "org", "all", "enterprise") poolUpdateCmd.Flags().StringVar(&poolImage, "image", "", "The provider-specific image name to use for runners in this pool.") @@ -444,6 +446,8 @@ func init() { poolAddCmd.Flags().UintVar(&poolRunnerBootstrapTimeout, "runner-bootstrap-timeout", 20, "Duration in minutes after which a runner is considered failed if it does not join Github.") poolAddCmd.Flags().UintVar(&poolMinIdleRunners, "min-idle-runners", 1, "Attempt to maintain a minimum of idle self-hosted runners of this type.") poolAddCmd.Flags().BoolVar(&poolEnabled, "enabled", false, "Enable this pool.") + poolAddCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.") + poolAddCmd.MarkFlagRequired("provider-name") //nolint poolAddCmd.MarkFlagRequired("image") //nolint poolAddCmd.MarkFlagRequired("flavor") //nolint diff --git a/cmd/garm-cli/cmd/repository.go b/cmd/garm-cli/cmd/repository.go index eef936da..cca1a7fe 100644 --- a/cmd/garm-cli/cmd/repository.go +++ b/cmd/garm-cli/cmd/repository.go @@ -78,7 +78,7 @@ var repoWebhookInstallCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - repoID, err := resolveRepository(args[0]) + repoID, err := resolveRepository(args[0], repoEndpoint) if err != nil { return err } @@ -113,7 +113,7 @@ var repoHookInfoShowCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - repoID, err := resolveRepository(args[0]) + repoID, err := resolveRepository(args[0], repoEndpoint) if err != nil { return err } @@ -146,7 +146,7 @@ var repoWebhookUninstallCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - repoID, err := resolveRepository(args[0]) + repoID, err := resolveRepository(args[0], repoEndpoint) if err != nil { return err } @@ -259,7 +259,7 @@ var repoUpdateCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - repoID, err 
:= resolveRepository(args[0]) + repoID, err := resolveRepository(args[0], repoEndpoint) if err != nil { return err } @@ -297,7 +297,7 @@ var repoShowCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - repoID, err := resolveRepository(args[0]) + repoID, err := resolveRepository(args[0], repoEndpoint) if err != nil { return err } @@ -330,7 +330,7 @@ var repoDeleteCmd = &cobra.Command{ return fmt.Errorf("too many arguments") } - repoID, err := resolveRepository(args[0]) + repoID, err := resolveRepository(args[0], repoEndpoint) if err != nil { return err } @@ -367,12 +367,21 @@ func init() { repoAddCmd.MarkFlagRequired("name") //nolint repoDeleteCmd.Flags().BoolVar(&keepRepoWebhook, "keep-webhook", false, "Do not delete any existing webhook when removing the repo from GARM.") + repoDeleteCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.") + + repoShowCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.") repoUpdateCmd.Flags().StringVar(&repoWebhookSecret, "webhook-secret", "", "The webhook secret for this repository. If you update this secret, you will have to manually update the secret in GitHub as well.") repoUpdateCmd.Flags().StringVar(&repoCreds, "credentials", "", "Credentials name. See credentials list.") repoUpdateCmd.Flags().StringVar(&poolBalancerType, "pool-balancer-type", "", "The balancing strategy to use when creating runners in pools matching requested labels.") + repoUpdateCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.") repoWebhookInstallCmd.Flags().BoolVar(&insecureRepoWebhook, "insecure", false, "Ignore self signed certificate errors.") + repoWebhookInstallCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.") + + repoWebhookUninstallCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.") + + repoHookInfoShowCmd.Flags().StringVar(&repoEndpoint, "endpoint", "", "When using the name of the repo, the endpoint must be specified when multiple repositories with the same name exist.") repoWebhookCmd.AddCommand( repoWebhookInstallCmd, diff --git a/cmd/garm-cli/cmd/scalesets.go b/cmd/garm-cli/cmd/scalesets.go index ece9b7a2..cf64c9fa 100644 --- a/cmd/garm-cli/cmd/scalesets.go +++ b/cmd/garm-cli/cmd/scalesets.go @@ -105,7 +105,7 @@ Example: switch len(args) { case 0: if cmd.Flags().Changed("repo") { - scalesetRepository, err = resolveRepository(scalesetRepository) + scalesetRepository, err = resolveRepository(scalesetRepository, endpointName) if err != nil { return err } @@ -113,7 +113,7 @@ Example: listRepoScaleSetsReq.RepoID = scalesetRepository response, err = apiCli.Repositories.ListRepoScaleSets(listRepoScaleSetsReq, authToken) } else if cmd.Flags().Changed("org") { - scalesetOrganization, err = resolveOrganization(scalesetOrganization) + scalesetOrganization, err = resolveOrganization(scalesetOrganization, endpointName) if err != nil { return err } @@ -121,7 +121,7 @@ Example: listOrgScaleSetsReq.OrgID = scalesetOrganization response, err = 
apiCli.Organizations.ListOrgScaleSets(listOrgScaleSetsReq, authToken) } else if cmd.Flags().Changed("enterprise") { - scalesetEnterprise, err = resolveEnterprise(scalesetEnterprise) + scalesetEnterprise, err = resolveEnterprise(scalesetEnterprise, endpointName) if err != nil { return err } @@ -256,7 +256,7 @@ var scaleSetAddCmd = &cobra.Command{ var err error var response scalesetPayloadGetter if cmd.Flags().Changed("repo") { - scalesetRepository, err = resolveRepository(scalesetRepository) + scalesetRepository, err = resolveRepository(scalesetRepository, endpointName) if err != nil { return err } @@ -265,7 +265,7 @@ var scaleSetAddCmd = &cobra.Command{ newRepoScaleSetReq.Body = newScaleSetParams response, err = apiCli.Repositories.CreateRepoScaleSet(newRepoScaleSetReq, authToken) } else if cmd.Flags().Changed("org") { - scalesetOrganization, err = resolveOrganization(scalesetOrganization) + scalesetOrganization, err = resolveOrganization(scalesetOrganization, endpointName) if err != nil { return err } @@ -274,7 +274,7 @@ var scaleSetAddCmd = &cobra.Command{ newOrgScaleSetReq.Body = newScaleSetParams response, err = apiCli.Organizations.CreateOrgScaleSet(newOrgScaleSetReq, authToken) } else if cmd.Flags().Changed("enterprise") { - scalesetEnterprise, err = resolveEnterprise(scalesetEnterprise) + scalesetEnterprise, err = resolveEnterprise(scalesetEnterprise, endpointName) if err != nil { return err } @@ -402,6 +402,7 @@ func init() { scalesetListCmd.Flags().StringVarP(&scalesetEnterprise, "enterprise", "e", "", "List all scale sets within this enterprise.") scalesetListCmd.Flags().BoolVarP(&scalesetAll, "all", "a", false, "List all scale sets, regardless of org or repo.") scalesetListCmd.MarkFlagsMutuallyExclusive("repo", "org", "all", "enterprise") + scalesetListCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.") scaleSetUpdateCmd.Flags().StringVar(&scalesetImage, "image", "", "The provider-specific image name to use for runners in this scale set.") scaleSetUpdateCmd.Flags().StringVar(&scalesetFlavor, "flavor", "", "The flavor to use for the runners in this scale set.") @@ -432,6 +433,7 @@ func init() { scaleSetAddCmd.Flags().UintVar(&scalesetRunnerBootstrapTimeout, "runner-bootstrap-timeout", 20, "Duration in minutes after which a runner is considered failed if it does not join Github.") scaleSetAddCmd.Flags().UintVar(&scalesetMinIdleRunners, "min-idle-runners", 1, "Attempt to maintain a minimum of idle self-hosted runners of this type.") scaleSetAddCmd.Flags().BoolVar(&scalesetEnabled, "enabled", false, "Enable this scale set.") + scaleSetAddCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.") scaleSetAddCmd.MarkFlagRequired("provider-name") //nolint scaleSetAddCmd.MarkFlagRequired("name") //nolint scaleSetAddCmd.MarkFlagRequired("image") //nolint diff --git a/cmd/garm-cli/cmd/util.go b/cmd/garm-cli/cmd/util.go index 584ad9c4..26f57abb 100644 --- a/cmd/garm-cli/cmd/util.go +++ b/cmd/garm-cli/cmd/util.go @@ -11,7 +11,7 @@ import ( apiClientRepos "github.com/cloudbase/garm/client/repositories" ) -func resolveRepository(nameOrID string) (string, error) { +func resolveRepository(nameOrID, endpoint string) (string, error) { if nameOrID == "" { return "", fmt.Errorf("missing repository name or ID") } @@ -30,6 +30,9 @@ func resolveRepository(nameOrID string) 
(string, error) { listReposReq := apiClientRepos.NewListReposParams() listReposReq.Owner = &parts[0] listReposReq.Name = &parts[1] + if endpoint != "" { + listReposReq.Endpoint = &endpoint + } response, err := apiCli.Repositories.ListRepos(listReposReq, authToken) if err != nil { return "", err @@ -39,12 +42,12 @@ func resolveRepository(nameOrID string) (string, error) { } if len(response.Payload) > 1 { - return "", fmt.Errorf("multiple repositories with the name %s exist, please use the repository ID", nameOrID) + return "", fmt.Errorf("multiple repositories with the name %s exist, please use the repository ID or specify the --endpoint parameter", nameOrID) } return response.Payload[0].ID, nil } -func resolveOrganization(nameOrID string) (string, error) { +func resolveOrganization(nameOrID, endpoint string) (string, error) { if nameOrID == "" { return "", fmt.Errorf("missing organization name or ID") } @@ -55,6 +58,9 @@ func resolveOrganization(nameOrID string) (string, error) { listOrgsReq := apiClientOrgs.NewListOrgsParams() listOrgsReq.Name = &nameOrID + if endpoint != "" { + listOrgsReq.Endpoint = &endpoint + } response, err := apiCli.Organizations.ListOrgs(listOrgsReq, authToken) if err != nil { return "", err @@ -65,13 +71,13 @@ func resolveOrganization(nameOrID string) (string, error) { } if len(response.Payload) > 1 { - return "", fmt.Errorf("multiple organizations with the name %s exist, please use the organization ID", nameOrID) + return "", fmt.Errorf("multiple organizations with the name %s exist, please use the organization ID or specify the --endpoint parameter", nameOrID) } return response.Payload[0].ID, nil } -func resolveEnterprise(nameOrID string) (string, error) { +func resolveEnterprise(nameOrID, endpoint string) (string, error) { if nameOrID == "" { return "", fmt.Errorf("missing enterprise name or ID") } @@ -82,7 +88,9 @@ func resolveEnterprise(nameOrID string) (string, error) { listEnterprisesReq := apiClientEnterprises.NewListEnterprisesParams() listEnterprisesReq.Name = &enterpriseName - listEnterprisesReq.Endpoint = &enterpriseEndpoint + if endpoint != "" { + listEnterprisesReq.Endpoint = &endpoint + } response, err := apiCli.Enterprises.ListEnterprises(listEnterprisesReq, authToken) if err != nil { return "", err @@ -93,7 +101,7 @@ func resolveEnterprise(nameOrID string) (string, error) { } if len(response.Payload) > 1 { - return "", fmt.Errorf("multiple enterprises with the name %s exist, please use the enterprise ID", nameOrID) + return "", fmt.Errorf("multiple enterprises with the name %s exist, please use the enterprise ID or specify the --endpoint parameter", nameOrID) } return response.Payload[0].ID, nil From 1ec99e8695bf902420136662b641aa194038a795 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sat, 31 May 2025 20:55:21 +0000 Subject: [PATCH 111/179] Some cleanup Signed-off-by: Gabriel Adrian Samfira --- params/requests.go | 6 --- workers/cache/tool_cache.go | 49 ++++++++--------------- workers/entity/controller_watcher.go | 57 ++++++++++++++++++++------- workers/entity/util.go | 1 + workers/entity/worker.go | 16 ++++++-- workers/entity/worker_watcher.go | 7 +--- workers/scaleset/scaleset.go | 33 +++++++++++----- workers/scaleset/scaleset_listener.go | 5 ++- 8 files changed, 104 insertions(+), 70 deletions(-) diff --git a/params/requests.go b/params/requests.go index 5be0e3a1..3f2fcfab 100644 --- a/params/requests.go +++ b/params/requests.go @@ -646,12 +646,6 @@ func (c CreateGiteaEndpointParams) Validate() error { return 
runnerErrors.NewBadRequestError("invalid api_base_url") } - switch url.Scheme { - case httpsScheme, httpScheme: - default: - return runnerErrors.NewBadRequestError("invalid api_base_url") - } - if c.BaseURL == "" { return runnerErrors.NewBadRequestError("missing base_url") } diff --git a/workers/cache/tool_cache.go b/workers/cache/tool_cache.go index 3df103ec..727c82b4 100644 --- a/workers/cache/tool_cache.go +++ b/workers/cache/tool_cache.go @@ -161,13 +161,9 @@ func (t *toolsUpdater) giteaUpdateLoop() { t.sleepWithCancel(time.Duration(randInt.Int64()) * time.Millisecond) tools, err := getTools() if err != nil { - if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update gitea tools: %q", err), 30); err != nil { - slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) - } + t.addStatusEvent(fmt.Sprintf("failed to update gitea tools: %q", err), params.EventError) } else { - if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventInfo, "successfully updated tools", 30); err != nil { - slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) - } + t.addStatusEvent("successfully updated tools", params.EventInfo) cache.SetGithubToolsCache(t.entity, tools) } @@ -184,15 +180,11 @@ func (t *toolsUpdater) giteaUpdateLoop() { case <-ticker.C: tools, err := getTools() if err != nil { - if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update gitea tools: %q", err), 30); err != nil { - slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) - } + t.addStatusEvent(fmt.Sprintf("failed to update gitea tools: %q", err), params.EventError) slog.DebugContext(t.ctx, "failed to update gitea tools", "error", err) continue } - if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventInfo, "successfully updated tools", 30); err != nil { - slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) - } + t.addStatusEvent("successfully updated tools", params.EventInfo) cache.SetGithubToolsCache(t.entity, tools) } } @@ -213,18 +205,13 @@ func (t *toolsUpdater) loop() { now := time.Now().UTC() if now.After(t.lastUpdate.Add(40 * time.Minute)) { if err := t.updateTools(); err != nil { - if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update tools: %q", err), 30); err != nil { - slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) - } - slog.ErrorContext(t.ctx, "initial tools update error", "error", err) + slog.ErrorContext(t.ctx, "updating tools", "error", err) + t.addStatusEvent(fmt.Sprintf("failed to update tools: %q", err), params.EventError) resetTime = now.Add(5 * time.Minute) - slog.ErrorContext(t.ctx, "initial tools update error", "error", err) } else { // Tools are usually valid for 1 hour. 
resetTime = t.lastUpdate.Add(40 * time.Minute) - if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventInfo, "successfully updated tools", 30); err != nil { - slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) - } + t.addStatusEvent("successfully updated tools", params.EventInfo) } } @@ -248,16 +235,12 @@ func (t *toolsUpdater) loop() { now = time.Now().UTC() if err := t.updateTools(); err != nil { slog.ErrorContext(t.ctx, "updating tools", "error", err) - if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update tools: %q", err), 30); err != nil { - slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) - } + t.addStatusEvent(fmt.Sprintf("failed to update tools: %q", err), params.EventError) resetTime = now.Add(5 * time.Minute) } else { // Tools are usually valid for 1 hour. resetTime = t.lastUpdate.Add(40 * time.Minute) - if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventInfo, "successfully updated tools", 30); err != nil { - slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) - } + t.addStatusEvent("successfully updated tools", params.EventInfo) } case <-t.reset: slog.DebugContext(t.ctx, "resetting tools updater") @@ -265,18 +248,20 @@ func (t *toolsUpdater) loop() { now = time.Now().UTC() if err := t.updateTools(); err != nil { slog.ErrorContext(t.ctx, "updating tools", "error", err) - if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to update tools: %q", err), 30); err != nil { - slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) - } + t.addStatusEvent(fmt.Sprintf("failed to update tools: %q", err), params.EventError) resetTime = now.Add(5 * time.Minute) } else { // Tools are usually valid for 1 hour. 
resetTime = t.lastUpdate.Add(40 * time.Minute) - if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, params.EventInfo, "successfully updated tools", 30); err != nil { - slog.ErrorContext(t.ctx, "failed to add entity event", "error", err) - } + t.addStatusEvent("successfully updated tools", params.EventInfo) } } timer.Stop() } } + +func (t *toolsUpdater) addStatusEvent(msg string, level params.EventLevel) { + if err := t.store.AddEntityEvent(t.ctx, t.entity, params.StatusEvent, level, msg, 30); err != nil { + slog.With(slog.Any("error", err)).Error("failed to add entity event") + } +} diff --git a/workers/entity/controller_watcher.go b/workers/entity/controller_watcher.go index 6bd3e173..d907d25a 100644 --- a/workers/entity/controller_watcher.go +++ b/workers/entity/controller_watcher.go @@ -14,6 +14,7 @@ package entity import ( + "fmt" "log/slog" dbCommon "github.com/cloudbase/garm/database/common" @@ -28,6 +29,7 @@ func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) { repo, ok := event.Payload.(params.Repository) if !ok { slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + return } entityGetter = repo case dbCommon.OrganizationEntityType: @@ -35,6 +37,7 @@ func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) { org, ok := event.Payload.(params.Organization) if !ok { slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + return } entityGetter = org case dbCommon.EnterpriseEntityType: @@ -42,6 +45,7 @@ func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) { ent, ok := event.Payload.(params.Enterprise) if !ok { slog.ErrorContext(c.ctx, "invalid payload for entity type", "entity_type", event.EntityType, "payload", event.Payload) + return } entityGetter = ent default: @@ -49,34 +53,63 @@ func (c *Controller) handleWatcherEvent(event dbCommon.ChangePayload) { return } - if entityGetter == nil { + entity, err := entityGetter.GetEntity() + if err != nil { + slog.ErrorContext(c.ctx, "getting entity from repository", "entity_type", event.EntityType, "payload", event.Payload, "error", err) return } switch event.Operation { case dbCommon.CreateOperation: slog.DebugContext(c.ctx, "got create operation") - c.handleWatcherCreateOperation(entityGetter, event) + c.handleWatcherCreateOperation(entity) case dbCommon.DeleteOperation: slog.DebugContext(c.ctx, "got delete operation") - c.handleWatcherDeleteOperation(entityGetter, event) + c.handleWatcherDeleteOperation(entity) + case dbCommon.UpdateOperation: + slog.DebugContext(c.ctx, "got update operation") + c.handleWatcherUpdateOperation(entity) default: slog.ErrorContext(c.ctx, "invalid operation type", "operation_type", event.Operation) return } } -func (c *Controller) handleWatcherCreateOperation(entityGetter params.EntityGetter, event dbCommon.ChangePayload) { +func (c *Controller) handleWatcherUpdateOperation(entity params.ForgeEntity) { c.mux.Lock() defer c.mux.Unlock() - entity, err := entityGetter.GetEntity() - if err != nil { - slog.ErrorContext(c.ctx, "getting entity from repository", "entity_type", event.EntityType, "payload", event.Payload, "error", err) + + worker, ok := c.Entities[entity.ID] + if !ok { + slog.InfoContext(c.ctx, "entity not found in worker list", "entity_id", entity.ID) return } + + if worker.IsRunning() { + // The worker is running. It watches for updates to its own entity. 
We only care about updates + // in the controller, if for some reason, the worker is not running. + slog.DebugContext(c.ctx, "worker is already running, skipping update", "entity_id", entity.ID) + return + } + + slog.InfoContext(c.ctx, "updating entity worker", "entity_id", entity.ID, "entity_type", entity.EntityType) + worker.Entity = entity + if err := worker.Start(); err != nil { + slog.ErrorContext(c.ctx, "starting worker after update", "entity_id", entity.ID, "error", err) + worker.addStatusEvent(fmt.Sprintf("failed to start worker for %s (%s) after update: %s", entity.ID, entity.ForgeURL(), err.Error()), params.EventError) + return + } + slog.InfoContext(c.ctx, "entity worker updated and successfully started", "entity_id", entity.ID, "entity_type", entity.EntityType) + worker.addStatusEvent(fmt.Sprintf("worker updated and successfully started for entity: %s (%s)", entity.ID, entity.ForgeURL()), params.EventInfo) +} + +func (c *Controller) handleWatcherCreateOperation(entity params.ForgeEntity) { + c.mux.Lock() + defer c.mux.Unlock() + worker, err := NewWorker(c.ctx, c.store, entity, c.providers) if err != nil { - slog.ErrorContext(c.ctx, "creating worker from repository", "entity_type", event.EntityType, "payload", event.Payload, "error", err) + slog.ErrorContext(c.ctx, "creating worker from repository", "entity_type", entity.EntityType, "error", err) return } @@ -89,14 +122,10 @@ func (c *Controller) handleWatcherCreateOperation(entityGetter params.EntityGett c.Entities[entity.ID] = worker } -func (c *Controller) handleWatcherDeleteOperation(entityGetter params.EntityGetter, event dbCommon.ChangePayload) { +func (c *Controller) handleWatcherDeleteOperation(entity params.ForgeEntity) { c.mux.Lock() defer c.mux.Unlock() - entity, err := entityGetter.GetEntity() - if err != nil { - slog.ErrorContext(c.ctx, "getting entity from repository", "entity_type", event.EntityType, "payload", event.Payload, "error", err) - return - } + worker, ok := c.Entities[entity.ID] if !ok { slog.InfoContext(c.ctx, "entity not found in worker list", "entity_id", entity.ID) diff --git a/workers/entity/util.go b/workers/entity/util.go index 2216c326..38e011b0 100644 --- a/workers/entity/util.go +++ b/workers/entity/util.go @@ -40,6 +40,7 @@ func composeControllerWatcherFilters() dbCommon.PayloadFilterFunc { watcher.WithAny( watcher.WithOperationTypeFilter(dbCommon.CreateOperation), watcher.WithOperationTypeFilter(dbCommon.DeleteOperation), + watcher.WithOperationTypeFilter(dbCommon.UpdateOperation), ), ) } diff --git a/workers/entity/worker.go b/workers/entity/worker.go index 583ab2c8..1cb40ad5 100644 --- a/workers/entity/worker.go +++ b/workers/entity/worker.go @@ -132,6 +132,12 @@ func (w *Worker) Start() (err error) { return nil } +func (w *Worker) IsRunning() bool { + w.mux.Lock() + defer w.mux.Unlock() + return w.running +} + // consolidateRunnerState will list all runners on GitHub for this entity, sort by // pool or scale set and pass those runners to the appropriate controller (pools or scale sets). 
// The controller will then pass along to their respective workers the list of runners @@ -212,9 +218,7 @@ func (w *Worker) consolidateRunnerLoop() { return } if err := w.consolidateRunnerState(); err != nil { - if err := w.store.AddEntityEvent(w.ctx, w.Entity, params.StatusEvent, params.EventError, fmt.Sprintf("failed to consolidate runner state: %q", err.Error()), 30); err != nil { - slog.With(slog.Any("error", err)).Error("failed to add entity event") - } + w.addStatusEvent(fmt.Sprintf("failed to consolidate runner state: %q", err.Error()), params.EventError) slog.With(slog.Any("error", err)).Error("failed to consolidate runner state") } case <-w.ctx.Done(): @@ -239,3 +243,9 @@ func (w *Worker) loop() { } } } + +func (w *Worker) addStatusEvent(msg string, level params.EventLevel) { + if err := w.store.AddEntityEvent(w.ctx, w.Entity, params.StatusEvent, level, msg, 30); err != nil { + slog.With(slog.Any("error", err)).Error("failed to add entity event") + } +} diff --git a/workers/entity/worker_watcher.go b/workers/entity/worker_watcher.go index 9acfbc60..ce8fd244 100644 --- a/workers/entity/worker_watcher.go +++ b/workers/entity/worker_watcher.go @@ -29,7 +29,6 @@ func (w *Worker) handleWorkerWatcherEvent(event dbCommon.ChangePayload) { switch event.EntityType { case entityType: w.handleEntityEventPayload(event) - return case dbCommon.GithubCredentialsEntityType, dbCommon.GiteaCredentialsEntityType: slog.DebugContext(w.ctx, "got github credentials payload event") w.handleEntityCredentialsEventPayload(event) @@ -90,20 +89,18 @@ func (w *Worker) handleEntityCredentialsEventPayload(event dbCommon.ChangePayloa return } - credentials := creds - switch event.Operation { case dbCommon.UpdateOperation: slog.DebugContext(w.ctx, "got delete operation") w.mux.Lock() defer w.mux.Unlock() - if w.Entity.Credentials.GetID() != credentials.GetID() { + if w.Entity.Credentials.GetID() != creds.GetID() { // The channel is buffered. We may get an old update. If credentials get updated // immediately after they are swapped on the entity, we may still get an update // pushed to the channel before the filters are swapped. We can ignore the update. return } - w.Entity.Credentials = credentials + w.Entity.Credentials = creds ghCli, err := github.Client(w.ctx, w.Entity) if err != nil { slog.ErrorContext(w.ctx, "creating github client", "entity_id", w.Entity.ID, "error", err) diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index e4faba70..5022217a 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -646,24 +646,38 @@ func (w *Worker) sleepWithCancel(sleepTime time.Duration) (canceled bool) { return true } +func (w *Worker) sessionLoopMayRun() bool { + w.mux.Lock() + defer w.mux.Unlock() + return w.scaleSet.Enabled +} + func (w *Worker) keepListenerAlive() { var backoff time.Duration +Loop: for { - w.mux.Lock() - if !w.scaleSet.Enabled { + if !w.sessionLoopMayRun() { if canceled := w.sleepWithCancel(2 * time.Second); canceled { - slog.DebugContext(w.ctx, "worker is stopped; exiting keepListenerAlive") - w.mux.Unlock() + slog.InfoContext(w.ctx, "worker is stopped; exiting keepListenerAlive") return } - w.mux.Unlock() continue } // noop if already started. If the scaleset was just enabled, we need to // start the listener here, or the <-w.listener.Wait() channel receive bellow // will block forever, even if we start the listener, as a nil channel will // block forever. 
- w.listener.Start() + if err := w.listener.Start(); err != nil { + slog.ErrorContext(w.ctx, "error starting listener", "error", err, "consumer_id", w.consumerID) + if canceled := w.sleepWithCancel(2 * time.Second); canceled { + slog.InfoContext(w.ctx, "worker is stopped; exiting keepListenerAlive") + w.mux.Unlock() + return + } + // we failed to start the listener. Try again. + w.mux.Unlock() + continue + } w.mux.Unlock() select { @@ -675,8 +689,9 @@ func (w *Worker) keepListenerAlive() { slog.DebugContext(w.ctx, "listener is stopped; attempting to restart") w.mux.Lock() if !w.scaleSet.Enabled { + w.listener.Stop() // cleanup w.mux.Unlock() - continue + continue Loop } w.mux.Unlock() for { @@ -684,7 +699,7 @@ func (w *Worker) keepListenerAlive() { w.listener.Stop() // cleanup if !w.scaleSet.Enabled { w.mux.Unlock() - break + continue Loop } slog.DebugContext(w.ctx, "attempting to restart") if err := w.listener.Start(); err != nil { @@ -707,7 +722,7 @@ func (w *Worker) keepListenerAlive() { continue } w.mux.Unlock() - break + continue Loop } } } diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index 76a321f4..d69092f5 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -59,6 +59,10 @@ func (l *scaleSetListener) Start() error { l.mux.Lock() defer l.mux.Unlock() + if l.running { + return nil + } + l.listenerCtx, l.cancelFunc = context.WithCancel(context.Background()) scaleSet := l.scaleSetHelper.GetScaleSet() scaleSetClient, err := l.scaleSetHelper.GetScaleSetClient() @@ -103,7 +107,6 @@ func (l *scaleSetListener) Stop() error { } } - l.messageSession.Close() l.running = false close(l.quit) l.cancelFunc() From 9279fb66cff35c6424778a77c9dbeb37a8e876cb Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 27 Jun 2025 07:18:54 +0000 Subject: [PATCH 112/179] Set http transport config This change sets the github client http transport options to mirror those of the default transport from Go, with the addition of the TLSClientConfig. Signed-off-by: Gabriel Adrian Samfira --- Makefile | 2 +- params/params.go | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index bce60045..f5337783 100644 --- a/Makefile +++ b/Makefile @@ -106,7 +106,7 @@ $(LOCALBIN): GOLANGCI_LINT ?= $(LOCALBIN)/golangci-lint ## Tool Versions -GOLANGCI_LINT_VERSION ?= v1.61.0 +GOLANGCI_LINT_VERSION ?= v1.64.8 .PHONY: golangci-lint golangci-lint: $(GOLANGCI_LINT) ## Download golangci-lint locally if necessary. If wrong version is installed, it will be overwritten. 
diff --git a/params/params.go b/params/params.go index c44891bd..ed9403b2 100644 --- a/params/params.go +++ b/params/params.go @@ -22,6 +22,7 @@ import ( "encoding/json" "encoding/pem" "fmt" + "net" "net/http" "time" @@ -642,11 +643,22 @@ func (g GithubCredentials) GetHTTPClient(ctx context.Context) (*http.Client, err } } + dialer := &net.Dialer{ + Timeout: 30 * time.Second, + KeepAlive: 30 * time.Second, + } + httpTransport := &http.Transport{ + Proxy: http.ProxyFromEnvironment, + DialContext: dialer.DialContext, TLSClientConfig: &tls.Config{ RootCAs: roots, MinVersion: tls.VersionTLS12, }, + ForceAttemptHTTP2: true, + IdleConnTimeout: 90 * time.Second, + TLSHandshakeTimeout: 10 * time.Second, + ExpectContinueTimeout: 1 * time.Second, } var tc *http.Client From 529ce8b7a8b40ae6971420cfdee5cee4aecbc89e Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 27 Jun 2025 07:18:54 +0000 Subject: [PATCH 113/179] Set http transport config This change sets the github client http transport options to mirror those of the default transport from Go, with the addition of the TLSClientConfig. Signed-off-by: Gabriel Adrian Samfira --- params/params.go | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/params/params.go b/params/params.go index c9d4fb94..3a0e8435 100644 --- a/params/params.go +++ b/params/params.go @@ -23,6 +23,7 @@ import ( "encoding/pem" "fmt" "math" + "net" "net/http" "time" @@ -912,11 +913,22 @@ func (g ForgeCredentials) GetHTTPClient(ctx context.Context) (*http.Client, erro } } + dialer := &net.Dialer{ + Timeout: 30 * time.Second, + KeepAlive: 30 * time.Second, + } + httpTransport := &http.Transport{ + Proxy: http.ProxyFromEnvironment, + DialContext: dialer.DialContext, TLSClientConfig: &tls.Config{ RootCAs: roots, MinVersion: tls.VersionTLS12, }, + ForceAttemptHTTP2: true, + IdleConnTimeout: 90 * time.Second, + TLSHandshakeTimeout: 10 * time.Second, + ExpectContinueTimeout: 1 * time.Second, } var tc *http.Client From 7c8241579dd70d91ae6a1757e3fca2a64f774d0b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Jul 2025 09:02:05 +0000 Subject: [PATCH 114/179] Bump gorm.io/datatypes from 1.2.5 to 1.2.6 Bumps [gorm.io/datatypes](https://github.com/go-gorm/datatypes) from 1.2.5 to 1.2.6. - [Release notes](https://github.com/go-gorm/datatypes/releases) - [Commits](https://github.com/go-gorm/datatypes/compare/v1.2.5...v1.2.6) --- updated-dependencies: - dependency-name: gorm.io/datatypes dependency-version: 1.2.6 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 8 ++++---- vendor/gorm.io/datatypes/json.go | 24 +++++++++++++++--------- vendor/modules.txt | 2 +- 4 files changed, 21 insertions(+), 15 deletions(-) diff --git a/go.mod b/go.mod index 978d0aa3..4a7cb39d 100644 --- a/go.mod +++ b/go.mod @@ -34,7 +34,7 @@ require ( golang.org/x/sync v0.15.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 - gorm.io/datatypes v1.2.5 + gorm.io/datatypes v1.2.6 gorm.io/driver/mysql v1.6.0 gorm.io/driver/sqlite v1.6.0 gorm.io/gorm v1.30.0 diff --git a/go.sum b/go.sum index 769ec641..ac6ee48b 100644 --- a/go.sum +++ b/go.sum @@ -219,15 +219,15 @@ gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gorm.io/datatypes v1.2.5 h1:9UogU3jkydFVW1bIVVeoYsTpLRgwDVW3rHfJG6/Ek9I= -gorm.io/datatypes v1.2.5/go.mod h1:I5FUdlKpLb5PMqeMQhm30CQ6jXP8Rj89xkTeCSAaAD4= +gorm.io/datatypes v1.2.6 h1:KafLdXvFUhzNeL2ncm03Gl3eTLONQfNKZ+wJ+9Y4Nck= +gorm.io/datatypes v1.2.6/go.mod h1:M2iO+6S3hhi4nAyYe444Pcb0dcIiOMJ7QHaUXxyiNZY= gorm.io/driver/mysql v1.6.0 h1:eNbLmNTpPpTOVZi8MMxCi2aaIm0ZpInbORNXDwyLGvg= gorm.io/driver/mysql v1.6.0/go.mod h1:D/oCC2GWK3M/dqoLxnOlaNKmXz8WNTfcS9y5ovaSqKo= gorm.io/driver/postgres v1.5.0 h1:u2FXTy14l45qc3UeCJ7QaAXZmZfDDv0YrthvmRq1l0U= gorm.io/driver/postgres v1.5.0/go.mod h1:FUZXzO+5Uqg5zzwzv4KK49R8lvGIyscBOqYrtI1Ce9A= gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ= gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8= -gorm.io/driver/sqlserver v1.5.4 h1:xA+Y1KDNspv79q43bPyjDMUgHoYHLhXYmdFcYPobg8g= -gorm.io/driver/sqlserver v1.5.4/go.mod h1:+frZ/qYmuna11zHPlh5oc2O6ZA/lS88Keb0XSH1Zh/g= +gorm.io/driver/sqlserver v1.6.0 h1:VZOBQVsVhkHU/NzNhRJKoANt5pZGQAS1Bwc6m6dgfnc= +gorm.io/driver/sqlserver v1.6.0/go.mod h1:WQzt4IJo/WHKnckU9jXBLMJIVNMVeTu25dnOzehntWw= gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs= gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/vendor/gorm.io/datatypes/json.go b/vendor/gorm.io/datatypes/json.go index 79476781..97e6b4aa 100644 --- a/vendor/gorm.io/datatypes/json.go +++ b/vendor/gorm.io/datatypes/json.go @@ -35,16 +35,20 @@ func (j *JSON) Scan(value interface{}) error { return nil } var bytes []byte - switch v := value.(type) { - case []byte: - if len(v) > 0 { - bytes = make([]byte, len(v)) - copy(bytes, v) + if s, ok := value.(fmt.Stringer); ok { + bytes = []byte(s.String()) + } else { + switch v := value.(type) { + case []byte: + if len(v) > 0 { + bytes = make([]byte, len(v)) + copy(bytes, v) + } + case string: + bytes = []byte(v) + default: + return errors.New(fmt.Sprint("Failed to unmarshal JSONB value:", value)) } - case string: - bytes = []byte(v) - default: - return errors.New(fmt.Sprint("Failed to unmarshal JSONB value:", value)) } result := json.RawMessage(bytes) @@ -394,6 +398,8 @@ func (jsonSet *JSONSetExpression) Build(builder clause.Builder) { break } stmt.AddVar(builder, gorm.Expr("CAST(? 
AS JSON)", string(b))) + case reflect.Bool: + builder.WriteString(strconv.FormatBool(rv.Bool())) default: stmt.AddVar(builder, value) } diff --git a/vendor/modules.txt b/vendor/modules.txt index 5cd69fbb..5b88c936 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -355,7 +355,7 @@ gopkg.in/natefinch/lumberjack.v2 # gopkg.in/yaml.v3 v3.0.1 ## explicit gopkg.in/yaml.v3 -# gorm.io/datatypes v1.2.5 +# gorm.io/datatypes v1.2.6 ## explicit; go 1.19 gorm.io/datatypes # gorm.io/driver/mysql v1.6.0 From 3853f8bd944da31bc50a0380b3a085a273ff6d49 Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Wed, 2 Jul 2025 14:04:41 +0200 Subject: [PATCH 115/179] improve usability of podman in Makefile --- Makefile | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index f5337783..3747ad24 100644 --- a/Makefile +++ b/Makefile @@ -6,8 +6,9 @@ export SHELLOPTS:=$(if $(SHELLOPTS),$(SHELLOPTS):)pipefail:errexit GEN_PASSWORD=$(shell (/usr/bin/apg -n1 -m32)) IMAGE_TAG = garm-build -USER_ID=$(shell ((docker --version | grep -q podman) && echo "0" || id -u)) -USER_GROUP=$(shell ((docker --version | grep -q podman) && echo "0" || id -g)) +IMAGE_BUILDER=$(shell (which docker || which podman)) +USER_ID=$(shell (($(IMAGE_BUILDER) --version | grep -q podman) && echo "0" || id -u)) +USER_GROUP=$(shell (($(IMAGE_BUILDER) --version | grep -q podman) && echo "0" || id -g)) ROOTDIR=$(dir $(abspath $(lastword $(MAKEFILE_LIST)))) GOPATH ?= $(shell go env GOPATH) VERSION ?= $(shell git describe --tags --match='v[0-9]*' --dirty --always) @@ -32,9 +33,9 @@ default: build .PHONY : build-static test install-lint-deps lint go-test fmt fmtcheck verify-vendor verify create-release-files release build-static: ## Build garm statically @echo Building garm - docker build --tag $(IMAGE_TAG) -f Dockerfile.build-static . + $(IMAGE_BUILDER) build -v /etc/ssl/certs/ca-certificates.crt:/etc/ssl/certs/ca-certificates.crt --tag $(IMAGE_TAG) -f Dockerfile.build-static . mkdir -p build - docker run --rm -e USER_ID=$(USER_ID) -e GARM_REF=$(GARM_REF) -e USER_GROUP=$(USER_GROUP) -v $(PWD)/build:/build/output:z $(IMAGE_TAG) /build-static.sh + $(IMAGE_BUILDER) run --rm -e USER_ID=$(USER_ID) -e GARM_REF=$(GARM_REF) -e USER_GROUP=$(USER_GROUP) -v $(PWD)/build:/build/output:z $(IMAGE_TAG) /build-static.sh @echo Binaries are available in $(PWD)/build clean: ## Clean up build artifacts From 18902f884a6c6853100e8c68cb6ba24bbc9bf87a Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 6 Jul 2025 19:22:43 +0000 Subject: [PATCH 116/179] Use JoinPath() and relative paths Use JoinPath() in newActionsRequest() and make sure we pass relative paths to it. This should fix scale sets on GHES. 
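For clarity, here is a small, self-contained Go sketch (the host name and path prefix are invented for illustration) of the behavior this change relies on: url.URL.Parse treats an absolute path as a new path and drops any prefix present on the base URL, while JoinPath appends to it, which matters when the actions service URL carries a path prefix, as it apparently does on GHES.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	base, _ := url.Parse("https://ghes.example.com/_services/pipelines")

	// Resolving an absolute path replaces the base path, losing the prefix.
	resolved, _ := base.Parse("/runnerscalesets/1/generatejitconfig")
	fmt.Println(resolved) // https://ghes.example.com/runnerscalesets/1/generatejitconfig

	// JoinPath with a relative path keeps the existing prefix intact.
	joined := base.JoinPath("runnerscalesets/1/generatejitconfig")
	fmt.Println(joined) // https://ghes.example.com/_services/pipelines/runnerscalesets/1/generatejitconfig
}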
Signed-off-by: Gabriel Adrian Samfira --- util/github/scalesets/runners.go | 10 ++-------- util/github/scalesets/util.go | 6 +----- workers/scaleset/scaleset.go | 3 --- 3 files changed, 3 insertions(+), 16 deletions(-) diff --git a/util/github/scalesets/runners.go b/util/github/scalesets/runners.go index 252d7e94..79c321bc 100644 --- a/util/github/scalesets/runners.go +++ b/util/github/scalesets/runners.go @@ -45,14 +45,8 @@ func (s *ScaleSetClient) GenerateJitRunnerConfig(ctx context.Context, runnerName return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to ensure admin info: %w", err) } - serviceURL, err := s.actionsServiceInfo.GetURL() - if err != nil { - return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to get pipeline URL: %w", err) - } - jitConfigPath := fmt.Sprintf("/%s/%d/generatejitconfig", scaleSetEndpoint, scaleSetID) - jitConfigURL := serviceURL.JoinPath(jitConfigPath) - - req, err := s.newActionsRequest(ctx, http.MethodPost, jitConfigURL.String(), bytes.NewBuffer(body)) + jitConfigPath := fmt.Sprintf("%s/%d/generatejitconfig", scaleSetEndpoint, scaleSetID) + req, err := s.newActionsRequest(ctx, http.MethodPost, jitConfigPath, bytes.NewBuffer(body)) if err != nil { return params.RunnerScaleSetJitRunnerConfig{}, fmt.Errorf("failed to create request: %w", err) } diff --git a/util/github/scalesets/util.go b/util/github/scalesets/util.go index 15c3a5cf..ac5468bf 100644 --- a/util/github/scalesets/util.go +++ b/util/github/scalesets/util.go @@ -31,11 +31,7 @@ func (s *ScaleSetClient) newActionsRequest(ctx context.Context, method, path str return nil, fmt.Errorf("failed to get pipeline URL: %w", err) } - uri, err := actionsURI.Parse(path) - if err != nil { - return nil, fmt.Errorf("failed to parse path: %w", err) - } - + uri := actionsURI.JoinPath(path) q := uri.Query() if q.Get("api-version") == "" { q.Set("api-version", "6.0-preview") diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index 5022217a..5226d981 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -671,14 +671,11 @@ Loop: slog.ErrorContext(w.ctx, "error starting listener", "error", err, "consumer_id", w.consumerID) if canceled := w.sleepWithCancel(2 * time.Second); canceled { slog.InfoContext(w.ctx, "worker is stopped; exiting keepListenerAlive") - w.mux.Unlock() return } // we failed to start the listener. Try again. - w.mux.Unlock() continue } - w.mux.Unlock() select { case <-w.quit: From 6ae3b25b4d7960b8168bd8eabc403152c4d02ea5 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 6 Jul 2025 21:17:31 +0000 Subject: [PATCH 117/179] Use the -v flag on podman only Docker does not support the --volume flag at build time. This needs to be done in the Dockerfile directly on the RUN stanza. Will update in a future PR, until then, just set the flag for podman. 
Signed-off-by: Gabriel Adrian Samfira --- Makefile | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 3747ad24..9a09e999 100644 --- a/Makefile +++ b/Makefile @@ -7,8 +7,9 @@ GEN_PASSWORD=$(shell (/usr/bin/apg -n1 -m32)) IMAGE_TAG = garm-build IMAGE_BUILDER=$(shell (which docker || which podman)) -USER_ID=$(shell (($(IMAGE_BUILDER) --version | grep -q podman) && echo "0" || id -u)) -USER_GROUP=$(shell (($(IMAGE_BUILDER) --version | grep -q podman) && echo "0" || id -g)) +IS_PODMAN=$(shell (($(IMAGE_BUILDER) --version | grep -q podman) && echo "yes" || echo "no")) +USER_ID=$(if $(filter yes,$(IS_PODMAN)),0,$(shell id -u)) +USER_GROUP=$(if $(filter yes,$(IS_PODMAN)),0,$(shell id -g)) ROOTDIR=$(dir $(abspath $(lastword $(MAKEFILE_LIST)))) GOPATH ?= $(shell go env GOPATH) VERSION ?= $(shell git describe --tags --match='v[0-9]*' --dirty --always) @@ -21,6 +22,11 @@ export CREDENTIALS_NAME ?= test-garm-creds export WORKFLOW_FILE_NAME ?= test.yml export GARM_ADMIN_USERNAME ?= admin +ifeq ($(IS_PODMAN),yes) + EXTRA_ARGS := -v /etc/ssl/certs/ca-certificates.crt:/etc/ssl/certs/ca-certificates.crt +endif + + .PHONY: help help: ## Display this help. @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-20s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) @@ -33,7 +39,7 @@ default: build .PHONY : build-static test install-lint-deps lint go-test fmt fmtcheck verify-vendor verify create-release-files release build-static: ## Build garm statically @echo Building garm - $(IMAGE_BUILDER) build -v /etc/ssl/certs/ca-certificates.crt:/etc/ssl/certs/ca-certificates.crt --tag $(IMAGE_TAG) -f Dockerfile.build-static . + $(IMAGE_BUILDER) build $(EXTRA_ARGS) --tag $(IMAGE_TAG) -f Dockerfile.build-static . mkdir -p build $(IMAGE_BUILDER) run --rm -e USER_ID=$(USER_ID) -e GARM_REF=$(GARM_REF) -e USER_GROUP=$(USER_GROUP) -v $(PWD)/build:/build/output:z $(IMAGE_TAG) /build-static.sh @echo Binaries are available in $(PWD)/build From b23bca73bc4368934dd846a70d2b8c0243957a37 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Mon, 7 Jul 2025 04:54:10 +0000 Subject: [PATCH 118/179] Fix sleepWithCancel and ensure closed channel * time.NewTicker will panic if the duration is 0. Make it return early if duration is 0. * Return a pre-closed channel in Wait() instead of nil. Ensures receiver will not block forever. 
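The second bullet leans on a small Go idiom worth spelling out. The sketch below is illustrative only (the wait helper is made up, though the pre-closed package-level channel mirrors the scaleset_listener.go change): receiving from a nil channel blocks forever, while receiving from an already-closed channel returns immediately.

package main

import "fmt"

// closed is created once and closed in init, so it can be handed out as a
// channel that is always ready to receive from.
var closed = make(chan struct{})

func init() { close(closed) }

// wait mimics a Wait()-style method: when the loop never started, return the
// pre-closed channel instead of nil so callers do not hang.
func wait(running bool, done chan struct{}) <-chan struct{} {
	if !running {
		return closed
	}
	return done
}

func main() {
	<-wait(false, nil) // returns immediately; a nil channel here would block forever
	fmt.Println("did not block")
}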
Signed-off-by: Gabriel Adrian Samfira --- runner/pool/pool.go | 3 +++ workers/cache/tool_cache.go | 3 +++ workers/scaleset/scaleset.go | 8 ++++---- workers/scaleset/scaleset_listener.go | 10 +++++++--- 4 files changed, 17 insertions(+), 7 deletions(-) diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 17285e1d..9f6d3c0e 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -1369,6 +1369,9 @@ func (r *basePoolManager) deleteInstanceFromProvider(ctx context.Context, instan } func (r *basePoolManager) sleepWithCancel(sleepTime time.Duration) (canceled bool) { + if sleepTime == 0 { + return false + } ticker := time.NewTicker(sleepTime) defer ticker.Stop() diff --git a/workers/cache/tool_cache.go b/workers/cache/tool_cache.go index 727c82b4..6cbcc716 100644 --- a/workers/cache/tool_cache.go +++ b/workers/cache/tool_cache.go @@ -135,6 +135,9 @@ func (t *toolsUpdater) Reset() { } func (t *toolsUpdater) sleepWithCancel(sleepTime time.Duration) (canceled bool) { + if sleepTime == 0 { + return false + } ticker := time.NewTicker(sleepTime) defer ticker.Stop() diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index 5226d981..f5b34400 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -634,6 +634,9 @@ func (w *Worker) loop() { } func (w *Worker) sleepWithCancel(sleepTime time.Duration) (canceled bool) { + if sleepTime == 0 { + return false + } ticker := time.NewTicker(sleepTime) defer ticker.Stop() @@ -663,10 +666,7 @@ Loop: } continue } - // noop if already started. If the scaleset was just enabled, we need to - // start the listener here, or the <-w.listener.Wait() channel receive bellow - // will block forever, even if we start the listener, as a nil channel will - // block forever. + // noop if already started. if err := w.listener.Start(); err != nil { slog.ErrorContext(w.ctx, "error starting listener", "error", err, "consumer_id", w.consumerID) if canceled := w.sleepWithCancel(2 * time.Second); canceled { diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index d69092f5..1274ee59 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -25,6 +25,10 @@ import ( "github.com/cloudbase/garm/util/github/scalesets" ) +var closed = make(chan struct{}) + +func init() { close(closed) } + func newListener(ctx context.Context, scaleSetHelper scaleSetHelper) *scaleSetListener { return &scaleSetListener{ ctx: ctx, @@ -278,11 +282,11 @@ func (l *scaleSetListener) loop() { func (l *scaleSetListener) Wait() <-chan struct{} { l.mux.Lock() + defer l.mux.Unlock() + if !l.running { slog.DebugContext(l.ctx, "scale set listener is not running") - l.mux.Unlock() - return nil + return closed } - l.mux.Unlock() return l.loopExited } From f9b1b26f8031dd2fa3afefd44779ab7e90816399 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Jul 2025 06:28:17 +0000 Subject: [PATCH 119/179] Bump golang.org/x/crypto from 0.39.0 to 0.40.0 Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.39.0 to 0.40.0. - [Commits](https://github.com/golang/crypto/compare/v0.39.0...v0.40.0) --- updated-dependencies: - dependency-name: golang.org/x/crypto dependency-version: 0.40.0 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- go.mod | 8 +- go.sum | 16 +-- vendor/golang.org/x/sync/errgroup/errgroup.go | 118 +++++------------- vendor/golang.org/x/sys/unix/zerrors_linux.go | 25 +++- .../x/sys/unix/zerrors_linux_386.go | 1 + .../x/sys/unix/zerrors_linux_amd64.go | 1 + .../x/sys/unix/zerrors_linux_arm.go | 1 + .../x/sys/unix/zerrors_linux_arm64.go | 1 + .../x/sys/unix/zerrors_linux_loong64.go | 1 + .../x/sys/unix/zerrors_linux_mips.go | 1 + .../x/sys/unix/zerrors_linux_mips64.go | 1 + .../x/sys/unix/zerrors_linux_mips64le.go | 1 + .../x/sys/unix/zerrors_linux_mipsle.go | 1 + .../x/sys/unix/zerrors_linux_ppc.go | 1 + .../x/sys/unix/zerrors_linux_ppc64.go | 1 + .../x/sys/unix/zerrors_linux_ppc64le.go | 1 + .../x/sys/unix/zerrors_linux_riscv64.go | 1 + .../x/sys/unix/zerrors_linux_s390x.go | 1 + .../x/sys/unix/zerrors_linux_sparc64.go | 1 + vendor/golang.org/x/sys/unix/ztypes_linux.go | 108 ++++++++++++++-- .../golang.org/x/sys/unix/ztypes_linux_386.go | 16 +++ .../x/sys/unix/ztypes_linux_amd64.go | 16 +++ .../golang.org/x/sys/unix/ztypes_linux_arm.go | 16 +++ .../x/sys/unix/ztypes_linux_arm64.go | 16 +++ .../x/sys/unix/ztypes_linux_loong64.go | 16 +++ .../x/sys/unix/ztypes_linux_mips.go | 16 +++ .../x/sys/unix/ztypes_linux_mips64.go | 16 +++ .../x/sys/unix/ztypes_linux_mips64le.go | 16 +++ .../x/sys/unix/ztypes_linux_mipsle.go | 16 +++ .../golang.org/x/sys/unix/ztypes_linux_ppc.go | 16 +++ .../x/sys/unix/ztypes_linux_ppc64.go | 16 +++ .../x/sys/unix/ztypes_linux_ppc64le.go | 16 +++ .../x/sys/unix/ztypes_linux_riscv64.go | 16 +++ .../x/sys/unix/ztypes_linux_s390x.go | 16 +++ .../x/sys/unix/ztypes_linux_sparc64.go | 16 +++ vendor/modules.txt | 8 +- 36 files changed, 423 insertions(+), 115 deletions(-) diff --git a/go.mod b/go.mod index 4a7cb39d..bc622e0e 100644 --- a/go.mod +++ b/go.mod @@ -28,10 +28,10 @@ require ( github.com/prometheus/client_golang v1.22.0 github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 - golang.org/x/crypto v0.39.0 + golang.org/x/crypto v0.40.0 golang.org/x/mod v0.25.0 golang.org/x/oauth2 v0.30.0 - golang.org/x/sync v0.15.0 + golang.org/x/sync v0.16.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 gorm.io/datatypes v1.2.6 @@ -88,8 +88,8 @@ require ( go.opentelemetry.io/otel/metric v1.36.0 // indirect go.opentelemetry.io/otel/trace v1.36.0 // indirect golang.org/x/net v0.41.0 // indirect - golang.org/x/sys v0.33.0 // indirect - golang.org/x/text v0.26.0 // indirect + golang.org/x/sys v0.34.0 // indirect + golang.org/x/text v0.27.0 // indirect google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index ac6ee48b..494ac211 100644 --- a/go.sum +++ b/go.sum @@ -187,23 +187,23 @@ go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucg go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= -golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= -golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= +golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= golang.org/x/mod v0.25.0/go.mod 
h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= -golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= -golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= -golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= +golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= +golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= diff --git a/vendor/golang.org/x/sync/errgroup/errgroup.go b/vendor/golang.org/x/sync/errgroup/errgroup.go index cb6bb9ad..1d8cffae 100644 --- a/vendor/golang.org/x/sync/errgroup/errgroup.go +++ b/vendor/golang.org/x/sync/errgroup/errgroup.go @@ -12,8 +12,6 @@ package errgroup import ( "context" "fmt" - "runtime" - "runtime/debug" "sync" ) @@ -33,10 +31,6 @@ type Group struct { errOnce sync.Once err error - - mu sync.Mutex - panicValue any // = PanicError | PanicValue; non-nil if some Group.Go goroutine panicked. - abnormal bool // some Group.Go goroutine terminated abnormally (panic or goexit). } func (g *Group) done() { @@ -56,22 +50,13 @@ func WithContext(ctx context.Context) (*Group, context.Context) { return &Group{cancel: cancel}, ctx } -// Wait blocks until all function calls from the Go method have returned -// normally, then returns the first non-nil error (if any) from them. -// -// If any of the calls panics, Wait panics with a [PanicValue]; -// and if any of them calls [runtime.Goexit], Wait calls runtime.Goexit. +// Wait blocks until all function calls from the Go method have returned, then +// returns the first non-nil error (if any) from them. func (g *Group) Wait() error { g.wg.Wait() if g.cancel != nil { g.cancel(g.err) } - if g.panicValue != nil { - panic(g.panicValue) - } - if g.abnormal { - runtime.Goexit() - } return g.err } @@ -81,53 +66,31 @@ func (g *Group) Wait() error { // It blocks until the new goroutine can be added without the number of // goroutines in the group exceeding the configured limit. 
// -// The first goroutine in the group that returns a non-nil error, panics, or -// invokes [runtime.Goexit] will cancel the associated Context, if any. +// The first goroutine in the group that returns a non-nil error will +// cancel the associated Context, if any. The error will be returned +// by Wait. func (g *Group) Go(f func() error) { if g.sem != nil { g.sem <- token{} } - g.add(f) -} - -func (g *Group) add(f func() error) { g.wg.Add(1) go func() { defer g.done() - normalReturn := false - defer func() { - if normalReturn { - return - } - v := recover() - g.mu.Lock() - defer g.mu.Unlock() - if !g.abnormal { - if g.cancel != nil { - g.cancel(g.err) - } - g.abnormal = true - } - if v != nil && g.panicValue == nil { - switch v := v.(type) { - case error: - g.panicValue = PanicError{ - Recovered: v, - Stack: debug.Stack(), - } - default: - g.panicValue = PanicValue{ - Recovered: v, - Stack: debug.Stack(), - } - } - } - }() - err := f() - normalReturn = true - if err != nil { + // It is tempting to propagate panics from f() + // up to the goroutine that calls Wait, but + // it creates more problems than it solves: + // - it delays panics arbitrarily, + // making bugs harder to detect; + // - it turns f's panic stack into a mere value, + // hiding it from crash-monitoring tools; + // - it risks deadlocks that hide the panic entirely, + // if f's panic leaves the program in a state + // that prevents the Wait call from being reached. + // See #53757, #74275, #74304, #74306. + + if err := f(); err != nil { g.errOnce.Do(func() { g.err = err if g.cancel != nil { @@ -152,7 +115,19 @@ func (g *Group) TryGo(f func() error) bool { } } - g.add(f) + g.wg.Add(1) + go func() { + defer g.done() + + if err := f(); err != nil { + g.errOnce.Do(func() { + g.err = err + if g.cancel != nil { + g.cancel(g.err) + } + }) + } + }() return true } @@ -174,34 +149,3 @@ func (g *Group) SetLimit(n int) { } g.sem = make(chan token, n) } - -// PanicError wraps an error recovered from an unhandled panic -// when calling a function passed to Go or TryGo. -type PanicError struct { - Recovered error - Stack []byte // result of call to [debug.Stack] -} - -func (p PanicError) Error() string { - if len(p.Stack) > 0 { - return fmt.Sprintf("recovered from errgroup.Group: %v\n%s", p.Recovered, p.Stack) - } - return fmt.Sprintf("recovered from errgroup.Group: %v", p.Recovered) -} - -func (p PanicError) Unwrap() error { return p.Recovered } - -// PanicValue wraps a value that does not implement the error interface, -// recovered from an unhandled panic when calling a function passed to Go or -// TryGo. 
-type PanicValue struct { - Recovered any - Stack []byte // result of call to [debug.Stack] -} - -func (p PanicValue) String() string { - if len(p.Stack) > 0 { - return fmt.Sprintf("recovered from errgroup.Group: %v\n%s", p.Recovered, p.Stack) - } - return fmt.Sprintf("recovered from errgroup.Group: %v", p.Recovered) -} diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux.go b/vendor/golang.org/x/sys/unix/zerrors_linux.go index 4f432bfe..9e7a6c5a 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux.go @@ -319,6 +319,7 @@ const ( AUDIT_INTEGRITY_POLICY_RULE = 0x70f AUDIT_INTEGRITY_RULE = 0x70d AUDIT_INTEGRITY_STATUS = 0x70a + AUDIT_INTEGRITY_USERSPACE = 0x710 AUDIT_IPC = 0x517 AUDIT_IPC_SET_PERM = 0x51f AUDIT_IPE_ACCESS = 0x58c @@ -843,9 +844,9 @@ const ( DM_UUID_FLAG = 0x4000 DM_UUID_LEN = 0x81 DM_VERSION = 0xc138fd00 - DM_VERSION_EXTRA = "-ioctl (2023-03-01)" + DM_VERSION_EXTRA = "-ioctl (2025-01-17)" DM_VERSION_MAJOR = 0x4 - DM_VERSION_MINOR = 0x30 + DM_VERSION_MINOR = 0x31 DM_VERSION_PATCHLEVEL = 0x0 DT_BLK = 0x6 DT_CHR = 0x2 @@ -941,6 +942,8 @@ const ( ETHER_FLOW = 0x12 ETHTOOL_BUSINFO_LEN = 0x20 ETHTOOL_EROMVERS_LEN = 0x20 + ETHTOOL_FAMILY_NAME = "ethtool" + ETHTOOL_FAMILY_VERSION = 0x1 ETHTOOL_FEC_AUTO = 0x2 ETHTOOL_FEC_BASER = 0x10 ETHTOOL_FEC_LLRS = 0x20 @@ -1203,6 +1206,9 @@ const ( FAN_DENY = 0x2 FAN_ENABLE_AUDIT = 0x40 FAN_EPIDFD = -0x2 + FAN_ERRNO_BITS = 0x8 + FAN_ERRNO_MASK = 0xff + FAN_ERRNO_SHIFT = 0x18 FAN_EVENT_INFO_TYPE_DFID = 0x3 FAN_EVENT_INFO_TYPE_DFID_NAME = 0x2 FAN_EVENT_INFO_TYPE_ERROR = 0x5 @@ -1210,6 +1216,7 @@ const ( FAN_EVENT_INFO_TYPE_NEW_DFID_NAME = 0xc FAN_EVENT_INFO_TYPE_OLD_DFID_NAME = 0xa FAN_EVENT_INFO_TYPE_PIDFD = 0x4 + FAN_EVENT_INFO_TYPE_RANGE = 0x6 FAN_EVENT_METADATA_LEN = 0x18 FAN_EVENT_ON_CHILD = 0x8000000 FAN_FS_ERROR = 0x8000 @@ -1240,6 +1247,7 @@ const ( FAN_OPEN_EXEC = 0x1000 FAN_OPEN_EXEC_PERM = 0x40000 FAN_OPEN_PERM = 0x10000 + FAN_PRE_ACCESS = 0x100000 FAN_Q_OVERFLOW = 0x4000 FAN_RENAME = 0x10000000 FAN_REPORT_DFID_NAME = 0xc00 @@ -2787,7 +2795,7 @@ const ( RTAX_UNSPEC = 0x0 RTAX_WINDOW = 0x3 RTA_ALIGNTO = 0x4 - RTA_MAX = 0x1e + RTA_MAX = 0x1f RTCF_DIRECTSRC = 0x4000000 RTCF_DOREDIRECT = 0x1000000 RTCF_LOG = 0x2000000 @@ -2864,10 +2872,12 @@ const ( RTM_DELACTION = 0x31 RTM_DELADDR = 0x15 RTM_DELADDRLABEL = 0x49 + RTM_DELANYCAST = 0x3d RTM_DELCHAIN = 0x65 RTM_DELLINK = 0x11 RTM_DELLINKPROP = 0x6d RTM_DELMDB = 0x55 + RTM_DELMULTICAST = 0x39 RTM_DELNEIGH = 0x1d RTM_DELNETCONF = 0x51 RTM_DELNEXTHOP = 0x69 @@ -2917,11 +2927,13 @@ const ( RTM_NEWACTION = 0x30 RTM_NEWADDR = 0x14 RTM_NEWADDRLABEL = 0x48 + RTM_NEWANYCAST = 0x3c RTM_NEWCACHEREPORT = 0x60 RTM_NEWCHAIN = 0x64 RTM_NEWLINK = 0x10 RTM_NEWLINKPROP = 0x6c RTM_NEWMDB = 0x54 + RTM_NEWMULTICAST = 0x38 RTM_NEWNDUSEROPT = 0x44 RTM_NEWNEIGH = 0x1c RTM_NEWNEIGHTBL = 0x40 @@ -2987,11 +2999,12 @@ const ( RUSAGE_THREAD = 0x1 RWF_APPEND = 0x10 RWF_ATOMIC = 0x40 + RWF_DONTCACHE = 0x80 RWF_DSYNC = 0x2 RWF_HIPRI = 0x1 RWF_NOAPPEND = 0x20 RWF_NOWAIT = 0x8 - RWF_SUPPORTED = 0x7f + RWF_SUPPORTED = 0xff RWF_SYNC = 0x4 RWF_WRITE_LIFE_NOT_SET = 0x0 SCHED_BATCH = 0x3 @@ -3271,6 +3284,7 @@ const ( STATX_BTIME = 0x800 STATX_CTIME = 0x80 STATX_DIOALIGN = 0x2000 + STATX_DIO_READ_ALIGN = 0x20000 STATX_GID = 0x10 STATX_INO = 0x100 STATX_MNT_ID = 0x1000 @@ -3322,7 +3336,7 @@ const ( TASKSTATS_GENL_NAME = "TASKSTATS" TASKSTATS_GENL_VERSION = 0x1 TASKSTATS_TYPE_MAX = 0x6 - TASKSTATS_VERSION = 0xe + TASKSTATS_VERSION = 0xf TCIFLUSH = 0x0 TCIOFF = 0x2 TCIOFLUSH = 
0x2 @@ -3503,6 +3517,7 @@ const ( TP_STATUS_WRONG_FORMAT = 0x4 TRACEFS_MAGIC = 0x74726163 TS_COMM_LEN = 0x20 + UBI_IOCECNFO = 0xc01c6f06 UDF_SUPER_MAGIC = 0x15013346 UDP_CORK = 0x1 UDP_ENCAP = 0x64 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go index 75207613..a8c421e2 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go @@ -372,6 +372,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go index c68acda5..9a88d181 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go @@ -373,6 +373,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go index a8c607ab..7cb6a867 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go @@ -378,6 +378,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go index 18563dd8..d0ecd2c5 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go @@ -371,6 +371,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go index 22912cda..7a2940ae 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go @@ -365,6 +365,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go index 29344eb3..d14ca8f2 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go @@ -371,6 +371,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x1004 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x1006 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x1006 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go index 20d51fb9..2da1bac1 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go @@ -371,6 +371,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x1004 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x1006 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x1006 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go index 321b6090..28727514 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go +++ 
b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go @@ -371,6 +371,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x1004 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x1006 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x1006 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go index 9bacdf1e..7f287b54 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go @@ -371,6 +371,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x1004 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x1006 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x1006 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go index c2242726..7e5f9e6a 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go @@ -426,6 +426,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x10 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x12 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go index 6270c8ee..37c87952 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go @@ -430,6 +430,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x10 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x12 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go index 9966c194..52201336 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go @@ -430,6 +430,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x10 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x12 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go index 848e5fcc..4bfe2b5b 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go @@ -362,6 +362,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go index 669b2adb..e3cffb86 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go @@ -434,6 +434,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go index 4834e575..c219c8db 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go @@ -473,6 +473,7 @@ const ( SO_RCVBUFFORCE = 0x100b SO_RCVLOWAT = 0x800 SO_RCVMARK = 0x54 + SO_RCVPRIORITY = 0x5b SO_RCVTIMEO = 0x2000 SO_RCVTIMEO_NEW = 0x44 SO_RCVTIMEO_OLD = 0x2000 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux.go b/vendor/golang.org/x/sys/unix/ztypes_linux.go index a46abe64..8bcac283 100644 --- 
a/vendor/golang.org/x/sys/unix/ztypes_linux.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux.go @@ -114,7 +114,7 @@ type Statx_t struct { Atomic_write_unit_min uint32 Atomic_write_unit_max uint32 Atomic_write_segments_max uint32 - _ [1]uint32 + Dio_read_offset_align uint32 _ [9]uint64 } @@ -2226,8 +2226,11 @@ const ( NFT_PAYLOAD_LL_HEADER = 0x0 NFT_PAYLOAD_NETWORK_HEADER = 0x1 NFT_PAYLOAD_TRANSPORT_HEADER = 0x2 + NFT_PAYLOAD_INNER_HEADER = 0x3 + NFT_PAYLOAD_TUN_HEADER = 0x4 NFT_PAYLOAD_CSUM_NONE = 0x0 NFT_PAYLOAD_CSUM_INET = 0x1 + NFT_PAYLOAD_CSUM_SCTP = 0x2 NFT_PAYLOAD_L4CSUM_PSEUDOHDR = 0x1 NFTA_PAYLOAD_UNSPEC = 0x0 NFTA_PAYLOAD_DREG = 0x1 @@ -3802,7 +3805,16 @@ const ( ETHTOOL_MSG_PSE_GET = 0x24 ETHTOOL_MSG_PSE_SET = 0x25 ETHTOOL_MSG_RSS_GET = 0x26 - ETHTOOL_MSG_USER_MAX = 0x2d + ETHTOOL_MSG_PLCA_GET_CFG = 0x27 + ETHTOOL_MSG_PLCA_SET_CFG = 0x28 + ETHTOOL_MSG_PLCA_GET_STATUS = 0x29 + ETHTOOL_MSG_MM_GET = 0x2a + ETHTOOL_MSG_MM_SET = 0x2b + ETHTOOL_MSG_MODULE_FW_FLASH_ACT = 0x2c + ETHTOOL_MSG_PHY_GET = 0x2d + ETHTOOL_MSG_TSCONFIG_GET = 0x2e + ETHTOOL_MSG_TSCONFIG_SET = 0x2f + ETHTOOL_MSG_USER_MAX = 0x2f ETHTOOL_MSG_KERNEL_NONE = 0x0 ETHTOOL_MSG_STRSET_GET_REPLY = 0x1 ETHTOOL_MSG_LINKINFO_GET_REPLY = 0x2 @@ -3842,7 +3854,17 @@ const ( ETHTOOL_MSG_MODULE_NTF = 0x24 ETHTOOL_MSG_PSE_GET_REPLY = 0x25 ETHTOOL_MSG_RSS_GET_REPLY = 0x26 - ETHTOOL_MSG_KERNEL_MAX = 0x2e + ETHTOOL_MSG_PLCA_GET_CFG_REPLY = 0x27 + ETHTOOL_MSG_PLCA_GET_STATUS_REPLY = 0x28 + ETHTOOL_MSG_PLCA_NTF = 0x29 + ETHTOOL_MSG_MM_GET_REPLY = 0x2a + ETHTOOL_MSG_MM_NTF = 0x2b + ETHTOOL_MSG_MODULE_FW_FLASH_NTF = 0x2c + ETHTOOL_MSG_PHY_GET_REPLY = 0x2d + ETHTOOL_MSG_PHY_NTF = 0x2e + ETHTOOL_MSG_TSCONFIG_GET_REPLY = 0x2f + ETHTOOL_MSG_TSCONFIG_SET_REPLY = 0x30 + ETHTOOL_MSG_KERNEL_MAX = 0x30 ETHTOOL_FLAG_COMPACT_BITSETS = 0x1 ETHTOOL_FLAG_OMIT_REPLY = 0x2 ETHTOOL_FLAG_STATS = 0x4 @@ -3949,7 +3971,12 @@ const ( ETHTOOL_A_RINGS_TCP_DATA_SPLIT = 0xb ETHTOOL_A_RINGS_CQE_SIZE = 0xc ETHTOOL_A_RINGS_TX_PUSH = 0xd - ETHTOOL_A_RINGS_MAX = 0x10 + ETHTOOL_A_RINGS_RX_PUSH = 0xe + ETHTOOL_A_RINGS_TX_PUSH_BUF_LEN = 0xf + ETHTOOL_A_RINGS_TX_PUSH_BUF_LEN_MAX = 0x10 + ETHTOOL_A_RINGS_HDS_THRESH = 0x11 + ETHTOOL_A_RINGS_HDS_THRESH_MAX = 0x12 + ETHTOOL_A_RINGS_MAX = 0x12 ETHTOOL_A_CHANNELS_UNSPEC = 0x0 ETHTOOL_A_CHANNELS_HEADER = 0x1 ETHTOOL_A_CHANNELS_RX_MAX = 0x2 @@ -4015,7 +4042,9 @@ const ( ETHTOOL_A_TSINFO_TX_TYPES = 0x3 ETHTOOL_A_TSINFO_RX_FILTERS = 0x4 ETHTOOL_A_TSINFO_PHC_INDEX = 0x5 - ETHTOOL_A_TSINFO_MAX = 0x6 + ETHTOOL_A_TSINFO_STATS = 0x6 + ETHTOOL_A_TSINFO_HWTSTAMP_PROVIDER = 0x7 + ETHTOOL_A_TSINFO_MAX = 0x7 ETHTOOL_A_CABLE_TEST_UNSPEC = 0x0 ETHTOOL_A_CABLE_TEST_HEADER = 0x1 ETHTOOL_A_CABLE_TEST_MAX = 0x1 @@ -4613,6 +4642,7 @@ const ( NL80211_ATTR_AKM_SUITES = 0x4c NL80211_ATTR_AP_ISOLATE = 0x60 NL80211_ATTR_AP_SETTINGS_FLAGS = 0x135 + NL80211_ATTR_ASSOC_SPP_AMSDU = 0x14a NL80211_ATTR_AUTH_DATA = 0x9c NL80211_ATTR_AUTH_TYPE = 0x35 NL80211_ATTR_BANDS = 0xef @@ -4623,6 +4653,7 @@ const ( NL80211_ATTR_BSS_BASIC_RATES = 0x24 NL80211_ATTR_BSS = 0x2f NL80211_ATTR_BSS_CTS_PROT = 0x1c + NL80211_ATTR_BSS_DUMP_INCLUDE_USE_DATA = 0x147 NL80211_ATTR_BSS_HT_OPMODE = 0x6d NL80211_ATTR_BSSID = 0xf5 NL80211_ATTR_BSS_SELECT = 0xe3 @@ -4682,6 +4713,7 @@ const ( NL80211_ATTR_DTIM_PERIOD = 0xd NL80211_ATTR_DURATION = 0x57 NL80211_ATTR_EHT_CAPABILITY = 0x136 + NL80211_ATTR_EMA_RNR_ELEMS = 0x145 NL80211_ATTR_EML_CAPABILITY = 0x13d NL80211_ATTR_EXT_CAPA = 0xa9 NL80211_ATTR_EXT_CAPA_MASK = 0xaa @@ -4717,6 +4749,7 @@ const ( NL80211_ATTR_HIDDEN_SSID = 0x7e 
NL80211_ATTR_HT_CAPABILITY = 0x1f NL80211_ATTR_HT_CAPABILITY_MASK = 0x94 + NL80211_ATTR_HW_TIMESTAMP_ENABLED = 0x144 NL80211_ATTR_IE_ASSOC_RESP = 0x80 NL80211_ATTR_IE = 0x2a NL80211_ATTR_IE_PROBE_RESP = 0x7f @@ -4747,9 +4780,10 @@ const ( NL80211_ATTR_MAC_HINT = 0xc8 NL80211_ATTR_MAC_MASK = 0xd7 NL80211_ATTR_MAX_AP_ASSOC_STA = 0xca - NL80211_ATTR_MAX = 0x14d + NL80211_ATTR_MAX = 0x150 NL80211_ATTR_MAX_CRIT_PROT_DURATION = 0xb4 NL80211_ATTR_MAX_CSA_COUNTERS = 0xce + NL80211_ATTR_MAX_HW_TIMESTAMP_PEERS = 0x143 NL80211_ATTR_MAX_MATCH_SETS = 0x85 NL80211_ATTR_MAX_NUM_AKM_SUITES = 0x13c NL80211_ATTR_MAX_NUM_PMKIDS = 0x56 @@ -4774,9 +4808,12 @@ const ( NL80211_ATTR_MGMT_SUBTYPE = 0x29 NL80211_ATTR_MLD_ADDR = 0x13a NL80211_ATTR_MLD_CAPA_AND_OPS = 0x13e + NL80211_ATTR_MLO_LINK_DISABLED = 0x146 NL80211_ATTR_MLO_LINK_ID = 0x139 NL80211_ATTR_MLO_LINKS = 0x138 NL80211_ATTR_MLO_SUPPORT = 0x13b + NL80211_ATTR_MLO_TTLM_DLINK = 0x148 + NL80211_ATTR_MLO_TTLM_ULINK = 0x149 NL80211_ATTR_MNTR_FLAGS = 0x17 NL80211_ATTR_MPATH_INFO = 0x1b NL80211_ATTR_MPATH_NEXT_HOP = 0x1a @@ -4809,12 +4846,14 @@ const ( NL80211_ATTR_PORT_AUTHORIZED = 0x103 NL80211_ATTR_POWER_RULE_MAX_ANT_GAIN = 0x5 NL80211_ATTR_POWER_RULE_MAX_EIRP = 0x6 + NL80211_ATTR_POWER_RULE_PSD = 0x8 NL80211_ATTR_PREV_BSSID = 0x4f NL80211_ATTR_PRIVACY = 0x46 NL80211_ATTR_PROBE_RESP = 0x91 NL80211_ATTR_PROBE_RESP_OFFLOAD = 0x90 NL80211_ATTR_PROTOCOL_FEATURES = 0xad NL80211_ATTR_PS_STATE = 0x5d + NL80211_ATTR_PUNCT_BITMAP = 0x142 NL80211_ATTR_QOS_MAP = 0xc7 NL80211_ATTR_RADAR_BACKGROUND = 0x134 NL80211_ATTR_RADAR_EVENT = 0xa8 @@ -4943,7 +4982,9 @@ const ( NL80211_ATTR_WIPHY_FREQ = 0x26 NL80211_ATTR_WIPHY_FREQ_HINT = 0xc9 NL80211_ATTR_WIPHY_FREQ_OFFSET = 0x122 + NL80211_ATTR_WIPHY_INTERFACE_COMBINATIONS = 0x14c NL80211_ATTR_WIPHY_NAME = 0x2 + NL80211_ATTR_WIPHY_RADIOS = 0x14b NL80211_ATTR_WIPHY_RETRY_LONG = 0x3e NL80211_ATTR_WIPHY_RETRY_SHORT = 0x3d NL80211_ATTR_WIPHY_RTS_THRESHOLD = 0x40 @@ -4978,6 +5019,8 @@ const ( NL80211_BAND_ATTR_IFTYPE_DATA = 0x9 NL80211_BAND_ATTR_MAX = 0xd NL80211_BAND_ATTR_RATES = 0x2 + NL80211_BAND_ATTR_S1G_CAPA = 0xd + NL80211_BAND_ATTR_S1G_MCS_NSS_SET = 0xc NL80211_BAND_ATTR_VHT_CAPA = 0x8 NL80211_BAND_ATTR_VHT_MCS_SET = 0x7 NL80211_BAND_IFTYPE_ATTR_EHT_CAP_MAC = 0x8 @@ -5001,6 +5044,10 @@ const ( NL80211_BSS_BEACON_INTERVAL = 0x4 NL80211_BSS_BEACON_TSF = 0xd NL80211_BSS_BSSID = 0x1 + NL80211_BSS_CANNOT_USE_6GHZ_PWR_MISMATCH = 0x2 + NL80211_BSS_CANNOT_USE_NSTR_NONPRIMARY = 0x1 + NL80211_BSS_CANNOT_USE_REASONS = 0x18 + NL80211_BSS_CANNOT_USE_UHB_PWR_MISMATCH = 0x2 NL80211_BSS_CAPABILITY = 0x5 NL80211_BSS_CHAIN_SIGNAL = 0x13 NL80211_BSS_CHAN_WIDTH_10 = 0x1 @@ -5032,6 +5079,9 @@ const ( NL80211_BSS_STATUS = 0x9 NL80211_BSS_STATUS_IBSS_JOINED = 0x2 NL80211_BSS_TSF = 0x3 + NL80211_BSS_USE_FOR = 0x17 + NL80211_BSS_USE_FOR_MLD_LINK = 0x2 + NL80211_BSS_USE_FOR_NORMAL = 0x1 NL80211_CHAN_HT20 = 0x1 NL80211_CHAN_HT40MINUS = 0x2 NL80211_CHAN_HT40PLUS = 0x3 @@ -5117,7 +5167,8 @@ const ( NL80211_CMD_LEAVE_IBSS = 0x2c NL80211_CMD_LEAVE_MESH = 0x45 NL80211_CMD_LEAVE_OCB = 0x6d - NL80211_CMD_MAX = 0x9b + NL80211_CMD_LINKS_REMOVED = 0x9a + NL80211_CMD_MAX = 0x9d NL80211_CMD_MICHAEL_MIC_FAILURE = 0x29 NL80211_CMD_MODIFY_LINK_STA = 0x97 NL80211_CMD_NAN_MATCH = 0x78 @@ -5161,6 +5212,7 @@ const ( NL80211_CMD_SET_COALESCE = 0x65 NL80211_CMD_SET_CQM = 0x3f NL80211_CMD_SET_FILS_AAD = 0x92 + NL80211_CMD_SET_HW_TIMESTAMP = 0x99 NL80211_CMD_SET_INTERFACE = 0x6 NL80211_CMD_SET_KEY = 0xa NL80211_CMD_SET_MAC_ACL = 0x5d @@ -5180,6 +5232,7 @@ const ( 
NL80211_CMD_SET_SAR_SPECS = 0x8c NL80211_CMD_SET_STATION = 0x12 NL80211_CMD_SET_TID_CONFIG = 0x89 + NL80211_CMD_SET_TID_TO_LINK_MAPPING = 0x9b NL80211_CMD_SET_TX_BITRATE_MASK = 0x39 NL80211_CMD_SET_WDS_PEER = 0x42 NL80211_CMD_SET_WIPHY = 0x2 @@ -5247,6 +5300,7 @@ const ( NL80211_EXT_FEATURE_AIRTIME_FAIRNESS = 0x21 NL80211_EXT_FEATURE_AP_PMKSA_CACHING = 0x22 NL80211_EXT_FEATURE_AQL = 0x28 + NL80211_EXT_FEATURE_AUTH_AND_DEAUTH_RANDOM_TA = 0x40 NL80211_EXT_FEATURE_BEACON_PROTECTION_CLIENT = 0x2e NL80211_EXT_FEATURE_BEACON_PROTECTION = 0x29 NL80211_EXT_FEATURE_BEACON_RATE_HE = 0x36 @@ -5262,6 +5316,7 @@ const ( NL80211_EXT_FEATURE_CQM_RSSI_LIST = 0xd NL80211_EXT_FEATURE_DATA_ACK_SIGNAL_SUPPORT = 0x1b NL80211_EXT_FEATURE_DEL_IBSS_STA = 0x2c + NL80211_EXT_FEATURE_DFS_CONCURRENT = 0x43 NL80211_EXT_FEATURE_DFS_OFFLOAD = 0x19 NL80211_EXT_FEATURE_ENABLE_FTM_RESPONDER = 0x20 NL80211_EXT_FEATURE_EXT_KEY_ID = 0x24 @@ -5281,9 +5336,12 @@ const ( NL80211_EXT_FEATURE_OCE_PROBE_REQ_DEFERRAL_SUPPRESSION = 0x14 NL80211_EXT_FEATURE_OCE_PROBE_REQ_HIGH_TX_RATE = 0x13 NL80211_EXT_FEATURE_OPERATING_CHANNEL_VALIDATION = 0x31 + NL80211_EXT_FEATURE_OWE_OFFLOAD_AP = 0x42 + NL80211_EXT_FEATURE_OWE_OFFLOAD = 0x41 NL80211_EXT_FEATURE_POWERED_ADDR_CHANGE = 0x3d NL80211_EXT_FEATURE_PROTECTED_TWT = 0x2b NL80211_EXT_FEATURE_PROT_RANGE_NEGO_AND_MEASURE = 0x39 + NL80211_EXT_FEATURE_PUNCT = 0x3e NL80211_EXT_FEATURE_RADAR_BACKGROUND = 0x3c NL80211_EXT_FEATURE_RRM = 0x1 NL80211_EXT_FEATURE_SAE_OFFLOAD_AP = 0x33 @@ -5295,8 +5353,10 @@ const ( NL80211_EXT_FEATURE_SCHED_SCAN_BAND_SPECIFIC_RSSI_THOLD = 0x23 NL80211_EXT_FEATURE_SCHED_SCAN_RELATIVE_RSSI = 0xc NL80211_EXT_FEATURE_SECURE_LTF = 0x37 + NL80211_EXT_FEATURE_SECURE_NAN = 0x3f NL80211_EXT_FEATURE_SECURE_RTT = 0x38 NL80211_EXT_FEATURE_SET_SCAN_DWELL = 0x5 + NL80211_EXT_FEATURE_SPP_AMSDU_SUPPORT = 0x44 NL80211_EXT_FEATURE_STA_TX_PWR = 0x25 NL80211_EXT_FEATURE_TXQS = 0x1c NL80211_EXT_FEATURE_UNSOL_BCAST_PROBE_RESP = 0x35 @@ -5343,7 +5403,10 @@ const ( NL80211_FREQUENCY_ATTR_2MHZ = 0x16 NL80211_FREQUENCY_ATTR_4MHZ = 0x17 NL80211_FREQUENCY_ATTR_8MHZ = 0x18 + NL80211_FREQUENCY_ATTR_ALLOW_6GHZ_VLP_AP = 0x21 + NL80211_FREQUENCY_ATTR_CAN_MONITOR = 0x20 NL80211_FREQUENCY_ATTR_DFS_CAC_TIME = 0xd + NL80211_FREQUENCY_ATTR_DFS_CONCURRENT = 0x1d NL80211_FREQUENCY_ATTR_DFS_STATE = 0x7 NL80211_FREQUENCY_ATTR_DFS_TIME = 0x8 NL80211_FREQUENCY_ATTR_DISABLED = 0x2 @@ -5357,6 +5420,8 @@ const ( NL80211_FREQUENCY_ATTR_NO_160MHZ = 0xc NL80211_FREQUENCY_ATTR_NO_20MHZ = 0x10 NL80211_FREQUENCY_ATTR_NO_320MHZ = 0x1a + NL80211_FREQUENCY_ATTR_NO_6GHZ_AFC_CLIENT = 0x1f + NL80211_FREQUENCY_ATTR_NO_6GHZ_VLP_CLIENT = 0x1e NL80211_FREQUENCY_ATTR_NO_80MHZ = 0xb NL80211_FREQUENCY_ATTR_NO_EHT = 0x1b NL80211_FREQUENCY_ATTR_NO_HE = 0x13 @@ -5364,8 +5429,11 @@ const ( NL80211_FREQUENCY_ATTR_NO_HT40_PLUS = 0xa NL80211_FREQUENCY_ATTR_NO_IBSS = 0x3 NL80211_FREQUENCY_ATTR_NO_IR = 0x3 + NL80211_FREQUENCY_ATTR_NO_UHB_AFC_CLIENT = 0x1f + NL80211_FREQUENCY_ATTR_NO_UHB_VLP_CLIENT = 0x1e NL80211_FREQUENCY_ATTR_OFFSET = 0x14 NL80211_FREQUENCY_ATTR_PASSIVE_SCAN = 0x3 + NL80211_FREQUENCY_ATTR_PSD = 0x1c NL80211_FREQUENCY_ATTR_RADAR = 0x5 NL80211_FREQUENCY_ATTR_WMM = 0x12 NL80211_FTM_RESP_ATTR_CIVICLOC = 0x3 @@ -5430,6 +5498,7 @@ const ( NL80211_IFTYPE_STATION = 0x2 NL80211_IFTYPE_UNSPECIFIED = 0x0 NL80211_IFTYPE_WDS = 0x5 + NL80211_KCK_EXT_LEN_32 = 0x20 NL80211_KCK_EXT_LEN = 0x18 NL80211_KCK_LEN = 0x10 NL80211_KEK_EXT_LEN = 0x20 @@ -5458,6 +5527,7 @@ const ( NL80211_MAX_SUPP_HT_RATES = 0x4d NL80211_MAX_SUPP_RATES = 0x20 
NL80211_MAX_SUPP_REG_RULES = 0x80 + NL80211_MAX_SUPP_SELECTORS = 0x80 NL80211_MBSSID_CONFIG_ATTR_EMA = 0x5 NL80211_MBSSID_CONFIG_ATTR_INDEX = 0x3 NL80211_MBSSID_CONFIG_ATTR_MAX = 0x5 @@ -5703,11 +5773,16 @@ const ( NL80211_RADAR_PRE_CAC_EXPIRED = 0x4 NL80211_RATE_INFO_10_MHZ_WIDTH = 0xb NL80211_RATE_INFO_160_MHZ_WIDTH = 0xa + NL80211_RATE_INFO_16_MHZ_WIDTH = 0x1d + NL80211_RATE_INFO_1_MHZ_WIDTH = 0x19 + NL80211_RATE_INFO_2_MHZ_WIDTH = 0x1a NL80211_RATE_INFO_320_MHZ_WIDTH = 0x12 NL80211_RATE_INFO_40_MHZ_WIDTH = 0x3 + NL80211_RATE_INFO_4_MHZ_WIDTH = 0x1b NL80211_RATE_INFO_5_MHZ_WIDTH = 0xc NL80211_RATE_INFO_80_MHZ_WIDTH = 0x8 NL80211_RATE_INFO_80P80_MHZ_WIDTH = 0x9 + NL80211_RATE_INFO_8_MHZ_WIDTH = 0x1c NL80211_RATE_INFO_BITRATE32 = 0x5 NL80211_RATE_INFO_BITRATE = 0x1 NL80211_RATE_INFO_EHT_GI_0_8 = 0x0 @@ -5753,6 +5828,8 @@ const ( NL80211_RATE_INFO_HE_RU_ALLOC = 0x11 NL80211_RATE_INFO_MAX = 0x1d NL80211_RATE_INFO_MCS = 0x2 + NL80211_RATE_INFO_S1G_MCS = 0x17 + NL80211_RATE_INFO_S1G_NSS = 0x18 NL80211_RATE_INFO_SHORT_GI = 0x4 NL80211_RATE_INFO_VHT_MCS = 0x6 NL80211_RATE_INFO_VHT_NSS = 0x7 @@ -5770,14 +5847,19 @@ const ( NL80211_REKEY_DATA_KEK = 0x1 NL80211_REKEY_DATA_REPLAY_CTR = 0x3 NL80211_REPLAY_CTR_LEN = 0x8 + NL80211_RRF_ALLOW_6GHZ_VLP_AP = 0x1000000 NL80211_RRF_AUTO_BW = 0x800 NL80211_RRF_DFS = 0x10 + NL80211_RRF_DFS_CONCURRENT = 0x200000 NL80211_RRF_GO_CONCURRENT = 0x1000 NL80211_RRF_IR_CONCURRENT = 0x1000 NL80211_RRF_NO_160MHZ = 0x10000 NL80211_RRF_NO_320MHZ = 0x40000 + NL80211_RRF_NO_6GHZ_AFC_CLIENT = 0x800000 + NL80211_RRF_NO_6GHZ_VLP_CLIENT = 0x400000 NL80211_RRF_NO_80MHZ = 0x8000 NL80211_RRF_NO_CCK = 0x2 + NL80211_RRF_NO_EHT = 0x80000 NL80211_RRF_NO_HE = 0x20000 NL80211_RRF_NO_HT40 = 0x6000 NL80211_RRF_NO_HT40MINUS = 0x2000 @@ -5788,7 +5870,10 @@ const ( NL80211_RRF_NO_IR = 0x80 NL80211_RRF_NO_OFDM = 0x1 NL80211_RRF_NO_OUTDOOR = 0x8 + NL80211_RRF_NO_UHB_AFC_CLIENT = 0x800000 + NL80211_RRF_NO_UHB_VLP_CLIENT = 0x400000 NL80211_RRF_PASSIVE_SCAN = 0x80 + NL80211_RRF_PSD = 0x100000 NL80211_RRF_PTMP_ONLY = 0x40 NL80211_RRF_PTP_ONLY = 0x20 NL80211_RXMGMT_FLAG_ANSWERED = 0x1 @@ -5849,6 +5934,7 @@ const ( NL80211_STA_FLAG_MAX_OLD_API = 0x6 NL80211_STA_FLAG_MFP = 0x4 NL80211_STA_FLAG_SHORT_PREAMBLE = 0x2 + NL80211_STA_FLAG_SPP_AMSDU = 0x8 NL80211_STA_FLAG_TDLS_PEER = 0x6 NL80211_STA_FLAG_WME = 0x3 NL80211_STA_INFO_ACK_SIGNAL_AVG = 0x23 @@ -6007,6 +6093,13 @@ const ( NL80211_VHT_CAPABILITY_LEN = 0xc NL80211_VHT_NSS_MAX = 0x8 NL80211_WIPHY_NAME_MAXLEN = 0x40 + NL80211_WIPHY_RADIO_ATTR_FREQ_RANGE = 0x2 + NL80211_WIPHY_RADIO_ATTR_INDEX = 0x1 + NL80211_WIPHY_RADIO_ATTR_INTERFACE_COMBINATION = 0x3 + NL80211_WIPHY_RADIO_ATTR_MAX = 0x4 + NL80211_WIPHY_RADIO_FREQ_ATTR_END = 0x2 + NL80211_WIPHY_RADIO_FREQ_ATTR_MAX = 0x2 + NL80211_WIPHY_RADIO_FREQ_ATTR_START = 0x1 NL80211_WMMR_AIFSN = 0x3 NL80211_WMMR_CW_MAX = 0x2 NL80211_WMMR_CW_MIN = 0x1 @@ -6038,6 +6131,7 @@ const ( NL80211_WOWLAN_TRIG_PKT_PATTERN = 0x4 NL80211_WOWLAN_TRIG_RFKILL_RELEASE = 0x9 NL80211_WOWLAN_TRIG_TCP_CONNECTION = 0xe + NL80211_WOWLAN_TRIG_UNPROTECTED_DEAUTH_DISASSOC = 0x14 NL80211_WOWLAN_TRIG_WAKEUP_PKT_80211 = 0xa NL80211_WOWLAN_TRIG_WAKEUP_PKT_80211_LEN = 0xb NL80211_WOWLAN_TRIG_WAKEUP_PKT_8023 = 0xc diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_386.go b/vendor/golang.org/x/sys/unix/ztypes_linux_386.go index fd402da4..62db85f6 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_386.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_386.go @@ -285,10 +285,16 @@ type Taskstats struct { _ [4]byte Cpu_count uint64 
Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -324,11 +330,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -336,8 +348,12 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint32 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go index eb7a5e18..7d89d648 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go @@ -300,10 +300,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -338,19 +344,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go b/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go index d78ac108..9c0b39ee 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go @@ -276,10 +276,16 @@ type Taskstats struct { _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -315,11 +321,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime 
uint64 @@ -327,8 +339,12 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint32 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go index cd06d47f..de9c7ff3 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go @@ -279,10 +279,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -317,19 +323,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go index 2f28fe26..2336bd2b 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go @@ -280,10 +280,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -318,19 +324,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go index 71d6cac2..4711f0be 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go @@ -281,10 +281,16 @@ type Taskstats struct { _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 
Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,11 +326,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -332,8 +344,12 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint32 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go index 8596d453..ab99a34b 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go @@ -282,10 +282,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,19 +326,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go index cd60ea18..04c9866e 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go @@ -282,10 +282,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,19 +326,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 
Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go index b0ae420c..60aa69f6 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go @@ -281,10 +281,16 @@ type Taskstats struct { _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,11 +326,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -332,8 +344,12 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint32 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go index 83597287..cb4fad78 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go @@ -288,10 +288,16 @@ type Taskstats struct { _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -327,11 +333,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -339,8 +351,12 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint32 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go index 69eb6a5c..60272cfc 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go @@ -289,10 +289,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 
Ac_comm [32]uint8 @@ -327,19 +333,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go index 5f583cb6..3f5b91bc 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go @@ -289,10 +289,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -327,19 +333,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go index ad05b51a..51550f15 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go @@ -307,10 +307,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -345,19 +351,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go 
b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go index cf3ce900..3239e50e 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go @@ -302,10 +302,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -340,19 +346,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go index 590b5673..faf20027 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go @@ -284,10 +284,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -322,19 +328,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/modules.txt b/vendor/modules.txt index 5b88c936..fbf47929 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -276,7 +276,7 @@ go.opentelemetry.io/otel/trace go.opentelemetry.io/otel/trace/embedded go.opentelemetry.io/otel/trace/internal/telemetry go.opentelemetry.io/otel/trace/noop -# golang.org/x/crypto v0.39.0 +# golang.org/x/crypto v0.40.0 ## explicit; go 1.23.0 golang.org/x/crypto/bcrypt golang.org/x/crypto/blowfish @@ -296,15 +296,15 @@ golang.org/x/net/proxy ## explicit; go 1.23.0 golang.org/x/oauth2 golang.org/x/oauth2/internal -# golang.org/x/sync v0.15.0 +# golang.org/x/sync v0.16.0 ## explicit; go 1.23.0 golang.org/x/sync/errgroup -# golang.org/x/sys v0.33.0 +# golang.org/x/sys v0.34.0 ## explicit; go 1.23.0 golang.org/x/sys/cpu golang.org/x/sys/unix golang.org/x/sys/windows -# golang.org/x/text v0.26.0 +# 
golang.org/x/text v0.27.0 ## explicit; go 1.23.0 golang.org/x/text/cases golang.org/x/text/internal From 0cc51e48ef816cca88454b8427ca3c5be36721d3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Jul 2025 09:34:59 +0000 Subject: [PATCH 120/179] Bump golang.org/x/mod from 0.25.0 to 0.26.0 --- updated-dependencies: - dependency-name: golang.org/x/mod dependency-version: 0.26.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 ++-- vendor/modules.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/go.mod b/go.mod index bc622e0e..b09a9002 100644 --- a/go.mod +++ b/go.mod @@ -29,7 +29,7 @@ require ( github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 golang.org/x/crypto v0.40.0 - golang.org/x/mod v0.25.0 + golang.org/x/mod v0.26.0 golang.org/x/oauth2 v0.30.0 golang.org/x/sync v0.16.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 diff --git a/go.sum b/go.sum index 494ac211..9b08878d 100644 --- a/go.sum +++ b/go.sum @@ -189,8 +189,8 @@ go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKr go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= -golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= -golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= +golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= diff --git a/vendor/modules.txt b/vendor/modules.txt index fbf47929..d9d4f3df 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -285,7 +285,7 @@ golang.org/x/crypto/chacha20poly1305 golang.org/x/crypto/hkdf golang.org/x/crypto/internal/alias golang.org/x/crypto/internal/poly1305 -# golang.org/x/mod v0.25.0 +# golang.org/x/mod v0.26.0 ## explicit; go 1.23.0 golang.org/x/mod/semver # golang.org/x/net v0.41.0 From 65d6d1ae8742dd74b58054179281b837b69f2244 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 16 Jul 2025 06:39:46 +0000 Subject: [PATCH 121/179] Handle query args Merge any query args from both the GH url and the supplied URL. 
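The diff below implements this by parsing the supplied path as a URL of its own and folding its query parameters into the base Actions URL. A rough standalone sketch of the same merge rule (the URL and parameter values in main are invented for illustration): parameters from the path are only added when the base URL does not already set that key, and api-version is defaulted when neither side provides it.

package main

import (
	"fmt"
	"net/url"
	"path"
)

// mergeActionsURL mirrors the merge logic from the patch: query parameters on
// the supplied path are added to the base URL's query only when the base does
// not already set that key, and api-version gets a default when absent.
func mergeActionsURL(base, uriPath string) (string, error) {
	baseURI, err := url.Parse(base)
	if err != nil {
		return "", err
	}
	pathURI, err := url.Parse(uriPath)
	if err != nil {
		return "", err
	}

	baseQuery := baseURI.Query()
	for k, values := range pathURI.Query() {
		if baseQuery.Get(k) == "" {
			for _, val := range values {
				baseQuery.Add(k, val)
			}
		}
	}
	if baseQuery.Get("api-version") == "" {
		baseQuery.Set("api-version", "6.0-preview")
	}

	baseURI.Path = path.Join(baseURI.Path, pathURI.Path)
	baseURI.RawQuery = baseQuery.Encode()
	return baseURI.String(), nil
}

func main() {
	// The base URL already carries a query parameter; the supplied path adds
	// another plus a conflicting one, which is ignored in favor of the base.
	merged, err := mergeActionsURL(
		"https://pipelines.example.com/_apis/distributedtask/pipelines?sessionId=abc",
		"/messages?lastMessageId=42&sessionId=OVERRIDDEN",
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(merged)
	// Prints something like:
	// https://pipelines.example.com/_apis/distributedtask/pipelines/messages?api-version=6.0-preview&lastMessageId=42&sessionId=abc
}

Note that url.Values.Encode sorts keys, so the parameter order in the resulting URL is deterministic but may differ from the order in which the parameters were supplied.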
Signed-off-by: Gabriel Adrian Samfira --- util/github/scalesets/util.go | 29 ++++++++++++++++++++++------- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/util/github/scalesets/util.go b/util/github/scalesets/util.go index ac5468bf..e8387e63 100644 --- a/util/github/scalesets/util.go +++ b/util/github/scalesets/util.go @@ -19,9 +19,11 @@ import ( "fmt" "io" "net/http" + "net/url" + "path" ) -func (s *ScaleSetClient) newActionsRequest(ctx context.Context, method, path string, body io.Reader) (*http.Request, error) { +func (s *ScaleSetClient) newActionsRequest(ctx context.Context, method, uriPath string, body io.Reader) (*http.Request, error) { if err := s.ensureAdminInfo(ctx); err != nil { return nil, fmt.Errorf("failed to update token: %w", err) } @@ -31,14 +33,27 @@ func (s *ScaleSetClient) newActionsRequest(ctx context.Context, method, path str return nil, fmt.Errorf("failed to get pipeline URL: %w", err) } - uri := actionsURI.JoinPath(path) - q := uri.Query() - if q.Get("api-version") == "" { - q.Set("api-version", "6.0-preview") + pathURI, err := url.Parse(uriPath) + if err != nil { + return nil, fmt.Errorf("failed to parse path: %w", err) + } + pathQuery := pathURI.Query() + baseQuery := actionsURI.Query() + for k, values := range pathQuery { + if baseQuery.Get(k) == "" { + for _, val := range values { + baseQuery.Add(k, val) + } + } + } + if baseQuery.Get("api-version") == "" { + baseQuery.Set("api-version", "6.0-preview") } - uri.RawQuery = q.Encode() - req, err := http.NewRequestWithContext(ctx, method, uri.String(), body) + actionsURI.Path = path.Join(actionsURI.Path, pathURI.Path) + actionsURI.RawQuery = baseQuery.Encode() + + req, err := http.NewRequestWithContext(ctx, method, actionsURI.String(), body) if err != nil { return nil, err } From a46c4746402054df7bb8be42da980626123bf0dd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Jul 2025 06:41:38 +0000 Subject: [PATCH 122/179] Bump github.com/golang-jwt/jwt/v5 from 5.2.2 to 5.2.3 Bumps [github.com/golang-jwt/jwt/v5](https://github.com/golang-jwt/jwt) from 5.2.2 to 5.2.3. - [Release notes](https://github.com/golang-jwt/jwt/releases) - [Changelog](https://github.com/golang-jwt/jwt/blob/main/VERSION_HISTORY.md) - [Commits](https://github.com/golang-jwt/jwt/compare/v5.2.2...v5.2.3) --- updated-dependencies: - dependency-name: github.com/golang-jwt/jwt/v5 dependency-version: 5.2.3 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 +- .../golang-jwt/jwt/v5/MIGRATION_GUIDE.md | 2 +- vendor/github.com/golang-jwt/jwt/v5/ecdsa.go | 4 +- .../golang-jwt/jwt/v5/ecdsa_utils.go | 4 +- .../github.com/golang-jwt/jwt/v5/ed25519.go | 4 +- .../golang-jwt/jwt/v5/ed25519_utils.go | 4 +- vendor/github.com/golang-jwt/jwt/v5/hmac.go | 4 +- .../golang-jwt/jwt/v5/map_claims.go | 8 +-- vendor/github.com/golang-jwt/jwt/v5/none.go | 4 +- .../golang-jwt/jwt/v5/parser_option.go | 25 ++++++++-- vendor/github.com/golang-jwt/jwt/v5/rsa.go | 4 +- .../github.com/golang-jwt/jwt/v5/rsa_pss.go | 4 +- .../github.com/golang-jwt/jwt/v5/rsa_utils.go | 6 +-- .../golang-jwt/jwt/v5/signing_method.go | 6 +-- vendor/github.com/golang-jwt/jwt/v5/token.go | 20 ++++---- vendor/github.com/golang-jwt/jwt/v5/types.go | 4 +- .../github.com/golang-jwt/jwt/v5/validator.go | 50 +++++++++++-------- vendor/modules.txt | 2 +- 19 files changed, 94 insertions(+), 67 deletions(-) diff --git a/go.mod b/go.mod index b09a9002..56175f82 100644 --- a/go.mod +++ b/go.mod @@ -13,7 +13,7 @@ require ( github.com/go-openapi/runtime v0.28.0 github.com/go-openapi/strfmt v0.23.0 github.com/go-openapi/swag v0.23.1 - github.com/golang-jwt/jwt/v5 v5.2.2 + github.com/golang-jwt/jwt/v5 v5.2.3 github.com/google/go-github/v72 v72.0.0 github.com/google/uuid v1.6.0 github.com/gorilla/handlers v1.5.2 diff --git a/go.sum b/go.sum index 9b08878d..d936ccf8 100644 --- a/go.sum +++ b/go.sum @@ -56,8 +56,8 @@ github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1 github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= -github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.2.3 h1:kkGXqQOBSDDWRhWNXTFpqGSCMyh/PLnqUvMGJPDJDs0= +github.com/golang-jwt/jwt/v5 v5.2.3/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= diff --git a/vendor/github.com/golang-jwt/jwt/v5/MIGRATION_GUIDE.md b/vendor/github.com/golang-jwt/jwt/v5/MIGRATION_GUIDE.md index ff9c57e1..b3178e75 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/MIGRATION_GUIDE.md +++ b/vendor/github.com/golang-jwt/jwt/v5/MIGRATION_GUIDE.md @@ -155,7 +155,7 @@ stored in base64 encoded form, which was redundant with the information in the type Token struct { Raw string // Raw contains the raw token Method SigningMethod // Method is the signing method used or to be used - Header map[string]interface{} // Header is the first segment of the token in decoded form + Header map[string]any // Header is the first segment of the token in decoded form Claims Claims // Claims is the second segment of the token in decoded form Signature []byte // Signature is the third segment of the token in decoded form Valid bool // Valid specifies if the token is valid diff --git a/vendor/github.com/golang-jwt/jwt/v5/ecdsa.go b/vendor/github.com/golang-jwt/jwt/v5/ecdsa.go index c929e4a0..06cd94d2 100644 --- 
a/vendor/github.com/golang-jwt/jwt/v5/ecdsa.go +++ b/vendor/github.com/golang-jwt/jwt/v5/ecdsa.go @@ -55,7 +55,7 @@ func (m *SigningMethodECDSA) Alg() string { // Verify implements token verification for the SigningMethod. // For this verify method, key must be an ecdsa.PublicKey struct -func (m *SigningMethodECDSA) Verify(signingString string, sig []byte, key interface{}) error { +func (m *SigningMethodECDSA) Verify(signingString string, sig []byte, key any) error { // Get the key var ecdsaKey *ecdsa.PublicKey switch k := key.(type) { @@ -89,7 +89,7 @@ func (m *SigningMethodECDSA) Verify(signingString string, sig []byte, key interf // Sign implements token signing for the SigningMethod. // For this signing method, key must be an ecdsa.PrivateKey struct -func (m *SigningMethodECDSA) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *SigningMethodECDSA) Sign(signingString string, key any) ([]byte, error) { // Get the key var ecdsaKey *ecdsa.PrivateKey switch k := key.(type) { diff --git a/vendor/github.com/golang-jwt/jwt/v5/ecdsa_utils.go b/vendor/github.com/golang-jwt/jwt/v5/ecdsa_utils.go index 5700636d..44a3b7a1 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/ecdsa_utils.go +++ b/vendor/github.com/golang-jwt/jwt/v5/ecdsa_utils.go @@ -23,7 +23,7 @@ func ParseECPrivateKeyFromPEM(key []byte) (*ecdsa.PrivateKey, error) { } // Parse the key - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParseECPrivateKey(block.Bytes); err != nil { if parsedKey, err = x509.ParsePKCS8PrivateKey(block.Bytes); err != nil { return nil, err @@ -50,7 +50,7 @@ func ParseECPublicKeyFromPEM(key []byte) (*ecdsa.PublicKey, error) { } // Parse the key - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParsePKIXPublicKey(block.Bytes); err != nil { if cert, err := x509.ParseCertificate(block.Bytes); err == nil { parsedKey = cert.PublicKey diff --git a/vendor/github.com/golang-jwt/jwt/v5/ed25519.go b/vendor/github.com/golang-jwt/jwt/v5/ed25519.go index c2138119..4159e57b 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/ed25519.go +++ b/vendor/github.com/golang-jwt/jwt/v5/ed25519.go @@ -33,7 +33,7 @@ func (m *SigningMethodEd25519) Alg() string { // Verify implements token verification for the SigningMethod. // For this verify method, key must be an ed25519.PublicKey -func (m *SigningMethodEd25519) Verify(signingString string, sig []byte, key interface{}) error { +func (m *SigningMethodEd25519) Verify(signingString string, sig []byte, key any) error { var ed25519Key ed25519.PublicKey var ok bool @@ -55,7 +55,7 @@ func (m *SigningMethodEd25519) Verify(signingString string, sig []byte, key inte // Sign implements token signing for the SigningMethod. 
// For this signing method, key must be an ed25519.PrivateKey -func (m *SigningMethodEd25519) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *SigningMethodEd25519) Sign(signingString string, key any) ([]byte, error) { var ed25519Key crypto.Signer var ok bool diff --git a/vendor/github.com/golang-jwt/jwt/v5/ed25519_utils.go b/vendor/github.com/golang-jwt/jwt/v5/ed25519_utils.go index cdb5e68e..6f46e886 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/ed25519_utils.go +++ b/vendor/github.com/golang-jwt/jwt/v5/ed25519_utils.go @@ -24,7 +24,7 @@ func ParseEdPrivateKeyFromPEM(key []byte) (crypto.PrivateKey, error) { } // Parse the key - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParsePKCS8PrivateKey(block.Bytes); err != nil { return nil, err } @@ -49,7 +49,7 @@ func ParseEdPublicKeyFromPEM(key []byte) (crypto.PublicKey, error) { } // Parse the key - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParsePKIXPublicKey(block.Bytes); err != nil { return nil, err } diff --git a/vendor/github.com/golang-jwt/jwt/v5/hmac.go b/vendor/github.com/golang-jwt/jwt/v5/hmac.go index aca600ce..1bef138c 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/hmac.go +++ b/vendor/github.com/golang-jwt/jwt/v5/hmac.go @@ -55,7 +55,7 @@ func (m *SigningMethodHMAC) Alg() string { // about this, and why we intentionally are not supporting string as a key can // be found on our usage guide // https://golang-jwt.github.io/jwt/usage/signing_methods/#signing-methods-and-key-types. -func (m *SigningMethodHMAC) Verify(signingString string, sig []byte, key interface{}) error { +func (m *SigningMethodHMAC) Verify(signingString string, sig []byte, key any) error { // Verify the key is the right type keyBytes, ok := key.([]byte) if !ok { @@ -88,7 +88,7 @@ func (m *SigningMethodHMAC) Verify(signingString string, sig []byte, key interfa // cryptographically random source, e.g. crypto/rand. Additional information // about this, and why we intentionally are not supporting string as a key can // be found on our usage guide https://golang-jwt.github.io/jwt/usage/signing_methods/. -func (m *SigningMethodHMAC) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *SigningMethodHMAC) Sign(signingString string, key any) ([]byte, error) { if keyBytes, ok := key.([]byte); ok { if !m.Hash.Available() { return nil, ErrHashUnavailable diff --git a/vendor/github.com/golang-jwt/jwt/v5/map_claims.go b/vendor/github.com/golang-jwt/jwt/v5/map_claims.go index b2b51a1f..3b920527 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/map_claims.go +++ b/vendor/github.com/golang-jwt/jwt/v5/map_claims.go @@ -5,9 +5,9 @@ import ( "fmt" ) -// MapClaims is a claims type that uses the map[string]interface{} for JSON +// MapClaims is a claims type that uses the map[string]any for JSON // decoding. This is the default claims type if you don't supply one -type MapClaims map[string]interface{} +type MapClaims map[string]any // GetExpirationTime implements the Claims interface. 
func (m MapClaims) GetExpirationTime() (*NumericDate, error) { @@ -73,7 +73,7 @@ func (m MapClaims) parseClaimsString(key string) (ClaimStrings, error) { cs = append(cs, v) case []string: cs = v - case []interface{}: + case []any: for _, a := range v { vs, ok := a.(string) if !ok { @@ -92,7 +92,7 @@ func (m MapClaims) parseClaimsString(key string) (ClaimStrings, error) { func (m MapClaims) parseString(key string) (string, error) { var ( ok bool - raw interface{} + raw any iss string ) raw, ok = m[key] diff --git a/vendor/github.com/golang-jwt/jwt/v5/none.go b/vendor/github.com/golang-jwt/jwt/v5/none.go index 685c2ea3..624ad55e 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/none.go +++ b/vendor/github.com/golang-jwt/jwt/v5/none.go @@ -25,7 +25,7 @@ func (m *signingMethodNone) Alg() string { } // Only allow 'none' alg type if UnsafeAllowNoneSignatureType is specified as the key -func (m *signingMethodNone) Verify(signingString string, sig []byte, key interface{}) (err error) { +func (m *signingMethodNone) Verify(signingString string, sig []byte, key any) (err error) { // Key must be UnsafeAllowNoneSignatureType to prevent accidentally // accepting 'none' signing method if _, ok := key.(unsafeNoneMagicConstant); !ok { @@ -41,7 +41,7 @@ func (m *signingMethodNone) Verify(signingString string, sig []byte, key interfa } // Only allow 'none' signing if UnsafeAllowNoneSignatureType is specified as the key -func (m *signingMethodNone) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *signingMethodNone) Sign(signingString string, key any) ([]byte, error) { if _, ok := key.(unsafeNoneMagicConstant); ok { return []byte{}, nil } diff --git a/vendor/github.com/golang-jwt/jwt/v5/parser_option.go b/vendor/github.com/golang-jwt/jwt/v5/parser_option.go index 88a780fb..43157355 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/parser_option.go +++ b/vendor/github.com/golang-jwt/jwt/v5/parser_option.go @@ -66,20 +66,37 @@ func WithExpirationRequired() ParserOption { } } -// WithAudience configures the validator to require the specified audience in -// the `aud` claim. Validation will fail if the audience is not listed in the -// token or the `aud` claim is missing. +// WithAudience configures the validator to require any of the specified +// audiences in the `aud` claim. Validation will fail if the audience is not +// listed in the token or the `aud` claim is missing. // // NOTE: While the `aud` claim is OPTIONAL in a JWT, the handling of it is // application-specific. Since this validation API is helping developers in // writing secure application, we decided to REQUIRE the existence of the claim, // if an audience is expected. -func WithAudience(aud string) ParserOption { +func WithAudience(aud ...string) ParserOption { return func(p *Parser) { p.validator.expectedAud = aud } } +// WithAllAudiences configures the validator to require all the specified +// audiences in the `aud` claim. Validation will fail if the specified audiences +// are not listed in the token or the `aud` claim is missing. Duplicates within +// the list are de-duplicated since internally, we use a map to look up the +// audiences. +// +// NOTE: While the `aud` claim is OPTIONAL in a JWT, the handling of it is +// application-specific. Since this validation API is helping developers in +// writing secure application, we decided to REQUIRE the existence of the claim, +// if an audience is expected. 
+func WithAllAudiences(aud ...string) ParserOption { + return func(p *Parser) { + p.validator.expectedAud = aud + p.validator.expectAllAud = true + } +} + // WithIssuer configures the validator to require the specified issuer in the // `iss` claim. Validation will fail if a different issuer is specified in the // token or the `iss` claim is missing. diff --git a/vendor/github.com/golang-jwt/jwt/v5/rsa.go b/vendor/github.com/golang-jwt/jwt/v5/rsa.go index 83cbee6a..98b960a7 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/rsa.go +++ b/vendor/github.com/golang-jwt/jwt/v5/rsa.go @@ -46,7 +46,7 @@ func (m *SigningMethodRSA) Alg() string { // Verify implements token verification for the SigningMethod // For this signing method, must be an *rsa.PublicKey structure. -func (m *SigningMethodRSA) Verify(signingString string, sig []byte, key interface{}) error { +func (m *SigningMethodRSA) Verify(signingString string, sig []byte, key any) error { var rsaKey *rsa.PublicKey var ok bool @@ -67,7 +67,7 @@ func (m *SigningMethodRSA) Verify(signingString string, sig []byte, key interfac // Sign implements token signing for the SigningMethod // For this signing method, must be an *rsa.PrivateKey structure. -func (m *SigningMethodRSA) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *SigningMethodRSA) Sign(signingString string, key any) ([]byte, error) { var rsaKey *rsa.PrivateKey var ok bool diff --git a/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go b/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go index 28c386ec..7c216ae0 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go +++ b/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go @@ -82,7 +82,7 @@ func init() { // Verify implements token verification for the SigningMethod. // For this verify method, key must be an rsa.PublicKey struct -func (m *SigningMethodRSAPSS) Verify(signingString string, sig []byte, key interface{}) error { +func (m *SigningMethodRSAPSS) Verify(signingString string, sig []byte, key any) error { var rsaKey *rsa.PublicKey switch k := key.(type) { case *rsa.PublicKey: @@ -108,7 +108,7 @@ func (m *SigningMethodRSAPSS) Verify(signingString string, sig []byte, key inter // Sign implements token signing for the SigningMethod. 
// For this signing method, key must be an rsa.PrivateKey struct -func (m *SigningMethodRSAPSS) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *SigningMethodRSAPSS) Sign(signingString string, key any) ([]byte, error) { var rsaKey *rsa.PrivateKey switch k := key.(type) { diff --git a/vendor/github.com/golang-jwt/jwt/v5/rsa_utils.go b/vendor/github.com/golang-jwt/jwt/v5/rsa_utils.go index b3aeebbe..f22c3d06 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/rsa_utils.go +++ b/vendor/github.com/golang-jwt/jwt/v5/rsa_utils.go @@ -23,7 +23,7 @@ func ParseRSAPrivateKeyFromPEM(key []byte) (*rsa.PrivateKey, error) { return nil, ErrKeyMustBePEMEncoded } - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParsePKCS1PrivateKey(block.Bytes); err != nil { if parsedKey, err = x509.ParsePKCS8PrivateKey(block.Bytes); err != nil { return nil, err @@ -53,7 +53,7 @@ func ParseRSAPrivateKeyFromPEMWithPassword(key []byte, password string) (*rsa.Pr return nil, ErrKeyMustBePEMEncoded } - var parsedKey interface{} + var parsedKey any var blockDecrypted []byte if blockDecrypted, err = x509.DecryptPEMBlock(block, []byte(password)); err != nil { @@ -86,7 +86,7 @@ func ParseRSAPublicKeyFromPEM(key []byte) (*rsa.PublicKey, error) { } // Parse the key - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParsePKIXPublicKey(block.Bytes); err != nil { if cert, err := x509.ParseCertificate(block.Bytes); err == nil { parsedKey = cert.PublicKey diff --git a/vendor/github.com/golang-jwt/jwt/v5/signing_method.go b/vendor/github.com/golang-jwt/jwt/v5/signing_method.go index 0d73631c..096d0ed4 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/signing_method.go +++ b/vendor/github.com/golang-jwt/jwt/v5/signing_method.go @@ -12,9 +12,9 @@ var signingMethodLock = new(sync.RWMutex) // signature in Sign. The signature is then usually base64 encoded as part of a // JWT. type SigningMethod interface { - Verify(signingString string, sig []byte, key interface{}) error // Returns nil if signature is valid - Sign(signingString string, key interface{}) ([]byte, error) // Returns signature or error - Alg() string // returns the alg identifier for this method (example: 'HS256') + Verify(signingString string, sig []byte, key any) error // Returns nil if signature is valid + Sign(signingString string, key any) ([]byte, error) // Returns signature or error + Alg() string // returns the alg identifier for this method (example: 'HS256') } // RegisterSigningMethod registers the "alg" name and a factory function for signing method. diff --git a/vendor/github.com/golang-jwt/jwt/v5/token.go b/vendor/github.com/golang-jwt/jwt/v5/token.go index 9c7f4ab0..3f715588 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/token.go +++ b/vendor/github.com/golang-jwt/jwt/v5/token.go @@ -11,9 +11,9 @@ import ( // Token. This allows you to use properties in the Header of the token (such as // `kid`) to identify which key to use. // -// The returned interface{} may be a single key or a VerificationKeySet containing +// The returned any may be a single key or a VerificationKeySet containing // multiple keys. -type Keyfunc func(*Token) (interface{}, error) +type Keyfunc func(*Token) (any, error) // VerificationKey represents a public or secret key for verifying a token's signature. type VerificationKey interface { @@ -28,12 +28,12 @@ type VerificationKeySet struct { // Token represents a JWT Token. Different fields will be used depending on // whether you're creating or parsing/verifying a token. 
type Token struct { - Raw string // Raw contains the raw token. Populated when you [Parse] a token - Method SigningMethod // Method is the signing method used or to be used - Header map[string]interface{} // Header is the first segment of the token in decoded form - Claims Claims // Claims is the second segment of the token in decoded form - Signature []byte // Signature is the third segment of the token in decoded form. Populated when you Parse a token - Valid bool // Valid specifies if the token is valid. Populated when you Parse/Verify a token + Raw string // Raw contains the raw token. Populated when you [Parse] a token + Method SigningMethod // Method is the signing method used or to be used + Header map[string]any // Header is the first segment of the token in decoded form + Claims Claims // Claims is the second segment of the token in decoded form + Signature []byte // Signature is the third segment of the token in decoded form. Populated when you Parse a token + Valid bool // Valid specifies if the token is valid. Populated when you Parse/Verify a token } // New creates a new [Token] with the specified signing method and an empty map @@ -46,7 +46,7 @@ func New(method SigningMethod, opts ...TokenOption) *Token { // claims. Additional options can be specified, but are currently unused. func NewWithClaims(method SigningMethod, claims Claims, opts ...TokenOption) *Token { return &Token{ - Header: map[string]interface{}{ + Header: map[string]any{ "typ": "JWT", "alg": method.Alg(), }, @@ -60,7 +60,7 @@ func NewWithClaims(method SigningMethod, claims Claims, opts ...TokenOption) *To // https://golang-jwt.github.io/jwt/usage/signing_methods/#signing-methods-and-key-types // for an overview of the different signing methods and their respective key // types. -func (t *Token) SignedString(key interface{}) (string, error) { +func (t *Token) SignedString(key any) (string, error) { sstr, err := t.SigningString() if err != nil { return "", err diff --git a/vendor/github.com/golang-jwt/jwt/v5/types.go b/vendor/github.com/golang-jwt/jwt/v5/types.go index b2655a9e..a3e0ef12 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/types.go +++ b/vendor/github.com/golang-jwt/jwt/v5/types.go @@ -103,7 +103,7 @@ func (date *NumericDate) UnmarshalJSON(b []byte) (err error) { type ClaimStrings []string func (s *ClaimStrings) UnmarshalJSON(data []byte) (err error) { - var value interface{} + var value any if err = json.Unmarshal(data, &value); err != nil { return err @@ -116,7 +116,7 @@ func (s *ClaimStrings) UnmarshalJSON(data []byte) (err error) { aud = append(aud, v) case []string: aud = ClaimStrings(v) - case []interface{}: + case []any: for _, vv := range v { vs, ok := vv.(string) if !ok { diff --git a/vendor/github.com/golang-jwt/jwt/v5/validator.go b/vendor/github.com/golang-jwt/jwt/v5/validator.go index 008ecd87..92b5c057 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/validator.go +++ b/vendor/github.com/golang-jwt/jwt/v5/validator.go @@ -1,8 +1,8 @@ package jwt import ( - "crypto/subtle" "fmt" + "slices" "time" ) @@ -52,8 +52,12 @@ type Validator struct { verifyIat bool // expectedAud contains the audience this token expects. Supplying an empty - // string will disable aud checking. - expectedAud string + // slice will disable aud checking. + expectedAud []string + + // expectAllAud specifies whether all expected audiences must be present in + // the token. If false, only one of the expected audiences must be present. + expectAllAud bool // expectedIss contains the issuer this token expects. 
Supplying an empty // string will disable iss checking. @@ -88,7 +92,7 @@ func NewValidator(opts ...ParserOption) *Validator { func (v *Validator) Validate(claims Claims) error { var ( now time.Time - errs []error = make([]error, 0, 6) + errs = make([]error, 0, 6) err error ) @@ -120,8 +124,8 @@ func (v *Validator) Validate(claims Claims) error { } // If we have an expected audience, we also require the audience claim - if v.expectedAud != "" { - if err = v.verifyAudience(claims, v.expectedAud, true); err != nil { + if len(v.expectedAud) > 0 { + if err = v.verifyAudience(claims, v.expectedAud, v.expectAllAud); err != nil { errs = append(errs, err) } } @@ -226,33 +230,39 @@ func (v *Validator) verifyNotBefore(claims Claims, cmp time.Time, required bool) // // Additionally, if any error occurs while retrieving the claim, e.g., when its // the wrong type, an ErrTokenUnverifiable error will be returned. -func (v *Validator) verifyAudience(claims Claims, cmp string, required bool) error { +func (v *Validator) verifyAudience(claims Claims, cmp []string, expectAllAud bool) error { aud, err := claims.GetAudience() if err != nil { return err } - if len(aud) == 0 { + // Check that aud exists and is not empty. We only require the aud claim + // if we expect at least one audience to be present. + if len(aud) == 0 || len(aud) == 1 && aud[0] == "" { + required := len(v.expectedAud) > 0 return errorIfRequired(required, "aud") } - // use a var here to keep constant time compare when looping over a number of claims - result := false - - var stringClaims string - for _, a := range aud { - if subtle.ConstantTimeCompare([]byte(a), []byte(cmp)) != 0 { - result = true + if !expectAllAud { + for _, a := range aud { + // If we only expect one match, we can stop early if we find a match + if slices.Contains(cmp, a) { + return nil + } } - stringClaims = stringClaims + a + + return ErrTokenInvalidAudience } - // case where "" is sent in one or many aud claims - if stringClaims == "" { - return errorIfRequired(required, "aud") + // Note that we are looping cmp here to ensure that all expected audiences + // are present in the aud claim. + for _, a := range cmp { + if !slices.Contains(aud, a) { + return ErrTokenInvalidAudience + } } - return errorIfFalse(result, ErrTokenInvalidAudience) + return nil } // verifyIssuer compares the iss claim in claims against cmp. diff --git a/vendor/modules.txt b/vendor/modules.txt index d9d4f3df..b060fed7 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -94,7 +94,7 @@ github.com/go-sql-driver/mysql # github.com/golang-jwt/jwt/v4 v4.5.2 ## explicit; go 1.16 github.com/golang-jwt/jwt/v4 -# github.com/golang-jwt/jwt/v5 v5.2.2 +# github.com/golang-jwt/jwt/v5 v5.2.3 ## explicit; go 1.18 github.com/golang-jwt/jwt/v5 # github.com/google/go-github/v72 v72.0.0 From 69779a0a7d622d99b04800c8ef571f6dc8ecd8f9 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 17 Jul 2025 16:39:55 +0000 Subject: [PATCH 123/179] Fix scale set param Do not look for a name when composing the scale set. Preload may not have been called on an entity, but we still have the ID, which is the only thing needed when GetEntity() is called. 
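As a rough sketch of why the name guard was dropped (simplified stand-in types, not the real GARM models): when the relation is not preloaded, only the foreign key column is populated, so requiring a non-empty Organization.Name would wrongly discard the OrgID that GetEntity() needs.

    package main

    import "fmt"

    // Organization stands in for a preloadable relation; only Name matters here.
    type Organization struct {
        Name string
    }

    // ScaleSet stands in for the DB model: the foreign key is always present,
    // the relation is only populated when it was explicitly preloaded.
    type ScaleSet struct {
        OrgID        *string
        Organization Organization
    }

    // orgInfo mirrors the fixed conversion: the ID alone is enough, the name
    // is informational and may be empty when the relation was not preloaded.
    func orgInfo(s ScaleSet) (id, name string) {
        if s.OrgID != nil {
            id = *s.OrgID
            name = s.Organization.Name
        }
        return id, name
    }

    func main() {
        orgID := "00000000-0000-0000-0000-000000000001" // placeholder UUID
        id, name := orgInfo(ScaleSet{OrgID: &orgID})
        fmt.Printf("id=%q name=%q\n", id, name)
    }
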
Signed-off-by: Gabriel Adrian Samfira --- database/sql/scalesets.go | 4 ++-- database/sql/util.go | 4 ++-- params/params.go | 2 +- runner/pool/pool.go | 7 ++++++- workers/cache/cache.go | 2 +- workers/cache/gitea_tools.go | 10 +++++++--- workers/cache/tool_cache.go | 4 ++-- 7 files changed, 21 insertions(+), 12 deletions(-) diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go index 930ae17d..65a51ca0 100644 --- a/database/sql/scalesets.go +++ b/database/sql/scalesets.go @@ -389,7 +389,7 @@ func (s *sqlDatabase) SetScaleSetLastMessageID(_ context.Context, scaleSetID uin } }() if err := s.conn.Transaction(func(tx *gorm.DB) error { - dbSet, err := s.getScaleSetByID(tx, scaleSetID) + dbSet, err := s.getScaleSetByID(tx, scaleSetID, "Instances", "Enterprise", "Organization", "Repository") if err != nil { return errors.Wrap(err, "fetching scale set") } @@ -416,7 +416,7 @@ func (s *sqlDatabase) SetScaleSetDesiredRunnerCount(_ context.Context, scaleSetI } }() if err := s.conn.Transaction(func(tx *gorm.DB) error { - dbSet, err := s.getScaleSetByID(tx, scaleSetID) + dbSet, err := s.getScaleSetByID(tx, scaleSetID, "Instances", "Enterprise", "Organization", "Repository") if err != nil { return errors.Wrap(err, "fetching scale set") } diff --git a/database/sql/util.go b/database/sql/util.go index d55e0174..2b2a1de8 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -362,12 +362,12 @@ func (s *sqlDatabase) sqlToCommonScaleSet(scaleSet ScaleSet) (params.ScaleSet, e } } - if scaleSet.OrgID != nil && scaleSet.Organization.Name != "" { + if scaleSet.OrgID != nil { ret.OrgID = scaleSet.OrgID.String() ret.OrgName = scaleSet.Organization.Name } - if scaleSet.EnterpriseID != nil && scaleSet.Enterprise.Name != "" { + if scaleSet.EnterpriseID != nil { ret.EnterpriseID = scaleSet.EnterpriseID.String() ret.EnterpriseName = scaleSet.Enterprise.Name } diff --git a/params/params.go b/params/params.go index 3a0e8435..6e1bc1aa 100644 --- a/params/params.go +++ b/params/params.go @@ -570,7 +570,7 @@ func (p ScaleSet) GetEntity() (ForgeEntity, error) { EntityType: ForgeEntityTypeEnterprise, }, nil } - return ForgeEntity{}, fmt.Errorf("pool has no associated entity") + return ForgeEntity{}, fmt.Errorf("scale set has no associated entity") } func (p *ScaleSet) ScaleSetType() ForgeEntityType { diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 9f6d3c0e..c161e41d 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -68,7 +68,12 @@ const ( ) func NewEntityPoolManager(ctx context.Context, entity params.ForgeEntity, instanceTokenGetter auth.InstanceTokenGetter, providers map[string]common.Provider, store dbCommon.Store) (common.PoolManager, error) { - ctx = garmUtil.WithSlogContext(ctx, slog.Any("pool_mgr", entity.String()), slog.Any("pool_type", entity.EntityType)) + ctx = garmUtil.WithSlogContext( + ctx, + slog.Any("pool_mgr", entity.String()), + slog.Any("endpoint", entity.Credentials.Endpoint.Name), + slog.Any("pool_type", entity.EntityType), + ) ghc, err := ghClient.Client(ctx, entity) if err != nil { return nil, errors.Wrap(err, "getting github client") diff --git a/workers/cache/cache.go b/workers/cache/cache.go index 3f589edd..3b387f50 100644 --- a/workers/cache/cache.go +++ b/workers/cache/cache.go @@ -365,7 +365,7 @@ func (w *Worker) handleScaleSetEvent(event common.ChangePayload) { } entity, err := scaleSet.GetEntity() if err != nil { - slog.DebugContext(w.ctx, "getting entity from pool", "error", err) + slog.DebugContext(w.ctx, "getting entity from scale set", "error", 
err) return } diff --git a/workers/cache/gitea_tools.go b/workers/cache/gitea_tools.go index 5d09ccb3..43fd86ba 100644 --- a/workers/cache/gitea_tools.go +++ b/workers/cache/gitea_tools.go @@ -15,9 +15,11 @@ package cache import ( + "context" "encoding/json" "fmt" "io" + "log/slog" "net/http" "strings" "time" @@ -139,7 +141,7 @@ func (g GiteaEntityTools) MinimumVersion() (GiteaEntityTool, bool) { return GiteaEntityTool{}, false } -func getTools() ([]commonParams.RunnerApplicationDownload, error) { +func getTools(ctx context.Context) ([]commonParams.RunnerApplicationDownload, error) { resp, err := http.Get(GiteaRunnerReleasesURL) if err != nil { return nil, err @@ -170,11 +172,13 @@ func getTools() ([]commonParams.RunnerApplicationDownload, error) { for _, asset := range latest.Assets { arch, err := asset.GetArch() if err != nil { - return nil, fmt.Errorf("getting arch: %w", err) + slog.InfoContext(ctx, "ignoring unrecognized tools arch", "tool", asset.Name) + continue } os, err := asset.GetOS() if err != nil { - return nil, fmt.Errorf("getting os: %w", err) + slog.InfoContext(ctx, "ignoring unrecognized tools os", "tool", asset.Name) + continue } ret = append(ret, commonParams.RunnerApplicationDownload{ OS: os, diff --git a/workers/cache/tool_cache.go b/workers/cache/tool_cache.go index 6cbcc716..fcf66757 100644 --- a/workers/cache/tool_cache.go +++ b/workers/cache/tool_cache.go @@ -162,7 +162,7 @@ func (t *toolsUpdater) giteaUpdateLoop() { randInt = big.NewInt(0) } t.sleepWithCancel(time.Duration(randInt.Int64()) * time.Millisecond) - tools, err := getTools() + tools, err := getTools(t.ctx) if err != nil { t.addStatusEvent(fmt.Sprintf("failed to update gitea tools: %q", err), params.EventError) } else { @@ -181,7 +181,7 @@ func (t *toolsUpdater) giteaUpdateLoop() { case <-t.ctx.Done(): return case <-ticker.C: - tools, err := getTools() + tools, err := getTools(t.ctx) if err != nil { t.addStatusEvent(fmt.Sprintf("failed to update gitea tools: %q", err), params.EventError) slog.DebugContext(t.ctx, "failed to update gitea tools", "error", err) From a984782fd73eae3f0ef4dffede1f901a4437e331 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Fri, 18 Jul 2025 07:51:50 +0000 Subject: [PATCH 124/179] Handle new jobID for scale sets There seems to be a change in the scale set message. It now includes a jobID and sets the runner request ID to 0. This change adds separate job ID fields for workflow jobs and scaleset jobs. 
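For illustration, a small self-contained sketch (simplified types, not the GARM code itself) of the idea behind the separate fields: the scale set message carries a string jobId while runnerRequestId may now be 0, so jobs are keyed by scale_set_job_id when that field is set and by the numeric workflow job ID otherwise. The example GUID and the lookupKey helper are invented for the sketch.

    package main

    import (
        "encoding/json"
        "fmt"
    )

    type scaleSetJobMessage struct {
        MessageType     string `json:"messageType,omitempty"`
        JobID           string `json:"jobId,omitempty"`
        RunnerRequestID int64  `json:"runnerRequestId,omitempty"`
    }

    type job struct {
        WorkflowJobID int64
        ScaleSetJobID string
    }

    // lookupKey mirrors picking the search column: scale set jobs are
    // identified by their string job ID, workflow jobs by their numeric ID.
    func lookupKey(j job) (column string, value any) {
        if j.ScaleSetJobID != "" {
            return "scale_set_job_id", j.ScaleSetJobID
        }
        return "workflow_job_id", j.WorkflowJobID
    }

    func main() {
        raw := `{"messageType":"JobAssigned","jobId":"example-job-guid","runnerRequestId":0}`
        var msg scaleSetJobMessage
        if err := json.Unmarshal([]byte(raw), &msg); err != nil {
            panic(err)
        }
        col, val := lookupKey(job{ScaleSetJobID: msg.JobID})
        fmt.Println(col, val)
    }
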
Signed-off-by: Gabriel Adrian Samfira --- database/sql/jobs.go | 48 ++++++++++++++++++++++----- database/sql/models.go | 6 ++++ database/sql/sql.go | 29 ++++++++++------ params/github.go | 4 +-- params/params.go | 4 +++ runner/pool/pool.go | 44 ++++++++++++------------ workers/scaleset/scaleset_helper.go | 3 +- workers/scaleset/scaleset_listener.go | 28 ++++++++-------- 8 files changed, 107 insertions(+), 59 deletions(-) diff --git a/database/sql/jobs.go b/database/sql/jobs.go index 1215e3f3..ff19394f 100644 --- a/database/sql/jobs.go +++ b/database/sql/jobs.go @@ -41,6 +41,8 @@ func sqlWorkflowJobToParamsJob(job WorkflowJob) (params.Job, error) { jobParam := params.Job{ ID: job.ID, + WorkflowJobID: job.WorkflowJobID, + ScaleSetJobID: job.ScaleSetJobID, RunID: job.RunID, Action: job.Action, Status: job.Status, @@ -75,7 +77,8 @@ func (s *sqlDatabase) paramsJobToWorkflowJob(ctx context.Context, job params.Job } workflofJob := WorkflowJob{ - ID: job.ID, + ScaleSetJobID: job.ScaleSetJobID, + WorkflowJobID: job.ID, RunID: job.RunID, Action: job.Action, Status: job.Status, @@ -109,14 +112,27 @@ func (s *sqlDatabase) paramsJobToWorkflowJob(ctx context.Context, job params.Job } func (s *sqlDatabase) DeleteJob(_ context.Context, jobID int64) (err error) { + var workflowJob WorkflowJob + q := s.conn.Where("workflow_job_id = ?", jobID).Preload("Instance").First(&workflowJob) + if q.Error != nil { + if errors.Is(q.Error, gorm.ErrRecordNotFound) { + return nil + } + return errors.Wrap(q.Error, "fetching job") + } + removedJob, err := sqlWorkflowJobToParamsJob(workflowJob) + if err != nil { + return errors.Wrap(err, "converting job") + } + defer func() { if err == nil { - if notifyErr := s.sendNotify(common.JobEntityType, common.DeleteOperation, params.Job{ID: jobID}); notifyErr != nil { + if notifyErr := s.sendNotify(common.JobEntityType, common.DeleteOperation, removedJob); notifyErr != nil { slog.With(slog.Any("error", notifyErr)).Error("failed to send notify") } } }() - q := s.conn.Delete(&WorkflowJob{}, jobID) + q = s.conn.Delete(&workflowJob) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return nil @@ -132,7 +148,7 @@ func (s *sqlDatabase) LockJob(_ context.Context, jobID int64, entityID string) e return errors.Wrap(err, "parsing entity id") } var workflowJob WorkflowJob - q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Preload("Instance").Where("id = ?", jobID).First(&workflowJob) + q := s.conn.Preload("Instance").Where("id = ?", jobID).First(&workflowJob) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { @@ -167,7 +183,7 @@ func (s *sqlDatabase) LockJob(_ context.Context, jobID int64, entityID string) e func (s *sqlDatabase) BreakLockJobIsQueued(_ context.Context, jobID int64) (err error) { var workflowJob WorkflowJob - q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Preload("Instance").Where("id = ? and status = ?", jobID, params.JobStatusQueued).First(&workflowJob) + q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Preload("Instance").Where("workflow_job_id = ? 
and status = ?", jobID, params.JobStatusQueued).First(&workflowJob) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { @@ -195,7 +211,7 @@ func (s *sqlDatabase) BreakLockJobIsQueued(_ context.Context, jobID int64) (err func (s *sqlDatabase) UnlockJob(_ context.Context, jobID int64, entityID string) error { var workflowJob WorkflowJob - q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Where("id = ?", jobID).First(&workflowJob) + q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Where("workflow_job_id = ?", jobID).First(&workflowJob) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { @@ -229,7 +245,14 @@ func (s *sqlDatabase) UnlockJob(_ context.Context, jobID int64, entityID string) func (s *sqlDatabase) CreateOrUpdateJob(ctx context.Context, job params.Job) (params.Job, error) { var workflowJob WorkflowJob var err error - q := s.conn.Clauses(clause.Locking{Strength: "UPDATE"}).Preload("Instance").Where("id = ?", job.ID).First(&workflowJob) + + searchField := "workflow_job_id = ?" + var searchVal any = job.ID + if job.ScaleSetJobID != "" { + searchField = "scale_set_job_id = ?" + searchVal = job.ScaleSetJobID + } + q := s.conn.Preload("Instance").Where(searchField, searchVal).First(&workflowJob) if q.Error != nil { if !errors.Is(q.Error, gorm.ErrRecordNotFound) { @@ -249,6 +272,9 @@ func (s *sqlDatabase) CreateOrUpdateJob(ctx context.Context, job params.Job) (pa workflowJob.GithubRunnerID = job.GithubRunnerID workflowJob.RunnerGroupID = job.RunnerGroupID workflowJob.RunnerGroupName = job.RunnerGroupName + if job.RunID != 0 && workflowJob.RunID == 0 { + workflowJob.RunID = job.RunID + } if job.LockedBy != uuid.Nil { workflowJob.LockedBy = job.LockedBy @@ -327,7 +353,11 @@ func (s *sqlDatabase) ListEntityJobsByStatus(_ context.Context, entityType param } var jobs []WorkflowJob - query := s.conn.Model(&WorkflowJob{}).Preload("Instance").Where("status = ?", status) + query := s.conn. + Model(&WorkflowJob{}). + Preload("Instance"). + Where("status = ?", status). + Where("workflow_job_id > 0") switch entityType { case params.ForgeEntityTypeOrganization: @@ -381,7 +411,7 @@ func (s *sqlDatabase) ListAllJobs(_ context.Context) ([]params.Job, error) { // GetJobByID gets a job by id. func (s *sqlDatabase) GetJobByID(_ context.Context, jobID int64) (params.Job, error) { var job WorkflowJob - query := s.conn.Model(&WorkflowJob{}).Preload("Instance").Where("id = ?", jobID) + query := s.conn.Model(&WorkflowJob{}).Preload("Instance").Where("workflow_job_id = ?", jobID) if err := query.First(&job); err.Error != nil { if errors.Is(err.Error, gorm.ErrRecordNotFound) { diff --git a/database/sql/models.go b/database/sql/models.go index 4cdb9b8b..8944dee1 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -319,6 +319,12 @@ type User struct { type WorkflowJob struct { // ID is the ID of the job. ID int64 `gorm:"index"` + + // WorkflowJobID is the ID of the workflow job. + WorkflowJobID int64 `gorm:"index:workflow_job_id_idx"` + // ScaleSetJobID is the job ID for a scaleset job. + ScaleSetJobID string `gorm:"index:scaleset_job_id_idx"` + // RunID is the ID of the workflow run. A run may have multiple jobs. RunID int64 // Action is the specific activity that triggered the event. 
diff --git a/database/sql/sql.go b/database/sql/sql.go index d6e60586..16411364 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -374,6 +374,22 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { return nil } +func (s *sqlDatabase) migrateWorkflow() error { + if s.conn.Migrator().HasTable(&WorkflowJob{}) { + if s.conn.Migrator().HasColumn(&WorkflowJob{}, "runner_name") { + // Remove jobs that are not in "queued" status. We really only care about queued jobs. Once they transition + // to something else, we don't really consume them anyway. + if err := s.conn.Exec("delete from workflow_jobs where status is not 'queued'").Error; err != nil { + return errors.Wrap(err, "updating workflow_jobs") + } + if err := s.conn.Migrator().DropColumn(&WorkflowJob{}, "runner_name"); err != nil { + return errors.Wrap(err, "updating workflow_jobs") + } + } + } + return nil +} + func (s *sqlDatabase) migrateDB() error { if s.conn.Migrator().HasIndex(&Organization{}, "idx_organizations_name") { if err := s.conn.Migrator().DropIndex(&Organization{}, "idx_organizations_name"); err != nil { @@ -405,17 +421,8 @@ func (s *sqlDatabase) migrateDB() error { } } - if s.conn.Migrator().HasTable(&WorkflowJob{}) { - if s.conn.Migrator().HasColumn(&WorkflowJob{}, "runner_name") { - // Remove jobs that are not in "queued" status. We really only care about queued jobs. Once they transition - // to something else, we don't really consume them anyway. - if err := s.conn.Exec("delete from workflow_jobs where status is not 'queued'").Error; err != nil { - return errors.Wrap(err, "updating workflow_jobs") - } - if err := s.conn.Migrator().DropColumn(&WorkflowJob{}, "runner_name"); err != nil { - return errors.Wrap(err, "updating workflow_jobs") - } - } + if err := s.migrateWorkflow(); err != nil { + return errors.Wrap(err, "migrating workflows") } if s.conn.Migrator().HasTable(&GithubEndpoint{}) { diff --git a/params/github.go b/params/github.go index 9859f717..cb68d880 100644 --- a/params/github.go +++ b/params/github.go @@ -420,7 +420,6 @@ func (r RunnerScaleSetMessage) GetJobsFromBody() ([]ScaleSetJobMessage, error) { if r.Body == "" { return nil, fmt.Errorf("no body specified") } - if err := json.Unmarshal([]byte(r.Body), &body); err != nil { return nil, fmt.Errorf("failed to unmarshal body: %w", err) } @@ -519,6 +518,7 @@ type RunnerGroupList struct { type ScaleSetJobMessage struct { MessageType string `json:"messageType,omitempty"` + JobID string `json:"jobId,omitempty"` RunnerRequestID int64 `json:"runnerRequestId,omitempty"` RepositoryName string `json:"repositoryName,omitempty"` OwnerName string `json:"ownerName,omitempty"` @@ -552,7 +552,7 @@ func (s ScaleSetJobMessage) MessageTypeToStatus() JobStatus { func (s ScaleSetJobMessage) ToJob() Job { return Job{ - ID: s.RunnerRequestID, + ScaleSetJobID: s.JobID, Action: s.EventName, RunID: s.WorkflowRunID, Status: string(s.MessageTypeToStatus()), diff --git a/params/params.go b/params/params.go index 6e1bc1aa..9cd4fc83 100644 --- a/params/params.go +++ b/params/params.go @@ -1035,6 +1035,10 @@ func (p RunnerPrefix) GetRunnerPrefix() string { type Job struct { // ID is the ID of the job. ID int64 `json:"id,omitempty"` + + WorkflowJobID int64 `json:"workflow_job_id,omitempty"` + // ScaleSetJobID is the job ID when generated for a scale set. + ScaleSetJobID string `json:"scaleset_job_id,omitempty"` // RunID is the ID of the workflow run. A run may have multiple jobs. 
RunID int64 `json:"run_id,omitempty"` // Action is the specific activity that triggered the event. diff --git a/runner/pool/pool.go b/runner/pool/pool.go index c161e41d..1afee56e 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -176,19 +176,19 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { var triggeredBy int64 defer func() { - if jobParams.ID == 0 { + if jobParams.WorkflowJobID == 0 { return } // we're updating the job in the database, regardless of whether it was successful or not. // or if it was meant for this pool or not. Github will send the same job data to all hierarchies // that have been configured to work with garm. Updating the job at all levels should yield the same // outcome in the db. - _, err := r.store.GetJobByID(r.ctx, jobParams.ID) + _, err := r.store.GetJobByID(r.ctx, jobParams.WorkflowJobID) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to get job", - "job_id", jobParams.ID) + "job_id", jobParams.WorkflowJobID) return } // This job is new to us. Check if we have a pool that can handle it. @@ -203,10 +203,10 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { if _, jobErr := r.store.CreateOrUpdateJob(r.ctx, jobParams); jobErr != nil { slog.With(slog.Any("error", jobErr)).ErrorContext( - r.ctx, "failed to update job", "job_id", jobParams.ID) + r.ctx, "failed to update job", "job_id", jobParams.WorkflowJobID) } - if triggeredBy != 0 && jobParams.ID != triggeredBy { + if triggeredBy != 0 && jobParams.WorkflowJobID != triggeredBy { // The triggeredBy value is only set by the "in_progress" webhook. The runner that // transitioned to in_progress was created as a result of a different queued job. If that job is // still queued and we don't remove the lock, it will linger until the lock timeout is reached. @@ -970,7 +970,7 @@ func (r *basePoolManager) paramsWorkflowJobToParamsJob(job params.WorkflowJob) ( } jobParams := params.Job{ - ID: job.WorkflowJob.ID, + WorkflowJobID: job.WorkflowJob.ID, Action: job.Action, RunID: job.WorkflowJob.RunID, Status: job.WorkflowJob.Status, @@ -1106,10 +1106,10 @@ func (r *basePoolManager) scaleDownOnePool(ctx context.Context, pool params.Pool for _, job := range queued { if time.Since(job.CreatedAt).Minutes() > 10 && pool.HasRequiredLabels(job.Labels) { - if err := r.store.DeleteJob(ctx, job.ID); err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { + if err := r.store.DeleteJob(ctx, job.WorkflowJobID); err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { slog.With(slog.Any("error", err)).ErrorContext( ctx, "failed to delete job", - "job_id", job.ID) + "job_id", job.WorkflowJobID) } } } @@ -1760,7 +1760,7 @@ func (r *basePoolManager) consumeQueuedJobs() error { // Job was handled by us or another entity. slog.DebugContext( r.ctx, "job is locked", - "job_id", job.ID, + "job_id", job.WorkflowJobID, "locking_entity", job.LockedBy.String()) continue } @@ -1769,7 +1769,7 @@ func (r *basePoolManager) consumeQueuedJobs() error { // give the idle runners a chance to pick up the job. slog.DebugContext( r.ctx, "job backoff not reached", "backoff_interval", r.controllerInfo.MinimumJobAgeBackoff, - "job_id", job.ID) + "job_id", job.WorkflowJobID) continue } @@ -1777,12 +1777,12 @@ func (r *basePoolManager) consumeQueuedJobs() error { // Job is still queued in our db, 10 minutes after a matching runner // was spawned. Unlock it and try again. A different job may have picked up // the runner. 
- if err := r.store.UnlockJob(r.ctx, job.ID, r.ID()); err != nil { + if err := r.store.UnlockJob(r.ctx, job.WorkflowJobID, r.ID()); err != nil { // nolint:golangci-lint,godox // TODO: Implament a cache? Should we return here? slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to unlock job", - "job_id", job.ID) + "job_id", job.WorkflowJobID) continue } } @@ -1795,7 +1795,7 @@ func (r *basePoolManager) consumeQueuedJobs() error { // runner. slog.DebugContext( r.ctx, "job is locked by us", - "job_id", job.ID) + "job_id", job.WorkflowJobID) continue } @@ -1816,29 +1816,29 @@ func (r *basePoolManager) consumeQueuedJobs() error { } runnerCreated := false - if err := r.store.LockJob(r.ctx, job.ID, r.ID()); err != nil { + if err := r.store.LockJob(r.ctx, job.WorkflowJobID, r.ID()); err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "could not lock job", - "job_id", job.ID) + "job_id", job.WorkflowJobID) continue } jobLabels := []string{ - fmt.Sprintf("%s=%d", jobLabelPrefix, job.ID), + fmt.Sprintf("%s=%d", jobLabelPrefix, job.WorkflowJobID), } for i := 0; i < poolRR.Len(); i++ { pool, err := poolRR.Next() if err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "could not find a pool to create a runner for job", - "job_id", job.ID) + "job_id", job.WorkflowJobID) break } slog.InfoContext( r.ctx, "attempting to create a runner in pool", "pool_id", pool.ID, - "job_id", job.ID) + "job_id", job.WorkflowJobID) if err := r.addRunnerToPool(pool, jobLabels); err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "could not add runner to pool", @@ -1847,7 +1847,7 @@ func (r *basePoolManager) consumeQueuedJobs() error { } slog.DebugContext(r.ctx, "a new runner was added as a response to queued job", "pool_id", pool.ID, - "job_id", job.ID) + "job_id", job.WorkflowJobID) runnerCreated = true break } @@ -1855,11 +1855,11 @@ func (r *basePoolManager) consumeQueuedJobs() error { if !runnerCreated { slog.WarnContext( r.ctx, "could not create a runner for job; unlocking", - "job_id", job.ID) - if err := r.store.UnlockJob(r.ctx, job.ID, r.ID()); err != nil { + "job_id", job.WorkflowJobID) + if err := r.store.UnlockJob(r.ctx, job.WorkflowJobID, r.ID()); err != nil { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to unlock job", - "job_id", job.ID) + "job_id", job.WorkflowJobID) return errors.Wrap(err, "unlocking job") } } diff --git a/workers/scaleset/scaleset_helper.go b/workers/scaleset/scaleset_helper.go index 7b3fdf03..c04c92a2 100644 --- a/workers/scaleset/scaleset_helper.go +++ b/workers/scaleset/scaleset_helper.go @@ -80,7 +80,7 @@ func (w *Worker) recordOrUpdateJob(job params.ScaleSetJobMessage) error { case params.ForgeEntityTypeOrganization: jobParams.OrgID = &asUUID default: - return fmt.Errorf("unknown entity type: %s", entity.EntityType) + return fmt.Errorf("unknown entity type: %s --> %s", entity.EntityType, entity) } if _, jobErr := w.store.CreateOrUpdateJob(w.ctx, jobParams); jobErr != nil { @@ -163,6 +163,7 @@ func (w *Worker) HandleJobsStarted(jobs []params.ScaleSetJobMessage) (err error) } func (w *Worker) HandleJobsAvailable(jobs []params.ScaleSetJobMessage) error { + slog.DebugContext(w.ctx, "handling jobs available", "jobs", jobs) for _, job := range jobs { if err := w.recordOrUpdateJob(job); err != nil { // recording scale set jobs are purely informational for now. 
diff --git a/workers/scaleset/scaleset_listener.go b/workers/scaleset/scaleset_listener.go index 1274ee59..7808f9f6 100644 --- a/workers/scaleset/scaleset_listener.go +++ b/workers/scaleset/scaleset_listener.go @@ -150,28 +150,22 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage for _, job := range body { switch job.MessageType { case params.MessageTypeJobAssigned: - slog.InfoContext(l.ctx, "new job assigned", "job_id", job.RunnerRequestID, "job_name", job.JobDisplayName) + slog.InfoContext(l.ctx, "new job assigned", "job_id", job.JobID, "job_name", job.JobDisplayName) assignedJobs = append(assignedJobs, job) case params.MessageTypeJobStarted: - slog.InfoContext(l.ctx, "job started", "job_id", job.RunnerRequestID, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) + slog.InfoContext(l.ctx, "job started", "job_id", job.JobID, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) startedJobs = append(startedJobs, job) case params.MessageTypeJobCompleted: - slog.InfoContext(l.ctx, "job completed", "job_id", job.RunnerRequestID, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) + slog.InfoContext(l.ctx, "job completed", "job_id", job.JobID, "job_name", job.JobDisplayName, "runner_name", job.RunnerName) completedJobs = append(completedJobs, job) case params.MessageTypeJobAvailable: - slog.InfoContext(l.ctx, "job available", "job_id", job.RunnerRequestID, "job_name", job.JobDisplayName) + slog.InfoContext(l.ctx, "job available", "job_id", job.JobID, "job_name", job.JobDisplayName) availableJobs = append(availableJobs, job) default: slog.DebugContext(l.ctx, "unknown message type", "message_type", job.MessageType) } } - if len(assignedJobs) > 0 { - if err := l.scaleSetHelper.HandleJobsAvailable(assignedJobs); err != nil { - slog.ErrorContext(l.ctx, "error handling available jobs", "error", err) - } - } - scaleSetClient, err := l.scaleSetHelper.GetScaleSetClient() if err != nil { slog.ErrorContext(l.ctx, "getting scale set client", "error", err) @@ -198,10 +192,9 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage slog.DebugContext(l.ctx, "acquired jobs", "job_ids", idsAcquired) } - if len(completedJobs) > 0 { - if err := l.scaleSetHelper.HandleJobsCompleted(completedJobs); err != nil { - slog.ErrorContext(l.ctx, "error handling completed jobs", "error", err) - return + if len(assignedJobs) > 0 { + if err := l.scaleSetHelper.HandleJobsAvailable(assignedJobs); err != nil { + slog.ErrorContext(l.ctx, "error handling available jobs", "error", err) } } @@ -212,6 +205,13 @@ func (l *scaleSetListener) handleSessionMessage(msg params.RunnerScaleSetMessage } } + if len(completedJobs) > 0 { + if err := l.scaleSetHelper.HandleJobsCompleted(completedJobs); err != nil { + slog.ErrorContext(l.ctx, "error handling completed jobs", "error", err) + return + } + } + if err := l.scaleSetHelper.SetLastMessageID(msg.MessageID); err != nil { slog.ErrorContext(l.ctx, "setting last message ID", "error", err) } else { From 5addd8d50afe6984d38b4ac76ba5644f7a957cae Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 20 Jul 2025 00:35:38 +0000 Subject: [PATCH 125/179] Update dependencies Update all dependencies. 
Signed-off-by: Gabriel Adrian Samfira --- go.mod | 28 +- go.sum | 59 +- .../golang-jwt/jwt/v5/MIGRATION_GUIDE.md | 2 +- vendor/github.com/golang-jwt/jwt/v5/ecdsa.go | 4 +- .../golang-jwt/jwt/v5/ecdsa_utils.go | 4 +- .../github.com/golang-jwt/jwt/v5/ed25519.go | 4 +- .../golang-jwt/jwt/v5/ed25519_utils.go | 4 +- vendor/github.com/golang-jwt/jwt/v5/hmac.go | 4 +- .../golang-jwt/jwt/v5/map_claims.go | 8 +- vendor/github.com/golang-jwt/jwt/v5/none.go | 4 +- .../golang-jwt/jwt/v5/parser_option.go | 25 +- vendor/github.com/golang-jwt/jwt/v5/rsa.go | 4 +- .../github.com/golang-jwt/jwt/v5/rsa_pss.go | 4 +- .../github.com/golang-jwt/jwt/v5/rsa_utils.go | 6 +- .../golang-jwt/jwt/v5/signing_method.go | 6 +- vendor/github.com/golang-jwt/jwt/v5/token.go | 20 +- vendor/github.com/golang-jwt/jwt/v5/types.go | 4 +- .../github.com/golang-jwt/jwt/v5/validator.go | 50 +- .../prometheus/common/model/time.go | 25 +- .../prometheus/procfs/Makefile.common | 15 +- vendor/github.com/prometheus/procfs/mdstat.go | 5 +- .../github.com/prometheus/procfs/meminfo.go | 33 + .../github.com/prometheus/procfs/proc_stat.go | 12 +- .../prometheus/procfs/proc_statm.go | 116 + vendor/github.com/spf13/pflag/README.md | 27 + vendor/github.com/spf13/pflag/bool_func.go | 40 + vendor/github.com/spf13/pflag/count.go | 2 +- vendor/github.com/spf13/pflag/errors.go | 149 + vendor/github.com/spf13/pflag/flag.go | 85 +- vendor/github.com/spf13/pflag/func.go | 37 + vendor/github.com/spf13/pflag/golangflag.go | 22 + vendor/github.com/spf13/pflag/ipnet_slice.go | 2 +- vendor/github.com/spf13/pflag/text.go | 81 + vendor/github.com/spf13/pflag/time.go | 118 + .../go.opentelemetry.io/otel/.clomonitor.yml | 3 + vendor/go.opentelemetry.io/otel/.golangci.yml | 6 +- vendor/go.opentelemetry.io/otel/CHANGELOG.md | 60 +- .../go.opentelemetry.io/otel/CONTRIBUTING.md | 17 +- vendor/go.opentelemetry.io/otel/Makefile | 2 +- vendor/go.opentelemetry.io/otel/README.md | 1 + vendor/go.opentelemetry.io/otel/RELEASING.md | 23 + .../otel/dependencies.Dockerfile | 4 +- .../otel/semconv/v1.34.0/MIGRATION.md | 4 + .../otel/semconv/v1.34.0/README.md | 3 + .../otel/semconv/v1.34.0/attribute_group.go | 13851 ++++++++++++++++ .../otel/semconv/v1.34.0/doc.go | 9 + .../otel/semconv/v1.34.0/exception.go | 9 + .../otel/semconv/v1.34.0/schema.go | 9 + vendor/go.opentelemetry.io/otel/trace/auto.go | 2 +- vendor/go.opentelemetry.io/otel/version.go | 2 +- vendor/go.opentelemetry.io/otel/versions.yaml | 11 +- vendor/golang.org/x/sync/errgroup/errgroup.go | 118 +- vendor/golang.org/x/sys/unix/zerrors_linux.go | 25 +- .../x/sys/unix/zerrors_linux_386.go | 1 + .../x/sys/unix/zerrors_linux_amd64.go | 1 + .../x/sys/unix/zerrors_linux_arm.go | 1 + .../x/sys/unix/zerrors_linux_arm64.go | 1 + .../x/sys/unix/zerrors_linux_loong64.go | 1 + .../x/sys/unix/zerrors_linux_mips.go | 1 + .../x/sys/unix/zerrors_linux_mips64.go | 1 + .../x/sys/unix/zerrors_linux_mips64le.go | 1 + .../x/sys/unix/zerrors_linux_mipsle.go | 1 + .../x/sys/unix/zerrors_linux_ppc.go | 1 + .../x/sys/unix/zerrors_linux_ppc64.go | 1 + .../x/sys/unix/zerrors_linux_ppc64le.go | 1 + .../x/sys/unix/zerrors_linux_riscv64.go | 1 + .../x/sys/unix/zerrors_linux_s390x.go | 1 + .../x/sys/unix/zerrors_linux_sparc64.go | 1 + vendor/golang.org/x/sys/unix/ztypes_linux.go | 108 +- .../golang.org/x/sys/unix/ztypes_linux_386.go | 16 + .../x/sys/unix/ztypes_linux_amd64.go | 16 + .../golang.org/x/sys/unix/ztypes_linux_arm.go | 16 + .../x/sys/unix/ztypes_linux_arm64.go | 16 + .../x/sys/unix/ztypes_linux_loong64.go | 16 + 
.../x/sys/unix/ztypes_linux_mips.go | 16 + .../x/sys/unix/ztypes_linux_mips64.go | 16 + .../x/sys/unix/ztypes_linux_mips64le.go | 16 + .../x/sys/unix/ztypes_linux_mipsle.go | 16 + .../golang.org/x/sys/unix/ztypes_linux_ppc.go | 16 + .../x/sys/unix/ztypes_linux_ppc64.go | 16 + .../x/sys/unix/ztypes_linux_ppc64le.go | 16 + .../x/sys/unix/ztypes_linux_riscv64.go | 16 + .../x/sys/unix/ztypes_linux_s390x.go | 16 + .../x/sys/unix/ztypes_linux_sparc64.go | 16 + vendor/gorm.io/datatypes/json.go | 24 +- vendor/modules.txt | 29 +- 86 files changed, 15288 insertions(+), 296 deletions(-) create mode 100644 vendor/github.com/prometheus/procfs/proc_statm.go create mode 100644 vendor/github.com/spf13/pflag/bool_func.go create mode 100644 vendor/github.com/spf13/pflag/errors.go create mode 100644 vendor/github.com/spf13/pflag/func.go create mode 100644 vendor/github.com/spf13/pflag/text.go create mode 100644 vendor/github.com/spf13/pflag/time.go create mode 100644 vendor/go.opentelemetry.io/otel/.clomonitor.yml create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.34.0/MIGRATION.md create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.34.0/README.md create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.34.0/attribute_group.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.34.0/doc.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.34.0/exception.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.34.0/schema.go diff --git a/go.mod b/go.mod index b572643f..b60697c7 100644 --- a/go.mod +++ b/go.mod @@ -7,13 +7,13 @@ toolchain go1.23.6 require ( github.com/BurntSushi/toml v1.5.0 github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 - github.com/cloudbase/garm-provider-common v0.1.5 + github.com/cloudbase/garm-provider-common v0.1.6 github.com/felixge/httpsnoop v1.0.4 github.com/go-openapi/errors v0.22.1 github.com/go-openapi/runtime v0.28.0 github.com/go-openapi/strfmt v0.23.0 github.com/go-openapi/swag v0.23.1 - github.com/golang-jwt/jwt/v5 v5.2.2 + github.com/golang-jwt/jwt/v5 v5.2.3 github.com/google/go-github/v72 v72.0.0 github.com/google/uuid v1.6.0 github.com/gorilla/handlers v1.5.2 @@ -28,12 +28,12 @@ require ( github.com/prometheus/client_golang v1.22.0 github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 - golang.org/x/crypto v0.39.0 + golang.org/x/crypto v0.40.0 golang.org/x/oauth2 v0.30.0 - golang.org/x/sync v0.15.0 + golang.org/x/sync v0.16.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 - gorm.io/datatypes v1.2.5 + gorm.io/datatypes v1.2.6 gorm.io/driver/mysql v1.6.0 gorm.io/driver/sqlite v1.6.0 gorm.io/gorm v1.30.0 @@ -75,20 +75,20 @@ require ( github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_model v0.6.2 // indirect - github.com/prometheus/common v0.64.0 // indirect - github.com/prometheus/procfs v0.16.1 // indirect + github.com/prometheus/common v0.65.0 // indirect + github.com/prometheus/procfs v0.17.0 // indirect github.com/rivo/uniseg v0.4.7 // indirect - github.com/spf13/pflag v1.0.6 // indirect + github.com/spf13/pflag v1.0.7 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 // indirect go.mongodb.org/mongo-driver v1.17.4 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/otel v1.36.0 // indirect - go.opentelemetry.io/otel/metric v1.36.0 // indirect - 
go.opentelemetry.io/otel/trace v1.36.0 // indirect - golang.org/x/net v0.41.0 // indirect - golang.org/x/sys v0.33.0 // indirect - golang.org/x/text v0.26.0 // indirect + go.opentelemetry.io/otel v1.37.0 // indirect + go.opentelemetry.io/otel/metric v1.37.0 // indirect + go.opentelemetry.io/otel/trace v1.37.0 // indirect + golang.org/x/net v0.42.0 // indirect + golang.org/x/sys v0.34.0 // indirect + golang.org/x/text v0.27.0 // indirect google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index e557709f..aa948fc0 100644 --- a/go.sum +++ b/go.sum @@ -19,8 +19,8 @@ github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObk github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04= github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= -github.com/cloudbase/garm-provider-common v0.1.5 h1:aJL646l+VnZceQ2grbDYhWfxYpaQR2/QsUSD76kSZVs= -github.com/cloudbase/garm-provider-common v0.1.5/go.mod h1:2O51WbcfqRx5fDHyyJgIFq7KdTZZnefsM+aoOchyleU= +github.com/cloudbase/garm-provider-common v0.1.6 h1:wLqolRkUD2Z4rzuBLDs2exL1Aq+eJ5RBVnRvk5JP6fs= +github.com/cloudbase/garm-provider-common v0.1.6/go.mod h1:2O51WbcfqRx5fDHyyJgIFq7KdTZZnefsM+aoOchyleU= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -56,8 +56,8 @@ github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1 github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= -github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.2.3 h1:kkGXqQOBSDDWRhWNXTFpqGSCMyh/PLnqUvMGJPDJDs0= +github.com/golang-jwt/jwt/v5 v5.2.3/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= @@ -152,10 +152,10 @@ github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/ github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0= github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= -github.com/prometheus/common v0.64.0 h1:pdZeA+g617P7oGv1CzdTzyeShxAGrTBsolKNOLQPGO4= -github.com/prometheus/common v0.64.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= -github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= -github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= +github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE= 
+github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= +github.com/prometheus/procfs v0.17.0 h1:FuLQ+05u4ZI+SS/w9+BWEM2TXiHKsUQ9TADiRH7DuK0= +github.com/prometheus/procfs v0.17.0/go.mod h1:oPQLaDAMRbA+u8H5Pbfq+dl3VDAvHxMUOVhe0wYB2zw= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= @@ -164,8 +164,9 @@ github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWN github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= -github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.7 h1:vN6T9TfwStFPFM5XzjsvmzZkLuaLX+HS+0SeFLRgU6M= +github.com/spf13/pflag v1.0.7/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= @@ -179,29 +180,29 @@ go.mongodb.org/mongo-driver v1.17.4 h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFX go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= -go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= -go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= -go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= -go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= -go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= -golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= -golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= -golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= -golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= +golang.org/x/crypto v0.40.0/go.mod 
h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= +golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= +golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= -golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= -golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= -golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= +golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= +golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= @@ -217,15 +218,15 @@ gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gorm.io/datatypes v1.2.5 h1:9UogU3jkydFVW1bIVVeoYsTpLRgwDVW3rHfJG6/Ek9I= -gorm.io/datatypes v1.2.5/go.mod h1:I5FUdlKpLb5PMqeMQhm30CQ6jXP8Rj89xkTeCSAaAD4= +gorm.io/datatypes v1.2.6 h1:KafLdXvFUhzNeL2ncm03Gl3eTLONQfNKZ+wJ+9Y4Nck= +gorm.io/datatypes v1.2.6/go.mod h1:M2iO+6S3hhi4nAyYe444Pcb0dcIiOMJ7QHaUXxyiNZY= gorm.io/driver/mysql v1.6.0 h1:eNbLmNTpPpTOVZi8MMxCi2aaIm0ZpInbORNXDwyLGvg= gorm.io/driver/mysql v1.6.0/go.mod h1:D/oCC2GWK3M/dqoLxnOlaNKmXz8WNTfcS9y5ovaSqKo= gorm.io/driver/postgres v1.5.0 h1:u2FXTy14l45qc3UeCJ7QaAXZmZfDDv0YrthvmRq1l0U= gorm.io/driver/postgres v1.5.0/go.mod h1:FUZXzO+5Uqg5zzwzv4KK49R8lvGIyscBOqYrtI1Ce9A= gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ= gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8= -gorm.io/driver/sqlserver v1.5.4 h1:xA+Y1KDNspv79q43bPyjDMUgHoYHLhXYmdFcYPobg8g= -gorm.io/driver/sqlserver v1.5.4/go.mod h1:+frZ/qYmuna11zHPlh5oc2O6ZA/lS88Keb0XSH1Zh/g= +gorm.io/driver/sqlserver v1.6.0 h1:VZOBQVsVhkHU/NzNhRJKoANt5pZGQAS1Bwc6m6dgfnc= +gorm.io/driver/sqlserver v1.6.0/go.mod h1:WQzt4IJo/WHKnckU9jXBLMJIVNMVeTu25dnOzehntWw= gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs= gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/vendor/github.com/golang-jwt/jwt/v5/MIGRATION_GUIDE.md 
b/vendor/github.com/golang-jwt/jwt/v5/MIGRATION_GUIDE.md index ff9c57e1..b3178e75 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/MIGRATION_GUIDE.md +++ b/vendor/github.com/golang-jwt/jwt/v5/MIGRATION_GUIDE.md @@ -155,7 +155,7 @@ stored in base64 encoded form, which was redundant with the information in the type Token struct { Raw string // Raw contains the raw token Method SigningMethod // Method is the signing method used or to be used - Header map[string]interface{} // Header is the first segment of the token in decoded form + Header map[string]any // Header is the first segment of the token in decoded form Claims Claims // Claims is the second segment of the token in decoded form Signature []byte // Signature is the third segment of the token in decoded form Valid bool // Valid specifies if the token is valid diff --git a/vendor/github.com/golang-jwt/jwt/v5/ecdsa.go b/vendor/github.com/golang-jwt/jwt/v5/ecdsa.go index c929e4a0..06cd94d2 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/ecdsa.go +++ b/vendor/github.com/golang-jwt/jwt/v5/ecdsa.go @@ -55,7 +55,7 @@ func (m *SigningMethodECDSA) Alg() string { // Verify implements token verification for the SigningMethod. // For this verify method, key must be an ecdsa.PublicKey struct -func (m *SigningMethodECDSA) Verify(signingString string, sig []byte, key interface{}) error { +func (m *SigningMethodECDSA) Verify(signingString string, sig []byte, key any) error { // Get the key var ecdsaKey *ecdsa.PublicKey switch k := key.(type) { @@ -89,7 +89,7 @@ func (m *SigningMethodECDSA) Verify(signingString string, sig []byte, key interf // Sign implements token signing for the SigningMethod. // For this signing method, key must be an ecdsa.PrivateKey struct -func (m *SigningMethodECDSA) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *SigningMethodECDSA) Sign(signingString string, key any) ([]byte, error) { // Get the key var ecdsaKey *ecdsa.PrivateKey switch k := key.(type) { diff --git a/vendor/github.com/golang-jwt/jwt/v5/ecdsa_utils.go b/vendor/github.com/golang-jwt/jwt/v5/ecdsa_utils.go index 5700636d..44a3b7a1 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/ecdsa_utils.go +++ b/vendor/github.com/golang-jwt/jwt/v5/ecdsa_utils.go @@ -23,7 +23,7 @@ func ParseECPrivateKeyFromPEM(key []byte) (*ecdsa.PrivateKey, error) { } // Parse the key - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParseECPrivateKey(block.Bytes); err != nil { if parsedKey, err = x509.ParsePKCS8PrivateKey(block.Bytes); err != nil { return nil, err @@ -50,7 +50,7 @@ func ParseECPublicKeyFromPEM(key []byte) (*ecdsa.PublicKey, error) { } // Parse the key - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParsePKIXPublicKey(block.Bytes); err != nil { if cert, err := x509.ParseCertificate(block.Bytes); err == nil { parsedKey = cert.PublicKey diff --git a/vendor/github.com/golang-jwt/jwt/v5/ed25519.go b/vendor/github.com/golang-jwt/jwt/v5/ed25519.go index c2138119..4159e57b 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/ed25519.go +++ b/vendor/github.com/golang-jwt/jwt/v5/ed25519.go @@ -33,7 +33,7 @@ func (m *SigningMethodEd25519) Alg() string { // Verify implements token verification for the SigningMethod. 
// For this verify method, key must be an ed25519.PublicKey -func (m *SigningMethodEd25519) Verify(signingString string, sig []byte, key interface{}) error { +func (m *SigningMethodEd25519) Verify(signingString string, sig []byte, key any) error { var ed25519Key ed25519.PublicKey var ok bool @@ -55,7 +55,7 @@ func (m *SigningMethodEd25519) Verify(signingString string, sig []byte, key inte // Sign implements token signing for the SigningMethod. // For this signing method, key must be an ed25519.PrivateKey -func (m *SigningMethodEd25519) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *SigningMethodEd25519) Sign(signingString string, key any) ([]byte, error) { var ed25519Key crypto.Signer var ok bool diff --git a/vendor/github.com/golang-jwt/jwt/v5/ed25519_utils.go b/vendor/github.com/golang-jwt/jwt/v5/ed25519_utils.go index cdb5e68e..6f46e886 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/ed25519_utils.go +++ b/vendor/github.com/golang-jwt/jwt/v5/ed25519_utils.go @@ -24,7 +24,7 @@ func ParseEdPrivateKeyFromPEM(key []byte) (crypto.PrivateKey, error) { } // Parse the key - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParsePKCS8PrivateKey(block.Bytes); err != nil { return nil, err } @@ -49,7 +49,7 @@ func ParseEdPublicKeyFromPEM(key []byte) (crypto.PublicKey, error) { } // Parse the key - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParsePKIXPublicKey(block.Bytes); err != nil { return nil, err } diff --git a/vendor/github.com/golang-jwt/jwt/v5/hmac.go b/vendor/github.com/golang-jwt/jwt/v5/hmac.go index aca600ce..1bef138c 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/hmac.go +++ b/vendor/github.com/golang-jwt/jwt/v5/hmac.go @@ -55,7 +55,7 @@ func (m *SigningMethodHMAC) Alg() string { // about this, and why we intentionally are not supporting string as a key can // be found on our usage guide // https://golang-jwt.github.io/jwt/usage/signing_methods/#signing-methods-and-key-types. -func (m *SigningMethodHMAC) Verify(signingString string, sig []byte, key interface{}) error { +func (m *SigningMethodHMAC) Verify(signingString string, sig []byte, key any) error { // Verify the key is the right type keyBytes, ok := key.([]byte) if !ok { @@ -88,7 +88,7 @@ func (m *SigningMethodHMAC) Verify(signingString string, sig []byte, key interfa // cryptographically random source, e.g. crypto/rand. Additional information // about this, and why we intentionally are not supporting string as a key can // be found on our usage guide https://golang-jwt.github.io/jwt/usage/signing_methods/. -func (m *SigningMethodHMAC) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *SigningMethodHMAC) Sign(signingString string, key any) ([]byte, error) { if keyBytes, ok := key.([]byte); ok { if !m.Hash.Available() { return nil, ErrHashUnavailable diff --git a/vendor/github.com/golang-jwt/jwt/v5/map_claims.go b/vendor/github.com/golang-jwt/jwt/v5/map_claims.go index b2b51a1f..3b920527 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/map_claims.go +++ b/vendor/github.com/golang-jwt/jwt/v5/map_claims.go @@ -5,9 +5,9 @@ import ( "fmt" ) -// MapClaims is a claims type that uses the map[string]interface{} for JSON +// MapClaims is a claims type that uses the map[string]any for JSON // decoding. This is the default claims type if you don't supply one -type MapClaims map[string]interface{} +type MapClaims map[string]any // GetExpirationTime implements the Claims interface. 
func (m MapClaims) GetExpirationTime() (*NumericDate, error) { @@ -73,7 +73,7 @@ func (m MapClaims) parseClaimsString(key string) (ClaimStrings, error) { cs = append(cs, v) case []string: cs = v - case []interface{}: + case []any: for _, a := range v { vs, ok := a.(string) if !ok { @@ -92,7 +92,7 @@ func (m MapClaims) parseClaimsString(key string) (ClaimStrings, error) { func (m MapClaims) parseString(key string) (string, error) { var ( ok bool - raw interface{} + raw any iss string ) raw, ok = m[key] diff --git a/vendor/github.com/golang-jwt/jwt/v5/none.go b/vendor/github.com/golang-jwt/jwt/v5/none.go index 685c2ea3..624ad55e 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/none.go +++ b/vendor/github.com/golang-jwt/jwt/v5/none.go @@ -25,7 +25,7 @@ func (m *signingMethodNone) Alg() string { } // Only allow 'none' alg type if UnsafeAllowNoneSignatureType is specified as the key -func (m *signingMethodNone) Verify(signingString string, sig []byte, key interface{}) (err error) { +func (m *signingMethodNone) Verify(signingString string, sig []byte, key any) (err error) { // Key must be UnsafeAllowNoneSignatureType to prevent accidentally // accepting 'none' signing method if _, ok := key.(unsafeNoneMagicConstant); !ok { @@ -41,7 +41,7 @@ func (m *signingMethodNone) Verify(signingString string, sig []byte, key interfa } // Only allow 'none' signing if UnsafeAllowNoneSignatureType is specified as the key -func (m *signingMethodNone) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *signingMethodNone) Sign(signingString string, key any) ([]byte, error) { if _, ok := key.(unsafeNoneMagicConstant); ok { return []byte{}, nil } diff --git a/vendor/github.com/golang-jwt/jwt/v5/parser_option.go b/vendor/github.com/golang-jwt/jwt/v5/parser_option.go index 88a780fb..43157355 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/parser_option.go +++ b/vendor/github.com/golang-jwt/jwt/v5/parser_option.go @@ -66,20 +66,37 @@ func WithExpirationRequired() ParserOption { } } -// WithAudience configures the validator to require the specified audience in -// the `aud` claim. Validation will fail if the audience is not listed in the -// token or the `aud` claim is missing. +// WithAudience configures the validator to require any of the specified +// audiences in the `aud` claim. Validation will fail if the audience is not +// listed in the token or the `aud` claim is missing. // // NOTE: While the `aud` claim is OPTIONAL in a JWT, the handling of it is // application-specific. Since this validation API is helping developers in // writing secure application, we decided to REQUIRE the existence of the claim, // if an audience is expected. -func WithAudience(aud string) ParserOption { +func WithAudience(aud ...string) ParserOption { return func(p *Parser) { p.validator.expectedAud = aud } } +// WithAllAudiences configures the validator to require all the specified +// audiences in the `aud` claim. Validation will fail if the specified audiences +// are not listed in the token or the `aud` claim is missing. Duplicates within +// the list are de-duplicated since internally, we use a map to look up the +// audiences. +// +// NOTE: While the `aud` claim is OPTIONAL in a JWT, the handling of it is +// application-specific. Since this validation API is helping developers in +// writing secure application, we decided to REQUIRE the existence of the claim, +// if an audience is expected. 
+func WithAllAudiences(aud ...string) ParserOption { + return func(p *Parser) { + p.validator.expectedAud = aud + p.validator.expectAllAud = true + } +} + // WithIssuer configures the validator to require the specified issuer in the // `iss` claim. Validation will fail if a different issuer is specified in the // token or the `iss` claim is missing. diff --git a/vendor/github.com/golang-jwt/jwt/v5/rsa.go b/vendor/github.com/golang-jwt/jwt/v5/rsa.go index 83cbee6a..98b960a7 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/rsa.go +++ b/vendor/github.com/golang-jwt/jwt/v5/rsa.go @@ -46,7 +46,7 @@ func (m *SigningMethodRSA) Alg() string { // Verify implements token verification for the SigningMethod // For this signing method, must be an *rsa.PublicKey structure. -func (m *SigningMethodRSA) Verify(signingString string, sig []byte, key interface{}) error { +func (m *SigningMethodRSA) Verify(signingString string, sig []byte, key any) error { var rsaKey *rsa.PublicKey var ok bool @@ -67,7 +67,7 @@ func (m *SigningMethodRSA) Verify(signingString string, sig []byte, key interfac // Sign implements token signing for the SigningMethod // For this signing method, must be an *rsa.PrivateKey structure. -func (m *SigningMethodRSA) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *SigningMethodRSA) Sign(signingString string, key any) ([]byte, error) { var rsaKey *rsa.PrivateKey var ok bool diff --git a/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go b/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go index 28c386ec..7c216ae0 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go +++ b/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go @@ -82,7 +82,7 @@ func init() { // Verify implements token verification for the SigningMethod. // For this verify method, key must be an rsa.PublicKey struct -func (m *SigningMethodRSAPSS) Verify(signingString string, sig []byte, key interface{}) error { +func (m *SigningMethodRSAPSS) Verify(signingString string, sig []byte, key any) error { var rsaKey *rsa.PublicKey switch k := key.(type) { case *rsa.PublicKey: @@ -108,7 +108,7 @@ func (m *SigningMethodRSAPSS) Verify(signingString string, sig []byte, key inter // Sign implements token signing for the SigningMethod. 
// For this signing method, key must be an rsa.PrivateKey struct -func (m *SigningMethodRSAPSS) Sign(signingString string, key interface{}) ([]byte, error) { +func (m *SigningMethodRSAPSS) Sign(signingString string, key any) ([]byte, error) { var rsaKey *rsa.PrivateKey switch k := key.(type) { diff --git a/vendor/github.com/golang-jwt/jwt/v5/rsa_utils.go b/vendor/github.com/golang-jwt/jwt/v5/rsa_utils.go index b3aeebbe..f22c3d06 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/rsa_utils.go +++ b/vendor/github.com/golang-jwt/jwt/v5/rsa_utils.go @@ -23,7 +23,7 @@ func ParseRSAPrivateKeyFromPEM(key []byte) (*rsa.PrivateKey, error) { return nil, ErrKeyMustBePEMEncoded } - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParsePKCS1PrivateKey(block.Bytes); err != nil { if parsedKey, err = x509.ParsePKCS8PrivateKey(block.Bytes); err != nil { return nil, err @@ -53,7 +53,7 @@ func ParseRSAPrivateKeyFromPEMWithPassword(key []byte, password string) (*rsa.Pr return nil, ErrKeyMustBePEMEncoded } - var parsedKey interface{} + var parsedKey any var blockDecrypted []byte if blockDecrypted, err = x509.DecryptPEMBlock(block, []byte(password)); err != nil { @@ -86,7 +86,7 @@ func ParseRSAPublicKeyFromPEM(key []byte) (*rsa.PublicKey, error) { } // Parse the key - var parsedKey interface{} + var parsedKey any if parsedKey, err = x509.ParsePKIXPublicKey(block.Bytes); err != nil { if cert, err := x509.ParseCertificate(block.Bytes); err == nil { parsedKey = cert.PublicKey diff --git a/vendor/github.com/golang-jwt/jwt/v5/signing_method.go b/vendor/github.com/golang-jwt/jwt/v5/signing_method.go index 0d73631c..096d0ed4 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/signing_method.go +++ b/vendor/github.com/golang-jwt/jwt/v5/signing_method.go @@ -12,9 +12,9 @@ var signingMethodLock = new(sync.RWMutex) // signature in Sign. The signature is then usually base64 encoded as part of a // JWT. type SigningMethod interface { - Verify(signingString string, sig []byte, key interface{}) error // Returns nil if signature is valid - Sign(signingString string, key interface{}) ([]byte, error) // Returns signature or error - Alg() string // returns the alg identifier for this method (example: 'HS256') + Verify(signingString string, sig []byte, key any) error // Returns nil if signature is valid + Sign(signingString string, key any) ([]byte, error) // Returns signature or error + Alg() string // returns the alg identifier for this method (example: 'HS256') } // RegisterSigningMethod registers the "alg" name and a factory function for signing method. diff --git a/vendor/github.com/golang-jwt/jwt/v5/token.go b/vendor/github.com/golang-jwt/jwt/v5/token.go index 9c7f4ab0..3f715588 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/token.go +++ b/vendor/github.com/golang-jwt/jwt/v5/token.go @@ -11,9 +11,9 @@ import ( // Token. This allows you to use properties in the Header of the token (such as // `kid`) to identify which key to use. // -// The returned interface{} may be a single key or a VerificationKeySet containing +// The returned any may be a single key or a VerificationKeySet containing // multiple keys. -type Keyfunc func(*Token) (interface{}, error) +type Keyfunc func(*Token) (any, error) // VerificationKey represents a public or secret key for verifying a token's signature. type VerificationKey interface { @@ -28,12 +28,12 @@ type VerificationKeySet struct { // Token represents a JWT Token. Different fields will be used depending on // whether you're creating or parsing/verifying a token. 
type Token struct { - Raw string // Raw contains the raw token. Populated when you [Parse] a token - Method SigningMethod // Method is the signing method used or to be used - Header map[string]interface{} // Header is the first segment of the token in decoded form - Claims Claims // Claims is the second segment of the token in decoded form - Signature []byte // Signature is the third segment of the token in decoded form. Populated when you Parse a token - Valid bool // Valid specifies if the token is valid. Populated when you Parse/Verify a token + Raw string // Raw contains the raw token. Populated when you [Parse] a token + Method SigningMethod // Method is the signing method used or to be used + Header map[string]any // Header is the first segment of the token in decoded form + Claims Claims // Claims is the second segment of the token in decoded form + Signature []byte // Signature is the third segment of the token in decoded form. Populated when you Parse a token + Valid bool // Valid specifies if the token is valid. Populated when you Parse/Verify a token } // New creates a new [Token] with the specified signing method and an empty map @@ -46,7 +46,7 @@ func New(method SigningMethod, opts ...TokenOption) *Token { // claims. Additional options can be specified, but are currently unused. func NewWithClaims(method SigningMethod, claims Claims, opts ...TokenOption) *Token { return &Token{ - Header: map[string]interface{}{ + Header: map[string]any{ "typ": "JWT", "alg": method.Alg(), }, @@ -60,7 +60,7 @@ func NewWithClaims(method SigningMethod, claims Claims, opts ...TokenOption) *To // https://golang-jwt.github.io/jwt/usage/signing_methods/#signing-methods-and-key-types // for an overview of the different signing methods and their respective key // types. -func (t *Token) SignedString(key interface{}) (string, error) { +func (t *Token) SignedString(key any) (string, error) { sstr, err := t.SigningString() if err != nil { return "", err diff --git a/vendor/github.com/golang-jwt/jwt/v5/types.go b/vendor/github.com/golang-jwt/jwt/v5/types.go index b2655a9e..a3e0ef12 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/types.go +++ b/vendor/github.com/golang-jwt/jwt/v5/types.go @@ -103,7 +103,7 @@ func (date *NumericDate) UnmarshalJSON(b []byte) (err error) { type ClaimStrings []string func (s *ClaimStrings) UnmarshalJSON(data []byte) (err error) { - var value interface{} + var value any if err = json.Unmarshal(data, &value); err != nil { return err @@ -116,7 +116,7 @@ func (s *ClaimStrings) UnmarshalJSON(data []byte) (err error) { aud = append(aud, v) case []string: aud = ClaimStrings(v) - case []interface{}: + case []any: for _, vv := range v { vs, ok := vv.(string) if !ok { diff --git a/vendor/github.com/golang-jwt/jwt/v5/validator.go b/vendor/github.com/golang-jwt/jwt/v5/validator.go index 008ecd87..92b5c057 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/validator.go +++ b/vendor/github.com/golang-jwt/jwt/v5/validator.go @@ -1,8 +1,8 @@ package jwt import ( - "crypto/subtle" "fmt" + "slices" "time" ) @@ -52,8 +52,12 @@ type Validator struct { verifyIat bool // expectedAud contains the audience this token expects. Supplying an empty - // string will disable aud checking. - expectedAud string + // slice will disable aud checking. + expectedAud []string + + // expectAllAud specifies whether all expected audiences must be present in + // the token. If false, only one of the expected audiences must be present. + expectAllAud bool // expectedIss contains the issuer this token expects. 
Supplying an empty // string will disable iss checking. @@ -88,7 +92,7 @@ func NewValidator(opts ...ParserOption) *Validator { func (v *Validator) Validate(claims Claims) error { var ( now time.Time - errs []error = make([]error, 0, 6) + errs = make([]error, 0, 6) err error ) @@ -120,8 +124,8 @@ func (v *Validator) Validate(claims Claims) error { } // If we have an expected audience, we also require the audience claim - if v.expectedAud != "" { - if err = v.verifyAudience(claims, v.expectedAud, true); err != nil { + if len(v.expectedAud) > 0 { + if err = v.verifyAudience(claims, v.expectedAud, v.expectAllAud); err != nil { errs = append(errs, err) } } @@ -226,33 +230,39 @@ func (v *Validator) verifyNotBefore(claims Claims, cmp time.Time, required bool) // // Additionally, if any error occurs while retrieving the claim, e.g., when its // the wrong type, an ErrTokenUnverifiable error will be returned. -func (v *Validator) verifyAudience(claims Claims, cmp string, required bool) error { +func (v *Validator) verifyAudience(claims Claims, cmp []string, expectAllAud bool) error { aud, err := claims.GetAudience() if err != nil { return err } - if len(aud) == 0 { + // Check that aud exists and is not empty. We only require the aud claim + // if we expect at least one audience to be present. + if len(aud) == 0 || len(aud) == 1 && aud[0] == "" { + required := len(v.expectedAud) > 0 return errorIfRequired(required, "aud") } - // use a var here to keep constant time compare when looping over a number of claims - result := false - - var stringClaims string - for _, a := range aud { - if subtle.ConstantTimeCompare([]byte(a), []byte(cmp)) != 0 { - result = true + if !expectAllAud { + for _, a := range aud { + // If we only expect one match, we can stop early if we find a match + if slices.Contains(cmp, a) { + return nil + } } - stringClaims = stringClaims + a + + return ErrTokenInvalidAudience } - // case where "" is sent in one or many aud claims - if stringClaims == "" { - return errorIfRequired(required, "aud") + // Note that we are looping cmp here to ensure that all expected audiences + // are present in the aud claim. + for _, a := range cmp { + if !slices.Contains(aud, a) { + return ErrTokenInvalidAudience + } } - return errorIfFalse(result, ErrTokenInvalidAudience) + return nil } // verifyIssuer compares the iss claim in claims against cmp. diff --git a/vendor/github.com/prometheus/common/model/time.go b/vendor/github.com/prometheus/common/model/time.go index 5727452c..fed9e87b 100644 --- a/vendor/github.com/prometheus/common/model/time.go +++ b/vendor/github.com/prometheus/common/model/time.go @@ -201,6 +201,7 @@ var unitMap = map[string]struct { // ParseDuration parses a string into a time.Duration, assuming that a year // always has 365d, a week always has 7d, and a day always has 24h. +// Negative durations are not supported. func ParseDuration(s string) (Duration, error) { switch s { case "0": @@ -253,18 +254,36 @@ func ParseDuration(s string) (Duration, error) { return 0, errors.New("duration out of range") } } + return Duration(dur), nil } +// ParseDurationAllowNegative is like ParseDuration but also accepts negative durations. 
+func ParseDurationAllowNegative(s string) (Duration, error) {
+	if s == "" || s[0] != '-' {
+		return ParseDuration(s)
+	}
+
+	d, err := ParseDuration(s[1:])
+
+	return -d, err
+}
+
 func (d Duration) String() string {
 	var (
-		ms = int64(time.Duration(d) / time.Millisecond)
-		r = ""
+		ms = int64(time.Duration(d) / time.Millisecond)
+		r = ""
+		sign = ""
 	)
+
 	if ms == 0 {
 		return "0s"
 	}
+
+	if ms < 0 {
+		sign, ms = "-", -ms
+	}
+
 	f := func(unit string, mult int64, exact bool) {
 		if exact && ms%mult != 0 {
 			return
@@ -286,7 +305,7 @@ func (d Duration) String() string {
 	f("s", 1000, false)
 	f("ms", 1, false)
 
-	return r
+	return sign + r
 }
 
 // MarshalJSON implements the json.Marshaler interface.
diff --git a/vendor/github.com/prometheus/procfs/Makefile.common b/vendor/github.com/prometheus/procfs/Makefile.common
index 0ed55c2b..4de21512 100644
--- a/vendor/github.com/prometheus/procfs/Makefile.common
+++ b/vendor/github.com/prometheus/procfs/Makefile.common
@@ -33,7 +33,7 @@ GOHOSTOS ?= $(shell $(GO) env GOHOSTOS)
 GOHOSTARCH ?= $(shell $(GO) env GOHOSTARCH)
 
 GO_VERSION ?= $(shell $(GO) version)
-GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION))
+GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION))
 PRE_GO_111 ?= $(shell echo $(GO_VERSION_NUMBER) | grep -E 'go1\.(10|[0-9])\.')
 
 PROMU := $(FIRST_GOPATH)/bin/promu
@@ -61,7 +61,8 @@ PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_
 SKIP_GOLANGCI_LINT :=
 GOLANGCI_LINT :=
 GOLANGCI_LINT_OPTS ?=
-GOLANGCI_LINT_VERSION ?= v2.0.2
+GOLANGCI_LINT_VERSION ?= v2.1.5
+GOLANGCI_FMT_OPTS ?=
 # golangci-lint only supports linux, darwin and windows platforms on i386/amd64/arm64.
 # windows isn't included here because of the path separator being different.
 ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin))
@@ -156,9 +157,13 @@ $(GOTEST_DIR):
 	@mkdir -p $@
 
 .PHONY: common-format
-common-format:
+common-format: $(GOLANGCI_LINT)
 	@echo ">> formatting code"
 	$(GO) fmt $(pkgs)
+ifdef GOLANGCI_LINT
+	@echo ">> formatting code with golangci-lint"
+	$(GOLANGCI_LINT) fmt $(GOLANGCI_FMT_OPTS)
+endif
 
 .PHONY: common-vet
 common-vet:
@@ -248,8 +253,8 @@ $(PROMU):
 	cp $(PROMU_TMP)/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM)/promu $(FIRST_GOPATH)/bin/promu
 	rm -r $(PROMU_TMP)
 
-.PHONY: proto
-proto:
+.PHONY: common-proto
+common-proto:
 	@echo ">> generating code from proto files"
 	@./scripts/genproto.sh
diff --git a/vendor/github.com/prometheus/procfs/mdstat.go b/vendor/github.com/prometheus/procfs/mdstat.go
index 67a9d2b4..1fd4381b 100644
--- a/vendor/github.com/prometheus/procfs/mdstat.go
+++ b/vendor/github.com/prometheus/procfs/mdstat.go
@@ -123,13 +123,16 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) {
 		finish := float64(0)
 		pct := float64(0)
 		recovering := strings.Contains(lines[syncLineIdx], "recovery")
+		reshaping := strings.Contains(lines[syncLineIdx], "reshape")
 		resyncing := strings.Contains(lines[syncLineIdx], "resync")
 		checking := strings.Contains(lines[syncLineIdx], "check")
 
 		// Append recovery and resyncing state info.
- if recovering || resyncing || checking { + if recovering || resyncing || checking || reshaping { if recovering { state = "recovering" + } else if reshaping { + state = "reshaping" } else if checking { state = "checking" } else { diff --git a/vendor/github.com/prometheus/procfs/meminfo.go b/vendor/github.com/prometheus/procfs/meminfo.go index 4b2c4050..937e1f96 100644 --- a/vendor/github.com/prometheus/procfs/meminfo.go +++ b/vendor/github.com/prometheus/procfs/meminfo.go @@ -66,6 +66,10 @@ type Meminfo struct { // Memory which has been evicted from RAM, and is temporarily // on the disk SwapFree *uint64 + // Memory consumed by the zswap backend (compressed size) + Zswap *uint64 + // Amount of anonymous memory stored in zswap (original size) + Zswapped *uint64 // Memory which is waiting to get written back to the disk Dirty *uint64 // Memory which is actively being written back to the disk @@ -85,6 +89,8 @@ type Meminfo struct { // amount of memory dedicated to the lowest level of page // tables. PageTables *uint64 + // secondary page tables. + SecPageTables *uint64 // NFS pages sent to the server, but not yet committed to // stable storage NFSUnstable *uint64 @@ -129,15 +135,18 @@ type Meminfo struct { Percpu *uint64 HardwareCorrupted *uint64 AnonHugePages *uint64 + FileHugePages *uint64 ShmemHugePages *uint64 ShmemPmdMapped *uint64 CmaTotal *uint64 CmaFree *uint64 + Unaccepted *uint64 HugePagesTotal *uint64 HugePagesFree *uint64 HugePagesRsvd *uint64 HugePagesSurp *uint64 Hugepagesize *uint64 + Hugetlb *uint64 DirectMap4k *uint64 DirectMap2M *uint64 DirectMap1G *uint64 @@ -161,6 +170,8 @@ type Meminfo struct { MlockedBytes *uint64 SwapTotalBytes *uint64 SwapFreeBytes *uint64 + ZswapBytes *uint64 + ZswappedBytes *uint64 DirtyBytes *uint64 WritebackBytes *uint64 AnonPagesBytes *uint64 @@ -171,6 +182,7 @@ type Meminfo struct { SUnreclaimBytes *uint64 KernelStackBytes *uint64 PageTablesBytes *uint64 + SecPageTablesBytes *uint64 NFSUnstableBytes *uint64 BounceBytes *uint64 WritebackTmpBytes *uint64 @@ -182,11 +194,14 @@ type Meminfo struct { PercpuBytes *uint64 HardwareCorruptedBytes *uint64 AnonHugePagesBytes *uint64 + FileHugePagesBytes *uint64 ShmemHugePagesBytes *uint64 ShmemPmdMappedBytes *uint64 CmaTotalBytes *uint64 CmaFreeBytes *uint64 + UnacceptedBytes *uint64 HugepagesizeBytes *uint64 + HugetlbBytes *uint64 DirectMap4kBytes *uint64 DirectMap2MBytes *uint64 DirectMap1GBytes *uint64 @@ -287,6 +302,12 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) { case "SwapFree:": m.SwapFree = &val m.SwapFreeBytes = &valBytes + case "Zswap:": + m.Zswap = &val + m.ZswapBytes = &valBytes + case "Zswapped:": + m.Zswapped = &val + m.ZswapBytes = &valBytes case "Dirty:": m.Dirty = &val m.DirtyBytes = &valBytes @@ -317,6 +338,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) { case "PageTables:": m.PageTables = &val m.PageTablesBytes = &valBytes + case "SecPageTables:": + m.SecPageTables = &val + m.SecPageTablesBytes = &valBytes case "NFS_Unstable:": m.NFSUnstable = &val m.NFSUnstableBytes = &valBytes @@ -350,6 +374,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) { case "AnonHugePages:": m.AnonHugePages = &val m.AnonHugePagesBytes = &valBytes + case "FileHugePages:": + m.FileHugePages = &val + m.FileHugePagesBytes = &valBytes case "ShmemHugePages:": m.ShmemHugePages = &val m.ShmemHugePagesBytes = &valBytes @@ -362,6 +389,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) { case "CmaFree:": m.CmaFree = &val m.CmaFreeBytes = &valBytes + case "Unaccepted:": + m.Unaccepted = &val + 
m.UnacceptedBytes = &valBytes case "HugePages_Total:": m.HugePagesTotal = &val case "HugePages_Free:": @@ -373,6 +403,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) { case "Hugepagesize:": m.Hugepagesize = &val m.HugepagesizeBytes = &valBytes + case "Hugetlb:": + m.Hugetlb = &val + m.HugetlbBytes = &valBytes case "DirectMap4k:": m.DirectMap4k = &val m.DirectMap4kBytes = &valBytes diff --git a/vendor/github.com/prometheus/procfs/proc_stat.go b/vendor/github.com/prometheus/procfs/proc_stat.go index 06a8d931..3328556b 100644 --- a/vendor/github.com/prometheus/procfs/proc_stat.go +++ b/vendor/github.com/prometheus/procfs/proc_stat.go @@ -101,6 +101,12 @@ type ProcStat struct { RSS int // Soft limit in bytes on the rss of the process. RSSLimit uint64 + // The address above which program text can run. + StartCode uint64 + // The address below which program text can run. + EndCode uint64 + // The address of the start (i.e., bottom) of the stack. + StartStack uint64 // CPU number last executed on. Processor uint // Real-time scheduling priority, a number in the range 1 to 99 for processes @@ -177,9 +183,9 @@ func (p Proc) Stat() (ProcStat, error) { &s.VSize, &s.RSS, &s.RSSLimit, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, + &s.StartCode, + &s.EndCode, + &s.StartStack, &ignoreUint64, &ignoreUint64, &ignoreUint64, diff --git a/vendor/github.com/prometheus/procfs/proc_statm.go b/vendor/github.com/prometheus/procfs/proc_statm.go new file mode 100644 index 00000000..ed579842 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/proc_statm.go @@ -0,0 +1,116 @@ +// Copyright 2025 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package procfs + +import ( + "os" + "strconv" + "strings" + + "github.com/prometheus/procfs/internal/util" +) + +// - https://man7.org/linux/man-pages/man5/proc_pid_statm.5.html + +// ProcStatm Provides memory usage information for a process, measured in memory pages. +// Read from /proc/[pid]/statm. +type ProcStatm struct { + // The process ID. + PID int + // total program size (same as VmSize in status) + Size uint64 + // resident set size (same as VmRSS in status) + Resident uint64 + // number of resident shared pages (i.e., backed by a file) + Shared uint64 + // text (code) + Text uint64 + // library (unused since Linux 2.6; always 0) + Lib uint64 + // data + stack + Data uint64 + // dirty pages (unused since Linux 2.6; always 0) + Dt uint64 +} + +// NewStatm returns the current status information of the process. +// Deprecated: Use p.Statm() instead. +func (p Proc) NewStatm() (ProcStatm, error) { + return p.Statm() +} + +// Statm returns the current memory usage information of the process. 
+func (p Proc) Statm() (ProcStatm, error) { + data, err := util.ReadFileNoStat(p.path("statm")) + if err != nil { + return ProcStatm{}, err + } + + statmSlice, err := parseStatm(data) + if err != nil { + return ProcStatm{}, err + } + + procStatm := ProcStatm{ + PID: p.PID, + Size: statmSlice[0], + Resident: statmSlice[1], + Shared: statmSlice[2], + Text: statmSlice[3], + Lib: statmSlice[4], + Data: statmSlice[5], + Dt: statmSlice[6], + } + + return procStatm, nil +} + +// parseStatm return /proc/[pid]/statm data to uint64 slice. +func parseStatm(data []byte) ([]uint64, error) { + var statmSlice []uint64 + statmItems := strings.Fields(string(data)) + for i := 0; i < len(statmItems); i++ { + statmItem, err := strconv.ParseUint(statmItems[i], 10, 64) + if err != nil { + return nil, err + } + statmSlice = append(statmSlice, statmItem) + } + return statmSlice, nil +} + +// SizeBytes returns the process of total program size in bytes. +func (s ProcStatm) SizeBytes() uint64 { + return s.Size * uint64(os.Getpagesize()) +} + +// ResidentBytes returns the process of resident set size in bytes. +func (s ProcStatm) ResidentBytes() uint64 { + return s.Resident * uint64(os.Getpagesize()) +} + +// SHRBytes returns the process of share memory size in bytes. +func (s ProcStatm) SHRBytes() uint64 { + return s.Shared * uint64(os.Getpagesize()) +} + +// TextBytes returns the process of text (code) size in bytes. +func (s ProcStatm) TextBytes() uint64 { + return s.Text * uint64(os.Getpagesize()) +} + +// DataBytes returns the process of data + stack size in bytes. +func (s ProcStatm) DataBytes() uint64 { + return s.Data * uint64(os.Getpagesize()) +} diff --git a/vendor/github.com/spf13/pflag/README.md b/vendor/github.com/spf13/pflag/README.md index 7eacc5bd..388c4e5e 100644 --- a/vendor/github.com/spf13/pflag/README.md +++ b/vendor/github.com/spf13/pflag/README.md @@ -284,6 +284,33 @@ func main() { } ``` +### Using pflag with go test +`pflag` does not parse the shorthand versions of go test's built-in flags (i.e., those starting with `-test.`). +For more context, see issues [#63](https://github.com/spf13/pflag/issues/63) and [#238](https://github.com/spf13/pflag/issues/238) for more details. + +For example, if you use pflag in your `TestMain` function and call `pflag.Parse()` after defining your custom flags, running a test like this: +```bash +go test /your/tests -run ^YourTest -v --your-test-pflags +``` +will result in the `-v` flag being ignored. This happens because of the way pflag handles flag parsing, skipping over go test's built-in shorthand flags. +To work around this, you can use the `ParseSkippedFlags` function, which ensures that go test's flags are parsed separately using the standard flag package. 
+ +**Example**: You want to parse go test flags that are otherwise ignore by `pflag.Parse()` +```go +import ( + goflag "flag" + flag "github.com/spf13/pflag" +) + +var ip *int = flag.Int("flagname", 1234, "help message for flagname") + +func main() { + flag.CommandLine.AddGoFlagSet(goflag.CommandLine) + flag.ParseSkippedFlags(os.Args[1:], goflag.CommandLine) + flag.Parse() +} +``` + ## More info You can see the full reference documentation of the pflag package diff --git a/vendor/github.com/spf13/pflag/bool_func.go b/vendor/github.com/spf13/pflag/bool_func.go new file mode 100644 index 00000000..83d77afa --- /dev/null +++ b/vendor/github.com/spf13/pflag/bool_func.go @@ -0,0 +1,40 @@ +package pflag + +// -- func Value +type boolfuncValue func(string) error + +func (f boolfuncValue) Set(s string) error { return f(s) } + +func (f boolfuncValue) Type() string { return "boolfunc" } + +func (f boolfuncValue) String() string { return "" } // same behavior as stdlib 'flag' package + +func (f boolfuncValue) IsBoolFlag() bool { return true } + +// BoolFunc defines a func flag with specified name, callback function and usage string. +// +// The callback function will be called every time "--{name}" (or any form that matches the flag) is parsed +// on the command line. +func (f *FlagSet) BoolFunc(name string, usage string, fn func(string) error) { + f.BoolFuncP(name, "", usage, fn) +} + +// BoolFuncP is like BoolFunc, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BoolFuncP(name, shorthand string, usage string, fn func(string) error) { + var val Value = boolfuncValue(fn) + flag := f.VarPF(val, name, shorthand, usage) + flag.NoOptDefVal = "true" +} + +// BoolFunc defines a func flag with specified name, callback function and usage string. +// +// The callback function will be called every time "--{name}" (or any form that matches the flag) is parsed +// on the command line. +func BoolFunc(name string, usage string, fn func(string) error) { + CommandLine.BoolFuncP(name, "", usage, fn) +} + +// BoolFuncP is like BoolFunc, but accepts a shorthand letter that can be used after a single dash. +func BoolFuncP(name, shorthand string, usage string, fn func(string) error) { + CommandLine.BoolFuncP(name, shorthand, usage, fn) +} diff --git a/vendor/github.com/spf13/pflag/count.go b/vendor/github.com/spf13/pflag/count.go index a0b2679f..d49c0143 100644 --- a/vendor/github.com/spf13/pflag/count.go +++ b/vendor/github.com/spf13/pflag/count.go @@ -85,7 +85,7 @@ func (f *FlagSet) CountP(name, shorthand string, usage string) *int { // Count defines a count flag with specified name, default value, and usage string. // The return value is the address of an int variable that stores the value of the flag. -// A count flag will add 1 to its value evey time it is found on the command line +// A count flag will add 1 to its value every time it is found on the command line func Count(name string, usage string) *int { return CommandLine.CountP(name, "", usage) } diff --git a/vendor/github.com/spf13/pflag/errors.go b/vendor/github.com/spf13/pflag/errors.go new file mode 100644 index 00000000..ff11b66b --- /dev/null +++ b/vendor/github.com/spf13/pflag/errors.go @@ -0,0 +1,149 @@ +package pflag + +import "fmt" + +// notExistErrorMessageType specifies which flavor of "flag does not exist" +// is printed by NotExistError. This allows the related errors to be grouped +// under a single NotExistError struct without making a breaking change to +// the error message text. 
+type notExistErrorMessageType int + +const ( + flagNotExistMessage notExistErrorMessageType = iota + flagNotDefinedMessage + flagNoSuchFlagMessage + flagUnknownFlagMessage + flagUnknownShorthandFlagMessage +) + +// NotExistError is the error returned when trying to access a flag that +// does not exist in the FlagSet. +type NotExistError struct { + name string + specifiedShorthands string + messageType notExistErrorMessageType +} + +// Error implements error. +func (e *NotExistError) Error() string { + switch e.messageType { + case flagNotExistMessage: + return fmt.Sprintf("flag %q does not exist", e.name) + + case flagNotDefinedMessage: + return fmt.Sprintf("flag accessed but not defined: %s", e.name) + + case flagNoSuchFlagMessage: + return fmt.Sprintf("no such flag -%v", e.name) + + case flagUnknownFlagMessage: + return fmt.Sprintf("unknown flag: --%s", e.name) + + case flagUnknownShorthandFlagMessage: + c := rune(e.name[0]) + return fmt.Sprintf("unknown shorthand flag: %q in -%s", c, e.specifiedShorthands) + } + + panic(fmt.Errorf("unknown flagNotExistErrorMessageType: %v", e.messageType)) +} + +// GetSpecifiedName returns the name of the flag (without dashes) as it +// appeared in the parsed arguments. +func (e *NotExistError) GetSpecifiedName() string { + return e.name +} + +// GetSpecifiedShortnames returns the group of shorthand arguments +// (without dashes) that the flag appeared within. If the flag was not in a +// shorthand group, this will return an empty string. +func (e *NotExistError) GetSpecifiedShortnames() string { + return e.specifiedShorthands +} + +// ValueRequiredError is the error returned when a flag needs an argument but +// no argument was provided. +type ValueRequiredError struct { + flag *Flag + specifiedName string + specifiedShorthands string +} + +// Error implements error. +func (e *ValueRequiredError) Error() string { + if len(e.specifiedShorthands) > 0 { + c := rune(e.specifiedName[0]) + return fmt.Sprintf("flag needs an argument: %q in -%s", c, e.specifiedShorthands) + } + + return fmt.Sprintf("flag needs an argument: --%s", e.specifiedName) +} + +// GetFlag returns the flag for which the error occurred. +func (e *ValueRequiredError) GetFlag() *Flag { + return e.flag +} + +// GetSpecifiedName returns the name of the flag (without dashes) as it +// appeared in the parsed arguments. +func (e *ValueRequiredError) GetSpecifiedName() string { + return e.specifiedName +} + +// GetSpecifiedShortnames returns the group of shorthand arguments +// (without dashes) that the flag appeared within. If the flag was not in a +// shorthand group, this will return an empty string. +func (e *ValueRequiredError) GetSpecifiedShortnames() string { + return e.specifiedShorthands +} + +// InvalidValueError is the error returned when an invalid value is used +// for a flag. +type InvalidValueError struct { + flag *Flag + value string + cause error +} + +// Error implements error. +func (e *InvalidValueError) Error() string { + flag := e.flag + var flagName string + if flag.Shorthand != "" && flag.ShorthandDeprecated == "" { + flagName = fmt.Sprintf("-%s, --%s", flag.Shorthand, flag.Name) + } else { + flagName = fmt.Sprintf("--%s", flag.Name) + } + return fmt.Sprintf("invalid argument %q for %q flag: %v", e.value, flagName, e.cause) +} + +// Unwrap implements errors.Unwrap. +func (e *InvalidValueError) Unwrap() error { + return e.cause +} + +// GetFlag returns the flag for which the error occurred. 
+func (e *InvalidValueError) GetFlag() *Flag { + return e.flag +} + +// GetValue returns the invalid value that was provided. +func (e *InvalidValueError) GetValue() string { + return e.value +} + +// InvalidSyntaxError is the error returned when a bad flag name is passed on +// the command line. +type InvalidSyntaxError struct { + specifiedFlag string +} + +// Error implements error. +func (e *InvalidSyntaxError) Error() string { + return fmt.Sprintf("bad flag syntax: %s", e.specifiedFlag) +} + +// GetSpecifiedName returns the exact flag (with dashes) as it +// appeared in the parsed arguments. +func (e *InvalidSyntaxError) GetSpecifiedFlag() string { + return e.specifiedFlag +} diff --git a/vendor/github.com/spf13/pflag/flag.go b/vendor/github.com/spf13/pflag/flag.go index 7c058de3..d4dfbc5e 100644 --- a/vendor/github.com/spf13/pflag/flag.go +++ b/vendor/github.com/spf13/pflag/flag.go @@ -27,23 +27,32 @@ unaffected. Define flags using flag.String(), Bool(), Int(), etc. This declares an integer flag, -flagname, stored in the pointer ip, with type *int. + var ip = flag.Int("flagname", 1234, "help message for flagname") + If you like, you can bind the flag to a variable using the Var() functions. + var flagvar int func init() { flag.IntVar(&flagvar, "flagname", 1234, "help message for flagname") } + Or you can create custom flags that satisfy the Value interface (with pointer receivers) and couple them to flag parsing by + flag.Var(&flagVal, "name", "help message for flagname") + For such flags, the default value is just the initial value of the variable. After all flags are defined, call + flag.Parse() + to parse the command line into the defined flags. Flags may then be used directly. If you're using the flags themselves, they are all pointers; if you bind to variables, they're values. + fmt.Println("ip has value ", *ip) fmt.Println("flagvar has value ", flagvar) @@ -54,22 +63,26 @@ The arguments are indexed from 0 through flag.NArg()-1. The pflag package also defines some new functions that are not in flag, that give one-letter shorthands for flags. You can use these by appending 'P' to the name of any function that defines a flag. + var ip = flag.IntP("flagname", "f", 1234, "help message") var flagvar bool func init() { flag.BoolVarP(&flagvar, "boolname", "b", true, "help message") } flag.VarP(&flagval, "varname", "v", "help message") + Shorthand letters can be used with single dashes on the command line. Boolean shorthand flags can be combined with other shorthand flags. Command line flag syntax: + --flag // boolean flags only --flag=x Unlike the flag package, a single dash before an option means something different than a double dash. Single dashes signify a series of shorthand letters for flags. All but the last shorthand letter must be boolean flags. 
+ // boolean flags -f -abc @@ -381,7 +394,7 @@ func (f *FlagSet) lookup(name NormalizedName) *Flag { func (f *FlagSet) getFlagType(name string, ftype string, convFunc func(sval string) (interface{}, error)) (interface{}, error) { flag := f.Lookup(name) if flag == nil { - err := fmt.Errorf("flag accessed but not defined: %s", name) + err := &NotExistError{name: name, messageType: flagNotDefinedMessage} return nil, err } @@ -411,7 +424,7 @@ func (f *FlagSet) ArgsLenAtDash() int { func (f *FlagSet) MarkDeprecated(name string, usageMessage string) error { flag := f.Lookup(name) if flag == nil { - return fmt.Errorf("flag %q does not exist", name) + return &NotExistError{name: name, messageType: flagNotExistMessage} } if usageMessage == "" { return fmt.Errorf("deprecated message for flag %q must be set", name) @@ -427,7 +440,7 @@ func (f *FlagSet) MarkDeprecated(name string, usageMessage string) error { func (f *FlagSet) MarkShorthandDeprecated(name string, usageMessage string) error { flag := f.Lookup(name) if flag == nil { - return fmt.Errorf("flag %q does not exist", name) + return &NotExistError{name: name, messageType: flagNotExistMessage} } if usageMessage == "" { return fmt.Errorf("deprecated message for flag %q must be set", name) @@ -441,7 +454,7 @@ func (f *FlagSet) MarkShorthandDeprecated(name string, usageMessage string) erro func (f *FlagSet) MarkHidden(name string) error { flag := f.Lookup(name) if flag == nil { - return fmt.Errorf("flag %q does not exist", name) + return &NotExistError{name: name, messageType: flagNotExistMessage} } flag.Hidden = true return nil @@ -464,18 +477,16 @@ func (f *FlagSet) Set(name, value string) error { normalName := f.normalizeFlagName(name) flag, ok := f.formal[normalName] if !ok { - return fmt.Errorf("no such flag -%v", name) + return &NotExistError{name: name, messageType: flagNoSuchFlagMessage} } err := flag.Value.Set(value) if err != nil { - var flagName string - if flag.Shorthand != "" && flag.ShorthandDeprecated == "" { - flagName = fmt.Sprintf("-%s, --%s", flag.Shorthand, flag.Name) - } else { - flagName = fmt.Sprintf("--%s", flag.Name) + return &InvalidValueError{ + flag: flag, + value: value, + cause: err, } - return fmt.Errorf("invalid argument %q for %q flag: %v", value, flagName, err) } if !flag.Changed { @@ -501,7 +512,7 @@ func (f *FlagSet) SetAnnotation(name, key string, values []string) error { normalName := f.normalizeFlagName(name) flag, ok := f.formal[normalName] if !ok { - return fmt.Errorf("no such flag -%v", name) + return &NotExistError{name: name, messageType: flagNoSuchFlagMessage} } if flag.Annotations == nil { flag.Annotations = map[string][]string{} @@ -538,7 +549,7 @@ func (f *FlagSet) PrintDefaults() { func (f *Flag) defaultIsZeroValue() bool { switch f.Value.(type) { case boolFlag: - return f.DefValue == "false" + return f.DefValue == "false" || f.DefValue == "" case *durationValue: // Beginning in Go 1.7, duration zero values are "0s" return f.DefValue == "0" || f.DefValue == "0s" @@ -551,7 +562,7 @@ func (f *Flag) defaultIsZeroValue() bool { case *intSliceValue, *stringSliceValue, *stringArrayValue: return f.DefValue == "[]" default: - switch f.Value.String() { + switch f.DefValue { case "false": return true case "": @@ -588,8 +599,10 @@ func UnquoteUsage(flag *Flag) (name string, usage string) { name = flag.Value.Type() switch name { - case "bool": + case "bool", "boolfunc": name = "" + case "func": + name = "value" case "float64": name = "float" case "int64": @@ -707,7 +720,7 @@ func (f *FlagSet) 
FlagUsagesWrapped(cols int) string { switch flag.Value.Type() { case "string": line += fmt.Sprintf("[=\"%s\"]", flag.NoOptDefVal) - case "bool": + case "bool", "boolfunc": if flag.NoOptDefVal != "true" { line += fmt.Sprintf("[=%s]", flag.NoOptDefVal) } @@ -911,10 +924,9 @@ func VarP(value Value, name, shorthand, usage string) { CommandLine.VarP(value, name, shorthand, usage) } -// failf prints to standard error a formatted error and usage message and +// fail prints an error message and usage message to standard error and // returns the error. -func (f *FlagSet) failf(format string, a ...interface{}) error { - err := fmt.Errorf(format, a...) +func (f *FlagSet) fail(err error) error { if f.errorHandling != ContinueOnError { fmt.Fprintln(f.Output(), err) f.usage() @@ -934,9 +946,9 @@ func (f *FlagSet) usage() { } } -//--unknown (args will be empty) -//--unknown --next-flag ... (args will be --next-flag ...) -//--unknown arg ... (args will be arg ...) +// --unknown (args will be empty) +// --unknown --next-flag ... (args will be --next-flag ...) +// --unknown arg ... (args will be arg ...) func stripUnknownFlagValue(args []string) []string { if len(args) == 0 { //--unknown @@ -960,7 +972,7 @@ func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []strin a = args name := s[2:] if len(name) == 0 || name[0] == '-' || name[0] == '=' { - err = f.failf("bad flag syntax: %s", s) + err = f.fail(&InvalidSyntaxError{specifiedFlag: s}) return } @@ -982,7 +994,7 @@ func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []strin return stripUnknownFlagValue(a), nil default: - err = f.failf("unknown flag: --%s", name) + err = f.fail(&NotExistError{name: name, messageType: flagUnknownFlagMessage}) return } } @@ -1000,13 +1012,16 @@ func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []strin a = a[1:] } else { // '--flag' (arg was required) - err = f.failf("flag needs an argument: %s", s) + err = f.fail(&ValueRequiredError{ + flag: flag, + specifiedName: name, + }) return } err = fn(flag, value) if err != nil { - f.failf(err.Error()) + f.fail(err) } return } @@ -1014,7 +1029,7 @@ func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []strin func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parseFunc) (outShorts string, outArgs []string, err error) { outArgs = args - if strings.HasPrefix(shorthands, "test.") { + if isGotestShorthandFlag(shorthands) { return } @@ -1039,7 +1054,11 @@ func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parse outArgs = stripUnknownFlagValue(outArgs) return default: - err = f.failf("unknown shorthand flag: %q in -%s", c, shorthands) + err = f.fail(&NotExistError{ + name: string(c), + specifiedShorthands: shorthands, + messageType: flagUnknownShorthandFlagMessage, + }) return } } @@ -1062,7 +1081,11 @@ func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parse outArgs = args[1:] } else { // '-f' (arg was required) - err = f.failf("flag needs an argument: %q in -%s", c, shorthands) + err = f.fail(&ValueRequiredError{ + flag: flag, + specifiedName: string(c), + specifiedShorthands: shorthands, + }) return } @@ -1072,7 +1095,7 @@ func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parse err = fn(flag, value) if err != nil { - f.failf(err.Error()) + f.fail(err) } return } @@ -1135,7 +1158,7 @@ func (f *FlagSet) Parse(arguments []string) error { } f.parsed = true - if len(arguments) < 0 { + if len(arguments) == 0 { 
return nil } diff --git a/vendor/github.com/spf13/pflag/func.go b/vendor/github.com/spf13/pflag/func.go new file mode 100644 index 00000000..9f4d88f2 --- /dev/null +++ b/vendor/github.com/spf13/pflag/func.go @@ -0,0 +1,37 @@ +package pflag + +// -- func Value +type funcValue func(string) error + +func (f funcValue) Set(s string) error { return f(s) } + +func (f funcValue) Type() string { return "func" } + +func (f funcValue) String() string { return "" } // same behavior as stdlib 'flag' package + +// Func defines a func flag with specified name, callback function and usage string. +// +// The callback function will be called every time "--{name}={value}" (or equivalent) is +// parsed on the command line, with "{value}" as an argument. +func (f *FlagSet) Func(name string, usage string, fn func(string) error) { + f.FuncP(name, "", usage, fn) +} + +// FuncP is like Func, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) FuncP(name string, shorthand string, usage string, fn func(string) error) { + var val Value = funcValue(fn) + f.VarP(val, name, shorthand, usage) +} + +// Func defines a func flag with specified name, callback function and usage string. +// +// The callback function will be called every time "--{name}={value}" (or equivalent) is +// parsed on the command line, with "{value}" as an argument. +func Func(name string, usage string, fn func(string) error) { + CommandLine.FuncP(name, "", usage, fn) +} + +// FuncP is like Func, but accepts a shorthand letter that can be used after a single dash. +func FuncP(name, shorthand string, usage string, fn func(string) error) { + CommandLine.FuncP(name, shorthand, usage, fn) +} diff --git a/vendor/github.com/spf13/pflag/golangflag.go b/vendor/github.com/spf13/pflag/golangflag.go index d3dd72b7..f563907e 100644 --- a/vendor/github.com/spf13/pflag/golangflag.go +++ b/vendor/github.com/spf13/pflag/golangflag.go @@ -10,6 +10,15 @@ import ( "strings" ) +// go test flags prefixes +func isGotestFlag(flag string) bool { + return strings.HasPrefix(flag, "-test.") +} + +func isGotestShorthandFlag(flag string) bool { + return strings.HasPrefix(flag, "test.") +} + // flagValueWrapper implements pflag.Value around a flag.Value. The main // difference here is the addition of the Type method that returns a string // name of the type. As this is generally unknown, we approximate that with @@ -103,3 +112,16 @@ func (f *FlagSet) AddGoFlagSet(newSet *goflag.FlagSet) { } f.addedGoFlagSets = append(f.addedGoFlagSets, newSet) } + +// ParseSkippedFlags explicitly Parses go test flags (i.e. the one starting with '-test.') with goflag.Parse(), +// since by default those are skipped by pflag.Parse(). 
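The new `func.go` above introduces callback-style flags. A small sketch, with a hypothetical repeatable `--label key=value` flag, of how `FuncP` might be used; the callback runs once per occurrence on the command line:

```go
package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/spf13/pflag"
)

func main() {
	labels := map[string]string{}

	fs := pflag.NewFlagSet("demo", pflag.ContinueOnError)
	// Invoked for every --label/-l occurrence; returning an error rejects the value.
	fs.FuncP("label", "l", "repeatable key=value label", func(s string) error {
		k, v, ok := strings.Cut(s, "=")
		if !ok {
			return fmt.Errorf("expected key=value, got %q", s)
		}
		labels[k] = v
		return nil
	})

	if err := fs.Parse(os.Args[1:]); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(2)
	}
	fmt.Println("labels:", labels)
}
```

An error returned from the callback propagates through `Set` and is reported as an `InvalidValueError`, consistent with the error types added earlier in this patch.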
+// Typical usage example: `ParseGoTestFlags(os.Args[1:], goflag.CommandLine)` +func ParseSkippedFlags(osArgs []string, goFlagSet *goflag.FlagSet) error { + var skippedFlags []string + for _, f := range osArgs { + if isGotestFlag(f) { + skippedFlags = append(skippedFlags, f) + } + } + return goFlagSet.Parse(skippedFlags) +} diff --git a/vendor/github.com/spf13/pflag/ipnet_slice.go b/vendor/github.com/spf13/pflag/ipnet_slice.go index 6b541aa8..c6e89da1 100644 --- a/vendor/github.com/spf13/pflag/ipnet_slice.go +++ b/vendor/github.com/spf13/pflag/ipnet_slice.go @@ -73,7 +73,7 @@ func (s *ipNetSliceValue) String() string { func ipNetSliceConv(val string) (interface{}, error) { val = strings.Trim(val, "[]") - // Emtpy string would cause a slice with one (empty) entry + // Empty string would cause a slice with one (empty) entry if len(val) == 0 { return []net.IPNet{}, nil } diff --git a/vendor/github.com/spf13/pflag/text.go b/vendor/github.com/spf13/pflag/text.go new file mode 100644 index 00000000..886d5a3d --- /dev/null +++ b/vendor/github.com/spf13/pflag/text.go @@ -0,0 +1,81 @@ +package pflag + +import ( + "encoding" + "fmt" + "reflect" +) + +// following is copied from go 1.23.4 flag.go +type textValue struct{ p encoding.TextUnmarshaler } + +func newTextValue(val encoding.TextMarshaler, p encoding.TextUnmarshaler) textValue { + ptrVal := reflect.ValueOf(p) + if ptrVal.Kind() != reflect.Ptr { + panic("variable value type must be a pointer") + } + defVal := reflect.ValueOf(val) + if defVal.Kind() == reflect.Ptr { + defVal = defVal.Elem() + } + if defVal.Type() != ptrVal.Type().Elem() { + panic(fmt.Sprintf("default type does not match variable type: %v != %v", defVal.Type(), ptrVal.Type().Elem())) + } + ptrVal.Elem().Set(defVal) + return textValue{p} +} + +func (v textValue) Set(s string) error { + return v.p.UnmarshalText([]byte(s)) +} + +func (v textValue) Get() interface{} { + return v.p +} + +func (v textValue) String() string { + if m, ok := v.p.(encoding.TextMarshaler); ok { + if b, err := m.MarshalText(); err == nil { + return string(b) + } + } + return "" +} + +//end of copy + +func (v textValue) Type() string { + return reflect.ValueOf(v.p).Type().Name() +} + +// GetText set out, which implements encoding.UnmarshalText, to the value of a flag with given name +func (f *FlagSet) GetText(name string, out encoding.TextUnmarshaler) error { + flag := f.Lookup(name) + if flag == nil { + return fmt.Errorf("flag accessed but not defined: %s", name) + } + if flag.Value.Type() != reflect.TypeOf(out).Name() { + return fmt.Errorf("trying to get %s value of flag of type %s", reflect.TypeOf(out).Name(), flag.Value.Type()) + } + return out.UnmarshalText([]byte(flag.Value.String())) +} + +// TextVar defines a flag with a specified name, default value, and usage string. The argument p must be a pointer to a variable that will hold the value of the flag, and p must implement encoding.TextUnmarshaler. If the flag is used, the flag value will be passed to p's UnmarshalText method. The type of the default value must be the same as the type of p. +func (f *FlagSet) TextVar(p encoding.TextUnmarshaler, name string, value encoding.TextMarshaler, usage string) { + f.VarP(newTextValue(value, p), name, "", usage) +} + +// TextVarP is like TextVar, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) TextVarP(p encoding.TextUnmarshaler, name, shorthand string, value encoding.TextMarshaler, usage string) { + f.VarP(newTextValue(value, p), name, shorthand, usage) +} + +// TextVar defines a flag with a specified name, default value, and usage string. The argument p must be a pointer to a variable that will hold the value of the flag, and p must implement encoding.TextUnmarshaler. If the flag is used, the flag value will be passed to p's UnmarshalText method. The type of the default value must be the same as the type of p. +func TextVar(p encoding.TextUnmarshaler, name string, value encoding.TextMarshaler, usage string) { + CommandLine.VarP(newTextValue(value, p), name, "", usage) +} + +// TextVarP is like TextVar, but accepts a shorthand letter that can be used after a single dash. +func TextVarP(p encoding.TextUnmarshaler, name, shorthand string, value encoding.TextMarshaler, usage string) { + CommandLine.VarP(newTextValue(value, p), name, shorthand, usage) +} diff --git a/vendor/github.com/spf13/pflag/time.go b/vendor/github.com/spf13/pflag/time.go new file mode 100644 index 00000000..dc024807 --- /dev/null +++ b/vendor/github.com/spf13/pflag/time.go @@ -0,0 +1,118 @@ +package pflag + +import ( + "fmt" + "strings" + "time" +) + +// TimeValue adapts time.Time for use as a flag. +type timeValue struct { + *time.Time + formats []string +} + +func newTimeValue(val time.Time, p *time.Time, formats []string) *timeValue { + *p = val + return &timeValue{ + Time: p, + formats: formats, + } +} + +// Set time.Time value from string based on accepted formats. +func (d *timeValue) Set(s string) error { + s = strings.TrimSpace(s) + for _, f := range d.formats { + v, err := time.Parse(f, s) + if err != nil { + continue + } + *d.Time = v + return nil + } + + formatsString := "" + for i, f := range d.formats { + if i > 0 { + formatsString += ", " + } + formatsString += fmt.Sprintf("`%s`", f) + } + + return fmt.Errorf("invalid time format `%s` must be one of: %s", s, formatsString) +} + +// Type name for time.Time flags. +func (d *timeValue) Type() string { + return "time" +} + +func (d *timeValue) String() string { return d.Time.Format(time.RFC3339Nano) } + +// GetTime return the time value of a flag with the given name +func (f *FlagSet) GetTime(name string) (time.Time, error) { + flag := f.Lookup(name) + if flag == nil { + err := fmt.Errorf("flag accessed but not defined: %s", name) + return time.Time{}, err + } + + if flag.Value.Type() != "time" { + err := fmt.Errorf("trying to get %s value of flag of type %s", "time", flag.Value.Type()) + return time.Time{}, err + } + + val, ok := flag.Value.(*timeValue) + if !ok { + return time.Time{}, fmt.Errorf("value %s is not a time", flag.Value) + } + + return *val.Time, nil +} + +// TimeVar defines a time.Time flag with specified name, default value, and usage string. +// The argument p points to a time.Time variable in which to store the value of the flag. +func (f *FlagSet) TimeVar(p *time.Time, name string, value time.Time, formats []string, usage string) { + f.TimeVarP(p, name, "", value, formats, usage) +} + +// TimeVarP is like TimeVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) TimeVarP(p *time.Time, name, shorthand string, value time.Time, formats []string, usage string) { + f.VarP(newTimeValue(value, p, formats), name, shorthand, usage) +} + +// TimeVar defines a time.Time flag with specified name, default value, and usage string. 
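The new `text.go` and `time.go` above let any `encoding.TextMarshaler`/`TextUnmarshaler` pair and `time.Time` values be bound as flags. A brief sketch using only standard-library types; the flag names and the single RFC 3339 layout are illustrative:

```go
package main

import (
	"fmt"
	"net/netip"
	"os"
	"time"

	"github.com/spf13/pflag"
)

func main() {
	fs := pflag.NewFlagSet("demo", pflag.ContinueOnError)

	// netip.Addr implements both TextMarshaler and TextUnmarshaler,
	// so it can be bound directly with TextVar.
	var addr netip.Addr
	fs.TextVar(&addr, "listen-addr", netip.MustParseAddr("127.0.0.1"), "address to listen on")

	// TimeVarP takes the list of layouts a value is allowed to use.
	var since time.Time
	fs.TimeVarP(&since, "since", "s", time.Time{}, []string{time.RFC3339}, "only include items after this time")

	if err := fs.Parse(os.Args[1:]); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(2)
	}
	fmt.Println("listen:", addr, "since:", since)
}
```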
+// The argument p points to a time.Time variable in which to store the value of the flag. +func TimeVar(p *time.Time, name string, value time.Time, formats []string, usage string) { + CommandLine.TimeVarP(p, name, "", value, formats, usage) +} + +// TimeVarP is like TimeVar, but accepts a shorthand letter that can be used after a single dash. +func TimeVarP(p *time.Time, name, shorthand string, value time.Time, formats []string, usage string) { + CommandLine.VarP(newTimeValue(value, p, formats), name, shorthand, usage) +} + +// Time defines a time.Time flag with specified name, default value, and usage string. +// The return value is the address of a time.Time variable that stores the value of the flag. +func (f *FlagSet) Time(name string, value time.Time, formats []string, usage string) *time.Time { + return f.TimeP(name, "", value, formats, usage) +} + +// TimeP is like Time, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) TimeP(name, shorthand string, value time.Time, formats []string, usage string) *time.Time { + p := new(time.Time) + f.TimeVarP(p, name, shorthand, value, formats, usage) + return p +} + +// Time defines a time.Time flag with specified name, default value, and usage string. +// The return value is the address of a time.Time variable that stores the value of the flag. +func Time(name string, value time.Time, formats []string, usage string) *time.Time { + return CommandLine.TimeP(name, "", value, formats, usage) +} + +// TimeP is like Time, but accepts a shorthand letter that can be used after a single dash. +func TimeP(name, shorthand string, value time.Time, formats []string, usage string) *time.Time { + return CommandLine.TimeP(name, shorthand, value, formats, usage) +} diff --git a/vendor/go.opentelemetry.io/otel/.clomonitor.yml b/vendor/go.opentelemetry.io/otel/.clomonitor.yml new file mode 100644 index 00000000..128d61a2 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/.clomonitor.yml @@ -0,0 +1,3 @@ +exemptions: + - check: artifacthub_badge + reason: "Artifact Hub doesn't support Go packages" diff --git a/vendor/go.opentelemetry.io/otel/.golangci.yml b/vendor/go.opentelemetry.io/otel/.golangci.yml index 888e5da8..5f69cc02 100644 --- a/vendor/go.opentelemetry.io/otel/.golangci.yml +++ b/vendor/go.opentelemetry.io/otel/.golangci.yml @@ -66,8 +66,6 @@ linters: desc: Do not use cross-module internal packages. - pkg: go.opentelemetry.io/otel/internal/internaltest desc: Do not use cross-module internal packages. - - pkg: go.opentelemetry.io/otel/internal/matchers - desc: Do not use cross-module internal packages. otlp-internal: files: - '!**/exporters/otlp/internal/**/*.go' @@ -190,6 +188,10 @@ linters: - legacy - std-error-handling rules: + - linters: + - revive + path: schema/v.*/types/.* + text: avoid meaningless package names # TODO: Having appropriate comments for exported objects helps development, # even for objects in internal packages. Appropriate comments for all # exported objects should be added and this exclusion removed. diff --git a/vendor/go.opentelemetry.io/otel/CHANGELOG.md b/vendor/go.opentelemetry.io/otel/CHANGELOG.md index 648e4aba..4acc7570 100644 --- a/vendor/go.opentelemetry.io/otel/CHANGELOG.md +++ b/vendor/go.opentelemetry.io/otel/CHANGELOG.md @@ -11,6 +11,61 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm +## [1.37.0/0.59.0/0.13.0] 2025-06-25 + +### Added + +- The `go.opentelemetry.io/otel/semconv/v1.33.0` package. 
+ The package contains semantic conventions from the `v1.33.0` version of the OpenTelemetry Semantic Conventions. + See the [migration documentation](./semconv/v1.33.0/MIGRATION.md) for information on how to upgrade from `go.opentelemetry.io/otel/semconv/v1.32.0.`(#6799) +- The `go.opentelemetry.io/otel/semconv/v1.34.0` package. + The package contains semantic conventions from the `v1.34.0` version of the OpenTelemetry Semantic Conventions. (#6812) +- Add metric's schema URL as `otel_scope_schema_url` label in `go.opentelemetry.io/otel/exporters/prometheus`. (#5947) +- Add metric's scope attributes as `otel_scope_[attribute]` labels in `go.opentelemetry.io/otel/exporters/prometheus`. (#5947) +- Add `EventName` to `EnabledParameters` in `go.opentelemetry.io/otel/log`. (#6825) +- Add `EventName` to `EnabledParameters` in `go.opentelemetry.io/otel/sdk/log`. (#6825) +- Changed handling of `go.opentelemetry.io/otel/exporters/prometheus` metric renaming to add unit suffixes when it doesn't match one of the pre-defined values in the unit suffix map. (#6839) + +### Changed + +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/bridge/opentracing`. (#6827) +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/exporters/zipkin`. (#6829) +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/metric`. (#6832) +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/sdk/resource`. (#6834) +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/sdk/trace`. (#6835) +- The semantic conventions have been upgraded from `v1.26.0` to `v1.34.0` in `go.opentelemetry.io/otel/trace`. (#6836) +- `Record.Resource` now returns `*resource.Resource` instead of `resource.Resource` in `go.opentelemetry.io/otel/sdk/log`. (#6864) +- Retry now shows error cause for context timeout in `go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc`, `go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc`, `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc`, `go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp`, `go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp`, `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp`. (#6898) + +### Fixed + +- Stop stripping trailing slashes from configured endpoint URL in `go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc`. (#6710) +- Stop stripping trailing slashes from configured endpoint URL in `go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp`. (#6710) +- Stop stripping trailing slashes from configured endpoint URL in `go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc`. (#6710) +- Stop stripping trailing slashes from configured endpoint URL in `go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp`. (#6710) +- Validate exponential histogram scale range for Prometheus compatibility in `go.opentelemetry.io/otel/exporters/prometheus`. (#6822) +- Context cancellation during metric pipeline produce does not corrupt data in `go.opentelemetry.io/otel/sdk/metric`. (#6914) + +### Removed + +- `go.opentelemetry.io/otel/exporters/prometheus` no longer exports `otel_scope_info` metric. 
(#6770) + +## [0.12.2] 2025-05-22 + +### Fixed + +- Retract `v0.12.0` release of `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc` module that contains invalid dependencies. (#6804) +- Retract `v0.12.0` release of `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp` module that contains invalid dependencies. (#6804) +- Retract `v0.12.0` release of `go.opentelemetry.io/otel/exporters/stdout/stdoutlog` module that contains invalid dependencies. (#6804) + +## [0.12.1] 2025-05-21 + +### Fixes + +- Use the proper dependency version of `go.opentelemetry.io/otel/sdk/log/logtest` in `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc`. (#6800) +- Use the proper dependency version of `go.opentelemetry.io/otel/sdk/log/logtest` in `go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp`. (#6800) +- Use the proper dependency version of `go.opentelemetry.io/otel/sdk/log/logtest` in `go.opentelemetry.io/otel/exporters/stdout/stdoutlog`. (#6800) + ## [1.36.0/0.58.0/0.12.0] 2025-05-20 ### Added @@ -3288,7 +3343,10 @@ It contains api and sdk for trace and meter. - CircleCI build CI manifest files. - CODEOWNERS file to track owners of this project. -[Unreleased]: https://github.com/open-telemetry/opentelemetry-go/compare/v1.36.0...HEAD +[Unreleased]: https://github.com/open-telemetry/opentelemetry-go/compare/v1.37.0...HEAD +[1.37.0/0.59.0/0.13.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.37.0 +[0.12.2]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/log/v0.12.2 +[0.12.1]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/log/v0.12.1 [1.36.0/0.58.0/0.12.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.36.0 [1.35.0/0.57.0/0.11.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.35.0 [1.34.0/0.56.0/0.10.0]: https://github.com/open-telemetry/opentelemetry-go/releases/tag/v1.34.0 diff --git a/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md b/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md index 1902dac0..f9ddc281 100644 --- a/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md +++ b/vendor/go.opentelemetry.io/otel/CONTRIBUTING.md @@ -109,10 +109,9 @@ A PR is considered **ready to merge** when: This is not enforced through automation, but needs to be validated by the maintainer merging. - * The qualified approvals need to be from [Approver]s/[Maintainer]s - affiliated with different companies. Two qualified approvals from - [Approver]s or [Maintainer]s affiliated with the same company counts as a - single qualified approval. + * At least one of the qualified approvals need to be from an + [Approver]/[Maintainer] affiliated with a different company than the author + of the PR. * PRs introducing changes that have already been discussed and consensus reached only need one qualified approval. The discussion and resolution needs to be linked to the PR. @@ -650,11 +649,11 @@ should be canceled. 
### Maintainers -- [Damien Mathieu](https://github.com/dmathieu), Elastic -- [David Ashpole](https://github.com/dashpole), Google -- [Robert Pająk](https://github.com/pellared), Splunk -- [Sam Xie](https://github.com/XSAM), Cisco/AppDynamics -- [Tyler Yahn](https://github.com/MrAlias), Splunk +- [Damien Mathieu](https://github.com/dmathieu), Elastic ([GPG](https://keys.openpgp.org/search?q=5A126B972A81A6CE443E5E1B408B8E44F0873832)) +- [David Ashpole](https://github.com/dashpole), Google ([GPG](https://keys.openpgp.org/search?q=C0D1BDDCAAEAE573673085F176327DA4D864DC70)) +- [Robert Pająk](https://github.com/pellared), Splunk ([GPG](https://keys.openpgp.org/search?q=CDAD3A60476A3DE599AA5092E5F7C35A4DBE90C2)) +- [Sam Xie](https://github.com/XSAM), Splunk ([GPG](https://keys.openpgp.org/search?q=AEA033782371ABB18EE39188B8044925D6FEEBEA)) +- [Tyler Yahn](https://github.com/MrAlias), Splunk ([GPG](https://keys.openpgp.org/search?q=0x46B0F3E1A8B1BA5A)) ### Emeritus diff --git a/vendor/go.opentelemetry.io/otel/Makefile b/vendor/go.opentelemetry.io/otel/Makefile index 62a56f4d..4fa423ca 100644 --- a/vendor/go.opentelemetry.io/otel/Makefile +++ b/vendor/go.opentelemetry.io/otel/Makefile @@ -293,7 +293,7 @@ semconv-generate: $(SEMCONVKIT) --param tag=$(TAG) \ go \ /home/weaver/target - $(SEMCONVKIT) -output "$(SEMCONVPKG)/$(TAG)" -tag "$(TAG)" + $(SEMCONVKIT) -semconv "$(SEMCONVPKG)" -tag "$(TAG)" .PHONY: gorelease gorelease: $(OTEL_GO_MOD_DIRS:%=gorelease/%) diff --git a/vendor/go.opentelemetry.io/otel/README.md b/vendor/go.opentelemetry.io/otel/README.md index b6007881..5fa1b75c 100644 --- a/vendor/go.opentelemetry.io/otel/README.md +++ b/vendor/go.opentelemetry.io/otel/README.md @@ -7,6 +7,7 @@ [![OpenSSF Scorecard](https://api.scorecard.dev/projects/github.com/open-telemetry/opentelemetry-go/badge)](https://scorecard.dev/viewer/?uri=github.com/open-telemetry/opentelemetry-go) [![OpenSSF Best Practices](https://www.bestpractices.dev/projects/9996/badge)](https://www.bestpractices.dev/projects/9996) [![Fuzzing Status](https://oss-fuzz-build-logs.storage.googleapis.com/badges/opentelemetry-go.svg)](https://issues.oss-fuzz.com/issues?q=project:opentelemetry-go) +[![FOSSA Status](https://app.fossa.com/api/projects/custom%2B162%2Fgithub.com%2Fopen-telemetry%2Fopentelemetry-go.svg?type=shield&issueType=license)](https://app.fossa.com/projects/custom%2B162%2Fgithub.com%2Fopen-telemetry%2Fopentelemetry-go?ref=badge_shield&issueType=license) [![Slack](https://img.shields.io/badge/slack-@cncf/otel--go-brightgreen.svg?logo=slack)](https://cloud-native.slack.com/archives/C01NPAXACKT) OpenTelemetry-Go is the [Go](https://golang.org/) implementation of [OpenTelemetry](https://opentelemetry.io/). diff --git a/vendor/go.opentelemetry.io/otel/RELEASING.md b/vendor/go.opentelemetry.io/otel/RELEASING.md index 7c1a9119..1ddcdef0 100644 --- a/vendor/go.opentelemetry.io/otel/RELEASING.md +++ b/vendor/go.opentelemetry.io/otel/RELEASING.md @@ -112,6 +112,29 @@ It is critical you make sure the version you push upstream is correct. Finally create a Release for the new `` on GitHub. The release body should include all the release notes from the Changelog for this release. +### Sign the Release Artifact + +To ensure we comply with CNCF best practices, we need to sign the release artifact. +The tarball attached to the GitHub release needs to be signed with your GPG key. + +Follow [these steps] to sign the release artifact and upload it to GitHub. 
+You can use [this script] to verify the contents of the tarball before signing it. + +Be sure to use the correct GPG key when signing the release artifact. + +```terminal +gpg --local-user --armor --detach-sign opentelemetry-go-.tar.gz +``` + +You can verify the signature with: + +```terminal +gpg --verify opentelemetry-go-.tar.gz.asc opentelemetry-go-.tar.gz +``` + +[these steps]: https://wiki.debian.org/Creating%20signed%20GitHub%20releases +[this script]: https://github.com/MrAlias/attest-sh + ## Post-Release ### Contrib Repository diff --git a/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile b/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile index 51fb76b3..935bd487 100644 --- a/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile +++ b/vendor/go.opentelemetry.io/otel/dependencies.Dockerfile @@ -1,4 +1,4 @@ # This is a renovate-friendly source of Docker images. -FROM python:3.13.3-slim-bullseye@sha256:9e3f9243e06fd68eb9519074b49878eda20ad39a855fac51aaffb741de20726e AS python -FROM otel/weaver:v0.15.0@sha256:1cf1c72eaed57dad813c2e359133b8a15bd4facf305aae5b13bdca6d3eccff56 AS weaver +FROM python:3.13.5-slim-bullseye@sha256:5b9fc0d8ef79cfb5f300e61cb516e0c668067bbf77646762c38c94107e230dbc AS python +FROM otel/weaver:v0.15.2@sha256:b13acea09f721774daba36344861f689ac4bb8d6ecd94c4600b4d590c8fb34b9 AS weaver FROM avtodev/markdown-lint:v1@sha256:6aeedc2f49138ce7a1cd0adffc1b1c0321b841dc2102408967d9301c031949ee AS markdown diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/MIGRATION.md b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/MIGRATION.md new file mode 100644 index 00000000..02b56115 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/MIGRATION.md @@ -0,0 +1,4 @@ + +# Migration from v1.33.0 to v1.34.0 + +The `go.opentelemetry.io/otel/semconv/v1.34.0` package should be a drop-in replacement for `go.opentelemetry.io/otel/semconv/v1.33.0`. diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/README.md b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/README.md new file mode 100644 index 00000000..fab06c97 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/README.md @@ -0,0 +1,3 @@ +# Semconv v1.34.0 + +[![PkgGoDev](https://pkg.go.dev/badge/go.opentelemetry.io/otel/semconv/v1.34.0)](https://pkg.go.dev/go.opentelemetry.io/otel/semconv/v1.34.0) diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/attribute_group.go b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/attribute_group.go new file mode 100644 index 00000000..5b566625 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/attribute_group.go @@ -0,0 +1,13851 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +// Code generated from semantic convention specification. DO NOT EDIT. + +package semconv // import "go.opentelemetry.io/otel/semconv/v1.34.0" + +import "go.opentelemetry.io/otel/attribute" + +// Namespace: android +const ( + // AndroidAppStateKey is the attribute Key conforming to the "android.app.state" + // semantic conventions. It represents the this attribute represents the state + // of the application. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "created" + // Note: The Android lifecycle states are defined in + // [Activity lifecycle callbacks], and from which the `OS identifiers` are + // derived. 
+ // + // [Activity lifecycle callbacks]: https://developer.android.com/guide/components/activities/activity-lifecycle#lc + AndroidAppStateKey = attribute.Key("android.app.state") + + // AndroidOSAPILevelKey is the attribute Key conforming to the + // "android.os.api_level" semantic conventions. It represents the uniquely + // identifies the framework API revision offered by a version (`os.version`) of + // the android operating system. More information can be found [here]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "33", "32" + // + // [here]: https://developer.android.com/guide/topics/manifest/uses-sdk-element#ApiLevels + AndroidOSAPILevelKey = attribute.Key("android.os.api_level") +) + +// AndroidOSAPILevel returns an attribute KeyValue conforming to the +// "android.os.api_level" semantic conventions. It represents the uniquely +// identifies the framework API revision offered by a version (`os.version`) of +// the android operating system. More information can be found [here]. +// +// [here]: https://developer.android.com/guide/topics/manifest/uses-sdk-element#ApiLevels +func AndroidOSAPILevel(val string) attribute.KeyValue { + return AndroidOSAPILevelKey.String(val) +} + +// Enum values for android.app.state +var ( + // Any time before Activity.onResume() or, if the app has no Activity, + // Context.startService() has been called in the app for the first time. + // + // Stability: development + AndroidAppStateCreated = AndroidAppStateKey.String("created") + // Any time after Activity.onPause() or, if the app has no Activity, + // Context.stopService() has been called when the app was in the foreground + // state. + // + // Stability: development + AndroidAppStateBackground = AndroidAppStateKey.String("background") + // Any time after Activity.onResume() or, if the app has no Activity, + // Context.startService() has been called when the app was in either the created + // or background states. + // + // Stability: development + AndroidAppStateForeground = AndroidAppStateKey.String("foreground") +) + +// Namespace: app +const ( + // AppInstallationIDKey is the attribute Key conforming to the + // "app.installation.id" semantic conventions. It represents a unique identifier + // representing the installation of an application on a specific device. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2ab2916d-a51f-4ac8-80ee-45ac31a28092" + // Note: Its value SHOULD persist across launches of the same application + // installation, including through application upgrades. + // It SHOULD change if the application is uninstalled or if all applications of + // the vendor are uninstalled. + // Additionally, users might be able to reset this value (e.g. by clearing + // application data). + // If an app is installed multiple times on the same device (e.g. in different + // accounts on Android), each `app.installation.id` SHOULD have a different + // value. + // If multiple OpenTelemetry SDKs are used within the same application, they + // SHOULD use the same value for `app.installation.id`. + // Hardware IDs (e.g. serial number, IMEI, MAC address) MUST NOT be used as the + // `app.installation.id`. + // + // For iOS, this value SHOULD be equal to the [vendor identifier]. + // + // For Android, examples of `app.installation.id` implementations include: + // + // - [Firebase Installation ID]. + // - A globally unique UUID which is persisted across sessions in your + // application. 
+ // - [App set ID]. + // - [`Settings.getString(Settings.Secure.ANDROID_ID)`]. + // + // More information about Android identifier best practices can be found [here] + // . + // + // [vendor identifier]: https://developer.apple.com/documentation/uikit/uidevice/identifierforvendor + // [Firebase Installation ID]: https://firebase.google.com/docs/projects/manage-installations + // [App set ID]: https://developer.android.com/identity/app-set-id + // [`Settings.getString(Settings.Secure.ANDROID_ID)`]: https://developer.android.com/reference/android/provider/Settings.Secure#ANDROID_ID + // [here]: https://developer.android.com/training/articles/user-data-ids + AppInstallationIDKey = attribute.Key("app.installation.id") + + // AppScreenCoordinateXKey is the attribute Key conforming to the + // "app.screen.coordinate.x" semantic conventions. It represents the x + // (horizontal) coordinate of a screen coordinate, in screen pixels. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 0, 131 + AppScreenCoordinateXKey = attribute.Key("app.screen.coordinate.x") + + // AppScreenCoordinateYKey is the attribute Key conforming to the + // "app.screen.coordinate.y" semantic conventions. It represents the y + // (vertical) component of a screen coordinate, in screen pixels. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 12, 99 + AppScreenCoordinateYKey = attribute.Key("app.screen.coordinate.y") + + // AppWidgetIDKey is the attribute Key conforming to the "app.widget.id" + // semantic conventions. It represents an identifier that uniquely + // differentiates this widget from other widgets in the same application. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "f9bc787d-ff05-48ad-90e1-fca1d46130b3", "submit_order_1829" + // Note: A widget is an application component, typically an on-screen visual GUI + // element. + AppWidgetIDKey = attribute.Key("app.widget.id") + + // AppWidgetNameKey is the attribute Key conforming to the "app.widget.name" + // semantic conventions. It represents the name of an application widget. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "submit", "attack", "Clear Cart" + // Note: A widget is an application component, typically an on-screen visual GUI + // element. + AppWidgetNameKey = attribute.Key("app.widget.name") +) + +// AppInstallationID returns an attribute KeyValue conforming to the +// "app.installation.id" semantic conventions. It represents a unique identifier +// representing the installation of an application on a specific device. +func AppInstallationID(val string) attribute.KeyValue { + return AppInstallationIDKey.String(val) +} + +// AppScreenCoordinateX returns an attribute KeyValue conforming to the +// "app.screen.coordinate.x" semantic conventions. It represents the x +// (horizontal) coordinate of a screen coordinate, in screen pixels. +func AppScreenCoordinateX(val int) attribute.KeyValue { + return AppScreenCoordinateXKey.Int(val) +} + +// AppScreenCoordinateY returns an attribute KeyValue conforming to the +// "app.screen.coordinate.y" semantic conventions. It represents the y (vertical) +// component of a screen coordinate, in screen pixels. 
+func AppScreenCoordinateY(val int) attribute.KeyValue { + return AppScreenCoordinateYKey.Int(val) +} + +// AppWidgetID returns an attribute KeyValue conforming to the "app.widget.id" +// semantic conventions. It represents an identifier that uniquely differentiates +// this widget from other widgets in the same application. +func AppWidgetID(val string) attribute.KeyValue { + return AppWidgetIDKey.String(val) +} + +// AppWidgetName returns an attribute KeyValue conforming to the +// "app.widget.name" semantic conventions. It represents the name of an +// application widget. +func AppWidgetName(val string) attribute.KeyValue { + return AppWidgetNameKey.String(val) +} + +// Namespace: artifact +const ( + // ArtifactAttestationFilenameKey is the attribute Key conforming to the + // "artifact.attestation.filename" semantic conventions. It represents the + // provenance filename of the built attestation which directly relates to the + // build artifact filename. This filename SHOULD accompany the artifact at + // publish time. See the [SLSA Relationship] specification for more information. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "golang-binary-amd64-v0.1.0.attestation", + // "docker-image-amd64-v0.1.0.intoto.json1", "release-1.tar.gz.attestation", + // "file-name-package.tar.gz.intoto.json1" + // + // [SLSA Relationship]: https://slsa.dev/spec/v1.0/distributing-provenance#relationship-between-artifacts-and-attestations + ArtifactAttestationFilenameKey = attribute.Key("artifact.attestation.filename") + + // ArtifactAttestationHashKey is the attribute Key conforming to the + // "artifact.attestation.hash" semantic conventions. It represents the full + // [hash value (see glossary)], of the built attestation. Some envelopes in the + // [software attestation space] also refer to this as the **digest**. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1b31dfcd5b7f9267bf2ff47651df1cfb9147b9e4df1f335accf65b4cda498408" + // + // [hash value (see glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf + // [software attestation space]: https://github.com/in-toto/attestation/tree/main/spec + ArtifactAttestationHashKey = attribute.Key("artifact.attestation.hash") + + // ArtifactAttestationIDKey is the attribute Key conforming to the + // "artifact.attestation.id" semantic conventions. It represents the id of the + // build [software attestation]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "123" + // + // [software attestation]: https://slsa.dev/attestation-model + ArtifactAttestationIDKey = attribute.Key("artifact.attestation.id") + + // ArtifactFilenameKey is the attribute Key conforming to the + // "artifact.filename" semantic conventions. It represents the human readable + // file name of the artifact, typically generated during build and release + // processes. Often includes the package name and version in the file name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "golang-binary-amd64-v0.1.0", "docker-image-amd64-v0.1.0", + // "release-1.tar.gz", "file-name-package.tar.gz" + // Note: This file name can also act as the [Package Name] + // in cases where the package ecosystem maps accordingly. + // Additionally, the artifact [can be published] + // for others, but that is not a guarantee. 
+ // + // [Package Name]: https://slsa.dev/spec/v1.0/terminology#package-model + // [can be published]: https://slsa.dev/spec/v1.0/terminology#software-supply-chain + ArtifactFilenameKey = attribute.Key("artifact.filename") + + // ArtifactHashKey is the attribute Key conforming to the "artifact.hash" + // semantic conventions. It represents the full [hash value (see glossary)], + // often found in checksum.txt on a release of the artifact and used to verify + // package integrity. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "9ff4c52759e2c4ac70b7d517bc7fcdc1cda631ca0045271ddd1b192544f8a3e9" + // Note: The specific algorithm used to create the cryptographic hash value is + // not defined. In situations where an artifact has multiple + // cryptographic hashes, it is up to the implementer to choose which + // hash value to set here; this should be the most secure hash algorithm + // that is suitable for the situation and consistent with the + // corresponding attestation. The implementer can then provide the other + // hash values through an additional set of attribute extensions as they + // deem necessary. + // + // [hash value (see glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf + ArtifactHashKey = attribute.Key("artifact.hash") + + // ArtifactPurlKey is the attribute Key conforming to the "artifact.purl" + // semantic conventions. It represents the [Package URL] of the + // [package artifact] provides a standard way to identify and locate the + // packaged artifact. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "pkg:github/package-url/purl-spec@1209109710924", + // "pkg:npm/foo@12.12.3" + // + // [Package URL]: https://github.com/package-url/purl-spec + // [package artifact]: https://slsa.dev/spec/v1.0/terminology#package-model + ArtifactPurlKey = attribute.Key("artifact.purl") + + // ArtifactVersionKey is the attribute Key conforming to the "artifact.version" + // semantic conventions. It represents the version of the artifact. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "v0.1.0", "1.2.1", "122691-build" + ArtifactVersionKey = attribute.Key("artifact.version") +) + +// ArtifactAttestationFilename returns an attribute KeyValue conforming to the +// "artifact.attestation.filename" semantic conventions. It represents the +// provenance filename of the built attestation which directly relates to the +// build artifact filename. This filename SHOULD accompany the artifact at +// publish time. See the [SLSA Relationship] specification for more information. +// +// [SLSA Relationship]: https://slsa.dev/spec/v1.0/distributing-provenance#relationship-between-artifacts-and-attestations +func ArtifactAttestationFilename(val string) attribute.KeyValue { + return ArtifactAttestationFilenameKey.String(val) +} + +// ArtifactAttestationHash returns an attribute KeyValue conforming to the +// "artifact.attestation.hash" semantic conventions. It represents the full +// [hash value (see glossary)], of the built attestation. Some envelopes in the +// [software attestation space] also refer to this as the **digest**. 
+// +// [hash value (see glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf +// [software attestation space]: https://github.com/in-toto/attestation/tree/main/spec +func ArtifactAttestationHash(val string) attribute.KeyValue { + return ArtifactAttestationHashKey.String(val) +} + +// ArtifactAttestationID returns an attribute KeyValue conforming to the +// "artifact.attestation.id" semantic conventions. It represents the id of the +// build [software attestation]. +// +// [software attestation]: https://slsa.dev/attestation-model +func ArtifactAttestationID(val string) attribute.KeyValue { + return ArtifactAttestationIDKey.String(val) +} + +// ArtifactFilename returns an attribute KeyValue conforming to the +// "artifact.filename" semantic conventions. It represents the human readable +// file name of the artifact, typically generated during build and release +// processes. Often includes the package name and version in the file name. +func ArtifactFilename(val string) attribute.KeyValue { + return ArtifactFilenameKey.String(val) +} + +// ArtifactHash returns an attribute KeyValue conforming to the "artifact.hash" +// semantic conventions. It represents the full [hash value (see glossary)], +// often found in checksum.txt on a release of the artifact and used to verify +// package integrity. +// +// [hash value (see glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf +func ArtifactHash(val string) attribute.KeyValue { + return ArtifactHashKey.String(val) +} + +// ArtifactPurl returns an attribute KeyValue conforming to the "artifact.purl" +// semantic conventions. It represents the [Package URL] of the +// [package artifact] provides a standard way to identify and locate the packaged +// artifact. +// +// [Package URL]: https://github.com/package-url/purl-spec +// [package artifact]: https://slsa.dev/spec/v1.0/terminology#package-model +func ArtifactPurl(val string) attribute.KeyValue { + return ArtifactPurlKey.String(val) +} + +// ArtifactVersion returns an attribute KeyValue conforming to the +// "artifact.version" semantic conventions. It represents the version of the +// artifact. +func ArtifactVersion(val string) attribute.KeyValue { + return ArtifactVersionKey.String(val) +} + +// Namespace: aws +const ( + // AWSBedrockGuardrailIDKey is the attribute Key conforming to the + // "aws.bedrock.guardrail.id" semantic conventions. It represents the unique + // identifier of the AWS Bedrock Guardrail. A [guardrail] helps safeguard and + // prevent unwanted behavior from model responses or user messages. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "sgi5gkybzqak" + // + // [guardrail]: https://docs.aws.amazon.com/bedrock/latest/userguide/guardrails.html + AWSBedrockGuardrailIDKey = attribute.Key("aws.bedrock.guardrail.id") + + // AWSBedrockKnowledgeBaseIDKey is the attribute Key conforming to the + // "aws.bedrock.knowledge_base.id" semantic conventions. It represents the + // unique identifier of the AWS Bedrock Knowledge base. A [knowledge base] is a + // bank of information that can be queried by models to generate more relevant + // responses and augment prompts. 
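The generated helpers in `attribute_group.go` return ready-made `attribute.KeyValue` pairs, so instrumentation never has to spell out the semantic-convention key strings by hand. A hedged sketch of how a few of the helpers defined above might be attached to a span; the tracer name and attribute values are illustrative:

```go
package main

import (
	"context"

	"go.opentelemetry.io/otel"
	semconv "go.opentelemetry.io/otel/semconv/v1.34.0"
	"go.opentelemetry.io/otel/trace"
)

func recordPublish(ctx context.Context) {
	tracer := otel.Tracer("example/release")
	_, span := tracer.Start(ctx, "publish-artifact",
		trace.WithAttributes(
			semconv.ArtifactFilename("example-binary-amd64-v0.1.0.tar.gz"),
			semconv.ArtifactVersion("v0.1.0"),
		))
	defer span.End()

	// Attributes can also be added after the span has started.
	span.SetAttributes(semconv.AppInstallationID("2ab2916d-a51f-4ac8-80ee-45ac31a28092"))
}
```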
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "XFWUPB9PAW" + // + // [knowledge base]: https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base.html + AWSBedrockKnowledgeBaseIDKey = attribute.Key("aws.bedrock.knowledge_base.id") + + // AWSDynamoDBAttributeDefinitionsKey is the attribute Key conforming to the + // "aws.dynamodb.attribute_definitions" semantic conventions. It represents the + // JSON-serialized value of each item in the `AttributeDefinitions` request + // field. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "AttributeName": "string", "AttributeType": "string" }" + AWSDynamoDBAttributeDefinitionsKey = attribute.Key("aws.dynamodb.attribute_definitions") + + // AWSDynamoDBAttributesToGetKey is the attribute Key conforming to the + // "aws.dynamodb.attributes_to_get" semantic conventions. It represents the + // value of the `AttributesToGet` request parameter. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "lives", "id" + AWSDynamoDBAttributesToGetKey = attribute.Key("aws.dynamodb.attributes_to_get") + + // AWSDynamoDBConsistentReadKey is the attribute Key conforming to the + // "aws.dynamodb.consistent_read" semantic conventions. It represents the value + // of the `ConsistentRead` request parameter. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + AWSDynamoDBConsistentReadKey = attribute.Key("aws.dynamodb.consistent_read") + + // AWSDynamoDBConsumedCapacityKey is the attribute Key conforming to the + // "aws.dynamodb.consumed_capacity" semantic conventions. It represents the + // JSON-serialized value of each item in the `ConsumedCapacity` response field. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "CapacityUnits": number, "GlobalSecondaryIndexes": { "string" : + // { "CapacityUnits": number, "ReadCapacityUnits": number, "WriteCapacityUnits": + // number } }, "LocalSecondaryIndexes": { "string" : { "CapacityUnits": number, + // "ReadCapacityUnits": number, "WriteCapacityUnits": number } }, + // "ReadCapacityUnits": number, "Table": { "CapacityUnits": number, + // "ReadCapacityUnits": number, "WriteCapacityUnits": number }, "TableName": + // "string", "WriteCapacityUnits": number }" + AWSDynamoDBConsumedCapacityKey = attribute.Key("aws.dynamodb.consumed_capacity") + + // AWSDynamoDBCountKey is the attribute Key conforming to the + // "aws.dynamodb.count" semantic conventions. It represents the value of the + // `Count` response parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 10 + AWSDynamoDBCountKey = attribute.Key("aws.dynamodb.count") + + // AWSDynamoDBExclusiveStartTableKey is the attribute Key conforming to the + // "aws.dynamodb.exclusive_start_table" semantic conventions. It represents the + // value of the `ExclusiveStartTableName` request parameter. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Users", "CatsTable" + AWSDynamoDBExclusiveStartTableKey = attribute.Key("aws.dynamodb.exclusive_start_table") + + // AWSDynamoDBGlobalSecondaryIndexUpdatesKey is the attribute Key conforming to + // the "aws.dynamodb.global_secondary_index_updates" semantic conventions. 
It + // represents the JSON-serialized value of each item in the + // `GlobalSecondaryIndexUpdates` request field. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "Create": { "IndexName": "string", "KeySchema": [ { + // "AttributeName": "string", "KeyType": "string" } ], "Projection": { + // "NonKeyAttributes": [ "string" ], "ProjectionType": "string" }, + // "ProvisionedThroughput": { "ReadCapacityUnits": number, "WriteCapacityUnits": + // number } }" + AWSDynamoDBGlobalSecondaryIndexUpdatesKey = attribute.Key("aws.dynamodb.global_secondary_index_updates") + + // AWSDynamoDBGlobalSecondaryIndexesKey is the attribute Key conforming to the + // "aws.dynamodb.global_secondary_indexes" semantic conventions. It represents + // the JSON-serialized value of each item of the `GlobalSecondaryIndexes` + // request field. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "IndexName": "string", "KeySchema": [ { "AttributeName": + // "string", "KeyType": "string" } ], "Projection": { "NonKeyAttributes": [ + // "string" ], "ProjectionType": "string" }, "ProvisionedThroughput": { + // "ReadCapacityUnits": number, "WriteCapacityUnits": number } }" + AWSDynamoDBGlobalSecondaryIndexesKey = attribute.Key("aws.dynamodb.global_secondary_indexes") + + // AWSDynamoDBIndexNameKey is the attribute Key conforming to the + // "aws.dynamodb.index_name" semantic conventions. It represents the value of + // the `IndexName` request parameter. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "name_to_group" + AWSDynamoDBIndexNameKey = attribute.Key("aws.dynamodb.index_name") + + // AWSDynamoDBItemCollectionMetricsKey is the attribute Key conforming to the + // "aws.dynamodb.item_collection_metrics" semantic conventions. It represents + // the JSON-serialized value of the `ItemCollectionMetrics` response field. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "string" : [ { "ItemCollectionKey": { "string" : { "B": blob, + // "BOOL": boolean, "BS": [ blob ], "L": [ "AttributeValue" ], "M": { "string" : + // "AttributeValue" }, "N": "string", "NS": [ "string" ], "NULL": boolean, "S": + // "string", "SS": [ "string" ] } }, "SizeEstimateRangeGB": [ number ] } ] }" + AWSDynamoDBItemCollectionMetricsKey = attribute.Key("aws.dynamodb.item_collection_metrics") + + // AWSDynamoDBLimitKey is the attribute Key conforming to the + // "aws.dynamodb.limit" semantic conventions. It represents the value of the + // `Limit` request parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 10 + AWSDynamoDBLimitKey = attribute.Key("aws.dynamodb.limit") + + // AWSDynamoDBLocalSecondaryIndexesKey is the attribute Key conforming to the + // "aws.dynamodb.local_secondary_indexes" semantic conventions. It represents + // the JSON-serialized value of each item of the `LocalSecondaryIndexes` request + // field. 
+ // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "{ "IndexArn": "string", "IndexName": "string", "IndexSizeBytes": + // number, "ItemCount": number, "KeySchema": [ { "AttributeName": "string", + // "KeyType": "string" } ], "Projection": { "NonKeyAttributes": [ "string" ], + // "ProjectionType": "string" } }" + AWSDynamoDBLocalSecondaryIndexesKey = attribute.Key("aws.dynamodb.local_secondary_indexes") + + // AWSDynamoDBProjectionKey is the attribute Key conforming to the + // "aws.dynamodb.projection" semantic conventions. It represents the value of + // the `ProjectionExpression` request parameter. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Title", "Title, Price, Color", "Title, Description, RelatedItems, + // ProductReviews" + AWSDynamoDBProjectionKey = attribute.Key("aws.dynamodb.projection") + + // AWSDynamoDBProvisionedReadCapacityKey is the attribute Key conforming to the + // "aws.dynamodb.provisioned_read_capacity" semantic conventions. It represents + // the value of the `ProvisionedThroughput.ReadCapacityUnits` request parameter. + // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1.0, 2.0 + AWSDynamoDBProvisionedReadCapacityKey = attribute.Key("aws.dynamodb.provisioned_read_capacity") + + // AWSDynamoDBProvisionedWriteCapacityKey is the attribute Key conforming to the + // "aws.dynamodb.provisioned_write_capacity" semantic conventions. It represents + // the value of the `ProvisionedThroughput.WriteCapacityUnits` request + // parameter. + // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1.0, 2.0 + AWSDynamoDBProvisionedWriteCapacityKey = attribute.Key("aws.dynamodb.provisioned_write_capacity") + + // AWSDynamoDBScanForwardKey is the attribute Key conforming to the + // "aws.dynamodb.scan_forward" semantic conventions. It represents the value of + // the `ScanIndexForward` request parameter. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + AWSDynamoDBScanForwardKey = attribute.Key("aws.dynamodb.scan_forward") + + // AWSDynamoDBScannedCountKey is the attribute Key conforming to the + // "aws.dynamodb.scanned_count" semantic conventions. It represents the value of + // the `ScannedCount` response parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 50 + AWSDynamoDBScannedCountKey = attribute.Key("aws.dynamodb.scanned_count") + + // AWSDynamoDBSegmentKey is the attribute Key conforming to the + // "aws.dynamodb.segment" semantic conventions. It represents the value of the + // `Segment` request parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 10 + AWSDynamoDBSegmentKey = attribute.Key("aws.dynamodb.segment") + + // AWSDynamoDBSelectKey is the attribute Key conforming to the + // "aws.dynamodb.select" semantic conventions. It represents the value of the + // `Select` request parameter. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "ALL_ATTRIBUTES", "COUNT" + AWSDynamoDBSelectKey = attribute.Key("aws.dynamodb.select") + + // AWSDynamoDBTableCountKey is the attribute Key conforming to the + // "aws.dynamodb.table_count" semantic conventions. 
It represents the number of + // items in the `TableNames` response parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 20 + AWSDynamoDBTableCountKey = attribute.Key("aws.dynamodb.table_count") + + // AWSDynamoDBTableNamesKey is the attribute Key conforming to the + // "aws.dynamodb.table_names" semantic conventions. It represents the keys in + // the `RequestItems` object field. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Users", "Cats" + AWSDynamoDBTableNamesKey = attribute.Key("aws.dynamodb.table_names") + + // AWSDynamoDBTotalSegmentsKey is the attribute Key conforming to the + // "aws.dynamodb.total_segments" semantic conventions. It represents the value + // of the `TotalSegments` request parameter. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 100 + AWSDynamoDBTotalSegmentsKey = attribute.Key("aws.dynamodb.total_segments") + + // AWSECSClusterARNKey is the attribute Key conforming to the + // "aws.ecs.cluster.arn" semantic conventions. It represents the ARN of an + // [ECS cluster]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:ecs:us-west-2:123456789123:cluster/my-cluster" + // + // [ECS cluster]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/clusters.html + AWSECSClusterARNKey = attribute.Key("aws.ecs.cluster.arn") + + // AWSECSContainerARNKey is the attribute Key conforming to the + // "aws.ecs.container.arn" semantic conventions. It represents the Amazon + // Resource Name (ARN) of an [ECS container instance]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "arn:aws:ecs:us-west-1:123456789123:container/32624152-9086-4f0e-acae-1a75b14fe4d9" + // + // [ECS container instance]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ECS_instances.html + AWSECSContainerARNKey = attribute.Key("aws.ecs.container.arn") + + // AWSECSLaunchtypeKey is the attribute Key conforming to the + // "aws.ecs.launchtype" semantic conventions. It represents the [launch type] + // for an ECS task. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // + // [launch type]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/launch_types.html + AWSECSLaunchtypeKey = attribute.Key("aws.ecs.launchtype") + + // AWSECSTaskARNKey is the attribute Key conforming to the "aws.ecs.task.arn" + // semantic conventions. It represents the ARN of a running [ECS task]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "arn:aws:ecs:us-west-1:123456789123:task/10838bed-421f-43ef-870a-f43feacbbb5b", + // "arn:aws:ecs:us-west-1:123456789123:task/my-cluster/task-id/23ebb8ac-c18f-46c6-8bbe-d55d0e37cfbd" + // + // [ECS task]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs-account-settings.html#ecs-resource-ids + AWSECSTaskARNKey = attribute.Key("aws.ecs.task.arn") + + // AWSECSTaskFamilyKey is the attribute Key conforming to the + // "aws.ecs.task.family" semantic conventions. It represents the family name of + // the [ECS task definition] used to create the ECS task. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry-family" + // + // [ECS task definition]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definitions.html + AWSECSTaskFamilyKey = attribute.Key("aws.ecs.task.family") + + // AWSECSTaskIDKey is the attribute Key conforming to the "aws.ecs.task.id" + // semantic conventions. It represents the ID of a running ECS task. The ID MUST + // be extracted from `task.arn`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "10838bed-421f-43ef-870a-f43feacbbb5b", + // "23ebb8ac-c18f-46c6-8bbe-d55d0e37cfbd" + AWSECSTaskIDKey = attribute.Key("aws.ecs.task.id") + + // AWSECSTaskRevisionKey is the attribute Key conforming to the + // "aws.ecs.task.revision" semantic conventions. It represents the revision for + // the task definition used to create the ECS task. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "8", "26" + AWSECSTaskRevisionKey = attribute.Key("aws.ecs.task.revision") + + // AWSEKSClusterARNKey is the attribute Key conforming to the + // "aws.eks.cluster.arn" semantic conventions. It represents the ARN of an EKS + // cluster. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:ecs:us-west-2:123456789123:cluster/my-cluster" + AWSEKSClusterARNKey = attribute.Key("aws.eks.cluster.arn") + + // AWSExtendedRequestIDKey is the attribute Key conforming to the + // "aws.extended_request_id" semantic conventions. It represents the AWS + // extended request ID as returned in the response header `x-amz-id-2`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "wzHcyEWfmOGDIE5QOhTAqFDoDWP3y8IUvpNINCwL9N4TEHbUw0/gZJ+VZTmCNCWR7fezEN3eCiQ=" + AWSExtendedRequestIDKey = attribute.Key("aws.extended_request_id") + + // AWSKinesisStreamNameKey is the attribute Key conforming to the + // "aws.kinesis.stream_name" semantic conventions. It represents the name of the + // AWS Kinesis [stream] the request refers to. Corresponds to the + // `--stream-name` parameter of the Kinesis [describe-stream] operation. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "some-stream-name" + // + // [stream]: https://docs.aws.amazon.com/streams/latest/dev/introduction.html + // [describe-stream]: https://docs.aws.amazon.com/cli/latest/reference/kinesis/describe-stream.html + AWSKinesisStreamNameKey = attribute.Key("aws.kinesis.stream_name") + + // AWSLambdaInvokedARNKey is the attribute Key conforming to the + // "aws.lambda.invoked_arn" semantic conventions. It represents the full invoked + // ARN as provided on the `Context` passed to the function ( + // `Lambda-Runtime-Invoked-Function-Arn` header on the + // `/runtime/invocation/next` applicable). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:lambda:us-east-1:123456:function:myfunction:myalias" + // Note: This may be different from `cloud.resource_id` if an alias is involved. + AWSLambdaInvokedARNKey = attribute.Key("aws.lambda.invoked_arn") + + // AWSLambdaResourceMappingIDKey is the attribute Key conforming to the + // "aws.lambda.resource_mapping.id" semantic conventions. It represents the UUID + // of the [AWS Lambda EvenSource Mapping]. 
An event source is mapped to a lambda + // function. It's contents are read by Lambda and used to trigger a function. + // This isn't available in the lambda execution context or the lambda runtime + // environtment. This is going to be populated by the AWS SDK for each language + // when that UUID is present. Some of these operations are + // Create/Delete/Get/List/Update EventSourceMapping. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "587ad24b-03b9-4413-8202-bbd56b36e5b7" + // + // [AWS Lambda EvenSource Mapping]: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lambda-eventsourcemapping.html + AWSLambdaResourceMappingIDKey = attribute.Key("aws.lambda.resource_mapping.id") + + // AWSLogGroupARNsKey is the attribute Key conforming to the + // "aws.log.group.arns" semantic conventions. It represents the Amazon Resource + // Name(s) (ARN) of the AWS log group(s). + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:logs:us-west-1:123456789012:log-group:/aws/my/group:*" + // Note: See the [log group ARN format documentation]. + // + // [log group ARN format documentation]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/iam-access-control-overview-cwl.html#CWL_ARN_Format + AWSLogGroupARNsKey = attribute.Key("aws.log.group.arns") + + // AWSLogGroupNamesKey is the attribute Key conforming to the + // "aws.log.group.names" semantic conventions. It represents the name(s) of the + // AWS log group(s) an application is writing to. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/aws/lambda/my-function", "opentelemetry-service" + // Note: Multiple log groups must be supported for cases like multi-container + // applications, where a single application has sidecar containers, and each + // write to their own log group. + AWSLogGroupNamesKey = attribute.Key("aws.log.group.names") + + // AWSLogStreamARNsKey is the attribute Key conforming to the + // "aws.log.stream.arns" semantic conventions. It represents the ARN(s) of the + // AWS log stream(s). + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "arn:aws:logs:us-west-1:123456789012:log-group:/aws/my/group:log-stream:logs/main/10838bed-421f-43ef-870a-f43feacbbb5b" + // Note: See the [log stream ARN format documentation]. One log group can + // contain several log streams, so these ARNs necessarily identify both a log + // group and a log stream. + // + // [log stream ARN format documentation]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/iam-access-control-overview-cwl.html#CWL_ARN_Format + AWSLogStreamARNsKey = attribute.Key("aws.log.stream.arns") + + // AWSLogStreamNamesKey is the attribute Key conforming to the + // "aws.log.stream.names" semantic conventions. It represents the name(s) of the + // AWS log stream(s) an application is writing to. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "logs/main/10838bed-421f-43ef-870a-f43feacbbb5b" + AWSLogStreamNamesKey = attribute.Key("aws.log.stream.names") + + // AWSRequestIDKey is the attribute Key conforming to the "aws.request_id" + // semantic conventions. It represents the AWS request ID as returned in the + // response headers `x-amzn-requestid`, `x-amzn-request-id` or + // `x-amz-request-id`. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "79b9da39-b7ae-508a-a6bc-864b2829c622", "C9ER4AJX75574TDJ" + AWSRequestIDKey = attribute.Key("aws.request_id") + + // AWSS3BucketKey is the attribute Key conforming to the "aws.s3.bucket" + // semantic conventions. It represents the S3 bucket name the request refers to. + // Corresponds to the `--bucket` parameter of the [S3 API] operations. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "some-bucket-name" + // Note: The `bucket` attribute is applicable to all S3 operations that + // reference a bucket, i.e. that require the bucket name as a mandatory + // parameter. + // This applies to almost all S3 operations except `list-buckets`. + // + // [S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/index.html + AWSS3BucketKey = attribute.Key("aws.s3.bucket") + + // AWSS3CopySourceKey is the attribute Key conforming to the + // "aws.s3.copy_source" semantic conventions. It represents the source object + // (in the form `bucket`/`key`) for the copy operation. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "someFile.yml" + // Note: The `copy_source` attribute applies to S3 copy operations and + // corresponds to the `--copy-source` parameter + // of the [copy-object operation within the S3 API]. + // This applies in particular to the following operations: + // + // - [copy-object] + // - [upload-part-copy] + // + // + // [copy-object operation within the S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/copy-object.html + // [copy-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/copy-object.html + // [upload-part-copy]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part-copy.html + AWSS3CopySourceKey = attribute.Key("aws.s3.copy_source") + + // AWSS3DeleteKey is the attribute Key conforming to the "aws.s3.delete" + // semantic conventions. It represents the delete request container that + // specifies the objects to be deleted. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "Objects=[{Key=string,VersionId=string},{Key=string,VersionId=string}],Quiet=boolean" + // Note: The `delete` attribute is only applicable to the [delete-object] + // operation. + // The `delete` attribute corresponds to the `--delete` parameter of the + // [delete-objects operation within the S3 API]. + // + // [delete-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/delete-object.html + // [delete-objects operation within the S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/delete-objects.html + AWSS3DeleteKey = attribute.Key("aws.s3.delete") + + // AWSS3KeyKey is the attribute Key conforming to the "aws.s3.key" semantic + // conventions. It represents the S3 object key the request refers to. + // Corresponds to the `--key` parameter of the [S3 API] operations. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "someFile.yml" + // Note: The `key` attribute is applicable to all object-related S3 operations, + // i.e. that require the object key as a mandatory parameter. 
+ // This applies in particular to the following operations: + // + // - [copy-object] + // - [delete-object] + // - [get-object] + // - [head-object] + // - [put-object] + // - [restore-object] + // - [select-object-content] + // - [abort-multipart-upload] + // - [complete-multipart-upload] + // - [create-multipart-upload] + // - [list-parts] + // - [upload-part] + // - [upload-part-copy] + // + // + // [S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/index.html + // [copy-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/copy-object.html + // [delete-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/delete-object.html + // [get-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/get-object.html + // [head-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/head-object.html + // [put-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/put-object.html + // [restore-object]: https://docs.aws.amazon.com/cli/latest/reference/s3api/restore-object.html + // [select-object-content]: https://docs.aws.amazon.com/cli/latest/reference/s3api/select-object-content.html + // [abort-multipart-upload]: https://docs.aws.amazon.com/cli/latest/reference/s3api/abort-multipart-upload.html + // [complete-multipart-upload]: https://docs.aws.amazon.com/cli/latest/reference/s3api/complete-multipart-upload.html + // [create-multipart-upload]: https://docs.aws.amazon.com/cli/latest/reference/s3api/create-multipart-upload.html + // [list-parts]: https://docs.aws.amazon.com/cli/latest/reference/s3api/list-parts.html + // [upload-part]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part.html + // [upload-part-copy]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part-copy.html + AWSS3KeyKey = attribute.Key("aws.s3.key") + + // AWSS3PartNumberKey is the attribute Key conforming to the + // "aws.s3.part_number" semantic conventions. It represents the part number of + // the part being uploaded in a multipart-upload operation. This is a positive + // integer between 1 and 10,000. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 3456 + // Note: The `part_number` attribute is only applicable to the [upload-part] + // and [upload-part-copy] operations. + // The `part_number` attribute corresponds to the `--part-number` parameter of + // the + // [upload-part operation within the S3 API]. + // + // [upload-part]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part.html + // [upload-part-copy]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part-copy.html + // [upload-part operation within the S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part.html + AWSS3PartNumberKey = attribute.Key("aws.s3.part_number") + + // AWSS3UploadIDKey is the attribute Key conforming to the "aws.s3.upload_id" + // semantic conventions. It represents the upload ID that identifies the + // multipart upload. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "dfRtDYWFbkRONycy.Yxwh66Yjlx.cph0gtNBtJ" + // Note: The `upload_id` attribute applies to S3 multipart-upload operations and + // corresponds to the `--upload-id` parameter + // of the [S3 API] multipart operations. 
+ // This applies in particular to the following operations: + // + // - [abort-multipart-upload] + // - [complete-multipart-upload] + // - [list-parts] + // - [upload-part] + // - [upload-part-copy] + // + // + // [S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/index.html + // [abort-multipart-upload]: https://docs.aws.amazon.com/cli/latest/reference/s3api/abort-multipart-upload.html + // [complete-multipart-upload]: https://docs.aws.amazon.com/cli/latest/reference/s3api/complete-multipart-upload.html + // [list-parts]: https://docs.aws.amazon.com/cli/latest/reference/s3api/list-parts.html + // [upload-part]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part.html + // [upload-part-copy]: https://docs.aws.amazon.com/cli/latest/reference/s3api/upload-part-copy.html + AWSS3UploadIDKey = attribute.Key("aws.s3.upload_id") + + // AWSSecretsmanagerSecretARNKey is the attribute Key conforming to the + // "aws.secretsmanager.secret.arn" semantic conventions. It represents the ARN + // of the Secret stored in the Secrets Mangger. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "arn:aws:secretsmanager:us-east-1:123456789012:secret:SecretName-6RandomCharacters" + AWSSecretsmanagerSecretARNKey = attribute.Key("aws.secretsmanager.secret.arn") + + // AWSSNSTopicARNKey is the attribute Key conforming to the "aws.sns.topic.arn" + // semantic conventions. It represents the ARN of the AWS SNS Topic. An Amazon + // SNS [topic] is a logical access point that acts as a communication channel. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:sns:us-east-1:123456789012:mystack-mytopic-NZJ5JSMVGFIE" + // + // [topic]: https://docs.aws.amazon.com/sns/latest/dg/sns-create-topic.html + AWSSNSTopicARNKey = attribute.Key("aws.sns.topic.arn") + + // AWSSQSQueueURLKey is the attribute Key conforming to the "aws.sqs.queue.url" + // semantic conventions. It represents the URL of the AWS SQS Queue. It's a + // unique identifier for a queue in Amazon Simple Queue Service (SQS) and is + // used to access the queue and perform actions on it. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "https://sqs.us-east-1.amazonaws.com/123456789012/MyQueue" + AWSSQSQueueURLKey = attribute.Key("aws.sqs.queue.url") + + // AWSStepFunctionsActivityARNKey is the attribute Key conforming to the + // "aws.step_functions.activity.arn" semantic conventions. It represents the ARN + // of the AWS Step Functions Activity. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:states:us-east-1:123456789012:activity:get-greeting" + AWSStepFunctionsActivityARNKey = attribute.Key("aws.step_functions.activity.arn") + + // AWSStepFunctionsStateMachineARNKey is the attribute Key conforming to the + // "aws.step_functions.state_machine.arn" semantic conventions. It represents + // the ARN of the AWS Step Functions State Machine. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "arn:aws:states:us-east-1:123456789012:stateMachine:myStateMachine:1" + AWSStepFunctionsStateMachineARNKey = attribute.Key("aws.step_functions.state_machine.arn") +) + +// AWSBedrockGuardrailID returns an attribute KeyValue conforming to the +// "aws.bedrock.guardrail.id" semantic conventions. 
It represents the unique +// identifier of the AWS Bedrock Guardrail. A [guardrail] helps safeguard and +// prevent unwanted behavior from model responses or user messages. +// +// [guardrail]: https://docs.aws.amazon.com/bedrock/latest/userguide/guardrails.html +func AWSBedrockGuardrailID(val string) attribute.KeyValue { + return AWSBedrockGuardrailIDKey.String(val) +} + +// AWSBedrockKnowledgeBaseID returns an attribute KeyValue conforming to the +// "aws.bedrock.knowledge_base.id" semantic conventions. It represents the unique +// identifier of the AWS Bedrock Knowledge base. A [knowledge base] is a bank of +// information that can be queried by models to generate more relevant responses +// and augment prompts. +// +// [knowledge base]: https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base.html +func AWSBedrockKnowledgeBaseID(val string) attribute.KeyValue { + return AWSBedrockKnowledgeBaseIDKey.String(val) +} + +// AWSDynamoDBAttributeDefinitions returns an attribute KeyValue conforming to +// the "aws.dynamodb.attribute_definitions" semantic conventions. It represents +// the JSON-serialized value of each item in the `AttributeDefinitions` request +// field. +func AWSDynamoDBAttributeDefinitions(val ...string) attribute.KeyValue { + return AWSDynamoDBAttributeDefinitionsKey.StringSlice(val) +} + +// AWSDynamoDBAttributesToGet returns an attribute KeyValue conforming to the +// "aws.dynamodb.attributes_to_get" semantic conventions. It represents the value +// of the `AttributesToGet` request parameter. +func AWSDynamoDBAttributesToGet(val ...string) attribute.KeyValue { + return AWSDynamoDBAttributesToGetKey.StringSlice(val) +} + +// AWSDynamoDBConsistentRead returns an attribute KeyValue conforming to the +// "aws.dynamodb.consistent_read" semantic conventions. It represents the value +// of the `ConsistentRead` request parameter. +func AWSDynamoDBConsistentRead(val bool) attribute.KeyValue { + return AWSDynamoDBConsistentReadKey.Bool(val) +} + +// AWSDynamoDBConsumedCapacity returns an attribute KeyValue conforming to the +// "aws.dynamodb.consumed_capacity" semantic conventions. It represents the +// JSON-serialized value of each item in the `ConsumedCapacity` response field. +func AWSDynamoDBConsumedCapacity(val ...string) attribute.KeyValue { + return AWSDynamoDBConsumedCapacityKey.StringSlice(val) +} + +// AWSDynamoDBCount returns an attribute KeyValue conforming to the +// "aws.dynamodb.count" semantic conventions. It represents the value of the +// `Count` response parameter. +func AWSDynamoDBCount(val int) attribute.KeyValue { + return AWSDynamoDBCountKey.Int(val) +} + +// AWSDynamoDBExclusiveStartTable returns an attribute KeyValue conforming to the +// "aws.dynamodb.exclusive_start_table" semantic conventions. It represents the +// value of the `ExclusiveStartTableName` request parameter. +func AWSDynamoDBExclusiveStartTable(val string) attribute.KeyValue { + return AWSDynamoDBExclusiveStartTableKey.String(val) +} + +// AWSDynamoDBGlobalSecondaryIndexUpdates returns an attribute KeyValue +// conforming to the "aws.dynamodb.global_secondary_index_updates" semantic +// conventions. It represents the JSON-serialized value of each item in the +// `GlobalSecondaryIndexUpdates` request field. 
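+//
+// A minimal sketch of attaching this attribute, assuming a trace.Span named
+// "span" is in scope and reusing an abbreviated form of the JSON shape shown
+// in the example above:
+//
+//	span.SetAttributes(AWSDynamoDBGlobalSecondaryIndexUpdates(
+//		`{"Create": {"IndexName": "name_to_group"}}`,
+//	))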
+func AWSDynamoDBGlobalSecondaryIndexUpdates(val ...string) attribute.KeyValue { + return AWSDynamoDBGlobalSecondaryIndexUpdatesKey.StringSlice(val) +} + +// AWSDynamoDBGlobalSecondaryIndexes returns an attribute KeyValue conforming to +// the "aws.dynamodb.global_secondary_indexes" semantic conventions. It +// represents the JSON-serialized value of each item of the +// `GlobalSecondaryIndexes` request field. +func AWSDynamoDBGlobalSecondaryIndexes(val ...string) attribute.KeyValue { + return AWSDynamoDBGlobalSecondaryIndexesKey.StringSlice(val) +} + +// AWSDynamoDBIndexName returns an attribute KeyValue conforming to the +// "aws.dynamodb.index_name" semantic conventions. It represents the value of the +// `IndexName` request parameter. +func AWSDynamoDBIndexName(val string) attribute.KeyValue { + return AWSDynamoDBIndexNameKey.String(val) +} + +// AWSDynamoDBItemCollectionMetrics returns an attribute KeyValue conforming to +// the "aws.dynamodb.item_collection_metrics" semantic conventions. It represents +// the JSON-serialized value of the `ItemCollectionMetrics` response field. +func AWSDynamoDBItemCollectionMetrics(val string) attribute.KeyValue { + return AWSDynamoDBItemCollectionMetricsKey.String(val) +} + +// AWSDynamoDBLimit returns an attribute KeyValue conforming to the +// "aws.dynamodb.limit" semantic conventions. It represents the value of the +// `Limit` request parameter. +func AWSDynamoDBLimit(val int) attribute.KeyValue { + return AWSDynamoDBLimitKey.Int(val) +} + +// AWSDynamoDBLocalSecondaryIndexes returns an attribute KeyValue conforming to +// the "aws.dynamodb.local_secondary_indexes" semantic conventions. It represents +// the JSON-serialized value of each item of the `LocalSecondaryIndexes` request +// field. +func AWSDynamoDBLocalSecondaryIndexes(val ...string) attribute.KeyValue { + return AWSDynamoDBLocalSecondaryIndexesKey.StringSlice(val) +} + +// AWSDynamoDBProjection returns an attribute KeyValue conforming to the +// "aws.dynamodb.projection" semantic conventions. It represents the value of the +// `ProjectionExpression` request parameter. +func AWSDynamoDBProjection(val string) attribute.KeyValue { + return AWSDynamoDBProjectionKey.String(val) +} + +// AWSDynamoDBProvisionedReadCapacity returns an attribute KeyValue conforming to +// the "aws.dynamodb.provisioned_read_capacity" semantic conventions. It +// represents the value of the `ProvisionedThroughput.ReadCapacityUnits` request +// parameter. +func AWSDynamoDBProvisionedReadCapacity(val float64) attribute.KeyValue { + return AWSDynamoDBProvisionedReadCapacityKey.Float64(val) +} + +// AWSDynamoDBProvisionedWriteCapacity returns an attribute KeyValue conforming +// to the "aws.dynamodb.provisioned_write_capacity" semantic conventions. It +// represents the value of the `ProvisionedThroughput.WriteCapacityUnits` request +// parameter. +func AWSDynamoDBProvisionedWriteCapacity(val float64) attribute.KeyValue { + return AWSDynamoDBProvisionedWriteCapacityKey.Float64(val) +} + +// AWSDynamoDBScanForward returns an attribute KeyValue conforming to the +// "aws.dynamodb.scan_forward" semantic conventions. It represents the value of +// the `ScanIndexForward` request parameter. +func AWSDynamoDBScanForward(val bool) attribute.KeyValue { + return AWSDynamoDBScanForwardKey.Bool(val) +} + +// AWSDynamoDBScannedCount returns an attribute KeyValue conforming to the +// "aws.dynamodb.scanned_count" semantic conventions. It represents the value of +// the `ScannedCount` response parameter. 
+func AWSDynamoDBScannedCount(val int) attribute.KeyValue { + return AWSDynamoDBScannedCountKey.Int(val) +} + +// AWSDynamoDBSegment returns an attribute KeyValue conforming to the +// "aws.dynamodb.segment" semantic conventions. It represents the value of the +// `Segment` request parameter. +func AWSDynamoDBSegment(val int) attribute.KeyValue { + return AWSDynamoDBSegmentKey.Int(val) +} + +// AWSDynamoDBSelect returns an attribute KeyValue conforming to the +// "aws.dynamodb.select" semantic conventions. It represents the value of the +// `Select` request parameter. +func AWSDynamoDBSelect(val string) attribute.KeyValue { + return AWSDynamoDBSelectKey.String(val) +} + +// AWSDynamoDBTableCount returns an attribute KeyValue conforming to the +// "aws.dynamodb.table_count" semantic conventions. It represents the number of +// items in the `TableNames` response parameter. +func AWSDynamoDBTableCount(val int) attribute.KeyValue { + return AWSDynamoDBTableCountKey.Int(val) +} + +// AWSDynamoDBTableNames returns an attribute KeyValue conforming to the +// "aws.dynamodb.table_names" semantic conventions. It represents the keys in the +// `RequestItems` object field. +func AWSDynamoDBTableNames(val ...string) attribute.KeyValue { + return AWSDynamoDBTableNamesKey.StringSlice(val) +} + +// AWSDynamoDBTotalSegments returns an attribute KeyValue conforming to the +// "aws.dynamodb.total_segments" semantic conventions. It represents the value of +// the `TotalSegments` request parameter. +func AWSDynamoDBTotalSegments(val int) attribute.KeyValue { + return AWSDynamoDBTotalSegmentsKey.Int(val) +} + +// AWSECSClusterARN returns an attribute KeyValue conforming to the +// "aws.ecs.cluster.arn" semantic conventions. It represents the ARN of an +// [ECS cluster]. +// +// [ECS cluster]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/clusters.html +func AWSECSClusterARN(val string) attribute.KeyValue { + return AWSECSClusterARNKey.String(val) +} + +// AWSECSContainerARN returns an attribute KeyValue conforming to the +// "aws.ecs.container.arn" semantic conventions. It represents the Amazon +// Resource Name (ARN) of an [ECS container instance]. +// +// [ECS container instance]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ECS_instances.html +func AWSECSContainerARN(val string) attribute.KeyValue { + return AWSECSContainerARNKey.String(val) +} + +// AWSECSTaskARN returns an attribute KeyValue conforming to the +// "aws.ecs.task.arn" semantic conventions. It represents the ARN of a running +// [ECS task]. +// +// [ECS task]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs-account-settings.html#ecs-resource-ids +func AWSECSTaskARN(val string) attribute.KeyValue { + return AWSECSTaskARNKey.String(val) +} + +// AWSECSTaskFamily returns an attribute KeyValue conforming to the +// "aws.ecs.task.family" semantic conventions. It represents the family name of +// the [ECS task definition] used to create the ECS task. +// +// [ECS task definition]: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definitions.html +func AWSECSTaskFamily(val string) attribute.KeyValue { + return AWSECSTaskFamilyKey.String(val) +} + +// AWSECSTaskID returns an attribute KeyValue conforming to the "aws.ecs.task.id" +// semantic conventions. It represents the ID of a running ECS task. The ID MUST +// be extracted from `task.arn`. 
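+//
+// Illustrative usage, assuming a trace.Span named "span" and an ID already
+// extracted from the task ARN:
+//
+//	span.SetAttributes(
+//		AWSECSTaskARN("arn:aws:ecs:us-west-1:123456789123:task/10838bed-421f-43ef-870a-f43feacbbb5b"),
+//		AWSECSTaskID("10838bed-421f-43ef-870a-f43feacbbb5b"),
+//	)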
+func AWSECSTaskID(val string) attribute.KeyValue { + return AWSECSTaskIDKey.String(val) +} + +// AWSECSTaskRevision returns an attribute KeyValue conforming to the +// "aws.ecs.task.revision" semantic conventions. It represents the revision for +// the task definition used to create the ECS task. +func AWSECSTaskRevision(val string) attribute.KeyValue { + return AWSECSTaskRevisionKey.String(val) +} + +// AWSEKSClusterARN returns an attribute KeyValue conforming to the +// "aws.eks.cluster.arn" semantic conventions. It represents the ARN of an EKS +// cluster. +func AWSEKSClusterARN(val string) attribute.KeyValue { + return AWSEKSClusterARNKey.String(val) +} + +// AWSExtendedRequestID returns an attribute KeyValue conforming to the +// "aws.extended_request_id" semantic conventions. It represents the AWS extended +// request ID as returned in the response header `x-amz-id-2`. +func AWSExtendedRequestID(val string) attribute.KeyValue { + return AWSExtendedRequestIDKey.String(val) +} + +// AWSKinesisStreamName returns an attribute KeyValue conforming to the +// "aws.kinesis.stream_name" semantic conventions. It represents the name of the +// AWS Kinesis [stream] the request refers to. Corresponds to the `--stream-name` +// parameter of the Kinesis [describe-stream] operation. +// +// [stream]: https://docs.aws.amazon.com/streams/latest/dev/introduction.html +// [describe-stream]: https://docs.aws.amazon.com/cli/latest/reference/kinesis/describe-stream.html +func AWSKinesisStreamName(val string) attribute.KeyValue { + return AWSKinesisStreamNameKey.String(val) +} + +// AWSLambdaInvokedARN returns an attribute KeyValue conforming to the +// "aws.lambda.invoked_arn" semantic conventions. It represents the full invoked +// ARN as provided on the `Context` passed to the function ( +// `Lambda-Runtime-Invoked-Function-Arn` header on the `/runtime/invocation/next` +// applicable). +func AWSLambdaInvokedARN(val string) attribute.KeyValue { + return AWSLambdaInvokedARNKey.String(val) +} + +// AWSLambdaResourceMappingID returns an attribute KeyValue conforming to the +// "aws.lambda.resource_mapping.id" semantic conventions. It represents the UUID +// of the [AWS Lambda EvenSource Mapping]. An event source is mapped to a lambda +// function. It's contents are read by Lambda and used to trigger a function. +// This isn't available in the lambda execution context or the lambda runtime +// environtment. This is going to be populated by the AWS SDK for each language +// when that UUID is present. Some of these operations are +// Create/Delete/Get/List/Update EventSourceMapping. +// +// [AWS Lambda EvenSource Mapping]: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lambda-eventsourcemapping.html +func AWSLambdaResourceMappingID(val string) attribute.KeyValue { + return AWSLambdaResourceMappingIDKey.String(val) +} + +// AWSLogGroupARNs returns an attribute KeyValue conforming to the +// "aws.log.group.arns" semantic conventions. It represents the Amazon Resource +// Name(s) (ARN) of the AWS log group(s). +func AWSLogGroupARNs(val ...string) attribute.KeyValue { + return AWSLogGroupARNsKey.StringSlice(val) +} + +// AWSLogGroupNames returns an attribute KeyValue conforming to the +// "aws.log.group.names" semantic conventions. It represents the name(s) of the +// AWS log group(s) an application is writing to. 
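+//
+// Sketch only: log group names are typically recorded as resource-level
+// attributes; assuming the example values above, they could be collected as:
+//
+//	attrs := []attribute.KeyValue{
+//		AWSLogGroupNames("/aws/lambda/my-function", "opentelemetry-service"),
+//	}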
+func AWSLogGroupNames(val ...string) attribute.KeyValue { + return AWSLogGroupNamesKey.StringSlice(val) +} + +// AWSLogStreamARNs returns an attribute KeyValue conforming to the +// "aws.log.stream.arns" semantic conventions. It represents the ARN(s) of the +// AWS log stream(s). +func AWSLogStreamARNs(val ...string) attribute.KeyValue { + return AWSLogStreamARNsKey.StringSlice(val) +} + +// AWSLogStreamNames returns an attribute KeyValue conforming to the +// "aws.log.stream.names" semantic conventions. It represents the name(s) of the +// AWS log stream(s) an application is writing to. +func AWSLogStreamNames(val ...string) attribute.KeyValue { + return AWSLogStreamNamesKey.StringSlice(val) +} + +// AWSRequestID returns an attribute KeyValue conforming to the "aws.request_id" +// semantic conventions. It represents the AWS request ID as returned in the +// response headers `x-amzn-requestid`, `x-amzn-request-id` or `x-amz-request-id` +// . +func AWSRequestID(val string) attribute.KeyValue { + return AWSRequestIDKey.String(val) +} + +// AWSS3Bucket returns an attribute KeyValue conforming to the "aws.s3.bucket" +// semantic conventions. It represents the S3 bucket name the request refers to. +// Corresponds to the `--bucket` parameter of the [S3 API] operations. +// +// [S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/index.html +func AWSS3Bucket(val string) attribute.KeyValue { + return AWSS3BucketKey.String(val) +} + +// AWSS3CopySource returns an attribute KeyValue conforming to the +// "aws.s3.copy_source" semantic conventions. It represents the source object (in +// the form `bucket`/`key`) for the copy operation. +func AWSS3CopySource(val string) attribute.KeyValue { + return AWSS3CopySourceKey.String(val) +} + +// AWSS3Delete returns an attribute KeyValue conforming to the "aws.s3.delete" +// semantic conventions. It represents the delete request container that +// specifies the objects to be deleted. +func AWSS3Delete(val string) attribute.KeyValue { + return AWSS3DeleteKey.String(val) +} + +// AWSS3Key returns an attribute KeyValue conforming to the "aws.s3.key" semantic +// conventions. It represents the S3 object key the request refers to. +// Corresponds to the `--key` parameter of the [S3 API] operations. +// +// [S3 API]: https://docs.aws.amazon.com/cli/latest/reference/s3api/index.html +func AWSS3Key(val string) attribute.KeyValue { + return AWSS3KeyKey.String(val) +} + +// AWSS3PartNumber returns an attribute KeyValue conforming to the +// "aws.s3.part_number" semantic conventions. It represents the part number of +// the part being uploaded in a multipart-upload operation. This is a positive +// integer between 1 and 10,000. +func AWSS3PartNumber(val int) attribute.KeyValue { + return AWSS3PartNumberKey.Int(val) +} + +// AWSS3UploadID returns an attribute KeyValue conforming to the +// "aws.s3.upload_id" semantic conventions. It represents the upload ID that +// identifies the multipart upload. +func AWSS3UploadID(val string) attribute.KeyValue { + return AWSS3UploadIDKey.String(val) +} + +// AWSSecretsmanagerSecretARN returns an attribute KeyValue conforming to the +// "aws.secretsmanager.secret.arn" semantic conventions. It represents the ARN of +// the Secret stored in the Secrets Mangger. +func AWSSecretsmanagerSecretARN(val string) attribute.KeyValue { + return AWSSecretsmanagerSecretARNKey.String(val) +} + +// AWSSNSTopicARN returns an attribute KeyValue conforming to the +// "aws.sns.topic.arn" semantic conventions. 
It represents the ARN of the AWS SNS +// Topic. An Amazon SNS [topic] is a logical access point that acts as a +// communication channel. +// +// [topic]: https://docs.aws.amazon.com/sns/latest/dg/sns-create-topic.html +func AWSSNSTopicARN(val string) attribute.KeyValue { + return AWSSNSTopicARNKey.String(val) +} + +// AWSSQSQueueURL returns an attribute KeyValue conforming to the +// "aws.sqs.queue.url" semantic conventions. It represents the URL of the AWS SQS +// Queue. It's a unique identifier for a queue in Amazon Simple Queue Service +// (SQS) and is used to access the queue and perform actions on it. +func AWSSQSQueueURL(val string) attribute.KeyValue { + return AWSSQSQueueURLKey.String(val) +} + +// AWSStepFunctionsActivityARN returns an attribute KeyValue conforming to the +// "aws.step_functions.activity.arn" semantic conventions. It represents the ARN +// of the AWS Step Functions Activity. +func AWSStepFunctionsActivityARN(val string) attribute.KeyValue { + return AWSStepFunctionsActivityARNKey.String(val) +} + +// AWSStepFunctionsStateMachineARN returns an attribute KeyValue conforming to +// the "aws.step_functions.state_machine.arn" semantic conventions. It represents +// the ARN of the AWS Step Functions State Machine. +func AWSStepFunctionsStateMachineARN(val string) attribute.KeyValue { + return AWSStepFunctionsStateMachineARNKey.String(val) +} + +// Enum values for aws.ecs.launchtype +var ( + // ec2 + // Stability: development + AWSECSLaunchtypeEC2 = AWSECSLaunchtypeKey.String("ec2") + // fargate + // Stability: development + AWSECSLaunchtypeFargate = AWSECSLaunchtypeKey.String("fargate") +) + +// Namespace: az +const ( + // AzNamespaceKey is the attribute Key conforming to the "az.namespace" semantic + // conventions. It represents the [Azure Resource Provider Namespace] as + // recognized by the client. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Microsoft.Storage", "Microsoft.KeyVault", "Microsoft.ServiceBus" + // + // [Azure Resource Provider Namespace]: https://learn.microsoft.com/azure/azure-resource-manager/management/azure-services-resource-providers + AzNamespaceKey = attribute.Key("az.namespace") + + // AzServiceRequestIDKey is the attribute Key conforming to the + // "az.service_request_id" semantic conventions. It represents the unique + // identifier of the service request. It's generated by the Azure service and + // returned with the response. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "00000000-0000-0000-0000-000000000000" + AzServiceRequestIDKey = attribute.Key("az.service_request_id") +) + +// AzNamespace returns an attribute KeyValue conforming to the "az.namespace" +// semantic conventions. It represents the [Azure Resource Provider Namespace] as +// recognized by the client. +// +// [Azure Resource Provider Namespace]: https://learn.microsoft.com/azure/azure-resource-manager/management/azure-services-resource-providers +func AzNamespace(val string) attribute.KeyValue { + return AzNamespaceKey.String(val) +} + +// AzServiceRequestID returns an attribute KeyValue conforming to the +// "az.service_request_id" semantic conventions. It represents the unique +// identifier of the service request. It's generated by the Azure service and +// returned with the response. 
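+//
+// Example sketch, assuming a span obtained from go.opentelemetry.io/otel/trace
+// is available as "span" and the ID was read from the Azure response:
+//
+//	span.SetAttributes(
+//		AzNamespace("Microsoft.Storage"),
+//		AzServiceRequestID("00000000-0000-0000-0000-000000000000"),
+//	)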
+func AzServiceRequestID(val string) attribute.KeyValue { + return AzServiceRequestIDKey.String(val) +} + +// Namespace: azure +const ( + // AzureClientIDKey is the attribute Key conforming to the "azure.client.id" + // semantic conventions. It represents the unique identifier of the client + // instance. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "3ba4827d-4422-483f-b59f-85b74211c11d", "storage-client-1" + AzureClientIDKey = attribute.Key("azure.client.id") + + // AzureCosmosDBConnectionModeKey is the attribute Key conforming to the + // "azure.cosmosdb.connection.mode" semantic conventions. It represents the + // cosmos client connection mode. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + AzureCosmosDBConnectionModeKey = attribute.Key("azure.cosmosdb.connection.mode") + + // AzureCosmosDBConsistencyLevelKey is the attribute Key conforming to the + // "azure.cosmosdb.consistency.level" semantic conventions. It represents the + // account or request [consistency level]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Eventual", "ConsistentPrefix", "BoundedStaleness", "Strong", + // "Session" + // + // [consistency level]: https://learn.microsoft.com/azure/cosmos-db/consistency-levels + AzureCosmosDBConsistencyLevelKey = attribute.Key("azure.cosmosdb.consistency.level") + + // AzureCosmosDBOperationContactedRegionsKey is the attribute Key conforming to + // the "azure.cosmosdb.operation.contacted_regions" semantic conventions. It + // represents the list of regions contacted during operation in the order that + // they were contacted. If there is more than one region listed, it indicates + // that the operation was performed on multiple regions i.e. cross-regional + // call. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "North Central US", "Australia East", "Australia Southeast" + // Note: Region name matches the format of `displayName` in [Azure Location API] + // + // [Azure Location API]: https://learn.microsoft.com/rest/api/subscription/subscriptions/list-locations?view=rest-subscription-2021-10-01&tabs=HTTP#location + AzureCosmosDBOperationContactedRegionsKey = attribute.Key("azure.cosmosdb.operation.contacted_regions") + + // AzureCosmosDBOperationRequestChargeKey is the attribute Key conforming to the + // "azure.cosmosdb.operation.request_charge" semantic conventions. It represents + // the number of request units consumed by the operation. + // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 46.18, 1.0 + AzureCosmosDBOperationRequestChargeKey = attribute.Key("azure.cosmosdb.operation.request_charge") + + // AzureCosmosDBRequestBodySizeKey is the attribute Key conforming to the + // "azure.cosmosdb.request.body.size" semantic conventions. It represents the + // request payload size in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + AzureCosmosDBRequestBodySizeKey = attribute.Key("azure.cosmosdb.request.body.size") + + // AzureCosmosDBResponseSubStatusCodeKey is the attribute Key conforming to the + // "azure.cosmosdb.response.sub_status_code" semantic conventions. It represents + // the cosmos DB sub status code. 
+ // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1000, 1002 + AzureCosmosDBResponseSubStatusCodeKey = attribute.Key("azure.cosmosdb.response.sub_status_code") +) + +// AzureClientID returns an attribute KeyValue conforming to the +// "azure.client.id" semantic conventions. It represents the unique identifier of +// the client instance. +func AzureClientID(val string) attribute.KeyValue { + return AzureClientIDKey.String(val) +} + +// AzureCosmosDBOperationContactedRegions returns an attribute KeyValue +// conforming to the "azure.cosmosdb.operation.contacted_regions" semantic +// conventions. It represents the list of regions contacted during operation in +// the order that they were contacted. If there is more than one region listed, +// it indicates that the operation was performed on multiple regions i.e. +// cross-regional call. +func AzureCosmosDBOperationContactedRegions(val ...string) attribute.KeyValue { + return AzureCosmosDBOperationContactedRegionsKey.StringSlice(val) +} + +// AzureCosmosDBOperationRequestCharge returns an attribute KeyValue conforming +// to the "azure.cosmosdb.operation.request_charge" semantic conventions. It +// represents the number of request units consumed by the operation. +func AzureCosmosDBOperationRequestCharge(val float64) attribute.KeyValue { + return AzureCosmosDBOperationRequestChargeKey.Float64(val) +} + +// AzureCosmosDBRequestBodySize returns an attribute KeyValue conforming to the +// "azure.cosmosdb.request.body.size" semantic conventions. It represents the +// request payload size in bytes. +func AzureCosmosDBRequestBodySize(val int) attribute.KeyValue { + return AzureCosmosDBRequestBodySizeKey.Int(val) +} + +// AzureCosmosDBResponseSubStatusCode returns an attribute KeyValue conforming to +// the "azure.cosmosdb.response.sub_status_code" semantic conventions. It +// represents the cosmos DB sub status code. +func AzureCosmosDBResponseSubStatusCode(val int) attribute.KeyValue { + return AzureCosmosDBResponseSubStatusCodeKey.Int(val) +} + +// Enum values for azure.cosmosdb.connection.mode +var ( + // Gateway (HTTP) connection. + // Stability: development + AzureCosmosDBConnectionModeGateway = AzureCosmosDBConnectionModeKey.String("gateway") + // Direct connection. + // Stability: development + AzureCosmosDBConnectionModeDirect = AzureCosmosDBConnectionModeKey.String("direct") +) + +// Enum values for azure.cosmosdb.consistency.level +var ( + // strong + // Stability: development + AzureCosmosDBConsistencyLevelStrong = AzureCosmosDBConsistencyLevelKey.String("Strong") + // bounded_staleness + // Stability: development + AzureCosmosDBConsistencyLevelBoundedStaleness = AzureCosmosDBConsistencyLevelKey.String("BoundedStaleness") + // session + // Stability: development + AzureCosmosDBConsistencyLevelSession = AzureCosmosDBConsistencyLevelKey.String("Session") + // eventual + // Stability: development + AzureCosmosDBConsistencyLevelEventual = AzureCosmosDBConsistencyLevelKey.String("Eventual") + // consistent_prefix + // Stability: development + AzureCosmosDBConsistencyLevelConsistentPrefix = AzureCosmosDBConsistencyLevelKey.String("ConsistentPrefix") +) + +// Namespace: browser +const ( + // BrowserBrandsKey is the attribute Key conforming to the "browser.brands" + // semantic conventions. It represents the array of brand name and version + // separated by a space. 
+ // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: " Not A;Brand 99", "Chromium 99", "Chrome 99" + // Note: This value is intended to be taken from the [UA client hints API] ( + // `navigator.userAgentData.brands`). + // + // [UA client hints API]: https://wicg.github.io/ua-client-hints/#interface + BrowserBrandsKey = attribute.Key("browser.brands") + + // BrowserLanguageKey is the attribute Key conforming to the "browser.language" + // semantic conventions. It represents the preferred language of the user using + // the browser. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "en", "en-US", "fr", "fr-FR" + // Note: This value is intended to be taken from the Navigator API + // `navigator.language`. + BrowserLanguageKey = attribute.Key("browser.language") + + // BrowserMobileKey is the attribute Key conforming to the "browser.mobile" + // semantic conventions. It represents a boolean that is true if the browser is + // running on a mobile device. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: This value is intended to be taken from the [UA client hints API] ( + // `navigator.userAgentData.mobile`). If unavailable, this attribute SHOULD be + // left unset. + // + // [UA client hints API]: https://wicg.github.io/ua-client-hints/#interface + BrowserMobileKey = attribute.Key("browser.mobile") + + // BrowserPlatformKey is the attribute Key conforming to the "browser.platform" + // semantic conventions. It represents the platform on which the browser is + // running. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Windows", "macOS", "Android" + // Note: This value is intended to be taken from the [UA client hints API] ( + // `navigator.userAgentData.platform`). If unavailable, the legacy + // `navigator.platform` API SHOULD NOT be used instead and this attribute SHOULD + // be left unset in order for the values to be consistent. + // The list of possible values is defined in the + // [W3C User-Agent Client Hints specification]. Note that some (but not all) of + // these values can overlap with values in the + // [`os.type` and `os.name` attributes]. However, for consistency, the values in + // the `browser.platform` attribute should capture the exact value that the user + // agent provides. + // + // [UA client hints API]: https://wicg.github.io/ua-client-hints/#interface + // [W3C User-Agent Client Hints specification]: https://wicg.github.io/ua-client-hints/#sec-ch-ua-platform + // [`os.type` and `os.name` attributes]: ./os.md + BrowserPlatformKey = attribute.Key("browser.platform") +) + +// BrowserBrands returns an attribute KeyValue conforming to the "browser.brands" +// semantic conventions. It represents the array of brand name and version +// separated by a space. +func BrowserBrands(val ...string) attribute.KeyValue { + return BrowserBrandsKey.StringSlice(val) +} + +// BrowserLanguage returns an attribute KeyValue conforming to the +// "browser.language" semantic conventions. It represents the preferred language +// of the user using the browser. +func BrowserLanguage(val string) attribute.KeyValue { + return BrowserLanguageKey.String(val) +} + +// BrowserMobile returns an attribute KeyValue conforming to the "browser.mobile" +// semantic conventions. It represents a boolean that is true if the browser is +// running on a mobile device. 
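+//
+// Illustrative sketch, assuming "span" is a trace.Span in scope and the value
+// was taken from navigator.userAgentData.mobile:
+//
+//	span.SetAttributes(BrowserLanguage("en-US"), BrowserMobile(true))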
+func BrowserMobile(val bool) attribute.KeyValue { + return BrowserMobileKey.Bool(val) +} + +// BrowserPlatform returns an attribute KeyValue conforming to the +// "browser.platform" semantic conventions. It represents the platform on which +// the browser is running. +func BrowserPlatform(val string) attribute.KeyValue { + return BrowserPlatformKey.String(val) +} + +// Namespace: cassandra +const ( + // CassandraConsistencyLevelKey is the attribute Key conforming to the + // "cassandra.consistency.level" semantic conventions. It represents the + // consistency level of the query. Based on consistency values from [CQL]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // + // [CQL]: https://docs.datastax.com/en/cassandra-oss/3.0/cassandra/dml/dmlConfigConsistency.html + CassandraConsistencyLevelKey = attribute.Key("cassandra.consistency.level") + + // CassandraCoordinatorDCKey is the attribute Key conforming to the + // "cassandra.coordinator.dc" semantic conventions. It represents the data + // center of the coordinating node for a query. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: us-west-2 + CassandraCoordinatorDCKey = attribute.Key("cassandra.coordinator.dc") + + // CassandraCoordinatorIDKey is the attribute Key conforming to the + // "cassandra.coordinator.id" semantic conventions. It represents the ID of the + // coordinating node for a query. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: be13faa2-8574-4d71-926d-27f16cf8a7af + CassandraCoordinatorIDKey = attribute.Key("cassandra.coordinator.id") + + // CassandraPageSizeKey is the attribute Key conforming to the + // "cassandra.page.size" semantic conventions. It represents the fetch size used + // for paging, i.e. how many rows will be returned at once. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 5000 + CassandraPageSizeKey = attribute.Key("cassandra.page.size") + + // CassandraQueryIdempotentKey is the attribute Key conforming to the + // "cassandra.query.idempotent" semantic conventions. It represents the whether + // or not the query is idempotent. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + CassandraQueryIdempotentKey = attribute.Key("cassandra.query.idempotent") + + // CassandraSpeculativeExecutionCountKey is the attribute Key conforming to the + // "cassandra.speculative_execution.count" semantic conventions. It represents + // the number of times a query was speculatively executed. Not set or `0` if the + // query was not executed speculatively. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 0, 2 + CassandraSpeculativeExecutionCountKey = attribute.Key("cassandra.speculative_execution.count") +) + +// CassandraCoordinatorDC returns an attribute KeyValue conforming to the +// "cassandra.coordinator.dc" semantic conventions. It represents the data center +// of the coordinating node for a query. +func CassandraCoordinatorDC(val string) attribute.KeyValue { + return CassandraCoordinatorDCKey.String(val) +} + +// CassandraCoordinatorID returns an attribute KeyValue conforming to the +// "cassandra.coordinator.id" semantic conventions. It represents the ID of the +// coordinating node for a query. 
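+//
+// A short usage sketch, assuming "span" is a trace.Span:
+//
+//	span.SetAttributes(
+//		CassandraCoordinatorDC("us-west-2"),
+//		CassandraCoordinatorID("be13faa2-8574-4d71-926d-27f16cf8a7af"),
+//	)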
+func CassandraCoordinatorID(val string) attribute.KeyValue { + return CassandraCoordinatorIDKey.String(val) +} + +// CassandraPageSize returns an attribute KeyValue conforming to the +// "cassandra.page.size" semantic conventions. It represents the fetch size used +// for paging, i.e. how many rows will be returned at once. +func CassandraPageSize(val int) attribute.KeyValue { + return CassandraPageSizeKey.Int(val) +} + +// CassandraQueryIdempotent returns an attribute KeyValue conforming to the +// "cassandra.query.idempotent" semantic conventions. It represents the whether +// or not the query is idempotent. +func CassandraQueryIdempotent(val bool) attribute.KeyValue { + return CassandraQueryIdempotentKey.Bool(val) +} + +// CassandraSpeculativeExecutionCount returns an attribute KeyValue conforming to +// the "cassandra.speculative_execution.count" semantic conventions. It +// represents the number of times a query was speculatively executed. Not set or +// `0` if the query was not executed speculatively. +func CassandraSpeculativeExecutionCount(val int) attribute.KeyValue { + return CassandraSpeculativeExecutionCountKey.Int(val) +} + +// Enum values for cassandra.consistency.level +var ( + // all + // Stability: development + CassandraConsistencyLevelAll = CassandraConsistencyLevelKey.String("all") + // each_quorum + // Stability: development + CassandraConsistencyLevelEachQuorum = CassandraConsistencyLevelKey.String("each_quorum") + // quorum + // Stability: development + CassandraConsistencyLevelQuorum = CassandraConsistencyLevelKey.String("quorum") + // local_quorum + // Stability: development + CassandraConsistencyLevelLocalQuorum = CassandraConsistencyLevelKey.String("local_quorum") + // one + // Stability: development + CassandraConsistencyLevelOne = CassandraConsistencyLevelKey.String("one") + // two + // Stability: development + CassandraConsistencyLevelTwo = CassandraConsistencyLevelKey.String("two") + // three + // Stability: development + CassandraConsistencyLevelThree = CassandraConsistencyLevelKey.String("three") + // local_one + // Stability: development + CassandraConsistencyLevelLocalOne = CassandraConsistencyLevelKey.String("local_one") + // any + // Stability: development + CassandraConsistencyLevelAny = CassandraConsistencyLevelKey.String("any") + // serial + // Stability: development + CassandraConsistencyLevelSerial = CassandraConsistencyLevelKey.String("serial") + // local_serial + // Stability: development + CassandraConsistencyLevelLocalSerial = CassandraConsistencyLevelKey.String("local_serial") +) + +// Namespace: cicd +const ( + // CICDPipelineActionNameKey is the attribute Key conforming to the + // "cicd.pipeline.action.name" semantic conventions. It represents the kind of + // action a pipeline run is performing. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "BUILD", "RUN", "SYNC" + CICDPipelineActionNameKey = attribute.Key("cicd.pipeline.action.name") + + // CICDPipelineNameKey is the attribute Key conforming to the + // "cicd.pipeline.name" semantic conventions. It represents the human readable + // name of the pipeline within a CI/CD system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Build and Test", "Lint", "Deploy Go Project", + // "deploy_to_environment" + CICDPipelineNameKey = attribute.Key("cicd.pipeline.name") + + // CICDPipelineResultKey is the attribute Key conforming to the + // "cicd.pipeline.result" semantic conventions. 
It represents the result of a + // pipeline run. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "success", "failure", "timeout", "skipped" + CICDPipelineResultKey = attribute.Key("cicd.pipeline.result") + + // CICDPipelineRunIDKey is the attribute Key conforming to the + // "cicd.pipeline.run.id" semantic conventions. It represents the unique + // identifier of a pipeline run within a CI/CD system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "120912" + CICDPipelineRunIDKey = attribute.Key("cicd.pipeline.run.id") + + // CICDPipelineRunStateKey is the attribute Key conforming to the + // "cicd.pipeline.run.state" semantic conventions. It represents the pipeline + // run goes through these states during its lifecycle. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "pending", "executing", "finalizing" + CICDPipelineRunStateKey = attribute.Key("cicd.pipeline.run.state") + + // CICDPipelineRunURLFullKey is the attribute Key conforming to the + // "cicd.pipeline.run.url.full" semantic conventions. It represents the [URL] of + // the pipeline run, providing the complete address in order to locate and + // identify the pipeline run. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "https://github.com/open-telemetry/semantic-conventions/actions/runs/9753949763?pr=1075" + // + // [URL]: https://wikipedia.org/wiki/URL + CICDPipelineRunURLFullKey = attribute.Key("cicd.pipeline.run.url.full") + + // CICDPipelineTaskNameKey is the attribute Key conforming to the + // "cicd.pipeline.task.name" semantic conventions. It represents the human + // readable name of a task within a pipeline. Task here most closely aligns with + // a [computing process] in a pipeline. Other terms for tasks include commands, + // steps, and procedures. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Run GoLang Linter", "Go Build", "go-test", "deploy_binary" + // + // [computing process]: https://wikipedia.org/wiki/Pipeline_(computing) + CICDPipelineTaskNameKey = attribute.Key("cicd.pipeline.task.name") + + // CICDPipelineTaskRunIDKey is the attribute Key conforming to the + // "cicd.pipeline.task.run.id" semantic conventions. It represents the unique + // identifier of a task run within a pipeline. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "12097" + CICDPipelineTaskRunIDKey = attribute.Key("cicd.pipeline.task.run.id") + + // CICDPipelineTaskRunResultKey is the attribute Key conforming to the + // "cicd.pipeline.task.run.result" semantic conventions. It represents the + // result of a task run. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "success", "failure", "timeout", "skipped" + CICDPipelineTaskRunResultKey = attribute.Key("cicd.pipeline.task.run.result") + + // CICDPipelineTaskRunURLFullKey is the attribute Key conforming to the + // "cicd.pipeline.task.run.url.full" semantic conventions. It represents the + // [URL] of the pipeline task run, providing the complete address in order to + // locate and identify the pipeline task run. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "https://github.com/open-telemetry/semantic-conventions/actions/runs/9753949763/job/26920038674?pr=1075" + // + // [URL]: https://wikipedia.org/wiki/URL + CICDPipelineTaskRunURLFullKey = attribute.Key("cicd.pipeline.task.run.url.full") + + // CICDPipelineTaskTypeKey is the attribute Key conforming to the + // "cicd.pipeline.task.type" semantic conventions. It represents the type of the + // task within a pipeline. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "build", "test", "deploy" + CICDPipelineTaskTypeKey = attribute.Key("cicd.pipeline.task.type") + + // CICDSystemComponentKey is the attribute Key conforming to the + // "cicd.system.component" semantic conventions. It represents the name of a + // component of the CICD system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "controller", "scheduler", "agent" + CICDSystemComponentKey = attribute.Key("cicd.system.component") + + // CICDWorkerIDKey is the attribute Key conforming to the "cicd.worker.id" + // semantic conventions. It represents the unique identifier of a worker within + // a CICD system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "abc123", "10.0.1.2", "controller" + CICDWorkerIDKey = attribute.Key("cicd.worker.id") + + // CICDWorkerNameKey is the attribute Key conforming to the "cicd.worker.name" + // semantic conventions. It represents the name of a worker within a CICD + // system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "agent-abc", "controller", "Ubuntu LTS" + CICDWorkerNameKey = attribute.Key("cicd.worker.name") + + // CICDWorkerStateKey is the attribute Key conforming to the "cicd.worker.state" + // semantic conventions. It represents the state of a CICD worker / agent. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "idle", "busy", "down" + CICDWorkerStateKey = attribute.Key("cicd.worker.state") + + // CICDWorkerURLFullKey is the attribute Key conforming to the + // "cicd.worker.url.full" semantic conventions. It represents the [URL] of the + // worker, providing the complete address in order to locate and identify the + // worker. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "https://cicd.example.org/worker/abc123" + // + // [URL]: https://wikipedia.org/wiki/URL + CICDWorkerURLFullKey = attribute.Key("cicd.worker.url.full") +) + +// CICDPipelineName returns an attribute KeyValue conforming to the +// "cicd.pipeline.name" semantic conventions. It represents the human readable +// name of the pipeline within a CI/CD system. +func CICDPipelineName(val string) attribute.KeyValue { + return CICDPipelineNameKey.String(val) +} + +// CICDPipelineRunID returns an attribute KeyValue conforming to the +// "cicd.pipeline.run.id" semantic conventions. It represents the unique +// identifier of a pipeline run within a CI/CD system. +func CICDPipelineRunID(val string) attribute.KeyValue { + return CICDPipelineRunIDKey.String(val) +} + +// CICDPipelineRunURLFull returns an attribute KeyValue conforming to the +// "cicd.pipeline.run.url.full" semantic conventions. 
It represents the [URL] of +// the pipeline run, providing the complete address in order to locate and +// identify the pipeline run. +// +// [URL]: https://wikipedia.org/wiki/URL +func CICDPipelineRunURLFull(val string) attribute.KeyValue { + return CICDPipelineRunURLFullKey.String(val) +} + +// CICDPipelineTaskName returns an attribute KeyValue conforming to the +// "cicd.pipeline.task.name" semantic conventions. It represents the human +// readable name of a task within a pipeline. Task here most closely aligns with +// a [computing process] in a pipeline. Other terms for tasks include commands, +// steps, and procedures. +// +// [computing process]: https://wikipedia.org/wiki/Pipeline_(computing) +func CICDPipelineTaskName(val string) attribute.KeyValue { + return CICDPipelineTaskNameKey.String(val) +} + +// CICDPipelineTaskRunID returns an attribute KeyValue conforming to the +// "cicd.pipeline.task.run.id" semantic conventions. It represents the unique +// identifier of a task run within a pipeline. +func CICDPipelineTaskRunID(val string) attribute.KeyValue { + return CICDPipelineTaskRunIDKey.String(val) +} + +// CICDPipelineTaskRunURLFull returns an attribute KeyValue conforming to the +// "cicd.pipeline.task.run.url.full" semantic conventions. It represents the +// [URL] of the pipeline task run, providing the complete address in order to +// locate and identify the pipeline task run. +// +// [URL]: https://wikipedia.org/wiki/URL +func CICDPipelineTaskRunURLFull(val string) attribute.KeyValue { + return CICDPipelineTaskRunURLFullKey.String(val) +} + +// CICDSystemComponent returns an attribute KeyValue conforming to the +// "cicd.system.component" semantic conventions. It represents the name of a +// component of the CICD system. +func CICDSystemComponent(val string) attribute.KeyValue { + return CICDSystemComponentKey.String(val) +} + +// CICDWorkerID returns an attribute KeyValue conforming to the "cicd.worker.id" +// semantic conventions. It represents the unique identifier of a worker within a +// CICD system. +func CICDWorkerID(val string) attribute.KeyValue { + return CICDWorkerIDKey.String(val) +} + +// CICDWorkerName returns an attribute KeyValue conforming to the +// "cicd.worker.name" semantic conventions. It represents the name of a worker +// within a CICD system. +func CICDWorkerName(val string) attribute.KeyValue { + return CICDWorkerNameKey.String(val) +} + +// CICDWorkerURLFull returns an attribute KeyValue conforming to the +// "cicd.worker.url.full" semantic conventions. It represents the [URL] of the +// worker, providing the complete address in order to locate and identify the +// worker. +// +// [URL]: https://wikipedia.org/wiki/URL +func CICDWorkerURLFull(val string) attribute.KeyValue { + return CICDWorkerURLFullKey.String(val) +} + +// Enum values for cicd.pipeline.action.name +var ( + // The pipeline run is executing a build. + // Stability: development + CICDPipelineActionNameBuild = CICDPipelineActionNameKey.String("BUILD") + // The pipeline run is executing. + // Stability: development + CICDPipelineActionNameRun = CICDPipelineActionNameKey.String("RUN") + // The pipeline run is executing a sync. + // Stability: development + CICDPipelineActionNameSync = CICDPipelineActionNameKey.String("SYNC") +) + +// Enum values for cicd.pipeline.result +var ( + // The pipeline run finished successfully. + // Stability: development + CICDPipelineResultSuccess = CICDPipelineResultKey.String("success") + // The pipeline run did not finish successfully, eg. 
due to a compile error or a + // failing test. Such failures are usually detected by non-zero exit codes of + // the tools executed in the pipeline run. + // Stability: development + CICDPipelineResultFailure = CICDPipelineResultKey.String("failure") + // The pipeline run failed due to an error in the CICD system, eg. due to the + // worker being killed. + // Stability: development + CICDPipelineResultError = CICDPipelineResultKey.String("error") + // A timeout caused the pipeline run to be interrupted. + // Stability: development + CICDPipelineResultTimeout = CICDPipelineResultKey.String("timeout") + // The pipeline run was cancelled, eg. by a user manually cancelling the + // pipeline run. + // Stability: development + CICDPipelineResultCancellation = CICDPipelineResultKey.String("cancellation") + // The pipeline run was skipped, eg. due to a precondition not being met. + // Stability: development + CICDPipelineResultSkip = CICDPipelineResultKey.String("skip") +) + +// Enum values for cicd.pipeline.run.state +var ( + // The run pending state spans from the event triggering the pipeline run until + // the execution of the run starts (eg. time spent in a queue, provisioning + // agents, creating run resources). + // + // Stability: development + CICDPipelineRunStatePending = CICDPipelineRunStateKey.String("pending") + // The executing state spans the execution of any run tasks (eg. build, test). + // Stability: development + CICDPipelineRunStateExecuting = CICDPipelineRunStateKey.String("executing") + // The finalizing state spans from when the run has finished executing (eg. + // cleanup of run resources). + // Stability: development + CICDPipelineRunStateFinalizing = CICDPipelineRunStateKey.String("finalizing") +) + +// Enum values for cicd.pipeline.task.run.result +var ( + // The task run finished successfully. + // Stability: development + CICDPipelineTaskRunResultSuccess = CICDPipelineTaskRunResultKey.String("success") + // The task run did not finish successfully, eg. due to a compile error or a + // failing test. Such failures are usually detected by non-zero exit codes of + // the tools executed in the task run. + // Stability: development + CICDPipelineTaskRunResultFailure = CICDPipelineTaskRunResultKey.String("failure") + // The task run failed due to an error in the CICD system, eg. due to the worker + // being killed. + // Stability: development + CICDPipelineTaskRunResultError = CICDPipelineTaskRunResultKey.String("error") + // A timeout caused the task run to be interrupted. + // Stability: development + CICDPipelineTaskRunResultTimeout = CICDPipelineTaskRunResultKey.String("timeout") + // The task run was cancelled, eg. by a user manually cancelling the task run. + // Stability: development + CICDPipelineTaskRunResultCancellation = CICDPipelineTaskRunResultKey.String("cancellation") + // The task run was skipped, eg. due to a precondition not being met. + // Stability: development + CICDPipelineTaskRunResultSkip = CICDPipelineTaskRunResultKey.String("skip") +) + +// Enum values for cicd.pipeline.task.type +var ( + // build + // Stability: development + CICDPipelineTaskTypeBuild = CICDPipelineTaskTypeKey.String("build") + // test + // Stability: development + CICDPipelineTaskTypeTest = CICDPipelineTaskTypeKey.String("test") + // deploy + // Stability: development + CICDPipelineTaskTypeDeploy = CICDPipelineTaskTypeKey.String("deploy") +) + +// Enum values for cicd.worker.state +var ( + // The worker is not performing work for the CICD system. 
It is available to the + // CICD system to perform work on (online / idle). + // Stability: development + CICDWorkerStateAvailable = CICDWorkerStateKey.String("available") + // The worker is performing work for the CICD system. + // Stability: development + CICDWorkerStateBusy = CICDWorkerStateKey.String("busy") + // The worker is not available to the CICD system (disconnected / down). + // Stability: development + CICDWorkerStateOffline = CICDWorkerStateKey.String("offline") +) + +// Namespace: client +const ( + // ClientAddressKey is the attribute Key conforming to the "client.address" + // semantic conventions. It represents the client address - domain name if + // available without reverse DNS lookup; otherwise, IP address or Unix domain + // socket name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "client.example.com", "10.1.2.80", "/tmp/my.sock" + // Note: When observed from the server side, and when communicating through an + // intermediary, `client.address` SHOULD represent the client address behind any + // intermediaries, for example proxies, if it's available. + ClientAddressKey = attribute.Key("client.address") + + // ClientPortKey is the attribute Key conforming to the "client.port" semantic + // conventions. It represents the client port number. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: 65123 + // Note: When observed from the server side, and when communicating through an + // intermediary, `client.port` SHOULD represent the client port behind any + // intermediaries, for example proxies, if it's available. + ClientPortKey = attribute.Key("client.port") +) + +// ClientAddress returns an attribute KeyValue conforming to the "client.address" +// semantic conventions. It represents the client address - domain name if +// available without reverse DNS lookup; otherwise, IP address or Unix domain +// socket name. +func ClientAddress(val string) attribute.KeyValue { + return ClientAddressKey.String(val) +} + +// ClientPort returns an attribute KeyValue conforming to the "client.port" +// semantic conventions. It represents the client port number. +func ClientPort(val int) attribute.KeyValue { + return ClientPortKey.Int(val) +} + +// Namespace: cloud +const ( + // CloudAccountIDKey is the attribute Key conforming to the "cloud.account.id" + // semantic conventions. It represents the cloud account ID the resource is + // assigned to. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "111111111111", "opentelemetry" + CloudAccountIDKey = attribute.Key("cloud.account.id") + + // CloudAvailabilityZoneKey is the attribute Key conforming to the + // "cloud.availability_zone" semantic conventions. It represents the cloud + // regions often have multiple, isolated locations known as zones to increase + // availability. Availability zone represents the zone where the resource is + // running. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "us-east-1c" + // Note: Availability zones are called "zones" on Alibaba Cloud and Google + // Cloud. + CloudAvailabilityZoneKey = attribute.Key("cloud.availability_zone") + + // CloudPlatformKey is the attribute Key conforming to the "cloud.platform" + // semantic conventions. It represents the cloud platform in use. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: The prefix of the service SHOULD match the one specified in + // `cloud.provider`. + CloudPlatformKey = attribute.Key("cloud.platform") + + // CloudProviderKey is the attribute Key conforming to the "cloud.provider" + // semantic conventions. It represents the name of the cloud provider. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + CloudProviderKey = attribute.Key("cloud.provider") + + // CloudRegionKey is the attribute Key conforming to the "cloud.region" semantic + // conventions. It represents the geographical region within a cloud provider. + // When associated with a resource, this attribute specifies the region where + // the resource operates. When calling services or APIs deployed on a cloud, + // this attribute identifies the region where the called destination is + // deployed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "us-central1", "us-east-1" + // Note: Refer to your provider's docs to see the available regions, for example + // [Alibaba Cloud regions], [AWS regions], [Azure regions], + // [Google Cloud regions], or [Tencent Cloud regions]. + // + // [Alibaba Cloud regions]: https://www.alibabacloud.com/help/doc-detail/40654.htm + // [AWS regions]: https://aws.amazon.com/about-aws/global-infrastructure/regions_az/ + // [Azure regions]: https://azure.microsoft.com/global-infrastructure/geographies/ + // [Google Cloud regions]: https://cloud.google.com/about/locations + // [Tencent Cloud regions]: https://www.tencentcloud.com/document/product/213/6091 + CloudRegionKey = attribute.Key("cloud.region") + + // CloudResourceIDKey is the attribute Key conforming to the "cloud.resource_id" + // semantic conventions. It represents the cloud provider-specific native + // identifier of the monitored cloud resource (e.g. an [ARN] on AWS, a + // [fully qualified resource ID] on Azure, a [full resource name] on GCP). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "arn:aws:lambda:REGION:ACCOUNT_ID:function:my-function", + // "//run.googleapis.com/projects/PROJECT_ID/locations/LOCATION_ID/services/SERVICE_ID", + // "/subscriptions//resourceGroups/ + // /providers/Microsoft.Web/sites//functions/" + // Note: On some cloud providers, it may not be possible to determine the full + // ID at startup, + // so it may be necessary to set `cloud.resource_id` as a span attribute + // instead. + // + // The exact value to use for `cloud.resource_id` depends on the cloud provider. + // The following well-known definitions MUST be used if you set this attribute + // and they apply: + // + // - **AWS Lambda:** The function [ARN]. + // Take care not to use the "invoked ARN" directly but replace any + // [alias suffix] + // with the resolved function version, as the same runtime instance may be + // invocable with + // multiple different aliases. + // - **GCP:** The [URI of the resource] + // - **Azure:** The [Fully Qualified Resource ID] of the invoked function, + // *not* the function app, having the form + // + // `/subscriptions//resourceGroups//providers/Microsoft.Web/sites//functions/` + // . + // This means that a span attribute MUST be used, as an Azure function app + // can host multiple functions that would usually share + // a TracerProvider. 
+ // + // + // [ARN]: https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html + // [fully qualified resource ID]: https://learn.microsoft.com/rest/api/resources/resources/get-by-id + // [full resource name]: https://google.aip.dev/122#full-resource-names + // [ARN]: https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html + // [alias suffix]: https://docs.aws.amazon.com/lambda/latest/dg/configuration-aliases.html + // [URI of the resource]: https://cloud.google.com/iam/docs/full-resource-names + // [Fully Qualified Resource ID]: https://docs.microsoft.com/rest/api/resources/resources/get-by-id + CloudResourceIDKey = attribute.Key("cloud.resource_id") +) + +// CloudAccountID returns an attribute KeyValue conforming to the +// "cloud.account.id" semantic conventions. It represents the cloud account ID +// the resource is assigned to. +func CloudAccountID(val string) attribute.KeyValue { + return CloudAccountIDKey.String(val) +} + +// CloudAvailabilityZone returns an attribute KeyValue conforming to the +// "cloud.availability_zone" semantic conventions. It represents the cloud +// regions often have multiple, isolated locations known as zones to increase +// availability. Availability zone represents the zone where the resource is +// running. +func CloudAvailabilityZone(val string) attribute.KeyValue { + return CloudAvailabilityZoneKey.String(val) +} + +// CloudRegion returns an attribute KeyValue conforming to the "cloud.region" +// semantic conventions. It represents the geographical region within a cloud +// provider. When associated with a resource, this attribute specifies the region +// where the resource operates. When calling services or APIs deployed on a +// cloud, this attribute identifies the region where the called destination is +// deployed. +func CloudRegion(val string) attribute.KeyValue { + return CloudRegionKey.String(val) +} + +// CloudResourceID returns an attribute KeyValue conforming to the +// "cloud.resource_id" semantic conventions. It represents the cloud +// provider-specific native identifier of the monitored cloud resource (e.g. an +// [ARN] on AWS, a [fully qualified resource ID] on Azure, a [full resource name] +// on GCP). 
+// +// [ARN]: https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html +// [fully qualified resource ID]: https://learn.microsoft.com/rest/api/resources/resources/get-by-id +// [full resource name]: https://google.aip.dev/122#full-resource-names +func CloudResourceID(val string) attribute.KeyValue { + return CloudResourceIDKey.String(val) +} + +// Enum values for cloud.platform +var ( + // Alibaba Cloud Elastic Compute Service + // Stability: development + CloudPlatformAlibabaCloudECS = CloudPlatformKey.String("alibaba_cloud_ecs") + // Alibaba Cloud Function Compute + // Stability: development + CloudPlatformAlibabaCloudFC = CloudPlatformKey.String("alibaba_cloud_fc") + // Red Hat OpenShift on Alibaba Cloud + // Stability: development + CloudPlatformAlibabaCloudOpenShift = CloudPlatformKey.String("alibaba_cloud_openshift") + // AWS Elastic Compute Cloud + // Stability: development + CloudPlatformAWSEC2 = CloudPlatformKey.String("aws_ec2") + // AWS Elastic Container Service + // Stability: development + CloudPlatformAWSECS = CloudPlatformKey.String("aws_ecs") + // AWS Elastic Kubernetes Service + // Stability: development + CloudPlatformAWSEKS = CloudPlatformKey.String("aws_eks") + // AWS Lambda + // Stability: development + CloudPlatformAWSLambda = CloudPlatformKey.String("aws_lambda") + // AWS Elastic Beanstalk + // Stability: development + CloudPlatformAWSElasticBeanstalk = CloudPlatformKey.String("aws_elastic_beanstalk") + // AWS App Runner + // Stability: development + CloudPlatformAWSAppRunner = CloudPlatformKey.String("aws_app_runner") + // Red Hat OpenShift on AWS (ROSA) + // Stability: development + CloudPlatformAWSOpenShift = CloudPlatformKey.String("aws_openshift") + // Azure Virtual Machines + // Stability: development + CloudPlatformAzureVM = CloudPlatformKey.String("azure_vm") + // Azure Container Apps + // Stability: development + CloudPlatformAzureContainerApps = CloudPlatformKey.String("azure_container_apps") + // Azure Container Instances + // Stability: development + CloudPlatformAzureContainerInstances = CloudPlatformKey.String("azure_container_instances") + // Azure Kubernetes Service + // Stability: development + CloudPlatformAzureAKS = CloudPlatformKey.String("azure_aks") + // Azure Functions + // Stability: development + CloudPlatformAzureFunctions = CloudPlatformKey.String("azure_functions") + // Azure App Service + // Stability: development + CloudPlatformAzureAppService = CloudPlatformKey.String("azure_app_service") + // Azure Red Hat OpenShift + // Stability: development + CloudPlatformAzureOpenShift = CloudPlatformKey.String("azure_openshift") + // Google Bare Metal Solution (BMS) + // Stability: development + CloudPlatformGCPBareMetalSolution = CloudPlatformKey.String("gcp_bare_metal_solution") + // Google Cloud Compute Engine (GCE) + // Stability: development + CloudPlatformGCPComputeEngine = CloudPlatformKey.String("gcp_compute_engine") + // Google Cloud Run + // Stability: development + CloudPlatformGCPCloudRun = CloudPlatformKey.String("gcp_cloud_run") + // Google Cloud Kubernetes Engine (GKE) + // Stability: development + CloudPlatformGCPKubernetesEngine = CloudPlatformKey.String("gcp_kubernetes_engine") + // Google Cloud Functions (GCF) + // Stability: development + CloudPlatformGCPCloudFunctions = CloudPlatformKey.String("gcp_cloud_functions") + // Google Cloud App Engine (GAE) + // Stability: development + CloudPlatformGCPAppEngine = CloudPlatformKey.String("gcp_app_engine") + // Red Hat OpenShift on Google Cloud + // Stability: 
development + CloudPlatformGCPOpenShift = CloudPlatformKey.String("gcp_openshift") + // Red Hat OpenShift on IBM Cloud + // Stability: development + CloudPlatformIBMCloudOpenShift = CloudPlatformKey.String("ibm_cloud_openshift") + // Compute on Oracle Cloud Infrastructure (OCI) + // Stability: development + CloudPlatformOracleCloudCompute = CloudPlatformKey.String("oracle_cloud_compute") + // Kubernetes Engine (OKE) on Oracle Cloud Infrastructure (OCI) + // Stability: development + CloudPlatformOracleCloudOKE = CloudPlatformKey.String("oracle_cloud_oke") + // Tencent Cloud Cloud Virtual Machine (CVM) + // Stability: development + CloudPlatformTencentCloudCVM = CloudPlatformKey.String("tencent_cloud_cvm") + // Tencent Cloud Elastic Kubernetes Service (EKS) + // Stability: development + CloudPlatformTencentCloudEKS = CloudPlatformKey.String("tencent_cloud_eks") + // Tencent Cloud Serverless Cloud Function (SCF) + // Stability: development + CloudPlatformTencentCloudSCF = CloudPlatformKey.String("tencent_cloud_scf") +) + +// Enum values for cloud.provider +var ( + // Alibaba Cloud + // Stability: development + CloudProviderAlibabaCloud = CloudProviderKey.String("alibaba_cloud") + // Amazon Web Services + // Stability: development + CloudProviderAWS = CloudProviderKey.String("aws") + // Microsoft Azure + // Stability: development + CloudProviderAzure = CloudProviderKey.String("azure") + // Google Cloud Platform + // Stability: development + CloudProviderGCP = CloudProviderKey.String("gcp") + // Heroku Platform as a Service + // Stability: development + CloudProviderHeroku = CloudProviderKey.String("heroku") + // IBM Cloud + // Stability: development + CloudProviderIBMCloud = CloudProviderKey.String("ibm_cloud") + // Oracle Cloud Infrastructure (OCI) + // Stability: development + CloudProviderOracleCloud = CloudProviderKey.String("oracle_cloud") + // Tencent Cloud + // Stability: development + CloudProviderTencentCloud = CloudProviderKey.String("tencent_cloud") +) + +// Namespace: cloudevents +const ( + // CloudEventsEventIDKey is the attribute Key conforming to the + // "cloudevents.event_id" semantic conventions. It represents the [event_id] + // uniquely identifies the event. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "123e4567-e89b-12d3-a456-426614174000", "0001" + // + // [event_id]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#id + CloudEventsEventIDKey = attribute.Key("cloudevents.event_id") + + // CloudEventsEventSourceKey is the attribute Key conforming to the + // "cloudevents.event_source" semantic conventions. It represents the [source] + // identifies the context in which an event happened. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "https://github.com/cloudevents", "/cloudevents/spec/pull/123", + // "my-service" + // + // [source]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#source-1 + CloudEventsEventSourceKey = attribute.Key("cloudevents.event_source") + + // CloudEventsEventSpecVersionKey is the attribute Key conforming to the + // "cloudevents.event_spec_version" semantic conventions. It represents the + // [version of the CloudEvents specification] which the event uses. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1.0 + // + // [version of the CloudEvents specification]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#specversion + CloudEventsEventSpecVersionKey = attribute.Key("cloudevents.event_spec_version") + + // CloudEventsEventSubjectKey is the attribute Key conforming to the + // "cloudevents.event_subject" semantic conventions. It represents the [subject] + // of the event in the context of the event producer (identified by source). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: mynewfile.jpg + // + // [subject]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#subject + CloudEventsEventSubjectKey = attribute.Key("cloudevents.event_subject") + + // CloudEventsEventTypeKey is the attribute Key conforming to the + // "cloudevents.event_type" semantic conventions. It represents the [event_type] + // contains a value describing the type of event related to the originating + // occurrence. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "com.github.pull_request.opened", "com.example.object.deleted.v2" + // + // [event_type]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#type + CloudEventsEventTypeKey = attribute.Key("cloudevents.event_type") +) + +// CloudEventsEventID returns an attribute KeyValue conforming to the +// "cloudevents.event_id" semantic conventions. It represents the [event_id] +// uniquely identifies the event. +// +// [event_id]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#id +func CloudEventsEventID(val string) attribute.KeyValue { + return CloudEventsEventIDKey.String(val) +} + +// CloudEventsEventSource returns an attribute KeyValue conforming to the +// "cloudevents.event_source" semantic conventions. It represents the [source] +// identifies the context in which an event happened. +// +// [source]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#source-1 +func CloudEventsEventSource(val string) attribute.KeyValue { + return CloudEventsEventSourceKey.String(val) +} + +// CloudEventsEventSpecVersion returns an attribute KeyValue conforming to the +// "cloudevents.event_spec_version" semantic conventions. It represents the +// [version of the CloudEvents specification] which the event uses. +// +// [version of the CloudEvents specification]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#specversion +func CloudEventsEventSpecVersion(val string) attribute.KeyValue { + return CloudEventsEventSpecVersionKey.String(val) +} + +// CloudEventsEventSubject returns an attribute KeyValue conforming to the +// "cloudevents.event_subject" semantic conventions. It represents the [subject] +// of the event in the context of the event producer (identified by source). +// +// [subject]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#subject +func CloudEventsEventSubject(val string) attribute.KeyValue { + return CloudEventsEventSubjectKey.String(val) +} + +// CloudEventsEventType returns an attribute KeyValue conforming to the +// "cloudevents.event_type" semantic conventions. It represents the [event_type] +// contains a value describing the type of event related to the originating +// occurrence. 
+// +// [event_type]: https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/spec.md#type +func CloudEventsEventType(val string) attribute.KeyValue { + return CloudEventsEventTypeKey.String(val) +} + +// Namespace: cloudfoundry +const ( + // CloudFoundryAppIDKey is the attribute Key conforming to the + // "cloudfoundry.app.id" semantic conventions. It represents the guid of the + // application. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.application_id`. This is the same value as + // reported by `cf app --guid`. + CloudFoundryAppIDKey = attribute.Key("cloudfoundry.app.id") + + // CloudFoundryAppInstanceIDKey is the attribute Key conforming to the + // "cloudfoundry.app.instance.id" semantic conventions. It represents the index + // of the application instance. 0 when just one instance is active. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0", "1" + // Note: CloudFoundry defines the `instance_id` in the [Loggregator v2 envelope] + // . + // It is used for logs and metrics emitted by CloudFoundry. It is + // supposed to contain the application instance index for applications + // deployed on the runtime. + // + // Application instrumentation should use the value from environment + // variable `CF_INSTANCE_INDEX`. + // + // [Loggregator v2 envelope]: https://github.com/cloudfoundry/loggregator-api#v2-envelope + CloudFoundryAppInstanceIDKey = attribute.Key("cloudfoundry.app.instance.id") + + // CloudFoundryAppNameKey is the attribute Key conforming to the + // "cloudfoundry.app.name" semantic conventions. It represents the name of the + // application. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-app-name" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.application_name`. This is the same value + // as reported by `cf apps`. + CloudFoundryAppNameKey = attribute.Key("cloudfoundry.app.name") + + // CloudFoundryOrgIDKey is the attribute Key conforming to the + // "cloudfoundry.org.id" semantic conventions. It represents the guid of the + // CloudFoundry org the application is running in. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.org_id`. This is the same value as + // reported by `cf org --guid`. + CloudFoundryOrgIDKey = attribute.Key("cloudfoundry.org.id") + + // CloudFoundryOrgNameKey is the attribute Key conforming to the + // "cloudfoundry.org.name" semantic conventions. It represents the name of the + // CloudFoundry organization the app is running in. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-org-name" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.org_name`. This is the same value as + // reported by `cf orgs`. + CloudFoundryOrgNameKey = attribute.Key("cloudfoundry.org.name") + + // CloudFoundryProcessIDKey is the attribute Key conforming to the + // "cloudfoundry.process.id" semantic conventions. It represents the UID + // identifying the process. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.process_id`. It is supposed to be equal to + // `VCAP_APPLICATION.app_id` for applications deployed to the runtime. + // For system components, this could be the actual PID. + CloudFoundryProcessIDKey = attribute.Key("cloudfoundry.process.id") + + // CloudFoundryProcessTypeKey is the attribute Key conforming to the + // "cloudfoundry.process.type" semantic conventions. It represents the type of + // process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "web" + // Note: CloudFoundry applications can consist of multiple jobs. Usually the + // main process will be of type `web`. There can be additional background + // tasks or side-cars with different process types. + CloudFoundryProcessTypeKey = attribute.Key("cloudfoundry.process.type") + + // CloudFoundrySpaceIDKey is the attribute Key conforming to the + // "cloudfoundry.space.id" semantic conventions. It represents the guid of the + // CloudFoundry space the application is running in. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.space_id`. This is the same value as + // reported by `cf space --guid`. + CloudFoundrySpaceIDKey = attribute.Key("cloudfoundry.space.id") + + // CloudFoundrySpaceNameKey is the attribute Key conforming to the + // "cloudfoundry.space.name" semantic conventions. It represents the name of the + // CloudFoundry space the application is running in. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-space-name" + // Note: Application instrumentation should use the value from environment + // variable `VCAP_APPLICATION.space_name`. This is the same value as + // reported by `cf spaces`. + CloudFoundrySpaceNameKey = attribute.Key("cloudfoundry.space.name") + + // CloudFoundrySystemIDKey is the attribute Key conforming to the + // "cloudfoundry.system.id" semantic conventions. It represents a guid or + // another name describing the event source. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "cf/gorouter" + // Note: CloudFoundry defines the `source_id` in the [Loggregator v2 envelope]. + // It is used for logs and metrics emitted by CloudFoundry. It is + // supposed to contain the component name, e.g. "gorouter", for + // CloudFoundry components. + // + // When system components are instrumented, values from the + // [Bosh spec] + // should be used. The `system.id` should be set to + // `spec.deployment/spec.name`. + // + // [Loggregator v2 envelope]: https://github.com/cloudfoundry/loggregator-api#v2-envelope + // [Bosh spec]: https://bosh.io/docs/jobs/#properties-spec + CloudFoundrySystemIDKey = attribute.Key("cloudfoundry.system.id") + + // CloudFoundrySystemInstanceIDKey is the attribute Key conforming to the + // "cloudfoundry.system.instance.id" semantic conventions. It represents a guid + // describing the concrete instance of the event source. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: CloudFoundry defines the `instance_id` in the [Loggregator v2 envelope] + // . + // It is used for logs and metrics emitted by CloudFoundry. It is + // supposed to contain the vm id for CloudFoundry components. + // + // When system components are instrumented, values from the + // [Bosh spec] + // should be used. The `system.instance.id` should be set to `spec.id`. + // + // [Loggregator v2 envelope]: https://github.com/cloudfoundry/loggregator-api#v2-envelope + // [Bosh spec]: https://bosh.io/docs/jobs/#properties-spec + CloudFoundrySystemInstanceIDKey = attribute.Key("cloudfoundry.system.instance.id") +) + +// CloudFoundryAppID returns an attribute KeyValue conforming to the +// "cloudfoundry.app.id" semantic conventions. It represents the guid of the +// application. +func CloudFoundryAppID(val string) attribute.KeyValue { + return CloudFoundryAppIDKey.String(val) +} + +// CloudFoundryAppInstanceID returns an attribute KeyValue conforming to the +// "cloudfoundry.app.instance.id" semantic conventions. It represents the index +// of the application instance. 0 when just one instance is active. +func CloudFoundryAppInstanceID(val string) attribute.KeyValue { + return CloudFoundryAppInstanceIDKey.String(val) +} + +// CloudFoundryAppName returns an attribute KeyValue conforming to the +// "cloudfoundry.app.name" semantic conventions. It represents the name of the +// application. +func CloudFoundryAppName(val string) attribute.KeyValue { + return CloudFoundryAppNameKey.String(val) +} + +// CloudFoundryOrgID returns an attribute KeyValue conforming to the +// "cloudfoundry.org.id" semantic conventions. It represents the guid of the +// CloudFoundry org the application is running in. +func CloudFoundryOrgID(val string) attribute.KeyValue { + return CloudFoundryOrgIDKey.String(val) +} + +// CloudFoundryOrgName returns an attribute KeyValue conforming to the +// "cloudfoundry.org.name" semantic conventions. It represents the name of the +// CloudFoundry organization the app is running in. +func CloudFoundryOrgName(val string) attribute.KeyValue { + return CloudFoundryOrgNameKey.String(val) +} + +// CloudFoundryProcessID returns an attribute KeyValue conforming to the +// "cloudfoundry.process.id" semantic conventions. It represents the UID +// identifying the process. +func CloudFoundryProcessID(val string) attribute.KeyValue { + return CloudFoundryProcessIDKey.String(val) +} + +// CloudFoundryProcessType returns an attribute KeyValue conforming to the +// "cloudfoundry.process.type" semantic conventions. It represents the type of +// process. +func CloudFoundryProcessType(val string) attribute.KeyValue { + return CloudFoundryProcessTypeKey.String(val) +} + +// CloudFoundrySpaceID returns an attribute KeyValue conforming to the +// "cloudfoundry.space.id" semantic conventions. It represents the guid of the +// CloudFoundry space the application is running in. +func CloudFoundrySpaceID(val string) attribute.KeyValue { + return CloudFoundrySpaceIDKey.String(val) +} + +// CloudFoundrySpaceName returns an attribute KeyValue conforming to the +// "cloudfoundry.space.name" semantic conventions. It represents the name of the +// CloudFoundry space the application is running in. 
+func CloudFoundrySpaceName(val string) attribute.KeyValue { + return CloudFoundrySpaceNameKey.String(val) +} + +// CloudFoundrySystemID returns an attribute KeyValue conforming to the +// "cloudfoundry.system.id" semantic conventions. It represents a guid or another +// name describing the event source. +func CloudFoundrySystemID(val string) attribute.KeyValue { + return CloudFoundrySystemIDKey.String(val) +} + +// CloudFoundrySystemInstanceID returns an attribute KeyValue conforming to the +// "cloudfoundry.system.instance.id" semantic conventions. It represents a guid +// describing the concrete instance of the event source. +func CloudFoundrySystemInstanceID(val string) attribute.KeyValue { + return CloudFoundrySystemInstanceIDKey.String(val) +} + +// Namespace: code +const ( + // CodeColumnNumberKey is the attribute Key conforming to the + // "code.column.number" semantic conventions. It represents the column number in + // `code.file.path` best representing the operation. It SHOULD point within the + // code unit named in `code.function.name`. This attribute MUST NOT be used on + // the Profile signal since the data is already captured in 'message Line'. This + // constraint is imposed to prevent redundancy and maintain data integrity. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + CodeColumnNumberKey = attribute.Key("code.column.number") + + // CodeFilePathKey is the attribute Key conforming to the "code.file.path" + // semantic conventions. It represents the source code file name that identifies + // the code unit as uniquely as possible (preferably an absolute file path). + // This attribute MUST NOT be used on the Profile signal since the data is + // already captured in 'message Function'. This constraint is imposed to prevent + // redundancy and maintain data integrity. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: /usr/local/MyApplication/content_root/app/index.php + CodeFilePathKey = attribute.Key("code.file.path") + + // CodeFunctionNameKey is the attribute Key conforming to the + // "code.function.name" semantic conventions. It represents the method or + // function fully-qualified name without arguments. The value should fit the + // natural representation of the language runtime, which is also likely the same + // used within `code.stacktrace` attribute value. This attribute MUST NOT be + // used on the Profile signal since the data is already captured in 'message + // Function'. This constraint is imposed to prevent redundancy and maintain data + // integrity. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "com.example.MyHttpService.serveRequest", + // "GuzzleHttp\Client::transfer", "fopen" + // Note: Values and format depends on each language runtime, thus it is + // impossible to provide an exhaustive list of examples. + // The values are usually the same (or prefixes of) the ones found in native + // stack trace representation stored in + // `code.stacktrace` without information on arguments. 
+ // + // Examples: + // + // - Java method: `com.example.MyHttpService.serveRequest` + // - Java anonymous class method: `com.mycompany.Main$1.myMethod` + // - Java lambda method: + // `com.mycompany.Main$$Lambda/0x0000748ae4149c00.myMethod` + // - PHP function: `GuzzleHttp\Client::transfer` + // - Go function: `github.com/my/repo/pkg.foo.func5` + // - Elixir: `OpenTelemetry.Ctx.new` + // - Erlang: `opentelemetry_ctx:new` + // - Rust: `playground::my_module::my_cool_func` + // - C function: `fopen` + CodeFunctionNameKey = attribute.Key("code.function.name") + + // CodeLineNumberKey is the attribute Key conforming to the "code.line.number" + // semantic conventions. It represents the line number in `code.file.path` best + // representing the operation. It SHOULD point within the code unit named in + // `code.function.name`. This attribute MUST NOT be used on the Profile signal + // since the data is already captured in 'message Line'. This constraint is + // imposed to prevent redundancy and maintain data integrity. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + CodeLineNumberKey = attribute.Key("code.line.number") + + // CodeStacktraceKey is the attribute Key conforming to the "code.stacktrace" + // semantic conventions. It represents a stacktrace as a string in the natural + // representation for the language runtime. The representation is identical to + // [`exception.stacktrace`]. This attribute MUST NOT be used on the Profile + // signal since the data is already captured in 'message Location'. This + // constraint is imposed to prevent redundancy and maintain data integrity. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: at com.example.GenerateTrace.methodB(GenerateTrace.java:13)\n at + // com.example.GenerateTrace.methodA(GenerateTrace.java:9)\n at + // com.example.GenerateTrace.main(GenerateTrace.java:5) + // + // [`exception.stacktrace`]: /docs/exceptions/exceptions-spans.md#stacktrace-representation + CodeStacktraceKey = attribute.Key("code.stacktrace") +) + +// CodeColumnNumber returns an attribute KeyValue conforming to the +// "code.column.number" semantic conventions. It represents the column number in +// `code.file.path` best representing the operation. It SHOULD point within the +// code unit named in `code.function.name`. This attribute MUST NOT be used on +// the Profile signal since the data is already captured in 'message Line'. This +// constraint is imposed to prevent redundancy and maintain data integrity. +func CodeColumnNumber(val int) attribute.KeyValue { + return CodeColumnNumberKey.Int(val) +} + +// CodeFilePath returns an attribute KeyValue conforming to the "code.file.path" +// semantic conventions. It represents the source code file name that identifies +// the code unit as uniquely as possible (preferably an absolute file path). This +// attribute MUST NOT be used on the Profile signal since the data is already +// captured in 'message Function'. This constraint is imposed to prevent +// redundancy and maintain data integrity. +func CodeFilePath(val string) attribute.KeyValue { + return CodeFilePathKey.String(val) +} + +// CodeFunctionName returns an attribute KeyValue conforming to the +// "code.function.name" semantic conventions. It represents the method or +// function fully-qualified name without arguments. The value should fit the +// natural representation of the language runtime, which is also likely the same +// used within `code.stacktrace` attribute value. 
This attribute MUST NOT be used +// on the Profile signal since the data is already captured in 'message +// Function'. This constraint is imposed to prevent redundancy and maintain data +// integrity. +func CodeFunctionName(val string) attribute.KeyValue { + return CodeFunctionNameKey.String(val) +} + +// CodeLineNumber returns an attribute KeyValue conforming to the +// "code.line.number" semantic conventions. It represents the line number in +// `code.file.path` best representing the operation. It SHOULD point within the +// code unit named in `code.function.name`. This attribute MUST NOT be used on +// the Profile signal since the data is already captured in 'message Line'. This +// constraint is imposed to prevent redundancy and maintain data integrity. +func CodeLineNumber(val int) attribute.KeyValue { + return CodeLineNumberKey.Int(val) +} + +// CodeStacktrace returns an attribute KeyValue conforming to the +// "code.stacktrace" semantic conventions. It represents a stacktrace as a string +// in the natural representation for the language runtime. The representation is +// identical to [`exception.stacktrace`]. This attribute MUST NOT be used on the +// Profile signal since the data is already captured in 'message Location'. This +// constraint is imposed to prevent redundancy and maintain data integrity. +// +// [`exception.stacktrace`]: /docs/exceptions/exceptions-spans.md#stacktrace-representation +func CodeStacktrace(val string) attribute.KeyValue { + return CodeStacktraceKey.String(val) +} + +// Namespace: container +const ( + // ContainerCommandKey is the attribute Key conforming to the + // "container.command" semantic conventions. It represents the command used to + // run the container (i.e. the command name). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "otelcontribcol" + // Note: If using embedded credentials or sensitive data, it is recommended to + // remove them to prevent potential leakage. + ContainerCommandKey = attribute.Key("container.command") + + // ContainerCommandArgsKey is the attribute Key conforming to the + // "container.command_args" semantic conventions. It represents the all the + // command arguments (including the command/executable itself) run by the + // container. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "otelcontribcol", "--config", "config.yaml" + ContainerCommandArgsKey = attribute.Key("container.command_args") + + // ContainerCommandLineKey is the attribute Key conforming to the + // "container.command_line" semantic conventions. It represents the full command + // run by the container as a single string representing the full command. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "otelcontribcol --config config.yaml" + ContainerCommandLineKey = attribute.Key("container.command_line") + + // ContainerCSIPluginNameKey is the attribute Key conforming to the + // "container.csi.plugin.name" semantic conventions. It represents the name of + // the CSI ([Container Storage Interface]) plugin used by the volume. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "pd.csi.storage.gke.io" + // Note: This can sometimes be referred to as a "driver" in CSI implementations. + // This should represent the `name` field of the GetPluginInfo RPC. 
+ // + // [Container Storage Interface]: https://github.com/container-storage-interface/spec + ContainerCSIPluginNameKey = attribute.Key("container.csi.plugin.name") + + // ContainerCSIVolumeIDKey is the attribute Key conforming to the + // "container.csi.volume.id" semantic conventions. It represents the unique + // volume ID returned by the CSI ([Container Storage Interface]) plugin. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "projects/my-gcp-project/zones/my-gcp-zone/disks/my-gcp-disk" + // Note: This can sometimes be referred to as a "volume handle" in CSI + // implementations. This should represent the `Volume.volume_id` field in CSI + // spec. + // + // [Container Storage Interface]: https://github.com/container-storage-interface/spec + ContainerCSIVolumeIDKey = attribute.Key("container.csi.volume.id") + + // ContainerIDKey is the attribute Key conforming to the "container.id" semantic + // conventions. It represents the container ID. Usually a UUID, as for example + // used to [identify Docker containers]. The UUID might be abbreviated. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "a3bf90e006b2" + // + // [identify Docker containers]: https://docs.docker.com/engine/containers/run/#container-identification + ContainerIDKey = attribute.Key("container.id") + + // ContainerImageIDKey is the attribute Key conforming to the + // "container.image.id" semantic conventions. It represents the runtime specific + // image identifier. Usually a hash algorithm followed by a UUID. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "sha256:19c92d0a00d1b66d897bceaa7319bee0dd38a10a851c60bcec9474aa3f01e50f" + // Note: Docker defines a sha256 of the image id; `container.image.id` + // corresponds to the `Image` field from the Docker container inspect [API] + // endpoint. + // K8s defines a link to the container registry repository with digest + // `"imageID": "registry.azurecr.io /namespace/service/dockerfile@sha256:bdeabd40c3a8a492eaf9e8e44d0ebbb84bac7ee25ac0cf8a7159d25f62555625"` + // . + // The ID is assigned by the container runtime and can vary in different + // environments. Consider using `oci.manifest.digest` if it is important to + // identify the same image in different environments/runtimes. + // + // [API]: https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerInspect + ContainerImageIDKey = attribute.Key("container.image.id") + + // ContainerImageNameKey is the attribute Key conforming to the + // "container.image.name" semantic conventions. It represents the name of the + // image the container was built on. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "gcr.io/opentelemetry/operator" + ContainerImageNameKey = attribute.Key("container.image.name") + + // ContainerImageRepoDigestsKey is the attribute Key conforming to the + // "container.image.repo_digests" semantic conventions. It represents the repo + // digests of the container image as provided by the container runtime. 
+ // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "example@sha256:afcc7f1ac1b49db317a7196c902e61c6c3c4607d63599ee1a82d702d249a0ccb", + // "internal.registry.example.com:5000/example@sha256:b69959407d21e8a062e0416bf13405bb2b71ed7a84dde4158ebafacfa06f5578" + // Note: [Docker] and [CRI] report those under the `RepoDigests` field. + // + // [Docker]: https://docs.docker.com/engine/api/v1.43/#tag/Image/operation/ImageInspect + // [CRI]: https://github.com/kubernetes/cri-api/blob/c75ef5b473bbe2d0a4fc92f82235efd665ea8e9f/pkg/apis/runtime/v1/api.proto#L1237-L1238 + ContainerImageRepoDigestsKey = attribute.Key("container.image.repo_digests") + + // ContainerImageTagsKey is the attribute Key conforming to the + // "container.image.tags" semantic conventions. It represents the container + // image tags. An example can be found in [Docker Image Inspect]. Should be only + // the `` section of the full name for example from + // `registry.example.com/my-org/my-image:`. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "v1.27.1", "3.5.7-0" + // + // [Docker Image Inspect]: https://docs.docker.com/engine/api/v1.43/#tag/Image/operation/ImageInspect + ContainerImageTagsKey = attribute.Key("container.image.tags") + + // ContainerNameKey is the attribute Key conforming to the "container.name" + // semantic conventions. It represents the container name used by container + // runtime. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry-autoconf" + ContainerNameKey = attribute.Key("container.name") + + // ContainerRuntimeKey is the attribute Key conforming to the + // "container.runtime" semantic conventions. It represents the container runtime + // managing this container. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "docker", "containerd", "rkt" + ContainerRuntimeKey = attribute.Key("container.runtime") +) + +// ContainerCommand returns an attribute KeyValue conforming to the +// "container.command" semantic conventions. It represents the command used to +// run the container (i.e. the command name). +func ContainerCommand(val string) attribute.KeyValue { + return ContainerCommandKey.String(val) +} + +// ContainerCommandArgs returns an attribute KeyValue conforming to the +// "container.command_args" semantic conventions. It represents the all the +// command arguments (including the command/executable itself) run by the +// container. +func ContainerCommandArgs(val ...string) attribute.KeyValue { + return ContainerCommandArgsKey.StringSlice(val) +} + +// ContainerCommandLine returns an attribute KeyValue conforming to the +// "container.command_line" semantic conventions. It represents the full command +// run by the container as a single string representing the full command. +func ContainerCommandLine(val string) attribute.KeyValue { + return ContainerCommandLineKey.String(val) +} + +// ContainerCSIPluginName returns an attribute KeyValue conforming to the +// "container.csi.plugin.name" semantic conventions. It represents the name of +// the CSI ([Container Storage Interface]) plugin used by the volume. 
+// +// [Container Storage Interface]: https://github.com/container-storage-interface/spec +func ContainerCSIPluginName(val string) attribute.KeyValue { + return ContainerCSIPluginNameKey.String(val) +} + +// ContainerCSIVolumeID returns an attribute KeyValue conforming to the +// "container.csi.volume.id" semantic conventions. It represents the unique +// volume ID returned by the CSI ([Container Storage Interface]) plugin. +// +// [Container Storage Interface]: https://github.com/container-storage-interface/spec +func ContainerCSIVolumeID(val string) attribute.KeyValue { + return ContainerCSIVolumeIDKey.String(val) +} + +// ContainerID returns an attribute KeyValue conforming to the "container.id" +// semantic conventions. It represents the container ID. Usually a UUID, as for +// example used to [identify Docker containers]. The UUID might be abbreviated. +// +// [identify Docker containers]: https://docs.docker.com/engine/containers/run/#container-identification +func ContainerID(val string) attribute.KeyValue { + return ContainerIDKey.String(val) +} + +// ContainerImageID returns an attribute KeyValue conforming to the +// "container.image.id" semantic conventions. It represents the runtime specific +// image identifier. Usually a hash algorithm followed by a UUID. +func ContainerImageID(val string) attribute.KeyValue { + return ContainerImageIDKey.String(val) +} + +// ContainerImageName returns an attribute KeyValue conforming to the +// "container.image.name" semantic conventions. It represents the name of the +// image the container was built on. +func ContainerImageName(val string) attribute.KeyValue { + return ContainerImageNameKey.String(val) +} + +// ContainerImageRepoDigests returns an attribute KeyValue conforming to the +// "container.image.repo_digests" semantic conventions. It represents the repo +// digests of the container image as provided by the container runtime. +func ContainerImageRepoDigests(val ...string) attribute.KeyValue { + return ContainerImageRepoDigestsKey.StringSlice(val) +} + +// ContainerImageTags returns an attribute KeyValue conforming to the +// "container.image.tags" semantic conventions. It represents the container image +// tags. An example can be found in [Docker Image Inspect]. Should be only the +// `` section of the full name for example from +// `registry.example.com/my-org/my-image:`. +// +// [Docker Image Inspect]: https://docs.docker.com/engine/api/v1.43/#tag/Image/operation/ImageInspect +func ContainerImageTags(val ...string) attribute.KeyValue { + return ContainerImageTagsKey.StringSlice(val) +} + +// ContainerName returns an attribute KeyValue conforming to the "container.name" +// semantic conventions. It represents the container name used by container +// runtime. +func ContainerName(val string) attribute.KeyValue { + return ContainerNameKey.String(val) +} + +// ContainerRuntime returns an attribute KeyValue conforming to the +// "container.runtime" semantic conventions. It represents the container runtime +// managing this container. +func ContainerRuntime(val string) attribute.KeyValue { + return ContainerRuntimeKey.String(val) +} + +// Namespace: cpu +const ( + // CPULogicalNumberKey is the attribute Key conforming to the + // "cpu.logical_number" semantic conventions. It represents the logical CPU + // number [0..n-1]. 
+ // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1 + CPULogicalNumberKey = attribute.Key("cpu.logical_number") + + // CPUModeKey is the attribute Key conforming to the "cpu.mode" semantic + // conventions. It represents the mode of the CPU. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "user", "system" + CPUModeKey = attribute.Key("cpu.mode") +) + +// CPULogicalNumber returns an attribute KeyValue conforming to the +// "cpu.logical_number" semantic conventions. It represents the logical CPU +// number [0..n-1]. +func CPULogicalNumber(val int) attribute.KeyValue { + return CPULogicalNumberKey.Int(val) +} + +// Enum values for cpu.mode +var ( + // user + // Stability: development + CPUModeUser = CPUModeKey.String("user") + // system + // Stability: development + CPUModeSystem = CPUModeKey.String("system") + // nice + // Stability: development + CPUModeNice = CPUModeKey.String("nice") + // idle + // Stability: development + CPUModeIdle = CPUModeKey.String("idle") + // iowait + // Stability: development + CPUModeIOWait = CPUModeKey.String("iowait") + // interrupt + // Stability: development + CPUModeInterrupt = CPUModeKey.String("interrupt") + // steal + // Stability: development + CPUModeSteal = CPUModeKey.String("steal") + // kernel + // Stability: development + CPUModeKernel = CPUModeKey.String("kernel") +) + +// Namespace: db +const ( + // DBClientConnectionPoolNameKey is the attribute Key conforming to the + // "db.client.connection.pool.name" semantic conventions. It represents the name + // of the connection pool; unique within the instrumented application. In case + // the connection pool implementation doesn't provide a name, instrumentation + // SHOULD use a combination of parameters that would make the name unique, for + // example, combining attributes `server.address`, `server.port`, and + // `db.namespace`, formatted as `server.address:server.port/db.namespace`. + // Instrumentations that generate connection pool name following different + // patterns SHOULD document it. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "myDataSource" + DBClientConnectionPoolNameKey = attribute.Key("db.client.connection.pool.name") + + // DBClientConnectionStateKey is the attribute Key conforming to the + // "db.client.connection.state" semantic conventions. It represents the state of + // a connection in the pool. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "idle" + DBClientConnectionStateKey = attribute.Key("db.client.connection.state") + + // DBCollectionNameKey is the attribute Key conforming to the + // "db.collection.name" semantic conventions. It represents the name of a + // collection (table, container) within the database. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "public.users", "customers" + // Note: It is RECOMMENDED to capture the value as provided by the application + // without attempting to do any case normalization. + // + // The collection name SHOULD NOT be extracted from `db.query.text`, + // when the database system supports query text with multiple collections + // in non-batch operations. + // + // For batch operations, if the individual operations are known to have the same + // collection name then that collection name SHOULD be used. 
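+	//
+	// A minimal usage sketch, assuming `span` was started by an OpenTelemetry
+	// tracer (the example value comes from the Examples list above):
+	//
+	//	span.SetAttributes(DBCollectionNameKey.String("public.users"))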
+ DBCollectionNameKey = attribute.Key("db.collection.name") + + // DBNamespaceKey is the attribute Key conforming to the "db.namespace" semantic + // conventions. It represents the name of the database, fully qualified within + // the server address and port. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "customers", "test.users" + // Note: If a database system has multiple namespace components, they SHOULD be + // concatenated from the most general to the most specific namespace component, + // using `|` as a separator between the components. Any missing components (and + // their associated separators) SHOULD be omitted. + // Semantic conventions for individual database systems SHOULD document what + // `db.namespace` means in the context of that system. + // It is RECOMMENDED to capture the value as provided by the application without + // attempting to do any case normalization. + DBNamespaceKey = attribute.Key("db.namespace") + + // DBOperationBatchSizeKey is the attribute Key conforming to the + // "db.operation.batch.size" semantic conventions. It represents the number of + // queries included in a batch operation. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: 2, 3, 4 + // Note: Operations are only considered batches when they contain two or more + // operations, and so `db.operation.batch.size` SHOULD never be `1`. + DBOperationBatchSizeKey = attribute.Key("db.operation.batch.size") + + // DBOperationNameKey is the attribute Key conforming to the "db.operation.name" + // semantic conventions. It represents the name of the operation or command + // being executed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "findAndModify", "HMSET", "SELECT" + // Note: It is RECOMMENDED to capture the value as provided by the application + // without attempting to do any case normalization. + // + // The operation name SHOULD NOT be extracted from `db.query.text`, + // when the database system supports query text with multiple operations + // in non-batch operations. + // + // If spaces can occur in the operation name, multiple consecutive spaces + // SHOULD be normalized to a single space. + // + // For batch operations, if the individual operations are known to have the same + // operation name + // then that operation name SHOULD be used prepended by `BATCH `, + // otherwise `db.operation.name` SHOULD be `BATCH` or some other database + // system specific term if more applicable. + DBOperationNameKey = attribute.Key("db.operation.name") + + // DBQuerySummaryKey is the attribute Key conforming to the "db.query.summary" + // semantic conventions. It represents the low cardinality summary of a database + // query. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "SELECT wuser_table", "INSERT shipping_details SELECT orders", "get + // user by id" + // Note: The query summary describes a class of database queries and is useful + // as a grouping key, especially when analyzing telemetry for database + // calls involving complex queries. + // + // Summary may be available to the instrumentation through + // instrumentation hooks or other means. If it is not available, + // instrumentations + // that support query parsing SHOULD generate a summary following + // [Generating query summary] + // section. 
+ // + // [Generating query summary]: /docs/database/database-spans.md#generating-a-summary-of-the-query + DBQuerySummaryKey = attribute.Key("db.query.summary") + + // DBQueryTextKey is the attribute Key conforming to the "db.query.text" + // semantic conventions. It represents the database query being executed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "SELECT * FROM wuser_table where username = ?", "SET mykey ?" + // Note: For sanitization see [Sanitization of `db.query.text`]. + // For batch operations, if the individual operations are known to have the same + // query text then that query text SHOULD be used, otherwise all of the + // individual query texts SHOULD be concatenated with separator `; ` or some + // other database system specific separator if more applicable. + // Parameterized query text SHOULD NOT be sanitized. Even though parameterized + // query text can potentially have sensitive data, by using a parameterized + // query the user is giving a strong signal that any sensitive data will be + // passed as parameter values, and the benefit to observability of capturing the + // static part of the query text by default outweighs the risk. + // + // [Sanitization of `db.query.text`]: /docs/database/database-spans.md#sanitization-of-dbquerytext + DBQueryTextKey = attribute.Key("db.query.text") + + // DBResponseReturnedRowsKey is the attribute Key conforming to the + // "db.response.returned_rows" semantic conventions. It represents the number of + // rows returned by the operation. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 10, 30, 1000 + DBResponseReturnedRowsKey = attribute.Key("db.response.returned_rows") + + // DBResponseStatusCodeKey is the attribute Key conforming to the + // "db.response.status_code" semantic conventions. It represents the database + // response status code. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "102", "ORA-17002", "08P01", "404" + // Note: The status code returned by the database. Usually it represents an + // error code, but may also represent partial success, warning, or differentiate + // between various types of successful outcomes. + // Semantic conventions for individual database systems SHOULD document what + // `db.response.status_code` means in the context of that system. + DBResponseStatusCodeKey = attribute.Key("db.response.status_code") + + // DBStoredProcedureNameKey is the attribute Key conforming to the + // "db.stored_procedure.name" semantic conventions. It represents the name of a + // stored procedure within the database. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "GetCustomer" + // Note: It is RECOMMENDED to capture the value as provided by the application + // without attempting to do any case normalization. + // + // For batch operations, if the individual operations are known to have the same + // stored procedure name then that stored procedure name SHOULD be used. + DBStoredProcedureNameKey = attribute.Key("db.stored_procedure.name") + + // DBSystemNameKey is the attribute Key conforming to the "db.system.name" + // semantic conventions. It represents the database management system (DBMS) + // product as identified by the client instrumentation. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: + // Note: The actual DBMS may differ from the one identified by the client. For + // example, when using PostgreSQL client libraries to connect to a CockroachDB, + // the `db.system.name` is set to `postgresql` based on the instrumentation's + // best knowledge. + DBSystemNameKey = attribute.Key("db.system.name") +) + +// DBClientConnectionPoolName returns an attribute KeyValue conforming to the +// "db.client.connection.pool.name" semantic conventions. It represents the name +// of the connection pool; unique within the instrumented application. In case +// the connection pool implementation doesn't provide a name, instrumentation +// SHOULD use a combination of parameters that would make the name unique, for +// example, combining attributes `server.address`, `server.port`, and +// `db.namespace`, formatted as `server.address:server.port/db.namespace`. +// Instrumentations that generate connection pool name following different +// patterns SHOULD document it. +func DBClientConnectionPoolName(val string) attribute.KeyValue { + return DBClientConnectionPoolNameKey.String(val) +} + +// DBCollectionName returns an attribute KeyValue conforming to the +// "db.collection.name" semantic conventions. It represents the name of a +// collection (table, container) within the database. +func DBCollectionName(val string) attribute.KeyValue { + return DBCollectionNameKey.String(val) +} + +// DBNamespace returns an attribute KeyValue conforming to the "db.namespace" +// semantic conventions. It represents the name of the database, fully qualified +// within the server address and port. +func DBNamespace(val string) attribute.KeyValue { + return DBNamespaceKey.String(val) +} + +// DBOperationBatchSize returns an attribute KeyValue conforming to the +// "db.operation.batch.size" semantic conventions. It represents the number of +// queries included in a batch operation. +func DBOperationBatchSize(val int) attribute.KeyValue { + return DBOperationBatchSizeKey.Int(val) +} + +// DBOperationName returns an attribute KeyValue conforming to the +// "db.operation.name" semantic conventions. It represents the name of the +// operation or command being executed. +func DBOperationName(val string) attribute.KeyValue { + return DBOperationNameKey.String(val) +} + +// DBQuerySummary returns an attribute KeyValue conforming to the +// "db.query.summary" semantic conventions. It represents the low cardinality +// summary of a database query. +func DBQuerySummary(val string) attribute.KeyValue { + return DBQuerySummaryKey.String(val) +} + +// DBQueryText returns an attribute KeyValue conforming to the "db.query.text" +// semantic conventions. It represents the database query being executed. +func DBQueryText(val string) attribute.KeyValue { + return DBQueryTextKey.String(val) +} + +// DBResponseReturnedRows returns an attribute KeyValue conforming to the +// "db.response.returned_rows" semantic conventions. It represents the number of +// rows returned by the operation. +func DBResponseReturnedRows(val int) attribute.KeyValue { + return DBResponseReturnedRowsKey.Int(val) +} + +// DBResponseStatusCode returns an attribute KeyValue conforming to the +// "db.response.status_code" semantic conventions. It represents the database +// response status code. 
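+//
+// A minimal usage sketch, assuming `span` comes from an OpenTelemetry tracer
+// and using one of the example values listed above:
+//
+//	span.SetAttributes(DBResponseStatusCode("ORA-17002"))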
+func DBResponseStatusCode(val string) attribute.KeyValue { + return DBResponseStatusCodeKey.String(val) +} + +// DBStoredProcedureName returns an attribute KeyValue conforming to the +// "db.stored_procedure.name" semantic conventions. It represents the name of a +// stored procedure within the database. +func DBStoredProcedureName(val string) attribute.KeyValue { + return DBStoredProcedureNameKey.String(val) +} + +// Enum values for db.client.connection.state +var ( + // idle + // Stability: development + DBClientConnectionStateIdle = DBClientConnectionStateKey.String("idle") + // used + // Stability: development + DBClientConnectionStateUsed = DBClientConnectionStateKey.String("used") +) + +// Enum values for db.system.name +var ( + // Some other SQL database. Fallback only. + // Stability: development + DBSystemNameOtherSQL = DBSystemNameKey.String("other_sql") + // [Adabas (Adaptable Database System)] + // Stability: development + // + // [Adabas (Adaptable Database System)]: https://documentation.softwareag.com/?pf=adabas + DBSystemNameSoftwareagAdabas = DBSystemNameKey.String("softwareag.adabas") + // [Actian Ingres] + // Stability: development + // + // [Actian Ingres]: https://www.actian.com/databases/ingres/ + DBSystemNameActianIngres = DBSystemNameKey.String("actian.ingres") + // [Amazon DynamoDB] + // Stability: development + // + // [Amazon DynamoDB]: https://aws.amazon.com/pm/dynamodb/ + DBSystemNameAWSDynamoDB = DBSystemNameKey.String("aws.dynamodb") + // [Amazon Redshift] + // Stability: development + // + // [Amazon Redshift]: https://aws.amazon.com/redshift/ + DBSystemNameAWSRedshift = DBSystemNameKey.String("aws.redshift") + // [Azure Cosmos DB] + // Stability: development + // + // [Azure Cosmos DB]: https://learn.microsoft.com/azure/cosmos-db + DBSystemNameAzureCosmosDB = DBSystemNameKey.String("azure.cosmosdb") + // [InterSystems Caché] + // Stability: development + // + // [InterSystems Caché]: https://www.intersystems.com/products/cache/ + DBSystemNameIntersystemsCache = DBSystemNameKey.String("intersystems.cache") + // [Apache Cassandra] + // Stability: development + // + // [Apache Cassandra]: https://cassandra.apache.org/ + DBSystemNameCassandra = DBSystemNameKey.String("cassandra") + // [ClickHouse] + // Stability: development + // + // [ClickHouse]: https://clickhouse.com/ + DBSystemNameClickHouse = DBSystemNameKey.String("clickhouse") + // [CockroachDB] + // Stability: development + // + // [CockroachDB]: https://www.cockroachlabs.com/ + DBSystemNameCockroachDB = DBSystemNameKey.String("cockroachdb") + // [Couchbase] + // Stability: development + // + // [Couchbase]: https://www.couchbase.com/ + DBSystemNameCouchbase = DBSystemNameKey.String("couchbase") + // [Apache CouchDB] + // Stability: development + // + // [Apache CouchDB]: https://couchdb.apache.org/ + DBSystemNameCouchDB = DBSystemNameKey.String("couchdb") + // [Apache Derby] + // Stability: development + // + // [Apache Derby]: https://db.apache.org/derby/ + DBSystemNameDerby = DBSystemNameKey.String("derby") + // [Elasticsearch] + // Stability: development + // + // [Elasticsearch]: https://www.elastic.co/elasticsearch + DBSystemNameElasticsearch = DBSystemNameKey.String("elasticsearch") + // [Firebird] + // Stability: development + // + // [Firebird]: https://www.firebirdsql.org/ + DBSystemNameFirebirdSQL = DBSystemNameKey.String("firebirdsql") + // [Google Cloud Spanner] + // Stability: development + // + // [Google Cloud Spanner]: https://cloud.google.com/spanner + DBSystemNameGCPSpanner = 
DBSystemNameKey.String("gcp.spanner") + // [Apache Geode] + // Stability: development + // + // [Apache Geode]: https://geode.apache.org/ + DBSystemNameGeode = DBSystemNameKey.String("geode") + // [H2 Database] + // Stability: development + // + // [H2 Database]: https://h2database.com/ + DBSystemNameH2database = DBSystemNameKey.String("h2database") + // [Apache HBase] + // Stability: development + // + // [Apache HBase]: https://hbase.apache.org/ + DBSystemNameHBase = DBSystemNameKey.String("hbase") + // [Apache Hive] + // Stability: development + // + // [Apache Hive]: https://hive.apache.org/ + DBSystemNameHive = DBSystemNameKey.String("hive") + // [HyperSQL Database] + // Stability: development + // + // [HyperSQL Database]: https://hsqldb.org/ + DBSystemNameHSQLDB = DBSystemNameKey.String("hsqldb") + // [IBM Db2] + // Stability: development + // + // [IBM Db2]: https://www.ibm.com/db2 + DBSystemNameIBMDB2 = DBSystemNameKey.String("ibm.db2") + // [IBM Informix] + // Stability: development + // + // [IBM Informix]: https://www.ibm.com/products/informix + DBSystemNameIBMInformix = DBSystemNameKey.String("ibm.informix") + // [IBM Netezza] + // Stability: development + // + // [IBM Netezza]: https://www.ibm.com/products/netezza + DBSystemNameIBMNetezza = DBSystemNameKey.String("ibm.netezza") + // [InfluxDB] + // Stability: development + // + // [InfluxDB]: https://www.influxdata.com/ + DBSystemNameInfluxDB = DBSystemNameKey.String("influxdb") + // [Instant] + // Stability: development + // + // [Instant]: https://www.instantdb.com/ + DBSystemNameInstantDB = DBSystemNameKey.String("instantdb") + // [MariaDB] + // Stability: stable + // + // [MariaDB]: https://mariadb.org/ + DBSystemNameMariaDB = DBSystemNameKey.String("mariadb") + // [Memcached] + // Stability: development + // + // [Memcached]: https://memcached.org/ + DBSystemNameMemcached = DBSystemNameKey.String("memcached") + // [MongoDB] + // Stability: development + // + // [MongoDB]: https://www.mongodb.com/ + DBSystemNameMongoDB = DBSystemNameKey.String("mongodb") + // [Microsoft SQL Server] + // Stability: stable + // + // [Microsoft SQL Server]: https://www.microsoft.com/sql-server + DBSystemNameMicrosoftSQLServer = DBSystemNameKey.String("microsoft.sql_server") + // [MySQL] + // Stability: stable + // + // [MySQL]: https://www.mysql.com/ + DBSystemNameMySQL = DBSystemNameKey.String("mysql") + // [Neo4j] + // Stability: development + // + // [Neo4j]: https://neo4j.com/ + DBSystemNameNeo4j = DBSystemNameKey.String("neo4j") + // [OpenSearch] + // Stability: development + // + // [OpenSearch]: https://opensearch.org/ + DBSystemNameOpenSearch = DBSystemNameKey.String("opensearch") + // [Oracle Database] + // Stability: development + // + // [Oracle Database]: https://www.oracle.com/database/ + DBSystemNameOracleDB = DBSystemNameKey.String("oracle.db") + // [PostgreSQL] + // Stability: stable + // + // [PostgreSQL]: https://www.postgresql.org/ + DBSystemNamePostgreSQL = DBSystemNameKey.String("postgresql") + // [Redis] + // Stability: development + // + // [Redis]: https://redis.io/ + DBSystemNameRedis = DBSystemNameKey.String("redis") + // [SAP HANA] + // Stability: development + // + // [SAP HANA]: https://www.sap.com/products/technology-platform/hana/what-is-sap-hana.html + DBSystemNameSAPHANA = DBSystemNameKey.String("sap.hana") + // [SAP MaxDB] + // Stability: development + // + // [SAP MaxDB]: https://maxdb.sap.com/ + DBSystemNameSAPMaxDB = DBSystemNameKey.String("sap.maxdb") + // [SQLite] + // Stability: development + // + // 
[SQLite]: https://www.sqlite.org/ + DBSystemNameSQLite = DBSystemNameKey.String("sqlite") + // [Teradata] + // Stability: development + // + // [Teradata]: https://www.teradata.com/ + DBSystemNameTeradata = DBSystemNameKey.String("teradata") + // [Trino] + // Stability: development + // + // [Trino]: https://trino.io/ + DBSystemNameTrino = DBSystemNameKey.String("trino") +) + +// Namespace: deployment +const ( + // DeploymentEnvironmentNameKey is the attribute Key conforming to the + // "deployment.environment.name" semantic conventions. It represents the name of + // the [deployment environment] (aka deployment tier). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "staging", "production" + // Note: `deployment.environment.name` does not affect the uniqueness + // constraints defined through + // the `service.namespace`, `service.name` and `service.instance.id` resource + // attributes. + // This implies that resources carrying the following attribute combinations + // MUST be + // considered to be identifying the same service: + // + // - `service.name=frontend`, `deployment.environment.name=production` + // - `service.name=frontend`, `deployment.environment.name=staging`. + // + // + // [deployment environment]: https://wikipedia.org/wiki/Deployment_environment + DeploymentEnvironmentNameKey = attribute.Key("deployment.environment.name") + + // DeploymentIDKey is the attribute Key conforming to the "deployment.id" + // semantic conventions. It represents the id of the deployment. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1208" + DeploymentIDKey = attribute.Key("deployment.id") + + // DeploymentNameKey is the attribute Key conforming to the "deployment.name" + // semantic conventions. It represents the name of the deployment. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "deploy my app", "deploy-frontend" + DeploymentNameKey = attribute.Key("deployment.name") + + // DeploymentStatusKey is the attribute Key conforming to the + // "deployment.status" semantic conventions. It represents the status of the + // deployment. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + DeploymentStatusKey = attribute.Key("deployment.status") +) + +// DeploymentEnvironmentName returns an attribute KeyValue conforming to the +// "deployment.environment.name" semantic conventions. It represents the name of +// the [deployment environment] (aka deployment tier). +// +// [deployment environment]: https://wikipedia.org/wiki/Deployment_environment +func DeploymentEnvironmentName(val string) attribute.KeyValue { + return DeploymentEnvironmentNameKey.String(val) +} + +// DeploymentID returns an attribute KeyValue conforming to the "deployment.id" +// semantic conventions. It represents the id of the deployment. +func DeploymentID(val string) attribute.KeyValue { + return DeploymentIDKey.String(val) +} + +// DeploymentName returns an attribute KeyValue conforming to the +// "deployment.name" semantic conventions. It represents the name of the +// deployment. 
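+//
+// A minimal sketch combining the deployment helpers defined in this package
+// (the attribute values are illustrative only):
+//
+//	attrs := []attribute.KeyValue{
+//		DeploymentName("deploy-frontend"),
+//		DeploymentEnvironmentName("staging"),
+//	}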
+func DeploymentName(val string) attribute.KeyValue { + return DeploymentNameKey.String(val) +} + +// Enum values for deployment.status +var ( + // failed + // Stability: development + DeploymentStatusFailed = DeploymentStatusKey.String("failed") + // succeeded + // Stability: development + DeploymentStatusSucceeded = DeploymentStatusKey.String("succeeded") +) + +// Namespace: destination +const ( + // DestinationAddressKey is the attribute Key conforming to the + // "destination.address" semantic conventions. It represents the destination + // address - domain name if available without reverse DNS lookup; otherwise, IP + // address or Unix domain socket name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "destination.example.com", "10.1.2.80", "/tmp/my.sock" + // Note: When observed from the source side, and when communicating through an + // intermediary, `destination.address` SHOULD represent the destination address + // behind any intermediaries, for example proxies, if it's available. + DestinationAddressKey = attribute.Key("destination.address") + + // DestinationPortKey is the attribute Key conforming to the "destination.port" + // semantic conventions. It represents the destination port number. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 3389, 2888 + DestinationPortKey = attribute.Key("destination.port") +) + +// DestinationAddress returns an attribute KeyValue conforming to the +// "destination.address" semantic conventions. It represents the destination +// address - domain name if available without reverse DNS lookup; otherwise, IP +// address or Unix domain socket name. +func DestinationAddress(val string) attribute.KeyValue { + return DestinationAddressKey.String(val) +} + +// DestinationPort returns an attribute KeyValue conforming to the +// "destination.port" semantic conventions. It represents the destination port +// number. +func DestinationPort(val int) attribute.KeyValue { + return DestinationPortKey.Int(val) +} + +// Namespace: device +const ( + // DeviceIDKey is the attribute Key conforming to the "device.id" semantic + // conventions. It represents a unique identifier representing the device. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "123456789012345", "01:23:45:67:89:AB" + // Note: Its value SHOULD be identical for all apps on a device and it SHOULD + // NOT change if an app is uninstalled and re-installed. + // However, it might be resettable by the user for all apps on a device. + // Hardware IDs (e.g. vendor-specific serial number, IMEI or MAC address) MAY be + // used as values. + // + // More information about Android identifier best practices can be found [here] + // . + // + // > [!WARNING]> This attribute may contain sensitive (PII) information. Caution + // > should be taken when storing personal data or anything which can identify a + // > user. GDPR and data protection laws may apply, + // > ensure you do your own due diligence.> Due to these reasons, this + // > identifier is not recommended for consumer applications and will likely + // > result in rejection from both Google Play and App Store. + // > However, it may be appropriate for specific enterprise scenarios, such as + // > kiosk devices or enterprise-managed devices, with appropriate compliance + // > clearance. 
+ // > Any instrumentation providing this identifier MUST implement it as an + // > opt-in feature.> See [`app.installation.id`]> for a more + // > privacy-preserving alternative. + // + // [here]: https://developer.android.com/training/articles/user-data-ids + // [`app.installation.id`]: /docs/registry/attributes/app.md#app-installation-id + DeviceIDKey = attribute.Key("device.id") + + // DeviceManufacturerKey is the attribute Key conforming to the + // "device.manufacturer" semantic conventions. It represents the name of the + // device manufacturer. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Apple", "Samsung" + // Note: The Android OS provides this field via [Build]. iOS apps SHOULD + // hardcode the value `Apple`. + // + // [Build]: https://developer.android.com/reference/android/os/Build#MANUFACTURER + DeviceManufacturerKey = attribute.Key("device.manufacturer") + + // DeviceModelIdentifierKey is the attribute Key conforming to the + // "device.model.identifier" semantic conventions. It represents the model + // identifier for the device. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "iPhone3,4", "SM-G920F" + // Note: It's recommended this value represents a machine-readable version of + // the model identifier rather than the market or consumer-friendly name of the + // device. + DeviceModelIdentifierKey = attribute.Key("device.model.identifier") + + // DeviceModelNameKey is the attribute Key conforming to the "device.model.name" + // semantic conventions. It represents the marketing name for the device model. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "iPhone 6s Plus", "Samsung Galaxy S6" + // Note: It's recommended this value represents a human-readable version of the + // device model rather than a machine-readable alternative. + DeviceModelNameKey = attribute.Key("device.model.name") +) + +// DeviceID returns an attribute KeyValue conforming to the "device.id" semantic +// conventions. It represents a unique identifier representing the device. +func DeviceID(val string) attribute.KeyValue { + return DeviceIDKey.String(val) +} + +// DeviceManufacturer returns an attribute KeyValue conforming to the +// "device.manufacturer" semantic conventions. It represents the name of the +// device manufacturer. +func DeviceManufacturer(val string) attribute.KeyValue { + return DeviceManufacturerKey.String(val) +} + +// DeviceModelIdentifier returns an attribute KeyValue conforming to the +// "device.model.identifier" semantic conventions. It represents the model +// identifier for the device. +func DeviceModelIdentifier(val string) attribute.KeyValue { + return DeviceModelIdentifierKey.String(val) +} + +// DeviceModelName returns an attribute KeyValue conforming to the +// "device.model.name" semantic conventions. It represents the marketing name for +// the device model. +func DeviceModelName(val string) attribute.KeyValue { + return DeviceModelNameKey.String(val) +} + +// Namespace: disk +const ( + // DiskIODirectionKey is the attribute Key conforming to the "disk.io.direction" + // semantic conventions. It represents the disk IO operation direction. 
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "read"
+	DiskIODirectionKey = attribute.Key("disk.io.direction")
+)
+
+// Enum values for disk.io.direction
+var (
+	// read
+	// Stability: development
+	DiskIODirectionRead = DiskIODirectionKey.String("read")
+	// write
+	// Stability: development
+	DiskIODirectionWrite = DiskIODirectionKey.String("write")
+)
+
+// Namespace: dns
+const (
+	// DNSQuestionNameKey is the attribute Key conforming to the "dns.question.name"
+	// semantic conventions. It represents the name being queried.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "www.example.com", "opentelemetry.io"
+	// Note: If the name field contains non-printable characters (below 32 or above
+	// 126), those characters should be represented as escaped base 10 integers
+	// (\DDD). Back slashes and quotes should be escaped. Tabs, carriage returns,
+	// and line feeds should be converted to \t, \r, and \n respectively.
+	DNSQuestionNameKey = attribute.Key("dns.question.name")
+)
+
+// DNSQuestionName returns an attribute KeyValue conforming to the
+// "dns.question.name" semantic conventions. It represents the name being
+// queried.
+func DNSQuestionName(val string) attribute.KeyValue {
+	return DNSQuestionNameKey.String(val)
+}
+
+// Namespace: elasticsearch
+const (
+	// ElasticsearchNodeNameKey is the attribute Key conforming to the
+	// "elasticsearch.node.name" semantic conventions. It represents the
+	// human-readable identifier of the node/instance to which a request was
+	// routed.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "instance-0000000001"
+	ElasticsearchNodeNameKey = attribute.Key("elasticsearch.node.name")
+)
+
+// ElasticsearchNodeName returns an attribute KeyValue conforming to the
+// "elasticsearch.node.name" semantic conventions. It represents the
+// human-readable identifier of the node/instance to which a request was
+// routed.
+func ElasticsearchNodeName(val string) attribute.KeyValue {
+	return ElasticsearchNodeNameKey.String(val)
+}
+
+// Namespace: enduser
+const (
+	// EnduserIDKey is the attribute Key conforming to the "enduser.id" semantic
+	// conventions. It represents the unique identifier of an end user in the
+	// system. It may be a username, email address, or other identifier.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "username"
+	// Note: Unique identifier of an end user in the system.
+	//
+	// > [!Warning]
+	// > This field contains sensitive (PII) information.
+	EnduserIDKey = attribute.Key("enduser.id")
+
+	// EnduserPseudoIDKey is the attribute Key conforming to the "enduser.pseudo.id"
+	// semantic conventions. It represents the pseudonymous identifier of an end
+	// user. This identifier should be a random value that is not directly linked or
+	// associated with the end user's actual identity.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "QdH5CAWJgqVT4rOr0qtumf"
+	// Note: Pseudonymous identifier of an end user.
+	//
+	// > [!Warning]
+	// > This field contains sensitive (linkable PII) information.
+	EnduserPseudoIDKey = attribute.Key("enduser.pseudo.id")
+)
+
+// EnduserID returns an attribute KeyValue conforming to the "enduser.id"
+// semantic conventions. It represents the unique identifier of an end user in
+// the system. It may be a username, email address, or other identifier.
+func EnduserID(val string) attribute.KeyValue {
+	return EnduserIDKey.String(val)
+}
+
+// EnduserPseudoID returns an attribute KeyValue conforming to the
+// "enduser.pseudo.id" semantic conventions. It represents the pseudonymous
+// identifier of an end user. This identifier should be a random value that is
+// not directly linked or associated with the end user's actual identity.
+func EnduserPseudoID(val string) attribute.KeyValue {
+	return EnduserPseudoIDKey.String(val)
+}
+
+// Namespace: error
+const (
+	// ErrorMessageKey is the attribute Key conforming to the "error.message"
+	// semantic conventions. It represents a message providing more detail about an
+	// error in human-readable form.
+	//
+	// Type: string
+	// RequirementLevel: Recommended
+	// Stability: Development
+	//
+	// Examples: "Unexpected input type: string", "The user has exceeded their
+	// storage quota"
+	// Note: `error.message` should provide additional context and detail about an
+	// error.
+	// It is NOT RECOMMENDED to duplicate the value of `error.type` in
+	// `error.message`.
+	// It is also NOT RECOMMENDED to duplicate the value of `exception.message` in
+	// `error.message`.
+	//
+	// `error.message` is NOT RECOMMENDED for metrics or spans due to its unbounded
+	// cardinality and overlap with span status.
+	ErrorMessageKey = attribute.Key("error.message")
+
+	// ErrorTypeKey is the attribute Key conforming to the "error.type" semantic
+	// conventions. It describes a class of error the operation ended with.
+	//
+	// Type: Enum
+	// RequirementLevel: Recommended
+	// Stability: Stable
+	//
+	// Examples: "timeout", "java.net.UnknownHostException",
+	// "server_certificate_invalid", "500"
+	// Note: The `error.type` SHOULD be predictable, and SHOULD have low
+	// cardinality.
+	//
+	// When `error.type` is set to a type (e.g., an exception type), its
+	// canonical class name identifying the type within the artifact SHOULD be used.
+	//
+	// Instrumentations SHOULD document the list of errors they report.
+	//
+	// The cardinality of `error.type` within one instrumentation library SHOULD be
+	// low.
+	// Telemetry consumers that aggregate data from multiple instrumentation
+	// libraries and applications
+	// should be prepared for `error.type` to have high cardinality at query time
+	// when no
+	// additional filters are applied.
+	//
+	// If the operation has completed successfully, instrumentations SHOULD NOT set
+	// `error.type`.
+	//
+	// If a specific domain defines its own set of error identifiers (such as HTTP
+	// or gRPC status codes),
+	// it's RECOMMENDED to:
+	//
+	// - Use a domain-specific attribute
+	// - Set `error.type` to capture all errors, regardless of whether they are
+	// defined within the domain-specific set or not.
+	ErrorTypeKey = attribute.Key("error.type")
+)
+
+// ErrorMessage returns an attribute KeyValue conforming to the "error.message"
+// semantic conventions. It represents a message providing more detail about an
+// error in human-readable form.
+func ErrorMessage(val string) attribute.KeyValue {
+	return ErrorMessageKey.String(val)
+}
+
+// Enum values for error.type
+var (
+	// A fallback error value to be used when the instrumentation doesn't define a
+	// custom value.
+ // + // Stability: stable + ErrorTypeOther = ErrorTypeKey.String("_OTHER") +) + +// Namespace: exception +const ( + // ExceptionMessageKey is the attribute Key conforming to the + // "exception.message" semantic conventions. It represents the exception + // message. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "Division by zero", "Can't convert 'int' object to str implicitly" + ExceptionMessageKey = attribute.Key("exception.message") + + // ExceptionStacktraceKey is the attribute Key conforming to the + // "exception.stacktrace" semantic conventions. It represents a stacktrace as a + // string in the natural representation for the language runtime. The + // representation is to be determined and documented by each language SIG. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: Exception in thread "main" java.lang.RuntimeException: Test + // exception\n at com.example.GenerateTrace.methodB(GenerateTrace.java:13)\n at + // com.example.GenerateTrace.methodA(GenerateTrace.java:9)\n at + // com.example.GenerateTrace.main(GenerateTrace.java:5) + ExceptionStacktraceKey = attribute.Key("exception.stacktrace") + + // ExceptionTypeKey is the attribute Key conforming to the "exception.type" + // semantic conventions. It represents the type of the exception (its + // fully-qualified class name, if applicable). The dynamic type of the exception + // should be preferred over the static type in languages that support it. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "java.net.ConnectException", "OSError" + ExceptionTypeKey = attribute.Key("exception.type") +) + +// ExceptionMessage returns an attribute KeyValue conforming to the +// "exception.message" semantic conventions. It represents the exception message. +func ExceptionMessage(val string) attribute.KeyValue { + return ExceptionMessageKey.String(val) +} + +// ExceptionStacktrace returns an attribute KeyValue conforming to the +// "exception.stacktrace" semantic conventions. It represents a stacktrace as a +// string in the natural representation for the language runtime. The +// representation is to be determined and documented by each language SIG. +func ExceptionStacktrace(val string) attribute.KeyValue { + return ExceptionStacktraceKey.String(val) +} + +// ExceptionType returns an attribute KeyValue conforming to the "exception.type" +// semantic conventions. It represents the type of the exception (its +// fully-qualified class name, if applicable). The dynamic type of the exception +// should be preferred over the static type in languages that support it. +func ExceptionType(val string) attribute.KeyValue { + return ExceptionTypeKey.String(val) +} + +// Namespace: faas +const ( + // FaaSColdstartKey is the attribute Key conforming to the "faas.coldstart" + // semantic conventions. It represents a boolean that is true if the serverless + // function is executed for the first time (aka cold-start). + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + FaaSColdstartKey = attribute.Key("faas.coldstart") + + // FaaSCronKey is the attribute Key conforming to the "faas.cron" semantic + // conventions. It represents a string containing the schedule period as + // [Cron Expression]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 0/5 * * * ? 
* + // + // [Cron Expression]: https://docs.oracle.com/cd/E12058_01/doc/doc.1014/e12030/cron_expressions.htm + FaaSCronKey = attribute.Key("faas.cron") + + // FaaSDocumentCollectionKey is the attribute Key conforming to the + // "faas.document.collection" semantic conventions. It represents the name of + // the source on which the triggering operation was performed. For example, in + // Cloud Storage or S3 corresponds to the bucket name, and in Cosmos DB to the + // database name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "myBucketName", "myDbName" + FaaSDocumentCollectionKey = attribute.Key("faas.document.collection") + + // FaaSDocumentNameKey is the attribute Key conforming to the + // "faas.document.name" semantic conventions. It represents the document + // name/table subjected to the operation. For example, in Cloud Storage or S3 is + // the name of the file, and in Cosmos DB the table name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "myFile.txt", "myTableName" + FaaSDocumentNameKey = attribute.Key("faas.document.name") + + // FaaSDocumentOperationKey is the attribute Key conforming to the + // "faas.document.operation" semantic conventions. It represents the describes + // the type of the operation that was performed on the data. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + FaaSDocumentOperationKey = attribute.Key("faas.document.operation") + + // FaaSDocumentTimeKey is the attribute Key conforming to the + // "faas.document.time" semantic conventions. It represents a string containing + // the time when the data was accessed in the [ISO 8601] format expressed in + // [UTC]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 2020-01-23T13:47:06Z + // + // [ISO 8601]: https://www.iso.org/iso-8601-date-and-time-format.html + // [UTC]: https://www.w3.org/TR/NOTE-datetime + FaaSDocumentTimeKey = attribute.Key("faas.document.time") + + // FaaSInstanceKey is the attribute Key conforming to the "faas.instance" + // semantic conventions. It represents the execution environment ID as a string, + // that will be potentially reused for other invocations to the same + // function/function version. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021/06/28/[$LATEST]2f399eb14537447da05ab2a2e39309de" + // Note: - **AWS Lambda:** Use the (full) log stream name. + FaaSInstanceKey = attribute.Key("faas.instance") + + // FaaSInvocationIDKey is the attribute Key conforming to the + // "faas.invocation_id" semantic conventions. It represents the invocation ID of + // the current function invocation. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: af9d5aa4-a685-4c5f-a22b-444f80b3cc28 + FaaSInvocationIDKey = attribute.Key("faas.invocation_id") + + // FaaSInvokedNameKey is the attribute Key conforming to the "faas.invoked_name" + // semantic conventions. It represents the name of the invoked function. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: my-function + // Note: SHOULD be equal to the `faas.name` resource attribute of the invoked + // function. 
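+	//
+	// A minimal usage sketch, assuming `span` wraps the outgoing invocation and
+	// comes from an OpenTelemetry tracer:
+	//
+	//	span.SetAttributes(FaaSInvokedNameKey.String("my-function"))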
+ FaaSInvokedNameKey = attribute.Key("faas.invoked_name") + + // FaaSInvokedProviderKey is the attribute Key conforming to the + // "faas.invoked_provider" semantic conventions. It represents the cloud + // provider of the invoked function. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: SHOULD be equal to the `cloud.provider` resource attribute of the + // invoked function. + FaaSInvokedProviderKey = attribute.Key("faas.invoked_provider") + + // FaaSInvokedRegionKey is the attribute Key conforming to the + // "faas.invoked_region" semantic conventions. It represents the cloud region of + // the invoked function. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: eu-central-1 + // Note: SHOULD be equal to the `cloud.region` resource attribute of the invoked + // function. + FaaSInvokedRegionKey = attribute.Key("faas.invoked_region") + + // FaaSMaxMemoryKey is the attribute Key conforming to the "faas.max_memory" + // semantic conventions. It represents the amount of memory available to the + // serverless function converted to Bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Note: It's recommended to set this attribute since e.g. too little memory can + // easily stop a Java AWS Lambda function from working correctly. On AWS Lambda, + // the environment variable `AWS_LAMBDA_FUNCTION_MEMORY_SIZE` provides this + // information (which must be multiplied by 1,048,576). + FaaSMaxMemoryKey = attribute.Key("faas.max_memory") + + // FaaSNameKey is the attribute Key conforming to the "faas.name" semantic + // conventions. It represents the name of the single function that this runtime + // instance executes. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-function", "myazurefunctionapp/some-function-name" + // Note: This is the name of the function as configured/deployed on the FaaS + // platform and is usually different from the name of the callback + // function (which may be stored in the + // [`code.namespace`/`code.function.name`] + // span attributes). + // + // For some cloud providers, the above definition is ambiguous. The following + // definition of function name MUST be used for this attribute + // (and consequently the span name) for the listed cloud providers/products: + // + // - **Azure:** The full name `/`, i.e., function app name + // followed by a forward slash followed by the function name (this form + // can also be seen in the resource JSON for the function). + // This means that a span attribute MUST be used, as an Azure function + // app can host multiple functions that would usually share + // a TracerProvider (see also the `cloud.resource_id` attribute). + // + // + // [`code.namespace`/`code.function.name`]: /docs/general/attributes.md#source-code-attributes + FaaSNameKey = attribute.Key("faas.name") + + // FaaSTimeKey is the attribute Key conforming to the "faas.time" semantic + // conventions. It represents a string containing the function invocation time + // in the [ISO 8601] format expressed in [UTC]. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 2020-01-23T13:47:06Z + // + // [ISO 8601]: https://www.iso.org/iso-8601-date-and-time-format.html + // [UTC]: https://www.w3.org/TR/NOTE-datetime + FaaSTimeKey = attribute.Key("faas.time") + + // FaaSTriggerKey is the attribute Key conforming to the "faas.trigger" semantic + // conventions. It represents the type of the trigger which caused this function + // invocation. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + FaaSTriggerKey = attribute.Key("faas.trigger") + + // FaaSVersionKey is the attribute Key conforming to the "faas.version" semantic + // conventions. It represents the immutable version of the function being + // executed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "26", "pinkfroid-00002" + // Note: Depending on the cloud provider and platform, use: + // + // - **AWS Lambda:** The [function version] + // (an integer represented as a decimal string). + // - **Google Cloud Run (Services):** The [revision] + // (i.e., the function name plus the revision suffix). + // - **Google Cloud Functions:** The value of the + // [`K_REVISION` environment variable]. + // - **Azure Functions:** Not applicable. Do not set this attribute. + // + // + // [function version]: https://docs.aws.amazon.com/lambda/latest/dg/configuration-versions.html + // [revision]: https://cloud.google.com/run/docs/managing/revisions + // [`K_REVISION` environment variable]: https://cloud.google.com/functions/docs/env-var#runtime_environment_variables_set_automatically + FaaSVersionKey = attribute.Key("faas.version") +) + +// FaaSColdstart returns an attribute KeyValue conforming to the "faas.coldstart" +// semantic conventions. It represents a boolean that is true if the serverless +// function is executed for the first time (aka cold-start). +func FaaSColdstart(val bool) attribute.KeyValue { + return FaaSColdstartKey.Bool(val) +} + +// FaaSCron returns an attribute KeyValue conforming to the "faas.cron" semantic +// conventions. It represents a string containing the schedule period as +// [Cron Expression]. +// +// [Cron Expression]: https://docs.oracle.com/cd/E12058_01/doc/doc.1014/e12030/cron_expressions.htm +func FaaSCron(val string) attribute.KeyValue { + return FaaSCronKey.String(val) +} + +// FaaSDocumentCollection returns an attribute KeyValue conforming to the +// "faas.document.collection" semantic conventions. It represents the name of the +// source on which the triggering operation was performed. For example, in Cloud +// Storage or S3 corresponds to the bucket name, and in Cosmos DB to the database +// name. +func FaaSDocumentCollection(val string) attribute.KeyValue { + return FaaSDocumentCollectionKey.String(val) +} + +// FaaSDocumentName returns an attribute KeyValue conforming to the +// "faas.document.name" semantic conventions. It represents the document +// name/table subjected to the operation. For example, in Cloud Storage or S3 is +// the name of the file, and in Cosmos DB the table name. +func FaaSDocumentName(val string) attribute.KeyValue { + return FaaSDocumentNameKey.String(val) +} + +// FaaSDocumentTime returns an attribute KeyValue conforming to the +// "faas.document.time" semantic conventions. It represents a string containing +// the time when the data was accessed in the [ISO 8601] format expressed in +// [UTC]. 
+// +// [ISO 8601]: https://www.iso.org/iso-8601-date-and-time-format.html +// [UTC]: https://www.w3.org/TR/NOTE-datetime +func FaaSDocumentTime(val string) attribute.KeyValue { + return FaaSDocumentTimeKey.String(val) +} + +// FaaSInstance returns an attribute KeyValue conforming to the "faas.instance" +// semantic conventions. It represents the execution environment ID as a string, +// that will be potentially reused for other invocations to the same +// function/function version. +func FaaSInstance(val string) attribute.KeyValue { + return FaaSInstanceKey.String(val) +} + +// FaaSInvocationID returns an attribute KeyValue conforming to the +// "faas.invocation_id" semantic conventions. It represents the invocation ID of +// the current function invocation. +func FaaSInvocationID(val string) attribute.KeyValue { + return FaaSInvocationIDKey.String(val) +} + +// FaaSInvokedName returns an attribute KeyValue conforming to the +// "faas.invoked_name" semantic conventions. It represents the name of the +// invoked function. +func FaaSInvokedName(val string) attribute.KeyValue { + return FaaSInvokedNameKey.String(val) +} + +// FaaSInvokedRegion returns an attribute KeyValue conforming to the +// "faas.invoked_region" semantic conventions. It represents the cloud region of +// the invoked function. +func FaaSInvokedRegion(val string) attribute.KeyValue { + return FaaSInvokedRegionKey.String(val) +} + +// FaaSMaxMemory returns an attribute KeyValue conforming to the +// "faas.max_memory" semantic conventions. It represents the amount of memory +// available to the serverless function converted to Bytes. +func FaaSMaxMemory(val int) attribute.KeyValue { + return FaaSMaxMemoryKey.Int(val) +} + +// FaaSName returns an attribute KeyValue conforming to the "faas.name" semantic +// conventions. It represents the name of the single function that this runtime +// instance executes. +func FaaSName(val string) attribute.KeyValue { + return FaaSNameKey.String(val) +} + +// FaaSTime returns an attribute KeyValue conforming to the "faas.time" semantic +// conventions. It represents a string containing the function invocation time in +// the [ISO 8601] format expressed in [UTC]. +// +// [ISO 8601]: https://www.iso.org/iso-8601-date-and-time-format.html +// [UTC]: https://www.w3.org/TR/NOTE-datetime +func FaaSTime(val string) attribute.KeyValue { + return FaaSTimeKey.String(val) +} + +// FaaSVersion returns an attribute KeyValue conforming to the "faas.version" +// semantic conventions. It represents the immutable version of the function +// being executed. +func FaaSVersion(val string) attribute.KeyValue { + return FaaSVersionKey.String(val) +} + +// Enum values for faas.document.operation +var ( + // When a new object is created. + // Stability: development + FaaSDocumentOperationInsert = FaaSDocumentOperationKey.String("insert") + // When an object is modified. + // Stability: development + FaaSDocumentOperationEdit = FaaSDocumentOperationKey.String("edit") + // When an object is deleted. 
+ // Stability: development + FaaSDocumentOperationDelete = FaaSDocumentOperationKey.String("delete") +) + +// Enum values for faas.invoked_provider +var ( + // Alibaba Cloud + // Stability: development + FaaSInvokedProviderAlibabaCloud = FaaSInvokedProviderKey.String("alibaba_cloud") + // Amazon Web Services + // Stability: development + FaaSInvokedProviderAWS = FaaSInvokedProviderKey.String("aws") + // Microsoft Azure + // Stability: development + FaaSInvokedProviderAzure = FaaSInvokedProviderKey.String("azure") + // Google Cloud Platform + // Stability: development + FaaSInvokedProviderGCP = FaaSInvokedProviderKey.String("gcp") + // Tencent Cloud + // Stability: development + FaaSInvokedProviderTencentCloud = FaaSInvokedProviderKey.String("tencent_cloud") +) + +// Enum values for faas.trigger +var ( + // A response to some data source operation such as a database or filesystem + // read/write + // Stability: development + FaaSTriggerDatasource = FaaSTriggerKey.String("datasource") + // To provide an answer to an inbound HTTP request + // Stability: development + FaaSTriggerHTTP = FaaSTriggerKey.String("http") + // A function is set to be executed when messages are sent to a messaging system + // Stability: development + FaaSTriggerPubSub = FaaSTriggerKey.String("pubsub") + // A function is scheduled to be executed regularly + // Stability: development + FaaSTriggerTimer = FaaSTriggerKey.String("timer") + // If none of the others apply + // Stability: development + FaaSTriggerOther = FaaSTriggerKey.String("other") +) + +// Namespace: feature_flag +const ( + // FeatureFlagContextIDKey is the attribute Key conforming to the + // "feature_flag.context.id" semantic conventions. It represents the unique + // identifier for the flag evaluation context. For example, the targeting key. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "5157782b-2203-4c80-a857-dbbd5e7761db" + FeatureFlagContextIDKey = attribute.Key("feature_flag.context.id") + + // FeatureFlagKeyKey is the attribute Key conforming to the "feature_flag.key" + // semantic conventions. It represents the lookup key of the feature flag. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "logo-color" + FeatureFlagKeyKey = attribute.Key("feature_flag.key") + + // FeatureFlagProviderNameKey is the attribute Key conforming to the + // "feature_flag.provider.name" semantic conventions. It represents the + // identifies the feature flag provider. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Flag Manager" + FeatureFlagProviderNameKey = attribute.Key("feature_flag.provider.name") + + // FeatureFlagResultReasonKey is the attribute Key conforming to the + // "feature_flag.result.reason" semantic conventions. It represents the reason + // code which shows how a feature flag value was determined. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "static", "targeting_match", "error", "default" + FeatureFlagResultReasonKey = attribute.Key("feature_flag.result.reason") + + // FeatureFlagResultValueKey is the attribute Key conforming to the + // "feature_flag.result.value" semantic conventions. It represents the evaluated + // value of the feature flag. 
+ // + // Type: any + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "#ff0000", true, 3 + // Note: With some feature flag providers, feature flag results can be quite + // large or contain private or sensitive details. + // Because of this, `feature_flag.result.variant` is often the preferred + // attribute if it is available. + // + // It may be desirable to redact or otherwise limit the size and scope of + // `feature_flag.result.value` if possible. + // Because the evaluated flag value is unstructured and may be any type, it is + // left to the instrumentation author to determine how best to achieve this. + FeatureFlagResultValueKey = attribute.Key("feature_flag.result.value") + + // FeatureFlagResultVariantKey is the attribute Key conforming to the + // "feature_flag.result.variant" semantic conventions. It represents a semantic + // identifier for an evaluated flag value. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "red", "true", "on" + // Note: A semantic identifier, commonly referred to as a variant, provides a + // means + // for referring to a value without including the value itself. This can + // provide additional context for understanding the meaning behind a value. + // For example, the variant `red` maybe be used for the value `#c05543`. + FeatureFlagResultVariantKey = attribute.Key("feature_flag.result.variant") + + // FeatureFlagSetIDKey is the attribute Key conforming to the + // "feature_flag.set.id" semantic conventions. It represents the identifier of + // the [flag set] to which the feature flag belongs. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "proj-1", "ab98sgs", "service1/dev" + // + // [flag set]: https://openfeature.dev/specification/glossary/#flag-set + FeatureFlagSetIDKey = attribute.Key("feature_flag.set.id") + + // FeatureFlagVersionKey is the attribute Key conforming to the + // "feature_flag.version" semantic conventions. It represents the version of the + // ruleset used during the evaluation. This may be any stable value which + // uniquely identifies the ruleset. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1", "01ABCDEF" + FeatureFlagVersionKey = attribute.Key("feature_flag.version") +) + +// FeatureFlagContextID returns an attribute KeyValue conforming to the +// "feature_flag.context.id" semantic conventions. It represents the unique +// identifier for the flag evaluation context. For example, the targeting key. +func FeatureFlagContextID(val string) attribute.KeyValue { + return FeatureFlagContextIDKey.String(val) +} + +// FeatureFlagKey returns an attribute KeyValue conforming to the +// "feature_flag.key" semantic conventions. It represents the lookup key of the +// feature flag. +func FeatureFlagKey(val string) attribute.KeyValue { + return FeatureFlagKeyKey.String(val) +} + +// FeatureFlagProviderName returns an attribute KeyValue conforming to the +// "feature_flag.provider.name" semantic conventions. It represents the +// identifies the feature flag provider. +func FeatureFlagProviderName(val string) attribute.KeyValue { + return FeatureFlagProviderNameKey.String(val) +} + +// FeatureFlagResultVariant returns an attribute KeyValue conforming to the +// "feature_flag.result.variant" semantic conventions. It represents a semantic +// identifier for an evaluated flag value. 
+func FeatureFlagResultVariant(val string) attribute.KeyValue { + return FeatureFlagResultVariantKey.String(val) +} + +// FeatureFlagSetID returns an attribute KeyValue conforming to the +// "feature_flag.set.id" semantic conventions. It represents the identifier of +// the [flag set] to which the feature flag belongs. +// +// [flag set]: https://openfeature.dev/specification/glossary/#flag-set +func FeatureFlagSetID(val string) attribute.KeyValue { + return FeatureFlagSetIDKey.String(val) +} + +// FeatureFlagVersion returns an attribute KeyValue conforming to the +// "feature_flag.version" semantic conventions. It represents the version of the +// ruleset used during the evaluation. This may be any stable value which +// uniquely identifies the ruleset. +func FeatureFlagVersion(val string) attribute.KeyValue { + return FeatureFlagVersionKey.String(val) +} + +// Enum values for feature_flag.result.reason +var ( + // The resolved value is static (no dynamic evaluation). + // Stability: development + FeatureFlagResultReasonStatic = FeatureFlagResultReasonKey.String("static") + // The resolved value fell back to a pre-configured value (no dynamic evaluation + // occurred or dynamic evaluation yielded no result). + // Stability: development + FeatureFlagResultReasonDefault = FeatureFlagResultReasonKey.String("default") + // The resolved value was the result of a dynamic evaluation, such as a rule or + // specific user-targeting. + // Stability: development + FeatureFlagResultReasonTargetingMatch = FeatureFlagResultReasonKey.String("targeting_match") + // The resolved value was the result of pseudorandom assignment. + // Stability: development + FeatureFlagResultReasonSplit = FeatureFlagResultReasonKey.String("split") + // The resolved value was retrieved from cache. + // Stability: development + FeatureFlagResultReasonCached = FeatureFlagResultReasonKey.String("cached") + // The resolved value was the result of the flag being disabled in the + // management system. + // Stability: development + FeatureFlagResultReasonDisabled = FeatureFlagResultReasonKey.String("disabled") + // The reason for the resolved value could not be determined. + // Stability: development + FeatureFlagResultReasonUnknown = FeatureFlagResultReasonKey.String("unknown") + // The resolved value is non-authoritative or possibly out of date + // Stability: development + FeatureFlagResultReasonStale = FeatureFlagResultReasonKey.String("stale") + // The resolved value was the result of an error. + // Stability: development + FeatureFlagResultReasonError = FeatureFlagResultReasonKey.String("error") +) + +// Namespace: file +const ( + // FileAccessedKey is the attribute Key conforming to the "file.accessed" + // semantic conventions. It represents the time when the file was last accessed, + // in ISO 8601 format. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T12:00:00Z" + // Note: This attribute might not be supported by some file systems — NFS, + // FAT32, in embedded OS, etc. + FileAccessedKey = attribute.Key("file.accessed") + + // FileAttributesKey is the attribute Key conforming to the "file.attributes" + // semantic conventions. It represents the array of file attributes. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "readonly", "hidden" + // Note: Attributes names depend on the OS or file system. 
Here’s a + // non-exhaustive list of values expected for this attribute: `archive`, + // `compressed`, `directory`, `encrypted`, `execute`, `hidden`, `immutable`, + // `journaled`, `read`, `readonly`, `symbolic link`, `system`, `temporary`, + // `write`. + FileAttributesKey = attribute.Key("file.attributes") + + // FileChangedKey is the attribute Key conforming to the "file.changed" semantic + // conventions. It represents the time when the file attributes or metadata was + // last changed, in ISO 8601 format. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T12:00:00Z" + // Note: `file.changed` captures the time when any of the file's properties or + // attributes (including the content) are changed, while `file.modified` + // captures the timestamp when the file content is modified. + FileChangedKey = attribute.Key("file.changed") + + // FileCreatedKey is the attribute Key conforming to the "file.created" semantic + // conventions. It represents the time when the file was created, in ISO 8601 + // format. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T12:00:00Z" + // Note: This attribute might not be supported by some file systems — NFS, + // FAT32, in embedded OS, etc. + FileCreatedKey = attribute.Key("file.created") + + // FileDirectoryKey is the attribute Key conforming to the "file.directory" + // semantic conventions. It represents the directory where the file is located. + // It should include the drive letter, when appropriate. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/home/user", "C:\Program Files\MyApp" + FileDirectoryKey = attribute.Key("file.directory") + + // FileExtensionKey is the attribute Key conforming to the "file.extension" + // semantic conventions. It represents the file extension, excluding the leading + // dot. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "png", "gz" + // Note: When the file name has multiple extensions (example.tar.gz), only the + // last one should be captured ("gz", not "tar.gz"). + FileExtensionKey = attribute.Key("file.extension") + + // FileForkNameKey is the attribute Key conforming to the "file.fork_name" + // semantic conventions. It represents the name of the fork. A fork is + // additional data associated with a filesystem object. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Zone.Identifer" + // Note: On Linux, a resource fork is used to store additional data with a + // filesystem object. A file always has at least one fork for the data portion, + // and additional forks may exist. + // On NTFS, this is analogous to an Alternate Data Stream (ADS), and the default + // data stream for a file is just called $DATA. Zone.Identifier is commonly used + // by Windows to track contents downloaded from the Internet. An ADS is + // typically of the form: C:\path\to\filename.extension:some_fork_name, and + // some_fork_name is the value that should populate `fork_name`. + // `filename.extension` should populate `file.name`, and `extension` should + // populate `file.extension`. The full path, `file.path`, will include the fork + // name. + FileForkNameKey = attribute.Key("file.fork_name") + + // FileGroupIDKey is the attribute Key conforming to the "file.group.id" + // semantic conventions. 
It represents the primary Group ID (GID) of the file. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1000" + FileGroupIDKey = attribute.Key("file.group.id") + + // FileGroupNameKey is the attribute Key conforming to the "file.group.name" + // semantic conventions. It represents the primary group name of the file. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "users" + FileGroupNameKey = attribute.Key("file.group.name") + + // FileInodeKey is the attribute Key conforming to the "file.inode" semantic + // conventions. It represents the inode representing the file in the filesystem. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "256383" + FileInodeKey = attribute.Key("file.inode") + + // FileModeKey is the attribute Key conforming to the "file.mode" semantic + // conventions. It represents the mode of the file in octal representation. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0640" + FileModeKey = attribute.Key("file.mode") + + // FileModifiedKey is the attribute Key conforming to the "file.modified" + // semantic conventions. It represents the time when the file content was last + // modified, in ISO 8601 format. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T12:00:00Z" + FileModifiedKey = attribute.Key("file.modified") + + // FileNameKey is the attribute Key conforming to the "file.name" semantic + // conventions. It represents the name of the file including the extension, + // without the directory. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "example.png" + FileNameKey = attribute.Key("file.name") + + // FileOwnerIDKey is the attribute Key conforming to the "file.owner.id" + // semantic conventions. It represents the user ID (UID) or security identifier + // (SID) of the file owner. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1000" + FileOwnerIDKey = attribute.Key("file.owner.id") + + // FileOwnerNameKey is the attribute Key conforming to the "file.owner.name" + // semantic conventions. It represents the username of the file owner. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "root" + FileOwnerNameKey = attribute.Key("file.owner.name") + + // FilePathKey is the attribute Key conforming to the "file.path" semantic + // conventions. It represents the full path to the file, including the file + // name. It should include the drive letter, when appropriate. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/home/alice/example.png", "C:\Program Files\MyApp\myapp.exe" + FilePathKey = attribute.Key("file.path") + + // FileSizeKey is the attribute Key conforming to the "file.size" semantic + // conventions. It represents the file size in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + FileSizeKey = attribute.Key("file.size") + + // FileSymbolicLinkTargetPathKey is the attribute Key conforming to the + // "file.symbolic_link.target_path" semantic conventions. It represents the path + // to the target of a symbolic link. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/usr/bin/python3" + // Note: This attribute is only applicable to symbolic links. + FileSymbolicLinkTargetPathKey = attribute.Key("file.symbolic_link.target_path") +) + +// FileAccessed returns an attribute KeyValue conforming to the "file.accessed" +// semantic conventions. It represents the time when the file was last accessed, +// in ISO 8601 format. +func FileAccessed(val string) attribute.KeyValue { + return FileAccessedKey.String(val) +} + +// FileAttributes returns an attribute KeyValue conforming to the +// "file.attributes" semantic conventions. It represents the array of file +// attributes. +func FileAttributes(val ...string) attribute.KeyValue { + return FileAttributesKey.StringSlice(val) +} + +// FileChanged returns an attribute KeyValue conforming to the "file.changed" +// semantic conventions. It represents the time when the file attributes or +// metadata was last changed, in ISO 8601 format. +func FileChanged(val string) attribute.KeyValue { + return FileChangedKey.String(val) +} + +// FileCreated returns an attribute KeyValue conforming to the "file.created" +// semantic conventions. It represents the time when the file was created, in ISO +// 8601 format. +func FileCreated(val string) attribute.KeyValue { + return FileCreatedKey.String(val) +} + +// FileDirectory returns an attribute KeyValue conforming to the "file.directory" +// semantic conventions. It represents the directory where the file is located. +// It should include the drive letter, when appropriate. +func FileDirectory(val string) attribute.KeyValue { + return FileDirectoryKey.String(val) +} + +// FileExtension returns an attribute KeyValue conforming to the "file.extension" +// semantic conventions. It represents the file extension, excluding the leading +// dot. +func FileExtension(val string) attribute.KeyValue { + return FileExtensionKey.String(val) +} + +// FileForkName returns an attribute KeyValue conforming to the "file.fork_name" +// semantic conventions. It represents the name of the fork. A fork is additional +// data associated with a filesystem object. +func FileForkName(val string) attribute.KeyValue { + return FileForkNameKey.String(val) +} + +// FileGroupID returns an attribute KeyValue conforming to the "file.group.id" +// semantic conventions. It represents the primary Group ID (GID) of the file. +func FileGroupID(val string) attribute.KeyValue { + return FileGroupIDKey.String(val) +} + +// FileGroupName returns an attribute KeyValue conforming to the +// "file.group.name" semantic conventions. It represents the primary group name +// of the file. +func FileGroupName(val string) attribute.KeyValue { + return FileGroupNameKey.String(val) +} + +// FileInode returns an attribute KeyValue conforming to the "file.inode" +// semantic conventions. It represents the inode representing the file in the +// filesystem. +func FileInode(val string) attribute.KeyValue { + return FileInodeKey.String(val) +} + +// FileMode returns an attribute KeyValue conforming to the "file.mode" semantic +// conventions. It represents the mode of the file in octal representation. +func FileMode(val string) attribute.KeyValue { + return FileModeKey.String(val) +} + +// FileModified returns an attribute KeyValue conforming to the "file.modified" +// semantic conventions. It represents the time when the file content was last +// modified, in ISO 8601 format. 
+func FileModified(val string) attribute.KeyValue {
+ return FileModifiedKey.String(val)
+}
+
+// FileName returns an attribute KeyValue conforming to the "file.name" semantic
+// conventions. It represents the name of the file including the extension,
+// without the directory.
+func FileName(val string) attribute.KeyValue {
+ return FileNameKey.String(val)
+}
+
+// FileOwnerID returns an attribute KeyValue conforming to the "file.owner.id"
+// semantic conventions. It represents the user ID (UID) or security identifier
+// (SID) of the file owner.
+func FileOwnerID(val string) attribute.KeyValue {
+ return FileOwnerIDKey.String(val)
+}
+
+// FileOwnerName returns an attribute KeyValue conforming to the
+// "file.owner.name" semantic conventions. It represents the username of the file
+// owner.
+func FileOwnerName(val string) attribute.KeyValue {
+ return FileOwnerNameKey.String(val)
+}
+
+// FilePath returns an attribute KeyValue conforming to the "file.path" semantic
+// conventions. It represents the full path to the file, including the file name.
+// It should include the drive letter, when appropriate.
+func FilePath(val string) attribute.KeyValue {
+ return FilePathKey.String(val)
+}
+
+// FileSize returns an attribute KeyValue conforming to the "file.size" semantic
+// conventions. It represents the file size in bytes.
+func FileSize(val int) attribute.KeyValue {
+ return FileSizeKey.Int(val)
+}
+
+// FileSymbolicLinkTargetPath returns an attribute KeyValue conforming to the
+// "file.symbolic_link.target_path" semantic conventions. It represents the path
+// to the target of a symbolic link.
+func FileSymbolicLinkTargetPath(val string) attribute.KeyValue {
+ return FileSymbolicLinkTargetPathKey.String(val)
+}
+
+// Namespace: gcp
+const (
+ // GCPAppHubApplicationContainerKey is the attribute Key conforming to the
+ // "gcp.apphub.application.container" semantic conventions. It represents the
+ // container within GCP where the AppHub application is defined.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "projects/my-container-project"
+ GCPAppHubApplicationContainerKey = attribute.Key("gcp.apphub.application.container")
+
+ // GCPAppHubApplicationIDKey is the attribute Key conforming to the
+ // "gcp.apphub.application.id" semantic conventions. It represents the name of
+ // the application as configured in AppHub.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "my-application"
+ GCPAppHubApplicationIDKey = attribute.Key("gcp.apphub.application.id")
+
+ // GCPAppHubApplicationLocationKey is the attribute Key conforming to the
+ // "gcp.apphub.application.location" semantic conventions. It represents the GCP
+ // zone or region where the application is defined.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "us-central1"
+ GCPAppHubApplicationLocationKey = attribute.Key("gcp.apphub.application.location")
+
+ // GCPAppHubServiceCriticalityTypeKey is the attribute Key conforming to the
+ // "gcp.apphub.service.criticality_type" semantic conventions. It represents the
+ // criticality of a service, which indicates its importance to the business.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ // Note: [See AppHub type enum]
+ //
+ // [See AppHub type enum]: https://cloud.google.com/app-hub/docs/reference/rest/v1/Attributes#type
+ GCPAppHubServiceCriticalityTypeKey = attribute.Key("gcp.apphub.service.criticality_type")
+
+ // GCPAppHubServiceEnvironmentTypeKey is the attribute Key conforming to the
+ // "gcp.apphub.service.environment_type" semantic conventions. It represents the
+ // environment of a service, which is the stage of a software lifecycle.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ // Note: [See AppHub environment type]
+ //
+ // [See AppHub environment type]: https://cloud.google.com/app-hub/docs/reference/rest/v1/Attributes#type_1
+ GCPAppHubServiceEnvironmentTypeKey = attribute.Key("gcp.apphub.service.environment_type")
+
+ // GCPAppHubServiceIDKey is the attribute Key conforming to the
+ // "gcp.apphub.service.id" semantic conventions. It represents the name of the
+ // service as configured in AppHub.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "my-service"
+ GCPAppHubServiceIDKey = attribute.Key("gcp.apphub.service.id")
+
+ // GCPAppHubWorkloadCriticalityTypeKey is the attribute Key conforming to the
+ // "gcp.apphub.workload.criticality_type" semantic conventions. It represents
+ // the criticality of a workload, which indicates its importance to the business.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ // Note: [See AppHub type enum]
+ //
+ // [See AppHub type enum]: https://cloud.google.com/app-hub/docs/reference/rest/v1/Attributes#type
+ GCPAppHubWorkloadCriticalityTypeKey = attribute.Key("gcp.apphub.workload.criticality_type")
+
+ // GCPAppHubWorkloadEnvironmentTypeKey is the attribute Key conforming to the
+ // "gcp.apphub.workload.environment_type" semantic conventions. It represents
+ // the environment of a workload, which is the stage of a software lifecycle.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ // Note: [See AppHub environment type]
+ //
+ // [See AppHub environment type]: https://cloud.google.com/app-hub/docs/reference/rest/v1/Attributes#type_1
+ GCPAppHubWorkloadEnvironmentTypeKey = attribute.Key("gcp.apphub.workload.environment_type")
+
+ // GCPAppHubWorkloadIDKey is the attribute Key conforming to the
+ // "gcp.apphub.workload.id" semantic conventions. It represents the name of the
+ // workload as configured in AppHub.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "my-workload"
+ GCPAppHubWorkloadIDKey = attribute.Key("gcp.apphub.workload.id")
+
+ // GCPClientServiceKey is the attribute Key conforming to the
+ // "gcp.client.service" semantic conventions. It identifies the
+ // Google Cloud service for which the official client library is intended.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "appengine", "run", "firestore", "alloydb", "spanner"
+ // Note: Intended to be a stable identifier for Google Cloud client libraries
+ // that is uniform across implementation languages. The value should be derived
+ // from the canonical service domain for the service; for example,
+ // 'foo.googleapis.com' should result in a value of 'foo'.
+ GCPClientServiceKey = attribute.Key("gcp.client.service") + + // GCPCloudRunJobExecutionKey is the attribute Key conforming to the + // "gcp.cloud_run.job.execution" semantic conventions. It represents the name of + // the Cloud Run [execution] being run for the Job, as set by the + // [`CLOUD_RUN_EXECUTION`] environment variable. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "job-name-xxxx", "sample-job-mdw84" + // + // [execution]: https://cloud.google.com/run/docs/managing/job-executions + // [`CLOUD_RUN_EXECUTION`]: https://cloud.google.com/run/docs/container-contract#jobs-env-vars + GCPCloudRunJobExecutionKey = attribute.Key("gcp.cloud_run.job.execution") + + // GCPCloudRunJobTaskIndexKey is the attribute Key conforming to the + // "gcp.cloud_run.job.task_index" semantic conventions. It represents the index + // for a task within an execution as provided by the [`CLOUD_RUN_TASK_INDEX`] + // environment variable. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 0, 1 + // + // [`CLOUD_RUN_TASK_INDEX`]: https://cloud.google.com/run/docs/container-contract#jobs-env-vars + GCPCloudRunJobTaskIndexKey = attribute.Key("gcp.cloud_run.job.task_index") + + // GCPGCEInstanceHostnameKey is the attribute Key conforming to the + // "gcp.gce.instance.hostname" semantic conventions. It represents the hostname + // of a GCE instance. This is the full value of the default or [custom hostname] + // . + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-host1234.example.com", + // "sample-vm.us-west1-b.c.my-project.internal" + // + // [custom hostname]: https://cloud.google.com/compute/docs/instances/custom-hostname-vm + GCPGCEInstanceHostnameKey = attribute.Key("gcp.gce.instance.hostname") + + // GCPGCEInstanceNameKey is the attribute Key conforming to the + // "gcp.gce.instance.name" semantic conventions. It represents the instance name + // of a GCE instance. This is the value provided by `host.name`, the visible + // name of the instance in the Cloud Console UI, and the prefix for the default + // hostname of the instance as defined by the [default internal DNS name]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "instance-1", "my-vm-name" + // + // [default internal DNS name]: https://cloud.google.com/compute/docs/internal-dns#instance-fully-qualified-domain-names + GCPGCEInstanceNameKey = attribute.Key("gcp.gce.instance.name") +) + +// GCPAppHubApplicationContainer returns an attribute KeyValue conforming to the +// "gcp.apphub.application.container" semantic conventions. It represents the +// container within GCP where the AppHub application is defined. +func GCPAppHubApplicationContainer(val string) attribute.KeyValue { + return GCPAppHubApplicationContainerKey.String(val) +} + +// GCPAppHubApplicationID returns an attribute KeyValue conforming to the +// "gcp.apphub.application.id" semantic conventions. It represents the name of +// the application as configured in AppHub. +func GCPAppHubApplicationID(val string) attribute.KeyValue { + return GCPAppHubApplicationIDKey.String(val) +} + +// GCPAppHubApplicationLocation returns an attribute KeyValue conforming to the +// "gcp.apphub.application.location" semantic conventions. It represents the GCP +// zone or region where the application is defined. 
+func GCPAppHubApplicationLocation(val string) attribute.KeyValue { + return GCPAppHubApplicationLocationKey.String(val) +} + +// GCPAppHubServiceID returns an attribute KeyValue conforming to the +// "gcp.apphub.service.id" semantic conventions. It represents the name of the +// service as configured in AppHub. +func GCPAppHubServiceID(val string) attribute.KeyValue { + return GCPAppHubServiceIDKey.String(val) +} + +// GCPAppHubWorkloadID returns an attribute KeyValue conforming to the +// "gcp.apphub.workload.id" semantic conventions. It represents the name of the +// workload as configured in AppHub. +func GCPAppHubWorkloadID(val string) attribute.KeyValue { + return GCPAppHubWorkloadIDKey.String(val) +} + +// GCPClientService returns an attribute KeyValue conforming to the +// "gcp.client.service" semantic conventions. It represents the identifies the +// Google Cloud service for which the official client library is intended. +func GCPClientService(val string) attribute.KeyValue { + return GCPClientServiceKey.String(val) +} + +// GCPCloudRunJobExecution returns an attribute KeyValue conforming to the +// "gcp.cloud_run.job.execution" semantic conventions. It represents the name of +// the Cloud Run [execution] being run for the Job, as set by the +// [`CLOUD_RUN_EXECUTION`] environment variable. +// +// [execution]: https://cloud.google.com/run/docs/managing/job-executions +// [`CLOUD_RUN_EXECUTION`]: https://cloud.google.com/run/docs/container-contract#jobs-env-vars +func GCPCloudRunJobExecution(val string) attribute.KeyValue { + return GCPCloudRunJobExecutionKey.String(val) +} + +// GCPCloudRunJobTaskIndex returns an attribute KeyValue conforming to the +// "gcp.cloud_run.job.task_index" semantic conventions. It represents the index +// for a task within an execution as provided by the [`CLOUD_RUN_TASK_INDEX`] +// environment variable. +// +// [`CLOUD_RUN_TASK_INDEX`]: https://cloud.google.com/run/docs/container-contract#jobs-env-vars +func GCPCloudRunJobTaskIndex(val int) attribute.KeyValue { + return GCPCloudRunJobTaskIndexKey.Int(val) +} + +// GCPGCEInstanceHostname returns an attribute KeyValue conforming to the +// "gcp.gce.instance.hostname" semantic conventions. It represents the hostname +// of a GCE instance. This is the full value of the default or [custom hostname] +// . +// +// [custom hostname]: https://cloud.google.com/compute/docs/instances/custom-hostname-vm +func GCPGCEInstanceHostname(val string) attribute.KeyValue { + return GCPGCEInstanceHostnameKey.String(val) +} + +// GCPGCEInstanceName returns an attribute KeyValue conforming to the +// "gcp.gce.instance.name" semantic conventions. It represents the instance name +// of a GCE instance. This is the value provided by `host.name`, the visible name +// of the instance in the Cloud Console UI, and the prefix for the default +// hostname of the instance as defined by the [default internal DNS name]. +// +// [default internal DNS name]: https://cloud.google.com/compute/docs/internal-dns#instance-fully-qualified-domain-names +func GCPGCEInstanceName(val string) attribute.KeyValue { + return GCPGCEInstanceNameKey.String(val) +} + +// Enum values for gcp.apphub.service.criticality_type +var ( + // Mission critical service. + // Stability: development + GCPAppHubServiceCriticalityTypeMissionCritical = GCPAppHubServiceCriticalityTypeKey.String("MISSION_CRITICAL") + // High impact. + // Stability: development + GCPAppHubServiceCriticalityTypeHigh = GCPAppHubServiceCriticalityTypeKey.String("HIGH") + // Medium impact. 
+ // Stability: development + GCPAppHubServiceCriticalityTypeMedium = GCPAppHubServiceCriticalityTypeKey.String("MEDIUM") + // Low impact. + // Stability: development + GCPAppHubServiceCriticalityTypeLow = GCPAppHubServiceCriticalityTypeKey.String("LOW") +) + +// Enum values for gcp.apphub.service.environment_type +var ( + // Production environment. + // Stability: development + GCPAppHubServiceEnvironmentTypeProduction = GCPAppHubServiceEnvironmentTypeKey.String("PRODUCTION") + // Staging environment. + // Stability: development + GCPAppHubServiceEnvironmentTypeStaging = GCPAppHubServiceEnvironmentTypeKey.String("STAGING") + // Test environment. + // Stability: development + GCPAppHubServiceEnvironmentTypeTest = GCPAppHubServiceEnvironmentTypeKey.String("TEST") + // Development environment. + // Stability: development + GCPAppHubServiceEnvironmentTypeDevelopment = GCPAppHubServiceEnvironmentTypeKey.String("DEVELOPMENT") +) + +// Enum values for gcp.apphub.workload.criticality_type +var ( + // Mission critical service. + // Stability: development + GCPAppHubWorkloadCriticalityTypeMissionCritical = GCPAppHubWorkloadCriticalityTypeKey.String("MISSION_CRITICAL") + // High impact. + // Stability: development + GCPAppHubWorkloadCriticalityTypeHigh = GCPAppHubWorkloadCriticalityTypeKey.String("HIGH") + // Medium impact. + // Stability: development + GCPAppHubWorkloadCriticalityTypeMedium = GCPAppHubWorkloadCriticalityTypeKey.String("MEDIUM") + // Low impact. + // Stability: development + GCPAppHubWorkloadCriticalityTypeLow = GCPAppHubWorkloadCriticalityTypeKey.String("LOW") +) + +// Enum values for gcp.apphub.workload.environment_type +var ( + // Production environment. + // Stability: development + GCPAppHubWorkloadEnvironmentTypeProduction = GCPAppHubWorkloadEnvironmentTypeKey.String("PRODUCTION") + // Staging environment. + // Stability: development + GCPAppHubWorkloadEnvironmentTypeStaging = GCPAppHubWorkloadEnvironmentTypeKey.String("STAGING") + // Test environment. + // Stability: development + GCPAppHubWorkloadEnvironmentTypeTest = GCPAppHubWorkloadEnvironmentTypeKey.String("TEST") + // Development environment. + // Stability: development + GCPAppHubWorkloadEnvironmentTypeDevelopment = GCPAppHubWorkloadEnvironmentTypeKey.String("DEVELOPMENT") +) + +// Namespace: gen_ai +const ( + // GenAIAgentDescriptionKey is the attribute Key conforming to the + // "gen_ai.agent.description" semantic conventions. It represents the free-form + // description of the GenAI agent provided by the application. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Helps with math problems", "Generates fiction stories" + GenAIAgentDescriptionKey = attribute.Key("gen_ai.agent.description") + + // GenAIAgentIDKey is the attribute Key conforming to the "gen_ai.agent.id" + // semantic conventions. It represents the unique identifier of the GenAI agent. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "asst_5j66UpCpwteGg4YSxUnt7lPY" + GenAIAgentIDKey = attribute.Key("gen_ai.agent.id") + + // GenAIAgentNameKey is the attribute Key conforming to the "gen_ai.agent.name" + // semantic conventions. It represents the human-readable name of the GenAI + // agent provided by the application. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Math Tutor", "Fiction Writer" + GenAIAgentNameKey = attribute.Key("gen_ai.agent.name") + + // GenAIConversationIDKey is the attribute Key conforming to the + // "gen_ai.conversation.id" semantic conventions. It represents the unique + // identifier for a conversation (session, thread), used to store and correlate + // messages within this conversation. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "conv_5j66UpCpwteGg4YSxUnt7lPY" + GenAIConversationIDKey = attribute.Key("gen_ai.conversation.id") + + // GenAIDataSourceIDKey is the attribute Key conforming to the + // "gen_ai.data_source.id" semantic conventions. It represents the data source + // identifier. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "H7STPQYOND" + // Note: Data sources are used by AI agents and RAG applications to store + // grounding data. A data source may be an external database, object store, + // document collection, website, or any other storage system used by the GenAI + // agent or application. The `gen_ai.data_source.id` SHOULD match the identifier + // used by the GenAI system rather than a name specific to the external storage, + // such as a database or object store. Semantic conventions referencing + // `gen_ai.data_source.id` MAY also leverage additional attributes, such as + // `db.*`, to further identify and describe the data source. + GenAIDataSourceIDKey = attribute.Key("gen_ai.data_source.id") + + // GenAIOpenAIRequestServiceTierKey is the attribute Key conforming to the + // "gen_ai.openai.request.service_tier" semantic conventions. It represents the + // service tier requested. May be a specific tier, default, or auto. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "auto", "default" + GenAIOpenAIRequestServiceTierKey = attribute.Key("gen_ai.openai.request.service_tier") + + // GenAIOpenAIResponseServiceTierKey is the attribute Key conforming to the + // "gen_ai.openai.response.service_tier" semantic conventions. It represents the + // service tier used for the response. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "scale", "default" + GenAIOpenAIResponseServiceTierKey = attribute.Key("gen_ai.openai.response.service_tier") + + // GenAIOpenAIResponseSystemFingerprintKey is the attribute Key conforming to + // the "gen_ai.openai.response.system_fingerprint" semantic conventions. It + // represents a fingerprint to track any eventual change in the Generative AI + // environment. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "fp_44709d6fcb" + GenAIOpenAIResponseSystemFingerprintKey = attribute.Key("gen_ai.openai.response.system_fingerprint") + + // GenAIOperationNameKey is the attribute Key conforming to the + // "gen_ai.operation.name" semantic conventions. It represents the name of the + // operation being performed. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: If one of the predefined values applies, but specific system uses a + // different name it's RECOMMENDED to document it in the semantic conventions + // for specific GenAI system and use system-specific name in the + // instrumentation. 
If a different name is not documented, instrumentation
+ // libraries SHOULD use the applicable predefined value.
+ GenAIOperationNameKey = attribute.Key("gen_ai.operation.name")
+
+ // GenAIOutputTypeKey is the attribute Key conforming to the
+ // "gen_ai.output.type" semantic conventions. It represents the
+ // content type requested by the client.
+ //
+ // Type: Enum
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples:
+ // Note: This attribute SHOULD be used when the client requests output of a
+ // specific type. The model may return zero or more outputs of this type.
+ // This attribute specifies the output modality and not the actual output
+ // format. For example, if an image is requested, the actual output could be a
+ // URL pointing to an image file.
+ // Additional output format details may be recorded in the future in the
+ // `gen_ai.output.{type}.*` attributes.
+ GenAIOutputTypeKey = attribute.Key("gen_ai.output.type")
+
+ // GenAIRequestChoiceCountKey is the attribute Key conforming to the
+ // "gen_ai.request.choice.count" semantic conventions. It represents the target
+ // number of candidate completions to return.
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: 3
+ GenAIRequestChoiceCountKey = attribute.Key("gen_ai.request.choice.count")
+
+ // GenAIRequestEncodingFormatsKey is the attribute Key conforming to the
+ // "gen_ai.request.encoding_formats" semantic conventions. It represents the
+ // encoding formats requested in an embeddings operation, if specified.
+ //
+ // Type: string[]
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: "base64"], ["float", "binary"
+ // Note: In some GenAI systems the encoding formats are called embedding types.
+ // Also, some GenAI systems only accept a single format per request.
+ GenAIRequestEncodingFormatsKey = attribute.Key("gen_ai.request.encoding_formats")
+
+ // GenAIRequestFrequencyPenaltyKey is the attribute Key conforming to the
+ // "gen_ai.request.frequency_penalty" semantic conventions. It represents the
+ // frequency penalty setting for the GenAI request.
+ //
+ // Type: double
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: 0.1
+ GenAIRequestFrequencyPenaltyKey = attribute.Key("gen_ai.request.frequency_penalty")
+
+ // GenAIRequestMaxTokensKey is the attribute Key conforming to the
+ // "gen_ai.request.max_tokens" semantic conventions. It represents the maximum
+ // number of tokens the model generates for a request.
+ //
+ // Type: int
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: 100
+ GenAIRequestMaxTokensKey = attribute.Key("gen_ai.request.max_tokens")
+
+ // GenAIRequestModelKey is the attribute Key conforming to the
+ // "gen_ai.request.model" semantic conventions. It represents the name of the
+ // GenAI model a request is being made to.
+ //
+ // Type: string
+ // RequirementLevel: Recommended
+ // Stability: Development
+ //
+ // Examples: gpt-4
+ GenAIRequestModelKey = attribute.Key("gen_ai.request.model")
+
+ // GenAIRequestPresencePenaltyKey is the attribute Key conforming to the
+ // "gen_ai.request.presence_penalty" semantic conventions. It represents the
+ // presence penalty setting for the GenAI request.
+ // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 0.1 + GenAIRequestPresencePenaltyKey = attribute.Key("gen_ai.request.presence_penalty") + + // GenAIRequestSeedKey is the attribute Key conforming to the + // "gen_ai.request.seed" semantic conventions. It represents the requests with + // same seed value more likely to return same result. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 100 + GenAIRequestSeedKey = attribute.Key("gen_ai.request.seed") + + // GenAIRequestStopSequencesKey is the attribute Key conforming to the + // "gen_ai.request.stop_sequences" semantic conventions. It represents the list + // of sequences that the model will use to stop generating further tokens. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "forest", "lived" + GenAIRequestStopSequencesKey = attribute.Key("gen_ai.request.stop_sequences") + + // GenAIRequestTemperatureKey is the attribute Key conforming to the + // "gen_ai.request.temperature" semantic conventions. It represents the + // temperature setting for the GenAI request. + // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 0.0 + GenAIRequestTemperatureKey = attribute.Key("gen_ai.request.temperature") + + // GenAIRequestTopKKey is the attribute Key conforming to the + // "gen_ai.request.top_k" semantic conventions. It represents the top_k sampling + // setting for the GenAI request. + // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1.0 + GenAIRequestTopKKey = attribute.Key("gen_ai.request.top_k") + + // GenAIRequestTopPKey is the attribute Key conforming to the + // "gen_ai.request.top_p" semantic conventions. It represents the top_p sampling + // setting for the GenAI request. + // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1.0 + GenAIRequestTopPKey = attribute.Key("gen_ai.request.top_p") + + // GenAIResponseFinishReasonsKey is the attribute Key conforming to the + // "gen_ai.response.finish_reasons" semantic conventions. It represents the + // array of reasons the model stopped generating tokens, corresponding to each + // generation received. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "stop"], ["stop", "length" + GenAIResponseFinishReasonsKey = attribute.Key("gen_ai.response.finish_reasons") + + // GenAIResponseIDKey is the attribute Key conforming to the + // "gen_ai.response.id" semantic conventions. It represents the unique + // identifier for the completion. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "chatcmpl-123" + GenAIResponseIDKey = attribute.Key("gen_ai.response.id") + + // GenAIResponseModelKey is the attribute Key conforming to the + // "gen_ai.response.model" semantic conventions. It represents the name of the + // model that generated the response. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "gpt-4-0613" + GenAIResponseModelKey = attribute.Key("gen_ai.response.model") + + // GenAISystemKey is the attribute Key conforming to the "gen_ai.system" + // semantic conventions. It represents the Generative AI product as identified + // by the client or server instrumentation. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: openai + // Note: The `gen_ai.system` describes a family of GenAI models with specific + // model identified + // by `gen_ai.request.model` and `gen_ai.response.model` attributes. + // + // The actual GenAI product may differ from the one identified by the client. + // Multiple systems, including Azure OpenAI and Gemini, are accessible by OpenAI + // client + // libraries. In such cases, the `gen_ai.system` is set to `openai` based on the + // instrumentation's best knowledge, instead of the actual system. The + // `server.address` + // attribute may help identify the actual system in use for `openai`. + // + // For custom model, a custom friendly name SHOULD be used. + // If none of these options apply, the `gen_ai.system` SHOULD be set to `_OTHER` + // . + GenAISystemKey = attribute.Key("gen_ai.system") + + // GenAITokenTypeKey is the attribute Key conforming to the "gen_ai.token.type" + // semantic conventions. It represents the type of token being counted. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "input", "output" + GenAITokenTypeKey = attribute.Key("gen_ai.token.type") + + // GenAIToolCallIDKey is the attribute Key conforming to the + // "gen_ai.tool.call.id" semantic conventions. It represents the tool call + // identifier. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "call_mszuSIzqtI65i1wAUOE8w5H4" + GenAIToolCallIDKey = attribute.Key("gen_ai.tool.call.id") + + // GenAIToolDescriptionKey is the attribute Key conforming to the + // "gen_ai.tool.description" semantic conventions. It represents the tool + // description. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Multiply two numbers" + GenAIToolDescriptionKey = attribute.Key("gen_ai.tool.description") + + // GenAIToolNameKey is the attribute Key conforming to the "gen_ai.tool.name" + // semantic conventions. It represents the name of the tool utilized by the + // agent. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Flights" + GenAIToolNameKey = attribute.Key("gen_ai.tool.name") + + // GenAIToolTypeKey is the attribute Key conforming to the "gen_ai.tool.type" + // semantic conventions. It represents the type of the tool utilized by the + // agent. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "function", "extension", "datastore" + // Note: Extension: A tool executed on the agent-side to directly call external + // APIs, bridging the gap between the agent and real-world systems. + // Agent-side operations involve actions that are performed by the agent on the + // server or within the agent's controlled environment. + // Function: A tool executed on the client-side, where the agent generates + // parameters for a predefined function, and the client executes the logic. + // Client-side operations are actions taken on the user's end or within the + // client application. + // Datastore: A tool used by the agent to access and query structured or + // unstructured external data for retrieval-augmented tasks or knowledge + // updates. + GenAIToolTypeKey = attribute.Key("gen_ai.tool.type") + + // GenAIUsageInputTokensKey is the attribute Key conforming to the + // "gen_ai.usage.input_tokens" semantic conventions. 
It represents the number of + // tokens used in the GenAI input (prompt). + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 100 + GenAIUsageInputTokensKey = attribute.Key("gen_ai.usage.input_tokens") + + // GenAIUsageOutputTokensKey is the attribute Key conforming to the + // "gen_ai.usage.output_tokens" semantic conventions. It represents the number + // of tokens used in the GenAI response (completion). + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 180 + GenAIUsageOutputTokensKey = attribute.Key("gen_ai.usage.output_tokens") +) + +// GenAIAgentDescription returns an attribute KeyValue conforming to the +// "gen_ai.agent.description" semantic conventions. It represents the free-form +// description of the GenAI agent provided by the application. +func GenAIAgentDescription(val string) attribute.KeyValue { + return GenAIAgentDescriptionKey.String(val) +} + +// GenAIAgentID returns an attribute KeyValue conforming to the "gen_ai.agent.id" +// semantic conventions. It represents the unique identifier of the GenAI agent. +func GenAIAgentID(val string) attribute.KeyValue { + return GenAIAgentIDKey.String(val) +} + +// GenAIAgentName returns an attribute KeyValue conforming to the +// "gen_ai.agent.name" semantic conventions. It represents the human-readable +// name of the GenAI agent provided by the application. +func GenAIAgentName(val string) attribute.KeyValue { + return GenAIAgentNameKey.String(val) +} + +// GenAIConversationID returns an attribute KeyValue conforming to the +// "gen_ai.conversation.id" semantic conventions. It represents the unique +// identifier for a conversation (session, thread), used to store and correlate +// messages within this conversation. +func GenAIConversationID(val string) attribute.KeyValue { + return GenAIConversationIDKey.String(val) +} + +// GenAIDataSourceID returns an attribute KeyValue conforming to the +// "gen_ai.data_source.id" semantic conventions. It represents the data source +// identifier. +func GenAIDataSourceID(val string) attribute.KeyValue { + return GenAIDataSourceIDKey.String(val) +} + +// GenAIOpenAIResponseServiceTier returns an attribute KeyValue conforming to the +// "gen_ai.openai.response.service_tier" semantic conventions. It represents the +// service tier used for the response. +func GenAIOpenAIResponseServiceTier(val string) attribute.KeyValue { + return GenAIOpenAIResponseServiceTierKey.String(val) +} + +// GenAIOpenAIResponseSystemFingerprint returns an attribute KeyValue conforming +// to the "gen_ai.openai.response.system_fingerprint" semantic conventions. It +// represents a fingerprint to track any eventual change in the Generative AI +// environment. +func GenAIOpenAIResponseSystemFingerprint(val string) attribute.KeyValue { + return GenAIOpenAIResponseSystemFingerprintKey.String(val) +} + +// GenAIRequestChoiceCount returns an attribute KeyValue conforming to the +// "gen_ai.request.choice.count" semantic conventions. It represents the target +// number of candidate completions to return. +func GenAIRequestChoiceCount(val int) attribute.KeyValue { + return GenAIRequestChoiceCountKey.Int(val) +} + +// GenAIRequestEncodingFormats returns an attribute KeyValue conforming to the +// "gen_ai.request.encoding_formats" semantic conventions. It represents the +// encoding formats requested in an embeddings operation, if specified. 
+func GenAIRequestEncodingFormats(val ...string) attribute.KeyValue { + return GenAIRequestEncodingFormatsKey.StringSlice(val) +} + +// GenAIRequestFrequencyPenalty returns an attribute KeyValue conforming to the +// "gen_ai.request.frequency_penalty" semantic conventions. It represents the +// frequency penalty setting for the GenAI request. +func GenAIRequestFrequencyPenalty(val float64) attribute.KeyValue { + return GenAIRequestFrequencyPenaltyKey.Float64(val) +} + +// GenAIRequestMaxTokens returns an attribute KeyValue conforming to the +// "gen_ai.request.max_tokens" semantic conventions. It represents the maximum +// number of tokens the model generates for a request. +func GenAIRequestMaxTokens(val int) attribute.KeyValue { + return GenAIRequestMaxTokensKey.Int(val) +} + +// GenAIRequestModel returns an attribute KeyValue conforming to the +// "gen_ai.request.model" semantic conventions. It represents the name of the +// GenAI model a request is being made to. +func GenAIRequestModel(val string) attribute.KeyValue { + return GenAIRequestModelKey.String(val) +} + +// GenAIRequestPresencePenalty returns an attribute KeyValue conforming to the +// "gen_ai.request.presence_penalty" semantic conventions. It represents the +// presence penalty setting for the GenAI request. +func GenAIRequestPresencePenalty(val float64) attribute.KeyValue { + return GenAIRequestPresencePenaltyKey.Float64(val) +} + +// GenAIRequestSeed returns an attribute KeyValue conforming to the +// "gen_ai.request.seed" semantic conventions. It represents the requests with +// same seed value more likely to return same result. +func GenAIRequestSeed(val int) attribute.KeyValue { + return GenAIRequestSeedKey.Int(val) +} + +// GenAIRequestStopSequences returns an attribute KeyValue conforming to the +// "gen_ai.request.stop_sequences" semantic conventions. It represents the list +// of sequences that the model will use to stop generating further tokens. +func GenAIRequestStopSequences(val ...string) attribute.KeyValue { + return GenAIRequestStopSequencesKey.StringSlice(val) +} + +// GenAIRequestTemperature returns an attribute KeyValue conforming to the +// "gen_ai.request.temperature" semantic conventions. It represents the +// temperature setting for the GenAI request. +func GenAIRequestTemperature(val float64) attribute.KeyValue { + return GenAIRequestTemperatureKey.Float64(val) +} + +// GenAIRequestTopK returns an attribute KeyValue conforming to the +// "gen_ai.request.top_k" semantic conventions. It represents the top_k sampling +// setting for the GenAI request. +func GenAIRequestTopK(val float64) attribute.KeyValue { + return GenAIRequestTopKKey.Float64(val) +} + +// GenAIRequestTopP returns an attribute KeyValue conforming to the +// "gen_ai.request.top_p" semantic conventions. It represents the top_p sampling +// setting for the GenAI request. +func GenAIRequestTopP(val float64) attribute.KeyValue { + return GenAIRequestTopPKey.Float64(val) +} + +// GenAIResponseFinishReasons returns an attribute KeyValue conforming to the +// "gen_ai.response.finish_reasons" semantic conventions. It represents the array +// of reasons the model stopped generating tokens, corresponding to each +// generation received. +func GenAIResponseFinishReasons(val ...string) attribute.KeyValue { + return GenAIResponseFinishReasonsKey.StringSlice(val) +} + +// GenAIResponseID returns an attribute KeyValue conforming to the +// "gen_ai.response.id" semantic conventions. It represents the unique identifier +// for the completion. 
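+//
+// Illustrative use only: a response identifier is typically recorded together
+// with the related GenAIResponseModel attribute on a span obtained from
+// go.opentelemetry.io/otel/trace; both values below are hypothetical
+// placeholders.
+//
+//	span.SetAttributes(GenAIResponseID("chatcmpl-123"), GenAIResponseModel("gpt-4o-mini"))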
+func GenAIResponseID(val string) attribute.KeyValue { + return GenAIResponseIDKey.String(val) +} + +// GenAIResponseModel returns an attribute KeyValue conforming to the +// "gen_ai.response.model" semantic conventions. It represents the name of the +// model that generated the response. +func GenAIResponseModel(val string) attribute.KeyValue { + return GenAIResponseModelKey.String(val) +} + +// GenAIToolCallID returns an attribute KeyValue conforming to the +// "gen_ai.tool.call.id" semantic conventions. It represents the tool call +// identifier. +func GenAIToolCallID(val string) attribute.KeyValue { + return GenAIToolCallIDKey.String(val) +} + +// GenAIToolDescription returns an attribute KeyValue conforming to the +// "gen_ai.tool.description" semantic conventions. It represents the tool +// description. +func GenAIToolDescription(val string) attribute.KeyValue { + return GenAIToolDescriptionKey.String(val) +} + +// GenAIToolName returns an attribute KeyValue conforming to the +// "gen_ai.tool.name" semantic conventions. It represents the name of the tool +// utilized by the agent. +func GenAIToolName(val string) attribute.KeyValue { + return GenAIToolNameKey.String(val) +} + +// GenAIToolType returns an attribute KeyValue conforming to the +// "gen_ai.tool.type" semantic conventions. It represents the type of the tool +// utilized by the agent. +func GenAIToolType(val string) attribute.KeyValue { + return GenAIToolTypeKey.String(val) +} + +// GenAIUsageInputTokens returns an attribute KeyValue conforming to the +// "gen_ai.usage.input_tokens" semantic conventions. It represents the number of +// tokens used in the GenAI input (prompt). +func GenAIUsageInputTokens(val int) attribute.KeyValue { + return GenAIUsageInputTokensKey.Int(val) +} + +// GenAIUsageOutputTokens returns an attribute KeyValue conforming to the +// "gen_ai.usage.output_tokens" semantic conventions. It represents the number of +// tokens used in the GenAI response (completion). +func GenAIUsageOutputTokens(val int) attribute.KeyValue { + return GenAIUsageOutputTokensKey.Int(val) +} + +// Enum values for gen_ai.openai.request.service_tier +var ( + // The system will utilize scale tier credits until they are exhausted. + // Stability: development + GenAIOpenAIRequestServiceTierAuto = GenAIOpenAIRequestServiceTierKey.String("auto") + // The system will utilize the default scale tier. 
+ // Stability: development + GenAIOpenAIRequestServiceTierDefault = GenAIOpenAIRequestServiceTierKey.String("default") +) + +// Enum values for gen_ai.operation.name +var ( + // Chat completion operation such as [OpenAI Chat API] + // Stability: development + // + // [OpenAI Chat API]: https://platform.openai.com/docs/api-reference/chat + GenAIOperationNameChat = GenAIOperationNameKey.String("chat") + // Multimodal content generation operation such as [Gemini Generate Content] + // Stability: development + // + // [Gemini Generate Content]: https://ai.google.dev/api/generate-content + GenAIOperationNameGenerateContent = GenAIOperationNameKey.String("generate_content") + // Text completions operation such as [OpenAI Completions API (Legacy)] + // Stability: development + // + // [OpenAI Completions API (Legacy)]: https://platform.openai.com/docs/api-reference/completions + GenAIOperationNameTextCompletion = GenAIOperationNameKey.String("text_completion") + // Embeddings operation such as [OpenAI Create embeddings API] + // Stability: development + // + // [OpenAI Create embeddings API]: https://platform.openai.com/docs/api-reference/embeddings/create + GenAIOperationNameEmbeddings = GenAIOperationNameKey.String("embeddings") + // Create GenAI agent + // Stability: development + GenAIOperationNameCreateAgent = GenAIOperationNameKey.String("create_agent") + // Invoke GenAI agent + // Stability: development + GenAIOperationNameInvokeAgent = GenAIOperationNameKey.String("invoke_agent") + // Execute a tool + // Stability: development + GenAIOperationNameExecuteTool = GenAIOperationNameKey.String("execute_tool") +) + +// Enum values for gen_ai.output.type +var ( + // Plain text + // Stability: development + GenAIOutputTypeText = GenAIOutputTypeKey.String("text") + // JSON object with known or unknown schema + // Stability: development + GenAIOutputTypeJSON = GenAIOutputTypeKey.String("json") + // Image + // Stability: development + GenAIOutputTypeImage = GenAIOutputTypeKey.String("image") + // Speech + // Stability: development + GenAIOutputTypeSpeech = GenAIOutputTypeKey.String("speech") +) + +// Enum values for gen_ai.system +var ( + // OpenAI + // Stability: development + GenAISystemOpenAI = GenAISystemKey.String("openai") + // Any Google generative AI endpoint + // Stability: development + GenAISystemGCPGenAI = GenAISystemKey.String("gcp.gen_ai") + // Vertex AI + // Stability: development + GenAISystemGCPVertexAI = GenAISystemKey.String("gcp.vertex_ai") + // Gemini + // Stability: development + GenAISystemGCPGemini = GenAISystemKey.String("gcp.gemini") + // Deprecated: Use 'gcp.vertex_ai' instead. + GenAISystemVertexAI = GenAISystemKey.String("vertex_ai") + // Deprecated: Use 'gcp.gemini' instead. 
+ GenAISystemGemini = GenAISystemKey.String("gemini") + // Anthropic + // Stability: development + GenAISystemAnthropic = GenAISystemKey.String("anthropic") + // Cohere + // Stability: development + GenAISystemCohere = GenAISystemKey.String("cohere") + // Azure AI Inference + // Stability: development + GenAISystemAzAIInference = GenAISystemKey.String("az.ai.inference") + // Azure OpenAI + // Stability: development + GenAISystemAzAIOpenAI = GenAISystemKey.String("az.ai.openai") + // IBM Watsonx AI + // Stability: development + GenAISystemIBMWatsonxAI = GenAISystemKey.String("ibm.watsonx.ai") + // AWS Bedrock + // Stability: development + GenAISystemAWSBedrock = GenAISystemKey.String("aws.bedrock") + // Perplexity + // Stability: development + GenAISystemPerplexity = GenAISystemKey.String("perplexity") + // xAI + // Stability: development + GenAISystemXai = GenAISystemKey.String("xai") + // DeepSeek + // Stability: development + GenAISystemDeepseek = GenAISystemKey.String("deepseek") + // Groq + // Stability: development + GenAISystemGroq = GenAISystemKey.String("groq") + // Mistral AI + // Stability: development + GenAISystemMistralAI = GenAISystemKey.String("mistral_ai") +) + +// Enum values for gen_ai.token.type +var ( + // Input tokens (prompt, input, etc.) + // Stability: development + GenAITokenTypeInput = GenAITokenTypeKey.String("input") + // Deprecated: Replaced by `output`. + GenAITokenTypeCompletion = GenAITokenTypeKey.String("output") + // Output tokens (completion, response, etc.) + // Stability: development + GenAITokenTypeOutput = GenAITokenTypeKey.String("output") +) + +// Namespace: geo +const ( + // GeoContinentCodeKey is the attribute Key conforming to the + // "geo.continent.code" semantic conventions. It represents the two-letter code + // representing continent’s name. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + GeoContinentCodeKey = attribute.Key("geo.continent.code") + + // GeoCountryISOCodeKey is the attribute Key conforming to the + // "geo.country.iso_code" semantic conventions. It represents the two-letter ISO + // Country Code ([ISO 3166-1 alpha2]). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "CA" + // + // [ISO 3166-1 alpha2]: https://wikipedia.org/wiki/ISO_3166-1#Codes + GeoCountryISOCodeKey = attribute.Key("geo.country.iso_code") + + // GeoLocalityNameKey is the attribute Key conforming to the "geo.locality.name" + // semantic conventions. It represents the locality name. Represents the name of + // a city, town, village, or similar populated place. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Montreal", "Berlin" + GeoLocalityNameKey = attribute.Key("geo.locality.name") + + // GeoLocationLatKey is the attribute Key conforming to the "geo.location.lat" + // semantic conventions. It represents the latitude of the geo location in + // [WGS84]. + // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 45.505918 + // + // [WGS84]: https://wikipedia.org/wiki/World_Geodetic_System#WGS84 + GeoLocationLatKey = attribute.Key("geo.location.lat") + + // GeoLocationLonKey is the attribute Key conforming to the "geo.location.lon" + // semantic conventions. It represents the longitude of the geo location in + // [WGS84]. 
+ // + // Type: double + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: -73.61483 + // + // [WGS84]: https://wikipedia.org/wiki/World_Geodetic_System#WGS84 + GeoLocationLonKey = attribute.Key("geo.location.lon") + + // GeoPostalCodeKey is the attribute Key conforming to the "geo.postal_code" + // semantic conventions. It represents the postal code associated with the + // location. Values appropriate for this field may also be known as a postcode + // or ZIP code and will vary widely from country to country. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "94040" + GeoPostalCodeKey = attribute.Key("geo.postal_code") + + // GeoRegionISOCodeKey is the attribute Key conforming to the + // "geo.region.iso_code" semantic conventions. It represents the region ISO code + // ([ISO 3166-2]). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "CA-QC" + // + // [ISO 3166-2]: https://wikipedia.org/wiki/ISO_3166-2 + GeoRegionISOCodeKey = attribute.Key("geo.region.iso_code") +) + +// GeoCountryISOCode returns an attribute KeyValue conforming to the +// "geo.country.iso_code" semantic conventions. It represents the two-letter ISO +// Country Code ([ISO 3166-1 alpha2]). +// +// [ISO 3166-1 alpha2]: https://wikipedia.org/wiki/ISO_3166-1#Codes +func GeoCountryISOCode(val string) attribute.KeyValue { + return GeoCountryISOCodeKey.String(val) +} + +// GeoLocalityName returns an attribute KeyValue conforming to the +// "geo.locality.name" semantic conventions. It represents the locality name. +// Represents the name of a city, town, village, or similar populated place. +func GeoLocalityName(val string) attribute.KeyValue { + return GeoLocalityNameKey.String(val) +} + +// GeoLocationLat returns an attribute KeyValue conforming to the +// "geo.location.lat" semantic conventions. It represents the latitude of the geo +// location in [WGS84]. +// +// [WGS84]: https://wikipedia.org/wiki/World_Geodetic_System#WGS84 +func GeoLocationLat(val float64) attribute.KeyValue { + return GeoLocationLatKey.Float64(val) +} + +// GeoLocationLon returns an attribute KeyValue conforming to the +// "geo.location.lon" semantic conventions. It represents the longitude of the +// geo location in [WGS84]. +// +// [WGS84]: https://wikipedia.org/wiki/World_Geodetic_System#WGS84 +func GeoLocationLon(val float64) attribute.KeyValue { + return GeoLocationLonKey.Float64(val) +} + +// GeoPostalCode returns an attribute KeyValue conforming to the +// "geo.postal_code" semantic conventions. It represents the postal code +// associated with the location. Values appropriate for this field may also be +// known as a postcode or ZIP code and will vary widely from country to country. +func GeoPostalCode(val string) attribute.KeyValue { + return GeoPostalCodeKey.String(val) +} + +// GeoRegionISOCode returns an attribute KeyValue conforming to the +// "geo.region.iso_code" semantic conventions. It represents the region ISO code +// ([ISO 3166-2]). 
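+//
+// Illustrative use only, reusing the documented example values "CA" and
+// "CA-QC"; the span variable is assumed to come from
+// go.opentelemetry.io/otel/trace.
+//
+//	span.SetAttributes(GeoCountryISOCode("CA"), GeoRegionISOCode("CA-QC"))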
+// +// [ISO 3166-2]: https://wikipedia.org/wiki/ISO_3166-2 +func GeoRegionISOCode(val string) attribute.KeyValue { + return GeoRegionISOCodeKey.String(val) +} + +// Enum values for geo.continent.code +var ( + // Africa + // Stability: development + GeoContinentCodeAf = GeoContinentCodeKey.String("AF") + // Antarctica + // Stability: development + GeoContinentCodeAn = GeoContinentCodeKey.String("AN") + // Asia + // Stability: development + GeoContinentCodeAs = GeoContinentCodeKey.String("AS") + // Europe + // Stability: development + GeoContinentCodeEu = GeoContinentCodeKey.String("EU") + // North America + // Stability: development + GeoContinentCodeNa = GeoContinentCodeKey.String("NA") + // Oceania + // Stability: development + GeoContinentCodeOc = GeoContinentCodeKey.String("OC") + // South America + // Stability: development + GeoContinentCodeSa = GeoContinentCodeKey.String("SA") +) + +// Namespace: go +const ( + // GoMemoryTypeKey is the attribute Key conforming to the "go.memory.type" + // semantic conventions. It represents the type of memory. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "other", "stack" + GoMemoryTypeKey = attribute.Key("go.memory.type") +) + +// Enum values for go.memory.type +var ( + // Memory allocated from the heap that is reserved for stack space, whether or + // not it is currently in-use. + // Stability: development + GoMemoryTypeStack = GoMemoryTypeKey.String("stack") + // Memory used by the Go runtime, excluding other categories of memory usage + // described in this enumeration. + // Stability: development + GoMemoryTypeOther = GoMemoryTypeKey.String("other") +) + +// Namespace: graphql +const ( + // GraphQLDocumentKey is the attribute Key conforming to the "graphql.document" + // semantic conventions. It represents the GraphQL document being executed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: query findBookById { bookById(id: ?) { name } } + // Note: The value may be sanitized to exclude sensitive information. + GraphQLDocumentKey = attribute.Key("graphql.document") + + // GraphQLOperationNameKey is the attribute Key conforming to the + // "graphql.operation.name" semantic conventions. It represents the name of the + // operation being executed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: findBookById + GraphQLOperationNameKey = attribute.Key("graphql.operation.name") + + // GraphQLOperationTypeKey is the attribute Key conforming to the + // "graphql.operation.type" semantic conventions. It represents the type of the + // operation being executed. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "query", "mutation", "subscription" + GraphQLOperationTypeKey = attribute.Key("graphql.operation.type") +) + +// GraphQLDocument returns an attribute KeyValue conforming to the +// "graphql.document" semantic conventions. It represents the GraphQL document +// being executed. +func GraphQLDocument(val string) attribute.KeyValue { + return GraphQLDocumentKey.String(val) +} + +// GraphQLOperationName returns an attribute KeyValue conforming to the +// "graphql.operation.name" semantic conventions. It represents the name of the +// operation being executed. 
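+//
+// Illustrative use only, reusing the documented example operation name; the
+// span variable is assumed to come from go.opentelemetry.io/otel/trace.
+//
+//	span.SetAttributes(
+//		GraphQLOperationName("findBookById"),
+//		GraphQLOperationTypeQuery,
+//	)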
+func GraphQLOperationName(val string) attribute.KeyValue { + return GraphQLOperationNameKey.String(val) +} + +// Enum values for graphql.operation.type +var ( + // GraphQL query + // Stability: development + GraphQLOperationTypeQuery = GraphQLOperationTypeKey.String("query") + // GraphQL mutation + // Stability: development + GraphQLOperationTypeMutation = GraphQLOperationTypeKey.String("mutation") + // GraphQL subscription + // Stability: development + GraphQLOperationTypeSubscription = GraphQLOperationTypeKey.String("subscription") +) + +// Namespace: heroku +const ( + // HerokuAppIDKey is the attribute Key conforming to the "heroku.app.id" + // semantic conventions. It represents the unique identifier for the + // application. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2daa2797-e42b-4624-9322-ec3f968df4da" + HerokuAppIDKey = attribute.Key("heroku.app.id") + + // HerokuReleaseCommitKey is the attribute Key conforming to the + // "heroku.release.commit" semantic conventions. It represents the commit hash + // for the current release. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "e6134959463efd8966b20e75b913cafe3f5ec" + HerokuReleaseCommitKey = attribute.Key("heroku.release.commit") + + // HerokuReleaseCreationTimestampKey is the attribute Key conforming to the + // "heroku.release.creation_timestamp" semantic conventions. It represents the + // time and date the release was created. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2022-10-23T18:00:42Z" + HerokuReleaseCreationTimestampKey = attribute.Key("heroku.release.creation_timestamp") +) + +// HerokuAppID returns an attribute KeyValue conforming to the "heroku.app.id" +// semantic conventions. It represents the unique identifier for the application. +func HerokuAppID(val string) attribute.KeyValue { + return HerokuAppIDKey.String(val) +} + +// HerokuReleaseCommit returns an attribute KeyValue conforming to the +// "heroku.release.commit" semantic conventions. It represents the commit hash +// for the current release. +func HerokuReleaseCommit(val string) attribute.KeyValue { + return HerokuReleaseCommitKey.String(val) +} + +// HerokuReleaseCreationTimestamp returns an attribute KeyValue conforming to the +// "heroku.release.creation_timestamp" semantic conventions. It represents the +// time and date the release was created. +func HerokuReleaseCreationTimestamp(val string) attribute.KeyValue { + return HerokuReleaseCreationTimestampKey.String(val) +} + +// Namespace: host +const ( + // HostArchKey is the attribute Key conforming to the "host.arch" semantic + // conventions. It represents the CPU architecture the host system is running + // on. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + HostArchKey = attribute.Key("host.arch") + + // HostCPUCacheL2SizeKey is the attribute Key conforming to the + // "host.cpu.cache.l2.size" semantic conventions. It represents the amount of + // level 2 memory cache available to the processor (in Bytes). + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 12288000 + HostCPUCacheL2SizeKey = attribute.Key("host.cpu.cache.l2.size") + + // HostCPUFamilyKey is the attribute Key conforming to the "host.cpu.family" + // semantic conventions. It represents the family or generation of the CPU. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "6", "PA-RISC 1.1e" + HostCPUFamilyKey = attribute.Key("host.cpu.family") + + // HostCPUModelIDKey is the attribute Key conforming to the "host.cpu.model.id" + // semantic conventions. It represents the model identifier. It provides more + // granular information about the CPU, distinguishing it from other CPUs within + // the same family. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "6", "9000/778/B180L" + HostCPUModelIDKey = attribute.Key("host.cpu.model.id") + + // HostCPUModelNameKey is the attribute Key conforming to the + // "host.cpu.model.name" semantic conventions. It represents the model + // designation of the processor. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "11th Gen Intel(R) Core(TM) i7-1185G7 @ 3.00GHz" + HostCPUModelNameKey = attribute.Key("host.cpu.model.name") + + // HostCPUSteppingKey is the attribute Key conforming to the "host.cpu.stepping" + // semantic conventions. It represents the stepping or core revisions. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1", "r1p1" + HostCPUSteppingKey = attribute.Key("host.cpu.stepping") + + // HostCPUVendorIDKey is the attribute Key conforming to the + // "host.cpu.vendor.id" semantic conventions. It represents the processor + // manufacturer identifier. A maximum 12-character string. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "GenuineIntel" + // Note: [CPUID] command returns the vendor ID string in EBX, EDX and ECX + // registers. Writing these to memory in this order results in a 12-character + // string. + // + // [CPUID]: https://wiki.osdev.org/CPUID + HostCPUVendorIDKey = attribute.Key("host.cpu.vendor.id") + + // HostIDKey is the attribute Key conforming to the "host.id" semantic + // conventions. It represents the unique host ID. For Cloud, this must be the + // instance_id assigned by the cloud provider. For non-containerized systems, + // this should be the `machine-id`. See the table below for the sources to use + // to determine the `machine-id` based on operating system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "fdbf79e8af94cb7f9e8df36789187052" + HostIDKey = attribute.Key("host.id") + + // HostImageIDKey is the attribute Key conforming to the "host.image.id" + // semantic conventions. It represents the VM image ID or host OS image ID. For + // Cloud, this value is from the provider. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "ami-07b06b442921831e5" + HostImageIDKey = attribute.Key("host.image.id") + + // HostImageNameKey is the attribute Key conforming to the "host.image.name" + // semantic conventions. It represents the name of the VM image or OS install + // the host was instantiated from. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "infra-ami-eks-worker-node-7d4ec78312", "CentOS-8-x86_64-1905" + HostImageNameKey = attribute.Key("host.image.name") + + // HostImageVersionKey is the attribute Key conforming to the + // "host.image.version" semantic conventions. It represents the version string + // of the VM image or host OS as defined in [Version Attributes]. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0.1" + // + // [Version Attributes]: /docs/resource/README.md#version-attributes + HostImageVersionKey = attribute.Key("host.image.version") + + // HostIPKey is the attribute Key conforming to the "host.ip" semantic + // conventions. It represents the available IP addresses of the host, excluding + // loopback interfaces. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "192.168.1.140", "fe80::abc2:4a28:737a:609e" + // Note: IPv4 Addresses MUST be specified in dotted-quad notation. IPv6 + // addresses MUST be specified in the [RFC 5952] format. + // + // [RFC 5952]: https://www.rfc-editor.org/rfc/rfc5952.html + HostIPKey = attribute.Key("host.ip") + + // HostMacKey is the attribute Key conforming to the "host.mac" semantic + // conventions. It represents the available MAC addresses of the host, excluding + // loopback interfaces. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "AC-DE-48-23-45-67", "AC-DE-48-23-45-67-01-9F" + // Note: MAC Addresses MUST be represented in [IEEE RA hexadecimal form]: as + // hyphen-separated octets in uppercase hexadecimal form from most to least + // significant. + // + // [IEEE RA hexadecimal form]: https://standards.ieee.org/wp-content/uploads/import/documents/tutorials/eui.pdf + HostMacKey = attribute.Key("host.mac") + + // HostNameKey is the attribute Key conforming to the "host.name" semantic + // conventions. It represents the name of the host. On Unix systems, it may + // contain what the hostname command returns, or the fully qualified hostname, + // or another name specified by the user. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry-test" + HostNameKey = attribute.Key("host.name") + + // HostTypeKey is the attribute Key conforming to the "host.type" semantic + // conventions. It represents the type of host. For Cloud, this must be the + // machine type. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "n1-standard-1" + HostTypeKey = attribute.Key("host.type") +) + +// HostCPUCacheL2Size returns an attribute KeyValue conforming to the +// "host.cpu.cache.l2.size" semantic conventions. It represents the amount of +// level 2 memory cache available to the processor (in Bytes). +func HostCPUCacheL2Size(val int) attribute.KeyValue { + return HostCPUCacheL2SizeKey.Int(val) +} + +// HostCPUFamily returns an attribute KeyValue conforming to the +// "host.cpu.family" semantic conventions. It represents the family or generation +// of the CPU. +func HostCPUFamily(val string) attribute.KeyValue { + return HostCPUFamilyKey.String(val) +} + +// HostCPUModelID returns an attribute KeyValue conforming to the +// "host.cpu.model.id" semantic conventions. It represents the model identifier. +// It provides more granular information about the CPU, distinguishing it from +// other CPUs within the same family. +func HostCPUModelID(val string) attribute.KeyValue { + return HostCPUModelIDKey.String(val) +} + +// HostCPUModelName returns an attribute KeyValue conforming to the +// "host.cpu.model.name" semantic conventions. It represents the model +// designation of the processor. 
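+//
+// Illustrative use only, reusing the documented example value; host.* entries
+// are resource attributes, so they are usually collected into a slice of
+// attribute.KeyValue for the SDK resource rather than set on individual spans.
+//
+//	attrs := []attribute.KeyValue{
+//		HostCPUModelName("11th Gen Intel(R) Core(TM) i7-1185G7 @ 3.00GHz"),
+//	}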
+func HostCPUModelName(val string) attribute.KeyValue { + return HostCPUModelNameKey.String(val) +} + +// HostCPUStepping returns an attribute KeyValue conforming to the +// "host.cpu.stepping" semantic conventions. It represents the stepping or core +// revisions. +func HostCPUStepping(val string) attribute.KeyValue { + return HostCPUSteppingKey.String(val) +} + +// HostCPUVendorID returns an attribute KeyValue conforming to the +// "host.cpu.vendor.id" semantic conventions. It represents the processor +// manufacturer identifier. A maximum 12-character string. +func HostCPUVendorID(val string) attribute.KeyValue { + return HostCPUVendorIDKey.String(val) +} + +// HostID returns an attribute KeyValue conforming to the "host.id" semantic +// conventions. It represents the unique host ID. For Cloud, this must be the +// instance_id assigned by the cloud provider. For non-containerized systems, +// this should be the `machine-id`. See the table below for the sources to use to +// determine the `machine-id` based on operating system. +func HostID(val string) attribute.KeyValue { + return HostIDKey.String(val) +} + +// HostImageID returns an attribute KeyValue conforming to the "host.image.id" +// semantic conventions. It represents the VM image ID or host OS image ID. For +// Cloud, this value is from the provider. +func HostImageID(val string) attribute.KeyValue { + return HostImageIDKey.String(val) +} + +// HostImageName returns an attribute KeyValue conforming to the +// "host.image.name" semantic conventions. It represents the name of the VM image +// or OS install the host was instantiated from. +func HostImageName(val string) attribute.KeyValue { + return HostImageNameKey.String(val) +} + +// HostImageVersion returns an attribute KeyValue conforming to the +// "host.image.version" semantic conventions. It represents the version string of +// the VM image or host OS as defined in [Version Attributes]. +// +// [Version Attributes]: /docs/resource/README.md#version-attributes +func HostImageVersion(val string) attribute.KeyValue { + return HostImageVersionKey.String(val) +} + +// HostIP returns an attribute KeyValue conforming to the "host.ip" semantic +// conventions. It represents the available IP addresses of the host, excluding +// loopback interfaces. +func HostIP(val ...string) attribute.KeyValue { + return HostIPKey.StringSlice(val) +} + +// HostMac returns an attribute KeyValue conforming to the "host.mac" semantic +// conventions. It represents the available MAC addresses of the host, excluding +// loopback interfaces. +func HostMac(val ...string) attribute.KeyValue { + return HostMacKey.StringSlice(val) +} + +// HostName returns an attribute KeyValue conforming to the "host.name" semantic +// conventions. It represents the name of the host. On Unix systems, it may +// contain what the hostname command returns, or the fully qualified hostname, or +// another name specified by the user. +func HostName(val string) attribute.KeyValue { + return HostNameKey.String(val) +} + +// HostType returns an attribute KeyValue conforming to the "host.type" semantic +// conventions. It represents the type of host. For Cloud, this must be the +// machine type. 
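+//
+// Illustrative use only, reusing documented example values; NewSchemaless here
+// refers to go.opentelemetry.io/otel/sdk/resource.NewSchemaless.
+//
+//	res := resource.NewSchemaless(HostType("n1-standard-1"), HostName("opentelemetry-test"))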
+func HostType(val string) attribute.KeyValue { + return HostTypeKey.String(val) +} + +// Enum values for host.arch +var ( + // AMD64 + // Stability: development + HostArchAMD64 = HostArchKey.String("amd64") + // ARM32 + // Stability: development + HostArchARM32 = HostArchKey.String("arm32") + // ARM64 + // Stability: development + HostArchARM64 = HostArchKey.String("arm64") + // Itanium + // Stability: development + HostArchIA64 = HostArchKey.String("ia64") + // 32-bit PowerPC + // Stability: development + HostArchPPC32 = HostArchKey.String("ppc32") + // 64-bit PowerPC + // Stability: development + HostArchPPC64 = HostArchKey.String("ppc64") + // IBM z/Architecture + // Stability: development + HostArchS390x = HostArchKey.String("s390x") + // 32-bit x86 + // Stability: development + HostArchX86 = HostArchKey.String("x86") +) + +// Namespace: http +const ( + // HTTPConnectionStateKey is the attribute Key conforming to the + // "http.connection.state" semantic conventions. It represents the state of the + // HTTP connection in the HTTP connection pool. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "active", "idle" + HTTPConnectionStateKey = attribute.Key("http.connection.state") + + // HTTPRequestBodySizeKey is the attribute Key conforming to the + // "http.request.body.size" semantic conventions. It represents the size of the + // request payload body in bytes. This is the number of bytes transferred + // excluding headers and is often, but not always, present as the + // [Content-Length] header. For requests using transport encoding, this should + // be the compressed size. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // [Content-Length]: https://www.rfc-editor.org/rfc/rfc9110.html#field.content-length + HTTPRequestBodySizeKey = attribute.Key("http.request.body.size") + + // HTTPRequestMethodKey is the attribute Key conforming to the + // "http.request.method" semantic conventions. It represents the HTTP request + // method. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "GET", "POST", "HEAD" + // Note: HTTP request method value SHOULD be "known" to the instrumentation. + // By default, this convention defines "known" methods as the ones listed in + // [RFC9110] + // and the PATCH method defined in [RFC5789]. + // + // If the HTTP request method is not known to instrumentation, it MUST set the + // `http.request.method` attribute to `_OTHER`. + // + // If the HTTP instrumentation could end up converting valid HTTP request + // methods to `_OTHER`, then it MUST provide a way to override + // the list of known HTTP methods. If this override is done via environment + // variable, then the environment variable MUST be named + // OTEL_INSTRUMENTATION_HTTP_KNOWN_METHODS and support a comma-separated list of + // case-sensitive known HTTP methods + // (this list MUST be a full override of the default known method, it is not a + // list of known methods in addition to the defaults). + // + // HTTP method names are case-sensitive and `http.request.method` attribute + // value MUST match a known HTTP method name exactly. + // Instrumentations for specific web frameworks that consider HTTP methods to be + // case insensitive, SHOULD populate a canonical equivalent. + // Tracing instrumentations that do so, MUST also set + // `http.request.method_original` to the original value. 
+ // + // [RFC9110]: https://www.rfc-editor.org/rfc/rfc9110.html#name-methods + // [RFC5789]: https://www.rfc-editor.org/rfc/rfc5789.html + HTTPRequestMethodKey = attribute.Key("http.request.method") + + // HTTPRequestMethodOriginalKey is the attribute Key conforming to the + // "http.request.method_original" semantic conventions. It represents the + // original HTTP method sent by the client in the request line. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "GeT", "ACL", "foo" + HTTPRequestMethodOriginalKey = attribute.Key("http.request.method_original") + + // HTTPRequestResendCountKey is the attribute Key conforming to the + // "http.request.resend_count" semantic conventions. It represents the ordinal + // number of request resending attempt (for any reason, including redirects). + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + // + // Note: The resend count SHOULD be updated each time an HTTP request gets + // resent by the client, regardless of what was the cause of the resending (e.g. + // redirection, authorization failure, 503 Server Unavailable, network issues, + // or any other). + HTTPRequestResendCountKey = attribute.Key("http.request.resend_count") + + // HTTPRequestSizeKey is the attribute Key conforming to the "http.request.size" + // semantic conventions. It represents the total size of the request in bytes. + // This should be the total number of bytes sent over the wire, including the + // request line (HTTP/1.1), framing (HTTP/2 and HTTP/3), headers, and request + // body if any. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + HTTPRequestSizeKey = attribute.Key("http.request.size") + + // HTTPResponseBodySizeKey is the attribute Key conforming to the + // "http.response.body.size" semantic conventions. It represents the size of the + // response payload body in bytes. This is the number of bytes transferred + // excluding headers and is often, but not always, present as the + // [Content-Length] header. For requests using transport encoding, this should + // be the compressed size. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // [Content-Length]: https://www.rfc-editor.org/rfc/rfc9110.html#field.content-length + HTTPResponseBodySizeKey = attribute.Key("http.response.body.size") + + // HTTPResponseSizeKey is the attribute Key conforming to the + // "http.response.size" semantic conventions. It represents the total size of + // the response in bytes. This should be the total number of bytes sent over the + // wire, including the status line (HTTP/1.1), framing (HTTP/2 and HTTP/3), + // headers, and response body and trailers if any. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + HTTPResponseSizeKey = attribute.Key("http.response.size") + + // HTTPResponseStatusCodeKey is the attribute Key conforming to the + // "http.response.status_code" semantic conventions. It represents the + // [HTTP response status code]. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: 200 + // + // [HTTP response status code]: https://tools.ietf.org/html/rfc7231#section-6 + HTTPResponseStatusCodeKey = attribute.Key("http.response.status_code") + + // HTTPRouteKey is the attribute Key conforming to the "http.route" semantic + // conventions. 
It represents the matched route, that is, the path template in + // the format used by the respective server framework. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "/users/:userID?", "{controller}/{action}/{id?}" + // Note: MUST NOT be populated when this is not supported by the HTTP server + // framework as the route attribute should have low-cardinality and the URI path + // can NOT substitute it. + // SHOULD include the [application root] if there is one. + // + // [application root]: /docs/http/http-spans.md#http-server-definitions + HTTPRouteKey = attribute.Key("http.route") +) + +// HTTPRequestBodySize returns an attribute KeyValue conforming to the +// "http.request.body.size" semantic conventions. It represents the size of the +// request payload body in bytes. This is the number of bytes transferred +// excluding headers and is often, but not always, present as the +// [Content-Length] header. For requests using transport encoding, this should be +// the compressed size. +// +// [Content-Length]: https://www.rfc-editor.org/rfc/rfc9110.html#field.content-length +func HTTPRequestBodySize(val int) attribute.KeyValue { + return HTTPRequestBodySizeKey.Int(val) +} + +// HTTPRequestMethodOriginal returns an attribute KeyValue conforming to the +// "http.request.method_original" semantic conventions. It represents the +// original HTTP method sent by the client in the request line. +func HTTPRequestMethodOriginal(val string) attribute.KeyValue { + return HTTPRequestMethodOriginalKey.String(val) +} + +// HTTPRequestResendCount returns an attribute KeyValue conforming to the +// "http.request.resend_count" semantic conventions. It represents the ordinal +// number of request resending attempt (for any reason, including redirects). +func HTTPRequestResendCount(val int) attribute.KeyValue { + return HTTPRequestResendCountKey.Int(val) +} + +// HTTPRequestSize returns an attribute KeyValue conforming to the +// "http.request.size" semantic conventions. It represents the total size of the +// request in bytes. This should be the total number of bytes sent over the wire, +// including the request line (HTTP/1.1), framing (HTTP/2 and HTTP/3), headers, +// and request body if any. +func HTTPRequestSize(val int) attribute.KeyValue { + return HTTPRequestSizeKey.Int(val) +} + +// HTTPResponseBodySize returns an attribute KeyValue conforming to the +// "http.response.body.size" semantic conventions. It represents the size of the +// response payload body in bytes. This is the number of bytes transferred +// excluding headers and is often, but not always, present as the +// [Content-Length] header. For requests using transport encoding, this should be +// the compressed size. +// +// [Content-Length]: https://www.rfc-editor.org/rfc/rfc9110.html#field.content-length +func HTTPResponseBodySize(val int) attribute.KeyValue { + return HTTPResponseBodySizeKey.Int(val) +} + +// HTTPResponseSize returns an attribute KeyValue conforming to the +// "http.response.size" semantic conventions. It represents the total size of the +// response in bytes. This should be the total number of bytes sent over the +// wire, including the status line (HTTP/1.1), framing (HTTP/2 and HTTP/3), +// headers, and response body and trailers if any. +func HTTPResponseSize(val int) attribute.KeyValue { + return HTTPResponseSizeKey.Int(val) +} + +// HTTPResponseStatusCode returns an attribute KeyValue conforming to the +// "http.response.status_code" semantic conventions. 
It represents the +// [HTTP response status code]. +// +// [HTTP response status code]: https://tools.ietf.org/html/rfc7231#section-6 +func HTTPResponseStatusCode(val int) attribute.KeyValue { + return HTTPResponseStatusCodeKey.Int(val) +} + +// HTTPRoute returns an attribute KeyValue conforming to the "http.route" +// semantic conventions. It represents the matched route, that is, the path +// template in the format used by the respective server framework. +func HTTPRoute(val string) attribute.KeyValue { + return HTTPRouteKey.String(val) +} + +// Enum values for http.connection.state +var ( + // active state. + // Stability: development + HTTPConnectionStateActive = HTTPConnectionStateKey.String("active") + // idle state. + // Stability: development + HTTPConnectionStateIdle = HTTPConnectionStateKey.String("idle") +) + +// Enum values for http.request.method +var ( + // CONNECT method. + // Stability: stable + HTTPRequestMethodConnect = HTTPRequestMethodKey.String("CONNECT") + // DELETE method. + // Stability: stable + HTTPRequestMethodDelete = HTTPRequestMethodKey.String("DELETE") + // GET method. + // Stability: stable + HTTPRequestMethodGet = HTTPRequestMethodKey.String("GET") + // HEAD method. + // Stability: stable + HTTPRequestMethodHead = HTTPRequestMethodKey.String("HEAD") + // OPTIONS method. + // Stability: stable + HTTPRequestMethodOptions = HTTPRequestMethodKey.String("OPTIONS") + // PATCH method. + // Stability: stable + HTTPRequestMethodPatch = HTTPRequestMethodKey.String("PATCH") + // POST method. + // Stability: stable + HTTPRequestMethodPost = HTTPRequestMethodKey.String("POST") + // PUT method. + // Stability: stable + HTTPRequestMethodPut = HTTPRequestMethodKey.String("PUT") + // TRACE method. + // Stability: stable + HTTPRequestMethodTrace = HTTPRequestMethodKey.String("TRACE") + // Any HTTP method that the instrumentation has no prior knowledge of. + // Stability: stable + HTTPRequestMethodOther = HTTPRequestMethodKey.String("_OTHER") +) + +// Namespace: hw +const ( + // HwIDKey is the attribute Key conforming to the "hw.id" semantic conventions. + // It represents an identifier for the hardware component, unique within the + // monitored host. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "win32battery_battery_testsysa33_1" + HwIDKey = attribute.Key("hw.id") + + // HwNameKey is the attribute Key conforming to the "hw.name" semantic + // conventions. It represents an easily-recognizable name for the hardware + // component. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "eth0" + HwNameKey = attribute.Key("hw.name") + + // HwParentKey is the attribute Key conforming to the "hw.parent" semantic + // conventions. It represents the unique identifier of the parent component + // (typically the `hw.id` attribute of the enclosure, or disk controller). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "dellStorage_perc_0" + HwParentKey = attribute.Key("hw.parent") + + // HwStateKey is the attribute Key conforming to the "hw.state" semantic + // conventions. It represents the current state of the component. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + HwStateKey = attribute.Key("hw.state") + + // HwTypeKey is the attribute Key conforming to the "hw.type" semantic + // conventions. It represents the type of the component. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: Describes the category of the hardware component for which `hw.state` + // is being reported. For example, `hw.type=temperature` along with + // `hw.state=degraded` would indicate that the temperature of the hardware + // component has been reported as `degraded`. + HwTypeKey = attribute.Key("hw.type") +) + +// HwID returns an attribute KeyValue conforming to the "hw.id" semantic +// conventions. It represents an identifier for the hardware component, unique +// within the monitored host. +func HwID(val string) attribute.KeyValue { + return HwIDKey.String(val) +} + +// HwName returns an attribute KeyValue conforming to the "hw.name" semantic +// conventions. It represents an easily-recognizable name for the hardware +// component. +func HwName(val string) attribute.KeyValue { + return HwNameKey.String(val) +} + +// HwParent returns an attribute KeyValue conforming to the "hw.parent" semantic +// conventions. It represents the unique identifier of the parent component +// (typically the `hw.id` attribute of the enclosure, or disk controller). +func HwParent(val string) attribute.KeyValue { + return HwParentKey.String(val) +} + +// Enum values for hw.state +var ( + // Ok + // Stability: development + HwStateOk = HwStateKey.String("ok") + // Degraded + // Stability: development + HwStateDegraded = HwStateKey.String("degraded") + // Failed + // Stability: development + HwStateFailed = HwStateKey.String("failed") +) + +// Enum values for hw.type +var ( + // Battery + // Stability: development + HwTypeBattery = HwTypeKey.String("battery") + // CPU + // Stability: development + HwTypeCPU = HwTypeKey.String("cpu") + // Disk controller + // Stability: development + HwTypeDiskController = HwTypeKey.String("disk_controller") + // Enclosure + // Stability: development + HwTypeEnclosure = HwTypeKey.String("enclosure") + // Fan + // Stability: development + HwTypeFan = HwTypeKey.String("fan") + // GPU + // Stability: development + HwTypeGpu = HwTypeKey.String("gpu") + // Logical disk + // Stability: development + HwTypeLogicalDisk = HwTypeKey.String("logical_disk") + // Memory + // Stability: development + HwTypeMemory = HwTypeKey.String("memory") + // Network + // Stability: development + HwTypeNetwork = HwTypeKey.String("network") + // Physical disk + // Stability: development + HwTypePhysicalDisk = HwTypeKey.String("physical_disk") + // Power supply + // Stability: development + HwTypePowerSupply = HwTypeKey.String("power_supply") + // Tape drive + // Stability: development + HwTypeTapeDrive = HwTypeKey.String("tape_drive") + // Temperature + // Stability: development + HwTypeTemperature = HwTypeKey.String("temperature") + // Voltage + // Stability: development + HwTypeVoltage = HwTypeKey.String("voltage") +) + +// Namespace: ios +const ( + // IOSAppStateKey is the attribute Key conforming to the "ios.app.state" + // semantic conventions. It represents the this attribute represents the state + // of the application. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: The iOS lifecycle states are defined in the + // [UIApplicationDelegate documentation], and from which the `OS terminology` + // column values are derived. 
+ // + // [UIApplicationDelegate documentation]: https://developer.apple.com/documentation/uikit/uiapplicationdelegate + IOSAppStateKey = attribute.Key("ios.app.state") +) + +// Enum values for ios.app.state +var ( + // The app has become `active`. Associated with UIKit notification + // `applicationDidBecomeActive`. + // + // Stability: development + IOSAppStateActive = IOSAppStateKey.String("active") + // The app is now `inactive`. Associated with UIKit notification + // `applicationWillResignActive`. + // + // Stability: development + IOSAppStateInactive = IOSAppStateKey.String("inactive") + // The app is now in the background. This value is associated with UIKit + // notification `applicationDidEnterBackground`. + // + // Stability: development + IOSAppStateBackground = IOSAppStateKey.String("background") + // The app is now in the foreground. This value is associated with UIKit + // notification `applicationWillEnterForeground`. + // + // Stability: development + IOSAppStateForeground = IOSAppStateKey.String("foreground") + // The app is about to terminate. Associated with UIKit notification + // `applicationWillTerminate`. + // + // Stability: development + IOSAppStateTerminate = IOSAppStateKey.String("terminate") +) + +// Namespace: k8s +const ( + // K8SClusterNameKey is the attribute Key conforming to the "k8s.cluster.name" + // semantic conventions. It represents the name of the cluster. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry-cluster" + K8SClusterNameKey = attribute.Key("k8s.cluster.name") + + // K8SClusterUIDKey is the attribute Key conforming to the "k8s.cluster.uid" + // semantic conventions. It represents a pseudo-ID for the cluster, set to the + // UID of the `kube-system` namespace. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "218fc5a9-a5f1-4b54-aa05-46717d0ab26d" + // Note: K8s doesn't have support for obtaining a cluster ID. If this is ever + // added, we will recommend collecting the `k8s.cluster.uid` through the + // official APIs. In the meantime, we are able to use the `uid` of the + // `kube-system` namespace as a proxy for cluster ID. Read on for the + // rationale. + // + // Every object created in a K8s cluster is assigned a distinct UID. The + // `kube-system` namespace is used by Kubernetes itself and will exist + // for the lifetime of the cluster. Using the `uid` of the `kube-system` + // namespace is a reasonable proxy for the K8s ClusterID as it will only + // change if the cluster is rebuilt. Furthermore, Kubernetes UIDs are + // UUIDs as standardized by + // [ISO/IEC 9834-8 and ITU-T X.667]. + // Which states: + // + // > If generated according to one of the mechanisms defined in Rec. + // > ITU-T X.667 | ISO/IEC 9834-8, a UUID is either guaranteed to be + // > different from all other UUIDs generated before 3603 A.D., or is + // > extremely likely to be different (depending on the mechanism chosen). + // + // Therefore, UIDs between clusters should be extremely unlikely to + // conflict. + // + // [ISO/IEC 9834-8 and ITU-T X.667]: https://www.itu.int/ITU-T/studygroups/com17/oid.html + K8SClusterUIDKey = attribute.Key("k8s.cluster.uid") + + // K8SContainerNameKey is the attribute Key conforming to the + // "k8s.container.name" semantic conventions. It represents the name of the + // Container from Pod specification, must be unique within a Pod. 
Container + // runtime usually uses different globally unique name (`container.name`). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "redis" + K8SContainerNameKey = attribute.Key("k8s.container.name") + + // K8SContainerRestartCountKey is the attribute Key conforming to the + // "k8s.container.restart_count" semantic conventions. It represents the number + // of times the container was restarted. This attribute can be used to identify + // a particular container (running or stopped) within a container spec. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + K8SContainerRestartCountKey = attribute.Key("k8s.container.restart_count") + + // K8SContainerStatusLastTerminatedReasonKey is the attribute Key conforming to + // the "k8s.container.status.last_terminated_reason" semantic conventions. It + // represents the last terminated reason of the Container. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Evicted", "Error" + K8SContainerStatusLastTerminatedReasonKey = attribute.Key("k8s.container.status.last_terminated_reason") + + // K8SCronJobNameKey is the attribute Key conforming to the "k8s.cronjob.name" + // semantic conventions. It represents the name of the CronJob. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SCronJobNameKey = attribute.Key("k8s.cronjob.name") + + // K8SCronJobUIDKey is the attribute Key conforming to the "k8s.cronjob.uid" + // semantic conventions. It represents the UID of the CronJob. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SCronJobUIDKey = attribute.Key("k8s.cronjob.uid") + + // K8SDaemonSetNameKey is the attribute Key conforming to the + // "k8s.daemonset.name" semantic conventions. It represents the name of the + // DaemonSet. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SDaemonSetNameKey = attribute.Key("k8s.daemonset.name") + + // K8SDaemonSetUIDKey is the attribute Key conforming to the "k8s.daemonset.uid" + // semantic conventions. It represents the UID of the DaemonSet. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SDaemonSetUIDKey = attribute.Key("k8s.daemonset.uid") + + // K8SDeploymentNameKey is the attribute Key conforming to the + // "k8s.deployment.name" semantic conventions. It represents the name of the + // Deployment. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SDeploymentNameKey = attribute.Key("k8s.deployment.name") + + // K8SDeploymentUIDKey is the attribute Key conforming to the + // "k8s.deployment.uid" semantic conventions. It represents the UID of the + // Deployment. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SDeploymentUIDKey = attribute.Key("k8s.deployment.uid") + + // K8SHPANameKey is the attribute Key conforming to the "k8s.hpa.name" semantic + // conventions. It represents the name of the horizontal pod autoscaler. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SHPANameKey = attribute.Key("k8s.hpa.name") + + // K8SHPAUIDKey is the attribute Key conforming to the "k8s.hpa.uid" semantic + // conventions. It represents the UID of the horizontal pod autoscaler. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SHPAUIDKey = attribute.Key("k8s.hpa.uid") + + // K8SJobNameKey is the attribute Key conforming to the "k8s.job.name" semantic + // conventions. It represents the name of the Job. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SJobNameKey = attribute.Key("k8s.job.name") + + // K8SJobUIDKey is the attribute Key conforming to the "k8s.job.uid" semantic + // conventions. It represents the UID of the Job. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SJobUIDKey = attribute.Key("k8s.job.uid") + + // K8SNamespaceNameKey is the attribute Key conforming to the + // "k8s.namespace.name" semantic conventions. It represents the name of the + // namespace that the pod is running in. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "default" + K8SNamespaceNameKey = attribute.Key("k8s.namespace.name") + + // K8SNamespacePhaseKey is the attribute Key conforming to the + // "k8s.namespace.phase" semantic conventions. It represents the phase of the + // K8s namespace. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "active", "terminating" + // Note: This attribute aligns with the `phase` field of the + // [K8s NamespaceStatus] + // + // [K8s NamespaceStatus]: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#namespacestatus-v1-core + K8SNamespacePhaseKey = attribute.Key("k8s.namespace.phase") + + // K8SNodeNameKey is the attribute Key conforming to the "k8s.node.name" + // semantic conventions. It represents the name of the Node. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "node-1" + K8SNodeNameKey = attribute.Key("k8s.node.name") + + // K8SNodeUIDKey is the attribute Key conforming to the "k8s.node.uid" semantic + // conventions. It represents the UID of the Node. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1eb3a0c6-0477-4080-a9cb-0cb7db65c6a2" + K8SNodeUIDKey = attribute.Key("k8s.node.uid") + + // K8SPodNameKey is the attribute Key conforming to the "k8s.pod.name" semantic + // conventions. It represents the name of the Pod. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry-pod-autoconf" + K8SPodNameKey = attribute.Key("k8s.pod.name") + + // K8SPodUIDKey is the attribute Key conforming to the "k8s.pod.uid" semantic + // conventions. It represents the UID of the Pod. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SPodUIDKey = attribute.Key("k8s.pod.uid") + + // K8SReplicaSetNameKey is the attribute Key conforming to the + // "k8s.replicaset.name" semantic conventions. It represents the name of the + // ReplicaSet. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SReplicaSetNameKey = attribute.Key("k8s.replicaset.name") + + // K8SReplicaSetUIDKey is the attribute Key conforming to the + // "k8s.replicaset.uid" semantic conventions. It represents the UID of the + // ReplicaSet. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SReplicaSetUIDKey = attribute.Key("k8s.replicaset.uid") + + // K8SReplicationControllerNameKey is the attribute Key conforming to the + // "k8s.replicationcontroller.name" semantic conventions. It represents the name + // of the replication controller. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SReplicationControllerNameKey = attribute.Key("k8s.replicationcontroller.name") + + // K8SReplicationControllerUIDKey is the attribute Key conforming to the + // "k8s.replicationcontroller.uid" semantic conventions. It represents the UID + // of the replication controller. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SReplicationControllerUIDKey = attribute.Key("k8s.replicationcontroller.uid") + + // K8SResourceQuotaNameKey is the attribute Key conforming to the + // "k8s.resourcequota.name" semantic conventions. It represents the name of the + // resource quota. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SResourceQuotaNameKey = attribute.Key("k8s.resourcequota.name") + + // K8SResourceQuotaUIDKey is the attribute Key conforming to the + // "k8s.resourcequota.uid" semantic conventions. It represents the UID of the + // resource quota. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SResourceQuotaUIDKey = attribute.Key("k8s.resourcequota.uid") + + // K8SStatefulSetNameKey is the attribute Key conforming to the + // "k8s.statefulset.name" semantic conventions. It represents the name of the + // StatefulSet. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "opentelemetry" + K8SStatefulSetNameKey = attribute.Key("k8s.statefulset.name") + + // K8SStatefulSetUIDKey is the attribute Key conforming to the + // "k8s.statefulset.uid" semantic conventions. It represents the UID of the + // StatefulSet. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "275ecb36-5aa8-4c2a-9c47-d8bb681b9aff" + K8SStatefulSetUIDKey = attribute.Key("k8s.statefulset.uid") + + // K8SVolumeNameKey is the attribute Key conforming to the "k8s.volume.name" + // semantic conventions. It represents the name of the K8s volume. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "volume0" + K8SVolumeNameKey = attribute.Key("k8s.volume.name") + + // K8SVolumeTypeKey is the attribute Key conforming to the "k8s.volume.type" + // semantic conventions. It represents the type of the K8s volume. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "emptyDir", "persistentVolumeClaim" + K8SVolumeTypeKey = attribute.Key("k8s.volume.type") +) + +// K8SClusterName returns an attribute KeyValue conforming to the +// "k8s.cluster.name" semantic conventions. It represents the name of the +// cluster. +func K8SClusterName(val string) attribute.KeyValue { + return K8SClusterNameKey.String(val) +} + +// K8SClusterUID returns an attribute KeyValue conforming to the +// "k8s.cluster.uid" semantic conventions. It represents a pseudo-ID for the +// cluster, set to the UID of the `kube-system` namespace. +func K8SClusterUID(val string) attribute.KeyValue { + return K8SClusterUIDKey.String(val) +} + +// K8SContainerName returns an attribute KeyValue conforming to the +// "k8s.container.name" semantic conventions. It represents the name of the +// Container from Pod specification, must be unique within a Pod. Container +// runtime usually uses different globally unique name (`container.name`). +func K8SContainerName(val string) attribute.KeyValue { + return K8SContainerNameKey.String(val) +} + +// K8SContainerRestartCount returns an attribute KeyValue conforming to the +// "k8s.container.restart_count" semantic conventions. It represents the number +// of times the container was restarted. This attribute can be used to identify a +// particular container (running or stopped) within a container spec. +func K8SContainerRestartCount(val int) attribute.KeyValue { + return K8SContainerRestartCountKey.Int(val) +} + +// K8SContainerStatusLastTerminatedReason returns an attribute KeyValue +// conforming to the "k8s.container.status.last_terminated_reason" semantic +// conventions. It represents the last terminated reason of the Container. +func K8SContainerStatusLastTerminatedReason(val string) attribute.KeyValue { + return K8SContainerStatusLastTerminatedReasonKey.String(val) +} + +// K8SCronJobName returns an attribute KeyValue conforming to the +// "k8s.cronjob.name" semantic conventions. It represents the name of the +// CronJob. +func K8SCronJobName(val string) attribute.KeyValue { + return K8SCronJobNameKey.String(val) +} + +// K8SCronJobUID returns an attribute KeyValue conforming to the +// "k8s.cronjob.uid" semantic conventions. It represents the UID of the CronJob. +func K8SCronJobUID(val string) attribute.KeyValue { + return K8SCronJobUIDKey.String(val) +} + +// K8SDaemonSetName returns an attribute KeyValue conforming to the +// "k8s.daemonset.name" semantic conventions. It represents the name of the +// DaemonSet. +func K8SDaemonSetName(val string) attribute.KeyValue { + return K8SDaemonSetNameKey.String(val) +} + +// K8SDaemonSetUID returns an attribute KeyValue conforming to the +// "k8s.daemonset.uid" semantic conventions. It represents the UID of the +// DaemonSet. +func K8SDaemonSetUID(val string) attribute.KeyValue { + return K8SDaemonSetUIDKey.String(val) +} + +// K8SDeploymentName returns an attribute KeyValue conforming to the +// "k8s.deployment.name" semantic conventions. It represents the name of the +// Deployment. +func K8SDeploymentName(val string) attribute.KeyValue { + return K8SDeploymentNameKey.String(val) +} + +// K8SDeploymentUID returns an attribute KeyValue conforming to the +// "k8s.deployment.uid" semantic conventions. It represents the UID of the +// Deployment. 
+func K8SDeploymentUID(val string) attribute.KeyValue { + return K8SDeploymentUIDKey.String(val) +} + +// K8SHPAName returns an attribute KeyValue conforming to the "k8s.hpa.name" +// semantic conventions. It represents the name of the horizontal pod autoscaler. +func K8SHPAName(val string) attribute.KeyValue { + return K8SHPANameKey.String(val) +} + +// K8SHPAUID returns an attribute KeyValue conforming to the "k8s.hpa.uid" +// semantic conventions. It represents the UID of the horizontal pod autoscaler. +func K8SHPAUID(val string) attribute.KeyValue { + return K8SHPAUIDKey.String(val) +} + +// K8SJobName returns an attribute KeyValue conforming to the "k8s.job.name" +// semantic conventions. It represents the name of the Job. +func K8SJobName(val string) attribute.KeyValue { + return K8SJobNameKey.String(val) +} + +// K8SJobUID returns an attribute KeyValue conforming to the "k8s.job.uid" +// semantic conventions. It represents the UID of the Job. +func K8SJobUID(val string) attribute.KeyValue { + return K8SJobUIDKey.String(val) +} + +// K8SNamespaceName returns an attribute KeyValue conforming to the +// "k8s.namespace.name" semantic conventions. It represents the name of the +// namespace that the pod is running in. +func K8SNamespaceName(val string) attribute.KeyValue { + return K8SNamespaceNameKey.String(val) +} + +// K8SNodeName returns an attribute KeyValue conforming to the "k8s.node.name" +// semantic conventions. It represents the name of the Node. +func K8SNodeName(val string) attribute.KeyValue { + return K8SNodeNameKey.String(val) +} + +// K8SNodeUID returns an attribute KeyValue conforming to the "k8s.node.uid" +// semantic conventions. It represents the UID of the Node. +func K8SNodeUID(val string) attribute.KeyValue { + return K8SNodeUIDKey.String(val) +} + +// K8SPodName returns an attribute KeyValue conforming to the "k8s.pod.name" +// semantic conventions. It represents the name of the Pod. +func K8SPodName(val string) attribute.KeyValue { + return K8SPodNameKey.String(val) +} + +// K8SPodUID returns an attribute KeyValue conforming to the "k8s.pod.uid" +// semantic conventions. It represents the UID of the Pod. +func K8SPodUID(val string) attribute.KeyValue { + return K8SPodUIDKey.String(val) +} + +// K8SReplicaSetName returns an attribute KeyValue conforming to the +// "k8s.replicaset.name" semantic conventions. It represents the name of the +// ReplicaSet. +func K8SReplicaSetName(val string) attribute.KeyValue { + return K8SReplicaSetNameKey.String(val) +} + +// K8SReplicaSetUID returns an attribute KeyValue conforming to the +// "k8s.replicaset.uid" semantic conventions. It represents the UID of the +// ReplicaSet. +func K8SReplicaSetUID(val string) attribute.KeyValue { + return K8SReplicaSetUIDKey.String(val) +} + +// K8SReplicationControllerName returns an attribute KeyValue conforming to the +// "k8s.replicationcontroller.name" semantic conventions. It represents the name +// of the replication controller. +func K8SReplicationControllerName(val string) attribute.KeyValue { + return K8SReplicationControllerNameKey.String(val) +} + +// K8SReplicationControllerUID returns an attribute KeyValue conforming to the +// "k8s.replicationcontroller.uid" semantic conventions. It represents the UID of +// the replication controller. 
+func K8SReplicationControllerUID(val string) attribute.KeyValue { + return K8SReplicationControllerUIDKey.String(val) +} + +// K8SResourceQuotaName returns an attribute KeyValue conforming to the +// "k8s.resourcequota.name" semantic conventions. It represents the name of the +// resource quota. +func K8SResourceQuotaName(val string) attribute.KeyValue { + return K8SResourceQuotaNameKey.String(val) +} + +// K8SResourceQuotaUID returns an attribute KeyValue conforming to the +// "k8s.resourcequota.uid" semantic conventions. It represents the UID of the +// resource quota. +func K8SResourceQuotaUID(val string) attribute.KeyValue { + return K8SResourceQuotaUIDKey.String(val) +} + +// K8SStatefulSetName returns an attribute KeyValue conforming to the +// "k8s.statefulset.name" semantic conventions. It represents the name of the +// StatefulSet. +func K8SStatefulSetName(val string) attribute.KeyValue { + return K8SStatefulSetNameKey.String(val) +} + +// K8SStatefulSetUID returns an attribute KeyValue conforming to the +// "k8s.statefulset.uid" semantic conventions. It represents the UID of the +// StatefulSet. +func K8SStatefulSetUID(val string) attribute.KeyValue { + return K8SStatefulSetUIDKey.String(val) +} + +// K8SVolumeName returns an attribute KeyValue conforming to the +// "k8s.volume.name" semantic conventions. It represents the name of the K8s +// volume. +func K8SVolumeName(val string) attribute.KeyValue { + return K8SVolumeNameKey.String(val) +} + +// Enum values for k8s.namespace.phase +var ( + // Active namespace phase as described by [K8s API] + // Stability: development + // + // [K8s API]: https://pkg.go.dev/k8s.io/api@v0.31.3/core/v1#NamespacePhase + K8SNamespacePhaseActive = K8SNamespacePhaseKey.String("active") + // Terminating namespace phase as described by [K8s API] + // Stability: development + // + // [K8s API]: https://pkg.go.dev/k8s.io/api@v0.31.3/core/v1#NamespacePhase + K8SNamespacePhaseTerminating = K8SNamespacePhaseKey.String("terminating") +) + +// Enum values for k8s.volume.type +var ( + // A [persistentVolumeClaim] volume + // Stability: development + // + // [persistentVolumeClaim]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#persistentvolumeclaim + K8SVolumeTypePersistentVolumeClaim = K8SVolumeTypeKey.String("persistentVolumeClaim") + // A [configMap] volume + // Stability: development + // + // [configMap]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#configmap + K8SVolumeTypeConfigMap = K8SVolumeTypeKey.String("configMap") + // A [downwardAPI] volume + // Stability: development + // + // [downwardAPI]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#downwardapi + K8SVolumeTypeDownwardAPI = K8SVolumeTypeKey.String("downwardAPI") + // An [emptyDir] volume + // Stability: development + // + // [emptyDir]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#emptydir + K8SVolumeTypeEmptyDir = K8SVolumeTypeKey.String("emptyDir") + // A [secret] volume + // Stability: development + // + // [secret]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#secret + K8SVolumeTypeSecret = K8SVolumeTypeKey.String("secret") + // A [local] volume + // Stability: development + // + // [local]: https://v1-30.docs.kubernetes.io/docs/concepts/storage/volumes/#local + K8SVolumeTypeLocal = K8SVolumeTypeKey.String("local") +) + +// Namespace: linux +const ( + // LinuxMemorySlabStateKey is the attribute Key conforming to the + // "linux.memory.slab.state" semantic conventions. 
It represents the Linux Slab + // memory state. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "reclaimable", "unreclaimable" + LinuxMemorySlabStateKey = attribute.Key("linux.memory.slab.state") +) + +// Enum values for linux.memory.slab.state +var ( + // reclaimable + // Stability: development + LinuxMemorySlabStateReclaimable = LinuxMemorySlabStateKey.String("reclaimable") + // unreclaimable + // Stability: development + LinuxMemorySlabStateUnreclaimable = LinuxMemorySlabStateKey.String("unreclaimable") +) + +// Namespace: log +const ( + // LogFileNameKey is the attribute Key conforming to the "log.file.name" + // semantic conventions. It represents the basename of the file. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "audit.log" + LogFileNameKey = attribute.Key("log.file.name") + + // LogFileNameResolvedKey is the attribute Key conforming to the + // "log.file.name_resolved" semantic conventions. It represents the basename of + // the file, with symlinks resolved. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "uuid.log" + LogFileNameResolvedKey = attribute.Key("log.file.name_resolved") + + // LogFilePathKey is the attribute Key conforming to the "log.file.path" + // semantic conventions. It represents the full path to the file. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/var/log/mysql/audit.log" + LogFilePathKey = attribute.Key("log.file.path") + + // LogFilePathResolvedKey is the attribute Key conforming to the + // "log.file.path_resolved" semantic conventions. It represents the full path to + // the file, with symlinks resolved. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/var/lib/docker/uuid.log" + LogFilePathResolvedKey = attribute.Key("log.file.path_resolved") + + // LogIostreamKey is the attribute Key conforming to the "log.iostream" semantic + // conventions. It represents the stream associated with the log. See below for + // a list of well-known values. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + LogIostreamKey = attribute.Key("log.iostream") + + // LogRecordOriginalKey is the attribute Key conforming to the + // "log.record.original" semantic conventions. It represents the complete + // original Log Record. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "77 <86>1 2015-08-06T21:58:59.694Z 192.168.2.133 inactive - - - + // Something happened", "[INFO] 8/3/24 12:34:56 Something happened" + // Note: This value MAY be added when processing a Log Record which was + // originally transmitted as a string or equivalent data type AND the Body field + // of the Log Record does not contain the same value. (e.g. a syslog or a log + // record read from a file.) + LogRecordOriginalKey = attribute.Key("log.record.original") + + // LogRecordUIDKey is the attribute Key conforming to the "log.record.uid" + // semantic conventions. It represents a unique identifier for the Log Record. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "01ARZ3NDEKTSV4RRFFQ69G5FAV" + // Note: If an id is provided, other log records with the same id will be + // considered duplicates and can be removed safely. 
This means, that two + // distinguishable log records MUST have different values. + // The id MAY be an + // [Universally Unique Lexicographically Sortable Identifier (ULID)], but other + // identifiers (e.g. UUID) may be used as needed. + // + // [Universally Unique Lexicographically Sortable Identifier (ULID)]: https://github.com/ulid/spec + LogRecordUIDKey = attribute.Key("log.record.uid") +) + +// LogFileName returns an attribute KeyValue conforming to the "log.file.name" +// semantic conventions. It represents the basename of the file. +func LogFileName(val string) attribute.KeyValue { + return LogFileNameKey.String(val) +} + +// LogFileNameResolved returns an attribute KeyValue conforming to the +// "log.file.name_resolved" semantic conventions. It represents the basename of +// the file, with symlinks resolved. +func LogFileNameResolved(val string) attribute.KeyValue { + return LogFileNameResolvedKey.String(val) +} + +// LogFilePath returns an attribute KeyValue conforming to the "log.file.path" +// semantic conventions. It represents the full path to the file. +func LogFilePath(val string) attribute.KeyValue { + return LogFilePathKey.String(val) +} + +// LogFilePathResolved returns an attribute KeyValue conforming to the +// "log.file.path_resolved" semantic conventions. It represents the full path to +// the file, with symlinks resolved. +func LogFilePathResolved(val string) attribute.KeyValue { + return LogFilePathResolvedKey.String(val) +} + +// LogRecordOriginal returns an attribute KeyValue conforming to the +// "log.record.original" semantic conventions. It represents the complete +// original Log Record. +func LogRecordOriginal(val string) attribute.KeyValue { + return LogRecordOriginalKey.String(val) +} + +// LogRecordUID returns an attribute KeyValue conforming to the "log.record.uid" +// semantic conventions. It represents a unique identifier for the Log Record. +func LogRecordUID(val string) attribute.KeyValue { + return LogRecordUIDKey.String(val) +} + +// Enum values for log.iostream +var ( + // Logs from stdout stream + // Stability: development + LogIostreamStdout = LogIostreamKey.String("stdout") + // Events from stderr stream + // Stability: development + LogIostreamStderr = LogIostreamKey.String("stderr") +) + +// Namespace: messaging +const ( + // MessagingBatchMessageCountKey is the attribute Key conforming to the + // "messaging.batch.message_count" semantic conventions. It represents the + // number of messages sent, received, or processed in the scope of the batching + // operation. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 0, 1, 2 + // Note: Instrumentations SHOULD NOT set `messaging.batch.message_count` on + // spans that operate with a single message. When a messaging client library + // supports both batch and single-message API for the same operation, + // instrumentations SHOULD use `messaging.batch.message_count` for batching APIs + // and SHOULD NOT use it for single-message APIs. + MessagingBatchMessageCountKey = attribute.Key("messaging.batch.message_count") + + // MessagingClientIDKey is the attribute Key conforming to the + // "messaging.client.id" semantic conventions. It represents a unique identifier + // for the client that consumes or produces a message. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "client-5", "myhost@8742@s8083jm" + MessagingClientIDKey = attribute.Key("messaging.client.id") + + // MessagingConsumerGroupNameKey is the attribute Key conforming to the + // "messaging.consumer.group.name" semantic conventions. It represents the name + // of the consumer group with which a consumer is associated. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-group", "indexer" + // Note: Semantic conventions for individual messaging systems SHOULD document + // whether `messaging.consumer.group.name` is applicable and what it means in + // the context of that system. + MessagingConsumerGroupNameKey = attribute.Key("messaging.consumer.group.name") + + // MessagingDestinationAnonymousKey is the attribute Key conforming to the + // "messaging.destination.anonymous" semantic conventions. It represents a + // boolean that is true if the message destination is anonymous (could be + // unnamed or have auto-generated name). + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + MessagingDestinationAnonymousKey = attribute.Key("messaging.destination.anonymous") + + // MessagingDestinationNameKey is the attribute Key conforming to the + // "messaging.destination.name" semantic conventions. It represents the message + // destination name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "MyQueue", "MyTopic" + // Note: Destination name SHOULD uniquely identify a specific queue, topic or + // other entity within the broker. If + // the broker doesn't have such notion, the destination name SHOULD uniquely + // identify the broker. + MessagingDestinationNameKey = attribute.Key("messaging.destination.name") + + // MessagingDestinationPartitionIDKey is the attribute Key conforming to the + // "messaging.destination.partition.id" semantic conventions. It represents the + // identifier of the partition messages are sent to or received from, unique + // within the `messaging.destination.name`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1 + MessagingDestinationPartitionIDKey = attribute.Key("messaging.destination.partition.id") + + // MessagingDestinationSubscriptionNameKey is the attribute Key conforming to + // the "messaging.destination.subscription.name" semantic conventions. It + // represents the name of the destination subscription from which a message is + // consumed. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "subscription-a" + // Note: Semantic conventions for individual messaging systems SHOULD document + // whether `messaging.destination.subscription.name` is applicable and what it + // means in the context of that system. + MessagingDestinationSubscriptionNameKey = attribute.Key("messaging.destination.subscription.name") + + // MessagingDestinationTemplateKey is the attribute Key conforming to the + // "messaging.destination.template" semantic conventions. It represents the low + // cardinality representation of the messaging destination name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/customers/{customerId}" + // Note: Destination names could be constructed from templates. 
An example would + // be a destination name involving a user name or product id. Although the + // destination name in this case is of high cardinality, the underlying template + // is of low cardinality and can be effectively used for grouping and + // aggregation. + MessagingDestinationTemplateKey = attribute.Key("messaging.destination.template") + + // MessagingDestinationTemporaryKey is the attribute Key conforming to the + // "messaging.destination.temporary" semantic conventions. It represents a + // boolean that is true if the message destination is temporary and might not + // exist anymore after messages are processed. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + MessagingDestinationTemporaryKey = attribute.Key("messaging.destination.temporary") + + // MessagingEventHubsMessageEnqueuedTimeKey is the attribute Key conforming to + // the "messaging.eventhubs.message.enqueued_time" semantic conventions. It + // represents the UTC epoch seconds at which the message has been accepted and + // stored in the entity. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingEventHubsMessageEnqueuedTimeKey = attribute.Key("messaging.eventhubs.message.enqueued_time") + + // MessagingGCPPubSubMessageAckDeadlineKey is the attribute Key conforming to + // the "messaging.gcp_pubsub.message.ack_deadline" semantic conventions. It + // represents the ack deadline in seconds set for the modify ack deadline + // request. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingGCPPubSubMessageAckDeadlineKey = attribute.Key("messaging.gcp_pubsub.message.ack_deadline") + + // MessagingGCPPubSubMessageAckIDKey is the attribute Key conforming to the + // "messaging.gcp_pubsub.message.ack_id" semantic conventions. It represents the + // ack id for a given message. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: ack_id + MessagingGCPPubSubMessageAckIDKey = attribute.Key("messaging.gcp_pubsub.message.ack_id") + + // MessagingGCPPubSubMessageDeliveryAttemptKey is the attribute Key conforming + // to the "messaging.gcp_pubsub.message.delivery_attempt" semantic conventions. + // It represents the delivery attempt for a given message. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingGCPPubSubMessageDeliveryAttemptKey = attribute.Key("messaging.gcp_pubsub.message.delivery_attempt") + + // MessagingGCPPubSubMessageOrderingKeyKey is the attribute Key conforming to + // the "messaging.gcp_pubsub.message.ordering_key" semantic conventions. It + // represents the ordering key for a given message. If the attribute is not + // present, the message does not have an ordering key. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: ordering_key + MessagingGCPPubSubMessageOrderingKeyKey = attribute.Key("messaging.gcp_pubsub.message.ordering_key") + + // MessagingKafkaMessageKeyKey is the attribute Key conforming to the + // "messaging.kafka.message.key" semantic conventions. It represents the message + // keys in Kafka are used for grouping alike messages to ensure they're + // processed on the same partition. They differ from `messaging.message.id` in + // that they're not unique. If the key is `null`, the attribute MUST NOT be set. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: myKey + // Note: If the key type is not string, it's string representation has to be + // supplied for the attribute. If the key has no unambiguous, canonical string + // form, don't include its value. + MessagingKafkaMessageKeyKey = attribute.Key("messaging.kafka.message.key") + + // MessagingKafkaMessageTombstoneKey is the attribute Key conforming to the + // "messaging.kafka.message.tombstone" semantic conventions. It represents a + // boolean that is true if the message is a tombstone. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + MessagingKafkaMessageTombstoneKey = attribute.Key("messaging.kafka.message.tombstone") + + // MessagingKafkaOffsetKey is the attribute Key conforming to the + // "messaging.kafka.offset" semantic conventions. It represents the offset of a + // record in the corresponding Kafka partition. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingKafkaOffsetKey = attribute.Key("messaging.kafka.offset") + + // MessagingMessageBodySizeKey is the attribute Key conforming to the + // "messaging.message.body.size" semantic conventions. It represents the size of + // the message body in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Note: This can refer to both the compressed or uncompressed body size. If + // both sizes are known, the uncompressed + // body size should be used. + MessagingMessageBodySizeKey = attribute.Key("messaging.message.body.size") + + // MessagingMessageConversationIDKey is the attribute Key conforming to the + // "messaging.message.conversation_id" semantic conventions. It represents the + // conversation ID identifying the conversation to which the message belongs, + // represented as a string. Sometimes called "Correlation ID". + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: MyConversationId + MessagingMessageConversationIDKey = attribute.Key("messaging.message.conversation_id") + + // MessagingMessageEnvelopeSizeKey is the attribute Key conforming to the + // "messaging.message.envelope.size" semantic conventions. It represents the + // size of the message body and metadata in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Note: This can refer to both the compressed or uncompressed size. If both + // sizes are known, the uncompressed + // size should be used. + MessagingMessageEnvelopeSizeKey = attribute.Key("messaging.message.envelope.size") + + // MessagingMessageIDKey is the attribute Key conforming to the + // "messaging.message.id" semantic conventions. It represents a value used by + // the messaging system as an identifier for the message, represented as a + // string. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 452a7c7c7c7048c2f887f61572b18fc2 + MessagingMessageIDKey = attribute.Key("messaging.message.id") + + // MessagingOperationNameKey is the attribute Key conforming to the + // "messaging.operation.name" semantic conventions. It represents the + // system-specific name of the messaging operation. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "ack", "nack", "send" + MessagingOperationNameKey = attribute.Key("messaging.operation.name") + + // MessagingOperationTypeKey is the attribute Key conforming to the + // "messaging.operation.type" semantic conventions. It represents a string + // identifying the type of the messaging operation. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: If a custom value is used, it MUST be of low cardinality. + MessagingOperationTypeKey = attribute.Key("messaging.operation.type") + + // MessagingRabbitMQDestinationRoutingKeyKey is the attribute Key conforming to + // the "messaging.rabbitmq.destination.routing_key" semantic conventions. It + // represents the rabbitMQ message routing key. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: myKey + MessagingRabbitMQDestinationRoutingKeyKey = attribute.Key("messaging.rabbitmq.destination.routing_key") + + // MessagingRabbitMQMessageDeliveryTagKey is the attribute Key conforming to the + // "messaging.rabbitmq.message.delivery_tag" semantic conventions. It represents + // the rabbitMQ message delivery tag. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingRabbitMQMessageDeliveryTagKey = attribute.Key("messaging.rabbitmq.message.delivery_tag") + + // MessagingRocketMQConsumptionModelKey is the attribute Key conforming to the + // "messaging.rocketmq.consumption_model" semantic conventions. It represents + // the model of message consumption. This only applies to consumer spans. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + MessagingRocketMQConsumptionModelKey = attribute.Key("messaging.rocketmq.consumption_model") + + // MessagingRocketMQMessageDelayTimeLevelKey is the attribute Key conforming to + // the "messaging.rocketmq.message.delay_time_level" semantic conventions. It + // represents the delay time level for delay message, which determines the + // message delay time. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingRocketMQMessageDelayTimeLevelKey = attribute.Key("messaging.rocketmq.message.delay_time_level") + + // MessagingRocketMQMessageDeliveryTimestampKey is the attribute Key conforming + // to the "messaging.rocketmq.message.delivery_timestamp" semantic conventions. + // It represents the timestamp in milliseconds that the delay message is + // expected to be delivered to consumer. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingRocketMQMessageDeliveryTimestampKey = attribute.Key("messaging.rocketmq.message.delivery_timestamp") + + // MessagingRocketMQMessageGroupKey is the attribute Key conforming to the + // "messaging.rocketmq.message.group" semantic conventions. It represents the it + // is essential for FIFO message. Messages that belong to the same message group + // are always processed one by one within the same consumer group. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: myMessageGroup + MessagingRocketMQMessageGroupKey = attribute.Key("messaging.rocketmq.message.group") + + // MessagingRocketMQMessageKeysKey is the attribute Key conforming to the + // "messaging.rocketmq.message.keys" semantic conventions. 
It represents the + // key(s) of message, another way to mark message besides message id. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "keyA", "keyB" + MessagingRocketMQMessageKeysKey = attribute.Key("messaging.rocketmq.message.keys") + + // MessagingRocketMQMessageTagKey is the attribute Key conforming to the + // "messaging.rocketmq.message.tag" semantic conventions. It represents the + // secondary classifier of message besides topic. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: tagA + MessagingRocketMQMessageTagKey = attribute.Key("messaging.rocketmq.message.tag") + + // MessagingRocketMQMessageTypeKey is the attribute Key conforming to the + // "messaging.rocketmq.message.type" semantic conventions. It represents the + // type of message. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + MessagingRocketMQMessageTypeKey = attribute.Key("messaging.rocketmq.message.type") + + // MessagingRocketMQNamespaceKey is the attribute Key conforming to the + // "messaging.rocketmq.namespace" semantic conventions. It represents the + // namespace of RocketMQ resources, resources in different namespaces are + // individual. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: myNamespace + MessagingRocketMQNamespaceKey = attribute.Key("messaging.rocketmq.namespace") + + // MessagingServiceBusDispositionStatusKey is the attribute Key conforming to + // the "messaging.servicebus.disposition_status" semantic conventions. It + // represents the describes the [settlement type]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // + // [settlement type]: https://learn.microsoft.com/azure/service-bus-messaging/message-transfers-locks-settlement#peeklock + MessagingServiceBusDispositionStatusKey = attribute.Key("messaging.servicebus.disposition_status") + + // MessagingServiceBusMessageDeliveryCountKey is the attribute Key conforming to + // the "messaging.servicebus.message.delivery_count" semantic conventions. It + // represents the number of deliveries that have been attempted for this + // message. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingServiceBusMessageDeliveryCountKey = attribute.Key("messaging.servicebus.message.delivery_count") + + // MessagingServiceBusMessageEnqueuedTimeKey is the attribute Key conforming to + // the "messaging.servicebus.message.enqueued_time" semantic conventions. It + // represents the UTC epoch seconds at which the message has been accepted and + // stored in the entity. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + MessagingServiceBusMessageEnqueuedTimeKey = attribute.Key("messaging.servicebus.message.enqueued_time") + + // MessagingSystemKey is the attribute Key conforming to the "messaging.system" + // semantic conventions. It represents the messaging system as identified by the + // client instrumentation. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: The actual messaging system may differ from the one known by the + // client. For example, when using Kafka client libraries to communicate with + // Azure Event Hubs, the `messaging.system` is set to `kafka` based on the + // instrumentation's best knowledge. 
+ MessagingSystemKey = attribute.Key("messaging.system") +) + +// MessagingBatchMessageCount returns an attribute KeyValue conforming to the +// "messaging.batch.message_count" semantic conventions. It represents the number +// of messages sent, received, or processed in the scope of the batching +// operation. +func MessagingBatchMessageCount(val int) attribute.KeyValue { + return MessagingBatchMessageCountKey.Int(val) +} + +// MessagingClientID returns an attribute KeyValue conforming to the +// "messaging.client.id" semantic conventions. It represents a unique identifier +// for the client that consumes or produces a message. +func MessagingClientID(val string) attribute.KeyValue { + return MessagingClientIDKey.String(val) +} + +// MessagingConsumerGroupName returns an attribute KeyValue conforming to the +// "messaging.consumer.group.name" semantic conventions. It represents the name +// of the consumer group with which a consumer is associated. +func MessagingConsumerGroupName(val string) attribute.KeyValue { + return MessagingConsumerGroupNameKey.String(val) +} + +// MessagingDestinationAnonymous returns an attribute KeyValue conforming to the +// "messaging.destination.anonymous" semantic conventions. It represents a +// boolean that is true if the message destination is anonymous (could be unnamed +// or have auto-generated name). +func MessagingDestinationAnonymous(val bool) attribute.KeyValue { + return MessagingDestinationAnonymousKey.Bool(val) +} + +// MessagingDestinationName returns an attribute KeyValue conforming to the +// "messaging.destination.name" semantic conventions. It represents the message +// destination name. +func MessagingDestinationName(val string) attribute.KeyValue { + return MessagingDestinationNameKey.String(val) +} + +// MessagingDestinationPartitionID returns an attribute KeyValue conforming to +// the "messaging.destination.partition.id" semantic conventions. It represents +// the identifier of the partition messages are sent to or received from, unique +// within the `messaging.destination.name`. +func MessagingDestinationPartitionID(val string) attribute.KeyValue { + return MessagingDestinationPartitionIDKey.String(val) +} + +// MessagingDestinationSubscriptionName returns an attribute KeyValue conforming +// to the "messaging.destination.subscription.name" semantic conventions. It +// represents the name of the destination subscription from which a message is +// consumed. +func MessagingDestinationSubscriptionName(val string) attribute.KeyValue { + return MessagingDestinationSubscriptionNameKey.String(val) +} + +// MessagingDestinationTemplate returns an attribute KeyValue conforming to the +// "messaging.destination.template" semantic conventions. It represents the low +// cardinality representation of the messaging destination name. +func MessagingDestinationTemplate(val string) attribute.KeyValue { + return MessagingDestinationTemplateKey.String(val) +} + +// MessagingDestinationTemporary returns an attribute KeyValue conforming to the +// "messaging.destination.temporary" semantic conventions. It represents a +// boolean that is true if the message destination is temporary and might not +// exist anymore after messages are processed. +func MessagingDestinationTemporary(val bool) attribute.KeyValue { + return MessagingDestinationTemporaryKey.Bool(val) +} + +// MessagingEventHubsMessageEnqueuedTime returns an attribute KeyValue conforming +// to the "messaging.eventhubs.message.enqueued_time" semantic conventions. 
It +// represents the UTC epoch seconds at which the message has been accepted and +// stored in the entity. +func MessagingEventHubsMessageEnqueuedTime(val int) attribute.KeyValue { + return MessagingEventHubsMessageEnqueuedTimeKey.Int(val) +} + +// MessagingGCPPubSubMessageAckDeadline returns an attribute KeyValue conforming +// to the "messaging.gcp_pubsub.message.ack_deadline" semantic conventions. It +// represents the ack deadline in seconds set for the modify ack deadline +// request. +func MessagingGCPPubSubMessageAckDeadline(val int) attribute.KeyValue { + return MessagingGCPPubSubMessageAckDeadlineKey.Int(val) +} + +// MessagingGCPPubSubMessageAckID returns an attribute KeyValue conforming to the +// "messaging.gcp_pubsub.message.ack_id" semantic conventions. It represents the +// ack id for a given message. +func MessagingGCPPubSubMessageAckID(val string) attribute.KeyValue { + return MessagingGCPPubSubMessageAckIDKey.String(val) +} + +// MessagingGCPPubSubMessageDeliveryAttempt returns an attribute KeyValue +// conforming to the "messaging.gcp_pubsub.message.delivery_attempt" semantic +// conventions. It represents the delivery attempt for a given message. +func MessagingGCPPubSubMessageDeliveryAttempt(val int) attribute.KeyValue { + return MessagingGCPPubSubMessageDeliveryAttemptKey.Int(val) +} + +// MessagingGCPPubSubMessageOrderingKey returns an attribute KeyValue conforming +// to the "messaging.gcp_pubsub.message.ordering_key" semantic conventions. It +// represents the ordering key for a given message. If the attribute is not +// present, the message does not have an ordering key. +func MessagingGCPPubSubMessageOrderingKey(val string) attribute.KeyValue { + return MessagingGCPPubSubMessageOrderingKeyKey.String(val) +} + +// MessagingKafkaMessageKey returns an attribute KeyValue conforming to the +// "messaging.kafka.message.key" semantic conventions. It represents the message +// keys in Kafka are used for grouping alike messages to ensure they're processed +// on the same partition. They differ from `messaging.message.id` in that they're +// not unique. If the key is `null`, the attribute MUST NOT be set. +func MessagingKafkaMessageKey(val string) attribute.KeyValue { + return MessagingKafkaMessageKeyKey.String(val) +} + +// MessagingKafkaMessageTombstone returns an attribute KeyValue conforming to the +// "messaging.kafka.message.tombstone" semantic conventions. It represents a +// boolean that is true if the message is a tombstone. +func MessagingKafkaMessageTombstone(val bool) attribute.KeyValue { + return MessagingKafkaMessageTombstoneKey.Bool(val) +} + +// MessagingKafkaOffset returns an attribute KeyValue conforming to the +// "messaging.kafka.offset" semantic conventions. It represents the offset of a +// record in the corresponding Kafka partition. +func MessagingKafkaOffset(val int) attribute.KeyValue { + return MessagingKafkaOffsetKey.Int(val) +} + +// MessagingMessageBodySize returns an attribute KeyValue conforming to the +// "messaging.message.body.size" semantic conventions. It represents the size of +// the message body in bytes. +func MessagingMessageBodySize(val int) attribute.KeyValue { + return MessagingMessageBodySizeKey.Int(val) +} + +// MessagingMessageConversationID returns an attribute KeyValue conforming to the +// "messaging.message.conversation_id" semantic conventions. It represents the +// conversation ID identifying the conversation to which the message belongs, +// represented as a string. Sometimes called "Correlation ID". 
+func MessagingMessageConversationID(val string) attribute.KeyValue { + return MessagingMessageConversationIDKey.String(val) +} + +// MessagingMessageEnvelopeSize returns an attribute KeyValue conforming to the +// "messaging.message.envelope.size" semantic conventions. It represents the size +// of the message body and metadata in bytes. +func MessagingMessageEnvelopeSize(val int) attribute.KeyValue { + return MessagingMessageEnvelopeSizeKey.Int(val) +} + +// MessagingMessageID returns an attribute KeyValue conforming to the +// "messaging.message.id" semantic conventions. It represents a value used by the +// messaging system as an identifier for the message, represented as a string. +func MessagingMessageID(val string) attribute.KeyValue { + return MessagingMessageIDKey.String(val) +} + +// MessagingOperationName returns an attribute KeyValue conforming to the +// "messaging.operation.name" semantic conventions. It represents the +// system-specific name of the messaging operation. +func MessagingOperationName(val string) attribute.KeyValue { + return MessagingOperationNameKey.String(val) +} + +// MessagingRabbitMQDestinationRoutingKey returns an attribute KeyValue +// conforming to the "messaging.rabbitmq.destination.routing_key" semantic +// conventions. It represents the rabbitMQ message routing key. +func MessagingRabbitMQDestinationRoutingKey(val string) attribute.KeyValue { + return MessagingRabbitMQDestinationRoutingKeyKey.String(val) +} + +// MessagingRabbitMQMessageDeliveryTag returns an attribute KeyValue conforming +// to the "messaging.rabbitmq.message.delivery_tag" semantic conventions. It +// represents the rabbitMQ message delivery tag. +func MessagingRabbitMQMessageDeliveryTag(val int) attribute.KeyValue { + return MessagingRabbitMQMessageDeliveryTagKey.Int(val) +} + +// MessagingRocketMQMessageDelayTimeLevel returns an attribute KeyValue +// conforming to the "messaging.rocketmq.message.delay_time_level" semantic +// conventions. It represents the delay time level for delay message, which +// determines the message delay time. +func MessagingRocketMQMessageDelayTimeLevel(val int) attribute.KeyValue { + return MessagingRocketMQMessageDelayTimeLevelKey.Int(val) +} + +// MessagingRocketMQMessageDeliveryTimestamp returns an attribute KeyValue +// conforming to the "messaging.rocketmq.message.delivery_timestamp" semantic +// conventions. It represents the timestamp in milliseconds that the delay +// message is expected to be delivered to consumer. +func MessagingRocketMQMessageDeliveryTimestamp(val int) attribute.KeyValue { + return MessagingRocketMQMessageDeliveryTimestampKey.Int(val) +} + +// MessagingRocketMQMessageGroup returns an attribute KeyValue conforming to the +// "messaging.rocketmq.message.group" semantic conventions. It represents the it +// is essential for FIFO message. Messages that belong to the same message group +// are always processed one by one within the same consumer group. +func MessagingRocketMQMessageGroup(val string) attribute.KeyValue { + return MessagingRocketMQMessageGroupKey.String(val) +} + +// MessagingRocketMQMessageKeys returns an attribute KeyValue conforming to the +// "messaging.rocketmq.message.keys" semantic conventions. It represents the +// key(s) of message, another way to mark message besides message id. 
+func MessagingRocketMQMessageKeys(val ...string) attribute.KeyValue { + return MessagingRocketMQMessageKeysKey.StringSlice(val) +} + +// MessagingRocketMQMessageTag returns an attribute KeyValue conforming to the +// "messaging.rocketmq.message.tag" semantic conventions. It represents the +// secondary classifier of message besides topic. +func MessagingRocketMQMessageTag(val string) attribute.KeyValue { + return MessagingRocketMQMessageTagKey.String(val) +} + +// MessagingRocketMQNamespace returns an attribute KeyValue conforming to the +// "messaging.rocketmq.namespace" semantic conventions. It represents the +// namespace of RocketMQ resources, resources in different namespaces are +// individual. +func MessagingRocketMQNamespace(val string) attribute.KeyValue { + return MessagingRocketMQNamespaceKey.String(val) +} + +// MessagingServiceBusMessageDeliveryCount returns an attribute KeyValue +// conforming to the "messaging.servicebus.message.delivery_count" semantic +// conventions. It represents the number of deliveries that have been attempted +// for this message. +func MessagingServiceBusMessageDeliveryCount(val int) attribute.KeyValue { + return MessagingServiceBusMessageDeliveryCountKey.Int(val) +} + +// MessagingServiceBusMessageEnqueuedTime returns an attribute KeyValue +// conforming to the "messaging.servicebus.message.enqueued_time" semantic +// conventions. It represents the UTC epoch seconds at which the message has been +// accepted and stored in the entity. +func MessagingServiceBusMessageEnqueuedTime(val int) attribute.KeyValue { + return MessagingServiceBusMessageEnqueuedTimeKey.Int(val) +} + +// Enum values for messaging.operation.type +var ( + // A message is created. "Create" spans always refer to a single message and are + // used to provide a unique creation context for messages in batch sending + // scenarios. + // + // Stability: development + MessagingOperationTypeCreate = MessagingOperationTypeKey.String("create") + // One or more messages are provided for sending to an intermediary. If a single + // message is sent, the context of the "Send" span can be used as the creation + // context and no "Create" span needs to be created. + // + // Stability: development + MessagingOperationTypeSend = MessagingOperationTypeKey.String("send") + // One or more messages are requested by a consumer. This operation refers to + // pull-based scenarios, where consumers explicitly call methods of messaging + // SDKs to receive messages. + // + // Stability: development + MessagingOperationTypeReceive = MessagingOperationTypeKey.String("receive") + // One or more messages are processed by a consumer. + // + // Stability: development + MessagingOperationTypeProcess = MessagingOperationTypeKey.String("process") + // One or more messages are settled. + // + // Stability: development + MessagingOperationTypeSettle = MessagingOperationTypeKey.String("settle") + // Deprecated: Replaced by `process`. + MessagingOperationTypeDeliver = MessagingOperationTypeKey.String("deliver") + // Deprecated: Replaced by `send`. 
+ MessagingOperationTypePublish = MessagingOperationTypeKey.String("publish") +) + +// Enum values for messaging.rocketmq.consumption_model +var ( + // Clustering consumption model + // Stability: development + MessagingRocketMQConsumptionModelClustering = MessagingRocketMQConsumptionModelKey.String("clustering") + // Broadcasting consumption model + // Stability: development + MessagingRocketMQConsumptionModelBroadcasting = MessagingRocketMQConsumptionModelKey.String("broadcasting") +) + +// Enum values for messaging.rocketmq.message.type +var ( + // Normal message + // Stability: development + MessagingRocketMQMessageTypeNormal = MessagingRocketMQMessageTypeKey.String("normal") + // FIFO message + // Stability: development + MessagingRocketMQMessageTypeFifo = MessagingRocketMQMessageTypeKey.String("fifo") + // Delay message + // Stability: development + MessagingRocketMQMessageTypeDelay = MessagingRocketMQMessageTypeKey.String("delay") + // Transaction message + // Stability: development + MessagingRocketMQMessageTypeTransaction = MessagingRocketMQMessageTypeKey.String("transaction") +) + +// Enum values for messaging.servicebus.disposition_status +var ( + // Message is completed + // Stability: development + MessagingServiceBusDispositionStatusComplete = MessagingServiceBusDispositionStatusKey.String("complete") + // Message is abandoned + // Stability: development + MessagingServiceBusDispositionStatusAbandon = MessagingServiceBusDispositionStatusKey.String("abandon") + // Message is sent to dead letter queue + // Stability: development + MessagingServiceBusDispositionStatusDeadLetter = MessagingServiceBusDispositionStatusKey.String("dead_letter") + // Message is deferred + // Stability: development + MessagingServiceBusDispositionStatusDefer = MessagingServiceBusDispositionStatusKey.String("defer") +) + +// Enum values for messaging.system +var ( + // Apache ActiveMQ + // Stability: development + MessagingSystemActiveMQ = MessagingSystemKey.String("activemq") + // Amazon Simple Queue Service (SQS) + // Stability: development + MessagingSystemAWSSQS = MessagingSystemKey.String("aws_sqs") + // Azure Event Grid + // Stability: development + MessagingSystemEventGrid = MessagingSystemKey.String("eventgrid") + // Azure Event Hubs + // Stability: development + MessagingSystemEventHubs = MessagingSystemKey.String("eventhubs") + // Azure Service Bus + // Stability: development + MessagingSystemServiceBus = MessagingSystemKey.String("servicebus") + // Google Cloud Pub/Sub + // Stability: development + MessagingSystemGCPPubSub = MessagingSystemKey.String("gcp_pubsub") + // Java Message Service + // Stability: development + MessagingSystemJMS = MessagingSystemKey.String("jms") + // Apache Kafka + // Stability: development + MessagingSystemKafka = MessagingSystemKey.String("kafka") + // RabbitMQ + // Stability: development + MessagingSystemRabbitMQ = MessagingSystemKey.String("rabbitmq") + // Apache RocketMQ + // Stability: development + MessagingSystemRocketMQ = MessagingSystemKey.String("rocketmq") + // Apache Pulsar + // Stability: development + MessagingSystemPulsar = MessagingSystemKey.String("pulsar") +) + +// Namespace: network +const ( + // NetworkCarrierICCKey is the attribute Key conforming to the + // "network.carrier.icc" semantic conventions. It represents the ISO 3166-1 + // alpha-2 2-character country code associated with the mobile carrier network. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: DE + NetworkCarrierICCKey = attribute.Key("network.carrier.icc") + + // NetworkCarrierMCCKey is the attribute Key conforming to the + // "network.carrier.mcc" semantic conventions. It represents the mobile carrier + // country code. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 310 + NetworkCarrierMCCKey = attribute.Key("network.carrier.mcc") + + // NetworkCarrierMNCKey is the attribute Key conforming to the + // "network.carrier.mnc" semantic conventions. It represents the mobile carrier + // network code. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 001 + NetworkCarrierMNCKey = attribute.Key("network.carrier.mnc") + + // NetworkCarrierNameKey is the attribute Key conforming to the + // "network.carrier.name" semantic conventions. It represents the name of the + // mobile carrier. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: sprint + NetworkCarrierNameKey = attribute.Key("network.carrier.name") + + // NetworkConnectionStateKey is the attribute Key conforming to the + // "network.connection.state" semantic conventions. It represents the state of + // network connection. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "close_wait" + // Note: Connection states are defined as part of the [rfc9293] + // + // [rfc9293]: https://datatracker.ietf.org/doc/html/rfc9293#section-3.3.2 + NetworkConnectionStateKey = attribute.Key("network.connection.state") + + // NetworkConnectionSubtypeKey is the attribute Key conforming to the + // "network.connection.subtype" semantic conventions. It represents the this + // describes more details regarding the connection.type. It may be the type of + // cell technology connection, but it could be used for describing details about + // a wifi connection. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: LTE + NetworkConnectionSubtypeKey = attribute.Key("network.connection.subtype") + + // NetworkConnectionTypeKey is the attribute Key conforming to the + // "network.connection.type" semantic conventions. It represents the internet + // connection type. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: wifi + NetworkConnectionTypeKey = attribute.Key("network.connection.type") + + // NetworkInterfaceNameKey is the attribute Key conforming to the + // "network.interface.name" semantic conventions. It represents the network + // interface name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "lo", "eth0" + NetworkInterfaceNameKey = attribute.Key("network.interface.name") + + // NetworkIODirectionKey is the attribute Key conforming to the + // "network.io.direction" semantic conventions. It represents the network IO + // operation direction. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "transmit" + NetworkIODirectionKey = attribute.Key("network.io.direction") + + // NetworkLocalAddressKey is the attribute Key conforming to the + // "network.local.address" semantic conventions. It represents the local address + // of the network connection - IP address or Unix domain socket name. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "10.1.2.80", "/tmp/my.sock" + NetworkLocalAddressKey = attribute.Key("network.local.address") + + // NetworkLocalPortKey is the attribute Key conforming to the + // "network.local.port" semantic conventions. It represents the local port + // number of the network connection. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: 65123 + NetworkLocalPortKey = attribute.Key("network.local.port") + + // NetworkPeerAddressKey is the attribute Key conforming to the + // "network.peer.address" semantic conventions. It represents the peer address + // of the network connection - IP address or Unix domain socket name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "10.1.2.80", "/tmp/my.sock" + NetworkPeerAddressKey = attribute.Key("network.peer.address") + + // NetworkPeerPortKey is the attribute Key conforming to the "network.peer.port" + // semantic conventions. It represents the peer port number of the network + // connection. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: 65123 + NetworkPeerPortKey = attribute.Key("network.peer.port") + + // NetworkProtocolNameKey is the attribute Key conforming to the + // "network.protocol.name" semantic conventions. It represents the + // [OSI application layer] or non-OSI equivalent. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "amqp", "http", "mqtt" + // Note: The value SHOULD be normalized to lowercase. + // + // [OSI application layer]: https://wikipedia.org/wiki/Application_layer + NetworkProtocolNameKey = attribute.Key("network.protocol.name") + + // NetworkProtocolVersionKey is the attribute Key conforming to the + // "network.protocol.version" semantic conventions. It represents the actual + // version of the protocol used for network communication. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "1.1", "2" + // Note: If protocol version is subject to negotiation (for example using [ALPN] + // ), this attribute SHOULD be set to the negotiated version. If the actual + // protocol version is not known, this attribute SHOULD NOT be set. + // + // [ALPN]: https://www.rfc-editor.org/rfc/rfc7301.html + NetworkProtocolVersionKey = attribute.Key("network.protocol.version") + + // NetworkTransportKey is the attribute Key conforming to the + // "network.transport" semantic conventions. It represents the + // [OSI transport layer] or [inter-process communication method]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "tcp", "udp" + // Note: The value SHOULD be normalized to lowercase. + // + // Consider always setting the transport when setting a port number, since + // a port number is ambiguous without knowing the transport. For example + // different processes could be listening on TCP port 12345 and UDP port 12345. + // + // [OSI transport layer]: https://wikipedia.org/wiki/Transport_layer + // [inter-process communication method]: https://wikipedia.org/wiki/Inter-process_communication + NetworkTransportKey = attribute.Key("network.transport") + + // NetworkTypeKey is the attribute Key conforming to the "network.type" semantic + // conventions. It represents the [OSI network layer] or non-OSI equivalent. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "ipv4", "ipv6" + // Note: The value SHOULD be normalized to lowercase. + // + // [OSI network layer]: https://wikipedia.org/wiki/Network_layer + NetworkTypeKey = attribute.Key("network.type") +) + +// NetworkCarrierICC returns an attribute KeyValue conforming to the +// "network.carrier.icc" semantic conventions. It represents the ISO 3166-1 +// alpha-2 2-character country code associated with the mobile carrier network. +func NetworkCarrierICC(val string) attribute.KeyValue { + return NetworkCarrierICCKey.String(val) +} + +// NetworkCarrierMCC returns an attribute KeyValue conforming to the +// "network.carrier.mcc" semantic conventions. It represents the mobile carrier +// country code. +func NetworkCarrierMCC(val string) attribute.KeyValue { + return NetworkCarrierMCCKey.String(val) +} + +// NetworkCarrierMNC returns an attribute KeyValue conforming to the +// "network.carrier.mnc" semantic conventions. It represents the mobile carrier +// network code. +func NetworkCarrierMNC(val string) attribute.KeyValue { + return NetworkCarrierMNCKey.String(val) +} + +// NetworkCarrierName returns an attribute KeyValue conforming to the +// "network.carrier.name" semantic conventions. It represents the name of the +// mobile carrier. +func NetworkCarrierName(val string) attribute.KeyValue { + return NetworkCarrierNameKey.String(val) +} + +// NetworkInterfaceName returns an attribute KeyValue conforming to the +// "network.interface.name" semantic conventions. It represents the network +// interface name. +func NetworkInterfaceName(val string) attribute.KeyValue { + return NetworkInterfaceNameKey.String(val) +} + +// NetworkLocalAddress returns an attribute KeyValue conforming to the +// "network.local.address" semantic conventions. It represents the local address +// of the network connection - IP address or Unix domain socket name. +func NetworkLocalAddress(val string) attribute.KeyValue { + return NetworkLocalAddressKey.String(val) +} + +// NetworkLocalPort returns an attribute KeyValue conforming to the +// "network.local.port" semantic conventions. It represents the local port number +// of the network connection. +func NetworkLocalPort(val int) attribute.KeyValue { + return NetworkLocalPortKey.Int(val) +} + +// NetworkPeerAddress returns an attribute KeyValue conforming to the +// "network.peer.address" semantic conventions. It represents the peer address of +// the network connection - IP address or Unix domain socket name. +func NetworkPeerAddress(val string) attribute.KeyValue { + return NetworkPeerAddressKey.String(val) +} + +// NetworkPeerPort returns an attribute KeyValue conforming to the +// "network.peer.port" semantic conventions. It represents the peer port number +// of the network connection. +func NetworkPeerPort(val int) attribute.KeyValue { + return NetworkPeerPortKey.Int(val) +} + +// NetworkProtocolName returns an attribute KeyValue conforming to the +// "network.protocol.name" semantic conventions. It represents the +// [OSI application layer] or non-OSI equivalent. +// +// [OSI application layer]: https://wikipedia.org/wiki/Application_layer +func NetworkProtocolName(val string) attribute.KeyValue { + return NetworkProtocolNameKey.String(val) +} + +// NetworkProtocolVersion returns an attribute KeyValue conforming to the +// "network.protocol.version" semantic conventions. It represents the actual +// version of the protocol used for network communication. 
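+//
+// Usage sketch (illustrative only): the helpers in this namespace return
+// attribute.KeyValue pairs, which can then be attached to telemetry, for
+// example to a span, assuming a trace.Span named span is already in scope:
+//
+//	span.SetAttributes(
+//		NetworkProtocolName("http"),
+//		NetworkProtocolVersion("1.1"),
+//		NetworkTransportTCP,
+//	)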
+func NetworkProtocolVersion(val string) attribute.KeyValue { + return NetworkProtocolVersionKey.String(val) +} + +// Enum values for network.connection.state +var ( + // closed + // Stability: development + NetworkConnectionStateClosed = NetworkConnectionStateKey.String("closed") + // close_wait + // Stability: development + NetworkConnectionStateCloseWait = NetworkConnectionStateKey.String("close_wait") + // closing + // Stability: development + NetworkConnectionStateClosing = NetworkConnectionStateKey.String("closing") + // established + // Stability: development + NetworkConnectionStateEstablished = NetworkConnectionStateKey.String("established") + // fin_wait_1 + // Stability: development + NetworkConnectionStateFinWait1 = NetworkConnectionStateKey.String("fin_wait_1") + // fin_wait_2 + // Stability: development + NetworkConnectionStateFinWait2 = NetworkConnectionStateKey.String("fin_wait_2") + // last_ack + // Stability: development + NetworkConnectionStateLastAck = NetworkConnectionStateKey.String("last_ack") + // listen + // Stability: development + NetworkConnectionStateListen = NetworkConnectionStateKey.String("listen") + // syn_received + // Stability: development + NetworkConnectionStateSynReceived = NetworkConnectionStateKey.String("syn_received") + // syn_sent + // Stability: development + NetworkConnectionStateSynSent = NetworkConnectionStateKey.String("syn_sent") + // time_wait + // Stability: development + NetworkConnectionStateTimeWait = NetworkConnectionStateKey.String("time_wait") +) + +// Enum values for network.connection.subtype +var ( + // GPRS + // Stability: development + NetworkConnectionSubtypeGprs = NetworkConnectionSubtypeKey.String("gprs") + // EDGE + // Stability: development + NetworkConnectionSubtypeEdge = NetworkConnectionSubtypeKey.String("edge") + // UMTS + // Stability: development + NetworkConnectionSubtypeUmts = NetworkConnectionSubtypeKey.String("umts") + // CDMA + // Stability: development + NetworkConnectionSubtypeCdma = NetworkConnectionSubtypeKey.String("cdma") + // EVDO Rel. 0 + // Stability: development + NetworkConnectionSubtypeEvdo0 = NetworkConnectionSubtypeKey.String("evdo_0") + // EVDO Rev. A + // Stability: development + NetworkConnectionSubtypeEvdoA = NetworkConnectionSubtypeKey.String("evdo_a") + // CDMA2000 1XRTT + // Stability: development + NetworkConnectionSubtypeCdma20001xrtt = NetworkConnectionSubtypeKey.String("cdma2000_1xrtt") + // HSDPA + // Stability: development + NetworkConnectionSubtypeHsdpa = NetworkConnectionSubtypeKey.String("hsdpa") + // HSUPA + // Stability: development + NetworkConnectionSubtypeHsupa = NetworkConnectionSubtypeKey.String("hsupa") + // HSPA + // Stability: development + NetworkConnectionSubtypeHspa = NetworkConnectionSubtypeKey.String("hspa") + // IDEN + // Stability: development + NetworkConnectionSubtypeIden = NetworkConnectionSubtypeKey.String("iden") + // EVDO Rev. 
B + // Stability: development + NetworkConnectionSubtypeEvdoB = NetworkConnectionSubtypeKey.String("evdo_b") + // LTE + // Stability: development + NetworkConnectionSubtypeLte = NetworkConnectionSubtypeKey.String("lte") + // EHRPD + // Stability: development + NetworkConnectionSubtypeEhrpd = NetworkConnectionSubtypeKey.String("ehrpd") + // HSPAP + // Stability: development + NetworkConnectionSubtypeHspap = NetworkConnectionSubtypeKey.String("hspap") + // GSM + // Stability: development + NetworkConnectionSubtypeGsm = NetworkConnectionSubtypeKey.String("gsm") + // TD-SCDMA + // Stability: development + NetworkConnectionSubtypeTdScdma = NetworkConnectionSubtypeKey.String("td_scdma") + // IWLAN + // Stability: development + NetworkConnectionSubtypeIwlan = NetworkConnectionSubtypeKey.String("iwlan") + // 5G NR (New Radio) + // Stability: development + NetworkConnectionSubtypeNr = NetworkConnectionSubtypeKey.String("nr") + // 5G NRNSA (New Radio Non-Standalone) + // Stability: development + NetworkConnectionSubtypeNrnsa = NetworkConnectionSubtypeKey.String("nrnsa") + // LTE CA + // Stability: development + NetworkConnectionSubtypeLteCa = NetworkConnectionSubtypeKey.String("lte_ca") +) + +// Enum values for network.connection.type +var ( + // wifi + // Stability: development + NetworkConnectionTypeWifi = NetworkConnectionTypeKey.String("wifi") + // wired + // Stability: development + NetworkConnectionTypeWired = NetworkConnectionTypeKey.String("wired") + // cell + // Stability: development + NetworkConnectionTypeCell = NetworkConnectionTypeKey.String("cell") + // unavailable + // Stability: development + NetworkConnectionTypeUnavailable = NetworkConnectionTypeKey.String("unavailable") + // unknown + // Stability: development + NetworkConnectionTypeUnknown = NetworkConnectionTypeKey.String("unknown") +) + +// Enum values for network.io.direction +var ( + // transmit + // Stability: development + NetworkIODirectionTransmit = NetworkIODirectionKey.String("transmit") + // receive + // Stability: development + NetworkIODirectionReceive = NetworkIODirectionKey.String("receive") +) + +// Enum values for network.transport +var ( + // TCP + // Stability: stable + NetworkTransportTCP = NetworkTransportKey.String("tcp") + // UDP + // Stability: stable + NetworkTransportUDP = NetworkTransportKey.String("udp") + // Named or anonymous pipe. + // Stability: stable + NetworkTransportPipe = NetworkTransportKey.String("pipe") + // Unix domain socket + // Stability: stable + NetworkTransportUnix = NetworkTransportKey.String("unix") + // QUIC + // Stability: stable + NetworkTransportQUIC = NetworkTransportKey.String("quic") +) + +// Enum values for network.type +var ( + // IPv4 + // Stability: stable + NetworkTypeIPv4 = NetworkTypeKey.String("ipv4") + // IPv6 + // Stability: stable + NetworkTypeIPv6 = NetworkTypeKey.String("ipv6") +) + +// Namespace: oci +const ( + // OCIManifestDigestKey is the attribute Key conforming to the + // "oci.manifest.digest" semantic conventions. It represents the digest of the + // OCI image manifest. For container images specifically is the digest by which + // the container image is known. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "sha256:e4ca62c0d62f3e886e684806dfe9d4e0cda60d54986898173c1083856cfda0f4" + // Note: Follows [OCI Image Manifest Specification], and specifically the + // [Digest property]. + // An example can be found in [Example Image Manifest]. 
+ // + // [OCI Image Manifest Specification]: https://github.com/opencontainers/image-spec/blob/main/manifest.md + // [Digest property]: https://github.com/opencontainers/image-spec/blob/main/descriptor.md#digests + // [Example Image Manifest]: https://github.com/opencontainers/image-spec/blob/main/manifest.md#example-image-manifest + OCIManifestDigestKey = attribute.Key("oci.manifest.digest") +) + +// OCIManifestDigest returns an attribute KeyValue conforming to the +// "oci.manifest.digest" semantic conventions. It represents the digest of the +// OCI image manifest. For container images specifically is the digest by which +// the container image is known. +func OCIManifestDigest(val string) attribute.KeyValue { + return OCIManifestDigestKey.String(val) +} + +// Namespace: opentracing +const ( + // OpenTracingRefTypeKey is the attribute Key conforming to the + // "opentracing.ref_type" semantic conventions. It represents the parent-child + // Reference type. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: The causal relationship between a child Span and a parent Span. + OpenTracingRefTypeKey = attribute.Key("opentracing.ref_type") +) + +// Enum values for opentracing.ref_type +var ( + // The parent Span depends on the child Span in some capacity + // Stability: development + OpenTracingRefTypeChildOf = OpenTracingRefTypeKey.String("child_of") + // The parent Span doesn't depend in any way on the result of the child Span + // Stability: development + OpenTracingRefTypeFollowsFrom = OpenTracingRefTypeKey.String("follows_from") +) + +// Namespace: os +const ( + // OSBuildIDKey is the attribute Key conforming to the "os.build_id" semantic + // conventions. It represents the unique identifier for a particular build or + // compilation of the operating system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "TQ3C.230805.001.B2", "20E247", "22621" + OSBuildIDKey = attribute.Key("os.build_id") + + // OSDescriptionKey is the attribute Key conforming to the "os.description" + // semantic conventions. It represents the human readable (not intended to be + // parsed) OS version information, like e.g. reported by `ver` or + // `lsb_release -a` commands. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Microsoft Windows [Version 10.0.18363.778]", "Ubuntu 18.04.1 LTS" + OSDescriptionKey = attribute.Key("os.description") + + // OSNameKey is the attribute Key conforming to the "os.name" semantic + // conventions. It represents the human readable operating system name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "iOS", "Android", "Ubuntu" + OSNameKey = attribute.Key("os.name") + + // OSTypeKey is the attribute Key conforming to the "os.type" semantic + // conventions. It represents the operating system type. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + OSTypeKey = attribute.Key("os.type") + + // OSVersionKey is the attribute Key conforming to the "os.version" semantic + // conventions. It represents the version string of the operating system as + // defined in [Version Attributes]. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "14.2.1", "18.04.1" + // + // [Version Attributes]: /docs/resource/README.md#version-attributes + OSVersionKey = attribute.Key("os.version") +) + +// OSBuildID returns an attribute KeyValue conforming to the "os.build_id" +// semantic conventions. It represents the unique identifier for a particular +// build or compilation of the operating system. +func OSBuildID(val string) attribute.KeyValue { + return OSBuildIDKey.String(val) +} + +// OSDescription returns an attribute KeyValue conforming to the "os.description" +// semantic conventions. It represents the human readable (not intended to be +// parsed) OS version information, like e.g. reported by `ver` or +// `lsb_release -a` commands. +func OSDescription(val string) attribute.KeyValue { + return OSDescriptionKey.String(val) +} + +// OSName returns an attribute KeyValue conforming to the "os.name" semantic +// conventions. It represents the human readable operating system name. +func OSName(val string) attribute.KeyValue { + return OSNameKey.String(val) +} + +// OSVersion returns an attribute KeyValue conforming to the "os.version" +// semantic conventions. It represents the version string of the operating system +// as defined in [Version Attributes]. +// +// [Version Attributes]: /docs/resource/README.md#version-attributes +func OSVersion(val string) attribute.KeyValue { + return OSVersionKey.String(val) +} + +// Enum values for os.type +var ( + // Microsoft Windows + // Stability: development + OSTypeWindows = OSTypeKey.String("windows") + // Linux + // Stability: development + OSTypeLinux = OSTypeKey.String("linux") + // Apple Darwin + // Stability: development + OSTypeDarwin = OSTypeKey.String("darwin") + // FreeBSD + // Stability: development + OSTypeFreeBSD = OSTypeKey.String("freebsd") + // NetBSD + // Stability: development + OSTypeNetBSD = OSTypeKey.String("netbsd") + // OpenBSD + // Stability: development + OSTypeOpenBSD = OSTypeKey.String("openbsd") + // DragonFly BSD + // Stability: development + OSTypeDragonflyBSD = OSTypeKey.String("dragonflybsd") + // HP-UX (Hewlett Packard Unix) + // Stability: development + OSTypeHPUX = OSTypeKey.String("hpux") + // AIX (Advanced Interactive eXecutive) + // Stability: development + OSTypeAIX = OSTypeKey.String("aix") + // SunOS, Oracle Solaris + // Stability: development + OSTypeSolaris = OSTypeKey.String("solaris") + // IBM z/OS + // Stability: development + OSTypeZOS = OSTypeKey.String("z_os") +) + +// Namespace: otel +const ( + // OTelComponentNameKey is the attribute Key conforming to the + // "otel.component.name" semantic conventions. It represents a name uniquely + // identifying the instance of the OpenTelemetry component within its containing + // SDK instance. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "otlp_grpc_span_exporter/0", "custom-name" + // Note: Implementations SHOULD ensure a low cardinality for this attribute, + // even across application or SDK restarts. + // E.g. implementations MUST NOT use UUIDs as values for this attribute. + // + // Implementations MAY achieve these goals by following a + // `/` pattern, e.g. + // `batching_span_processor/0`. + // Hereby `otel.component.type` refers to the corresponding attribute value of + // the component. 
+ // + // The value of `instance-counter` MAY be automatically assigned by the + // component and uniqueness within the enclosing SDK instance MUST be + // guaranteed. + // For example, `` MAY be implemented by using a monotonically + // increasing counter (starting with `0`), which is incremented every time an + // instance of the given component type is started. + // + // With this implementation, for example the first Batching Span Processor would + // have `batching_span_processor/0` + // as `otel.component.name`, the second one `batching_span_processor/1` and so + // on. + // These values will therefore be reused in the case of an application restart. + OTelComponentNameKey = attribute.Key("otel.component.name") + + // OTelComponentTypeKey is the attribute Key conforming to the + // "otel.component.type" semantic conventions. It represents a name identifying + // the type of the OpenTelemetry component. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "batching_span_processor", "com.example.MySpanExporter" + // Note: If none of the standardized values apply, implementations SHOULD use + // the language-defined name of the type. + // E.g. for Java the fully qualified classname SHOULD be used in this case. + OTelComponentTypeKey = attribute.Key("otel.component.type") + + // OTelScopeNameKey is the attribute Key conforming to the "otel.scope.name" + // semantic conventions. It represents the name of the instrumentation scope - ( + // `InstrumentationScope.Name` in OTLP). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "io.opentelemetry.contrib.mongodb" + OTelScopeNameKey = attribute.Key("otel.scope.name") + + // OTelScopeVersionKey is the attribute Key conforming to the + // "otel.scope.version" semantic conventions. It represents the version of the + // instrumentation scope - (`InstrumentationScope.Version` in OTLP). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "1.0.0" + OTelScopeVersionKey = attribute.Key("otel.scope.version") + + // OTelSpanSamplingResultKey is the attribute Key conforming to the + // "otel.span.sampling_result" semantic conventions. It represents the result + // value of the sampler for this span. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + OTelSpanSamplingResultKey = attribute.Key("otel.span.sampling_result") + + // OTelStatusCodeKey is the attribute Key conforming to the "otel.status_code" + // semantic conventions. It represents the name of the code, either "OK" or + // "ERROR". MUST NOT be set if the status code is UNSET. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: + OTelStatusCodeKey = attribute.Key("otel.status_code") + + // OTelStatusDescriptionKey is the attribute Key conforming to the + // "otel.status_description" semantic conventions. It represents the description + // of the Status if it has a value, otherwise not set. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "resource not found" + OTelStatusDescriptionKey = attribute.Key("otel.status_description") +) + +// OTelComponentName returns an attribute KeyValue conforming to the +// "otel.component.name" semantic conventions. It represents a name uniquely +// identifying the instance of the OpenTelemetry component within its containing +// SDK instance. 
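+//
+// Illustrative sketch: like the other helpers in this package, these simply
+// construct attribute.KeyValue pairs; the values shown are taken from the
+// examples above and are not required formats:
+//
+//	attrs := []attribute.KeyValue{
+//		OTelComponentName("batching_span_processor/0"),
+//		OTelComponentTypeBatchingSpanProcessor,
+//		OTelScopeName("io.opentelemetry.contrib.mongodb"),
+//	}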
+func OTelComponentName(val string) attribute.KeyValue { + return OTelComponentNameKey.String(val) +} + +// OTelScopeName returns an attribute KeyValue conforming to the +// "otel.scope.name" semantic conventions. It represents the name of the +// instrumentation scope - (`InstrumentationScope.Name` in OTLP). +func OTelScopeName(val string) attribute.KeyValue { + return OTelScopeNameKey.String(val) +} + +// OTelScopeVersion returns an attribute KeyValue conforming to the +// "otel.scope.version" semantic conventions. It represents the version of the +// instrumentation scope - (`InstrumentationScope.Version` in OTLP). +func OTelScopeVersion(val string) attribute.KeyValue { + return OTelScopeVersionKey.String(val) +} + +// OTelStatusDescription returns an attribute KeyValue conforming to the +// "otel.status_description" semantic conventions. It represents the description +// of the Status if it has a value, otherwise not set. +func OTelStatusDescription(val string) attribute.KeyValue { + return OTelStatusDescriptionKey.String(val) +} + +// Enum values for otel.component.type +var ( + // The builtin SDK batching span processor + // + // Stability: development + OTelComponentTypeBatchingSpanProcessor = OTelComponentTypeKey.String("batching_span_processor") + // The builtin SDK simple span processor + // + // Stability: development + OTelComponentTypeSimpleSpanProcessor = OTelComponentTypeKey.String("simple_span_processor") + // The builtin SDK batching log record processor + // + // Stability: development + OTelComponentTypeBatchingLogProcessor = OTelComponentTypeKey.String("batching_log_processor") + // The builtin SDK simple log record processor + // + // Stability: development + OTelComponentTypeSimpleLogProcessor = OTelComponentTypeKey.String("simple_log_processor") + // OTLP span exporter over gRPC with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpGRPCSpanExporter = OTelComponentTypeKey.String("otlp_grpc_span_exporter") + // OTLP span exporter over HTTP with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpHTTPSpanExporter = OTelComponentTypeKey.String("otlp_http_span_exporter") + // OTLP span exporter over HTTP with JSON serialization + // + // Stability: development + OTelComponentTypeOtlpHTTPJSONSpanExporter = OTelComponentTypeKey.String("otlp_http_json_span_exporter") + // OTLP log record exporter over gRPC with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpGRPCLogExporter = OTelComponentTypeKey.String("otlp_grpc_log_exporter") + // OTLP log record exporter over HTTP with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpHTTPLogExporter = OTelComponentTypeKey.String("otlp_http_log_exporter") + // OTLP log record exporter over HTTP with JSON serialization + // + // Stability: development + OTelComponentTypeOtlpHTTPJSONLogExporter = OTelComponentTypeKey.String("otlp_http_json_log_exporter") + // The builtin SDK periodically exporting metric reader + // + // Stability: development + OTelComponentTypePeriodicMetricReader = OTelComponentTypeKey.String("periodic_metric_reader") + // OTLP metric exporter over gRPC with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpGRPCMetricExporter = OTelComponentTypeKey.String("otlp_grpc_metric_exporter") + // OTLP metric exporter over HTTP with protobuf serialization + // + // Stability: development + OTelComponentTypeOtlpHTTPMetricExporter = 
OTelComponentTypeKey.String("otlp_http_metric_exporter") + // OTLP metric exporter over HTTP with JSON serialization + // + // Stability: development + OTelComponentTypeOtlpHTTPJSONMetricExporter = OTelComponentTypeKey.String("otlp_http_json_metric_exporter") +) + +// Enum values for otel.span.sampling_result +var ( + // The span is not sampled and not recording + // Stability: development + OTelSpanSamplingResultDrop = OTelSpanSamplingResultKey.String("DROP") + // The span is not sampled, but recording + // Stability: development + OTelSpanSamplingResultRecordOnly = OTelSpanSamplingResultKey.String("RECORD_ONLY") + // The span is sampled and recording + // Stability: development + OTelSpanSamplingResultRecordAndSample = OTelSpanSamplingResultKey.String("RECORD_AND_SAMPLE") +) + +// Enum values for otel.status_code +var ( + // The operation has been validated by an Application developer or Operator to + // have completed successfully. + // Stability: stable + OTelStatusCodeOk = OTelStatusCodeKey.String("OK") + // The operation contains an error. + // Stability: stable + OTelStatusCodeError = OTelStatusCodeKey.String("ERROR") +) + +// Namespace: peer +const ( + // PeerServiceKey is the attribute Key conforming to the "peer.service" semantic + // conventions. It represents the [`service.name`] of the remote service. SHOULD + // be equal to the actual `service.name` resource attribute of the remote + // service if any. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: AuthTokenCache + // + // [`service.name`]: /docs/resource/README.md#service + PeerServiceKey = attribute.Key("peer.service") +) + +// PeerService returns an attribute KeyValue conforming to the "peer.service" +// semantic conventions. It represents the [`service.name`] of the remote +// service. SHOULD be equal to the actual `service.name` resource attribute of +// the remote service if any. +// +// [`service.name`]: /docs/resource/README.md#service +func PeerService(val string) attribute.KeyValue { + return PeerServiceKey.String(val) +} + +// Namespace: process +const ( + // ProcessArgsCountKey is the attribute Key conforming to the + // "process.args_count" semantic conventions. It represents the length of the + // process.command_args array. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 4 + // Note: This field can be useful for querying or performing bucket analysis on + // how many arguments were provided to start a process. More arguments may be an + // indication of suspicious activity. + ProcessArgsCountKey = attribute.Key("process.args_count") + + // ProcessCommandKey is the attribute Key conforming to the "process.command" + // semantic conventions. It represents the command used to launch the process + // (i.e. the command name). On Linux based systems, can be set to the zeroth + // string in `proc/[pid]/cmdline`. On Windows, can be set to the first parameter + // extracted from `GetCommandLineW`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "cmd/otelcol" + ProcessCommandKey = attribute.Key("process.command") + + // ProcessCommandArgsKey is the attribute Key conforming to the + // "process.command_args" semantic conventions. It represents the all the + // command arguments (including the command/executable itself) as received by + // the process. 
On Linux-based systems (and some other Unixoid systems + // supporting procfs), can be set according to the list of null-delimited + // strings extracted from `proc/[pid]/cmdline`. For libc-based executables, this + // would be the full argv vector passed to `main`. SHOULD NOT be collected by + // default unless there is sanitization that excludes sensitive data. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "cmd/otecol", "--config=config.yaml" + ProcessCommandArgsKey = attribute.Key("process.command_args") + + // ProcessCommandLineKey is the attribute Key conforming to the + // "process.command_line" semantic conventions. It represents the full command + // used to launch the process as a single string representing the full command. + // On Windows, can be set to the result of `GetCommandLineW`. Do not set this if + // you have to assemble it just for monitoring; use `process.command_args` + // instead. SHOULD NOT be collected by default unless there is sanitization that + // excludes sensitive data. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "C:\cmd\otecol --config="my directory\config.yaml"" + ProcessCommandLineKey = attribute.Key("process.command_line") + + // ProcessContextSwitchTypeKey is the attribute Key conforming to the + // "process.context_switch_type" semantic conventions. It represents the + // specifies whether the context switches for this data point were voluntary or + // involuntary. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + ProcessContextSwitchTypeKey = attribute.Key("process.context_switch_type") + + // ProcessCreationTimeKey is the attribute Key conforming to the + // "process.creation.time" semantic conventions. It represents the date and time + // the process was created, in ISO 8601 format. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2023-11-21T09:25:34.853Z" + ProcessCreationTimeKey = attribute.Key("process.creation.time") + + // ProcessExecutableBuildIDGNUKey is the attribute Key conforming to the + // "process.executable.build_id.gnu" semantic conventions. It represents the GNU + // build ID as found in the `.note.gnu.build-id` ELF section (hex string). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "c89b11207f6479603b0d49bf291c092c2b719293" + ProcessExecutableBuildIDGNUKey = attribute.Key("process.executable.build_id.gnu") + + // ProcessExecutableBuildIDGoKey is the attribute Key conforming to the + // "process.executable.build_id.go" semantic conventions. It represents the Go + // build ID as retrieved by `go tool buildid `. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "foh3mEXu7BLZjsN9pOwG/kATcXlYVCDEFouRMQed_/WwRFB1hPo9LBkekthSPG/x8hMC8emW2cCjXD0_1aY" + ProcessExecutableBuildIDGoKey = attribute.Key("process.executable.build_id.go") + + // ProcessExecutableBuildIDHtlhashKey is the attribute Key conforming to the + // "process.executable.build_id.htlhash" semantic conventions. It represents the + // profiling specific build ID for executables. See the OTel specification for + // Profiles for more information. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "600DCAFE4A110000F2BF38C493F5FB92" + ProcessExecutableBuildIDHtlhashKey = attribute.Key("process.executable.build_id.htlhash") + + // ProcessExecutableNameKey is the attribute Key conforming to the + // "process.executable.name" semantic conventions. It represents the name of the + // process executable. On Linux based systems, this SHOULD be set to the base + // name of the target of `/proc/[pid]/exe`. On Windows, this SHOULD be set to + // the base name of `GetProcessImageFileNameW`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "otelcol" + ProcessExecutableNameKey = attribute.Key("process.executable.name") + + // ProcessExecutablePathKey is the attribute Key conforming to the + // "process.executable.path" semantic conventions. It represents the full path + // to the process executable. On Linux based systems, can be set to the target + // of `proc/[pid]/exe`. On Windows, can be set to the result of + // `GetProcessImageFileNameW`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/usr/bin/cmd/otelcol" + ProcessExecutablePathKey = attribute.Key("process.executable.path") + + // ProcessExitCodeKey is the attribute Key conforming to the "process.exit.code" + // semantic conventions. It represents the exit code of the process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 127 + ProcessExitCodeKey = attribute.Key("process.exit.code") + + // ProcessExitTimeKey is the attribute Key conforming to the "process.exit.time" + // semantic conventions. It represents the date and time the process exited, in + // ISO 8601 format. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2023-11-21T09:26:12.315Z" + ProcessExitTimeKey = attribute.Key("process.exit.time") + + // ProcessGroupLeaderPIDKey is the attribute Key conforming to the + // "process.group_leader.pid" semantic conventions. It represents the PID of the + // process's group leader. This is also the process group ID (PGID) of the + // process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 23 + ProcessGroupLeaderPIDKey = attribute.Key("process.group_leader.pid") + + // ProcessInteractiveKey is the attribute Key conforming to the + // "process.interactive" semantic conventions. It represents the whether the + // process is connected to an interactive shell. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + ProcessInteractiveKey = attribute.Key("process.interactive") + + // ProcessLinuxCgroupKey is the attribute Key conforming to the + // "process.linux.cgroup" semantic conventions. It represents the control group + // associated with the process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1:name=systemd:/user.slice/user-1000.slice/session-3.scope", + // "0::/user.slice/user-1000.slice/user@1000.service/tmux-spawn-0267755b-4639-4a27-90ed-f19f88e53748.scope" + // Note: Control groups (cgroups) are a kernel feature used to organize and + // manage process resources. This attribute provides the path(s) to the + // cgroup(s) associated with the process, which should match the contents of the + // [/proc/[PID]/cgroup] file. 
+ // + // [/proc/[PID]/cgroup]: https://man7.org/linux/man-pages/man7/cgroups.7.html + ProcessLinuxCgroupKey = attribute.Key("process.linux.cgroup") + + // ProcessOwnerKey is the attribute Key conforming to the "process.owner" + // semantic conventions. It represents the username of the user that owns the + // process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "root" + ProcessOwnerKey = attribute.Key("process.owner") + + // ProcessPagingFaultTypeKey is the attribute Key conforming to the + // "process.paging.fault_type" semantic conventions. It represents the type of + // page fault for this data point. Type `major` is for major/hard page faults, + // and `minor` is for minor/soft page faults. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + ProcessPagingFaultTypeKey = attribute.Key("process.paging.fault_type") + + // ProcessParentPIDKey is the attribute Key conforming to the + // "process.parent_pid" semantic conventions. It represents the parent Process + // identifier (PPID). + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 111 + ProcessParentPIDKey = attribute.Key("process.parent_pid") + + // ProcessPIDKey is the attribute Key conforming to the "process.pid" semantic + // conventions. It represents the process identifier (PID). + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1234 + ProcessPIDKey = attribute.Key("process.pid") + + // ProcessRealUserIDKey is the attribute Key conforming to the + // "process.real_user.id" semantic conventions. It represents the real user ID + // (RUID) of the process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1000 + ProcessRealUserIDKey = attribute.Key("process.real_user.id") + + // ProcessRealUserNameKey is the attribute Key conforming to the + // "process.real_user.name" semantic conventions. It represents the username of + // the real user of the process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "operator" + ProcessRealUserNameKey = attribute.Key("process.real_user.name") + + // ProcessRuntimeDescriptionKey is the attribute Key conforming to the + // "process.runtime.description" semantic conventions. It represents an + // additional description about the runtime of the process, for example a + // specific vendor customization of the runtime environment. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: Eclipse OpenJ9 Eclipse OpenJ9 VM openj9-0.21.0 + ProcessRuntimeDescriptionKey = attribute.Key("process.runtime.description") + + // ProcessRuntimeNameKey is the attribute Key conforming to the + // "process.runtime.name" semantic conventions. It represents the name of the + // runtime of this process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "OpenJDK Runtime Environment" + ProcessRuntimeNameKey = attribute.Key("process.runtime.name") + + // ProcessRuntimeVersionKey is the attribute Key conforming to the + // "process.runtime.version" semantic conventions. It represents the version of + // the runtime of this process, as returned by the runtime without modification. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 14.0.2 + ProcessRuntimeVersionKey = attribute.Key("process.runtime.version") + + // ProcessSavedUserIDKey is the attribute Key conforming to the + // "process.saved_user.id" semantic conventions. It represents the saved user ID + // (SUID) of the process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1002 + ProcessSavedUserIDKey = attribute.Key("process.saved_user.id") + + // ProcessSavedUserNameKey is the attribute Key conforming to the + // "process.saved_user.name" semantic conventions. It represents the username of + // the saved user. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "operator" + ProcessSavedUserNameKey = attribute.Key("process.saved_user.name") + + // ProcessSessionLeaderPIDKey is the attribute Key conforming to the + // "process.session_leader.pid" semantic conventions. It represents the PID of + // the process's session leader. This is also the session ID (SID) of the + // process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 14 + ProcessSessionLeaderPIDKey = attribute.Key("process.session_leader.pid") + + // ProcessTitleKey is the attribute Key conforming to the "process.title" + // semantic conventions. It represents the process title (proctitle). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "cat /etc/hostname", "xfce4-session", "bash" + // Note: In many Unix-like systems, process title (proctitle), is the string + // that represents the name or command line of a running process, displayed by + // system monitoring tools like ps, top, and htop. + ProcessTitleKey = attribute.Key("process.title") + + // ProcessUserIDKey is the attribute Key conforming to the "process.user.id" + // semantic conventions. It represents the effective user ID (EUID) of the + // process. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1001 + ProcessUserIDKey = attribute.Key("process.user.id") + + // ProcessUserNameKey is the attribute Key conforming to the "process.user.name" + // semantic conventions. It represents the username of the effective user of the + // process. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "root" + ProcessUserNameKey = attribute.Key("process.user.name") + + // ProcessVpidKey is the attribute Key conforming to the "process.vpid" semantic + // conventions. It represents the virtual process identifier. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 12 + // Note: The process ID within a PID namespace. This is not necessarily unique + // across all processes on the host but it is unique within the process + // namespace that the process exists within. + ProcessVpidKey = attribute.Key("process.vpid") + + // ProcessWorkingDirectoryKey is the attribute Key conforming to the + // "process.working_directory" semantic conventions. It represents the working + // directory of the process. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/root" + ProcessWorkingDirectoryKey = attribute.Key("process.working_directory") +) + +// ProcessArgsCount returns an attribute KeyValue conforming to the +// "process.args_count" semantic conventions. It represents the length of the +// process.command_args array. +func ProcessArgsCount(val int) attribute.KeyValue { + return ProcessArgsCountKey.Int(val) +} + +// ProcessCommand returns an attribute KeyValue conforming to the +// "process.command" semantic conventions. It represents the command used to +// launch the process (i.e. the command name). On Linux based systems, can be set +// to the zeroth string in `proc/[pid]/cmdline`. On Windows, can be set to the +// first parameter extracted from `GetCommandLineW`. +func ProcessCommand(val string) attribute.KeyValue { + return ProcessCommandKey.String(val) +} + +// ProcessCommandArgs returns an attribute KeyValue conforming to the +// "process.command_args" semantic conventions. It represents the all the command +// arguments (including the command/executable itself) as received by the +// process. On Linux-based systems (and some other Unixoid systems supporting +// procfs), can be set according to the list of null-delimited strings extracted +// from `proc/[pid]/cmdline`. For libc-based executables, this would be the full +// argv vector passed to `main`. SHOULD NOT be collected by default unless there +// is sanitization that excludes sensitive data. +func ProcessCommandArgs(val ...string) attribute.KeyValue { + return ProcessCommandArgsKey.StringSlice(val) +} + +// ProcessCommandLine returns an attribute KeyValue conforming to the +// "process.command_line" semantic conventions. It represents the full command +// used to launch the process as a single string representing the full command. +// On Windows, can be set to the result of `GetCommandLineW`. Do not set this if +// you have to assemble it just for monitoring; use `process.command_args` +// instead. SHOULD NOT be collected by default unless there is sanitization that +// excludes sensitive data. +func ProcessCommandLine(val string) attribute.KeyValue { + return ProcessCommandLineKey.String(val) +} + +// ProcessCreationTime returns an attribute KeyValue conforming to the +// "process.creation.time" semantic conventions. It represents the date and time +// the process was created, in ISO 8601 format. +func ProcessCreationTime(val string) attribute.KeyValue { + return ProcessCreationTimeKey.String(val) +} + +// ProcessExecutableBuildIDGNU returns an attribute KeyValue conforming to the +// "process.executable.build_id.gnu" semantic conventions. It represents the GNU +// build ID as found in the `.note.gnu.build-id` ELF section (hex string). +func ProcessExecutableBuildIDGNU(val string) attribute.KeyValue { + return ProcessExecutableBuildIDGNUKey.String(val) +} + +// ProcessExecutableBuildIDGo returns an attribute KeyValue conforming to the +// "process.executable.build_id.go" semantic conventions. It represents the Go +// build ID as retrieved by `go tool buildid `. +func ProcessExecutableBuildIDGo(val string) attribute.KeyValue { + return ProcessExecutableBuildIDGoKey.String(val) +} + +// ProcessExecutableBuildIDHtlhash returns an attribute KeyValue conforming to +// the "process.executable.build_id.htlhash" semantic conventions. It represents +// the profiling specific build ID for executables. See the OTel specification +// for Profiles for more information. 
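+//
+// Usage sketch (illustrative, from a consuming package): process attributes
+// such as these are typically attached to a resource via the OTel SDK,
+// assuming go.opentelemetry.io/otel/sdk/resource is imported as resource and
+// this package as semconv:
+//
+//	res := resource.NewWithAttributes(
+//		semconv.SchemaURL,
+//		semconv.ProcessPID(1234),
+//		semconv.ProcessExecutableName("otelcol"),
+//	)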
+func ProcessExecutableBuildIDHtlhash(val string) attribute.KeyValue { + return ProcessExecutableBuildIDHtlhashKey.String(val) +} + +// ProcessExecutableName returns an attribute KeyValue conforming to the +// "process.executable.name" semantic conventions. It represents the name of the +// process executable. On Linux based systems, this SHOULD be set to the base +// name of the target of `/proc/[pid]/exe`. On Windows, this SHOULD be set to the +// base name of `GetProcessImageFileNameW`. +func ProcessExecutableName(val string) attribute.KeyValue { + return ProcessExecutableNameKey.String(val) +} + +// ProcessExecutablePath returns an attribute KeyValue conforming to the +// "process.executable.path" semantic conventions. It represents the full path to +// the process executable. On Linux based systems, can be set to the target of +// `proc/[pid]/exe`. On Windows, can be set to the result of +// `GetProcessImageFileNameW`. +func ProcessExecutablePath(val string) attribute.KeyValue { + return ProcessExecutablePathKey.String(val) +} + +// ProcessExitCode returns an attribute KeyValue conforming to the +// "process.exit.code" semantic conventions. It represents the exit code of the +// process. +func ProcessExitCode(val int) attribute.KeyValue { + return ProcessExitCodeKey.Int(val) +} + +// ProcessExitTime returns an attribute KeyValue conforming to the +// "process.exit.time" semantic conventions. It represents the date and time the +// process exited, in ISO 8601 format. +func ProcessExitTime(val string) attribute.KeyValue { + return ProcessExitTimeKey.String(val) +} + +// ProcessGroupLeaderPID returns an attribute KeyValue conforming to the +// "process.group_leader.pid" semantic conventions. It represents the PID of the +// process's group leader. This is also the process group ID (PGID) of the +// process. +func ProcessGroupLeaderPID(val int) attribute.KeyValue { + return ProcessGroupLeaderPIDKey.Int(val) +} + +// ProcessInteractive returns an attribute KeyValue conforming to the +// "process.interactive" semantic conventions. It represents the whether the +// process is connected to an interactive shell. +func ProcessInteractive(val bool) attribute.KeyValue { + return ProcessInteractiveKey.Bool(val) +} + +// ProcessLinuxCgroup returns an attribute KeyValue conforming to the +// "process.linux.cgroup" semantic conventions. It represents the control group +// associated with the process. +func ProcessLinuxCgroup(val string) attribute.KeyValue { + return ProcessLinuxCgroupKey.String(val) +} + +// ProcessOwner returns an attribute KeyValue conforming to the "process.owner" +// semantic conventions. It represents the username of the user that owns the +// process. +func ProcessOwner(val string) attribute.KeyValue { + return ProcessOwnerKey.String(val) +} + +// ProcessParentPID returns an attribute KeyValue conforming to the +// "process.parent_pid" semantic conventions. It represents the parent Process +// identifier (PPID). +func ProcessParentPID(val int) attribute.KeyValue { + return ProcessParentPIDKey.Int(val) +} + +// ProcessPID returns an attribute KeyValue conforming to the "process.pid" +// semantic conventions. It represents the process identifier (PID). +func ProcessPID(val int) attribute.KeyValue { + return ProcessPIDKey.Int(val) +} + +// ProcessRealUserID returns an attribute KeyValue conforming to the +// "process.real_user.id" semantic conventions. It represents the real user ID +// (RUID) of the process. 
+func ProcessRealUserID(val int) attribute.KeyValue { + return ProcessRealUserIDKey.Int(val) +} + +// ProcessRealUserName returns an attribute KeyValue conforming to the +// "process.real_user.name" semantic conventions. It represents the username of +// the real user of the process. +func ProcessRealUserName(val string) attribute.KeyValue { + return ProcessRealUserNameKey.String(val) +} + +// ProcessRuntimeDescription returns an attribute KeyValue conforming to the +// "process.runtime.description" semantic conventions. It represents an +// additional description about the runtime of the process, for example a +// specific vendor customization of the runtime environment. +func ProcessRuntimeDescription(val string) attribute.KeyValue { + return ProcessRuntimeDescriptionKey.String(val) +} + +// ProcessRuntimeName returns an attribute KeyValue conforming to the +// "process.runtime.name" semantic conventions. It represents the name of the +// runtime of this process. +func ProcessRuntimeName(val string) attribute.KeyValue { + return ProcessRuntimeNameKey.String(val) +} + +// ProcessRuntimeVersion returns an attribute KeyValue conforming to the +// "process.runtime.version" semantic conventions. It represents the version of +// the runtime of this process, as returned by the runtime without modification. +func ProcessRuntimeVersion(val string) attribute.KeyValue { + return ProcessRuntimeVersionKey.String(val) +} + +// ProcessSavedUserID returns an attribute KeyValue conforming to the +// "process.saved_user.id" semantic conventions. It represents the saved user ID +// (SUID) of the process. +func ProcessSavedUserID(val int) attribute.KeyValue { + return ProcessSavedUserIDKey.Int(val) +} + +// ProcessSavedUserName returns an attribute KeyValue conforming to the +// "process.saved_user.name" semantic conventions. It represents the username of +// the saved user. +func ProcessSavedUserName(val string) attribute.KeyValue { + return ProcessSavedUserNameKey.String(val) +} + +// ProcessSessionLeaderPID returns an attribute KeyValue conforming to the +// "process.session_leader.pid" semantic conventions. It represents the PID of +// the process's session leader. This is also the session ID (SID) of the +// process. +func ProcessSessionLeaderPID(val int) attribute.KeyValue { + return ProcessSessionLeaderPIDKey.Int(val) +} + +// ProcessTitle returns an attribute KeyValue conforming to the "process.title" +// semantic conventions. It represents the process title (proctitle). +func ProcessTitle(val string) attribute.KeyValue { + return ProcessTitleKey.String(val) +} + +// ProcessUserID returns an attribute KeyValue conforming to the +// "process.user.id" semantic conventions. It represents the effective user ID +// (EUID) of the process. +func ProcessUserID(val int) attribute.KeyValue { + return ProcessUserIDKey.Int(val) +} + +// ProcessUserName returns an attribute KeyValue conforming to the +// "process.user.name" semantic conventions. It represents the username of the +// effective user of the process. +func ProcessUserName(val string) attribute.KeyValue { + return ProcessUserNameKey.String(val) +} + +// ProcessVpid returns an attribute KeyValue conforming to the "process.vpid" +// semantic conventions. It represents the virtual process identifier. +func ProcessVpid(val int) attribute.KeyValue { + return ProcessVpidKey.Int(val) +} + +// ProcessWorkingDirectory returns an attribute KeyValue conforming to the +// "process.working_directory" semantic conventions. 
It represents the working +// directory of the process. +func ProcessWorkingDirectory(val string) attribute.KeyValue { + return ProcessWorkingDirectoryKey.String(val) +} + +// Enum values for process.context_switch_type +var ( + // voluntary + // Stability: development + ProcessContextSwitchTypeVoluntary = ProcessContextSwitchTypeKey.String("voluntary") + // involuntary + // Stability: development + ProcessContextSwitchTypeInvoluntary = ProcessContextSwitchTypeKey.String("involuntary") +) + +// Enum values for process.paging.fault_type +var ( + // major + // Stability: development + ProcessPagingFaultTypeMajor = ProcessPagingFaultTypeKey.String("major") + // minor + // Stability: development + ProcessPagingFaultTypeMinor = ProcessPagingFaultTypeKey.String("minor") +) + +// Namespace: profile +const ( + // ProfileFrameTypeKey is the attribute Key conforming to the + // "profile.frame.type" semantic conventions. It represents the describes the + // interpreter or compiler of a single frame. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "cpython" + ProfileFrameTypeKey = attribute.Key("profile.frame.type") +) + +// Enum values for profile.frame.type +var ( + // [.NET] + // + // Stability: development + // + // [.NET]: https://wikipedia.org/wiki/.NET + ProfileFrameTypeDotnet = ProfileFrameTypeKey.String("dotnet") + // [JVM] + // + // Stability: development + // + // [JVM]: https://wikipedia.org/wiki/Java_virtual_machine + ProfileFrameTypeJVM = ProfileFrameTypeKey.String("jvm") + // [Kernel] + // + // Stability: development + // + // [Kernel]: https://wikipedia.org/wiki/Kernel_(operating_system) + ProfileFrameTypeKernel = ProfileFrameTypeKey.String("kernel") + // Can be one of but not limited to [C], [C++], [Go] or [Rust]. If possible, a + // more precise value MUST be used. 
+ // + // Stability: development + // + // [C]: https://wikipedia.org/wiki/C_(programming_language) + // [C++]: https://wikipedia.org/wiki/C%2B%2B + // [Go]: https://wikipedia.org/wiki/Go_(programming_language) + // [Rust]: https://wikipedia.org/wiki/Rust_(programming_language) + ProfileFrameTypeNative = ProfileFrameTypeKey.String("native") + // [Perl] + // + // Stability: development + // + // [Perl]: https://wikipedia.org/wiki/Perl + ProfileFrameTypePerl = ProfileFrameTypeKey.String("perl") + // [PHP] + // + // Stability: development + // + // [PHP]: https://wikipedia.org/wiki/PHP + ProfileFrameTypePHP = ProfileFrameTypeKey.String("php") + // [Python] + // + // Stability: development + // + // [Python]: https://wikipedia.org/wiki/Python_(programming_language) + ProfileFrameTypeCpython = ProfileFrameTypeKey.String("cpython") + // [Ruby] + // + // Stability: development + // + // [Ruby]: https://wikipedia.org/wiki/Ruby_(programming_language) + ProfileFrameTypeRuby = ProfileFrameTypeKey.String("ruby") + // [V8JS] + // + // Stability: development + // + // [V8JS]: https://wikipedia.org/wiki/V8_(JavaScript_engine) + ProfileFrameTypeV8JS = ProfileFrameTypeKey.String("v8js") + // [Erlang] + // + // Stability: development + // + // [Erlang]: https://en.wikipedia.org/wiki/BEAM_(Erlang_virtual_machine) + ProfileFrameTypeBeam = ProfileFrameTypeKey.String("beam") + // [Go], + // + // Stability: development + // + // [Go]: https://wikipedia.org/wiki/Go_(programming_language) + ProfileFrameTypeGo = ProfileFrameTypeKey.String("go") + // [Rust] + // + // Stability: development + // + // [Rust]: https://wikipedia.org/wiki/Rust_(programming_language) + ProfileFrameTypeRust = ProfileFrameTypeKey.String("rust") +) + +// Namespace: rpc +const ( + // RPCConnectRPCErrorCodeKey is the attribute Key conforming to the + // "rpc.connect_rpc.error_code" semantic conventions. It represents the + // [error codes] of the Connect request. Error codes are always string values. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // + // [error codes]: https://connectrpc.com//docs/protocol/#error-codes + RPCConnectRPCErrorCodeKey = attribute.Key("rpc.connect_rpc.error_code") + + // RPCGRPCStatusCodeKey is the attribute Key conforming to the + // "rpc.grpc.status_code" semantic conventions. It represents the + // [numeric status code] of the gRPC request. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // + // [numeric status code]: https://github.com/grpc/grpc/blob/v1.33.2/doc/statuscodes.md + RPCGRPCStatusCodeKey = attribute.Key("rpc.grpc.status_code") + + // RPCJSONRPCErrorCodeKey is the attribute Key conforming to the + // "rpc.jsonrpc.error_code" semantic conventions. It represents the `error.code` + // property of response if it is an error response. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: -32700, 100 + RPCJSONRPCErrorCodeKey = attribute.Key("rpc.jsonrpc.error_code") + + // RPCJSONRPCErrorMessageKey is the attribute Key conforming to the + // "rpc.jsonrpc.error_message" semantic conventions. It represents the + // `error.message` property of response if it is an error response. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Parse error", "User already exists" + RPCJSONRPCErrorMessageKey = attribute.Key("rpc.jsonrpc.error_message") + + // RPCJSONRPCRequestIDKey is the attribute Key conforming to the + // "rpc.jsonrpc.request_id" semantic conventions. It represents the `id` + // property of request or response. Since protocol allows id to be int, string, + // `null` or missing (for notifications), value is expected to be cast to string + // for simplicity. Use empty string in case of `null` value. Omit entirely if + // this is a notification. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "10", "request-7", "" + RPCJSONRPCRequestIDKey = attribute.Key("rpc.jsonrpc.request_id") + + // RPCJSONRPCVersionKey is the attribute Key conforming to the + // "rpc.jsonrpc.version" semantic conventions. It represents the protocol + // version as in `jsonrpc` property of request/response. Since JSON-RPC 1.0 + // doesn't specify this, the value can be omitted. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2.0", "1.0" + RPCJSONRPCVersionKey = attribute.Key("rpc.jsonrpc.version") + + // RPCMessageCompressedSizeKey is the attribute Key conforming to the + // "rpc.message.compressed_size" semantic conventions. It represents the + // compressed size of the message in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + RPCMessageCompressedSizeKey = attribute.Key("rpc.message.compressed_size") + + // RPCMessageIDKey is the attribute Key conforming to the "rpc.message.id" + // semantic conventions. It MUST be calculated as two different counters + // starting from `1` one for sent messages and one for received message.. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: This way we guarantee that the values will be consistent between + // different implementations. + RPCMessageIDKey = attribute.Key("rpc.message.id") + + // RPCMessageTypeKey is the attribute Key conforming to the "rpc.message.type" + // semantic conventions. It represents the whether this is a received or sent + // message. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + RPCMessageTypeKey = attribute.Key("rpc.message.type") + + // RPCMessageUncompressedSizeKey is the attribute Key conforming to the + // "rpc.message.uncompressed_size" semantic conventions. It represents the + // uncompressed size of the message in bytes. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + RPCMessageUncompressedSizeKey = attribute.Key("rpc.message.uncompressed_size") + + // RPCMethodKey is the attribute Key conforming to the "rpc.method" semantic + // conventions. It represents the name of the (logical) method being called, + // must be equal to the $method part in the span name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: exampleMethod + // Note: This is the logical name of the method from the RPC interface + // perspective, which can be different from the name of any implementing + // method/function. 
The `code.function.name` attribute may be used to store the + // latter (e.g., method actually executing the call on the server side, RPC + // client stub method on the client side). + RPCMethodKey = attribute.Key("rpc.method") + + // RPCServiceKey is the attribute Key conforming to the "rpc.service" semantic + // conventions. It represents the full (logical) name of the service being + // called, including its package name, if applicable. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: myservice.EchoService + // Note: This is the logical name of the service from the RPC interface + // perspective, which can be different from the name of any implementing class. + // The `code.namespace` attribute may be used to store the latter (despite the + // attribute name, it may include a class name; e.g., class with method actually + // executing the call on the server side, RPC client stub class on the client + // side). + RPCServiceKey = attribute.Key("rpc.service") + + // RPCSystemKey is the attribute Key conforming to the "rpc.system" semantic + // conventions. It represents a string identifying the remoting system. See + // below for a list of well-known identifiers. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + RPCSystemKey = attribute.Key("rpc.system") +) + +// RPCJSONRPCErrorCode returns an attribute KeyValue conforming to the +// "rpc.jsonrpc.error_code" semantic conventions. It represents the `error.code` +// property of response if it is an error response. +func RPCJSONRPCErrorCode(val int) attribute.KeyValue { + return RPCJSONRPCErrorCodeKey.Int(val) +} + +// RPCJSONRPCErrorMessage returns an attribute KeyValue conforming to the +// "rpc.jsonrpc.error_message" semantic conventions. It represents the +// `error.message` property of response if it is an error response. +func RPCJSONRPCErrorMessage(val string) attribute.KeyValue { + return RPCJSONRPCErrorMessageKey.String(val) +} + +// RPCJSONRPCRequestID returns an attribute KeyValue conforming to the +// "rpc.jsonrpc.request_id" semantic conventions. It represents the `id` property +// of request or response. Since protocol allows id to be int, string, `null` or +// missing (for notifications), value is expected to be cast to string for +// simplicity. Use empty string in case of `null` value. Omit entirely if this is +// a notification. +func RPCJSONRPCRequestID(val string) attribute.KeyValue { + return RPCJSONRPCRequestIDKey.String(val) +} + +// RPCJSONRPCVersion returns an attribute KeyValue conforming to the +// "rpc.jsonrpc.version" semantic conventions. It represents the protocol version +// as in `jsonrpc` property of request/response. Since JSON-RPC 1.0 doesn't +// specify this, the value can be omitted. +func RPCJSONRPCVersion(val string) attribute.KeyValue { + return RPCJSONRPCVersionKey.String(val) +} + +// RPCMessageCompressedSize returns an attribute KeyValue conforming to the +// "rpc.message.compressed_size" semantic conventions. It represents the +// compressed size of the message in bytes. +func RPCMessageCompressedSize(val int) attribute.KeyValue { + return RPCMessageCompressedSizeKey.Int(val) +} + +// RPCMessageID returns an attribute KeyValue conforming to the "rpc.message.id" +// semantic conventions. It MUST be calculated as two different counters starting +// from `1` one for sent messages and one for received message.. 
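// Editor's note: the following is an illustrative, standalone sketch and is
// NOT part of the vendored semconv file. It shows how the rpc.* helpers
// defined in this file might be attached to a client span. The semconv import
// version, the tracer name and the literal values are assumptions (the values
// mirror the doc-comment examples above).
package example

import (
	"context"

	"go.opentelemetry.io/otel"
	semconv "go.opentelemetry.io/otel/semconv/v1.30.0" // exact version assumed
	"go.opentelemetry.io/otel/trace"
)

func annotateRPCSpan(ctx context.Context) {
	// Start a client span for a gRPC call and describe it with rpc.* attributes.
	_, span := otel.Tracer("example").Start(ctx, "myservice.EchoService/exampleMethod",
		trace.WithSpanKind(trace.SpanKindClient),
		trace.WithAttributes(
			semconv.RPCSystemGRPC,
			semconv.RPCService("myservice.EchoService"),
			semconv.RPCMethod("exampleMethod"),
		))
	defer span.End()

	// Record the final gRPC status code on the same span.
	span.SetAttributes(semconv.RPCGRPCStatusCodeOk)
}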
+func RPCMessageID(val int) attribute.KeyValue { + return RPCMessageIDKey.Int(val) +} + +// RPCMessageUncompressedSize returns an attribute KeyValue conforming to the +// "rpc.message.uncompressed_size" semantic conventions. It represents the +// uncompressed size of the message in bytes. +func RPCMessageUncompressedSize(val int) attribute.KeyValue { + return RPCMessageUncompressedSizeKey.Int(val) +} + +// RPCMethod returns an attribute KeyValue conforming to the "rpc.method" +// semantic conventions. It represents the name of the (logical) method being +// called, must be equal to the $method part in the span name. +func RPCMethod(val string) attribute.KeyValue { + return RPCMethodKey.String(val) +} + +// RPCService returns an attribute KeyValue conforming to the "rpc.service" +// semantic conventions. It represents the full (logical) name of the service +// being called, including its package name, if applicable. +func RPCService(val string) attribute.KeyValue { + return RPCServiceKey.String(val) +} + +// Enum values for rpc.connect_rpc.error_code +var ( + // cancelled + // Stability: development + RPCConnectRPCErrorCodeCancelled = RPCConnectRPCErrorCodeKey.String("cancelled") + // unknown + // Stability: development + RPCConnectRPCErrorCodeUnknown = RPCConnectRPCErrorCodeKey.String("unknown") + // invalid_argument + // Stability: development + RPCConnectRPCErrorCodeInvalidArgument = RPCConnectRPCErrorCodeKey.String("invalid_argument") + // deadline_exceeded + // Stability: development + RPCConnectRPCErrorCodeDeadlineExceeded = RPCConnectRPCErrorCodeKey.String("deadline_exceeded") + // not_found + // Stability: development + RPCConnectRPCErrorCodeNotFound = RPCConnectRPCErrorCodeKey.String("not_found") + // already_exists + // Stability: development + RPCConnectRPCErrorCodeAlreadyExists = RPCConnectRPCErrorCodeKey.String("already_exists") + // permission_denied + // Stability: development + RPCConnectRPCErrorCodePermissionDenied = RPCConnectRPCErrorCodeKey.String("permission_denied") + // resource_exhausted + // Stability: development + RPCConnectRPCErrorCodeResourceExhausted = RPCConnectRPCErrorCodeKey.String("resource_exhausted") + // failed_precondition + // Stability: development + RPCConnectRPCErrorCodeFailedPrecondition = RPCConnectRPCErrorCodeKey.String("failed_precondition") + // aborted + // Stability: development + RPCConnectRPCErrorCodeAborted = RPCConnectRPCErrorCodeKey.String("aborted") + // out_of_range + // Stability: development + RPCConnectRPCErrorCodeOutOfRange = RPCConnectRPCErrorCodeKey.String("out_of_range") + // unimplemented + // Stability: development + RPCConnectRPCErrorCodeUnimplemented = RPCConnectRPCErrorCodeKey.String("unimplemented") + // internal + // Stability: development + RPCConnectRPCErrorCodeInternal = RPCConnectRPCErrorCodeKey.String("internal") + // unavailable + // Stability: development + RPCConnectRPCErrorCodeUnavailable = RPCConnectRPCErrorCodeKey.String("unavailable") + // data_loss + // Stability: development + RPCConnectRPCErrorCodeDataLoss = RPCConnectRPCErrorCodeKey.String("data_loss") + // unauthenticated + // Stability: development + RPCConnectRPCErrorCodeUnauthenticated = RPCConnectRPCErrorCodeKey.String("unauthenticated") +) + +// Enum values for rpc.grpc.status_code +var ( + // OK + // Stability: development + RPCGRPCStatusCodeOk = RPCGRPCStatusCodeKey.Int(0) + // CANCELLED + // Stability: development + RPCGRPCStatusCodeCancelled = RPCGRPCStatusCodeKey.Int(1) + // UNKNOWN + // Stability: development + RPCGRPCStatusCodeUnknown = 
RPCGRPCStatusCodeKey.Int(2) + // INVALID_ARGUMENT + // Stability: development + RPCGRPCStatusCodeInvalidArgument = RPCGRPCStatusCodeKey.Int(3) + // DEADLINE_EXCEEDED + // Stability: development + RPCGRPCStatusCodeDeadlineExceeded = RPCGRPCStatusCodeKey.Int(4) + // NOT_FOUND + // Stability: development + RPCGRPCStatusCodeNotFound = RPCGRPCStatusCodeKey.Int(5) + // ALREADY_EXISTS + // Stability: development + RPCGRPCStatusCodeAlreadyExists = RPCGRPCStatusCodeKey.Int(6) + // PERMISSION_DENIED + // Stability: development + RPCGRPCStatusCodePermissionDenied = RPCGRPCStatusCodeKey.Int(7) + // RESOURCE_EXHAUSTED + // Stability: development + RPCGRPCStatusCodeResourceExhausted = RPCGRPCStatusCodeKey.Int(8) + // FAILED_PRECONDITION + // Stability: development + RPCGRPCStatusCodeFailedPrecondition = RPCGRPCStatusCodeKey.Int(9) + // ABORTED + // Stability: development + RPCGRPCStatusCodeAborted = RPCGRPCStatusCodeKey.Int(10) + // OUT_OF_RANGE + // Stability: development + RPCGRPCStatusCodeOutOfRange = RPCGRPCStatusCodeKey.Int(11) + // UNIMPLEMENTED + // Stability: development + RPCGRPCStatusCodeUnimplemented = RPCGRPCStatusCodeKey.Int(12) + // INTERNAL + // Stability: development + RPCGRPCStatusCodeInternal = RPCGRPCStatusCodeKey.Int(13) + // UNAVAILABLE + // Stability: development + RPCGRPCStatusCodeUnavailable = RPCGRPCStatusCodeKey.Int(14) + // DATA_LOSS + // Stability: development + RPCGRPCStatusCodeDataLoss = RPCGRPCStatusCodeKey.Int(15) + // UNAUTHENTICATED + // Stability: development + RPCGRPCStatusCodeUnauthenticated = RPCGRPCStatusCodeKey.Int(16) +) + +// Enum values for rpc.message.type +var ( + // sent + // Stability: development + RPCMessageTypeSent = RPCMessageTypeKey.String("SENT") + // received + // Stability: development + RPCMessageTypeReceived = RPCMessageTypeKey.String("RECEIVED") +) + +// Enum values for rpc.system +var ( + // gRPC + // Stability: development + RPCSystemGRPC = RPCSystemKey.String("grpc") + // Java RMI + // Stability: development + RPCSystemJavaRmi = RPCSystemKey.String("java_rmi") + // .NET WCF + // Stability: development + RPCSystemDotnetWcf = RPCSystemKey.String("dotnet_wcf") + // Apache Dubbo + // Stability: development + RPCSystemApacheDubbo = RPCSystemKey.String("apache_dubbo") + // Connect RPC + // Stability: development + RPCSystemConnectRPC = RPCSystemKey.String("connect_rpc") +) + +// Namespace: security_rule +const ( + // SecurityRuleCategoryKey is the attribute Key conforming to the + // "security_rule.category" semantic conventions. It represents a categorization + // value keyword used by the entity using the rule for detection of this event. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Attempted Information Leak" + SecurityRuleCategoryKey = attribute.Key("security_rule.category") + + // SecurityRuleDescriptionKey is the attribute Key conforming to the + // "security_rule.description" semantic conventions. It represents the + // description of the rule generating the event. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Block requests to public DNS over HTTPS / TLS protocols" + SecurityRuleDescriptionKey = attribute.Key("security_rule.description") + + // SecurityRuleLicenseKey is the attribute Key conforming to the + // "security_rule.license" semantic conventions. It represents the name of the + // license under which the rule used to generate this event is made available. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Apache 2.0" + SecurityRuleLicenseKey = attribute.Key("security_rule.license") + + // SecurityRuleNameKey is the attribute Key conforming to the + // "security_rule.name" semantic conventions. It represents the name of the rule + // or signature generating the event. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "BLOCK_DNS_over_TLS" + SecurityRuleNameKey = attribute.Key("security_rule.name") + + // SecurityRuleReferenceKey is the attribute Key conforming to the + // "security_rule.reference" semantic conventions. It represents the reference + // URL to additional information about the rule used to generate this event. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "https://en.wikipedia.org/wiki/DNS_over_TLS" + // Note: The URL can point to the vendor’s documentation about the rule. If + // that’s not available, it can also be a link to a more general page + // describing this type of alert. + SecurityRuleReferenceKey = attribute.Key("security_rule.reference") + + // SecurityRuleRulesetNameKey is the attribute Key conforming to the + // "security_rule.ruleset.name" semantic conventions. It represents the name of + // the ruleset, policy, group, or parent category in which the rule used to + // generate this event is a member. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Standard_Protocol_Filters" + SecurityRuleRulesetNameKey = attribute.Key("security_rule.ruleset.name") + + // SecurityRuleUUIDKey is the attribute Key conforming to the + // "security_rule.uuid" semantic conventions. It represents a rule ID that is + // unique within the scope of a set or group of agents, observers, or other + // entities using the rule for detection of this event. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "550e8400-e29b-41d4-a716-446655440000", "1100110011" + SecurityRuleUUIDKey = attribute.Key("security_rule.uuid") + + // SecurityRuleVersionKey is the attribute Key conforming to the + // "security_rule.version" semantic conventions. It represents the version / + // revision of the rule being used for analysis. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1.0.0" + SecurityRuleVersionKey = attribute.Key("security_rule.version") +) + +// SecurityRuleCategory returns an attribute KeyValue conforming to the +// "security_rule.category" semantic conventions. It represents a categorization +// value keyword used by the entity using the rule for detection of this event. +func SecurityRuleCategory(val string) attribute.KeyValue { + return SecurityRuleCategoryKey.String(val) +} + +// SecurityRuleDescription returns an attribute KeyValue conforming to the +// "security_rule.description" semantic conventions. It represents the +// description of the rule generating the event. +func SecurityRuleDescription(val string) attribute.KeyValue { + return SecurityRuleDescriptionKey.String(val) +} + +// SecurityRuleLicense returns an attribute KeyValue conforming to the +// "security_rule.license" semantic conventions. It represents the name of the +// license under which the rule used to generate this event is made available. 
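// Editor's note: illustrative, standalone sketch; NOT part of the vendored
// file. It collects the security_rule.* helpers defined in this namespace into
// an attribute set, e.g. for enriching an event or log record. The import
// version and the values (taken from the doc-comment examples) are assumptions.
package example

import (
	"go.opentelemetry.io/otel/attribute"
	semconv "go.opentelemetry.io/otel/semconv/v1.30.0" // exact version assumed
)

func securityRuleAttrs() []attribute.KeyValue {
	return []attribute.KeyValue{
		semconv.SecurityRuleName("BLOCK_DNS_over_TLS"),
		semconv.SecurityRuleCategory("Attempted Information Leak"),
		semconv.SecurityRuleRulesetName("Standard_Protocol_Filters"),
		semconv.SecurityRuleUUID("550e8400-e29b-41d4-a716-446655440000"),
		semconv.SecurityRuleVersion("1.0.0"),
	}
}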
+func SecurityRuleLicense(val string) attribute.KeyValue { + return SecurityRuleLicenseKey.String(val) +} + +// SecurityRuleName returns an attribute KeyValue conforming to the +// "security_rule.name" semantic conventions. It represents the name of the rule +// or signature generating the event. +func SecurityRuleName(val string) attribute.KeyValue { + return SecurityRuleNameKey.String(val) +} + +// SecurityRuleReference returns an attribute KeyValue conforming to the +// "security_rule.reference" semantic conventions. It represents the reference +// URL to additional information about the rule used to generate this event. +func SecurityRuleReference(val string) attribute.KeyValue { + return SecurityRuleReferenceKey.String(val) +} + +// SecurityRuleRulesetName returns an attribute KeyValue conforming to the +// "security_rule.ruleset.name" semantic conventions. It represents the name of +// the ruleset, policy, group, or parent category in which the rule used to +// generate this event is a member. +func SecurityRuleRulesetName(val string) attribute.KeyValue { + return SecurityRuleRulesetNameKey.String(val) +} + +// SecurityRuleUUID returns an attribute KeyValue conforming to the +// "security_rule.uuid" semantic conventions. It represents a rule ID that is +// unique within the scope of a set or group of agents, observers, or other +// entities using the rule for detection of this event. +func SecurityRuleUUID(val string) attribute.KeyValue { + return SecurityRuleUUIDKey.String(val) +} + +// SecurityRuleVersion returns an attribute KeyValue conforming to the +// "security_rule.version" semantic conventions. It represents the version / +// revision of the rule being used for analysis. +func SecurityRuleVersion(val string) attribute.KeyValue { + return SecurityRuleVersionKey.String(val) +} + +// Namespace: server +const ( + // ServerAddressKey is the attribute Key conforming to the "server.address" + // semantic conventions. It represents the server domain name if available + // without reverse DNS lookup; otherwise, IP address or Unix domain socket name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "example.com", "10.1.2.80", "/tmp/my.sock" + // Note: When observed from the client side, and when communicating through an + // intermediary, `server.address` SHOULD represent the server address behind any + // intermediaries, for example proxies, if it's available. + ServerAddressKey = attribute.Key("server.address") + + // ServerPortKey is the attribute Key conforming to the "server.port" semantic + // conventions. It represents the server port number. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: 80, 8080, 443 + // Note: When observed from the client side, and when communicating through an + // intermediary, `server.port` SHOULD represent the server port behind any + // intermediaries, for example proxies, if it's available. + ServerPortKey = attribute.Key("server.port") +) + +// ServerAddress returns an attribute KeyValue conforming to the "server.address" +// semantic conventions. It represents the server domain name if available +// without reverse DNS lookup; otherwise, IP address or Unix domain socket name. +func ServerAddress(val string) attribute.KeyValue { + return ServerAddressKey.String(val) +} + +// ServerPort returns an attribute KeyValue conforming to the "server.port" +// semantic conventions. It represents the server port number. 
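// Editor's note: illustrative, standalone sketch; NOT part of the vendored
// file. It applies the server.* helpers defined above to a client-side span;
// the import version and the values (from the doc-comment examples) are
// assumptions.
package example

import (
	semconv "go.opentelemetry.io/otel/semconv/v1.30.0" // exact version assumed
	"go.opentelemetry.io/otel/trace"
)

func setServerAttrs(span trace.Span) {
	// When observed from the client side through an intermediary, these SHOULD
	// describe the server behind the proxy.
	span.SetAttributes(
		semconv.ServerAddress("example.com"),
		semconv.ServerPort(443),
	)
}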
+func ServerPort(val int) attribute.KeyValue { + return ServerPortKey.Int(val) +} + +// Namespace: service +const ( + // ServiceInstanceIDKey is the attribute Key conforming to the + // "service.instance.id" semantic conventions. It represents the string ID of + // the service instance. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "627cc493-f310-47de-96bd-71410b7dec09" + // Note: MUST be unique for each instance of the same + // `service.namespace,service.name` pair (in other words + // `service.namespace,service.name,service.instance.id` triplet MUST be globally + // unique). The ID helps to + // distinguish instances of the same service that exist at the same time (e.g. + // instances of a horizontally scaled + // service). + // + // Implementations, such as SDKs, are recommended to generate a random Version 1 + // or Version 4 [RFC + // 4122] UUID, but are free to use an inherent unique ID as + // the source of + // this value if stability is desirable. In that case, the ID SHOULD be used as + // source of a UUID Version 5 and + // SHOULD use the following UUID as the namespace: + // `4d63009a-8d0f-11ee-aad7-4c796ed8e320`. + // + // UUIDs are typically recommended, as only an opaque value for the purposes of + // identifying a service instance is + // needed. Similar to what can be seen in the man page for the + // [`/etc/machine-id`] file, the underlying + // data, such as pod name and namespace should be treated as confidential, being + // the user's choice to expose it + // or not via another resource attribute. + // + // For applications running behind an application server (like unicorn), we do + // not recommend using one identifier + // for all processes participating in the application. Instead, it's recommended + // each division (e.g. a worker + // thread in unicorn) to have its own instance.id. + // + // It's not recommended for a Collector to set `service.instance.id` if it can't + // unambiguously determine the + // service instance that is generating that telemetry. For instance, creating an + // UUID based on `pod.name` will + // likely be wrong, as the Collector might not know from which container within + // that pod the telemetry originated. + // However, Collectors can set the `service.instance.id` if they can + // unambiguously determine the service instance + // for that telemetry. This is typically the case for scraping receivers, as + // they know the target address and + // port. + // + // [RFC + // 4122]: https://www.ietf.org/rfc/rfc4122.txt + // [`/etc/machine-id`]: https://www.freedesktop.org/software/systemd/man/latest/machine-id.html + ServiceInstanceIDKey = attribute.Key("service.instance.id") + + // ServiceNameKey is the attribute Key conforming to the "service.name" semantic + // conventions. It represents the logical name of the service. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "shoppingcart" + // Note: MUST be the same for all instances of horizontally scaled services. If + // the value was not specified, SDKs MUST fallback to `unknown_service:` + // concatenated with [`process.executable.name`], e.g. `unknown_service:bash`. + // If `process.executable.name` is not available, the value MUST be set to + // `unknown_service`. + // + // [`process.executable.name`]: process.md + ServiceNameKey = attribute.Key("service.name") + + // ServiceNamespaceKey is the attribute Key conforming to the + // "service.namespace" semantic conventions. 
It represents a namespace for + // `service.name`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Shop" + // Note: A string value having a meaning that helps to distinguish a group of + // services, for example the team name that owns a group of services. + // `service.name` is expected to be unique within the same namespace. If + // `service.namespace` is not specified in the Resource then `service.name` is + // expected to be unique for all services that have no explicit namespace + // defined (so the empty/unspecified namespace is simply one more valid + // namespace). Zero-length namespace string is assumed equal to unspecified + // namespace. + ServiceNamespaceKey = attribute.Key("service.namespace") + + // ServiceVersionKey is the attribute Key conforming to the "service.version" + // semantic conventions. It represents the version string of the service API or + // implementation. The format is not defined by these conventions. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "2.0.0", "a01dbef8a" + ServiceVersionKey = attribute.Key("service.version") +) + +// ServiceInstanceID returns an attribute KeyValue conforming to the +// "service.instance.id" semantic conventions. It represents the string ID of the +// service instance. +func ServiceInstanceID(val string) attribute.KeyValue { + return ServiceInstanceIDKey.String(val) +} + +// ServiceName returns an attribute KeyValue conforming to the "service.name" +// semantic conventions. It represents the logical name of the service. +func ServiceName(val string) attribute.KeyValue { + return ServiceNameKey.String(val) +} + +// ServiceNamespace returns an attribute KeyValue conforming to the +// "service.namespace" semantic conventions. It represents a namespace for +// `service.name`. +func ServiceNamespace(val string) attribute.KeyValue { + return ServiceNamespaceKey.String(val) +} + +// ServiceVersion returns an attribute KeyValue conforming to the +// "service.version" semantic conventions. It represents the version string of +// the service API or implementation. The format is not defined by these +// conventions. +func ServiceVersion(val string) attribute.KeyValue { + return ServiceVersionKey.String(val) +} + +// Namespace: session +const ( + // SessionIDKey is the attribute Key conforming to the "session.id" semantic + // conventions. It represents a unique id to identify a session. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 00112233-4455-6677-8899-aabbccddeeff + SessionIDKey = attribute.Key("session.id") + + // SessionPreviousIDKey is the attribute Key conforming to the + // "session.previous_id" semantic conventions. It represents the previous + // `session.id` for this user, when known. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 00112233-4455-6677-8899-aabbccddeeff + SessionPreviousIDKey = attribute.Key("session.previous_id") +) + +// SessionID returns an attribute KeyValue conforming to the "session.id" +// semantic conventions. It represents a unique id to identify a session. +func SessionID(val string) attribute.KeyValue { + return SessionIDKey.String(val) +} + +// SessionPreviousID returns an attribute KeyValue conforming to the +// "session.previous_id" semantic conventions. It represents the previous +// `session.id` for this user, when known. 
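// Editor's note: illustrative, standalone sketch; NOT part of the vendored
// file. It uses the service.* helpers defined above to build an OpenTelemetry
// SDK Resource. The import version and the literal values are assumptions
// (values mirror the doc-comment examples).
package example

import (
	semconv "go.opentelemetry.io/otel/semconv/v1.30.0" // exact version assumed
	"go.opentelemetry.io/otel/sdk/resource"
)

func newServiceResource() *resource.Resource {
	return resource.NewWithAttributes(
		semconv.SchemaURL,
		semconv.ServiceName("shoppingcart"),
		semconv.ServiceNamespace("Shop"),
		semconv.ServiceVersion("2.0.0"),
		// A globally unique, stable instance ID; a random UUID per process is
		// the recommended default (see the service.instance.id note above).
		semconv.ServiceInstanceID("627cc493-f310-47de-96bd-71410b7dec09"),
	)
}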
+func SessionPreviousID(val string) attribute.KeyValue { + return SessionPreviousIDKey.String(val) +} + +// Namespace: signalr +const ( + // SignalRConnectionStatusKey is the attribute Key conforming to the + // "signalr.connection.status" semantic conventions. It represents the signalR + // HTTP connection closure status. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "app_shutdown", "timeout" + SignalRConnectionStatusKey = attribute.Key("signalr.connection.status") + + // SignalRTransportKey is the attribute Key conforming to the + // "signalr.transport" semantic conventions. It represents the + // [SignalR transport type]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "web_sockets", "long_polling" + // + // [SignalR transport type]: https://github.com/dotnet/aspnetcore/blob/main/src/SignalR/docs/specs/TransportProtocols.md + SignalRTransportKey = attribute.Key("signalr.transport") +) + +// Enum values for signalr.connection.status +var ( + // The connection was closed normally. + // Stability: stable + SignalRConnectionStatusNormalClosure = SignalRConnectionStatusKey.String("normal_closure") + // The connection was closed due to a timeout. + // Stability: stable + SignalRConnectionStatusTimeout = SignalRConnectionStatusKey.String("timeout") + // The connection was closed because the app is shutting down. + // Stability: stable + SignalRConnectionStatusAppShutdown = SignalRConnectionStatusKey.String("app_shutdown") +) + +// Enum values for signalr.transport +var ( + // ServerSentEvents protocol + // Stability: stable + SignalRTransportServerSentEvents = SignalRTransportKey.String("server_sent_events") + // LongPolling protocol + // Stability: stable + SignalRTransportLongPolling = SignalRTransportKey.String("long_polling") + // WebSockets protocol + // Stability: stable + SignalRTransportWebSockets = SignalRTransportKey.String("web_sockets") +) + +// Namespace: source +const ( + // SourceAddressKey is the attribute Key conforming to the "source.address" + // semantic conventions. It represents the source address - domain name if + // available without reverse DNS lookup; otherwise, IP address or Unix domain + // socket name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "source.example.com", "10.1.2.80", "/tmp/my.sock" + // Note: When observed from the destination side, and when communicating through + // an intermediary, `source.address` SHOULD represent the source address behind + // any intermediaries, for example proxies, if it's available. + SourceAddressKey = attribute.Key("source.address") + + // SourcePortKey is the attribute Key conforming to the "source.port" semantic + // conventions. It represents the source port number. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 3389, 2888 + SourcePortKey = attribute.Key("source.port") +) + +// SourceAddress returns an attribute KeyValue conforming to the "source.address" +// semantic conventions. It represents the source address - domain name if +// available without reverse DNS lookup; otherwise, IP address or Unix domain +// socket name. +func SourceAddress(val string) attribute.KeyValue { + return SourceAddressKey.String(val) +} + +// SourcePort returns an attribute KeyValue conforming to the "source.port" +// semantic conventions. It represents the source port number. 
+func SourcePort(val int) attribute.KeyValue { + return SourcePortKey.Int(val) +} + +// Namespace: system +const ( + // SystemCPULogicalNumberKey is the attribute Key conforming to the + // "system.cpu.logical_number" semantic conventions. It represents the + // deprecated, use `cpu.logical_number` instead. + // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 1 + SystemCPULogicalNumberKey = attribute.Key("system.cpu.logical_number") + + // SystemDeviceKey is the attribute Key conforming to the "system.device" + // semantic conventions. It represents the device identifier. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "(identifier)" + SystemDeviceKey = attribute.Key("system.device") + + // SystemFilesystemModeKey is the attribute Key conforming to the + // "system.filesystem.mode" semantic conventions. It represents the filesystem + // mode. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "rw, ro" + SystemFilesystemModeKey = attribute.Key("system.filesystem.mode") + + // SystemFilesystemMountpointKey is the attribute Key conforming to the + // "system.filesystem.mountpoint" semantic conventions. It represents the + // filesystem mount path. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/mnt/data" + SystemFilesystemMountpointKey = attribute.Key("system.filesystem.mountpoint") + + // SystemFilesystemStateKey is the attribute Key conforming to the + // "system.filesystem.state" semantic conventions. It represents the filesystem + // state. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "used" + SystemFilesystemStateKey = attribute.Key("system.filesystem.state") + + // SystemFilesystemTypeKey is the attribute Key conforming to the + // "system.filesystem.type" semantic conventions. It represents the filesystem + // type. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "ext4" + SystemFilesystemTypeKey = attribute.Key("system.filesystem.type") + + // SystemMemoryStateKey is the attribute Key conforming to the + // "system.memory.state" semantic conventions. It represents the memory state. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "free", "cached" + SystemMemoryStateKey = attribute.Key("system.memory.state") + + // SystemPagingDirectionKey is the attribute Key conforming to the + // "system.paging.direction" semantic conventions. It represents the paging + // access direction. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "in" + SystemPagingDirectionKey = attribute.Key("system.paging.direction") + + // SystemPagingStateKey is the attribute Key conforming to the + // "system.paging.state" semantic conventions. It represents the memory paging + // state. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "free" + SystemPagingStateKey = attribute.Key("system.paging.state") + + // SystemPagingTypeKey is the attribute Key conforming to the + // "system.paging.type" semantic conventions. It represents the memory paging + // type. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "minor" + SystemPagingTypeKey = attribute.Key("system.paging.type") + + // SystemProcessStatusKey is the attribute Key conforming to the + // "system.process.status" semantic conventions. It represents the process + // state, e.g., [Linux Process State Codes]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "running" + // + // [Linux Process State Codes]: https://man7.org/linux/man-pages/man1/ps.1.html#PROCESS_STATE_CODES + SystemProcessStatusKey = attribute.Key("system.process.status") +) + +// SystemCPULogicalNumber returns an attribute KeyValue conforming to the +// "system.cpu.logical_number" semantic conventions. It represents the +// deprecated, use `cpu.logical_number` instead. +func SystemCPULogicalNumber(val int) attribute.KeyValue { + return SystemCPULogicalNumberKey.Int(val) +} + +// SystemDevice returns an attribute KeyValue conforming to the "system.device" +// semantic conventions. It represents the device identifier. +func SystemDevice(val string) attribute.KeyValue { + return SystemDeviceKey.String(val) +} + +// SystemFilesystemMode returns an attribute KeyValue conforming to the +// "system.filesystem.mode" semantic conventions. It represents the filesystem +// mode. +func SystemFilesystemMode(val string) attribute.KeyValue { + return SystemFilesystemModeKey.String(val) +} + +// SystemFilesystemMountpoint returns an attribute KeyValue conforming to the +// "system.filesystem.mountpoint" semantic conventions. It represents the +// filesystem mount path. +func SystemFilesystemMountpoint(val string) attribute.KeyValue { + return SystemFilesystemMountpointKey.String(val) +} + +// Enum values for system.filesystem.state +var ( + // used + // Stability: development + SystemFilesystemStateUsed = SystemFilesystemStateKey.String("used") + // free + // Stability: development + SystemFilesystemStateFree = SystemFilesystemStateKey.String("free") + // reserved + // Stability: development + SystemFilesystemStateReserved = SystemFilesystemStateKey.String("reserved") +) + +// Enum values for system.filesystem.type +var ( + // fat32 + // Stability: development + SystemFilesystemTypeFat32 = SystemFilesystemTypeKey.String("fat32") + // exfat + // Stability: development + SystemFilesystemTypeExfat = SystemFilesystemTypeKey.String("exfat") + // ntfs + // Stability: development + SystemFilesystemTypeNtfs = SystemFilesystemTypeKey.String("ntfs") + // refs + // Stability: development + SystemFilesystemTypeRefs = SystemFilesystemTypeKey.String("refs") + // hfsplus + // Stability: development + SystemFilesystemTypeHfsplus = SystemFilesystemTypeKey.String("hfsplus") + // ext4 + // Stability: development + SystemFilesystemTypeExt4 = SystemFilesystemTypeKey.String("ext4") +) + +// Enum values for system.memory.state +var ( + // used + // Stability: development + SystemMemoryStateUsed = SystemMemoryStateKey.String("used") + // free + // Stability: development + SystemMemoryStateFree = SystemMemoryStateKey.String("free") + // Deprecated: Removed, report shared memory usage with + // `metric.system.memory.shared` metric. 
+ SystemMemoryStateShared = SystemMemoryStateKey.String("shared") + // buffers + // Stability: development + SystemMemoryStateBuffers = SystemMemoryStateKey.String("buffers") + // cached + // Stability: development + SystemMemoryStateCached = SystemMemoryStateKey.String("cached") +) + +// Enum values for system.paging.direction +var ( + // in + // Stability: development + SystemPagingDirectionIn = SystemPagingDirectionKey.String("in") + // out + // Stability: development + SystemPagingDirectionOut = SystemPagingDirectionKey.String("out") +) + +// Enum values for system.paging.state +var ( + // used + // Stability: development + SystemPagingStateUsed = SystemPagingStateKey.String("used") + // free + // Stability: development + SystemPagingStateFree = SystemPagingStateKey.String("free") +) + +// Enum values for system.paging.type +var ( + // major + // Stability: development + SystemPagingTypeMajor = SystemPagingTypeKey.String("major") + // minor + // Stability: development + SystemPagingTypeMinor = SystemPagingTypeKey.String("minor") +) + +// Enum values for system.process.status +var ( + // running + // Stability: development + SystemProcessStatusRunning = SystemProcessStatusKey.String("running") + // sleeping + // Stability: development + SystemProcessStatusSleeping = SystemProcessStatusKey.String("sleeping") + // stopped + // Stability: development + SystemProcessStatusStopped = SystemProcessStatusKey.String("stopped") + // defunct + // Stability: development + SystemProcessStatusDefunct = SystemProcessStatusKey.String("defunct") +) + +// Namespace: telemetry +const ( + // TelemetryDistroNameKey is the attribute Key conforming to the + // "telemetry.distro.name" semantic conventions. It represents the name of the + // auto instrumentation agent or distribution, if used. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "parts-unlimited-java" + // Note: Official auto instrumentation agents and distributions SHOULD set the + // `telemetry.distro.name` attribute to + // a string starting with `opentelemetry-`, e.g. + // `opentelemetry-java-instrumentation`. + TelemetryDistroNameKey = attribute.Key("telemetry.distro.name") + + // TelemetryDistroVersionKey is the attribute Key conforming to the + // "telemetry.distro.version" semantic conventions. It represents the version + // string of the auto instrumentation agent or distribution, if used. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1.2.3" + TelemetryDistroVersionKey = attribute.Key("telemetry.distro.version") + + // TelemetrySDKLanguageKey is the attribute Key conforming to the + // "telemetry.sdk.language" semantic conventions. It represents the language of + // the telemetry SDK. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: + TelemetrySDKLanguageKey = attribute.Key("telemetry.sdk.language") + + // TelemetrySDKNameKey is the attribute Key conforming to the + // "telemetry.sdk.name" semantic conventions. It represents the name of the + // telemetry SDK as defined above. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "opentelemetry" + // Note: The OpenTelemetry SDK MUST set the `telemetry.sdk.name` attribute to + // `opentelemetry`. 
+ // If another SDK, like a fork or a vendor-provided implementation, is used, + // this SDK MUST set the + // `telemetry.sdk.name` attribute to the fully-qualified class or module name of + // this SDK's main entry point + // or another suitable identifier depending on the language. + // The identifier `opentelemetry` is reserved and MUST NOT be used in this case. + // All custom identifiers SHOULD be stable across different versions of an + // implementation. + TelemetrySDKNameKey = attribute.Key("telemetry.sdk.name") + + // TelemetrySDKVersionKey is the attribute Key conforming to the + // "telemetry.sdk.version" semantic conventions. It represents the version + // string of the telemetry SDK. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "1.2.3" + TelemetrySDKVersionKey = attribute.Key("telemetry.sdk.version") +) + +// TelemetryDistroName returns an attribute KeyValue conforming to the +// "telemetry.distro.name" semantic conventions. It represents the name of the +// auto instrumentation agent or distribution, if used. +func TelemetryDistroName(val string) attribute.KeyValue { + return TelemetryDistroNameKey.String(val) +} + +// TelemetryDistroVersion returns an attribute KeyValue conforming to the +// "telemetry.distro.version" semantic conventions. It represents the version +// string of the auto instrumentation agent or distribution, if used. +func TelemetryDistroVersion(val string) attribute.KeyValue { + return TelemetryDistroVersionKey.String(val) +} + +// TelemetrySDKName returns an attribute KeyValue conforming to the +// "telemetry.sdk.name" semantic conventions. It represents the name of the +// telemetry SDK as defined above. +func TelemetrySDKName(val string) attribute.KeyValue { + return TelemetrySDKNameKey.String(val) +} + +// TelemetrySDKVersion returns an attribute KeyValue conforming to the +// "telemetry.sdk.version" semantic conventions. It represents the version string +// of the telemetry SDK. +func TelemetrySDKVersion(val string) attribute.KeyValue { + return TelemetrySDKVersionKey.String(val) +} + +// Enum values for telemetry.sdk.language +var ( + // cpp + // Stability: stable + TelemetrySDKLanguageCPP = TelemetrySDKLanguageKey.String("cpp") + // dotnet + // Stability: stable + TelemetrySDKLanguageDotnet = TelemetrySDKLanguageKey.String("dotnet") + // erlang + // Stability: stable + TelemetrySDKLanguageErlang = TelemetrySDKLanguageKey.String("erlang") + // go + // Stability: stable + TelemetrySDKLanguageGo = TelemetrySDKLanguageKey.String("go") + // java + // Stability: stable + TelemetrySDKLanguageJava = TelemetrySDKLanguageKey.String("java") + // nodejs + // Stability: stable + TelemetrySDKLanguageNodejs = TelemetrySDKLanguageKey.String("nodejs") + // php + // Stability: stable + TelemetrySDKLanguagePHP = TelemetrySDKLanguageKey.String("php") + // python + // Stability: stable + TelemetrySDKLanguagePython = TelemetrySDKLanguageKey.String("python") + // ruby + // Stability: stable + TelemetrySDKLanguageRuby = TelemetrySDKLanguageKey.String("ruby") + // rust + // Stability: stable + TelemetrySDKLanguageRust = TelemetrySDKLanguageKey.String("rust") + // swift + // Stability: stable + TelemetrySDKLanguageSwift = TelemetrySDKLanguageKey.String("swift") + // webjs + // Stability: stable + TelemetrySDKLanguageWebJS = TelemetrySDKLanguageKey.String("webjs") +) + +// Namespace: test +const ( + // TestCaseNameKey is the attribute Key conforming to the "test.case.name" + // semantic conventions. 
It represents the fully qualified human readable name + // of the [test case]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "org.example.TestCase1.test1", "example/tests/TestCase1.test1", + // "ExampleTestCase1_test1" + // + // [test case]: https://wikipedia.org/wiki/Test_case + TestCaseNameKey = attribute.Key("test.case.name") + + // TestCaseResultStatusKey is the attribute Key conforming to the + // "test.case.result.status" semantic conventions. It represents the status of + // the actual test case result from test execution. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "pass", "fail" + TestCaseResultStatusKey = attribute.Key("test.case.result.status") + + // TestSuiteNameKey is the attribute Key conforming to the "test.suite.name" + // semantic conventions. It represents the human readable name of a [test suite] + // . + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "TestSuite1" + // + // [test suite]: https://wikipedia.org/wiki/Test_suite + TestSuiteNameKey = attribute.Key("test.suite.name") + + // TestSuiteRunStatusKey is the attribute Key conforming to the + // "test.suite.run.status" semantic conventions. It represents the status of the + // test suite run. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "success", "failure", "skipped", "aborted", "timed_out", + // "in_progress" + TestSuiteRunStatusKey = attribute.Key("test.suite.run.status") +) + +// TestCaseName returns an attribute KeyValue conforming to the "test.case.name" +// semantic conventions. It represents the fully qualified human readable name of +// the [test case]. +// +// [test case]: https://wikipedia.org/wiki/Test_case +func TestCaseName(val string) attribute.KeyValue { + return TestCaseNameKey.String(val) +} + +// TestSuiteName returns an attribute KeyValue conforming to the +// "test.suite.name" semantic conventions. It represents the human readable name +// of a [test suite]. +// +// [test suite]: https://wikipedia.org/wiki/Test_suite +func TestSuiteName(val string) attribute.KeyValue { + return TestSuiteNameKey.String(val) +} + +// Enum values for test.case.result.status +var ( + // pass + // Stability: development + TestCaseResultStatusPass = TestCaseResultStatusKey.String("pass") + // fail + // Stability: development + TestCaseResultStatusFail = TestCaseResultStatusKey.String("fail") +) + +// Enum values for test.suite.run.status +var ( + // success + // Stability: development + TestSuiteRunStatusSuccess = TestSuiteRunStatusKey.String("success") + // failure + // Stability: development + TestSuiteRunStatusFailure = TestSuiteRunStatusKey.String("failure") + // skipped + // Stability: development + TestSuiteRunStatusSkipped = TestSuiteRunStatusKey.String("skipped") + // aborted + // Stability: development + TestSuiteRunStatusAborted = TestSuiteRunStatusKey.String("aborted") + // timed_out + // Stability: development + TestSuiteRunStatusTimedOut = TestSuiteRunStatusKey.String("timed_out") + // in_progress + // Stability: development + TestSuiteRunStatusInProgress = TestSuiteRunStatusKey.String("in_progress") +) + +// Namespace: thread +const ( + // ThreadIDKey is the attribute Key conforming to the "thread.id" semantic + // conventions. It represents the current "managed" thread ID (as opposed to OS + // thread ID). 
+ // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + ThreadIDKey = attribute.Key("thread.id") + + // ThreadNameKey is the attribute Key conforming to the "thread.name" semantic + // conventions. It represents the current thread name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: main + ThreadNameKey = attribute.Key("thread.name") +) + +// ThreadID returns an attribute KeyValue conforming to the "thread.id" semantic +// conventions. It represents the current "managed" thread ID (as opposed to OS +// thread ID). +func ThreadID(val int) attribute.KeyValue { + return ThreadIDKey.Int(val) +} + +// ThreadName returns an attribute KeyValue conforming to the "thread.name" +// semantic conventions. It represents the current thread name. +func ThreadName(val string) attribute.KeyValue { + return ThreadNameKey.String(val) +} + +// Namespace: tls +const ( + // TLSCipherKey is the attribute Key conforming to the "tls.cipher" semantic + // conventions. It represents the string indicating the [cipher] used during the + // current connection. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "TLS_RSA_WITH_3DES_EDE_CBC_SHA", + // "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256" + // Note: The values allowed for `tls.cipher` MUST be one of the `Descriptions` + // of the [registered TLS Cipher Suits]. + // + // [cipher]: https://datatracker.ietf.org/doc/html/rfc5246#appendix-A.5 + // [registered TLS Cipher Suits]: https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#table-tls-parameters-4 + TLSCipherKey = attribute.Key("tls.cipher") + + // TLSClientCertificateKey is the attribute Key conforming to the + // "tls.client.certificate" semantic conventions. It represents the PEM-encoded + // stand-alone certificate offered by the client. This is usually + // mutually-exclusive of `client.certificate_chain` since this value also exists + // in that list. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "MII..." + TLSClientCertificateKey = attribute.Key("tls.client.certificate") + + // TLSClientCertificateChainKey is the attribute Key conforming to the + // "tls.client.certificate_chain" semantic conventions. It represents the array + // of PEM-encoded certificates that make up the certificate chain offered by the + // client. This is usually mutually-exclusive of `client.certificate` since that + // value should be the first certificate in the chain. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "MII...", "MI..." + TLSClientCertificateChainKey = attribute.Key("tls.client.certificate_chain") + + // TLSClientHashMd5Key is the attribute Key conforming to the + // "tls.client.hash.md5" semantic conventions. It represents the certificate + // fingerprint using the MD5 digest of DER-encoded version of certificate + // offered by the client. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0F76C7F2C55BFD7D8E8B8F4BFBF0C9EC" + TLSClientHashMd5Key = attribute.Key("tls.client.hash.md5") + + // TLSClientHashSha1Key is the attribute Key conforming to the + // "tls.client.hash.sha1" semantic conventions. 
It represents the certificate + // fingerprint using the SHA1 digest of DER-encoded version of certificate + // offered by the client. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "9E393D93138888D288266C2D915214D1D1CCEB2A" + TLSClientHashSha1Key = attribute.Key("tls.client.hash.sha1") + + // TLSClientHashSha256Key is the attribute Key conforming to the + // "tls.client.hash.sha256" semantic conventions. It represents the certificate + // fingerprint using the SHA256 digest of DER-encoded version of certificate + // offered by the client. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0687F666A054EF17A08E2F2162EAB4CBC0D265E1D7875BE74BF3C712CA92DAF0" + TLSClientHashSha256Key = attribute.Key("tls.client.hash.sha256") + + // TLSClientIssuerKey is the attribute Key conforming to the "tls.client.issuer" + // semantic conventions. It represents the distinguished name of [subject] of + // the issuer of the x.509 certificate presented by the client. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "CN=Example Root CA, OU=Infrastructure Team, DC=example, DC=com" + // + // [subject]: https://datatracker.ietf.org/doc/html/rfc5280#section-4.1.2.6 + TLSClientIssuerKey = attribute.Key("tls.client.issuer") + + // TLSClientJa3Key is the attribute Key conforming to the "tls.client.ja3" + // semantic conventions. It represents a hash that identifies clients based on + // how they perform an SSL/TLS handshake. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "d4e5b18d6b55c71272893221c96ba240" + TLSClientJa3Key = attribute.Key("tls.client.ja3") + + // TLSClientNotAfterKey is the attribute Key conforming to the + // "tls.client.not_after" semantic conventions. It represents the date/Time + // indicating when client certificate is no longer considered valid. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T00:00:00.000Z" + TLSClientNotAfterKey = attribute.Key("tls.client.not_after") + + // TLSClientNotBeforeKey is the attribute Key conforming to the + // "tls.client.not_before" semantic conventions. It represents the date/Time + // indicating when client certificate is first considered valid. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1970-01-01T00:00:00.000Z" + TLSClientNotBeforeKey = attribute.Key("tls.client.not_before") + + // TLSClientSubjectKey is the attribute Key conforming to the + // "tls.client.subject" semantic conventions. It represents the distinguished + // name of subject of the x.509 certificate presented by the client. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "CN=myclient, OU=Documentation Team, DC=example, DC=com" + TLSClientSubjectKey = attribute.Key("tls.client.subject") + + // TLSClientSupportedCiphersKey is the attribute Key conforming to the + // "tls.client.supported_ciphers" semantic conventions. It represents the array + // of ciphers offered by the client during the client hello. 
+ // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", + // "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384" + TLSClientSupportedCiphersKey = attribute.Key("tls.client.supported_ciphers") + + // TLSCurveKey is the attribute Key conforming to the "tls.curve" semantic + // conventions. It represents the string indicating the curve used for the given + // cipher, when applicable. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "secp256r1" + TLSCurveKey = attribute.Key("tls.curve") + + // TLSEstablishedKey is the attribute Key conforming to the "tls.established" + // semantic conventions. It represents the boolean flag indicating if the TLS + // negotiation was successful and transitioned to an encrypted tunnel. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: true + TLSEstablishedKey = attribute.Key("tls.established") + + // TLSNextProtocolKey is the attribute Key conforming to the "tls.next_protocol" + // semantic conventions. It represents the string indicating the protocol being + // tunneled. Per the values in the [IANA registry], this string should be lower + // case. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "http/1.1" + // + // [IANA registry]: https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + TLSNextProtocolKey = attribute.Key("tls.next_protocol") + + // TLSProtocolNameKey is the attribute Key conforming to the "tls.protocol.name" + // semantic conventions. It represents the normalized lowercase protocol name + // parsed from original string of the negotiated [SSL/TLS protocol version]. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // + // [SSL/TLS protocol version]: https://docs.openssl.org/1.1.1/man3/SSL_get_version/#return-values + TLSProtocolNameKey = attribute.Key("tls.protocol.name") + + // TLSProtocolVersionKey is the attribute Key conforming to the + // "tls.protocol.version" semantic conventions. It represents the numeric part + // of the version parsed from the original string of the negotiated + // [SSL/TLS protocol version]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1.2", "3" + // + // [SSL/TLS protocol version]: https://docs.openssl.org/1.1.1/man3/SSL_get_version/#return-values + TLSProtocolVersionKey = attribute.Key("tls.protocol.version") + + // TLSResumedKey is the attribute Key conforming to the "tls.resumed" semantic + // conventions. It represents the boolean flag indicating if this TLS connection + // was resumed from an existing TLS negotiation. + // + // Type: boolean + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: true + TLSResumedKey = attribute.Key("tls.resumed") + + // TLSServerCertificateKey is the attribute Key conforming to the + // "tls.server.certificate" semantic conventions. It represents the PEM-encoded + // stand-alone certificate offered by the server. This is usually + // mutually-exclusive of `server.certificate_chain` since this value also exists + // in that list. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "MII..." 
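// Editor's note: illustrative, standalone sketch; NOT part of the vendored
// file. It records a few tls.* attributes defined in this namespace on a span
// after a handshake; the import version and the values (from the doc-comment
// examples) are assumptions.
package example

import (
	semconv "go.opentelemetry.io/otel/semconv/v1.30.0" // exact version assumed
	"go.opentelemetry.io/otel/trace"
)

func setTLSAttrs(span trace.Span) {
	span.SetAttributes(
		semconv.TLSEstablishedKey.Bool(true),
		semconv.TLSCipher("TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256"),
		semconv.TLSProtocolVersionKey.String("1.2"),
	)
}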
+ TLSServerCertificateKey = attribute.Key("tls.server.certificate") + + // TLSServerCertificateChainKey is the attribute Key conforming to the + // "tls.server.certificate_chain" semantic conventions. It represents the array + // of PEM-encoded certificates that make up the certificate chain offered by the + // server. This is usually mutually-exclusive of `server.certificate` since that + // value should be the first certificate in the chain. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "MII...", "MI..." + TLSServerCertificateChainKey = attribute.Key("tls.server.certificate_chain") + + // TLSServerHashMd5Key is the attribute Key conforming to the + // "tls.server.hash.md5" semantic conventions. It represents the certificate + // fingerprint using the MD5 digest of DER-encoded version of certificate + // offered by the server. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0F76C7F2C55BFD7D8E8B8F4BFBF0C9EC" + TLSServerHashMd5Key = attribute.Key("tls.server.hash.md5") + + // TLSServerHashSha1Key is the attribute Key conforming to the + // "tls.server.hash.sha1" semantic conventions. It represents the certificate + // fingerprint using the SHA1 digest of DER-encoded version of certificate + // offered by the server. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "9E393D93138888D288266C2D915214D1D1CCEB2A" + TLSServerHashSha1Key = attribute.Key("tls.server.hash.sha1") + + // TLSServerHashSha256Key is the attribute Key conforming to the + // "tls.server.hash.sha256" semantic conventions. It represents the certificate + // fingerprint using the SHA256 digest of DER-encoded version of certificate + // offered by the server. For consistency with other hash values, this value + // should be formatted as an uppercase hash. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "0687F666A054EF17A08E2F2162EAB4CBC0D265E1D7875BE74BF3C712CA92DAF0" + TLSServerHashSha256Key = attribute.Key("tls.server.hash.sha256") + + // TLSServerIssuerKey is the attribute Key conforming to the "tls.server.issuer" + // semantic conventions. It represents the distinguished name of [subject] of + // the issuer of the x.509 certificate presented by the client. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "CN=Example Root CA, OU=Infrastructure Team, DC=example, DC=com" + // + // [subject]: https://datatracker.ietf.org/doc/html/rfc5280#section-4.1.2.6 + TLSServerIssuerKey = attribute.Key("tls.server.issuer") + + // TLSServerJa3sKey is the attribute Key conforming to the "tls.server.ja3s" + // semantic conventions. It represents a hash that identifies servers based on + // how they perform an SSL/TLS handshake. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "d4e5b18d6b55c71272893221c96ba240" + TLSServerJa3sKey = attribute.Key("tls.server.ja3s") + + // TLSServerNotAfterKey is the attribute Key conforming to the + // "tls.server.not_after" semantic conventions. It represents the date/Time + // indicating when server certificate is no longer considered valid. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "2021-01-01T00:00:00.000Z" + TLSServerNotAfterKey = attribute.Key("tls.server.not_after") + + // TLSServerNotBeforeKey is the attribute Key conforming to the + // "tls.server.not_before" semantic conventions. It represents the date/Time + // indicating when server certificate is first considered valid. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "1970-01-01T00:00:00.000Z" + TLSServerNotBeforeKey = attribute.Key("tls.server.not_before") + + // TLSServerSubjectKey is the attribute Key conforming to the + // "tls.server.subject" semantic conventions. It represents the distinguished + // name of subject of the x.509 certificate presented by the server. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "CN=myserver, OU=Documentation Team, DC=example, DC=com" + TLSServerSubjectKey = attribute.Key("tls.server.subject") +) + +// TLSCipher returns an attribute KeyValue conforming to the "tls.cipher" +// semantic conventions. It represents the string indicating the [cipher] used +// during the current connection. +// +// [cipher]: https://datatracker.ietf.org/doc/html/rfc5246#appendix-A.5 +func TLSCipher(val string) attribute.KeyValue { + return TLSCipherKey.String(val) +} + +// TLSClientCertificate returns an attribute KeyValue conforming to the +// "tls.client.certificate" semantic conventions. It represents the PEM-encoded +// stand-alone certificate offered by the client. This is usually +// mutually-exclusive of `client.certificate_chain` since this value also exists +// in that list. +func TLSClientCertificate(val string) attribute.KeyValue { + return TLSClientCertificateKey.String(val) +} + +// TLSClientCertificateChain returns an attribute KeyValue conforming to the +// "tls.client.certificate_chain" semantic conventions. It represents the array +// of PEM-encoded certificates that make up the certificate chain offered by the +// client. This is usually mutually-exclusive of `client.certificate` since that +// value should be the first certificate in the chain. +func TLSClientCertificateChain(val ...string) attribute.KeyValue { + return TLSClientCertificateChainKey.StringSlice(val) +} + +// TLSClientHashMd5 returns an attribute KeyValue conforming to the +// "tls.client.hash.md5" semantic conventions. It represents the certificate +// fingerprint using the MD5 digest of DER-encoded version of certificate offered +// by the client. For consistency with other hash values, this value should be +// formatted as an uppercase hash. +func TLSClientHashMd5(val string) attribute.KeyValue { + return TLSClientHashMd5Key.String(val) +} + +// TLSClientHashSha1 returns an attribute KeyValue conforming to the +// "tls.client.hash.sha1" semantic conventions. It represents the certificate +// fingerprint using the SHA1 digest of DER-encoded version of certificate +// offered by the client. For consistency with other hash values, this value +// should be formatted as an uppercase hash. +func TLSClientHashSha1(val string) attribute.KeyValue { + return TLSClientHashSha1Key.String(val) +} + +// TLSClientHashSha256 returns an attribute KeyValue conforming to the +// "tls.client.hash.sha256" semantic conventions. It represents the certificate +// fingerprint using the SHA256 digest of DER-encoded version of certificate +// offered by the client. 
For consistency with other hash values, this value +// should be formatted as an uppercase hash. +func TLSClientHashSha256(val string) attribute.KeyValue { + return TLSClientHashSha256Key.String(val) +} + +// TLSClientIssuer returns an attribute KeyValue conforming to the +// "tls.client.issuer" semantic conventions. It represents the distinguished name +// of [subject] of the issuer of the x.509 certificate presented by the client. +// +// [subject]: https://datatracker.ietf.org/doc/html/rfc5280#section-4.1.2.6 +func TLSClientIssuer(val string) attribute.KeyValue { + return TLSClientIssuerKey.String(val) +} + +// TLSClientJa3 returns an attribute KeyValue conforming to the "tls.client.ja3" +// semantic conventions. It represents a hash that identifies clients based on +// how they perform an SSL/TLS handshake. +func TLSClientJa3(val string) attribute.KeyValue { + return TLSClientJa3Key.String(val) +} + +// TLSClientNotAfter returns an attribute KeyValue conforming to the +// "tls.client.not_after" semantic conventions. It represents the date/Time +// indicating when client certificate is no longer considered valid. +func TLSClientNotAfter(val string) attribute.KeyValue { + return TLSClientNotAfterKey.String(val) +} + +// TLSClientNotBefore returns an attribute KeyValue conforming to the +// "tls.client.not_before" semantic conventions. It represents the date/Time +// indicating when client certificate is first considered valid. +func TLSClientNotBefore(val string) attribute.KeyValue { + return TLSClientNotBeforeKey.String(val) +} + +// TLSClientSubject returns an attribute KeyValue conforming to the +// "tls.client.subject" semantic conventions. It represents the distinguished +// name of subject of the x.509 certificate presented by the client. +func TLSClientSubject(val string) attribute.KeyValue { + return TLSClientSubjectKey.String(val) +} + +// TLSClientSupportedCiphers returns an attribute KeyValue conforming to the +// "tls.client.supported_ciphers" semantic conventions. It represents the array +// of ciphers offered by the client during the client hello. +func TLSClientSupportedCiphers(val ...string) attribute.KeyValue { + return TLSClientSupportedCiphersKey.StringSlice(val) +} + +// TLSCurve returns an attribute KeyValue conforming to the "tls.curve" semantic +// conventions. It represents the string indicating the curve used for the given +// cipher, when applicable. +func TLSCurve(val string) attribute.KeyValue { + return TLSCurveKey.String(val) +} + +// TLSEstablished returns an attribute KeyValue conforming to the +// "tls.established" semantic conventions. It represents the boolean flag +// indicating if the TLS negotiation was successful and transitioned to an +// encrypted tunnel. +func TLSEstablished(val bool) attribute.KeyValue { + return TLSEstablishedKey.Bool(val) +} + +// TLSNextProtocol returns an attribute KeyValue conforming to the +// "tls.next_protocol" semantic conventions. It represents the string indicating +// the protocol being tunneled. Per the values in the [IANA registry], this +// string should be lower case. +// +// [IANA registry]: https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids +func TLSNextProtocol(val string) attribute.KeyValue { + return TLSNextProtocolKey.String(val) +} + +// TLSProtocolVersion returns an attribute KeyValue conforming to the +// "tls.protocol.version" semantic conventions. 
It represents the numeric part of +// the version parsed from the original string of the negotiated +// [SSL/TLS protocol version]. +// +// [SSL/TLS protocol version]: https://docs.openssl.org/1.1.1/man3/SSL_get_version/#return-values +func TLSProtocolVersion(val string) attribute.KeyValue { + return TLSProtocolVersionKey.String(val) +} + +// TLSResumed returns an attribute KeyValue conforming to the "tls.resumed" +// semantic conventions. It represents the boolean flag indicating if this TLS +// connection was resumed from an existing TLS negotiation. +func TLSResumed(val bool) attribute.KeyValue { + return TLSResumedKey.Bool(val) +} + +// TLSServerCertificate returns an attribute KeyValue conforming to the +// "tls.server.certificate" semantic conventions. It represents the PEM-encoded +// stand-alone certificate offered by the server. This is usually +// mutually-exclusive of `server.certificate_chain` since this value also exists +// in that list. +func TLSServerCertificate(val string) attribute.KeyValue { + return TLSServerCertificateKey.String(val) +} + +// TLSServerCertificateChain returns an attribute KeyValue conforming to the +// "tls.server.certificate_chain" semantic conventions. It represents the array +// of PEM-encoded certificates that make up the certificate chain offered by the +// server. This is usually mutually-exclusive of `server.certificate` since that +// value should be the first certificate in the chain. +func TLSServerCertificateChain(val ...string) attribute.KeyValue { + return TLSServerCertificateChainKey.StringSlice(val) +} + +// TLSServerHashMd5 returns an attribute KeyValue conforming to the +// "tls.server.hash.md5" semantic conventions. It represents the certificate +// fingerprint using the MD5 digest of DER-encoded version of certificate offered +// by the server. For consistency with other hash values, this value should be +// formatted as an uppercase hash. +func TLSServerHashMd5(val string) attribute.KeyValue { + return TLSServerHashMd5Key.String(val) +} + +// TLSServerHashSha1 returns an attribute KeyValue conforming to the +// "tls.server.hash.sha1" semantic conventions. It represents the certificate +// fingerprint using the SHA1 digest of DER-encoded version of certificate +// offered by the server. For consistency with other hash values, this value +// should be formatted as an uppercase hash. +func TLSServerHashSha1(val string) attribute.KeyValue { + return TLSServerHashSha1Key.String(val) +} + +// TLSServerHashSha256 returns an attribute KeyValue conforming to the +// "tls.server.hash.sha256" semantic conventions. It represents the certificate +// fingerprint using the SHA256 digest of DER-encoded version of certificate +// offered by the server. For consistency with other hash values, this value +// should be formatted as an uppercase hash. +func TLSServerHashSha256(val string) attribute.KeyValue { + return TLSServerHashSha256Key.String(val) +} + +// TLSServerIssuer returns an attribute KeyValue conforming to the +// "tls.server.issuer" semantic conventions. It represents the distinguished name +// of [subject] of the issuer of the x.509 certificate presented by the client. +// +// [subject]: https://datatracker.ietf.org/doc/html/rfc5280#section-4.1.2.6 +func TLSServerIssuer(val string) attribute.KeyValue { + return TLSServerIssuerKey.String(val) +} + +// TLSServerJa3s returns an attribute KeyValue conforming to the +// "tls.server.ja3s" semantic conventions. 
It represents a hash that identifies +// servers based on how they perform an SSL/TLS handshake. +func TLSServerJa3s(val string) attribute.KeyValue { + return TLSServerJa3sKey.String(val) +} + +// TLSServerNotAfter returns an attribute KeyValue conforming to the +// "tls.server.not_after" semantic conventions. It represents the date/Time +// indicating when server certificate is no longer considered valid. +func TLSServerNotAfter(val string) attribute.KeyValue { + return TLSServerNotAfterKey.String(val) +} + +// TLSServerNotBefore returns an attribute KeyValue conforming to the +// "tls.server.not_before" semantic conventions. It represents the date/Time +// indicating when server certificate is first considered valid. +func TLSServerNotBefore(val string) attribute.KeyValue { + return TLSServerNotBeforeKey.String(val) +} + +// TLSServerSubject returns an attribute KeyValue conforming to the +// "tls.server.subject" semantic conventions. It represents the distinguished +// name of subject of the x.509 certificate presented by the server. +func TLSServerSubject(val string) attribute.KeyValue { + return TLSServerSubjectKey.String(val) +} + +// Enum values for tls.protocol.name +var ( + // ssl + // Stability: development + TLSProtocolNameSsl = TLSProtocolNameKey.String("ssl") + // tls + // Stability: development + TLSProtocolNameTLS = TLSProtocolNameKey.String("tls") +) + +// Namespace: url +const ( + // URLDomainKey is the attribute Key conforming to the "url.domain" semantic + // conventions. It represents the domain extracted from the `url.full`, such as + // "opentelemetry.io". + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "www.foo.bar", "opentelemetry.io", "3.12.167.2", + // "[1080:0:0:0:8:800:200C:417A]" + // Note: In some cases a URL may refer to an IP and/or port directly, without a + // domain name. In this case, the IP address would go to the domain field. If + // the URL contains a [literal IPv6 address] enclosed by `[` and `]`, the `[` + // and `]` characters should also be captured in the domain field. + // + // [literal IPv6 address]: https://www.rfc-editor.org/rfc/rfc2732#section-2 + URLDomainKey = attribute.Key("url.domain") + + // URLExtensionKey is the attribute Key conforming to the "url.extension" + // semantic conventions. It represents the file extension extracted from the + // `url.full`, excluding the leading dot. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "png", "gz" + // Note: The file extension is only set if it exists, as not every url has a + // file extension. When the file name has multiple extensions `example.tar.gz`, + // only the last one should be captured `gz`, not `tar.gz`. + URLExtensionKey = attribute.Key("url.extension") + + // URLFragmentKey is the attribute Key conforming to the "url.fragment" semantic + // conventions. It represents the [URI fragment] component. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "SemConv" + // + // [URI fragment]: https://www.rfc-editor.org/rfc/rfc3986#section-3.5 + URLFragmentKey = attribute.Key("url.fragment") + + // URLFullKey is the attribute Key conforming to the "url.full" semantic + // conventions. It represents the absolute URL describing a network resource + // according to [RFC3986]. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "https://www.foo.bar/search?q=OpenTelemetry#SemConv", "//localhost" + // Note: For network calls, URL usually has + // `scheme://host[:port][path][?query][#fragment]` format, where the fragment + // is not transmitted over HTTP, but if it is known, it SHOULD be included + // nevertheless. + // + // `url.full` MUST NOT contain credentials passed via URL in form of + // `https://username:password@www.example.com/`. + // In such case username and password SHOULD be redacted and attribute's value + // SHOULD be `https://REDACTED:REDACTED@www.example.com/`. + // + // `url.full` SHOULD capture the absolute URL when it is available (or can be + // reconstructed). + // + // Sensitive content provided in `url.full` SHOULD be scrubbed when + // instrumentations can identify it. + // + // + // Query string values for the following keys SHOULD be redacted by default and + // replaced by the + // value `REDACTED`: + // + // - [`AWSAccessKeyId`] + // - [`Signature`] + // - [`sig`] + // - [`X-Goog-Signature`] + // + // This list is subject to change over time. + // + // When a query string value is redacted, the query string key SHOULD still be + // preserved, e.g. + // `https://www.example.com/path?color=blue&sig=REDACTED`. + // + // [RFC3986]: https://www.rfc-editor.org/rfc/rfc3986 + // [`AWSAccessKeyId`]: https://docs.aws.amazon.com/AmazonS3/latest/userguide/RESTAuthentication.html#RESTAuthenticationQueryStringAuth + // [`Signature`]: https://docs.aws.amazon.com/AmazonS3/latest/userguide/RESTAuthentication.html#RESTAuthenticationQueryStringAuth + // [`sig`]: https://learn.microsoft.com/azure/storage/common/storage-sas-overview#sas-token + // [`X-Goog-Signature`]: https://cloud.google.com/storage/docs/access-control/signed-urls + URLFullKey = attribute.Key("url.full") + + // URLOriginalKey is the attribute Key conforming to the "url.original" semantic + // conventions. It represents the unmodified original URL as seen in the event + // source. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "https://www.foo.bar/search?q=OpenTelemetry#SemConv", + // "search?q=OpenTelemetry" + // Note: In network monitoring, the observed URL may be a full URL, whereas in + // access logs, the URL is often just represented as a path. This field is meant + // to represent the URL as it was observed, complete or not. + // `url.original` might contain credentials passed via URL in form of + // `https://username:password@www.example.com/`. In such case password and + // username SHOULD NOT be redacted and attribute's value SHOULD remain the same. + URLOriginalKey = attribute.Key("url.original") + + // URLPathKey is the attribute Key conforming to the "url.path" semantic + // conventions. It represents the [URI path] component. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "/search" + // Note: Sensitive content provided in `url.path` SHOULD be scrubbed when + // instrumentations can identify it. + // + // [URI path]: https://www.rfc-editor.org/rfc/rfc3986#section-3.3 + URLPathKey = attribute.Key("url.path") + + // URLPortKey is the attribute Key conforming to the "url.port" semantic + // conventions. It represents the port extracted from the `url.full`. 
+ // + // Type: int + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: 443 + URLPortKey = attribute.Key("url.port") + + // URLQueryKey is the attribute Key conforming to the "url.query" semantic + // conventions. It represents the [URI query] component. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "q=OpenTelemetry" + // Note: Sensitive content provided in `url.query` SHOULD be scrubbed when + // instrumentations can identify it. + // + // + // Query string values for the following keys SHOULD be redacted by default and + // replaced by the value `REDACTED`: + // + // - [`AWSAccessKeyId`] + // - [`Signature`] + // - [`sig`] + // - [`X-Goog-Signature`] + // + // This list is subject to change over time. + // + // When a query string value is redacted, the query string key SHOULD still be + // preserved, e.g. + // `q=OpenTelemetry&sig=REDACTED`. + // + // [URI query]: https://www.rfc-editor.org/rfc/rfc3986#section-3.4 + // [`AWSAccessKeyId`]: https://docs.aws.amazon.com/AmazonS3/latest/userguide/RESTAuthentication.html#RESTAuthenticationQueryStringAuth + // [`Signature`]: https://docs.aws.amazon.com/AmazonS3/latest/userguide/RESTAuthentication.html#RESTAuthenticationQueryStringAuth + // [`sig`]: https://learn.microsoft.com/azure/storage/common/storage-sas-overview#sas-token + // [`X-Goog-Signature`]: https://cloud.google.com/storage/docs/access-control/signed-urls + URLQueryKey = attribute.Key("url.query") + + // URLRegisteredDomainKey is the attribute Key conforming to the + // "url.registered_domain" semantic conventions. It represents the highest + // registered url domain, stripped of the subdomain. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "example.com", "foo.co.uk" + // Note: This value can be determined precisely with the [public suffix list]. + // For example, the registered domain for `foo.example.com` is `example.com`. + // Trying to approximate this by simply taking the last two labels will not work + // well for TLDs such as `co.uk`. + // + // [public suffix list]: https://publicsuffix.org/ + URLRegisteredDomainKey = attribute.Key("url.registered_domain") + + // URLSchemeKey is the attribute Key conforming to the "url.scheme" semantic + // conventions. It represents the [URI scheme] component identifying the used + // protocol. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "https", "ftp", "telnet" + // + // [URI scheme]: https://www.rfc-editor.org/rfc/rfc3986#section-3.1 + URLSchemeKey = attribute.Key("url.scheme") + + // URLSubdomainKey is the attribute Key conforming to the "url.subdomain" + // semantic conventions. It represents the subdomain portion of a fully + // qualified domain name includes all of the names except the host name under + // the registered_domain. In a partially qualified domain, or if the + // qualification level of the full name cannot be determined, subdomain contains + // all of the names below the registered domain. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "east", "sub2.sub1" + // Note: The subdomain portion of `www.east.mydomain.co.uk` is `east`. If the + // domain has multiple levels of subdomain, such as `sub2.sub1.example.com`, the + // subdomain field should contain `sub2.sub1`, with no trailing period. 
+ URLSubdomainKey = attribute.Key("url.subdomain") + + // URLTemplateKey is the attribute Key conforming to the "url.template" semantic + // conventions. It represents the low-cardinality template of an + // [absolute path reference]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "/users/{id}", "/users/:id", "/users?id={id}" + // + // [absolute path reference]: https://www.rfc-editor.org/rfc/rfc3986#section-4.2 + URLTemplateKey = attribute.Key("url.template") + + // URLTopLevelDomainKey is the attribute Key conforming to the + // "url.top_level_domain" semantic conventions. It represents the effective top + // level domain (eTLD), also known as the domain suffix, is the last part of the + // domain name. For example, the top level domain for example.com is `com`. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "com", "co.uk" + // Note: This value can be determined precisely with the [public suffix list]. + // + // [public suffix list]: https://publicsuffix.org/ + URLTopLevelDomainKey = attribute.Key("url.top_level_domain") +) + +// URLDomain returns an attribute KeyValue conforming to the "url.domain" +// semantic conventions. It represents the domain extracted from the `url.full`, +// such as "opentelemetry.io". +func URLDomain(val string) attribute.KeyValue { + return URLDomainKey.String(val) +} + +// URLExtension returns an attribute KeyValue conforming to the "url.extension" +// semantic conventions. It represents the file extension extracted from the +// `url.full`, excluding the leading dot. +func URLExtension(val string) attribute.KeyValue { + return URLExtensionKey.String(val) +} + +// URLFragment returns an attribute KeyValue conforming to the "url.fragment" +// semantic conventions. It represents the [URI fragment] component. +// +// [URI fragment]: https://www.rfc-editor.org/rfc/rfc3986#section-3.5 +func URLFragment(val string) attribute.KeyValue { + return URLFragmentKey.String(val) +} + +// URLFull returns an attribute KeyValue conforming to the "url.full" semantic +// conventions. It represents the absolute URL describing a network resource +// according to [RFC3986]. +// +// [RFC3986]: https://www.rfc-editor.org/rfc/rfc3986 +func URLFull(val string) attribute.KeyValue { + return URLFullKey.String(val) +} + +// URLOriginal returns an attribute KeyValue conforming to the "url.original" +// semantic conventions. It represents the unmodified original URL as seen in the +// event source. +func URLOriginal(val string) attribute.KeyValue { + return URLOriginalKey.String(val) +} + +// URLPath returns an attribute KeyValue conforming to the "url.path" semantic +// conventions. It represents the [URI path] component. +// +// [URI path]: https://www.rfc-editor.org/rfc/rfc3986#section-3.3 +func URLPath(val string) attribute.KeyValue { + return URLPathKey.String(val) +} + +// URLPort returns an attribute KeyValue conforming to the "url.port" semantic +// conventions. It represents the port extracted from the `url.full`. +func URLPort(val int) attribute.KeyValue { + return URLPortKey.Int(val) +} + +// URLQuery returns an attribute KeyValue conforming to the "url.query" semantic +// conventions. It represents the [URI query] component. 
+// +// [URI query]: https://www.rfc-editor.org/rfc/rfc3986#section-3.4 +func URLQuery(val string) attribute.KeyValue { + return URLQueryKey.String(val) +} + +// URLRegisteredDomain returns an attribute KeyValue conforming to the +// "url.registered_domain" semantic conventions. It represents the highest +// registered url domain, stripped of the subdomain. +func URLRegisteredDomain(val string) attribute.KeyValue { + return URLRegisteredDomainKey.String(val) +} + +// URLScheme returns an attribute KeyValue conforming to the "url.scheme" +// semantic conventions. It represents the [URI scheme] component identifying the +// used protocol. +// +// [URI scheme]: https://www.rfc-editor.org/rfc/rfc3986#section-3.1 +func URLScheme(val string) attribute.KeyValue { + return URLSchemeKey.String(val) +} + +// URLSubdomain returns an attribute KeyValue conforming to the "url.subdomain" +// semantic conventions. It represents the subdomain portion of a fully qualified +// domain name includes all of the names except the host name under the +// registered_domain. In a partially qualified domain, or if the qualification +// level of the full name cannot be determined, subdomain contains all of the +// names below the registered domain. +func URLSubdomain(val string) attribute.KeyValue { + return URLSubdomainKey.String(val) +} + +// URLTemplate returns an attribute KeyValue conforming to the "url.template" +// semantic conventions. It represents the low-cardinality template of an +// [absolute path reference]. +// +// [absolute path reference]: https://www.rfc-editor.org/rfc/rfc3986#section-4.2 +func URLTemplate(val string) attribute.KeyValue { + return URLTemplateKey.String(val) +} + +// URLTopLevelDomain returns an attribute KeyValue conforming to the +// "url.top_level_domain" semantic conventions. It represents the effective top +// level domain (eTLD), also known as the domain suffix, is the last part of the +// domain name. For example, the top level domain for example.com is `com`. +func URLTopLevelDomain(val string) attribute.KeyValue { + return URLTopLevelDomainKey.String(val) +} + +// Namespace: user +const ( + // UserEmailKey is the attribute Key conforming to the "user.email" semantic + // conventions. It represents the user email address. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "a.einstein@example.com" + UserEmailKey = attribute.Key("user.email") + + // UserFullNameKey is the attribute Key conforming to the "user.full_name" + // semantic conventions. It represents the user's full name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Albert Einstein" + UserFullNameKey = attribute.Key("user.full_name") + + // UserHashKey is the attribute Key conforming to the "user.hash" semantic + // conventions. It represents the unique user hash to correlate information for + // a user in anonymized form. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "364fc68eaf4c8acec74a4e52d7d1feaa" + // Note: Useful if `user.id` or `user.name` contain confidential information and + // cannot be used. + UserHashKey = attribute.Key("user.hash") + + // UserIDKey is the attribute Key conforming to the "user.id" semantic + // conventions. It represents the unique identifier of the user. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "S-1-5-21-202424912787-2692429404-2351956786-1000" + UserIDKey = attribute.Key("user.id") + + // UserNameKey is the attribute Key conforming to the "user.name" semantic + // conventions. It represents the short name or login/username of the user. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "a.einstein" + UserNameKey = attribute.Key("user.name") + + // UserRolesKey is the attribute Key conforming to the "user.roles" semantic + // conventions. It represents the array of user roles at the time of the event. + // + // Type: string[] + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "admin", "reporting_user" + UserRolesKey = attribute.Key("user.roles") +) + +// UserEmail returns an attribute KeyValue conforming to the "user.email" +// semantic conventions. It represents the user email address. +func UserEmail(val string) attribute.KeyValue { + return UserEmailKey.String(val) +} + +// UserFullName returns an attribute KeyValue conforming to the "user.full_name" +// semantic conventions. It represents the user's full name. +func UserFullName(val string) attribute.KeyValue { + return UserFullNameKey.String(val) +} + +// UserHash returns an attribute KeyValue conforming to the "user.hash" semantic +// conventions. It represents the unique user hash to correlate information for a +// user in anonymized form. +func UserHash(val string) attribute.KeyValue { + return UserHashKey.String(val) +} + +// UserID returns an attribute KeyValue conforming to the "user.id" semantic +// conventions. It represents the unique identifier of the user. +func UserID(val string) attribute.KeyValue { + return UserIDKey.String(val) +} + +// UserName returns an attribute KeyValue conforming to the "user.name" semantic +// conventions. It represents the short name or login/username of the user. +func UserName(val string) attribute.KeyValue { + return UserNameKey.String(val) +} + +// UserRoles returns an attribute KeyValue conforming to the "user.roles" +// semantic conventions. It represents the array of user roles at the time of the +// event. +func UserRoles(val ...string) attribute.KeyValue { + return UserRolesKey.StringSlice(val) +} + +// Namespace: user_agent +const ( + // UserAgentNameKey is the attribute Key conforming to the "user_agent.name" + // semantic conventions. It represents the name of the user-agent extracted from + // original. Usually refers to the browser's name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Safari", "YourApp" + // Note: [Example] of extracting browser's name from original string. In the + // case of using a user-agent for non-browser products, such as microservices + // with multiple names/versions inside the `user_agent.original`, the most + // significant name SHOULD be selected. In such a scenario it should align with + // `user_agent.version` + // + // [Example]: https://www.whatsmyua.info + UserAgentNameKey = attribute.Key("user_agent.name") + + // UserAgentOriginalKey is the attribute Key conforming to the + // "user_agent.original" semantic conventions. It represents the value of the + // [HTTP User-Agent] header sent by the client. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Stable + // + // Examples: "CERN-LineMode/2.15 libwww/2.17b3", "Mozilla/5.0 (iPhone; CPU + // iPhone OS 14_7_1 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) + // Version/14.1.2 Mobile/15E148 Safari/604.1", "YourApp/1.0.0 + // grpc-java-okhttp/1.27.2" + // + // [HTTP User-Agent]: https://www.rfc-editor.org/rfc/rfc9110.html#field.user-agent + UserAgentOriginalKey = attribute.Key("user_agent.original") + + // UserAgentOSNameKey is the attribute Key conforming to the + // "user_agent.os.name" semantic conventions. It represents the human readable + // operating system name. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "iOS", "Android", "Ubuntu" + // Note: For mapping user agent strings to OS names, libraries such as + // [ua-parser] can be utilized. + // + // [ua-parser]: https://github.com/ua-parser + UserAgentOSNameKey = attribute.Key("user_agent.os.name") + + // UserAgentOSVersionKey is the attribute Key conforming to the + // "user_agent.os.version" semantic conventions. It represents the version + // string of the operating system as defined in [Version Attributes]. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "14.2.1", "18.04.1" + // Note: For mapping user agent strings to OS versions, libraries such as + // [ua-parser] can be utilized. + // + // [Version Attributes]: /docs/resource/README.md#version-attributes + // [ua-parser]: https://github.com/ua-parser + UserAgentOSVersionKey = attribute.Key("user_agent.os.version") + + // UserAgentSyntheticTypeKey is the attribute Key conforming to the + // "user_agent.synthetic.type" semantic conventions. It represents the specifies + // the category of synthetic traffic, such as tests or bots. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // Note: This attribute MAY be derived from the contents of the + // `user_agent.original` attribute. Components that populate the attribute are + // responsible for determining what they consider to be synthetic bot or test + // traffic. This attribute can either be set for self-identification purposes, + // or on telemetry detected to be generated as a result of a synthetic request. + // This attribute is useful for distinguishing between genuine client traffic + // and synthetic traffic generated by bots or tests. + UserAgentSyntheticTypeKey = attribute.Key("user_agent.synthetic.type") + + // UserAgentVersionKey is the attribute Key conforming to the + // "user_agent.version" semantic conventions. It represents the version of the + // user-agent extracted from original. Usually refers to the browser's version. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "14.1.2", "1.0.0" + // Note: [Example] of extracting browser's version from original string. In the + // case of using a user-agent for non-browser products, such as microservices + // with multiple names/versions inside the `user_agent.original`, the most + // significant version SHOULD be selected. In such a scenario it should align + // with `user_agent.name` + // + // [Example]: https://www.whatsmyua.info + UserAgentVersionKey = attribute.Key("user_agent.version") +) + +// UserAgentName returns an attribute KeyValue conforming to the +// "user_agent.name" semantic conventions. 
It represents the name of the +// user-agent extracted from original. Usually refers to the browser's name. +func UserAgentName(val string) attribute.KeyValue { + return UserAgentNameKey.String(val) +} + +// UserAgentOriginal returns an attribute KeyValue conforming to the +// "user_agent.original" semantic conventions. It represents the value of the +// [HTTP User-Agent] header sent by the client. +// +// [HTTP User-Agent]: https://www.rfc-editor.org/rfc/rfc9110.html#field.user-agent +func UserAgentOriginal(val string) attribute.KeyValue { + return UserAgentOriginalKey.String(val) +} + +// UserAgentOSName returns an attribute KeyValue conforming to the +// "user_agent.os.name" semantic conventions. It represents the human readable +// operating system name. +func UserAgentOSName(val string) attribute.KeyValue { + return UserAgentOSNameKey.String(val) +} + +// UserAgentOSVersion returns an attribute KeyValue conforming to the +// "user_agent.os.version" semantic conventions. It represents the version string +// of the operating system as defined in [Version Attributes]. +// +// [Version Attributes]: /docs/resource/README.md#version-attributes +func UserAgentOSVersion(val string) attribute.KeyValue { + return UserAgentOSVersionKey.String(val) +} + +// UserAgentVersion returns an attribute KeyValue conforming to the +// "user_agent.version" semantic conventions. It represents the version of the +// user-agent extracted from original. Usually refers to the browser's version. +func UserAgentVersion(val string) attribute.KeyValue { + return UserAgentVersionKey.String(val) +} + +// Enum values for user_agent.synthetic.type +var ( + // Bot source. + // Stability: development + UserAgentSyntheticTypeBot = UserAgentSyntheticTypeKey.String("bot") + // Synthetic test source. + // Stability: development + UserAgentSyntheticTypeTest = UserAgentSyntheticTypeKey.String("test") +) + +// Namespace: vcs +const ( + // VCSChangeIDKey is the attribute Key conforming to the "vcs.change.id" + // semantic conventions. It represents the ID of the change (pull request/merge + // request/changelist) if applicable. This is usually a unique (within + // repository) identifier generated by the VCS system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "123" + VCSChangeIDKey = attribute.Key("vcs.change.id") + + // VCSChangeStateKey is the attribute Key conforming to the "vcs.change.state" + // semantic conventions. It represents the state of the change (pull + // request/merge request/changelist). + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "open", "closed", "merged" + VCSChangeStateKey = attribute.Key("vcs.change.state") + + // VCSChangeTitleKey is the attribute Key conforming to the "vcs.change.title" + // semantic conventions. It represents the human readable title of the change + // (pull request/merge request/changelist). This title is often a brief summary + // of the change and may get merged in to a ref as the commit summary. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "Fixes broken thing", "feat: add my new feature", "[chore] update + // dependency" + VCSChangeTitleKey = attribute.Key("vcs.change.title") + + // VCSLineChangeTypeKey is the attribute Key conforming to the + // "vcs.line_change.type" semantic conventions. It represents the type of line + // change being measured on a branch or change. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "added", "removed" + VCSLineChangeTypeKey = attribute.Key("vcs.line_change.type") + + // VCSOwnerNameKey is the attribute Key conforming to the "vcs.owner.name" + // semantic conventions. It represents the group owner within the version + // control system. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-org", "myteam", "business-unit" + VCSOwnerNameKey = attribute.Key("vcs.owner.name") + + // VCSProviderNameKey is the attribute Key conforming to the "vcs.provider.name" + // semantic conventions. It represents the name of the version control system + // provider. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "github", "gitlab", "gitea", "bitbucket" + VCSProviderNameKey = attribute.Key("vcs.provider.name") + + // VCSRefBaseNameKey is the attribute Key conforming to the "vcs.ref.base.name" + // semantic conventions. It represents the name of the [reference] such as + // **branch** or **tag** in the repository. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-feature-branch", "tag-1-test" + // Note: `base` refers to the starting point of a change. For example, `main` + // would be the base reference of type branch if you've created a new + // reference of type branch from it and created new commits. + // + // [reference]: https://git-scm.com/docs/gitglossary#def_ref + VCSRefBaseNameKey = attribute.Key("vcs.ref.base.name") + + // VCSRefBaseRevisionKey is the attribute Key conforming to the + // "vcs.ref.base.revision" semantic conventions. It represents the revision, + // literally [revised version], The revision most often refers to a commit + // object in Git, or a revision number in SVN. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "9d59409acf479dfa0df1aa568182e43e43df8bbe28d60fcf2bc52e30068802cc", + // "main", "123", "HEAD" + // Note: `base` refers to the starting point of a change. For example, `main` + // would be the base reference of type branch if you've created a new + // reference of type branch from it and created new commits. The + // revision can be a full [hash value (see + // glossary)], + // of the recorded change to a ref within a repository pointing to a + // commit [commit] object. It does + // not necessarily have to be a hash; it can simply define a [revision + // number] + // which is an integer that is monotonically increasing. In cases where + // it is identical to the `ref.base.name`, it SHOULD still be included. + // It is up to the implementer to decide which value to set as the + // revision based on the VCS system and situational context. + // + // [revised version]: https://www.merriam-webster.com/dictionary/revision + // [hash value (see + // glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf + // [commit]: https://git-scm.com/docs/git-commit + // [revision + // number]: https://svnbook.red-bean.com/en/1.7/svn.tour.revs.specifiers.html + VCSRefBaseRevisionKey = attribute.Key("vcs.ref.base.revision") + + // VCSRefBaseTypeKey is the attribute Key conforming to the "vcs.ref.base.type" + // semantic conventions. It represents the type of the [reference] in the + // repository. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "branch", "tag" + // Note: `base` refers to the starting point of a change. For example, `main` + // would be the base reference of type branch if you've created a new + // reference of type branch from it and created new commits. + // + // [reference]: https://git-scm.com/docs/gitglossary#def_ref + VCSRefBaseTypeKey = attribute.Key("vcs.ref.base.type") + + // VCSRefHeadNameKey is the attribute Key conforming to the "vcs.ref.head.name" + // semantic conventions. It represents the name of the [reference] such as + // **branch** or **tag** in the repository. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "my-feature-branch", "tag-1-test" + // Note: `head` refers to where you are right now; the current reference at a + // given time. + // + // [reference]: https://git-scm.com/docs/gitglossary#def_ref + VCSRefHeadNameKey = attribute.Key("vcs.ref.head.name") + + // VCSRefHeadRevisionKey is the attribute Key conforming to the + // "vcs.ref.head.revision" semantic conventions. It represents the revision, + // literally [revised version], The revision most often refers to a commit + // object in Git, or a revision number in SVN. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "9d59409acf479dfa0df1aa568182e43e43df8bbe28d60fcf2bc52e30068802cc", + // "main", "123", "HEAD" + // Note: `head` refers to where you are right now; the current reference at a + // given time.The revision can be a full [hash value (see + // glossary)], + // of the recorded change to a ref within a repository pointing to a + // commit [commit] object. It does + // not necessarily have to be a hash; it can simply define a [revision + // number] + // which is an integer that is monotonically increasing. In cases where + // it is identical to the `ref.head.name`, it SHOULD still be included. + // It is up to the implementer to decide which value to set as the + // revision based on the VCS system and situational context. + // + // [revised version]: https://www.merriam-webster.com/dictionary/revision + // [hash value (see + // glossary)]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-5.pdf + // [commit]: https://git-scm.com/docs/git-commit + // [revision + // number]: https://svnbook.red-bean.com/en/1.7/svn.tour.revs.specifiers.html + VCSRefHeadRevisionKey = attribute.Key("vcs.ref.head.revision") + + // VCSRefHeadTypeKey is the attribute Key conforming to the "vcs.ref.head.type" + // semantic conventions. It represents the type of the [reference] in the + // repository. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "branch", "tag" + // Note: `head` refers to where you are right now; the current reference at a + // given time. + // + // [reference]: https://git-scm.com/docs/gitglossary#def_ref + VCSRefHeadTypeKey = attribute.Key("vcs.ref.head.type") + + // VCSRefTypeKey is the attribute Key conforming to the "vcs.ref.type" semantic + // conventions. It represents the type of the [reference] in the repository. 
+ // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "branch", "tag" + // + // [reference]: https://git-scm.com/docs/gitglossary#def_ref + VCSRefTypeKey = attribute.Key("vcs.ref.type") + + // VCSRepositoryNameKey is the attribute Key conforming to the + // "vcs.repository.name" semantic conventions. It represents the human readable + // name of the repository. It SHOULD NOT include any additional identifier like + // Group/SubGroup in GitLab or organization in GitHub. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "semantic-conventions", "my-cool-repo" + // Note: Due to it only being the name, it can clash with forks of the same + // repository if collecting telemetry across multiple orgs or groups in + // the same backends. + VCSRepositoryNameKey = attribute.Key("vcs.repository.name") + + // VCSRepositoryURLFullKey is the attribute Key conforming to the + // "vcs.repository.url.full" semantic conventions. It represents the + // [canonical URL] of the repository providing the complete HTTP(S) address in + // order to locate and identify the repository through a browser. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: + // "https://github.com/opentelemetry/open-telemetry-collector-contrib", + // "https://gitlab.com/my-org/my-project/my-projects-project/repo" + // Note: In Git Version Control Systems, the canonical URL SHOULD NOT include + // the `.git` extension. + // + // [canonical URL]: https://support.google.com/webmasters/answer/10347851?hl=en#:~:text=A%20canonical%20URL%20is%20the,Google%20chooses%20one%20as%20canonical. + VCSRepositoryURLFullKey = attribute.Key("vcs.repository.url.full") + + // VCSRevisionDeltaDirectionKey is the attribute Key conforming to the + // "vcs.revision_delta.direction" semantic conventions. It represents the type + // of revision comparison. + // + // Type: Enum + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "ahead", "behind" + VCSRevisionDeltaDirectionKey = attribute.Key("vcs.revision_delta.direction") +) + +// VCSChangeID returns an attribute KeyValue conforming to the "vcs.change.id" +// semantic conventions. It represents the ID of the change (pull request/merge +// request/changelist) if applicable. This is usually a unique (within +// repository) identifier generated by the VCS system. +func VCSChangeID(val string) attribute.KeyValue { + return VCSChangeIDKey.String(val) +} + +// VCSChangeTitle returns an attribute KeyValue conforming to the +// "vcs.change.title" semantic conventions. It represents the human readable +// title of the change (pull request/merge request/changelist). This title is +// often a brief summary of the change and may get merged in to a ref as the +// commit summary. +func VCSChangeTitle(val string) attribute.KeyValue { + return VCSChangeTitleKey.String(val) +} + +// VCSOwnerName returns an attribute KeyValue conforming to the "vcs.owner.name" +// semantic conventions. It represents the group owner within the version control +// system. +func VCSOwnerName(val string) attribute.KeyValue { + return VCSOwnerNameKey.String(val) +} + +// VCSRefBaseName returns an attribute KeyValue conforming to the +// "vcs.ref.base.name" semantic conventions. It represents the name of the +// [reference] such as **branch** or **tag** in the repository. 
+// +// [reference]: https://git-scm.com/docs/gitglossary#def_ref +func VCSRefBaseName(val string) attribute.KeyValue { + return VCSRefBaseNameKey.String(val) +} + +// VCSRefBaseRevision returns an attribute KeyValue conforming to the +// "vcs.ref.base.revision" semantic conventions. It represents the revision, +// literally [revised version], The revision most often refers to a commit object +// in Git, or a revision number in SVN. +// +// [revised version]: https://www.merriam-webster.com/dictionary/revision +func VCSRefBaseRevision(val string) attribute.KeyValue { + return VCSRefBaseRevisionKey.String(val) +} + +// VCSRefHeadName returns an attribute KeyValue conforming to the +// "vcs.ref.head.name" semantic conventions. It represents the name of the +// [reference] such as **branch** or **tag** in the repository. +// +// [reference]: https://git-scm.com/docs/gitglossary#def_ref +func VCSRefHeadName(val string) attribute.KeyValue { + return VCSRefHeadNameKey.String(val) +} + +// VCSRefHeadRevision returns an attribute KeyValue conforming to the +// "vcs.ref.head.revision" semantic conventions. It represents the revision, +// literally [revised version], The revision most often refers to a commit object +// in Git, or a revision number in SVN. +// +// [revised version]: https://www.merriam-webster.com/dictionary/revision +func VCSRefHeadRevision(val string) attribute.KeyValue { + return VCSRefHeadRevisionKey.String(val) +} + +// VCSRepositoryName returns an attribute KeyValue conforming to the +// "vcs.repository.name" semantic conventions. It represents the human readable +// name of the repository. It SHOULD NOT include any additional identifier like +// Group/SubGroup in GitLab or organization in GitHub. +func VCSRepositoryName(val string) attribute.KeyValue { + return VCSRepositoryNameKey.String(val) +} + +// VCSRepositoryURLFull returns an attribute KeyValue conforming to the +// "vcs.repository.url.full" semantic conventions. It represents the +// [canonical URL] of the repository providing the complete HTTP(S) address in +// order to locate and identify the repository through a browser. +// +// [canonical URL]: https://support.google.com/webmasters/answer/10347851?hl=en#:~:text=A%20canonical%20URL%20is%20the,Google%20chooses%20one%20as%20canonical. +func VCSRepositoryURLFull(val string) attribute.KeyValue { + return VCSRepositoryURLFullKey.String(val) +} + +// Enum values for vcs.change.state +var ( + // Open means the change is currently active and under review. It hasn't been + // merged into the target branch yet, and it's still possible to make changes or + // add comments. + // Stability: development + VCSChangeStateOpen = VCSChangeStateKey.String("open") + // WIP (work-in-progress, draft) means the change is still in progress and not + // yet ready for a full review. It might still undergo significant changes. + // Stability: development + VCSChangeStateWip = VCSChangeStateKey.String("wip") + // Closed means the merge request has been closed without merging. This can + // happen for various reasons, such as the changes being deemed unnecessary, the + // issue being resolved in another way, or the author deciding to withdraw the + // request. + // Stability: development + VCSChangeStateClosed = VCSChangeStateKey.String("closed") + // Merged indicates that the change has been successfully integrated into the + // target codebase. 
+ // Stability: development + VCSChangeStateMerged = VCSChangeStateKey.String("merged") +) + +// Enum values for vcs.line_change.type +var ( + // How many lines were added. + // Stability: development + VCSLineChangeTypeAdded = VCSLineChangeTypeKey.String("added") + // How many lines were removed. + // Stability: development + VCSLineChangeTypeRemoved = VCSLineChangeTypeKey.String("removed") +) + +// Enum values for vcs.provider.name +var ( + // [GitHub] + // Stability: development + // + // [GitHub]: https://github.com + VCSProviderNameGithub = VCSProviderNameKey.String("github") + // [GitLab] + // Stability: development + // + // [GitLab]: https://gitlab.com + VCSProviderNameGitlab = VCSProviderNameKey.String("gitlab") + // Deprecated: Replaced by `gitea`. + VCSProviderNameGittea = VCSProviderNameKey.String("gittea") + // [Gitea] + // Stability: development + // + // [Gitea]: https://gitea.io + VCSProviderNameGitea = VCSProviderNameKey.String("gitea") + // [Bitbucket] + // Stability: development + // + // [Bitbucket]: https://bitbucket.org + VCSProviderNameBitbucket = VCSProviderNameKey.String("bitbucket") +) + +// Enum values for vcs.ref.base.type +var ( + // [branch] + // Stability: development + // + // [branch]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefbranchabranch + VCSRefBaseTypeBranch = VCSRefBaseTypeKey.String("branch") + // [tag] + // Stability: development + // + // [tag]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddeftagatag + VCSRefBaseTypeTag = VCSRefBaseTypeKey.String("tag") +) + +// Enum values for vcs.ref.head.type +var ( + // [branch] + // Stability: development + // + // [branch]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefbranchabranch + VCSRefHeadTypeBranch = VCSRefHeadTypeKey.String("branch") + // [tag] + // Stability: development + // + // [tag]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddeftagatag + VCSRefHeadTypeTag = VCSRefHeadTypeKey.String("tag") +) + +// Enum values for vcs.ref.type +var ( + // [branch] + // Stability: development + // + // [branch]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefbranchabranch + VCSRefTypeBranch = VCSRefTypeKey.String("branch") + // [tag] + // Stability: development + // + // [tag]: https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddeftagatag + VCSRefTypeTag = VCSRefTypeKey.String("tag") +) + +// Enum values for vcs.revision_delta.direction +var ( + // How many revisions the change is behind the target ref. + // Stability: development + VCSRevisionDeltaDirectionBehind = VCSRevisionDeltaDirectionKey.String("behind") + // How many revisions the change is ahead of the target ref. + // Stability: development + VCSRevisionDeltaDirectionAhead = VCSRevisionDeltaDirectionKey.String("ahead") +) + +// Namespace: webengine +const ( + // WebEngineDescriptionKey is the attribute Key conforming to the + // "webengine.description" semantic conventions. It represents the additional + // description of the web engine (e.g. detailed version and edition + // information). + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "WildFly Full 21.0.0.Final (WildFly Core 13.0.1.Final) - + // 2.2.2.Final" + WebEngineDescriptionKey = attribute.Key("webengine.description") + + // WebEngineNameKey is the attribute Key conforming to the "webengine.name" + // semantic conventions. It represents the name of the web engine. 
+ // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "WildFly" + WebEngineNameKey = attribute.Key("webengine.name") + + // WebEngineVersionKey is the attribute Key conforming to the + // "webengine.version" semantic conventions. It represents the version of the + // web engine. + // + // Type: string + // RequirementLevel: Recommended + // Stability: Development + // + // Examples: "21.0.0" + WebEngineVersionKey = attribute.Key("webengine.version") +) + +// WebEngineDescription returns an attribute KeyValue conforming to the +// "webengine.description" semantic conventions. It represents the additional +// description of the web engine (e.g. detailed version and edition information). +func WebEngineDescription(val string) attribute.KeyValue { + return WebEngineDescriptionKey.String(val) +} + +// WebEngineName returns an attribute KeyValue conforming to the "webengine.name" +// semantic conventions. It represents the name of the web engine. +func WebEngineName(val string) attribute.KeyValue { + return WebEngineNameKey.String(val) +} + +// WebEngineVersion returns an attribute KeyValue conforming to the +// "webengine.version" semantic conventions. It represents the version of the web +// engine. +func WebEngineVersion(val string) attribute.KeyValue { + return WebEngineVersionKey.String(val) +} \ No newline at end of file diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/doc.go b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/doc.go new file mode 100644 index 00000000..2c5c7ebd --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/doc.go @@ -0,0 +1,9 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +// Package semconv implements OpenTelemetry semantic conventions. +// +// OpenTelemetry semantic conventions are agreed standardized naming +// patterns for OpenTelemetry things. This package represents the v1.34.0 +// version of the OpenTelemetry semantic conventions. +package semconv // import "go.opentelemetry.io/otel/semconv/v1.34.0" diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/exception.go b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/exception.go new file mode 100644 index 00000000..88a998f1 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/exception.go @@ -0,0 +1,9 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package semconv // import "go.opentelemetry.io/otel/semconv/v1.34.0" + +const ( + // ExceptionEventName is the name of the Span event representing an exception. + ExceptionEventName = "exception" +) diff --git a/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/schema.go b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/schema.go new file mode 100644 index 00000000..3c23d459 --- /dev/null +++ b/vendor/go.opentelemetry.io/otel/semconv/v1.34.0/schema.go @@ -0,0 +1,9 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package semconv // import "go.opentelemetry.io/otel/semconv/v1.34.0" + +// SchemaURL is the schema URL that matches the version of the semantic conventions +// that this package defines. 
Semconv packages starting from v1.4.0 must declare +// non-empty schema URL in the form https://opentelemetry.io/schemas/ +const SchemaURL = "https://opentelemetry.io/schemas/1.34.0" diff --git a/vendor/go.opentelemetry.io/otel/trace/auto.go b/vendor/go.opentelemetry.io/otel/trace/auto.go index d90af8f6..f3aa3981 100644 --- a/vendor/go.opentelemetry.io/otel/trace/auto.go +++ b/vendor/go.opentelemetry.io/otel/trace/auto.go @@ -20,7 +20,7 @@ import ( "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/codes" - semconv "go.opentelemetry.io/otel/semconv/v1.26.0" + semconv "go.opentelemetry.io/otel/semconv/v1.34.0" "go.opentelemetry.io/otel/trace/embedded" "go.opentelemetry.io/otel/trace/internal/telemetry" ) diff --git a/vendor/go.opentelemetry.io/otel/version.go b/vendor/go.opentelemetry.io/otel/version.go index ac3c0b15..7afe92b5 100644 --- a/vendor/go.opentelemetry.io/otel/version.go +++ b/vendor/go.opentelemetry.io/otel/version.go @@ -5,5 +5,5 @@ package otel // import "go.opentelemetry.io/otel" // Version is the current release version of OpenTelemetry in use. func Version() string { - return "1.36.0" + return "1.37.0" } diff --git a/vendor/go.opentelemetry.io/otel/versions.yaml b/vendor/go.opentelemetry.io/otel/versions.yaml index 79f82f3d..9d4742a1 100644 --- a/vendor/go.opentelemetry.io/otel/versions.yaml +++ b/vendor/go.opentelemetry.io/otel/versions.yaml @@ -3,13 +3,12 @@ module-sets: stable-v1: - version: v1.36.0 + version: v1.37.0 modules: - go.opentelemetry.io/otel - go.opentelemetry.io/otel/bridge/opencensus - go.opentelemetry.io/otel/bridge/opencensus/test - go.opentelemetry.io/otel/bridge/opentracing - - go.opentelemetry.io/otel/bridge/opentracing/test - go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc - go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp - go.opentelemetry.io/otel/exporters/otlp/otlptrace @@ -23,14 +22,16 @@ module-sets: - go.opentelemetry.io/otel/sdk/metric - go.opentelemetry.io/otel/trace experimental-metrics: - version: v0.58.0 + version: v0.59.0 modules: - go.opentelemetry.io/otel/exporters/prometheus experimental-logs: - version: v0.12.0 + version: v0.13.0 modules: - go.opentelemetry.io/otel/log + - go.opentelemetry.io/otel/log/logtest - go.opentelemetry.io/otel/sdk/log + - go.opentelemetry.io/otel/sdk/log/logtest - go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc - go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp - go.opentelemetry.io/otel/exporters/stdout/stdoutlog @@ -40,6 +41,4 @@ module-sets: - go.opentelemetry.io/otel/schema excluded-modules: - go.opentelemetry.io/otel/internal/tools - - go.opentelemetry.io/otel/log/logtest - - go.opentelemetry.io/otel/sdk/log/logtest - go.opentelemetry.io/otel/trace/internal/telemetry/test diff --git a/vendor/golang.org/x/sync/errgroup/errgroup.go b/vendor/golang.org/x/sync/errgroup/errgroup.go index cb6bb9ad..1d8cffae 100644 --- a/vendor/golang.org/x/sync/errgroup/errgroup.go +++ b/vendor/golang.org/x/sync/errgroup/errgroup.go @@ -12,8 +12,6 @@ package errgroup import ( "context" "fmt" - "runtime" - "runtime/debug" "sync" ) @@ -33,10 +31,6 @@ type Group struct { errOnce sync.Once err error - - mu sync.Mutex - panicValue any // = PanicError | PanicValue; non-nil if some Group.Go goroutine panicked. - abnormal bool // some Group.Go goroutine terminated abnormally (panic or goexit). 
} func (g *Group) done() { @@ -56,22 +50,13 @@ func WithContext(ctx context.Context) (*Group, context.Context) { return &Group{cancel: cancel}, ctx } -// Wait blocks until all function calls from the Go method have returned -// normally, then returns the first non-nil error (if any) from them. -// -// If any of the calls panics, Wait panics with a [PanicValue]; -// and if any of them calls [runtime.Goexit], Wait calls runtime.Goexit. +// Wait blocks until all function calls from the Go method have returned, then +// returns the first non-nil error (if any) from them. func (g *Group) Wait() error { g.wg.Wait() if g.cancel != nil { g.cancel(g.err) } - if g.panicValue != nil { - panic(g.panicValue) - } - if g.abnormal { - runtime.Goexit() - } return g.err } @@ -81,53 +66,31 @@ func (g *Group) Wait() error { // It blocks until the new goroutine can be added without the number of // goroutines in the group exceeding the configured limit. // -// The first goroutine in the group that returns a non-nil error, panics, or -// invokes [runtime.Goexit] will cancel the associated Context, if any. +// The first goroutine in the group that returns a non-nil error will +// cancel the associated Context, if any. The error will be returned +// by Wait. func (g *Group) Go(f func() error) { if g.sem != nil { g.sem <- token{} } - g.add(f) -} - -func (g *Group) add(f func() error) { g.wg.Add(1) go func() { defer g.done() - normalReturn := false - defer func() { - if normalReturn { - return - } - v := recover() - g.mu.Lock() - defer g.mu.Unlock() - if !g.abnormal { - if g.cancel != nil { - g.cancel(g.err) - } - g.abnormal = true - } - if v != nil && g.panicValue == nil { - switch v := v.(type) { - case error: - g.panicValue = PanicError{ - Recovered: v, - Stack: debug.Stack(), - } - default: - g.panicValue = PanicValue{ - Recovered: v, - Stack: debug.Stack(), - } - } - } - }() - err := f() - normalReturn = true - if err != nil { + // It is tempting to propagate panics from f() + // up to the goroutine that calls Wait, but + // it creates more problems than it solves: + // - it delays panics arbitrarily, + // making bugs harder to detect; + // - it turns f's panic stack into a mere value, + // hiding it from crash-monitoring tools; + // - it risks deadlocks that hide the panic entirely, + // if f's panic leaves the program in a state + // that prevents the Wait call from being reached. + // See #53757, #74275, #74304, #74306. + + if err := f(); err != nil { g.errOnce.Do(func() { g.err = err if g.cancel != nil { @@ -152,7 +115,19 @@ func (g *Group) TryGo(f func() error) bool { } } - g.add(f) + g.wg.Add(1) + go func() { + defer g.done() + + if err := f(); err != nil { + g.errOnce.Do(func() { + g.err = err + if g.cancel != nil { + g.cancel(g.err) + } + }) + } + }() return true } @@ -174,34 +149,3 @@ func (g *Group) SetLimit(n int) { } g.sem = make(chan token, n) } - -// PanicError wraps an error recovered from an unhandled panic -// when calling a function passed to Go or TryGo. 
-type PanicError struct { - Recovered error - Stack []byte // result of call to [debug.Stack] -} - -func (p PanicError) Error() string { - if len(p.Stack) > 0 { - return fmt.Sprintf("recovered from errgroup.Group: %v\n%s", p.Recovered, p.Stack) - } - return fmt.Sprintf("recovered from errgroup.Group: %v", p.Recovered) -} - -func (p PanicError) Unwrap() error { return p.Recovered } - -// PanicValue wraps a value that does not implement the error interface, -// recovered from an unhandled panic when calling a function passed to Go or -// TryGo. -type PanicValue struct { - Recovered any - Stack []byte // result of call to [debug.Stack] -} - -func (p PanicValue) String() string { - if len(p.Stack) > 0 { - return fmt.Sprintf("recovered from errgroup.Group: %v\n%s", p.Recovered, p.Stack) - } - return fmt.Sprintf("recovered from errgroup.Group: %v", p.Recovered) -} diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux.go b/vendor/golang.org/x/sys/unix/zerrors_linux.go index 4f432bfe..9e7a6c5a 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux.go @@ -319,6 +319,7 @@ const ( AUDIT_INTEGRITY_POLICY_RULE = 0x70f AUDIT_INTEGRITY_RULE = 0x70d AUDIT_INTEGRITY_STATUS = 0x70a + AUDIT_INTEGRITY_USERSPACE = 0x710 AUDIT_IPC = 0x517 AUDIT_IPC_SET_PERM = 0x51f AUDIT_IPE_ACCESS = 0x58c @@ -843,9 +844,9 @@ const ( DM_UUID_FLAG = 0x4000 DM_UUID_LEN = 0x81 DM_VERSION = 0xc138fd00 - DM_VERSION_EXTRA = "-ioctl (2023-03-01)" + DM_VERSION_EXTRA = "-ioctl (2025-01-17)" DM_VERSION_MAJOR = 0x4 - DM_VERSION_MINOR = 0x30 + DM_VERSION_MINOR = 0x31 DM_VERSION_PATCHLEVEL = 0x0 DT_BLK = 0x6 DT_CHR = 0x2 @@ -941,6 +942,8 @@ const ( ETHER_FLOW = 0x12 ETHTOOL_BUSINFO_LEN = 0x20 ETHTOOL_EROMVERS_LEN = 0x20 + ETHTOOL_FAMILY_NAME = "ethtool" + ETHTOOL_FAMILY_VERSION = 0x1 ETHTOOL_FEC_AUTO = 0x2 ETHTOOL_FEC_BASER = 0x10 ETHTOOL_FEC_LLRS = 0x20 @@ -1203,6 +1206,9 @@ const ( FAN_DENY = 0x2 FAN_ENABLE_AUDIT = 0x40 FAN_EPIDFD = -0x2 + FAN_ERRNO_BITS = 0x8 + FAN_ERRNO_MASK = 0xff + FAN_ERRNO_SHIFT = 0x18 FAN_EVENT_INFO_TYPE_DFID = 0x3 FAN_EVENT_INFO_TYPE_DFID_NAME = 0x2 FAN_EVENT_INFO_TYPE_ERROR = 0x5 @@ -1210,6 +1216,7 @@ const ( FAN_EVENT_INFO_TYPE_NEW_DFID_NAME = 0xc FAN_EVENT_INFO_TYPE_OLD_DFID_NAME = 0xa FAN_EVENT_INFO_TYPE_PIDFD = 0x4 + FAN_EVENT_INFO_TYPE_RANGE = 0x6 FAN_EVENT_METADATA_LEN = 0x18 FAN_EVENT_ON_CHILD = 0x8000000 FAN_FS_ERROR = 0x8000 @@ -1240,6 +1247,7 @@ const ( FAN_OPEN_EXEC = 0x1000 FAN_OPEN_EXEC_PERM = 0x40000 FAN_OPEN_PERM = 0x10000 + FAN_PRE_ACCESS = 0x100000 FAN_Q_OVERFLOW = 0x4000 FAN_RENAME = 0x10000000 FAN_REPORT_DFID_NAME = 0xc00 @@ -2787,7 +2795,7 @@ const ( RTAX_UNSPEC = 0x0 RTAX_WINDOW = 0x3 RTA_ALIGNTO = 0x4 - RTA_MAX = 0x1e + RTA_MAX = 0x1f RTCF_DIRECTSRC = 0x4000000 RTCF_DOREDIRECT = 0x1000000 RTCF_LOG = 0x2000000 @@ -2864,10 +2872,12 @@ const ( RTM_DELACTION = 0x31 RTM_DELADDR = 0x15 RTM_DELADDRLABEL = 0x49 + RTM_DELANYCAST = 0x3d RTM_DELCHAIN = 0x65 RTM_DELLINK = 0x11 RTM_DELLINKPROP = 0x6d RTM_DELMDB = 0x55 + RTM_DELMULTICAST = 0x39 RTM_DELNEIGH = 0x1d RTM_DELNETCONF = 0x51 RTM_DELNEXTHOP = 0x69 @@ -2917,11 +2927,13 @@ const ( RTM_NEWACTION = 0x30 RTM_NEWADDR = 0x14 RTM_NEWADDRLABEL = 0x48 + RTM_NEWANYCAST = 0x3c RTM_NEWCACHEREPORT = 0x60 RTM_NEWCHAIN = 0x64 RTM_NEWLINK = 0x10 RTM_NEWLINKPROP = 0x6c RTM_NEWMDB = 0x54 + RTM_NEWMULTICAST = 0x38 RTM_NEWNDUSEROPT = 0x44 RTM_NEWNEIGH = 0x1c RTM_NEWNEIGHTBL = 0x40 @@ -2987,11 +2999,12 @@ const ( RUSAGE_THREAD = 0x1 RWF_APPEND = 0x10 RWF_ATOMIC = 0x40 + RWF_DONTCACHE = 0x80 RWF_DSYNC = 0x2 
RWF_HIPRI = 0x1 RWF_NOAPPEND = 0x20 RWF_NOWAIT = 0x8 - RWF_SUPPORTED = 0x7f + RWF_SUPPORTED = 0xff RWF_SYNC = 0x4 RWF_WRITE_LIFE_NOT_SET = 0x0 SCHED_BATCH = 0x3 @@ -3271,6 +3284,7 @@ const ( STATX_BTIME = 0x800 STATX_CTIME = 0x80 STATX_DIOALIGN = 0x2000 + STATX_DIO_READ_ALIGN = 0x20000 STATX_GID = 0x10 STATX_INO = 0x100 STATX_MNT_ID = 0x1000 @@ -3322,7 +3336,7 @@ const ( TASKSTATS_GENL_NAME = "TASKSTATS" TASKSTATS_GENL_VERSION = 0x1 TASKSTATS_TYPE_MAX = 0x6 - TASKSTATS_VERSION = 0xe + TASKSTATS_VERSION = 0xf TCIFLUSH = 0x0 TCIOFF = 0x2 TCIOFLUSH = 0x2 @@ -3503,6 +3517,7 @@ const ( TP_STATUS_WRONG_FORMAT = 0x4 TRACEFS_MAGIC = 0x74726163 TS_COMM_LEN = 0x20 + UBI_IOCECNFO = 0xc01c6f06 UDF_SUPER_MAGIC = 0x15013346 UDP_CORK = 0x1 UDP_ENCAP = 0x64 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go index 75207613..a8c421e2 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go @@ -372,6 +372,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go index c68acda5..9a88d181 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go @@ -373,6 +373,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go index a8c607ab..7cb6a867 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go @@ -378,6 +378,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go index 18563dd8..d0ecd2c5 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go @@ -371,6 +371,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go index 22912cda..7a2940ae 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go @@ -365,6 +365,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go index 29344eb3..d14ca8f2 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go @@ -371,6 +371,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x1004 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x1006 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x1006 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go index 20d51fb9..2da1bac1 
100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go @@ -371,6 +371,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x1004 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x1006 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x1006 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go index 321b6090..28727514 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go @@ -371,6 +371,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x1004 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x1006 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x1006 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go index 9bacdf1e..7f287b54 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go @@ -371,6 +371,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x1004 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x1006 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x1006 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go index c2242726..7e5f9e6a 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go @@ -426,6 +426,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x10 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x12 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go index 6270c8ee..37c87952 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go @@ -430,6 +430,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x10 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x12 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go index 9966c194..52201336 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go @@ -430,6 +430,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x10 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x12 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go index 848e5fcc..4bfe2b5b 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go @@ -362,6 +362,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go index 669b2adb..e3cffb86 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go @@ -434,6 +434,7 @@ const ( SO_RCVBUFFORCE = 0x21 SO_RCVLOWAT = 0x12 SO_RCVMARK = 0x4b + SO_RCVPRIORITY = 0x52 SO_RCVTIMEO = 0x14 SO_RCVTIMEO_NEW = 0x42 SO_RCVTIMEO_OLD = 0x14 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go 
b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go index 4834e575..c219c8db 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go @@ -473,6 +473,7 @@ const ( SO_RCVBUFFORCE = 0x100b SO_RCVLOWAT = 0x800 SO_RCVMARK = 0x54 + SO_RCVPRIORITY = 0x5b SO_RCVTIMEO = 0x2000 SO_RCVTIMEO_NEW = 0x44 SO_RCVTIMEO_OLD = 0x2000 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux.go b/vendor/golang.org/x/sys/unix/ztypes_linux.go index a46abe64..8bcac283 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux.go @@ -114,7 +114,7 @@ type Statx_t struct { Atomic_write_unit_min uint32 Atomic_write_unit_max uint32 Atomic_write_segments_max uint32 - _ [1]uint32 + Dio_read_offset_align uint32 _ [9]uint64 } @@ -2226,8 +2226,11 @@ const ( NFT_PAYLOAD_LL_HEADER = 0x0 NFT_PAYLOAD_NETWORK_HEADER = 0x1 NFT_PAYLOAD_TRANSPORT_HEADER = 0x2 + NFT_PAYLOAD_INNER_HEADER = 0x3 + NFT_PAYLOAD_TUN_HEADER = 0x4 NFT_PAYLOAD_CSUM_NONE = 0x0 NFT_PAYLOAD_CSUM_INET = 0x1 + NFT_PAYLOAD_CSUM_SCTP = 0x2 NFT_PAYLOAD_L4CSUM_PSEUDOHDR = 0x1 NFTA_PAYLOAD_UNSPEC = 0x0 NFTA_PAYLOAD_DREG = 0x1 @@ -3802,7 +3805,16 @@ const ( ETHTOOL_MSG_PSE_GET = 0x24 ETHTOOL_MSG_PSE_SET = 0x25 ETHTOOL_MSG_RSS_GET = 0x26 - ETHTOOL_MSG_USER_MAX = 0x2d + ETHTOOL_MSG_PLCA_GET_CFG = 0x27 + ETHTOOL_MSG_PLCA_SET_CFG = 0x28 + ETHTOOL_MSG_PLCA_GET_STATUS = 0x29 + ETHTOOL_MSG_MM_GET = 0x2a + ETHTOOL_MSG_MM_SET = 0x2b + ETHTOOL_MSG_MODULE_FW_FLASH_ACT = 0x2c + ETHTOOL_MSG_PHY_GET = 0x2d + ETHTOOL_MSG_TSCONFIG_GET = 0x2e + ETHTOOL_MSG_TSCONFIG_SET = 0x2f + ETHTOOL_MSG_USER_MAX = 0x2f ETHTOOL_MSG_KERNEL_NONE = 0x0 ETHTOOL_MSG_STRSET_GET_REPLY = 0x1 ETHTOOL_MSG_LINKINFO_GET_REPLY = 0x2 @@ -3842,7 +3854,17 @@ const ( ETHTOOL_MSG_MODULE_NTF = 0x24 ETHTOOL_MSG_PSE_GET_REPLY = 0x25 ETHTOOL_MSG_RSS_GET_REPLY = 0x26 - ETHTOOL_MSG_KERNEL_MAX = 0x2e + ETHTOOL_MSG_PLCA_GET_CFG_REPLY = 0x27 + ETHTOOL_MSG_PLCA_GET_STATUS_REPLY = 0x28 + ETHTOOL_MSG_PLCA_NTF = 0x29 + ETHTOOL_MSG_MM_GET_REPLY = 0x2a + ETHTOOL_MSG_MM_NTF = 0x2b + ETHTOOL_MSG_MODULE_FW_FLASH_NTF = 0x2c + ETHTOOL_MSG_PHY_GET_REPLY = 0x2d + ETHTOOL_MSG_PHY_NTF = 0x2e + ETHTOOL_MSG_TSCONFIG_GET_REPLY = 0x2f + ETHTOOL_MSG_TSCONFIG_SET_REPLY = 0x30 + ETHTOOL_MSG_KERNEL_MAX = 0x30 ETHTOOL_FLAG_COMPACT_BITSETS = 0x1 ETHTOOL_FLAG_OMIT_REPLY = 0x2 ETHTOOL_FLAG_STATS = 0x4 @@ -3949,7 +3971,12 @@ const ( ETHTOOL_A_RINGS_TCP_DATA_SPLIT = 0xb ETHTOOL_A_RINGS_CQE_SIZE = 0xc ETHTOOL_A_RINGS_TX_PUSH = 0xd - ETHTOOL_A_RINGS_MAX = 0x10 + ETHTOOL_A_RINGS_RX_PUSH = 0xe + ETHTOOL_A_RINGS_TX_PUSH_BUF_LEN = 0xf + ETHTOOL_A_RINGS_TX_PUSH_BUF_LEN_MAX = 0x10 + ETHTOOL_A_RINGS_HDS_THRESH = 0x11 + ETHTOOL_A_RINGS_HDS_THRESH_MAX = 0x12 + ETHTOOL_A_RINGS_MAX = 0x12 ETHTOOL_A_CHANNELS_UNSPEC = 0x0 ETHTOOL_A_CHANNELS_HEADER = 0x1 ETHTOOL_A_CHANNELS_RX_MAX = 0x2 @@ -4015,7 +4042,9 @@ const ( ETHTOOL_A_TSINFO_TX_TYPES = 0x3 ETHTOOL_A_TSINFO_RX_FILTERS = 0x4 ETHTOOL_A_TSINFO_PHC_INDEX = 0x5 - ETHTOOL_A_TSINFO_MAX = 0x6 + ETHTOOL_A_TSINFO_STATS = 0x6 + ETHTOOL_A_TSINFO_HWTSTAMP_PROVIDER = 0x7 + ETHTOOL_A_TSINFO_MAX = 0x7 ETHTOOL_A_CABLE_TEST_UNSPEC = 0x0 ETHTOOL_A_CABLE_TEST_HEADER = 0x1 ETHTOOL_A_CABLE_TEST_MAX = 0x1 @@ -4613,6 +4642,7 @@ const ( NL80211_ATTR_AKM_SUITES = 0x4c NL80211_ATTR_AP_ISOLATE = 0x60 NL80211_ATTR_AP_SETTINGS_FLAGS = 0x135 + NL80211_ATTR_ASSOC_SPP_AMSDU = 0x14a NL80211_ATTR_AUTH_DATA = 0x9c NL80211_ATTR_AUTH_TYPE = 0x35 NL80211_ATTR_BANDS = 0xef @@ -4623,6 +4653,7 @@ const ( NL80211_ATTR_BSS_BASIC_RATES = 
0x24 NL80211_ATTR_BSS = 0x2f NL80211_ATTR_BSS_CTS_PROT = 0x1c + NL80211_ATTR_BSS_DUMP_INCLUDE_USE_DATA = 0x147 NL80211_ATTR_BSS_HT_OPMODE = 0x6d NL80211_ATTR_BSSID = 0xf5 NL80211_ATTR_BSS_SELECT = 0xe3 @@ -4682,6 +4713,7 @@ const ( NL80211_ATTR_DTIM_PERIOD = 0xd NL80211_ATTR_DURATION = 0x57 NL80211_ATTR_EHT_CAPABILITY = 0x136 + NL80211_ATTR_EMA_RNR_ELEMS = 0x145 NL80211_ATTR_EML_CAPABILITY = 0x13d NL80211_ATTR_EXT_CAPA = 0xa9 NL80211_ATTR_EXT_CAPA_MASK = 0xaa @@ -4717,6 +4749,7 @@ const ( NL80211_ATTR_HIDDEN_SSID = 0x7e NL80211_ATTR_HT_CAPABILITY = 0x1f NL80211_ATTR_HT_CAPABILITY_MASK = 0x94 + NL80211_ATTR_HW_TIMESTAMP_ENABLED = 0x144 NL80211_ATTR_IE_ASSOC_RESP = 0x80 NL80211_ATTR_IE = 0x2a NL80211_ATTR_IE_PROBE_RESP = 0x7f @@ -4747,9 +4780,10 @@ const ( NL80211_ATTR_MAC_HINT = 0xc8 NL80211_ATTR_MAC_MASK = 0xd7 NL80211_ATTR_MAX_AP_ASSOC_STA = 0xca - NL80211_ATTR_MAX = 0x14d + NL80211_ATTR_MAX = 0x150 NL80211_ATTR_MAX_CRIT_PROT_DURATION = 0xb4 NL80211_ATTR_MAX_CSA_COUNTERS = 0xce + NL80211_ATTR_MAX_HW_TIMESTAMP_PEERS = 0x143 NL80211_ATTR_MAX_MATCH_SETS = 0x85 NL80211_ATTR_MAX_NUM_AKM_SUITES = 0x13c NL80211_ATTR_MAX_NUM_PMKIDS = 0x56 @@ -4774,9 +4808,12 @@ const ( NL80211_ATTR_MGMT_SUBTYPE = 0x29 NL80211_ATTR_MLD_ADDR = 0x13a NL80211_ATTR_MLD_CAPA_AND_OPS = 0x13e + NL80211_ATTR_MLO_LINK_DISABLED = 0x146 NL80211_ATTR_MLO_LINK_ID = 0x139 NL80211_ATTR_MLO_LINKS = 0x138 NL80211_ATTR_MLO_SUPPORT = 0x13b + NL80211_ATTR_MLO_TTLM_DLINK = 0x148 + NL80211_ATTR_MLO_TTLM_ULINK = 0x149 NL80211_ATTR_MNTR_FLAGS = 0x17 NL80211_ATTR_MPATH_INFO = 0x1b NL80211_ATTR_MPATH_NEXT_HOP = 0x1a @@ -4809,12 +4846,14 @@ const ( NL80211_ATTR_PORT_AUTHORIZED = 0x103 NL80211_ATTR_POWER_RULE_MAX_ANT_GAIN = 0x5 NL80211_ATTR_POWER_RULE_MAX_EIRP = 0x6 + NL80211_ATTR_POWER_RULE_PSD = 0x8 NL80211_ATTR_PREV_BSSID = 0x4f NL80211_ATTR_PRIVACY = 0x46 NL80211_ATTR_PROBE_RESP = 0x91 NL80211_ATTR_PROBE_RESP_OFFLOAD = 0x90 NL80211_ATTR_PROTOCOL_FEATURES = 0xad NL80211_ATTR_PS_STATE = 0x5d + NL80211_ATTR_PUNCT_BITMAP = 0x142 NL80211_ATTR_QOS_MAP = 0xc7 NL80211_ATTR_RADAR_BACKGROUND = 0x134 NL80211_ATTR_RADAR_EVENT = 0xa8 @@ -4943,7 +4982,9 @@ const ( NL80211_ATTR_WIPHY_FREQ = 0x26 NL80211_ATTR_WIPHY_FREQ_HINT = 0xc9 NL80211_ATTR_WIPHY_FREQ_OFFSET = 0x122 + NL80211_ATTR_WIPHY_INTERFACE_COMBINATIONS = 0x14c NL80211_ATTR_WIPHY_NAME = 0x2 + NL80211_ATTR_WIPHY_RADIOS = 0x14b NL80211_ATTR_WIPHY_RETRY_LONG = 0x3e NL80211_ATTR_WIPHY_RETRY_SHORT = 0x3d NL80211_ATTR_WIPHY_RTS_THRESHOLD = 0x40 @@ -4978,6 +5019,8 @@ const ( NL80211_BAND_ATTR_IFTYPE_DATA = 0x9 NL80211_BAND_ATTR_MAX = 0xd NL80211_BAND_ATTR_RATES = 0x2 + NL80211_BAND_ATTR_S1G_CAPA = 0xd + NL80211_BAND_ATTR_S1G_MCS_NSS_SET = 0xc NL80211_BAND_ATTR_VHT_CAPA = 0x8 NL80211_BAND_ATTR_VHT_MCS_SET = 0x7 NL80211_BAND_IFTYPE_ATTR_EHT_CAP_MAC = 0x8 @@ -5001,6 +5044,10 @@ const ( NL80211_BSS_BEACON_INTERVAL = 0x4 NL80211_BSS_BEACON_TSF = 0xd NL80211_BSS_BSSID = 0x1 + NL80211_BSS_CANNOT_USE_6GHZ_PWR_MISMATCH = 0x2 + NL80211_BSS_CANNOT_USE_NSTR_NONPRIMARY = 0x1 + NL80211_BSS_CANNOT_USE_REASONS = 0x18 + NL80211_BSS_CANNOT_USE_UHB_PWR_MISMATCH = 0x2 NL80211_BSS_CAPABILITY = 0x5 NL80211_BSS_CHAIN_SIGNAL = 0x13 NL80211_BSS_CHAN_WIDTH_10 = 0x1 @@ -5032,6 +5079,9 @@ const ( NL80211_BSS_STATUS = 0x9 NL80211_BSS_STATUS_IBSS_JOINED = 0x2 NL80211_BSS_TSF = 0x3 + NL80211_BSS_USE_FOR = 0x17 + NL80211_BSS_USE_FOR_MLD_LINK = 0x2 + NL80211_BSS_USE_FOR_NORMAL = 0x1 NL80211_CHAN_HT20 = 0x1 NL80211_CHAN_HT40MINUS = 0x2 NL80211_CHAN_HT40PLUS = 0x3 @@ -5117,7 +5167,8 @@ const ( NL80211_CMD_LEAVE_IBSS = 0x2c 
NL80211_CMD_LEAVE_MESH = 0x45 NL80211_CMD_LEAVE_OCB = 0x6d - NL80211_CMD_MAX = 0x9b + NL80211_CMD_LINKS_REMOVED = 0x9a + NL80211_CMD_MAX = 0x9d NL80211_CMD_MICHAEL_MIC_FAILURE = 0x29 NL80211_CMD_MODIFY_LINK_STA = 0x97 NL80211_CMD_NAN_MATCH = 0x78 @@ -5161,6 +5212,7 @@ const ( NL80211_CMD_SET_COALESCE = 0x65 NL80211_CMD_SET_CQM = 0x3f NL80211_CMD_SET_FILS_AAD = 0x92 + NL80211_CMD_SET_HW_TIMESTAMP = 0x99 NL80211_CMD_SET_INTERFACE = 0x6 NL80211_CMD_SET_KEY = 0xa NL80211_CMD_SET_MAC_ACL = 0x5d @@ -5180,6 +5232,7 @@ const ( NL80211_CMD_SET_SAR_SPECS = 0x8c NL80211_CMD_SET_STATION = 0x12 NL80211_CMD_SET_TID_CONFIG = 0x89 + NL80211_CMD_SET_TID_TO_LINK_MAPPING = 0x9b NL80211_CMD_SET_TX_BITRATE_MASK = 0x39 NL80211_CMD_SET_WDS_PEER = 0x42 NL80211_CMD_SET_WIPHY = 0x2 @@ -5247,6 +5300,7 @@ const ( NL80211_EXT_FEATURE_AIRTIME_FAIRNESS = 0x21 NL80211_EXT_FEATURE_AP_PMKSA_CACHING = 0x22 NL80211_EXT_FEATURE_AQL = 0x28 + NL80211_EXT_FEATURE_AUTH_AND_DEAUTH_RANDOM_TA = 0x40 NL80211_EXT_FEATURE_BEACON_PROTECTION_CLIENT = 0x2e NL80211_EXT_FEATURE_BEACON_PROTECTION = 0x29 NL80211_EXT_FEATURE_BEACON_RATE_HE = 0x36 @@ -5262,6 +5316,7 @@ const ( NL80211_EXT_FEATURE_CQM_RSSI_LIST = 0xd NL80211_EXT_FEATURE_DATA_ACK_SIGNAL_SUPPORT = 0x1b NL80211_EXT_FEATURE_DEL_IBSS_STA = 0x2c + NL80211_EXT_FEATURE_DFS_CONCURRENT = 0x43 NL80211_EXT_FEATURE_DFS_OFFLOAD = 0x19 NL80211_EXT_FEATURE_ENABLE_FTM_RESPONDER = 0x20 NL80211_EXT_FEATURE_EXT_KEY_ID = 0x24 @@ -5281,9 +5336,12 @@ const ( NL80211_EXT_FEATURE_OCE_PROBE_REQ_DEFERRAL_SUPPRESSION = 0x14 NL80211_EXT_FEATURE_OCE_PROBE_REQ_HIGH_TX_RATE = 0x13 NL80211_EXT_FEATURE_OPERATING_CHANNEL_VALIDATION = 0x31 + NL80211_EXT_FEATURE_OWE_OFFLOAD_AP = 0x42 + NL80211_EXT_FEATURE_OWE_OFFLOAD = 0x41 NL80211_EXT_FEATURE_POWERED_ADDR_CHANGE = 0x3d NL80211_EXT_FEATURE_PROTECTED_TWT = 0x2b NL80211_EXT_FEATURE_PROT_RANGE_NEGO_AND_MEASURE = 0x39 + NL80211_EXT_FEATURE_PUNCT = 0x3e NL80211_EXT_FEATURE_RADAR_BACKGROUND = 0x3c NL80211_EXT_FEATURE_RRM = 0x1 NL80211_EXT_FEATURE_SAE_OFFLOAD_AP = 0x33 @@ -5295,8 +5353,10 @@ const ( NL80211_EXT_FEATURE_SCHED_SCAN_BAND_SPECIFIC_RSSI_THOLD = 0x23 NL80211_EXT_FEATURE_SCHED_SCAN_RELATIVE_RSSI = 0xc NL80211_EXT_FEATURE_SECURE_LTF = 0x37 + NL80211_EXT_FEATURE_SECURE_NAN = 0x3f NL80211_EXT_FEATURE_SECURE_RTT = 0x38 NL80211_EXT_FEATURE_SET_SCAN_DWELL = 0x5 + NL80211_EXT_FEATURE_SPP_AMSDU_SUPPORT = 0x44 NL80211_EXT_FEATURE_STA_TX_PWR = 0x25 NL80211_EXT_FEATURE_TXQS = 0x1c NL80211_EXT_FEATURE_UNSOL_BCAST_PROBE_RESP = 0x35 @@ -5343,7 +5403,10 @@ const ( NL80211_FREQUENCY_ATTR_2MHZ = 0x16 NL80211_FREQUENCY_ATTR_4MHZ = 0x17 NL80211_FREQUENCY_ATTR_8MHZ = 0x18 + NL80211_FREQUENCY_ATTR_ALLOW_6GHZ_VLP_AP = 0x21 + NL80211_FREQUENCY_ATTR_CAN_MONITOR = 0x20 NL80211_FREQUENCY_ATTR_DFS_CAC_TIME = 0xd + NL80211_FREQUENCY_ATTR_DFS_CONCURRENT = 0x1d NL80211_FREQUENCY_ATTR_DFS_STATE = 0x7 NL80211_FREQUENCY_ATTR_DFS_TIME = 0x8 NL80211_FREQUENCY_ATTR_DISABLED = 0x2 @@ -5357,6 +5420,8 @@ const ( NL80211_FREQUENCY_ATTR_NO_160MHZ = 0xc NL80211_FREQUENCY_ATTR_NO_20MHZ = 0x10 NL80211_FREQUENCY_ATTR_NO_320MHZ = 0x1a + NL80211_FREQUENCY_ATTR_NO_6GHZ_AFC_CLIENT = 0x1f + NL80211_FREQUENCY_ATTR_NO_6GHZ_VLP_CLIENT = 0x1e NL80211_FREQUENCY_ATTR_NO_80MHZ = 0xb NL80211_FREQUENCY_ATTR_NO_EHT = 0x1b NL80211_FREQUENCY_ATTR_NO_HE = 0x13 @@ -5364,8 +5429,11 @@ const ( NL80211_FREQUENCY_ATTR_NO_HT40_PLUS = 0xa NL80211_FREQUENCY_ATTR_NO_IBSS = 0x3 NL80211_FREQUENCY_ATTR_NO_IR = 0x3 + NL80211_FREQUENCY_ATTR_NO_UHB_AFC_CLIENT = 0x1f + NL80211_FREQUENCY_ATTR_NO_UHB_VLP_CLIENT = 0x1e 
NL80211_FREQUENCY_ATTR_OFFSET = 0x14 NL80211_FREQUENCY_ATTR_PASSIVE_SCAN = 0x3 + NL80211_FREQUENCY_ATTR_PSD = 0x1c NL80211_FREQUENCY_ATTR_RADAR = 0x5 NL80211_FREQUENCY_ATTR_WMM = 0x12 NL80211_FTM_RESP_ATTR_CIVICLOC = 0x3 @@ -5430,6 +5498,7 @@ const ( NL80211_IFTYPE_STATION = 0x2 NL80211_IFTYPE_UNSPECIFIED = 0x0 NL80211_IFTYPE_WDS = 0x5 + NL80211_KCK_EXT_LEN_32 = 0x20 NL80211_KCK_EXT_LEN = 0x18 NL80211_KCK_LEN = 0x10 NL80211_KEK_EXT_LEN = 0x20 @@ -5458,6 +5527,7 @@ const ( NL80211_MAX_SUPP_HT_RATES = 0x4d NL80211_MAX_SUPP_RATES = 0x20 NL80211_MAX_SUPP_REG_RULES = 0x80 + NL80211_MAX_SUPP_SELECTORS = 0x80 NL80211_MBSSID_CONFIG_ATTR_EMA = 0x5 NL80211_MBSSID_CONFIG_ATTR_INDEX = 0x3 NL80211_MBSSID_CONFIG_ATTR_MAX = 0x5 @@ -5703,11 +5773,16 @@ const ( NL80211_RADAR_PRE_CAC_EXPIRED = 0x4 NL80211_RATE_INFO_10_MHZ_WIDTH = 0xb NL80211_RATE_INFO_160_MHZ_WIDTH = 0xa + NL80211_RATE_INFO_16_MHZ_WIDTH = 0x1d + NL80211_RATE_INFO_1_MHZ_WIDTH = 0x19 + NL80211_RATE_INFO_2_MHZ_WIDTH = 0x1a NL80211_RATE_INFO_320_MHZ_WIDTH = 0x12 NL80211_RATE_INFO_40_MHZ_WIDTH = 0x3 + NL80211_RATE_INFO_4_MHZ_WIDTH = 0x1b NL80211_RATE_INFO_5_MHZ_WIDTH = 0xc NL80211_RATE_INFO_80_MHZ_WIDTH = 0x8 NL80211_RATE_INFO_80P80_MHZ_WIDTH = 0x9 + NL80211_RATE_INFO_8_MHZ_WIDTH = 0x1c NL80211_RATE_INFO_BITRATE32 = 0x5 NL80211_RATE_INFO_BITRATE = 0x1 NL80211_RATE_INFO_EHT_GI_0_8 = 0x0 @@ -5753,6 +5828,8 @@ const ( NL80211_RATE_INFO_HE_RU_ALLOC = 0x11 NL80211_RATE_INFO_MAX = 0x1d NL80211_RATE_INFO_MCS = 0x2 + NL80211_RATE_INFO_S1G_MCS = 0x17 + NL80211_RATE_INFO_S1G_NSS = 0x18 NL80211_RATE_INFO_SHORT_GI = 0x4 NL80211_RATE_INFO_VHT_MCS = 0x6 NL80211_RATE_INFO_VHT_NSS = 0x7 @@ -5770,14 +5847,19 @@ const ( NL80211_REKEY_DATA_KEK = 0x1 NL80211_REKEY_DATA_REPLAY_CTR = 0x3 NL80211_REPLAY_CTR_LEN = 0x8 + NL80211_RRF_ALLOW_6GHZ_VLP_AP = 0x1000000 NL80211_RRF_AUTO_BW = 0x800 NL80211_RRF_DFS = 0x10 + NL80211_RRF_DFS_CONCURRENT = 0x200000 NL80211_RRF_GO_CONCURRENT = 0x1000 NL80211_RRF_IR_CONCURRENT = 0x1000 NL80211_RRF_NO_160MHZ = 0x10000 NL80211_RRF_NO_320MHZ = 0x40000 + NL80211_RRF_NO_6GHZ_AFC_CLIENT = 0x800000 + NL80211_RRF_NO_6GHZ_VLP_CLIENT = 0x400000 NL80211_RRF_NO_80MHZ = 0x8000 NL80211_RRF_NO_CCK = 0x2 + NL80211_RRF_NO_EHT = 0x80000 NL80211_RRF_NO_HE = 0x20000 NL80211_RRF_NO_HT40 = 0x6000 NL80211_RRF_NO_HT40MINUS = 0x2000 @@ -5788,7 +5870,10 @@ const ( NL80211_RRF_NO_IR = 0x80 NL80211_RRF_NO_OFDM = 0x1 NL80211_RRF_NO_OUTDOOR = 0x8 + NL80211_RRF_NO_UHB_AFC_CLIENT = 0x800000 + NL80211_RRF_NO_UHB_VLP_CLIENT = 0x400000 NL80211_RRF_PASSIVE_SCAN = 0x80 + NL80211_RRF_PSD = 0x100000 NL80211_RRF_PTMP_ONLY = 0x40 NL80211_RRF_PTP_ONLY = 0x20 NL80211_RXMGMT_FLAG_ANSWERED = 0x1 @@ -5849,6 +5934,7 @@ const ( NL80211_STA_FLAG_MAX_OLD_API = 0x6 NL80211_STA_FLAG_MFP = 0x4 NL80211_STA_FLAG_SHORT_PREAMBLE = 0x2 + NL80211_STA_FLAG_SPP_AMSDU = 0x8 NL80211_STA_FLAG_TDLS_PEER = 0x6 NL80211_STA_FLAG_WME = 0x3 NL80211_STA_INFO_ACK_SIGNAL_AVG = 0x23 @@ -6007,6 +6093,13 @@ const ( NL80211_VHT_CAPABILITY_LEN = 0xc NL80211_VHT_NSS_MAX = 0x8 NL80211_WIPHY_NAME_MAXLEN = 0x40 + NL80211_WIPHY_RADIO_ATTR_FREQ_RANGE = 0x2 + NL80211_WIPHY_RADIO_ATTR_INDEX = 0x1 + NL80211_WIPHY_RADIO_ATTR_INTERFACE_COMBINATION = 0x3 + NL80211_WIPHY_RADIO_ATTR_MAX = 0x4 + NL80211_WIPHY_RADIO_FREQ_ATTR_END = 0x2 + NL80211_WIPHY_RADIO_FREQ_ATTR_MAX = 0x2 + NL80211_WIPHY_RADIO_FREQ_ATTR_START = 0x1 NL80211_WMMR_AIFSN = 0x3 NL80211_WMMR_CW_MAX = 0x2 NL80211_WMMR_CW_MIN = 0x1 @@ -6038,6 +6131,7 @@ const ( NL80211_WOWLAN_TRIG_PKT_PATTERN = 0x4 NL80211_WOWLAN_TRIG_RFKILL_RELEASE = 0x9 
NL80211_WOWLAN_TRIG_TCP_CONNECTION = 0xe + NL80211_WOWLAN_TRIG_UNPROTECTED_DEAUTH_DISASSOC = 0x14 NL80211_WOWLAN_TRIG_WAKEUP_PKT_80211 = 0xa NL80211_WOWLAN_TRIG_WAKEUP_PKT_80211_LEN = 0xb NL80211_WOWLAN_TRIG_WAKEUP_PKT_8023 = 0xc diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_386.go b/vendor/golang.org/x/sys/unix/ztypes_linux_386.go index fd402da4..62db85f6 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_386.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_386.go @@ -285,10 +285,16 @@ type Taskstats struct { _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -324,11 +330,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -336,8 +348,12 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint32 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go index eb7a5e18..7d89d648 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go @@ -300,10 +300,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -338,19 +344,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go b/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go index d78ac108..9c0b39ee 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go @@ -276,10 +276,16 @@ type Taskstats struct { _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max 
uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -315,11 +321,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -327,8 +339,12 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint32 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go index cd06d47f..de9c7ff3 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go @@ -279,10 +279,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -317,19 +323,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go index 2f28fe26..2336bd2b 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go @@ -280,10 +280,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -318,19 +324,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + 
Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go index 71d6cac2..4711f0be 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go @@ -281,10 +281,16 @@ type Taskstats struct { _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,11 +326,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -332,8 +344,12 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint32 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go index 8596d453..ab99a34b 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go @@ -282,10 +282,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,19 +326,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go index cd60ea18..04c9866e 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go @@ -282,10 +282,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,19 +326,29 @@ type Taskstats struct { 
Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go index b0ae420c..60aa69f6 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go @@ -281,10 +281,16 @@ type Taskstats struct { _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -320,11 +326,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -332,8 +344,12 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint32 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go index 83597287..cb4fad78 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go @@ -288,10 +288,16 @@ type Taskstats struct { _ [4]byte Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -327,11 +333,17 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -339,8 +351,12 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint32 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go 
b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go index 69eb6a5c..60272cfc 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go @@ -289,10 +289,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -327,19 +333,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go index 5f583cb6..3f5b91bc 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go @@ -289,10 +289,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -327,19 +333,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go index ad05b51a..51550f15 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go @@ -307,10 +307,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -345,19 +351,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 
Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go index cf3ce900..3239e50e 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go @@ -302,10 +302,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -340,19 +346,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go index 590b5673..faf20027 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go @@ -284,10 +284,16 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -322,19 +328,29 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Irq_delay_max uint64 + Irq_delay_min uint64 } type cpuMask uint64 diff --git a/vendor/gorm.io/datatypes/json.go b/vendor/gorm.io/datatypes/json.go index 79476781..97e6b4aa 100644 --- a/vendor/gorm.io/datatypes/json.go +++ b/vendor/gorm.io/datatypes/json.go @@ -35,16 +35,20 @@ func (j *JSON) Scan(value interface{}) error { return nil } var bytes []byte - switch v := 
value.(type) { - case []byte: - if len(v) > 0 { - bytes = make([]byte, len(v)) - copy(bytes, v) + if s, ok := value.(fmt.Stringer); ok { + bytes = []byte(s.String()) + } else { + switch v := value.(type) { + case []byte: + if len(v) > 0 { + bytes = make([]byte, len(v)) + copy(bytes, v) + } + case string: + bytes = []byte(v) + default: + return errors.New(fmt.Sprint("Failed to unmarshal JSONB value:", value)) } - case string: - bytes = []byte(v) - default: - return errors.New(fmt.Sprint("Failed to unmarshal JSONB value:", value)) } result := json.RawMessage(bytes) @@ -394,6 +398,8 @@ func (jsonSet *JSONSetExpression) Build(builder clause.Builder) { break } stmt.AddVar(builder, gorm.Expr("CAST(? AS JSON)", string(b))) + case reflect.Bool: + builder.WriteString(strconv.FormatBool(rv.Bool())) default: stmt.AddVar(builder, value) } diff --git a/vendor/modules.txt b/vendor/modules.txt index 9dc5ecdc..980c48a0 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -21,7 +21,7 @@ github.com/cespare/xxhash/v2 # github.com/chzyer/readline v1.5.1 ## explicit; go 1.15 github.com/chzyer/readline -# github.com/cloudbase/garm-provider-common v0.1.5 +# github.com/cloudbase/garm-provider-common v0.1.6 ## explicit; go 1.23.0 github.com/cloudbase/garm-provider-common/defaults github.com/cloudbase/garm-provider-common/errors @@ -94,7 +94,7 @@ github.com/go-sql-driver/mysql # github.com/golang-jwt/jwt/v4 v4.5.2 ## explicit; go 1.16 github.com/golang-jwt/jwt/v4 -# github.com/golang-jwt/jwt/v5 v5.2.2 +# github.com/golang-jwt/jwt/v5 v5.2.3 ## explicit; go 1.18 github.com/golang-jwt/jwt/v5 # github.com/google/go-github/v72 v72.0.0 @@ -208,11 +208,11 @@ github.com/prometheus/client_golang/prometheus/promhttp/internal # github.com/prometheus/client_model v0.6.2 ## explicit; go 1.22.0 github.com/prometheus/client_model/go -# github.com/prometheus/common v0.64.0 +# github.com/prometheus/common v0.65.0 ## explicit; go 1.23.0 github.com/prometheus/common/expfmt github.com/prometheus/common/model -# github.com/prometheus/procfs v0.16.1 +# github.com/prometheus/procfs v0.17.0 ## explicit; go 1.23.0 github.com/prometheus/procfs github.com/prometheus/procfs/internal/fs @@ -223,7 +223,7 @@ github.com/rivo/uniseg # github.com/spf13/cobra v1.9.1 ## explicit; go 1.15 github.com/spf13/cobra -# github.com/spf13/pflag v1.0.6 +# github.com/spf13/pflag v1.0.7 ## explicit; go 1.12 github.com/spf13/pflag # github.com/stretchr/objx v0.5.2 @@ -252,7 +252,7 @@ go.mongodb.org/mongo-driver/x/bsonx/bsoncore ## explicit; go 1.22.0 go.opentelemetry.io/auto/sdk go.opentelemetry.io/auto/sdk/internal/telemetry -# go.opentelemetry.io/otel v1.36.0 +# go.opentelemetry.io/otel v1.37.0 ## explicit; go 1.23.0 go.opentelemetry.io/otel go.opentelemetry.io/otel/attribute @@ -266,17 +266,18 @@ go.opentelemetry.io/otel/semconv/internal/v2 go.opentelemetry.io/otel/semconv/v1.17.0 go.opentelemetry.io/otel/semconv/v1.17.0/httpconv go.opentelemetry.io/otel/semconv/v1.26.0 -# go.opentelemetry.io/otel/metric v1.36.0 +go.opentelemetry.io/otel/semconv/v1.34.0 +# go.opentelemetry.io/otel/metric v1.37.0 ## explicit; go 1.23.0 go.opentelemetry.io/otel/metric go.opentelemetry.io/otel/metric/embedded -# go.opentelemetry.io/otel/trace v1.36.0 +# go.opentelemetry.io/otel/trace v1.37.0 ## explicit; go 1.23.0 go.opentelemetry.io/otel/trace go.opentelemetry.io/otel/trace/embedded go.opentelemetry.io/otel/trace/internal/telemetry go.opentelemetry.io/otel/trace/noop -# golang.org/x/crypto v0.39.0 +# golang.org/x/crypto v0.40.0 ## explicit; go 1.23.0 
golang.org/x/crypto/bcrypt golang.org/x/crypto/blowfish @@ -285,7 +286,7 @@ golang.org/x/crypto/chacha20poly1305 golang.org/x/crypto/hkdf golang.org/x/crypto/internal/alias golang.org/x/crypto/internal/poly1305 -# golang.org/x/net v0.41.0 +# golang.org/x/net v0.42.0 ## explicit; go 1.23.0 golang.org/x/net/internal/socks golang.org/x/net/proxy @@ -293,15 +294,15 @@ golang.org/x/net/proxy ## explicit; go 1.23.0 golang.org/x/oauth2 golang.org/x/oauth2/internal -# golang.org/x/sync v0.15.0 +# golang.org/x/sync v0.16.0 ## explicit; go 1.23.0 golang.org/x/sync/errgroup -# golang.org/x/sys v0.33.0 +# golang.org/x/sys v0.34.0 ## explicit; go 1.23.0 golang.org/x/sys/cpu golang.org/x/sys/unix golang.org/x/sys/windows -# golang.org/x/text v0.26.0 +# golang.org/x/text v0.27.0 ## explicit; go 1.23.0 golang.org/x/text/cases golang.org/x/text/internal @@ -352,7 +353,7 @@ gopkg.in/natefinch/lumberjack.v2 # gopkg.in/yaml.v3 v3.0.1 ## explicit gopkg.in/yaml.v3 -# gorm.io/datatypes v1.2.5 +# gorm.io/datatypes v1.2.6 ## explicit; go 1.19 gorm.io/datatypes # gorm.io/driver/mysql v1.6.0 From 03aa14c91288e63f9bcbc16e5940e2bf6c88b0c1 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 20 Jul 2025 01:05:27 +0000 Subject: [PATCH 126/179] Update docs to reflect new release Signed-off-by: Gabriel Adrian Samfira --- README.md | 25 ++++++++++++------------- doc/quickstart.md | 33 +++++++++++++++++---------------- doc/using_garm.md | 2 +- 3 files changed, 30 insertions(+), 30 deletions(-) diff --git a/README.md b/README.md index 4411834c..390dc15b 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,18 @@ # GitHub Actions Runner Manager (GARM) -[![Go Tests](https://github.com/cloudbase/garm/actions/workflows/go-tests.yml/badge.svg)](https://github.com/cloudbase/garm/actions/workflows/go-tests.yml) - -- [About GARM](#about-garm) -- [Join us on slack](#join-us-on-slack) -- [Installing](#installing) - - [Quickstart](#quickstart) - - [Installing on Kubernetes](#installing-on-kubernetes) -- [Using GARM](#using-garm) -- [Supported providers](#supported-providers) - - [Installing external providers](#installing-external-providers) -- [Optimizing your runners](#optimizing-your-runners) -- [Write your own provider](#write-your-own-provider) +- [GitHub Actions Runner Manager GARM](#github-actions-runner-manager-garm) + - [About GARM](#about-garm) + - [Join us on slack](#join-us-on-slack) + - [Installing](#installing) + - [Quickstart](#quickstart) + - [Installing on Kubernetes](#installing-on-kubernetes) + - [Using GARM](#using-garm) + - [Supported providers](#supported-providers) + - [Installing external providers](#installing-external-providers) + - [Optimizing your runners](#optimizing-your-runners) + - [Write your own provider](#write-your-own-provider) @@ -34,7 +33,7 @@ Here is a brief architectural diagram of how GARM reacts to workflows triggered ![GARM architecture diagram](/doc/images/garm-light.drawio.svg?raw=true#gh-light-mode-only) ![GARM architecture diagram](/doc/images/garm-dark.drawio.svg?raw=true#gh-dark-mode-only) -:warning: **Important note**: The README and documentation in the `main` branch are relevant to the not yet released code that is present in `main`. Following the documentation from the `main` branch for a stable release of GARM, may lead to errors. To view the documentation for the latest stable release, please switch to the appropriate tag. For information about setting up `v0.1.5`, please refer to the [v0.1.5 tag](https://github.com/cloudbase/garm/tree/v0.1.5). 
+:warning: **Important note**: The README and documentation in the `main` branch are relevant to the not yet released code that is present in `main`. Following the documentation from the `main` branch for a stable release of GARM, may lead to errors. To view the documentation for the latest stable release, please switch to the appropriate tag. For information about setting up `v0.1.6`, please refer to the [v0.1.6 tag](https://github.com/cloudbase/garm/tree/v0.1.6). ## Join us on slack diff --git a/doc/quickstart.md b/doc/quickstart.md index 603bd12c..a2016cbc 100644 --- a/doc/quickstart.md +++ b/doc/quickstart.md @@ -2,18 +2,19 @@ - - [Create the config folder](#create-the-config-folder) - - [The config file](#the-config-file) - - [The provider section](#the-provider-section) - - [Starting the service](#starting-the-service) - - [Using Docker](#using-docker) - - [Setting up GARM as a system service](#setting-up-garm-as-a-system-service) - - [Initializing GARM](#initializing-garm) - - [Setting up the webhook](#setting-up-the-webhook) - - [Creating a GitHub endpoint Optional](#creating-a-github-endpoint-optional) - - [Adding credentials](#adding-credentials) - - [Define a repo](#define-a-repo) - - [Create a pool](#create-a-pool) +- [Quick start](#quick-start) + - [Create the config folder](#create-the-config-folder) + - [The config file](#the-config-file) + - [The provider section](#the-provider-section) + - [Starting the service](#starting-the-service) + - [Using Docker](#using-docker) + - [Setting up GARM as a system service](#setting-up-garm-as-a-system-service) + - [Initializing GARM](#initializing-garm) + - [Setting up the webhook](#setting-up-the-webhook) + - [Creating a GitHub endpoint Optional](#creating-a-github-endpoint-optional) + - [Adding credentials](#adding-credentials) + - [Define a repo](#define-a-repo) + - [Create a pool](#create-a-pool) @@ -133,7 +134,7 @@ docker run -d \ -p 80:80 \ -v /etc/garm:/etc/garm:rw \ -v /var/snap/lxd/common/lxd/unix.socket:/var/snap/lxd/common/lxd/unix.socket:rw \ - ghcr.io/cloudbase/garm:v0.1.4 + ghcr.io/cloudbase/garm:v0.1.6 ``` You will notice that we also mounted the LXD unix socket from the host inside the container where the config you pasted expects to find it. If you plan to use an external provider that does not need to connect to LXD over a unix socket, feel free to remove that mount. @@ -166,7 +167,7 @@ Adding the `garm` user to the LXD group will allow it to connect to the LXD unix Next, download the latest release from the [releases page](https://github.com/cloudbase/garm/releases). ```bash -wget -q -O - https://github.com/cloudbase/garm/releases/download/v0.1.5/garm-linux-amd64.tgz | tar xzf - -C /usr/local/bin/ +wget -q -O - https://github.com/cloudbase/garm/releases/download/v0.1.6/garm-linux-amd64.tgz | tar xzf - -C /usr/local/bin/ ``` We'll be running under an unprivileged user. If we want to be able to listen on any port under `1024`, we'll have to set some capabilities on the binary: @@ -199,7 +200,7 @@ Copy the sample `systemd` service file: ```bash wget -O /etc/systemd/system/garm.service \ - https://raw.githubusercontent.com/cloudbase/garm/v0.1.5/contrib/garm.service + https://raw.githubusercontent.com/cloudbase/garm/v0.1.6/contrib/garm.service ``` Reload the `systemd` daemon and start the service: @@ -234,7 +235,7 @@ Before we can start using GARM, we need initialize it. This will create the `adm To initialize GARM, we'll use the `garm-cli` tool. 
You can download the latest release from the [releases page](https://github.com/cloudbase/garm/releases): ```bash -wget -q -O - https://github.com/cloudbase/garm/releases/download/v0.1.5/garm-cli-linux-amd64.tgz | tar xzf - -C /usr/local/bin/ +wget -q -O - https://github.com/cloudbase/garm/releases/download/v0.1.6/garm-cli-linux-amd64.tgz | tar xzf - -C /usr/local/bin/ ``` Now we can initialize GARM: diff --git a/doc/using_garm.md b/doc/using_garm.md index ba8cf2d6..e5e093d3 100644 --- a/doc/using_garm.md +++ b/doc/using_garm.md @@ -66,7 +66,7 @@ garm-cli controller show | Webhook Base URL | https://garm.example.com/webhooks | | Controller Webhook URL | https://garm.example.com/webhooks/a4dd5f41-8e1e-42a7-af53-c0ba5ff6b0b3 | | Minimum Job Age Backoff | 30 | -| Version | v0.1.5 | +| Version | v0.1.6 | +-------------------------+----------------------------------------------------------------------------+ ``` From e6919e36a535d42ca834bc68e2e81ff20d46648a Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 20 Jul 2025 16:18:13 +0000 Subject: [PATCH 127/179] Bump provider versions Signed-off-by: Gabriel Adrian Samfira --- Dockerfile | 40 ++++++++++++++++++++++++++-------------- 1 file changed, 26 insertions(+), 14 deletions(-) diff --git a/Dockerfile b/Dockerfile index 44f96a59..13314403 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,14 @@ FROM docker.io/golang:alpine AS builder ARG GARM_REF +ARG AZURE_REF=v0.1.2 +ARG OPENSTACK_REF=v0.1.2 +ARG LXD_REF=v0.1.2 +ARG INCUS_REF=v0.1.2 +ARG AWS_REF=v0.1.3 +ARG GCP_REF=v0.1.2 +ARG EQUINIX_REF=v0.1.2 +ARG K8S_REF=v0.3.2 +ARG LINODE_REF=v0.2.0 LABEL stage=builder @@ -8,13 +17,14 @@ RUN git config --global --add safe.directory /build ADD . /build/garm RUN cd /build/garm && git checkout ${GARM_REF} -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-azure /build/garm-provider-azure -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-openstack /build/garm-provider-openstack -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-lxd /build/garm-provider-lxd -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-incus /build/garm-provider-incus -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-aws /build/garm-provider-aws -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-gcp /build/garm-provider-gcp -RUN git clone --depth 1 --branch v0.1.0 https://github.com/cloudbase/garm-provider-equinix /build/garm-provider-equinix +RUN git clone --depth 1 --branch ${AZURE_REF} https://github.com/cloudbase/garm-provider-azure /build/garm-provider-azure +RUN git clone --depth 1 --branch ${OPENSTACK_REF} https://github.com/cloudbase/garm-provider-openstack /build/garm-provider-openstack +RUN git clone --depth 1 --branch ${LXD_REF} https://github.com/cloudbase/garm-provider-lxd /build/garm-provider-lxd +RUN git clone --depth 1 --branch ${INCUS_REF} https://github.com/cloudbase/garm-provider-incus /build/garm-provider-incus +RUN git clone --depth 1 --branch ${AWS_REF} https://github.com/cloudbase/garm-provider-aws /build/garm-provider-aws +RUN git clone --depth 1 --branch ${GCP_REF} https://github.com/cloudbase/garm-provider-gcp /build/garm-provider-gcp +RUN git clone --depth 1 --branch ${EQUINIX_REF} https://github.com/cloudbase/garm-provider-equinix /build/garm-provider-equinix +RUN git clone --depth 1 --branch ${LINODE_REF} 
https://github.com/flatcar/garm-provider-linode /build/garm-provider-linode RUN git clone --depth 1 --branch v0.3.1 https://github.com/mercedes-benz/garm-provider-k8s /build/garm-provider-k8s @@ -23,13 +33,14 @@ RUN cd /build/garm && go build -o /bin/garm \ -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ /build/garm/cmd/garm && upx /bin/garm RUN mkdir -p /opt/garm/providers.d -RUN cd /build/garm-provider-azure && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-azure . && upx /opt/garm/providers.d/garm-provider-azure -RUN cd /build/garm-provider-openstack && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-openstack . && upx /opt/garm/providers.d/garm-provider-openstack -RUN cd /build/garm-provider-lxd && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-lxd . && upx /opt/garm/providers.d/garm-provider-lxd -RUN cd /build/garm-provider-incus && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-incus . && upx /opt/garm/providers.d/garm-provider-incus -RUN cd /build/garm-provider-aws && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-aws . && upx /opt/garm/providers.d/garm-provider-aws -RUN cd /build/garm-provider-gcp && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-gcp . && upx /opt/garm/providers.d/garm-provider-gcp -RUN cd /build/garm-provider-equinix && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=v0.1.0" -o /opt/garm/providers.d/garm-provider-equinix . && upx /opt/garm/providers.d/garm-provider-equinix +RUN cd /build/garm-provider-azure && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${AZURE_REF}" -o /opt/garm/providers.d/garm-provider-azure . && upx /opt/garm/providers.d/garm-provider-azure +RUN cd /build/garm-provider-openstack && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${OPENSTACK_REF}" -o /opt/garm/providers.d/garm-provider-openstack . && upx /opt/garm/providers.d/garm-provider-openstack +RUN cd /build/garm-provider-lxd && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${LXD_REF}" -o /opt/garm/providers.d/garm-provider-lxd . && upx /opt/garm/providers.d/garm-provider-lxd +RUN cd /build/garm-provider-incus && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${INCUS_REF}" -o /opt/garm/providers.d/garm-provider-incus . && upx /opt/garm/providers.d/garm-provider-incus +RUN cd /build/garm-provider-aws && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${AWS_REF}" -o /opt/garm/providers.d/garm-provider-aws . && upx /opt/garm/providers.d/garm-provider-aws +RUN cd /build/garm-provider-gcp && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${GCP_REF}" -o /opt/garm/providers.d/garm-provider-gcp . 
&& upx /opt/garm/providers.d/garm-provider-gcp +RUN cd /build/garm-provider-equinix && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${EQUINIX_REF}" -o /opt/garm/providers.d/garm-provider-equinix . && upx /opt/garm/providers.d/garm-provider-equinix +RUN cd /build/garm-provider-linode && go build -ldflags="-linkmode external -extldflags '-static' -s -w" -o /opt/garm/providers.d/garm-provider-equinix . && upx /opt/garm/providers.d/garm-provider-linode RUN cd /build/garm-provider-k8s/cmd/garm-provider-k8s && go build -ldflags="-linkmode external -extldflags '-static' -s -w" -o /opt/garm/providers.d/garm-provider-k8s . && upx /opt/garm/providers.d/garm-provider-k8s @@ -43,6 +54,7 @@ COPY --from=builder /opt/garm/providers.d/garm-provider-azure /opt/garm/provider COPY --from=builder /opt/garm/providers.d/garm-provider-aws /opt/garm/providers.d/garm-provider-aws COPY --from=builder /opt/garm/providers.d/garm-provider-gcp /opt/garm/providers.d/garm-provider-gcp COPY --from=builder /opt/garm/providers.d/garm-provider-equinix /opt/garm/providers.d/garm-provider-equinix +COPY --from=builder /opt/garm/providers.d/garm-provider-linode /opt/garm/providers.d/garm-provider-linode COPY --from=builder /opt/garm/providers.d/garm-provider-k8s /opt/garm/providers.d/garm-provider-k8s COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ From 87a13e377c94b2ea1a6472cb229f5466b57394f9 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 20 Jul 2025 18:13:24 +0000 Subject: [PATCH 128/179] Fix typo Copy-paste typo. Signed-off-by: Gabriel Adrian Samfira --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 13314403..66e78962 100644 --- a/Dockerfile +++ b/Dockerfile @@ -40,7 +40,7 @@ RUN cd /build/garm-provider-incus && go build -ldflags="-linkmode external -extl RUN cd /build/garm-provider-aws && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${AWS_REF}" -o /opt/garm/providers.d/garm-provider-aws . && upx /opt/garm/providers.d/garm-provider-aws RUN cd /build/garm-provider-gcp && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${GCP_REF}" -o /opt/garm/providers.d/garm-provider-gcp . && upx /opt/garm/providers.d/garm-provider-gcp RUN cd /build/garm-provider-equinix && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=${EQUINIX_REF}" -o /opt/garm/providers.d/garm-provider-equinix . && upx /opt/garm/providers.d/garm-provider-equinix -RUN cd /build/garm-provider-linode && go build -ldflags="-linkmode external -extldflags '-static' -s -w" -o /opt/garm/providers.d/garm-provider-equinix . && upx /opt/garm/providers.d/garm-provider-linode +RUN cd /build/garm-provider-linode && go build -ldflags="-linkmode external -extldflags '-static' -s -w" -o /opt/garm/providers.d/garm-provider-linode . && upx /opt/garm/providers.d/garm-provider-linode RUN cd /build/garm-provider-k8s/cmd/garm-provider-k8s && go build -ldflags="-linkmode external -extldflags '-static' -s -w" -o /opt/garm/providers.d/garm-provider-k8s . && upx /opt/garm/providers.d/garm-provider-k8s From 80735ac2eb2532296d500fbf61c1009e530238ae Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 20 Jul 2025 20:20:38 +0000 Subject: [PATCH 129/179] Update docs and deprecate the --all flag Update the docs to reflect the latest stable version and deprecate the --all flag for runner list and pool list. 
Signed-off-by: Gabriel Adrian Samfira --- README.md | 6 ++++-- cmd/garm-cli/cmd/pool.go | 10 ++++------ cmd/garm-cli/cmd/runner.go | 28 ++++++++++++++++++++-------- doc/gitea.md | 2 +- doc/quickstart.md | 14 +++++++------- doc/using_garm.md | 8 ++++---- 6 files changed, 40 insertions(+), 28 deletions(-) diff --git a/README.md b/README.md index 92664859..5d09135f 100644 --- a/README.md +++ b/README.md @@ -48,9 +48,11 @@ Here is a brief architectural diagram of how pools work and how GARM reacts to w **Scale sets** work differently. While pools (as they are defined in GARM) rely on webhooks to know when a job was started and GARM needs to internally make the right decission in terms of which pool should handle that runner, scale sets have a lot of the scheduling and decission making logic done in GitHub itself. -:warning: **Important note**: The README and documentation in the `main` branch are relevant to the not yet released code that is present in `main`. Following the documentation from the `main` branch for a stable release of GARM, may lead to errors. To view the documentation for the latest stable release, please switch to the appropriate tag. For information about setting up `v0.1.5`, please refer to the [v0.1.5 tag](https://github.com/cloudbase/garm/tree/v0.1.5). +> [!IMPORTANT] +> The README and documentation in the `main` branch are relevant to the not yet released code that is present in `main`. Following the documentation from the `main` branch for a stable release of GARM, may lead to errors. To view the documentation for the latest stable release, please switch to the appropriate tag. For information about setting up `v0.1.6`, please refer to the [v0.1.6 tag](https://github.com/cloudbase/garm/tree/v0.1.6). -:warning: **Important note**: The `main` branch holds the latest code and is not guaranteed to be stable. If you are looking for a stable release, please check the releases page. If you plan to use the `main` branch, please do so on a new instance. Do not upgrade from a stable release to `main`. +> [!CAUTION] +> The `main` branch holds the latest code and is not guaranteed to be stable. If you are looking for a stable release, please check the releases page. If you plan to use the `main` branch, please do so on a new instance. Do not upgrade from a stable release to `main`. 
## Join us on slack diff --git a/cmd/garm-cli/cmd/pool.go b/cmd/garm-cli/cmd/pool.go index 0c667c4a..445801a6 100644 --- a/cmd/garm-cli/cmd/pool.go +++ b/cmd/garm-cli/cmd/pool.go @@ -128,12 +128,9 @@ Example: listEnterprisePoolsReq := apiClientEnterprises.NewListEnterprisePoolsParams() listEnterprisePoolsReq.EnterpriseID = poolEnterprise response, err = apiCli.Enterprises.ListEnterprisePools(listEnterprisePoolsReq, authToken) - } else if cmd.Flags().Changed("all") { + } else { listPoolsReq := apiClientPools.NewListPoolsParams() response, err = apiCli.Pools.ListPools(listPoolsReq, authToken) - } else { - cmd.Help() //nolint - os.Exit(0) } default: cmd.Help() //nolint @@ -409,11 +406,12 @@ func init() { poolListCmd.Flags().StringVarP(&poolRepository, "repo", "r", "", "List all pools within this repository.") poolListCmd.Flags().StringVarP(&poolOrganization, "org", "o", "", "List all pools within this organization.") poolListCmd.Flags().StringVarP(&poolEnterprise, "enterprise", "e", "", "List all pools within this enterprise.") - poolListCmd.Flags().BoolVarP(&poolAll, "all", "a", false, "List all pools, regardless of org or repo.") + poolListCmd.Flags().BoolVarP(&poolAll, "all", "a", true, "List all pools, regardless of org or repo.") poolListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.") poolListCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.") - poolListCmd.MarkFlagsMutuallyExclusive("repo", "org", "all", "enterprise") + poolListCmd.Flags().MarkDeprecated("all", "all pools are listed by default in the absence of --repo, --org or --enterprise.") + poolListCmd.MarkFlagsMutuallyExclusive("repo", "org", "enterprise", "all") poolUpdateCmd.Flags().StringVar(&poolImage, "image", "", "The provider-specific image name to use for runners in this pool.") poolUpdateCmd.Flags().UintVar(&priority, "priority", 0, "When multiple pools match the same labels, priority dictates the order by which they are returned, in descending order.") diff --git a/cmd/garm-cli/cmd/runner.go b/cmd/garm-cli/cmd/runner.go index adee2965..44a7b8df 100644 --- a/cmd/garm-cli/cmd/runner.go +++ b/cmd/garm-cli/cmd/runner.go @@ -104,23 +104,32 @@ Example: response, err = apiCli.Instances.ListPoolInstances(listPoolInstancesReq, authToken) case 0: if cmd.Flags().Changed("repo") { + runnerRepo, resErr := resolveRepository(runnerRepository, endpointName) + if resErr != nil { + return resErr + } listRepoInstancesReq := apiClientRepos.NewListRepoInstancesParams() - listRepoInstancesReq.RepoID = runnerRepository + listRepoInstancesReq.RepoID = runnerRepo response, err = apiCli.Repositories.ListRepoInstances(listRepoInstancesReq, authToken) } else if cmd.Flags().Changed("org") { + runnerOrg, resErr := resolveOrganization(runnerOrganization, endpointName) + if resErr != nil { + return resErr + } listOrgInstancesReq := apiClientOrgs.NewListOrgInstancesParams() - listOrgInstancesReq.OrgID = runnerOrganization + listOrgInstancesReq.OrgID = runnerOrg response, err = apiCli.Organizations.ListOrgInstances(listOrgInstancesReq, authToken) } else if cmd.Flags().Changed("enterprise") { + runnerEnt, resErr := resolveEnterprise(runnerEnterprise, endpointName) + if resErr != nil { + return resErr + } listEnterpriseInstancesReq := apiClientEnterprises.NewListEnterpriseInstancesParams() - listEnterpriseInstancesReq.EnterpriseID = runnerEnterprise + listEnterpriseInstancesReq.EnterpriseID = runnerEnt response, 
err = apiCli.Enterprises.ListEnterpriseInstances(listEnterpriseInstancesReq, authToken) - } else if cmd.Flags().Changed("all") { + } else { listInstancesReq := apiClientInstances.NewListInstancesParams() response, err = apiCli.Instances.ListInstances(listInstancesReq, authToken) - } else { - cmd.Help() //nolint - os.Exit(0) } default: cmd.Help() //nolint @@ -205,9 +214,12 @@ func init() { runnerListCmd.Flags().StringVarP(&runnerRepository, "repo", "r", "", "List all runners from all pools within this repository.") runnerListCmd.Flags().StringVarP(&runnerOrganization, "org", "o", "", "List all runners from all pools within this organization.") runnerListCmd.Flags().StringVarP(&runnerEnterprise, "enterprise", "e", "", "List all runners from all pools within this enterprise.") - runnerListCmd.Flags().BoolVarP(&runnerAll, "all", "a", false, "List all runners, regardless of org or repo.") + runnerListCmd.Flags().BoolVarP(&runnerAll, "all", "a", true, "List all runners, regardless of org or repo. (deprecated)") runnerListCmd.Flags().BoolVarP(&long, "long", "l", false, "Include additional info.") runnerListCmd.MarkFlagsMutuallyExclusive("repo", "org", "enterprise", "all") + runnerListCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.") + + runnerListCmd.Flags().MarkDeprecated("all", "all runners are listed by default in the absence of --repo, --org or --enterprise.") runnerDeleteCmd.Flags().BoolVarP(&forceRemove, "force-remove-runner", "f", false, "Forcefully remove a runner. If set to true, GARM will ignore provider errors when removing the runner.") runnerDeleteCmd.Flags().BoolVarP(&bypassGHUnauthorized, "bypass-github-unauthorized", "b", false, "Ignore Unauthorized errors from GitHub and proceed with removing runner from provider and DB. This is useful when credentials are no longer valid and you want to remove your runners. Warning, this has the potential to leave orphaned runners in GitHub. You will need to update your credentials to properly consolidate.") diff --git a/doc/gitea.md b/doc/gitea.md index 923d59fd..72d3a202 100644 --- a/doc/gitea.md +++ b/doc/gitea.md @@ -346,7 +346,7 @@ garm-cli pool add \ You should now see 1 runner being spun up in LXD. You can check the status of the pool by doing: ```bash -garm-cli runner ls -a +garm-cli runner ls ``` To get more details about the runner, run: diff --git a/doc/quickstart.md b/doc/quickstart.md index 603bd12c..66afead3 100644 --- a/doc/quickstart.md +++ b/doc/quickstart.md @@ -133,7 +133,7 @@ docker run -d \ -p 80:80 \ -v /etc/garm:/etc/garm:rw \ -v /var/snap/lxd/common/lxd/unix.socket:/var/snap/lxd/common/lxd/unix.socket:rw \ - ghcr.io/cloudbase/garm:v0.1.4 + ghcr.io/cloudbase/garm:v0.1.6 ``` You will notice that we also mounted the LXD unix socket from the host inside the container where the config you pasted expects to find it. If you plan to use an external provider that does not need to connect to LXD over a unix socket, feel free to remove that mount. @@ -166,7 +166,7 @@ Adding the `garm` user to the LXD group will allow it to connect to the LXD unix Next, download the latest release from the [releases page](https://github.com/cloudbase/garm/releases). 
```bash -wget -q -O - https://github.com/cloudbase/garm/releases/download/v0.1.5/garm-linux-amd64.tgz | tar xzf - -C /usr/local/bin/ +wget -q -O - https://github.com/cloudbase/garm/releases/download/v0.1.6/garm-linux-amd64.tgz | tar xzf - -C /usr/local/bin/ ``` We'll be running under an unprivileged user. If we want to be able to listen on any port under `1024`, we'll have to set some capabilities on the binary: @@ -199,7 +199,7 @@ Copy the sample `systemd` service file: ```bash wget -O /etc/systemd/system/garm.service \ - https://raw.githubusercontent.com/cloudbase/garm/v0.1.5/contrib/garm.service + https://raw.githubusercontent.com/cloudbase/garm/v0.1.6/contrib/garm.service ``` Reload the `systemd` daemon and start the service: @@ -234,7 +234,7 @@ Before we can start using GARM, we need initialize it. This will create the `adm To initialize GARM, we'll use the `garm-cli` tool. You can download the latest release from the [releases page](https://github.com/cloudbase/garm/releases): ```bash -wget -q -O - https://github.com/cloudbase/garm/releases/download/v0.1.5/garm-cli-linux-amd64.tgz | tar xzf - -C /usr/local/bin/ +wget -q -O - https://github.com/cloudbase/garm/releases/download/v0.1.6/garm-cli-linux-amd64.tgz | tar xzf - -C /usr/local/bin/ ``` Now we can initialize GARM: @@ -502,7 +502,7 @@ gabriel@rossak:~$ garm-cli pool add \ If we list the pool we should see it: ```bash -gabriel@rock:~$ garm-cli pool ls -a +gabriel@rock:~$ garm-cli pool ls +--------------------------------------+---------------------------+--------------+-----------------+------------------+-------+---------+---------------+----------+ | ID | IMAGE | FLAVOR | TAGS | BELONGS TO | LEVEL | ENABLED | RUNNER PREFIX | PRIORITY | +--------------------------------------+---------------------------+--------------+-----------------+------------------+-------+---------+---------------+----------+ @@ -517,7 +517,7 @@ For the purposes of this guide, we'll increase it to 1 so we have a runner creat First, list current runners: ```bash -gabriel@rossak:~$ garm-cli runner ls -a +gabriel@rossak:~$ garm-cli runner ls +----+------+--------+---------------+---------+ | NR | NAME | STATUS | RUNNER STATUS | POOL ID | +----+------+--------+---------------+---------+ @@ -554,7 +554,7 @@ gabriel@rossak:~$ garm-cli pool update 344e4a72-2035-4a18-a3d5-87bd3874b56c --mi Now if we list the runners: ```bash -gabriel@rossak:~$ garm-cli runner ls -a +gabriel@rossak:~$ garm-cli runner ls +----+-------------------+----------------+---------------+--------------------------------------+ | NR | NAME | STATUS | RUNNER STATUS | POOL ID | +----+-------------------+----------------+---------------+--------------------------------------+ diff --git a/doc/using_garm.md b/doc/using_garm.md index ba8cf2d6..e7758410 100644 --- a/doc/using_garm.md +++ b/doc/using_garm.md @@ -66,7 +66,7 @@ garm-cli controller show | Webhook Base URL | https://garm.example.com/webhooks | | Controller Webhook URL | https://garm.example.com/webhooks/a4dd5f41-8e1e-42a7-af53-c0ba5ff6b0b3 | | Minimum Job Age Backoff | 30 | -| Version | v0.1.5 | +| Version | v0.1.6 | +-------------------------+----------------------------------------------------------------------------+ ``` @@ -567,10 +567,10 @@ ubuntu@garm:~$ garm-cli pool list --repo=be3a0673-56af-4395-9ebf-4521fea67567 If you want to list pools for an organization or enterprise, you can use the `--org` or `--enterprise` options respectively. 
-You can also list **all** pools from all configureg github entities by using the `--all` option. +In the absence or the `--repo`, `--org` or `--enterprise` options, the command will list all pools in GARM, regardless of the entity they belong to. ```bash -ubuntu@garm:~/garm$ garm-cli pool list --all +ubuntu@garm:~/garm$ garm-cli pool list +--------------------------------------+---------------------------+--------------+-----------------------------------------+------------------+-------+---------+---------------+----------+ | ID | IMAGE | FLAVOR | TAGS | BELONGS TO | LEVEL | ENABLED | RUNNER PREFIX | PRIORITY | +--------------------------------------+---------------------------+--------------+-----------------------------------------+------------------+-------+---------+---------------+----------+ @@ -705,7 +705,7 @@ Awesome! This runner will be able to pick up jobs that match the labels we've se You can list runners for a pool, for a repository, organization or enterprise, or for all of them. To list all runners, you can run: ```bash -ubuntu@garm:~$ garm-cli runner list --all +ubuntu@garm:~$ garm-cli runner list +----+---------------------+---------+---------------+--------------------------------------+ | NR | NAME | STATUS | RUNNER STATUS | POOL ID | +----+---------------------+---------+---------------+--------------------------------------+ From 567c465ad79383686e12abc7637858f4a6cdc745 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Jul 2025 07:58:08 +0000 Subject: [PATCH 130/179] Bump github.com/cloudbase/garm-provider-common from 0.1.5 to 0.1.6 Bumps [github.com/cloudbase/garm-provider-common](https://github.com/cloudbase/garm-provider-common) from 0.1.5 to 0.1.6. - [Release notes](https://github.com/cloudbase/garm-provider-common/releases) - [Commits](https://github.com/cloudbase/garm-provider-common/compare/v0.1.5...v0.1.6) --- updated-dependencies: - dependency-name: github.com/cloudbase/garm-provider-common dependency-version: 0.1.6 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 ++-- vendor/modules.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/go.mod b/go.mod index 56175f82..1cfe5c05 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ toolchain go1.23.6 require ( github.com/BurntSushi/toml v1.5.0 github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 - github.com/cloudbase/garm-provider-common v0.1.5 + github.com/cloudbase/garm-provider-common v0.1.6 github.com/felixge/httpsnoop v1.0.4 github.com/go-openapi/errors v0.22.1 github.com/go-openapi/runtime v0.28.0 diff --git a/go.sum b/go.sum index d936ccf8..7de7ba80 100644 --- a/go.sum +++ b/go.sum @@ -19,8 +19,8 @@ github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObk github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04= github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= -github.com/cloudbase/garm-provider-common v0.1.5 h1:aJL646l+VnZceQ2grbDYhWfxYpaQR2/QsUSD76kSZVs= -github.com/cloudbase/garm-provider-common v0.1.5/go.mod h1:2O51WbcfqRx5fDHyyJgIFq7KdTZZnefsM+aoOchyleU= +github.com/cloudbase/garm-provider-common v0.1.6 h1:wLqolRkUD2Z4rzuBLDs2exL1Aq+eJ5RBVnRvk5JP6fs= +github.com/cloudbase/garm-provider-common v0.1.6/go.mod h1:2O51WbcfqRx5fDHyyJgIFq7KdTZZnefsM+aoOchyleU= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= diff --git a/vendor/modules.txt b/vendor/modules.txt index b060fed7..95e5f35b 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -21,7 +21,7 @@ github.com/cespare/xxhash/v2 # github.com/chzyer/readline v1.5.1 ## explicit; go 1.15 github.com/chzyer/readline -# github.com/cloudbase/garm-provider-common v0.1.5 +# github.com/cloudbase/garm-provider-common v0.1.6 ## explicit; go 1.23.0 github.com/cloudbase/garm-provider-common/defaults github.com/cloudbase/garm-provider-common/errors From f4892be193182e02d41f7ee8479591c2a4ace1ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 24 Jul 2025 06:47:05 +0000 Subject: [PATCH 131/179] Bump gorm.io/gorm from 1.30.0 to 1.30.1 Bumps [gorm.io/gorm](https://github.com/go-gorm/gorm) from 1.30.0 to 1.30.1. - [Release notes](https://github.com/go-gorm/gorm/releases) - [Commits](https://github.com/go-gorm/gorm/compare/v1.30.0...v1.30.1) --- updated-dependencies: - dependency-name: gorm.io/gorm dependency-version: 1.30.1 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 ++-- vendor/gorm.io/gorm/callbacks/create.go | 26 ++++++++++++-------- vendor/gorm.io/gorm/generics.go | 2 +- vendor/gorm.io/gorm/gorm.go | 8 +++++++ vendor/gorm.io/gorm/migrator/migrator.go | 14 +++++++++-- vendor/gorm.io/gorm/schema/field.go | 30 ++++++++++++++++++------ vendor/gorm.io/gorm/schema/schema.go | 13 +++++++--- vendor/gorm.io/gorm/statement.go | 10 +++++++- vendor/modules.txt | 2 +- 10 files changed, 83 insertions(+), 28 deletions(-) diff --git a/go.mod b/go.mod index 1cfe5c05..41db9109 100644 --- a/go.mod +++ b/go.mod @@ -37,7 +37,7 @@ require ( gorm.io/datatypes v1.2.6 gorm.io/driver/mysql v1.6.0 gorm.io/driver/sqlite v1.6.0 - gorm.io/gorm v1.30.0 + gorm.io/gorm v1.30.1 ) require ( diff --git a/go.sum b/go.sum index 7de7ba80..b0557ac9 100644 --- a/go.sum +++ b/go.sum @@ -229,5 +229,5 @@ gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ= gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8= gorm.io/driver/sqlserver v1.6.0 h1:VZOBQVsVhkHU/NzNhRJKoANt5pZGQAS1Bwc6m6dgfnc= gorm.io/driver/sqlserver v1.6.0/go.mod h1:WQzt4IJo/WHKnckU9jXBLMJIVNMVeTu25dnOzehntWw= -gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs= -gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= +gorm.io/gorm v1.30.1 h1:lSHg33jJTBxs2mgJRfRZeLDG+WZaHYCk3Wtfl6Ngzo4= +gorm.io/gorm v1.30.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= diff --git a/vendor/gorm.io/gorm/callbacks/create.go b/vendor/gorm.io/gorm/callbacks/create.go index d8701f51..cb8429b3 100644 --- a/vendor/gorm.io/gorm/callbacks/create.go +++ b/vendor/gorm.io/gorm/callbacks/create.go @@ -53,9 +53,13 @@ func Create(config *Config) func(db *gorm.DB) { if _, ok := db.Statement.Clauses["RETURNING"]; !ok { fromColumns := make([]clause.Column, 0, len(db.Statement.Schema.FieldsWithDefaultDBValue)) for _, field := range db.Statement.Schema.FieldsWithDefaultDBValue { - fromColumns = append(fromColumns, clause.Column{Name: field.DBName}) + if field.Readable { + fromColumns = append(fromColumns, clause.Column{Name: field.DBName}) + } + } + if len(fromColumns) > 0 { + db.Statement.AddClause(clause.Returning{Columns: fromColumns}) } - db.Statement.AddClause(clause.Returning{Columns: fromColumns}) } } } @@ -122,6 +126,16 @@ func Create(config *Config) func(db *gorm.DB) { pkFieldName = "@id" ) + if db.Statement.Schema != nil { + if db.Statement.Schema.PrioritizedPrimaryField == nil || + !db.Statement.Schema.PrioritizedPrimaryField.HasDefaultValue || + !db.Statement.Schema.PrioritizedPrimaryField.Readable { + return + } + pkField = db.Statement.Schema.PrioritizedPrimaryField + pkFieldName = db.Statement.Schema.PrioritizedPrimaryField.DBName + } + insertID, err := result.LastInsertId() insertOk := err == nil && insertID > 0 @@ -132,14 +146,6 @@ func Create(config *Config) func(db *gorm.DB) { return } - if db.Statement.Schema != nil { - if db.Statement.Schema.PrioritizedPrimaryField == nil || !db.Statement.Schema.PrioritizedPrimaryField.HasDefaultValue { - return - } - pkField = db.Statement.Schema.PrioritizedPrimaryField - pkFieldName = db.Statement.Schema.PrioritizedPrimaryField.DBName - } - // append @id column with value for auto-increment primary key // the @id value is correct, when: 1. without setting auto-increment primary key, 2. 
database AutoIncrementIncrement = 1 switch values := db.Statement.Dest.(type) { diff --git a/vendor/gorm.io/gorm/generics.go b/vendor/gorm.io/gorm/generics.go index ad2d063f..f3c3e553 100644 --- a/vendor/gorm.io/gorm/generics.go +++ b/vendor/gorm.io/gorm/generics.go @@ -567,7 +567,7 @@ func (g execG[T]) First(ctx context.Context) (T, error) { func (g execG[T]) Scan(ctx context.Context, result interface{}) error { var r T - err := g.g.apply(ctx).Model(r).Find(&result).Error + err := g.g.apply(ctx).Model(r).Find(result).Error return err } diff --git a/vendor/gorm.io/gorm/gorm.go b/vendor/gorm.io/gorm/gorm.go index 67889262..6619f071 100644 --- a/vendor/gorm.io/gorm/gorm.go +++ b/vendor/gorm.io/gorm/gorm.go @@ -137,6 +137,14 @@ func Open(dialector Dialector, opts ...Option) (db *DB, err error) { return isConfig && !isConfig2 }) + if len(opts) > 0 { + if c, ok := opts[0].(*Config); ok { + config = c + } else { + opts = append([]Option{config}, opts...) + } + } + var skipAfterInitialize bool for _, opt := range opts { if opt != nil { diff --git a/vendor/gorm.io/gorm/migrator/migrator.go b/vendor/gorm.io/gorm/migrator/migrator.go index cec4e30f..50a36d10 100644 --- a/vendor/gorm.io/gorm/migrator/migrator.go +++ b/vendor/gorm.io/gorm/migrator/migrator.go @@ -474,7 +474,6 @@ func (m Migrator) MigrateColumn(value interface{}, field *schema.Field, columnTy // found, smart migrate fullDataType := strings.TrimSpace(strings.ToLower(m.DB.Migrator().FullDataTypeOf(field).SQL)) realDataType := strings.ToLower(columnType.DatabaseTypeName()) - var ( alterColumn bool isSameType = fullDataType == realDataType @@ -513,8 +512,19 @@ func (m Migrator) MigrateColumn(value interface{}, field *schema.Field, columnTy } } } + } - // check precision + // check precision + if realDataType == "decimal" || realDataType == "numeric" && + regexp.MustCompile(realDataType+`\(.*\)`).FindString(fullDataType) != "" { // if realDataType has no precision,ignore + precision, scale, ok := columnType.DecimalSize() + if ok { + if !strings.HasPrefix(fullDataType, fmt.Sprintf("%s(%d,%d)", realDataType, precision, scale)) && + !strings.HasPrefix(fullDataType, fmt.Sprintf("%s(%d)", realDataType, precision)) { + alterColumn = true + } + } + } else { if precision, _, ok := columnType.DecimalSize(); ok && int64(field.Precision) != precision { if regexp.MustCompile(fmt.Sprintf("[^0-9]%d[^0-9]", field.Precision)).MatchString(m.DataTypeOf(field)) { alterColumn = true diff --git a/vendor/gorm.io/gorm/schema/field.go b/vendor/gorm.io/gorm/schema/field.go index a6ff1a72..67e60f70 100644 --- a/vendor/gorm.io/gorm/schema/field.go +++ b/vendor/gorm.io/gorm/schema/field.go @@ -448,21 +448,30 @@ func (schema *Schema) ParseField(fieldStruct reflect.StructField) *Field { } // create valuer, setter when parse struct -func (field *Field) setupValuerAndSetter() { +func (field *Field) setupValuerAndSetter(modelType reflect.Type) { // Setup NewValuePool field.setupNewValuePool() // ValueOf returns field's value and if it is zero fieldIndex := field.StructField.Index[0] switch { - case len(field.StructField.Index) == 1 && fieldIndex > 0: - field.ValueOf = func(ctx context.Context, value reflect.Value) (interface{}, bool) { - fieldValue := reflect.Indirect(value).Field(fieldIndex) + case len(field.StructField.Index) == 1 && fieldIndex >= 0: + field.ValueOf = func(ctx context.Context, v reflect.Value) (interface{}, bool) { + v = reflect.Indirect(v) + if v.Type() != modelType { + fieldValue := v.FieldByName(field.Name) + return fieldValue.Interface(), 
fieldValue.IsZero() + } + fieldValue := v.Field(fieldIndex) return fieldValue.Interface(), fieldValue.IsZero() } default: field.ValueOf = func(ctx context.Context, v reflect.Value) (interface{}, bool) { v = reflect.Indirect(v) + if v.Type() != modelType { + fieldValue := v.FieldByName(field.Name) + return fieldValue.Interface(), fieldValue.IsZero() + } for _, fieldIdx := range field.StructField.Index { if fieldIdx >= 0 { v = v.Field(fieldIdx) @@ -504,13 +513,20 @@ func (field *Field) setupValuerAndSetter() { // ReflectValueOf returns field's reflect value switch { - case len(field.StructField.Index) == 1 && fieldIndex > 0: - field.ReflectValueOf = func(ctx context.Context, value reflect.Value) reflect.Value { - return reflect.Indirect(value).Field(fieldIndex) + case len(field.StructField.Index) == 1 && fieldIndex >= 0: + field.ReflectValueOf = func(ctx context.Context, v reflect.Value) reflect.Value { + v = reflect.Indirect(v) + if v.Type() != modelType { + return v.FieldByName(field.Name) + } + return v.Field(fieldIndex) } default: field.ReflectValueOf = func(ctx context.Context, v reflect.Value) reflect.Value { v = reflect.Indirect(v) + if v.Type() != modelType { + return v.FieldByName(field.Name) + } for idx, fieldIdx := range field.StructField.Index { if fieldIdx >= 0 { v = v.Field(fieldIdx) diff --git a/vendor/gorm.io/gorm/schema/schema.go b/vendor/gorm.io/gorm/schema/schema.go index db236797..2a5c28e2 100644 --- a/vendor/gorm.io/gorm/schema/schema.go +++ b/vendor/gorm.io/gorm/schema/schema.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "go/ast" + "path" "reflect" "strings" "sync" @@ -247,7 +248,7 @@ func ParseWithSpecialTableName(dest interface{}, cacheStore *sync.Map, namer Nam schema.FieldsByBindName[bindName] = field } - field.setupValuerAndSetter() + field.setupValuerAndSetter(modelType) } prioritizedPrimaryField := schema.LookUpField("id") @@ -313,8 +314,14 @@ func ParseWithSpecialTableName(dest interface{}, cacheStore *sync.Map, namer Nam for _, cbName := range callbackTypes { if methodValue := callBackToMethodValue(modelValue, cbName); methodValue.IsValid() { switch methodValue.Type().String() { - case "func(*gorm.DB) error": // TODO hack - reflect.Indirect(reflect.ValueOf(schema)).FieldByName(string(cbName)).SetBool(true) + case "func(*gorm.DB) error": + expectedPkgPath := path.Dir(reflect.TypeOf(schema).Elem().PkgPath()) + if inVarPkg := methodValue.Type().In(0).Elem().PkgPath(); inVarPkg == expectedPkgPath { + reflect.Indirect(reflect.ValueOf(schema)).FieldByName(string(cbName)).SetBool(true) + } else { + logger.Default.Warn(context.Background(), "In model %v, the hook function `%v(*gorm.DB) error` has an incorrect parameter type. The expected parameter type is `%v`, but the provided type is `%v`.", schema, cbName, expectedPkgPath, inVarPkg) + // PASS + } default: logger.Default.Warn(context.Background(), "Model %v don't match %vInterface, should be `%v(*gorm.DB) error`. 
Please see https://gorm.io/docs/hooks.html", schema, cbName, cbName) } diff --git a/vendor/gorm.io/gorm/statement.go b/vendor/gorm.io/gorm/statement.go index c6183724..ba5d3f18 100644 --- a/vendor/gorm.io/gorm/statement.go +++ b/vendor/gorm.io/gorm/statement.go @@ -341,7 +341,9 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] if where, ok := cs.Expression.(clause.Where); ok { if len(where.Exprs) == 1 { if orConds, ok := where.Exprs[0].(clause.OrConditions); ok { - where.Exprs[0] = clause.AndConditions(orConds) + if len(orConds.Exprs) == 1 { + where.Exprs[0] = clause.AndConditions(orConds) + } } } conds = append(conds, clause.And(where.Exprs...)) @@ -362,6 +364,9 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] for _, key := range keys { column := clause.Column{Name: key, Table: curTable} + if strings.Contains(key, ".") { + column = clause.Column{Name: key} + } conds = append(conds, clause.Eq{Column: column, Value: v[key]}) } case map[string]interface{}: @@ -374,6 +379,9 @@ func (stmt *Statement) BuildCondition(query interface{}, args ...interface{}) [] for _, key := range keys { reflectValue := reflect.Indirect(reflect.ValueOf(v[key])) column := clause.Column{Name: key, Table: curTable} + if strings.Contains(key, ".") { + column = clause.Column{Name: key} + } switch reflectValue.Kind() { case reflect.Slice, reflect.Array: if _, ok := v[key].(driver.Valuer); ok { diff --git a/vendor/modules.txt b/vendor/modules.txt index 95e5f35b..71b2f88c 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -364,7 +364,7 @@ gorm.io/driver/mysql # gorm.io/driver/sqlite v1.6.0 ## explicit; go 1.20 gorm.io/driver/sqlite -# gorm.io/gorm v1.30.0 +# gorm.io/gorm v1.30.1 ## explicit; go 1.18 gorm.io/gorm gorm.io/gorm/callbacks From 7817d205164d6811e8c1666fde01bed9192d5f44 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Jul 2025 08:04:00 +0000 Subject: [PATCH 132/179] Bump github.com/jedib0t/go-pretty/v6 from 6.6.7 to 6.6.8 Bumps [github.com/jedib0t/go-pretty/v6](https://github.com/jedib0t/go-pretty) from 6.6.7 to 6.6.8. - [Release notes](https://github.com/jedib0t/go-pretty/releases) - [Commits](https://github.com/jedib0t/go-pretty/compare/v6.6.7...v6.6.8) --- updated-dependencies: - dependency-name: github.com/jedib0t/go-pretty/v6 dependency-version: 6.6.8 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 ++-- .../jedib0t/go-pretty/v6/text/escape_seq_parser.go | 10 ++++++++++ vendor/modules.txt | 2 +- 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/go.mod b/go.mod index 41db9109..ffe2c7ed 100644 --- a/go.mod +++ b/go.mod @@ -19,7 +19,7 @@ require ( github.com/gorilla/handlers v1.5.2 github.com/gorilla/mux v1.8.1 github.com/gorilla/websocket v1.5.4-0.20240702125206-a62d9d2a8413 - github.com/jedib0t/go-pretty/v6 v6.6.7 + github.com/jedib0t/go-pretty/v6 v6.6.8 github.com/juju/clock v1.1.1 github.com/juju/retry v1.0.1 github.com/manifoldco/promptui v0.9.0 diff --git a/go.sum b/go.sum index b0557ac9..f255d308 100644 --- a/go.sum +++ b/go.sum @@ -87,8 +87,8 @@ github.com/jackc/pgx/v5 v5.5.5 h1:amBjrZVmksIdNjxGW/IiIMzxMKZFelXbUoPNb+8sjQw= github.com/jackc/pgx/v5 v5.5.5/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A= github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= -github.com/jedib0t/go-pretty/v6 v6.6.7 h1:m+LbHpm0aIAPLzLbMfn8dc3Ht8MW7lsSO4MPItz/Uuo= -github.com/jedib0t/go-pretty/v6 v6.6.7/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU= +github.com/jedib0t/go-pretty/v6 v6.6.8 h1:JnnzQeRz2bACBobIaa/r+nqjvws4yEhcmaZ4n1QzsEc= +github.com/jedib0t/go-pretty/v6 v6.6.8/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= diff --git a/vendor/github.com/jedib0t/go-pretty/v6/text/escape_seq_parser.go b/vendor/github.com/jedib0t/go-pretty/v6/text/escape_seq_parser.go index ab618acc..c6ffa437 100644 --- a/vendor/github.com/jedib0t/go-pretty/v6/text/escape_seq_parser.go +++ b/vendor/github.com/jedib0t/go-pretty/v6/text/escape_seq_parser.go @@ -78,6 +78,16 @@ func (s *escSeqParser) Consume(char rune) { if s.inEscSeq { s.escapeSeq += string(char) + // --- FIX for OSC 8 hyperlinks (e.g. 
\x1b]8;;url\x07label\x1b]8;;\x07) + if s.escSeqKind == escSeqKindOSI && + strings.HasPrefix(s.escapeSeq, escapeStartConcealOSI) && + char == '\a' { // BEL + + s.ParseSeq(s.escapeSeq, s.escSeqKind) + s.Reset() + return + } + if s.isEscapeStopRune(char) { s.ParseSeq(s.escapeSeq, s.escSeqKind) s.Reset() diff --git a/vendor/modules.txt b/vendor/modules.txt index 71b2f88c..faf4441b 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -118,7 +118,7 @@ github.com/gorilla/websocket # github.com/inconshreveable/mousetrap v1.1.0 ## explicit; go 1.18 github.com/inconshreveable/mousetrap -# github.com/jedib0t/go-pretty/v6 v6.6.7 +# github.com/jedib0t/go-pretty/v6 v6.6.8 ## explicit; go 1.18 github.com/jedib0t/go-pretty/v6/table github.com/jedib0t/go-pretty/v6/text From 97ef92706babf3473cc18764a7904bebcd76e18e Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Thu, 17 Jul 2025 12:45:35 +0200 Subject: [PATCH 133/179] refactor workflows to enable multiple docker images --- .github/workflows/build-and-push.yml | 23 +++++++++++++---------- .github/workflows/trigger-manual.yml | 19 +++++++++++++++++++ .github/workflows/trigger-nightly.yml | 13 +++++++++++++ 3 files changed, 45 insertions(+), 10 deletions(-) create mode 100644 .github/workflows/trigger-manual.yml create mode 100644 .github/workflows/trigger-nightly.yml diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 475ec651..7f24431e 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -1,14 +1,16 @@ -name: "Build GARM images" +name: "Build and push GARM images" on: - workflow_dispatch: + workflow_call: inputs: push_to_project: description: "Project to build images for" required: true + type: string default: "ghcr.io/cloudbase" ref: description: "Ref to build" required: true + type: string default: "main" schedule: - cron: "0 2 * * *" @@ -24,8 +26,9 @@ jobs: runs-on: ubuntu-latest steps: - name: "Checkout" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: + ref: ${{ inputs.ref }} path: src/github.com/cloudbase/garm fetch-depth: 0 @@ -40,15 +43,12 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push + env: + REGISTRY_INPUT: ${{ inputs.push_to_project }} + GH_REF: ${{ inputs.ref }} + working-directory: src/github.com/cloudbase/garm run: | set -x - REGISTRY_INPUT="${{ github.event.inputs.push_to_project }}" - REF_INPUT="${{ github.event.inputs.ref }}" - - PUSH_TO_PROJECT="${REGISTRY_INPUT:-ghcr.io/cloudbase}" - GH_REF="${REF_INPUT:-main}" - cd src/github.com/cloudbase/garm && git checkout "${GH_REF}" - VERSION=$(git describe --tags --match='v[0-9]*' --always) AZURE_REF=v0.1.0 OPENSTACK_REF=v0.1.0 @@ -69,6 +69,9 @@ jobs: K8S_REF="main" VERSION="nightly" fi + if [ "$GH_REF" == "release/v1" ]; then + VERSION="v0.1" + fi docker buildx build \ --provenance=false \ --platform linux/amd64,linux/arm64 \ diff --git a/.github/workflows/trigger-manual.yml b/.github/workflows/trigger-manual.yml new file mode 100644 index 00000000..faf166d4 --- /dev/null +++ b/.github/workflows/trigger-manual.yml @@ -0,0 +1,19 @@ +name: Manual build of GARM images +on: + workflow_dispatch: + inputs: + push_to_project: + description: "Project to build images for" + required: true + default: "ghcr.io/cloudbase" + ref: + description: "Ref to build" + required: true + default: "main" + +jobs: + call-build-and-push: + uses: ./.github/workflows/build-and-push.yml + with: + push_to_project: ${{ inputs.push_to_project }} + ref: ${{ inputs.ref }} \ No newline at end of file 
diff --git a/.github/workflows/trigger-nightly.yml b/.github/workflows/trigger-nightly.yml new file mode 100644 index 00000000..79c42228 --- /dev/null +++ b/.github/workflows/trigger-nightly.yml @@ -0,0 +1,13 @@ +name: Nightly build of GARM images +on: + schedule: + - cron: "0 2 * * *" + +jobs: + call-build-and-push: + uses: ./.github/workflows/build-and-push.yml + strategy: + matrix: + ref: ["main", "release/v1"] + with: + ref: ${{ matrix.ref }} From 1e8d0d79a6052de814716964802977b7ec38a249 Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Tue, 29 Jul 2025 09:47:29 +0200 Subject: [PATCH 134/179] improvements after review comments --- .github/workflows/build-and-push.yml | 32 +++++++++++++++++++--------- Dockerfile | 2 +- 2 files changed, 23 insertions(+), 11 deletions(-) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 7f24431e..907dd8c1 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -48,17 +48,15 @@ jobs: GH_REF: ${{ inputs.ref }} working-directory: src/github.com/cloudbase/garm run: | + get_gh_latest_release() { + curl -s -L -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "https://api.github.com/repos/$1/latest" \ + | jq -r '.tag_name' + } set -x - VERSION=$(git describe --tags --match='v[0-9]*' --always) - AZURE_REF=v0.1.0 - OPENSTACK_REF=v0.1.0 - LXD_REF=v0.1.0 - INCUS_REF=v0.1.0 - AWS_REF=v0.1.0 - GCP_REF=v0.1.0 - EQUINIX_REF=v0.1.0 - K8S_REF=v0.3.2 if [ "$GH_REF" == "main" ]; then + VERSION="nightly" AZURE_REF="main" OPENSTACK_REF="main" LXD_REF="main" @@ -66,12 +64,26 @@ jobs: AWS_REF="main" GCP_REF="main" EQUINIX_REF="main" + LINODE_REF="main" K8S_REF="main" - VERSION="nightly" + else + VERSION=$(git describe --tags --match='v[0-9]*' --always) + AZURE_REF=$(get_gh_latest_release cloudbase/garm-provider-azure) + OPENSTACK_REF=$(get_gh_latest_release cloudbase/garm-provider-openstack) + LXD_REF=$(get_gh_latest_release cloudbase/garm-provider-lxd) + INCUS_REF=$(get_gh_latest_release cloudbase/garm-provider-incus) + AWS_REF=$(get_gh_latest_release cloudbase/garm-provider-aws) + GCP_REF=$(get_gh_latest_release cloudbase/garm-provider-gcp) + EQUINIX_REF=$(get_gh_latest_release cloudbase/garm-provider-equinix) + LINODE_REF=$(get_gh_latest_release flatcar/garm-provider-linode) + K8S_REF=$(get_gh_latest_release mercedes-benz/garm-provider-k8s) fi if [ "$GH_REF" == "release/v1" ]; then VERSION="v0.1" fi + if [ "$GH_REF" == "release/v2" ]; then + VERSION="v0.2" + fi docker buildx build \ --provenance=false \ --platform linux/amd64,linux/arm64 \ diff --git a/Dockerfile b/Dockerfile index 47723a6b..b7ff032b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,7 +11,7 @@ ARG K8S_REF=v0.3.2 LABEL stage=builder -RUN apk add musl-dev gcc libtool m4 autoconf g++ make libblkid util-linux-dev git linux-headers upx +RUN apk add --no-cache musl-dev gcc libtool m4 autoconf g++ make libblkid util-linux-dev git linux-headers upx RUN git config --global --add safe.directory /build ADD . 
/build/garm From 3687c7fea416c27e6b29fa932472f379338727d2 Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Tue, 29 Jul 2025 09:48:53 +0200 Subject: [PATCH 135/179] activate release/v2 nightly build --- .github/workflows/trigger-nightly.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/trigger-nightly.yml b/.github/workflows/trigger-nightly.yml index 79c42228..133d95ae 100644 --- a/.github/workflows/trigger-nightly.yml +++ b/.github/workflows/trigger-nightly.yml @@ -8,6 +8,6 @@ jobs: uses: ./.github/workflows/build-and-push.yml strategy: matrix: - ref: ["main", "release/v1"] + ref: ["main", "release/v1", "release/v2"] with: ref: ${{ matrix.ref }} From 5152bab1b8840b13cbe6a87eeb6a3859b6430b2d Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Tue, 29 Jul 2025 13:32:42 +0200 Subject: [PATCH 136/179] fix branch names --- .github/workflows/build-and-push.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 907dd8c1..9a466c94 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -78,10 +78,10 @@ jobs: LINODE_REF=$(get_gh_latest_release flatcar/garm-provider-linode) K8S_REF=$(get_gh_latest_release mercedes-benz/garm-provider-k8s) fi - if [ "$GH_REF" == "release/v1" ]; then + if [ "$GH_REF" == "release/v0.1" ]; then VERSION="v0.1" fi - if [ "$GH_REF" == "release/v2" ]; then + if [ "$GH_REF" == "release/v0.2" ]; then VERSION="v0.2" fi docker buildx build \ From be3026e87ceee2266559d1aff2b0aca1f23f67c1 Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Tue, 29 Jul 2025 13:33:22 +0200 Subject: [PATCH 137/179] fix branch names --- .github/workflows/trigger-nightly.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/trigger-nightly.yml b/.github/workflows/trigger-nightly.yml index 133d95ae..b643129a 100644 --- a/.github/workflows/trigger-nightly.yml +++ b/.github/workflows/trigger-nightly.yml @@ -8,6 +8,6 @@ jobs: uses: ./.github/workflows/build-and-push.yml strategy: matrix: - ref: ["main", "release/v1", "release/v2"] + ref: ["main", "release/v0.1", "release/v0.2"] with: ref: ${{ matrix.ref }} From af1c090db59f0444cba749239443a4434116fc35 Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Wed, 30 Jul 2025 09:03:28 +0200 Subject: [PATCH 138/179] transfer providers branch computation inside Dockerfile --- .github/workflows/build-and-push.yml | 32 ---------- Dockerfile | 87 ++++++++++++++++------------ 2 files changed, 49 insertions(+), 70 deletions(-) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 9a466c94..d6520e22 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -48,35 +48,11 @@ jobs: GH_REF: ${{ inputs.ref }} working-directory: src/github.com/cloudbase/garm run: | - get_gh_latest_release() { - curl -s -L -H "Accept: application/vnd.github+json" \ - -H "X-GitHub-Api-Version: 2022-11-28" \ - "https://api.github.com/repos/$1/latest" \ - | jq -r '.tag_name' - } set -x if [ "$GH_REF" == "main" ]; then VERSION="nightly" - AZURE_REF="main" - OPENSTACK_REF="main" - LXD_REF="main" - INCUS_REF="main" - AWS_REF="main" - GCP_REF="main" - EQUINIX_REF="main" - LINODE_REF="main" - K8S_REF="main" else VERSION=$(git describe --tags --match='v[0-9]*' --always) - AZURE_REF=$(get_gh_latest_release cloudbase/garm-provider-azure) - OPENSTACK_REF=$(get_gh_latest_release cloudbase/garm-provider-openstack) - 
LXD_REF=$(get_gh_latest_release cloudbase/garm-provider-lxd) - INCUS_REF=$(get_gh_latest_release cloudbase/garm-provider-incus) - AWS_REF=$(get_gh_latest_release cloudbase/garm-provider-aws) - GCP_REF=$(get_gh_latest_release cloudbase/garm-provider-gcp) - EQUINIX_REF=$(get_gh_latest_release cloudbase/garm-provider-equinix) - LINODE_REF=$(get_gh_latest_release flatcar/garm-provider-linode) - K8S_REF=$(get_gh_latest_release mercedes-benz/garm-provider-k8s) fi if [ "$GH_REF" == "release/v0.1" ]; then VERSION="v0.1" @@ -91,13 +67,5 @@ jobs: --label "org.opencontainers.image.description=GARM ${GH_REF}" \ --label "org.opencontainers.image.licenses=Apache 2.0" \ --build-arg="GARM_REF=${GH_REF}" \ - --build-arg="AZURE_REF=${AZURE_REF}" \ - --build-arg="OPENSTACK_REF=${OPENSTACK_REF}" \ - --build-arg="LXD_REF=${LXD_REF}" \ - --build-arg="INCUS_REF=${INCUS_REF}" \ - --build-arg="AWS_REF=${AWS_REF}" \ - --build-arg="GCP_REF=${GCP_REF}" \ - --build-arg="EQUINIX_REF=${EQUINIX_REF}" \ - --build-arg="K8S_REF=${K8S_REF}" \ -t ${PUSH_TO_PROJECT}/garm:"${VERSION}" \ --push . diff --git a/Dockerfile b/Dockerfile index b7ff032b..f13eb35a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,49 +1,60 @@ FROM docker.io/golang:alpine AS builder ARG GARM_REF -ARG AZURE_REF=v0.1.0 -ARG OPENSTACK_REF=v0.1.0 -ARG LXD_REF=v0.1.0 -ARG INCUS_REF=v0.1.0 -ARG AWS_REF=v0.1.0 -ARG GCP_REF=v0.1.0 -ARG EQUINIX_REF=v0.1.0 -ARG K8S_REF=v0.3.2 LABEL stage=builder -RUN apk add --no-cache musl-dev gcc libtool m4 autoconf g++ make libblkid util-linux-dev git linux-headers upx -RUN git config --global --add safe.directory /build +RUN apk add --no-cache musl-dev gcc libtool m4 autoconf g++ make libblkid util-linux-dev git linux-headers upx curl jq +RUN git config --global --add safe.directory /build && git config --global --add advice.detachedHead false ADD . /build/garm -RUN cd /build/garm && git checkout ${GARM_REF} -RUN git clone --depth 1 --branch ${AZURE_REF} https://github.com/cloudbase/garm-provider-azure /build/garm-provider-azure -RUN git clone --depth 1 --branch ${OPENSTACK_REF} https://github.com/cloudbase/garm-provider-openstack /build/garm-provider-openstack -RUN git clone --depth 1 --branch ${LXD_REF} https://github.com/cloudbase/garm-provider-lxd /build/garm-provider-lxd -RUN git clone --depth 1 --branch ${INCUS_REF} https://github.com/cloudbase/garm-provider-incus /build/garm-provider-incus -RUN git clone --depth 1 --branch ${AWS_REF} https://github.com/cloudbase/garm-provider-aws /build/garm-provider-aws -RUN git clone --depth 1 --branch ${GCP_REF} https://github.com/cloudbase/garm-provider-gcp /build/garm-provider-gcp -RUN git clone --depth 1 --branch ${EQUINIX_REF} https://github.com/cloudbase/garm-provider-equinix /build/garm-provider-equinix -RUN git clone --depth 1 --branch ${K8S_REF} https://github.com/mercedes-benz/garm-provider-k8s /build/garm-provider-k8s - -RUN cd /build/garm && go build -o /bin/garm \ - -tags osusergo,netgo,sqlite_omit_load_extension \ - -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ - /build/garm/cmd/garm && upx /bin/garm -RUN cd /build/garm/cmd/garm-cli && go build -o /bin/garm-cli \ - -tags osusergo,netgo,sqlite_omit_load_extension \ - -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ - . 
&& upx /bin/garm-cli -RUN mkdir -p /opt/garm/providers.d -RUN cd /build/garm-provider-azure && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-azure . && upx /opt/garm/providers.d/garm-provider-azure -RUN cd /build/garm-provider-openstack && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-openstack . && upx /opt/garm/providers.d/garm-provider-openstack -RUN cd /build/garm-provider-lxd && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-lxd . && upx /opt/garm/providers.d/garm-provider-lxd -RUN cd /build/garm-provider-incus && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-incus . && upx /opt/garm/providers.d/garm-provider-incus -RUN cd /build/garm-provider-aws && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-aws . && upx /opt/garm/providers.d/garm-provider-aws -RUN cd /build/garm-provider-gcp && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-gcp . && upx /opt/garm/providers.d/garm-provider-gcp -RUN cd /build/garm-provider-equinix && go build -ldflags="-linkmode external -extldflags '-static' -s -w -X main.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" -o /opt/garm/providers.d/garm-provider-equinix . && upx /opt/garm/providers.d/garm-provider-equinix - -RUN cd /build/garm-provider-k8s/cmd/garm-provider-k8s && go build -ldflags="-linkmode external -extldflags '-static' -s -w" -o /opt/garm/providers.d/garm-provider-k8s . && upx /opt/garm/providers.d/garm-provider-k8s +RUN cd /build/garm && git checkout ${GARM_REF} \ + && go build -o /bin/garm \ + -tags osusergo,netgo,sqlite_omit_load_extension \ + -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ + /build/garm/cmd/garm && upx /bin/garm +RUN cd /build/garm/cmd/garm-cli \ + && go build -o /bin/garm-cli \ + -tags osusergo,netgo,sqlite_omit_load_extension \ + -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ + . 
&& upx /bin/garm-cli +RUN mkdir -p /opt/garm/providers.d; \ + for repo in \ + cloudbase/garm-provider-azure \ + cloudbase/garm-provider-openstack \ + cloudbase/garm-provider-lxd \ + cloudbase/garm-provider-incus \ + cloudbase/garm-provider-aws \ + cloudbase/garm-provider-gcp \ + cloudbase/garm-provider-equinix \ + flatcar/garm-provider-linode \ + mercedes-benz/garm-provider-k8s; \ + do \ + export PROVIDER_NAME="$(basename $repo)"; \ + export PROVIDER_SUBDIR=""; \ + if [ "$GARM_REF" == "main" ]; then \ + export PROVIDER_TAG="main"; \ + else \ + export PROVIDER_TAG="$(curl -s -L https://api.github.com/repos/$repo/releases/latest | jq -r '.tag_name')"; \ + fi; \ + case $PROVIDER_NAME in \ + "garm-provider-k8s") \ + export PROVIDER_TAG=v0.3.1; \ + export PROVIDER_SUBDIR="cmd/garm-provider-k8s"; \ + export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w"; \ + ;; \ + "garm-provider-linode") \ + export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w"; \ + ;; \ + *) \ + export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w -X main.Version=$PROVIDER_TAG"; \ + ;; \ + esac; \ + git clone --depth 1 --branch "$PROVIDER_TAG" "https://github.com/$repo" "/build/$PROVIDER_NAME" \ + && cd "/build/$PROVIDER_NAME/$PROVIDER_SUBDIR" \ + && go build -ldflags="$PROVIDER_LDFLAGS" -o /opt/garm/providers.d/$PROVIDER_NAME . \ + && upx /opt/garm/providers.d/$PROVIDER_NAME; \ + done FROM busybox From f6f22cb6864fcc395026e036337244f2fa066607 Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Wed, 30 Jul 2025 09:23:55 +0200 Subject: [PATCH 139/179] small fixes --- .github/workflows/build-and-push.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index d6520e22..3aa5bef6 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -48,7 +48,6 @@ jobs: GH_REF: ${{ inputs.ref }} working-directory: src/github.com/cloudbase/garm run: | - set -x if [ "$GH_REF" == "main" ]; then VERSION="nightly" else @@ -67,5 +66,5 @@ jobs: --label "org.opencontainers.image.description=GARM ${GH_REF}" \ --label "org.opencontainers.image.licenses=Apache 2.0" \ --build-arg="GARM_REF=${GH_REF}" \ - -t ${PUSH_TO_PROJECT}/garm:"${VERSION}" \ + -t ${REGISTRY_INPUT}/garm:"${VERSION}" \ --push . From 0f4f98dd03c2a7f3ceb96cfe81fb309cc6d28f29 Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Wed, 30 Jul 2025 11:16:16 +0200 Subject: [PATCH 140/179] put a better git version in providers --- Dockerfile | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index f13eb35a..87d701e3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,8 @@ RUN git config --global --add safe.directory /build && git config --global --add ADD . /build/garm -RUN cd /build/garm && git checkout ${GARM_REF} \ +RUN git -C /build/garm checkout ${GARM_REF} +RUN cd /build/garm \ && go build -o /bin/garm \ -tags osusergo,netgo,sqlite_omit_load_extension \ -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ @@ -18,7 +19,8 @@ RUN cd /build/garm/cmd/garm-cli \ -tags osusergo,netgo,sqlite_omit_load_extension \ -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ . 
&& upx /bin/garm-cli -RUN mkdir -p /opt/garm/providers.d; \ +RUN set -e; \ + mkdir -p /opt/garm/providers.d; \ for repo in \ cloudbase/garm-provider-azure \ cloudbase/garm-provider-openstack \ @@ -37,20 +39,20 @@ RUN mkdir -p /opt/garm/providers.d; \ else \ export PROVIDER_TAG="$(curl -s -L https://api.github.com/repos/$repo/releases/latest | jq -r '.tag_name')"; \ fi; \ + git clone --branch "$PROVIDER_TAG" "https://github.com/$repo" "/build/$PROVIDER_NAME"; \ case $PROVIDER_NAME in \ "garm-provider-k8s") \ - export PROVIDER_TAG=v0.3.1; \ export PROVIDER_SUBDIR="cmd/garm-provider-k8s"; \ export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w"; \ + git -C /build/garm-provider-k8s checkout v0.3.1; \ ;; \ "garm-provider-linode") \ export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w"; \ ;; \ *) \ - export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w -X main.Version=$PROVIDER_TAG"; \ + export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w -X main.Version=$(git -C /build/$PROVIDER_NAME describe --tags --match='v[0-9]*' --dirty --always)"; \ ;; \ esac; \ - git clone --depth 1 --branch "$PROVIDER_TAG" "https://github.com/$repo" "/build/$PROVIDER_NAME" \ && cd "/build/$PROVIDER_NAME/$PROVIDER_SUBDIR" \ && go build -ldflags="$PROVIDER_LDFLAGS" -o /opt/garm/providers.d/$PROVIDER_NAME . \ && upx /opt/garm/providers.d/$PROVIDER_NAME; \ From eb07ed377437eb8b1d22f7b15f6949172829a1ad Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Wed, 30 Jul 2025 11:29:00 +0200 Subject: [PATCH 141/179] remove obsolete tech debt --- Dockerfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 87d701e3..e0c67206 100644 --- a/Dockerfile +++ b/Dockerfile @@ -44,7 +44,6 @@ RUN set -e; \ "garm-provider-k8s") \ export PROVIDER_SUBDIR="cmd/garm-provider-k8s"; \ export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w"; \ - git -C /build/garm-provider-k8s checkout v0.3.1; \ ;; \ "garm-provider-linode") \ export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w"; \ @@ -53,7 +52,7 @@ RUN set -e; \ export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w -X main.Version=$(git -C /build/$PROVIDER_NAME describe --tags --match='v[0-9]*' --dirty --always)"; \ ;; \ esac; \ - && cd "/build/$PROVIDER_NAME/$PROVIDER_SUBDIR" \ + cd "/build/$PROVIDER_NAME/$PROVIDER_SUBDIR" \ && go build -ldflags="$PROVIDER_LDFLAGS" -o /opt/garm/providers.d/$PROVIDER_NAME . 
\ && upx /opt/garm/providers.d/$PROVIDER_NAME; \ done From 158b35db06c45b0880c48d1ec2bca87916cf9093 Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Wed, 30 Jul 2025 14:35:01 +0200 Subject: [PATCH 142/179] simplify workflows --- .github/workflows/build-and-push.yml | 16 +++++----------- .github/workflows/trigger-nightly.yml | 5 +---- Dockerfile | 9 +++++---- 3 files changed, 11 insertions(+), 19 deletions(-) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 3aa5bef6..4c520e4e 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -42,22 +42,16 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Build and push + - name: Build and push image env: - REGISTRY_INPUT: ${{ inputs.push_to_project }} + IMAGE_REGISTRY: ${{ inputs.push_to_project }} GH_REF: ${{ inputs.ref }} working-directory: src/github.com/cloudbase/garm run: | if [ "$GH_REF" == "main" ]; then - VERSION="nightly" + IMAGE_TAG="nightly" else - VERSION=$(git describe --tags --match='v[0-9]*' --always) - fi - if [ "$GH_REF" == "release/v0.1" ]; then - VERSION="v0.1" - fi - if [ "$GH_REF" == "release/v0.2" ]; then - VERSION="v0.2" + IMAGE_TAG=$(git describe --tags --match='v[0-9]*' --always) fi docker buildx build \ --provenance=false \ @@ -66,5 +60,5 @@ jobs: --label "org.opencontainers.image.description=GARM ${GH_REF}" \ --label "org.opencontainers.image.licenses=Apache 2.0" \ --build-arg="GARM_REF=${GH_REF}" \ - -t ${REGISTRY_INPUT}/garm:"${VERSION}" \ + -t ${IMAGE_REGISTRY}/garm:"${IMAGE_TAG}" \ --push . diff --git a/.github/workflows/trigger-nightly.yml b/.github/workflows/trigger-nightly.yml index b643129a..e0b83856 100644 --- a/.github/workflows/trigger-nightly.yml +++ b/.github/workflows/trigger-nightly.yml @@ -6,8 +6,5 @@ on: jobs: call-build-and-push: uses: ./.github/workflows/build-and-push.yml - strategy: - matrix: - ref: ["main", "release/v0.1", "release/v0.2"] with: - ref: ${{ matrix.ref }} + ref: "main" diff --git a/Dockerfile b/Dockerfile index e0c67206..045581f6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -35,11 +35,11 @@ RUN set -e; \ export PROVIDER_NAME="$(basename $repo)"; \ export PROVIDER_SUBDIR=""; \ if [ "$GARM_REF" == "main" ]; then \ - export PROVIDER_TAG="main"; \ + export PROVIDER_REF="main"; \ else \ - export PROVIDER_TAG="$(curl -s -L https://api.github.com/repos/$repo/releases/latest | jq -r '.tag_name')"; \ + export PROVIDER_REF="$(curl -s -L https://api.github.com/repos/$repo/releases/latest | jq -r '.tag_name')"; \ fi; \ - git clone --branch "$PROVIDER_TAG" "https://github.com/$repo" "/build/$PROVIDER_NAME"; \ + git clone --branch "$PROVIDER_REF" "https://github.com/$repo" "/build/$PROVIDER_NAME"; \ case $PROVIDER_NAME in \ "garm-provider-k8s") \ export PROVIDER_SUBDIR="cmd/garm-provider-k8s"; \ @@ -49,7 +49,8 @@ RUN set -e; \ export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w"; \ ;; \ *) \ - export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w -X main.Version=$(git -C /build/$PROVIDER_NAME describe --tags --match='v[0-9]*' --dirty --always)"; \ + export PROVIDER_VERSION=$(git -C /build/$PROVIDER_NAME describe --tags --match='v[0-9]*' --dirty --always); \ + export PROVIDER_LDFLAGS="-linkmode external -extldflags \"-static\" -s -w -X main.Version=$PROVIDER_VERSION"; \ ;; \ esac; \ cd "/build/$PROVIDER_NAME/$PROVIDER_SUBDIR" \ From 22f655f48db003e295ac64b502a0345235768139 Mon Sep 17 00:00:00 2001 From: Lionel ORRY Date: Thu, 31 Jul 2025 
08:42:16 +0200 Subject: [PATCH 143/179] fixes after testing --- .github/workflows/build-and-push.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 4c520e4e..93dd9a75 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -4,16 +4,14 @@ on: inputs: push_to_project: description: "Project to build images for" - required: true + required: false type: string default: "ghcr.io/cloudbase" ref: description: "Ref to build" - required: true + required: false type: string default: "main" - schedule: - - cron: "0 2 * * *" permissions: contents: read From 4ad7d8e856903a639a14d16a62211da4c161769e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 31 Jul 2025 07:04:47 +0000 Subject: [PATCH 144/179] Bump github.com/golang-jwt/jwt/v5 from 5.2.3 to 5.3.0 Bumps [github.com/golang-jwt/jwt/v5](https://github.com/golang-jwt/jwt) from 5.2.3 to 5.3.0. - [Release notes](https://github.com/golang-jwt/jwt/releases) - [Changelog](https://github.com/golang-jwt/jwt/blob/main/VERSION_HISTORY.md) - [Commits](https://github.com/golang-jwt/jwt/compare/v5.2.3...v5.3.0) --- updated-dependencies: - dependency-name: github.com/golang-jwt/jwt/v5 dependency-version: 5.3.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 +- vendor/github.com/golang-jwt/jwt/v5/errors.go | 40 ++++++++++ .../golang-jwt/jwt/v5/errors_go1_20.go | 47 ----------- .../golang-jwt/jwt/v5/errors_go_other.go | 78 ------------------- .../github.com/golang-jwt/jwt/v5/rsa_pss.go | 3 - vendor/modules.txt | 4 +- 7 files changed, 45 insertions(+), 133 deletions(-) delete mode 100644 vendor/github.com/golang-jwt/jwt/v5/errors_go1_20.go delete mode 100644 vendor/github.com/golang-jwt/jwt/v5/errors_go_other.go diff --git a/go.mod b/go.mod index ffe2c7ed..6a93a578 100644 --- a/go.mod +++ b/go.mod @@ -13,7 +13,7 @@ require ( github.com/go-openapi/runtime v0.28.0 github.com/go-openapi/strfmt v0.23.0 github.com/go-openapi/swag v0.23.1 - github.com/golang-jwt/jwt/v5 v5.2.3 + github.com/golang-jwt/jwt/v5 v5.3.0 github.com/google/go-github/v72 v72.0.0 github.com/google/uuid v1.6.0 github.com/gorilla/handlers v1.5.2 diff --git a/go.sum b/go.sum index f255d308..811be92c 100644 --- a/go.sum +++ b/go.sum @@ -56,8 +56,8 @@ github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1 github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-jwt/jwt/v5 v5.2.3 h1:kkGXqQOBSDDWRhWNXTFpqGSCMyh/PLnqUvMGJPDJDs0= -github.com/golang-jwt/jwt/v5 v5.2.3/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= diff --git a/vendor/github.com/golang-jwt/jwt/v5/errors.go 
b/vendor/github.com/golang-jwt/jwt/v5/errors.go index 23bb616d..14e00751 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/errors.go +++ b/vendor/github.com/golang-jwt/jwt/v5/errors.go @@ -2,6 +2,7 @@ package jwt import ( "errors" + "fmt" "strings" ) @@ -47,3 +48,42 @@ func joinErrors(errs ...error) error { errs: errs, } } + +// Unwrap implements the multiple error unwrapping for this error type, which is +// possible in Go 1.20. +func (je joinedError) Unwrap() []error { + return je.errs +} + +// newError creates a new error message with a detailed error message. The +// message will be prefixed with the contents of the supplied error type. +// Additionally, more errors, that provide more context can be supplied which +// will be appended to the message. This makes use of Go 1.20's possibility to +// include more than one %w formatting directive in [fmt.Errorf]. +// +// For example, +// +// newError("no keyfunc was provided", ErrTokenUnverifiable) +// +// will produce the error string +// +// "token is unverifiable: no keyfunc was provided" +func newError(message string, err error, more ...error) error { + var format string + var args []any + if message != "" { + format = "%w: %s" + args = []any{err, message} + } else { + format = "%w" + args = []any{err} + } + + for _, e := range more { + format += ": %w" + args = append(args, e) + } + + err = fmt.Errorf(format, args...) + return err +} diff --git a/vendor/github.com/golang-jwt/jwt/v5/errors_go1_20.go b/vendor/github.com/golang-jwt/jwt/v5/errors_go1_20.go deleted file mode 100644 index a893d355..00000000 --- a/vendor/github.com/golang-jwt/jwt/v5/errors_go1_20.go +++ /dev/null @@ -1,47 +0,0 @@ -//go:build go1.20 -// +build go1.20 - -package jwt - -import ( - "fmt" -) - -// Unwrap implements the multiple error unwrapping for this error type, which is -// possible in Go 1.20. -func (je joinedError) Unwrap() []error { - return je.errs -} - -// newError creates a new error message with a detailed error message. The -// message will be prefixed with the contents of the supplied error type. -// Additionally, more errors, that provide more context can be supplied which -// will be appended to the message. This makes use of Go 1.20's possibility to -// include more than one %w formatting directive in [fmt.Errorf]. -// -// For example, -// -// newError("no keyfunc was provided", ErrTokenUnverifiable) -// -// will produce the error string -// -// "token is unverifiable: no keyfunc was provided" -func newError(message string, err error, more ...error) error { - var format string - var args []any - if message != "" { - format = "%w: %s" - args = []any{err, message} - } else { - format = "%w" - args = []any{err} - } - - for _, e := range more { - format += ": %w" - args = append(args, e) - } - - err = fmt.Errorf(format, args...) - return err -} diff --git a/vendor/github.com/golang-jwt/jwt/v5/errors_go_other.go b/vendor/github.com/golang-jwt/jwt/v5/errors_go_other.go deleted file mode 100644 index 2ad542f0..00000000 --- a/vendor/github.com/golang-jwt/jwt/v5/errors_go_other.go +++ /dev/null @@ -1,78 +0,0 @@ -//go:build !go1.20 -// +build !go1.20 - -package jwt - -import ( - "errors" - "fmt" -) - -// Is implements checking for multiple errors using [errors.Is], since multiple -// error unwrapping is not possible in versions less than Go 1.20. 
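The v5.3.0 consolidation above drops the pre-Go-1.20 error files and keeps only the variant of newError that wraps both the sentinel and the detail errors with multiple %w verbs in a single fmt.Errorf call. A minimal standard-library sketch of that wrapping behaviour (illustration only, not golang-jwt or GARM code; the sentinel names below are made up):

package main

import (
	"errors"
	"fmt"
)

var (
	errTokenUnverifiable = errors.New("token is unverifiable")
	errKeyfuncMissing    = errors.New("no keyfunc was provided")
)

func main() {
	// Two %w verbs wrap both sentinels into one error value (Go 1.20+).
	err := fmt.Errorf("%w: %w", errTokenUnverifiable, errKeyfuncMissing)

	fmt.Println(errors.Is(err, errTokenUnverifiable)) // true
	fmt.Println(errors.Is(err, errKeyfuncMissing))    // true
}

This is why the module can now declare go 1.21 in vendor/modules.txt and drop the build-tagged fallbacks entirely.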
-func (je joinedError) Is(err error) bool { - for _, e := range je.errs { - if errors.Is(e, err) { - return true - } - } - - return false -} - -// wrappedErrors is a workaround for wrapping multiple errors in environments -// where Go 1.20 is not available. It basically uses the already implemented -// functionality of joinedError to handle multiple errors with supplies a -// custom error message that is identical to the one we produce in Go 1.20 using -// multiple %w directives. -type wrappedErrors struct { - msg string - joinedError -} - -// Error returns the stored error string -func (we wrappedErrors) Error() string { - return we.msg -} - -// newError creates a new error message with a detailed error message. The -// message will be prefixed with the contents of the supplied error type. -// Additionally, more errors, that provide more context can be supplied which -// will be appended to the message. Since we cannot use of Go 1.20's possibility -// to include more than one %w formatting directive in [fmt.Errorf], we have to -// emulate that. -// -// For example, -// -// newError("no keyfunc was provided", ErrTokenUnverifiable) -// -// will produce the error string -// -// "token is unverifiable: no keyfunc was provided" -func newError(message string, err error, more ...error) error { - // We cannot wrap multiple errors here with %w, so we have to be a little - // bit creative. Basically, we are using %s instead of %w to produce the - // same error message and then throw the result into a custom error struct. - var format string - var args []any - if message != "" { - format = "%s: %s" - args = []any{err, message} - } else { - format = "%s" - args = []any{err} - } - errs := []error{err} - - for _, e := range more { - format += ": %s" - args = append(args, e) - errs = append(errs, e) - } - - err = &wrappedErrors{ - msg: fmt.Sprintf(format, args...), - joinedError: joinedError{errs: errs}, - } - return err -} diff --git a/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go b/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go index 7c216ae0..f17590cc 100644 --- a/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go +++ b/vendor/github.com/golang-jwt/jwt/v5/rsa_pss.go @@ -1,6 +1,3 @@ -//go:build go1.4 -// +build go1.4 - package jwt import ( diff --git a/vendor/modules.txt b/vendor/modules.txt index faf4441b..750625dd 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -94,8 +94,8 @@ github.com/go-sql-driver/mysql # github.com/golang-jwt/jwt/v4 v4.5.2 ## explicit; go 1.16 github.com/golang-jwt/jwt/v4 -# github.com/golang-jwt/jwt/v5 v5.2.3 -## explicit; go 1.18 +# github.com/golang-jwt/jwt/v5 v5.3.0 +## explicit; go 1.21 github.com/golang-jwt/jwt/v5 # github.com/google/go-github/v72 v72.0.0 ## explicit; go 1.23.0 From f85fe3d63fa2f530b7591d51031ad3b9105ff06f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Aug 2025 06:26:29 +0000 Subject: [PATCH 145/179] Bump github.com/prometheus/client_golang from 1.22.0 to 1.23.0 Bumps [github.com/prometheus/client_golang](https://github.com/prometheus/client_golang) from 1.22.0 to 1.23.0. 
- [Release notes](https://github.com/prometheus/client_golang/releases) - [Changelog](https://github.com/prometheus/client_golang/blob/v1.23.0/CHANGELOG.md) - [Commits](https://github.com/prometheus/client_golang/compare/v1.22.0...v1.23.0) --- updated-dependencies: - dependency-name: github.com/prometheus/client_golang dependency-version: 1.23.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- go.mod | 4 +-- go.sum | 10 +++--- .../prometheus/internal/difflib.go | 4 +-- .../client_golang/prometheus/metric.go | 25 +++++++++---- .../prometheus/process_collector_darwin.go | 6 ++-- .../process_collector_mem_nocgo_darwin.go | 2 +- .../process_collector_procfsenabled.go | 8 ++--- .../prometheus/promhttp/instrument_server.go | 2 +- .../client_golang/prometheus/vec.go | 10 +++--- .../client_golang/prometheus/wrap.go | 36 ++++++++++++++++++- .../prometheus/common/model/time.go | 25 +++++++++++-- vendor/modules.txt | 6 ++-- 12 files changed, 102 insertions(+), 36 deletions(-) diff --git a/go.mod b/go.mod index 6a93a578..c7ba787a 100644 --- a/go.mod +++ b/go.mod @@ -25,7 +25,7 @@ require ( github.com/manifoldco/promptui v0.9.0 github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 github.com/pkg/errors v0.9.1 - github.com/prometheus/client_golang v1.22.0 + github.com/prometheus/client_golang v1.23.0 github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 golang.org/x/crypto v0.40.0 @@ -76,7 +76,7 @@ require ( github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_model v0.6.2 // indirect - github.com/prometheus/common v0.64.0 // indirect + github.com/prometheus/common v0.65.0 // indirect github.com/prometheus/procfs v0.16.1 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/spf13/pflag v1.0.6 // indirect diff --git a/go.sum b/go.sum index 811be92c..863e7b1a 100644 --- a/go.sum +++ b/go.sum @@ -148,12 +148,12 @@ github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q= -github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0= +github.com/prometheus/client_golang v1.23.0 h1:ust4zpdl9r4trLY/gSjlm07PuiBq2ynaXXlptpfy8Uc= +github.com/prometheus/client_golang v1.23.0/go.mod h1:i/o0R9ByOnHX0McrTMTyhYvKE4haaf2mW08I+jGAjEE= github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= -github.com/prometheus/common v0.64.0 h1:pdZeA+g617P7oGv1CzdTzyeShxAGrTBsolKNOLQPGO4= -github.com/prometheus/common v0.64.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= +github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE= +github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= github.com/prometheus/procfs v0.16.1/go.mod 
h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= @@ -187,6 +187,8 @@ go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucg go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= diff --git a/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go b/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go index 8b016355..7bac0da3 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go @@ -453,7 +453,7 @@ func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode { } group = append(group, OpCode{c.Tag, i1, i2, j1, j2}) } - if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') { + if len(group) > 0 && (len(group) != 1 || group[0].Tag != 'e') { groups = append(groups, group) } return groups @@ -568,7 +568,7 @@ func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { buf := bufio.NewWriter(writer) defer buf.Flush() wf := func(format string, args ...interface{}) error { - _, err := buf.WriteString(fmt.Sprintf(format, args...)) + _, err := fmt.Fprintf(buf, format, args...) return err } ws := func(s string) error { diff --git a/vendor/github.com/prometheus/client_golang/prometheus/metric.go b/vendor/github.com/prometheus/client_golang/prometheus/metric.go index 592eec3e..76e59f12 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/metric.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/metric.go @@ -186,21 +186,31 @@ func (m *withExemplarsMetric) Write(pb *dto.Metric) error { case pb.Counter != nil: pb.Counter.Exemplar = m.exemplars[len(m.exemplars)-1] case pb.Histogram != nil: + h := pb.Histogram for _, e := range m.exemplars { - // pb.Histogram.Bucket are sorted by UpperBound. - i := sort.Search(len(pb.Histogram.Bucket), func(i int) bool { - return pb.Histogram.Bucket[i].GetUpperBound() >= e.GetValue() + if (h.GetZeroThreshold() != 0 || h.GetZeroCount() != 0 || + len(h.PositiveSpan) != 0 || len(h.NegativeSpan) != 0) && + e.GetTimestamp() != nil { + h.Exemplars = append(h.Exemplars, e) + if len(h.Bucket) == 0 { + // Don't proceed to classic buckets if there are none. + continue + } + } + // h.Bucket are sorted by UpperBound. + i := sort.Search(len(h.Bucket), func(i int) bool { + return h.Bucket[i].GetUpperBound() >= e.GetValue() }) - if i < len(pb.Histogram.Bucket) { - pb.Histogram.Bucket[i].Exemplar = e + if i < len(h.Bucket) { + h.Bucket[i].Exemplar = e } else { // The +Inf bucket should be explicitly added if there is an exemplar for it, similar to non-const histogram logic in https://github.com/prometheus/client_golang/blob/main/prometheus/histogram.go#L357-L365. 
b := &dto.Bucket{ - CumulativeCount: proto.Uint64(pb.Histogram.GetSampleCount()), + CumulativeCount: proto.Uint64(h.GetSampleCount()), UpperBound: proto.Float64(math.Inf(1)), Exemplar: e, } - pb.Histogram.Bucket = append(pb.Histogram.Bucket, b) + h.Bucket = append(h.Bucket, b) } } default: @@ -227,6 +237,7 @@ type Exemplar struct { // Only last applicable exemplar is injected from the list. // For example for Counter it means last exemplar is injected. // For Histogram, it means last applicable exemplar for each bucket is injected. +// For a Native Histogram, all valid exemplars are injected. // // NewMetricWithExemplars works best with MustNewConstMetric and // MustNewConstHistogram, see example. diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_darwin.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_darwin.go index 0a61b984..b32c95fa 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_darwin.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_darwin.go @@ -25,9 +25,9 @@ import ( "golang.org/x/sys/unix" ) -// notImplementedErr is returned by stub functions that replace cgo functions, when cgo +// errNotImplemented is returned by stub functions that replace cgo functions, when cgo // isn't available. -var notImplementedErr = errors.New("not implemented") +var errNotImplemented = errors.New("not implemented") type memoryInfo struct { vsize uint64 // Virtual memory size in bytes @@ -101,7 +101,7 @@ func (c *processCollector) processCollect(ch chan<- Metric) { if memInfo, err := getMemory(); err == nil { ch <- MustNewConstMetric(c.rss, GaugeValue, float64(memInfo.rss)) ch <- MustNewConstMetric(c.vsize, GaugeValue, float64(memInfo.vsize)) - } else if !errors.Is(err, notImplementedErr) { + } else if !errors.Is(err, errNotImplemented) { // Don't report an error when support is not compiled in. c.reportError(ch, c.rss, err) c.reportError(ch, c.vsize, err) diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_mem_nocgo_darwin.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_mem_nocgo_darwin.go index 8ddb0995..37886512 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_mem_nocgo_darwin.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_mem_nocgo_darwin.go @@ -16,7 +16,7 @@ package prometheus func getMemory() (*memoryInfo, error) { - return nil, notImplementedErr + return nil, errNotImplemented } // describe returns all descriptions of the collector for Darwin. 
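For context on the metric.go change above: exemplars attached via NewMetricWithExemplars are now also injected into native histograms, not only classic buckets and counters. A rough usage sketch of that helper (hedged: the metric name and the Exemplar field names used here are assumptions based on the client_golang documentation, not taken from this patch):

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	dto "github.com/prometheus/client_model/go"
)

func main() {
	desc := prometheus.NewDesc("build_requests_total", "Example counter.", nil, nil)
	base := prometheus.MustNewConstMetric(desc, prometheus.CounterValue, 42)

	// Attach an exemplar; m.Write() below runs through the code path patched above.
	m, err := prometheus.NewMetricWithExemplars(base, prometheus.Exemplar{
		Value:     1,
		Labels:    prometheus.Labels{"trace_id": "abc123"},
		Timestamp: time.Now(),
	})
	if err != nil {
		panic(err)
	}

	var out dto.Metric
	if err := m.Write(&out); err != nil {
		panic(err)
	}
	fmt.Println(out.GetCounter().GetExemplar().GetLabel())
}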
diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_procfsenabled.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_procfsenabled.go index 9f4b130b..8074f70f 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_procfsenabled.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_procfsenabled.go @@ -66,11 +66,11 @@ func (c *processCollector) processCollect(ch chan<- Metric) { if netstat, err := p.Netstat(); err == nil { var inOctets, outOctets float64 - if netstat.IpExt.InOctets != nil { - inOctets = *netstat.IpExt.InOctets + if netstat.InOctets != nil { + inOctets = *netstat.InOctets } - if netstat.IpExt.OutOctets != nil { - outOctets = *netstat.IpExt.OutOctets + if netstat.OutOctets != nil { + outOctets = *netstat.OutOctets } ch <- MustNewConstMetric(c.inBytes, CounterValue, inOctets) ch <- MustNewConstMetric(c.outBytes, CounterValue, outOctets) diff --git a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go index 356edb78..9332b024 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go @@ -392,7 +392,7 @@ func isLabelCurried(c prometheus.Collector, label string) bool { func labels(code, method bool, reqMethod string, status int, extraMethods ...string) prometheus.Labels { labels := prometheus.Labels{} - if !(code || method) { + if !code && !method { return labels } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/vec.go b/vendor/github.com/prometheus/client_golang/prometheus/vec.go index 2c808eec..487b4665 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/vec.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/vec.go @@ -79,7 +79,7 @@ func (m *MetricVec) DeleteLabelValues(lvs ...string) bool { return false } - return m.metricMap.deleteByHashWithLabelValues(h, lvs, m.curry) + return m.deleteByHashWithLabelValues(h, lvs, m.curry) } // Delete deletes the metric where the variable labels are the same as those @@ -101,7 +101,7 @@ func (m *MetricVec) Delete(labels Labels) bool { return false } - return m.metricMap.deleteByHashWithLabels(h, labels, m.curry) + return m.deleteByHashWithLabels(h, labels, m.curry) } // DeletePartialMatch deletes all metrics where the variable labels contain all of those @@ -114,7 +114,7 @@ func (m *MetricVec) DeletePartialMatch(labels Labels) int { labels, closer := constrainLabels(m.desc, labels) defer closer() - return m.metricMap.deleteByLabels(labels, m.curry) + return m.deleteByLabels(labels, m.curry) } // Without explicit forwarding of Describe, Collect, Reset, those methods won't @@ -216,7 +216,7 @@ func (m *MetricVec) GetMetricWithLabelValues(lvs ...string) (Metric, error) { return nil, err } - return m.metricMap.getOrCreateMetricWithLabelValues(h, lvs, m.curry), nil + return m.getOrCreateMetricWithLabelValues(h, lvs, m.curry), nil } // GetMetricWith returns the Metric for the given Labels map (the label names @@ -244,7 +244,7 @@ func (m *MetricVec) GetMetricWith(labels Labels) (Metric, error) { return nil, err } - return m.metricMap.getOrCreateMetricWithLabels(h, labels, m.curry), nil + return m.getOrCreateMetricWithLabels(h, labels, m.curry), nil } func (m *MetricVec) hashLabelValues(vals []string) (uint64, error) { diff --git 
a/vendor/github.com/prometheus/client_golang/prometheus/wrap.go b/vendor/github.com/prometheus/client_golang/prometheus/wrap.go index 25da157f..2ed12850 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/wrap.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/wrap.go @@ -63,7 +63,7 @@ func WrapRegistererWith(labels Labels, reg Registerer) Registerer { // metric names that are standardized across applications, as that would break // horizontal monitoring, for example the metrics provided by the Go collector // (see NewGoCollector) and the process collector (see NewProcessCollector). (In -// fact, those metrics are already prefixed with “go_” or “process_”, +// fact, those metrics are already prefixed with "go_" or "process_", // respectively.) // // Conflicts between Collectors registered through the original Registerer with @@ -78,6 +78,40 @@ func WrapRegistererWithPrefix(prefix string, reg Registerer) Registerer { } } +// WrapCollectorWith returns a Collector wrapping the provided Collector. The +// wrapped Collector will add the provided Labels to all Metrics it collects (as +// ConstLabels). The Metrics collected by the unmodified Collector must not +// duplicate any of those labels. +// +// WrapCollectorWith can be useful to work with multiple instances of a third +// party library that does not expose enough flexibility on the lifecycle of its +// registered metrics. +// For example, let's say you have a foo.New(reg Registerer) constructor that +// registers metrics but never unregisters them, and you want to create multiple +// instances of foo.Foo with different labels. +// The way to achieve that, is to create a new Registry, pass it to foo.New, +// then use WrapCollectorWith to wrap that Registry with the desired labels and +// register that as a collector in your main Registry. +// Then you can un-register the wrapped collector effectively un-registering the +// metrics registered by foo.New. +func WrapCollectorWith(labels Labels, c Collector) Collector { + return &wrappingCollector{ + wrappedCollector: c, + labels: labels, + } +} + +// WrapCollectorWithPrefix returns a Collector wrapping the provided Collector. The +// wrapped Collector will add the provided prefix to the name of all Metrics it collects. +// +// See the documentation of WrapCollectorWith for more details on the use case. +func WrapCollectorWithPrefix(prefix string, c Collector) Collector { + return &wrappingCollector{ + wrappedCollector: c, + prefix: prefix, + } +} + type wrappingRegisterer struct { wrappedRegisterer Registerer prefix string diff --git a/vendor/github.com/prometheus/common/model/time.go b/vendor/github.com/prometheus/common/model/time.go index 5727452c..fed9e87b 100644 --- a/vendor/github.com/prometheus/common/model/time.go +++ b/vendor/github.com/prometheus/common/model/time.go @@ -201,6 +201,7 @@ var unitMap = map[string]struct { // ParseDuration parses a string into a time.Duration, assuming that a year // always has 365d, a week always has 7d, and a day always has 24h. +// Negative durations are not supported. func ParseDuration(s string) (Duration, error) { switch s { case "0": @@ -253,18 +254,36 @@ func ParseDuration(s string) (Duration, error) { return 0, errors.New("duration out of range") } } + return Duration(dur), nil } +// ParseDurationAllowNegative is like ParseDuration but also accepts negative durations. 
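The WrapCollectorWith/WrapCollectorWithPrefix helpers added to wrap.go above enable the pattern their doc comment describes: give a third-party library its own Registry, then expose that Registry through the main one with extra const labels so it can later be unregistered as a single unit. A minimal sketch of that pattern (foo.New is a hypothetical constructor, shown commented out; the "tenant" label is an arbitrary example):

package main

import "github.com/prometheus/client_golang/prometheus"

func main() {
	mainReg := prometheus.NewRegistry()

	// Dedicated registry handed to a library that never unregisters its metrics.
	subReg := prometheus.NewRegistry()
	// foo.New(subReg) // hypothetical third-party constructor

	// Expose the sub-registry through the main registry with an extra label.
	wrapped := prometheus.WrapCollectorWith(prometheus.Labels{"tenant": "a"}, subReg)
	mainReg.MustRegister(wrapped)

	// Unregistering the wrapped collector later removes everything the library registered.
	_ = mainReg.Unregister(wrapped)
}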
+func ParseDurationAllowNegative(s string) (Duration, error) { + if s == "" || s[0] != '-' { + return ParseDuration(s) + } + + d, err := ParseDuration(s[1:]) + + return -d, err +} + func (d Duration) String() string { var ( - ms = int64(time.Duration(d) / time.Millisecond) - r = "" + ms = int64(time.Duration(d) / time.Millisecond) + r = "" + sign = "" ) + if ms == 0 { return "0s" } + if ms < 0 { + sign, ms = "-", -ms + } + f := func(unit string, mult int64, exact bool) { if exact && ms%mult != 0 { return @@ -286,7 +305,7 @@ func (d Duration) String() string { f("s", 1000, false) f("ms", 1, false) - return r + return sign + r } // MarshalJSON implements the json.Marshaler interface. diff --git a/vendor/modules.txt b/vendor/modules.txt index 750625dd..a9818b9c 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -197,8 +197,8 @@ github.com/pkg/errors # github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 ## explicit github.com/pmezard/go-difflib/difflib -# github.com/prometheus/client_golang v1.22.0 -## explicit; go 1.22 +# github.com/prometheus/client_golang v1.23.0 +## explicit; go 1.23.0 github.com/prometheus/client_golang/internal/github.com/golang/gddo/httputil github.com/prometheus/client_golang/internal/github.com/golang/gddo/httputil/header github.com/prometheus/client_golang/prometheus @@ -208,7 +208,7 @@ github.com/prometheus/client_golang/prometheus/promhttp/internal # github.com/prometheus/client_model v0.6.2 ## explicit; go 1.22.0 github.com/prometheus/client_model/go -# github.com/prometheus/common v0.64.0 +# github.com/prometheus/common v0.65.0 ## explicit; go 1.23.0 github.com/prometheus/common/expfmt github.com/prometheus/common/model From 5915107446a6a96a99b42dd290e9a92408d336a0 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 6 Aug 2025 23:00:04 +0300 Subject: [PATCH 146/179] WiP Signed-off-by: Gabriel Adrian Samfira --- .github/workflows/build-and-push.yml | 1 - Dockerfile | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 93dd9a75..35560e67 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -26,7 +26,6 @@ jobs: - name: "Checkout" uses: actions/checkout@v4 with: - ref: ${{ inputs.ref }} path: src/github.com/cloudbase/garm fetch-depth: 0 diff --git a/Dockerfile b/Dockerfile index 045581f6..81033292 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,6 +5,7 @@ LABEL stage=builder RUN apk add --no-cache musl-dev gcc libtool m4 autoconf g++ make libblkid util-linux-dev git linux-headers upx curl jq RUN git config --global --add safe.directory /build && git config --global --add advice.detachedHead false +RUN echo ${GARM_REF} ADD . /build/garm @@ -19,7 +20,7 @@ RUN cd /build/garm/cmd/garm-cli \ -tags osusergo,netgo,sqlite_omit_load_extension \ -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ . && upx /bin/garm-cli -RUN set -e; \ +RUN set -ex; \ mkdir -p /opt/garm/providers.d; \ for repo in \ cloudbase/garm-provider-azure \ From 2ee2fca8aebbfc3685d6af3f3163230e26aa1bb0 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Wed, 6 Aug 2025 23:00:04 +0300 Subject: [PATCH 147/179] Use the Dockerfile from the main branch There is no way to change the Dockerfile in a tag. We need to use the Dockerfile in the main branch. 
So even if we're building the image for a stable version, we need to check out the main branch. The Dockerfile will take care of checking out the proper tags. Signed-off-by: Gabriel Adrian Samfira --- .github/workflows/build-and-push.yml | 1 - Dockerfile | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 93dd9a75..35560e67 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -26,7 +26,6 @@ jobs: - name: "Checkout" uses: actions/checkout@v4 with: - ref: ${{ inputs.ref }} path: src/github.com/cloudbase/garm fetch-depth: 0 diff --git a/Dockerfile b/Dockerfile index 045581f6..2444443d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,7 +19,7 @@ RUN cd /build/garm/cmd/garm-cli \ -tags osusergo,netgo,sqlite_omit_load_extension \ -ldflags "-linkmode external -extldflags '-static' -s -w -X github.com/cloudbase/garm/util/appdefaults.Version=$(git describe --tags --match='v[0-9]*' --dirty --always)" \ . && upx /bin/garm-cli -RUN set -e; \ +RUN set -ex; \ mkdir -p /opt/garm/providers.d; \ for repo in \ cloudbase/garm-provider-azure \ From 20a16d923cdc4a3c6249e2368c638d34796ab627 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Thu, 7 Aug 2025 00:12:35 +0300 Subject: [PATCH 148/179] Get the image tag from supplied ref We need to pass the ref used in the workflow. If we supply a tag, we should just get that same tag. If we supply a branch, we should get the latest release from that branch. Signed-off-by: Gabriel Adrian Samfira --- .github/workflows/build-and-push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml index 35560e67..8226039f 100644 --- a/.github/workflows/build-and-push.yml +++ b/.github/workflows/build-and-push.yml @@ -48,7 +48,7 @@ jobs: if [ "$GH_REF" == "main" ]; then IMAGE_TAG="nightly" else - IMAGE_TAG=$(git describe --tags --match='v[0-9]*' --always) + IMAGE_TAG=$(git describe --tags --match='v[0-9]*' --always ${GH_REF}) fi docker buildx build \ --provenance=false \ From e2169865a192c3e2f2366a8ca2bdde9094790541 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Aug 2025 07:05:40 +0000 Subject: [PATCH 149/179] Bump github.com/cloudbase/garm-provider-common from 0.1.6 to 0.1.7 Bumps [github.com/cloudbase/garm-provider-common](https://github.com/cloudbase/garm-provider-common) from 0.1.6 to 0.1.7. - [Release notes](https://github.com/cloudbase/garm-provider-common/releases) - [Commits](https://github.com/cloudbase/garm-provider-common/compare/v0.1.6...v0.1.7) --- updated-dependencies: - dependency-name: github.com/cloudbase/garm-provider-common dependency-version: 0.1.7 dependency-type: direct:production update-type: version-update:semver-patch ...
Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 +- .../garm-provider-common/errors/errors.go | 138 +++++++++++++++++- vendor/modules.txt | 2 +- 4 files changed, 139 insertions(+), 7 deletions(-) diff --git a/go.mod b/go.mod index c7ba787a..00479f2a 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ toolchain go1.23.6 require ( github.com/BurntSushi/toml v1.5.0 github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 - github.com/cloudbase/garm-provider-common v0.1.6 + github.com/cloudbase/garm-provider-common v0.1.7 github.com/felixge/httpsnoop v1.0.4 github.com/go-openapi/errors v0.22.1 github.com/go-openapi/runtime v0.28.0 diff --git a/go.sum b/go.sum index 863e7b1a..5d9411e9 100644 --- a/go.sum +++ b/go.sum @@ -19,8 +19,8 @@ github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObk github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04= github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= -github.com/cloudbase/garm-provider-common v0.1.6 h1:wLqolRkUD2Z4rzuBLDs2exL1Aq+eJ5RBVnRvk5JP6fs= -github.com/cloudbase/garm-provider-common v0.1.6/go.mod h1:2O51WbcfqRx5fDHyyJgIFq7KdTZZnefsM+aoOchyleU= +github.com/cloudbase/garm-provider-common v0.1.7 h1:V0upTejFRDiyFBO4hhkMWmPtmRTguyOt/4i1u9/rfbg= +github.com/cloudbase/garm-provider-common v0.1.7/go.mod h1:2O51WbcfqRx5fDHyyJgIFq7KdTZZnefsM+aoOchyleU= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= diff --git a/vendor/github.com/cloudbase/garm-provider-common/errors/errors.go b/vendor/github.com/cloudbase/garm-provider-common/errors/errors.go index 9f98c33a..76e85d9c 100644 --- a/vendor/github.com/cloudbase/garm-provider-common/errors/errors.go +++ b/vendor/github.com/cloudbase/garm-provider-common/errors/errors.go @@ -29,9 +29,9 @@ var ( // ErrBadRequest is returned is a malformed request is sent ErrBadRequest = NewBadRequestError("invalid request") // ErrTimeout is returned when a timeout occurs. 
- ErrTimeout = fmt.Errorf("timed out") - ErrUnprocessable = fmt.Errorf("cannot process request") - ErrNoPoolsAvailable = fmt.Errorf("no pools available") + ErrTimeout = NewTimeoutError("timed out") + ErrUnprocessable = NewUnprocessableError("cannot process request") + ErrNoPoolsAvailable = NewNoPoolsAvailableError("no pools available") ) type baseError struct { @@ -56,6 +56,15 @@ type ProviderError struct { baseError } +func (p *ProviderError) Is(target error) bool { + if target == nil { + return false + } + + _, ok := target.(*ProviderError) + return ok +} + // NewMissingSecretError returns a new MissingSecretError func NewMissingSecretError(msg string, a ...interface{}) error { return &MissingSecretError{ @@ -70,6 +79,15 @@ type MissingSecretError struct { baseError } +func (p *MissingSecretError) Is(target error) bool { + if target == nil { + return false + } + + _, ok := target.(*MissingSecretError) + return ok +} + // NewUnauthorizedError returns a new UnauthorizedError func NewUnauthorizedError(msg string) error { return &UnauthorizedError{ @@ -84,6 +102,15 @@ type UnauthorizedError struct { baseError } +func (p *UnauthorizedError) Is(target error) bool { + if target == nil { + return false + } + + _, ok := target.(*UnauthorizedError) + return ok +} + // NewNotFoundError returns a new NotFoundError func NewNotFoundError(msg string, a ...interface{}) error { return &NotFoundError{ @@ -98,6 +125,15 @@ type NotFoundError struct { baseError } +func (p *NotFoundError) Is(target error) bool { + if target == nil { + return false + } + + _, ok := target.(*NotFoundError) + return ok +} + // NewDuplicateUserError returns a new DuplicateUserError func NewDuplicateUserError(msg string) error { return &DuplicateUserError{ @@ -112,6 +148,15 @@ type DuplicateUserError struct { baseError } +func (p *DuplicateUserError) Is(target error) bool { + if target == nil { + return false + } + + _, ok := target.(*DuplicateUserError) + return ok +} + // NewBadRequestError returns a new BadRequestError func NewBadRequestError(msg string, a ...interface{}) error { return &BadRequestError{ @@ -126,6 +171,15 @@ type BadRequestError struct { baseError } +func (p *BadRequestError) Is(target error) bool { + if target == nil { + return false + } + + _, ok := target.(*BadRequestError) + return ok +} + // NewConflictError returns a new ConflictError func NewConflictError(msg string, a ...interface{}) error { return &ConflictError{ @@ -139,3 +193,81 @@ func NewConflictError(msg string, a ...interface{}) error { type ConflictError struct { baseError } + +func (p *ConflictError) Is(target error) bool { + if target == nil { + return false + } + + _, ok := target.(*ConflictError) + return ok +} + +// NewTimeoutError returns a new TimoutError +func NewTimeoutError(msg string, a ...interface{}) error { + return &TimoutError{ + baseError{ + msg: fmt.Sprintf(msg, a...), + }, + } +} + +// TimoutError is returned when an operation times out. +type TimoutError struct { + baseError +} + +func (p *TimoutError) Is(target error) bool { + if target == nil { + return false + } + + _, ok := target.(*TimoutError) + return ok +} + +// NewUnprocessableError returns a new UnprocessableError +func NewUnprocessableError(msg string, a ...interface{}) error { + return &TimoutError{ + baseError{ + msg: fmt.Sprintf(msg, a...), + }, + } +} + +// TimoutError is returned when an operation times out. 
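The effect of the Is methods added above (garm-provider-common v0.1.7): sentinel checks with errors.Is now match any error of the same concrete type, not only the exact package-level value, and they keep working through additional wrapping. A small sketch of how a caller would rely on that (illustrative only; the wrapping message is made up):

package main

import (
	"errors"
	"fmt"

	commonErrors "github.com/cloudbase/garm-provider-common/errors"
)

func main() {
	err := fmt.Errorf("creating instance: %w", commonErrors.NewTimeoutError("waited too long"))

	// True: both sides are *TimoutError (the upstream spelling), matched via
	// the Is method rather than pointer equality with ErrTimeout.
	fmt.Println(errors.Is(err, commonErrors.ErrTimeout))
}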
+type UnprocessableError struct { + baseError +} + +func (p *UnprocessableError) Is(target error) bool { + if target == nil { + return false + } + + _, ok := target.(*UnprocessableError) + return ok +} + +// NewNoPoolsAvailableError returns a new UnprocessableError +func NewNoPoolsAvailableError(msg string, a ...interface{}) error { + return &TimoutError{ + baseError{ + msg: fmt.Sprintf(msg, a...), + }, + } +} + +// NoPoolsAvailableError is returned when anthere are not pools available. +type NoPoolsAvailableError struct { + baseError +} + +func (p *NoPoolsAvailableError) Is(target error) bool { + if target == nil { + return false + } + + _, ok := target.(*NoPoolsAvailableError) + return ok +} diff --git a/vendor/modules.txt b/vendor/modules.txt index a9818b9c..ba3c7488 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -21,7 +21,7 @@ github.com/cespare/xxhash/v2 # github.com/chzyer/readline v1.5.1 ## explicit; go 1.15 github.com/chzyer/readline -# github.com/cloudbase/garm-provider-common v0.1.6 +# github.com/cloudbase/garm-provider-common v0.1.7 ## explicit; go 1.23.0 github.com/cloudbase/garm-provider-common/defaults github.com/cloudbase/garm-provider-common/errors From 3f5104627974cc8cd0127e521255b0bb33e4f176 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Aug 2025 06:49:58 +0000 Subject: [PATCH 150/179] Bump golang.org/x/mod from 0.26.0 to 0.27.0 Bumps [golang.org/x/mod](https://github.com/golang/mod) from 0.26.0 to 0.27.0. - [Commits](https://github.com/golang/mod/compare/v0.26.0...v0.27.0) --- updated-dependencies: - dependency-name: golang.org/x/mod dependency-version: 0.27.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 ++-- vendor/modules.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/go.mod b/go.mod index 00479f2a..e5ea14e4 100644 --- a/go.mod +++ b/go.mod @@ -29,7 +29,7 @@ require ( github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 golang.org/x/crypto v0.40.0 - golang.org/x/mod v0.26.0 + golang.org/x/mod v0.27.0 golang.org/x/oauth2 v0.30.0 golang.org/x/sync v0.16.0 gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 diff --git a/go.sum b/go.sum index 5d9411e9..78fc27dd 100644 --- a/go.sum +++ b/go.sum @@ -191,8 +191,8 @@ go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= -golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= -golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= +golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ= +golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc= golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= diff --git a/vendor/modules.txt b/vendor/modules.txt index ba3c7488..e17e5481 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -285,7 +285,7 @@ golang.org/x/crypto/chacha20poly1305 golang.org/x/crypto/hkdf golang.org/x/crypto/internal/alias golang.org/x/crypto/internal/poly1305 -# golang.org/x/mod v0.26.0 +# 
golang.org/x/mod v0.27.0 ## explicit; go 1.23.0 golang.org/x/mod/semver # golang.org/x/net v0.41.0 From f24a22d537db8adc8acc5a58a3d0cf45f9c9fa90 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Aug 2025 09:47:38 +0000 Subject: [PATCH 151/179] Bump golang.org/x/crypto from 0.40.0 to 0.41.0 Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.40.0 to 0.41.0. - [Commits](https://github.com/golang/crypto/compare/v0.40.0...v0.41.0) --- updated-dependencies: - dependency-name: golang.org/x/crypto dependency-version: 0.41.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- go.mod | 8 +-- go.sum | 16 +++--- vendor/golang.org/x/sys/unix/mkerrors.sh | 3 + .../golang.org/x/sys/unix/syscall_darwin.go | 56 +------------------ vendor/golang.org/x/sys/unix/zerrors_linux.go | 44 ++++++++++----- .../x/sys/unix/zerrors_linux_386.go | 2 + .../x/sys/unix/zerrors_linux_amd64.go | 2 + .../x/sys/unix/zerrors_linux_arm.go | 2 + .../x/sys/unix/zerrors_linux_arm64.go | 2 + .../x/sys/unix/zerrors_linux_loong64.go | 2 + .../x/sys/unix/zerrors_linux_mips.go | 2 + .../x/sys/unix/zerrors_linux_mips64.go | 2 + .../x/sys/unix/zerrors_linux_mips64le.go | 2 + .../x/sys/unix/zerrors_linux_mipsle.go | 2 + .../x/sys/unix/zerrors_linux_ppc.go | 2 + .../x/sys/unix/zerrors_linux_ppc64.go | 2 + .../x/sys/unix/zerrors_linux_ppc64le.go | 2 + .../x/sys/unix/zerrors_linux_riscv64.go | 2 + .../x/sys/unix/zerrors_linux_s390x.go | 2 + .../x/sys/unix/zerrors_linux_sparc64.go | 2 + .../x/sys/unix/zsysnum_linux_386.go | 1 + .../x/sys/unix/zsysnum_linux_amd64.go | 1 + .../x/sys/unix/zsysnum_linux_arm.go | 1 + .../x/sys/unix/zsysnum_linux_arm64.go | 1 + .../x/sys/unix/zsysnum_linux_loong64.go | 1 + .../x/sys/unix/zsysnum_linux_mips.go | 1 + .../x/sys/unix/zsysnum_linux_mips64.go | 1 + .../x/sys/unix/zsysnum_linux_mips64le.go | 1 + .../x/sys/unix/zsysnum_linux_mipsle.go | 1 + .../x/sys/unix/zsysnum_linux_ppc.go | 1 + .../x/sys/unix/zsysnum_linux_ppc64.go | 1 + .../x/sys/unix/zsysnum_linux_ppc64le.go | 1 + .../x/sys/unix/zsysnum_linux_riscv64.go | 1 + .../x/sys/unix/zsysnum_linux_s390x.go | 1 + .../x/sys/unix/zsysnum_linux_sparc64.go | 1 + vendor/golang.org/x/sys/unix/ztypes_linux.go | 37 +++++++++--- .../golang.org/x/sys/unix/ztypes_linux_386.go | 30 +++++----- .../x/sys/unix/ztypes_linux_amd64.go | 28 +++++----- .../golang.org/x/sys/unix/ztypes_linux_arm.go | 32 +++++------ .../x/sys/unix/ztypes_linux_arm64.go | 28 +++++----- .../x/sys/unix/ztypes_linux_loong64.go | 28 +++++----- .../x/sys/unix/ztypes_linux_mips.go | 30 +++++----- .../x/sys/unix/ztypes_linux_mips64.go | 28 +++++----- .../x/sys/unix/ztypes_linux_mips64le.go | 28 +++++----- .../x/sys/unix/ztypes_linux_mipsle.go | 30 +++++----- .../golang.org/x/sys/unix/ztypes_linux_ppc.go | 32 +++++------ .../x/sys/unix/ztypes_linux_ppc64.go | 28 +++++----- .../x/sys/unix/ztypes_linux_ppc64le.go | 28 +++++----- .../x/sys/unix/ztypes_linux_riscv64.go | 28 +++++----- .../x/sys/unix/ztypes_linux_s390x.go | 28 +++++----- .../x/sys/unix/ztypes_linux_sparc64.go | 28 +++++----- vendor/modules.txt | 8 +-- 52 files changed, 341 insertions(+), 310 deletions(-) diff --git a/go.mod b/go.mod index e5ea14e4..7e930b4b 100644 --- a/go.mod +++ b/go.mod @@ -28,7 +28,7 @@ require ( github.com/prometheus/client_golang v1.23.0 github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 - golang.org/x/crypto v0.40.0 + golang.org/x/crypto v0.41.0 golang.org/x/mod 
v0.27.0 golang.org/x/oauth2 v0.30.0 golang.org/x/sync v0.16.0 @@ -87,9 +87,9 @@ require ( go.opentelemetry.io/otel v1.36.0 // indirect go.opentelemetry.io/otel/metric v1.36.0 // indirect go.opentelemetry.io/otel/trace v1.36.0 // indirect - golang.org/x/net v0.41.0 // indirect - golang.org/x/sys v0.34.0 // indirect - golang.org/x/text v0.27.0 // indirect + golang.org/x/net v0.42.0 // indirect + golang.org/x/sys v0.35.0 // indirect + golang.org/x/text v0.28.0 // indirect google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 78fc27dd..b1828804 100644 --- a/go.sum +++ b/go.sum @@ -189,12 +189,12 @@ go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKr go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= -golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= -golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= +golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= +golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ= golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc= -golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= -golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= +golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= @@ -202,10 +202,10 @@ golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= -golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= -golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= diff --git a/vendor/golang.org/x/sys/unix/mkerrors.sh b/vendor/golang.org/x/sys/unix/mkerrors.sh index 6ab02b6c..d1c8b264 100644 --- a/vendor/golang.org/x/sys/unix/mkerrors.sh +++ b/vendor/golang.org/x/sys/unix/mkerrors.sh @@ -349,6 
+349,9 @@ struct ltchars { #define _HIDIOCGRAWPHYS HIDIOCGRAWPHYS(_HIDIOCGRAWPHYS_LEN) #define _HIDIOCGRAWUNIQ HIDIOCGRAWUNIQ(_HIDIOCGRAWUNIQ_LEN) +// Renamed in v6.16, commit c6d732c38f93 ("net: ethtool: remove duplicate defines for family info") +#define ETHTOOL_FAMILY_NAME ETHTOOL_GENL_NAME +#define ETHTOOL_FAMILY_VERSION ETHTOOL_GENL_VERSION ' includes_NetBSD=' diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin.go b/vendor/golang.org/x/sys/unix/syscall_darwin.go index 798f61ad..7838ca5d 100644 --- a/vendor/golang.org/x/sys/unix/syscall_darwin.go +++ b/vendor/golang.org/x/sys/unix/syscall_darwin.go @@ -602,14 +602,9 @@ func Connectx(fd int, srcIf uint32, srcAddr, dstAddr Sockaddr, associd SaeAssocI return } -// sys connectx(fd int, endpoints *SaEndpoints, associd SaeAssocID, flags uint32, iov []Iovec, n *uintptr, connid *SaeConnID) (err error) const minIovec = 8 func Readv(fd int, iovs [][]byte) (n int, err error) { - if !darwinKernelVersionMin(11, 0, 0) { - return 0, ENOSYS - } - iovecs := make([]Iovec, 0, minIovec) iovecs = appendBytes(iovecs, iovs) n, err = readv(fd, iovecs) @@ -618,9 +613,6 @@ func Readv(fd int, iovs [][]byte) (n int, err error) { } func Preadv(fd int, iovs [][]byte, offset int64) (n int, err error) { - if !darwinKernelVersionMin(11, 0, 0) { - return 0, ENOSYS - } iovecs := make([]Iovec, 0, minIovec) iovecs = appendBytes(iovecs, iovs) n, err = preadv(fd, iovecs, offset) @@ -629,10 +621,6 @@ func Preadv(fd int, iovs [][]byte, offset int64) (n int, err error) { } func Writev(fd int, iovs [][]byte) (n int, err error) { - if !darwinKernelVersionMin(11, 0, 0) { - return 0, ENOSYS - } - iovecs := make([]Iovec, 0, minIovec) iovecs = appendBytes(iovecs, iovs) if raceenabled { @@ -644,10 +632,6 @@ func Writev(fd int, iovs [][]byte) (n int, err error) { } func Pwritev(fd int, iovs [][]byte, offset int64) (n int, err error) { - if !darwinKernelVersionMin(11, 0, 0) { - return 0, ENOSYS - } - iovecs := make([]Iovec, 0, minIovec) iovecs = appendBytes(iovecs, iovs) if raceenabled { @@ -707,45 +691,7 @@ func readvRacedetect(iovecs []Iovec, n int, err error) { } } -func darwinMajorMinPatch() (maj, min, patch int, err error) { - var un Utsname - err = Uname(&un) - if err != nil { - return - } - - var mmp [3]int - c := 0 -Loop: - for _, b := range un.Release[:] { - switch { - case b >= '0' && b <= '9': - mmp[c] = 10*mmp[c] + int(b-'0') - case b == '.': - c++ - if c > 2 { - return 0, 0, 0, ENOTSUP - } - case b == 0: - break Loop - default: - return 0, 0, 0, ENOTSUP - } - } - if c != 2 { - return 0, 0, 0, ENOTSUP - } - return mmp[0], mmp[1], mmp[2], nil -} - -func darwinKernelVersionMin(maj, min, patch int) bool { - actualMaj, actualMin, actualPatch, err := darwinMajorMinPatch() - if err != nil { - return false - } - return actualMaj > maj || actualMaj == maj && (actualMin > min || actualMin == min && actualPatch >= patch) -} - +//sys connectx(fd int, endpoints *SaEndpoints, associd SaeAssocID, flags uint32, iov []Iovec, n *uintptr, connid *SaeConnID) (err error) //sys sendfile(infd int, outfd int, offset int64, len *int64, hdtr unsafe.Pointer, flags int) (err error) //sys shmat(id int, addr uintptr, flag int) (ret uintptr, err error) diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux.go b/vendor/golang.org/x/sys/unix/zerrors_linux.go index 9e7a6c5a..b6db27d9 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux.go @@ -328,6 +328,8 @@ const ( AUDIT_KERNEL = 0x7d0 AUDIT_KERNEL_OTHER = 0x524 AUDIT_KERN_MODULE = 0x532 + 
AUDIT_LANDLOCK_ACCESS = 0x58f + AUDIT_LANDLOCK_DOMAIN = 0x590 AUDIT_LAST_FEATURE = 0x1 AUDIT_LAST_KERN_ANOM_MSG = 0x707 AUDIT_LAST_USER_MSG = 0x4af @@ -492,6 +494,7 @@ const ( BPF_F_BEFORE = 0x8 BPF_F_ID = 0x20 BPF_F_NETFILTER_IP_DEFRAG = 0x1 + BPF_F_PREORDER = 0x40 BPF_F_QUERY_EFFECTIVE = 0x1 BPF_F_REDIRECT_FLAGS = 0x19 BPF_F_REPLACE = 0x4 @@ -528,6 +531,7 @@ const ( BPF_LDX = 0x1 BPF_LEN = 0x80 BPF_LL_OFF = -0x200000 + BPF_LOAD_ACQ = 0x100 BPF_LSH = 0x60 BPF_MAJOR_VERSION = 0x1 BPF_MAXINSNS = 0x1000 @@ -555,6 +559,7 @@ const ( BPF_RET = 0x6 BPF_RSH = 0x70 BPF_ST = 0x2 + BPF_STORE_REL = 0x110 BPF_STX = 0x3 BPF_SUB = 0x10 BPF_TAG_SIZE = 0x8 @@ -844,9 +849,9 @@ const ( DM_UUID_FLAG = 0x4000 DM_UUID_LEN = 0x81 DM_VERSION = 0xc138fd00 - DM_VERSION_EXTRA = "-ioctl (2025-01-17)" + DM_VERSION_EXTRA = "-ioctl (2025-04-28)" DM_VERSION_MAJOR = 0x4 - DM_VERSION_MINOR = 0x31 + DM_VERSION_MINOR = 0x32 DM_VERSION_PATCHLEVEL = 0x0 DT_BLK = 0x6 DT_CHR = 0x2 @@ -937,9 +942,6 @@ const ( EPOLL_CTL_MOD = 0x3 EPOLL_IOC_TYPE = 0x8a EROFS_SUPER_MAGIC_V1 = 0xe0f5e1e2 - ESP_V4_FLOW = 0xa - ESP_V6_FLOW = 0xc - ETHER_FLOW = 0x12 ETHTOOL_BUSINFO_LEN = 0x20 ETHTOOL_EROMVERS_LEN = 0x20 ETHTOOL_FAMILY_NAME = "ethtool" @@ -1213,6 +1215,7 @@ const ( FAN_EVENT_INFO_TYPE_DFID_NAME = 0x2 FAN_EVENT_INFO_TYPE_ERROR = 0x5 FAN_EVENT_INFO_TYPE_FID = 0x1 + FAN_EVENT_INFO_TYPE_MNT = 0x7 FAN_EVENT_INFO_TYPE_NEW_DFID_NAME = 0xc FAN_EVENT_INFO_TYPE_OLD_DFID_NAME = 0xa FAN_EVENT_INFO_TYPE_PIDFD = 0x4 @@ -1231,9 +1234,12 @@ const ( FAN_MARK_IGNORED_SURV_MODIFY = 0x40 FAN_MARK_IGNORE_SURV = 0x440 FAN_MARK_INODE = 0x0 + FAN_MARK_MNTNS = 0x110 FAN_MARK_MOUNT = 0x10 FAN_MARK_ONLYDIR = 0x8 FAN_MARK_REMOVE = 0x2 + FAN_MNT_ATTACH = 0x1000000 + FAN_MNT_DETACH = 0x2000000 FAN_MODIFY = 0x2 FAN_MOVE = 0xc0 FAN_MOVED_FROM = 0x40 @@ -1255,6 +1261,7 @@ const ( FAN_REPORT_DIR_FID = 0x400 FAN_REPORT_FD_ERROR = 0x2000 FAN_REPORT_FID = 0x200 + FAN_REPORT_MNT = 0x4000 FAN_REPORT_NAME = 0x800 FAN_REPORT_PIDFD = 0x80 FAN_REPORT_TARGET_FID = 0x1000 @@ -1274,6 +1281,7 @@ const ( FIB_RULE_PERMANENT = 0x1 FIB_RULE_UNRESOLVED = 0x4 FIDEDUPERANGE = 0xc0189436 + FSCRYPT_ADD_KEY_FLAG_HW_WRAPPED = 0x1 FSCRYPT_KEY_DESCRIPTOR_SIZE = 0x8 FSCRYPT_KEY_DESC_PREFIX = "fscrypt:" FSCRYPT_KEY_DESC_PREFIX_SIZE = 0x8 @@ -1582,7 +1590,6 @@ const ( IPV6_DONTFRAG = 0x3e IPV6_DROP_MEMBERSHIP = 0x15 IPV6_DSTOPTS = 0x3b - IPV6_FLOW = 0x11 IPV6_FREEBIND = 0x4e IPV6_HDRINCL = 0x24 IPV6_HOPLIMIT = 0x34 @@ -1633,7 +1640,6 @@ const ( IPV6_TRANSPARENT = 0x4b IPV6_UNICAST_HOPS = 0x10 IPV6_UNICAST_IF = 0x4c - IPV6_USER_FLOW = 0xe IPV6_V6ONLY = 0x1a IPV6_VERSION = 0x60 IPV6_VERSION_MASK = 0xf0 @@ -1695,7 +1701,6 @@ const ( IP_TTL = 0x2 IP_UNBLOCK_SOURCE = 0x25 IP_UNICAST_IF = 0x32 - IP_USER_FLOW = 0xd IP_XFRM_POLICY = 0x11 ISOFS_SUPER_MAGIC = 0x9660 ISTRIP = 0x20 @@ -1817,7 +1822,11 @@ const ( LANDLOCK_ACCESS_FS_WRITE_FILE = 0x2 LANDLOCK_ACCESS_NET_BIND_TCP = 0x1 LANDLOCK_ACCESS_NET_CONNECT_TCP = 0x2 + LANDLOCK_CREATE_RULESET_ERRATA = 0x2 LANDLOCK_CREATE_RULESET_VERSION = 0x1 + LANDLOCK_RESTRICT_SELF_LOG_NEW_EXEC_ON = 0x2 + LANDLOCK_RESTRICT_SELF_LOG_SAME_EXEC_OFF = 0x1 + LANDLOCK_RESTRICT_SELF_LOG_SUBDOMAINS_OFF = 0x4 LANDLOCK_SCOPE_ABSTRACT_UNIX_SOCKET = 0x1 LANDLOCK_SCOPE_SIGNAL = 0x2 LINUX_REBOOT_CMD_CAD_OFF = 0x0 @@ -2493,6 +2502,10 @@ const ( PR_FP_EXC_UND = 0x40000 PR_FP_MODE_FR = 0x1 PR_FP_MODE_FRE = 0x2 + PR_FUTEX_HASH = 0x4e + PR_FUTEX_HASH_GET_IMMUTABLE = 0x3 + PR_FUTEX_HASH_GET_SLOTS = 0x2 + PR_FUTEX_HASH_SET_SLOTS = 0x1 PR_GET_AUXV = 0x41555856 PR_GET_CHILD_SUBREAPER = 0x25 
PR_GET_DUMPABLE = 0x3 @@ -2652,6 +2665,10 @@ const ( PR_TAGGED_ADDR_ENABLE = 0x1 PR_TASK_PERF_EVENTS_DISABLE = 0x1f PR_TASK_PERF_EVENTS_ENABLE = 0x20 + PR_TIMER_CREATE_RESTORE_IDS = 0x4d + PR_TIMER_CREATE_RESTORE_IDS_GET = 0x2 + PR_TIMER_CREATE_RESTORE_IDS_OFF = 0x0 + PR_TIMER_CREATE_RESTORE_IDS_ON = 0x1 PR_TIMING_STATISTICAL = 0x0 PR_TIMING_TIMESTAMP = 0x1 PR_TSC_ENABLE = 0x1 @@ -2732,6 +2749,7 @@ const ( PTRACE_SETREGSET = 0x4205 PTRACE_SETSIGINFO = 0x4203 PTRACE_SETSIGMASK = 0x420b + PTRACE_SET_SYSCALL_INFO = 0x4212 PTRACE_SET_SYSCALL_USER_DISPATCH_CONFIG = 0x4210 PTRACE_SINGLESTEP = 0x9 PTRACE_SYSCALL = 0x18 @@ -2982,6 +3000,7 @@ const ( RTPROT_NTK = 0xf RTPROT_OPENR = 0x63 RTPROT_OSPF = 0xbc + RTPROT_OVN = 0x54 RTPROT_RA = 0x9 RTPROT_REDIRECT = 0x1 RTPROT_RIP = 0xbd @@ -3336,7 +3355,7 @@ const ( TASKSTATS_GENL_NAME = "TASKSTATS" TASKSTATS_GENL_VERSION = 0x1 TASKSTATS_TYPE_MAX = 0x6 - TASKSTATS_VERSION = 0xf + TASKSTATS_VERSION = 0x10 TCIFLUSH = 0x0 TCIOFF = 0x2 TCIOFLUSH = 0x2 @@ -3406,8 +3425,6 @@ const ( TCP_TX_DELAY = 0x25 TCP_ULP = 0x1f TCP_USER_TIMEOUT = 0x12 - TCP_V4_FLOW = 0x1 - TCP_V6_FLOW = 0x5 TCP_WINDOW_CLAMP = 0xa TCP_ZEROCOPY_RECEIVE = 0x23 TFD_TIMER_ABSTIME = 0x1 @@ -3530,8 +3547,6 @@ const ( UDP_NO_CHECK6_RX = 0x66 UDP_NO_CHECK6_TX = 0x65 UDP_SEGMENT = 0x67 - UDP_V4_FLOW = 0x2 - UDP_V6_FLOW = 0x6 UMOUNT_NOFOLLOW = 0x8 USBDEVICE_SUPER_MAGIC = 0x9fa2 UTIME_NOW = 0x3fffffff @@ -3574,7 +3589,7 @@ const ( WDIOS_TEMPPANIC = 0x4 WDIOS_UNKNOWN = -0x1 WEXITED = 0x4 - WGALLOWEDIP_A_MAX = 0x3 + WGALLOWEDIP_A_MAX = 0x4 WGDEVICE_A_MAX = 0x8 WGPEER_A_MAX = 0xa WG_CMD_MAX = 0x1 @@ -3688,6 +3703,7 @@ const ( XDP_SHARED_UMEM = 0x1 XDP_STATISTICS = 0x7 XDP_TXMD_FLAGS_CHECKSUM = 0x2 + XDP_TXMD_FLAGS_LAUNCH_TIME = 0x4 XDP_TXMD_FLAGS_TIMESTAMP = 0x1 XDP_TX_METADATA = 0x2 XDP_TX_RING = 0x3 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go index a8c421e2..1c37f9fb 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go @@ -68,6 +68,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -360,6 +361,7 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go index 9a88d181..6f54d34a 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go @@ -68,6 +68,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -361,6 +362,7 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go index 7cb6a867..783ec5c1 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go @@ -68,6 +68,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -366,6 +367,7 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 
SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go index d0ecd2c5..ca83d3ba 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go @@ -68,6 +68,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -359,6 +360,7 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go index 7a2940ae..607e611c 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go @@ -68,6 +68,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -353,6 +354,7 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go index d14ca8f2..b9cb5bd3 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go @@ -68,6 +68,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x200 @@ -359,6 +360,7 @@ const ( SO_OOBINLINE = 0x100 SO_PASSCRED = 0x11 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go index 2da1bac1..65b078a6 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go @@ -68,6 +68,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x200 @@ -359,6 +360,7 @@ const ( SO_OOBINLINE = 0x100 SO_PASSCRED = 0x11 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go index 28727514..5298a303 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go @@ -68,6 +68,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x200 @@ -359,6 +360,7 @@ const ( SO_OOBINLINE = 0x100 SO_PASSCRED = 0x11 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go index 7f287b54..7bc557c8 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go @@ -68,6 +68,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x200 @@ -359,6 +360,7 @@ const ( SO_OOBINLINE = 0x100 SO_PASSCRED = 
0x11 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go index 7e5f9e6a..152399bb 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go @@ -68,6 +68,7 @@ const ( CS8 = 0x300 CSIZE = 0x300 CSTOPB = 0x400 + DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x40 @@ -414,6 +415,7 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x14 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x15 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go index 37c87952..1a1ce240 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go @@ -68,6 +68,7 @@ const ( CS8 = 0x300 CSIZE = 0x300 CSTOPB = 0x400 + DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x40 @@ -418,6 +419,7 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x14 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x15 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go index 52201336..4231a1fb 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go @@ -68,6 +68,7 @@ const ( CS8 = 0x300 CSIZE = 0x300 CSTOPB = 0x400 + DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x40 @@ -418,6 +419,7 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x14 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x15 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go index 4bfe2b5b..21c0e952 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go @@ -68,6 +68,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -350,6 +351,7 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go index e3cffb86..f00d1cd7 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go @@ -68,6 +68,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0xfd12 ECCGETLAYOUT = 0x81484d11 ECCGETSTATS = 0x80104d12 ECHOCTL = 0x200 @@ -422,6 +423,7 @@ const ( SO_OOBINLINE = 0xa SO_PASSCRED = 0x10 SO_PASSPIDFD = 0x4c + SO_PASSRIGHTS = 0x53 SO_PASSSEC = 0x22 SO_PEEK_OFF = 0x2a SO_PEERCRED = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go index c219c8db..bc8d539e 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go @@ -71,6 +71,7 @@ const ( CS8 = 0x30 CSIZE = 0x30 CSTOPB = 0x40 + DM_MPATH_PROBE_PATHS = 0x2000fd12 ECCGETLAYOUT = 0x41484d11 ECCGETSTATS = 0x40104d12 ECHOCTL = 0x200 @@ 
-461,6 +462,7 @@ const ( SO_OOBINLINE = 0x100 SO_PASSCRED = 0x2 SO_PASSPIDFD = 0x55 + SO_PASSRIGHTS = 0x5c SO_PASSSEC = 0x1f SO_PEEK_OFF = 0x26 SO_PEERCRED = 0x40 diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go index c79aaff3..aca56ee4 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_386.go @@ -462,4 +462,5 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 + SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go index 5eb45069..2ea1ef58 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go @@ -385,4 +385,5 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 + SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go index 05e50297..d22c8af3 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go @@ -426,4 +426,5 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 + SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go index 38c53ec5..5ee264ae 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go @@ -329,4 +329,5 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 + SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go index 31d2e71a..f9f03ebf 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go @@ -325,4 +325,5 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 + SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go index f4184a33..87c2118e 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go @@ -446,4 +446,5 @@ const ( SYS_GETXATTRAT = 4464 SYS_LISTXATTRAT = 4465 SYS_REMOVEXATTRAT = 4466 + SYS_OPEN_TREE_ATTR = 4467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go index 05b99622..391ad102 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go @@ -376,4 +376,5 @@ const ( SYS_GETXATTRAT = 5464 SYS_LISTXATTRAT = 5465 SYS_REMOVEXATTRAT = 5466 + SYS_OPEN_TREE_ATTR = 5467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go index 43a256e9..56561577 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go @@ -376,4 +376,5 @@ const ( SYS_GETXATTRAT = 5464 SYS_LISTXATTRAT = 5465 SYS_REMOVEXATTRAT = 5466 + SYS_OPEN_TREE_ATTR = 5467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go index eea5ddfc..0482b52e 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go +++ 
b/vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go @@ -446,4 +446,5 @@ const ( SYS_GETXATTRAT = 4464 SYS_LISTXATTRAT = 4465 SYS_REMOVEXATTRAT = 4466 + SYS_OPEN_TREE_ATTR = 4467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go index 0d777bfb..71806f08 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go @@ -453,4 +453,5 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 + SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go index b4463650..e35a7105 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go @@ -425,4 +425,5 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 + SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go index 0c7d21c1..2aea4767 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go @@ -425,4 +425,5 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 + SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go index 84053916..6c9bb4e5 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go @@ -330,4 +330,5 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 + SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go index fcf1b790..680bc991 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go @@ -391,4 +391,5 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 + SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go b/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go index 52d15b5f..620f2710 100644 --- a/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go @@ -404,4 +404,5 @@ const ( SYS_GETXATTRAT = 464 SYS_LISTXATTRAT = 465 SYS_REMOVEXATTRAT = 466 + SYS_OPEN_TREE_ATTR = 467 ) diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux.go b/vendor/golang.org/x/sys/unix/ztypes_linux.go index 8bcac283..cd236443 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux.go @@ -115,7 +115,9 @@ type Statx_t struct { Atomic_write_unit_max uint32 Atomic_write_segments_max uint32 Dio_read_offset_align uint32 - _ [9]uint64 + Atomic_write_unit_max_opt uint32 + _ [1]uint32 + _ [8]uint64 } type Fsid struct { @@ -199,7 +201,8 @@ type FscryptAddKeyArg struct { Key_spec FscryptKeySpecifier Raw_size uint32 Key_id uint32 - _ [8]uint32 + Flags uint32 + _ [7]uint32 } type FscryptRemoveKeyArg struct { @@ -2317,6 +2320,11 @@ const ( NFT_CT_AVGPKT = 0x10 NFT_CT_ZONE = 0x11 NFT_CT_EVENTMASK = 0x12 + NFT_CT_SRC_IP = 0x13 + NFT_CT_DST_IP = 0x14 + NFT_CT_SRC_IP6 = 0x15 + NFT_CT_DST_IP6 = 0x16 + NFT_CT_ID = 0x17 NFTA_CT_UNSPEC = 0x0 NFTA_CT_DREG = 0x1 NFTA_CT_KEY = 0x2 @@ -2597,8 +2605,8 @@ const ( SOF_TIMESTAMPING_BIND_PHC = 0x8000 
SOF_TIMESTAMPING_OPT_ID_TCP = 0x10000 - SOF_TIMESTAMPING_LAST = 0x20000 - SOF_TIMESTAMPING_MASK = 0x3ffff + SOF_TIMESTAMPING_LAST = 0x40000 + SOF_TIMESTAMPING_MASK = 0x7ffff SCM_TSTAMP_SND = 0x0 SCM_TSTAMP_SCHED = 0x1 @@ -4044,7 +4052,7 @@ const ( ETHTOOL_A_TSINFO_PHC_INDEX = 0x5 ETHTOOL_A_TSINFO_STATS = 0x6 ETHTOOL_A_TSINFO_HWTSTAMP_PROVIDER = 0x7 - ETHTOOL_A_TSINFO_MAX = 0x7 + ETHTOOL_A_TSINFO_MAX = 0x9 ETHTOOL_A_CABLE_TEST_UNSPEC = 0x0 ETHTOOL_A_CABLE_TEST_HEADER = 0x1 ETHTOOL_A_CABLE_TEST_MAX = 0x1 @@ -4130,6 +4138,19 @@ const ( ETHTOOL_A_TUNNEL_INFO_MAX = 0x2 ) +const ( + TCP_V4_FLOW = 0x1 + UDP_V4_FLOW = 0x2 + TCP_V6_FLOW = 0x5 + UDP_V6_FLOW = 0x6 + ESP_V4_FLOW = 0xa + ESP_V6_FLOW = 0xc + IP_USER_FLOW = 0xd + IPV6_USER_FLOW = 0xe + IPV6_FLOW = 0x11 + ETHER_FLOW = 0x12 +) + const SPEED_UNKNOWN = -0x1 type EthtoolDrvinfo struct { @@ -4780,7 +4801,7 @@ const ( NL80211_ATTR_MAC_HINT = 0xc8 NL80211_ATTR_MAC_MASK = 0xd7 NL80211_ATTR_MAX_AP_ASSOC_STA = 0xca - NL80211_ATTR_MAX = 0x150 + NL80211_ATTR_MAX = 0x151 NL80211_ATTR_MAX_CRIT_PROT_DURATION = 0xb4 NL80211_ATTR_MAX_CSA_COUNTERS = 0xce NL80211_ATTR_MAX_HW_TIMESTAMP_PEERS = 0x143 @@ -5414,7 +5435,7 @@ const ( NL80211_FREQUENCY_ATTR_GO_CONCURRENT = 0xf NL80211_FREQUENCY_ATTR_INDOOR_ONLY = 0xe NL80211_FREQUENCY_ATTR_IR_CONCURRENT = 0xf - NL80211_FREQUENCY_ATTR_MAX = 0x21 + NL80211_FREQUENCY_ATTR_MAX = 0x22 NL80211_FREQUENCY_ATTR_MAX_TX_POWER = 0x6 NL80211_FREQUENCY_ATTR_NO_10MHZ = 0x11 NL80211_FREQUENCY_ATTR_NO_160MHZ = 0xc @@ -5530,7 +5551,7 @@ const ( NL80211_MAX_SUPP_SELECTORS = 0x80 NL80211_MBSSID_CONFIG_ATTR_EMA = 0x5 NL80211_MBSSID_CONFIG_ATTR_INDEX = 0x3 - NL80211_MBSSID_CONFIG_ATTR_MAX = 0x5 + NL80211_MBSSID_CONFIG_ATTR_MAX = 0x6 NL80211_MBSSID_CONFIG_ATTR_MAX_EMA_PROFILE_PERIODICITY = 0x2 NL80211_MBSSID_CONFIG_ATTR_MAX_INTERFACES = 0x1 NL80211_MBSSID_CONFIG_ATTR_TX_IFINDEX = 0x4 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_386.go b/vendor/golang.org/x/sys/unix/ztypes_linux_386.go index 62db85f6..485f2d3a 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_386.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_386.go @@ -282,19 +282,13 @@ type Taskstats struct { Ac_exitcode uint32 Ac_flag uint8 Ac_nice uint8 - _ [4]byte + _ [6]byte Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -330,17 +324,11 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -348,10 +336,22 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + 
Compact_delay_min uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go index 7d89d648..ecbd1ad8 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go @@ -300,16 +300,10 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -344,27 +338,33 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go b/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go index 9c0b39ee..02f0463a 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go @@ -91,7 +91,7 @@ type Stat_t struct { Gid uint32 Rdev uint64 _ uint16 - _ [4]byte + _ [6]byte Size int64 Blksize int32 _ [4]byte @@ -273,19 +273,13 @@ type Taskstats struct { Ac_exitcode uint32 Ac_flag uint8 Ac_nice uint8 - _ [4]byte + _ [6]byte Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -321,17 +315,11 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -339,10 +327,22 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min 
uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go index de9c7ff3..6f4d400d 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go @@ -279,16 +279,10 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -323,27 +317,33 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go index 2336bd2b..cd532cfa 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go @@ -280,16 +280,10 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -324,27 +318,33 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min 
uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go index 4711f0be..41336208 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go @@ -278,19 +278,13 @@ type Taskstats struct { Ac_exitcode uint32 Ac_flag uint8 Ac_nice uint8 - _ [4]byte + _ [6]byte Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -326,17 +320,11 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -344,10 +332,22 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go index ab99a34b..eaa37eb7 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go @@ -282,16 +282,10 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -326,27 +320,33 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max 
uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go index 04c9866e..98ae6a1e 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go @@ -282,16 +282,10 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -326,27 +320,33 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go b/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go index 60aa69f6..cae19615 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go @@ -278,19 +278,13 @@ type Taskstats struct { Ac_exitcode uint32 Ac_flag uint8 Ac_nice uint8 - _ [4]byte + _ [6]byte Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -326,17 +320,11 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -344,10 +332,22 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 + 
Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go index cb4fad78..6ce3b4e0 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go @@ -90,7 +90,7 @@ type Stat_t struct { Gid uint32 Rdev uint64 _ uint16 - _ [4]byte + _ [6]byte Size int64 Blksize int32 _ [4]byte @@ -285,19 +285,13 @@ type Taskstats struct { Ac_exitcode uint32 Ac_flag uint8 Ac_nice uint8 - _ [4]byte + _ [6]byte Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -333,17 +327,11 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 _ [4]byte Ac_tgetime uint64 @@ -351,10 +339,22 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go index 60272cfc..c7429c6a 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go @@ -289,16 +289,10 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -333,27 +327,33 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + 
Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go index 3f5b91bc..4bf4baf4 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go @@ -289,16 +289,10 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -333,27 +327,33 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go index 51550f15..e9709d70 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go @@ -307,16 +307,10 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]uint8 @@ -351,27 +345,33 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 
+ Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go index 3239e50e..fb44268c 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go @@ -302,16 +302,10 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -346,27 +340,33 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go b/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go index faf20027..9c38265c 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go @@ -284,16 +284,10 @@ type Taskstats struct { Ac_nice uint8 Cpu_count uint64 Cpu_delay_total uint64 - Cpu_delay_max uint64 - Cpu_delay_min uint64 Blkio_count uint64 Blkio_delay_total uint64 - Blkio_delay_max uint64 - Blkio_delay_min uint64 Swapin_count uint64 Swapin_delay_total uint64 - Swapin_delay_max uint64 - Swapin_delay_min uint64 Cpu_run_real_total uint64 Cpu_run_virtual_total uint64 Ac_comm [32]int8 @@ -328,27 +322,33 @@ type Taskstats struct { Cpu_scaled_run_real_total uint64 Freepages_count uint64 Freepages_delay_total uint64 - Freepages_delay_max uint64 - Freepages_delay_min uint64 Thrashing_count uint64 Thrashing_delay_total uint64 - Thrashing_delay_max uint64 - Thrashing_delay_min uint64 Ac_btime64 uint64 Compact_count uint64 Compact_delay_total uint64 - Compact_delay_max uint64 - Compact_delay_min uint64 Ac_tgid uint32 Ac_tgetime uint64 Ac_exe_dev uint64 Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 - Wpcopy_delay_max uint64 - Wpcopy_delay_min uint64 Irq_count uint64 Irq_delay_total uint64 + Cpu_delay_max uint64 + Cpu_delay_min uint64 + Blkio_delay_max uint64 + Blkio_delay_min uint64 + Swapin_delay_max uint64 + Swapin_delay_min uint64 + Freepages_delay_max uint64 + Freepages_delay_min uint64 + Thrashing_delay_max uint64 + Thrashing_delay_min uint64 + Compact_delay_max uint64 + Compact_delay_min uint64 + Wpcopy_delay_max uint64 + Wpcopy_delay_min uint64 Irq_delay_max uint64 Irq_delay_min uint64 } diff --git a/vendor/modules.txt 
b/vendor/modules.txt index e17e5481..121a3d80 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -276,7 +276,7 @@ go.opentelemetry.io/otel/trace go.opentelemetry.io/otel/trace/embedded go.opentelemetry.io/otel/trace/internal/telemetry go.opentelemetry.io/otel/trace/noop -# golang.org/x/crypto v0.40.0 +# golang.org/x/crypto v0.41.0 ## explicit; go 1.23.0 golang.org/x/crypto/bcrypt golang.org/x/crypto/blowfish @@ -288,7 +288,7 @@ golang.org/x/crypto/internal/poly1305 # golang.org/x/mod v0.27.0 ## explicit; go 1.23.0 golang.org/x/mod/semver -# golang.org/x/net v0.41.0 +# golang.org/x/net v0.42.0 ## explicit; go 1.23.0 golang.org/x/net/internal/socks golang.org/x/net/proxy @@ -299,12 +299,12 @@ golang.org/x/oauth2/internal # golang.org/x/sync v0.16.0 ## explicit; go 1.23.0 golang.org/x/sync/errgroup -# golang.org/x/sys v0.34.0 +# golang.org/x/sys v0.35.0 ## explicit; go 1.23.0 golang.org/x/sys/cpu golang.org/x/sys/unix golang.org/x/sys/windows -# golang.org/x/text v0.27.0 +# golang.org/x/text v0.28.0 ## explicit; go 1.23.0 golang.org/x/text/cases golang.org/x/text/internal From 37eba0fed9f0d986ec284bf8a990bb801e41ad8a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Aug 2025 10:34:11 +0000 Subject: [PATCH 152/179] Bump github.com/go-openapi/errors from 0.22.1 to 0.22.2 Bumps [github.com/go-openapi/errors](https://github.com/go-openapi/errors) from 0.22.1 to 0.22.2. - [Commits](https://github.com/go-openapi/errors/compare/v0.22.1...v0.22.2) --- updated-dependencies: - dependency-name: github.com/go-openapi/errors dependency-version: 0.22.2 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 +- .../go-openapi/errors/.golangci.yml | 120 ++++++++++-------- .../go-openapi/errors/middleware.go | 4 +- .../github.com/go-openapi/errors/parsing.go | 36 +++--- vendor/modules.txt | 2 +- 6 files changed, 94 insertions(+), 74 deletions(-) diff --git a/go.mod b/go.mod index 7e930b4b..36e42be2 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/bradleyfalzon/ghinstallation/v2 v2.16.0 github.com/cloudbase/garm-provider-common v0.1.7 github.com/felixge/httpsnoop v1.0.4 - github.com/go-openapi/errors v0.22.1 + github.com/go-openapi/errors v0.22.2 github.com/go-openapi/runtime v0.28.0 github.com/go-openapi/strfmt v0.23.0 github.com/go-openapi/swag v0.23.1 diff --git a/go.sum b/go.sum index b1828804..1eaeff3e 100644 --- a/go.sum +++ b/go.sum @@ -34,8 +34,8 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo= -github.com/go-openapi/errors v0.22.1 h1:kslMRRnK7NCb/CvR1q1VWuEQCEIsBGn5GgKD9e+HYhU= -github.com/go-openapi/errors v0.22.1/go.mod h1:+n/5UdIqdVnLIJ6Q9Se8HNGUXYaY6CN8ImWzfi/Gzp0= +github.com/go-openapi/errors v0.22.2 h1:rdxhzcBUazEcGccKqbY1Y7NS8FDcMyIRr0934jrYnZg= +github.com/go-openapi/errors v0.22.2/go.mod h1:+n/5UdIqdVnLIJ6Q9Se8HNGUXYaY6CN8ImWzfi/Gzp0= github.com/go-openapi/jsonpointer v0.21.1 h1:whnzv/pNXtK2FbX/W9yJfRmE2gsmkfahjMKB0fZvcic= github.com/go-openapi/jsonpointer v0.21.1/go.mod h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk= github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= diff --git 
a/vendor/github.com/go-openapi/errors/.golangci.yml b/vendor/github.com/go-openapi/errors/.golangci.yml index ee8b9bd1..60798c21 100644 --- a/vendor/github.com/go-openapi/errors/.golangci.yml +++ b/vendor/github.com/go-openapi/errors/.golangci.yml @@ -1,55 +1,75 @@ -linters-settings: - gocyclo: - min-complexity: 45 - dupl: - threshold: 200 - goconst: - min-len: 2 - min-occurrences: 3 - +version: "2" linters: - enable-all: true + default: all disable: - - unparam - - lll - - gochecknoinits - - gochecknoglobals - - funlen - - godox - - gocognit - - whitespace - - wsl - - wrapcheck - - testpackage - - nlreturn - - errorlint - - nestif - - godot - - gofumpt - - paralleltest - - tparallel - - thelper - - exhaustruct - - varnamelen - - gci + - cyclop - depguard - errchkjson - - inamedparam - - nonamedreturns - - musttag - - ireturn + - errorlint + - exhaustruct - forcetypeassert - - cyclop - # deprecated linters - #- deadcode - #- interfacer - #- scopelint - #- varcheck - #- structcheck - #- golint - #- nosnakecase - #- maligned - #- goerr113 - #- ifshort - #- gomnd - #- exhaustivestruct + - funlen + - gochecknoglobals + - gochecknoinits + - gocognit + - godot + - godox + - gosmopolitan + - inamedparam + - intrange # disabled while < go1.22 + - ireturn + - lll + - musttag + - nestif + - nlreturn + - noinlineerr + - nonamedreturns + - paralleltest + - recvcheck + - testpackage + - thelper + - tparallel + - unparam + - varnamelen + - whitespace + - wrapcheck + - wsl + - wsl_v5 + settings: + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + gocyclo: + min-complexity: 45 + exclusions: + generated: lax + presets: + - comments + - common-false-positives + - legacy + - std-error-handling + paths: + - third_party$ + - builtin$ + - examples$ +formatters: + enable: + - gofmt + - goimports + exclusions: + generated: lax + paths: + - third_party$ + - builtin$ + - examples$ +issues: + # Maximum issues count per one linter. + # Set to 0 to disable. + # Default: 50 + max-issues-per-linter: 0 + # Maximum count of issues with the same text. + # Set to 0 to disable. 
+ # Default: 3 + max-same-issues: 0 diff --git a/vendor/github.com/go-openapi/errors/middleware.go b/vendor/github.com/go-openapi/errors/middleware.go index 67f80386..1b9f3a93 100644 --- a/vendor/github.com/go-openapi/errors/middleware.go +++ b/vendor/github.com/go-openapi/errors/middleware.go @@ -35,7 +35,7 @@ func (v *APIVerificationFailed) Error() string { hasSpecMissing := len(v.MissingSpecification) > 0 if hasRegMissing { - buf.WriteString(fmt.Sprintf("missing [%s] %s registrations", strings.Join(v.MissingRegistration, ", "), v.Section)) + fmt.Fprintf(buf, "missing [%s] %s registrations", strings.Join(v.MissingRegistration, ", "), v.Section) } if hasRegMissing && hasSpecMissing { @@ -43,7 +43,7 @@ func (v *APIVerificationFailed) Error() string { } if hasSpecMissing { - buf.WriteString(fmt.Sprintf("missing from spec file [%s] %s", strings.Join(v.MissingSpecification, ", "), v.Section)) + fmt.Fprintf(buf, "missing from spec file [%s] %s", strings.Join(v.MissingSpecification, ", "), v.Section) } return buf.String() diff --git a/vendor/github.com/go-openapi/errors/parsing.go b/vendor/github.com/go-openapi/errors/parsing.go index ce1ef9cb..34930c08 100644 --- a/vendor/github.com/go-openapi/errors/parsing.go +++ b/vendor/github.com/go-openapi/errors/parsing.go @@ -30,6 +30,24 @@ type ParseError struct { message string } +// NewParseError creates a new parse error +func NewParseError(name, in, value string, reason error) *ParseError { + var msg string + if in == "" { + msg = fmt.Sprintf(parseErrorTemplContentNoIn, name, value, reason) + } else { + msg = fmt.Sprintf(parseErrorTemplContent, name, in, value, reason) + } + return &ParseError{ + code: http.StatusBadRequest, + Name: name, + In: in, + Value: value, + Reason: reason, + message: msg, + } +} + func (e *ParseError) Error() string { return e.message } @@ -59,21 +77,3 @@ const ( parseErrorTemplContent = `parsing %s %s from %q failed, because %s` parseErrorTemplContentNoIn = `parsing %s from %q failed, because %s` ) - -// NewParseError creates a new parse error -func NewParseError(name, in, value string, reason error) *ParseError { - var msg string - if in == "" { - msg = fmt.Sprintf(parseErrorTemplContentNoIn, name, value, reason) - } else { - msg = fmt.Sprintf(parseErrorTemplContent, name, in, value, reason) - } - return &ParseError{ - code: http.StatusBadRequest, - Name: name, - In: in, - Value: value, - Reason: reason, - message: msg, - } -} diff --git a/vendor/modules.txt b/vendor/modules.txt index 121a3d80..257d4f95 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -52,7 +52,7 @@ github.com/go-openapi/analysis/internal/flatten/operations github.com/go-openapi/analysis/internal/flatten/replace github.com/go-openapi/analysis/internal/flatten/schutils github.com/go-openapi/analysis/internal/flatten/sortref -# github.com/go-openapi/errors v0.22.1 +# github.com/go-openapi/errors v0.22.2 ## explicit; go 1.20 github.com/go-openapi/errors # github.com/go-openapi/jsonpointer v0.21.1 From b2dee1d844ac8988ca5401b193d0decff11dc89d Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 12 Aug 2025 09:15:58 +0000 Subject: [PATCH 153/179] Preload missing resources There are some inconsistencies in the way the API returns some values for pools and scale sets. This is due to not preloading the appropriate relations. 
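
For readers unfamiliar with GORM's lazy relations, the sketch below (not part of the patch; simplified stand-in models and an in-memory SQLite database) shows why a missing Preload leaves nested fields such as Pool zero-valued in the returned structs, which is the inconsistency this change addresses:

```go
// Minimal sketch, not part of this patch: simplified stand-in models and an
// in-memory SQLite database, used only to show what Preload changes.
package main

import (
	"fmt"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

type Pool struct {
	ID   uint
	Name string
}

type Instance struct {
	ID     uint
	Name   string
	PoolID uint
	Pool   Pool // belongs-to relation; stays zero-valued unless preloaded
}

func main() {
	db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	if err := db.AutoMigrate(&Pool{}, &Instance{}); err != nil {
		panic(err)
	}
	db.Create(&Pool{ID: 1, Name: "default"})
	db.Create(&Instance{ID: 1, Name: "runner-1", PoolID: 1})

	var plain, preloaded Instance
	db.First(&plain, 1)                     // relation not loaded: plain.Pool is the zero value
	db.Preload("Pool").First(&preloaded, 1) // relation loaded from the pools table

	// Prints " vs default" — the un-preloaded instance has an empty Pool.Name.
	fmt.Println(plain.Pool.Name, "vs", preloaded.Pool.Name)
}
```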
Signed-off-by: Gabriel Adrian Samfira --- database/sql/instances.go | 11 +++++++++-- database/sql/pools.go | 1 + database/sql/scaleset_instances.go | 5 ++++- database/sql/scalesets.go | 3 +++ database/sql/util.go | 13 ++++++++++++- 5 files changed, 29 insertions(+), 4 deletions(-) diff --git a/database/sql/instances.go b/database/sql/instances.go index dab81f10..c6c2d204 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -326,7 +326,10 @@ func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]par } var instances []Instance - query := s.conn.Model(&Instance{}).Preload("Job").Where("pool_id = ?", u) + query := s.conn. + Preload("Pool"). + Preload("Job"). + Where("pool_id = ?", u) if err := query.Find(&instances); err.Error != nil { return nil, errors.Wrap(err.Error, "fetching instances") @@ -345,7 +348,11 @@ func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]par func (s *sqlDatabase) ListAllInstances(_ context.Context) ([]params.Instance, error) { var instances []Instance - q := s.conn.Model(&Instance{}).Preload("Job").Find(&instances) + q := s.conn. + Preload("Pool"). + Preload("ScaleSet"). + Preload("Job"). + Find(&instances) if q.Error != nil { return nil, errors.Wrap(q.Error, "fetching instances") } diff --git a/database/sql/pools.go b/database/sql/pools.go index a4b3354e..350e1dc2 100644 --- a/database/sql/pools.go +++ b/database/sql/pools.go @@ -46,6 +46,7 @@ func (s *sqlDatabase) ListAllPools(_ context.Context) ([]params.Pool, error) { Preload("Enterprise"). Preload("Enterprise.Endpoint"). Omit("extra_specs"). + Omit("status_messages"). Find(&pools) if q.Error != nil { return nil, errors.Wrap(q.Error, "fetching all pools") diff --git a/database/sql/scaleset_instances.go b/database/sql/scaleset_instances.go index bbc4f593..61271e8b 100644 --- a/database/sql/scaleset_instances.go +++ b/database/sql/scaleset_instances.go @@ -66,7 +66,10 @@ func (s *sqlDatabase) CreateScaleSetInstance(_ context.Context, scaleSetID uint, func (s *sqlDatabase) ListScaleSetInstances(_ context.Context, scalesetID uint) ([]params.Instance, error) { var instances []Instance - query := s.conn.Model(&Instance{}).Preload("Job").Where("scale_set_fk_id = ?", scalesetID) + query := s.conn. + Preload("ScaleSet"). + Preload("Job"). + Where("scale_set_fk_id = ?", scalesetID) if err := query.Find(&instances); err.Error != nil { return nil, errors.Wrap(err.Error, "fetching instances") diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go index 65a51ca0..752c7948 100644 --- a/database/sql/scalesets.go +++ b/database/sql/scalesets.go @@ -33,8 +33,11 @@ func (s *sqlDatabase) ListAllScaleSets(_ context.Context) ([]params.ScaleSet, er q := s.conn.Model(&ScaleSet{}). Preload("Organization"). + Preload("Organization.Endpoint"). Preload("Repository"). + Preload("Repository.Endpoint"). Preload("Enterprise"). + Preload("Enterprise.Endpoint"). Omit("extra_specs"). Omit("status_messages"). 
Find(&scaleSets) diff --git a/database/sql/util.go b/database/sql/util.go index 2b2a1de8..ebb3c57c 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -330,6 +330,8 @@ func (s *sqlDatabase) sqlToCommonPool(pool Pool) (params.Pool, error) { func (s *sqlDatabase) sqlToCommonScaleSet(scaleSet ScaleSet) (params.ScaleSet, error) { ret := params.ScaleSet{ ID: scaleSet.ID, + CreatedAt: scaleSet.CreatedAt, + UpdatedAt: scaleSet.UpdatedAt, ScaleSetID: scaleSet.ScaleSetID, Name: scaleSet.Name, DisableUpdate: scaleSet.DisableUpdate, @@ -355,24 +357,33 @@ func (s *sqlDatabase) sqlToCommonScaleSet(scaleSet ScaleSet) (params.ScaleSet, e DesiredRunnerCount: scaleSet.DesiredRunnerCount, } + var ep GithubEndpoint if scaleSet.RepoID != nil { ret.RepoID = scaleSet.RepoID.String() if scaleSet.Repository.Owner != "" && scaleSet.Repository.Name != "" { ret.RepoName = fmt.Sprintf("%s/%s", scaleSet.Repository.Owner, scaleSet.Repository.Name) } + ep = scaleSet.Repository.Endpoint } if scaleSet.OrgID != nil { ret.OrgID = scaleSet.OrgID.String() ret.OrgName = scaleSet.Organization.Name + ep = scaleSet.Organization.Endpoint } if scaleSet.EnterpriseID != nil { ret.EnterpriseID = scaleSet.EnterpriseID.String() ret.EnterpriseName = scaleSet.Enterprise.Name + ep = scaleSet.Enterprise.Endpoint } - var err error + endpoint, err := s.sqlToCommonGithubEndpoint(ep) + if err != nil { + return params.ScaleSet{}, errors.Wrap(err, "converting endpoint") + } + ret.Endpoint = endpoint + for idx, inst := range scaleSet.Instances { ret.Instances[idx], err = s.sqlToParamsInstance(inst) if err != nil { From 325bca4af391cf4966dec5518b578e6600fd936c Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 12 Aug 2025 09:17:16 +0000 Subject: [PATCH 154/179] Add swagger annotations and updates Add swagger annotations to models to allow generating a full swagger definition. This will help generate clients in other languages if needed. Signed-off-by: Gabriel Adrian Samfira --- apiserver/params/params.go | 1 + params/params.go | 37 ++++++++++++++++++++++++++++++++++++- params/requests.go | 21 +++++++++++++++++++++ 3 files changed, 58 insertions(+), 1 deletion(-) diff --git a/apiserver/params/params.go b/apiserver/params/params.go index 7aee3bd2..ec42fab6 100644 --- a/apiserver/params/params.go +++ b/apiserver/params/params.go @@ -14,6 +14,7 @@ package params +// swagger:model APIErrorResponse // APIErrorResponse holds information about an error, returned by the API type APIErrorResponse struct { Error string `json:"error"` diff --git a/params/params.go b/params/params.go index 9cd4fc83..50e26d26 100644 --- a/params/params.go +++ b/params/params.go @@ -172,6 +172,7 @@ const ( MessageTypeJobAvailable = "JobAvailable" ) +// swagger:model StatusMessage type StatusMessage struct { CreatedAt time.Time `json:"created_at,omitempty"` Message string `json:"message,omitempty"` @@ -179,6 +180,7 @@ type StatusMessage struct { EventLevel EventLevel `json:"event_level,omitempty"` } +// swagger:model EntityEvent type EntityEvent struct { ID uint `json:"id,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` @@ -188,6 +190,7 @@ type EntityEvent struct { Message string `json:"message,omitempty"` } +// swagger:model Instance type Instance struct { // ID is the database ID of this instance. 
ID string `json:"id,omitempty"` @@ -282,6 +285,7 @@ func (i Instance) GetID() string { } // used by swagger client generated code +// swagger:model Instances type Instances []Instance type BootstrapInstance struct { @@ -352,6 +356,7 @@ type Tag struct { Name string `json:"name,omitempty"` } +// swagger:model Pool type Pool struct { RunnerPrefix @@ -376,7 +381,7 @@ type Pool struct { EnterpriseID string `json:"enterprise_id,omitempty"` EnterpriseName string `json:"enterprise_name,omitempty"` - Endpoint ForgeEndpoint `json:"forge_type,omitempty"` + Endpoint ForgeEndpoint `json:"endpoint,omitempty"` RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` @@ -487,11 +492,16 @@ func (p *Pool) HasRequiredLabels(set []string) bool { } // used by swagger client generated code +// swagger:model Pools type Pools []Pool +// swagger:model ScaleSet type ScaleSet struct { RunnerPrefix + CreatedAt time.Time `json:"created_at,omitempty"` + UpdatedAt time.Time `json:"updated_at,omitempty"` + ID uint `json:"id,omitempty"` ScaleSetID int `json:"scale_set_id,omitempty"` Name string `json:"name,omitempty"` @@ -511,6 +521,8 @@ type ScaleSet struct { Instances []Instance `json:"instances,omitempty"` DesiredRunnerCount int `json:"desired_runner_count,omitempty"` + Endpoint ForgeEndpoint `json:"endpoint,omitempty"` + RunnerBootstrapTimeout uint `json:"runner_bootstrap_timeout,omitempty"` // ExtraSpecs is an opaque raw json that gets sent to the provider // as part of the bootstrap params for instances. It can contain @@ -593,8 +605,10 @@ func (p *ScaleSet) RunnerTimeout() uint { } // used by swagger client generated code +// swagger:model ScaleSets type ScaleSets []ScaleSet +// swagger:model Repository type Repository struct { ID string `json:"id,omitempty"` Owner string `json:"owner,omitempty"` @@ -666,8 +680,10 @@ func (r Repository) String() string { } // used by swagger client generated code +// swagger:model Repositories type Repositories []Repository +// swagger:model Organization type Organization struct { ID string `json:"id,omitempty"` Name string `json:"name,omitempty"` @@ -724,8 +740,10 @@ func (o Organization) GetBalancerType() PoolBalancerType { } // used by swagger client generated code +// swagger:model Organizations type Organizations []Organization +// swagger:model Enterprise type Enterprise struct { ID string `json:"id,omitempty"` Name string `json:"name,omitempty"` @@ -782,9 +800,11 @@ func (e Enterprise) GetBalancerType() PoolBalancerType { } // used by swagger client generated code +// swagger:model Enterprises type Enterprises []Enterprise // Users holds information about a particular user +// swagger:model User type User struct { ID string `json:"id,omitempty"` CreatedAt time.Time `json:"created_at,omitempty"` @@ -801,10 +821,12 @@ type User struct { // JWTResponse holds the JWT token returned as a result of a // successful auth +// swagger:model JWTResponse type JWTResponse struct { Token string `json:"token,omitempty"` } +// swagger:model ControllerInfo type ControllerInfo struct { // ControllerID is the unique ID of this controller. This ID gets generated // automatically on controller init. 
@@ -857,6 +879,7 @@ func (c *ControllerInfo) JobBackoff() time.Duration { return time.Duration(int64(c.MinimumJobAgeBackoff)) } +// swagger:model GithubRateLimit type GithubRateLimit struct { Limit int `json:"limit,omitempty"` Used int `json:"used,omitempty"` @@ -875,6 +898,7 @@ func (g GithubRateLimit) ResetAt() time.Time { return time.Unix(g.Reset, 0) } +// swagger:model ForgeCredentials type ForgeCredentials struct { ID uint `json:"id,omitempty"` Name string `json:"name,omitempty"` @@ -1000,8 +1024,10 @@ func (g ForgeCredentials) RootCertificateBundle() (CertificateBundle, error) { } // used by swagger client generated code +// swagger:model Credentials type Credentials []ForgeCredentials +// swagger:model Provider type Provider struct { Name string `json:"name,omitempty"` ProviderType ProviderType `json:"type,omitempty"` @@ -1009,8 +1035,10 @@ type Provider struct { } // used by swagger client generated code +// swagger:model Providers type Providers []Provider +// swagger:model PoolManagerStatus type PoolManagerStatus struct { IsRunning bool `json:"running,omitempty"` FailureReason string `json:"failure_reason,omitempty"` @@ -1032,6 +1060,7 @@ func (p RunnerPrefix) GetRunnerPrefix() string { return p.Prefix } +// swagger:model Job type Job struct { // ID is the ID of the job. ID int64 `json:"id,omitempty"` @@ -1086,14 +1115,17 @@ type Job struct { UpdatedAt time.Time `json:"updated_at,omitempty"` } +// swagger:model Jobs // used by swagger client generated code type Jobs []Job +// swagger:model InstallWebhookParams type InstallWebhookParams struct { WebhookEndpointType WebhookEndpointType `json:"webhook_endpoint_type,omitempty"` InsecureSSL bool `json:"insecure_ssl,omitempty"` } +// swagger:model HookInfo type HookInfo struct { ID int64 `json:"id,omitempty"` URL string `json:"url,omitempty"` @@ -1106,6 +1138,7 @@ type CertificateBundle struct { RootCertificates map[string][]byte `json:"root_certificates,omitempty"` } +// swagger:model ForgeEntity type UpdateSystemInfoParams struct { OSName string `json:"os_name,omitempty"` OSVersion string `json:"os_version,omitempty"` @@ -1194,8 +1227,10 @@ func (g ForgeEntity) GetIDAsUUID() (uuid.UUID, error) { } // used by swagger client generated code +// swagger:model ForgeEndpoints type ForgeEndpoints []ForgeEndpoint +// swagger:model ForgeEndpoint type ForgeEndpoint struct { Name string `json:"name,omitempty"` Description string `json:"description,omitempty"` diff --git a/params/requests.go b/params/requests.go index 3f2fcfab..3f4e1737 100644 --- a/params/requests.go +++ b/params/requests.go @@ -39,6 +39,7 @@ type InstanceRequest struct { OSVersion string `json:"os_version"` } +// swagger:model CreateRepoParams type CreateRepoParams struct { Owner string `json:"owner,omitempty"` Name string `json:"name,omitempty"` @@ -80,6 +81,7 @@ func (c *CreateRepoParams) Validate() error { return nil } +// swagger:model CreateOrgParams type CreateOrgParams struct { Name string `json:"name,omitempty"` CredentialsName string `json:"credentials_name,omitempty"` @@ -115,6 +117,7 @@ func (c *CreateOrgParams) Validate() error { return nil } +// swagger:model CreateEnterpriseParams type CreateEnterpriseParams struct { Name string `json:"name,omitempty"` CredentialsName string `json:"credentials_name,omitempty"` @@ -143,6 +146,7 @@ func (c *CreateEnterpriseParams) Validate() error { // NewUserParams holds the needed information to create // a new user +// swagger:model NewUserParams type NewUserParams struct { Email string `json:"email,omitempty"` Username string 
`json:"username,omitempty"` @@ -152,6 +156,7 @@ type NewUserParams struct { Enabled bool `json:"-"` } +// swagger:model UpdatePoolParams type UpdatePoolParams struct { RunnerPrefix @@ -189,6 +194,7 @@ type CreateInstanceParams struct { JitConfiguration map[string]string `json:"jit_configuration,omitempty"` } +// swagger:model CreatePoolParams type CreatePoolParams struct { RunnerPrefix @@ -263,6 +269,7 @@ type UpdateUserParams struct { Enabled *bool `json:"enabled,omitempty"` } +// swagger:model PasswordLoginParams // PasswordLoginParams holds information used during // password authentication, that will be passed to a // password login function @@ -279,6 +286,7 @@ func (p PasswordLoginParams) Validate() error { return nil } +// swagger:model UpdateEntityParams type UpdateEntityParams struct { CredentialsName string `json:"credentials_name,omitempty"` WebhookSecret string `json:"webhook_secret,omitempty"` @@ -291,6 +299,7 @@ type InstanceUpdateMessage struct { AgentID *int64 `json:"agent_id,omitempty"` } +// swagger:model CreateGithubEndpointParams type CreateGithubEndpointParams struct { Name string `json:"name,omitempty"` Description string `json:"description,omitempty"` @@ -358,6 +367,7 @@ func (c CreateGithubEndpointParams) Validate() error { return nil } +// swagger:model UpdateGithubEndpointParams type UpdateGithubEndpointParams struct { Description *string `json:"description,omitempty"` APIBaseURL *string `json:"api_base_url,omitempty"` @@ -416,10 +426,12 @@ func (u UpdateGithubEndpointParams) Validate() error { return nil } +// swagger:model GithubPAT type GithubPAT struct { OAuth2Token string `json:"oauth2_token,omitempty"` } +// swagger:model GithubApp type GithubApp struct { AppID int64 `json:"app_id,omitempty"` InstallationID int64 `json:"installation_id,omitempty"` @@ -452,6 +464,7 @@ func (g GithubApp) Validate() error { return nil } +// swagger:model CreateGithubCredentialsParams type CreateGithubCredentialsParams struct { Name string `json:"name,omitempty"` Description string `json:"description,omitempty"` @@ -491,6 +504,7 @@ func (c CreateGithubCredentialsParams) Validate() error { return nil } +// swagger:model UpdateGithubCredentialsParams type UpdateGithubCredentialsParams struct { Name *string `json:"name,omitempty"` Description *string `json:"description,omitempty"` @@ -518,6 +532,7 @@ func (u UpdateGithubCredentialsParams) Validate() error { return nil } +// swagger:model UpdateControllerParams type UpdateControllerParams struct { MetadataURL *string `json:"metadata_url,omitempty"` CallbackURL *string `json:"callback_url,omitempty"` @@ -550,6 +565,7 @@ func (u UpdateControllerParams) Validate() error { return nil } +// swagger:model CreateScaleSetParams type CreateScaleSetParams struct { RunnerPrefix @@ -602,6 +618,7 @@ func (s *CreateScaleSetParams) Validate() error { return nil } +// swagger:model UpdateScaleSetParams type UpdateScaleSetParams struct { RunnerPrefix @@ -623,6 +640,7 @@ type UpdateScaleSetParams struct { ExtendedState *string `json:"extended_state"` } +// swagger:model CreateGiteaEndpointParams type CreateGiteaEndpointParams struct { Name string `json:"name,omitempty"` Description string `json:"description,omitempty"` @@ -674,6 +692,7 @@ func (c CreateGiteaEndpointParams) Validate() error { return nil } +// swagger:model UpdateGiteaEndpointParams type UpdateGiteaEndpointParams struct { Description *string `json:"description,omitempty"` APIBaseURL *string `json:"api_base_url,omitempty"` @@ -719,6 +738,7 @@ func (u UpdateGiteaEndpointParams) Validate() 
error { return nil } +// swagger:model CreateGiteaCredentialsParams type CreateGiteaCredentialsParams struct { Name string `json:"name,omitempty"` Description string `json:"description,omitempty"` @@ -752,6 +772,7 @@ func (c CreateGiteaCredentialsParams) Validate() error { return nil } +// swagger:model UpdateGiteaCredentialsParams type UpdateGiteaCredentialsParams struct { Name *string `json:"name,omitempty"` Description *string `json:"description,omitempty"` From 5a6ac121183d6453b717f664524b53b693b1f606 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 12 Aug 2025 09:21:43 +0000 Subject: [PATCH 155/179] Fix for gitea tools and scale set cleanup Filter out gitea tools to only consider archived downloads. This should help in situations where bandwidth is more important than CPU time used to unarchive the tools. Also a drive by fix for scale sets cleanup. Signed-off-by: Gabriel Adrian Samfira --- doc/events.md | 4 +++- workers/cache/gitea_tools.go | 4 ++++ workers/scaleset/scaleset.go | 6 ++++++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/doc/events.md b/doc/events.md index 6bc61a9d..e643a5c2 100644 --- a/doc/events.md +++ b/doc/events.md @@ -88,7 +88,9 @@ The filter is defined as a JSON that you write over the websocket connections. T "job", "controller", "github_credentials", - "github_endpoint" + "gitea_credentials", + "github_endpoint", + "scaleset" ], "title": "entity type", "description": "The type of entity to filter on", diff --git a/workers/cache/gitea_tools.go b/workers/cache/gitea_tools.go index 43fd86ba..8410a826 100644 --- a/workers/cache/gitea_tools.go +++ b/workers/cache/gitea_tools.go @@ -180,6 +180,10 @@ func getTools(ctx context.Context) ([]commonParams.RunnerApplicationDownload, er slog.InfoContext(ctx, "ignoring unrecognized tools os", "tool", asset.Name) continue } + if !strings.HasSuffix(asset.DownloadURL, ".xz") { + // filter out non compressed versions. + continue + } ret = append(ret, commonParams.RunnerApplicationDownload{ OS: os, Architecture: arch, diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index f5b34400..03a93387 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -193,6 +193,12 @@ func (w *Worker) Start() (err error) { return fmt.Errorf("updating runner %s: %w", instance.Name, err) } } + } else if instance.Status == commonParams.InstanceDeleted { + if err := w.handleInstanceCleanup(instance); err != nil { + locking.Unlock(instance.Name, false) + return fmt.Errorf("failed to remove database entry for %s: %w", instance.Name, err) + } + continue } w.runners[instance.ID] = instance locking.Unlock(instance.Name, false) From 98a769b8d190ff71a1288235b46e76cc460f1a13 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 12 Aug 2025 09:23:22 +0000 Subject: [PATCH 156/179] Allow cookie login to API endpoints This change considers cookies as a source for the JWT token. 
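
As a usage illustration only (placeholder URL and token, not part of the patch), a client can now authenticate either with the existing bearer header or by presenting the same JWT in the garm_token cookie that the middleware falls back to:

```go
// Client-side sketch only, not part of this patch. The URL and token are
// placeholders; the cookie name "garm_token" is the one read by the middleware.
package main

import (
	"fmt"
	"net/http"
)

func main() {
	const apiURL = "https://garm.example.com/api/v1/pools" // placeholder endpoint
	const token = "<jwt-token>"                            // placeholder JWT

	// Option 1: the existing bearer token in the Authorization header.
	withHeader, err := http.NewRequest(http.MethodGet, apiURL, nil)
	if err != nil {
		panic(err)
	}
	withHeader.Header.Set("Authorization", "Bearer "+token)

	// Option 2: the same JWT carried in the garm_token cookie, which the
	// middleware now falls back to when the Authorization header is absent.
	withCookie, err := http.NewRequest(http.MethodGet, apiURL, nil)
	if err != nil {
		panic(err)
	}
	withCookie.AddCookie(&http.Cookie{Name: "garm_token", Value: token})

	for _, req := range []*http.Request{withHeader, withCookie} {
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			fmt.Println("request failed:", err)
			continue
		}
		resp.Body.Close()
		fmt.Println(req.URL.Path, "->", resp.Status)
	}
}
```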
Signed-off-by: Gabriel Adrian Samfira --- auth/jwt.go | 31 +++++++++++++++++++++---------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/auth/jwt.go b/auth/jwt.go index e9b5745f..52fce0c9 100644 --- a/auth/jwt.go +++ b/auth/jwt.go @@ -97,26 +97,37 @@ func invalidAuthResponse(ctx context.Context, w http.ResponseWriter) { } } +func (amw *jwtMiddleware) getTokenFromRequest(r *http.Request) (string, error) { + authorizationHeader := r.Header.Get("authorization") + if authorizationHeader == "" { + cookie, err := r.Cookie("garm_token") + if err != nil { + return "", fmt.Errorf("failed to get cookie: %w", err) + } + return cookie.Value, nil + } + + bearerToken := strings.Split(authorizationHeader, " ") + if len(bearerToken) != 2 { + return "", fmt.Errorf("invalid auth header") + } + return bearerToken[1], nil +} + // Middleware implements the middleware interface func (amw *jwtMiddleware) Middleware(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // nolint:golangci-lint,godox // TODO: Log error details when authentication fails ctx := r.Context() - authorizationHeader := r.Header.Get("authorization") - if authorizationHeader == "" { + authToken, err := amw.getTokenFromRequest(r) + if err != nil { + slog.ErrorContext(ctx, "failed to get auth token", "error", err) invalidAuthResponse(ctx, w) return } - - bearerToken := strings.Split(authorizationHeader, " ") - if len(bearerToken) != 2 { - invalidAuthResponse(ctx, w) - return - } - claims := &JWTClaims{} - token, err := jwt.ParseWithClaims(bearerToken[1], claims, func(token *jwt.Token) (interface{}, error) { + token, err := jwt.ParseWithClaims(authToken, claims, func(token *jwt.Token) (interface{}, error) { if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { return nil, fmt.Errorf("invalid signing method") } From a811d129d00ed4073d2b0c07891d499226db98bb Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 12 Aug 2025 09:25:21 +0000 Subject: [PATCH 157/179] Update .gitignore Signed-off-by: Gabriel Adrian Samfira --- .gitignore | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.gitignore b/.gitignore index 97f747e2..4e8b3d79 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,7 @@ bin/ cmd/temp build/ release/ +node_modules/ +.svelte-kit/ +debug.html +git_push.sh From eec158b32c1cd89c94eaafdea14c3e6e7a127343 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Tue, 12 Aug 2025 09:28:21 +0000 Subject: [PATCH 158/179] Add SPA UI for GARM This change adds a single page application front-end to GARM. It uses a generated REST client, built from the swagger definitions, the websocket interface for live updates of entities and eager loading of everything except runners, as users may have many runners and we don't want to load hundreds of runners in memory. Proper pagination should be implemented in the API, in future commits, to avoid loading lots of elements for no reason. 
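
For reference, a hypothetical config fragment (example values only; field names come from the config additions in this patch) showing how the UI is switched on via the new [apiserver.webui] section, with cors_origins set so a browser can open the events websocket:

```toml
# Example values only; field names come from the config additions in this patch.
[apiserver]
  # pre-existing option: origins allowed to open the events websocket from a browser
  cors_origins = ["https://garm.example.com"]

  # new in this patch: serve the bundled SPA under /ui/
  [apiserver.webui]
    enable = true
```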
Signed-off-by: Gabriel Adrian Samfira --- .gitignore | 2 + .mockery.yaml | 27 + Makefile | 16 + README.md | 12 + apiserver/controllers/controllers.go | 29 +- apiserver/routers/routers.go | 28 +- auth/init_required.go | 4 +- build-webapp.sh | 16 + cmd/garm/main.go | 5 +- config/config.go | 21 + database/common/mocks/Store.go | 2689 +++- database/common/store.go | 2 +- database/sql/pools.go | 10 +- database/sql/scalesets.go | 19 +- database/watcher/watcher_store_test.go | 5 + doc/building_from_source.md | 66 +- doc/config.md | 2 + doc/quickstart.md | 3 + go.mod | 6 +- go.sum | 11 +- runner/common/mocks/GithubClient.go | 413 +- runner/common/mocks/GithubEntityOperations.go | 382 +- runner/common/mocks/PoolManager.go | 368 +- runner/common/mocks/Provider.go | 273 +- runner/common/mocks/RateLimitClient.go | 38 +- runner/common/pool.go | 2 +- runner/common/provider.go | 2 +- runner/common/util.go | 2 +- runner/interfaces.go | 2 +- runner/mocks/PoolManagerController.go | 352 +- testdata/config.toml | 2 + util/util.go | 68 + util/util_test.go | 394 + .../go-openapi/jsonpointer/.golangci.yml | 108 +- .../go-openapi/jsonpointer/pointer.go | 10 + vendor/github.com/mattn/go-sqlite3/README.md | 2 + .../mattn/go-sqlite3/sqlite3-binding.c | 4728 ++++--- .../mattn/go-sqlite3/sqlite3-binding.h | 316 +- .../mattn/go-sqlite3/sqlite3_opt_userauth.go | 153 +- .../github.com/mattn/go-sqlite3/sqlite3ext.h | 4 + vendor/github.com/spf13/pflag/README.md | 27 + vendor/github.com/spf13/pflag/bool_func.go | 40 + vendor/github.com/spf13/pflag/count.go | 2 +- vendor/github.com/spf13/pflag/errors.go | 149 + vendor/github.com/spf13/pflag/flag.go | 85 +- vendor/github.com/spf13/pflag/func.go | 37 + vendor/github.com/spf13/pflag/golangflag.go | 22 + vendor/github.com/spf13/pflag/ipnet_slice.go | 2 +- vendor/github.com/spf13/pflag/text.go | 81 + vendor/github.com/spf13/pflag/time.go | 118 + vendor/modules.txt | 6 +- webapp/.env.development | 2 + webapp/.env.example | 8 + webapp/DEV_SETUP.md | 79 + webapp/README.md | 102 + webapp/assets/_app/env.js | 1 + .../_app/immutable/assets/0.BPrCR_r7.css | 1 + .../immutable/assets/_layout.BPrCR_r7.css | 1 + .../assets/_app/immutable/chunks/5WA7h8uK.js | 1 + .../assets/_app/immutable/chunks/B3Pzt0F_.js | 1 + .../assets/_app/immutable/chunks/B7ITzBt8.js | 1 + .../assets/_app/immutable/chunks/BAg1iRPq.js | 1 + .../assets/_app/immutable/chunks/BE4wujub.js | 1 + .../assets/_app/immutable/chunks/BEkVdVE1.js | 1 + .../assets/_app/immutable/chunks/BGVHQGl-.js | 4 + .../assets/_app/immutable/chunks/BmGWMSQm.js | 1 + .../assets/_app/immutable/chunks/C41YH50Q.js | 1 + .../assets/_app/immutable/chunks/C6k1Q4We.js | 1 + .../assets/_app/immutable/chunks/C89fcOde.js | 1 + .../assets/_app/immutable/chunks/C9DJVOi1.js | 1 + .../assets/_app/immutable/chunks/CCSWcuVN.js | 1 + .../assets/_app/immutable/chunks/CGpPw4EW.js | 1 + .../assets/_app/immutable/chunks/CLYUNKnN.js | 1 + .../assets/_app/immutable/chunks/CNMHKIIK.js | 1 + .../assets/_app/immutable/chunks/CO4LUyTP.js | 1 + .../assets/_app/immutable/chunks/CTf6mQoE.js | 3 + .../assets/_app/immutable/chunks/CclkODgu.js | 1 + .../assets/_app/immutable/chunks/CiE1LlKV.js | 7 + .../assets/_app/immutable/chunks/CoIRRsD9.js | 1 + .../assets/_app/immutable/chunks/CwqI2jFH.js | 1 + .../assets/_app/immutable/chunks/D4Caz1gY.js | 1 + .../assets/_app/immutable/chunks/D8EpLgQ1.js | 2 + .../assets/_app/immutable/chunks/DDhBTdDt.js | 1 + .../assets/_app/immutable/chunks/DQP15tlf.js | 1 + .../assets/_app/immutable/chunks/DZblzgqm.js | 4 + 
.../assets/_app/immutable/chunks/Dbd6PPbz.js | 1 + .../assets/_app/immutable/chunks/DsnmJJEf.js | 1 + .../assets/_app/immutable/chunks/KQ2xQpA3.js | 1 + .../assets/_app/immutable/chunks/duD3WMbl.js | 1 + .../assets/_app/immutable/chunks/ow_oMtSd.js | 1 + .../assets/_app/immutable/chunks/qB7B8uiS.js | 1 + .../assets/_app/immutable/chunks/u94nIB4-.js | 1 + .../assets/_app/immutable/chunks/wyaP0EDu.js | 1 + .../_app/immutable/entry/app.kAVAdeq9.js | 2 + .../_app/immutable/entry/start.CI0Cdear.js | 1 + .../assets/_app/immutable/nodes/0.DINiyk_8.js | 13 + .../assets/_app/immutable/nodes/1.DcR4nNsi.js | 1 + .../_app/immutable/nodes/10.LnrIJgIa.js | 1 + .../_app/immutable/nodes/11.Bsn67lBa.js | 1 + .../_app/immutable/nodes/12.B-vC_cmu.js | 1 + .../_app/immutable/nodes/13.Br7HzjXP.js | 1 + .../_app/immutable/nodes/14.Cd0DOn96.js | 1 + .../_app/immutable/nodes/15.CkHQugXH.js | 1 + .../_app/immutable/nodes/16.B35VVkOd.js | 1 + .../_app/immutable/nodes/17.CCltcs-Z.js | 1 + .../_app/immutable/nodes/18.iVIhGVtu.js | 1 + .../assets/_app/immutable/nodes/2.CiT4lj0D.js | 1 + .../assets/_app/immutable/nodes/3.BSFz0YHn.js | 7 + .../assets/_app/immutable/nodes/4.XnVoh6ca.js | 3 + .../assets/_app/immutable/nodes/5.rvsSG-AQ.js | 1 + .../assets/_app/immutable/nodes/6.CtGX0qgG.js | 1 + .../assets/_app/immutable/nodes/7.0w3i9VHx.js | 1 + .../assets/_app/immutable/nodes/8.BiZNKYxk.js | 1 + .../assets/_app/immutable/nodes/9.DpSfMRgo.js | 1 + webapp/assets/_app/version.json | 1 + webapp/assets/assets.go | 83 + webapp/assets/assets/garm-dark.svg | 37 + webapp/assets/assets/garm-light.svg | 36 + webapp/assets/assets/gitea.svg | 1 + webapp/assets/assets/github-mark-white.svg | 1 + webapp/assets/assets/github-mark.svg | 1 + webapp/assets/favicon-dark.png | Bin 0 -> 3506 bytes webapp/assets/favicon-light.png | Bin 0 -> 3506 bytes webapp/assets/index.html | 105 + webapp/assets/openapitools.json | 7 + webapp/openapitools.json | 7 + webapp/package-lock.json | 5603 ++++++++ webapp/package.json | 43 + webapp/postcss.config.js | 6 + webapp/src/app.css | 18 + webapp/src/app.d.ts | 10 + webapp/src/app.html | 78 + webapp/src/lib/api/client.ts | 77 + webapp/src/lib/api/generated-client.ts | 596 + webapp/src/lib/api/generated/.gitignore | 4 + webapp/src/lib/api/generated/.npmignore | 1 + .../api/generated/.openapi-generator-ignore | 23 + .../api/generated/.openapi-generator/FILES | 70 + .../api/generated/.openapi-generator/VERSION | 1 + webapp/src/lib/api/generated/api.ts | 11684 ++++++++++++++++ webapp/src/lib/api/generated/base.ts | 86 + webapp/src/lib/api/generated/common.ts | 150 + webapp/src/lib/api/generated/configuration.ts | 115 + webapp/src/lib/api/generated/index.ts | 18 + webapp/src/lib/components/ActionButton.svelte | 68 + webapp/src/lib/components/Badge.svelte | 48 + webapp/src/lib/components/Button.svelte | 82 + .../lib/components/ControllerInfoCard.svelte | 403 + .../components/CreateEnterpriseModal.svelte | 213 + .../components/CreateOrganizationModal.svelte | 271 + .../src/lib/components/CreatePoolModal.svelte | 541 + .../components/CreateRepositoryModal.svelte | 294 + .../lib/components/CreateScaleSetModal.svelte | 472 + webapp/src/lib/components/DataTable.svelte | 237 + webapp/src/lib/components/DeleteModal.svelte | 57 + webapp/src/lib/components/DetailHeader.svelte | 56 + webapp/src/lib/components/EmptyState.svelte | 37 + .../lib/components/EntityInformation.svelte | 103 + webapp/src/lib/components/ErrorState.svelte | 37 + .../src/lib/components/EventsSection.svelte | 47 + 
.../lib/components/ForgeTypeSelector.svelte | 40 + webapp/src/lib/components/Icons.svelte | 51 + .../lib/components/InstancesSection.svelte | 114 + webapp/src/lib/components/JsonEditor.svelte | 48 + webapp/src/lib/components/LoadingState.svelte | 8 + webapp/src/lib/components/MobileCard.svelte | 254 + webapp/src/lib/components/Modal.svelte | 40 + webapp/src/lib/components/Navigation.svelte | 406 + webapp/src/lib/components/PageHeader.svelte | 39 + webapp/src/lib/components/PoolsSection.svelte | 136 + webapp/src/lib/components/SearchBar.svelte | 30 + .../src/lib/components/SearchFilterBar.svelte | 55 + .../src/lib/components/TablePagination.svelte | 98 + webapp/src/lib/components/Toast.svelte | 107 + webapp/src/lib/components/Tooltip.svelte | 29 + .../components/UpdateEnterpriseModal.svelte | 207 + .../lib/components/UpdateEntityModal.svelte | 265 + .../components/UpdateOrganizationModal.svelte | 210 + .../src/lib/components/UpdatePoolModal.svelte | 426 + .../components/UpdateRepositoryModal.svelte | 146 + .../lib/components/UpdateScaleSetModal.svelte | 339 + .../src/lib/components/WebhookSection.svelte | 172 + .../lib/components/cells/ActionsCell.svelte | 46 + .../lib/components/cells/EndpointCell.svelte | 15 + .../lib/components/cells/EntityCell.svelte | 84 + .../lib/components/cells/GenericCell.svelte | 59 + .../components/cells/InstancePoolCell.svelte | 19 + .../components/cells/PoolEntityCell.svelte | 16 + .../lib/components/cells/StatusCell.svelte | 118 + webapp/src/lib/components/cells/index.ts | 8 + webapp/src/lib/stores/auth.ts | 281 + webapp/src/lib/stores/eager-cache.ts | 609 + webapp/src/lib/stores/toast.ts | 58 + webapp/src/lib/stores/websocket.ts | 367 + webapp/src/lib/utils/common.ts | 296 + webapp/src/lib/utils/status.ts | 90 + webapp/src/openapitools.json | 7 + webapp/src/routes/+layout.svelte | 87 + webapp/src/routes/+layout.ts | 13 + webapp/src/routes/+page.svelte | 321 + webapp/src/routes/credentials/+page.svelte | 1022 ++ webapp/src/routes/endpoints/+page.svelte | 838 ++ webapp/src/routes/enterprises/+page.svelte | 329 + .../src/routes/enterprises/[id]/+page.svelte | 381 + webapp/src/routes/init/+page.svelte | 431 + webapp/src/routes/instances/+page.svelte | 284 + webapp/src/routes/instances/[id]/+page.svelte | 344 + webapp/src/routes/login/+page.svelte | 159 + webapp/src/routes/organizations/+page.svelte | 364 + .../routes/organizations/[id]/+page.svelte | 392 + webapp/src/routes/pools/+page.svelte | 339 + webapp/src/routes/pools/[id]/+page.svelte | 398 + webapp/src/routes/repositories/+page.svelte | 339 + .../src/routes/repositories/[id]/+page.svelte | 392 + webapp/src/routes/scalesets/+page.svelte | 316 + webapp/src/routes/scalesets/[id]/+page.svelte | 383 + webapp/static/assets/garm-dark.svg | 37 + webapp/static/assets/garm-light.svg | 36 + webapp/static/assets/gitea.svg | 1 + webapp/static/assets/github-mark-white.svg | 1 + webapp/static/assets/github-mark.svg | 1 + webapp/static/favicon-dark.png | Bin 0 -> 3506 bytes webapp/static/favicon-light.png | Bin 0 -> 3506 bytes webapp/svelte.config.js | 25 + webapp/swagger.yaml | 3469 +++++ webapp/tailwind.config.js | 34 + webapp/tsconfig.json | 14 + webapp/vite.config.ts | 36 + workers/entity/worker.go | 8 + workers/scaleset/scaleset.go | 7 +- 230 files changed, 47324 insertions(+), 2045 deletions(-) create mode 100644 .mockery.yaml create mode 100755 build-webapp.sh create mode 100644 util/util_test.go create mode 100644 vendor/github.com/spf13/pflag/bool_func.go create mode 100644 
vendor/github.com/spf13/pflag/errors.go create mode 100644 vendor/github.com/spf13/pflag/func.go create mode 100644 vendor/github.com/spf13/pflag/text.go create mode 100644 vendor/github.com/spf13/pflag/time.go create mode 100644 webapp/.env.development create mode 100644 webapp/.env.example create mode 100644 webapp/DEV_SETUP.md create mode 100644 webapp/README.md create mode 100644 webapp/assets/_app/env.js create mode 100644 webapp/assets/_app/immutable/assets/0.BPrCR_r7.css create mode 100644 webapp/assets/_app/immutable/assets/_layout.BPrCR_r7.css create mode 100644 webapp/assets/_app/immutable/chunks/5WA7h8uK.js create mode 100644 webapp/assets/_app/immutable/chunks/B3Pzt0F_.js create mode 100644 webapp/assets/_app/immutable/chunks/B7ITzBt8.js create mode 100644 webapp/assets/_app/immutable/chunks/BAg1iRPq.js create mode 100644 webapp/assets/_app/immutable/chunks/BE4wujub.js create mode 100644 webapp/assets/_app/immutable/chunks/BEkVdVE1.js create mode 100644 webapp/assets/_app/immutable/chunks/BGVHQGl-.js create mode 100644 webapp/assets/_app/immutable/chunks/BmGWMSQm.js create mode 100644 webapp/assets/_app/immutable/chunks/C41YH50Q.js create mode 100644 webapp/assets/_app/immutable/chunks/C6k1Q4We.js create mode 100644 webapp/assets/_app/immutable/chunks/C89fcOde.js create mode 100644 webapp/assets/_app/immutable/chunks/C9DJVOi1.js create mode 100644 webapp/assets/_app/immutable/chunks/CCSWcuVN.js create mode 100644 webapp/assets/_app/immutable/chunks/CGpPw4EW.js create mode 100644 webapp/assets/_app/immutable/chunks/CLYUNKnN.js create mode 100644 webapp/assets/_app/immutable/chunks/CNMHKIIK.js create mode 100644 webapp/assets/_app/immutable/chunks/CO4LUyTP.js create mode 100644 webapp/assets/_app/immutable/chunks/CTf6mQoE.js create mode 100644 webapp/assets/_app/immutable/chunks/CclkODgu.js create mode 100644 webapp/assets/_app/immutable/chunks/CiE1LlKV.js create mode 100644 webapp/assets/_app/immutable/chunks/CoIRRsD9.js create mode 100644 webapp/assets/_app/immutable/chunks/CwqI2jFH.js create mode 100644 webapp/assets/_app/immutable/chunks/D4Caz1gY.js create mode 100644 webapp/assets/_app/immutable/chunks/D8EpLgQ1.js create mode 100644 webapp/assets/_app/immutable/chunks/DDhBTdDt.js create mode 100644 webapp/assets/_app/immutable/chunks/DQP15tlf.js create mode 100644 webapp/assets/_app/immutable/chunks/DZblzgqm.js create mode 100644 webapp/assets/_app/immutable/chunks/Dbd6PPbz.js create mode 100644 webapp/assets/_app/immutable/chunks/DsnmJJEf.js create mode 100644 webapp/assets/_app/immutable/chunks/KQ2xQpA3.js create mode 100644 webapp/assets/_app/immutable/chunks/duD3WMbl.js create mode 100644 webapp/assets/_app/immutable/chunks/ow_oMtSd.js create mode 100644 webapp/assets/_app/immutable/chunks/qB7B8uiS.js create mode 100644 webapp/assets/_app/immutable/chunks/u94nIB4-.js create mode 100644 webapp/assets/_app/immutable/chunks/wyaP0EDu.js create mode 100644 webapp/assets/_app/immutable/entry/app.kAVAdeq9.js create mode 100644 webapp/assets/_app/immutable/entry/start.CI0Cdear.js create mode 100644 webapp/assets/_app/immutable/nodes/0.DINiyk_8.js create mode 100644 webapp/assets/_app/immutable/nodes/1.DcR4nNsi.js create mode 100644 webapp/assets/_app/immutable/nodes/10.LnrIJgIa.js create mode 100644 webapp/assets/_app/immutable/nodes/11.Bsn67lBa.js create mode 100644 webapp/assets/_app/immutable/nodes/12.B-vC_cmu.js create mode 100644 webapp/assets/_app/immutable/nodes/13.Br7HzjXP.js create mode 100644 webapp/assets/_app/immutable/nodes/14.Cd0DOn96.js create mode 100644 
webapp/assets/_app/immutable/nodes/15.CkHQugXH.js create mode 100644 webapp/assets/_app/immutable/nodes/16.B35VVkOd.js create mode 100644 webapp/assets/_app/immutable/nodes/17.CCltcs-Z.js create mode 100644 webapp/assets/_app/immutable/nodes/18.iVIhGVtu.js create mode 100644 webapp/assets/_app/immutable/nodes/2.CiT4lj0D.js create mode 100644 webapp/assets/_app/immutable/nodes/3.BSFz0YHn.js create mode 100644 webapp/assets/_app/immutable/nodes/4.XnVoh6ca.js create mode 100644 webapp/assets/_app/immutable/nodes/5.rvsSG-AQ.js create mode 100644 webapp/assets/_app/immutable/nodes/6.CtGX0qgG.js create mode 100644 webapp/assets/_app/immutable/nodes/7.0w3i9VHx.js create mode 100644 webapp/assets/_app/immutable/nodes/8.BiZNKYxk.js create mode 100644 webapp/assets/_app/immutable/nodes/9.DpSfMRgo.js create mode 100644 webapp/assets/_app/version.json create mode 100644 webapp/assets/assets.go create mode 100644 webapp/assets/assets/garm-dark.svg create mode 100644 webapp/assets/assets/garm-light.svg create mode 100644 webapp/assets/assets/gitea.svg create mode 100644 webapp/assets/assets/github-mark-white.svg create mode 100644 webapp/assets/assets/github-mark.svg create mode 100644 webapp/assets/favicon-dark.png create mode 100644 webapp/assets/favicon-light.png create mode 100644 webapp/assets/index.html create mode 100644 webapp/assets/openapitools.json create mode 100644 webapp/openapitools.json create mode 100644 webapp/package-lock.json create mode 100644 webapp/package.json create mode 100644 webapp/postcss.config.js create mode 100644 webapp/src/app.css create mode 100644 webapp/src/app.d.ts create mode 100644 webapp/src/app.html create mode 100644 webapp/src/lib/api/client.ts create mode 100644 webapp/src/lib/api/generated-client.ts create mode 100644 webapp/src/lib/api/generated/.gitignore create mode 100644 webapp/src/lib/api/generated/.npmignore create mode 100644 webapp/src/lib/api/generated/.openapi-generator-ignore create mode 100644 webapp/src/lib/api/generated/.openapi-generator/FILES create mode 100644 webapp/src/lib/api/generated/.openapi-generator/VERSION create mode 100644 webapp/src/lib/api/generated/api.ts create mode 100644 webapp/src/lib/api/generated/base.ts create mode 100644 webapp/src/lib/api/generated/common.ts create mode 100644 webapp/src/lib/api/generated/configuration.ts create mode 100644 webapp/src/lib/api/generated/index.ts create mode 100644 webapp/src/lib/components/ActionButton.svelte create mode 100644 webapp/src/lib/components/Badge.svelte create mode 100644 webapp/src/lib/components/Button.svelte create mode 100644 webapp/src/lib/components/ControllerInfoCard.svelte create mode 100644 webapp/src/lib/components/CreateEnterpriseModal.svelte create mode 100644 webapp/src/lib/components/CreateOrganizationModal.svelte create mode 100644 webapp/src/lib/components/CreatePoolModal.svelte create mode 100644 webapp/src/lib/components/CreateRepositoryModal.svelte create mode 100644 webapp/src/lib/components/CreateScaleSetModal.svelte create mode 100644 webapp/src/lib/components/DataTable.svelte create mode 100644 webapp/src/lib/components/DeleteModal.svelte create mode 100644 webapp/src/lib/components/DetailHeader.svelte create mode 100644 webapp/src/lib/components/EmptyState.svelte create mode 100644 webapp/src/lib/components/EntityInformation.svelte create mode 100644 webapp/src/lib/components/ErrorState.svelte create mode 100644 webapp/src/lib/components/EventsSection.svelte create mode 100644 webapp/src/lib/components/ForgeTypeSelector.svelte create mode 100644 
webapp/src/lib/components/Icons.svelte create mode 100644 webapp/src/lib/components/InstancesSection.svelte create mode 100644 webapp/src/lib/components/JsonEditor.svelte create mode 100644 webapp/src/lib/components/LoadingState.svelte create mode 100644 webapp/src/lib/components/MobileCard.svelte create mode 100644 webapp/src/lib/components/Modal.svelte create mode 100644 webapp/src/lib/components/Navigation.svelte create mode 100644 webapp/src/lib/components/PageHeader.svelte create mode 100644 webapp/src/lib/components/PoolsSection.svelte create mode 100644 webapp/src/lib/components/SearchBar.svelte create mode 100644 webapp/src/lib/components/SearchFilterBar.svelte create mode 100644 webapp/src/lib/components/TablePagination.svelte create mode 100644 webapp/src/lib/components/Toast.svelte create mode 100644 webapp/src/lib/components/Tooltip.svelte create mode 100644 webapp/src/lib/components/UpdateEnterpriseModal.svelte create mode 100644 webapp/src/lib/components/UpdateEntityModal.svelte create mode 100644 webapp/src/lib/components/UpdateOrganizationModal.svelte create mode 100644 webapp/src/lib/components/UpdatePoolModal.svelte create mode 100644 webapp/src/lib/components/UpdateRepositoryModal.svelte create mode 100644 webapp/src/lib/components/UpdateScaleSetModal.svelte create mode 100644 webapp/src/lib/components/WebhookSection.svelte create mode 100644 webapp/src/lib/components/cells/ActionsCell.svelte create mode 100644 webapp/src/lib/components/cells/EndpointCell.svelte create mode 100644 webapp/src/lib/components/cells/EntityCell.svelte create mode 100644 webapp/src/lib/components/cells/GenericCell.svelte create mode 100644 webapp/src/lib/components/cells/InstancePoolCell.svelte create mode 100644 webapp/src/lib/components/cells/PoolEntityCell.svelte create mode 100644 webapp/src/lib/components/cells/StatusCell.svelte create mode 100644 webapp/src/lib/components/cells/index.ts create mode 100644 webapp/src/lib/stores/auth.ts create mode 100644 webapp/src/lib/stores/eager-cache.ts create mode 100644 webapp/src/lib/stores/toast.ts create mode 100644 webapp/src/lib/stores/websocket.ts create mode 100644 webapp/src/lib/utils/common.ts create mode 100644 webapp/src/lib/utils/status.ts create mode 100644 webapp/src/openapitools.json create mode 100644 webapp/src/routes/+layout.svelte create mode 100644 webapp/src/routes/+layout.ts create mode 100644 webapp/src/routes/+page.svelte create mode 100644 webapp/src/routes/credentials/+page.svelte create mode 100644 webapp/src/routes/endpoints/+page.svelte create mode 100644 webapp/src/routes/enterprises/+page.svelte create mode 100644 webapp/src/routes/enterprises/[id]/+page.svelte create mode 100644 webapp/src/routes/init/+page.svelte create mode 100644 webapp/src/routes/instances/+page.svelte create mode 100644 webapp/src/routes/instances/[id]/+page.svelte create mode 100644 webapp/src/routes/login/+page.svelte create mode 100644 webapp/src/routes/organizations/+page.svelte create mode 100644 webapp/src/routes/organizations/[id]/+page.svelte create mode 100644 webapp/src/routes/pools/+page.svelte create mode 100644 webapp/src/routes/pools/[id]/+page.svelte create mode 100644 webapp/src/routes/repositories/+page.svelte create mode 100644 webapp/src/routes/repositories/[id]/+page.svelte create mode 100644 webapp/src/routes/scalesets/+page.svelte create mode 100644 webapp/src/routes/scalesets/[id]/+page.svelte create mode 100644 webapp/static/assets/garm-dark.svg create mode 100644 webapp/static/assets/garm-light.svg create mode 100644 
webapp/static/assets/gitea.svg create mode 100644 webapp/static/assets/github-mark-white.svg create mode 100644 webapp/static/assets/github-mark.svg create mode 100644 webapp/static/favicon-dark.png create mode 100644 webapp/static/favicon-light.png create mode 100644 webapp/svelte.config.js create mode 100644 webapp/swagger.yaml create mode 100644 webapp/tailwind.config.js create mode 100644 webapp/tsconfig.json create mode 100644 webapp/vite.config.ts diff --git a/.gitignore b/.gitignore index 4e8b3d79..54c931c8 100644 --- a/.gitignore +++ b/.gitignore @@ -23,3 +23,5 @@ node_modules/ .svelte-kit/ debug.html git_push.sh +webapp/src/lib/api/generated/docs +.env diff --git a/.mockery.yaml b/.mockery.yaml new file mode 100644 index 00000000..b7858821 --- /dev/null +++ b/.mockery.yaml @@ -0,0 +1,27 @@ +with-expecter: true +dir: "mocks" +mockname: "{{ .InterfaceName }}" +outpkg: "mocks" +filename: "{{ .InterfaceName }}.go" +# V3 compatibility settings +resolve-type-alias: false +disable-version-string: true +issue-845-fix: true +packages: + # Database store interfaces + github.com/cloudbase/garm/database/common: + interfaces: + Store: + config: + dir: "{{ .InterfaceDir }}/mocks" + # Runner interfaces + github.com/cloudbase/garm/runner: + interfaces: + PoolManagerController: + config: + dir: "{{ .InterfaceDir }}/mocks" + # Runner common interfaces (generate all interfaces in this package) + github.com/cloudbase/garm/runner/common: + config: + dir: "{{ .InterfaceDir }}/mocks" + all: true \ No newline at end of file diff --git a/Makefile b/Makefile index 9a09e999..56d2d7c9 100644 --- a/Makefile +++ b/Makefile @@ -6,6 +6,7 @@ export SHELLOPTS:=$(if $(SHELLOPTS),$(SHELLOPTS):)pipefail:errexit GEN_PASSWORD=$(shell (/usr/bin/apg -n1 -m32)) IMAGE_TAG = garm-build +HAS_TAILWINDCSS=$(shell (which tailwindcss || echo "no")) IMAGE_BUILDER=$(shell (which docker || which podman)) IS_PODMAN=$(shell (($(IMAGE_BUILDER) --version | grep -q podman) && echo "yes" || echo "no")) USER_ID=$(if $(filter yes,$(IS_PODMAN)),0,$(shell id -u)) @@ -55,6 +56,21 @@ build: ## Build garm @$(GO) build -ldflags "-s -w -X github.com/cloudbase/garm/util/appdefaults.Version=${VERSION}" -tags osusergo,netgo,sqlite_omit_load_extension -o bin/garm-cli ./cmd/garm-cli @echo Binaries are available in $(PWD)/bin +.PHONY: build-webui +build-webui: + @echo Building GARM web ui + ./build-webapp.sh + rm -rf webapp/assets/_app + cp -r webapp/build/* webapp/assets/ + +.PHONY: generate +generate: ## Run go generate after checking required tools are in PATH + @echo Checking required tools... + @which openapi-generator-cli > /dev/null || (echo "Error: openapi-generator-cli not found in PATH" && exit 1) + @which tailwindcss > /dev/null || (echo "Error: tailwindcss not found in PATH" && exit 1) + @echo Running go generate + @$(GO) generate ./... + test: verify go-test ## Run tests ##@ Release diff --git a/README.md b/README.md index 5d09135f..24fbbcc4 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,7 @@ - [Installing on Kubernetes](#installing-on-kubernetes) - [Configuring GARM for GHES](#configuring-garm-for-ghes) - [Configuring GARM for Gitea](#configuring-garm-for-gitea) + - [Enabling the web UI](#enabling-the-web-ui) - [Using GARM](#using-garm) - [Supported providers](#supported-providers) - [Installing external providers](#installing-external-providers) @@ -78,6 +79,17 @@ GARM supports creating pools and scale sets in either GitHub itself or in your o GARM now has support for Gitea (>=1.24.0). 
For information on getting started with Gitea, see the [Gitea quickstart](/doc/gitea.md) document. +## Enabling the web UI + +GARM now ships with a single page application. To enable it, add the following to your GARM config: + +```toml +[apiserver.webui] + enable = true +``` + +Check the [README.md](/webapp/README.md) file for details on the web UI. + ## Using GARM GARM is designed with simplicity in mind. At least we try to keep it as simple as possible. We're aware that adding a new tool in your workflow can be painful, especially when you already have to deal with so many. The cognitive load for OPS has reached a level where it feels overwhelming at times to even wrap your head around a new tool. As such, we believe that tools should be simple, should take no more than a few hours to understand and set up and if you absolutely need to interact with the tool, it should be as intuitive as possible. Although we try our best to make this happen, we're aware that GARM has some rough edges, especially for new users. If you encounter issues or feel like the setup process was too complicated, please let us know. We're always looking to improve the user experience. diff --git a/apiserver/controllers/controllers.go b/apiserver/controllers/controllers.go index 2a57f9cf..0c610c38 100644 --- a/apiserver/controllers/controllers.go +++ b/apiserver/controllers/controllers.go @@ -20,6 +20,7 @@ import ( "io" "log/slog" "net/http" + "net/url" "strings" "github.com/gorilla/mux" @@ -31,17 +32,42 @@ import ( "github.com/cloudbase/garm/apiserver/events" "github.com/cloudbase/garm/apiserver/params" "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/config" "github.com/cloudbase/garm/metrics" runnerParams "github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner" //nolint:typecheck + garmUtil "github.com/cloudbase/garm/util" wsWriter "github.com/cloudbase/garm/websocket" ) -func NewAPIController(r *runner.Runner, authenticator *auth.Authenticator, hub *wsWriter.Hub) (*APIController, error) { +func NewAPIController(r *runner.Runner, authenticator *auth.Authenticator, hub *wsWriter.Hub, apiCfg config.APIServer) (*APIController, error) { controllerInfo, err := r.GetControllerInfo(auth.GetAdminContext(context.Background())) if err != nil { return nil, errors.Wrap(err, "failed to get controller info") } + var checkOrigin func(r *http.Request) bool + if len(apiCfg.CORSOrigins) > 0 { + checkOrigin = func(r *http.Request) bool { + origin := r.Header["Origin"] + if len(origin) == 0 { + return true + } + u, err := url.Parse(origin[0]) + if err != nil { + return false + } + for _, val := range apiCfg.CORSOrigins { + corsVal, err := url.Parse(val) + if err != nil { + continue + } + if garmUtil.ASCIIEqualFold(u.Host, corsVal.Host) { + return true + } + } + return false + } + } return &APIController{ r: r, auth: authenticator, @@ -49,6 +75,7 @@ func NewAPIController(r *runner.Runner, authenticator *auth.Authenticator, hub * upgrader: websocket.Upgrader{ ReadBufferSize: 1024, WriteBufferSize: 16384, + CheckOrigin: checkOrigin, }, controllerID: controllerInfo.ControllerID.String(), }, nil diff --git a/apiserver/routers/routers.go b/apiserver/routers/routers.go index 2036b5f1..ff241165 100644 --- a/apiserver/routers/routers.go +++ b/apiserver/routers/routers.go @@ -57,6 +57,8 @@ import ( "github.com/cloudbase/garm/apiserver/controllers" "github.com/cloudbase/garm/auth" + "github.com/cloudbase/garm/config" + spaAssets "github.com/cloudbase/garm/webapp/assets" ) func WithMetricsRouter(parentRouter 
*mux.Router, disableAuth bool, metricsMiddlerware auth.Middleware) *mux.Router { @@ -82,6 +84,30 @@ func WithDebugServer(parentRouter *mux.Router) *mux.Router { return parentRouter } +func WithWebUI(parentRouter *mux.Router, apiConfig config.APIServer) *mux.Router { + if parentRouter == nil { + return nil + } + + if apiConfig.WebUI.EnableWebUI { + slog.Info("WebUI is enabled, adding webapp routes") + webappPath := apiConfig.WebUI.GetWebappPath() + slog.Info("Using webapp path", "path", webappPath) + // Accessing / should redirect to the UI + parentRouter.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + http.Redirect(w, r, webappPath, http.StatusMovedPermanently) // 301 + }) + // Serve the SPA with dynamic path + parentRouter.PathPrefix(webappPath).HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + spaAssets.ServeSPAWithPath(w, r, webappPath) + }).Methods("GET") + } else { + slog.Info("WebUI is disabled, skipping webapp routes") + } + + return parentRouter +} + func requestLogger(h http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // gathers metrics from the upstream handlers @@ -505,7 +531,7 @@ func NewAPIRouter(han *controllers.APIController, authMiddleware, initMiddleware apiRouter.Handle("/ws/events/", http.HandlerFunc(han.EventsHandler)).Methods("GET") apiRouter.Handle("/ws/events", http.HandlerFunc(han.EventsHandler)).Methods("GET") - // NotFound handler + // NotFound handler - this should be last apiRouter.PathPrefix("/").HandlerFunc(han.NotFoundHandler).Methods("GET", "POST", "PUT", "DELETE", "OPTIONS") return router } diff --git a/auth/init_required.go b/auth/init_required.go index 2d3e1715..3ef31d70 100644 --- a/auth/init_required.go +++ b/auth/init_required.go @@ -38,8 +38,8 @@ type initRequired struct { func (i *initRequired) Middleware(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - ctrlInfo, err := i.store.ControllerInfo() - if err != nil || ctrlInfo.ControllerID.String() == "" { + + if !i.store.HasAdminUser(ctx) { w.Header().Add("Content-Type", "application/json") w.WriteHeader(http.StatusConflict) if err := json.NewEncoder(w).Encode(params.InitializationRequired); err != nil { diff --git a/build-webapp.sh b/build-webapp.sh new file mode 100755 index 00000000..01b13c04 --- /dev/null +++ b/build-webapp.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +set -e + +echo "Building GARM SPA (SvelteKit)..." + +# Navigate to webapp directory +cd webapp + +# Install dependencies if node_modules doesn't exist +npm install + +# Build the SPA +echo "Building SPA..." +npm run build +echo "SPA built successfully!" 
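
The CheckOrigin function wired into the websocket upgrader in apiserver/controllers/controllers.go above allows requests that carry no Origin header and otherwise requires the Origin host to match one of the configured cors_origins, compared case-insensitively. A minimal standalone sketch of that check, assuming strings.EqualFold as a stand-in for garm's ASCIIEqualFold helper and a made-up origin list:

```go
package main

import (
	"fmt"
	"net/http"
	"net/url"
	"strings"
)

// corsOrigins stands in for the apiserver cors_origins setting in the GARM config.
var corsOrigins = []string{"https://garm.example.com"}

// checkOrigin mirrors the CheckOrigin added to the websocket upgrader:
// no Origin header means the request is allowed; otherwise the Origin host
// must match one of the configured origins, ignoring ASCII case.
func checkOrigin(r *http.Request) bool {
	origin := r.Header["Origin"]
	if len(origin) == 0 {
		return true
	}
	u, err := url.Parse(origin[0])
	if err != nil {
		return false
	}
	for _, val := range corsOrigins {
		corsVal, err := url.Parse(val)
		if err != nil {
			continue
		}
		// The patch uses garmUtil.ASCIIEqualFold; strings.EqualFold is the
		// closest standard-library stand-in for this sketch.
		if strings.EqualFold(u.Host, corsVal.Host) {
			return true
		}
	}
	return false
}

func main() {
	req, _ := http.NewRequest(http.MethodGet, "http://localhost/api/v1/ws", nil)
	req.Header.Set("Origin", "https://GARM.EXAMPLE.COM")
	fmt.Println(checkOrigin(req)) // true: host comparison ignores ASCII case
}
```

Note that only the host portion of each entry is compared, so values in cors_origins need to be parseable URLs with a host component.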
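The regenerated mocks that follow (driven by the new .mockery.yaml with with-expecter: true) add a typed EXPECT() API on top of the plain mock.On calls. A short test sketch of how such a mock might be consumed, using zero values as placeholder arguments; only the call shape is the point here:

```go
package garm_test

import (
	"context"
	"testing"

	"github.com/stretchr/testify/mock"

	"github.com/cloudbase/garm/database/common/mocks"
	"github.com/cloudbase/garm/params"
)

func TestStoreExpecter(t *testing.T) {
	store := &mocks.Store{}

	// Typed expectation instead of store.On("AddEntityEvent", ...).
	store.EXPECT().
		AddEntityEvent(mock.Anything, mock.Anything, mock.Anything,
			mock.Anything, mock.Anything, mock.Anything).
		Return(nil).
		Once()

	// Placeholder values; a real test would pass a concrete entity and event.
	var (
		entity params.ForgeEntity
		event  params.EventType
		level  params.EventLevel
	)
	if err := store.AddEntityEvent(context.Background(), entity, event, level, "instance created", 20); err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	store.AssertExpectations(t)
}
```

The dir: "{{ .InterfaceDir }}/mocks" settings keep the generated files next to the interfaces they mock, which is why the diff touches database/common/mocks/Store.go.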
diff --git a/cmd/garm/main.go b/cmd/garm/main.go index f0cca079..226a9e2a 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -283,7 +283,7 @@ func main() { } authenticator := auth.NewAuthenticator(cfg.JWTAuth, db) - controller, err := controllers.NewAPIController(runner, authenticator, hub) + controller, err := controllers.NewAPIController(runner, authenticator, hub, cfg.APIServer) if err != nil { log.Fatalf("failed to create controller: %+v", err) } @@ -315,6 +315,9 @@ func main() { router := routers.NewAPIRouter(controller, jwtMiddleware, initMiddleware, urlsRequiredMiddleware, instanceMiddleware, cfg.Default.EnableWebhookManagement) + // Add WebUI routes + router = routers.WithWebUI(router, cfg.APIServer) + // start the metrics collector if cfg.Metrics.Enable { slog.InfoContext(ctx, "setting up metric routes") diff --git a/config/config.go b/config/config.go index 57ec0e80..cdbec393 100644 --- a/config/config.go +++ b/config/config.go @@ -663,6 +663,21 @@ func (m *Metrics) Duration() time.Duration { return duration } +// WebUI holds configuration for the web UI +type WebUI struct { + EnableWebUI bool `toml:"enable" json:"enable"` +} + +// Validate validates the WebUI config +func (w *WebUI) Validate() error { + return nil +} + +// GetWebappPath returns the webapp path with proper formatting +func (w *WebUI) GetWebappPath() string { + return "/ui/" +} + // APIServer holds configuration for the API server // worker type APIServer struct { @@ -671,6 +686,7 @@ type APIServer struct { UseTLS bool `toml:"use_tls" json:"use-tls"` TLSConfig TLSConfig `toml:"tls" json:"tls"` CORSOrigins []string `toml:"cors_origins" json:"cors-origins"` + WebUI WebUI `toml:"webui" json:"webui"` } // BindAddress returns a host:port string. @@ -696,6 +712,11 @@ func (a *APIServer) Validate() error { // when we try to bind to it. return fmt.Errorf("invalid IP address") } + + if err := a.WebUI.Validate(); err != nil { + return fmt.Errorf("invalid webui config: %w", err) + } + return nil } diff --git a/database/common/mocks/Store.go b/database/common/mocks/Store.go index ec107854..26a80b0a 100644 --- a/database/common/mocks/Store.go +++ b/database/common/mocks/Store.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.53.3. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. 
package mocks @@ -14,6 +14,14 @@ type Store struct { mock.Mock } +type Store_Expecter struct { + mock *mock.Mock +} + +func (_m *Store) EXPECT() *Store_Expecter { + return &Store_Expecter{mock: &_m.Mock} +} + // AddEntityEvent provides a mock function with given fields: ctx, entity, event, eventLevel, statusMessage, maxEvents func (_m *Store) AddEntityEvent(ctx context.Context, entity params.ForgeEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { ret := _m.Called(ctx, entity, event, eventLevel, statusMessage, maxEvents) @@ -32,6 +40,39 @@ func (_m *Store) AddEntityEvent(ctx context.Context, entity params.ForgeEntity, return r0 } +// Store_AddEntityEvent_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddEntityEvent' +type Store_AddEntityEvent_Call struct { + *mock.Call +} + +// AddEntityEvent is a helper method to define mock.On call +// - ctx context.Context +// - entity params.ForgeEntity +// - event params.EventType +// - eventLevel params.EventLevel +// - statusMessage string +// - maxEvents int +func (_e *Store_Expecter) AddEntityEvent(ctx interface{}, entity interface{}, event interface{}, eventLevel interface{}, statusMessage interface{}, maxEvents interface{}) *Store_AddEntityEvent_Call { + return &Store_AddEntityEvent_Call{Call: _e.mock.On("AddEntityEvent", ctx, entity, event, eventLevel, statusMessage, maxEvents)} +} + +func (_c *Store_AddEntityEvent_Call) Run(run func(ctx context.Context, entity params.ForgeEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int)) *Store_AddEntityEvent_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(params.EventType), args[3].(params.EventLevel), args[4].(string), args[5].(int)) + }) + return _c +} + +func (_c *Store_AddEntityEvent_Call) Return(_a0 error) *Store_AddEntityEvent_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_AddEntityEvent_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, params.EventType, params.EventLevel, string, int) error) *Store_AddEntityEvent_Call { + _c.Call.Return(run) + return _c +} + // AddInstanceEvent provides a mock function with given fields: ctx, instanceName, event, eventLevel, eventMessage func (_m *Store) AddInstanceEvent(ctx context.Context, instanceName string, event params.EventType, eventLevel params.EventLevel, eventMessage string) error { ret := _m.Called(ctx, instanceName, event, eventLevel, eventMessage) @@ -50,6 +91,38 @@ func (_m *Store) AddInstanceEvent(ctx context.Context, instanceName string, even return r0 } +// Store_AddInstanceEvent_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddInstanceEvent' +type Store_AddInstanceEvent_Call struct { + *mock.Call +} + +// AddInstanceEvent is a helper method to define mock.On call +// - ctx context.Context +// - instanceName string +// - event params.EventType +// - eventLevel params.EventLevel +// - eventMessage string +func (_e *Store_Expecter) AddInstanceEvent(ctx interface{}, instanceName interface{}, event interface{}, eventLevel interface{}, eventMessage interface{}) *Store_AddInstanceEvent_Call { + return &Store_AddInstanceEvent_Call{Call: _e.mock.On("AddInstanceEvent", ctx, instanceName, event, eventLevel, eventMessage)} +} + +func (_c *Store_AddInstanceEvent_Call) Run(run func(ctx context.Context, instanceName string, event params.EventType, eventLevel 
params.EventLevel, eventMessage string)) *Store_AddInstanceEvent_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.EventType), args[3].(params.EventLevel), args[4].(string)) + }) + return _c +} + +func (_c *Store_AddInstanceEvent_Call) Return(_a0 error) *Store_AddInstanceEvent_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_AddInstanceEvent_Call) RunAndReturn(run func(context.Context, string, params.EventType, params.EventLevel, string) error) *Store_AddInstanceEvent_Call { + _c.Call.Return(run) + return _c +} + // BreakLockJobIsQueued provides a mock function with given fields: ctx, jobID func (_m *Store) BreakLockJobIsQueued(ctx context.Context, jobID int64) error { ret := _m.Called(ctx, jobID) @@ -68,6 +141,35 @@ func (_m *Store) BreakLockJobIsQueued(ctx context.Context, jobID int64) error { return r0 } +// Store_BreakLockJobIsQueued_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BreakLockJobIsQueued' +type Store_BreakLockJobIsQueued_Call struct { + *mock.Call +} + +// BreakLockJobIsQueued is a helper method to define mock.On call +// - ctx context.Context +// - jobID int64 +func (_e *Store_Expecter) BreakLockJobIsQueued(ctx interface{}, jobID interface{}) *Store_BreakLockJobIsQueued_Call { + return &Store_BreakLockJobIsQueued_Call{Call: _e.mock.On("BreakLockJobIsQueued", ctx, jobID)} +} + +func (_c *Store_BreakLockJobIsQueued_Call) Run(run func(ctx context.Context, jobID int64)) *Store_BreakLockJobIsQueued_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *Store_BreakLockJobIsQueued_Call) Return(_a0 error) *Store_BreakLockJobIsQueued_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_BreakLockJobIsQueued_Call) RunAndReturn(run func(context.Context, int64) error) *Store_BreakLockJobIsQueued_Call { + _c.Call.Return(run) + return _c +} + // ControllerInfo provides a mock function with no fields func (_m *Store) ControllerInfo() (params.ControllerInfo, error) { ret := _m.Called() @@ -96,6 +198,33 @@ func (_m *Store) ControllerInfo() (params.ControllerInfo, error) { return r0, r1 } +// Store_ControllerInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ControllerInfo' +type Store_ControllerInfo_Call struct { + *mock.Call +} + +// ControllerInfo is a helper method to define mock.On call +func (_e *Store_Expecter) ControllerInfo() *Store_ControllerInfo_Call { + return &Store_ControllerInfo_Call{Call: _e.mock.On("ControllerInfo")} +} + +func (_c *Store_ControllerInfo_Call) Run(run func()) *Store_ControllerInfo_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Store_ControllerInfo_Call) Return(_a0 params.ControllerInfo, _a1 error) *Store_ControllerInfo_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ControllerInfo_Call) RunAndReturn(run func() (params.ControllerInfo, error)) *Store_ControllerInfo_Call { + _c.Call.Return(run) + return _c +} + // CreateEnterprise provides a mock function with given fields: ctx, name, credentialsName, webhookSecret, poolBalancerType func (_m *Store) CreateEnterprise(ctx context.Context, name string, credentialsName params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Enterprise, error) { ret := _m.Called(ctx, name, credentialsName, webhookSecret, poolBalancerType) @@ -124,6 +253,38 @@ func (_m *Store) 
CreateEnterprise(ctx context.Context, name string, credentialsN return r0, r1 } +// Store_CreateEnterprise_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEnterprise' +type Store_CreateEnterprise_Call struct { + *mock.Call +} + +// CreateEnterprise is a helper method to define mock.On call +// - ctx context.Context +// - name string +// - credentialsName params.ForgeCredentials +// - webhookSecret string +// - poolBalancerType params.PoolBalancerType +func (_e *Store_Expecter) CreateEnterprise(ctx interface{}, name interface{}, credentialsName interface{}, webhookSecret interface{}, poolBalancerType interface{}) *Store_CreateEnterprise_Call { + return &Store_CreateEnterprise_Call{Call: _e.mock.On("CreateEnterprise", ctx, name, credentialsName, webhookSecret, poolBalancerType)} +} + +func (_c *Store_CreateEnterprise_Call) Run(run func(ctx context.Context, name string, credentialsName params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType)) *Store_CreateEnterprise_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.ForgeCredentials), args[3].(string), args[4].(params.PoolBalancerType)) + }) + return _c +} + +func (_c *Store_CreateEnterprise_Call) Return(_a0 params.Enterprise, _a1 error) *Store_CreateEnterprise_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_CreateEnterprise_Call) RunAndReturn(run func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Enterprise, error)) *Store_CreateEnterprise_Call { + _c.Call.Return(run) + return _c +} + // CreateEntityPool provides a mock function with given fields: ctx, entity, param func (_m *Store) CreateEntityPool(ctx context.Context, entity params.ForgeEntity, param params.CreatePoolParams) (params.Pool, error) { ret := _m.Called(ctx, entity, param) @@ -152,6 +313,36 @@ func (_m *Store) CreateEntityPool(ctx context.Context, entity params.ForgeEntity return r0, r1 } +// Store_CreateEntityPool_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEntityPool' +type Store_CreateEntityPool_Call struct { + *mock.Call +} + +// CreateEntityPool is a helper method to define mock.On call +// - ctx context.Context +// - entity params.ForgeEntity +// - param params.CreatePoolParams +func (_e *Store_Expecter) CreateEntityPool(ctx interface{}, entity interface{}, param interface{}) *Store_CreateEntityPool_Call { + return &Store_CreateEntityPool_Call{Call: _e.mock.On("CreateEntityPool", ctx, entity, param)} +} + +func (_c *Store_CreateEntityPool_Call) Run(run func(ctx context.Context, entity params.ForgeEntity, param params.CreatePoolParams)) *Store_CreateEntityPool_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(params.CreatePoolParams)) + }) + return _c +} + +func (_c *Store_CreateEntityPool_Call) Return(_a0 params.Pool, _a1 error) *Store_CreateEntityPool_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_CreateEntityPool_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, params.CreatePoolParams) (params.Pool, error)) *Store_CreateEntityPool_Call { + _c.Call.Return(run) + return _c +} + // CreateEntityScaleSet provides a mock function with given fields: _a0, entity, param func (_m *Store) CreateEntityScaleSet(_a0 context.Context, entity params.ForgeEntity, param params.CreateScaleSetParams) (params.ScaleSet, error) 
{ ret := _m.Called(_a0, entity, param) @@ -180,6 +371,36 @@ func (_m *Store) CreateEntityScaleSet(_a0 context.Context, entity params.ForgeEn return r0, r1 } +// Store_CreateEntityScaleSet_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEntityScaleSet' +type Store_CreateEntityScaleSet_Call struct { + *mock.Call +} + +// CreateEntityScaleSet is a helper method to define mock.On call +// - _a0 context.Context +// - entity params.ForgeEntity +// - param params.CreateScaleSetParams +func (_e *Store_Expecter) CreateEntityScaleSet(_a0 interface{}, entity interface{}, param interface{}) *Store_CreateEntityScaleSet_Call { + return &Store_CreateEntityScaleSet_Call{Call: _e.mock.On("CreateEntityScaleSet", _a0, entity, param)} +} + +func (_c *Store_CreateEntityScaleSet_Call) Run(run func(_a0 context.Context, entity params.ForgeEntity, param params.CreateScaleSetParams)) *Store_CreateEntityScaleSet_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(params.CreateScaleSetParams)) + }) + return _c +} + +func (_c *Store_CreateEntityScaleSet_Call) Return(scaleSet params.ScaleSet, err error) *Store_CreateEntityScaleSet_Call { + _c.Call.Return(scaleSet, err) + return _c +} + +func (_c *Store_CreateEntityScaleSet_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, params.CreateScaleSetParams) (params.ScaleSet, error)) *Store_CreateEntityScaleSet_Call { + _c.Call.Return(run) + return _c +} + // CreateGiteaCredentials provides a mock function with given fields: ctx, param func (_m *Store) CreateGiteaCredentials(ctx context.Context, param params.CreateGiteaCredentialsParams) (params.ForgeCredentials, error) { ret := _m.Called(ctx, param) @@ -208,6 +429,35 @@ func (_m *Store) CreateGiteaCredentials(ctx context.Context, param params.Create return r0, r1 } +// Store_CreateGiteaCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateGiteaCredentials' +type Store_CreateGiteaCredentials_Call struct { + *mock.Call +} + +// CreateGiteaCredentials is a helper method to define mock.On call +// - ctx context.Context +// - param params.CreateGiteaCredentialsParams +func (_e *Store_Expecter) CreateGiteaCredentials(ctx interface{}, param interface{}) *Store_CreateGiteaCredentials_Call { + return &Store_CreateGiteaCredentials_Call{Call: _e.mock.On("CreateGiteaCredentials", ctx, param)} +} + +func (_c *Store_CreateGiteaCredentials_Call) Run(run func(ctx context.Context, param params.CreateGiteaCredentialsParams)) *Store_CreateGiteaCredentials_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.CreateGiteaCredentialsParams)) + }) + return _c +} + +func (_c *Store_CreateGiteaCredentials_Call) Return(gtCreds params.ForgeCredentials, err error) *Store_CreateGiteaCredentials_Call { + _c.Call.Return(gtCreds, err) + return _c +} + +func (_c *Store_CreateGiteaCredentials_Call) RunAndReturn(run func(context.Context, params.CreateGiteaCredentialsParams) (params.ForgeCredentials, error)) *Store_CreateGiteaCredentials_Call { + _c.Call.Return(run) + return _c +} + // CreateGiteaEndpoint provides a mock function with given fields: _a0, param func (_m *Store) CreateGiteaEndpoint(_a0 context.Context, param params.CreateGiteaEndpointParams) (params.ForgeEndpoint, error) { ret := _m.Called(_a0, param) @@ -236,6 +486,35 @@ func (_m *Store) CreateGiteaEndpoint(_a0 context.Context, param params.CreateGit return r0, r1 } 
+// Store_CreateGiteaEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateGiteaEndpoint' +type Store_CreateGiteaEndpoint_Call struct { + *mock.Call +} + +// CreateGiteaEndpoint is a helper method to define mock.On call +// - _a0 context.Context +// - param params.CreateGiteaEndpointParams +func (_e *Store_Expecter) CreateGiteaEndpoint(_a0 interface{}, param interface{}) *Store_CreateGiteaEndpoint_Call { + return &Store_CreateGiteaEndpoint_Call{Call: _e.mock.On("CreateGiteaEndpoint", _a0, param)} +} + +func (_c *Store_CreateGiteaEndpoint_Call) Run(run func(_a0 context.Context, param params.CreateGiteaEndpointParams)) *Store_CreateGiteaEndpoint_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.CreateGiteaEndpointParams)) + }) + return _c +} + +func (_c *Store_CreateGiteaEndpoint_Call) Return(ghEndpoint params.ForgeEndpoint, err error) *Store_CreateGiteaEndpoint_Call { + _c.Call.Return(ghEndpoint, err) + return _c +} + +func (_c *Store_CreateGiteaEndpoint_Call) RunAndReturn(run func(context.Context, params.CreateGiteaEndpointParams) (params.ForgeEndpoint, error)) *Store_CreateGiteaEndpoint_Call { + _c.Call.Return(run) + return _c +} + // CreateGithubCredentials provides a mock function with given fields: ctx, param func (_m *Store) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (params.ForgeCredentials, error) { ret := _m.Called(ctx, param) @@ -264,6 +543,35 @@ func (_m *Store) CreateGithubCredentials(ctx context.Context, param params.Creat return r0, r1 } +// Store_CreateGithubCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateGithubCredentials' +type Store_CreateGithubCredentials_Call struct { + *mock.Call +} + +// CreateGithubCredentials is a helper method to define mock.On call +// - ctx context.Context +// - param params.CreateGithubCredentialsParams +func (_e *Store_Expecter) CreateGithubCredentials(ctx interface{}, param interface{}) *Store_CreateGithubCredentials_Call { + return &Store_CreateGithubCredentials_Call{Call: _e.mock.On("CreateGithubCredentials", ctx, param)} +} + +func (_c *Store_CreateGithubCredentials_Call) Run(run func(ctx context.Context, param params.CreateGithubCredentialsParams)) *Store_CreateGithubCredentials_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.CreateGithubCredentialsParams)) + }) + return _c +} + +func (_c *Store_CreateGithubCredentials_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_CreateGithubCredentials_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_CreateGithubCredentials_Call) RunAndReturn(run func(context.Context, params.CreateGithubCredentialsParams) (params.ForgeCredentials, error)) *Store_CreateGithubCredentials_Call { + _c.Call.Return(run) + return _c +} + // CreateGithubEndpoint provides a mock function with given fields: ctx, param func (_m *Store) CreateGithubEndpoint(ctx context.Context, param params.CreateGithubEndpointParams) (params.ForgeEndpoint, error) { ret := _m.Called(ctx, param) @@ -292,6 +600,35 @@ func (_m *Store) CreateGithubEndpoint(ctx context.Context, param params.CreateGi return r0, r1 } +// Store_CreateGithubEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateGithubEndpoint' +type Store_CreateGithubEndpoint_Call struct { + *mock.Call +} + +// CreateGithubEndpoint is a helper method to 
define mock.On call +// - ctx context.Context +// - param params.CreateGithubEndpointParams +func (_e *Store_Expecter) CreateGithubEndpoint(ctx interface{}, param interface{}) *Store_CreateGithubEndpoint_Call { + return &Store_CreateGithubEndpoint_Call{Call: _e.mock.On("CreateGithubEndpoint", ctx, param)} +} + +func (_c *Store_CreateGithubEndpoint_Call) Run(run func(ctx context.Context, param params.CreateGithubEndpointParams)) *Store_CreateGithubEndpoint_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.CreateGithubEndpointParams)) + }) + return _c +} + +func (_c *Store_CreateGithubEndpoint_Call) Return(_a0 params.ForgeEndpoint, _a1 error) *Store_CreateGithubEndpoint_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_CreateGithubEndpoint_Call) RunAndReturn(run func(context.Context, params.CreateGithubEndpointParams) (params.ForgeEndpoint, error)) *Store_CreateGithubEndpoint_Call { + _c.Call.Return(run) + return _c +} + // CreateInstance provides a mock function with given fields: ctx, poolID, param func (_m *Store) CreateInstance(ctx context.Context, poolID string, param params.CreateInstanceParams) (params.Instance, error) { ret := _m.Called(ctx, poolID, param) @@ -320,6 +657,36 @@ func (_m *Store) CreateInstance(ctx context.Context, poolID string, param params return r0, r1 } +// Store_CreateInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateInstance' +type Store_CreateInstance_Call struct { + *mock.Call +} + +// CreateInstance is a helper method to define mock.On call +// - ctx context.Context +// - poolID string +// - param params.CreateInstanceParams +func (_e *Store_Expecter) CreateInstance(ctx interface{}, poolID interface{}, param interface{}) *Store_CreateInstance_Call { + return &Store_CreateInstance_Call{Call: _e.mock.On("CreateInstance", ctx, poolID, param)} +} + +func (_c *Store_CreateInstance_Call) Run(run func(ctx context.Context, poolID string, param params.CreateInstanceParams)) *Store_CreateInstance_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.CreateInstanceParams)) + }) + return _c +} + +func (_c *Store_CreateInstance_Call) Return(_a0 params.Instance, _a1 error) *Store_CreateInstance_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_CreateInstance_Call) RunAndReturn(run func(context.Context, string, params.CreateInstanceParams) (params.Instance, error)) *Store_CreateInstance_Call { + _c.Call.Return(run) + return _c +} + // CreateOrUpdateJob provides a mock function with given fields: ctx, job func (_m *Store) CreateOrUpdateJob(ctx context.Context, job params.Job) (params.Job, error) { ret := _m.Called(ctx, job) @@ -348,6 +715,35 @@ func (_m *Store) CreateOrUpdateJob(ctx context.Context, job params.Job) (params. 
return r0, r1 } +// Store_CreateOrUpdateJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateOrUpdateJob' +type Store_CreateOrUpdateJob_Call struct { + *mock.Call +} + +// CreateOrUpdateJob is a helper method to define mock.On call +// - ctx context.Context +// - job params.Job +func (_e *Store_Expecter) CreateOrUpdateJob(ctx interface{}, job interface{}) *Store_CreateOrUpdateJob_Call { + return &Store_CreateOrUpdateJob_Call{Call: _e.mock.On("CreateOrUpdateJob", ctx, job)} +} + +func (_c *Store_CreateOrUpdateJob_Call) Run(run func(ctx context.Context, job params.Job)) *Store_CreateOrUpdateJob_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.Job)) + }) + return _c +} + +func (_c *Store_CreateOrUpdateJob_Call) Return(_a0 params.Job, _a1 error) *Store_CreateOrUpdateJob_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_CreateOrUpdateJob_Call) RunAndReturn(run func(context.Context, params.Job) (params.Job, error)) *Store_CreateOrUpdateJob_Call { + _c.Call.Return(run) + return _c +} + // CreateOrganization provides a mock function with given fields: ctx, name, credentials, webhookSecret, poolBalancerType func (_m *Store) CreateOrganization(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) (params.Organization, error) { ret := _m.Called(ctx, name, credentials, webhookSecret, poolBalancerType) @@ -376,6 +772,38 @@ func (_m *Store) CreateOrganization(ctx context.Context, name string, credential return r0, r1 } +// Store_CreateOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateOrganization' +type Store_CreateOrganization_Call struct { + *mock.Call +} + +// CreateOrganization is a helper method to define mock.On call +// - ctx context.Context +// - name string +// - credentials params.ForgeCredentials +// - webhookSecret string +// - poolBalancerType params.PoolBalancerType +func (_e *Store_Expecter) CreateOrganization(ctx interface{}, name interface{}, credentials interface{}, webhookSecret interface{}, poolBalancerType interface{}) *Store_CreateOrganization_Call { + return &Store_CreateOrganization_Call{Call: _e.mock.On("CreateOrganization", ctx, name, credentials, webhookSecret, poolBalancerType)} +} + +func (_c *Store_CreateOrganization_Call) Run(run func(ctx context.Context, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType)) *Store_CreateOrganization_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.ForgeCredentials), args[3].(string), args[4].(params.PoolBalancerType)) + }) + return _c +} + +func (_c *Store_CreateOrganization_Call) Return(org params.Organization, err error) *Store_CreateOrganization_Call { + _c.Call.Return(org, err) + return _c +} + +func (_c *Store_CreateOrganization_Call) RunAndReturn(run func(context.Context, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Organization, error)) *Store_CreateOrganization_Call { + _c.Call.Return(run) + return _c +} + // CreateRepository provides a mock function with given fields: ctx, owner, name, credentials, webhookSecret, poolBalancerType func (_m *Store) CreateRepository(ctx context.Context, owner string, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType) 
(params.Repository, error) { ret := _m.Called(ctx, owner, name, credentials, webhookSecret, poolBalancerType) @@ -404,6 +832,39 @@ func (_m *Store) CreateRepository(ctx context.Context, owner string, name string return r0, r1 } +// Store_CreateRepository_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateRepository' +type Store_CreateRepository_Call struct { + *mock.Call +} + +// CreateRepository is a helper method to define mock.On call +// - ctx context.Context +// - owner string +// - name string +// - credentials params.ForgeCredentials +// - webhookSecret string +// - poolBalancerType params.PoolBalancerType +func (_e *Store_Expecter) CreateRepository(ctx interface{}, owner interface{}, name interface{}, credentials interface{}, webhookSecret interface{}, poolBalancerType interface{}) *Store_CreateRepository_Call { + return &Store_CreateRepository_Call{Call: _e.mock.On("CreateRepository", ctx, owner, name, credentials, webhookSecret, poolBalancerType)} +} + +func (_c *Store_CreateRepository_Call) Run(run func(ctx context.Context, owner string, name string, credentials params.ForgeCredentials, webhookSecret string, poolBalancerType params.PoolBalancerType)) *Store_CreateRepository_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string), args[3].(params.ForgeCredentials), args[4].(string), args[5].(params.PoolBalancerType)) + }) + return _c +} + +func (_c *Store_CreateRepository_Call) Return(param params.Repository, err error) *Store_CreateRepository_Call { + _c.Call.Return(param, err) + return _c +} + +func (_c *Store_CreateRepository_Call) RunAndReturn(run func(context.Context, string, string, params.ForgeCredentials, string, params.PoolBalancerType) (params.Repository, error)) *Store_CreateRepository_Call { + _c.Call.Return(run) + return _c +} + // CreateScaleSetInstance provides a mock function with given fields: _a0, scaleSetID, param func (_m *Store) CreateScaleSetInstance(_a0 context.Context, scaleSetID uint, param params.CreateInstanceParams) (params.Instance, error) { ret := _m.Called(_a0, scaleSetID, param) @@ -432,6 +893,36 @@ func (_m *Store) CreateScaleSetInstance(_a0 context.Context, scaleSetID uint, pa return r0, r1 } +// Store_CreateScaleSetInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateScaleSetInstance' +type Store_CreateScaleSetInstance_Call struct { + *mock.Call +} + +// CreateScaleSetInstance is a helper method to define mock.On call +// - _a0 context.Context +// - scaleSetID uint +// - param params.CreateInstanceParams +func (_e *Store_Expecter) CreateScaleSetInstance(_a0 interface{}, scaleSetID interface{}, param interface{}) *Store_CreateScaleSetInstance_Call { + return &Store_CreateScaleSetInstance_Call{Call: _e.mock.On("CreateScaleSetInstance", _a0, scaleSetID, param)} +} + +func (_c *Store_CreateScaleSetInstance_Call) Run(run func(_a0 context.Context, scaleSetID uint, param params.CreateInstanceParams)) *Store_CreateScaleSetInstance_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint), args[2].(params.CreateInstanceParams)) + }) + return _c +} + +func (_c *Store_CreateScaleSetInstance_Call) Return(instance params.Instance, err error) *Store_CreateScaleSetInstance_Call { + _c.Call.Return(instance, err) + return _c +} + +func (_c *Store_CreateScaleSetInstance_Call) RunAndReturn(run func(context.Context, uint, params.CreateInstanceParams) (params.Instance, 
error)) *Store_CreateScaleSetInstance_Call { + _c.Call.Return(run) + return _c +} + // CreateUser provides a mock function with given fields: ctx, user func (_m *Store) CreateUser(ctx context.Context, user params.NewUserParams) (params.User, error) { ret := _m.Called(ctx, user) @@ -460,6 +951,35 @@ func (_m *Store) CreateUser(ctx context.Context, user params.NewUserParams) (par return r0, r1 } +// Store_CreateUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateUser' +type Store_CreateUser_Call struct { + *mock.Call +} + +// CreateUser is a helper method to define mock.On call +// - ctx context.Context +// - user params.NewUserParams +func (_e *Store_Expecter) CreateUser(ctx interface{}, user interface{}) *Store_CreateUser_Call { + return &Store_CreateUser_Call{Call: _e.mock.On("CreateUser", ctx, user)} +} + +func (_c *Store_CreateUser_Call) Run(run func(ctx context.Context, user params.NewUserParams)) *Store_CreateUser_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.NewUserParams)) + }) + return _c +} + +func (_c *Store_CreateUser_Call) Return(_a0 params.User, _a1 error) *Store_CreateUser_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_CreateUser_Call) RunAndReturn(run func(context.Context, params.NewUserParams) (params.User, error)) *Store_CreateUser_Call { + _c.Call.Return(run) + return _c +} + // DeleteCompletedJobs provides a mock function with given fields: ctx func (_m *Store) DeleteCompletedJobs(ctx context.Context) error { ret := _m.Called(ctx) @@ -478,6 +998,34 @@ func (_m *Store) DeleteCompletedJobs(ctx context.Context) error { return r0 } +// Store_DeleteCompletedJobs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteCompletedJobs' +type Store_DeleteCompletedJobs_Call struct { + *mock.Call +} + +// DeleteCompletedJobs is a helper method to define mock.On call +// - ctx context.Context +func (_e *Store_Expecter) DeleteCompletedJobs(ctx interface{}) *Store_DeleteCompletedJobs_Call { + return &Store_DeleteCompletedJobs_Call{Call: _e.mock.On("DeleteCompletedJobs", ctx)} +} + +func (_c *Store_DeleteCompletedJobs_Call) Run(run func(ctx context.Context)) *Store_DeleteCompletedJobs_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Store_DeleteCompletedJobs_Call) Return(_a0 error) *Store_DeleteCompletedJobs_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_DeleteCompletedJobs_Call) RunAndReturn(run func(context.Context) error) *Store_DeleteCompletedJobs_Call { + _c.Call.Return(run) + return _c +} + // DeleteEnterprise provides a mock function with given fields: ctx, enterpriseID func (_m *Store) DeleteEnterprise(ctx context.Context, enterpriseID string) error { ret := _m.Called(ctx, enterpriseID) @@ -496,6 +1044,35 @@ func (_m *Store) DeleteEnterprise(ctx context.Context, enterpriseID string) erro return r0 } +// Store_DeleteEnterprise_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteEnterprise' +type Store_DeleteEnterprise_Call struct { + *mock.Call +} + +// DeleteEnterprise is a helper method to define mock.On call +// - ctx context.Context +// - enterpriseID string +func (_e *Store_Expecter) DeleteEnterprise(ctx interface{}, enterpriseID interface{}) *Store_DeleteEnterprise_Call { + return &Store_DeleteEnterprise_Call{Call: _e.mock.On("DeleteEnterprise", ctx, enterpriseID)} +} + +func (_c 
*Store_DeleteEnterprise_Call) Run(run func(ctx context.Context, enterpriseID string)) *Store_DeleteEnterprise_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_DeleteEnterprise_Call) Return(_a0 error) *Store_DeleteEnterprise_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_DeleteEnterprise_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteEnterprise_Call { + _c.Call.Return(run) + return _c +} + // DeleteEntityPool provides a mock function with given fields: ctx, entity, poolID func (_m *Store) DeleteEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string) error { ret := _m.Called(ctx, entity, poolID) @@ -514,6 +1091,36 @@ func (_m *Store) DeleteEntityPool(ctx context.Context, entity params.ForgeEntity return r0 } +// Store_DeleteEntityPool_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteEntityPool' +type Store_DeleteEntityPool_Call struct { + *mock.Call +} + +// DeleteEntityPool is a helper method to define mock.On call +// - ctx context.Context +// - entity params.ForgeEntity +// - poolID string +func (_e *Store_Expecter) DeleteEntityPool(ctx interface{}, entity interface{}, poolID interface{}) *Store_DeleteEntityPool_Call { + return &Store_DeleteEntityPool_Call{Call: _e.mock.On("DeleteEntityPool", ctx, entity, poolID)} +} + +func (_c *Store_DeleteEntityPool_Call) Run(run func(ctx context.Context, entity params.ForgeEntity, poolID string)) *Store_DeleteEntityPool_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(string)) + }) + return _c +} + +func (_c *Store_DeleteEntityPool_Call) Return(_a0 error) *Store_DeleteEntityPool_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_DeleteEntityPool_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, string) error) *Store_DeleteEntityPool_Call { + _c.Call.Return(run) + return _c +} + // DeleteGiteaCredentials provides a mock function with given fields: ctx, id func (_m *Store) DeleteGiteaCredentials(ctx context.Context, id uint) error { ret := _m.Called(ctx, id) @@ -532,6 +1139,35 @@ func (_m *Store) DeleteGiteaCredentials(ctx context.Context, id uint) error { return r0 } +// Store_DeleteGiteaCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteGiteaCredentials' +type Store_DeleteGiteaCredentials_Call struct { + *mock.Call +} + +// DeleteGiteaCredentials is a helper method to define mock.On call +// - ctx context.Context +// - id uint +func (_e *Store_Expecter) DeleteGiteaCredentials(ctx interface{}, id interface{}) *Store_DeleteGiteaCredentials_Call { + return &Store_DeleteGiteaCredentials_Call{Call: _e.mock.On("DeleteGiteaCredentials", ctx, id)} +} + +func (_c *Store_DeleteGiteaCredentials_Call) Run(run func(ctx context.Context, id uint)) *Store_DeleteGiteaCredentials_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint)) + }) + return _c +} + +func (_c *Store_DeleteGiteaCredentials_Call) Return(err error) *Store_DeleteGiteaCredentials_Call { + _c.Call.Return(err) + return _c +} + +func (_c *Store_DeleteGiteaCredentials_Call) RunAndReturn(run func(context.Context, uint) error) *Store_DeleteGiteaCredentials_Call { + _c.Call.Return(run) + return _c +} + // DeleteGiteaEndpoint provides a mock function with given fields: _a0, name func (_m *Store) 
DeleteGiteaEndpoint(_a0 context.Context, name string) error { ret := _m.Called(_a0, name) @@ -550,6 +1186,35 @@ func (_m *Store) DeleteGiteaEndpoint(_a0 context.Context, name string) error { return r0 } +// Store_DeleteGiteaEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteGiteaEndpoint' +type Store_DeleteGiteaEndpoint_Call struct { + *mock.Call +} + +// DeleteGiteaEndpoint is a helper method to define mock.On call +// - _a0 context.Context +// - name string +func (_e *Store_Expecter) DeleteGiteaEndpoint(_a0 interface{}, name interface{}) *Store_DeleteGiteaEndpoint_Call { + return &Store_DeleteGiteaEndpoint_Call{Call: _e.mock.On("DeleteGiteaEndpoint", _a0, name)} +} + +func (_c *Store_DeleteGiteaEndpoint_Call) Run(run func(_a0 context.Context, name string)) *Store_DeleteGiteaEndpoint_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_DeleteGiteaEndpoint_Call) Return(err error) *Store_DeleteGiteaEndpoint_Call { + _c.Call.Return(err) + return _c +} + +func (_c *Store_DeleteGiteaEndpoint_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteGiteaEndpoint_Call { + _c.Call.Return(run) + return _c +} + // DeleteGithubCredentials provides a mock function with given fields: ctx, id func (_m *Store) DeleteGithubCredentials(ctx context.Context, id uint) error { ret := _m.Called(ctx, id) @@ -568,6 +1233,35 @@ func (_m *Store) DeleteGithubCredentials(ctx context.Context, id uint) error { return r0 } +// Store_DeleteGithubCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteGithubCredentials' +type Store_DeleteGithubCredentials_Call struct { + *mock.Call +} + +// DeleteGithubCredentials is a helper method to define mock.On call +// - ctx context.Context +// - id uint +func (_e *Store_Expecter) DeleteGithubCredentials(ctx interface{}, id interface{}) *Store_DeleteGithubCredentials_Call { + return &Store_DeleteGithubCredentials_Call{Call: _e.mock.On("DeleteGithubCredentials", ctx, id)} +} + +func (_c *Store_DeleteGithubCredentials_Call) Run(run func(ctx context.Context, id uint)) *Store_DeleteGithubCredentials_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint)) + }) + return _c +} + +func (_c *Store_DeleteGithubCredentials_Call) Return(_a0 error) *Store_DeleteGithubCredentials_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_DeleteGithubCredentials_Call) RunAndReturn(run func(context.Context, uint) error) *Store_DeleteGithubCredentials_Call { + _c.Call.Return(run) + return _c +} + // DeleteGithubEndpoint provides a mock function with given fields: ctx, name func (_m *Store) DeleteGithubEndpoint(ctx context.Context, name string) error { ret := _m.Called(ctx, name) @@ -586,6 +1280,35 @@ func (_m *Store) DeleteGithubEndpoint(ctx context.Context, name string) error { return r0 } +// Store_DeleteGithubEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteGithubEndpoint' +type Store_DeleteGithubEndpoint_Call struct { + *mock.Call +} + +// DeleteGithubEndpoint is a helper method to define mock.On call +// - ctx context.Context +// - name string +func (_e *Store_Expecter) DeleteGithubEndpoint(ctx interface{}, name interface{}) *Store_DeleteGithubEndpoint_Call { + return &Store_DeleteGithubEndpoint_Call{Call: _e.mock.On("DeleteGithubEndpoint", ctx, name)} +} + +func (_c 
*Store_DeleteGithubEndpoint_Call) Run(run func(ctx context.Context, name string)) *Store_DeleteGithubEndpoint_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_DeleteGithubEndpoint_Call) Return(_a0 error) *Store_DeleteGithubEndpoint_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_DeleteGithubEndpoint_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteGithubEndpoint_Call { + _c.Call.Return(run) + return _c +} + // DeleteInstance provides a mock function with given fields: ctx, poolID, instanceName func (_m *Store) DeleteInstance(ctx context.Context, poolID string, instanceName string) error { ret := _m.Called(ctx, poolID, instanceName) @@ -604,6 +1327,36 @@ func (_m *Store) DeleteInstance(ctx context.Context, poolID string, instanceName return r0 } +// Store_DeleteInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteInstance' +type Store_DeleteInstance_Call struct { + *mock.Call +} + +// DeleteInstance is a helper method to define mock.On call +// - ctx context.Context +// - poolID string +// - instanceName string +func (_e *Store_Expecter) DeleteInstance(ctx interface{}, poolID interface{}, instanceName interface{}) *Store_DeleteInstance_Call { + return &Store_DeleteInstance_Call{Call: _e.mock.On("DeleteInstance", ctx, poolID, instanceName)} +} + +func (_c *Store_DeleteInstance_Call) Run(run func(ctx context.Context, poolID string, instanceName string)) *Store_DeleteInstance_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string)) + }) + return _c +} + +func (_c *Store_DeleteInstance_Call) Return(_a0 error) *Store_DeleteInstance_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_DeleteInstance_Call) RunAndReturn(run func(context.Context, string, string) error) *Store_DeleteInstance_Call { + _c.Call.Return(run) + return _c +} + // DeleteInstanceByName provides a mock function with given fields: ctx, instanceName func (_m *Store) DeleteInstanceByName(ctx context.Context, instanceName string) error { ret := _m.Called(ctx, instanceName) @@ -622,6 +1375,35 @@ func (_m *Store) DeleteInstanceByName(ctx context.Context, instanceName string) return r0 } +// Store_DeleteInstanceByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteInstanceByName' +type Store_DeleteInstanceByName_Call struct { + *mock.Call +} + +// DeleteInstanceByName is a helper method to define mock.On call +// - ctx context.Context +// - instanceName string +func (_e *Store_Expecter) DeleteInstanceByName(ctx interface{}, instanceName interface{}) *Store_DeleteInstanceByName_Call { + return &Store_DeleteInstanceByName_Call{Call: _e.mock.On("DeleteInstanceByName", ctx, instanceName)} +} + +func (_c *Store_DeleteInstanceByName_Call) Run(run func(ctx context.Context, instanceName string)) *Store_DeleteInstanceByName_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_DeleteInstanceByName_Call) Return(_a0 error) *Store_DeleteInstanceByName_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_DeleteInstanceByName_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteInstanceByName_Call { + _c.Call.Return(run) + return _c +} + // DeleteJob provides a mock function with given fields: ctx, jobID func (_m *Store) DeleteJob(ctx 
context.Context, jobID int64) error { ret := _m.Called(ctx, jobID) @@ -640,6 +1422,35 @@ func (_m *Store) DeleteJob(ctx context.Context, jobID int64) error { return r0 } +// Store_DeleteJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteJob' +type Store_DeleteJob_Call struct { + *mock.Call +} + +// DeleteJob is a helper method to define mock.On call +// - ctx context.Context +// - jobID int64 +func (_e *Store_Expecter) DeleteJob(ctx interface{}, jobID interface{}) *Store_DeleteJob_Call { + return &Store_DeleteJob_Call{Call: _e.mock.On("DeleteJob", ctx, jobID)} +} + +func (_c *Store_DeleteJob_Call) Run(run func(ctx context.Context, jobID int64)) *Store_DeleteJob_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *Store_DeleteJob_Call) Return(_a0 error) *Store_DeleteJob_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_DeleteJob_Call) RunAndReturn(run func(context.Context, int64) error) *Store_DeleteJob_Call { + _c.Call.Return(run) + return _c +} + // DeleteOrganization provides a mock function with given fields: ctx, orgID func (_m *Store) DeleteOrganization(ctx context.Context, orgID string) error { ret := _m.Called(ctx, orgID) @@ -658,6 +1469,35 @@ func (_m *Store) DeleteOrganization(ctx context.Context, orgID string) error { return r0 } +// Store_DeleteOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteOrganization' +type Store_DeleteOrganization_Call struct { + *mock.Call +} + +// DeleteOrganization is a helper method to define mock.On call +// - ctx context.Context +// - orgID string +func (_e *Store_Expecter) DeleteOrganization(ctx interface{}, orgID interface{}) *Store_DeleteOrganization_Call { + return &Store_DeleteOrganization_Call{Call: _e.mock.On("DeleteOrganization", ctx, orgID)} +} + +func (_c *Store_DeleteOrganization_Call) Run(run func(ctx context.Context, orgID string)) *Store_DeleteOrganization_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_DeleteOrganization_Call) Return(_a0 error) *Store_DeleteOrganization_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_DeleteOrganization_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteOrganization_Call { + _c.Call.Return(run) + return _c +} + // DeletePoolByID provides a mock function with given fields: ctx, poolID func (_m *Store) DeletePoolByID(ctx context.Context, poolID string) error { ret := _m.Called(ctx, poolID) @@ -676,6 +1516,35 @@ func (_m *Store) DeletePoolByID(ctx context.Context, poolID string) error { return r0 } +// Store_DeletePoolByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeletePoolByID' +type Store_DeletePoolByID_Call struct { + *mock.Call +} + +// DeletePoolByID is a helper method to define mock.On call +// - ctx context.Context +// - poolID string +func (_e *Store_Expecter) DeletePoolByID(ctx interface{}, poolID interface{}) *Store_DeletePoolByID_Call { + return &Store_DeletePoolByID_Call{Call: _e.mock.On("DeletePoolByID", ctx, poolID)} +} + +func (_c *Store_DeletePoolByID_Call) Run(run func(ctx context.Context, poolID string)) *Store_DeletePoolByID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_DeletePoolByID_Call) Return(_a0 error) 
*Store_DeletePoolByID_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_DeletePoolByID_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeletePoolByID_Call { + _c.Call.Return(run) + return _c +} + // DeleteRepository provides a mock function with given fields: ctx, repoID func (_m *Store) DeleteRepository(ctx context.Context, repoID string) error { ret := _m.Called(ctx, repoID) @@ -694,6 +1563,35 @@ func (_m *Store) DeleteRepository(ctx context.Context, repoID string) error { return r0 } +// Store_DeleteRepository_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteRepository' +type Store_DeleteRepository_Call struct { + *mock.Call +} + +// DeleteRepository is a helper method to define mock.On call +// - ctx context.Context +// - repoID string +func (_e *Store_Expecter) DeleteRepository(ctx interface{}, repoID interface{}) *Store_DeleteRepository_Call { + return &Store_DeleteRepository_Call{Call: _e.mock.On("DeleteRepository", ctx, repoID)} +} + +func (_c *Store_DeleteRepository_Call) Run(run func(ctx context.Context, repoID string)) *Store_DeleteRepository_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_DeleteRepository_Call) Return(_a0 error) *Store_DeleteRepository_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_DeleteRepository_Call) RunAndReturn(run func(context.Context, string) error) *Store_DeleteRepository_Call { + _c.Call.Return(run) + return _c +} + // DeleteScaleSetByID provides a mock function with given fields: ctx, scaleSetID func (_m *Store) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error { ret := _m.Called(ctx, scaleSetID) @@ -712,6 +1610,35 @@ func (_m *Store) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error return r0 } +// Store_DeleteScaleSetByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteScaleSetByID' +type Store_DeleteScaleSetByID_Call struct { + *mock.Call +} + +// DeleteScaleSetByID is a helper method to define mock.On call +// - ctx context.Context +// - scaleSetID uint +func (_e *Store_Expecter) DeleteScaleSetByID(ctx interface{}, scaleSetID interface{}) *Store_DeleteScaleSetByID_Call { + return &Store_DeleteScaleSetByID_Call{Call: _e.mock.On("DeleteScaleSetByID", ctx, scaleSetID)} +} + +func (_c *Store_DeleteScaleSetByID_Call) Run(run func(ctx context.Context, scaleSetID uint)) *Store_DeleteScaleSetByID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint)) + }) + return _c +} + +func (_c *Store_DeleteScaleSetByID_Call) Return(err error) *Store_DeleteScaleSetByID_Call { + _c.Call.Return(err) + return _c +} + +func (_c *Store_DeleteScaleSetByID_Call) RunAndReturn(run func(context.Context, uint) error) *Store_DeleteScaleSetByID_Call { + _c.Call.Return(run) + return _c +} + // FindPoolsMatchingAllTags provides a mock function with given fields: ctx, entityType, entityID, tags func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params.ForgeEntityType, entityID string, tags []string) ([]params.Pool, error) { ret := _m.Called(ctx, entityType, entityID, tags) @@ -742,6 +1669,37 @@ func (_m *Store) FindPoolsMatchingAllTags(ctx context.Context, entityType params return r0, r1 } +// Store_FindPoolsMatchingAllTags_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'FindPoolsMatchingAllTags' +type 
Store_FindPoolsMatchingAllTags_Call struct { + *mock.Call +} + +// FindPoolsMatchingAllTags is a helper method to define mock.On call +// - ctx context.Context +// - entityType params.ForgeEntityType +// - entityID string +// - tags []string +func (_e *Store_Expecter) FindPoolsMatchingAllTags(ctx interface{}, entityType interface{}, entityID interface{}, tags interface{}) *Store_FindPoolsMatchingAllTags_Call { + return &Store_FindPoolsMatchingAllTags_Call{Call: _e.mock.On("FindPoolsMatchingAllTags", ctx, entityType, entityID, tags)} +} + +func (_c *Store_FindPoolsMatchingAllTags_Call) Run(run func(ctx context.Context, entityType params.ForgeEntityType, entityID string, tags []string)) *Store_FindPoolsMatchingAllTags_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntityType), args[2].(string), args[3].([]string)) + }) + return _c +} + +func (_c *Store_FindPoolsMatchingAllTags_Call) Return(_a0 []params.Pool, _a1 error) *Store_FindPoolsMatchingAllTags_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_FindPoolsMatchingAllTags_Call) RunAndReturn(run func(context.Context, params.ForgeEntityType, string, []string) ([]params.Pool, error)) *Store_FindPoolsMatchingAllTags_Call { + _c.Call.Return(run) + return _c +} + // GetAdminUser provides a mock function with given fields: ctx func (_m *Store) GetAdminUser(ctx context.Context) (params.User, error) { ret := _m.Called(ctx) @@ -770,6 +1728,34 @@ func (_m *Store) GetAdminUser(ctx context.Context) (params.User, error) { return r0, r1 } +// Store_GetAdminUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAdminUser' +type Store_GetAdminUser_Call struct { + *mock.Call +} + +// GetAdminUser is a helper method to define mock.On call +// - ctx context.Context +func (_e *Store_Expecter) GetAdminUser(ctx interface{}) *Store_GetAdminUser_Call { + return &Store_GetAdminUser_Call{Call: _e.mock.On("GetAdminUser", ctx)} +} + +func (_c *Store_GetAdminUser_Call) Run(run func(ctx context.Context)) *Store_GetAdminUser_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Store_GetAdminUser_Call) Return(_a0 params.User, _a1 error) *Store_GetAdminUser_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetAdminUser_Call) RunAndReturn(run func(context.Context) (params.User, error)) *Store_GetAdminUser_Call { + _c.Call.Return(run) + return _c +} + // GetEnterprise provides a mock function with given fields: ctx, name, endpointName func (_m *Store) GetEnterprise(ctx context.Context, name string, endpointName string) (params.Enterprise, error) { ret := _m.Called(ctx, name, endpointName) @@ -798,6 +1784,36 @@ func (_m *Store) GetEnterprise(ctx context.Context, name string, endpointName st return r0, r1 } +// Store_GetEnterprise_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEnterprise' +type Store_GetEnterprise_Call struct { + *mock.Call +} + +// GetEnterprise is a helper method to define mock.On call +// - ctx context.Context +// - name string +// - endpointName string +func (_e *Store_Expecter) GetEnterprise(ctx interface{}, name interface{}, endpointName interface{}) *Store_GetEnterprise_Call { + return &Store_GetEnterprise_Call{Call: _e.mock.On("GetEnterprise", ctx, name, endpointName)} +} + +func (_c *Store_GetEnterprise_Call) Run(run func(ctx context.Context, name string, endpointName string)) *Store_GetEnterprise_Call { + 
_c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string)) + }) + return _c +} + +func (_c *Store_GetEnterprise_Call) Return(_a0 params.Enterprise, _a1 error) *Store_GetEnterprise_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetEnterprise_Call) RunAndReturn(run func(context.Context, string, string) (params.Enterprise, error)) *Store_GetEnterprise_Call { + _c.Call.Return(run) + return _c +} + // GetEnterpriseByID provides a mock function with given fields: ctx, enterpriseID func (_m *Store) GetEnterpriseByID(ctx context.Context, enterpriseID string) (params.Enterprise, error) { ret := _m.Called(ctx, enterpriseID) @@ -826,6 +1842,35 @@ func (_m *Store) GetEnterpriseByID(ctx context.Context, enterpriseID string) (pa return r0, r1 } +// Store_GetEnterpriseByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEnterpriseByID' +type Store_GetEnterpriseByID_Call struct { + *mock.Call +} + +// GetEnterpriseByID is a helper method to define mock.On call +// - ctx context.Context +// - enterpriseID string +func (_e *Store_Expecter) GetEnterpriseByID(ctx interface{}, enterpriseID interface{}) *Store_GetEnterpriseByID_Call { + return &Store_GetEnterpriseByID_Call{Call: _e.mock.On("GetEnterpriseByID", ctx, enterpriseID)} +} + +func (_c *Store_GetEnterpriseByID_Call) Run(run func(ctx context.Context, enterpriseID string)) *Store_GetEnterpriseByID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_GetEnterpriseByID_Call) Return(_a0 params.Enterprise, _a1 error) *Store_GetEnterpriseByID_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetEnterpriseByID_Call) RunAndReturn(run func(context.Context, string) (params.Enterprise, error)) *Store_GetEnterpriseByID_Call { + _c.Call.Return(run) + return _c +} + // GetEntityPool provides a mock function with given fields: ctx, entity, poolID func (_m *Store) GetEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string) (params.Pool, error) { ret := _m.Called(ctx, entity, poolID) @@ -854,6 +1899,36 @@ func (_m *Store) GetEntityPool(ctx context.Context, entity params.ForgeEntity, p return r0, r1 } +// Store_GetEntityPool_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityPool' +type Store_GetEntityPool_Call struct { + *mock.Call +} + +// GetEntityPool is a helper method to define mock.On call +// - ctx context.Context +// - entity params.ForgeEntity +// - poolID string +func (_e *Store_Expecter) GetEntityPool(ctx interface{}, entity interface{}, poolID interface{}) *Store_GetEntityPool_Call { + return &Store_GetEntityPool_Call{Call: _e.mock.On("GetEntityPool", ctx, entity, poolID)} +} + +func (_c *Store_GetEntityPool_Call) Run(run func(ctx context.Context, entity params.ForgeEntity, poolID string)) *Store_GetEntityPool_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(string)) + }) + return _c +} + +func (_c *Store_GetEntityPool_Call) Return(_a0 params.Pool, _a1 error) *Store_GetEntityPool_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetEntityPool_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, string) (params.Pool, error)) *Store_GetEntityPool_Call { + _c.Call.Return(run) + return _c +} + // GetForgeEntity provides a mock function with given fields: _a0, entityType, 
entityID func (_m *Store) GetForgeEntity(_a0 context.Context, entityType params.ForgeEntityType, entityID string) (params.ForgeEntity, error) { ret := _m.Called(_a0, entityType, entityID) @@ -882,6 +1957,36 @@ func (_m *Store) GetForgeEntity(_a0 context.Context, entityType params.ForgeEnti return r0, r1 } +// Store_GetForgeEntity_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetForgeEntity' +type Store_GetForgeEntity_Call struct { + *mock.Call +} + +// GetForgeEntity is a helper method to define mock.On call +// - _a0 context.Context +// - entityType params.ForgeEntityType +// - entityID string +func (_e *Store_Expecter) GetForgeEntity(_a0 interface{}, entityType interface{}, entityID interface{}) *Store_GetForgeEntity_Call { + return &Store_GetForgeEntity_Call{Call: _e.mock.On("GetForgeEntity", _a0, entityType, entityID)} +} + +func (_c *Store_GetForgeEntity_Call) Run(run func(_a0 context.Context, entityType params.ForgeEntityType, entityID string)) *Store_GetForgeEntity_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntityType), args[2].(string)) + }) + return _c +} + +func (_c *Store_GetForgeEntity_Call) Return(_a0 params.ForgeEntity, _a1 error) *Store_GetForgeEntity_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetForgeEntity_Call) RunAndReturn(run func(context.Context, params.ForgeEntityType, string) (params.ForgeEntity, error)) *Store_GetForgeEntity_Call { + _c.Call.Return(run) + return _c +} + // GetGiteaCredentials provides a mock function with given fields: ctx, id, detailed func (_m *Store) GetGiteaCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) { ret := _m.Called(ctx, id, detailed) @@ -910,6 +2015,36 @@ func (_m *Store) GetGiteaCredentials(ctx context.Context, id uint, detailed bool return r0, r1 } +// Store_GetGiteaCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGiteaCredentials' +type Store_GetGiteaCredentials_Call struct { + *mock.Call +} + +// GetGiteaCredentials is a helper method to define mock.On call +// - ctx context.Context +// - id uint +// - detailed bool +func (_e *Store_Expecter) GetGiteaCredentials(ctx interface{}, id interface{}, detailed interface{}) *Store_GetGiteaCredentials_Call { + return &Store_GetGiteaCredentials_Call{Call: _e.mock.On("GetGiteaCredentials", ctx, id, detailed)} +} + +func (_c *Store_GetGiteaCredentials_Call) Run(run func(ctx context.Context, id uint, detailed bool)) *Store_GetGiteaCredentials_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint), args[2].(bool)) + }) + return _c +} + +func (_c *Store_GetGiteaCredentials_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_GetGiteaCredentials_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetGiteaCredentials_Call) RunAndReturn(run func(context.Context, uint, bool) (params.ForgeCredentials, error)) *Store_GetGiteaCredentials_Call { + _c.Call.Return(run) + return _c +} + // GetGiteaCredentialsByName provides a mock function with given fields: ctx, name, detailed func (_m *Store) GetGiteaCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) { ret := _m.Called(ctx, name, detailed) @@ -938,6 +2073,36 @@ func (_m *Store) GetGiteaCredentialsByName(ctx context.Context, name string, det return r0, r1 } +// Store_GetGiteaCredentialsByName_Call is a *mock.Call that 
shadows Run/Return methods with type explicit version for method 'GetGiteaCredentialsByName' +type Store_GetGiteaCredentialsByName_Call struct { + *mock.Call +} + +// GetGiteaCredentialsByName is a helper method to define mock.On call +// - ctx context.Context +// - name string +// - detailed bool +func (_e *Store_Expecter) GetGiteaCredentialsByName(ctx interface{}, name interface{}, detailed interface{}) *Store_GetGiteaCredentialsByName_Call { + return &Store_GetGiteaCredentialsByName_Call{Call: _e.mock.On("GetGiteaCredentialsByName", ctx, name, detailed)} +} + +func (_c *Store_GetGiteaCredentialsByName_Call) Run(run func(ctx context.Context, name string, detailed bool)) *Store_GetGiteaCredentialsByName_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(bool)) + }) + return _c +} + +func (_c *Store_GetGiteaCredentialsByName_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_GetGiteaCredentialsByName_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetGiteaCredentialsByName_Call) RunAndReturn(run func(context.Context, string, bool) (params.ForgeCredentials, error)) *Store_GetGiteaCredentialsByName_Call { + _c.Call.Return(run) + return _c +} + // GetGiteaEndpoint provides a mock function with given fields: _a0, name func (_m *Store) GetGiteaEndpoint(_a0 context.Context, name string) (params.ForgeEndpoint, error) { ret := _m.Called(_a0, name) @@ -966,6 +2131,35 @@ func (_m *Store) GetGiteaEndpoint(_a0 context.Context, name string) (params.Forg return r0, r1 } +// Store_GetGiteaEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGiteaEndpoint' +type Store_GetGiteaEndpoint_Call struct { + *mock.Call +} + +// GetGiteaEndpoint is a helper method to define mock.On call +// - _a0 context.Context +// - name string +func (_e *Store_Expecter) GetGiteaEndpoint(_a0 interface{}, name interface{}) *Store_GetGiteaEndpoint_Call { + return &Store_GetGiteaEndpoint_Call{Call: _e.mock.On("GetGiteaEndpoint", _a0, name)} +} + +func (_c *Store_GetGiteaEndpoint_Call) Run(run func(_a0 context.Context, name string)) *Store_GetGiteaEndpoint_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_GetGiteaEndpoint_Call) Return(_a0 params.ForgeEndpoint, _a1 error) *Store_GetGiteaEndpoint_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetGiteaEndpoint_Call) RunAndReturn(run func(context.Context, string) (params.ForgeEndpoint, error)) *Store_GetGiteaEndpoint_Call { + _c.Call.Return(run) + return _c +} + // GetGithubCredentials provides a mock function with given fields: ctx, id, detailed func (_m *Store) GetGithubCredentials(ctx context.Context, id uint, detailed bool) (params.ForgeCredentials, error) { ret := _m.Called(ctx, id, detailed) @@ -994,6 +2188,36 @@ func (_m *Store) GetGithubCredentials(ctx context.Context, id uint, detailed boo return r0, r1 } +// Store_GetGithubCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGithubCredentials' +type Store_GetGithubCredentials_Call struct { + *mock.Call +} + +// GetGithubCredentials is a helper method to define mock.On call +// - ctx context.Context +// - id uint +// - detailed bool +func (_e *Store_Expecter) GetGithubCredentials(ctx interface{}, id interface{}, detailed interface{}) *Store_GetGithubCredentials_Call { + return &Store_GetGithubCredentials_Call{Call: 
_e.mock.On("GetGithubCredentials", ctx, id, detailed)} +} + +func (_c *Store_GetGithubCredentials_Call) Run(run func(ctx context.Context, id uint, detailed bool)) *Store_GetGithubCredentials_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint), args[2].(bool)) + }) + return _c +} + +func (_c *Store_GetGithubCredentials_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_GetGithubCredentials_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetGithubCredentials_Call) RunAndReturn(run func(context.Context, uint, bool) (params.ForgeCredentials, error)) *Store_GetGithubCredentials_Call { + _c.Call.Return(run) + return _c +} + // GetGithubCredentialsByName provides a mock function with given fields: ctx, name, detailed func (_m *Store) GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) { ret := _m.Called(ctx, name, detailed) @@ -1022,6 +2246,36 @@ func (_m *Store) GetGithubCredentialsByName(ctx context.Context, name string, de return r0, r1 } +// Store_GetGithubCredentialsByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGithubCredentialsByName' +type Store_GetGithubCredentialsByName_Call struct { + *mock.Call +} + +// GetGithubCredentialsByName is a helper method to define mock.On call +// - ctx context.Context +// - name string +// - detailed bool +func (_e *Store_Expecter) GetGithubCredentialsByName(ctx interface{}, name interface{}, detailed interface{}) *Store_GetGithubCredentialsByName_Call { + return &Store_GetGithubCredentialsByName_Call{Call: _e.mock.On("GetGithubCredentialsByName", ctx, name, detailed)} +} + +func (_c *Store_GetGithubCredentialsByName_Call) Run(run func(ctx context.Context, name string, detailed bool)) *Store_GetGithubCredentialsByName_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(bool)) + }) + return _c +} + +func (_c *Store_GetGithubCredentialsByName_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_GetGithubCredentialsByName_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetGithubCredentialsByName_Call) RunAndReturn(run func(context.Context, string, bool) (params.ForgeCredentials, error)) *Store_GetGithubCredentialsByName_Call { + _c.Call.Return(run) + return _c +} + // GetGithubEndpoint provides a mock function with given fields: ctx, name func (_m *Store) GetGithubEndpoint(ctx context.Context, name string) (params.ForgeEndpoint, error) { ret := _m.Called(ctx, name) @@ -1050,6 +2304,35 @@ func (_m *Store) GetGithubEndpoint(ctx context.Context, name string) (params.For return r0, r1 } +// Store_GetGithubEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGithubEndpoint' +type Store_GetGithubEndpoint_Call struct { + *mock.Call +} + +// GetGithubEndpoint is a helper method to define mock.On call +// - ctx context.Context +// - name string +func (_e *Store_Expecter) GetGithubEndpoint(ctx interface{}, name interface{}) *Store_GetGithubEndpoint_Call { + return &Store_GetGithubEndpoint_Call{Call: _e.mock.On("GetGithubEndpoint", ctx, name)} +} + +func (_c *Store_GetGithubEndpoint_Call) Run(run func(ctx context.Context, name string)) *Store_GetGithubEndpoint_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_GetGithubEndpoint_Call) Return(_a0 params.ForgeEndpoint, 
_a1 error) *Store_GetGithubEndpoint_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetGithubEndpoint_Call) RunAndReturn(run func(context.Context, string) (params.ForgeEndpoint, error)) *Store_GetGithubEndpoint_Call { + _c.Call.Return(run) + return _c +} + // GetInstanceByName provides a mock function with given fields: ctx, instanceName func (_m *Store) GetInstanceByName(ctx context.Context, instanceName string) (params.Instance, error) { ret := _m.Called(ctx, instanceName) @@ -1078,6 +2361,35 @@ func (_m *Store) GetInstanceByName(ctx context.Context, instanceName string) (pa return r0, r1 } +// Store_GetInstanceByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetInstanceByName' +type Store_GetInstanceByName_Call struct { + *mock.Call +} + +// GetInstanceByName is a helper method to define mock.On call +// - ctx context.Context +// - instanceName string +func (_e *Store_Expecter) GetInstanceByName(ctx interface{}, instanceName interface{}) *Store_GetInstanceByName_Call { + return &Store_GetInstanceByName_Call{Call: _e.mock.On("GetInstanceByName", ctx, instanceName)} +} + +func (_c *Store_GetInstanceByName_Call) Run(run func(ctx context.Context, instanceName string)) *Store_GetInstanceByName_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_GetInstanceByName_Call) Return(_a0 params.Instance, _a1 error) *Store_GetInstanceByName_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetInstanceByName_Call) RunAndReturn(run func(context.Context, string) (params.Instance, error)) *Store_GetInstanceByName_Call { + _c.Call.Return(run) + return _c +} + // GetJobByID provides a mock function with given fields: ctx, jobID func (_m *Store) GetJobByID(ctx context.Context, jobID int64) (params.Job, error) { ret := _m.Called(ctx, jobID) @@ -1106,6 +2418,35 @@ func (_m *Store) GetJobByID(ctx context.Context, jobID int64) (params.Job, error return r0, r1 } +// Store_GetJobByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetJobByID' +type Store_GetJobByID_Call struct { + *mock.Call +} + +// GetJobByID is a helper method to define mock.On call +// - ctx context.Context +// - jobID int64 +func (_e *Store_Expecter) GetJobByID(ctx interface{}, jobID interface{}) *Store_GetJobByID_Call { + return &Store_GetJobByID_Call{Call: _e.mock.On("GetJobByID", ctx, jobID)} +} + +func (_c *Store_GetJobByID_Call) Run(run func(ctx context.Context, jobID int64)) *Store_GetJobByID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *Store_GetJobByID_Call) Return(_a0 params.Job, _a1 error) *Store_GetJobByID_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetJobByID_Call) RunAndReturn(run func(context.Context, int64) (params.Job, error)) *Store_GetJobByID_Call { + _c.Call.Return(run) + return _c +} + // GetOrganization provides a mock function with given fields: ctx, name, endpointName func (_m *Store) GetOrganization(ctx context.Context, name string, endpointName string) (params.Organization, error) { ret := _m.Called(ctx, name, endpointName) @@ -1134,6 +2475,36 @@ func (_m *Store) GetOrganization(ctx context.Context, name string, endpointName return r0, r1 } +// Store_GetOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetOrganization' +type 
Store_GetOrganization_Call struct { + *mock.Call +} + +// GetOrganization is a helper method to define mock.On call +// - ctx context.Context +// - name string +// - endpointName string +func (_e *Store_Expecter) GetOrganization(ctx interface{}, name interface{}, endpointName interface{}) *Store_GetOrganization_Call { + return &Store_GetOrganization_Call{Call: _e.mock.On("GetOrganization", ctx, name, endpointName)} +} + +func (_c *Store_GetOrganization_Call) Run(run func(ctx context.Context, name string, endpointName string)) *Store_GetOrganization_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string)) + }) + return _c +} + +func (_c *Store_GetOrganization_Call) Return(_a0 params.Organization, _a1 error) *Store_GetOrganization_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetOrganization_Call) RunAndReturn(run func(context.Context, string, string) (params.Organization, error)) *Store_GetOrganization_Call { + _c.Call.Return(run) + return _c +} + // GetOrganizationByID provides a mock function with given fields: ctx, orgID func (_m *Store) GetOrganizationByID(ctx context.Context, orgID string) (params.Organization, error) { ret := _m.Called(ctx, orgID) @@ -1162,6 +2533,35 @@ func (_m *Store) GetOrganizationByID(ctx context.Context, orgID string) (params. return r0, r1 } +// Store_GetOrganizationByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetOrganizationByID' +type Store_GetOrganizationByID_Call struct { + *mock.Call +} + +// GetOrganizationByID is a helper method to define mock.On call +// - ctx context.Context +// - orgID string +func (_e *Store_Expecter) GetOrganizationByID(ctx interface{}, orgID interface{}) *Store_GetOrganizationByID_Call { + return &Store_GetOrganizationByID_Call{Call: _e.mock.On("GetOrganizationByID", ctx, orgID)} +} + +func (_c *Store_GetOrganizationByID_Call) Run(run func(ctx context.Context, orgID string)) *Store_GetOrganizationByID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_GetOrganizationByID_Call) Return(_a0 params.Organization, _a1 error) *Store_GetOrganizationByID_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetOrganizationByID_Call) RunAndReturn(run func(context.Context, string) (params.Organization, error)) *Store_GetOrganizationByID_Call { + _c.Call.Return(run) + return _c +} + // GetPoolByID provides a mock function with given fields: ctx, poolID func (_m *Store) GetPoolByID(ctx context.Context, poolID string) (params.Pool, error) { ret := _m.Called(ctx, poolID) @@ -1190,6 +2590,35 @@ func (_m *Store) GetPoolByID(ctx context.Context, poolID string) (params.Pool, e return r0, r1 } +// Store_GetPoolByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetPoolByID' +type Store_GetPoolByID_Call struct { + *mock.Call +} + +// GetPoolByID is a helper method to define mock.On call +// - ctx context.Context +// - poolID string +func (_e *Store_Expecter) GetPoolByID(ctx interface{}, poolID interface{}) *Store_GetPoolByID_Call { + return &Store_GetPoolByID_Call{Call: _e.mock.On("GetPoolByID", ctx, poolID)} +} + +func (_c *Store_GetPoolByID_Call) Run(run func(ctx context.Context, poolID string)) *Store_GetPoolByID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_GetPoolByID_Call) 
Return(_a0 params.Pool, _a1 error) *Store_GetPoolByID_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetPoolByID_Call) RunAndReturn(run func(context.Context, string) (params.Pool, error)) *Store_GetPoolByID_Call { + _c.Call.Return(run) + return _c +} + // GetPoolInstanceByName provides a mock function with given fields: ctx, poolID, instanceName func (_m *Store) GetPoolInstanceByName(ctx context.Context, poolID string, instanceName string) (params.Instance, error) { ret := _m.Called(ctx, poolID, instanceName) @@ -1218,6 +2647,36 @@ func (_m *Store) GetPoolInstanceByName(ctx context.Context, poolID string, insta return r0, r1 } +// Store_GetPoolInstanceByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetPoolInstanceByName' +type Store_GetPoolInstanceByName_Call struct { + *mock.Call +} + +// GetPoolInstanceByName is a helper method to define mock.On call +// - ctx context.Context +// - poolID string +// - instanceName string +func (_e *Store_Expecter) GetPoolInstanceByName(ctx interface{}, poolID interface{}, instanceName interface{}) *Store_GetPoolInstanceByName_Call { + return &Store_GetPoolInstanceByName_Call{Call: _e.mock.On("GetPoolInstanceByName", ctx, poolID, instanceName)} +} + +func (_c *Store_GetPoolInstanceByName_Call) Run(run func(ctx context.Context, poolID string, instanceName string)) *Store_GetPoolInstanceByName_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string)) + }) + return _c +} + +func (_c *Store_GetPoolInstanceByName_Call) Return(_a0 params.Instance, _a1 error) *Store_GetPoolInstanceByName_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetPoolInstanceByName_Call) RunAndReturn(run func(context.Context, string, string) (params.Instance, error)) *Store_GetPoolInstanceByName_Call { + _c.Call.Return(run) + return _c +} + // GetRepository provides a mock function with given fields: ctx, owner, name, endpointName func (_m *Store) GetRepository(ctx context.Context, owner string, name string, endpointName string) (params.Repository, error) { ret := _m.Called(ctx, owner, name, endpointName) @@ -1246,6 +2705,37 @@ func (_m *Store) GetRepository(ctx context.Context, owner string, name string, e return r0, r1 } +// Store_GetRepository_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRepository' +type Store_GetRepository_Call struct { + *mock.Call +} + +// GetRepository is a helper method to define mock.On call +// - ctx context.Context +// - owner string +// - name string +// - endpointName string +func (_e *Store_Expecter) GetRepository(ctx interface{}, owner interface{}, name interface{}, endpointName interface{}) *Store_GetRepository_Call { + return &Store_GetRepository_Call{Call: _e.mock.On("GetRepository", ctx, owner, name, endpointName)} +} + +func (_c *Store_GetRepository_Call) Run(run func(ctx context.Context, owner string, name string, endpointName string)) *Store_GetRepository_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string), args[3].(string)) + }) + return _c +} + +func (_c *Store_GetRepository_Call) Return(_a0 params.Repository, _a1 error) *Store_GetRepository_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetRepository_Call) RunAndReturn(run func(context.Context, string, string, string) (params.Repository, error)) *Store_GetRepository_Call { + _c.Call.Return(run) + return _c +} + 
// GetRepositoryByID provides a mock function with given fields: ctx, repoID func (_m *Store) GetRepositoryByID(ctx context.Context, repoID string) (params.Repository, error) { ret := _m.Called(ctx, repoID) @@ -1274,6 +2764,35 @@ func (_m *Store) GetRepositoryByID(ctx context.Context, repoID string) (params.R return r0, r1 } +// Store_GetRepositoryByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRepositoryByID' +type Store_GetRepositoryByID_Call struct { + *mock.Call +} + +// GetRepositoryByID is a helper method to define mock.On call +// - ctx context.Context +// - repoID string +func (_e *Store_Expecter) GetRepositoryByID(ctx interface{}, repoID interface{}) *Store_GetRepositoryByID_Call { + return &Store_GetRepositoryByID_Call{Call: _e.mock.On("GetRepositoryByID", ctx, repoID)} +} + +func (_c *Store_GetRepositoryByID_Call) Run(run func(ctx context.Context, repoID string)) *Store_GetRepositoryByID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_GetRepositoryByID_Call) Return(_a0 params.Repository, _a1 error) *Store_GetRepositoryByID_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetRepositoryByID_Call) RunAndReturn(run func(context.Context, string) (params.Repository, error)) *Store_GetRepositoryByID_Call { + _c.Call.Return(run) + return _c +} + // GetScaleSetByID provides a mock function with given fields: ctx, scaleSet func (_m *Store) GetScaleSetByID(ctx context.Context, scaleSet uint) (params.ScaleSet, error) { ret := _m.Called(ctx, scaleSet) @@ -1302,6 +2821,35 @@ func (_m *Store) GetScaleSetByID(ctx context.Context, scaleSet uint) (params.Sca return r0, r1 } +// Store_GetScaleSetByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetScaleSetByID' +type Store_GetScaleSetByID_Call struct { + *mock.Call +} + +// GetScaleSetByID is a helper method to define mock.On call +// - ctx context.Context +// - scaleSet uint +func (_e *Store_Expecter) GetScaleSetByID(ctx interface{}, scaleSet interface{}) *Store_GetScaleSetByID_Call { + return &Store_GetScaleSetByID_Call{Call: _e.mock.On("GetScaleSetByID", ctx, scaleSet)} +} + +func (_c *Store_GetScaleSetByID_Call) Run(run func(ctx context.Context, scaleSet uint)) *Store_GetScaleSetByID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint)) + }) + return _c +} + +func (_c *Store_GetScaleSetByID_Call) Return(_a0 params.ScaleSet, _a1 error) *Store_GetScaleSetByID_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetScaleSetByID_Call) RunAndReturn(run func(context.Context, uint) (params.ScaleSet, error)) *Store_GetScaleSetByID_Call { + _c.Call.Return(run) + return _c +} + // GetUser provides a mock function with given fields: ctx, user func (_m *Store) GetUser(ctx context.Context, user string) (params.User, error) { ret := _m.Called(ctx, user) @@ -1330,6 +2878,35 @@ func (_m *Store) GetUser(ctx context.Context, user string) (params.User, error) return r0, r1 } +// Store_GetUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetUser' +type Store_GetUser_Call struct { + *mock.Call +} + +// GetUser is a helper method to define mock.On call +// - ctx context.Context +// - user string +func (_e *Store_Expecter) GetUser(ctx interface{}, user interface{}) *Store_GetUser_Call { + return &Store_GetUser_Call{Call: _e.mock.On("GetUser", ctx, user)} +} 
+ +func (_c *Store_GetUser_Call) Run(run func(ctx context.Context, user string)) *Store_GetUser_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_GetUser_Call) Return(_a0 params.User, _a1 error) *Store_GetUser_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetUser_Call) RunAndReturn(run func(context.Context, string) (params.User, error)) *Store_GetUser_Call { + _c.Call.Return(run) + return _c +} + // GetUserByID provides a mock function with given fields: ctx, userID func (_m *Store) GetUserByID(ctx context.Context, userID string) (params.User, error) { ret := _m.Called(ctx, userID) @@ -1358,6 +2935,35 @@ func (_m *Store) GetUserByID(ctx context.Context, userID string) (params.User, e return r0, r1 } +// Store_GetUserByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetUserByID' +type Store_GetUserByID_Call struct { + *mock.Call +} + +// GetUserByID is a helper method to define mock.On call +// - ctx context.Context +// - userID string +func (_e *Store_Expecter) GetUserByID(ctx interface{}, userID interface{}) *Store_GetUserByID_Call { + return &Store_GetUserByID_Call{Call: _e.mock.On("GetUserByID", ctx, userID)} +} + +func (_c *Store_GetUserByID_Call) Run(run func(ctx context.Context, userID string)) *Store_GetUserByID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_GetUserByID_Call) Return(_a0 params.User, _a1 error) *Store_GetUserByID_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_GetUserByID_Call) RunAndReturn(run func(context.Context, string) (params.User, error)) *Store_GetUserByID_Call { + _c.Call.Return(run) + return _c +} + // HasAdminUser provides a mock function with given fields: ctx func (_m *Store) HasAdminUser(ctx context.Context) bool { ret := _m.Called(ctx) @@ -1376,6 +2982,34 @@ func (_m *Store) HasAdminUser(ctx context.Context) bool { return r0 } +// Store_HasAdminUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HasAdminUser' +type Store_HasAdminUser_Call struct { + *mock.Call +} + +// HasAdminUser is a helper method to define mock.On call +// - ctx context.Context +func (_e *Store_Expecter) HasAdminUser(ctx interface{}) *Store_HasAdminUser_Call { + return &Store_HasAdminUser_Call{Call: _e.mock.On("HasAdminUser", ctx)} +} + +func (_c *Store_HasAdminUser_Call) Run(run func(ctx context.Context)) *Store_HasAdminUser_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Store_HasAdminUser_Call) Return(_a0 bool) *Store_HasAdminUser_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_HasAdminUser_Call) RunAndReturn(run func(context.Context) bool) *Store_HasAdminUser_Call { + _c.Call.Return(run) + return _c +} + // InitController provides a mock function with no fields func (_m *Store) InitController() (params.ControllerInfo, error) { ret := _m.Called() @@ -1404,6 +3038,33 @@ func (_m *Store) InitController() (params.ControllerInfo, error) { return r0, r1 } +// Store_InitController_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'InitController' +type Store_InitController_Call struct { + *mock.Call +} + +// InitController is a helper method to define mock.On call +func (_e *Store_Expecter) InitController() *Store_InitController_Call { + return 
&Store_InitController_Call{Call: _e.mock.On("InitController")} +} + +func (_c *Store_InitController_Call) Run(run func()) *Store_InitController_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Store_InitController_Call) Return(_a0 params.ControllerInfo, _a1 error) *Store_InitController_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_InitController_Call) RunAndReturn(run func() (params.ControllerInfo, error)) *Store_InitController_Call { + _c.Call.Return(run) + return _c +} + // ListAllInstances provides a mock function with given fields: ctx func (_m *Store) ListAllInstances(ctx context.Context) ([]params.Instance, error) { ret := _m.Called(ctx) @@ -1434,6 +3095,34 @@ func (_m *Store) ListAllInstances(ctx context.Context) ([]params.Instance, error return r0, r1 } +// Store_ListAllInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAllInstances' +type Store_ListAllInstances_Call struct { + *mock.Call +} + +// ListAllInstances is a helper method to define mock.On call +// - ctx context.Context +func (_e *Store_Expecter) ListAllInstances(ctx interface{}) *Store_ListAllInstances_Call { + return &Store_ListAllInstances_Call{Call: _e.mock.On("ListAllInstances", ctx)} +} + +func (_c *Store_ListAllInstances_Call) Run(run func(ctx context.Context)) *Store_ListAllInstances_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Store_ListAllInstances_Call) Return(_a0 []params.Instance, _a1 error) *Store_ListAllInstances_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListAllInstances_Call) RunAndReturn(run func(context.Context) ([]params.Instance, error)) *Store_ListAllInstances_Call { + _c.Call.Return(run) + return _c +} + // ListAllJobs provides a mock function with given fields: ctx func (_m *Store) ListAllJobs(ctx context.Context) ([]params.Job, error) { ret := _m.Called(ctx) @@ -1464,6 +3153,34 @@ func (_m *Store) ListAllJobs(ctx context.Context) ([]params.Job, error) { return r0, r1 } +// Store_ListAllJobs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAllJobs' +type Store_ListAllJobs_Call struct { + *mock.Call +} + +// ListAllJobs is a helper method to define mock.On call +// - ctx context.Context +func (_e *Store_Expecter) ListAllJobs(ctx interface{}) *Store_ListAllJobs_Call { + return &Store_ListAllJobs_Call{Call: _e.mock.On("ListAllJobs", ctx)} +} + +func (_c *Store_ListAllJobs_Call) Run(run func(ctx context.Context)) *Store_ListAllJobs_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Store_ListAllJobs_Call) Return(_a0 []params.Job, _a1 error) *Store_ListAllJobs_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListAllJobs_Call) RunAndReturn(run func(context.Context) ([]params.Job, error)) *Store_ListAllJobs_Call { + _c.Call.Return(run) + return _c +} + // ListAllPools provides a mock function with given fields: ctx func (_m *Store) ListAllPools(ctx context.Context) ([]params.Pool, error) { ret := _m.Called(ctx) @@ -1494,6 +3211,34 @@ func (_m *Store) ListAllPools(ctx context.Context) ([]params.Pool, error) { return r0, r1 } +// Store_ListAllPools_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAllPools' +type Store_ListAllPools_Call struct { + *mock.Call +} + +// ListAllPools is a helper method to define mock.On call +// 
- ctx context.Context +func (_e *Store_Expecter) ListAllPools(ctx interface{}) *Store_ListAllPools_Call { + return &Store_ListAllPools_Call{Call: _e.mock.On("ListAllPools", ctx)} +} + +func (_c *Store_ListAllPools_Call) Run(run func(ctx context.Context)) *Store_ListAllPools_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Store_ListAllPools_Call) Return(_a0 []params.Pool, _a1 error) *Store_ListAllPools_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListAllPools_Call) RunAndReturn(run func(context.Context) ([]params.Pool, error)) *Store_ListAllPools_Call { + _c.Call.Return(run) + return _c +} + // ListAllScaleSets provides a mock function with given fields: ctx func (_m *Store) ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error) { ret := _m.Called(ctx) @@ -1524,6 +3269,34 @@ func (_m *Store) ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error return r0, r1 } +// Store_ListAllScaleSets_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListAllScaleSets' +type Store_ListAllScaleSets_Call struct { + *mock.Call +} + +// ListAllScaleSets is a helper method to define mock.On call +// - ctx context.Context +func (_e *Store_Expecter) ListAllScaleSets(ctx interface{}) *Store_ListAllScaleSets_Call { + return &Store_ListAllScaleSets_Call{Call: _e.mock.On("ListAllScaleSets", ctx)} +} + +func (_c *Store_ListAllScaleSets_Call) Run(run func(ctx context.Context)) *Store_ListAllScaleSets_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Store_ListAllScaleSets_Call) Return(_a0 []params.ScaleSet, _a1 error) *Store_ListAllScaleSets_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListAllScaleSets_Call) RunAndReturn(run func(context.Context) ([]params.ScaleSet, error)) *Store_ListAllScaleSets_Call { + _c.Call.Return(run) + return _c +} + // ListEnterprises provides a mock function with given fields: ctx, filter func (_m *Store) ListEnterprises(ctx context.Context, filter params.EnterpriseFilter) ([]params.Enterprise, error) { ret := _m.Called(ctx, filter) @@ -1554,6 +3327,35 @@ func (_m *Store) ListEnterprises(ctx context.Context, filter params.EnterpriseFi return r0, r1 } +// Store_ListEnterprises_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEnterprises' +type Store_ListEnterprises_Call struct { + *mock.Call +} + +// ListEnterprises is a helper method to define mock.On call +// - ctx context.Context +// - filter params.EnterpriseFilter +func (_e *Store_Expecter) ListEnterprises(ctx interface{}, filter interface{}) *Store_ListEnterprises_Call { + return &Store_ListEnterprises_Call{Call: _e.mock.On("ListEnterprises", ctx, filter)} +} + +func (_c *Store_ListEnterprises_Call) Run(run func(ctx context.Context, filter params.EnterpriseFilter)) *Store_ListEnterprises_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.EnterpriseFilter)) + }) + return _c +} + +func (_c *Store_ListEnterprises_Call) Return(_a0 []params.Enterprise, _a1 error) *Store_ListEnterprises_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListEnterprises_Call) RunAndReturn(run func(context.Context, params.EnterpriseFilter) ([]params.Enterprise, error)) *Store_ListEnterprises_Call { + _c.Call.Return(run) + return _c +} + // ListEntityInstances provides a mock function with given fields: ctx, entity 
func (_m *Store) ListEntityInstances(ctx context.Context, entity params.ForgeEntity) ([]params.Instance, error) { ret := _m.Called(ctx, entity) @@ -1584,6 +3386,35 @@ func (_m *Store) ListEntityInstances(ctx context.Context, entity params.ForgeEnt return r0, r1 } +// Store_ListEntityInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityInstances' +type Store_ListEntityInstances_Call struct { + *mock.Call +} + +// ListEntityInstances is a helper method to define mock.On call +// - ctx context.Context +// - entity params.ForgeEntity +func (_e *Store_Expecter) ListEntityInstances(ctx interface{}, entity interface{}) *Store_ListEntityInstances_Call { + return &Store_ListEntityInstances_Call{Call: _e.mock.On("ListEntityInstances", ctx, entity)} +} + +func (_c *Store_ListEntityInstances_Call) Run(run func(ctx context.Context, entity params.ForgeEntity)) *Store_ListEntityInstances_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntity)) + }) + return _c +} + +func (_c *Store_ListEntityInstances_Call) Return(_a0 []params.Instance, _a1 error) *Store_ListEntityInstances_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListEntityInstances_Call) RunAndReturn(run func(context.Context, params.ForgeEntity) ([]params.Instance, error)) *Store_ListEntityInstances_Call { + _c.Call.Return(run) + return _c +} + // ListEntityJobsByStatus provides a mock function with given fields: ctx, entityType, entityID, status func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.ForgeEntityType, entityID string, status params.JobStatus) ([]params.Job, error) { ret := _m.Called(ctx, entityType, entityID, status) @@ -1614,6 +3445,37 @@ func (_m *Store) ListEntityJobsByStatus(ctx context.Context, entityType params.F return r0, r1 } +// Store_ListEntityJobsByStatus_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityJobsByStatus' +type Store_ListEntityJobsByStatus_Call struct { + *mock.Call +} + +// ListEntityJobsByStatus is a helper method to define mock.On call +// - ctx context.Context +// - entityType params.ForgeEntityType +// - entityID string +// - status params.JobStatus +func (_e *Store_Expecter) ListEntityJobsByStatus(ctx interface{}, entityType interface{}, entityID interface{}, status interface{}) *Store_ListEntityJobsByStatus_Call { + return &Store_ListEntityJobsByStatus_Call{Call: _e.mock.On("ListEntityJobsByStatus", ctx, entityType, entityID, status)} +} + +func (_c *Store_ListEntityJobsByStatus_Call) Run(run func(ctx context.Context, entityType params.ForgeEntityType, entityID string, status params.JobStatus)) *Store_ListEntityJobsByStatus_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntityType), args[2].(string), args[3].(params.JobStatus)) + }) + return _c +} + +func (_c *Store_ListEntityJobsByStatus_Call) Return(_a0 []params.Job, _a1 error) *Store_ListEntityJobsByStatus_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListEntityJobsByStatus_Call) RunAndReturn(run func(context.Context, params.ForgeEntityType, string, params.JobStatus) ([]params.Job, error)) *Store_ListEntityJobsByStatus_Call { + _c.Call.Return(run) + return _c +} + // ListEntityPools provides a mock function with given fields: ctx, entity func (_m *Store) ListEntityPools(ctx context.Context, entity params.ForgeEntity) ([]params.Pool, error) { ret := _m.Called(ctx, 
entity) @@ -1644,6 +3506,35 @@ func (_m *Store) ListEntityPools(ctx context.Context, entity params.ForgeEntity) return r0, r1 } +// Store_ListEntityPools_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityPools' +type Store_ListEntityPools_Call struct { + *mock.Call +} + +// ListEntityPools is a helper method to define mock.On call +// - ctx context.Context +// - entity params.ForgeEntity +func (_e *Store_Expecter) ListEntityPools(ctx interface{}, entity interface{}) *Store_ListEntityPools_Call { + return &Store_ListEntityPools_Call{Call: _e.mock.On("ListEntityPools", ctx, entity)} +} + +func (_c *Store_ListEntityPools_Call) Run(run func(ctx context.Context, entity params.ForgeEntity)) *Store_ListEntityPools_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntity)) + }) + return _c +} + +func (_c *Store_ListEntityPools_Call) Return(_a0 []params.Pool, _a1 error) *Store_ListEntityPools_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListEntityPools_Call) RunAndReturn(run func(context.Context, params.ForgeEntity) ([]params.Pool, error)) *Store_ListEntityPools_Call { + _c.Call.Return(run) + return _c +} + // ListEntityScaleSets provides a mock function with given fields: _a0, entity func (_m *Store) ListEntityScaleSets(_a0 context.Context, entity params.ForgeEntity) ([]params.ScaleSet, error) { ret := _m.Called(_a0, entity) @@ -1674,6 +3565,35 @@ func (_m *Store) ListEntityScaleSets(_a0 context.Context, entity params.ForgeEnt return r0, r1 } +// Store_ListEntityScaleSets_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityScaleSets' +type Store_ListEntityScaleSets_Call struct { + *mock.Call +} + +// ListEntityScaleSets is a helper method to define mock.On call +// - _a0 context.Context +// - entity params.ForgeEntity +func (_e *Store_Expecter) ListEntityScaleSets(_a0 interface{}, entity interface{}) *Store_ListEntityScaleSets_Call { + return &Store_ListEntityScaleSets_Call{Call: _e.mock.On("ListEntityScaleSets", _a0, entity)} +} + +func (_c *Store_ListEntityScaleSets_Call) Run(run func(_a0 context.Context, entity params.ForgeEntity)) *Store_ListEntityScaleSets_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntity)) + }) + return _c +} + +func (_c *Store_ListEntityScaleSets_Call) Return(_a0 []params.ScaleSet, _a1 error) *Store_ListEntityScaleSets_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListEntityScaleSets_Call) RunAndReturn(run func(context.Context, params.ForgeEntity) ([]params.ScaleSet, error)) *Store_ListEntityScaleSets_Call { + _c.Call.Return(run) + return _c +} + // ListGiteaCredentials provides a mock function with given fields: ctx func (_m *Store) ListGiteaCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { ret := _m.Called(ctx) @@ -1704,6 +3624,34 @@ func (_m *Store) ListGiteaCredentials(ctx context.Context) ([]params.ForgeCreden return r0, r1 } +// Store_ListGiteaCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListGiteaCredentials' +type Store_ListGiteaCredentials_Call struct { + *mock.Call +} + +// ListGiteaCredentials is a helper method to define mock.On call +// - ctx context.Context +func (_e *Store_Expecter) ListGiteaCredentials(ctx interface{}) *Store_ListGiteaCredentials_Call { + return &Store_ListGiteaCredentials_Call{Call: 
_e.mock.On("ListGiteaCredentials", ctx)} +} + +func (_c *Store_ListGiteaCredentials_Call) Run(run func(ctx context.Context)) *Store_ListGiteaCredentials_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Store_ListGiteaCredentials_Call) Return(_a0 []params.ForgeCredentials, _a1 error) *Store_ListGiteaCredentials_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListGiteaCredentials_Call) RunAndReturn(run func(context.Context) ([]params.ForgeCredentials, error)) *Store_ListGiteaCredentials_Call { + _c.Call.Return(run) + return _c +} + // ListGiteaEndpoints provides a mock function with given fields: _a0 func (_m *Store) ListGiteaEndpoints(_a0 context.Context) ([]params.ForgeEndpoint, error) { ret := _m.Called(_a0) @@ -1734,6 +3682,34 @@ func (_m *Store) ListGiteaEndpoints(_a0 context.Context) ([]params.ForgeEndpoint return r0, r1 } +// Store_ListGiteaEndpoints_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListGiteaEndpoints' +type Store_ListGiteaEndpoints_Call struct { + *mock.Call +} + +// ListGiteaEndpoints is a helper method to define mock.On call +// - _a0 context.Context +func (_e *Store_Expecter) ListGiteaEndpoints(_a0 interface{}) *Store_ListGiteaEndpoints_Call { + return &Store_ListGiteaEndpoints_Call{Call: _e.mock.On("ListGiteaEndpoints", _a0)} +} + +func (_c *Store_ListGiteaEndpoints_Call) Run(run func(_a0 context.Context)) *Store_ListGiteaEndpoints_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Store_ListGiteaEndpoints_Call) Return(_a0 []params.ForgeEndpoint, _a1 error) *Store_ListGiteaEndpoints_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListGiteaEndpoints_Call) RunAndReturn(run func(context.Context) ([]params.ForgeEndpoint, error)) *Store_ListGiteaEndpoints_Call { + _c.Call.Return(run) + return _c +} + // ListGithubCredentials provides a mock function with given fields: ctx func (_m *Store) ListGithubCredentials(ctx context.Context) ([]params.ForgeCredentials, error) { ret := _m.Called(ctx) @@ -1764,6 +3740,34 @@ func (_m *Store) ListGithubCredentials(ctx context.Context) ([]params.ForgeCrede return r0, r1 } +// Store_ListGithubCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListGithubCredentials' +type Store_ListGithubCredentials_Call struct { + *mock.Call +} + +// ListGithubCredentials is a helper method to define mock.On call +// - ctx context.Context +func (_e *Store_Expecter) ListGithubCredentials(ctx interface{}) *Store_ListGithubCredentials_Call { + return &Store_ListGithubCredentials_Call{Call: _e.mock.On("ListGithubCredentials", ctx)} +} + +func (_c *Store_ListGithubCredentials_Call) Run(run func(ctx context.Context)) *Store_ListGithubCredentials_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Store_ListGithubCredentials_Call) Return(_a0 []params.ForgeCredentials, _a1 error) *Store_ListGithubCredentials_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListGithubCredentials_Call) RunAndReturn(run func(context.Context) ([]params.ForgeCredentials, error)) *Store_ListGithubCredentials_Call { + _c.Call.Return(run) + return _c +} + // ListGithubEndpoints provides a mock function with given fields: ctx func (_m *Store) ListGithubEndpoints(ctx context.Context) ([]params.ForgeEndpoint, error) { ret := _m.Called(ctx) @@ 
-1794,6 +3798,34 @@ func (_m *Store) ListGithubEndpoints(ctx context.Context) ([]params.ForgeEndpoin return r0, r1 } +// Store_ListGithubEndpoints_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListGithubEndpoints' +type Store_ListGithubEndpoints_Call struct { + *mock.Call +} + +// ListGithubEndpoints is a helper method to define mock.On call +// - ctx context.Context +func (_e *Store_Expecter) ListGithubEndpoints(ctx interface{}) *Store_ListGithubEndpoints_Call { + return &Store_ListGithubEndpoints_Call{Call: _e.mock.On("ListGithubEndpoints", ctx)} +} + +func (_c *Store_ListGithubEndpoints_Call) Run(run func(ctx context.Context)) *Store_ListGithubEndpoints_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Store_ListGithubEndpoints_Call) Return(_a0 []params.ForgeEndpoint, _a1 error) *Store_ListGithubEndpoints_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListGithubEndpoints_Call) RunAndReturn(run func(context.Context) ([]params.ForgeEndpoint, error)) *Store_ListGithubEndpoints_Call { + _c.Call.Return(run) + return _c +} + // ListJobsByStatus provides a mock function with given fields: ctx, status func (_m *Store) ListJobsByStatus(ctx context.Context, status params.JobStatus) ([]params.Job, error) { ret := _m.Called(ctx, status) @@ -1824,6 +3856,35 @@ func (_m *Store) ListJobsByStatus(ctx context.Context, status params.JobStatus) return r0, r1 } +// Store_ListJobsByStatus_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListJobsByStatus' +type Store_ListJobsByStatus_Call struct { + *mock.Call +} + +// ListJobsByStatus is a helper method to define mock.On call +// - ctx context.Context +// - status params.JobStatus +func (_e *Store_Expecter) ListJobsByStatus(ctx interface{}, status interface{}) *Store_ListJobsByStatus_Call { + return &Store_ListJobsByStatus_Call{Call: _e.mock.On("ListJobsByStatus", ctx, status)} +} + +func (_c *Store_ListJobsByStatus_Call) Run(run func(ctx context.Context, status params.JobStatus)) *Store_ListJobsByStatus_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.JobStatus)) + }) + return _c +} + +func (_c *Store_ListJobsByStatus_Call) Return(_a0 []params.Job, _a1 error) *Store_ListJobsByStatus_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListJobsByStatus_Call) RunAndReturn(run func(context.Context, params.JobStatus) ([]params.Job, error)) *Store_ListJobsByStatus_Call { + _c.Call.Return(run) + return _c +} + // ListOrganizations provides a mock function with given fields: ctx, filter func (_m *Store) ListOrganizations(ctx context.Context, filter params.OrganizationFilter) ([]params.Organization, error) { ret := _m.Called(ctx, filter) @@ -1854,6 +3915,35 @@ func (_m *Store) ListOrganizations(ctx context.Context, filter params.Organizati return r0, r1 } +// Store_ListOrganizations_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListOrganizations' +type Store_ListOrganizations_Call struct { + *mock.Call +} + +// ListOrganizations is a helper method to define mock.On call +// - ctx context.Context +// - filter params.OrganizationFilter +func (_e *Store_Expecter) ListOrganizations(ctx interface{}, filter interface{}) *Store_ListOrganizations_Call { + return &Store_ListOrganizations_Call{Call: _e.mock.On("ListOrganizations", ctx, filter)} +} + +func (_c *Store_ListOrganizations_Call) 
Run(run func(ctx context.Context, filter params.OrganizationFilter)) *Store_ListOrganizations_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.OrganizationFilter)) + }) + return _c +} + +func (_c *Store_ListOrganizations_Call) Return(_a0 []params.Organization, _a1 error) *Store_ListOrganizations_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListOrganizations_Call) RunAndReturn(run func(context.Context, params.OrganizationFilter) ([]params.Organization, error)) *Store_ListOrganizations_Call { + _c.Call.Return(run) + return _c +} + // ListPoolInstances provides a mock function with given fields: ctx, poolID func (_m *Store) ListPoolInstances(ctx context.Context, poolID string) ([]params.Instance, error) { ret := _m.Called(ctx, poolID) @@ -1884,6 +3974,35 @@ func (_m *Store) ListPoolInstances(ctx context.Context, poolID string) ([]params return r0, r1 } +// Store_ListPoolInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListPoolInstances' +type Store_ListPoolInstances_Call struct { + *mock.Call +} + +// ListPoolInstances is a helper method to define mock.On call +// - ctx context.Context +// - poolID string +func (_e *Store_Expecter) ListPoolInstances(ctx interface{}, poolID interface{}) *Store_ListPoolInstances_Call { + return &Store_ListPoolInstances_Call{Call: _e.mock.On("ListPoolInstances", ctx, poolID)} +} + +func (_c *Store_ListPoolInstances_Call) Run(run func(ctx context.Context, poolID string)) *Store_ListPoolInstances_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_ListPoolInstances_Call) Return(_a0 []params.Instance, _a1 error) *Store_ListPoolInstances_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListPoolInstances_Call) RunAndReturn(run func(context.Context, string) ([]params.Instance, error)) *Store_ListPoolInstances_Call { + _c.Call.Return(run) + return _c +} + // ListRepositories provides a mock function with given fields: ctx, filter func (_m *Store) ListRepositories(ctx context.Context, filter params.RepositoryFilter) ([]params.Repository, error) { ret := _m.Called(ctx, filter) @@ -1914,6 +4033,35 @@ func (_m *Store) ListRepositories(ctx context.Context, filter params.RepositoryF return r0, r1 } +// Store_ListRepositories_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListRepositories' +type Store_ListRepositories_Call struct { + *mock.Call +} + +// ListRepositories is a helper method to define mock.On call +// - ctx context.Context +// - filter params.RepositoryFilter +func (_e *Store_Expecter) ListRepositories(ctx interface{}, filter interface{}) *Store_ListRepositories_Call { + return &Store_ListRepositories_Call{Call: _e.mock.On("ListRepositories", ctx, filter)} +} + +func (_c *Store_ListRepositories_Call) Run(run func(ctx context.Context, filter params.RepositoryFilter)) *Store_ListRepositories_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.RepositoryFilter)) + }) + return _c +} + +func (_c *Store_ListRepositories_Call) Return(_a0 []params.Repository, _a1 error) *Store_ListRepositories_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListRepositories_Call) RunAndReturn(run func(context.Context, params.RepositoryFilter) ([]params.Repository, error)) *Store_ListRepositories_Call { + _c.Call.Return(run) + return _c +} + // 
ListScaleSetInstances provides a mock function with given fields: _a0, scalesetID func (_m *Store) ListScaleSetInstances(_a0 context.Context, scalesetID uint) ([]params.Instance, error) { ret := _m.Called(_a0, scalesetID) @@ -1944,6 +4092,35 @@ func (_m *Store) ListScaleSetInstances(_a0 context.Context, scalesetID uint) ([] return r0, r1 } +// Store_ListScaleSetInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListScaleSetInstances' +type Store_ListScaleSetInstances_Call struct { + *mock.Call +} + +// ListScaleSetInstances is a helper method to define mock.On call +// - _a0 context.Context +// - scalesetID uint +func (_e *Store_Expecter) ListScaleSetInstances(_a0 interface{}, scalesetID interface{}) *Store_ListScaleSetInstances_Call { + return &Store_ListScaleSetInstances_Call{Call: _e.mock.On("ListScaleSetInstances", _a0, scalesetID)} +} + +func (_c *Store_ListScaleSetInstances_Call) Run(run func(_a0 context.Context, scalesetID uint)) *Store_ListScaleSetInstances_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint)) + }) + return _c +} + +func (_c *Store_ListScaleSetInstances_Call) Return(_a0 []params.Instance, _a1 error) *Store_ListScaleSetInstances_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_ListScaleSetInstances_Call) RunAndReturn(run func(context.Context, uint) ([]params.Instance, error)) *Store_ListScaleSetInstances_Call { + _c.Call.Return(run) + return _c +} + // LockJob provides a mock function with given fields: ctx, jobID, entityID func (_m *Store) LockJob(ctx context.Context, jobID int64, entityID string) error { ret := _m.Called(ctx, jobID, entityID) @@ -1962,6 +4139,36 @@ func (_m *Store) LockJob(ctx context.Context, jobID int64, entityID string) erro return r0 } +// Store_LockJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'LockJob' +type Store_LockJob_Call struct { + *mock.Call +} + +// LockJob is a helper method to define mock.On call +// - ctx context.Context +// - jobID int64 +// - entityID string +func (_e *Store_Expecter) LockJob(ctx interface{}, jobID interface{}, entityID interface{}) *Store_LockJob_Call { + return &Store_LockJob_Call{Call: _e.mock.On("LockJob", ctx, jobID, entityID)} +} + +func (_c *Store_LockJob_Call) Run(run func(ctx context.Context, jobID int64, entityID string)) *Store_LockJob_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64), args[2].(string)) + }) + return _c +} + +func (_c *Store_LockJob_Call) Return(_a0 error) *Store_LockJob_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_LockJob_Call) RunAndReturn(run func(context.Context, int64, string) error) *Store_LockJob_Call { + _c.Call.Return(run) + return _c +} + // PoolInstanceCount provides a mock function with given fields: ctx, poolID func (_m *Store) PoolInstanceCount(ctx context.Context, poolID string) (int64, error) { ret := _m.Called(ctx, poolID) @@ -1990,6 +4197,35 @@ func (_m *Store) PoolInstanceCount(ctx context.Context, poolID string) (int64, e return r0, r1 } +// Store_PoolInstanceCount_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PoolInstanceCount' +type Store_PoolInstanceCount_Call struct { + *mock.Call +} + +// PoolInstanceCount is a helper method to define mock.On call +// - ctx context.Context +// - poolID string +func (_e *Store_Expecter) PoolInstanceCount(ctx interface{}, poolID interface{}) 
*Store_PoolInstanceCount_Call { + return &Store_PoolInstanceCount_Call{Call: _e.mock.On("PoolInstanceCount", ctx, poolID)} +} + +func (_c *Store_PoolInstanceCount_Call) Run(run func(ctx context.Context, poolID string)) *Store_PoolInstanceCount_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *Store_PoolInstanceCount_Call) Return(_a0 int64, _a1 error) *Store_PoolInstanceCount_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_PoolInstanceCount_Call) RunAndReturn(run func(context.Context, string) (int64, error)) *Store_PoolInstanceCount_Call { + _c.Call.Return(run) + return _c +} + // SetScaleSetDesiredRunnerCount provides a mock function with given fields: ctx, scaleSetID, desiredRunnerCount func (_m *Store) SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID uint, desiredRunnerCount int) error { ret := _m.Called(ctx, scaleSetID, desiredRunnerCount) @@ -2008,6 +4244,36 @@ func (_m *Store) SetScaleSetDesiredRunnerCount(ctx context.Context, scaleSetID u return r0 } +// Store_SetScaleSetDesiredRunnerCount_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetScaleSetDesiredRunnerCount' +type Store_SetScaleSetDesiredRunnerCount_Call struct { + *mock.Call +} + +// SetScaleSetDesiredRunnerCount is a helper method to define mock.On call +// - ctx context.Context +// - scaleSetID uint +// - desiredRunnerCount int +func (_e *Store_Expecter) SetScaleSetDesiredRunnerCount(ctx interface{}, scaleSetID interface{}, desiredRunnerCount interface{}) *Store_SetScaleSetDesiredRunnerCount_Call { + return &Store_SetScaleSetDesiredRunnerCount_Call{Call: _e.mock.On("SetScaleSetDesiredRunnerCount", ctx, scaleSetID, desiredRunnerCount)} +} + +func (_c *Store_SetScaleSetDesiredRunnerCount_Call) Run(run func(ctx context.Context, scaleSetID uint, desiredRunnerCount int)) *Store_SetScaleSetDesiredRunnerCount_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint), args[2].(int)) + }) + return _c +} + +func (_c *Store_SetScaleSetDesiredRunnerCount_Call) Return(_a0 error) *Store_SetScaleSetDesiredRunnerCount_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_SetScaleSetDesiredRunnerCount_Call) RunAndReturn(run func(context.Context, uint, int) error) *Store_SetScaleSetDesiredRunnerCount_Call { + _c.Call.Return(run) + return _c +} + // SetScaleSetLastMessageID provides a mock function with given fields: ctx, scaleSetID, lastMessageID func (_m *Store) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, lastMessageID int64) error { ret := _m.Called(ctx, scaleSetID, lastMessageID) @@ -2026,6 +4292,36 @@ func (_m *Store) SetScaleSetLastMessageID(ctx context.Context, scaleSetID uint, return r0 } +// Store_SetScaleSetLastMessageID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetScaleSetLastMessageID' +type Store_SetScaleSetLastMessageID_Call struct { + *mock.Call +} + +// SetScaleSetLastMessageID is a helper method to define mock.On call +// - ctx context.Context +// - scaleSetID uint +// - lastMessageID int64 +func (_e *Store_Expecter) SetScaleSetLastMessageID(ctx interface{}, scaleSetID interface{}, lastMessageID interface{}) *Store_SetScaleSetLastMessageID_Call { + return &Store_SetScaleSetLastMessageID_Call{Call: _e.mock.On("SetScaleSetLastMessageID", ctx, scaleSetID, lastMessageID)} +} + +func (_c *Store_SetScaleSetLastMessageID_Call) Run(run func(ctx 
context.Context, scaleSetID uint, lastMessageID int64)) *Store_SetScaleSetLastMessageID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint), args[2].(int64)) + }) + return _c +} + +func (_c *Store_SetScaleSetLastMessageID_Call) Return(_a0 error) *Store_SetScaleSetLastMessageID_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_SetScaleSetLastMessageID_Call) RunAndReturn(run func(context.Context, uint, int64) error) *Store_SetScaleSetLastMessageID_Call { + _c.Call.Return(run) + return _c +} + // UnlockJob provides a mock function with given fields: ctx, jobID, entityID func (_m *Store) UnlockJob(ctx context.Context, jobID int64, entityID string) error { ret := _m.Called(ctx, jobID, entityID) @@ -2044,6 +4340,36 @@ func (_m *Store) UnlockJob(ctx context.Context, jobID int64, entityID string) er return r0 } +// Store_UnlockJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UnlockJob' +type Store_UnlockJob_Call struct { + *mock.Call +} + +// UnlockJob is a helper method to define mock.On call +// - ctx context.Context +// - jobID int64 +// - entityID string +func (_e *Store_Expecter) UnlockJob(ctx interface{}, jobID interface{}, entityID interface{}) *Store_UnlockJob_Call { + return &Store_UnlockJob_Call{Call: _e.mock.On("UnlockJob", ctx, jobID, entityID)} +} + +func (_c *Store_UnlockJob_Call) Run(run func(ctx context.Context, jobID int64, entityID string)) *Store_UnlockJob_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64), args[2].(string)) + }) + return _c +} + +func (_c *Store_UnlockJob_Call) Return(_a0 error) *Store_UnlockJob_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Store_UnlockJob_Call) RunAndReturn(run func(context.Context, int64, string) error) *Store_UnlockJob_Call { + _c.Call.Return(run) + return _c +} + // UpdateController provides a mock function with given fields: info func (_m *Store) UpdateController(info params.UpdateControllerParams) (params.ControllerInfo, error) { ret := _m.Called(info) @@ -2072,6 +4398,34 @@ func (_m *Store) UpdateController(info params.UpdateControllerParams) (params.Co return r0, r1 } +// Store_UpdateController_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateController' +type Store_UpdateController_Call struct { + *mock.Call +} + +// UpdateController is a helper method to define mock.On call +// - info params.UpdateControllerParams +func (_e *Store_Expecter) UpdateController(info interface{}) *Store_UpdateController_Call { + return &Store_UpdateController_Call{Call: _e.mock.On("UpdateController", info)} +} + +func (_c *Store_UpdateController_Call) Run(run func(info params.UpdateControllerParams)) *Store_UpdateController_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(params.UpdateControllerParams)) + }) + return _c +} + +func (_c *Store_UpdateController_Call) Return(_a0 params.ControllerInfo, _a1 error) *Store_UpdateController_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_UpdateController_Call) RunAndReturn(run func(params.UpdateControllerParams) (params.ControllerInfo, error)) *Store_UpdateController_Call { + _c.Call.Return(run) + return _c +} + // UpdateEnterprise provides a mock function with given fields: ctx, enterpriseID, param func (_m *Store) UpdateEnterprise(ctx context.Context, enterpriseID string, param params.UpdateEntityParams) (params.Enterprise, error) { ret := _m.Called(ctx, enterpriseID, 
param) @@ -2100,6 +4454,36 @@ func (_m *Store) UpdateEnterprise(ctx context.Context, enterpriseID string, para return r0, r1 } +// Store_UpdateEnterprise_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateEnterprise' +type Store_UpdateEnterprise_Call struct { + *mock.Call +} + +// UpdateEnterprise is a helper method to define mock.On call +// - ctx context.Context +// - enterpriseID string +// - param params.UpdateEntityParams +func (_e *Store_Expecter) UpdateEnterprise(ctx interface{}, enterpriseID interface{}, param interface{}) *Store_UpdateEnterprise_Call { + return &Store_UpdateEnterprise_Call{Call: _e.mock.On("UpdateEnterprise", ctx, enterpriseID, param)} +} + +func (_c *Store_UpdateEnterprise_Call) Run(run func(ctx context.Context, enterpriseID string, param params.UpdateEntityParams)) *Store_UpdateEnterprise_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateEntityParams)) + }) + return _c +} + +func (_c *Store_UpdateEnterprise_Call) Return(_a0 params.Enterprise, _a1 error) *Store_UpdateEnterprise_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_UpdateEnterprise_Call) RunAndReturn(run func(context.Context, string, params.UpdateEntityParams) (params.Enterprise, error)) *Store_UpdateEnterprise_Call { + _c.Call.Return(run) + return _c +} + // UpdateEntityPool provides a mock function with given fields: ctx, entity, poolID, param func (_m *Store) UpdateEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string, param params.UpdatePoolParams) (params.Pool, error) { ret := _m.Called(ctx, entity, poolID, param) @@ -2128,6 +4512,37 @@ func (_m *Store) UpdateEntityPool(ctx context.Context, entity params.ForgeEntity return r0, r1 } +// Store_UpdateEntityPool_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateEntityPool' +type Store_UpdateEntityPool_Call struct { + *mock.Call +} + +// UpdateEntityPool is a helper method to define mock.On call +// - ctx context.Context +// - entity params.ForgeEntity +// - poolID string +// - param params.UpdatePoolParams +func (_e *Store_Expecter) UpdateEntityPool(ctx interface{}, entity interface{}, poolID interface{}, param interface{}) *Store_UpdateEntityPool_Call { + return &Store_UpdateEntityPool_Call{Call: _e.mock.On("UpdateEntityPool", ctx, entity, poolID, param)} +} + +func (_c *Store_UpdateEntityPool_Call) Run(run func(ctx context.Context, entity params.ForgeEntity, poolID string, param params.UpdatePoolParams)) *Store_UpdateEntityPool_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(string), args[3].(params.UpdatePoolParams)) + }) + return _c +} + +func (_c *Store_UpdateEntityPool_Call) Return(_a0 params.Pool, _a1 error) *Store_UpdateEntityPool_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_UpdateEntityPool_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, string, params.UpdatePoolParams) (params.Pool, error)) *Store_UpdateEntityPool_Call { + _c.Call.Return(run) + return _c +} + // UpdateEntityScaleSet provides a mock function with given fields: _a0, entity, scaleSetID, param, callback func (_m *Store) UpdateEntityScaleSet(_a0 context.Context, entity params.ForgeEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(params.ScaleSet, params.ScaleSet) error) (params.ScaleSet, error) { ret := _m.Called(_a0, entity, scaleSetID, param, 
callback) @@ -2156,6 +4571,38 @@ func (_m *Store) UpdateEntityScaleSet(_a0 context.Context, entity params.ForgeEn return r0, r1 } +// Store_UpdateEntityScaleSet_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateEntityScaleSet' +type Store_UpdateEntityScaleSet_Call struct { + *mock.Call +} + +// UpdateEntityScaleSet is a helper method to define mock.On call +// - _a0 context.Context +// - entity params.ForgeEntity +// - scaleSetID uint +// - param params.UpdateScaleSetParams +// - callback func(params.ScaleSet , params.ScaleSet) error +func (_e *Store_Expecter) UpdateEntityScaleSet(_a0 interface{}, entity interface{}, scaleSetID interface{}, param interface{}, callback interface{}) *Store_UpdateEntityScaleSet_Call { + return &Store_UpdateEntityScaleSet_Call{Call: _e.mock.On("UpdateEntityScaleSet", _a0, entity, scaleSetID, param, callback)} +} + +func (_c *Store_UpdateEntityScaleSet_Call) Run(run func(_a0 context.Context, entity params.ForgeEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(params.ScaleSet, params.ScaleSet) error)) *Store_UpdateEntityScaleSet_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.ForgeEntity), args[2].(uint), args[3].(params.UpdateScaleSetParams), args[4].(func(params.ScaleSet, params.ScaleSet) error)) + }) + return _c +} + +func (_c *Store_UpdateEntityScaleSet_Call) Return(updatedScaleSet params.ScaleSet, err error) *Store_UpdateEntityScaleSet_Call { + _c.Call.Return(updatedScaleSet, err) + return _c +} + +func (_c *Store_UpdateEntityScaleSet_Call) RunAndReturn(run func(context.Context, params.ForgeEntity, uint, params.UpdateScaleSetParams, func(params.ScaleSet, params.ScaleSet) error) (params.ScaleSet, error)) *Store_UpdateEntityScaleSet_Call { + _c.Call.Return(run) + return _c +} + // UpdateGiteaCredentials provides a mock function with given fields: ctx, id, param func (_m *Store) UpdateGiteaCredentials(ctx context.Context, id uint, param params.UpdateGiteaCredentialsParams) (params.ForgeCredentials, error) { ret := _m.Called(ctx, id, param) @@ -2184,6 +4631,36 @@ func (_m *Store) UpdateGiteaCredentials(ctx context.Context, id uint, param para return r0, r1 } +// Store_UpdateGiteaCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateGiteaCredentials' +type Store_UpdateGiteaCredentials_Call struct { + *mock.Call +} + +// UpdateGiteaCredentials is a helper method to define mock.On call +// - ctx context.Context +// - id uint +// - param params.UpdateGiteaCredentialsParams +func (_e *Store_Expecter) UpdateGiteaCredentials(ctx interface{}, id interface{}, param interface{}) *Store_UpdateGiteaCredentials_Call { + return &Store_UpdateGiteaCredentials_Call{Call: _e.mock.On("UpdateGiteaCredentials", ctx, id, param)} +} + +func (_c *Store_UpdateGiteaCredentials_Call) Run(run func(ctx context.Context, id uint, param params.UpdateGiteaCredentialsParams)) *Store_UpdateGiteaCredentials_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint), args[2].(params.UpdateGiteaCredentialsParams)) + }) + return _c +} + +func (_c *Store_UpdateGiteaCredentials_Call) Return(gtCreds params.ForgeCredentials, err error) *Store_UpdateGiteaCredentials_Call { + _c.Call.Return(gtCreds, err) + return _c +} + +func (_c *Store_UpdateGiteaCredentials_Call) RunAndReturn(run func(context.Context, uint, params.UpdateGiteaCredentialsParams) (params.ForgeCredentials, error)) 
*Store_UpdateGiteaCredentials_Call { + _c.Call.Return(run) + return _c +} + // UpdateGiteaEndpoint provides a mock function with given fields: _a0, name, param func (_m *Store) UpdateGiteaEndpoint(_a0 context.Context, name string, param params.UpdateGiteaEndpointParams) (params.ForgeEndpoint, error) { ret := _m.Called(_a0, name, param) @@ -2212,6 +4689,36 @@ func (_m *Store) UpdateGiteaEndpoint(_a0 context.Context, name string, param par return r0, r1 } +// Store_UpdateGiteaEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateGiteaEndpoint' +type Store_UpdateGiteaEndpoint_Call struct { + *mock.Call +} + +// UpdateGiteaEndpoint is a helper method to define mock.On call +// - _a0 context.Context +// - name string +// - param params.UpdateGiteaEndpointParams +func (_e *Store_Expecter) UpdateGiteaEndpoint(_a0 interface{}, name interface{}, param interface{}) *Store_UpdateGiteaEndpoint_Call { + return &Store_UpdateGiteaEndpoint_Call{Call: _e.mock.On("UpdateGiteaEndpoint", _a0, name, param)} +} + +func (_c *Store_UpdateGiteaEndpoint_Call) Run(run func(_a0 context.Context, name string, param params.UpdateGiteaEndpointParams)) *Store_UpdateGiteaEndpoint_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateGiteaEndpointParams)) + }) + return _c +} + +func (_c *Store_UpdateGiteaEndpoint_Call) Return(ghEndpoint params.ForgeEndpoint, err error) *Store_UpdateGiteaEndpoint_Call { + _c.Call.Return(ghEndpoint, err) + return _c +} + +func (_c *Store_UpdateGiteaEndpoint_Call) RunAndReturn(run func(context.Context, string, params.UpdateGiteaEndpointParams) (params.ForgeEndpoint, error)) *Store_UpdateGiteaEndpoint_Call { + _c.Call.Return(run) + return _c +} + // UpdateGithubCredentials provides a mock function with given fields: ctx, id, param func (_m *Store) UpdateGithubCredentials(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams) (params.ForgeCredentials, error) { ret := _m.Called(ctx, id, param) @@ -2240,6 +4747,36 @@ func (_m *Store) UpdateGithubCredentials(ctx context.Context, id uint, param par return r0, r1 } +// Store_UpdateGithubCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateGithubCredentials' +type Store_UpdateGithubCredentials_Call struct { + *mock.Call +} + +// UpdateGithubCredentials is a helper method to define mock.On call +// - ctx context.Context +// - id uint +// - param params.UpdateGithubCredentialsParams +func (_e *Store_Expecter) UpdateGithubCredentials(ctx interface{}, id interface{}, param interface{}) *Store_UpdateGithubCredentials_Call { + return &Store_UpdateGithubCredentials_Call{Call: _e.mock.On("UpdateGithubCredentials", ctx, id, param)} +} + +func (_c *Store_UpdateGithubCredentials_Call) Run(run func(ctx context.Context, id uint, param params.UpdateGithubCredentialsParams)) *Store_UpdateGithubCredentials_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint), args[2].(params.UpdateGithubCredentialsParams)) + }) + return _c +} + +func (_c *Store_UpdateGithubCredentials_Call) Return(_a0 params.ForgeCredentials, _a1 error) *Store_UpdateGithubCredentials_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_UpdateGithubCredentials_Call) RunAndReturn(run func(context.Context, uint, params.UpdateGithubCredentialsParams) (params.ForgeCredentials, error)) *Store_UpdateGithubCredentials_Call { + _c.Call.Return(run) + return 
_c +} + // UpdateGithubEndpoint provides a mock function with given fields: ctx, name, param func (_m *Store) UpdateGithubEndpoint(ctx context.Context, name string, param params.UpdateGithubEndpointParams) (params.ForgeEndpoint, error) { ret := _m.Called(ctx, name, param) @@ -2268,6 +4805,36 @@ func (_m *Store) UpdateGithubEndpoint(ctx context.Context, name string, param pa return r0, r1 } +// Store_UpdateGithubEndpoint_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateGithubEndpoint' +type Store_UpdateGithubEndpoint_Call struct { + *mock.Call +} + +// UpdateGithubEndpoint is a helper method to define mock.On call +// - ctx context.Context +// - name string +// - param params.UpdateGithubEndpointParams +func (_e *Store_Expecter) UpdateGithubEndpoint(ctx interface{}, name interface{}, param interface{}) *Store_UpdateGithubEndpoint_Call { + return &Store_UpdateGithubEndpoint_Call{Call: _e.mock.On("UpdateGithubEndpoint", ctx, name, param)} +} + +func (_c *Store_UpdateGithubEndpoint_Call) Run(run func(ctx context.Context, name string, param params.UpdateGithubEndpointParams)) *Store_UpdateGithubEndpoint_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateGithubEndpointParams)) + }) + return _c +} + +func (_c *Store_UpdateGithubEndpoint_Call) Return(_a0 params.ForgeEndpoint, _a1 error) *Store_UpdateGithubEndpoint_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_UpdateGithubEndpoint_Call) RunAndReturn(run func(context.Context, string, params.UpdateGithubEndpointParams) (params.ForgeEndpoint, error)) *Store_UpdateGithubEndpoint_Call { + _c.Call.Return(run) + return _c +} + // UpdateInstance provides a mock function with given fields: ctx, instanceName, param func (_m *Store) UpdateInstance(ctx context.Context, instanceName string, param params.UpdateInstanceParams) (params.Instance, error) { ret := _m.Called(ctx, instanceName, param) @@ -2296,6 +4863,36 @@ func (_m *Store) UpdateInstance(ctx context.Context, instanceName string, param return r0, r1 } +// Store_UpdateInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateInstance' +type Store_UpdateInstance_Call struct { + *mock.Call +} + +// UpdateInstance is a helper method to define mock.On call +// - ctx context.Context +// - instanceName string +// - param params.UpdateInstanceParams +func (_e *Store_Expecter) UpdateInstance(ctx interface{}, instanceName interface{}, param interface{}) *Store_UpdateInstance_Call { + return &Store_UpdateInstance_Call{Call: _e.mock.On("UpdateInstance", ctx, instanceName, param)} +} + +func (_c *Store_UpdateInstance_Call) Run(run func(ctx context.Context, instanceName string, param params.UpdateInstanceParams)) *Store_UpdateInstance_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateInstanceParams)) + }) + return _c +} + +func (_c *Store_UpdateInstance_Call) Return(_a0 params.Instance, _a1 error) *Store_UpdateInstance_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_UpdateInstance_Call) RunAndReturn(run func(context.Context, string, params.UpdateInstanceParams) (params.Instance, error)) *Store_UpdateInstance_Call { + _c.Call.Return(run) + return _c +} + // UpdateOrganization provides a mock function with given fields: ctx, orgID, param func (_m *Store) UpdateOrganization(ctx context.Context, orgID string, param params.UpdateEntityParams) 
(params.Organization, error) { ret := _m.Called(ctx, orgID, param) @@ -2324,6 +4921,36 @@ func (_m *Store) UpdateOrganization(ctx context.Context, orgID string, param par return r0, r1 } +// Store_UpdateOrganization_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateOrganization' +type Store_UpdateOrganization_Call struct { + *mock.Call +} + +// UpdateOrganization is a helper method to define mock.On call +// - ctx context.Context +// - orgID string +// - param params.UpdateEntityParams +func (_e *Store_Expecter) UpdateOrganization(ctx interface{}, orgID interface{}, param interface{}) *Store_UpdateOrganization_Call { + return &Store_UpdateOrganization_Call{Call: _e.mock.On("UpdateOrganization", ctx, orgID, param)} +} + +func (_c *Store_UpdateOrganization_Call) Run(run func(ctx context.Context, orgID string, param params.UpdateEntityParams)) *Store_UpdateOrganization_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateEntityParams)) + }) + return _c +} + +func (_c *Store_UpdateOrganization_Call) Return(_a0 params.Organization, _a1 error) *Store_UpdateOrganization_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_UpdateOrganization_Call) RunAndReturn(run func(context.Context, string, params.UpdateEntityParams) (params.Organization, error)) *Store_UpdateOrganization_Call { + _c.Call.Return(run) + return _c +} + // UpdateRepository provides a mock function with given fields: ctx, repoID, param func (_m *Store) UpdateRepository(ctx context.Context, repoID string, param params.UpdateEntityParams) (params.Repository, error) { ret := _m.Called(ctx, repoID, param) @@ -2352,6 +4979,36 @@ func (_m *Store) UpdateRepository(ctx context.Context, repoID string, param para return r0, r1 } +// Store_UpdateRepository_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateRepository' +type Store_UpdateRepository_Call struct { + *mock.Call +} + +// UpdateRepository is a helper method to define mock.On call +// - ctx context.Context +// - repoID string +// - param params.UpdateEntityParams +func (_e *Store_Expecter) UpdateRepository(ctx interface{}, repoID interface{}, param interface{}) *Store_UpdateRepository_Call { + return &Store_UpdateRepository_Call{Call: _e.mock.On("UpdateRepository", ctx, repoID, param)} +} + +func (_c *Store_UpdateRepository_Call) Run(run func(ctx context.Context, repoID string, param params.UpdateEntityParams)) *Store_UpdateRepository_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateEntityParams)) + }) + return _c +} + +func (_c *Store_UpdateRepository_Call) Return(_a0 params.Repository, _a1 error) *Store_UpdateRepository_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_UpdateRepository_Call) RunAndReturn(run func(context.Context, string, params.UpdateEntityParams) (params.Repository, error)) *Store_UpdateRepository_Call { + _c.Call.Return(run) + return _c +} + // UpdateUser provides a mock function with given fields: ctx, user, param func (_m *Store) UpdateUser(ctx context.Context, user string, param params.UpdateUserParams) (params.User, error) { ret := _m.Called(ctx, user, param) @@ -2380,6 +5037,36 @@ func (_m *Store) UpdateUser(ctx context.Context, user string, param params.Updat return r0, r1 } +// Store_UpdateUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 
'UpdateUser' +type Store_UpdateUser_Call struct { + *mock.Call +} + +// UpdateUser is a helper method to define mock.On call +// - ctx context.Context +// - user string +// - param params.UpdateUserParams +func (_e *Store_Expecter) UpdateUser(ctx interface{}, user interface{}, param interface{}) *Store_UpdateUser_Call { + return &Store_UpdateUser_Call{Call: _e.mock.On("UpdateUser", ctx, user, param)} +} + +func (_c *Store_UpdateUser_Call) Run(run func(ctx context.Context, user string, param params.UpdateUserParams)) *Store_UpdateUser_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.UpdateUserParams)) + }) + return _c +} + +func (_c *Store_UpdateUser_Call) Return(_a0 params.User, _a1 error) *Store_UpdateUser_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Store_UpdateUser_Call) RunAndReturn(run func(context.Context, string, params.UpdateUserParams) (params.User, error)) *Store_UpdateUser_Call { + _c.Call.Return(run) + return _c +} + // NewStore creates a new instance of Store. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewStore(t interface { diff --git a/database/common/store.go b/database/common/store.go index 8b3c4f7c..d768f159 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -169,7 +169,7 @@ type GiteaCredentialsStore interface { DeleteGiteaCredentials(ctx context.Context, id uint) (err error) } -//go:generate mockery --name=Store +//go:generate go run github.com/vektra/mockery/v2@latest type Store interface { RepoStore OrgStore diff --git a/database/sql/pools.go b/database/sql/pools.go index 350e1dc2..889cbc58 100644 --- a/database/sql/pools.go +++ b/database/sql/pools.go @@ -37,7 +37,7 @@ const ( func (s *sqlDatabase) ListAllPools(_ context.Context) ([]params.Pool, error) { var pools []Pool - q := s.conn.Model(&Pool{}). + q := s.conn. Preload("Tags"). Preload("Organization"). Preload("Organization.Endpoint"). @@ -46,7 +46,6 @@ func (s *sqlDatabase) ListAllPools(_ context.Context) ([]params.Pool, error) { Preload("Enterprise"). Preload("Enterprise.Endpoint"). Omit("extra_specs"). - Omit("status_messages"). 
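Reviewer note: the `//go:generate go run github.com/vektra/mockery/v2@latest` directive above regenerates the Store mock with mockery's expecter API, so tests can use typed `EXPECT()` helpers instead of string-based `.On("Method", ...)` calls. A minimal, hypothetical sketch of that style (the import paths, the pool ID and the `Instance` field used here are illustrative assumptions, not copied from the real test suite):

```go
package store_test

import (
	"context"
	"testing"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	// Assumed locations of the generated mock and params packages.
	"github.com/cloudbase/garm/database/common/mocks"
	"github.com/cloudbase/garm/params"
)

func TestListPoolInstancesExpecter(t *testing.T) {
	// NewStore registers a cleanup hook that asserts all expectations on test end.
	store := mocks.NewStore(t)

	// Typed expectation instead of store.On("ListPoolInstances", ...).
	store.EXPECT().
		ListPoolInstances(mock.Anything, "pool-1").
		Return([]params.Instance{{Name: "runner-1"}}, nil).
		Once()

	instances, err := store.ListPoolInstances(context.Background(), "pool-1")
	require.NoError(t, err)
	require.Len(t, instances, 1)
}
```

Argument matchers such as `mock.Anything` still work with the typed helpers, since each `Store_*_Call` simply wraps the underlying `*mock.Call`.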
Find(&pools) if q.Error != nil { return nil, errors.Wrap(q.Error, "fetching all pools") @@ -393,7 +392,7 @@ func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.ForgeEnt return nil } -func (s *sqlDatabase) UpdateEntityPool(_ context.Context, entity params.ForgeEntity, poolID string, param params.UpdatePoolParams) (updatedPool params.Pool, err error) { +func (s *sqlDatabase) UpdateEntityPool(ctx context.Context, entity params.ForgeEntity, poolID string, param params.UpdatePoolParams) (updatedPool params.Pool, err error) { defer func() { if err == nil { s.sendNotify(common.PoolEntityType, common.UpdateOperation, updatedPool) @@ -414,6 +413,11 @@ func (s *sqlDatabase) UpdateEntityPool(_ context.Context, entity params.ForgeEnt if err != nil { return params.Pool{}, err } + + updatedPool, err = s.GetPoolByID(ctx, poolID) + if err != nil { + return params.Pool{}, err + } return updatedPool, nil } diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go index 752c7948..4748ed66 100644 --- a/database/sql/scalesets.go +++ b/database/sql/scalesets.go @@ -193,7 +193,7 @@ func (s *sqlDatabase) ListEntityScaleSets(_ context.Context, entity params.Forge return ret, nil } -func (s *sqlDatabase) UpdateEntityScaleSet(_ context.Context, entity params.ForgeEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, newSet params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) { +func (s *sqlDatabase) UpdateEntityScaleSet(ctx context.Context, entity params.ForgeEntity, scaleSetID uint, param params.UpdateScaleSetParams, callback func(old, newSet params.ScaleSet) error) (updatedScaleSet params.ScaleSet, err error) { defer func() { if err == nil { s.sendNotify(common.ScaleSetEntityType, common.UpdateOperation, updatedScaleSet) @@ -225,6 +225,11 @@ func (s *sqlDatabase) UpdateEntityScaleSet(_ context.Context, entity params.Forg if err != nil { return params.ScaleSet{}, err } + + updatedScaleSet, err = s.GetScaleSetByID(ctx, scaleSetID) + if err != nil { + return params.ScaleSet{}, err + } return updatedScaleSet, nil } @@ -345,7 +350,17 @@ func (s *sqlDatabase) updateScaleSet(tx *gorm.DB, scaleSet ScaleSet, param param } func (s *sqlDatabase) GetScaleSetByID(_ context.Context, scaleSet uint) (params.ScaleSet, error) { - set, err := s.getScaleSetByID(s.conn, scaleSet, "Instances", "Enterprise", "Organization", "Repository") + set, err := s.getScaleSetByID( + s.conn, + scaleSet, + "Instances", + "Enterprise", + "Enterprise.Endpoint", + "Organization", + "Organization.Endpoint", + "Repository", + "Repository.Endpoint", + ) if err != nil { return params.ScaleSet{}, errors.Wrap(err, "fetching scale set by ID") } diff --git a/database/watcher/watcher_store_test.go b/database/watcher/watcher_store_test.go index e682270a..a71ed1cf 100644 --- a/database/watcher/watcher_store_test.go +++ b/database/watcher/watcher_store_test.go @@ -600,6 +600,11 @@ func (s *WatcherStoreTestSuite) TestScaleSetWatcher() { // We updated last message ID and desired runner count above. 
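Reviewer note: the `UpdateEntityPool` and `UpdateEntityScaleSet` hunks above re-read the record via `GetPoolByID`/`GetScaleSetByID` after writing it, so the deferred `sendNotify` publishes a fully loaded object (note the extra `Endpoint` preloads added to `GetScaleSetByID`). A rough, self-contained sketch of that re-fetch-before-notify pattern, using simplified stand-in types rather than the actual garm store code:

```go
// Sketch only: simplified stand-ins for the store types, not the real garm code.
package main

import "fmt"

type ScaleSet struct {
	ID       uint
	Name     string
	Endpoint string // stands in for the preloaded ForgeEndpoint association
}

type store struct {
	db map[uint]ScaleSet
}

func (s *store) getScaleSetByID(id uint) (ScaleSet, error) {
	set, ok := s.db[id]
	if !ok {
		return ScaleSet{}, fmt.Errorf("scale set %d not found", id)
	}
	return set, nil
}

func (s *store) sendNotify(op string, payload ScaleSet) {
	fmt.Printf("notify %s: %+v\n", op, payload)
}

// UpdateScaleSet applies the update, then re-fetches the row so the watcher
// notification carries the same fully loaded view a GetScaleSetByID caller sees.
func (s *store) UpdateScaleSet(id uint, name string) (updated ScaleSet, err error) {
	defer func() {
		if err == nil {
			s.sendNotify("update", updated)
		}
	}()

	set, err := s.getScaleSetByID(id)
	if err != nil {
		return ScaleSet{}, err
	}
	set.Name = name
	s.db[id] = set

	// Re-read instead of returning the partially populated value above.
	return s.getScaleSetByID(id)
}

func main() {
	s := &store{db: map[uint]ScaleSet{1: {ID: 1, Name: "old", Endpoint: "github.example"}}}
	if _, err := s.UpdateScaleSet(1, "new"); err != nil {
		fmt.Println("error:", err)
	}
}
```

The trade-off is one extra read per update, in exchange for watcher consumers always receiving the same object shape as direct `Get*` calls.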
updatedScaleSet.DesiredRunnerCount = 5 updatedScaleSet.LastMessageID = 99 + payloadFromEvent, ok := event.Payload.(params.ScaleSet) + s.Require().True(ok) + updatedScaleSet.UpdatedAt = payloadFromEvent.UpdatedAt + updatedScaleSet.CreatedAt = payloadFromEvent.CreatedAt + updatedScaleSet.Endpoint = params.ForgeEndpoint{} s.Require().Equal(common.ChangePayload{ EntityType: common.ScaleSetEntityType, Operation: common.DeleteOperation, diff --git a/doc/building_from_source.md b/doc/building_from_source.md index 9058820e..e5d2d0fd 100644 --- a/doc/building_from_source.md +++ b/doc/building_from_source.md @@ -6,12 +6,13 @@ First, clone the repository: ```bash git clone https://github.com/cloudbase/garm +cd garm ``` Then build garm: ```bash -make +make build ``` You should now have both `garm` and `garm-cli` available in the `./bin` folder. @@ -22,4 +23,65 @@ If you have docker/podman installed, you can also build a static binary against make build-static ``` -This command will also build for both AMD64 and ARM64. Resulting binaries will be in the `./bin` folder. \ No newline at end of file +This command will also build for both AMD64 and ARM64. Resulting binaries will be in the `./bin` folder. + +## Hacking + +If you're hacking on GARM and want to override the default version GARM injects, you can run the following command: + +```bash +VERSION=v1.0.0 make build +``` + +> [!IMPORTANT] +> This only works for `make build`. The `make build-static` command does not support version overrides. + +## The Web UI SPA + +GARM now ships with a single page application. The application is written in svelte and tailwind CSS. To rebuild it or hack on it, you will need a number of dependencies installed and placed in your `$PATH`. + +### Prerequisites + +- **Node.js 24+** and **npm** +- **Go 1.21+** (for building the GARM backend) +- **openapi-generator-cli** in your PATH (for API client generation) + +### Installing openapi-generator-cli + +**Option 1: NPM Global Install** +```bash +npm install -g @openapitools/openapi-generator-cli +``` + +**Option 2: Manual Install** +Download from [OpenAPI Generator releases](https://github.com/OpenAPITools/openapi-generator/releases) and add to your PATH. + +**Verify Installation:** + +```bash +openapi-generator-cli version +``` + + + +### Hacking on the Web UI + +If you need to change something in the `webapp/src` folder, make sure to rebuild the webapp before rebuilding GARM: + +```bash +make build-webui +make build +``` + +> [!IMPORTANT] +> The Web UI that GARM ships with has `go generate` stanzas that require `@openapitools/openapi-generator-cli` and `tailwindcss` to be installed. You will also have to make sure that if you change API models, the Web UI still works, as adding new fields or changing the json tags of old fields will change accessors in the client code. + +### Changing API models + +If you need to change the models in the `params/` package, you will also need to regenerate the client both for garm-cli and for the web application we ship with GARM. To do this, you can run: + +```bash +make generate +``` + +You will also need to make sure that the web app still works. diff --git a/doc/config.md b/doc/config.md index 8b4d3a05..3c67e1b4 100644 --- a/doc/config.md +++ b/doc/config.md @@ -473,6 +473,8 @@ The config options are fairly straight forward. certificate = "" # The path on disk to the corresponding private key for the certificate. 
key = "" + [apiserver.webui] + enable = true ``` The GARM API server has the option to enable TLS, but I suggest you use a reverse proxy and enable TLS termination in that reverse proxy. There is an `nginx` sample in this repository with TLS termination enabled. diff --git a/doc/quickstart.md b/doc/quickstart.md index 66afead3..889f799b 100644 --- a/doc/quickstart.md +++ b/doc/quickstart.md @@ -61,6 +61,9 @@ time_to_live = "8760h" bind = "0.0.0.0" port = 80 use_tls = false + [apiserver.webui] + # Set this to false if you want to disable the Web UI. + enable = true [database] backend = "sqlite3" diff --git a/go.mod b/go.mod index 36e42be2..da91a90d 100644 --- a/go.mod +++ b/go.mod @@ -50,7 +50,7 @@ require ( github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/analysis v0.23.0 // indirect - github.com/go-openapi/jsonpointer v0.21.1 // indirect + github.com/go-openapi/jsonpointer v0.21.2 // indirect github.com/go-openapi/jsonreference v0.21.0 // indirect github.com/go-openapi/loads v0.22.0 // indirect github.com/go-openapi/spec v0.21.0 // indirect @@ -68,7 +68,7 @@ require ( github.com/mailru/easyjson v0.9.0 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-runewidth v0.0.16 // indirect - github.com/mattn/go-sqlite3 v1.14.28 // indirect + github.com/mattn/go-sqlite3 v1.14.31 // indirect github.com/minio/sio v0.4.1 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect @@ -79,7 +79,7 @@ require ( github.com/prometheus/common v0.65.0 // indirect github.com/prometheus/procfs v0.16.1 // indirect github.com/rivo/uniseg v0.4.7 // indirect - github.com/spf13/pflag v1.0.6 // indirect + github.com/spf13/pflag v1.0.7 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 // indirect go.mongodb.org/mongo-driver v1.17.4 // indirect diff --git a/go.sum b/go.sum index 1eaeff3e..2008dff3 100644 --- a/go.sum +++ b/go.sum @@ -36,8 +36,8 @@ github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC0 github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo= github.com/go-openapi/errors v0.22.2 h1:rdxhzcBUazEcGccKqbY1Y7NS8FDcMyIRr0934jrYnZg= github.com/go-openapi/errors v0.22.2/go.mod h1:+n/5UdIqdVnLIJ6Q9Se8HNGUXYaY6CN8ImWzfi/Gzp0= -github.com/go-openapi/jsonpointer v0.21.1 h1:whnzv/pNXtK2FbX/W9yJfRmE2gsmkfahjMKB0fZvcic= -github.com/go-openapi/jsonpointer v0.21.1/go.mod h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk= +github.com/go-openapi/jsonpointer v0.21.2 h1:AqQaNADVwq/VnkCmQg6ogE+M3FOsKTytwges0JdwVuA= +github.com/go-openapi/jsonpointer v0.21.2/go.mod h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk= github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco= @@ -127,8 +127,8 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEum7A= 
-github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/mattn/go-sqlite3 v1.14.31 h1:ldt6ghyPJsokUIlksH63gWZkG6qVGeEAu4zLeS4aVZM= +github.com/mattn/go-sqlite3 v1.14.31/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/microsoft/go-mssqldb v1.7.2 h1:CHkFJiObW7ItKTJfHo1QX7QBBD1iV+mn1eOyRP3b/PA= github.com/microsoft/go-mssqldb v1.7.2/go.mod h1:kOvZKUdrhhFQmxLZqbwUV0rHkNkZpthMITIb2Ko1IoA= github.com/minio/sio v0.4.1 h1:EMe3YBC1nf+sRQia65Rutxi+Z554XPV0dt8BIBA+a/0= @@ -164,8 +164,9 @@ github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWN github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= -github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.7 h1:vN6T9TfwStFPFM5XzjsvmzZkLuaLX+HS+0SeFLRgU6M= +github.com/spf13/pflag v1.0.7/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= diff --git a/runner/common/mocks/GithubClient.go b/runner/common/mocks/GithubClient.go index f44d54cb..92d4aa06 100644 --- a/runner/common/mocks/GithubClient.go +++ b/runner/common/mocks/GithubClient.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.53.3. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. 
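Reviewer note: the regenerated `GithubClient` mock below exposes the same expecter pattern; `RunAndReturn` is useful when the stubbed result should depend on the arguments. A hedged test sketch, under the assumption that the generated mocks are importable from `runner/common/mocks` in the `github.com/cloudbase/garm` module:

```go
package pool_test

import (
	"context"
	"errors"
	"testing"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	// Assumed import path for the generated mocks in runner/common/mocks.
	"github.com/cloudbase/garm/runner/common/mocks"
)

func TestRemoveEntityRunnerExpecter(t *testing.T) {
	ghCli := mocks.NewGithubClient(t)

	// RunAndReturn lets the stub compute its result from the call arguments.
	ghCli.EXPECT().
		RemoveEntityRunner(mock.Anything, mock.AnythingOfType("int64")).
		RunAndReturn(func(_ context.Context, runnerID int64) error {
			if runnerID <= 0 {
				return errors.New("invalid runner ID")
			}
			return nil
		})

	require.NoError(t, ghCli.RemoveEntityRunner(context.Background(), 42))
	require.Error(t, ghCli.RemoveEntityRunner(context.Background(), -1))
}
```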
package mocks @@ -18,6 +18,14 @@ type GithubClient struct { mock.Mock } +type GithubClient_Expecter struct { + mock *mock.Mock +} + +func (_m *GithubClient) EXPECT() *GithubClient_Expecter { + return &GithubClient_Expecter{mock: &_m.Mock} +} + // CreateEntityHook provides a mock function with given fields: ctx, hook func (_m *GithubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) (*github.Hook, error) { ret := _m.Called(ctx, hook) @@ -48,6 +56,35 @@ func (_m *GithubClient) CreateEntityHook(ctx context.Context, hook *github.Hook) return r0, r1 } +// GithubClient_CreateEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEntityHook' +type GithubClient_CreateEntityHook_Call struct { + *mock.Call +} + +// CreateEntityHook is a helper method to define mock.On call +// - ctx context.Context +// - hook *github.Hook +func (_e *GithubClient_Expecter) CreateEntityHook(ctx interface{}, hook interface{}) *GithubClient_CreateEntityHook_Call { + return &GithubClient_CreateEntityHook_Call{Call: _e.mock.On("CreateEntityHook", ctx, hook)} +} + +func (_c *GithubClient_CreateEntityHook_Call) Run(run func(ctx context.Context, hook *github.Hook)) *GithubClient_CreateEntityHook_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*github.Hook)) + }) + return _c +} + +func (_c *GithubClient_CreateEntityHook_Call) Return(ret *github.Hook, err error) *GithubClient_CreateEntityHook_Call { + _c.Call.Return(ret, err) + return _c +} + +func (_c *GithubClient_CreateEntityHook_Call) RunAndReturn(run func(context.Context, *github.Hook) (*github.Hook, error)) *GithubClient_CreateEntityHook_Call { + _c.Call.Return(run) + return _c +} + // CreateEntityRegistrationToken provides a mock function with given fields: ctx func (_m *GithubClient) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) { ret := _m.Called(ctx) @@ -87,6 +124,34 @@ func (_m *GithubClient) CreateEntityRegistrationToken(ctx context.Context) (*git return r0, r1, r2 } +// GithubClient_CreateEntityRegistrationToken_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEntityRegistrationToken' +type GithubClient_CreateEntityRegistrationToken_Call struct { + *mock.Call +} + +// CreateEntityRegistrationToken is a helper method to define mock.On call +// - ctx context.Context +func (_e *GithubClient_Expecter) CreateEntityRegistrationToken(ctx interface{}) *GithubClient_CreateEntityRegistrationToken_Call { + return &GithubClient_CreateEntityRegistrationToken_Call{Call: _e.mock.On("CreateEntityRegistrationToken", ctx)} +} + +func (_c *GithubClient_CreateEntityRegistrationToken_Call) Run(run func(ctx context.Context)) *GithubClient_CreateEntityRegistrationToken_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *GithubClient_CreateEntityRegistrationToken_Call) Return(_a0 *github.RegistrationToken, _a1 *github.Response, _a2 error) *GithubClient_CreateEntityRegistrationToken_Call { + _c.Call.Return(_a0, _a1, _a2) + return _c +} + +func (_c *GithubClient_CreateEntityRegistrationToken_Call) RunAndReturn(run func(context.Context) (*github.RegistrationToken, *github.Response, error)) *GithubClient_CreateEntityRegistrationToken_Call { + _c.Call.Return(run) + return _c +} + // DeleteEntityHook provides a mock function with given fields: ctx, id func (_m *GithubClient) DeleteEntityHook(ctx context.Context, id 
int64) (*github.Response, error) { ret := _m.Called(ctx, id) @@ -117,6 +182,35 @@ func (_m *GithubClient) DeleteEntityHook(ctx context.Context, id int64) (*github return r0, r1 } +// GithubClient_DeleteEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteEntityHook' +type GithubClient_DeleteEntityHook_Call struct { + *mock.Call +} + +// DeleteEntityHook is a helper method to define mock.On call +// - ctx context.Context +// - id int64 +func (_e *GithubClient_Expecter) DeleteEntityHook(ctx interface{}, id interface{}) *GithubClient_DeleteEntityHook_Call { + return &GithubClient_DeleteEntityHook_Call{Call: _e.mock.On("DeleteEntityHook", ctx, id)} +} + +func (_c *GithubClient_DeleteEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubClient_DeleteEntityHook_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *GithubClient_DeleteEntityHook_Call) Return(ret *github.Response, err error) *GithubClient_DeleteEntityHook_Call { + _c.Call.Return(ret, err) + return _c +} + +func (_c *GithubClient_DeleteEntityHook_Call) RunAndReturn(run func(context.Context, int64) (*github.Response, error)) *GithubClient_DeleteEntityHook_Call { + _c.Call.Return(run) + return _c +} + // GetEntity provides a mock function with no fields func (_m *GithubClient) GetEntity() params.ForgeEntity { ret := _m.Called() @@ -135,6 +229,33 @@ func (_m *GithubClient) GetEntity() params.ForgeEntity { return r0 } +// GithubClient_GetEntity_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntity' +type GithubClient_GetEntity_Call struct { + *mock.Call +} + +// GetEntity is a helper method to define mock.On call +func (_e *GithubClient_Expecter) GetEntity() *GithubClient_GetEntity_Call { + return &GithubClient_GetEntity_Call{Call: _e.mock.On("GetEntity")} +} + +func (_c *GithubClient_GetEntity_Call) Run(run func()) *GithubClient_GetEntity_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GithubClient_GetEntity_Call) Return(_a0 params.ForgeEntity) *GithubClient_GetEntity_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GithubClient_GetEntity_Call) RunAndReturn(run func() params.ForgeEntity) *GithubClient_GetEntity_Call { + _c.Call.Return(run) + return _c +} + // GetEntityHook provides a mock function with given fields: ctx, id func (_m *GithubClient) GetEntityHook(ctx context.Context, id int64) (*github.Hook, error) { ret := _m.Called(ctx, id) @@ -165,6 +286,35 @@ func (_m *GithubClient) GetEntityHook(ctx context.Context, id int64) (*github.Ho return r0, r1 } +// GithubClient_GetEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityHook' +type GithubClient_GetEntityHook_Call struct { + *mock.Call +} + +// GetEntityHook is a helper method to define mock.On call +// - ctx context.Context +// - id int64 +func (_e *GithubClient_Expecter) GetEntityHook(ctx interface{}, id interface{}) *GithubClient_GetEntityHook_Call { + return &GithubClient_GetEntityHook_Call{Call: _e.mock.On("GetEntityHook", ctx, id)} +} + +func (_c *GithubClient_GetEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubClient_GetEntityHook_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *GithubClient_GetEntityHook_Call) Return(ret *github.Hook, err error) 
*GithubClient_GetEntityHook_Call { + _c.Call.Return(ret, err) + return _c +} + +func (_c *GithubClient_GetEntityHook_Call) RunAndReturn(run func(context.Context, int64) (*github.Hook, error)) *GithubClient_GetEntityHook_Call { + _c.Call.Return(run) + return _c +} + // GetEntityJITConfig provides a mock function with given fields: ctx, instance, pool, labels func (_m *GithubClient) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (map[string]string, *github.Runner, error) { ret := _m.Called(ctx, instance, pool, labels) @@ -204,6 +354,37 @@ func (_m *GithubClient) GetEntityJITConfig(ctx context.Context, instance string, return r0, r1, r2 } +// GithubClient_GetEntityJITConfig_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityJITConfig' +type GithubClient_GetEntityJITConfig_Call struct { + *mock.Call +} + +// GetEntityJITConfig is a helper method to define mock.On call +// - ctx context.Context +// - instance string +// - pool params.Pool +// - labels []string +func (_e *GithubClient_Expecter) GetEntityJITConfig(ctx interface{}, instance interface{}, pool interface{}, labels interface{}) *GithubClient_GetEntityJITConfig_Call { + return &GithubClient_GetEntityJITConfig_Call{Call: _e.mock.On("GetEntityJITConfig", ctx, instance, pool, labels)} +} + +func (_c *GithubClient_GetEntityJITConfig_Call) Run(run func(ctx context.Context, instance string, pool params.Pool, labels []string)) *GithubClient_GetEntityJITConfig_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.Pool), args[3].([]string)) + }) + return _c +} + +func (_c *GithubClient_GetEntityJITConfig_Call) Return(jitConfigMap map[string]string, runner *github.Runner, err error) *GithubClient_GetEntityJITConfig_Call { + _c.Call.Return(jitConfigMap, runner, err) + return _c +} + +func (_c *GithubClient_GetEntityJITConfig_Call) RunAndReturn(run func(context.Context, string, params.Pool, []string) (map[string]string, *github.Runner, error)) *GithubClient_GetEntityJITConfig_Call { + _c.Call.Return(run) + return _c +} + // GetWorkflowJobByID provides a mock function with given fields: ctx, owner, repo, jobID func (_m *GithubClient) GetWorkflowJobByID(ctx context.Context, owner string, repo string, jobID int64) (*github.WorkflowJob, *github.Response, error) { ret := _m.Called(ctx, owner, repo, jobID) @@ -243,6 +424,37 @@ func (_m *GithubClient) GetWorkflowJobByID(ctx context.Context, owner string, re return r0, r1, r2 } +// GithubClient_GetWorkflowJobByID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetWorkflowJobByID' +type GithubClient_GetWorkflowJobByID_Call struct { + *mock.Call +} + +// GetWorkflowJobByID is a helper method to define mock.On call +// - ctx context.Context +// - owner string +// - repo string +// - jobID int64 +func (_e *GithubClient_Expecter) GetWorkflowJobByID(ctx interface{}, owner interface{}, repo interface{}, jobID interface{}) *GithubClient_GetWorkflowJobByID_Call { + return &GithubClient_GetWorkflowJobByID_Call{Call: _e.mock.On("GetWorkflowJobByID", ctx, owner, repo, jobID)} +} + +func (_c *GithubClient_GetWorkflowJobByID_Call) Run(run func(ctx context.Context, owner string, repo string, jobID int64)) *GithubClient_GetWorkflowJobByID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string), args[3].(int64)) + }) + return _c +} + +func (_c 
*GithubClient_GetWorkflowJobByID_Call) Return(_a0 *github.WorkflowJob, _a1 *github.Response, _a2 error) *GithubClient_GetWorkflowJobByID_Call { + _c.Call.Return(_a0, _a1, _a2) + return _c +} + +func (_c *GithubClient_GetWorkflowJobByID_Call) RunAndReturn(run func(context.Context, string, string, int64) (*github.WorkflowJob, *github.Response, error)) *GithubClient_GetWorkflowJobByID_Call { + _c.Call.Return(run) + return _c +} + // GithubBaseURL provides a mock function with no fields func (_m *GithubClient) GithubBaseURL() *url.URL { ret := _m.Called() @@ -263,6 +475,33 @@ func (_m *GithubClient) GithubBaseURL() *url.URL { return r0 } +// GithubClient_GithubBaseURL_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GithubBaseURL' +type GithubClient_GithubBaseURL_Call struct { + *mock.Call +} + +// GithubBaseURL is a helper method to define mock.On call +func (_e *GithubClient_Expecter) GithubBaseURL() *GithubClient_GithubBaseURL_Call { + return &GithubClient_GithubBaseURL_Call{Call: _e.mock.On("GithubBaseURL")} +} + +func (_c *GithubClient_GithubBaseURL_Call) Run(run func()) *GithubClient_GithubBaseURL_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GithubClient_GithubBaseURL_Call) Return(_a0 *url.URL) *GithubClient_GithubBaseURL_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GithubClient_GithubBaseURL_Call) RunAndReturn(run func() *url.URL) *GithubClient_GithubBaseURL_Call { + _c.Call.Return(run) + return _c +} + // ListEntityHooks provides a mock function with given fields: ctx, opts func (_m *GithubClient) ListEntityHooks(ctx context.Context, opts *github.ListOptions) ([]*github.Hook, *github.Response, error) { ret := _m.Called(ctx, opts) @@ -302,6 +541,35 @@ func (_m *GithubClient) ListEntityHooks(ctx context.Context, opts *github.ListOp return r0, r1, r2 } +// GithubClient_ListEntityHooks_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityHooks' +type GithubClient_ListEntityHooks_Call struct { + *mock.Call +} + +// ListEntityHooks is a helper method to define mock.On call +// - ctx context.Context +// - opts *github.ListOptions +func (_e *GithubClient_Expecter) ListEntityHooks(ctx interface{}, opts interface{}) *GithubClient_ListEntityHooks_Call { + return &GithubClient_ListEntityHooks_Call{Call: _e.mock.On("ListEntityHooks", ctx, opts)} +} + +func (_c *GithubClient_ListEntityHooks_Call) Run(run func(ctx context.Context, opts *github.ListOptions)) *GithubClient_ListEntityHooks_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*github.ListOptions)) + }) + return _c +} + +func (_c *GithubClient_ListEntityHooks_Call) Return(ret []*github.Hook, response *github.Response, err error) *GithubClient_ListEntityHooks_Call { + _c.Call.Return(ret, response, err) + return _c +} + +func (_c *GithubClient_ListEntityHooks_Call) RunAndReturn(run func(context.Context, *github.ListOptions) ([]*github.Hook, *github.Response, error)) *GithubClient_ListEntityHooks_Call { + _c.Call.Return(run) + return _c +} + // ListEntityRunnerApplicationDownloads provides a mock function with given fields: ctx func (_m *GithubClient) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) { ret := _m.Called(ctx) @@ -341,6 +609,34 @@ func (_m *GithubClient) ListEntityRunnerApplicationDownloads(ctx context.Context return r0, r1, r2 } +// 
GithubClient_ListEntityRunnerApplicationDownloads_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityRunnerApplicationDownloads' +type GithubClient_ListEntityRunnerApplicationDownloads_Call struct { + *mock.Call +} + +// ListEntityRunnerApplicationDownloads is a helper method to define mock.On call +// - ctx context.Context +func (_e *GithubClient_Expecter) ListEntityRunnerApplicationDownloads(ctx interface{}) *GithubClient_ListEntityRunnerApplicationDownloads_Call { + return &GithubClient_ListEntityRunnerApplicationDownloads_Call{Call: _e.mock.On("ListEntityRunnerApplicationDownloads", ctx)} +} + +func (_c *GithubClient_ListEntityRunnerApplicationDownloads_Call) Run(run func(ctx context.Context)) *GithubClient_ListEntityRunnerApplicationDownloads_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *GithubClient_ListEntityRunnerApplicationDownloads_Call) Return(_a0 []*github.RunnerApplicationDownload, _a1 *github.Response, _a2 error) *GithubClient_ListEntityRunnerApplicationDownloads_Call { + _c.Call.Return(_a0, _a1, _a2) + return _c +} + +func (_c *GithubClient_ListEntityRunnerApplicationDownloads_Call) RunAndReturn(run func(context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error)) *GithubClient_ListEntityRunnerApplicationDownloads_Call { + _c.Call.Return(run) + return _c +} + // ListEntityRunners provides a mock function with given fields: ctx, opts func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { ret := _m.Called(ctx, opts) @@ -380,6 +676,35 @@ func (_m *GithubClient) ListEntityRunners(ctx context.Context, opts *github.List return r0, r1, r2 } +// GithubClient_ListEntityRunners_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityRunners' +type GithubClient_ListEntityRunners_Call struct { + *mock.Call +} + +// ListEntityRunners is a helper method to define mock.On call +// - ctx context.Context +// - opts *github.ListRunnersOptions +func (_e *GithubClient_Expecter) ListEntityRunners(ctx interface{}, opts interface{}) *GithubClient_ListEntityRunners_Call { + return &GithubClient_ListEntityRunners_Call{Call: _e.mock.On("ListEntityRunners", ctx, opts)} +} + +func (_c *GithubClient_ListEntityRunners_Call) Run(run func(ctx context.Context, opts *github.ListRunnersOptions)) *GithubClient_ListEntityRunners_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*github.ListRunnersOptions)) + }) + return _c +} + +func (_c *GithubClient_ListEntityRunners_Call) Return(_a0 *github.Runners, _a1 *github.Response, _a2 error) *GithubClient_ListEntityRunners_Call { + _c.Call.Return(_a0, _a1, _a2) + return _c +} + +func (_c *GithubClient_ListEntityRunners_Call) RunAndReturn(run func(context.Context, *github.ListRunnersOptions) (*github.Runners, *github.Response, error)) *GithubClient_ListEntityRunners_Call { + _c.Call.Return(run) + return _c +} + // PingEntityHook provides a mock function with given fields: ctx, id func (_m *GithubClient) PingEntityHook(ctx context.Context, id int64) (*github.Response, error) { ret := _m.Called(ctx, id) @@ -410,6 +735,35 @@ func (_m *GithubClient) PingEntityHook(ctx context.Context, id int64) (*github.R return r0, r1 } +// GithubClient_PingEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PingEntityHook' +type 
GithubClient_PingEntityHook_Call struct { + *mock.Call +} + +// PingEntityHook is a helper method to define mock.On call +// - ctx context.Context +// - id int64 +func (_e *GithubClient_Expecter) PingEntityHook(ctx interface{}, id interface{}) *GithubClient_PingEntityHook_Call { + return &GithubClient_PingEntityHook_Call{Call: _e.mock.On("PingEntityHook", ctx, id)} +} + +func (_c *GithubClient_PingEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubClient_PingEntityHook_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *GithubClient_PingEntityHook_Call) Return(ret *github.Response, err error) *GithubClient_PingEntityHook_Call { + _c.Call.Return(ret, err) + return _c +} + +func (_c *GithubClient_PingEntityHook_Call) RunAndReturn(run func(context.Context, int64) (*github.Response, error)) *GithubClient_PingEntityHook_Call { + _c.Call.Return(run) + return _c +} + // RateLimit provides a mock function with given fields: ctx func (_m *GithubClient) RateLimit(ctx context.Context) (*github.RateLimits, error) { ret := _m.Called(ctx) @@ -440,6 +794,34 @@ func (_m *GithubClient) RateLimit(ctx context.Context) (*github.RateLimits, erro return r0, r1 } +// GithubClient_RateLimit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RateLimit' +type GithubClient_RateLimit_Call struct { + *mock.Call +} + +// RateLimit is a helper method to define mock.On call +// - ctx context.Context +func (_e *GithubClient_Expecter) RateLimit(ctx interface{}) *GithubClient_RateLimit_Call { + return &GithubClient_RateLimit_Call{Call: _e.mock.On("RateLimit", ctx)} +} + +func (_c *GithubClient_RateLimit_Call) Run(run func(ctx context.Context)) *GithubClient_RateLimit_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *GithubClient_RateLimit_Call) Return(_a0 *github.RateLimits, _a1 error) *GithubClient_RateLimit_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *GithubClient_RateLimit_Call) RunAndReturn(run func(context.Context) (*github.RateLimits, error)) *GithubClient_RateLimit_Call { + _c.Call.Return(run) + return _c +} + // RemoveEntityRunner provides a mock function with given fields: ctx, runnerID func (_m *GithubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) error { ret := _m.Called(ctx, runnerID) @@ -458,6 +840,35 @@ func (_m *GithubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) return r0 } +// GithubClient_RemoveEntityRunner_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RemoveEntityRunner' +type GithubClient_RemoveEntityRunner_Call struct { + *mock.Call +} + +// RemoveEntityRunner is a helper method to define mock.On call +// - ctx context.Context +// - runnerID int64 +func (_e *GithubClient_Expecter) RemoveEntityRunner(ctx interface{}, runnerID interface{}) *GithubClient_RemoveEntityRunner_Call { + return &GithubClient_RemoveEntityRunner_Call{Call: _e.mock.On("RemoveEntityRunner", ctx, runnerID)} +} + +func (_c *GithubClient_RemoveEntityRunner_Call) Run(run func(ctx context.Context, runnerID int64)) *GithubClient_RemoveEntityRunner_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *GithubClient_RemoveEntityRunner_Call) Return(_a0 error) *GithubClient_RemoveEntityRunner_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c 
*GithubClient_RemoveEntityRunner_Call) RunAndReturn(run func(context.Context, int64) error) *GithubClient_RemoveEntityRunner_Call { + _c.Call.Return(run) + return _c +} + // NewGithubClient creates a new instance of GithubClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewGithubClient(t interface { diff --git a/runner/common/mocks/GithubEntityOperations.go b/runner/common/mocks/GithubEntityOperations.go index 15326795..2448df4c 100644 --- a/runner/common/mocks/GithubEntityOperations.go +++ b/runner/common/mocks/GithubEntityOperations.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.53.3. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. package mocks @@ -18,6 +18,14 @@ type GithubEntityOperations struct { mock.Mock } +type GithubEntityOperations_Expecter struct { + mock *mock.Mock +} + +func (_m *GithubEntityOperations) EXPECT() *GithubEntityOperations_Expecter { + return &GithubEntityOperations_Expecter{mock: &_m.Mock} +} + // CreateEntityHook provides a mock function with given fields: ctx, hook func (_m *GithubEntityOperations) CreateEntityHook(ctx context.Context, hook *github.Hook) (*github.Hook, error) { ret := _m.Called(ctx, hook) @@ -48,6 +56,35 @@ func (_m *GithubEntityOperations) CreateEntityHook(ctx context.Context, hook *gi return r0, r1 } +// GithubEntityOperations_CreateEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEntityHook' +type GithubEntityOperations_CreateEntityHook_Call struct { + *mock.Call +} + +// CreateEntityHook is a helper method to define mock.On call +// - ctx context.Context +// - hook *github.Hook +func (_e *GithubEntityOperations_Expecter) CreateEntityHook(ctx interface{}, hook interface{}) *GithubEntityOperations_CreateEntityHook_Call { + return &GithubEntityOperations_CreateEntityHook_Call{Call: _e.mock.On("CreateEntityHook", ctx, hook)} +} + +func (_c *GithubEntityOperations_CreateEntityHook_Call) Run(run func(ctx context.Context, hook *github.Hook)) *GithubEntityOperations_CreateEntityHook_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*github.Hook)) + }) + return _c +} + +func (_c *GithubEntityOperations_CreateEntityHook_Call) Return(ret *github.Hook, err error) *GithubEntityOperations_CreateEntityHook_Call { + _c.Call.Return(ret, err) + return _c +} + +func (_c *GithubEntityOperations_CreateEntityHook_Call) RunAndReturn(run func(context.Context, *github.Hook) (*github.Hook, error)) *GithubEntityOperations_CreateEntityHook_Call { + _c.Call.Return(run) + return _c +} + // CreateEntityRegistrationToken provides a mock function with given fields: ctx func (_m *GithubEntityOperations) CreateEntityRegistrationToken(ctx context.Context) (*github.RegistrationToken, *github.Response, error) { ret := _m.Called(ctx) @@ -87,6 +124,34 @@ func (_m *GithubEntityOperations) CreateEntityRegistrationToken(ctx context.Cont return r0, r1, r2 } +// GithubEntityOperations_CreateEntityRegistrationToken_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEntityRegistrationToken' +type GithubEntityOperations_CreateEntityRegistrationToken_Call struct { + *mock.Call +} + +// CreateEntityRegistrationToken is a helper method to define mock.On call +// - ctx context.Context +func (_e *GithubEntityOperations_Expecter) CreateEntityRegistrationToken(ctx interface{}) 
*GithubEntityOperations_CreateEntityRegistrationToken_Call { + return &GithubEntityOperations_CreateEntityRegistrationToken_Call{Call: _e.mock.On("CreateEntityRegistrationToken", ctx)} +} + +func (_c *GithubEntityOperations_CreateEntityRegistrationToken_Call) Run(run func(ctx context.Context)) *GithubEntityOperations_CreateEntityRegistrationToken_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *GithubEntityOperations_CreateEntityRegistrationToken_Call) Return(_a0 *github.RegistrationToken, _a1 *github.Response, _a2 error) *GithubEntityOperations_CreateEntityRegistrationToken_Call { + _c.Call.Return(_a0, _a1, _a2) + return _c +} + +func (_c *GithubEntityOperations_CreateEntityRegistrationToken_Call) RunAndReturn(run func(context.Context) (*github.RegistrationToken, *github.Response, error)) *GithubEntityOperations_CreateEntityRegistrationToken_Call { + _c.Call.Return(run) + return _c +} + // DeleteEntityHook provides a mock function with given fields: ctx, id func (_m *GithubEntityOperations) DeleteEntityHook(ctx context.Context, id int64) (*github.Response, error) { ret := _m.Called(ctx, id) @@ -117,6 +182,35 @@ func (_m *GithubEntityOperations) DeleteEntityHook(ctx context.Context, id int64 return r0, r1 } +// GithubEntityOperations_DeleteEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteEntityHook' +type GithubEntityOperations_DeleteEntityHook_Call struct { + *mock.Call +} + +// DeleteEntityHook is a helper method to define mock.On call +// - ctx context.Context +// - id int64 +func (_e *GithubEntityOperations_Expecter) DeleteEntityHook(ctx interface{}, id interface{}) *GithubEntityOperations_DeleteEntityHook_Call { + return &GithubEntityOperations_DeleteEntityHook_Call{Call: _e.mock.On("DeleteEntityHook", ctx, id)} +} + +func (_c *GithubEntityOperations_DeleteEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubEntityOperations_DeleteEntityHook_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *GithubEntityOperations_DeleteEntityHook_Call) Return(ret *github.Response, err error) *GithubEntityOperations_DeleteEntityHook_Call { + _c.Call.Return(ret, err) + return _c +} + +func (_c *GithubEntityOperations_DeleteEntityHook_Call) RunAndReturn(run func(context.Context, int64) (*github.Response, error)) *GithubEntityOperations_DeleteEntityHook_Call { + _c.Call.Return(run) + return _c +} + // GetEntity provides a mock function with no fields func (_m *GithubEntityOperations) GetEntity() params.ForgeEntity { ret := _m.Called() @@ -135,6 +229,33 @@ func (_m *GithubEntityOperations) GetEntity() params.ForgeEntity { return r0 } +// GithubEntityOperations_GetEntity_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntity' +type GithubEntityOperations_GetEntity_Call struct { + *mock.Call +} + +// GetEntity is a helper method to define mock.On call +func (_e *GithubEntityOperations_Expecter) GetEntity() *GithubEntityOperations_GetEntity_Call { + return &GithubEntityOperations_GetEntity_Call{Call: _e.mock.On("GetEntity")} +} + +func (_c *GithubEntityOperations_GetEntity_Call) Run(run func()) *GithubEntityOperations_GetEntity_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GithubEntityOperations_GetEntity_Call) Return(_a0 params.ForgeEntity) *GithubEntityOperations_GetEntity_Call { + 
_c.Call.Return(_a0) + return _c +} + +func (_c *GithubEntityOperations_GetEntity_Call) RunAndReturn(run func() params.ForgeEntity) *GithubEntityOperations_GetEntity_Call { + _c.Call.Return(run) + return _c +} + // GetEntityHook provides a mock function with given fields: ctx, id func (_m *GithubEntityOperations) GetEntityHook(ctx context.Context, id int64) (*github.Hook, error) { ret := _m.Called(ctx, id) @@ -165,6 +286,35 @@ func (_m *GithubEntityOperations) GetEntityHook(ctx context.Context, id int64) ( return r0, r1 } +// GithubEntityOperations_GetEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityHook' +type GithubEntityOperations_GetEntityHook_Call struct { + *mock.Call +} + +// GetEntityHook is a helper method to define mock.On call +// - ctx context.Context +// - id int64 +func (_e *GithubEntityOperations_Expecter) GetEntityHook(ctx interface{}, id interface{}) *GithubEntityOperations_GetEntityHook_Call { + return &GithubEntityOperations_GetEntityHook_Call{Call: _e.mock.On("GetEntityHook", ctx, id)} +} + +func (_c *GithubEntityOperations_GetEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubEntityOperations_GetEntityHook_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *GithubEntityOperations_GetEntityHook_Call) Return(ret *github.Hook, err error) *GithubEntityOperations_GetEntityHook_Call { + _c.Call.Return(ret, err) + return _c +} + +func (_c *GithubEntityOperations_GetEntityHook_Call) RunAndReturn(run func(context.Context, int64) (*github.Hook, error)) *GithubEntityOperations_GetEntityHook_Call { + _c.Call.Return(run) + return _c +} + // GetEntityJITConfig provides a mock function with given fields: ctx, instance, pool, labels func (_m *GithubEntityOperations) GetEntityJITConfig(ctx context.Context, instance string, pool params.Pool, labels []string) (map[string]string, *github.Runner, error) { ret := _m.Called(ctx, instance, pool, labels) @@ -204,6 +354,37 @@ func (_m *GithubEntityOperations) GetEntityJITConfig(ctx context.Context, instan return r0, r1, r2 } +// GithubEntityOperations_GetEntityJITConfig_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntityJITConfig' +type GithubEntityOperations_GetEntityJITConfig_Call struct { + *mock.Call +} + +// GetEntityJITConfig is a helper method to define mock.On call +// - ctx context.Context +// - instance string +// - pool params.Pool +// - labels []string +func (_e *GithubEntityOperations_Expecter) GetEntityJITConfig(ctx interface{}, instance interface{}, pool interface{}, labels interface{}) *GithubEntityOperations_GetEntityJITConfig_Call { + return &GithubEntityOperations_GetEntityJITConfig_Call{Call: _e.mock.On("GetEntityJITConfig", ctx, instance, pool, labels)} +} + +func (_c *GithubEntityOperations_GetEntityJITConfig_Call) Run(run func(ctx context.Context, instance string, pool params.Pool, labels []string)) *GithubEntityOperations_GetEntityJITConfig_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(params.Pool), args[3].([]string)) + }) + return _c +} + +func (_c *GithubEntityOperations_GetEntityJITConfig_Call) Return(jitConfigMap map[string]string, runner *github.Runner, err error) *GithubEntityOperations_GetEntityJITConfig_Call { + _c.Call.Return(jitConfigMap, runner, err) + return _c +} + +func (_c *GithubEntityOperations_GetEntityJITConfig_Call) RunAndReturn(run 
func(context.Context, string, params.Pool, []string) (map[string]string, *github.Runner, error)) *GithubEntityOperations_GetEntityJITConfig_Call { + _c.Call.Return(run) + return _c +} + // GithubBaseURL provides a mock function with no fields func (_m *GithubEntityOperations) GithubBaseURL() *url.URL { ret := _m.Called() @@ -224,6 +405,33 @@ func (_m *GithubEntityOperations) GithubBaseURL() *url.URL { return r0 } +// GithubEntityOperations_GithubBaseURL_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GithubBaseURL' +type GithubEntityOperations_GithubBaseURL_Call struct { + *mock.Call +} + +// GithubBaseURL is a helper method to define mock.On call +func (_e *GithubEntityOperations_Expecter) GithubBaseURL() *GithubEntityOperations_GithubBaseURL_Call { + return &GithubEntityOperations_GithubBaseURL_Call{Call: _e.mock.On("GithubBaseURL")} +} + +func (_c *GithubEntityOperations_GithubBaseURL_Call) Run(run func()) *GithubEntityOperations_GithubBaseURL_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *GithubEntityOperations_GithubBaseURL_Call) Return(_a0 *url.URL) *GithubEntityOperations_GithubBaseURL_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GithubEntityOperations_GithubBaseURL_Call) RunAndReturn(run func() *url.URL) *GithubEntityOperations_GithubBaseURL_Call { + _c.Call.Return(run) + return _c +} + // ListEntityHooks provides a mock function with given fields: ctx, opts func (_m *GithubEntityOperations) ListEntityHooks(ctx context.Context, opts *github.ListOptions) ([]*github.Hook, *github.Response, error) { ret := _m.Called(ctx, opts) @@ -263,6 +471,35 @@ func (_m *GithubEntityOperations) ListEntityHooks(ctx context.Context, opts *git return r0, r1, r2 } +// GithubEntityOperations_ListEntityHooks_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityHooks' +type GithubEntityOperations_ListEntityHooks_Call struct { + *mock.Call +} + +// ListEntityHooks is a helper method to define mock.On call +// - ctx context.Context +// - opts *github.ListOptions +func (_e *GithubEntityOperations_Expecter) ListEntityHooks(ctx interface{}, opts interface{}) *GithubEntityOperations_ListEntityHooks_Call { + return &GithubEntityOperations_ListEntityHooks_Call{Call: _e.mock.On("ListEntityHooks", ctx, opts)} +} + +func (_c *GithubEntityOperations_ListEntityHooks_Call) Run(run func(ctx context.Context, opts *github.ListOptions)) *GithubEntityOperations_ListEntityHooks_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*github.ListOptions)) + }) + return _c +} + +func (_c *GithubEntityOperations_ListEntityHooks_Call) Return(ret []*github.Hook, response *github.Response, err error) *GithubEntityOperations_ListEntityHooks_Call { + _c.Call.Return(ret, response, err) + return _c +} + +func (_c *GithubEntityOperations_ListEntityHooks_Call) RunAndReturn(run func(context.Context, *github.ListOptions) ([]*github.Hook, *github.Response, error)) *GithubEntityOperations_ListEntityHooks_Call { + _c.Call.Return(run) + return _c +} + // ListEntityRunnerApplicationDownloads provides a mock function with given fields: ctx func (_m *GithubEntityOperations) ListEntityRunnerApplicationDownloads(ctx context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error) { ret := _m.Called(ctx) @@ -302,6 +539,34 @@ func (_m *GithubEntityOperations) ListEntityRunnerApplicationDownloads(ctx conte return r0, r1, r2 } +// 
GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityRunnerApplicationDownloads' +type GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call struct { + *mock.Call +} + +// ListEntityRunnerApplicationDownloads is a helper method to define mock.On call +// - ctx context.Context +func (_e *GithubEntityOperations_Expecter) ListEntityRunnerApplicationDownloads(ctx interface{}) *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call { + return &GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call{Call: _e.mock.On("ListEntityRunnerApplicationDownloads", ctx)} +} + +func (_c *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call) Run(run func(ctx context.Context)) *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call) Return(_a0 []*github.RunnerApplicationDownload, _a1 *github.Response, _a2 error) *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call { + _c.Call.Return(_a0, _a1, _a2) + return _c +} + +func (_c *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call) RunAndReturn(run func(context.Context) ([]*github.RunnerApplicationDownload, *github.Response, error)) *GithubEntityOperations_ListEntityRunnerApplicationDownloads_Call { + _c.Call.Return(run) + return _c +} + // ListEntityRunners provides a mock function with given fields: ctx, opts func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *github.ListRunnersOptions) (*github.Runners, *github.Response, error) { ret := _m.Called(ctx, opts) @@ -341,6 +606,35 @@ func (_m *GithubEntityOperations) ListEntityRunners(ctx context.Context, opts *g return r0, r1, r2 } +// GithubEntityOperations_ListEntityRunners_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListEntityRunners' +type GithubEntityOperations_ListEntityRunners_Call struct { + *mock.Call +} + +// ListEntityRunners is a helper method to define mock.On call +// - ctx context.Context +// - opts *github.ListRunnersOptions +func (_e *GithubEntityOperations_Expecter) ListEntityRunners(ctx interface{}, opts interface{}) *GithubEntityOperations_ListEntityRunners_Call { + return &GithubEntityOperations_ListEntityRunners_Call{Call: _e.mock.On("ListEntityRunners", ctx, opts)} +} + +func (_c *GithubEntityOperations_ListEntityRunners_Call) Run(run func(ctx context.Context, opts *github.ListRunnersOptions)) *GithubEntityOperations_ListEntityRunners_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*github.ListRunnersOptions)) + }) + return _c +} + +func (_c *GithubEntityOperations_ListEntityRunners_Call) Return(_a0 *github.Runners, _a1 *github.Response, _a2 error) *GithubEntityOperations_ListEntityRunners_Call { + _c.Call.Return(_a0, _a1, _a2) + return _c +} + +func (_c *GithubEntityOperations_ListEntityRunners_Call) RunAndReturn(run func(context.Context, *github.ListRunnersOptions) (*github.Runners, *github.Response, error)) *GithubEntityOperations_ListEntityRunners_Call { + _c.Call.Return(run) + return _c +} + // PingEntityHook provides a mock function with given fields: ctx, id func (_m *GithubEntityOperations) PingEntityHook(ctx context.Context, id int64) (*github.Response, error) { ret := _m.Called(ctx, id) @@ -371,6 +665,35 @@ func 
(_m *GithubEntityOperations) PingEntityHook(ctx context.Context, id int64) return r0, r1 } +// GithubEntityOperations_PingEntityHook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PingEntityHook' +type GithubEntityOperations_PingEntityHook_Call struct { + *mock.Call +} + +// PingEntityHook is a helper method to define mock.On call +// - ctx context.Context +// - id int64 +func (_e *GithubEntityOperations_Expecter) PingEntityHook(ctx interface{}, id interface{}) *GithubEntityOperations_PingEntityHook_Call { + return &GithubEntityOperations_PingEntityHook_Call{Call: _e.mock.On("PingEntityHook", ctx, id)} +} + +func (_c *GithubEntityOperations_PingEntityHook_Call) Run(run func(ctx context.Context, id int64)) *GithubEntityOperations_PingEntityHook_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *GithubEntityOperations_PingEntityHook_Call) Return(ret *github.Response, err error) *GithubEntityOperations_PingEntityHook_Call { + _c.Call.Return(ret, err) + return _c +} + +func (_c *GithubEntityOperations_PingEntityHook_Call) RunAndReturn(run func(context.Context, int64) (*github.Response, error)) *GithubEntityOperations_PingEntityHook_Call { + _c.Call.Return(run) + return _c +} + // RateLimit provides a mock function with given fields: ctx func (_m *GithubEntityOperations) RateLimit(ctx context.Context) (*github.RateLimits, error) { ret := _m.Called(ctx) @@ -401,6 +724,34 @@ func (_m *GithubEntityOperations) RateLimit(ctx context.Context) (*github.RateLi return r0, r1 } +// GithubEntityOperations_RateLimit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RateLimit' +type GithubEntityOperations_RateLimit_Call struct { + *mock.Call +} + +// RateLimit is a helper method to define mock.On call +// - ctx context.Context +func (_e *GithubEntityOperations_Expecter) RateLimit(ctx interface{}) *GithubEntityOperations_RateLimit_Call { + return &GithubEntityOperations_RateLimit_Call{Call: _e.mock.On("RateLimit", ctx)} +} + +func (_c *GithubEntityOperations_RateLimit_Call) Run(run func(ctx context.Context)) *GithubEntityOperations_RateLimit_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *GithubEntityOperations_RateLimit_Call) Return(_a0 *github.RateLimits, _a1 error) *GithubEntityOperations_RateLimit_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *GithubEntityOperations_RateLimit_Call) RunAndReturn(run func(context.Context) (*github.RateLimits, error)) *GithubEntityOperations_RateLimit_Call { + _c.Call.Return(run) + return _c +} + // RemoveEntityRunner provides a mock function with given fields: ctx, runnerID func (_m *GithubEntityOperations) RemoveEntityRunner(ctx context.Context, runnerID int64) error { ret := _m.Called(ctx, runnerID) @@ -419,6 +770,35 @@ func (_m *GithubEntityOperations) RemoveEntityRunner(ctx context.Context, runner return r0 } +// GithubEntityOperations_RemoveEntityRunner_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RemoveEntityRunner' +type GithubEntityOperations_RemoveEntityRunner_Call struct { + *mock.Call +} + +// RemoveEntityRunner is a helper method to define mock.On call +// - ctx context.Context +// - runnerID int64 +func (_e *GithubEntityOperations_Expecter) RemoveEntityRunner(ctx interface{}, runnerID interface{}) *GithubEntityOperations_RemoveEntityRunner_Call { + return 
&GithubEntityOperations_RemoveEntityRunner_Call{Call: _e.mock.On("RemoveEntityRunner", ctx, runnerID)} +} + +func (_c *GithubEntityOperations_RemoveEntityRunner_Call) Run(run func(ctx context.Context, runnerID int64)) *GithubEntityOperations_RemoveEntityRunner_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *GithubEntityOperations_RemoveEntityRunner_Call) Return(_a0 error) *GithubEntityOperations_RemoveEntityRunner_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *GithubEntityOperations_RemoveEntityRunner_Call) RunAndReturn(run func(context.Context, int64) error) *GithubEntityOperations_RemoveEntityRunner_Call { + _c.Call.Return(run) + return _c +} + // NewGithubEntityOperations creates a new instance of GithubEntityOperations. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewGithubEntityOperations(t interface { diff --git a/runner/common/mocks/PoolManager.go b/runner/common/mocks/PoolManager.go index 08cfb975..a1a62f4f 100644 --- a/runner/common/mocks/PoolManager.go +++ b/runner/common/mocks/PoolManager.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.53.3. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. package mocks @@ -14,6 +14,14 @@ type PoolManager struct { mock.Mock } +type PoolManager_Expecter struct { + mock *mock.Mock +} + +func (_m *PoolManager) EXPECT() *PoolManager_Expecter { + return &PoolManager_Expecter{mock: &_m.Mock} +} + // GetWebhookInfo provides a mock function with given fields: ctx func (_m *PoolManager) GetWebhookInfo(ctx context.Context) (params.HookInfo, error) { ret := _m.Called(ctx) @@ -42,6 +50,34 @@ func (_m *PoolManager) GetWebhookInfo(ctx context.Context) (params.HookInfo, err return r0, r1 } +// PoolManager_GetWebhookInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetWebhookInfo' +type PoolManager_GetWebhookInfo_Call struct { + *mock.Call +} + +// GetWebhookInfo is a helper method to define mock.On call +// - ctx context.Context +func (_e *PoolManager_Expecter) GetWebhookInfo(ctx interface{}) *PoolManager_GetWebhookInfo_Call { + return &PoolManager_GetWebhookInfo_Call{Call: _e.mock.On("GetWebhookInfo", ctx)} +} + +func (_c *PoolManager_GetWebhookInfo_Call) Run(run func(ctx context.Context)) *PoolManager_GetWebhookInfo_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *PoolManager_GetWebhookInfo_Call) Return(_a0 params.HookInfo, _a1 error) *PoolManager_GetWebhookInfo_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManager_GetWebhookInfo_Call) RunAndReturn(run func(context.Context) (params.HookInfo, error)) *PoolManager_GetWebhookInfo_Call { + _c.Call.Return(run) + return _c +} + // GithubRunnerRegistrationToken provides a mock function with no fields func (_m *PoolManager) GithubRunnerRegistrationToken() (string, error) { ret := _m.Called() @@ -70,6 +106,33 @@ func (_m *PoolManager) GithubRunnerRegistrationToken() (string, error) { return r0, r1 } +// PoolManager_GithubRunnerRegistrationToken_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GithubRunnerRegistrationToken' +type PoolManager_GithubRunnerRegistrationToken_Call struct { + *mock.Call +} + +// GithubRunnerRegistrationToken is a helper method to define mock.On call +func (_e *PoolManager_Expecter) 
GithubRunnerRegistrationToken() *PoolManager_GithubRunnerRegistrationToken_Call { + return &PoolManager_GithubRunnerRegistrationToken_Call{Call: _e.mock.On("GithubRunnerRegistrationToken")} +} + +func (_c *PoolManager_GithubRunnerRegistrationToken_Call) Run(run func()) *PoolManager_GithubRunnerRegistrationToken_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *PoolManager_GithubRunnerRegistrationToken_Call) Return(_a0 string, _a1 error) *PoolManager_GithubRunnerRegistrationToken_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManager_GithubRunnerRegistrationToken_Call) RunAndReturn(run func() (string, error)) *PoolManager_GithubRunnerRegistrationToken_Call { + _c.Call.Return(run) + return _c +} + // HandleWorkflowJob provides a mock function with given fields: job func (_m *PoolManager) HandleWorkflowJob(job params.WorkflowJob) error { ret := _m.Called(job) @@ -88,6 +151,34 @@ func (_m *PoolManager) HandleWorkflowJob(job params.WorkflowJob) error { return r0 } +// PoolManager_HandleWorkflowJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HandleWorkflowJob' +type PoolManager_HandleWorkflowJob_Call struct { + *mock.Call +} + +// HandleWorkflowJob is a helper method to define mock.On call +// - job params.WorkflowJob +func (_e *PoolManager_Expecter) HandleWorkflowJob(job interface{}) *PoolManager_HandleWorkflowJob_Call { + return &PoolManager_HandleWorkflowJob_Call{Call: _e.mock.On("HandleWorkflowJob", job)} +} + +func (_c *PoolManager_HandleWorkflowJob_Call) Run(run func(job params.WorkflowJob)) *PoolManager_HandleWorkflowJob_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(params.WorkflowJob)) + }) + return _c +} + +func (_c *PoolManager_HandleWorkflowJob_Call) Return(_a0 error) *PoolManager_HandleWorkflowJob_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PoolManager_HandleWorkflowJob_Call) RunAndReturn(run func(params.WorkflowJob) error) *PoolManager_HandleWorkflowJob_Call { + _c.Call.Return(run) + return _c +} + // ID provides a mock function with no fields func (_m *PoolManager) ID() string { ret := _m.Called() @@ -106,6 +197,33 @@ func (_m *PoolManager) ID() string { return r0 } +// PoolManager_ID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ID' +type PoolManager_ID_Call struct { + *mock.Call +} + +// ID is a helper method to define mock.On call +func (_e *PoolManager_Expecter) ID() *PoolManager_ID_Call { + return &PoolManager_ID_Call{Call: _e.mock.On("ID")} +} + +func (_c *PoolManager_ID_Call) Run(run func()) *PoolManager_ID_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *PoolManager_ID_Call) Return(_a0 string) *PoolManager_ID_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PoolManager_ID_Call) RunAndReturn(run func() string) *PoolManager_ID_Call { + _c.Call.Return(run) + return _c +} + // InstallWebhook provides a mock function with given fields: ctx, param func (_m *PoolManager) InstallWebhook(ctx context.Context, param params.InstallWebhookParams) (params.HookInfo, error) { ret := _m.Called(ctx, param) @@ -134,6 +252,35 @@ func (_m *PoolManager) InstallWebhook(ctx context.Context, param params.InstallW return r0, r1 } +// PoolManager_InstallWebhook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'InstallWebhook' +type PoolManager_InstallWebhook_Call struct { + *mock.Call +} + +// InstallWebhook is a helper method to 
define mock.On call +// - ctx context.Context +// - param params.InstallWebhookParams +func (_e *PoolManager_Expecter) InstallWebhook(ctx interface{}, param interface{}) *PoolManager_InstallWebhook_Call { + return &PoolManager_InstallWebhook_Call{Call: _e.mock.On("InstallWebhook", ctx, param)} +} + +func (_c *PoolManager_InstallWebhook_Call) Run(run func(ctx context.Context, param params.InstallWebhookParams)) *PoolManager_InstallWebhook_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.InstallWebhookParams)) + }) + return _c +} + +func (_c *PoolManager_InstallWebhook_Call) Return(_a0 params.HookInfo, _a1 error) *PoolManager_InstallWebhook_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManager_InstallWebhook_Call) RunAndReturn(run func(context.Context, params.InstallWebhookParams) (params.HookInfo, error)) *PoolManager_InstallWebhook_Call { + _c.Call.Return(run) + return _c +} + // RootCABundle provides a mock function with no fields func (_m *PoolManager) RootCABundle() (params.CertificateBundle, error) { ret := _m.Called() @@ -162,11 +309,67 @@ func (_m *PoolManager) RootCABundle() (params.CertificateBundle, error) { return r0, r1 } +// PoolManager_RootCABundle_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RootCABundle' +type PoolManager_RootCABundle_Call struct { + *mock.Call +} + +// RootCABundle is a helper method to define mock.On call +func (_e *PoolManager_Expecter) RootCABundle() *PoolManager_RootCABundle_Call { + return &PoolManager_RootCABundle_Call{Call: _e.mock.On("RootCABundle")} +} + +func (_c *PoolManager_RootCABundle_Call) Run(run func()) *PoolManager_RootCABundle_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *PoolManager_RootCABundle_Call) Return(_a0 params.CertificateBundle, _a1 error) *PoolManager_RootCABundle_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManager_RootCABundle_Call) RunAndReturn(run func() (params.CertificateBundle, error)) *PoolManager_RootCABundle_Call { + _c.Call.Return(run) + return _c +} + // SetPoolRunningState provides a mock function with given fields: isRunning, failureReason func (_m *PoolManager) SetPoolRunningState(isRunning bool, failureReason string) { _m.Called(isRunning, failureReason) } +// PoolManager_SetPoolRunningState_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetPoolRunningState' +type PoolManager_SetPoolRunningState_Call struct { + *mock.Call +} + +// SetPoolRunningState is a helper method to define mock.On call +// - isRunning bool +// - failureReason string +func (_e *PoolManager_Expecter) SetPoolRunningState(isRunning interface{}, failureReason interface{}) *PoolManager_SetPoolRunningState_Call { + return &PoolManager_SetPoolRunningState_Call{Call: _e.mock.On("SetPoolRunningState", isRunning, failureReason)} +} + +func (_c *PoolManager_SetPoolRunningState_Call) Run(run func(isRunning bool, failureReason string)) *PoolManager_SetPoolRunningState_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(bool), args[1].(string)) + }) + return _c +} + +func (_c *PoolManager_SetPoolRunningState_Call) Return() *PoolManager_SetPoolRunningState_Call { + _c.Call.Return() + return _c +} + +func (_c *PoolManager_SetPoolRunningState_Call) RunAndReturn(run func(bool, string)) *PoolManager_SetPoolRunningState_Call { + _c.Run(run) + return _c +} + // Start provides a mock function with no fields func (_m 
*PoolManager) Start() error { ret := _m.Called() @@ -185,6 +388,33 @@ func (_m *PoolManager) Start() error { return r0 } +// PoolManager_Start_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Start' +type PoolManager_Start_Call struct { + *mock.Call +} + +// Start is a helper method to define mock.On call +func (_e *PoolManager_Expecter) Start() *PoolManager_Start_Call { + return &PoolManager_Start_Call{Call: _e.mock.On("Start")} +} + +func (_c *PoolManager_Start_Call) Run(run func()) *PoolManager_Start_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *PoolManager_Start_Call) Return(_a0 error) *PoolManager_Start_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PoolManager_Start_Call) RunAndReturn(run func() error) *PoolManager_Start_Call { + _c.Call.Return(run) + return _c +} + // Status provides a mock function with no fields func (_m *PoolManager) Status() params.PoolManagerStatus { ret := _m.Called() @@ -203,6 +433,33 @@ func (_m *PoolManager) Status() params.PoolManagerStatus { return r0 } +// PoolManager_Status_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Status' +type PoolManager_Status_Call struct { + *mock.Call +} + +// Status is a helper method to define mock.On call +func (_e *PoolManager_Expecter) Status() *PoolManager_Status_Call { + return &PoolManager_Status_Call{Call: _e.mock.On("Status")} +} + +func (_c *PoolManager_Status_Call) Run(run func()) *PoolManager_Status_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *PoolManager_Status_Call) Return(_a0 params.PoolManagerStatus) *PoolManager_Status_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PoolManager_Status_Call) RunAndReturn(run func() params.PoolManagerStatus) *PoolManager_Status_Call { + _c.Call.Return(run) + return _c +} + // Stop provides a mock function with no fields func (_m *PoolManager) Stop() error { ret := _m.Called() @@ -221,6 +478,33 @@ func (_m *PoolManager) Stop() error { return r0 } +// PoolManager_Stop_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Stop' +type PoolManager_Stop_Call struct { + *mock.Call +} + +// Stop is a helper method to define mock.On call +func (_e *PoolManager_Expecter) Stop() *PoolManager_Stop_Call { + return &PoolManager_Stop_Call{Call: _e.mock.On("Stop")} +} + +func (_c *PoolManager_Stop_Call) Run(run func()) *PoolManager_Stop_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *PoolManager_Stop_Call) Return(_a0 error) *PoolManager_Stop_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PoolManager_Stop_Call) RunAndReturn(run func() error) *PoolManager_Stop_Call { + _c.Call.Return(run) + return _c +} + // UninstallWebhook provides a mock function with given fields: ctx func (_m *PoolManager) UninstallWebhook(ctx context.Context) error { ret := _m.Called(ctx) @@ -239,6 +523,34 @@ func (_m *PoolManager) UninstallWebhook(ctx context.Context) error { return r0 } +// PoolManager_UninstallWebhook_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UninstallWebhook' +type PoolManager_UninstallWebhook_Call struct { + *mock.Call +} + +// UninstallWebhook is a helper method to define mock.On call +// - ctx context.Context +func (_e *PoolManager_Expecter) UninstallWebhook(ctx interface{}) *PoolManager_UninstallWebhook_Call { + return &PoolManager_UninstallWebhook_Call{Call: 
_e.mock.On("UninstallWebhook", ctx)} +} + +func (_c *PoolManager_UninstallWebhook_Call) Run(run func(ctx context.Context)) *PoolManager_UninstallWebhook_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *PoolManager_UninstallWebhook_Call) Return(_a0 error) *PoolManager_UninstallWebhook_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PoolManager_UninstallWebhook_Call) RunAndReturn(run func(context.Context) error) *PoolManager_UninstallWebhook_Call { + _c.Call.Return(run) + return _c +} + // Wait provides a mock function with no fields func (_m *PoolManager) Wait() error { ret := _m.Called() @@ -257,6 +569,33 @@ func (_m *PoolManager) Wait() error { return r0 } +// PoolManager_Wait_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Wait' +type PoolManager_Wait_Call struct { + *mock.Call +} + +// Wait is a helper method to define mock.On call +func (_e *PoolManager_Expecter) Wait() *PoolManager_Wait_Call { + return &PoolManager_Wait_Call{Call: _e.mock.On("Wait")} +} + +func (_c *PoolManager_Wait_Call) Run(run func()) *PoolManager_Wait_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *PoolManager_Wait_Call) Return(_a0 error) *PoolManager_Wait_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PoolManager_Wait_Call) RunAndReturn(run func() error) *PoolManager_Wait_Call { + _c.Call.Return(run) + return _c +} + // WebhookSecret provides a mock function with no fields func (_m *PoolManager) WebhookSecret() string { ret := _m.Called() @@ -275,6 +614,33 @@ func (_m *PoolManager) WebhookSecret() string { return r0 } +// PoolManager_WebhookSecret_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WebhookSecret' +type PoolManager_WebhookSecret_Call struct { + *mock.Call +} + +// WebhookSecret is a helper method to define mock.On call +func (_e *PoolManager_Expecter) WebhookSecret() *PoolManager_WebhookSecret_Call { + return &PoolManager_WebhookSecret_Call{Call: _e.mock.On("WebhookSecret")} +} + +func (_c *PoolManager_WebhookSecret_Call) Run(run func()) *PoolManager_WebhookSecret_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *PoolManager_WebhookSecret_Call) Return(_a0 string) *PoolManager_WebhookSecret_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PoolManager_WebhookSecret_Call) RunAndReturn(run func() string) *PoolManager_WebhookSecret_Call { + _c.Call.Return(run) + return _c +} + // NewPoolManager creates a new instance of PoolManager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewPoolManager(t interface { diff --git a/runner/common/mocks/Provider.go b/runner/common/mocks/Provider.go index e7491ac5..5bf94a10 100644 --- a/runner/common/mocks/Provider.go +++ b/runner/common/mocks/Provider.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.53.3. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. 
package mocks @@ -19,6 +19,14 @@ type Provider struct { mock.Mock } +type Provider_Expecter struct { + mock *mock.Mock +} + +func (_m *Provider) EXPECT() *Provider_Expecter { + return &Provider_Expecter{mock: &_m.Mock} +} + // AsParams provides a mock function with no fields func (_m *Provider) AsParams() params.Provider { ret := _m.Called() @@ -37,6 +45,33 @@ func (_m *Provider) AsParams() params.Provider { return r0 } +// Provider_AsParams_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AsParams' +type Provider_AsParams_Call struct { + *mock.Call +} + +// AsParams is a helper method to define mock.On call +func (_e *Provider_Expecter) AsParams() *Provider_AsParams_Call { + return &Provider_AsParams_Call{Call: _e.mock.On("AsParams")} +} + +func (_c *Provider_AsParams_Call) Run(run func()) *Provider_AsParams_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Provider_AsParams_Call) Return(_a0 params.Provider) *Provider_AsParams_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Provider_AsParams_Call) RunAndReturn(run func() params.Provider) *Provider_AsParams_Call { + _c.Call.Return(run) + return _c +} + // CreateInstance provides a mock function with given fields: ctx, bootstrapParams, createInstanceParams func (_m *Provider) CreateInstance(ctx context.Context, bootstrapParams garm_provider_commonparams.BootstrapInstance, createInstanceParams common.CreateInstanceParams) (garm_provider_commonparams.ProviderInstance, error) { ret := _m.Called(ctx, bootstrapParams, createInstanceParams) @@ -65,6 +100,36 @@ func (_m *Provider) CreateInstance(ctx context.Context, bootstrapParams garm_pro return r0, r1 } +// Provider_CreateInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateInstance' +type Provider_CreateInstance_Call struct { + *mock.Call +} + +// CreateInstance is a helper method to define mock.On call +// - ctx context.Context +// - bootstrapParams garm_provider_commonparams.BootstrapInstance +// - createInstanceParams common.CreateInstanceParams +func (_e *Provider_Expecter) CreateInstance(ctx interface{}, bootstrapParams interface{}, createInstanceParams interface{}) *Provider_CreateInstance_Call { + return &Provider_CreateInstance_Call{Call: _e.mock.On("CreateInstance", ctx, bootstrapParams, createInstanceParams)} +} + +func (_c *Provider_CreateInstance_Call) Run(run func(ctx context.Context, bootstrapParams garm_provider_commonparams.BootstrapInstance, createInstanceParams common.CreateInstanceParams)) *Provider_CreateInstance_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(garm_provider_commonparams.BootstrapInstance), args[2].(common.CreateInstanceParams)) + }) + return _c +} + +func (_c *Provider_CreateInstance_Call) Return(_a0 garm_provider_commonparams.ProviderInstance, _a1 error) *Provider_CreateInstance_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Provider_CreateInstance_Call) RunAndReturn(run func(context.Context, garm_provider_commonparams.BootstrapInstance, common.CreateInstanceParams) (garm_provider_commonparams.ProviderInstance, error)) *Provider_CreateInstance_Call { + _c.Call.Return(run) + return _c +} + // DeleteInstance provides a mock function with given fields: ctx, instance, deleteInstanceParams func (_m *Provider) DeleteInstance(ctx context.Context, instance string, deleteInstanceParams common.DeleteInstanceParams) error { ret := _m.Called(ctx, instance, 
deleteInstanceParams) @@ -83,6 +148,36 @@ func (_m *Provider) DeleteInstance(ctx context.Context, instance string, deleteI return r0 } +// Provider_DeleteInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteInstance' +type Provider_DeleteInstance_Call struct { + *mock.Call +} + +// DeleteInstance is a helper method to define mock.On call +// - ctx context.Context +// - instance string +// - deleteInstanceParams common.DeleteInstanceParams +func (_e *Provider_Expecter) DeleteInstance(ctx interface{}, instance interface{}, deleteInstanceParams interface{}) *Provider_DeleteInstance_Call { + return &Provider_DeleteInstance_Call{Call: _e.mock.On("DeleteInstance", ctx, instance, deleteInstanceParams)} +} + +func (_c *Provider_DeleteInstance_Call) Run(run func(ctx context.Context, instance string, deleteInstanceParams common.DeleteInstanceParams)) *Provider_DeleteInstance_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(common.DeleteInstanceParams)) + }) + return _c +} + +func (_c *Provider_DeleteInstance_Call) Return(_a0 error) *Provider_DeleteInstance_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Provider_DeleteInstance_Call) RunAndReturn(run func(context.Context, string, common.DeleteInstanceParams) error) *Provider_DeleteInstance_Call { + _c.Call.Return(run) + return _c +} + // DisableJITConfig provides a mock function with no fields func (_m *Provider) DisableJITConfig() bool { ret := _m.Called() @@ -101,6 +196,33 @@ func (_m *Provider) DisableJITConfig() bool { return r0 } +// Provider_DisableJITConfig_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DisableJITConfig' +type Provider_DisableJITConfig_Call struct { + *mock.Call +} + +// DisableJITConfig is a helper method to define mock.On call +func (_e *Provider_Expecter) DisableJITConfig() *Provider_DisableJITConfig_Call { + return &Provider_DisableJITConfig_Call{Call: _e.mock.On("DisableJITConfig")} +} + +func (_c *Provider_DisableJITConfig_Call) Run(run func()) *Provider_DisableJITConfig_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Provider_DisableJITConfig_Call) Return(_a0 bool) *Provider_DisableJITConfig_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Provider_DisableJITConfig_Call) RunAndReturn(run func() bool) *Provider_DisableJITConfig_Call { + _c.Call.Return(run) + return _c +} + // GetInstance provides a mock function with given fields: ctx, instance, getInstanceParams func (_m *Provider) GetInstance(ctx context.Context, instance string, getInstanceParams common.GetInstanceParams) (garm_provider_commonparams.ProviderInstance, error) { ret := _m.Called(ctx, instance, getInstanceParams) @@ -129,6 +251,36 @@ func (_m *Provider) GetInstance(ctx context.Context, instance string, getInstanc return r0, r1 } +// Provider_GetInstance_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetInstance' +type Provider_GetInstance_Call struct { + *mock.Call +} + +// GetInstance is a helper method to define mock.On call +// - ctx context.Context +// - instance string +// - getInstanceParams common.GetInstanceParams +func (_e *Provider_Expecter) GetInstance(ctx interface{}, instance interface{}, getInstanceParams interface{}) *Provider_GetInstance_Call { + return &Provider_GetInstance_Call{Call: _e.mock.On("GetInstance", ctx, instance, getInstanceParams)} +} + +func (_c 
*Provider_GetInstance_Call) Run(run func(ctx context.Context, instance string, getInstanceParams common.GetInstanceParams)) *Provider_GetInstance_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(common.GetInstanceParams)) + }) + return _c +} + +func (_c *Provider_GetInstance_Call) Return(_a0 garm_provider_commonparams.ProviderInstance, _a1 error) *Provider_GetInstance_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Provider_GetInstance_Call) RunAndReturn(run func(context.Context, string, common.GetInstanceParams) (garm_provider_commonparams.ProviderInstance, error)) *Provider_GetInstance_Call { + _c.Call.Return(run) + return _c +} + // ListInstances provides a mock function with given fields: ctx, poolID, listInstancesParams func (_m *Provider) ListInstances(ctx context.Context, poolID string, listInstancesParams common.ListInstancesParams) ([]garm_provider_commonparams.ProviderInstance, error) { ret := _m.Called(ctx, poolID, listInstancesParams) @@ -159,6 +311,36 @@ func (_m *Provider) ListInstances(ctx context.Context, poolID string, listInstan return r0, r1 } +// Provider_ListInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ListInstances' +type Provider_ListInstances_Call struct { + *mock.Call +} + +// ListInstances is a helper method to define mock.On call +// - ctx context.Context +// - poolID string +// - listInstancesParams common.ListInstancesParams +func (_e *Provider_Expecter) ListInstances(ctx interface{}, poolID interface{}, listInstancesParams interface{}) *Provider_ListInstances_Call { + return &Provider_ListInstances_Call{Call: _e.mock.On("ListInstances", ctx, poolID, listInstancesParams)} +} + +func (_c *Provider_ListInstances_Call) Run(run func(ctx context.Context, poolID string, listInstancesParams common.ListInstancesParams)) *Provider_ListInstances_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(common.ListInstancesParams)) + }) + return _c +} + +func (_c *Provider_ListInstances_Call) Return(_a0 []garm_provider_commonparams.ProviderInstance, _a1 error) *Provider_ListInstances_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Provider_ListInstances_Call) RunAndReturn(run func(context.Context, string, common.ListInstancesParams) ([]garm_provider_commonparams.ProviderInstance, error)) *Provider_ListInstances_Call { + _c.Call.Return(run) + return _c +} + // RemoveAllInstances provides a mock function with given fields: ctx, removeAllInstancesParams func (_m *Provider) RemoveAllInstances(ctx context.Context, removeAllInstancesParams common.RemoveAllInstancesParams) error { ret := _m.Called(ctx, removeAllInstancesParams) @@ -177,6 +359,35 @@ func (_m *Provider) RemoveAllInstances(ctx context.Context, removeAllInstancesPa return r0 } +// Provider_RemoveAllInstances_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RemoveAllInstances' +type Provider_RemoveAllInstances_Call struct { + *mock.Call +} + +// RemoveAllInstances is a helper method to define mock.On call +// - ctx context.Context +// - removeAllInstancesParams common.RemoveAllInstancesParams +func (_e *Provider_Expecter) RemoveAllInstances(ctx interface{}, removeAllInstancesParams interface{}) *Provider_RemoveAllInstances_Call { + return &Provider_RemoveAllInstances_Call{Call: _e.mock.On("RemoveAllInstances", ctx, removeAllInstancesParams)} +} + +func (_c 
*Provider_RemoveAllInstances_Call) Run(run func(ctx context.Context, removeAllInstancesParams common.RemoveAllInstancesParams)) *Provider_RemoveAllInstances_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(common.RemoveAllInstancesParams)) + }) + return _c +} + +func (_c *Provider_RemoveAllInstances_Call) Return(_a0 error) *Provider_RemoveAllInstances_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Provider_RemoveAllInstances_Call) RunAndReturn(run func(context.Context, common.RemoveAllInstancesParams) error) *Provider_RemoveAllInstances_Call { + _c.Call.Return(run) + return _c +} + // Start provides a mock function with given fields: ctx, instance, startParams func (_m *Provider) Start(ctx context.Context, instance string, startParams common.StartParams) error { ret := _m.Called(ctx, instance, startParams) @@ -195,6 +406,36 @@ func (_m *Provider) Start(ctx context.Context, instance string, startParams comm return r0 } +// Provider_Start_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Start' +type Provider_Start_Call struct { + *mock.Call +} + +// Start is a helper method to define mock.On call +// - ctx context.Context +// - instance string +// - startParams common.StartParams +func (_e *Provider_Expecter) Start(ctx interface{}, instance interface{}, startParams interface{}) *Provider_Start_Call { + return &Provider_Start_Call{Call: _e.mock.On("Start", ctx, instance, startParams)} +} + +func (_c *Provider_Start_Call) Run(run func(ctx context.Context, instance string, startParams common.StartParams)) *Provider_Start_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(common.StartParams)) + }) + return _c +} + +func (_c *Provider_Start_Call) Return(_a0 error) *Provider_Start_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Provider_Start_Call) RunAndReturn(run func(context.Context, string, common.StartParams) error) *Provider_Start_Call { + _c.Call.Return(run) + return _c +} + // Stop provides a mock function with given fields: ctx, instance, stopParams func (_m *Provider) Stop(ctx context.Context, instance string, stopParams common.StopParams) error { ret := _m.Called(ctx, instance, stopParams) @@ -213,6 +454,36 @@ func (_m *Provider) Stop(ctx context.Context, instance string, stopParams common return r0 } +// Provider_Stop_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Stop' +type Provider_Stop_Call struct { + *mock.Call +} + +// Stop is a helper method to define mock.On call +// - ctx context.Context +// - instance string +// - stopParams common.StopParams +func (_e *Provider_Expecter) Stop(ctx interface{}, instance interface{}, stopParams interface{}) *Provider_Stop_Call { + return &Provider_Stop_Call{Call: _e.mock.On("Stop", ctx, instance, stopParams)} +} + +func (_c *Provider_Stop_Call) Run(run func(ctx context.Context, instance string, stopParams common.StopParams)) *Provider_Stop_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(common.StopParams)) + }) + return _c +} + +func (_c *Provider_Stop_Call) Return(_a0 error) *Provider_Stop_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Provider_Stop_Call) RunAndReturn(run func(context.Context, string, common.StopParams) error) *Provider_Stop_Call { + _c.Call.Return(run) + return _c +} + // NewProvider creates a new instance of Provider. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewProvider(t interface { diff --git a/runner/common/mocks/RateLimitClient.go b/runner/common/mocks/RateLimitClient.go index 119f62e1..b7e52f71 100644 --- a/runner/common/mocks/RateLimitClient.go +++ b/runner/common/mocks/RateLimitClient.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.53.3. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. package mocks @@ -14,6 +14,14 @@ type RateLimitClient struct { mock.Mock } +type RateLimitClient_Expecter struct { + mock *mock.Mock +} + +func (_m *RateLimitClient) EXPECT() *RateLimitClient_Expecter { + return &RateLimitClient_Expecter{mock: &_m.Mock} +} + // RateLimit provides a mock function with given fields: ctx func (_m *RateLimitClient) RateLimit(ctx context.Context) (*github.RateLimits, error) { ret := _m.Called(ctx) @@ -44,6 +52,34 @@ func (_m *RateLimitClient) RateLimit(ctx context.Context) (*github.RateLimits, e return r0, r1 } +// RateLimitClient_RateLimit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RateLimit' +type RateLimitClient_RateLimit_Call struct { + *mock.Call +} + +// RateLimit is a helper method to define mock.On call +// - ctx context.Context +func (_e *RateLimitClient_Expecter) RateLimit(ctx interface{}) *RateLimitClient_RateLimit_Call { + return &RateLimitClient_RateLimit_Call{Call: _e.mock.On("RateLimit", ctx)} +} + +func (_c *RateLimitClient_RateLimit_Call) Run(run func(ctx context.Context)) *RateLimitClient_RateLimit_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *RateLimitClient_RateLimit_Call) Return(_a0 *github.RateLimits, _a1 error) *RateLimitClient_RateLimit_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *RateLimitClient_RateLimit_Call) RunAndReturn(run func(context.Context) (*github.RateLimits, error)) *RateLimitClient_RateLimit_Call { + _c.Call.Return(run) + return _c +} + // NewRateLimitClient creates a new instance of RateLimitClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewRateLimitClient(t interface { diff --git a/runner/common/pool.go b/runner/common/pool.go index 18f46a9d..4cb86a62 100644 --- a/runner/common/pool.go +++ b/runner/common/pool.go @@ -36,7 +36,7 @@ const ( BackoffTimer = 1 * time.Minute ) -//go:generate mockery --all +//go:generate go run github.com/vektra/mockery/v2@latest type PoolManager interface { // ID returns the ID of the entity (repo, org, enterprise) ID() string diff --git a/runner/common/provider.go b/runner/common/provider.go index 7454540f..a5d0db66 100644 --- a/runner/common/provider.go +++ b/runner/common/provider.go @@ -21,7 +21,7 @@ import ( "github.com/cloudbase/garm/params" ) -//go:generate mockery --all +//go:generate go run github.com/vektra/mockery/v2@latest type Provider interface { // CreateInstance creates a new compute instance in the provider. 
CreateInstance(ctx context.Context, bootstrapParams commonParams.BootstrapInstance, createInstanceParams CreateInstanceParams) (commonParams.ProviderInstance, error) diff --git a/runner/common/util.go b/runner/common/util.go index 588ab68e..d8519438 100644 --- a/runner/common/util.go +++ b/runner/common/util.go @@ -49,7 +49,7 @@ type RateLimitClient interface { // GithubClient that describes the minimum list of functions we need to interact with github. // Allows for easier testing. // -//go:generate mockery --all +//go:generate go run github.com/vektra/mockery/v2@latest type GithubClient interface { GithubEntityOperations diff --git a/runner/interfaces.go b/runner/interfaces.go index ff8129ed..3d4703f7 100644 --- a/runner/interfaces.go +++ b/runner/interfaces.go @@ -43,7 +43,7 @@ type EnterprisePoolManager interface { GetEnterprisePoolManagers() (map[string]common.PoolManager, error) } -//go:generate mockery --name=PoolManagerController +//go:generate go run github.com/vektra/mockery/v2@latest type PoolManagerController interface { RepoPoolManager diff --git a/runner/mocks/PoolManagerController.go b/runner/mocks/PoolManagerController.go index 05720ebe..b17196ec 100644 --- a/runner/mocks/PoolManagerController.go +++ b/runner/mocks/PoolManagerController.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.53.3. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. package mocks @@ -19,6 +19,14 @@ type PoolManagerController struct { mock.Mock } +type PoolManagerController_Expecter struct { + mock *mock.Mock +} + +func (_m *PoolManagerController) EXPECT() *PoolManagerController_Expecter { + return &PoolManagerController_Expecter{mock: &_m.Mock} +} + // CreateEnterprisePoolManager provides a mock function with given fields: ctx, enterprise, providers, store func (_m *PoolManagerController) CreateEnterprisePoolManager(ctx context.Context, enterprise params.Enterprise, providers map[string]common.Provider, store databasecommon.Store) (common.PoolManager, error) { ret := _m.Called(ctx, enterprise, providers, store) @@ -49,6 +57,37 @@ func (_m *PoolManagerController) CreateEnterprisePoolManager(ctx context.Context return r0, r1 } +// PoolManagerController_CreateEnterprisePoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEnterprisePoolManager' +type PoolManagerController_CreateEnterprisePoolManager_Call struct { + *mock.Call +} + +// CreateEnterprisePoolManager is a helper method to define mock.On call +// - ctx context.Context +// - enterprise params.Enterprise +// - providers map[string]common.Provider +// - store databasecommon.Store +func (_e *PoolManagerController_Expecter) CreateEnterprisePoolManager(ctx interface{}, enterprise interface{}, providers interface{}, store interface{}) *PoolManagerController_CreateEnterprisePoolManager_Call { + return &PoolManagerController_CreateEnterprisePoolManager_Call{Call: _e.mock.On("CreateEnterprisePoolManager", ctx, enterprise, providers, store)} +} + +func (_c *PoolManagerController_CreateEnterprisePoolManager_Call) Run(run func(ctx context.Context, enterprise params.Enterprise, providers map[string]common.Provider, store databasecommon.Store)) *PoolManagerController_CreateEnterprisePoolManager_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.Enterprise), args[2].(map[string]common.Provider), args[3].(databasecommon.Store)) + }) + return _c +} + +func (_c *PoolManagerController_CreateEnterprisePoolManager_Call) Return(_a0 common.PoolManager, 
_a1 error) *PoolManagerController_CreateEnterprisePoolManager_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManagerController_CreateEnterprisePoolManager_Call) RunAndReturn(run func(context.Context, params.Enterprise, map[string]common.Provider, databasecommon.Store) (common.PoolManager, error)) *PoolManagerController_CreateEnterprisePoolManager_Call { + _c.Call.Return(run) + return _c +} + // CreateOrgPoolManager provides a mock function with given fields: ctx, org, providers, store func (_m *PoolManagerController) CreateOrgPoolManager(ctx context.Context, org params.Organization, providers map[string]common.Provider, store databasecommon.Store) (common.PoolManager, error) { ret := _m.Called(ctx, org, providers, store) @@ -79,6 +118,37 @@ func (_m *PoolManagerController) CreateOrgPoolManager(ctx context.Context, org p return r0, r1 } +// PoolManagerController_CreateOrgPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateOrgPoolManager' +type PoolManagerController_CreateOrgPoolManager_Call struct { + *mock.Call +} + +// CreateOrgPoolManager is a helper method to define mock.On call +// - ctx context.Context +// - org params.Organization +// - providers map[string]common.Provider +// - store databasecommon.Store +func (_e *PoolManagerController_Expecter) CreateOrgPoolManager(ctx interface{}, org interface{}, providers interface{}, store interface{}) *PoolManagerController_CreateOrgPoolManager_Call { + return &PoolManagerController_CreateOrgPoolManager_Call{Call: _e.mock.On("CreateOrgPoolManager", ctx, org, providers, store)} +} + +func (_c *PoolManagerController_CreateOrgPoolManager_Call) Run(run func(ctx context.Context, org params.Organization, providers map[string]common.Provider, store databasecommon.Store)) *PoolManagerController_CreateOrgPoolManager_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.Organization), args[2].(map[string]common.Provider), args[3].(databasecommon.Store)) + }) + return _c +} + +func (_c *PoolManagerController_CreateOrgPoolManager_Call) Return(_a0 common.PoolManager, _a1 error) *PoolManagerController_CreateOrgPoolManager_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManagerController_CreateOrgPoolManager_Call) RunAndReturn(run func(context.Context, params.Organization, map[string]common.Provider, databasecommon.Store) (common.PoolManager, error)) *PoolManagerController_CreateOrgPoolManager_Call { + _c.Call.Return(run) + return _c +} + // CreateRepoPoolManager provides a mock function with given fields: ctx, repo, providers, store func (_m *PoolManagerController) CreateRepoPoolManager(ctx context.Context, repo params.Repository, providers map[string]common.Provider, store databasecommon.Store) (common.PoolManager, error) { ret := _m.Called(ctx, repo, providers, store) @@ -109,6 +179,37 @@ func (_m *PoolManagerController) CreateRepoPoolManager(ctx context.Context, repo return r0, r1 } +// PoolManagerController_CreateRepoPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateRepoPoolManager' +type PoolManagerController_CreateRepoPoolManager_Call struct { + *mock.Call +} + +// CreateRepoPoolManager is a helper method to define mock.On call +// - ctx context.Context +// - repo params.Repository +// - providers map[string]common.Provider +// - store databasecommon.Store +func (_e *PoolManagerController_Expecter) CreateRepoPoolManager(ctx interface{}, repo interface{}, 
providers interface{}, store interface{}) *PoolManagerController_CreateRepoPoolManager_Call { + return &PoolManagerController_CreateRepoPoolManager_Call{Call: _e.mock.On("CreateRepoPoolManager", ctx, repo, providers, store)} +} + +func (_c *PoolManagerController_CreateRepoPoolManager_Call) Run(run func(ctx context.Context, repo params.Repository, providers map[string]common.Provider, store databasecommon.Store)) *PoolManagerController_CreateRepoPoolManager_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(params.Repository), args[2].(map[string]common.Provider), args[3].(databasecommon.Store)) + }) + return _c +} + +func (_c *PoolManagerController_CreateRepoPoolManager_Call) Return(_a0 common.PoolManager, _a1 error) *PoolManagerController_CreateRepoPoolManager_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManagerController_CreateRepoPoolManager_Call) RunAndReturn(run func(context.Context, params.Repository, map[string]common.Provider, databasecommon.Store) (common.PoolManager, error)) *PoolManagerController_CreateRepoPoolManager_Call { + _c.Call.Return(run) + return _c +} + // DeleteEnterprisePoolManager provides a mock function with given fields: enterprise func (_m *PoolManagerController) DeleteEnterprisePoolManager(enterprise params.Enterprise) error { ret := _m.Called(enterprise) @@ -127,6 +228,34 @@ func (_m *PoolManagerController) DeleteEnterprisePoolManager(enterprise params.E return r0 } +// PoolManagerController_DeleteEnterprisePoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteEnterprisePoolManager' +type PoolManagerController_DeleteEnterprisePoolManager_Call struct { + *mock.Call +} + +// DeleteEnterprisePoolManager is a helper method to define mock.On call +// - enterprise params.Enterprise +func (_e *PoolManagerController_Expecter) DeleteEnterprisePoolManager(enterprise interface{}) *PoolManagerController_DeleteEnterprisePoolManager_Call { + return &PoolManagerController_DeleteEnterprisePoolManager_Call{Call: _e.mock.On("DeleteEnterprisePoolManager", enterprise)} +} + +func (_c *PoolManagerController_DeleteEnterprisePoolManager_Call) Run(run func(enterprise params.Enterprise)) *PoolManagerController_DeleteEnterprisePoolManager_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(params.Enterprise)) + }) + return _c +} + +func (_c *PoolManagerController_DeleteEnterprisePoolManager_Call) Return(_a0 error) *PoolManagerController_DeleteEnterprisePoolManager_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PoolManagerController_DeleteEnterprisePoolManager_Call) RunAndReturn(run func(params.Enterprise) error) *PoolManagerController_DeleteEnterprisePoolManager_Call { + _c.Call.Return(run) + return _c +} + // DeleteOrgPoolManager provides a mock function with given fields: org func (_m *PoolManagerController) DeleteOrgPoolManager(org params.Organization) error { ret := _m.Called(org) @@ -145,6 +274,34 @@ func (_m *PoolManagerController) DeleteOrgPoolManager(org params.Organization) e return r0 } +// PoolManagerController_DeleteOrgPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteOrgPoolManager' +type PoolManagerController_DeleteOrgPoolManager_Call struct { + *mock.Call +} + +// DeleteOrgPoolManager is a helper method to define mock.On call +// - org params.Organization +func (_e *PoolManagerController_Expecter) DeleteOrgPoolManager(org interface{}) 
*PoolManagerController_DeleteOrgPoolManager_Call { + return &PoolManagerController_DeleteOrgPoolManager_Call{Call: _e.mock.On("DeleteOrgPoolManager", org)} +} + +func (_c *PoolManagerController_DeleteOrgPoolManager_Call) Run(run func(org params.Organization)) *PoolManagerController_DeleteOrgPoolManager_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(params.Organization)) + }) + return _c +} + +func (_c *PoolManagerController_DeleteOrgPoolManager_Call) Return(_a0 error) *PoolManagerController_DeleteOrgPoolManager_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PoolManagerController_DeleteOrgPoolManager_Call) RunAndReturn(run func(params.Organization) error) *PoolManagerController_DeleteOrgPoolManager_Call { + _c.Call.Return(run) + return _c +} + // DeleteRepoPoolManager provides a mock function with given fields: repo func (_m *PoolManagerController) DeleteRepoPoolManager(repo params.Repository) error { ret := _m.Called(repo) @@ -163,6 +320,34 @@ func (_m *PoolManagerController) DeleteRepoPoolManager(repo params.Repository) e return r0 } +// PoolManagerController_DeleteRepoPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteRepoPoolManager' +type PoolManagerController_DeleteRepoPoolManager_Call struct { + *mock.Call +} + +// DeleteRepoPoolManager is a helper method to define mock.On call +// - repo params.Repository +func (_e *PoolManagerController_Expecter) DeleteRepoPoolManager(repo interface{}) *PoolManagerController_DeleteRepoPoolManager_Call { + return &PoolManagerController_DeleteRepoPoolManager_Call{Call: _e.mock.On("DeleteRepoPoolManager", repo)} +} + +func (_c *PoolManagerController_DeleteRepoPoolManager_Call) Run(run func(repo params.Repository)) *PoolManagerController_DeleteRepoPoolManager_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(params.Repository)) + }) + return _c +} + +func (_c *PoolManagerController_DeleteRepoPoolManager_Call) Return(_a0 error) *PoolManagerController_DeleteRepoPoolManager_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *PoolManagerController_DeleteRepoPoolManager_Call) RunAndReturn(run func(params.Repository) error) *PoolManagerController_DeleteRepoPoolManager_Call { + _c.Call.Return(run) + return _c +} + // GetEnterprisePoolManager provides a mock function with given fields: enterprise func (_m *PoolManagerController) GetEnterprisePoolManager(enterprise params.Enterprise) (common.PoolManager, error) { ret := _m.Called(enterprise) @@ -193,6 +378,34 @@ func (_m *PoolManagerController) GetEnterprisePoolManager(enterprise params.Ente return r0, r1 } +// PoolManagerController_GetEnterprisePoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEnterprisePoolManager' +type PoolManagerController_GetEnterprisePoolManager_Call struct { + *mock.Call +} + +// GetEnterprisePoolManager is a helper method to define mock.On call +// - enterprise params.Enterprise +func (_e *PoolManagerController_Expecter) GetEnterprisePoolManager(enterprise interface{}) *PoolManagerController_GetEnterprisePoolManager_Call { + return &PoolManagerController_GetEnterprisePoolManager_Call{Call: _e.mock.On("GetEnterprisePoolManager", enterprise)} +} + +func (_c *PoolManagerController_GetEnterprisePoolManager_Call) Run(run func(enterprise params.Enterprise)) *PoolManagerController_GetEnterprisePoolManager_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(params.Enterprise)) + }) + return _c +} + +func (_c 
*PoolManagerController_GetEnterprisePoolManager_Call) Return(_a0 common.PoolManager, _a1 error) *PoolManagerController_GetEnterprisePoolManager_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManagerController_GetEnterprisePoolManager_Call) RunAndReturn(run func(params.Enterprise) (common.PoolManager, error)) *PoolManagerController_GetEnterprisePoolManager_Call { + _c.Call.Return(run) + return _c +} + // GetEnterprisePoolManagers provides a mock function with no fields func (_m *PoolManagerController) GetEnterprisePoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() @@ -223,6 +436,33 @@ func (_m *PoolManagerController) GetEnterprisePoolManagers() (map[string]common. return r0, r1 } +// PoolManagerController_GetEnterprisePoolManagers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEnterprisePoolManagers' +type PoolManagerController_GetEnterprisePoolManagers_Call struct { + *mock.Call +} + +// GetEnterprisePoolManagers is a helper method to define mock.On call +func (_e *PoolManagerController_Expecter) GetEnterprisePoolManagers() *PoolManagerController_GetEnterprisePoolManagers_Call { + return &PoolManagerController_GetEnterprisePoolManagers_Call{Call: _e.mock.On("GetEnterprisePoolManagers")} +} + +func (_c *PoolManagerController_GetEnterprisePoolManagers_Call) Run(run func()) *PoolManagerController_GetEnterprisePoolManagers_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *PoolManagerController_GetEnterprisePoolManagers_Call) Return(_a0 map[string]common.PoolManager, _a1 error) *PoolManagerController_GetEnterprisePoolManagers_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManagerController_GetEnterprisePoolManagers_Call) RunAndReturn(run func() (map[string]common.PoolManager, error)) *PoolManagerController_GetEnterprisePoolManagers_Call { + _c.Call.Return(run) + return _c +} + // GetOrgPoolManager provides a mock function with given fields: org func (_m *PoolManagerController) GetOrgPoolManager(org params.Organization) (common.PoolManager, error) { ret := _m.Called(org) @@ -253,6 +493,34 @@ func (_m *PoolManagerController) GetOrgPoolManager(org params.Organization) (com return r0, r1 } +// PoolManagerController_GetOrgPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetOrgPoolManager' +type PoolManagerController_GetOrgPoolManager_Call struct { + *mock.Call +} + +// GetOrgPoolManager is a helper method to define mock.On call +// - org params.Organization +func (_e *PoolManagerController_Expecter) GetOrgPoolManager(org interface{}) *PoolManagerController_GetOrgPoolManager_Call { + return &PoolManagerController_GetOrgPoolManager_Call{Call: _e.mock.On("GetOrgPoolManager", org)} +} + +func (_c *PoolManagerController_GetOrgPoolManager_Call) Run(run func(org params.Organization)) *PoolManagerController_GetOrgPoolManager_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(params.Organization)) + }) + return _c +} + +func (_c *PoolManagerController_GetOrgPoolManager_Call) Return(_a0 common.PoolManager, _a1 error) *PoolManagerController_GetOrgPoolManager_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManagerController_GetOrgPoolManager_Call) RunAndReturn(run func(params.Organization) (common.PoolManager, error)) *PoolManagerController_GetOrgPoolManager_Call { + _c.Call.Return(run) + return _c +} + // GetOrgPoolManagers provides a mock function with no fields func (_m 
*PoolManagerController) GetOrgPoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() @@ -283,6 +551,33 @@ func (_m *PoolManagerController) GetOrgPoolManagers() (map[string]common.PoolMan return r0, r1 } +// PoolManagerController_GetOrgPoolManagers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetOrgPoolManagers' +type PoolManagerController_GetOrgPoolManagers_Call struct { + *mock.Call +} + +// GetOrgPoolManagers is a helper method to define mock.On call +func (_e *PoolManagerController_Expecter) GetOrgPoolManagers() *PoolManagerController_GetOrgPoolManagers_Call { + return &PoolManagerController_GetOrgPoolManagers_Call{Call: _e.mock.On("GetOrgPoolManagers")} +} + +func (_c *PoolManagerController_GetOrgPoolManagers_Call) Run(run func()) *PoolManagerController_GetOrgPoolManagers_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *PoolManagerController_GetOrgPoolManagers_Call) Return(_a0 map[string]common.PoolManager, _a1 error) *PoolManagerController_GetOrgPoolManagers_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManagerController_GetOrgPoolManagers_Call) RunAndReturn(run func() (map[string]common.PoolManager, error)) *PoolManagerController_GetOrgPoolManagers_Call { + _c.Call.Return(run) + return _c +} + // GetRepoPoolManager provides a mock function with given fields: repo func (_m *PoolManagerController) GetRepoPoolManager(repo params.Repository) (common.PoolManager, error) { ret := _m.Called(repo) @@ -313,6 +608,34 @@ func (_m *PoolManagerController) GetRepoPoolManager(repo params.Repository) (com return r0, r1 } +// PoolManagerController_GetRepoPoolManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRepoPoolManager' +type PoolManagerController_GetRepoPoolManager_Call struct { + *mock.Call +} + +// GetRepoPoolManager is a helper method to define mock.On call +// - repo params.Repository +func (_e *PoolManagerController_Expecter) GetRepoPoolManager(repo interface{}) *PoolManagerController_GetRepoPoolManager_Call { + return &PoolManagerController_GetRepoPoolManager_Call{Call: _e.mock.On("GetRepoPoolManager", repo)} +} + +func (_c *PoolManagerController_GetRepoPoolManager_Call) Run(run func(repo params.Repository)) *PoolManagerController_GetRepoPoolManager_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(params.Repository)) + }) + return _c +} + +func (_c *PoolManagerController_GetRepoPoolManager_Call) Return(_a0 common.PoolManager, _a1 error) *PoolManagerController_GetRepoPoolManager_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManagerController_GetRepoPoolManager_Call) RunAndReturn(run func(params.Repository) (common.PoolManager, error)) *PoolManagerController_GetRepoPoolManager_Call { + _c.Call.Return(run) + return _c +} + // GetRepoPoolManagers provides a mock function with no fields func (_m *PoolManagerController) GetRepoPoolManagers() (map[string]common.PoolManager, error) { ret := _m.Called() @@ -343,6 +666,33 @@ func (_m *PoolManagerController) GetRepoPoolManagers() (map[string]common.PoolMa return r0, r1 } +// PoolManagerController_GetRepoPoolManagers_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRepoPoolManagers' +type PoolManagerController_GetRepoPoolManagers_Call struct { + *mock.Call +} + +// GetRepoPoolManagers is a helper method to define mock.On call +func (_e *PoolManagerController_Expecter) GetRepoPoolManagers() 
*PoolManagerController_GetRepoPoolManagers_Call { + return &PoolManagerController_GetRepoPoolManagers_Call{Call: _e.mock.On("GetRepoPoolManagers")} +} + +func (_c *PoolManagerController_GetRepoPoolManagers_Call) Run(run func()) *PoolManagerController_GetRepoPoolManagers_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *PoolManagerController_GetRepoPoolManagers_Call) Return(_a0 map[string]common.PoolManager, _a1 error) *PoolManagerController_GetRepoPoolManagers_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *PoolManagerController_GetRepoPoolManagers_Call) RunAndReturn(run func() (map[string]common.PoolManager, error)) *PoolManagerController_GetRepoPoolManagers_Call { + _c.Call.Return(run) + return _c +} + // NewPoolManagerController creates a new instance of PoolManagerController. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewPoolManagerController(t interface { diff --git a/testdata/config.toml b/testdata/config.toml index ee85ee33..337c0dd6 100644 --- a/testdata/config.toml +++ b/testdata/config.toml @@ -82,6 +82,8 @@ time_to_live = "8760h" certificate = "" # The path on disk to the corresponding private key for the certificate. key = "" + [apiserver.webui] + enable = true [database] # Turn on/off debugging for database queries. diff --git a/util/util.go b/util/util.go index da1264d2..994e4637 100644 --- a/util/util.go +++ b/util/util.go @@ -17,6 +17,7 @@ package util import ( "context" "net/http" + "unicode/utf8" "github.com/pkg/errors" @@ -43,3 +44,70 @@ func FetchTools(ctx context.Context, cli common.GithubClient) ([]commonParams.Ru } return ret, nil } + +func ASCIIEqualFold(s, t string) bool { + // Fast ASCII path for equal-length ASCII strings + if len(s) == len(t) && isASCII(s) && isASCII(t) { + for i := 0; i < len(s); i++ { + a, b := s[i], t[i] + if a != b { + if 'A' <= a && a <= 'Z' { + a = a + 'a' - 'A' + } + if 'A' <= b && b <= 'Z' { + b = b + 'a' - 'A' + } + if a != b { + return false + } + } + } + return true + } + + // UTF-8 path - handle different byte lengths correctly + i, j := 0, 0 + for i < len(s) && j < len(t) { + sr, sizeS := utf8.DecodeRuneInString(s[i:]) + tr, sizeT := utf8.DecodeRuneInString(t[j:]) + + // Handle invalid UTF-8 - they must be identical + if sr == utf8.RuneError || tr == utf8.RuneError { + // For invalid UTF-8, compare the raw bytes + if sr == utf8.RuneError && tr == utf8.RuneError { + if sizeS == sizeT && s[i:i+sizeS] == t[j:j+sizeT] { + i += sizeS + j += sizeT + continue + } + } + return false + } + + if sr != tr { + // Apply ASCII case folding only + if 'A' <= sr && sr <= 'Z' { + sr = sr + 'a' - 'A' + } + if 'A' <= tr && tr <= 'Z' { + tr = tr + 'a' - 'A' + } + if sr != tr { + return false + } + } + + i += sizeS + j += sizeT + } + return i == len(s) && j == len(t) +} + +func isASCII(s string) bool { + for i := 0; i < len(s); i++ { + if s[i] >= 0x80 { + return false + } + } + return true +} diff --git a/util/util_test.go b/util/util_test.go new file mode 100644 index 00000000..f04dab84 --- /dev/null +++ b/util/util_test.go @@ -0,0 +1,394 @@ +// Copyright 2022 Cloudbase Solutions SRL +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package util + +import ( + "testing" +) + +func TestASCIIEqualFold(t *testing.T) { + tests := []struct { + name string + s string + t string + expected bool + reason string + }{ + // Basic ASCII case folding tests + { + name: "identical strings", + s: "hello", + t: "hello", + expected: true, + reason: "identical strings should match", + }, + { + name: "simple case difference", + s: "Hello", + t: "hello", + expected: true, + reason: "ASCII case folding should match H/h", + }, + { + name: "all uppercase vs lowercase", + s: "HELLO", + t: "hello", + expected: true, + reason: "ASCII case folding should match all cases", + }, + { + name: "mixed case", + s: "HeLLo", + t: "hEllO", + expected: true, + reason: "mixed case should match after folding", + }, + + // Empty string tests + { + name: "both empty", + s: "", + t: "", + expected: true, + reason: "empty strings should match", + }, + { + name: "one empty", + s: "hello", + t: "", + expected: false, + reason: "different length strings should not match", + }, + { + name: "other empty", + s: "", + t: "hello", + expected: false, + reason: "different length strings should not match", + }, + + // Different content tests + { + name: "different strings same case", + s: "hello", + t: "world", + expected: false, + reason: "different content should not match", + }, + { + name: "different strings different case", + s: "Hello", + t: "World", + expected: false, + reason: "different content should not match regardless of case", + }, + { + name: "different length", + s: "hello", + t: "hello world", + expected: false, + reason: "different length strings should not match", + }, + + // ASCII non-alphabetic characters + { + name: "numbers and symbols", + s: "Hello123!@#", + t: "hello123!@#", + expected: true, + reason: "numbers and symbols should be preserved, only letters folded", + }, + { + name: "different numbers", + s: "Hello123", + t: "Hello124", + expected: false, + reason: "different numbers should not match", + }, + { + name: "different symbols", + s: "Hello!", + t: "Hello?", + expected: false, + reason: "different symbols should not match", + }, + + // URL-specific tests (CORS security focus) + { + name: "HTTP scheme case", + s: "HTTP://example.com", + t: "http://example.com", + expected: true, + reason: "HTTP scheme should be case-insensitive", + }, + { + name: "HTTPS scheme case", + s: "HTTPS://EXAMPLE.COM", + t: "https://example.com", + expected: true, + reason: "HTTPS scheme and domain should be case-insensitive", + }, + { + name: "complex URL case", + s: "HTTPS://API.EXAMPLE.COM:8080/PATH", + t: "https://api.example.com:8080/path", + expected: true, + reason: "entire URL should be case-insensitive for ASCII", + }, + { + name: "subdomain case", + s: "https://API.SUB.EXAMPLE.COM", + t: "https://api.sub.example.com", + expected: true, + reason: "subdomains should be case-insensitive", + }, + + // Unicode security tests (homograph attack prevention) + { + name: "cyrillic homograph attack", + s: "https://еxample.com", // Cyrillic 'е' (U+0435) + t: "https://example.com", // Latin 'e' (U+0065) + expected: false, + reason: "should block 
Cyrillic homograph attack", + }, + { + name: "mixed cyrillic attack", + s: "https://ехample.com", // Cyrillic 'е' and 'х' + t: "https://example.com", // Latin 'e' and 'x' + expected: false, + reason: "should block mixed Cyrillic homograph attack", + }, + { + name: "cyrillic 'а' attack", + s: "https://exаmple.com", // Cyrillic 'а' (U+0430) + t: "https://example.com", // Latin 'a' (U+0061) + expected: false, + reason: "should block Cyrillic 'а' homograph attack", + }, + + // Unicode case folding security tests + { + name: "unicode case folding attack", + s: "https://CAFÉ.com", // Latin É (U+00C9) + t: "https://café.com", // Latin é (U+00E9) + expected: false, + reason: "should NOT perform Unicode case folding (security)", + }, + { + name: "turkish i attack", + s: "https://İSTANBUL.com", // Turkish İ (U+0130) + t: "https://istanbul.com", // Latin i + expected: false, + reason: "should NOT perform Turkish case folding", + }, + { + name: "german sharp s", + s: "https://GROß.com", // German ß (U+00DF) + t: "https://gross.com", // Expanded form + expected: false, + reason: "should NOT perform German ß expansion", + }, + + // Valid Unicode exact matches + { + name: "identical unicode", + s: "https://café.com", + t: "https://café.com", + expected: true, + reason: "identical Unicode strings should match", + }, + { + name: "identical cyrillic", + s: "https://пример.com", // Russian + t: "https://пример.com", // Russian + expected: true, + reason: "identical Cyrillic strings should match", + }, + { + name: "ascii part of unicode domain", + s: "HTTPS://café.COM", // ASCII parts should fold + t: "https://café.com", + expected: true, + reason: "ASCII parts should fold even in Unicode strings", + }, + + // Edge cases with UTF-8 + { + name: "different UTF-8 byte length same rune count", + s: "Café", // é is 2 bytes + t: "Café", // é is 2 bytes (same) + expected: true, + reason: "same Unicode content should match", + }, + { + name: "UTF-8 normalization difference", + s: "café\u0301", // é as e + combining acute (3 bytes for é part) + t: "café", // é as single character (2 bytes for é part) + expected: false, + reason: "different Unicode normalization should not match", + }, + { + name: "CRITICAL: current implementation flaw", + s: "ABC" + string([]byte{0xC3, 0xA9}), // ABC + é (2 bytes) = 5 bytes + t: "abc" + string([]byte{0xC3, 0xA9}), // abc + é (2 bytes) = 5 bytes + expected: true, + reason: "should match after ASCII folding (this should pass with correct implementation)", + }, + { + name: "invalid UTF-8 sequence", + s: "hello\xff", // Invalid UTF-8 + t: "hello\xff", // Invalid UTF-8 + expected: true, + reason: "identical invalid UTF-8 should match", + }, + { + name: "different invalid UTF-8", + s: "hello\xff", // Invalid UTF-8 + t: "hello\xfe", // Different invalid UTF-8 + expected: false, + reason: "different invalid UTF-8 should not match", + }, + + // ASCII boundary tests + { + name: "ascii boundary characters", + s: "A@Z[`a{z", // Test boundaries around A-Z + t: "a@z[`A{Z", + expected: true, + reason: "only A-Z should be folded, not punctuation", + }, + { + name: "digit boundaries", + s: "Test123ABC", + t: "test123abc", + expected: true, + reason: "digits should not be folded, only letters", + }, + + // Long string performance tests + { + name: "long ascii string", + s: "HTTP://" + repeatString("ABCDEFGHIJKLMNOPQRSTUVWXYZ", 100) + ".COM", + t: "http://" + repeatString("abcdefghijklmnopqrstuvwxyz", 100) + ".com", + expected: true, + reason: "long ASCII strings should be handled efficiently", + }, + { 
+ name: "long unicode string", + s: repeatString("CAFÉ", 100), + t: repeatString("CAFÉ", 100), // Same case - should match + expected: true, + reason: "long identical Unicode strings should match", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := ASCIIEqualFold(tt.s, tt.t) + if result != tt.expected { + t.Errorf("ASCIIEqualFold(%q, %q) = %v, expected %v\nReason: %s", + tt.s, tt.t, result, tt.expected, tt.reason) + } + }) + } +} + +// Helper function for generating long test strings +func repeatString(s string, count int) string { + if count <= 0 { + return "" + } + result := make([]byte, 0, len(s)*count) + for i := 0; i < count; i++ { + result = append(result, s...) + } + return string(result) +} + +// Benchmark tests for performance verification +func BenchmarkASCIIEqualFold(b *testing.B) { + benchmarks := []struct { + name string + s string + t string + }{ + { + name: "short_ascii_match", + s: "HTTP://EXAMPLE.COM", + t: "http://example.com", + }, + { + name: "short_ascii_nomatch", + s: "HTTP://EXAMPLE.COM", + t: "http://different.com", + }, + { + name: "long_ascii_match", + s: "HTTP://" + repeatString("ABCDEFGHIJKLMNOPQRSTUVWXYZ", 100) + ".COM", + t: "http://" + repeatString("abcdefghijklmnopqrstuvwxyz", 100) + ".com", + }, + { + name: "unicode_nomatch", + s: "https://café.com", + t: "https://CAFÉ.com", + }, + { + name: "unicode_exact_match", + s: "https://café.com", + t: "https://café.com", + }, + } + + for _, bm := range benchmarks { + b.Run(bm.name, func(b *testing.B) { + for i := 0; i < b.N; i++ { + ASCIIEqualFold(bm.s, bm.t) + } + }) + } +} + +// Fuzzing test to catch edge cases +func FuzzASCIIEqualFold(f *testing.F) { + // Seed with interesting test cases + seeds := [][]string{ + {"hello", "HELLO"}, + {"", ""}, + {"café", "CAFÉ"}, + {"https://example.com", "HTTPS://EXAMPLE.COM"}, + {"еxample", "example"}, // Cyrillic attack + {string([]byte{0xff}), string([]byte{0xfe})}, // Invalid UTF-8 + } + + for _, seed := range seeds { + f.Add(seed[0], seed[1]) + } + + f.Fuzz(func(t *testing.T, s1, s2 string) { + // Just ensure it doesn't panic and returns a boolean + result := ASCIIEqualFold(s1, s2) + _ = result // Use the result to prevent optimization + + // Property: function should be symmetric + if ASCIIEqualFold(s1, s2) != ASCIIEqualFold(s2, s1) { + t.Errorf("ASCIIEqualFold is not symmetric: (%q, %q)", s1, s2) + } + + // Property: identical strings should always match + if s1 == s2 && !ASCIIEqualFold(s1, s2) { + t.Errorf("identical strings should match: %q", s1) + } + }) +} diff --git a/vendor/github.com/go-openapi/jsonpointer/.golangci.yml b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml index d2fafb8a..50063062 100644 --- a/vendor/github.com/go-openapi/jsonpointer/.golangci.yml +++ b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml @@ -1,56 +1,62 @@ -linters-settings: - gocyclo: - min-complexity: 45 - dupl: - threshold: 200 - goconst: - min-len: 2 - min-occurrences: 3 - +version: "2" linters: - enable-all: true + default: all disable: - - recvcheck - - unparam - - lll - - gochecknoinits - - gochecknoglobals - - funlen - - godox - - gocognit - - whitespace - - wsl - - wrapcheck - - testpackage - - nlreturn - - errorlint - - nestif - - godot - - gofumpt - - paralleltest - - tparallel - - thelper - - exhaustruct - - varnamelen - - gci + - cyclop - depguard - errchkjson - - inamedparam - - nonamedreturns - - musttag - - ireturn + - errorlint + - exhaustruct - forcetypeassert - - cyclop - # deprecated linters - #- deadcode - #- 
interfacer - #- scopelint - #- varcheck - #- structcheck - #- golint - #- nosnakecase - #- maligned - #- goerr113 - #- ifshort - #- gomnd - #- exhaustivestruct + - funlen + - gochecknoglobals + - gochecknoinits + - gocognit + - godot + - godox + - gosmopolitan + - inamedparam + - ireturn + - lll + - musttag + - nestif + - nlreturn + - nonamedreturns + - paralleltest + - testpackage + - thelper + - tparallel + - unparam + - varnamelen + - whitespace + - wrapcheck + - wsl + settings: + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + gocyclo: + min-complexity: 45 + exclusions: + generated: lax + presets: + - comments + - common-false-positives + - legacy + - std-error-handling + paths: + - third_party$ + - builtin$ + - examples$ +formatters: + enable: + - gofmt + - goimports + exclusions: + generated: lax + paths: + - third_party$ + - builtin$ + - examples$ diff --git a/vendor/github.com/go-openapi/jsonpointer/pointer.go b/vendor/github.com/go-openapi/jsonpointer/pointer.go index a08cd68a..61362105 100644 --- a/vendor/github.com/go-openapi/jsonpointer/pointer.go +++ b/vendor/github.com/go-openapi/jsonpointer/pointer.go @@ -179,6 +179,11 @@ func getSingleImpl(node any, decodedToken string, nameProvider *swag.NameProvide func setSingleImpl(node, data any, decodedToken string, nameProvider *swag.NameProvider) error { rValue := reflect.Indirect(reflect.ValueOf(node)) + // Check for nil to prevent panic when calling rValue.Type() + if isNil(node) { + return fmt.Errorf("cannot set field %q on nil value: %w", decodedToken, ErrPointer) + } + if ns, ok := node.(JSONSetable); ok { // pointer impl return ns.JSONSet(decodedToken, data) } @@ -285,6 +290,11 @@ func (p *Pointer) set(node, data any, nameProvider *swag.NameProvider) error { return setSingleImpl(node, data, decodedToken, nameProvider) } + // Check for nil during traversal + if isNil(node) { + return fmt.Errorf("cannot traverse through nil value at %q: %w", decodedToken, ErrPointer) + } + rValue := reflect.Indirect(reflect.ValueOf(node)) kind := rValue.Kind() diff --git a/vendor/github.com/mattn/go-sqlite3/README.md b/vendor/github.com/mattn/go-sqlite3/README.md index 3b43b033..76f49bac 100644 --- a/vendor/github.com/mattn/go-sqlite3/README.md +++ b/vendor/github.com/mattn/go-sqlite3/README.md @@ -351,6 +351,8 @@ For example the TDM-GCC Toolchain can be found [here](https://jmeubank.github.io # User Authentication +***This is deprecated*** + This package supports the SQLite User Authentication module. ## Compile diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c index e9cca66c..44d91d9d 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c @@ -1,7 +1,7 @@ #ifndef USE_LIBSQLITE3 /****************************************************************************** ** This file is an amalgamation of many separate C source files from SQLite -** version 3.49.1. By combining all the individual C code files into this +** version 3.50.3. By combining all the individual C code files into this ** single large file, the entire code can be compiled as a single translation ** unit. This allows many compilers to do optimizations that would not be ** possible if the files were compiled separately. Performance improvements @@ -19,7 +19,7 @@ ** separate file. This file contains only code for the core SQLite library. 
** ** The content in this amalgamation comes from Fossil check-in -** 873d4e274b4988d260ba8354a9718324a1c2 with changes in files: +** 3ce993b8657d6d9deda380a93cdd6404a8c8 with changes in files: ** ** */ @@ -453,7 +453,7 @@ extern "C" { ** ** Since [version 3.6.18] ([dateof:3.6.18]), ** SQLite source code has been stored in the -** Fossil configuration management +** Fossil configuration management ** system. ^The SQLITE_SOURCE_ID macro evaluates to ** a string which identifies a particular check-in of SQLite ** within its configuration management system. ^The SQLITE_SOURCE_ID @@ -466,9 +466,9 @@ extern "C" { ** [sqlite3_libversion_number()], [sqlite3_sourceid()], ** [sqlite_version()] and [sqlite_source_id()]. */ -#define SQLITE_VERSION "3.49.1" -#define SQLITE_VERSION_NUMBER 3049001 -#define SQLITE_SOURCE_ID "2025-02-18 13:38:58 873d4e274b4988d260ba8354a9718324a1c26187a4ab4c1cc0227c03d0f10e70" +#define SQLITE_VERSION "3.50.3" +#define SQLITE_VERSION_NUMBER 3050003 +#define SQLITE_SOURCE_ID "2025-07-17 13:25:10 3ce993b8657d6d9deda380a93cdd6404a8c8ba1b185b2bc423703e41ae5f2543" /* ** CAPI3REF: Run-Time Library Version Numbers @@ -1483,6 +1483,12 @@ struct sqlite3_io_methods { ** the value that M is to be set to. Before returning, the 32-bit signed ** integer is overwritten with the previous value of M. ** +**
  • [[SQLITE_FCNTL_BLOCK_ON_CONNECT]] +** The [SQLITE_FCNTL_BLOCK_ON_CONNECT] opcode is used to configure the +** VFS to block when taking a SHARED lock to connect to a wal mode database. +** This is used to implement the functionality associated with +** SQLITE_SETLK_BLOCK_ON_CONNECT. +** **
  • [[SQLITE_FCNTL_DATA_VERSION]] ** The [SQLITE_FCNTL_DATA_VERSION] opcode is used to detect changes to ** a database file. The argument is a pointer to a 32-bit unsigned integer. @@ -1579,6 +1585,7 @@ struct sqlite3_io_methods { #define SQLITE_FCNTL_CKSM_FILE 41 #define SQLITE_FCNTL_RESET_CACHE 42 #define SQLITE_FCNTL_NULL_IO 43 +#define SQLITE_FCNTL_BLOCK_ON_CONNECT 44 /* deprecated names */ #define SQLITE_GET_LOCKPROXYFILE SQLITE_FCNTL_GET_LOCKPROXYFILE @@ -2309,13 +2316,16 @@ struct sqlite3_mem_methods { ** ** [[SQLITE_CONFIG_LOOKASIDE]]
    SQLITE_CONFIG_LOOKASIDE
    **
    ^(The SQLITE_CONFIG_LOOKASIDE option takes two arguments that determine -** the default size of lookaside memory on each [database connection]. +** the default size of [lookaside memory] on each [database connection]. ** The first argument is the -** size of each lookaside buffer slot and the second is the number of -** slots allocated to each database connection.)^ ^(SQLITE_CONFIG_LOOKASIDE -** sets the default lookaside size. The [SQLITE_DBCONFIG_LOOKASIDE] -** option to [sqlite3_db_config()] can be used to change the lookaside -** configuration on individual connections.)^
    +** size of each lookaside buffer slot ("sz") and the second is the number of +** slots allocated to each database connection ("cnt").)^ +** ^(SQLITE_CONFIG_LOOKASIDE sets the default lookaside size. +** The [SQLITE_DBCONFIG_LOOKASIDE] option to [sqlite3_db_config()] can +** be used to change the lookaside configuration on individual connections.)^ +** The [-DSQLITE_DEFAULT_LOOKASIDE] option can be used to change the +** default lookaside configuration at compile-time. +** ** ** [[SQLITE_CONFIG_PCACHE2]]
    SQLITE_CONFIG_PCACHE2
    **
    ^(The SQLITE_CONFIG_PCACHE2 option takes a single argument which is @@ -2552,31 +2562,50 @@ struct sqlite3_mem_methods { ** [[SQLITE_DBCONFIG_LOOKASIDE]] **
    SQLITE_DBCONFIG_LOOKASIDE
    **
    The SQLITE_DBCONFIG_LOOKASIDE option is used to adjust the -** configuration of the lookaside memory allocator within a database +** configuration of the [lookaside memory allocator] within a database ** connection. ** The arguments to the SQLITE_DBCONFIG_LOOKASIDE option are not ** in the [DBCONFIG arguments|usual format]. ** The SQLITE_DBCONFIG_LOOKASIDE option takes three arguments, not two, ** so that a call to [sqlite3_db_config()] that uses SQLITE_DBCONFIG_LOOKASIDE ** should have a total of five parameters. -** ^The first argument (the third parameter to [sqlite3_db_config()] is a +**
      +**
    1. The first argument ("buf") is a ** pointer to a memory buffer to use for lookaside memory. -** ^The first argument after the SQLITE_DBCONFIG_LOOKASIDE verb -** may be NULL in which case SQLite will allocate the -** lookaside buffer itself using [sqlite3_malloc()]. ^The second argument is the -** size of each lookaside buffer slot. ^The third argument is the number of -** slots. The size of the buffer in the first argument must be greater than -** or equal to the product of the second and third arguments. The buffer -** must be aligned to an 8-byte boundary. ^If the second argument to -** SQLITE_DBCONFIG_LOOKASIDE is not a multiple of 8, it is internally -** rounded down to the next smaller multiple of 8. ^(The lookaside memory +** The first argument may be NULL in which case SQLite will allocate the +** lookaside buffer itself using [sqlite3_malloc()]. +**

    2. The second argument ("sz") is the +** size of each lookaside buffer slot. Lookaside is disabled if "sz" +** is less than 8. The "sz" argument should be a multiple of 8 less than +** 65536. If "sz" does not meet this constraint, it is reduced in size until +** it does. +**

    3. The third argument ("cnt") is the number of slots. Lookaside is disabled +** if "cnt"is less than 1. The "cnt" value will be reduced, if necessary, so +** that the product of "sz" and "cnt" does not exceed 2,147,418,112. The "cnt" +** parameter is usually chosen so that the product of "sz" and "cnt" is less +** than 1,000,000. +**

    +**

    If the "buf" argument is not NULL, then it must +** point to a memory buffer with a size that is greater than +** or equal to the product of "sz" and "cnt". +** The buffer must be aligned to an 8-byte boundary. +** The lookaside memory ** configuration for a database connection can only be changed when that ** connection is not currently using lookaside memory, or in other words -** when the "current value" returned by -** [sqlite3_db_status](D,[SQLITE_DBSTATUS_LOOKASIDE_USED],...) is zero. +** when the value returned by [SQLITE_DBSTATUS_LOOKASIDE_USED] is zero. ** Any attempt to change the lookaside memory configuration when lookaside ** memory is in use leaves the configuration unchanged and returns -** [SQLITE_BUSY].)^

    +** [SQLITE_BUSY]. +** If the "buf" argument is NULL and an attempt +** to allocate memory based on "sz" and "cnt" fails, then +** lookaside is silently disabled. +**

    +** The [SQLITE_CONFIG_LOOKASIDE] configuration option can be used to set the +** default lookaside configuration at initialization. The +** [-DSQLITE_DEFAULT_LOOKASIDE] option can be used to set the default lookaside +** configuration at compile-time. Typical values for lookaside are 1200 for +** "sz" and 40 to 100 for "cnt". +** ** ** [[SQLITE_DBCONFIG_ENABLE_FKEY]] **

    SQLITE_DBCONFIG_ENABLE_FKEY
    @@ -3313,6 +3342,44 @@ SQLITE_API int sqlite3_busy_handler(sqlite3*,int(*)(void*,int),void*); */ SQLITE_API int sqlite3_busy_timeout(sqlite3*, int ms); +/* +** CAPI3REF: Set the Setlk Timeout +** METHOD: sqlite3 +** +** This routine is only useful in SQLITE_ENABLE_SETLK_TIMEOUT builds. If +** the VFS supports blocking locks, it sets the timeout in ms used by +** eligible locks taken on wal mode databases by the specified database +** handle. In non-SQLITE_ENABLE_SETLK_TIMEOUT builds, or if the VFS does +** not support blocking locks, this function is a no-op. +** +** Passing 0 to this function disables blocking locks altogether. Passing +** -1 to this function requests that the VFS blocks for a long time - +** indefinitely if possible. The results of passing any other negative value +** are undefined. +** +** Internally, each SQLite database handle store two timeout values - the +** busy-timeout (used for rollback mode databases, or if the VFS does not +** support blocking locks) and the setlk-timeout (used for blocking locks +** on wal-mode databases). The sqlite3_busy_timeout() method sets both +** values, this function sets only the setlk-timeout value. Therefore, +** to configure separate busy-timeout and setlk-timeout values for a single +** database handle, call sqlite3_busy_timeout() followed by this function. +** +** Whenever the number of connections to a wal mode database falls from +** 1 to 0, the last connection takes an exclusive lock on the database, +** then checkpoints and deletes the wal file. While it is doing this, any +** new connection that tries to read from the database fails with an +** SQLITE_BUSY error. Or, if the SQLITE_SETLK_BLOCK_ON_CONNECT flag is +** passed to this API, the new connection blocks until the exclusive lock +** has been released. +*/ +SQLITE_API int sqlite3_setlk_timeout(sqlite3*, int ms, int flags); + +/* +** CAPI3REF: Flags for sqlite3_setlk_timeout() +*/ +#define SQLITE_SETLK_BLOCK_ON_CONNECT 0x01 + /* ** CAPI3REF: Convenience Routines For Running Queries ** METHOD: sqlite3 @@ -4332,7 +4399,7 @@ SQLITE_API sqlite3_file *sqlite3_database_file_object(const char*); ** ** The sqlite3_create_filename(D,J,W,N,P) allocates memory to hold a version of ** database filename D with corresponding journal file J and WAL file W and -** with N URI parameters key/values pairs in the array P. The result from +** an array P of N URI Key/Value pairs. The result from ** sqlite3_create_filename(D,J,W,N,P) is a pointer to a database filename that ** is safe to pass to routines like: **
      @@ -5013,7 +5080,7 @@ typedef struct sqlite3_context sqlite3_context; ** METHOD: sqlite3_stmt ** ** ^(In the SQL statement text input to [sqlite3_prepare_v2()] and its variants, -** literals may be replaced by a [parameter] that matches one of following +** literals may be replaced by a [parameter] that matches one of the following ** templates: ** **
        @@ -5058,7 +5125,7 @@ typedef struct sqlite3_context sqlite3_context; ** ** [[byte-order determination rules]] ^The byte-order of ** UTF16 input text is determined by the byte-order mark (BOM, U+FEFF) -** found in first character, which is removed, or in the absence of a BOM +** found in the first character, which is removed, or in the absence of a BOM ** the byte order is the native byte order of the host ** machine for sqlite3_bind_text16() or the byte order specified in ** the 6th parameter for sqlite3_bind_text64().)^ @@ -5078,7 +5145,7 @@ typedef struct sqlite3_context sqlite3_context; ** or sqlite3_bind_text16() or sqlite3_bind_text64() then ** that parameter must be the byte offset ** where the NUL terminator would occur assuming the string were NUL -** terminated. If any NUL characters occurs at byte offsets less than +** terminated. If any NUL characters occur at byte offsets less than ** the value of the fourth parameter then the resulting string value will ** contain embedded NULs. The result of expressions involving strings ** with embedded NULs is undefined. @@ -5290,7 +5357,7 @@ SQLITE_API const void *sqlite3_column_name16(sqlite3_stmt*, int N); ** METHOD: sqlite3_stmt ** ** ^These routines provide a means to determine the database, table, and -** table column that is the origin of a particular result column in +** table column that is the origin of a particular result column in a ** [SELECT] statement. ** ^The name of the database or table or column can be returned as ** either a UTF-8 or UTF-16 string. ^The _database_ routines return @@ -5428,7 +5495,7 @@ SQLITE_API const void *sqlite3_column_decltype16(sqlite3_stmt*,int); ** other than [SQLITE_ROW] before any subsequent invocation of ** sqlite3_step(). Failure to reset the prepared statement using ** [sqlite3_reset()] would result in an [SQLITE_MISUSE] return from -** sqlite3_step(). But after [version 3.6.23.1] ([dateof:3.6.23.1], +** sqlite3_step(). But after [version 3.6.23.1] ([dateof:3.6.23.1]), ** sqlite3_step() began ** calling [sqlite3_reset()] automatically in this circumstance rather ** than returning [SQLITE_MISUSE]. This is not considered a compatibility @@ -5859,8 +5926,8 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt); ** ** For best security, the [SQLITE_DIRECTONLY] flag is recommended for ** all application-defined SQL functions that do not need to be -** used inside of triggers, view, CHECK constraints, or other elements of -** the database schema. This flags is especially recommended for SQL +** used inside of triggers, views, CHECK constraints, or other elements of +** the database schema. This flag is especially recommended for SQL ** functions that have side effects or reveal internal application state. ** Without this flag, an attacker might be able to modify the schema of ** a database file to include invocations of the function with parameters @@ -5891,7 +5958,7 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt); ** [user-defined window functions|available here]. ** ** ^(If the final parameter to sqlite3_create_function_v2() or -** sqlite3_create_window_function() is not NULL, then it is destructor for +** sqlite3_create_window_function() is not NULL, then it is the destructor for ** the application data pointer. 
The destructor is invoked when the function ** is deleted, either by being overloaded or when the database connection ** closes.)^ ^The destructor is also invoked if the call to @@ -6291,7 +6358,7 @@ SQLITE_API unsigned int sqlite3_value_subtype(sqlite3_value*); ** METHOD: sqlite3_value ** ** ^The sqlite3_value_dup(V) interface makes a copy of the [sqlite3_value] -** object D and returns a pointer to that copy. ^The [sqlite3_value] returned +** object V and returns a pointer to that copy. ^The [sqlite3_value] returned ** is a [protected sqlite3_value] object even if the input is not. ** ^The sqlite3_value_dup(V) interface returns NULL if V is NULL or if a ** memory allocation fails. ^If V is a [pointer value], then the result @@ -6329,7 +6396,7 @@ SQLITE_API void sqlite3_value_free(sqlite3_value*); ** allocation error occurs. ** ** ^(The amount of space allocated by sqlite3_aggregate_context(C,N) is -** determined by the N parameter on first successful call. Changing the +** determined by the N parameter on the first successful call. Changing the ** value of N in any subsequent call to sqlite3_aggregate_context() within ** the same aggregate function instance will not resize the memory ** allocation.)^ Within the xFinal callback, it is customary to set @@ -6491,7 +6558,7 @@ SQLITE_API void sqlite3_set_auxdata(sqlite3_context*, int N, void*, void (*)(voi ** ** Security Warning: These interfaces should not be exposed in scripting ** languages or in other circumstances where it might be possible for an -** an attacker to invoke them. Any agent that can invoke these interfaces +** attacker to invoke them. Any agent that can invoke these interfaces ** can probably also take control of the process. ** ** Database connection client data is only available for SQLite @@ -6605,7 +6672,7 @@ typedef void (*sqlite3_destructor_type)(void*); ** pointed to by the 2nd parameter are taken as the application-defined ** function result. If the 3rd parameter is non-negative, then it ** must be the byte offset into the string where the NUL terminator would -** appear if the string where NUL terminated. If any NUL characters occur +** appear if the string were NUL terminated. If any NUL characters occur ** in the string at a byte offset that is less than the value of the 3rd ** parameter, then the resulting string will contain embedded NULs and the ** result of expressions operating on strings with embedded NULs is undefined. @@ -6663,7 +6730,7 @@ typedef void (*sqlite3_destructor_type)(void*); ** string and preferably a string literal. The sqlite3_result_pointer() ** routine is part of the [pointer passing interface] added for SQLite 3.20.0. ** -** If these routines are called from within the different thread +** If these routines are called from within a different thread ** than the one containing the application-defined function that received ** the [sqlite3_context] pointer, the results are undefined. */ @@ -7069,7 +7136,7 @@ SQLITE_API sqlite3 *sqlite3_db_handle(sqlite3_stmt*); ** METHOD: sqlite3 ** ** ^The sqlite3_db_name(D,N) interface returns a pointer to the schema name -** for the N-th database on database connection D, or a NULL pointer of N is +** for the N-th database on database connection D, or a NULL pointer if N is ** out of range. An N value of 0 means the main database file. An N of 1 is ** the "temp" schema. Larger values of N correspond to various ATTACH-ed ** databases. @@ -7164,7 +7231,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema); **
        The SQLITE_TXN_READ state means that the database is currently ** in a read transaction. Content has been read from the database file ** but nothing in the database file has changed. The transaction state -** will advanced to SQLITE_TXN_WRITE if any changes occur and there are +** will be advanced to SQLITE_TXN_WRITE if any changes occur and there are ** no other conflicting concurrent write transactions. The transaction ** state will revert to SQLITE_TXN_NONE following a [ROLLBACK] or ** [COMMIT].
        @@ -7173,7 +7240,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema); **
        The SQLITE_TXN_WRITE state means that the database is currently ** in a write transaction. Content has been written to the database file ** but has not yet committed. The transaction state will change to -** to SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT].
        +** SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT]. */ #define SQLITE_TXN_NONE 0 #define SQLITE_TXN_READ 1 @@ -7324,6 +7391,8 @@ SQLITE_API int sqlite3_autovacuum_pages( ** ** ^The second argument is a pointer to the function to invoke when a ** row is updated, inserted or deleted in a rowid table. +** ^The update hook is disabled by invoking sqlite3_update_hook() +** with a NULL pointer as the second parameter. ** ^The first argument to the callback is a copy of the third argument ** to sqlite3_update_hook(). ** ^The second callback argument is one of [SQLITE_INSERT], [SQLITE_DELETE], @@ -7452,7 +7521,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*); ** CAPI3REF: Impose A Limit On Heap Size ** ** These interfaces impose limits on the amount of heap memory that will be -** by all database connections within a single process. +** used by all database connections within a single process. ** ** ^The sqlite3_soft_heap_limit64() interface sets and/or queries the ** soft limit on the amount of heap memory that may be allocated by SQLite. @@ -7510,7 +7579,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*); **
      )^ ** ** The circumstances under which SQLite will enforce the heap limits may -** changes in future releases of SQLite. +** change in future releases of SQLite. */ SQLITE_API sqlite3_int64 sqlite3_soft_heap_limit64(sqlite3_int64 N); SQLITE_API sqlite3_int64 sqlite3_hard_heap_limit64(sqlite3_int64 N); @@ -7625,8 +7694,8 @@ SQLITE_API int sqlite3_table_column_metadata( ** ^The entry point is zProc. ** ^(zProc may be 0, in which case SQLite will try to come up with an ** entry point name on its own. It first tries "sqlite3_extension_init". -** If that does not work, it constructs a name "sqlite3_X_init" where the -** X is consists of the lower-case equivalent of all ASCII alphabetic +** If that does not work, it constructs a name "sqlite3_X_init" where +** X consists of the lower-case equivalent of all ASCII alphabetic ** characters in the filename from the last "/" to the first following ** "." and omitting any initial "lib".)^ ** ^The sqlite3_load_extension() interface returns @@ -7697,7 +7766,7 @@ SQLITE_API int sqlite3_enable_load_extension(sqlite3 *db, int onoff); ** ^(Even though the function prototype shows that xEntryPoint() takes ** no arguments and returns void, SQLite invokes xEntryPoint() with three ** arguments and expects an integer result as if the signature of the -** entry point where as follows: +** entry point were as follows: ** **
       **    int xEntryPoint(
      @@ -7861,7 +7930,7 @@ struct sqlite3_module {
       ** virtual table and might not be checked again by the byte code.)^ ^(The
       ** aConstraintUsage[].omit flag is an optimization hint. When the omit flag
       ** is left in its default setting of false, the constraint will always be
      -** checked separately in byte code.  If the omit flag is change to true, then
      +** checked separately in byte code.  If the omit flag is changed to true, then
       ** the constraint may or may not be checked in byte code.  In other words,
       ** when the omit flag is true there is no guarantee that the constraint will
       ** not be checked again using byte code.)^
      @@ -7887,7 +7956,7 @@ struct sqlite3_module {
       ** The xBestIndex method may optionally populate the idxFlags field with a
       ** mask of SQLITE_INDEX_SCAN_* flags. One such flag is
       ** [SQLITE_INDEX_SCAN_HEX], which if set causes the [EXPLAIN QUERY PLAN]
      -** output to show the idxNum has hex instead of as decimal.  Another flag is
      +** output to show the idxNum as hex instead of as decimal.  Another flag is
       ** SQLITE_INDEX_SCAN_UNIQUE, which if set indicates that the query plan will
       ** return at most one row.
       **
      @@ -8028,7 +8097,7 @@ struct sqlite3_index_info {
       ** the implementation of the [virtual table module].   ^The fourth
       ** parameter is an arbitrary client data pointer that is passed through
       ** into the [xCreate] and [xConnect] methods of the virtual table module
      -** when a new virtual table is be being created or reinitialized.
      +** when a new virtual table is being created or reinitialized.
       **
       ** ^The sqlite3_create_module_v2() interface has a fifth parameter which
       ** is a pointer to a destructor for the pClientData.  ^SQLite will
      @@ -8193,7 +8262,7 @@ typedef struct sqlite3_blob sqlite3_blob;
       ** in *ppBlob. Otherwise an [error code] is returned and, unless the error
       ** code is SQLITE_MISUSE, *ppBlob is set to NULL.)^ ^This means that, provided
       ** the API is not misused, it is always safe to call [sqlite3_blob_close()]
      -** on *ppBlob after this function it returns.
      +** on *ppBlob after this function returns.
       **
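/* Editor's note: illustrative sketch, not part of the vendored diff.  It
** exercises the incremental blob I/O calls documented above
** (sqlite3_blob_open/bytes/read/close).  The "docs" table, "body" column
** and rowid argument are hypothetical names chosen for the example. */
#include <stdlib.h>
#include "sqlite3.h"

static int read_blob(sqlite3 *db, sqlite3_int64 rowid,
                     unsigned char **pOut, int *pnOut){
  sqlite3_blob *pBlob = 0;
  int rc = sqlite3_blob_open(db, "main", "docs", "body", rowid,
                             0 /* read-only */, &pBlob);
  if( rc!=SQLITE_OK ) return rc;
  *pnOut = sqlite3_blob_bytes(pBlob);       /* size is fixed for this handle */
  *pOut = malloc( *pnOut>0 ? *pnOut : 1 );
  if( *pOut==0 ){
    rc = SQLITE_NOMEM;
  }else{
    rc = sqlite3_blob_read(pBlob, *pOut, *pnOut, 0);
  }
  sqlite3_blob_close(pBlob);                /* safe even after a read error */
  return rc;
}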
       ** This function fails with SQLITE_ERROR if any of the following are true:
       ** 
        @@ -8313,7 +8382,7 @@ SQLITE_API int sqlite3_blob_close(sqlite3_blob *); ** ** ^Returns the size in bytes of the BLOB accessible via the ** successfully opened [BLOB handle] in its only argument. ^The -** incremental blob I/O routines can only read or overwriting existing +** incremental blob I/O routines can only read or overwrite existing ** blob content; they cannot change the size of a blob. ** ** This routine only works on a [BLOB handle] which has been created @@ -8463,7 +8532,7 @@ SQLITE_API int sqlite3_vfs_unregister(sqlite3_vfs*); ** ^The sqlite3_mutex_alloc() routine allocates a new ** mutex and returns a pointer to it. ^The sqlite3_mutex_alloc() ** routine returns NULL if it is unable to allocate the requested -** mutex. The argument to sqlite3_mutex_alloc() must one of these +** mutex. The argument to sqlite3_mutex_alloc() must be one of these ** integer constants: ** **
          @@ -8696,7 +8765,7 @@ SQLITE_API int sqlite3_mutex_notheld(sqlite3_mutex*); ** CAPI3REF: Retrieve the mutex for a database connection ** METHOD: sqlite3 ** -** ^This interface returns a pointer the [sqlite3_mutex] object that +** ^This interface returns a pointer to the [sqlite3_mutex] object that ** serializes access to the [database connection] given in the argument ** when the [threading mode] is Serialized. ** ^If the [threading mode] is Single-thread or Multi-thread then this @@ -8819,7 +8888,7 @@ SQLITE_API int sqlite3_test_control(int op, ...); ** CAPI3REF: SQL Keyword Checking ** ** These routines provide access to the set of SQL language keywords -** recognized by SQLite. Applications can uses these routines to determine +** recognized by SQLite. Applications can use these routines to determine ** whether or not a specific identifier needs to be escaped (for example, ** by enclosing in double-quotes) so as not to confuse the parser. ** @@ -8987,7 +9056,7 @@ SQLITE_API void sqlite3_str_reset(sqlite3_str*); ** content of the dynamic string under construction in X. The value ** returned by [sqlite3_str_value(X)] is managed by the sqlite3_str object X ** and might be freed or altered by any subsequent method on the same -** [sqlite3_str] object. Applications must not used the pointer returned +** [sqlite3_str] object. Applications must not use the pointer returned by ** [sqlite3_str_value(X)] after any subsequent method call on the same ** object. ^Applications may change the content of the string returned ** by [sqlite3_str_value(X)] as long as they do not write into any bytes @@ -9073,7 +9142,7 @@ SQLITE_API int sqlite3_status64( ** allocation which could not be satisfied by the [SQLITE_CONFIG_PAGECACHE] ** buffer and where forced to overflow to [sqlite3_malloc()]. The ** returned value includes allocations that overflowed because they -** where too large (they were larger than the "sz" parameter to +** were too large (they were larger than the "sz" parameter to ** [SQLITE_CONFIG_PAGECACHE]) and allocations that overflowed because ** no space was left in the page cache.)^ ** @@ -9157,28 +9226,29 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** [[SQLITE_DBSTATUS_LOOKASIDE_HIT]] ^(
          SQLITE_DBSTATUS_LOOKASIDE_HIT
          **
          This parameter returns the number of malloc attempts that were ** satisfied using lookaside memory. Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
          )^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE]] ** ^(
          SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE
          -**
          This parameter returns the number malloc attempts that might have +**
          This parameter returns the number of malloc attempts that might have ** been satisfied using lookaside memory but failed due to the amount of ** memory requested being larger than the lookaside slot size. ** Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
          )^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL]] ** ^(
          SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL
          -**
          This parameter returns the number malloc attempts that might have +**
          This parameter returns the number of malloc attempts that might have ** been satisfied using lookaside memory but failed due to all lookaside ** memory already being in use. ** Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
          )^ ** ** [[SQLITE_DBSTATUS_CACHE_USED]] ^(
          SQLITE_DBSTATUS_CACHE_USED
          **
          This parameter returns the approximate number of bytes of heap ** memory used by all pager caches associated with the database connection.)^ ** ^The highwater mark associated with SQLITE_DBSTATUS_CACHE_USED is always 0. +**
          ** ** [[SQLITE_DBSTATUS_CACHE_USED_SHARED]] ** ^(
          SQLITE_DBSTATUS_CACHE_USED_SHARED
          @@ -9187,10 +9257,10 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** memory used by that pager cache is divided evenly between the attached ** connections.)^ In other words, if none of the pager caches associated ** with the database connection are shared, this request returns the same -** value as DBSTATUS_CACHE_USED. Or, if one or more or the pager caches are +** value as DBSTATUS_CACHE_USED. Or, if one or more of the pager caches are ** shared, the value returned by this call will be smaller than that returned ** by DBSTATUS_CACHE_USED. ^The highwater mark associated with -** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0. +** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0. ** ** [[SQLITE_DBSTATUS_SCHEMA_USED]] ^(
          SQLITE_DBSTATUS_SCHEMA_USED
          **
          This parameter returns the approximate number of bytes of heap @@ -9200,6 +9270,7 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** schema memory is shared with other database connections due to ** [shared cache mode] being enabled. ** ^The highwater mark associated with SQLITE_DBSTATUS_SCHEMA_USED is always 0. +**
          ** ** [[SQLITE_DBSTATUS_STMT_USED]] ^(
          SQLITE_DBSTATUS_STMT_USED
          **
          This parameter returns the approximate number of bytes of heap @@ -9236,7 +9307,7 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** been written to disk in the middle of a transaction due to the page ** cache overflowing. Transactions are more efficient if they are written ** to disk all at once. When pages spill mid-transaction, that introduces -** additional overhead. This parameter can be used help identify +** additional overhead. This parameter can be used to help identify ** inefficiencies that can be resolved by increasing the cache size. **
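/* Editor's note: illustrative sketch, not part of the vendored diff.  It
** shows how a few of the SQLITE_DBSTATUS_* verbs documented above are read
** with sqlite3_db_status(); which verbs an application polls, and whether it
** resets them, is an application choice. */
#include <stdio.h>
#include "sqlite3.h"

static void report_db_memory(sqlite3 *db){
  int cur = 0, hi = 0;

  /* Heap used by this connection's pager caches (highwater is always 0). */
  if( sqlite3_db_status(db, SQLITE_DBSTATUS_CACHE_USED, &cur, &hi, 0)==SQLITE_OK ){
    printf("pager cache: %d bytes\n", cur);
  }

  /* Lookaside hits: only the high-water value is meaningful. */
  if( sqlite3_db_status(db, SQLITE_DBSTATUS_LOOKASIDE_HIT, &cur, &hi, 0)==SQLITE_OK ){
    printf("lookaside hits: %d\n", hi);
  }

  /* Cache spills hint that the page cache may be too small; a non-zero
  ** resetFlg restarts the counter after it is read. */
  if( sqlite3_db_status(db, SQLITE_DBSTATUS_CACHE_SPILL, &cur, &hi, 1)==SQLITE_OK ){
    printf("cache spills since last reset: %d\n", cur);
  }
}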
          ** @@ -9307,13 +9378,13 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** [[SQLITE_STMTSTATUS_SORT]]
          SQLITE_STMTSTATUS_SORT
          **
          ^This is the number of sort operations that have occurred. ** A non-zero value in this counter may indicate an opportunity to -** improvement performance through careful use of indices.
          +** improve performance through careful use of indices. ** ** [[SQLITE_STMTSTATUS_AUTOINDEX]]
          SQLITE_STMTSTATUS_AUTOINDEX
          **
          ^This is the number of rows inserted into transient indices that ** were created automatically in order to help joins run faster. ** A non-zero value in this counter may indicate an opportunity to -** improvement performance by adding permanent indices that do not +** improve performance by adding permanent indices that do not ** need to be reinitialized each time the statement is run.
          ** ** [[SQLITE_STMTSTATUS_VM_STEP]]
          SQLITE_STMTSTATUS_VM_STEP
          @@ -9322,19 +9393,19 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** to 2147483647. The number of virtual machine operations can be ** used as a proxy for the total work done by the prepared statement. ** If the number of virtual machine operations exceeds 2147483647 -** then the value returned by this statement status code is undefined. +** then the value returned by this statement status code is undefined. ** ** [[SQLITE_STMTSTATUS_REPREPARE]]
          SQLITE_STMTSTATUS_REPREPARE
          **
          ^This is the number of times that the prepare statement has been ** automatically regenerated due to schema changes or changes to -** [bound parameters] that might affect the query plan. +** [bound parameters] that might affect the query plan.
          ** ** [[SQLITE_STMTSTATUS_RUN]]
          SQLITE_STMTSTATUS_RUN
          **
          ^This is the number of times that the prepared statement has ** been run. A single "run" for the purposes of this counter is one ** or more calls to [sqlite3_step()] followed by a call to [sqlite3_reset()]. ** The counter is incremented on the first [sqlite3_step()] call of each -** cycle. +** cycle.
          ** ** [[SQLITE_STMTSTATUS_FILTER_MISS]] ** [[SQLITE_STMTSTATUS_FILTER HIT]] @@ -9344,7 +9415,7 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** step was bypassed because a Bloom filter returned not-found. The ** corresponding SQLITE_STMTSTATUS_FILTER_MISS value is the number of ** times that the Bloom filter returned a find, and thus the join step -** had to be processed as normal. +** had to be processed as normal. ** ** [[SQLITE_STMTSTATUS_MEMUSED]]
          SQLITE_STMTSTATUS_MEMUSED
          **
          ^This is the approximate number of bytes of heap memory @@ -9449,9 +9520,9 @@ struct sqlite3_pcache_page { ** SQLite will typically create one cache instance for each open database file, ** though this is not guaranteed. ^The ** first parameter, szPage, is the size in bytes of the pages that must -** be allocated by the cache. ^szPage will always a power of two. ^The +** be allocated by the cache. ^szPage will always be a power of two. ^The ** second parameter szExtra is a number of bytes of extra storage -** associated with each page cache entry. ^The szExtra parameter will +** associated with each page cache entry. ^The szExtra parameter will be ** a number less than 250. SQLite will use the ** extra szExtra bytes on each page to store metadata about the underlying ** database page on disk. The value passed into szExtra depends @@ -9459,17 +9530,17 @@ struct sqlite3_pcache_page { ** ^The third argument to xCreate(), bPurgeable, is true if the cache being ** created will be used to cache database pages of a file stored on disk, or ** false if it is used for an in-memory database. The cache implementation -** does not have to do anything special based with the value of bPurgeable; +** does not have to do anything special based upon the value of bPurgeable; ** it is purely advisory. ^On a cache where bPurgeable is false, SQLite will ** never invoke xUnpin() except to deliberately delete a page. ** ^In other words, calls to xUnpin() on a cache with bPurgeable set to ** false will always have the "discard" flag set to true. -** ^Hence, a cache created with bPurgeable false will +** ^Hence, a cache created with bPurgeable set to false will ** never contain any unpinned pages. ** ** [[the xCachesize() page cache method]] ** ^(The xCachesize() method may be called at any time by SQLite to set the -** suggested maximum cache-size (number of pages stored by) the cache +** suggested maximum cache-size (number of pages stored) for the cache ** instance passed as the first argument. This is the value configured using ** the SQLite "[PRAGMA cache_size]" command.)^ As with the bPurgeable ** parameter, the implementation is not required to do anything with this @@ -9496,12 +9567,12 @@ struct sqlite3_pcache_page { ** implementation must return a pointer to the page buffer with its content ** intact. If the requested page is not already in the cache, then the ** cache implementation should use the value of the createFlag -** parameter to help it determined what action to take: +** parameter to help it determine what action to take: ** ** **
          createFlag Behavior when page is not already in cache **
          0 Do not allocate a new page. Return NULL. -**
          1 Allocate a new page if it easy and convenient to do so. +**
          1 Allocate a new page if it is easy and convenient to do so. ** Otherwise return NULL. **
          2 Make every effort to allocate a new page. Only return ** NULL if allocating a new page is effectively impossible. @@ -9518,7 +9589,7 @@ struct sqlite3_pcache_page { ** as its second argument. If the third parameter, discard, is non-zero, ** then the page must be evicted from the cache. ** ^If the discard parameter is -** zero, then the page may be discarded or retained at the discretion of +** zero, then the page may be discarded or retained at the discretion of the ** page cache implementation. ^The page cache implementation ** may choose to evict unpinned pages at any time. ** @@ -9536,7 +9607,7 @@ struct sqlite3_pcache_page { ** When SQLite calls the xTruncate() method, the cache must discard all ** existing cache entries with page numbers (keys) greater than or equal ** to the value of the iLimit parameter passed to xTruncate(). If any -** of these pages are pinned, they are implicitly unpinned, meaning that +** of these pages are pinned, they become implicitly unpinned, meaning that ** they can be safely discarded. ** ** [[the xDestroy() page cache method]] @@ -9716,7 +9787,7 @@ typedef struct sqlite3_backup sqlite3_backup; ** external process or via a database connection other than the one being ** used by the backup operation, then the backup will be automatically ** restarted by the next call to sqlite3_backup_step(). ^If the source -** database is modified by the using the same database connection as is used +** database is modified by using the same database connection as is used ** by the backup operation, then the backup database is automatically ** updated at the same time. ** @@ -9733,7 +9804,7 @@ typedef struct sqlite3_backup sqlite3_backup; ** and may not be used following a call to sqlite3_backup_finish(). ** ** ^The value returned by sqlite3_backup_finish is [SQLITE_OK] if no -** sqlite3_backup_step() errors occurred, regardless or whether or not +** sqlite3_backup_step() errors occurred, regardless of whether or not ** sqlite3_backup_step() completed. ** ^If an out-of-memory condition or IO error occurred during any prior ** sqlite3_backup_step() call on the same [sqlite3_backup] object, then @@ -9835,7 +9906,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p); ** application receives an SQLITE_LOCKED error, it may call the ** sqlite3_unlock_notify() method with the blocked connection handle as ** the first argument to register for a callback that will be invoked -** when the blocking connections current transaction is concluded. ^The +** when the blocking connection's current transaction is concluded. ^The ** callback is invoked from within the [sqlite3_step] or [sqlite3_close] ** call that concludes the blocking connection's transaction. ** @@ -9855,7 +9926,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p); ** blocked connection already has a registered unlock-notify callback, ** then the new callback replaces the old.)^ ^If sqlite3_unlock_notify() is ** called with a NULL pointer as its second argument, then any existing -** unlock-notify callback is canceled. ^The blocked connections +** unlock-notify callback is canceled. ^The blocked connection's ** unlock-notify callback may also be canceled by closing the blocked ** connection using [sqlite3_close()]. ** @@ -10253,7 +10324,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...); ** support constraints. 
In this configuration (which is the default) if ** a call to the [xUpdate] method returns [SQLITE_CONSTRAINT], then the entire ** statement is rolled back as if [ON CONFLICT | OR ABORT] had been -** specified as part of the users SQL statement, regardless of the actual +** specified as part of the user's SQL statement, regardless of the actual ** ON CONFLICT mode specified. ** ** If X is non-zero, then the virtual table implementation guarantees @@ -10287,7 +10358,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...); ** [[SQLITE_VTAB_INNOCUOUS]]
          SQLITE_VTAB_INNOCUOUS
          **
          Calls of the form ** [sqlite3_vtab_config](db,SQLITE_VTAB_INNOCUOUS) from within the -** the [xConnect] or [xCreate] methods of a [virtual table] implementation +** [xConnect] or [xCreate] methods of a [virtual table] implementation ** identify that virtual table as being safe to use from within triggers ** and views. Conceptually, the SQLITE_VTAB_INNOCUOUS tag means that the ** virtual table can do no serious harm even if it is controlled by a @@ -10455,7 +10526,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int); **
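/* Editor's note: illustrative sketch, not part of the vendored diff.  It
** shows the sqlite3_vtab_config() calls discussed above being made from a
** hypothetical xConnect implementation ("demoConnect"); the declared schema
** and allocation strategy are placeholders for the example. */
#include <string.h>
#include "sqlite3.h"

static int demoConnect(
  sqlite3 *db, void *pAux,
  int argc, const char *const*argv,
  sqlite3_vtab **ppVtab, char **pzErr
){
  int rc = sqlite3_declare_vtab(db, "CREATE TABLE x(key, value)");
  if( rc!=SQLITE_OK ) return rc;

  /* Promise that xUpdate reports SQLITE_CONSTRAINT before making any
  ** changes, so OR IGNORE / OR REPLACE can be honored. */
  sqlite3_vtab_config(db, SQLITE_VTAB_CONSTRAINT_SUPPORT, 1);

  /* Mark the table as safe to use from within triggers and views. */
  sqlite3_vtab_config(db, SQLITE_VTAB_INNOCUOUS);

  *ppVtab = sqlite3_malloc( sizeof(**ppVtab) );
  if( *ppVtab==0 ) return SQLITE_NOMEM;
  memset(*ppVtab, 0, sizeof(**ppVtab));
  (void)pAux; (void)argc; (void)argv; (void)pzErr;
  return SQLITE_OK;
}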
          ** ** ^For the purposes of comparing virtual table output values to see if the -** values are same value for sorting purposes, two NULL values are considered +** values are the same value for sorting purposes, two NULL values are considered ** to be the same. In other words, the comparison operator is "IS" ** (or "IS NOT DISTINCT FROM") and not "==". ** @@ -10465,7 +10536,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int); ** ** ^A virtual table implementation is always free to return rows in any order ** it wants, as long as the "orderByConsumed" flag is not set. ^When the -** the "orderByConsumed" flag is unset, the query planner will add extra +** "orderByConsumed" flag is unset, the query planner will add extra ** [bytecode] to ensure that the final results returned by the SQL query are ** ordered correctly. The use of the "orderByConsumed" flag and the ** sqlite3_vtab_distinct() interface is merely an optimization. ^Careful @@ -10562,7 +10633,7 @@ SQLITE_API int sqlite3_vtab_in(sqlite3_index_info*, int iCons, int bHandle); ** sqlite3_vtab_in_next(X,P) should be one of the parameters to the ** xFilter method which invokes these routines, and specifically ** a parameter that was previously selected for all-at-once IN constraint -** processing use the [sqlite3_vtab_in()] interface in the +** processing using the [sqlite3_vtab_in()] interface in the ** [xBestIndex|xBestIndex method]. ^(If the X parameter is not ** an xFilter argument that was selected for all-at-once IN constraint ** processing, then these routines return [SQLITE_ERROR].)^ @@ -10617,7 +10688,7 @@ SQLITE_API int sqlite3_vtab_in_next(sqlite3_value *pVal, sqlite3_value **ppOut); ** and only if *V is set to a value. ^The sqlite3_vtab_rhs_value(P,J,V) ** inteface returns SQLITE_NOTFOUND if the right-hand side of the J-th ** constraint is not available. ^The sqlite3_vtab_rhs_value() interface -** can return an result code other than SQLITE_OK or SQLITE_NOTFOUND if +** can return a result code other than SQLITE_OK or SQLITE_NOTFOUND if ** something goes wrong. ** ** The sqlite3_vtab_rhs_value() interface is usually only successful if @@ -10645,8 +10716,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** KEYWORDS: {conflict resolution mode} ** ** These constants are returned by [sqlite3_vtab_on_conflict()] to -** inform a [virtual table] implementation what the [ON CONFLICT] mode -** is for the SQL statement being evaluated. +** inform a [virtual table] implementation of the [ON CONFLICT] mode +** for the SQL statement being evaluated. ** ** Note that the [SQLITE_IGNORE] constant is also used as a potential ** return value from the [sqlite3_set_authorizer()] callback and that @@ -10686,39 +10757,39 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** [[SQLITE_SCANSTAT_EST]]
          SQLITE_SCANSTAT_EST
          **
          ^The "double" variable pointed to by the V parameter will be set to the ** query planner's estimate for the average number of rows output from each -** iteration of the X-th loop. If the query planner's estimates was accurate, +** iteration of the X-th loop. If the query planner's estimate was accurate, ** then this value will approximate the quotient NVISIT/NLOOP and the ** product of this value for all prior loops with the same SELECTID will -** be the NLOOP value for the current loop. +** be the NLOOP value for the current loop.
          ** ** [[SQLITE_SCANSTAT_NAME]]
          SQLITE_SCANSTAT_NAME
          **
          ^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the name of the index or table -** used for the X-th loop. +** used for the X-th loop.
          ** ** [[SQLITE_SCANSTAT_EXPLAIN]]
          SQLITE_SCANSTAT_EXPLAIN
          **
          ^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the [EXPLAIN QUERY PLAN] -** description for the X-th loop. +** description for the X-th loop.
          ** ** [[SQLITE_SCANSTAT_SELECTID]]
          SQLITE_SCANSTAT_SELECTID
          **
          ^The "int" variable pointed to by the V parameter will be set to the ** id for the X-th query plan element. The id value is unique within the ** statement. The select-id is the same value as is output in the first -** column of an [EXPLAIN QUERY PLAN] query. +** column of an [EXPLAIN QUERY PLAN] query.
          ** ** [[SQLITE_SCANSTAT_PARENTID]]
          SQLITE_SCANSTAT_PARENTID
          **
          The "int" variable pointed to by the V parameter will be set to the -** the id of the parent of the current query element, if applicable, or +** id of the parent of the current query element, if applicable, or ** to zero if the query element has no parent. This is the same value as -** returned in the second column of an [EXPLAIN QUERY PLAN] query. +** returned in the second column of an [EXPLAIN QUERY PLAN] query.
          ** ** [[SQLITE_SCANSTAT_NCYCLE]]
          SQLITE_SCANSTAT_NCYCLE
          **
          The sqlite3_int64 output value is set to the number of cycles, ** according to the processor time-stamp counter, that elapsed while the ** query element was being processed. This value is not available for ** all query elements - if it is unavailable the output variable is -** set to -1. +** set to -1.
          ** */ #define SQLITE_SCANSTAT_NLOOP 0 @@ -10759,8 +10830,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** sqlite3_stmt_scanstatus_v2() with a zeroed flags parameter. ** ** Parameter "idx" identifies the specific query element to retrieve statistics -** for. Query elements are numbered starting from zero. A value of -1 may be -** to query for statistics regarding the entire query. ^If idx is out of range +** for. Query elements are numbered starting from zero. A value of -1 may +** retrieve statistics for the entire query. ^If idx is out of range ** - less than -1 or greater than or equal to the total number of query ** elements used to implement the statement - a non-zero value is returned and ** the variable that pOut points to is unchanged. @@ -10803,7 +10874,7 @@ SQLITE_API void sqlite3_stmt_scanstatus_reset(sqlite3_stmt*); ** METHOD: sqlite3 ** ** ^If a write-transaction is open on [database connection] D when the -** [sqlite3_db_cacheflush(D)] interface invoked, any dirty +** [sqlite3_db_cacheflush(D)] interface is invoked, any dirty ** pages in the pager-cache that are not currently in use are written out ** to disk. A dirty page may be in use if a database cursor created by an ** active SQL statement is reading from it, or if it is page 1 of a database @@ -10917,8 +10988,8 @@ SQLITE_API int sqlite3_db_cacheflush(sqlite3*); ** triggers; and so forth. ** ** When the [sqlite3_blob_write()] API is used to update a blob column, -** the pre-update hook is invoked with SQLITE_DELETE. This is because the -** in this case the new values are not available. In this case, when a +** the pre-update hook is invoked with SQLITE_DELETE, because +** the new values are not yet available. In this case, when a ** callback made with op==SQLITE_DELETE is actually a write using the ** sqlite3_blob_write() API, the [sqlite3_preupdate_blobwrite()] returns ** the index of the column being written. In other cases, where the @@ -11171,7 +11242,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c ** For an ordinary on-disk database file, the serialization is just a ** copy of the disk file. For an in-memory database or a "TEMP" database, ** the serialization is the same sequence of bytes which would be written -** to disk if that database where backed up to disk. +** to disk if that database were backed up to disk. ** ** The usual case is that sqlite3_serialize() copies the serialization of ** the database into memory obtained from [sqlite3_malloc64()] and returns @@ -11180,7 +11251,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c ** contains the SQLITE_SERIALIZE_NOCOPY bit, then no memory allocations ** are made, and the sqlite3_serialize() function will return a pointer ** to the contiguous memory representation of the database that SQLite -** is currently using for that database, or NULL if the no such contiguous +** is currently using for that database, or NULL if no such contiguous ** memory representation of the database exists. A contiguous memory ** representation of the database will usually only exist if there has ** been a prior call to [sqlite3_deserialize(D,S,...)] with the same @@ -11251,7 +11322,7 @@ SQLITE_API unsigned char *sqlite3_serialize( ** database is currently in a read transaction or is involved in a backup ** operation. ** -** It is not possible to deserialized into the TEMP database. If the +** It is not possible to deserialize into the TEMP database. 
If the ** S argument to sqlite3_deserialize(D,S,P,N,M,F) is "temp" then the ** function returns SQLITE_ERROR. ** @@ -11273,7 +11344,7 @@ SQLITE_API int sqlite3_deserialize( sqlite3 *db, /* The database connection */ const char *zSchema, /* Which DB to reopen with the deserialization */ unsigned char *pData, /* The serialized database content */ - sqlite3_int64 szDb, /* Number bytes in the deserialization */ + sqlite3_int64 szDb, /* Number of bytes in the deserialization */ sqlite3_int64 szBuf, /* Total size of buffer pData[] */ unsigned mFlags /* Zero or more SQLITE_DESERIALIZE_* flags */ ); @@ -11281,7 +11352,7 @@ SQLITE_API int sqlite3_deserialize( /* ** CAPI3REF: Flags for sqlite3_deserialize() ** -** The following are allowed values for 6th argument (the F argument) to +** The following are allowed values for the 6th argument (the F argument) to ** the [sqlite3_deserialize(D,S,P,N,M,F)] interface. ** ** The SQLITE_DESERIALIZE_FREEONCLOSE means that the database serialization @@ -11806,9 +11877,10 @@ SQLITE_API void sqlite3session_table_filter( ** is inserted while a session object is enabled, then later deleted while ** the same session object is disabled, no INSERT record will appear in the ** changeset, even though the delete took place while the session was disabled. -** Or, if one field of a row is updated while a session is disabled, and -** another field of the same row is updated while the session is enabled, the -** resulting changeset will contain an UPDATE change that updates both fields. +** Or, if one field of a row is updated while a session is enabled, and +** then another field of the same row is updated while the session is disabled, +** the resulting changeset will contain an UPDATE change that updates both +** fields. */ SQLITE_API int sqlite3session_changeset( sqlite3_session *pSession, /* Session object */ @@ -11880,8 +11952,9 @@ SQLITE_API sqlite3_int64 sqlite3session_changeset_size(sqlite3_session *pSession ** database zFrom the contents of the two compatible tables would be ** identical. ** -** It an error if database zFrom does not exist or does not contain the -** required compatible table. +** Unless the call to this function is a no-op as described above, it is an +** error if database zFrom does not exist or does not contain the required +** compatible table. ** ** If the operation is successful, SQLITE_OK is returned. Otherwise, an SQLite ** error code. In this case, if argument pzErrMsg is not NULL, *pzErrMsg @@ -12016,7 +12089,7 @@ SQLITE_API int sqlite3changeset_start_v2( ** The following flags may passed via the 4th parameter to ** [sqlite3changeset_start_v2] and [sqlite3changeset_start_v2_strm]: ** -**
          SQLITE_CHANGESETAPPLY_INVERT
          +**
          SQLITE_CHANGESETSTART_INVERT
          ** Invert the changeset while iterating through it. This is equivalent to ** inverting a changeset using sqlite3changeset_invert() before applying it. ** It is an error to specify this flag with a patchset. @@ -12331,19 +12404,6 @@ SQLITE_API int sqlite3changeset_concat( void **ppOut /* OUT: Buffer containing output changeset */ ); - -/* -** CAPI3REF: Upgrade the Schema of a Changeset/Patchset -*/ -SQLITE_API int sqlite3changeset_upgrade( - sqlite3 *db, - const char *zDb, - int nIn, const void *pIn, /* Input changeset */ - int *pnOut, void **ppOut /* OUT: Inverse of input */ -); - - - /* ** CAPI3REF: Changegroup Handle ** @@ -14091,14 +14151,22 @@ struct fts5_api { ** * Terms in the GROUP BY or ORDER BY clauses of a SELECT statement. ** * Terms in the VALUES clause of an INSERT statement ** -** The hard upper limit here is 32676. Most database people will +** The hard upper limit here is 32767. Most database people will ** tell you that in a well-normalized database, you usually should ** not have more than a dozen or so columns in any table. And if ** that is the case, there is no point in having more than a few ** dozen values in any of the other situations described above. +** +** An index can only have SQLITE_MAX_COLUMN columns from the user +** point of view, but the underlying b-tree that implements the index +** might have up to twice as many columns in a WITHOUT ROWID table, +** since must also store the primary key at the end. Hence the +** column count for Index is u16 instead of i16. */ -#ifndef SQLITE_MAX_COLUMN +#if !defined(SQLITE_MAX_COLUMN) # define SQLITE_MAX_COLUMN 2000 +#elif SQLITE_MAX_COLUMN>32767 +# error SQLITE_MAX_COLUMN may not exceed 32767 #endif /* @@ -14750,6 +14818,7 @@ struct HashElem { HashElem *next, *prev; /* Next and previous elements in the table */ void *data; /* Data associated with this element */ const char *pKey; /* Key associated with this element */ + unsigned int h; /* hash for pKey */ }; /* @@ -15110,7 +15179,17 @@ SQLITE_PRIVATE void sqlite3HashClear(Hash*); ** ourselves. */ #ifndef offsetof -#define offsetof(STRUCTURE,FIELD) ((int)((char*)&((STRUCTURE*)0)->FIELD)) +#define offsetof(STRUCTURE,FIELD) ((size_t)((char*)&((STRUCTURE*)0)->FIELD)) +#endif + +/* +** Work around C99 "flex-array" syntax for pre-C99 compilers, so as +** to avoid complaints from -fsanitize=strict-bounds. +*/ +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) +# define FLEXARRAY +#else +# define FLEXARRAY 1 #endif /* @@ -15188,6 +15267,11 @@ typedef INT16_TYPE i16; /* 2-byte signed integer */ typedef UINT8_TYPE u8; /* 1-byte unsigned integer */ typedef INT8_TYPE i8; /* 1-byte signed integer */ +/* A bitfield type for use inside of structures. Always follow with :N where +** N is the number of bits. +*/ +typedef unsigned bft; /* Bit Field Type */ + /* ** SQLITE_MAX_U32 is a u64 constant that is the maximum u64 value ** that can be stored in a u32 without loss of data. The value @@ -15356,6 +15440,14 @@ typedef INT16_TYPE LogEst; #define LARGEST_UINT64 (0xffffffff|(((u64)0xffffffff)<<32)) #define SMALLEST_INT64 (((i64)-1) - LARGEST_INT64) +/* +** Macro SMXV(n) return the maximum value that can be held in variable n, +** assuming n is a signed integer type. UMXV(n) is similar for unsigned +** integer types. +*/ +#define SMXV(n) ((((i64)1)<<(sizeof(n)*8-1))-1) +#define UMXV(n) ((((i64)1)<<(sizeof(n)*8))-1) + /* ** Round up a number to the next larger multiple of 8. This is used ** to force 8-byte alignment on 64-bit architectures. 
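/* Editor's note: illustrative sketch, not part of the vendored diff.  It
** restates the new SMXV()/UMXV() helpers shown above using <stdint.h> names
** to make explicit what they evaluate to for small integer fields; the
** assertions exist only for this example. */
#include <assert.h>
#include <stdint.h>

#define SMXV(n) ((((int64_t)1)<<(sizeof(n)*8-1))-1)  /* max value of signed n */
#define UMXV(n) ((((int64_t)1)<<(sizeof(n)*8))-1)    /* max value of unsigned n */

static void smxv_demo(void){
  int16_t  nKeyCol = 0;            /* stands in for an i16 struct field */
  uint16_t nColumn = 0;            /* stands in for a u16 struct field */
  uint8_t  flags   = 0;
  assert( SMXV(nKeyCol)==32767 );  /* 2^15 - 1 */
  assert( UMXV(nColumn)==65535 );  /* 2^16 - 1 */
  assert( UMXV(flags)==255 );      /* 2^8  - 1 */
  (void)nKeyCol; (void)nColumn; (void)flags;
}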
@@ -17332,8 +17424,8 @@ SQLITE_PRIVATE int sqlite3NotPureFunc(sqlite3_context*); SQLITE_PRIVATE int sqlite3VdbeBytecodeVtabInit(sqlite3*); #endif -/* Use SQLITE_ENABLE_COMMENTS to enable generation of extra comments on -** each VDBE opcode. +/* Use SQLITE_ENABLE_EXPLAIN_COMMENTS to enable generation of extra +** comments on each VDBE opcode. ** ** Use the SQLITE_ENABLE_MODULE_COMMENTS macro to see some extra no-op ** comments in VDBE programs that show key decision points in the code @@ -18056,6 +18148,10 @@ struct sqlite3 { Savepoint *pSavepoint; /* List of active savepoints */ int nAnalysisLimit; /* Number of index rows to ANALYZE */ int busyTimeout; /* Busy handler timeout, in msec */ +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + int setlkTimeout; /* Blocking lock timeout, in msec. -1 -> inf. */ + int setlkFlags; /* Flags passed to setlk_timeout() */ +#endif int nSavepoint; /* Number of non-transaction savepoints */ int nStatement; /* Number of nested statement-transactions */ i64 nDeferredCons; /* Net deferred constraints this transaction. */ @@ -18610,6 +18706,7 @@ struct CollSeq { #define SQLITE_AFF_INTEGER 0x44 /* 'D' */ #define SQLITE_AFF_REAL 0x45 /* 'E' */ #define SQLITE_AFF_FLEXNUM 0x46 /* 'F' */ +#define SQLITE_AFF_DEFER 0x58 /* 'X' - defer computation until later */ #define sqlite3IsNumericAffinity(X) ((X)>=SQLITE_AFF_NUMERIC) @@ -18734,6 +18831,7 @@ struct Table { } u; Trigger *pTrigger; /* List of triggers on this object */ Schema *pSchema; /* Schema that contains this table */ + u8 aHx[16]; /* Column aHt[K%sizeof(aHt)] might have hash K */ }; /* @@ -18867,9 +18965,13 @@ struct FKey { struct sColMap { /* Mapping of columns in pFrom to columns in zTo */ int iFrom; /* Index of column in pFrom */ char *zCol; /* Name of column in zTo. If NULL use PRIMARY KEY */ - } aCol[1]; /* One entry for each of nCol columns */ + } aCol[FLEXARRAY]; /* One entry for each of nCol columns */ }; +/* The size (in bytes) of an FKey object holding N columns. The answer +** does NOT include space to hold the zTo name. */ +#define SZ_FKEY(N) (offsetof(FKey,aCol)+(N)*sizeof(struct sColMap)) + /* ** SQLite supports many different ways to resolve a constraint ** error. ROLLBACK processing means that a constraint violation @@ -18931,9 +19033,12 @@ struct KeyInfo { u16 nAllField; /* Total columns, including key plus others */ sqlite3 *db; /* The database connection */ u8 *aSortFlags; /* Sort order for each column. */ - CollSeq *aColl[1]; /* Collating sequence for each term of the key */ + CollSeq *aColl[FLEXARRAY]; /* Collating sequence for each term of the key */ }; +/* The size (in bytes) of a KeyInfo object with up to N fields */ +#define SZ_KEYINFO(N) (offsetof(KeyInfo,aColl) + (N)*sizeof(CollSeq*)) + /* ** Allowed bit values for entries in the KeyInfo.aSortFlags[] array. */ @@ -19053,7 +19158,7 @@ struct Index { Pgno tnum; /* DB Page containing root of this index */ LogEst szIdxRow; /* Estimated average row size in bytes */ u16 nKeyCol; /* Number of columns forming the key */ - u16 nColumn; /* Number of columns stored in the index */ + u16 nColumn; /* Nr columns in btree. 
Can be 2*Table.nCol */ u8 onError; /* OE_Abort, OE_Ignore, OE_Replace, or OE_None */ unsigned idxType:2; /* 0:Normal 1:UNIQUE, 2:PRIMARY KEY, 3:IPK */ unsigned bUnordered:1; /* Use this index for == or IN queries only */ @@ -19062,7 +19167,6 @@ struct Index { unsigned isCovering:1; /* True if this is a covering index */ unsigned noSkipScan:1; /* Do not try to use skip-scan if true */ unsigned hasStat1:1; /* aiRowLogEst values come from sqlite_stat1 */ - unsigned bLowQual:1; /* sqlite_stat1 says this is a low-quality index */ unsigned bNoQuery:1; /* Do not use this index to optimize queries */ unsigned bAscKeyBug:1; /* True if the bba7b69f9849b5bf bug applies */ unsigned bHasVCol:1; /* Index references one or more VIRTUAL columns */ @@ -19152,7 +19256,7 @@ struct AggInfo { ** from source tables rather than from accumulators */ u8 useSortingIdx; /* In direct mode, reference the sorting index rather ** than the source table */ - u16 nSortingColumn; /* Number of columns in the sorting index */ + u32 nSortingColumn; /* Number of columns in the sorting index */ int sortingIdx; /* Cursor number of the sorting index */ int sortingIdxPTab; /* Cursor number of pseudo-table */ int iFirstReg; /* First register in range for aCol[] and aFunc[] */ @@ -19161,8 +19265,8 @@ struct AggInfo { Table *pTab; /* Source table */ Expr *pCExpr; /* The original expression */ int iTable; /* Cursor number of the source table */ - i16 iColumn; /* Column number within the source table */ - i16 iSorterColumn; /* Column number in the sorting index */ + int iColumn; /* Column number within the source table */ + int iSorterColumn; /* Column number in the sorting index */ } *aCol; int nColumn; /* Number of used entries in aCol[] */ int nAccumulator; /* Number of columns that show through to the output. @@ -19391,10 +19495,10 @@ struct Expr { /* Macros can be used to test, set, or clear bits in the ** Expr.flags field. */ -#define ExprHasProperty(E,P) (((E)->flags&(P))!=0) -#define ExprHasAllProperty(E,P) (((E)->flags&(P))==(P)) -#define ExprSetProperty(E,P) (E)->flags|=(P) -#define ExprClearProperty(E,P) (E)->flags&=~(P) +#define ExprHasProperty(E,P) (((E)->flags&(u32)(P))!=0) +#define ExprHasAllProperty(E,P) (((E)->flags&(u32)(P))==(u32)(P)) +#define ExprSetProperty(E,P) (E)->flags|=(u32)(P) +#define ExprClearProperty(E,P) (E)->flags&=~(u32)(P) #define ExprAlwaysTrue(E) (((E)->flags&(EP_OuterON|EP_IsTrue))==EP_IsTrue) #define ExprAlwaysFalse(E) (((E)->flags&(EP_OuterON|EP_IsFalse))==EP_IsFalse) #define ExprIsFullSize(E) (((E)->flags&(EP_Reduced|EP_TokenOnly))==0) @@ -19506,9 +19610,14 @@ struct ExprList { int iConstExprReg; /* Register in which Expr value is cached. Used only ** by Parse.pConstExpr */ } u; - } a[1]; /* One slot for each expression in the list */ + } a[FLEXARRAY]; /* One slot for each expression in the list */ }; +/* The size (in bytes) of an ExprList object that is big enough to hold +** as many as N expressions. */ +#define SZ_EXPRLIST(N) \ + (offsetof(ExprList,a) + (N)*sizeof(struct ExprList_item)) + /* ** Allowed values for Expr.a.eEName */ @@ -19536,9 +19645,12 @@ struct IdList { int nId; /* Number of identifiers on the list */ struct IdList_item { char *zName; /* Name of the identifier */ - } a[1]; + } a[FLEXARRAY]; }; +/* The size (in bytes) of an IdList object that can hold up to N IDs. */ +#define SZ_IDLIST(N) (offsetof(IdList,a)+(N)*sizeof(struct IdList_item)) + /* ** Allowed values for IdList.eType, which determines which value of the a.u4 ** is valid. 
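/* Editor's note: illustrative sketch, not part of the vendored diff.  It
** shows the allocation pattern behind the FLEXARRAY/SZ_*() conversions above
** using a stand-in struct rather than the real ExprList/IdList/KeyInfo
** objects, so the field names here are hypothetical. */
#include <stddef.h>
#include <stdlib.h>

#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)
# define FLEXARRAY            /* C99 flexible array member: "a[]" */
#else
# define FLEXARRAY 1          /* pre-C99 fallback: "a[1]" */
#endif

typedef struct DemoList DemoList;
struct DemoList {
  int nItem;                  /* number of used slots in aItem[] */
  int aItem[FLEXARRAY];       /* allocated large enough for nItem entries */
};

/* Size (in bytes) of a DemoList big enough for N items.  Using offsetof()
** rather than sizeof(DemoList) avoids counting the aItem[1] placeholder
** twice under the pre-C99 fallback. */
#define SZ_DEMOLIST(N) (offsetof(DemoList,aItem) + (N)*sizeof(int))

static DemoList *demoListAlloc(int N){
  DemoList *p = (DemoList*)malloc( SZ_DEMOLIST(N) );
  if( p ) p->nItem = 0;
  return p;
}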
@@ -19658,11 +19770,19 @@ struct OnOrUsing { ** */ struct SrcList { - int nSrc; /* Number of tables or subqueries in the FROM clause */ - u32 nAlloc; /* Number of entries allocated in a[] below */ - SrcItem a[1]; /* One entry for each identifier on the list */ + int nSrc; /* Number of tables or subqueries in the FROM clause */ + u32 nAlloc; /* Number of entries allocated in a[] below */ + SrcItem a[FLEXARRAY]; /* One entry for each identifier on the list */ }; +/* Size (in bytes) of a SrcList object that can hold as many as N +** SrcItem objects. */ +#define SZ_SRCLIST(N) (offsetof(SrcList,a)+(N)*sizeof(SrcItem)) + +/* Size (in bytes( of a SrcList object that holds 1 SrcItem. This is a +** special case of SZ_SRCITEM(1) that comes up often. */ +#define SZ_SRCLIST_1 (offsetof(SrcList,a)+sizeof(SrcItem)) + /* ** Permitted values of the SrcList.a.jointype field */ @@ -20131,25 +20251,32 @@ struct Parse { char *zErrMsg; /* An error message */ Vdbe *pVdbe; /* An engine for executing database bytecode */ int rc; /* Return code from execution */ - u8 colNamesSet; /* TRUE after OP_ColumnName has been issued to pVdbe */ - u8 checkSchema; /* Causes schema cookie check after an error */ + LogEst nQueryLoop; /* Est number of iterations of a query (10*log2(N)) */ u8 nested; /* Number of nested calls to the parser/code generator */ u8 nTempReg; /* Number of temporary registers in aTempReg[] */ u8 isMultiWrite; /* True if statement may modify/insert multiple rows */ u8 mayAbort; /* True if statement may throw an ABORT exception */ u8 hasCompound; /* Need to invoke convertCompoundSelectToSubquery() */ - u8 okConstFactor; /* OK to factor out constants */ u8 disableLookaside; /* Number of times lookaside has been disabled */ u8 prepFlags; /* SQLITE_PREPARE_* flags */ u8 withinRJSubrtn; /* Nesting level for RIGHT JOIN body subroutines */ - u8 bHasWith; /* True if statement contains WITH */ u8 mSubrtnSig; /* mini Bloom filter on available SubrtnSig.selId */ + u8 eTriggerOp; /* TK_UPDATE, TK_INSERT or TK_DELETE */ + u8 bReturning; /* Coding a RETURNING trigger */ + u8 eOrconf; /* Default ON CONFLICT policy for trigger steps */ + u8 disableTriggers; /* True to disable triggers */ #if defined(SQLITE_DEBUG) || defined(SQLITE_COVERAGE_TEST) u8 earlyCleanup; /* OOM inside sqlite3ParserAddCleanup() */ #endif #ifdef SQLITE_DEBUG u8 ifNotExists; /* Might be true if IF NOT EXISTS. Assert()s only */ + u8 isCreate; /* CREATE TABLE, INDEX, or VIEW (but not TRIGGER) + ** and ALTER TABLE ADD COLUMN. 
*/ #endif + bft colNamesSet :1; /* TRUE after OP_ColumnName has been issued to pVdbe */ + bft bHasWith :1; /* True if statement contains WITH */ + bft okConstFactor :1; /* OK to factor out constants */ + bft checkSchema :1; /* Causes schema cookie check after an error */ int nRangeReg; /* Size of the temporary register block */ int iRangeReg; /* First register in temporary register block */ int nErr; /* Number of errors seen */ @@ -20164,12 +20291,9 @@ struct Parse { ExprList *pConstExpr;/* Constant expressions */ IndexedExpr *pIdxEpr;/* List of expressions used by active indexes */ IndexedExpr *pIdxPartExpr; /* Exprs constrained by index WHERE clauses */ - Token constraintName;/* Name of the constraint currently being parsed */ yDbMask writeMask; /* Start a write transaction on these databases */ yDbMask cookieMask; /* Bitmask of schema verified databases */ - int regRowid; /* Register holding rowid of CREATE TABLE entry */ - int regRoot; /* Register holding root page number for new objects */ - int nMaxArg; /* Max args passed to user function by sub-program */ + int nMaxArg; /* Max args to xUpdate and xFilter vtab methods */ int nSelect; /* Number of SELECT stmts. Counter for Select.selId */ #ifndef SQLITE_OMIT_PROGRESS_CALLBACK u32 nProgressSteps; /* xProgress steps taken during sqlite3_prepare() */ @@ -20183,17 +20307,6 @@ struct Parse { Table *pTriggerTab; /* Table triggers are being coded for */ TriggerPrg *pTriggerPrg; /* Linked list of coded triggers */ ParseCleanup *pCleanup; /* List of cleanup operations to run after parse */ - union { - int addrCrTab; /* Address of OP_CreateBtree on CREATE TABLE */ - Returning *pReturning; /* The RETURNING clause */ - } u1; - u32 oldmask; /* Mask of old.* columns referenced */ - u32 newmask; /* Mask of new.* columns referenced */ - LogEst nQueryLoop; /* Est number of iterations of a query (10*log2(N)) */ - u8 eTriggerOp; /* TK_UPDATE, TK_INSERT or TK_DELETE */ - u8 bReturning; /* Coding a RETURNING trigger */ - u8 eOrconf; /* Default ON CONFLICT policy for trigger steps */ - u8 disableTriggers; /* True to disable triggers */ /************************************************************************** ** Fields above must be initialized to zero. The fields that follow, @@ -20205,6 +20318,19 @@ struct Parse { int aTempReg[8]; /* Holding area for temporary registers */ Parse *pOuterParse; /* Outer Parse object when nested */ Token sNameToken; /* Token with unqualified schema object name */ + u32 oldmask; /* Mask of old.* columns referenced */ + u32 newmask; /* Mask of new.* columns referenced */ + union { + struct { /* These fields available when isCreate is true */ + int addrCrTab; /* Address of OP_CreateBtree on CREATE TABLE */ + int regRowid; /* Register holding rowid of CREATE TABLE entry */ + int regRoot; /* Register holding root page for new objects */ + Token constraintName; /* Name of the constraint currently being parsed */ + } cr; + struct { /* These fields available to all other statements */ + Returning *pReturning; /* The RETURNING clause */ + } d; + } u1; /************************************************************************ ** Above is constant between recursions. Below is reset before and after @@ -20720,9 +20846,13 @@ struct With { int nCte; /* Number of CTEs in the WITH clause */ int bView; /* Belongs to the outermost Select of a view */ With *pOuter; /* Containing WITH clause, or NULL */ - Cte a[1]; /* For each CTE in the WITH clause.... */ + Cte a[FLEXARRAY]; /* For each CTE in the WITH clause.... 
*/ }; +/* The size (in bytes) of a With object that can hold as many +** as N different CTEs. */ +#define SZ_WITH(N) (offsetof(With,a) + (N)*sizeof(Cte)) + /* ** The Cte object is not guaranteed to persist for the entire duration ** of code generation. (The query flattener or other parser tree @@ -20751,9 +20881,13 @@ struct DbClientData { DbClientData *pNext; /* Next in a linked list */ void *pData; /* The data */ void (*xDestructor)(void*); /* Destructor. Might be NULL */ - char zName[1]; /* Name of this client data. MUST BE LAST */ + char zName[FLEXARRAY]; /* Name of this client data. MUST BE LAST */ }; +/* The size (in bytes) of a DbClientData object that can has a name +** that is N bytes long, including the zero-terminator. */ +#define SZ_DBCLIENTDATA(N) (offsetof(DbClientData,zName)+(N)) + #ifdef SQLITE_DEBUG /* ** An instance of the TreeView object is used for printing the content of @@ -21196,7 +21330,7 @@ SQLITE_PRIVATE void sqlite3SubqueryColumnTypes(Parse*,Table*,Select*,char); SQLITE_PRIVATE Table *sqlite3ResultSetOfSelect(Parse*,Select*,char); SQLITE_PRIVATE void sqlite3OpenSchemaTable(Parse *, int); SQLITE_PRIVATE Index *sqlite3PrimaryKeyIndex(Table*); -SQLITE_PRIVATE i16 sqlite3TableColumnToIndex(Index*, i16); +SQLITE_PRIVATE int sqlite3TableColumnToIndex(Index*, int); #ifdef SQLITE_OMIT_GENERATED_COLUMNS # define sqlite3TableColumnToStorage(T,X) (X) /* No-op pass-through */ # define sqlite3StorageColumnToTable(T,X) (X) /* No-op pass-through */ @@ -21294,7 +21428,7 @@ SQLITE_PRIVATE void sqlite3SrcListAssignCursors(Parse*, SrcList*); SQLITE_PRIVATE void sqlite3IdListDelete(sqlite3*, IdList*); SQLITE_PRIVATE void sqlite3ClearOnOrUsing(sqlite3*, OnOrUsing*); SQLITE_PRIVATE void sqlite3SrcListDelete(sqlite3*, SrcList*); -SQLITE_PRIVATE Index *sqlite3AllocateIndexObject(sqlite3*,i16,int,char**); +SQLITE_PRIVATE Index *sqlite3AllocateIndexObject(sqlite3*,int,int,char**); SQLITE_PRIVATE void sqlite3CreateIndex(Parse*,Token*,Token*,SrcList*,ExprList*,int,Token*, Expr*, int, int, u8); SQLITE_PRIVATE void sqlite3DropIndex(Parse*, SrcList*, int); @@ -21430,7 +21564,8 @@ SQLITE_PRIVATE Select *sqlite3SelectDup(sqlite3*,const Select*,int); SQLITE_PRIVATE FuncDef *sqlite3FunctionSearch(int,const char*); SQLITE_PRIVATE void sqlite3InsertBuiltinFuncs(FuncDef*,int); SQLITE_PRIVATE FuncDef *sqlite3FindFunction(sqlite3*,const char*,int,u8,u8); -SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum*,sqlite3_value*); +SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum*,sqlite3_value*,int); +SQLITE_PRIVATE int sqlite3AppendOneUtf8Character(char*, u32); SQLITE_PRIVATE void sqlite3RegisterBuiltinFunctions(void); SQLITE_PRIVATE void sqlite3RegisterDateTimeFunctions(void); SQLITE_PRIVATE void sqlite3RegisterJsonFunctions(void); @@ -22295,6 +22430,9 @@ static const char * const sqlite3azCompileOpt[] = { #ifdef SQLITE_BUG_COMPATIBLE_20160819 "BUG_COMPATIBLE_20160819", #endif +#ifdef SQLITE_BUG_COMPATIBLE_20250510 + "BUG_COMPATIBLE_20250510", +#endif #ifdef SQLITE_CASE_SENSITIVE_LIKE "CASE_SENSITIVE_LIKE", #endif @@ -22531,6 +22669,9 @@ static const char * const sqlite3azCompileOpt[] = { #ifdef SQLITE_ENABLE_SESSION "ENABLE_SESSION", #endif +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + "ENABLE_SETLK_TIMEOUT", +#endif #ifdef SQLITE_ENABLE_SNAPSHOT "ENABLE_SNAPSHOT", #endif @@ -22585,6 +22726,9 @@ static const char * const sqlite3azCompileOpt[] = { #ifdef SQLITE_EXTRA_INIT "EXTRA_INIT=" CTIMEOPT_VAL(SQLITE_EXTRA_INIT), #endif +#ifdef SQLITE_EXTRA_INIT_MUTEXED + "EXTRA_INIT_MUTEXED=" 
CTIMEOPT_VAL(SQLITE_EXTRA_INIT_MUTEXED), +#endif #ifdef SQLITE_EXTRA_SHUTDOWN "EXTRA_SHUTDOWN=" CTIMEOPT_VAL(SQLITE_EXTRA_SHUTDOWN), #endif @@ -23569,12 +23713,19 @@ struct VdbeCursor { #endif VdbeTxtBlbCache *pCache; /* Cache of large TEXT or BLOB values */ - /* 2*nField extra array elements allocated for aType[], beyond the one - ** static element declared in the structure. nField total array slots for - ** aType[] and nField+1 array slots for aOffset[] */ - u32 aType[1]; /* Type values record decode. MUST BE LAST */ + /* Space is allocated for aType to hold at least 2*nField+1 entries: + ** nField slots for aType[] and nField+1 array slots for aOffset[] */ + u32 aType[FLEXARRAY]; /* Type values record decode. MUST BE LAST */ }; +/* +** The size (in bytes) of a VdbeCursor object that has an nField value of N +** or less. The value of SZ_VDBECURSOR(n) is guaranteed to be a multiple +** of 8. +*/ +#define SZ_VDBECURSOR(N) \ + (ROUND8(offsetof(VdbeCursor,aType)) + ((N)+1)*sizeof(u64)) + /* Return true if P is a null-only cursor */ #define IsNullCursor(P) \ @@ -23831,13 +23982,16 @@ struct sqlite3_context { u8 enc; /* Encoding to use for results */ u8 skipFlag; /* Skip accumulator loading if true */ u16 argc; /* Number of arguments */ - sqlite3_value *argv[1]; /* Argument set */ + sqlite3_value *argv[FLEXARRAY]; /* Argument set */ }; -/* A bitfield type for use inside of structures. Always follow with :N where -** N is the number of bits. +/* +** The size (in bytes) of an sqlite3_context object that holds N +** argv[] arguments. */ -typedef unsigned bft; /* Bit Field Type */ +#define SZ_CONTEXT(N) \ + (offsetof(sqlite3_context,argv)+(N)*sizeof(sqlite3_value*)) + /* The ScanStatus object holds a single value for the ** sqlite3_stmt_scanstatus() interface. @@ -23898,7 +24052,7 @@ struct Vdbe { i64 nStmtDefCons; /* Number of def. constraints when stmt started */ i64 nStmtDefImmCons; /* Number of def. imm constraints when stmt started */ Mem *aMem; /* The memory locations */ - Mem **apArg; /* Arguments to currently executing user function */ + Mem **apArg; /* Arguments xUpdate and xFilter vtab methods */ VdbeCursor **apCsr; /* One element of this array for each open cursor */ Mem *aVar; /* Values for the OP_Variable opcode. 
*/ @@ -23918,6 +24072,7 @@ struct Vdbe { #ifdef SQLITE_DEBUG int rcApp; /* errcode set by sqlite3_result_error_code() */ u32 nWrite; /* Number of write operations that have occurred */ + int napArg; /* Size of the apArg[] array */ #endif u16 nResColumn; /* Number of columns in one row of the result set */ u16 nResAlloc; /* Column slots allocated to aColName[] */ @@ -23970,7 +24125,7 @@ struct PreUpdate { VdbeCursor *pCsr; /* Cursor to read old values from */ int op; /* One of SQLITE_INSERT, UPDATE, DELETE */ u8 *aRecord; /* old.* database record */ - KeyInfo keyinfo; + KeyInfo *pKeyinfo; /* Key information */ UnpackedRecord *pUnpacked; /* Unpacked version of aRecord[] */ UnpackedRecord *pNewUnpacked; /* Unpacked version of new.* record */ int iNewReg; /* Register for new.* values */ @@ -23982,6 +24137,7 @@ struct PreUpdate { Table *pTab; /* Schema object being updated */ Index *pPk; /* PK index if pTab is WITHOUT ROWID */ sqlite3_value **apDflt; /* Array of default values, if required */ + u8 keyinfoSpace[SZ_KEYINFO(0)]; /* Space to hold pKeyinfo[0] content */ }; /* @@ -24348,8 +24504,9 @@ SQLITE_PRIVATE int sqlite3LookasideUsed(sqlite3 *db, int *pHighwater){ nInit += countLookasideSlots(db->lookaside.pSmallInit); nFree += countLookasideSlots(db->lookaside.pSmallFree); #endif /* SQLITE_OMIT_TWOSIZE_LOOKASIDE */ - if( pHighwater ) *pHighwater = db->lookaside.nSlot - nInit; - return db->lookaside.nSlot - (nInit+nFree); + assert( db->lookaside.nSlot >= nInit+nFree ); + if( pHighwater ) *pHighwater = (int)(db->lookaside.nSlot - nInit); + return (int)(db->lookaside.nSlot - (nInit+nFree)); } /* @@ -24402,7 +24559,7 @@ SQLITE_API int sqlite3_db_status( assert( (op-SQLITE_DBSTATUS_LOOKASIDE_HIT)>=0 ); assert( (op-SQLITE_DBSTATUS_LOOKASIDE_HIT)<3 ); *pCurrent = 0; - *pHighwater = db->lookaside.anStat[op - SQLITE_DBSTATUS_LOOKASIDE_HIT]; + *pHighwater = (int)db->lookaside.anStat[op-SQLITE_DBSTATUS_LOOKASIDE_HIT]; if( resetFlag ){ db->lookaside.anStat[op - SQLITE_DBSTATUS_LOOKASIDE_HIT] = 0; } @@ -25914,7 +26071,7 @@ static int daysAfterMonday(DateTime *pDate){ ** In other words, return the day of the week according ** to this code: ** -** 0=Sunday, 1=Monday, 2=Tues, ..., 6=Saturday +** 0=Sunday, 1=Monday, 2=Tuesday, ..., 6=Saturday */ static int daysAfterSunday(DateTime *pDate){ assert( pDate->validJD ); @@ -30123,6 +30280,8 @@ SQLITE_PRIVATE sqlite3_mutex_methods const *sqlite3DefaultMutex(void){ #ifdef __CYGWIN__ # include +# include /* amalgamator: dontcache */ +# include /* amalgamator: dontcache */ # include /* amalgamator: dontcache */ #endif @@ -31517,17 +31676,17 @@ SQLITE_PRIVATE int sqlite3ApiExit(sqlite3* db, int rc){ #define etPERCENT 7 /* Percent symbol. %% */ #define etCHARX 8 /* Characters. %c */ /* The rest are extensions, not normally found in printf() */ -#define etSQLESCAPE 9 /* Strings with '\'' doubled. %q */ -#define etSQLESCAPE2 10 /* Strings with '\'' doubled and enclosed in '', - NULL pointers replaced by SQL NULL. %Q */ -#define etTOKEN 11 /* a pointer to a Token structure */ -#define etSRCITEM 12 /* a pointer to a SrcItem */ -#define etPOINTER 13 /* The %p conversion */ -#define etSQLESCAPE3 14 /* %w -> Strings with '\"' doubled */ -#define etORDINAL 15 /* %r -> 1st, 2nd, 3rd, 4th, etc. English only */ -#define etDECIMAL 16 /* %d or %u, but not %x, %o */ +#define etESCAPE_q 9 /* Strings with '\'' doubled. %q */ +#define etESCAPE_Q 10 /* Strings with '\'' doubled and enclosed in '', + NULL pointers replaced by SQL NULL. 
%Q */ +#define etTOKEN 11 /* a pointer to a Token structure */ +#define etSRCITEM 12 /* a pointer to a SrcItem */ +#define etPOINTER 13 /* The %p conversion */ +#define etESCAPE_w 14 /* %w -> Strings with '\"' doubled */ +#define etORDINAL 15 /* %r -> 1st, 2nd, 3rd, 4th, etc. English only */ +#define etDECIMAL 16 /* %d or %u, but not %x, %o */ -#define etINVALID 17 /* Any unrecognized conversion type */ +#define etINVALID 17 /* Any unrecognized conversion type */ /* @@ -31566,9 +31725,9 @@ static const et_info fmtinfo[] = { { 's', 0, 4, etSTRING, 0, 0 }, { 'g', 0, 1, etGENERIC, 30, 0 }, { 'z', 0, 4, etDYNSTRING, 0, 0 }, - { 'q', 0, 4, etSQLESCAPE, 0, 0 }, - { 'Q', 0, 4, etSQLESCAPE2, 0, 0 }, - { 'w', 0, 4, etSQLESCAPE3, 0, 0 }, + { 'q', 0, 4, etESCAPE_q, 0, 0 }, + { 'Q', 0, 4, etESCAPE_Q, 0, 0 }, + { 'w', 0, 4, etESCAPE_w, 0, 0 }, { 'c', 0, 0, etCHARX, 0, 0 }, { 'o', 8, 0, etRADIX, 0, 2 }, { 'u', 10, 0, etDECIMAL, 0, 0 }, @@ -32165,25 +32324,7 @@ SQLITE_API void sqlite3_str_vappendf( } }else{ unsigned int ch = va_arg(ap,unsigned int); - if( ch<0x00080 ){ - buf[0] = ch & 0xff; - length = 1; - }else if( ch<0x00800 ){ - buf[0] = 0xc0 + (u8)((ch>>6)&0x1f); - buf[1] = 0x80 + (u8)(ch & 0x3f); - length = 2; - }else if( ch<0x10000 ){ - buf[0] = 0xe0 + (u8)((ch>>12)&0x0f); - buf[1] = 0x80 + (u8)((ch>>6) & 0x3f); - buf[2] = 0x80 + (u8)(ch & 0x3f); - length = 3; - }else{ - buf[0] = 0xf0 + (u8)((ch>>18) & 0x07); - buf[1] = 0x80 + (u8)((ch>>12) & 0x3f); - buf[2] = 0x80 + (u8)((ch>>6) & 0x3f); - buf[3] = 0x80 + (u8)(ch & 0x3f); - length = 4; - } + length = sqlite3AppendOneUtf8Character(buf, ch); } if( precision>1 ){ i64 nPrior = 1; @@ -32263,22 +32404,31 @@ SQLITE_API void sqlite3_str_vappendf( while( ii>=0 ) if( (bufpt[ii--] & 0xc0)==0x80 ) width++; } break; - case etSQLESCAPE: /* %q: Escape ' characters */ - case etSQLESCAPE2: /* %Q: Escape ' and enclose in '...' */ - case etSQLESCAPE3: { /* %w: Escape " characters */ + case etESCAPE_q: /* %q: Escape ' characters */ + case etESCAPE_Q: /* %Q: Escape ' and enclose in '...' */ + case etESCAPE_w: { /* %w: Escape " characters */ i64 i, j, k, n; - int needQuote, isnull; + int needQuote = 0; char ch; - char q = ((xtype==etSQLESCAPE3)?'"':'\''); /* Quote character */ char *escarg; + char q; if( bArgList ){ escarg = getTextArg(pArgList); }else{ escarg = va_arg(ap,char*); } - isnull = escarg==0; - if( isnull ) escarg = (xtype==etSQLESCAPE2 ? "NULL" : "(NULL)"); + if( escarg==0 ){ + escarg = (xtype==etESCAPE_Q ? "NULL" : "(NULL)"); + }else if( xtype==etESCAPE_Q ){ + needQuote = 1; + } + if( xtype==etESCAPE_w ){ + q = '"'; + flag_alternateform = 0; + }else{ + q = '\''; + } /* For %q, %Q, and %w, the precision is the number of bytes (or ** characters if the ! flags is present) to use from the input. ** Because of the extra quoting characters inserted, the number @@ -32291,7 +32441,30 @@ SQLITE_API void sqlite3_str_vappendf( while( (escarg[i+1]&0xc0)==0x80 ){ i++; } } } - needQuote = !isnull && xtype==etSQLESCAPE2; + if( flag_alternateform ){ + /* For %#q, do unistr()-style backslash escapes for + ** all control characters, and for backslash itself. + ** For %#Q, do the same but only if there is at least + ** one control character. */ + u32 nBack = 0; + u32 nCtrl = 0; + for(k=0; ketBUFSIZE ){ bufpt = zExtra = printfTempBuf(pAccum, n); @@ -32300,13 +32473,41 @@ SQLITE_API void sqlite3_str_vappendf( bufpt = buf; } j = 0; - if( needQuote ) bufpt[j++] = q; - k = i; - for(i=0; i=0x10 ? 
'1' : '0'; + bufpt[j++] = "0123456789abcdef"[ch&0xf]; + } + } + }else{ + for(i=0; imxAlloc>0 && !isMalloced(p) ); - zText = sqlite3DbMallocRaw(p->db, p->nChar+1 ); + zText = sqlite3DbMallocRaw(p->db, 1+(u64)p->nChar ); if( zText ){ memcpy(zText, p->zText, p->nChar+1); p->printfFlags |= SQLITE_PRINTF_MALLOCED; @@ -32794,6 +32995,15 @@ SQLITE_API char *sqlite3_snprintf(int n, char *zBuf, const char *zFormat, ...){ return zBuf; } +/* Maximum size of an sqlite3_log() message. */ +#if defined(SQLITE_MAX_LOG_MESSAGE) + /* Leave the definition as supplied */ +#elif SQLITE_PRINT_BUF_SIZE*10>10000 +# define SQLITE_MAX_LOG_MESSAGE 10000 +#else +# define SQLITE_MAX_LOG_MESSAGE (SQLITE_PRINT_BUF_SIZE*10) +#endif + /* ** This is the routine that actually formats the sqlite3_log() message. ** We house it in a separate routine from sqlite3_log() to avoid using @@ -32810,7 +33020,7 @@ SQLITE_API char *sqlite3_snprintf(int n, char *zBuf, const char *zFormat, ...){ */ static void renderLogMsg(int iErrCode, const char *zFormat, va_list ap){ StrAccum acc; /* String accumulator */ - char zMsg[SQLITE_PRINT_BUF_SIZE*3]; /* Complete log message */ + char zMsg[SQLITE_MAX_LOG_MESSAGE]; /* Complete log message */ sqlite3StrAccumInit(&acc, 0, zMsg, sizeof(zMsg), 0); sqlite3_str_vappendf(&acc, zFormat, ap); @@ -34805,6 +35015,35 @@ static const unsigned char sqlite3Utf8Trans1[] = { } \ } +/* +** Write a single UTF8 character whose value is v into the +** buffer starting at zOut. zOut must be sized to hold at +** least four bytes. Return the number of bytes needed +** to encode the new character. +*/ +SQLITE_PRIVATE int sqlite3AppendOneUtf8Character(char *zOut, u32 v){ + if( v<0x00080 ){ + zOut[0] = (u8)(v & 0xff); + return 1; + } + if( v<0x00800 ){ + zOut[0] = 0xc0 + (u8)((v>>6) & 0x1f); + zOut[1] = 0x80 + (u8)(v & 0x3f); + return 2; + } + if( v<0x10000 ){ + zOut[0] = 0xe0 + (u8)((v>>12) & 0x0f); + zOut[1] = 0x80 + (u8)((v>>6) & 0x3f); + zOut[2] = 0x80 + (u8)(v & 0x3f); + return 3; + } + zOut[0] = 0xf0 + (u8)((v>>18) & 0x07); + zOut[1] = 0x80 + (u8)((v>>12) & 0x3f); + zOut[2] = 0x80 + (u8)((v>>6) & 0x3f); + zOut[3] = 0x80 + (u8)(v & 0x3f); + return 4; +} + /* ** Translate a single UTF-8 character. Return the unicode value. ** @@ -35226,7 +35465,7 @@ SQLITE_PRIVATE int sqlite3Utf16ByteLen(const void *zIn, int nByte, int nChar){ int n = 0; if( SQLITE_UTF16NATIVE==SQLITE_UTF16LE ) z++; - while( n=0xd8 && c<0xdc && z<=zEnd && z[0]>=0xdc && z[0]<0xe0 ) z += 2; @@ -36401,7 +36640,11 @@ SQLITE_PRIVATE void sqlite3FpDecode(FpDecode *p, double r, int iRound, int mxRou } p->z = &p->zBuf[i+1]; assert( i+p->n < sizeof(p->zBuf) ); - while( ALWAYS(p->n>0) && p->z[p->n-1]=='0' ){ p->n--; } + assert( p->n>0 ); + while( p->z[p->n-1]=='0' ){ + p->n--; + assert( p->n>0 ); + } } /* @@ -36906,7 +37149,7 @@ SQLITE_PRIVATE int sqlite3MulInt64(i64 *pA, i64 iB){ } /* -** Compute the absolute value of a 32-bit signed integer, of possible. Or +** Compute the absolute value of a 32-bit signed integer, if possible. Or ** if the integer has a value of -2147483648, return +2147483647 */ SQLITE_PRIVATE int sqlite3AbsInt32(int x){ @@ -37187,12 +37430,19 @@ SQLITE_PRIVATE void sqlite3HashClear(Hash *pH){ */ static unsigned int strHash(const char *z){ unsigned int h = 0; - unsigned char c; - while( (c = (unsigned char)*z++)!=0 ){ /*OPTIMIZATION-IF-TRUE*/ + while( z[0] ){ /*OPTIMIZATION-IF-TRUE*/ /* Knuth multiplicative hashing. (Sorting & Searching, p. 510). 
** 0x9e3779b1 is 2654435761 which is the closest prime number to - ** (2**32)*golden_ratio, where golden_ratio = (sqrt(5) - 1)/2. */ - h += sqlite3UpperToLower[c]; + ** (2**32)*golden_ratio, where golden_ratio = (sqrt(5) - 1)/2. + ** + ** Only bits 0xdf for ASCII and bits 0xbf for EBCDIC each octet are + ** hashed since the omitted bits determine the upper/lower case difference. + */ +#ifdef SQLITE_EBCDIC + h += 0xbf & (unsigned char)*(z++); +#else + h += 0xdf & (unsigned char)*(z++); +#endif h *= 0x9e3779b1; } return h; @@ -37265,9 +37515,8 @@ static int rehash(Hash *pH, unsigned int new_size){ pH->htsize = new_size = sqlite3MallocSize(new_ht)/sizeof(struct _ht); memset(new_ht, 0, new_size*sizeof(struct _ht)); for(elem=pH->first, pH->first=0; elem; elem = next_elem){ - unsigned int h = strHash(elem->pKey) % new_size; next_elem = elem->next; - insertElement(pH, &new_ht[h], elem); + insertElement(pH, &new_ht[elem->h % new_size], elem); } return 1; } @@ -37285,23 +37534,22 @@ static HashElem *findElementWithHash( HashElem *elem; /* Used to loop thru the element list */ unsigned int count; /* Number of elements left to test */ unsigned int h; /* The computed hash */ - static HashElem nullElement = { 0, 0, 0, 0 }; + static HashElem nullElement = { 0, 0, 0, 0, 0 }; + h = strHash(pKey); if( pH->ht ){ /*OPTIMIZATION-IF-TRUE*/ struct _ht *pEntry; - h = strHash(pKey) % pH->htsize; - pEntry = &pH->ht[h]; + pEntry = &pH->ht[h % pH->htsize]; elem = pEntry->chain; count = pEntry->count; }else{ - h = 0; elem = pH->first; count = pH->count; } if( pHash ) *pHash = h; while( count ){ assert( elem!=0 ); - if( sqlite3StrICmp(elem->pKey,pKey)==0 ){ + if( h==elem->h && sqlite3StrICmp(elem->pKey,pKey)==0 ){ return elem; } elem = elem->next; @@ -37313,10 +37561,9 @@ static HashElem *findElementWithHash( /* Remove a single entry from the hash table given a pointer to that ** element and a hash on the element's key. */ -static void removeElementGivenHash( +static void removeElement( Hash *pH, /* The pH containing "elem" */ - HashElem* elem, /* The element to be removed from the pH */ - unsigned int h /* Hash value for the element */ + HashElem *elem /* The element to be removed from the pH */ ){ struct _ht *pEntry; if( elem->prev ){ @@ -37328,7 +37575,7 @@ static void removeElementGivenHash( elem->next->prev = elem->prev; } if( pH->ht ){ - pEntry = &pH->ht[h]; + pEntry = &pH->ht[elem->h % pH->htsize]; if( pEntry->chain==elem ){ pEntry->chain = elem->next; } @@ -37379,7 +37626,7 @@ SQLITE_PRIVATE void *sqlite3HashInsert(Hash *pH, const char *pKey, void *data){ if( elem->data ){ void *old_data = elem->data; if( data==0 ){ - removeElementGivenHash(pH,elem,h); + removeElement(pH,elem); }else{ elem->data = data; elem->pKey = pKey; @@ -37390,15 +37637,13 @@ SQLITE_PRIVATE void *sqlite3HashInsert(Hash *pH, const char *pKey, void *data){ new_elem = (HashElem*)sqlite3Malloc( sizeof(HashElem) ); if( new_elem==0 ) return data; new_elem->pKey = pKey; + new_elem->h = h; new_elem->data = data; pH->count++; - if( pH->count>=10 && pH->count > 2*pH->htsize ){ - if( rehash(pH, pH->count*2) ){ - assert( pH->htsize>0 ); - h = strHash(pKey) % pH->htsize; - } + if( pH->count>=5 && pH->count > 2*pH->htsize ){ + rehash(pH, pH->count*3); } - insertElement(pH, pH->ht ? &pH->ht[h] : 0, new_elem); + insertElement(pH, pH->ht ? 
&pH->ht[new_elem->h % pH->htsize] : 0, new_elem); return 0; } @@ -38881,6 +39126,7 @@ struct unixFile { #endif #ifdef SQLITE_ENABLE_SETLK_TIMEOUT unsigned iBusyTimeout; /* Wait this many millisec on locks */ + int bBlockOnConnect; /* True to block for SHARED locks */ #endif #if OS_VXWORKS struct vxworksFileId *pId; /* Unique file ID */ @@ -40274,6 +40520,13 @@ static int unixFileLock(unixFile *pFile, struct flock *pLock){ rc = 0; } }else{ +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + if( pFile->bBlockOnConnect && pLock->l_type==F_RDLCK + && pLock->l_start==SHARED_FIRST && pLock->l_len==SHARED_SIZE + ){ + rc = osFcntl(pFile->h, F_SETLKW, pLock); + }else +#endif rc = osSetPosixAdvisoryLock(pFile->h, pLock, pFile); } return rc; @@ -42635,8 +42888,9 @@ static int unixFileControl(sqlite3_file *id, int op, void *pArg){ #ifdef SQLITE_ENABLE_SETLK_TIMEOUT case SQLITE_FCNTL_LOCK_TIMEOUT: { int iOld = pFile->iBusyTimeout; + int iNew = *(int*)pArg; #if SQLITE_ENABLE_SETLK_TIMEOUT==1 - pFile->iBusyTimeout = *(int*)pArg; + pFile->iBusyTimeout = iNew<0 ? 0x7FFFFFFF : (unsigned)iNew; #elif SQLITE_ENABLE_SETLK_TIMEOUT==2 pFile->iBusyTimeout = !!(*(int*)pArg); #else @@ -42645,7 +42899,12 @@ static int unixFileControl(sqlite3_file *id, int op, void *pArg){ *(int*)pArg = iOld; return SQLITE_OK; } -#endif + case SQLITE_FCNTL_BLOCK_ON_CONNECT: { + int iNew = *(int*)pArg; + pFile->bBlockOnConnect = iNew; + return SQLITE_OK; + } +#endif /* SQLITE_ENABLE_SETLK_TIMEOUT */ #if SQLITE_MAX_MMAP_SIZE>0 case SQLITE_FCNTL_MMAP_SIZE: { i64 newLimit = *(i64*)pArg; @@ -43618,21 +43877,20 @@ static int unixShmLock( /* Check that, if this to be a blocking lock, no locks that occur later ** in the following list than the lock being obtained are already held: ** - ** 1. Checkpointer lock (ofst==1). - ** 2. Write lock (ofst==0). - ** 3. Read locks (ofst>=3 && ofst=3 && ofstexclMask|p->sharedMask); assert( (flags & SQLITE_SHM_UNLOCK) || pDbFd->iBusyTimeout==0 || ( - (ofst!=2) /* not RECOVER */ + (ofst!=2 || lockMask==0) && (ofst!=1 || lockMask==0 || lockMask==2) && (ofst!=0 || lockMask<3) && (ofst<3 || lockMask<(1<iBusyTimeout +#else +# define winFileBusyTimeout(pDbFd) 0 +#endif + /* ** The winVfsAppData structure is used for the pAppData member for all of the ** Win32 VFS variants. 
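The two file-control opcodes handled in the hunk above, SQLITE_FCNTL_LOCK_TIMEOUT and SQLITE_FCNTL_BLOCK_ON_CONNECT, are what an application uses to opt in to the blocking-lock behaviour being wired into the unix and win32 VFSes here. Below is a minimal caller-side sketch, assuming a build with SQLITE_ENABLE_SETLK_TIMEOUT (on other builds both calls simply return SQLITE_NOTFOUND and can be ignored); the helper name open_with_blocking_locks is illustrative only, not part of this patch.

/*
** Sketch: open a database and ask the VFS to wait up to 2000 ms when taking
** file locks, and to block (rather than return SQLITE_BUSY) on the SHARED
** lock taken at connect time, as wired up in the hunks above. Both calls
** are best-effort: a VFS that does not implement the opcode returns
** SQLITE_NOTFOUND.
*/
#include "sqlite3.h"

int open_with_blocking_locks(const char *zPath, sqlite3 **ppDb){
  int rc = sqlite3_open(zPath, ppDb);
  if( rc==SQLITE_OK ){
    int ms = 2000;   /* lock timeout in ms; set to the previous value on return */
    int block = 1;   /* non-zero: block when acquiring the connect-time SHARED lock */
    sqlite3_file_control(*ppDb, "main", SQLITE_FCNTL_LOCK_TIMEOUT, &ms);
    sqlite3_file_control(*ppDb, "main", SQLITE_FCNTL_BLOCK_ON_CONNECT, &block);
  }
  return rc;
}

Passing a schema name other than "main" to sqlite3_file_control() applies the setting to that attached database only.
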
@@ -47478,7 +47746,7 @@ static struct win_syscall { { "FileTimeToLocalFileTime", (SYSCALL)0, 0 }, #endif -#define osFileTimeToLocalFileTime ((BOOL(WINAPI*)(CONST FILETIME*, \ +#define osFileTimeToLocalFileTime ((BOOL(WINAPI*)(const FILETIME*, \ LPFILETIME))aSyscall[11].pCurrent) #if SQLITE_OS_WINCE @@ -47487,7 +47755,7 @@ static struct win_syscall { { "FileTimeToSystemTime", (SYSCALL)0, 0 }, #endif -#define osFileTimeToSystemTime ((BOOL(WINAPI*)(CONST FILETIME*, \ +#define osFileTimeToSystemTime ((BOOL(WINAPI*)(const FILETIME*, \ LPSYSTEMTIME))aSyscall[12].pCurrent) { "FlushFileBuffers", (SYSCALL)FlushFileBuffers, 0 }, @@ -47593,6 +47861,12 @@ static struct win_syscall { #define osGetFullPathNameW ((DWORD(WINAPI*)(LPCWSTR,DWORD,LPWSTR, \ LPWSTR*))aSyscall[25].pCurrent) +/* +** For GetLastError(), MSDN says: +** +** Minimum supported client: Windows XP [desktop apps | UWP apps] +** Minimum supported server: Windows Server 2003 [desktop apps | UWP apps] +*/ { "GetLastError", (SYSCALL)GetLastError, 0 }, #define osGetLastError ((DWORD(WINAPI*)(VOID))aSyscall[26].pCurrent) @@ -47761,7 +48035,7 @@ static struct win_syscall { { "LockFile", (SYSCALL)0, 0 }, #endif -#ifndef osLockFile +#if !defined(osLockFile) && defined(SQLITE_WIN32_HAS_ANSI) #define osLockFile ((BOOL(WINAPI*)(HANDLE,DWORD,DWORD,DWORD, \ DWORD))aSyscall[47].pCurrent) #endif @@ -47825,7 +48099,7 @@ static struct win_syscall { { "SystemTimeToFileTime", (SYSCALL)SystemTimeToFileTime, 0 }, -#define osSystemTimeToFileTime ((BOOL(WINAPI*)(CONST SYSTEMTIME*, \ +#define osSystemTimeToFileTime ((BOOL(WINAPI*)(const SYSTEMTIME*, \ LPFILETIME))aSyscall[56].pCurrent) #if !SQLITE_OS_WINCE && !SQLITE_OS_WINRT @@ -47834,7 +48108,7 @@ static struct win_syscall { { "UnlockFile", (SYSCALL)0, 0 }, #endif -#ifndef osUnlockFile +#if !defined(osUnlockFile) && defined(SQLITE_WIN32_HAS_ANSI) #define osUnlockFile ((BOOL(WINAPI*)(HANDLE,DWORD,DWORD,DWORD, \ DWORD))aSyscall[57].pCurrent) #endif @@ -47875,11 +48149,13 @@ static struct win_syscall { #define osCreateEventExW ((HANDLE(WINAPI*)(LPSECURITY_ATTRIBUTES,LPCWSTR, \ DWORD,DWORD))aSyscall[62].pCurrent) -#if !SQLITE_OS_WINRT +/* +** For WaitForSingleObject(), MSDN says: +** +** Minimum supported client: Windows XP [desktop apps | UWP apps] +** Minimum supported server: Windows Server 2003 [desktop apps | UWP apps] +*/ { "WaitForSingleObject", (SYSCALL)WaitForSingleObject, 0 }, -#else - { "WaitForSingleObject", (SYSCALL)0, 0 }, -#endif #define osWaitForSingleObject ((DWORD(WINAPI*)(HANDLE, \ DWORD))aSyscall[63].pCurrent) @@ -48026,6 +48302,97 @@ static struct win_syscall { #define osFlushViewOfFile \ ((BOOL(WINAPI*)(LPCVOID,SIZE_T))aSyscall[79].pCurrent) +/* +** If SQLITE_ENABLE_SETLK_TIMEOUT is defined, we require CreateEvent() +** to implement blocking locks with timeouts. MSDN says: +** +** Minimum supported client: Windows XP [desktop apps | UWP apps] +** Minimum supported server: Windows Server 2003 [desktop apps | UWP apps] +*/ +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + { "CreateEvent", (SYSCALL)CreateEvent, 0 }, +#else + { "CreateEvent", (SYSCALL)0, 0 }, +#endif + +#define osCreateEvent ( \ + (HANDLE(WINAPI*) (LPSECURITY_ATTRIBUTES,BOOL,BOOL,LPCSTR)) \ + aSyscall[80].pCurrent \ +) + +/* +** If SQLITE_ENABLE_SETLK_TIMEOUT is defined, we require CancelIo() +** for the case where a timeout expires and a lock request must be +** cancelled. 
+** +** Minimum supported client: Windows XP [desktop apps | UWP apps] +** Minimum supported server: Windows Server 2003 [desktop apps | UWP apps] +*/ +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + { "CancelIo", (SYSCALL)CancelIo, 0 }, +#else + { "CancelIo", (SYSCALL)0, 0 }, +#endif + +#define osCancelIo ((BOOL(WINAPI*)(HANDLE))aSyscall[81].pCurrent) + +#if defined(SQLITE_WIN32_HAS_WIDE) && defined(_WIN32) + { "GetModuleHandleW", (SYSCALL)GetModuleHandleW, 0 }, +#else + { "GetModuleHandleW", (SYSCALL)0, 0 }, +#endif + +#define osGetModuleHandleW ((HMODULE(WINAPI*)(LPCWSTR))aSyscall[82].pCurrent) + +#ifndef _WIN32 + { "getenv", (SYSCALL)getenv, 0 }, +#else + { "getenv", (SYSCALL)0, 0 }, +#endif + +#define osGetenv ((const char *(*)(const char *))aSyscall[83].pCurrent) + +#ifndef _WIN32 + { "getcwd", (SYSCALL)getcwd, 0 }, +#else + { "getcwd", (SYSCALL)0, 0 }, +#endif + +#define osGetcwd ((char*(*)(char*,size_t))aSyscall[84].pCurrent) + +#ifndef _WIN32 + { "readlink", (SYSCALL)readlink, 0 }, +#else + { "readlink", (SYSCALL)0, 0 }, +#endif + +#define osReadlink ((ssize_t(*)(const char*,char*,size_t))aSyscall[85].pCurrent) + +#ifndef _WIN32 + { "lstat", (SYSCALL)lstat, 0 }, +#else + { "lstat", (SYSCALL)0, 0 }, +#endif + +#define osLstat ((int(*)(const char*,struct stat*))aSyscall[86].pCurrent) + +#ifndef _WIN32 + { "__errno", (SYSCALL)__errno, 0 }, +#else + { "__errno", (SYSCALL)0, 0 }, +#endif + +#define osErrno (*((int*(*)(void))aSyscall[87].pCurrent)()) + +#ifndef _WIN32 + { "cygwin_conv_path", (SYSCALL)cygwin_conv_path, 0 }, +#else + { "cygwin_conv_path", (SYSCALL)0, 0 }, +#endif + +#define osCygwin_conv_path ((size_t(*)(unsigned int, \ + const void *, void *, size_t))aSyscall[88].pCurrent) + }; /* End of the overrideable system calls */ /* @@ -48199,6 +48566,7 @@ SQLITE_API int sqlite3_win32_reset_heap(){ } #endif /* SQLITE_WIN32_MALLOC */ +#ifdef _WIN32 /* ** This function outputs the specified (ANSI) string to the Win32 debugger ** (if available). @@ -48241,6 +48609,7 @@ SQLITE_API void sqlite3_win32_write_debug(const char *zBuf, int nBuf){ } #endif } +#endif /* _WIN32 */ /* ** The following routine suspends the current thread for at least ms @@ -48324,7 +48693,9 @@ SQLITE_API int sqlite3_win32_is_nt(void){ } return osInterlockedCompareExchange(&sqlite3_os_type, 2, 2)==2; #elif SQLITE_TEST - return osInterlockedCompareExchange(&sqlite3_os_type, 2, 2)==2; + return osInterlockedCompareExchange(&sqlite3_os_type, 2, 2)==2 + || osInterlockedCompareExchange(&sqlite3_os_type, 0, 0)==0 + ; #else /* ** NOTE: All sub-platforms where the GetVersionEx[AW] functions are @@ -48539,6 +48910,7 @@ SQLITE_PRIVATE void sqlite3MemSetDefault(void){ } #endif /* SQLITE_WIN32_MALLOC */ +#ifdef _WIN32 /* ** Convert a UTF-8 string to Microsoft Unicode. ** @@ -48564,6 +48936,7 @@ static LPWSTR winUtf8ToUnicode(const char *zText){ } return zWideText; } +#endif /* _WIN32 */ /* ** Convert a Microsoft Unicode string to UTF-8. @@ -48598,28 +48971,29 @@ static char *winUnicodeToUtf8(LPCWSTR zWideText){ ** Space to hold the returned string is obtained from sqlite3_malloc(). */ static LPWSTR winMbcsToUnicode(const char *zText, int useAnsi){ - int nByte; + int nWideChar; LPWSTR zMbcsText; int codepage = useAnsi ? 
CP_ACP : CP_OEMCP; - nByte = osMultiByteToWideChar(codepage, 0, zText, -1, NULL, - 0)*sizeof(WCHAR); - if( nByte==0 ){ + nWideChar = osMultiByteToWideChar(codepage, 0, zText, -1, NULL, + 0); + if( nWideChar==0 ){ return 0; } - zMbcsText = sqlite3MallocZero( nByte*sizeof(WCHAR) ); + zMbcsText = sqlite3MallocZero( nWideChar*sizeof(WCHAR) ); if( zMbcsText==0 ){ return 0; } - nByte = osMultiByteToWideChar(codepage, 0, zText, -1, zMbcsText, - nByte); - if( nByte==0 ){ + nWideChar = osMultiByteToWideChar(codepage, 0, zText, -1, zMbcsText, + nWideChar); + if( nWideChar==0 ){ sqlite3_free(zMbcsText); zMbcsText = 0; } return zMbcsText; } +#ifdef _WIN32 /* ** Convert a Microsoft Unicode string to a multi-byte character string, ** using the ANSI or OEM code page. @@ -48647,6 +49021,7 @@ static char *winUnicodeToMbcs(LPCWSTR zWideText, int useAnsi){ } return zText; } +#endif /* _WIN32 */ /* ** Convert a multi-byte character string to UTF-8. @@ -48666,6 +49041,7 @@ static char *winMbcsToUtf8(const char *zText, int useAnsi){ return zTextUtf8; } +#ifdef _WIN32 /* ** Convert a UTF-8 string to a multi-byte character string. ** @@ -48715,6 +49091,7 @@ SQLITE_API char *sqlite3_win32_unicode_to_utf8(LPCWSTR zWideText){ #endif return winUnicodeToUtf8(zWideText); } +#endif /* _WIN32 */ /* ** This is a public wrapper for the winMbcsToUtf8() function. @@ -48732,6 +49109,7 @@ SQLITE_API char *sqlite3_win32_mbcs_to_utf8(const char *zText){ return winMbcsToUtf8(zText, osAreFileApisANSI()); } +#ifdef _WIN32 /* ** This is a public wrapper for the winMbcsToUtf8() function. */ @@ -48856,6 +49234,7 @@ SQLITE_API int sqlite3_win32_set_directory( ){ return sqlite3_win32_set_directory16(type, zValue); } +#endif /* _WIN32 */ /* ** The return value of winGetLastErrorMsg @@ -49404,13 +49783,98 @@ static BOOL winLockFile( ovlp.Offset = offsetLow; ovlp.OffsetHigh = offsetHigh; return osLockFileEx(*phFile, flags, 0, numBytesLow, numBytesHigh, &ovlp); +#ifdef SQLITE_WIN32_HAS_ANSI }else{ return osLockFile(*phFile, offsetLow, offsetHigh, numBytesLow, numBytesHigh); +#endif } #endif } +/* +** Lock a region of nByte bytes starting at offset offset of file hFile. +** Take an EXCLUSIVE lock if parameter bExclusive is true, or a SHARED lock +** otherwise. If nMs is greater than zero and the lock cannot be obtained +** immediately, block for that many ms before giving up. +** +** This function returns SQLITE_OK if the lock is obtained successfully. If +** some other process holds the lock, SQLITE_BUSY is returned if nMs==0, or +** SQLITE_BUSY_TIMEOUT otherwise. Or, if an error occurs, SQLITE_IOERR. +*/ +static int winHandleLockTimeout( + HANDLE hFile, + DWORD offset, + DWORD nByte, + int bExcl, + DWORD nMs +){ + DWORD flags = LOCKFILE_FAIL_IMMEDIATELY | (bExcl?LOCKFILE_EXCLUSIVE_LOCK:0); + int rc = SQLITE_OK; + BOOL ret; + + if( !osIsNT() ){ + ret = winLockFile(&hFile, flags, offset, 0, nByte, 0); + }else{ + OVERLAPPED ovlp; + memset(&ovlp, 0, sizeof(OVERLAPPED)); + ovlp.Offset = offset; + +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + if( nMs!=0 ){ + flags &= ~LOCKFILE_FAIL_IMMEDIATELY; + } + ovlp.hEvent = osCreateEvent(NULL, TRUE, FALSE, NULL); + if( ovlp.hEvent==NULL ){ + return SQLITE_IOERR_LOCK; + } +#endif + + ret = osLockFileEx(hFile, flags, 0, nByte, 0, &ovlp); + +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + /* If SQLITE_ENABLE_SETLK_TIMEOUT is defined, then the file-handle was + ** opened with FILE_FLAG_OVERHEAD specified. In this case, the call to + ** LockFileEx() may fail because the request is still pending. 
This can + ** happen even if LOCKFILE_FAIL_IMMEDIATELY was specified. + ** + ** If nMs is 0, then LOCKFILE_FAIL_IMMEDIATELY was set in the flags + ** passed to LockFileEx(). In this case, if the operation is pending, + ** block indefinitely until it is finished. + ** + ** Otherwise, wait for up to nMs ms for the operation to finish. nMs + ** may be set to INFINITE. + */ + if( !ret && GetLastError()==ERROR_IO_PENDING ){ + DWORD nDelay = (nMs==0 ? INFINITE : nMs); + DWORD res = osWaitForSingleObject(ovlp.hEvent, nDelay); + if( res==WAIT_OBJECT_0 ){ + ret = TRUE; + }else if( res==WAIT_TIMEOUT ){ +#if SQLITE_ENABLE_SETLK_TIMEOUT==1 + rc = SQLITE_BUSY_TIMEOUT; +#else + rc = SQLITE_BUSY; +#endif + }else{ + /* Some other error has occurred */ + rc = SQLITE_IOERR_LOCK; + } + + /* If it is still pending, cancel the LockFileEx() call. */ + osCancelIo(hFile); + } + + osCloseHandle(ovlp.hEvent); +#endif + } + + if( rc==SQLITE_OK && !ret ){ + rc = SQLITE_BUSY; + } + return rc; +} + /* ** Unlock a file region. */ @@ -49435,13 +49899,23 @@ static BOOL winUnlockFile( ovlp.Offset = offsetLow; ovlp.OffsetHigh = offsetHigh; return osUnlockFileEx(*phFile, 0, numBytesLow, numBytesHigh, &ovlp); +#ifdef SQLITE_WIN32_HAS_ANSI }else{ return osUnlockFile(*phFile, offsetLow, offsetHigh, numBytesLow, numBytesHigh); +#endif } #endif } +/* +** Remove an nByte lock starting at offset iOff from HANDLE h. +*/ +static int winHandleUnlock(HANDLE h, int iOff, int nByte){ + BOOL ret = winUnlockFile(&h, iOff, 0, nByte, 0); + return (ret ? SQLITE_OK : SQLITE_IOERR_UNLOCK); +} + /***************************************************************************** ** The next group of routines implement the I/O methods specified ** by the sqlite3_io_methods object. @@ -49455,66 +49929,70 @@ static BOOL winUnlockFile( #endif /* -** Move the current position of the file handle passed as the first -** argument to offset iOffset within the file. If successful, return 0. -** Otherwise, set pFile->lastErrno and return non-zero. +** Seek the file handle h to offset nByte of the file. +** +** If successful, return SQLITE_OK. Or, if an error occurs, return an SQLite +** error code. */ -static int winSeekFile(winFile *pFile, sqlite3_int64 iOffset){ +static int winHandleSeek(HANDLE h, sqlite3_int64 iOffset){ + int rc = SQLITE_OK; /* Return value */ + #if !SQLITE_OS_WINRT LONG upperBits; /* Most sig. 32 bits of new offset */ LONG lowerBits; /* Least sig. 32 bits of new offset */ DWORD dwRet; /* Value returned by SetFilePointer() */ - DWORD lastErrno; /* Value returned by GetLastError() */ - - OSTRACE(("SEEK file=%p, offset=%lld\n", pFile->h, iOffset)); upperBits = (LONG)((iOffset>>32) & 0x7fffffff); lowerBits = (LONG)(iOffset & 0xffffffff); + dwRet = osSetFilePointer(h, lowerBits, &upperBits, FILE_BEGIN); + /* API oddity: If successful, SetFilePointer() returns a dword ** containing the lower 32-bits of the new file-offset. Or, if it fails, ** it returns INVALID_SET_FILE_POINTER. However according to MSDN, ** INVALID_SET_FILE_POINTER may also be a valid new offset. So to determine ** whether an error has actually occurred, it is also necessary to call - ** GetLastError(). - */ - dwRet = osSetFilePointer(pFile->h, lowerBits, &upperBits, FILE_BEGIN); - - if( (dwRet==INVALID_SET_FILE_POINTER - && ((lastErrno = osGetLastError())!=NO_ERROR)) ){ - pFile->lastErrno = lastErrno; - winLogError(SQLITE_IOERR_SEEK, pFile->lastErrno, - "winSeekFile", pFile->zPath); - OSTRACE(("SEEK file=%p, rc=SQLITE_IOERR_SEEK\n", pFile->h)); - return 1; + ** GetLastError(). 
*/ + if( dwRet==INVALID_SET_FILE_POINTER ){ + DWORD lastErrno = osGetLastError(); + if( lastErrno!=NO_ERROR ){ + rc = SQLITE_IOERR_SEEK; + } } - - OSTRACE(("SEEK file=%p, rc=SQLITE_OK\n", pFile->h)); - return 0; #else - /* - ** Same as above, except that this implementation works for WinRT. - */ - + /* This implementation works for WinRT. */ LARGE_INTEGER x; /* The new offset */ BOOL bRet; /* Value returned by SetFilePointerEx() */ x.QuadPart = iOffset; - bRet = osSetFilePointerEx(pFile->h, x, 0, FILE_BEGIN); + bRet = osSetFilePointerEx(h, x, 0, FILE_BEGIN); if(!bRet){ - pFile->lastErrno = osGetLastError(); - winLogError(SQLITE_IOERR_SEEK, pFile->lastErrno, - "winSeekFile", pFile->zPath); - OSTRACE(("SEEK file=%p, rc=SQLITE_IOERR_SEEK\n", pFile->h)); - return 1; + rc = SQLITE_IOERR_SEEK; } - - OSTRACE(("SEEK file=%p, rc=SQLITE_OK\n", pFile->h)); - return 0; #endif + + OSTRACE(("SEEK file=%p, offset=%lld rc=%s\n", h, iOffset, sqlite3ErrName(rc))); + return rc; } +/* +** Move the current position of the file handle passed as the first +** argument to offset iOffset within the file. If successful, return 0. +** Otherwise, set pFile->lastErrno and return non-zero. +*/ +static int winSeekFile(winFile *pFile, sqlite3_int64 iOffset){ + int rc; + + rc = winHandleSeek(pFile->h, iOffset); + if( rc!=SQLITE_OK ){ + pFile->lastErrno = osGetLastError(); + winLogError(rc, pFile->lastErrno, "winSeekFile", pFile->zPath); + } + return rc; +} + + #if SQLITE_MAX_MMAP_SIZE>0 /* Forward references to VFS helper methods used for memory mapped files */ static int winMapfile(winFile*, sqlite3_int64); @@ -49774,6 +50252,60 @@ static int winWrite( return SQLITE_OK; } +/* +** Truncate the file opened by handle h to nByte bytes in size. +*/ +static int winHandleTruncate(HANDLE h, sqlite3_int64 nByte){ + int rc = SQLITE_OK; /* Return code */ + rc = winHandleSeek(h, nByte); + if( rc==SQLITE_OK ){ + if( 0==osSetEndOfFile(h) ){ + rc = SQLITE_IOERR_TRUNCATE; + } + } + return rc; +} + +/* +** Determine the size in bytes of the file opened by the handle passed as +** the first argument. +*/ +static int winHandleSize(HANDLE h, sqlite3_int64 *pnByte){ + int rc = SQLITE_OK; + +#if SQLITE_OS_WINRT + FILE_STANDARD_INFO info; + BOOL b; + b = osGetFileInformationByHandleEx(h, FileStandardInfo, &info, sizeof(info)); + if( b ){ + *pnByte = info.EndOfFile.QuadPart; + }else{ + rc = SQLITE_IOERR_FSTAT; + } +#else + DWORD upperBits = 0; + DWORD lowerBits = 0; + + assert( pnByte ); + lowerBits = osGetFileSize(h, &upperBits); + *pnByte = (((sqlite3_int64)upperBits)<<32) + lowerBits; + if( lowerBits==INVALID_FILE_SIZE && osGetLastError()!=NO_ERROR ){ + rc = SQLITE_IOERR_FSTAT; + } +#endif + + return rc; +} + +/* +** Close the handle passed as the only argument. +*/ +static void winHandleClose(HANDLE h){ + if( h!=INVALID_HANDLE_VALUE ){ + osCloseHandle(h); + } +} + /* ** Truncate an open file to a specified size */ @@ -50029,8 +50561,9 @@ static int winFileSize(sqlite3_file *id, sqlite3_int64 *pSize){ ** Different API routines are called depending on whether or not this ** is Win9x or WinNT. */ -static int winGetReadLock(winFile *pFile){ +static int winGetReadLock(winFile *pFile, int bBlock){ int res; + DWORD mask = ~(bBlock ? 
LOCKFILE_FAIL_IMMEDIATELY : 0); OSTRACE(("READ-LOCK file=%p, lock=%d\n", pFile->h, pFile->locktype)); if( osIsNT() ){ #if SQLITE_OS_WINCE @@ -50040,7 +50573,7 @@ static int winGetReadLock(winFile *pFile){ */ res = winceLockFile(&pFile->h, SHARED_FIRST, 0, 1, 0); #else - res = winLockFile(&pFile->h, SQLITE_LOCKFILEEX_FLAGS, SHARED_FIRST, 0, + res = winLockFile(&pFile->h, SQLITE_LOCKFILEEX_FLAGS&mask, SHARED_FIRST, 0, SHARED_SIZE, 0); #endif } @@ -50049,7 +50582,7 @@ static int winGetReadLock(winFile *pFile){ int lk; sqlite3_randomness(sizeof(lk), &lk); pFile->sharedLockByte = (short)((lk & 0x7fffffff)%(SHARED_SIZE - 1)); - res = winLockFile(&pFile->h, SQLITE_LOCKFILE_FLAGS, + res = winLockFile(&pFile->h, SQLITE_LOCKFILE_FLAGS&mask, SHARED_FIRST+pFile->sharedLockByte, 0, 1, 0); } #endif @@ -50144,46 +50677,62 @@ static int winLock(sqlite3_file *id, int locktype){ assert( locktype!=PENDING_LOCK ); assert( locktype!=RESERVED_LOCK || pFile->locktype==SHARED_LOCK ); - /* Lock the PENDING_LOCK byte if we need to acquire a PENDING lock or + /* Lock the PENDING_LOCK byte if we need to acquire an EXCLUSIVE lock or ** a SHARED lock. If we are acquiring a SHARED lock, the acquisition of ** the PENDING_LOCK byte is temporary. */ newLocktype = pFile->locktype; - if( pFile->locktype==NO_LOCK - || (locktype==EXCLUSIVE_LOCK && pFile->locktype<=RESERVED_LOCK) + if( locktype==SHARED_LOCK + || (locktype==EXCLUSIVE_LOCK && pFile->locktype==RESERVED_LOCK) ){ int cnt = 3; - while( cnt-->0 && (res = winLockFile(&pFile->h, SQLITE_LOCKFILE_FLAGS, - PENDING_BYTE, 0, 1, 0))==0 ){ + + /* Flags for the LockFileEx() call. This should be an exclusive lock if + ** this call is to obtain EXCLUSIVE, or a shared lock if this call is to + ** obtain SHARED. */ + int flags = LOCKFILE_FAIL_IMMEDIATELY; + if( locktype==EXCLUSIVE_LOCK ){ + flags |= LOCKFILE_EXCLUSIVE_LOCK; + } + while( cnt>0 ){ /* Try 3 times to get the pending lock. This is needed to work ** around problems caused by indexing and/or anti-virus software on ** Windows systems. + ** ** If you are using this code as a model for alternative VFSes, do not - ** copy this retry logic. It is a hack intended for Windows only. - */ + ** copy this retry logic. It is a hack intended for Windows only. 
*/ + res = winLockFile(&pFile->h, flags, PENDING_BYTE, 0, 1, 0); + if( res ) break; + lastErrno = osGetLastError(); OSTRACE(("LOCK-PENDING-FAIL file=%p, count=%d, result=%d\n", - pFile->h, cnt, res)); + pFile->h, cnt, res + )); + if( lastErrno==ERROR_INVALID_HANDLE ){ pFile->lastErrno = lastErrno; rc = SQLITE_IOERR_LOCK; OSTRACE(("LOCK-FAIL file=%p, count=%d, rc=%s\n", - pFile->h, cnt, sqlite3ErrName(rc))); + pFile->h, cnt, sqlite3ErrName(rc) + )); return rc; } - if( cnt ) sqlite3_win32_sleep(1); + + cnt--; + if( cnt>0 ) sqlite3_win32_sleep(1); } gotPendingLock = res; - if( !res ){ - lastErrno = osGetLastError(); - } } /* Acquire a shared lock */ if( locktype==SHARED_LOCK && res ){ assert( pFile->locktype==NO_LOCK ); - res = winGetReadLock(pFile); +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + res = winGetReadLock(pFile, pFile->bBlockOnConnect); +#else + res = winGetReadLock(pFile, 0); +#endif if( res ){ newLocktype = SHARED_LOCK; }else{ @@ -50221,7 +50770,7 @@ static int winLock(sqlite3_file *id, int locktype){ newLocktype = EXCLUSIVE_LOCK; }else{ lastErrno = osGetLastError(); - winGetReadLock(pFile); + winGetReadLock(pFile, 0); } } @@ -50301,7 +50850,7 @@ static int winUnlock(sqlite3_file *id, int locktype){ type = pFile->locktype; if( type>=EXCLUSIVE_LOCK ){ winUnlockFile(&pFile->h, SHARED_FIRST, 0, SHARED_SIZE, 0); - if( locktype==SHARED_LOCK && !winGetReadLock(pFile) ){ + if( locktype==SHARED_LOCK && !winGetReadLock(pFile, 0) ){ /* This should never happen. We should always be able to ** reacquire the read lock */ rc = winLogError(SQLITE_IOERR_UNLOCK, osGetLastError(), @@ -50511,6 +51060,28 @@ static int winFileControl(sqlite3_file *id, int op, void *pArg){ return rc; } #endif + +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + case SQLITE_FCNTL_LOCK_TIMEOUT: { + int iOld = pFile->iBusyTimeout; + int iNew = *(int*)pArg; +#if SQLITE_ENABLE_SETLK_TIMEOUT==1 + pFile->iBusyTimeout = (iNew < 0) ? INFINITE : (DWORD)iNew; +#elif SQLITE_ENABLE_SETLK_TIMEOUT==2 + pFile->iBusyTimeout = (DWORD)(!!iNew); +#else +# error "SQLITE_ENABLE_SETLK_TIMEOUT must be set to 1 or 2" +#endif + *(int*)pArg = iOld; + return SQLITE_OK; + } + case SQLITE_FCNTL_BLOCK_ON_CONNECT: { + int iNew = *(int*)pArg; + pFile->bBlockOnConnect = iNew; + return SQLITE_OK; + } +#endif /* SQLITE_ENABLE_SETLK_TIMEOUT */ + } OSTRACE(("FCNTL file=%p, rc=SQLITE_NOTFOUND\n", pFile->h)); return SQLITE_NOTFOUND; @@ -50591,23 +51162,27 @@ static int winShmMutexHeld(void) { ** ** The following fields are read-only after the object is created: ** -** fid ** zFilename ** ** Either winShmNode.mutex must be held or winShmNode.nRef==0 and ** winShmMutexHeld() is true when reading or writing any other field ** in this structure. ** +** File-handle hSharedShm is used to (a) take the DMS lock, (b) truncate +** the *-shm file if the DMS-locking protocol demands it, and (c) map +** regions of the *-shm file into memory using MapViewOfFile() or +** similar. Other locks are taken by individual clients using the +** winShm.hShm handles. 
*/ struct winShmNode { sqlite3_mutex *mutex; /* Mutex to access this object */ char *zFilename; /* Name of the file */ - winFile hFile; /* File handle from winOpen */ + HANDLE hSharedShm; /* File handle open on zFilename */ + int isUnlocked; /* DMS lock has not yet been obtained */ + int isReadonly; /* True if read-only */ int szRegion; /* Size of shared-memory regions */ int nRegion; /* Size of array apRegion */ - u8 isReadonly; /* True if read-only */ - u8 isUnlocked; /* True if no DMS lock held */ struct ShmRegion { HANDLE hMap; /* File handle from CreateFileMapping */ @@ -50616,7 +51191,6 @@ struct winShmNode { DWORD lastErrno; /* The Windows errno from the last I/O error */ int nRef; /* Number of winShm objects pointing to this */ - winShm *pFirst; /* All winShm objects pointing to this */ winShmNode *pNext; /* Next in list of all winShmNode objects */ #if defined(SQLITE_DEBUG) || defined(SQLITE_HAVE_OS_TRACE) u8 nextShmId; /* Next available winShm.id value */ @@ -50632,23 +51206,15 @@ static winShmNode *winShmNodeList = 0; /* ** Structure used internally by this VFS to record the state of an -** open shared memory connection. -** -** The following fields are initialized when this object is created and -** are read-only thereafter: -** -** winShm.pShmNode -** winShm.id -** -** All other fields are read/write. The winShm.pShmNode->mutex must be held -** while accessing any read/write fields. +** open shared memory connection. There is one such structure for each +** winFile open on a wal mode database. */ struct winShm { winShmNode *pShmNode; /* The underlying winShmNode object */ - winShm *pNext; /* Next winShm with the same winShmNode */ - u8 hasMutex; /* True if holding the winShmNode mutex */ u16 sharedMask; /* Mask of shared locks held */ u16 exclMask; /* Mask of exclusive locks held */ + HANDLE hShm; /* File-handle on *-shm file. For locking. */ + int bReadonly; /* True if hShm is opened read-only */ #if defined(SQLITE_DEBUG) || defined(SQLITE_HAVE_OS_TRACE) u8 id; /* Id of this connection with its winShmNode */ #endif @@ -50660,50 +51226,6 @@ struct winShm { #define WIN_SHM_BASE ((22+SQLITE_SHM_NLOCK)*4) /* first lock byte */ #define WIN_SHM_DMS (WIN_SHM_BASE+SQLITE_SHM_NLOCK) /* deadman switch */ -/* -** Apply advisory locks for all n bytes beginning at ofst. 
-*/ -#define WINSHM_UNLCK 1 -#define WINSHM_RDLCK 2 -#define WINSHM_WRLCK 3 -static int winShmSystemLock( - winShmNode *pFile, /* Apply locks to this open shared-memory segment */ - int lockType, /* WINSHM_UNLCK, WINSHM_RDLCK, or WINSHM_WRLCK */ - int ofst, /* Offset to first byte to be locked/unlocked */ - int nByte /* Number of bytes to lock or unlock */ -){ - int rc = 0; /* Result code form Lock/UnlockFileEx() */ - - /* Access to the winShmNode object is serialized by the caller */ - assert( pFile->nRef==0 || sqlite3_mutex_held(pFile->mutex) ); - - OSTRACE(("SHM-LOCK file=%p, lock=%d, offset=%d, size=%d\n", - pFile->hFile.h, lockType, ofst, nByte)); - - /* Release/Acquire the system-level lock */ - if( lockType==WINSHM_UNLCK ){ - rc = winUnlockFile(&pFile->hFile.h, ofst, 0, nByte, 0); - }else{ - /* Initialize the locking parameters */ - DWORD dwFlags = LOCKFILE_FAIL_IMMEDIATELY; - if( lockType == WINSHM_WRLCK ) dwFlags |= LOCKFILE_EXCLUSIVE_LOCK; - rc = winLockFile(&pFile->hFile.h, dwFlags, ofst, 0, nByte, 0); - } - - if( rc!= 0 ){ - rc = SQLITE_OK; - }else{ - pFile->lastErrno = osGetLastError(); - rc = SQLITE_BUSY; - } - - OSTRACE(("SHM-LOCK file=%p, func=%s, errno=%lu, rc=%s\n", - pFile->hFile.h, (lockType == WINSHM_UNLCK) ? "winUnlockFile" : - "winLockFile", pFile->lastErrno, sqlite3ErrName(rc))); - - return rc; -} - /* Forward references to VFS methods */ static int winOpen(sqlite3_vfs*,const char*,sqlite3_file*,int,int*); static int winDelete(sqlite3_vfs *,const char*,int); @@ -50735,11 +51257,7 @@ static void winShmPurge(sqlite3_vfs *pVfs, int deleteFlag){ osGetCurrentProcessId(), i, bRc ? "ok" : "failed")); UNUSED_VARIABLE_VALUE(bRc); } - if( p->hFile.h!=NULL && p->hFile.h!=INVALID_HANDLE_VALUE ){ - SimulateIOErrorBenign(1); - winClose((sqlite3_file *)&p->hFile); - SimulateIOErrorBenign(0); - } + winHandleClose(p->hSharedShm); if( deleteFlag ){ SimulateIOErrorBenign(1); sqlite3BeginBenignMalloc(); @@ -50757,42 +51275,239 @@ static void winShmPurge(sqlite3_vfs *pVfs, int deleteFlag){ } /* -** The DMS lock has not yet been taken on shm file pShmNode. Attempt to -** take it now. Return SQLITE_OK if successful, or an SQLite error -** code otherwise. -** -** If the DMS cannot be locked because this is a readonly_shm=1 -** connection and no other process already holds a lock, return -** SQLITE_READONLY_CANTINIT and set pShmNode->isUnlocked=1. +** The DMS lock has not yet been taken on the shm file associated with +** pShmNode. Take the lock. Truncate the *-shm file if required. +** Return SQLITE_OK if successful, or an SQLite error code otherwise. */ -static int winLockSharedMemory(winShmNode *pShmNode){ - int rc = winShmSystemLock(pShmNode, WINSHM_WRLCK, WIN_SHM_DMS, 1); +static int winLockSharedMemory(winShmNode *pShmNode, DWORD nMs){ + HANDLE h = pShmNode->hSharedShm; + int rc = SQLITE_OK; + + assert( sqlite3_mutex_held(pShmNode->mutex) ); + rc = winHandleLockTimeout(h, WIN_SHM_DMS, 1, 1, 0); + if( rc==SQLITE_OK ){ + /* We have an EXCLUSIVE lock on the DMS byte. This means that this + ** is the first process to open the file. Truncate it to zero bytes + ** in this case. */ + if( pShmNode->isReadonly ){ + rc = SQLITE_READONLY_CANTINIT; + }else{ + rc = winHandleTruncate(h, 0); + } + + /* Release the EXCLUSIVE lock acquired above. 
*/ + winUnlockFile(&h, WIN_SHM_DMS, 0, 1, 0); + }else if( (rc & 0xFF)==SQLITE_BUSY ){ + rc = SQLITE_OK; + } if( rc==SQLITE_OK ){ - if( pShmNode->isReadonly ){ - pShmNode->isUnlocked = 1; - winShmSystemLock(pShmNode, WINSHM_UNLCK, WIN_SHM_DMS, 1); - return SQLITE_READONLY_CANTINIT; - }else if( winTruncate((sqlite3_file*)&pShmNode->hFile, 0) ){ - winShmSystemLock(pShmNode, WINSHM_UNLCK, WIN_SHM_DMS, 1); - return winLogError(SQLITE_IOERR_SHMOPEN, osGetLastError(), - "winLockSharedMemory", pShmNode->zFilename); + /* Take a SHARED lock on the DMS byte. */ + rc = winHandleLockTimeout(h, WIN_SHM_DMS, 1, 0, nMs); + if( rc==SQLITE_OK ){ + pShmNode->isUnlocked = 0; } } - if( rc==SQLITE_OK ){ - winShmSystemLock(pShmNode, WINSHM_UNLCK, WIN_SHM_DMS, 1); - } + return rc; +} - return winShmSystemLock(pShmNode, WINSHM_RDLCK, WIN_SHM_DMS, 1); + +/* +** Convert a UTF-8 filename into whatever form the underlying +** operating system wants filenames in. Space to hold the result +** is obtained from malloc and must be freed by the calling +** function +** +** On Cygwin, 3 possible input forms are accepted: +** - If the filename starts with ":/" or ":\", +** it is converted to UTF-16 as-is. +** - If the filename contains '/', it is assumed to be a +** Cygwin absolute path, it is converted to a win32 +** absolute path in UTF-16. +** - Otherwise it must be a filename only, the win32 filename +** is returned in UTF-16. +** Note: If the function cygwin_conv_path() fails, only +** UTF-8 -> UTF-16 conversion will be done. This can only +** happen when the file path >32k, in which case winUtf8ToUnicode() +** will fail too. +*/ +static void *winConvertFromUtf8Filename(const char *zFilename){ + void *zConverted = 0; + if( osIsNT() ){ +#ifdef __CYGWIN__ + int nChar; + LPWSTR zWideFilename; + + if( osCygwin_conv_path && !(winIsDriveLetterAndColon(zFilename) + && winIsDirSep(zFilename[2])) ){ + i64 nByte; + int convertflag = CCP_POSIX_TO_WIN_W; + if( !strchr(zFilename, '/') ) convertflag |= CCP_RELATIVE; + nByte = (i64)osCygwin_conv_path(convertflag, + zFilename, 0, 0); + if( nByte>0 ){ + zConverted = sqlite3MallocZero(12+(u64)nByte); + if ( zConverted==0 ){ + return zConverted; + } + zWideFilename = zConverted; + /* Filenames should be prefixed, except when converted + * full path already starts with "\\?\". */ + if( osCygwin_conv_path(convertflag, zFilename, + zWideFilename+4, nByte)==0 ){ + if( (convertflag&CCP_RELATIVE) ){ + memmove(zWideFilename, zWideFilename+4, nByte); + }else if( memcmp(zWideFilename+4, L"\\\\", 4) ){ + memcpy(zWideFilename, L"\\\\?\\", 8); + }else if( zWideFilename[6]!='?' ){ + memmove(zWideFilename+6, zWideFilename+4, nByte); + memcpy(zWideFilename, L"\\\\?\\UNC", 14); + }else{ + memmove(zWideFilename, zWideFilename+4, nByte); + } + return zConverted; + } + sqlite3_free(zConverted); + } + } + nChar = osMultiByteToWideChar(CP_UTF8, 0, zFilename, -1, NULL, 0); + if( nChar==0 ){ + return 0; + } + zWideFilename = sqlite3MallocZero( nChar*sizeof(WCHAR)+12 ); + if( zWideFilename==0 ){ + return 0; + } + nChar = osMultiByteToWideChar(CP_UTF8, 0, zFilename, -1, + zWideFilename, nChar); + if( nChar==0 ){ + sqlite3_free(zWideFilename); + zWideFilename = 0; + }else if( nChar>MAX_PATH + && winIsDriveLetterAndColon(zFilename) + && winIsDirSep(zFilename[2]) ){ + memmove(zWideFilename+4, zWideFilename, nChar*sizeof(WCHAR)); + zWideFilename[2] = '\\'; + memcpy(zWideFilename, L"\\\\?\\", 8); + }else if( nChar>MAX_PATH + && winIsDirSep(zFilename[0]) && winIsDirSep(zFilename[1]) + && zFilename[2] != '?' 
){ + memmove(zWideFilename+6, zWideFilename, nChar*sizeof(WCHAR)); + memcpy(zWideFilename, L"\\\\?\\UNC", 14); + } + zConverted = zWideFilename; +#else + zConverted = winUtf8ToUnicode(zFilename); +#endif /* __CYGWIN__ */ + } +#if defined(SQLITE_WIN32_HAS_ANSI) && defined(_WIN32) + else{ + zConverted = winUtf8ToMbcs(zFilename, osAreFileApisANSI()); + } +#endif + /* caller will handle out of memory */ + return zConverted; } /* -** Open the shared-memory area associated with database file pDbFd. +** This function is used to open a handle on a *-shm file. ** -** When opening a new shared-memory file, if no other instances of that -** file are currently open, in this process or in other processes, then -** the file must be truncated to zero length or have its header cleared. +** If SQLITE_ENABLE_SETLK_TIMEOUT is defined at build time, then the file +** is opened with FILE_FLAG_OVERLAPPED specified. If not, it is not. +*/ +static int winHandleOpen( + const char *zUtf8, /* File to open */ + int *pbReadonly, /* IN/OUT: True for readonly handle */ + HANDLE *ph /* OUT: New HANDLE for file */ +){ + int rc = SQLITE_OK; + void *zConverted = 0; + int bReadonly = *pbReadonly; + HANDLE h = INVALID_HANDLE_VALUE; + +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + const DWORD flag_overlapped = FILE_FLAG_OVERLAPPED; +#else + const DWORD flag_overlapped = 0; +#endif + + /* Convert the filename to the system encoding. */ + zConverted = winConvertFromUtf8Filename(zUtf8); + if( zConverted==0 ){ + OSTRACE(("OPEN name=%s, rc=SQLITE_IOERR_NOMEM", zUtf8)); + rc = SQLITE_IOERR_NOMEM_BKPT; + goto winopenfile_out; + } + + /* Ensure the file we are trying to open is not actually a directory. */ + if( winIsDir(zConverted) ){ + OSTRACE(("OPEN name=%s, rc=SQLITE_CANTOPEN_ISDIR", zUtf8)); + rc = SQLITE_CANTOPEN_ISDIR; + goto winopenfile_out; + } + + /* TODO: platforms. + ** TODO: retry-on-ioerr. + */ + if( osIsNT() ){ +#if SQLITE_OS_WINRT + CREATEFILE2_EXTENDED_PARAMETERS extendedParameters; + memset(&extendedParameters, 0, sizeof(extendedParameters)); + extendedParameters.dwSize = sizeof(extendedParameters); + extendedParameters.dwFileAttributes = FILE_ATTRIBUTE_NORMAL; + extendedParameters.dwFileFlags = flag_overlapped; + extendedParameters.dwSecurityQosFlags = SECURITY_ANONYMOUS; + h = osCreateFile2((LPCWSTR)zConverted, + (GENERIC_READ | (bReadonly ? 0 : GENERIC_WRITE)),/* dwDesiredAccess */ + FILE_SHARE_READ | FILE_SHARE_WRITE, /* dwShareMode */ + OPEN_ALWAYS, /* dwCreationDisposition */ + &extendedParameters + ); +#else + h = osCreateFileW((LPCWSTR)zConverted, /* lpFileName */ + (GENERIC_READ | (bReadonly ? 0 : GENERIC_WRITE)), /* dwDesiredAccess */ + FILE_SHARE_READ | FILE_SHARE_WRITE, /* dwShareMode */ + NULL, /* lpSecurityAttributes */ + OPEN_ALWAYS, /* dwCreationDisposition */ + FILE_ATTRIBUTE_NORMAL|flag_overlapped, + NULL + ); +#endif + }else{ + /* Due to pre-processor directives earlier in this file, + ** SQLITE_WIN32_HAS_ANSI is always defined if osIsNT() is false. */ +#ifdef SQLITE_WIN32_HAS_ANSI + h = osCreateFileA((LPCSTR)zConverted, + (GENERIC_READ | (bReadonly ? 
0 : GENERIC_WRITE)), /* dwDesiredAccess */ + FILE_SHARE_READ | FILE_SHARE_WRITE, /* dwShareMode */ + NULL, /* lpSecurityAttributes */ + OPEN_ALWAYS, /* dwCreationDisposition */ + FILE_ATTRIBUTE_NORMAL|flag_overlapped, + NULL + ); +#endif + } + + if( h==INVALID_HANDLE_VALUE ){ + if( bReadonly==0 ){ + bReadonly = 1; + rc = winHandleOpen(zUtf8, &bReadonly, &h); + }else{ + rc = SQLITE_CANTOPEN_BKPT; + } + } + + winopenfile_out: + sqlite3_free(zConverted); + *pbReadonly = bReadonly; + *ph = h; + return rc; +} + + +/* +** Open the shared-memory area associated with database file pDbFd. */ static int winOpenSharedMemory(winFile *pDbFd){ struct winShm *p; /* The connection to be opened */ @@ -50804,98 +51519,83 @@ static int winOpenSharedMemory(winFile *pDbFd){ assert( pDbFd->pShm==0 ); /* Not previously opened */ /* Allocate space for the new sqlite3_shm object. Also speculatively - ** allocate space for a new winShmNode and filename. - */ + ** allocate space for a new winShmNode and filename. */ p = sqlite3MallocZero( sizeof(*p) ); if( p==0 ) return SQLITE_IOERR_NOMEM_BKPT; nName = sqlite3Strlen30(pDbFd->zPath); - pNew = sqlite3MallocZero( sizeof(*pShmNode) + nName + 17 ); + pNew = sqlite3MallocZero( sizeof(*pShmNode) + (i64)nName + 17 ); if( pNew==0 ){ sqlite3_free(p); return SQLITE_IOERR_NOMEM_BKPT; } pNew->zFilename = (char*)&pNew[1]; + pNew->hSharedShm = INVALID_HANDLE_VALUE; + pNew->isUnlocked = 1; sqlite3_snprintf(nName+15, pNew->zFilename, "%s-shm", pDbFd->zPath); sqlite3FileSuffix3(pDbFd->zPath, pNew->zFilename); + /* Open a file-handle on the *-shm file for this connection. This file-handle + ** is only used for locking. The mapping of the *-shm file is created using + ** the shared file handle in winShmNode.hSharedShm. */ + p->bReadonly = sqlite3_uri_boolean(pDbFd->zPath, "readonly_shm", 0); + rc = winHandleOpen(pNew->zFilename, &p->bReadonly, &p->hShm); + /* Look to see if there is an existing winShmNode that can be used. - ** If no matching winShmNode currently exists, create a new one. - */ + ** If no matching winShmNode currently exists, then create a new one. */ winShmEnterMutex(); for(pShmNode = winShmNodeList; pShmNode; pShmNode=pShmNode->pNext){ /* TBD need to come up with better match here. Perhaps - ** use FILE_ID_BOTH_DIR_INFO Structure. - */ + ** use FILE_ID_BOTH_DIR_INFO Structure. */ if( sqlite3StrICmp(pShmNode->zFilename, pNew->zFilename)==0 ) break; } - if( pShmNode ){ - sqlite3_free(pNew); - }else{ - int inFlags = SQLITE_OPEN_WAL; - int outFlags = 0; - + if( pShmNode==0 ){ pShmNode = pNew; - pNew = 0; - ((winFile*)(&pShmNode->hFile))->h = INVALID_HANDLE_VALUE; - pShmNode->pNext = winShmNodeList; - winShmNodeList = pShmNode; + /* Allocate a mutex for this winShmNode object, if one is required. */ if( sqlite3GlobalConfig.bCoreMutex ){ pShmNode->mutex = sqlite3_mutex_alloc(SQLITE_MUTEX_FAST); - if( pShmNode->mutex==0 ){ - rc = SQLITE_IOERR_NOMEM_BKPT; - goto shm_open_err; + if( pShmNode->mutex==0 ) rc = SQLITE_IOERR_NOMEM_BKPT; + } + + /* Open a file-handle to use for mappings, and for the DMS lock. */ + if( rc==SQLITE_OK ){ + HANDLE h = INVALID_HANDLE_VALUE; + pShmNode->isReadonly = p->bReadonly; + rc = winHandleOpen(pNew->zFilename, &pShmNode->isReadonly, &h); + pShmNode->hSharedShm = h; + } + + /* If successful, link the new winShmNode into the global list. If an + ** error occurred, free the object. 
*/ + if( rc==SQLITE_OK ){ + pShmNode->pNext = winShmNodeList; + winShmNodeList = pShmNode; + pNew = 0; + }else{ + sqlite3_mutex_free(pShmNode->mutex); + if( pShmNode->hSharedShm!=INVALID_HANDLE_VALUE ){ + osCloseHandle(pShmNode->hSharedShm); } } - - if( 0==sqlite3_uri_boolean(pDbFd->zPath, "readonly_shm", 0) ){ - inFlags |= SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE; - }else{ - inFlags |= SQLITE_OPEN_READONLY; - } - rc = winOpen(pDbFd->pVfs, pShmNode->zFilename, - (sqlite3_file*)&pShmNode->hFile, - inFlags, &outFlags); - if( rc!=SQLITE_OK ){ - rc = winLogError(rc, osGetLastError(), "winOpenShm", - pShmNode->zFilename); - goto shm_open_err; - } - if( outFlags==SQLITE_OPEN_READONLY ) pShmNode->isReadonly = 1; - - rc = winLockSharedMemory(pShmNode); - if( rc!=SQLITE_OK && rc!=SQLITE_READONLY_CANTINIT ) goto shm_open_err; } - /* Make the new connection a child of the winShmNode */ - p->pShmNode = pShmNode; + /* If no error has occurred, link the winShm object to the winShmNode and + ** the winShm to pDbFd. */ + if( rc==SQLITE_OK ){ + p->pShmNode = pShmNode; + pShmNode->nRef++; #if defined(SQLITE_DEBUG) || defined(SQLITE_HAVE_OS_TRACE) - p->id = pShmNode->nextShmId++; + p->id = pShmNode->nextShmId++; #endif - pShmNode->nRef++; - pDbFd->pShm = p; + pDbFd->pShm = p; + }else if( p ){ + winHandleClose(p->hShm); + sqlite3_free(p); + } + + assert( rc!=SQLITE_OK || pShmNode->isUnlocked==0 || pShmNode->nRegion==0 ); winShmLeaveMutex(); - - /* The reference count on pShmNode has already been incremented under - ** the cover of the winShmEnterMutex() mutex and the pointer from the - ** new (struct winShm) object to the pShmNode has been set. All that is - ** left to do is to link the new object into the linked list starting - ** at pShmNode->pFirst. This must be done while holding the pShmNode->mutex - ** mutex. - */ - sqlite3_mutex_enter(pShmNode->mutex); - p->pNext = pShmNode->pFirst; - pShmNode->pFirst = p; - sqlite3_mutex_leave(pShmNode->mutex); - return rc; - - /* Jump here on any error */ -shm_open_err: - winShmSystemLock(pShmNode, WINSHM_UNLCK, WIN_SHM_DMS, 1); - winShmPurge(pDbFd->pVfs, 0); /* This call frees pShmNode if required */ - sqlite3_free(p); sqlite3_free(pNew); - winShmLeaveMutex(); return rc; } @@ -50910,27 +51610,19 @@ static int winShmUnmap( winFile *pDbFd; /* Database holding shared-memory */ winShm *p; /* The connection to be closed */ winShmNode *pShmNode; /* The underlying shared-memory file */ - winShm **pp; /* For looping over sibling connections */ pDbFd = (winFile*)fd; p = pDbFd->pShm; if( p==0 ) return SQLITE_OK; + if( p->hShm!=INVALID_HANDLE_VALUE ){ + osCloseHandle(p->hShm); + } + pShmNode = p->pShmNode; - - /* Remove connection p from the set of connections associated - ** with pShmNode */ - sqlite3_mutex_enter(pShmNode->mutex); - for(pp=&pShmNode->pFirst; (*pp)!=p; pp = &(*pp)->pNext){} - *pp = p->pNext; - - /* Free the connection p */ - sqlite3_free(p); - pDbFd->pShm = 0; - sqlite3_mutex_leave(pShmNode->mutex); + winShmEnterMutex(); /* If pShmNode->nRef has reached 0, then close the underlying - ** shared-memory file, too */ - winShmEnterMutex(); + ** shared-memory file, too. 
*/ assert( pShmNode->nRef>0 ); pShmNode->nRef--; if( pShmNode->nRef==0 ){ @@ -50938,6 +51630,9 @@ static int winShmUnmap( } winShmLeaveMutex(); + /* Free the connection p */ + sqlite3_free(p); + pDbFd->pShm = 0; return SQLITE_OK; } @@ -50952,10 +51647,9 @@ static int winShmLock( ){ winFile *pDbFd = (winFile*)fd; /* Connection holding shared memory */ winShm *p = pDbFd->pShm; /* The shared memory being locked */ - winShm *pX; /* For looping over all siblings */ winShmNode *pShmNode; int rc = SQLITE_OK; /* Result code */ - u16 mask; /* Mask of locks to take or release */ + u16 mask = (u16)((1U<<(ofst+n)) - (1U<pShmNode; @@ -50969,85 +51663,81 @@ static int winShmLock( || flags==(SQLITE_SHM_UNLOCK | SQLITE_SHM_EXCLUSIVE) ); assert( n==1 || (flags & SQLITE_SHM_EXCLUSIVE)!=0 ); - mask = (u16)((1U<<(ofst+n)) - (1U<1 || mask==(1<mutex); - if( flags & SQLITE_SHM_UNLOCK ){ - u16 allMask = 0; /* Mask of locks held by siblings */ + /* Check that, if this to be a blocking lock, no locks that occur later + ** in the following list than the lock being obtained are already held: + ** + ** 1. Recovery lock (ofst==2). + ** 2. Checkpointer lock (ofst==1). + ** 3. Write lock (ofst==0). + ** 4. Read locks (ofst>=3 && ofstexclMask|p->sharedMask); + assert( (flags & SQLITE_SHM_UNLOCK) || pDbFd->iBusyTimeout==0 || ( + (ofst!=2 || lockMask==0) + && (ofst!=1 || lockMask==0 || lockMask==2) + && (ofst!=0 || lockMask<3) + && (ofst<3 || lockMask<(1<pFirst; pX; pX=pX->pNext){ - if( pX==p ) continue; - assert( (pX->exclMask & (p->exclMask|p->sharedMask))==0 ); - allMask |= pX->sharedMask; - } + /* Check if there is any work to do. There are three cases: + ** + ** a) An unlock operation where there are locks to unlock, + ** b) An shared lock where the requested lock is not already held + ** c) An exclusive lock where the requested lock is not already held + ** + ** The SQLite core never requests an exclusive lock that it already holds. + ** This is assert()ed immediately below. */ + assert( flags!=(SQLITE_SHM_EXCLUSIVE|SQLITE_SHM_LOCK) + || 0==(p->exclMask & mask) + ); + if( ((flags & SQLITE_SHM_UNLOCK) && ((p->exclMask|p->sharedMask) & mask)) + || (flags==(SQLITE_SHM_SHARED|SQLITE_SHM_LOCK) && 0==(p->sharedMask & mask)) + || (flags==(SQLITE_SHM_EXCLUSIVE|SQLITE_SHM_LOCK)) + ){ - /* Unlock the system-level locks */ - if( (mask & allMask)==0 ){ - rc = winShmSystemLock(pShmNode, WINSHM_UNLCK, ofst+WIN_SHM_BASE, n); - }else{ - rc = SQLITE_OK; - } + if( flags & SQLITE_SHM_UNLOCK ){ + /* Case (a) - unlock. */ - /* Undo the local locks */ - if( rc==SQLITE_OK ){ - p->exclMask &= ~mask; - p->sharedMask &= ~mask; - } - }else if( flags & SQLITE_SHM_SHARED ){ - u16 allShared = 0; /* Union of locks held by connections other than "p" */ + assert( (p->exclMask & p->sharedMask)==0 ); + assert( !(flags & SQLITE_SHM_EXCLUSIVE) || (p->exclMask & mask)==mask ); + assert( !(flags & SQLITE_SHM_SHARED) || (p->sharedMask & mask)==mask ); - /* Find out which shared locks are already held by sibling connections. - ** If any sibling already holds an exclusive lock, go ahead and return - ** SQLITE_BUSY. 
- */ - for(pX=pShmNode->pFirst; pX; pX=pX->pNext){ - if( (pX->exclMask & mask)!=0 ){ - rc = SQLITE_BUSY; - break; - } - allShared |= pX->sharedMask; - } + rc = winHandleUnlock(p->hShm, ofst+WIN_SHM_BASE, n); - /* Get shared locks at the system level, if necessary */ - if( rc==SQLITE_OK ){ - if( (allShared & mask)==0 ){ - rc = winShmSystemLock(pShmNode, WINSHM_RDLCK, ofst+WIN_SHM_BASE, n); - }else{ - rc = SQLITE_OK; - } - } - - /* Get the local shared locks */ - if( rc==SQLITE_OK ){ - p->sharedMask |= mask; - } - }else{ - /* Make sure no sibling connections hold locks that will block this - ** lock. If any do, return SQLITE_BUSY right away. - */ - for(pX=pShmNode->pFirst; pX; pX=pX->pNext){ - if( (pX->exclMask & mask)!=0 || (pX->sharedMask & mask)!=0 ){ - rc = SQLITE_BUSY; - break; - } - } - - /* Get the exclusive locks at the system level. Then if successful - ** also mark the local connection as being locked. - */ - if( rc==SQLITE_OK ){ - rc = winShmSystemLock(pShmNode, WINSHM_WRLCK, ofst+WIN_SHM_BASE, n); + /* If successful, also clear the bits in sharedMask/exclMask */ if( rc==SQLITE_OK ){ - assert( (p->sharedMask & mask)==0 ); - p->exclMask |= mask; + p->exclMask = (p->exclMask & ~mask); + p->sharedMask = (p->sharedMask & ~mask); + } + }else{ + int bExcl = ((flags & SQLITE_SHM_EXCLUSIVE) ? 1 : 0); + DWORD nMs = winFileBusyTimeout(pDbFd); + rc = winHandleLockTimeout(p->hShm, ofst+WIN_SHM_BASE, n, bExcl, nMs); + if( rc==SQLITE_OK ){ + if( bExcl ){ + p->exclMask = (p->exclMask | mask); + }else{ + p->sharedMask = (p->sharedMask | mask); + } } } } - sqlite3_mutex_leave(pShmNode->mutex); - OSTRACE(("SHM-LOCK pid=%lu, id=%d, sharedMask=%03x, exclMask=%03x, rc=%s\n", - osGetCurrentProcessId(), p->id, p->sharedMask, p->exclMask, - sqlite3ErrName(rc))); + + OSTRACE(( + "SHM-LOCK(%d,%d,%d) pid=%lu, id=%d, sharedMask=%03x, exclMask=%03x," + " rc=%s\n", + ofst, n, flags, + osGetCurrentProcessId(), p->id, p->sharedMask, p->exclMask, + sqlite3ErrName(rc)) + ); return rc; } @@ -51109,13 +51799,15 @@ static int winShmMap( sqlite3_mutex_enter(pShmNode->mutex); if( pShmNode->isUnlocked ){ - rc = winLockSharedMemory(pShmNode); + /* Take the DMS lock. */ + assert( pShmNode->nRegion==0 ); + rc = winLockSharedMemory(pShmNode, winFileBusyTimeout(pDbFd)); if( rc!=SQLITE_OK ) goto shmpage_out; - pShmNode->isUnlocked = 0; } - assert( szRegion==pShmNode->szRegion || pShmNode->nRegion==0 ); + assert( szRegion==pShmNode->szRegion || pShmNode->nRegion==0 ); if( pShmNode->nRegion<=iRegion ){ + HANDLE hShared = pShmNode->hSharedShm; struct ShmRegion *apNew; /* New aRegion[] array */ int nByte = (iRegion+1)*szRegion; /* Minimum required file size */ sqlite3_int64 sz; /* Current size of wal-index file */ @@ -51126,10 +51818,9 @@ static int winShmMap( ** Check to see if it has been allocated (i.e. if the wal-index file is ** large enough to contain the requested region). */ - rc = winFileSize((sqlite3_file *)&pShmNode->hFile, &sz); + rc = winHandleSize(hShared, &sz); if( rc!=SQLITE_OK ){ - rc = winLogError(SQLITE_IOERR_SHMSIZE, osGetLastError(), - "winShmMap1", pDbFd->zPath); + rc = winLogError(rc, osGetLastError(), "winShmMap1", pDbFd->zPath); goto shmpage_out; } @@ -51138,19 +51829,17 @@ static int winShmMap( ** zero, exit early. *pp will be set to NULL and SQLITE_OK returned. ** ** Alternatively, if isWrite is non-zero, use ftruncate() to allocate - ** the requested memory region. - */ + ** the requested memory region. 
*/ if( !isWrite ) goto shmpage_out; - rc = winTruncate((sqlite3_file *)&pShmNode->hFile, nByte); + rc = winHandleTruncate(hShared, nByte); if( rc!=SQLITE_OK ){ - rc = winLogError(SQLITE_IOERR_SHMSIZE, osGetLastError(), - "winShmMap2", pDbFd->zPath); + rc = winLogError(rc, osGetLastError(), "winShmMap2", pDbFd->zPath); goto shmpage_out; } } /* Map the requested memory region into this processes address space. */ - apNew = (struct ShmRegion *)sqlite3_realloc64( + apNew = (struct ShmRegion*)sqlite3_realloc64( pShmNode->aRegion, (iRegion+1)*sizeof(apNew[0]) ); if( !apNew ){ @@ -51169,18 +51858,13 @@ static int winShmMap( void *pMap = 0; /* Mapped memory region */ #if SQLITE_OS_WINRT - hMap = osCreateFileMappingFromApp(pShmNode->hFile.h, - NULL, protect, nByte, NULL - ); + hMap = osCreateFileMappingFromApp(hShared, NULL, protect, nByte, NULL); #elif defined(SQLITE_WIN32_HAS_WIDE) - hMap = osCreateFileMappingW(pShmNode->hFile.h, - NULL, protect, 0, nByte, NULL - ); + hMap = osCreateFileMappingW(hShared, NULL, protect, 0, nByte, NULL); #elif defined(SQLITE_WIN32_HAS_ANSI) && SQLITE_WIN32_CREATEFILEMAPPINGA - hMap = osCreateFileMappingA(pShmNode->hFile.h, - NULL, protect, 0, nByte, NULL - ); + hMap = osCreateFileMappingA(hShared, NULL, protect, 0, nByte, NULL); #endif + OSTRACE(("SHM-MAP-CREATE pid=%lu, region=%d, size=%d, rc=%s\n", osGetCurrentProcessId(), pShmNode->nRegion, nByte, hMap ? "ok" : "failed")); @@ -51223,7 +51907,9 @@ shmpage_out: }else{ *pp = 0; } - if( pShmNode->isReadonly && rc==SQLITE_OK ) rc = SQLITE_READONLY; + if( pShmNode->isReadonly && rc==SQLITE_OK ){ + rc = SQLITE_READONLY; + } sqlite3_mutex_leave(pShmNode->mutex); return rc; } @@ -51543,47 +52229,6 @@ static winVfsAppData winNolockAppData = { ** sqlite3_vfs object. */ -#if defined(__CYGWIN__) -/* -** Convert a filename from whatever the underlying operating system -** supports for filenames into UTF-8. Space to hold the result is -** obtained from malloc and must be freed by the calling function. -*/ -static char *winConvertToUtf8Filename(const void *zFilename){ - char *zConverted = 0; - if( osIsNT() ){ - zConverted = winUnicodeToUtf8(zFilename); - } -#ifdef SQLITE_WIN32_HAS_ANSI - else{ - zConverted = winMbcsToUtf8(zFilename, osAreFileApisANSI()); - } -#endif - /* caller will handle out of memory */ - return zConverted; -} -#endif - -/* -** Convert a UTF-8 filename into whatever form the underlying -** operating system wants filenames in. Space to hold the result -** is obtained from malloc and must be freed by the calling -** function. 
-*/ -static void *winConvertFromUtf8Filename(const char *zFilename){ - void *zConverted = 0; - if( osIsNT() ){ - zConverted = winUtf8ToUnicode(zFilename); - } -#ifdef SQLITE_WIN32_HAS_ANSI - else{ - zConverted = winUtf8ToMbcs(zFilename, osAreFileApisANSI()); - } -#endif - /* caller will handle out of memory */ - return zConverted; -} - /* ** This function returns non-zero if the specified UTF-8 string buffer ** ends with a directory separator character or one was successfully @@ -51596,7 +52241,14 @@ static int winMakeEndInDirSep(int nBuf, char *zBuf){ if( winIsDirSep(zBuf[nLen-1]) ){ return 1; }else if( nLen+1mxPathname; nBuf = nMax + 2; + nMax = pVfs->mxPathname; + nBuf = 2 + (i64)nMax; zBuf = sqlite3MallocZero( nBuf ); if( !zBuf ){ OSTRACE(("TEMP-FILENAME rc=SQLITE_IOERR_NOMEM\n")); @@ -51673,7 +52326,7 @@ static int winGetTempname(sqlite3_vfs *pVfs, char **pzBuf){ } #if defined(__CYGWIN__) - else{ + else if( osGetenv!=NULL ){ static const char *azDirs[] = { 0, /* getenv("SQLITE_TMPDIR") */ 0, /* getenv("TMPDIR") */ @@ -51689,11 +52342,11 @@ static int winGetTempname(sqlite3_vfs *pVfs, char **pzBuf){ unsigned int i; const char *zDir = 0; - if( !azDirs[0] ) azDirs[0] = getenv("SQLITE_TMPDIR"); - if( !azDirs[1] ) azDirs[1] = getenv("TMPDIR"); - if( !azDirs[2] ) azDirs[2] = getenv("TMP"); - if( !azDirs[3] ) azDirs[3] = getenv("TEMP"); - if( !azDirs[4] ) azDirs[4] = getenv("USERPROFILE"); + if( !azDirs[0] ) azDirs[0] = osGetenv("SQLITE_TMPDIR"); + if( !azDirs[1] ) azDirs[1] = osGetenv("TMPDIR"); + if( !azDirs[2] ) azDirs[2] = osGetenv("TMP"); + if( !azDirs[3] ) azDirs[3] = osGetenv("TEMP"); + if( !azDirs[4] ) azDirs[4] = osGetenv("USERPROFILE"); for(i=0; inOut ){ + /* SQLite assumes that xFullPathname() nul-terminates the output buffer + ** even if it returns an error. */ + zOut[iOff] = '\0'; + return SQLITE_CANTOPEN_BKPT; + } + sqlite3_snprintf(nOut-iOff, &zOut[iOff], "%s", zPath); + return SQLITE_OK; +} +#endif /* __CYGWIN__ */ /* ** Turn a relative pathname into a full pathname. Write the full @@ -52476,8 +53178,8 @@ static int winFullPathnameNoMutex( int nFull, /* Size of output buffer in bytes */ char *zFull /* Output buffer */ ){ -#if !SQLITE_OS_WINCE && !SQLITE_OS_WINRT && !defined(__CYGWIN__) - DWORD nByte; +#if !SQLITE_OS_WINCE && !SQLITE_OS_WINRT + int nByte; void *zConverted; char *zOut; #endif @@ -52490,64 +53192,82 @@ static int winFullPathnameNoMutex( zRelative++; } -#if defined(__CYGWIN__) SimulateIOError( return SQLITE_ERROR ); - UNUSED_PARAMETER(nFull); - assert( nFull>=pVfs->mxPathname ); - if ( sqlite3_data_directory && !winIsVerbatimPathname(zRelative) ){ - /* - ** NOTE: We are dealing with a relative path name and the data - ** directory has been set. Therefore, use it as the basis - ** for converting the relative path name to an absolute - ** one by prepending the data directory and a slash. - */ - char *zOut = sqlite3MallocZero( pVfs->mxPathname+1 ); - if( !zOut ){ - return SQLITE_IOERR_NOMEM_BKPT; - } - if( cygwin_conv_path( - (osIsNT() ? 
CCP_POSIX_TO_WIN_W : CCP_POSIX_TO_WIN_A) | - CCP_RELATIVE, zRelative, zOut, pVfs->mxPathname+1)<0 ){ - sqlite3_free(zOut); - return winLogError(SQLITE_CANTOPEN_CONVPATH, (DWORD)errno, - "winFullPathname1", zRelative); - }else{ - char *zUtf8 = winConvertToUtf8Filename(zOut); - if( !zUtf8 ){ - sqlite3_free(zOut); - return SQLITE_IOERR_NOMEM_BKPT; - } - sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s%c%s", - sqlite3_data_directory, winGetDirSep(), zUtf8); - sqlite3_free(zUtf8); - sqlite3_free(zOut); - } - }else{ - char *zOut = sqlite3MallocZero( pVfs->mxPathname+1 ); - if( !zOut ){ - return SQLITE_IOERR_NOMEM_BKPT; - } - if( cygwin_conv_path( - (osIsNT() ? CCP_POSIX_TO_WIN_W : CCP_POSIX_TO_WIN_A), - zRelative, zOut, pVfs->mxPathname+1)<0 ){ - sqlite3_free(zOut); - return winLogError(SQLITE_CANTOPEN_CONVPATH, (DWORD)errno, - "winFullPathname2", zRelative); - }else{ - char *zUtf8 = winConvertToUtf8Filename(zOut); - if( !zUtf8 ){ - sqlite3_free(zOut); - return SQLITE_IOERR_NOMEM_BKPT; - } - sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s", zUtf8); - sqlite3_free(zUtf8); - sqlite3_free(zOut); + +#ifdef __CYGWIN__ + if( osGetcwd ){ + zFull[nFull-1] = '\0'; + if( !winIsDriveLetterAndColon(zRelative) || !winIsDirSep(zRelative[2]) ){ + int rc = SQLITE_OK; + int nLink = 1; /* Number of symbolic links followed so far */ + const char *zIn = zRelative; /* Input path for each iteration of loop */ + char *zDel = 0; + struct stat buf; + + UNUSED_PARAMETER(pVfs); + + do { + /* Call lstat() on path zIn. Set bLink to true if the path is a symbolic + ** link, or false otherwise. */ + int bLink = 0; + if( osLstat && osReadlink ) { + if( osLstat(zIn, &buf)!=0 ){ + int myErrno = osErrno; + if( myErrno!=ENOENT ){ + rc = winLogError(SQLITE_CANTOPEN_BKPT, (DWORD)myErrno, "lstat", zIn); + } + }else{ + bLink = ((buf.st_mode & 0170000) == 0120000); + } + + if( bLink ){ + if( zDel==0 ){ + zDel = sqlite3MallocZero(nFull); + if( zDel==0 ) rc = SQLITE_NOMEM; + }else if( ++nLink>SQLITE_MAX_SYMLINKS ){ + rc = SQLITE_CANTOPEN_BKPT; + } + + if( rc==SQLITE_OK ){ + nByte = osReadlink(zIn, zDel, nFull-1); + if( nByte ==(DWORD)-1 ){ + rc = winLogError(SQLITE_CANTOPEN_BKPT, (DWORD)osErrno, "readlink", zIn); + }else{ + if( zDel[0]!='/' ){ + int n; + for(n = sqlite3Strlen30(zIn); n>0 && zIn[n-1]!='/'; n--); + if( nByte+n+1>nFull ){ + rc = SQLITE_CANTOPEN_BKPT; + }else{ + memmove(&zDel[n], zDel, nByte+1); + memcpy(zDel, zIn, n); + nByte += n; + } + } + zDel[nByte] = '\0'; + } + } + + zIn = zDel; + } + } + + assert( rc!=SQLITE_OK || zIn!=zFull || zIn[0]=='/' ); + if( rc==SQLITE_OK && zIn!=zFull ){ + rc = mkFullPathname(zIn, zFull, nFull); + } + if( bLink==0 ) break; + zIn = zFull; + }while( rc==SQLITE_OK ); + + sqlite3_free(zDel); + winSimplifyName(zFull); + return rc; } } - return SQLITE_OK; -#endif +#endif /* __CYGWIN__ */ -#if (SQLITE_OS_WINCE || SQLITE_OS_WINRT) && !defined(__CYGWIN__) +#if (SQLITE_OS_WINCE || SQLITE_OS_WINRT) && defined(_WIN32) SimulateIOError( return SQLITE_ERROR ); /* WinCE has no concept of a relative pathname, or so I am told. */ /* WinRT has no way to convert a relative path to an absolute one. */ @@ -52566,7 +53286,8 @@ static int winFullPathnameNoMutex( return SQLITE_OK; #endif -#if !SQLITE_OS_WINCE && !SQLITE_OS_WINRT && !defined(__CYGWIN__) +#if !SQLITE_OS_WINCE && !SQLITE_OS_WINRT +#if defined(_WIN32) /* It's odd to simulate an io-error here, but really this is just ** using the io-error infrastructure to test that SQLite handles this ** function failing. 
This function could fail if, for example, the @@ -52584,6 +53305,7 @@ static int winFullPathnameNoMutex( sqlite3_data_directory, winGetDirSep(), zRelative); return SQLITE_OK; } +#endif zConverted = winConvertFromUtf8Filename(zRelative); if( zConverted==0 ){ return SQLITE_IOERR_NOMEM_BKPT; @@ -52622,13 +53344,12 @@ static int winFullPathnameNoMutex( return winLogError(SQLITE_CANTOPEN_FULLPATH, osGetLastError(), "winFullPathname3", zRelative); } - nByte += 3; - zTemp = sqlite3MallocZero( nByte*sizeof(zTemp[0]) ); + zTemp = sqlite3MallocZero( nByte*sizeof(zTemp[0]) + 3*sizeof(zTemp[0]) ); if( zTemp==0 ){ sqlite3_free(zConverted); return SQLITE_IOERR_NOMEM_BKPT; } - nByte = osGetFullPathNameA((char*)zConverted, nByte, zTemp, 0); + nByte = osGetFullPathNameA((char*)zConverted, nByte+3, zTemp, 0); if( nByte==0 ){ sqlite3_free(zConverted); sqlite3_free(zTemp); @@ -52641,7 +53362,26 @@ static int winFullPathnameNoMutex( } #endif if( zOut ){ +#ifdef __CYGWIN__ + if( memcmp(zOut, "\\\\?\\", 4) ){ + sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s", zOut); + }else if( memcmp(zOut+4, "UNC\\", 4) ){ + sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s", zOut+4); + }else{ + char *p = zOut+6; + *p = '\\'; + if( osGetcwd ){ + /* On Cygwin, UNC paths use forward slashes */ + while( *p ){ + if( *p=='\\' ) *p = '/'; + ++p; + } + } + sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s", zOut+6); + } +#else sqlite3_snprintf(MIN(nFull, pVfs->mxPathname), zFull, "%s", zOut); +#endif /* __CYGWIN__ */ sqlite3_free(zOut); return SQLITE_OK; }else{ @@ -52671,25 +53411,8 @@ static int winFullPathname( */ static void *winDlOpen(sqlite3_vfs *pVfs, const char *zFilename){ HANDLE h; -#if defined(__CYGWIN__) - int nFull = pVfs->mxPathname+1; - char *zFull = sqlite3MallocZero( nFull ); - void *zConverted = 0; - if( zFull==0 ){ - OSTRACE(("DLOPEN name=%s, handle=%p\n", zFilename, (void*)0)); - return 0; - } - if( winFullPathname(pVfs, zFilename, nFull, zFull)!=SQLITE_OK ){ - sqlite3_free(zFull); - OSTRACE(("DLOPEN name=%s, handle=%p\n", zFilename, (void*)0)); - return 0; - } - zConverted = winConvertFromUtf8Filename(zFull); - sqlite3_free(zFull); -#else void *zConverted = winConvertFromUtf8Filename(zFilename); UNUSED_PARAMETER(pVfs); -#endif if( zConverted==0 ){ OSTRACE(("DLOPEN name=%s, handle=%p\n", zFilename, (void*)0)); return 0; @@ -53038,7 +53761,7 @@ SQLITE_API int sqlite3_os_init(void){ /* Double-check that the aSyscall[] array has been constructed ** correctly. See ticket [bb3a86e890c8e96ab] */ - assert( ArraySize(aSyscall)==80 ); + assert( ArraySize(aSyscall)==89 ); /* get memory map allocation granularity */ memset(&winSysInfo, 0, sizeof(SYSTEM_INFO)); @@ -53657,13 +54380,13 @@ static int memdbOpen( } if( p==0 ){ MemStore **apNew; - p = sqlite3Malloc( sizeof(*p) + szName + 3 ); + p = sqlite3Malloc( sizeof(*p) + (i64)szName + 3 ); if( p==0 ){ sqlite3_mutex_leave(pVfsMutex); return SQLITE_NOMEM; } apNew = sqlite3Realloc(memdb_g.apMemStore, - sizeof(apNew[0])*(memdb_g.nMemStore+1) ); + sizeof(apNew[0])*(1+(i64)memdb_g.nMemStore) ); if( apNew==0 ){ sqlite3_free(p); sqlite3_mutex_leave(pVfsMutex); @@ -54096,7 +54819,7 @@ SQLITE_PRIVATE int sqlite3MemdbInit(void){ ** no fewer collisions than the no-op *1. 
*/ #define BITVEC_HASH(X) (((X)*1)%BITVEC_NINT) -#define BITVEC_NPTR (BITVEC_USIZE/sizeof(Bitvec *)) +#define BITVEC_NPTR ((u32)(BITVEC_USIZE/sizeof(Bitvec *))) /* @@ -54245,7 +54968,9 @@ bitvec_set_rehash: }else{ memcpy(aiValues, p->u.aHash, sizeof(p->u.aHash)); memset(p->u.apSub, 0, sizeof(p->u.apSub)); - p->iDivisor = (p->iSize + BITVEC_NPTR - 1)/BITVEC_NPTR; + p->iDivisor = p->iSize/BITVEC_NPTR; + if( (p->iSize%BITVEC_NPTR)!=0 ) p->iDivisor++; + if( p->iDivisoriDivisor = BITVEC_NBIT; rc = sqlite3BitvecSet(p, i); for(j=0; jiSize<=BITVEC_NBIT ){ - p->u.aBitmap[i/BITVEC_SZELEM] &= ~(1 << (i&(BITVEC_SZELEM-1))); + p->u.aBitmap[i/BITVEC_SZELEM] &= ~(BITVEC_TELEM)(1<<(i&(BITVEC_SZELEM-1))); }else{ unsigned int j; u32 *aiValues = pBuf; @@ -54330,7 +55055,7 @@ SQLITE_PRIVATE u32 sqlite3BitvecSize(Bitvec *p){ ** individual bits within V. */ #define SETBIT(V,I) V[I>>3] |= (1<<(I&7)) -#define CLEARBIT(V,I) V[I>>3] &= ~(1<<(I&7)) +#define CLEARBIT(V,I) V[I>>3] &= ~(BITVEC_TELEM)(1<<(I&7)) #define TESTBIT(V,I) (V[I>>3]&(1<<(I&7)))!=0 /* @@ -54373,7 +55098,7 @@ SQLITE_PRIVATE int sqlite3BitvecBuiltinTest(int sz, int *aOp){ /* Allocate the Bitvec to be tested and a linear array of ** bits to act as the reference */ pBitvec = sqlite3BitvecCreate( sz ); - pV = sqlite3MallocZero( (sz+7)/8 + 1 ); + pV = sqlite3MallocZero( (7+(i64)sz)/8 + 1 ); pTmpSpace = sqlite3_malloc64(BITVEC_SZ); if( pBitvec==0 || pV==0 || pTmpSpace==0 ) goto bitvec_end; @@ -55614,10 +56339,6 @@ static SQLITE_WSD struct PCacheGlobal { sqlite3_mutex *mutex; /* Mutex for accessing the following: */ PgFreeslot *pFree; /* Free page blocks */ int nFreeSlot; /* Number of unused pcache slots */ - /* The following value requires a mutex to change. We skip the mutex on - ** reading because (1) most platforms read a 32-bit integer atomically and - ** (2) even if an incorrect value is read, no great harm is done since this - ** is really just an optimization. */ int bUnderPressure; /* True if low on PAGECACHE memory */ } pcache1_g; @@ -55665,7 +56386,7 @@ SQLITE_PRIVATE void sqlite3PCacheBufferSetup(void *pBuf, int sz, int n){ pcache1.nReserve = n>90 ? 
10 : (n/10 + 1); pcache1.pStart = pBuf; pcache1.pFree = 0; - pcache1.bUnderPressure = 0; + AtomicStore(&pcache1.bUnderPressure,0); while( n-- ){ p = (PgFreeslot*)pBuf; p->pNext = pcache1.pFree; @@ -55733,7 +56454,7 @@ static void *pcache1Alloc(int nByte){ if( p ){ pcache1.pFree = pcache1.pFree->pNext; pcache1.nFreeSlot--; - pcache1.bUnderPressure = pcache1.nFreeSlot=0 ); sqlite3StatusHighwater(SQLITE_STATUS_PAGECACHE_SIZE, nByte); sqlite3StatusUp(SQLITE_STATUS_PAGECACHE_USED, 1); @@ -55772,7 +56493,7 @@ static void pcache1Free(void *p){ pSlot->pNext = pcache1.pFree; pcache1.pFree = pSlot; pcache1.nFreeSlot++; - pcache1.bUnderPressure = pcache1.nFreeSlotszPage+pCache->szExtra)<=pcache1.szSlot ){ - return pcache1.bUnderPressure; + return AtomicLoad(&pcache1.bUnderPressure); }else{ return sqlite3HeapNearlyFull(); } @@ -55920,12 +56641,12 @@ static int pcache1UnderMemoryPressure(PCache1 *pCache){ */ static void pcache1ResizeHash(PCache1 *p){ PgHdr1 **apNew; - unsigned int nNew; - unsigned int i; + u64 nNew; + u32 i; assert( sqlite3_mutex_held(p->pGroup->mutex) ); - nNew = p->nHash*2; + nNew = 2*(u64)p->nHash; if( nNew<256 ){ nNew = 256; } @@ -56148,7 +56869,7 @@ static void pcache1Destroy(sqlite3_pcache *p); static sqlite3_pcache *pcache1Create(int szPage, int szExtra, int bPurgeable){ PCache1 *pCache; /* The newly created page cache */ PGroup *pGroup; /* The group the new page cache will belong to */ - int sz; /* Bytes of memory required to allocate the new cache */ + i64 sz; /* Bytes of memory required to allocate the new cache */ assert( (szPage & (szPage-1))==0 && szPage>=512 && szPage<=65536 ); assert( szExtra < 300 ); @@ -58036,6 +58757,9 @@ struct Pager { Wal *pWal; /* Write-ahead log used by "journal_mode=wal" */ char *zWal; /* File name for write-ahead log */ #endif +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + sqlite3 *dbWal; +#endif }; /* @@ -58627,7 +59351,7 @@ static void checkPage(PgHdr *pPg){ ** If an error occurs while reading from the journal file, an SQLite ** error code is returned. */ -static int readSuperJournal(sqlite3_file *pJrnl, char *zSuper, u32 nSuper){ +static int readSuperJournal(sqlite3_file *pJrnl, char *zSuper, u64 nSuper){ int rc; /* Return code */ u32 len; /* Length in bytes of super-journal name */ i64 szJ; /* Total size in bytes of journal file pJrnl */ @@ -59182,6 +59906,15 @@ static void pager_unlock(Pager *pPager){ if( pagerUseWal(pPager) ){ assert( !isOpen(pPager->jfd) ); + if( pPager->eState==PAGER_ERROR ){ + /* If an IO error occurs in wal.c while attempting to wrap the wal file, + ** then the Wal object may be holding a write-lock but no read-lock. + ** This call ensures that the write-lock is dropped as well. We cannot + ** have sqlite3WalEndReadTransaction() drop the write-lock, as it once + ** did, because this would break "BEGIN EXCLUSIVE" handling for + ** SQLITE_ENABLE_SETLK_TIMEOUT builds. */ + sqlite3WalEndWriteTransaction(pPager->pWal); + } sqlite3WalEndReadTransaction(pPager->pWal); pPager->eState = PAGER_OPEN; }else if( !pPager->exclusiveMode ){ @@ -59863,12 +60596,12 @@ static int pager_delsuper(Pager *pPager, const char *zSuper){ char *zJournal; /* Pointer to one journal within MJ file */ char *zSuperPtr; /* Space to hold super-journal filename */ char *zFree = 0; /* Free this buffer */ - int nSuperPtr; /* Amount of space allocated to zSuperPtr[] */ + i64 nSuperPtr; /* Amount of space allocated to zSuperPtr[] */ /* Allocate space for both the pJournal and pSuper file descriptors. ** If successful, open the super-journal file for reading. 
*/ - pSuper = (sqlite3_file *)sqlite3MallocZero(pVfs->szOsFile * 2); + pSuper = (sqlite3_file *)sqlite3MallocZero(2 * (i64)pVfs->szOsFile); if( !pSuper ){ rc = SQLITE_NOMEM_BKPT; pJournal = 0; @@ -59886,11 +60619,14 @@ static int pager_delsuper(Pager *pPager, const char *zSuper){ */ rc = sqlite3OsFileSize(pSuper, &nSuperJournal); if( rc!=SQLITE_OK ) goto delsuper_out; - nSuperPtr = pVfs->mxPathname+1; + nSuperPtr = 1 + (i64)pVfs->mxPathname; + assert( nSuperJournal>=0 && nSuperPtr>0 ); zFree = sqlite3Malloc(4 + nSuperJournal + nSuperPtr + 2); if( !zFree ){ rc = SQLITE_NOMEM_BKPT; goto delsuper_out; + }else{ + assert( nSuperJournal<=0x7fffffff ); } zFree[0] = zFree[1] = zFree[2] = zFree[3] = 0; zSuperJournal = &zFree[4]; @@ -60151,7 +60887,7 @@ static int pager_playback(Pager *pPager, int isHot){ ** for pageSize. */ zSuper = pPager->pTmpSpace; - rc = readSuperJournal(pPager->jfd, zSuper, pPager->pVfs->mxPathname+1); + rc = readSuperJournal(pPager->jfd, zSuper, 1+(i64)pPager->pVfs->mxPathname); if( rc==SQLITE_OK && zSuper[0] ){ rc = sqlite3OsAccess(pVfs, zSuper, SQLITE_ACCESS_EXISTS, &res); } @@ -60290,7 +61026,7 @@ end_playback: ** which case it requires 4 0x00 bytes in memory immediately before ** the filename. */ zSuper = &pPager->pTmpSpace[4]; - rc = readSuperJournal(pPager->jfd, zSuper, pPager->pVfs->mxPathname+1); + rc = readSuperJournal(pPager->jfd, zSuper, 1+(i64)pPager->pVfs->mxPathname); testcase( rc!=SQLITE_OK ); } if( rc==SQLITE_OK @@ -62061,6 +62797,7 @@ SQLITE_PRIVATE int sqlite3PagerOpen( const char *zUri = 0; /* URI args to copy */ int nUriByte = 1; /* Number of bytes of URI args at *zUri */ + /* Figure out how much space is required for each journal file-handle ** (there are two of them, the main journal and the sub-journal). */ journalFileSize = ROUND8(sqlite3JournalSize(pVfs)); @@ -62086,8 +62823,8 @@ SQLITE_PRIVATE int sqlite3PagerOpen( */ if( zFilename && zFilename[0] ){ const char *z; - nPathname = pVfs->mxPathname+1; - zPathname = sqlite3DbMallocRaw(0, nPathname*2); + nPathname = pVfs->mxPathname + 1; + zPathname = sqlite3DbMallocRaw(0, 2*(i64)nPathname); if( zPathname==0 ){ return SQLITE_NOMEM_BKPT; } @@ -62174,14 +62911,14 @@ SQLITE_PRIVATE int sqlite3PagerOpen( ROUND8(sizeof(*pPager)) + /* Pager structure */ ROUND8(pcacheSize) + /* PCache object */ ROUND8(pVfs->szOsFile) + /* The main db file */ - journalFileSize * 2 + /* The two journal files */ + (u64)journalFileSize * 2 + /* The two journal files */ SQLITE_PTRSIZE + /* Space to hold a pointer */ 4 + /* Database prefix */ - nPathname + 1 + /* database filename */ - nUriByte + /* query parameters */ - nPathname + 8 + 1 + /* Journal filename */ + (u64)nPathname + 1 + /* database filename */ + (u64)nUriByte + /* query parameters */ + (u64)nPathname + 8 + 1 + /* Journal filename */ #ifndef SQLITE_OMIT_WAL - nPathname + 4 + 1 + /* WAL filename */ + (u64)nPathname + 4 + 1 + /* WAL filename */ #endif 3 /* Terminator */ ); @@ -64904,6 +65641,11 @@ static int pagerOpenWal(Pager *pPager){ pPager->fd, pPager->zWal, pPager->exclusiveMode, pPager->journalSizeLimit, &pPager->pWal ); +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + if( rc==SQLITE_OK ){ + sqlite3WalDb(pPager->pWal, pPager->dbWal); + } +#endif } pagerFixMaplimit(pPager); @@ -65023,6 +65765,7 @@ SQLITE_PRIVATE int sqlite3PagerWalWriteLock(Pager *pPager, int bLock){ ** blocking locks are required. 
*/ SQLITE_PRIVATE void sqlite3PagerWalDb(Pager *pPager, sqlite3 *db){ + pPager->dbWal = db; if( pagerUseWal(pPager) ){ sqlite3WalDb(pPager->pWal, db); } @@ -65636,6 +66379,11 @@ struct WalCkptInfo { /* ** An open write-ahead log file is represented by an instance of the ** following object. +** +** writeLock: +** This is usually set to 1 whenever the WRITER lock is held. However, +** if it is set to 2, then the WRITER lock is held but must be released +** by walHandleException() if a SEH exception is thrown. */ struct Wal { sqlite3_vfs *pVfs; /* The VFS used to create pDbFd */ @@ -65726,9 +66474,13 @@ struct WalIterator { u32 *aPgno; /* Array of page numbers. */ int nEntry; /* Nr. of entries in aPgno[] and aIndex[] */ int iZero; /* Frame number associated with aPgno[0] */ - } aSegment[1]; /* One for every 32KB page in the wal-index */ + } aSegment[FLEXARRAY]; /* One for every 32KB page in the wal-index */ }; +/* Size (in bytes) of a WalIterator object suitable for N or fewer segments */ +#define SZ_WALITERATOR(N) \ + (offsetof(WalIterator,aSegment)*(N)*sizeof(struct WalSegment)) + /* ** Define the parameters of the hash tables in the wal-index file. There ** is a hash-table following every HASHTABLE_NPAGE page numbers in the @@ -65887,7 +66639,7 @@ static SQLITE_NOINLINE int walIndexPageRealloc( /* Enlarge the pWal->apWiData[] array if required */ if( pWal->nWiData<=iPage ){ - sqlite3_int64 nByte = sizeof(u32*)*(iPage+1); + sqlite3_int64 nByte = sizeof(u32*)*(1+(i64)iPage); volatile u32 **apNew; apNew = (volatile u32 **)sqlite3Realloc((void *)pWal->apWiData, nByte); if( !apNew ){ @@ -65996,10 +66748,8 @@ static void walChecksumBytes( s1 = s2 = 0; } - assert( nByte>=8 ); - assert( (nByte&0x00000007)==0 ); - assert( nByte<=65536 ); - assert( nByte%4==0 ); + /* nByte is a multiple of 8 between 8 and 65536 */ + assert( nByte>=8 && (nByte&7)==0 && nByte<=65536 ); if( !nativeCksum ){ do { @@ -67089,8 +67839,7 @@ static int walIteratorInit(Wal *pWal, u32 nBackfill, WalIterator **pp){ /* Allocate space for the WalIterator object. */ nSegment = walFramePage(iLast) + 1; - nByte = sizeof(WalIterator) - + (nSegment-1)*sizeof(struct WalSegment) + nByte = SZ_WALITERATOR(nSegment) + iLast*sizeof(ht_slot); p = (WalIterator *)sqlite3_malloc64(nByte + sizeof(ht_slot) * (iLast>HASHTABLE_NPAGE?HASHTABLE_NPAGE:iLast) @@ -67161,7 +67910,7 @@ static int walEnableBlockingMs(Wal *pWal, int nMs){ static int walEnableBlocking(Wal *pWal){ int res = 0; if( pWal->db ){ - int tmout = pWal->db->busyTimeout; + int tmout = pWal->db->setlkTimeout; if( tmout ){ res = walEnableBlockingMs(pWal, tmout); } @@ -67547,7 +68296,9 @@ static int walHandleException(Wal *pWal){ static const int S = 1; static const int E = (1<lockMask & ~( + u32 mUnlock; + if( pWal->writeLock==2 ) pWal->writeLock = 0; + mUnlock = pWal->lockMask & ~( (pWal->readLock<0 ? 0 : (S << WAL_READ_LOCK(pWal->readLock))) | (pWal->writeLock ? (E << WAL_WRITE_LOCK) : 0) | (pWal->ckptLock ? (E << WAL_CKPT_LOCK) : 0) @@ -67819,7 +68570,12 @@ static int walIndexReadHdr(Wal *pWal, int *pChanged){ if( bWriteLock || SQLITE_OK==(rc = walLockExclusive(pWal, WAL_WRITE_LOCK, 1)) ){ - pWal->writeLock = 1; + /* If the write-lock was just obtained, set writeLock to 2 instead of + ** the usual 1. This causes walIndexPage() to behave as if the + ** write-lock were held (so that it allocates new pages as required), + ** and walHandleException() to unlock the write-lock if a SEH exception + ** is thrown. 
*/ + if( !bWriteLock ) pWal->writeLock = 2; if( SQLITE_OK==(rc = walIndexPage(pWal, 0, &page0)) ){ badHdr = walIndexTryHdr(pWal, pChanged); if( badHdr ){ @@ -68183,7 +68939,6 @@ static int walTryBeginRead(Wal *pWal, int *pChanged, int useWal, int *pCnt){ rc = walIndexReadHdr(pWal, pChanged); } #ifdef SQLITE_ENABLE_SETLK_TIMEOUT - walDisableBlocking(pWal); if( rc==SQLITE_BUSY_TIMEOUT ){ rc = SQLITE_BUSY; *pCnt |= WAL_RETRY_BLOCKED_MASK; @@ -68198,6 +68953,7 @@ static int walTryBeginRead(Wal *pWal, int *pChanged, int useWal, int *pCnt){ ** WAL_RETRY this routine will be called again and will probably be ** right on the second iteration. */ + (void)walEnableBlocking(pWal); if( pWal->apWiData[0]==0 ){ /* This branch is taken when the xShmMap() method returns SQLITE_BUSY. ** We assume this is a transient condition, so return WAL_RETRY. The @@ -68214,6 +68970,7 @@ static int walTryBeginRead(Wal *pWal, int *pChanged, int useWal, int *pCnt){ rc = SQLITE_BUSY_RECOVERY; } } + walDisableBlocking(pWal); if( rc!=SQLITE_OK ){ return rc; } @@ -68604,8 +69361,11 @@ SQLITE_PRIVATE int sqlite3WalBeginReadTransaction(Wal *pWal, int *pChanged){ ** read-lock. */ SQLITE_PRIVATE void sqlite3WalEndReadTransaction(Wal *pWal){ - sqlite3WalEndWriteTransaction(pWal); +#ifndef SQLITE_ENABLE_SETLK_TIMEOUT + assert( pWal->writeLock==0 || pWal->readLock<0 ); +#endif if( pWal->readLock>=0 ){ + sqlite3WalEndWriteTransaction(pWal); walUnlockShared(pWal, WAL_READ_LOCK(pWal->readLock)); pWal->readLock = -1; } @@ -68798,7 +69558,7 @@ SQLITE_PRIVATE int sqlite3WalBeginWriteTransaction(Wal *pWal){ ** read-transaction was even opened, making this call a no-op. ** Return early. */ if( pWal->writeLock ){ - assert( !memcmp(&pWal->hdr,(void *)walIndexHdr(pWal),sizeof(WalIndexHdr)) ); + assert( !memcmp(&pWal->hdr,(void*)pWal->apWiData[0],sizeof(WalIndexHdr)) ); return SQLITE_OK; } #endif @@ -68898,6 +69658,7 @@ SQLITE_PRIVATE int sqlite3WalUndo(Wal *pWal, int (*xUndo)(void *, Pgno), void *p if( iMax!=pWal->hdr.mxFrame ) walCleanupHash(pWal); } SEH_EXCEPT( rc = SQLITE_IOERR_IN_PAGE; ) + pWal->iReCksum = 0; } return rc; } @@ -68945,6 +69706,9 @@ SQLITE_PRIVATE int sqlite3WalSavepointUndo(Wal *pWal, u32 *aWalData){ walCleanupHash(pWal); } SEH_EXCEPT( rc = SQLITE_IOERR_IN_PAGE; ) + if( pWal->iReCksum>pWal->hdr.mxFrame ){ + pWal->iReCksum = 0; + } } return rc; @@ -70247,6 +71011,12 @@ struct CellInfo { */ #define BTCURSOR_MAX_DEPTH 20 +/* +** Maximum amount of storage local to a database page, regardless of +** page size. +*/ +#define BT_MAX_LOCAL 65501 /* 65536 - 35 */ + /* ** A cursor is a pointer to a particular entry within a particular ** b-tree within a database file. @@ -70655,7 +71425,7 @@ SQLITE_PRIVATE int sqlite3BtreeHoldsMutex(Btree *p){ */ static void SQLITE_NOINLINE btreeEnterAll(sqlite3 *db){ int i; - int skipOk = 1; + u8 skipOk = 1; Btree *p; assert( sqlite3_mutex_held(db->mutex) ); for(i=0; inDb; i++){ @@ -71511,7 +72281,7 @@ static int saveCursorKey(BtCursor *pCur){ ** below. 
*/ void *pKey; pCur->nKey = sqlite3BtreePayloadSize(pCur); - pKey = sqlite3Malloc( pCur->nKey + 9 + 8 ); + pKey = sqlite3Malloc( ((i64)pCur->nKey) + 9 + 8 ); if( pKey ){ rc = sqlite3BtreePayload(pCur, 0, (int)pCur->nKey, pKey); if( rc==SQLITE_OK ){ @@ -71801,7 +72571,7 @@ SQLITE_PRIVATE void sqlite3BtreeCursorHint(BtCursor *pCur, int eHintType, ...){ */ SQLITE_PRIVATE void sqlite3BtreeCursorHintFlags(BtCursor *pCur, unsigned x){ assert( x==BTREE_SEEK_EQ || x==BTREE_BULKLOAD || x==0 ); - pCur->hints = x; + pCur->hints = (u8)x; } @@ -71995,14 +72765,15 @@ static SQLITE_NOINLINE void btreeParseCellAdjustSizeForOverflow( static int btreePayloadToLocal(MemPage *pPage, i64 nPayload){ int maxLocal; /* Maximum amount of payload held locally */ maxLocal = pPage->maxLocal; + assert( nPayload>=0 ); if( nPayload<=maxLocal ){ - return nPayload; + return (int)nPayload; }else{ int minLocal; /* Minimum amount of payload held locally */ int surplus; /* Overflow payload available for local storage */ minLocal = pPage->minLocal; - surplus = minLocal + (nPayload - minLocal)%(pPage->pBt->usableSize-4); - return ( surplus <= maxLocal ) ? surplus : minLocal; + surplus = (int)(minLocal +(nPayload - minLocal)%(pPage->pBt->usableSize-4)); + return (surplus <= maxLocal) ? surplus : minLocal; } } @@ -72112,11 +72883,13 @@ static void btreeParseCellPtr( pInfo->pPayload = pIter; testcase( nPayload==pPage->maxLocal ); testcase( nPayload==(u32)pPage->maxLocal+1 ); + assert( nPayload>=0 ); + assert( pPage->maxLocal <= BT_MAX_LOCAL ); if( nPayload<=pPage->maxLocal ){ /* This is the (easy) common case where the entire payload fits ** on the local page. No overflow is required. */ - pInfo->nSize = nPayload + (u16)(pIter - pCell); + pInfo->nSize = (u16)nPayload + (u16)(pIter - pCell); if( pInfo->nSize<4 ) pInfo->nSize = 4; pInfo->nLocal = (u16)nPayload; }else{ @@ -72149,11 +72922,13 @@ static void btreeParseCellPtrIndex( pInfo->pPayload = pIter; testcase( nPayload==pPage->maxLocal ); testcase( nPayload==(u32)pPage->maxLocal+1 ); + assert( nPayload>=0 ); + assert( pPage->maxLocal <= BT_MAX_LOCAL ); if( nPayload<=pPage->maxLocal ){ /* This is the (easy) common case where the entire payload fits ** on the local page. No overflow is required. */ - pInfo->nSize = nPayload + (u16)(pIter - pCell); + pInfo->nSize = (u16)nPayload + (u16)(pIter - pCell); if( pInfo->nSize<4 ) pInfo->nSize = 4; pInfo->nLocal = (u16)nPayload; }else{ @@ -72692,14 +73467,14 @@ static SQLITE_INLINE int allocateSpace(MemPage *pPage, int nByte, int *pIdx){ ** at the end of the page. So do additional corruption checks inside this ** routine and return SQLITE_CORRUPT if any problems are found. */ -static int freeSpace(MemPage *pPage, u16 iStart, u16 iSize){ - u16 iPtr; /* Address of ptr to next freeblock */ - u16 iFreeBlk; /* Address of the next freeblock */ +static int freeSpace(MemPage *pPage, int iStart, int iSize){ + int iPtr; /* Address of ptr to next freeblock */ + int iFreeBlk; /* Address of the next freeblock */ u8 hdr; /* Page header size. 
0 or 100 */ - u8 nFrag = 0; /* Reduction in fragmentation */ - u16 iOrigSize = iSize; /* Original value of iSize */ - u16 x; /* Offset to cell content area */ - u32 iEnd = iStart + iSize; /* First byte past the iStart buffer */ + int nFrag = 0; /* Reduction in fragmentation */ + int iOrigSize = iSize; /* Original value of iSize */ + int x; /* Offset to cell content area */ + int iEnd = iStart + iSize; /* First byte past the iStart buffer */ unsigned char *data = pPage->aData; /* Page content */ u8 *pTmp; /* Temporary ptr into data[] */ @@ -72726,7 +73501,7 @@ static int freeSpace(MemPage *pPage, u16 iStart, u16 iSize){ } iPtr = iFreeBlk; } - if( iFreeBlk>pPage->pBt->usableSize-4 ){ /* TH3: corrupt081.100 */ + if( iFreeBlk>(int)pPage->pBt->usableSize-4 ){ /* TH3: corrupt081.100 */ return SQLITE_CORRUPT_PAGE(pPage); } assert( iFreeBlk>iPtr || iFreeBlk==0 || CORRUPT_DB ); @@ -72741,7 +73516,7 @@ static int freeSpace(MemPage *pPage, u16 iStart, u16 iSize){ nFrag = iFreeBlk - iEnd; if( iEnd>iFreeBlk ) return SQLITE_CORRUPT_PAGE(pPage); iEnd = iFreeBlk + get2byte(&data[iFreeBlk+2]); - if( iEnd > pPage->pBt->usableSize ){ + if( iEnd > (int)pPage->pBt->usableSize ){ return SQLITE_CORRUPT_PAGE(pPage); } iSize = iEnd - iStart; @@ -72762,7 +73537,7 @@ static int freeSpace(MemPage *pPage, u16 iStart, u16 iSize){ } } if( nFrag>data[hdr+7] ) return SQLITE_CORRUPT_PAGE(pPage); - data[hdr+7] -= nFrag; + data[hdr+7] -= (u8)nFrag; } pTmp = &data[hdr+5]; x = get2byte(pTmp); @@ -72783,7 +73558,8 @@ static int freeSpace(MemPage *pPage, u16 iStart, u16 iSize){ /* Insert the new freeblock into the freelist */ put2byte(&data[iPtr], iStart); put2byte(&data[iStart], iFreeBlk); - put2byte(&data[iStart+2], iSize); + assert( iSize>=0 && iSize<=0xffff ); + put2byte(&data[iStart+2], (u16)iSize); } pPage->nFree += iOrigSize; return SQLITE_OK; @@ -73009,7 +73785,7 @@ static int btreeInitPage(MemPage *pPage){ assert( pBt->pageSize>=512 && pBt->pageSize<=65536 ); pPage->maskPage = (u16)(pBt->pageSize - 1); pPage->nOverflow = 0; - pPage->cellOffset = pPage->hdrOffset + 8 + pPage->childPtrSize; + pPage->cellOffset = (u16)(pPage->hdrOffset + 8 + pPage->childPtrSize); pPage->aCellIdx = data + pPage->childPtrSize + 8; pPage->aDataEnd = pPage->aData + pBt->pageSize; pPage->aDataOfst = pPage->aData + pPage->childPtrSize; @@ -73043,8 +73819,8 @@ static int btreeInitPage(MemPage *pPage){ static void zeroPage(MemPage *pPage, int flags){ unsigned char *data = pPage->aData; BtShared *pBt = pPage->pBt; - u8 hdr = pPage->hdrOffset; - u16 first; + int hdr = pPage->hdrOffset; + int first; assert( sqlite3PagerPagenumber(pPage->pDbPage)==pPage->pgno || CORRUPT_DB ); assert( sqlite3PagerGetExtra(pPage->pDbPage) == (void*)pPage ); @@ -73061,7 +73837,7 @@ static void zeroPage(MemPage *pPage, int flags){ put2byte(&data[hdr+5], pBt->usableSize); pPage->nFree = (u16)(pBt->usableSize - first); decodeFlags(pPage, flags); - pPage->cellOffset = first; + pPage->cellOffset = (u16)first; pPage->aDataEnd = &data[pBt->pageSize]; pPage->aCellIdx = &data[first]; pPage->aDataOfst = &data[pPage->childPtrSize]; @@ -73847,7 +74623,7 @@ SQLITE_PRIVATE int sqlite3BtreeSetPageSize(Btree *p, int pageSize, int nReserve, BtShared *pBt = p->pBt; assert( nReserve>=0 && nReserve<=255 ); sqlite3BtreeEnter(p); - pBt->nReserveWanted = nReserve; + pBt->nReserveWanted = (u8)nReserve; x = pBt->pageSize - pBt->usableSize; if( nReservebtsFlags & BTS_PAGESIZE_FIXED ){ @@ -73953,7 +74729,7 @@ SQLITE_PRIVATE int sqlite3BtreeSecureDelete(Btree *p, int newFlag){ assert( 
BTS_FAST_SECURE==(BTS_OVERWRITE|BTS_SECURE_DELETE) ); if( newFlag>=0 ){ p->pBt->btsFlags &= ~BTS_FAST_SECURE; - p->pBt->btsFlags |= BTS_SECURE_DELETE*newFlag; + p->pBt->btsFlags |= (u16)(BTS_SECURE_DELETE*newFlag); } b = (p->pBt->btsFlags & BTS_FAST_SECURE)/BTS_SECURE_DELETE; sqlite3BtreeLeave(p); @@ -74473,6 +75249,13 @@ static SQLITE_NOINLINE int btreeBeginTrans( (void)sqlite3PagerWalWriteLock(pPager, 0); unlockBtreeIfUnused(pBt); } +#if defined(SQLITE_ENABLE_SETLK_TIMEOUT) + if( rc==SQLITE_BUSY_TIMEOUT ){ + /* If a blocking lock timed out, break out of the loop here so that + ** the busy-handler is not invoked. */ + break; + } +#endif }while( (rc&0xFF)==SQLITE_BUSY && pBt->inTransaction==TRANS_NONE && btreeInvokeBusyHandler(pBt) ); sqlite3PagerWalDb(pPager, 0); @@ -76882,7 +77665,7 @@ bypass_moveto_root: rc = SQLITE_CORRUPT_PAGE(pPage); goto moveto_index_finish; } - pCellKey = sqlite3Malloc( nCell+nOverrun ); + pCellKey = sqlite3Malloc( (u64)nCell+(u64)nOverrun ); if( pCellKey==0 ){ rc = SQLITE_NOMEM_BKPT; goto moveto_index_finish; @@ -78401,7 +79184,8 @@ static int rebuildPage( } /* The pPg->nFree field is now set incorrectly. The caller will fix it. */ - pPg->nCell = nCell; + assert( nCell < 10922 ); + pPg->nCell = (u16)nCell; pPg->nOverflow = 0; put2byte(&aData[hdr+1], 0); @@ -78648,9 +79432,13 @@ static int editPage( if( pageInsertArray( pPg, pBegin, &pData, pCellptr, iNew+nCell, nNew-nCell, pCArray - ) ) goto editpage_fail; + ) + ){ + goto editpage_fail; + } - pPg->nCell = nNew; + assert( nNew < 10922 ); + pPg->nCell = (u16)nNew; pPg->nOverflow = 0; put2byte(&aData[hdr+3], pPg->nCell); @@ -78959,7 +79747,7 @@ static int balance_nonroot( int pageFlags; /* Value of pPage->aData[0] */ int iSpace1 = 0; /* First unused byte of aSpace1[] */ int iOvflSpace = 0; /* First unused byte of aOvflSpace[] */ - int szScratch; /* Size of scratch memory requested */ + u64 szScratch; /* Size of scratch memory requested */ MemPage *apOld[NB]; /* pPage and up to two siblings */ MemPage *apNew[NB+2]; /* pPage and up to NB siblings after balancing */ u8 *pRight; /* Location in parent of right-sibling pointer */ @@ -80244,7 +81032,7 @@ SQLITE_PRIVATE int sqlite3BtreeInsert( if( pCur->info.nKey==pX->nKey ){ BtreePayload x2; x2.pData = pX->pKey; - x2.nData = pX->nKey; + x2.nData = (int)pX->nKey; assert( pX->nKey<=0x7fffffff ); x2.nZero = 0; return btreeOverwriteCell(pCur, &x2); } @@ -80425,7 +81213,7 @@ SQLITE_PRIVATE int sqlite3BtreeTransferRow(BtCursor *pDest, BtCursor *pSrc, i64 getCellInfo(pSrc); if( pSrc->info.nPayload<0x80 ){ - *(aOut++) = pSrc->info.nPayload; + *(aOut++) = (u8)pSrc->info.nPayload; }else{ aOut += sqlite3PutVarint(aOut, pSrc->info.nPayload); } @@ -80438,7 +81226,7 @@ SQLITE_PRIVATE int sqlite3BtreeTransferRow(BtCursor *pDest, BtCursor *pSrc, i64 nRem = pSrc->info.nPayload; if( nIn==nRem && nInpPage->maxLocal ){ memcpy(aOut, aIn, nIn); - pBt->nPreformatSize = nIn + (aOut - pBt->pTmpSpace); + pBt->nPreformatSize = nIn + (int)(aOut - pBt->pTmpSpace); return SQLITE_OK; }else{ int rc = SQLITE_OK; @@ -80450,7 +81238,7 @@ SQLITE_PRIVATE int sqlite3BtreeTransferRow(BtCursor *pDest, BtCursor *pSrc, i64 u32 nOut; /* Size of output buffer aOut[] */ nOut = btreePayloadToLocal(pDest->pPage, pSrc->info.nPayload); - pBt->nPreformatSize = nOut + (aOut - pBt->pTmpSpace); + pBt->nPreformatSize = (int)nOut + (int)(aOut - pBt->pTmpSpace); if( nOutinfo.nPayload ){ pPgnoOut = &aOut[nOut]; pBt->nPreformatSize += 4; @@ -82071,6 +82859,7 @@ SQLITE_PRIVATE int sqlite3BtreeIsInBackup(Btree *p){ */ SQLITE_PRIVATE 
void *sqlite3BtreeSchema(Btree *p, int nBytes, void(*xFree)(void *)){ BtShared *pBt = p->pBt; + assert( nBytes==0 || nBytes==sizeof(Schema) ); sqlite3BtreeEnter(p); if( !pBt->pSchema && nBytes ){ pBt->pSchema = sqlite3DbMallocZero(0, nBytes); @@ -83187,7 +83976,7 @@ static void vdbeMemRenderNum(int sz, char *zBuf, Mem *p){ ** corresponding string value, then it is important that the string be ** derived from the numeric value, not the other way around, to ensure ** that the index and table are consistent. See ticket -** https://www.sqlite.org/src/info/343634942dd54ab (2018-01-31) for +** https://sqlite.org/src/info/343634942dd54ab (2018-01-31) for ** an example. ** ** This routine looks at pMem to verify that if it has both a numeric @@ -83373,7 +84162,7 @@ SQLITE_PRIVATE void sqlite3VdbeMemZeroTerminateIfAble(Mem *pMem){ return; } if( pMem->enc!=SQLITE_UTF8 ) return; - if( NEVER(pMem->z==0) ) return; + assert( pMem->z!=0 ); if( pMem->flags & MEM_Dyn ){ if( pMem->xDel==sqlite3_free && sqlite3_msize(pMem->z) >= (u64)(pMem->n+1) @@ -84486,7 +85275,7 @@ static sqlite3_value *valueNew(sqlite3 *db, struct ValueNewStat4Ctx *p){ if( pRec==0 ){ Index *pIdx = p->pIdx; /* Index being probed */ - int nByte; /* Bytes of space to allocate */ + i64 nByte; /* Bytes of space to allocate */ int i; /* Counter variable */ int nCol = pIdx->nColumn; /* Number of index columns including rowid */ @@ -84552,7 +85341,7 @@ static int valueFromFunction( ){ sqlite3_context ctx; /* Context object for function invocation */ sqlite3_value **apVal = 0; /* Function arguments */ - int nVal = 0; /* Size of apVal[] array */ + int nVal = 0; /* Number of function arguments */ FuncDef *pFunc = 0; /* Function definition */ sqlite3_value *pVal = 0; /* New value */ int rc = SQLITE_OK; /* Return code */ @@ -85550,12 +86339,10 @@ SQLITE_PRIVATE int sqlite3VdbeAddFunctionCall( int eCallCtx /* Calling context */ ){ Vdbe *v = pParse->pVdbe; - int nByte; int addr; sqlite3_context *pCtx; assert( v ); - nByte = sizeof(*pCtx) + (nArg-1)*sizeof(sqlite3_value*); - pCtx = sqlite3DbMallocRawNN(pParse->db, nByte); + pCtx = sqlite3DbMallocRawNN(pParse->db, SZ_CONTEXT(nArg)); if( pCtx==0 ){ assert( pParse->db->mallocFailed ); freeEphemeralFunction(pParse->db, (FuncDef*)pFunc); @@ -85831,7 +86618,7 @@ static Op *opIterNext(VdbeOpIter *p){ } if( pRet->p4type==P4_SUBPROGRAM ){ - int nByte = (p->nSub+1)*sizeof(SubProgram*); + i64 nByte = (1+(u64)p->nSub)*sizeof(SubProgram*); int j; for(j=0; jnSub; j++){ if( p->apSub[j]==pRet->p4.pProgram ) break; @@ -85961,8 +86748,8 @@ SQLITE_PRIVATE void sqlite3VdbeAssertAbortable(Vdbe *p){ ** (1) For each jump instruction with a negative P2 value (a label) ** resolve the P2 value to an actual address. ** -** (2) Compute the maximum number of arguments used by any SQL function -** and store that value in *pMaxFuncArgs. +** (2) Compute the maximum number of arguments used by the xUpdate/xFilter +** methods of any virtual table and store that value in *pMaxVtabArgs. ** ** (3) Update the Vdbe.readOnly and Vdbe.bIsReader flags to accurately ** indicate what the prepared statement actually does. @@ -85975,8 +86762,8 @@ SQLITE_PRIVATE void sqlite3VdbeAssertAbortable(Vdbe *p){ ** script numbers the opcodes correctly. Changes to this routine must be ** coordinated with changes to mkopcodeh.tcl. 
*/ -static void resolveP2Values(Vdbe *p, int *pMaxFuncArgs){ - int nMaxArgs = *pMaxFuncArgs; +static void resolveP2Values(Vdbe *p, int *pMaxVtabArgs){ + int nMaxVtabArgs = *pMaxVtabArgs; Op *pOp; Parse *pParse = p->pParse; int *aLabel = pParse->aLabel; @@ -86021,15 +86808,19 @@ static void resolveP2Values(Vdbe *p, int *pMaxFuncArgs){ } #ifndef SQLITE_OMIT_VIRTUALTABLE case OP_VUpdate: { - if( pOp->p2>nMaxArgs ) nMaxArgs = pOp->p2; + if( pOp->p2>nMaxVtabArgs ) nMaxVtabArgs = pOp->p2; break; } case OP_VFilter: { int n; + /* The instruction immediately prior to VFilter will be an + ** OP_Integer that sets the "argc" value for the VFilter. See + ** the code where OP_VFilter is generated at tag-20250207a. */ assert( (pOp - p->aOp) >= 3 ); assert( pOp[-1].opcode==OP_Integer ); + assert( pOp[-1].p2==pOp->p3+1 ); n = pOp[-1].p1; - if( n>nMaxArgs ) nMaxArgs = n; + if( n>nMaxVtabArgs ) nMaxVtabArgs = n; /* Fall through into the default case */ /* no break */ deliberate_fall_through } @@ -86070,7 +86861,7 @@ resolve_p2_values_loop_exit: pParse->aLabel = 0; } pParse->nLabel = 0; - *pMaxFuncArgs = nMaxArgs; + *pMaxVtabArgs = nMaxVtabArgs; assert( p->bIsReader!=0 || DbMaskAllZero(p->btreeMask) ); } @@ -86299,7 +87090,7 @@ SQLITE_PRIVATE void sqlite3VdbeScanStatus( const char *zName /* Name of table or index being scanned */ ){ if( IS_STMT_SCANSTATUS(p->db) ){ - sqlite3_int64 nByte = (p->nScan+1) * sizeof(ScanStatus); + i64 nByte = (1+(i64)p->nScan) * sizeof(ScanStatus); ScanStatus *aNew; aNew = (ScanStatus*)sqlite3DbRealloc(p->db, p->aScan, nByte); if( aNew ){ @@ -86409,6 +87200,9 @@ SQLITE_PRIVATE void sqlite3VdbeChangeP5(Vdbe *p, u16 p5){ */ SQLITE_PRIVATE void sqlite3VdbeTypeofColumn(Vdbe *p, int iDest){ VdbeOp *pOp = sqlite3VdbeGetLastOp(p); +#ifdef SQLITE_DEBUG + while( pOp->opcode==OP_ReleaseReg ) pOp--; +#endif if( pOp->p3==iDest && pOp->opcode==OP_Column ){ pOp->p5 |= OPFLAG_TYPEOFARG; } @@ -87748,7 +88542,7 @@ SQLITE_PRIVATE void sqlite3VdbeMakeReady( int nVar; /* Number of parameters */ int nMem; /* Number of VM memory registers */ int nCursor; /* Number of cursors required */ - int nArg; /* Number of arguments in subprograms */ + int nArg; /* Max number args to xFilter or xUpdate */ int n; /* Loop counter */ struct ReusableSpace x; /* Reusable bulk memory */ @@ -87820,6 +88614,9 @@ SQLITE_PRIVATE void sqlite3VdbeMakeReady( p->apCsr = allocSpace(&x, p->apCsr, nCursor*sizeof(VdbeCursor*)); } } +#ifdef SQLITE_DEBUG + p->napArg = nArg; +#endif if( db->mallocFailed ){ p->nVar = 0; @@ -89317,6 +90114,7 @@ SQLITE_PRIVATE UnpackedRecord *sqlite3VdbeAllocUnpackedRecord( ){ UnpackedRecord *p; /* Unpacked record to return */ int nByte; /* Number of bytes required for *p */ + assert( sizeof(UnpackedRecord) + sizeof(Mem)*65536 < 0x7fffffff ); nByte = ROUND8P(sizeof(UnpackedRecord)) + sizeof(Mem)*(pKeyInfo->nKeyField+1); p = (UnpackedRecord *)sqlite3DbMallocRaw(pKeyInfo->db, nByte); if( !p ) return 0; @@ -90623,10 +91421,11 @@ SQLITE_PRIVATE void sqlite3VdbePreUpdateHook( preupdate.pCsr = pCsr; preupdate.op = op; preupdate.iNewReg = iReg; - preupdate.keyinfo.db = db; - preupdate.keyinfo.enc = ENC(db); - preupdate.keyinfo.nKeyField = pTab->nCol; - preupdate.keyinfo.aSortFlags = (u8*)&fakeSortOrder; + preupdate.pKeyinfo = (KeyInfo*)&preupdate.keyinfoSpace; + preupdate.pKeyinfo->db = db; + preupdate.pKeyinfo->enc = ENC(db); + preupdate.pKeyinfo->nKeyField = pTab->nCol; + preupdate.pKeyinfo->aSortFlags = (u8*)&fakeSortOrder; preupdate.iKey1 = iKey1; preupdate.iKey2 = iKey2; preupdate.pTab = pTab; @@ -90636,8 
+91435,8 @@ SQLITE_PRIVATE void sqlite3VdbePreUpdateHook( db->xPreUpdateCallback(db->pPreUpdateArg, db, op, zDb, zTbl, iKey1, iKey2); db->pPreUpdate = 0; sqlite3DbFree(db, preupdate.aRecord); - vdbeFreeUnpacked(db, preupdate.keyinfo.nKeyField+1, preupdate.pUnpacked); - vdbeFreeUnpacked(db, preupdate.keyinfo.nKeyField+1, preupdate.pNewUnpacked); + vdbeFreeUnpacked(db, preupdate.pKeyinfo->nKeyField+1,preupdate.pUnpacked); + vdbeFreeUnpacked(db, preupdate.pKeyinfo->nKeyField+1,preupdate.pNewUnpacked); sqlite3VdbeMemRelease(&preupdate.oldipk); if( preupdate.aNew ){ int i; @@ -92468,7 +93267,7 @@ SQLITE_API int sqlite3_bind_text64( assert( xDel!=SQLITE_DYNAMIC ); if( enc!=SQLITE_UTF8 ){ if( enc==SQLITE_UTF16 ) enc = SQLITE_UTF16NATIVE; - nData &= ~(u16)1; + nData &= ~(u64)1; } return bindText(pStmt, i, zData, nData, xDel, enc); } @@ -92876,7 +93675,7 @@ SQLITE_API int sqlite3_preupdate_old(sqlite3 *db, int iIdx, sqlite3_value **ppVa if( !aRec ) goto preupdate_old_out; rc = sqlite3BtreePayload(p->pCsr->uc.pCursor, 0, nRec, aRec); if( rc==SQLITE_OK ){ - p->pUnpacked = vdbeUnpackRecord(&p->keyinfo, nRec, aRec); + p->pUnpacked = vdbeUnpackRecord(p->pKeyinfo, nRec, aRec); if( !p->pUnpacked ) rc = SQLITE_NOMEM; } if( rc!=SQLITE_OK ){ @@ -92893,7 +93692,9 @@ SQLITE_API int sqlite3_preupdate_old(sqlite3 *db, int iIdx, sqlite3_value **ppVa Column *pCol = &p->pTab->aCol[iIdx]; if( pCol->iDflt>0 ){ if( p->apDflt==0 ){ - int nByte = sizeof(sqlite3_value*)*p->pTab->nCol; + int nByte; + assert( sizeof(sqlite3_value*)*UMXV(p->pTab->nCol) < 0x7fffffff ); + nByte = sizeof(sqlite3_value*)*p->pTab->nCol; p->apDflt = (sqlite3_value**)sqlite3DbMallocZero(db, nByte); if( p->apDflt==0 ) goto preupdate_old_out; } @@ -92939,7 +93740,7 @@ SQLITE_API int sqlite3_preupdate_count(sqlite3 *db){ #else p = db->pPreUpdate; #endif - return (p ? p->keyinfo.nKeyField : 0); + return (p ? p->pKeyinfo->nKeyField : 0); } #endif /* SQLITE_ENABLE_PREUPDATE_HOOK */ @@ -93022,7 +93823,7 @@ SQLITE_API int sqlite3_preupdate_new(sqlite3 *db, int iIdx, sqlite3_value **ppVa Mem *pData = &p->v->aMem[p->iNewReg]; rc = ExpandBlob(pData); if( rc!=SQLITE_OK ) goto preupdate_new_out; - pUnpack = vdbeUnpackRecord(&p->keyinfo, pData->n, pData->z); + pUnpack = vdbeUnpackRecord(p->pKeyinfo, pData->n, pData->z); if( !pUnpack ){ rc = SQLITE_NOMEM; goto preupdate_new_out; @@ -93043,7 +93844,8 @@ SQLITE_API int sqlite3_preupdate_new(sqlite3 *db, int iIdx, sqlite3_value **ppVa */ assert( p->op==SQLITE_UPDATE ); if( !p->aNew ){ - p->aNew = (Mem *)sqlite3DbMallocZero(db, sizeof(Mem) * p->pCsr->nField); + assert( sizeof(Mem)*UMXV(p->pCsr->nField) < 0x7fffffff ); + p->aNew = (Mem *)sqlite3DbMallocZero(db, sizeof(Mem)*p->pCsr->nField); if( !p->aNew ){ rc = SQLITE_NOMEM; goto preupdate_new_out; @@ -93813,11 +94615,11 @@ static VdbeCursor *allocateCursor( */ Mem *pMem = iCur>0 ? 
&p->aMem[p->nMem-iCur] : p->aMem; - int nByte; + i64 nByte; VdbeCursor *pCx = 0; - nByte = - ROUND8P(sizeof(VdbeCursor)) + 2*sizeof(u32)*nField + - (eCurType==CURTYPE_BTREE?sqlite3BtreeCursorSize():0); + nByte = SZ_VDBECURSOR(nField); + assert( ROUND8(nByte)==nByte ); + if( eCurType==CURTYPE_BTREE ) nByte += sqlite3BtreeCursorSize(); assert( iCur>=0 && iCurnCursor ); if( p->apCsr[iCur] ){ /*OPTIMIZATION-IF-FALSE*/ @@ -93841,7 +94643,7 @@ static VdbeCursor *allocateCursor( pMem->szMalloc = 0; return 0; } - pMem->szMalloc = nByte; + pMem->szMalloc = (int)nByte; } p->apCsr[iCur] = pCx = (VdbeCursor*)pMem->zMalloc; @@ -93850,8 +94652,8 @@ static VdbeCursor *allocateCursor( pCx->nField = nField; pCx->aOffset = &pCx->aType[nField]; if( eCurType==CURTYPE_BTREE ){ - pCx->uc.pCursor = (BtCursor*) - &pMem->z[ROUND8P(sizeof(VdbeCursor))+2*sizeof(u32)*nField]; + assert( ROUND8(SZ_VDBECURSOR(nField))==SZ_VDBECURSOR(nField) ); + pCx->uc.pCursor = (BtCursor*)&pMem->z[SZ_VDBECURSOR(nField)]; sqlite3BtreeCursorZero(pCx->uc.pCursor); } return pCx; @@ -94855,7 +95657,7 @@ case OP_Halt: { sqlite3VdbeError(p, "%s", pOp->p4.z); } pcx = (int)(pOp - aOp); - sqlite3_log(pOp->p1, "abort at %d in [%s]: %s", pcx, p->zSql, p->zErrMsg); + sqlite3_log(pOp->p1, "abort at %d: %s; [%s]", pcx, p->zErrMsg, p->zSql); } rc = sqlite3VdbeHalt(p); assert( rc==SQLITE_BUSY || rc==SQLITE_OK || rc==SQLITE_ERROR ); @@ -96181,7 +96983,7 @@ case OP_BitNot: { /* same as TK_BITNOT, in1, out2 */ break; } -/* Opcode: Once P1 P2 * * * +/* Opcode: Once P1 P2 P3 * * ** ** Fall through to the next instruction the first time this opcode is ** encountered on each invocation of the byte-code program. Jump to P2 @@ -96197,6 +96999,12 @@ case OP_BitNot: { /* same as TK_BITNOT, in1, out2 */ ** whether or not the jump should be taken. The bitmask is necessary ** because the self-altering code trick does not work for recursive ** triggers. +** +** The P3 operand is not used directly by this opcode. However P3 is +** used by the code generator as follows: If this opcode is the start +** of a subroutine and that subroutine uses a Bloom filter, then P3 will +** be the register that holds that Bloom filter. See tag-202407032019 +** in the source code for implementation details. */ case OP_Once: { /* jump */ u32 iAddr; /* Address of this instruction */ @@ -97242,6 +98050,7 @@ case OP_MakeRecord: { zHdr += sqlite3PutVarint(zHdr, serial_type); if( pRec->n ){ assert( pRec->z!=0 ); + assert( pRec->z!=(const char*)sqlite3CtypeMap ); memcpy(zPayload, pRec->z, pRec->n); zPayload += pRec->n; } @@ -99593,7 +100402,7 @@ case OP_RowData: { /* The OP_RowData opcodes always follow OP_NotExists or ** OP_SeekRowid or OP_Rewind/Op_Next with no intervening instructions ** that might invalidate the cursor. - ** If this where not the case, on of the following assert()s + ** If this were not the case, one of the following assert()s ** would fail. Should this ever change (because of changes in the code ** generator) then the fix would be to insert a call to ** sqlite3VdbeCursorMoveto(). 
@@ -100862,7 +101671,7 @@ case OP_RowSetTest: { /* jump, in1, in3 */ */ case OP_Program: { /* jump0 */ int nMem; /* Number of memory registers for sub-program */ - int nByte; /* Bytes of runtime space required for sub-program */ + i64 nByte; /* Bytes of runtime space required for sub-program */ Mem *pRt; /* Register to allocate runtime space */ Mem *pMem; /* Used to iterate through memory cells */ Mem *pEnd; /* Last memory cell in new array */ @@ -100913,7 +101722,7 @@ case OP_Program: { /* jump0 */ nByte = ROUND8(sizeof(VdbeFrame)) + nMem * sizeof(Mem) + pProgram->nCsr * sizeof(VdbeCursor*) - + (pProgram->nOp + 7)/8; + + (7 + (i64)pProgram->nOp)/8; pFrame = sqlite3DbMallocZero(db, nByte); if( !pFrame ){ goto no_mem; @@ -100921,7 +101730,7 @@ case OP_Program: { /* jump0 */ sqlite3VdbeMemRelease(pRt); pRt->flags = MEM_Blob|MEM_Dyn; pRt->z = (char*)pFrame; - pRt->n = nByte; + pRt->n = (int)nByte; pRt->xDel = sqlite3VdbeFrameMemDel; pFrame->v = p; @@ -101020,12 +101829,14 @@ case OP_Param: { /* out2 */ ** statement counter is incremented (immediate foreign key constraints). */ case OP_FkCounter: { - if( db->flags & SQLITE_DeferFKs ){ - db->nDeferredImmCons += pOp->p2; - }else if( pOp->p1 ){ + if( pOp->p1 ){ db->nDeferredCons += pOp->p2; }else{ - p->nFkConstraint += pOp->p2; + if( db->flags & SQLITE_DeferFKs ){ + db->nDeferredImmCons += pOp->p2; + }else{ + p->nFkConstraint += pOp->p2; + } } break; } @@ -101240,7 +102051,7 @@ case OP_AggStep: { ** ** Note: We could avoid this by using a regular memory cell from aMem[] for ** the accumulator, instead of allocating one here. */ - nAlloc = ROUND8P( sizeof(pCtx[0]) + (n-1)*sizeof(sqlite3_value*) ); + nAlloc = ROUND8P( SZ_CONTEXT(n) ); pCtx = sqlite3DbMallocRawNN(db, nAlloc + sizeof(Mem)); if( pCtx==0 ) goto no_mem; pCtx->pOut = (Mem*)((u8*)pCtx + nAlloc); @@ -101900,6 +102711,7 @@ case OP_VFilter: { /* jump, ncycle */ /* Invoke the xFilter method */ apArg = p->apArg; + assert( nArg<=p->napArg ); for(i = 0; ivtabOnConflict; apArg = p->apArg; pX = &aMem[pOp->p3]; + assert( nArg<=p->napArg ); for(i=0; irc = rc; sqlite3SystemError(db, rc); testcase( sqlite3GlobalConfig.xLog!=0 ); - sqlite3_log(rc, "statement aborts at %d: [%s] %s", - (int)(pOp - aOp), p->zSql, p->zErrMsg); + sqlite3_log(rc, "statement aborts at %d: %s; [%s]", + (int)(pOp - aOp), p->zErrMsg, p->zSql); if( p->eVdbeState==VDBE_RUN_STATE ) sqlite3VdbeHalt(p); if( rc==SQLITE_IOERR_NOMEM ) sqlite3OomFault(db); if( rc==SQLITE_CORRUPT && db->autoCommit==0 ){ @@ -102896,6 +103709,7 @@ SQLITE_API int sqlite3_blob_open( char *zErr = 0; Table *pTab; Incrblob *pBlob = 0; + int iDb; Parse sParse; #ifdef SQLITE_ENABLE_API_ARMOR @@ -102941,7 +103755,10 @@ SQLITE_API int sqlite3_blob_open( sqlite3ErrorMsg(&sParse, "cannot open view: %s", zTable); } #endif - if( !pTab ){ + if( pTab==0 + || ((iDb = sqlite3SchemaToIndex(db, pTab->pSchema))==1 && + sqlite3OpenTempDatabase(&sParse)) + ){ if( sParse.zErrMsg ){ sqlite3DbFree(db, zErr); zErr = sParse.zErrMsg; @@ -102952,15 +103769,11 @@ SQLITE_API int sqlite3_blob_open( goto blob_open_out; } pBlob->pTab = pTab; - pBlob->zDb = db->aDb[sqlite3SchemaToIndex(db, pTab->pSchema)].zDbSName; + pBlob->zDb = db->aDb[iDb].zDbSName; /* Now search pTab for the exact column. 
*/ - for(iCol=0; iColnCol; iCol++) { - if( sqlite3StrICmp(pTab->aCol[iCol].zCnName, zColumn)==0 ){ - break; - } - } - if( iCol==pTab->nCol ){ + iCol = sqlite3ColumnIndex(pTab, zColumn); + if( iCol<0 ){ sqlite3DbFree(db, zErr); zErr = sqlite3MPrintf(db, "no such column: \"%s\"", zColumn); rc = SQLITE_ERROR; @@ -103040,7 +103853,6 @@ SQLITE_API int sqlite3_blob_open( {OP_Halt, 0, 0, 0}, /* 5 */ }; Vdbe *v = (Vdbe *)pBlob->pStmt; - int iDb = sqlite3SchemaToIndex(db, pTab->pSchema); VdbeOp *aOp; sqlite3VdbeAddOp4Int(v, OP_Transaction, iDb, wrFlag, @@ -103618,9 +104430,12 @@ struct VdbeSorter { u8 iPrev; /* Previous thread used to flush PMA */ u8 nTask; /* Size of aTask[] array */ u8 typeMask; - SortSubtask aTask[1]; /* One or more subtasks */ + SortSubtask aTask[FLEXARRAY]; /* One or more subtasks */ }; +/* Size (in bytes) of a VdbeSorter object that works with N or fewer subtasks */ +#define SZ_VDBESORTER(N) (offsetof(VdbeSorter,aTask)+(N)*sizeof(SortSubtask)) + #define SORTER_TYPE_INTEGER 0x01 #define SORTER_TYPE_TEXT 0x02 @@ -104222,7 +105037,7 @@ SQLITE_PRIVATE int sqlite3VdbeSorterInit( VdbeSorter *pSorter; /* The new sorter */ KeyInfo *pKeyInfo; /* Copy of pCsr->pKeyInfo with db==0 */ int szKeyInfo; /* Size of pCsr->pKeyInfo in bytes */ - int sz; /* Size of pSorter in bytes */ + i64 sz; /* Size of pSorter in bytes */ int rc = SQLITE_OK; #if SQLITE_MAX_WORKER_THREADS==0 # define nWorker 0 @@ -104250,8 +105065,10 @@ SQLITE_PRIVATE int sqlite3VdbeSorterInit( assert( pCsr->pKeyInfo ); assert( !pCsr->isEphemeral ); assert( pCsr->eCurType==CURTYPE_SORTER ); - szKeyInfo = sizeof(KeyInfo) + (pCsr->pKeyInfo->nKeyField-1)*sizeof(CollSeq*); - sz = sizeof(VdbeSorter) + nWorker * sizeof(SortSubtask); + assert( sizeof(KeyInfo) + UMXV(pCsr->pKeyInfo->nKeyField)*sizeof(CollSeq*) + < 0x7fffffff ); + szKeyInfo = SZ_KEYINFO(pCsr->pKeyInfo->nKeyField); + sz = SZ_VDBESORTER(nWorker+1); pSorter = (VdbeSorter*)sqlite3DbMallocZero(db, sz + szKeyInfo); pCsr->uc.pSorter = pSorter; @@ -104463,7 +105280,7 @@ static int vdbeSorterJoinAll(VdbeSorter *pSorter, int rcin){ */ static MergeEngine *vdbeMergeEngineNew(int nReader){ int N = 2; /* Smallest power of two >= nReader */ - int nByte; /* Total bytes of space to allocate */ + i64 nByte; /* Total bytes of space to allocate */ MergeEngine *pNew; /* Pointer to allocated object to return */ assert( nReader<=SORTER_MAX_MERGE_COUNT ); @@ -104715,6 +105532,10 @@ static int vdbeSorterSort(SortSubtask *pTask, SorterList *pList){ p->u.pNext = 0; for(i=0; aSlot[i]; i++){ p = vdbeSorterMerge(pTask, p, aSlot[i]); + /* ,--Each aSlot[] holds twice as much as the previous. So we cannot use + ** | up all 64 aSlots[] with only a 64-bit address space. + ** v */ + assert( iop on success */ Table *pTab = 0; /* Table holding the row */ - Column *pCol; /* A column of pTab */ ExprList *pFJMatch = 0; /* Matches for FULL JOIN .. 
USING */ const char *zCol = pRight->u.zToken; @@ -107557,7 +108377,6 @@ static int lookupName( if( pSrcList ){ for(i=0, pItem=pSrcList->a; inSrc; i++, pItem++){ - u8 hCol; pTab = pItem->pSTab; assert( pTab!=0 && pTab->zName!=0 ); assert( pTab->nCol>0 || pParse->nErr ); @@ -107645,43 +108464,38 @@ static int lookupName( sqlite3RenameTokenRemap(pParse, 0, (void*)&pExpr->y.pTab); } } - hCol = sqlite3StrIHash(zCol); - for(j=0, pCol=pTab->aCol; jnCol; j++, pCol++){ - if( pCol->hName==hCol - && sqlite3StrICmp(pCol->zCnName, zCol)==0 - ){ - if( cnt>0 ){ - if( pItem->fg.isUsing==0 - || sqlite3IdListIndex(pItem->u3.pUsing, zCol)<0 - ){ - /* Two or more tables have the same column name which is - ** not joined by USING. This is an error. Signal as much - ** by clearing pFJMatch and letting cnt go above 1. */ - sqlite3ExprListDelete(db, pFJMatch); - pFJMatch = 0; - }else - if( (pItem->fg.jointype & JT_RIGHT)==0 ){ - /* An INNER or LEFT JOIN. Use the left-most table */ - continue; - }else - if( (pItem->fg.jointype & JT_LEFT)==0 ){ - /* A RIGHT JOIN. Use the right-most table */ - cnt = 0; - sqlite3ExprListDelete(db, pFJMatch); - pFJMatch = 0; - }else{ - /* For a FULL JOIN, we must construct a coalesce() func */ - extendFJMatch(pParse, &pFJMatch, pMatch, pExpr->iColumn); - } + j = sqlite3ColumnIndex(pTab, zCol); + if( j>=0 ){ + if( cnt>0 ){ + if( pItem->fg.isUsing==0 + || sqlite3IdListIndex(pItem->u3.pUsing, zCol)<0 + ){ + /* Two or more tables have the same column name which is + ** not joined by USING. This is an error. Signal as much + ** by clearing pFJMatch and letting cnt go above 1. */ + sqlite3ExprListDelete(db, pFJMatch); + pFJMatch = 0; + }else + if( (pItem->fg.jointype & JT_RIGHT)==0 ){ + /* An INNER or LEFT JOIN. Use the left-most table */ + continue; + }else + if( (pItem->fg.jointype & JT_LEFT)==0 ){ + /* A RIGHT JOIN. Use the right-most table */ + cnt = 0; + sqlite3ExprListDelete(db, pFJMatch); + pFJMatch = 0; + }else{ + /* For a FULL JOIN, we must construct a coalesce() func */ + extendFJMatch(pParse, &pFJMatch, pMatch, pExpr->iColumn); } - cnt++; - pMatch = pItem; - /* Substitute the rowid (column -1) for the INTEGER PRIMARY KEY */ - pExpr->iColumn = j==pTab->iPKey ? -1 : (i16)j; - if( pItem->fg.isNestedFrom ){ - sqlite3SrcItemColumnUsed(pItem, j); - } - break; + } + cnt++; + pMatch = pItem; + /* Substitute the rowid (column -1) for the INTEGER PRIMARY KEY */ + pExpr->iColumn = j==pTab->iPKey ? -1 : (i16)j; + if( pItem->fg.isNestedFrom ){ + sqlite3SrcItemColumnUsed(pItem, j); } } if( 0==cnt && VisibleRowid(pTab) ){ @@ -107771,23 +108585,18 @@ static int lookupName( if( pTab ){ int iCol; - u8 hCol = sqlite3StrIHash(zCol); pSchema = pTab->pSchema; cntTab++; - for(iCol=0, pCol=pTab->aCol; iColnCol; iCol++, pCol++){ - if( pCol->hName==hCol - && sqlite3StrICmp(pCol->zCnName, zCol)==0 - ){ - if( iCol==pTab->iPKey ){ - iCol = -1; - } - break; + iCol = sqlite3ColumnIndex(pTab, zCol); + if( iCol>=0 ){ + if( pTab->iPKey==iCol ) iCol = -1; + }else{ + if( sqlite3IsRowid(zCol) && VisibleRowid(pTab) ){ + iCol = -1; + }else{ + iCol = pTab->nCol; } } - if( iCol>=pTab->nCol && sqlite3IsRowid(zCol) && VisibleRowid(pTab) ){ - /* IMP: R-51414-32910 */ - iCol = -1; - } if( iColnCol ){ cnt++; pMatch = 0; @@ -108426,13 +109235,12 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ ** sqlite_version() that might change over time cannot be used ** in an index or generated column. Curiously, they can be used ** in a CHECK constraint. SQLServer, MySQL, and PostgreSQL all - ** all this. */ + ** allow this. 
*/ sqlite3ResolveNotValid(pParse, pNC, "non-deterministic functions", NC_IdxExpr|NC_PartIdx|NC_GenCol, 0, pExpr); }else{ assert( (NC_SelfRef & 0xff)==NC_SelfRef ); /* Must fit in 8 bits */ pExpr->op2 = pNC->ncFlags & NC_SelfRef; - if( pNC->ncFlags & NC_FromDDL ) ExprSetProperty(pExpr, EP_FromDDL); } if( (pDef->funcFlags & SQLITE_FUNC_INTERNAL)!=0 && pParse->nested==0 @@ -108448,6 +109256,7 @@ static int resolveExprStep(Walker *pWalker, Expr *pExpr){ if( (pDef->funcFlags & (SQLITE_FUNC_DIRECT|SQLITE_FUNC_UNSAFE))!=0 && !IN_RENAME_OBJECT ){ + if( pNC->ncFlags & NC_FromDDL ) ExprSetProperty(pExpr, EP_FromDDL); sqlite3ExprFunctionUsable(pParse, pExpr, pDef); } } @@ -109501,20 +110310,22 @@ SQLITE_PRIVATE int sqlite3ResolveSelfReference( Expr *pExpr, /* Expression to resolve. May be NULL. */ ExprList *pList /* Expression list to resolve. May be NULL. */ ){ - SrcList sSrc; /* Fake SrcList for pParse->pNewTable */ + SrcList *pSrc; /* Fake SrcList for pParse->pNewTable */ NameContext sNC; /* Name context for pParse->pNewTable */ int rc; + u8 srcSpace[SZ_SRCLIST_1]; /* Memory space for the fake SrcList */ assert( type==0 || pTab!=0 ); assert( type==NC_IsCheck || type==NC_PartIdx || type==NC_IdxExpr || type==NC_GenCol || pTab==0 ); memset(&sNC, 0, sizeof(sNC)); - memset(&sSrc, 0, sizeof(sSrc)); + pSrc = (SrcList*)srcSpace; + memset(pSrc, 0, SZ_SRCLIST_1); if( pTab ){ - sSrc.nSrc = 1; - sSrc.a[0].zName = pTab->zName; - sSrc.a[0].pSTab = pTab; - sSrc.a[0].iCursor = -1; + pSrc->nSrc = 1; + pSrc->a[0].zName = pTab->zName; + pSrc->a[0].pSTab = pTab; + pSrc->a[0].iCursor = -1; if( pTab->pSchema!=pParse->db->aDb[1].pSchema ){ /* Cause EP_FromDDL to be set on TK_FUNCTION nodes of non-TEMP ** schema elements */ @@ -109522,7 +110333,7 @@ SQLITE_PRIVATE int sqlite3ResolveSelfReference( } } sNC.pParse = pParse; - sNC.pSrcList = &sSrc; + sNC.pSrcList = pSrc; sNC.ncFlags = type | NC_IsDDL; if( (rc = sqlite3ResolveExprNames(&sNC, pExpr))!=SQLITE_OK ) return rc; if( pList ) rc = sqlite3ResolveExprListNames(&sNC, pList); @@ -109606,7 +110417,9 @@ SQLITE_PRIVATE char sqlite3ExprAffinity(const Expr *pExpr){ pExpr->pLeft->x.pSelect->pEList->a[pExpr->iColumn].pExpr ); } - if( op==TK_VECTOR ){ + if( op==TK_VECTOR + || (op==TK_FUNCTION && pExpr->affExpr==SQLITE_AFF_DEFER) + ){ assert( ExprUseXList(pExpr) ); return sqlite3ExprAffinity(pExpr->x.pList->a[0].pExpr); } @@ -109799,7 +110612,9 @@ SQLITE_PRIVATE CollSeq *sqlite3ExprCollSeq(Parse *pParse, const Expr *pExpr){ p = p->pLeft; continue; } - if( op==TK_VECTOR ){ + if( op==TK_VECTOR + || (op==TK_FUNCTION && p->affExpr==SQLITE_AFF_DEFER) + ){ assert( ExprUseXList(p) ); p = p->x.pList->a[0].pExpr; continue; @@ -110673,7 +111488,7 @@ SQLITE_PRIVATE Expr *sqlite3ExprAnd(Parse *pParse, Expr *pLeft, Expr *pRight){ return pLeft; }else{ u32 f = pLeft->flags | pRight->flags; - if( (f&(EP_OuterON|EP_InnerON|EP_IsFalse))==EP_IsFalse + if( (f&(EP_OuterON|EP_InnerON|EP_IsFalse|EP_HasFunc))==EP_IsFalse && !IN_RENAME_OBJECT ){ sqlite3ExprDeferredDelete(pParse, pLeft); @@ -111271,7 +112086,7 @@ static Expr *exprDup( SQLITE_PRIVATE With *sqlite3WithDup(sqlite3 *db, With *p){ With *pRet = 0; if( p ){ - sqlite3_int64 nByte = sizeof(*p) + sizeof(p->a[0]) * (p->nCte-1); + sqlite3_int64 nByte = SZ_WITH(p->nCte); pRet = sqlite3DbMallocZero(db, nByte); if( pRet ){ int i; @@ -111382,7 +112197,6 @@ SQLITE_PRIVATE ExprList *sqlite3ExprListDup(sqlite3 *db, const ExprList *p, int } pItem->zEName = sqlite3DbStrDup(db, pOldItem->zEName); pItem->fg = pOldItem->fg; - pItem->fg.done = 0; pItem->u = 
pOldItem->u; } return pNew; @@ -111399,11 +112213,9 @@ SQLITE_PRIVATE ExprList *sqlite3ExprListDup(sqlite3 *db, const ExprList *p, int SQLITE_PRIVATE SrcList *sqlite3SrcListDup(sqlite3 *db, const SrcList *p, int flags){ SrcList *pNew; int i; - int nByte; assert( db!=0 ); if( p==0 ) return 0; - nByte = sizeof(*p) + (p->nSrc>0 ? sizeof(p->a[0]) * (p->nSrc-1) : 0); - pNew = sqlite3DbMallocRawNN(db, nByte ); + pNew = sqlite3DbMallocRawNN(db, SZ_SRCLIST(p->nSrc) ); if( pNew==0 ) return 0; pNew->nSrc = pNew->nAlloc = p->nSrc; for(i=0; inSrc; i++){ @@ -111465,7 +112277,7 @@ SQLITE_PRIVATE IdList *sqlite3IdListDup(sqlite3 *db, const IdList *p){ int i; assert( db!=0 ); if( p==0 ) return 0; - pNew = sqlite3DbMallocRawNN(db, sizeof(*pNew)+(p->nId-1)*sizeof(p->a[0]) ); + pNew = sqlite3DbMallocRawNN(db, SZ_IDLIST(p->nId)); if( pNew==0 ) return 0; pNew->nId = p->nId; for(i=0; inId; i++){ @@ -111497,7 +112309,7 @@ SQLITE_PRIVATE Select *sqlite3SelectDup(sqlite3 *db, const Select *pDup, int fla pNew->pLimit = sqlite3ExprDup(db, p->pLimit, flags); pNew->iLimit = 0; pNew->iOffset = 0; - pNew->selFlags = p->selFlags & ~SF_UsesEphemeral; + pNew->selFlags = p->selFlags & ~(u32)SF_UsesEphemeral; pNew->addrOpenEphm[0] = -1; pNew->addrOpenEphm[1] = -1; pNew->nSelectRow = p->nSelectRow; @@ -111549,7 +112361,7 @@ SQLITE_PRIVATE SQLITE_NOINLINE ExprList *sqlite3ExprListAppendNew( struct ExprList_item *pItem; ExprList *pList; - pList = sqlite3DbMallocRawNN(db, sizeof(ExprList)+sizeof(pList->a[0])*4 ); + pList = sqlite3DbMallocRawNN(db, SZ_EXPRLIST(4)); if( pList==0 ){ sqlite3ExprDelete(db, pExpr); return 0; @@ -111569,8 +112381,7 @@ SQLITE_PRIVATE SQLITE_NOINLINE ExprList *sqlite3ExprListAppendGrow( struct ExprList_item *pItem; ExprList *pNew; pList->nAlloc *= 2; - pNew = sqlite3DbRealloc(db, pList, - sizeof(*pList)+(pList->nAlloc-1)*sizeof(pList->a[0])); + pNew = sqlite3DbRealloc(db, pList, SZ_EXPRLIST(pList->nAlloc)); if( pNew==0 ){ sqlite3ExprListDelete(db, pList); sqlite3ExprDelete(db, pExpr); @@ -112499,13 +113310,7 @@ SQLITE_PRIVATE const char *sqlite3RowidAlias(Table *pTab){ int ii; assert( VisibleRowid(pTab) ); for(ii=0; iinCol; iCol++){ - if( sqlite3_stricmp(azOpt[ii], pTab->aCol[iCol].zCnName)==0 ) break; - } - if( iCol==pTab->nCol ){ - return azOpt[ii]; - } + if( sqlite3ColumnIndex(pTab, azOpt[ii])<0 ) return azOpt[ii]; } return 0; } @@ -112909,7 +113714,7 @@ static char *exprINAffinity(Parse *pParse, const Expr *pExpr){ char *zRet; assert( pExpr->op==TK_IN ); - zRet = sqlite3DbMallocRaw(pParse->db, nVal+1); + zRet = sqlite3DbMallocRaw(pParse->db, 1+(i64)nVal); if( zRet ){ int i; for(i=0; idb, pCopy); sqlite3DbFree(pParse->db, dest.zAffSdst); if( addrBloom ){ + /* Remember that location of the Bloom filter in the P3 operand + ** of the OP_Once that began this subroutine. 
tag-202407032019 */ sqlite3VdbeGetOp(v, addrOnce)->p3 = dest.iSDParm2; if( dest.iSDParm2==0 ){ - sqlite3VdbeChangeToNoop(v, addrBloom); - }else{ - sqlite3VdbeGetOp(v, addrOnce)->p3 = dest.iSDParm2; + /* If the Bloom filter won't actually be used, keep it small */ + sqlite3VdbeGetOp(v, addrBloom)->p1 = 10; } } if( rc ){ @@ -113620,7 +114426,7 @@ static void sqlite3ExprCodeIN( if( ExprHasProperty(pExpr, EP_Subrtn) ){ const VdbeOp *pOp = sqlite3VdbeGetOp(v, pExpr->y.sub.iAddr); assert( pOp->opcode==OP_Once || pParse->nErr ); - if( pOp->opcode==OP_Once && pOp->p3>0 ){ + if( pOp->opcode==OP_Once && pOp->p3>0 ){ /* tag-202407032019 */ assert( OptimizationEnabled(pParse->db, SQLITE_BloomFilter) ); sqlite3VdbeAddOp4Int(v, OP_Filter, pOp->p3, destIfFalse, rLhs, nVector); VdbeCoverage(v); @@ -114212,7 +115018,7 @@ static SQLITE_NOINLINE int sqlite3IndexedExprLookup( /* -** Expresion pExpr is guaranteed to be a TK_COLUMN or equivalent. This +** Expression pExpr is guaranteed to be a TK_COLUMN or equivalent. This ** function checks the Parse.pIdxPartExpr list to see if this column ** can be replaced with a constant value. If so, it generates code to ** put the constant value in a register (ideally, but not necessarily, @@ -115469,11 +116275,11 @@ SQLITE_PRIVATE void sqlite3ExprIfTrue(Parse *pParse, Expr *pExpr, int dest, int assert( TK_ISNULL==OP_IsNull ); testcase( op==TK_ISNULL ); assert( TK_NOTNULL==OP_NotNull ); testcase( op==TK_NOTNULL ); r1 = sqlite3ExprCodeTemp(pParse, pExpr->pLeft, ®Free1); - sqlite3VdbeTypeofColumn(v, r1); + assert( regFree1==0 || regFree1==r1 ); + if( regFree1 ) sqlite3VdbeTypeofColumn(v, r1); sqlite3VdbeAddOp2(v, op, r1, dest); VdbeCoverageIf(v, op==TK_ISNULL); VdbeCoverageIf(v, op==TK_NOTNULL); - testcase( regFree1==0 ); break; } case TK_BETWEEN: { @@ -115644,11 +116450,11 @@ SQLITE_PRIVATE void sqlite3ExprIfFalse(Parse *pParse, Expr *pExpr, int dest, int case TK_ISNULL: case TK_NOTNULL: { r1 = sqlite3ExprCodeTemp(pParse, pExpr->pLeft, ®Free1); - sqlite3VdbeTypeofColumn(v, r1); + assert( regFree1==0 || regFree1==r1 ); + if( regFree1 ) sqlite3VdbeTypeofColumn(v, r1); sqlite3VdbeAddOp2(v, op, r1, dest); testcase( op==TK_ISNULL ); VdbeCoverageIf(v, op==TK_ISNULL); testcase( op==TK_NOTNULL ); VdbeCoverageIf(v, op==TK_NOTNULL); - testcase( regFree1==0 ); break; } case TK_BETWEEN: { @@ -116548,7 +117354,9 @@ static void findOrCreateAggInfoColumn( ){ struct AggInfo_col *pCol; int k; + int mxTerm = pParse->db->aLimit[SQLITE_LIMIT_COLUMN]; + assert( mxTerm <= SMXV(i16) ); assert( pAggInfo->iFirstReg==0 ); pCol = pAggInfo->aCol; for(k=0; knColumn; k++, pCol++){ @@ -116566,6 +117374,10 @@ static void findOrCreateAggInfoColumn( assert( pParse->db->mallocFailed ); return; } + if( k>mxTerm ){ + sqlite3ErrorMsg(pParse, "more than %d aggregate terms", mxTerm); + k = mxTerm; + } pCol = &pAggInfo->aCol[k]; assert( ExprUseYTab(pExpr) ); pCol->pTab = pExpr->y.pTab; @@ -116599,6 +117411,7 @@ fix_up_expr: if( pExpr->op==TK_COLUMN ){ pExpr->op = TK_AGG_COLUMN; } + assert( k <= SMXV(pExpr->iAgg) ); pExpr->iAgg = (i16)k; } @@ -116683,13 +117496,19 @@ static int analyzeAggregate(Walker *pWalker, Expr *pExpr){ ** function that is already in the pAggInfo structure */ struct AggInfo_func *pItem = pAggInfo->aFunc; + int mxTerm = pParse->db->aLimit[SQLITE_LIMIT_COLUMN]; + assert( mxTerm <= SMXV(i16) ); for(i=0; inFunc; i++, pItem++){ if( NEVER(pItem->pFExpr==pExpr) ) break; if( sqlite3ExprCompare(0, pItem->pFExpr, pExpr, -1)==0 ){ break; } } - if( i>=pAggInfo->nFunc ){ + if( i>mxTerm ){ + 
sqlite3ErrorMsg(pParse, "more than %d aggregate terms", mxTerm); + i = mxTerm; + assert( inFunc ); + }else if( i>=pAggInfo->nFunc ){ /* pExpr is original. Make a new entry in pAggInfo->aFunc[] */ u8 enc = ENC(pParse->db); @@ -116743,6 +117562,7 @@ static int analyzeAggregate(Walker *pWalker, Expr *pExpr){ */ assert( !ExprHasProperty(pExpr, EP_TokenOnly|EP_Reduced) ); ExprSetVVAProperty(pExpr, EP_NoReduce); + assert( i <= SMXV(pExpr->iAgg) ); pExpr->iAgg = (i16)i; pExpr->pAggInfo = pAggInfo; return WRC_Prune; @@ -117453,13 +118273,13 @@ SQLITE_PRIVATE void sqlite3AlterBeginAddColumn(Parse *pParse, SrcList *pSrc){ assert( pNew->nCol>0 ); nAlloc = (((pNew->nCol-1)/8)*8)+8; assert( nAlloc>=pNew->nCol && nAlloc%8==0 && nAlloc-pNew->nCol<8 ); - pNew->aCol = (Column*)sqlite3DbMallocZero(db, sizeof(Column)*nAlloc); + pNew->aCol = (Column*)sqlite3DbMallocZero(db, sizeof(Column)*(u32)nAlloc); pNew->zName = sqlite3MPrintf(db, "sqlite_altertab_%s", pTab->zName); if( !pNew->aCol || !pNew->zName ){ assert( db->mallocFailed ); goto exit_begin_add_column; } - memcpy(pNew->aCol, pTab->aCol, sizeof(Column)*pNew->nCol); + memcpy(pNew->aCol, pTab->aCol, sizeof(Column)*(size_t)pNew->nCol); for(i=0; inCol; i++){ Column *pCol = &pNew->aCol[i]; pCol->zCnName = sqlite3DbStrDup(db, pCol->zCnName); @@ -117554,10 +118374,8 @@ SQLITE_PRIVATE void sqlite3AlterRenameColumn( ** altered. Set iCol to be the index of the column being renamed */ zOld = sqlite3NameFromToken(db, pOld); if( !zOld ) goto exit_rename_column; - for(iCol=0; iColnCol; iCol++){ - if( 0==sqlite3StrICmp(pTab->aCol[iCol].zCnName, zOld) ) break; - } - if( iCol==pTab->nCol ){ + iCol = sqlite3ColumnIndex(pTab, zOld); + if( iCol<0 ){ sqlite3ErrorMsg(pParse, "no such column: \"%T\"", pOld); goto exit_rename_column; } @@ -118060,6 +118878,7 @@ static int renameParseSql( int bTemp /* True if SQL is from temp schema */ ){ int rc; + u64 flags; sqlite3ParseObjectInit(p, db); if( zSql==0 ){ @@ -118068,11 +118887,21 @@ static int renameParseSql( if( sqlite3StrNICmp(zSql,"CREATE ",7)!=0 ){ return SQLITE_CORRUPT_BKPT; } - db->init.iDb = bTemp ? 1 : sqlite3FindDbName(db, zDb); + if( bTemp ){ + db->init.iDb = 1; + }else{ + int iDb = sqlite3FindDbName(db, zDb); + assert( iDb>=0 && iDb<=0xff ); + db->init.iDb = (u8)iDb; + } p->eParseMode = PARSE_MODE_RENAME; p->db = db; p->nQueryLoop = 1; + flags = db->flags; + testcase( (db->flags & SQLITE_Comments)==0 && strstr(zSql," /* ")!=0 ); + db->flags |= SQLITE_Comments; rc = sqlite3RunParser(p, zSql); + db->flags = flags; if( db->mallocFailed ) rc = SQLITE_NOMEM; if( rc==SQLITE_OK && NEVER(p->pNewTable==0 && p->pNewIndex==0 && p->pNewTrigger==0) @@ -118135,10 +118964,11 @@ static int renameEditSql( nQuot = sqlite3Strlen30(zQuot)-1; } - assert( nQuot>=nNew ); - zOut = sqlite3DbMallocZero(db, nSql + pRename->nList*nQuot + 1); + assert( nQuot>=nNew && nSql>=0 && nNew>=0 ); + zOut = sqlite3DbMallocZero(db, (u64)nSql + pRename->nList*(u64)nQuot + 1); }else{ - zOut = (char*)sqlite3DbMallocZero(db, (nSql*2+1) * 3); + assert( nSql>0 ); + zOut = (char*)sqlite3DbMallocZero(db, (2*(u64)nSql + 1) * 3); if( zOut ){ zBuf1 = &zOut[nSql*2+1]; zBuf2 = &zOut[nSql*4+2]; @@ -118150,16 +118980,17 @@ static int renameEditSql( ** with the new column name, or with single-quoted versions of themselves. ** All that remains is to construct and return the edited SQL string. 
*/ if( zOut ){ - int nOut = nSql; - memcpy(zOut, zSql, nSql); + i64 nOut = nSql; + assert( nSql>0 ); + memcpy(zOut, zSql, (size_t)nSql); while( pRename->pList ){ int iOff; /* Offset of token to replace in zOut */ - u32 nReplace; + i64 nReplace; const char *zReplace; RenameToken *pBest = renameColumnTokenNext(pRename); if( zNew ){ - if( bQuote==0 && sqlite3IsIdChar(*pBest->t.z) ){ + if( bQuote==0 && sqlite3IsIdChar(*(u8*)pBest->t.z) ){ nReplace = nNew; zReplace = zNew; }else{ @@ -118177,14 +119008,15 @@ static int renameEditSql( memcpy(zBuf1, pBest->t.z, pBest->t.n); zBuf1[pBest->t.n] = 0; sqlite3Dequote(zBuf1); - sqlite3_snprintf(nSql*2, zBuf2, "%Q%s", zBuf1, + assert( nSql < 0x15555554 /* otherwise malloc would have failed */ ); + sqlite3_snprintf((int)(nSql*2), zBuf2, "%Q%s", zBuf1, pBest->t.z[pBest->t.n]=='\'' ? " " : "" ); zReplace = zBuf2; nReplace = sqlite3Strlen30(zReplace); } - iOff = pBest->t.z - zSql; + iOff = (int)(pBest->t.z - zSql); if( pBest->t.n!=nReplace ){ memmove(&zOut[iOff + nReplace], &zOut[iOff + pBest->t.n], nOut - (iOff + pBest->t.n) @@ -118210,11 +119042,12 @@ static int renameEditSql( ** Set all pEList->a[].fg.eEName fields in the expression-list to val. */ static void renameSetENames(ExprList *pEList, int val){ + assert( val==ENAME_NAME || val==ENAME_TAB || val==ENAME_SPAN ); if( pEList ){ int i; for(i=0; inExpr; i++){ assert( val==ENAME_NAME || pEList->a[i].fg.eEName==ENAME_NAME ); - pEList->a[i].fg.eEName = val; + pEList->a[i].fg.eEName = val&0x3; } } } @@ -118471,7 +119304,7 @@ static void renameColumnFunc( if( sParse.pNewTable ){ if( IsView(sParse.pNewTable) ){ Select *pSelect = sParse.pNewTable->u.view.pSelect; - pSelect->selFlags &= ~SF_View; + pSelect->selFlags &= ~(u32)SF_View; sParse.rc = SQLITE_OK; sqlite3SelectPrep(&sParse, pSelect, 0); rc = (db->mallocFailed ? SQLITE_NOMEM : sParse.rc); @@ -118689,7 +119522,7 @@ static void renameTableFunc( sNC.pParse = &sParse; assert( pSelect->selFlags & SF_View ); - pSelect->selFlags &= ~SF_View; + pSelect->selFlags &= ~(u32)SF_View; sqlite3SelectPrep(&sParse, pTab->u.view.pSelect, &sNC); if( sParse.nErr ){ rc = sParse.rc; @@ -118862,7 +119695,7 @@ static void renameQuotefixFunc( if( sParse.pNewTable ){ if( IsView(sParse.pNewTable) ){ Select *pSelect = sParse.pNewTable->u.view.pSelect; - pSelect->selFlags &= ~SF_View; + pSelect->selFlags &= ~(u32)SF_View; sParse.rc = SQLITE_OK; sqlite3SelectPrep(&sParse, pSelect, 0); rc = (db->mallocFailed ? 
SQLITE_NOMEM : sParse.rc); @@ -118961,10 +119794,10 @@ static void renameTableTest( if( zDb && zInput ){ int rc; Parse sParse; - int flags = db->flags; + u64 flags = db->flags; if( bNoDQS ) db->flags &= ~(SQLITE_DqsDML|SQLITE_DqsDDL); rc = renameParseSql(&sParse, zDb, db, zInput, bTemp); - db->flags |= (flags & (SQLITE_DqsDML|SQLITE_DqsDDL)); + db->flags = flags; if( rc==SQLITE_OK ){ if( isLegacy==0 && sParse.pNewTable && IsView(sParse.pNewTable) ){ NameContext sNC; @@ -119456,7 +120289,8 @@ static void openStatTable( sqlite3NestedParse(pParse, "CREATE TABLE %Q.%s(%s)", pDb->zDbSName, zTab, aTable[i].zCols ); - aRoot[i] = (u32)pParse->regRoot; + assert( pParse->isCreate || pParse->nErr ); + aRoot[i] = (u32)pParse->u1.cr.regRoot; aCreateTbl[i] = OPFLAG_P2ISREG; } }else{ @@ -119647,7 +120481,7 @@ static void statInit( int nCol; /* Number of columns in index being sampled */ int nKeyCol; /* Number of key columns */ int nColUp; /* nCol rounded up for alignment */ - int n; /* Bytes of space to allocate */ + i64 n; /* Bytes of space to allocate */ sqlite3 *db = sqlite3_context_db_handle(context); /* Database connection */ #ifdef SQLITE_ENABLE_STAT4 /* Maximum number of samples. 0 if STAT4 data is not collected */ @@ -119683,7 +120517,7 @@ static void statInit( p->db = db; p->nEst = sqlite3_value_int64(argv[2]); p->nRow = 0; - p->nLimit = sqlite3_value_int64(argv[3]); + p->nLimit = sqlite3_value_int(argv[3]); p->nCol = nCol; p->nKeyCol = nKeyCol; p->nSkipAhead = 0; @@ -120816,16 +121650,6 @@ static void decodeIntArray( while( z[0]!=0 && z[0]!=' ' ) z++; while( z[0]==' ' ) z++; } - - /* Set the bLowQual flag if the peak number of rows obtained - ** from a full equality match is so large that a full table scan - ** seems likely to be faster than using the index. 
- */ - if( aLog[0] > 66 /* Index has more than 100 rows */ - && aLog[0] <= aLog[nOut-1] /* And only a single value seen */ - ){ - pIndex->bLowQual = 1; - } } } @@ -121421,7 +122245,7 @@ static void attachFunc( if( aNew==0 ) return; memcpy(aNew, db->aDb, sizeof(db->aDb[0])*2); }else{ - aNew = sqlite3DbRealloc(db, db->aDb, sizeof(db->aDb[0])*(db->nDb+1) ); + aNew = sqlite3DbRealloc(db, db->aDb, sizeof(db->aDb[0])*(1+(i64)db->nDb)); if( aNew==0 ) return; } db->aDb = aNew; @@ -121492,6 +122316,13 @@ static void attachFunc( sqlite3BtreeEnterAll(db); db->init.iDb = 0; db->mDbFlags &= ~(DBFLAG_SchemaKnownOk); +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + if( db->setlkFlags & SQLITE_SETLK_BLOCK_ON_CONNECT ){ + int val = 1; + sqlite3_file *fd = sqlite3PagerFile(sqlite3BtreePager(pNew->pBt)); + sqlite3OsFileControlHint(fd, SQLITE_FCNTL_BLOCK_ON_CONNECT, &val); + } +#endif if( !REOPEN_AS_MEMDB(db) ){ rc = sqlite3Init(db, &zErrDyn); } @@ -122214,6 +123045,7 @@ static SQLITE_NOINLINE void lockTable( } } + assert( pToplevel->nTableLock < 0x7fff0000 ); nBytes = sizeof(TableLock) * (pToplevel->nTableLock+1); pToplevel->aTableLock = sqlite3DbReallocOrFree(pToplevel->db, pToplevel->aTableLock, nBytes); @@ -122314,10 +123146,12 @@ SQLITE_PRIVATE void sqlite3FinishCoding(Parse *pParse){ || sqlite3VdbeAssertMayAbort(v, pParse->mayAbort)); if( v ){ if( pParse->bReturning ){ - Returning *pReturning = pParse->u1.pReturning; + Returning *pReturning; int addrRewind; int reg; + assert( !pParse->isCreate ); + pReturning = pParse->u1.d.pReturning; if( pReturning->nRetCol ){ sqlite3VdbeAddOp0(v, OP_FkCheck); addrRewind = @@ -122393,7 +123227,9 @@ SQLITE_PRIVATE void sqlite3FinishCoding(Parse *pParse){ } if( pParse->bReturning ){ - Returning *pRet = pParse->u1.pReturning; + Returning *pRet; + assert( !pParse->isCreate ); + pRet = pParse->u1.d.pReturning; if( pRet->nRetCol ){ sqlite3VdbeAddOp2(v, OP_OpenEphemeral, pRet->iRetCur, pRet->nRetCol); } @@ -123208,10 +124044,16 @@ SQLITE_PRIVATE Index *sqlite3PrimaryKeyIndex(Table *pTab){ ** find the (first) offset of that column in index pIdx. Or return -1 ** if column iCol is not used in index pIdx. */ -SQLITE_PRIVATE i16 sqlite3TableColumnToIndex(Index *pIdx, i16 iCol){ +SQLITE_PRIVATE int sqlite3TableColumnToIndex(Index *pIdx, int iCol){ int i; + i16 iCol16; + assert( iCol>=(-1) && iCol<=SQLITE_MAX_COLUMN ); + assert( pIdx->nColumn<=SQLITE_MAX_COLUMN+1 ); + iCol16 = iCol; for(i=0; inColumn; i++){ - if( iCol==pIdx->aiColumn[i] ) return i; + if( iCol16==pIdx->aiColumn[i] ){ + return i; + } } return -1; } @@ -123465,8 +124307,9 @@ SQLITE_PRIVATE void sqlite3StartTable( /* If the file format and encoding in the database have not been set, ** set them now. */ - reg1 = pParse->regRowid = ++pParse->nMem; - reg2 = pParse->regRoot = ++pParse->nMem; + assert( pParse->isCreate ); + reg1 = pParse->u1.cr.regRowid = ++pParse->nMem; + reg2 = pParse->u1.cr.regRoot = ++pParse->nMem; reg3 = ++pParse->nMem; sqlite3VdbeAddOp3(v, OP_ReadCookie, iDb, reg3, BTREE_FILE_FORMAT); sqlite3VdbeUsesBtree(v, iDb); @@ -123481,8 +124324,8 @@ SQLITE_PRIVATE void sqlite3StartTable( ** The record created does not contain anything yet. It will be replaced ** by the real entry in code generated at sqlite3EndTable(). ** - ** The rowid for the new entry is left in register pParse->regRowid. - ** The root page number of the new table is left in reg pParse->regRoot. + ** The rowid for the new entry is left in register pParse->u1.cr.regRowid. + ** The root page of the new table is left in reg pParse->u1.cr.regRoot. 
** The rowid and root page number values are needed by the code that ** sqlite3EndTable will generate. */ @@ -123493,7 +124336,7 @@ SQLITE_PRIVATE void sqlite3StartTable( #endif { assert( !pParse->bReturning ); - pParse->u1.addrCrTab = + pParse->u1.cr.addrCrTab = sqlite3VdbeAddOp3(v, OP_CreateBtree, iDb, reg2, BTREE_INTKEY); } sqlite3OpenSchemaTable(pParse, iDb); @@ -123571,7 +124414,8 @@ SQLITE_PRIVATE void sqlite3AddReturning(Parse *pParse, ExprList *pList){ sqlite3ExprListDelete(db, pList); return; } - pParse->u1.pReturning = pRet; + assert( !pParse->isCreate ); + pParse->u1.d.pReturning = pRet; pRet->pParse = pParse; pRet->pReturnEL = pList; sqlite3ParserAddCleanup(pParse, sqlite3DeleteReturning, pRet); @@ -123613,7 +124457,6 @@ SQLITE_PRIVATE void sqlite3AddColumn(Parse *pParse, Token sName, Token sType){ char *zType; Column *pCol; sqlite3 *db = pParse->db; - u8 hName; Column *aNew; u8 eType = COLTYPE_CUSTOM; u8 szEst = 1; @@ -123667,13 +124510,10 @@ SQLITE_PRIVATE void sqlite3AddColumn(Parse *pParse, Token sName, Token sType){ memcpy(z, sName.z, sName.n); z[sName.n] = 0; sqlite3Dequote(z); - hName = sqlite3StrIHash(z); - for(i=0; inCol; i++){ - if( p->aCol[i].hName==hName && sqlite3StrICmp(z, p->aCol[i].zCnName)==0 ){ - sqlite3ErrorMsg(pParse, "duplicate column name: %s", z); - sqlite3DbFree(db, z); - return; - } + if( p->nCol && sqlite3ColumnIndex(p, z)>=0 ){ + sqlite3ErrorMsg(pParse, "duplicate column name: %s", z); + sqlite3DbFree(db, z); + return; } aNew = sqlite3DbRealloc(db,p->aCol,((i64)p->nCol+1)*sizeof(p->aCol[0])); if( aNew==0 ){ @@ -123684,7 +124524,7 @@ SQLITE_PRIVATE void sqlite3AddColumn(Parse *pParse, Token sName, Token sType){ pCol = &p->aCol[p->nCol]; memset(pCol, 0, sizeof(p->aCol[0])); pCol->zCnName = z; - pCol->hName = hName; + pCol->hName = sqlite3StrIHash(z); sqlite3ColumnPropertiesFromName(p, pCol); if( sType.n==0 ){ @@ -123708,9 +124548,14 @@ SQLITE_PRIVATE void sqlite3AddColumn(Parse *pParse, Token sName, Token sType){ pCol->affinity = sqlite3AffinityType(zType, pCol); pCol->colFlags |= COLFLAG_HASTYPE; } + if( p->nCol<=0xff ){ + u8 h = pCol->hName % sizeof(p->aHx); + p->aHx[h] = p->nCol; + } p->nCol++; p->nNVCol++; - pParse->constraintName.n = 0; + assert( pParse->isCreate ); + pParse->u1.cr.constraintName.n = 0; } /* @@ -123974,15 +124819,11 @@ SQLITE_PRIVATE void sqlite3AddPrimaryKey( assert( pCExpr!=0 ); sqlite3StringToId(pCExpr); if( pCExpr->op==TK_ID ){ - const char *zCName; assert( !ExprHasProperty(pCExpr, EP_IntValue) ); - zCName = pCExpr->u.zToken; - for(iCol=0; iColnCol; iCol++){ - if( sqlite3StrICmp(zCName, pTab->aCol[iCol].zCnName)==0 ){ - pCol = &pTab->aCol[iCol]; - makeColumnPartOfPrimaryKey(pParse, pCol); - break; - } + iCol = sqlite3ColumnIndex(pTab, pCExpr->u.zToken); + if( iCol>=0 ){ + pCol = &pTab->aCol[iCol]; + makeColumnPartOfPrimaryKey(pParse, pCol); } } } @@ -124034,8 +124875,10 @@ SQLITE_PRIVATE void sqlite3AddCheckConstraint( && !sqlite3BtreeIsReadonly(db->aDb[db->init.iDb].pBt) ){ pTab->pCheck = sqlite3ExprListAppend(pParse, pTab->pCheck, pCheckExpr); - if( pParse->constraintName.n ){ - sqlite3ExprListSetName(pParse, pTab->pCheck, &pParse->constraintName, 1); + assert( pParse->isCreate ); + if( pParse->u1.cr.constraintName.n ){ + sqlite3ExprListSetName(pParse, pTab->pCheck, + &pParse->u1.cr.constraintName, 1); }else{ Token t; for(zStart++; sqlite3Isspace(zStart[0]); zStart++){} @@ -124230,7 +125073,8 @@ static void identPut(char *z, int *pIdx, char *zSignedIdent){ ** from sqliteMalloc() and must be freed by the calling function. 
*/ static char *createTableStmt(sqlite3 *db, Table *p){ - int i, k, n; + int i, k, len; + i64 n; char *zStmt; char *zSep, *zSep2, *zEnd; Column *pCol; @@ -124254,8 +125098,9 @@ static char *createTableStmt(sqlite3 *db, Table *p){ sqlite3OomFault(db); return 0; } - sqlite3_snprintf(n, zStmt, "CREATE TABLE "); - k = sqlite3Strlen30(zStmt); + assert( n>14 && n<=0x7fffffff ); + memcpy(zStmt, "CREATE TABLE ", 13); + k = 13; identPut(zStmt, &k, p->zName); zStmt[k++] = '('; for(pCol=p->aCol, i=0; inCol; i++, pCol++){ @@ -124267,13 +125112,15 @@ static char *createTableStmt(sqlite3 *db, Table *p){ /* SQLITE_AFF_REAL */ " REAL", /* SQLITE_AFF_FLEXNUM */ " NUM", }; - int len; const char *zType; - sqlite3_snprintf(n-k, &zStmt[k], zSep); - k += sqlite3Strlen30(&zStmt[k]); + len = sqlite3Strlen30(zSep); + assert( k+lenzCnName); + assert( kaffinity-SQLITE_AFF_BLOB >= 0 ); assert( pCol->affinity-SQLITE_AFF_BLOB < ArraySize(azType) ); testcase( pCol->affinity==SQLITE_AFF_BLOB ); @@ -124288,11 +125135,14 @@ static char *createTableStmt(sqlite3 *db, Table *p){ assert( pCol->affinity==SQLITE_AFF_BLOB || pCol->affinity==SQLITE_AFF_FLEXNUM || pCol->affinity==sqlite3AffinityType(zType, 0) ); + assert( k+lennColumn>=N ) return SQLITE_OK; + db = pParse->db; + assert( N>0 ); + assert( N <= SQLITE_MAX_COLUMN*2 /* tag-20250221-1 */ ); + testcase( N==2*pParse->db->aLimit[SQLITE_LIMIT_COLUMN] ); assert( pIdx->isResized==0 ); - nByte = (sizeof(char*) + sizeof(LogEst) + sizeof(i16) + 1)*N; + nByte = (sizeof(char*) + sizeof(LogEst) + sizeof(i16) + 1)*(u64)N; zExtra = sqlite3DbMallocZero(db, nByte); if( zExtra==0 ) return SQLITE_NOMEM_BKPT; memcpy(zExtra, pIdx->azColl, sizeof(char*)*pIdx->nColumn); @@ -124319,7 +125174,7 @@ static int resizeIndexObject(sqlite3 *db, Index *pIdx, int N){ zExtra += sizeof(i16)*N; memcpy(zExtra, pIdx->aSortOrder, pIdx->nColumn); pIdx->aSortOrder = (u8*)zExtra; - pIdx->nColumn = N; + pIdx->nColumn = (u16)N; /* See tag-20250221-1 above for proof of safety */ pIdx->isResized = 1; return SQLITE_OK; } @@ -124485,9 +125340,9 @@ static void convertToWithoutRowidTable(Parse *pParse, Table *pTab){ ** into BTREE_BLOBKEY. */ assert( !pParse->bReturning ); - if( pParse->u1.addrCrTab ){ + if( pParse->u1.cr.addrCrTab ){ assert( v ); - sqlite3VdbeChangeP3(v, pParse->u1.addrCrTab, BTREE_BLOBKEY); + sqlite3VdbeChangeP3(v, pParse->u1.cr.addrCrTab, BTREE_BLOBKEY); } /* Locate the PRIMARY KEY index. 
Or, if this table was originally @@ -124573,14 +125428,14 @@ static void convertToWithoutRowidTable(Parse *pParse, Table *pTab){ pIdx->nColumn = pIdx->nKeyCol; continue; } - if( resizeIndexObject(db, pIdx, pIdx->nKeyCol+n) ) return; + if( resizeIndexObject(pParse, pIdx, pIdx->nKeyCol+n) ) return; for(i=0, j=pIdx->nKeyCol; inKeyCol, pPk, i) ){ testcase( hasColumn(pIdx->aiColumn, pIdx->nKeyCol, pPk->aiColumn[i]) ); pIdx->aiColumn[j] = pPk->aiColumn[i]; pIdx->azColl[j] = pPk->azColl[i]; if( pPk->aSortOrder[i] ){ - /* See ticket https://www.sqlite.org/src/info/bba7b69f9849b5bf */ + /* See ticket https://sqlite.org/src/info/bba7b69f9849b5bf */ pIdx->bAscKeyBug = 1; } j++; @@ -124597,7 +125452,7 @@ static void convertToWithoutRowidTable(Parse *pParse, Table *pTab){ if( !hasColumn(pPk->aiColumn, nPk, i) && (pTab->aCol[i].colFlags & COLFLAG_VIRTUAL)==0 ) nExtra++; } - if( resizeIndexObject(db, pPk, nPk+nExtra) ) return; + if( resizeIndexObject(pParse, pPk, nPk+nExtra) ) return; for(i=0, j=nPk; inCol; i++){ if( !hasColumn(pPk->aiColumn, j, i) && (pTab->aCol[i].colFlags & COLFLAG_VIRTUAL)==0 @@ -124927,7 +125782,7 @@ SQLITE_PRIVATE void sqlite3EndTable( /* If this is a CREATE TABLE xx AS SELECT ..., execute the SELECT ** statement to populate the new table. The root-page number for the - ** new table is in register pParse->regRoot. + ** new table is in register pParse->u1.cr.regRoot. ** ** Once the SELECT has been coded by sqlite3Select(), it is in a ** suitable state to query for the column names and types to be used @@ -124958,7 +125813,8 @@ SQLITE_PRIVATE void sqlite3EndTable( regRec = ++pParse->nMem; regRowid = ++pParse->nMem; sqlite3MayAbort(pParse); - sqlite3VdbeAddOp3(v, OP_OpenWrite, iCsr, pParse->regRoot, iDb); + assert( pParse->isCreate ); + sqlite3VdbeAddOp3(v, OP_OpenWrite, iCsr, pParse->u1.cr.regRoot, iDb); sqlite3VdbeChangeP5(v, OPFLAG_P2ISREG); addrTop = sqlite3VdbeCurrentAddr(v) + 1; sqlite3VdbeAddOp3(v, OP_InitCoroutine, regYield, 0, addrTop); @@ -125003,6 +125859,7 @@ SQLITE_PRIVATE void sqlite3EndTable( ** schema table. We just need to update that slot with all ** the information we've collected. */ + assert( pParse->isCreate ); sqlite3NestedParse(pParse, "UPDATE %Q." LEGACY_SCHEMA_TABLE " SET type='%s', name=%Q, tbl_name=%Q, rootpage=#%d, sql=%Q" @@ -125011,9 +125868,9 @@ SQLITE_PRIVATE void sqlite3EndTable( zType, p->zName, p->zName, - pParse->regRoot, + pParse->u1.cr.regRoot, zStmt, - pParse->regRowid + pParse->u1.cr.regRowid ); sqlite3DbFree(db, zStmt); sqlite3ChangeCookie(pParse, iDb); @@ -125753,7 +126610,7 @@ SQLITE_PRIVATE void sqlite3CreateForeignKey( }else{ nCol = pFromCol->nExpr; } - nByte = sizeof(*pFKey) + (nCol-1)*sizeof(pFKey->aCol[0]) + pTo->n + 1; + nByte = SZ_FKEY(nCol) + pTo->n + 1; if( pToCol ){ for(i=0; inExpr; i++){ nByte += sqlite3Strlen30(pToCol->a[i].zEName) + 1; @@ -125955,7 +126812,7 @@ static void sqlite3RefillIndex(Parse *pParse, Index *pIndex, int memRootPage){ ** not work for UNIQUE constraint indexes on WITHOUT ROWID tables ** with DESC primary keys, since those indexes have there keys in ** a different order from the main table. 
- ** See ticket: https://www.sqlite.org/src/info/bba7b69f9849b5bf + ** See ticket: https://sqlite.org/src/info/bba7b69f9849b5bf */ sqlite3VdbeAddOp1(v, OP_SeekEnd, iIdx); } @@ -125979,13 +126836,14 @@ static void sqlite3RefillIndex(Parse *pParse, Index *pIndex, int memRootPage){ */ SQLITE_PRIVATE Index *sqlite3AllocateIndexObject( sqlite3 *db, /* Database connection */ - i16 nCol, /* Total number of columns in the index */ + int nCol, /* Total number of columns in the index */ int nExtra, /* Number of bytes of extra space to alloc */ char **ppExtra /* Pointer to the "extra" space */ ){ Index *p; /* Allocated index object */ - int nByte; /* Bytes of space for Index object + arrays */ + i64 nByte; /* Bytes of space for Index object + arrays */ + assert( nCol <= 2*db->aLimit[SQLITE_LIMIT_COLUMN] ); nByte = ROUND8(sizeof(Index)) + /* Index structure */ ROUND8(sizeof(char*)*nCol) + /* Index.azColl */ ROUND8(sizeof(LogEst)*(nCol+1) + /* Index.aiRowLogEst */ @@ -125998,8 +126856,9 @@ SQLITE_PRIVATE Index *sqlite3AllocateIndexObject( p->aiRowLogEst = (LogEst*)pExtra; pExtra += sizeof(LogEst)*(nCol+1); p->aiColumn = (i16*)pExtra; pExtra += sizeof(i16)*nCol; p->aSortOrder = (u8*)pExtra; - p->nColumn = nCol; - p->nKeyCol = nCol - 1; + assert( nCol>0 ); + p->nColumn = (u16)nCol; + p->nKeyCol = (u16)(nCol - 1); *ppExtra = ((char*)p) + nByte; } return p; @@ -126810,12 +127669,11 @@ SQLITE_PRIVATE IdList *sqlite3IdListAppend(Parse *pParse, IdList *pList, Token * sqlite3 *db = pParse->db; int i; if( pList==0 ){ - pList = sqlite3DbMallocZero(db, sizeof(IdList) ); + pList = sqlite3DbMallocZero(db, SZ_IDLIST(1)); if( pList==0 ) return 0; }else{ IdList *pNew; - pNew = sqlite3DbRealloc(db, pList, - sizeof(IdList) + pList->nId*sizeof(pList->a)); + pNew = sqlite3DbRealloc(db, pList, SZ_IDLIST(pList->nId+1)); if( pNew==0 ){ sqlite3IdListDelete(db, pList); return 0; @@ -126914,8 +127772,7 @@ SQLITE_PRIVATE SrcList *sqlite3SrcListEnlarge( return 0; } if( nAlloc>SQLITE_MAX_SRCLIST ) nAlloc = SQLITE_MAX_SRCLIST; - pNew = sqlite3DbRealloc(db, pSrc, - sizeof(*pSrc) + (nAlloc-1)*sizeof(pSrc->a[0]) ); + pNew = sqlite3DbRealloc(db, pSrc, SZ_SRCLIST(nAlloc)); if( pNew==0 ){ assert( db->mallocFailed ); return 0; @@ -126990,7 +127847,7 @@ SQLITE_PRIVATE SrcList *sqlite3SrcListAppend( assert( pParse->db!=0 ); db = pParse->db; if( pList==0 ){ - pList = sqlite3DbMallocRawNN(pParse->db, sizeof(SrcList) ); + pList = sqlite3DbMallocRawNN(pParse->db, SZ_SRCLIST(1)); if( pList==0 ) return 0; pList->nAlloc = 1; pList->nSrc = 1; @@ -127876,10 +128733,9 @@ SQLITE_PRIVATE With *sqlite3WithAdd( } if( pWith ){ - sqlite3_int64 nByte = sizeof(*pWith) + (sizeof(pWith->a[1]) * pWith->nCte); - pNew = sqlite3DbRealloc(db, pWith, nByte); + pNew = sqlite3DbRealloc(db, pWith, SZ_WITH(pWith->nCte+1)); }else{ - pNew = sqlite3DbMallocZero(db, sizeof(*pWith)); + pNew = sqlite3DbMallocZero(db, SZ_WITH(1)); } assert( (pNew!=0 && zName!=0) || db->mallocFailed ); @@ -129853,11 +130709,6 @@ static void substrFunc( i64 p1, p2; assert( argc==3 || argc==2 ); - if( sqlite3_value_type(argv[1])==SQLITE_NULL - || (argc==3 && sqlite3_value_type(argv[2])==SQLITE_NULL) - ){ - return; - } p0type = sqlite3_value_type(argv[0]); p1 = sqlite3_value_int64(argv[1]); if( p0type==SQLITE_BLOB ){ @@ -129875,19 +130726,23 @@ static void substrFunc( } } } -#ifdef SQLITE_SUBSTR_COMPATIBILITY - /* If SUBSTR_COMPATIBILITY is defined then substr(X,0,N) work the same as - ** as substr(X,1,N) - it returns the first N characters of X. 
This - ** is essentially a back-out of the bug-fix in check-in [5fc125d362df4b8] - ** from 2009-02-02 for compatibility of applications that exploited the - ** old buggy behavior. */ - if( p1==0 ) p1 = 1; /* */ -#endif if( argc==3 ){ p2 = sqlite3_value_int64(argv[2]); + if( p2==0 && sqlite3_value_type(argv[2])==SQLITE_NULL ) return; }else{ p2 = sqlite3_context_db_handle(context)->aLimit[SQLITE_LIMIT_LENGTH]; } + if( p1==0 ){ +#ifdef SQLITE_SUBSTR_COMPATIBILITY + /* If SUBSTR_COMPATIBILITY is defined then substr(X,0,N) work the same as + ** as substr(X,1,N) - it returns the first N characters of X. This + ** is essentially a back-out of the bug-fix in check-in [5fc125d362df4b8] + ** from 2009-02-02 for compatibility of applications that exploited the + ** old buggy behavior. */ + p1 = 1; /* */ +#endif + if( sqlite3_value_type(argv[1])==SQLITE_NULL ) return; + } if( p1<0 ){ p1 += len; if( p1<0 ){ @@ -130588,7 +131443,7 @@ static const char hexdigits[] = { ** Append to pStr text that is the SQL literal representation of the ** value contained in pValue. */ -SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum *pStr, sqlite3_value *pValue){ +SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum *pStr, sqlite3_value *pValue, int bEscape){ /* As currently implemented, the string must be initially empty. ** we might relax this requirement in the future, but that will ** require enhancements to the implementation. */ @@ -130636,7 +131491,7 @@ SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum *pStr, sqlite3_value *pValue){ } case SQLITE_TEXT: { const unsigned char *zArg = sqlite3_value_text(pValue); - sqlite3_str_appendf(pStr, "%Q", zArg); + sqlite3_str_appendf(pStr, bEscape ? "%#Q" : "%Q", zArg); break; } default: { @@ -130647,6 +131502,105 @@ SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum *pStr, sqlite3_value *pValue){ } } +/* +** Return true if z[] begins with N hexadecimal digits, and write +** a decoding of those digits into *pVal. Or return false if any +** one of the first N characters in z[] is not a hexadecimal digit. +*/ +static int isNHex(const char *z, int N, u32 *pVal){ + int i; + int v = 0; + for(i=0; i0 ){ + memmove(&zOut[j], &zIn[i], n); + j += n; + i += n; + } + if( zIn[i+1]=='\\' ){ + i += 2; + zOut[j++] = '\\'; + }else if( sqlite3Isxdigit(zIn[i+1]) ){ + if( !isNHex(&zIn[i+1], 4, &v) ) goto unistr_error; + i += 5; + j += sqlite3AppendOneUtf8Character(&zOut[j], v); + }else if( zIn[i+1]=='+' ){ + if( !isNHex(&zIn[i+2], 6, &v) ) goto unistr_error; + i += 8; + j += sqlite3AppendOneUtf8Character(&zOut[j], v); + }else if( zIn[i+1]=='u' ){ + if( !isNHex(&zIn[i+2], 4, &v) ) goto unistr_error; + i += 6; + j += sqlite3AppendOneUtf8Character(&zOut[j], v); + }else if( zIn[i+1]=='U' ){ + if( !isNHex(&zIn[i+2], 8, &v) ) goto unistr_error; + i += 10; + j += sqlite3AppendOneUtf8Character(&zOut[j], v); + }else{ + goto unistr_error; + } + } + zOut[j] = 0; + sqlite3_result_text64(context, zOut, j, sqlite3_free, SQLITE_UTF8); + return; + +unistr_error: + sqlite3_free(zOut); + sqlite3_result_error(context, "invalid Unicode escape", -1); + return; +} + + /* ** Implementation of the QUOTE() function. ** @@ -130656,6 +131610,10 @@ SQLITE_PRIVATE void sqlite3QuoteValue(StrAccum *pStr, sqlite3_value *pValue){ ** as needed. BLOBs are encoded as hexadecimal literals. Strings with ** embedded NUL characters cannot be represented as string literals in SQL ** and hence the returned string literal is truncated prior to the first NUL. 
+** +** If sqlite3_user_data() is non-zero, then the UNISTR_QUOTE() function is +** implemented instead. The difference is that UNISTR_QUOTE() uses the +** UNISTR() function to escape control characters. */ static void quoteFunc(sqlite3_context *context, int argc, sqlite3_value **argv){ sqlite3_str str; @@ -130663,7 +131621,7 @@ static void quoteFunc(sqlite3_context *context, int argc, sqlite3_value **argv){ assert( argc==1 ); UNUSED_PARAMETER(argc); sqlite3StrAccumInit(&str, db, 0, 0, db->aLimit[SQLITE_LIMIT_LENGTH]); - sqlite3QuoteValue(&str,argv[0]); + sqlite3QuoteValue(&str,argv[0],SQLITE_PTR_TO_INT(sqlite3_user_data(context))); sqlite3_result_text(context, sqlite3StrAccumFinish(&str), str.nChar, SQLITE_DYNAMIC); if( str.accError!=SQLITE_OK ){ @@ -130918,7 +131876,7 @@ static void replaceFunc( assert( zRep==sqlite3_value_text(argv[2]) ); nOut = nStr + 1; assert( nOut0 ){ + if( sqlite3_value_type(argv[i])!=SQLITE_NULL ){ + int k = sqlite3_value_bytes(argv[i]); const char *v = (const char*)sqlite3_value_text(argv[i]); if( v!=0 ){ if( j>0 && nSep>0 ){ @@ -131314,7 +132272,7 @@ static void kahanBabuskaNeumaierInit( ** that it returns NULL if it sums over no inputs. TOTAL returns ** 0.0 in that case. In addition, TOTAL always returns a float where ** SUM might return an integer if it never encounters a floating point -** value. TOTAL never fails, but SUM might through an exception if +** value. TOTAL never fails, but SUM might throw an exception if ** it overflows an integer. */ static void sumStep(sqlite3_context *context, int argc, sqlite3_value **argv){ @@ -132234,7 +133192,9 @@ SQLITE_PRIVATE void sqlite3RegisterBuiltinFunctions(void){ DFUNCTION(sqlite_version, 0, 0, 0, versionFunc ), DFUNCTION(sqlite_source_id, 0, 0, 0, sourceidFunc ), FUNCTION(sqlite_log, 2, 0, 0, errlogFunc ), + FUNCTION(unistr, 1, 0, 0, unistrFunc ), FUNCTION(quote, 1, 0, 0, quoteFunc ), + FUNCTION(unistr_quote, 1, 1, 0, quoteFunc ), VFUNCTION(last_insert_rowid, 0, 0, 0, last_insert_rowid), VFUNCTION(changes, 0, 0, 0, changes ), VFUNCTION(total_changes, 0, 0, 0, total_changes ), @@ -134521,7 +135481,7 @@ SQLITE_PRIVATE Select *sqlite3MultiValues(Parse *pParse, Select *pLeft, ExprList f = (f & pLeft->selFlags); } pSelect = sqlite3SelectNew(pParse, pRow, 0, 0, 0, 0, 0, f, 0); - pLeft->selFlags &= ~SF_MultiValue; + pLeft->selFlags &= ~(u32)SF_MultiValue; if( pSelect ){ pSelect->op = TK_ALL; pSelect->pPrior = pLeft; @@ -134903,28 +135863,22 @@ SQLITE_PRIVATE void sqlite3Insert( aTabColMap = sqlite3DbMallocZero(db, pTab->nCol*sizeof(int)); if( aTabColMap==0 ) goto insert_cleanup; for(i=0; inId; i++){ - const char *zCName = pColumn->a[i].zName; - u8 hName = sqlite3StrIHash(zCName); - for(j=0; jnCol; j++){ - if( pTab->aCol[j].hName!=hName ) continue; - if( sqlite3StrICmp(zCName, pTab->aCol[j].zCnName)==0 ){ - if( aTabColMap[j]==0 ) aTabColMap[j] = i+1; - if( i!=j ) bIdListInOrder = 0; - if( j==pTab->iPKey ){ - ipkColumn = i; assert( !withoutRowid ); - } -#ifndef SQLITE_OMIT_GENERATED_COLUMNS - if( pTab->aCol[j].colFlags & (COLFLAG_STORED|COLFLAG_VIRTUAL) ){ - sqlite3ErrorMsg(pParse, - "cannot INSERT into generated column \"%s\"", - pTab->aCol[j].zCnName); - goto insert_cleanup; - } -#endif - break; + j = sqlite3ColumnIndex(pTab, pColumn->a[i].zName); + if( j>=0 ){ + if( aTabColMap[j]==0 ) aTabColMap[j] = i+1; + if( i!=j ) bIdListInOrder = 0; + if( j==pTab->iPKey ){ + ipkColumn = i; assert( !withoutRowid ); } - } - if( j>=pTab->nCol ){ +#ifndef SQLITE_OMIT_GENERATED_COLUMNS + if( pTab->aCol[j].colFlags & 
(COLFLAG_STORED|COLFLAG_VIRTUAL) ){ + sqlite3ErrorMsg(pParse, + "cannot INSERT into generated column \"%s\"", + pTab->aCol[j].zCnName); + goto insert_cleanup; + } +#endif + }else{ if( sqlite3IsRowid(pColumn->a[i].zName) && !withoutRowid ){ ipkColumn = i; bIdListInOrder = 0; @@ -135222,7 +136176,7 @@ SQLITE_PRIVATE void sqlite3Insert( continue; }else if( pColumn==0 ){ /* Hidden columns that are not explicitly named in the INSERT - ** get there default value */ + ** get their default value */ sqlite3ExprCodeFactorable(pParse, sqlite3ColumnExpr(pTab, &pTab->aCol[i]), iRegStore); @@ -135947,7 +136901,7 @@ SQLITE_PRIVATE void sqlite3GenerateConstraintChecks( ** could happen in any order, but they are grouped up front for ** convenience. ** - ** 2018-08-14: Ticket https://www.sqlite.org/src/info/908f001483982c43 + ** 2018-08-14: Ticket https://sqlite.org/src/info/908f001483982c43 ** The order of constraints used to have OE_Update as (2) and OE_Abort ** and so forth as (1). But apparently PostgreSQL checks the OE_Update ** constraint before any others, so it had to be moved. @@ -137757,6 +138711,8 @@ struct sqlite3_api_routines { /* Version 3.44.0 and later */ void *(*get_clientdata)(sqlite3*,const char*); int (*set_clientdata)(sqlite3*, const char*, void*, void(*)(void*)); + /* Version 3.50.0 and later */ + int (*setlk_timeout)(sqlite3*,int,int); }; /* @@ -138090,6 +139046,8 @@ typedef int (*sqlite3_loadext_entry)( /* Version 3.44.0 and later */ #define sqlite3_get_clientdata sqlite3_api->get_clientdata #define sqlite3_set_clientdata sqlite3_api->set_clientdata +/* Version 3.50.0 and later */ +#define sqlite3_setlk_timeout sqlite3_api->setlk_timeout #endif /* !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) */ #if !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) @@ -138611,7 +139569,9 @@ static const sqlite3_api_routines sqlite3Apis = { sqlite3_stmt_explain, /* Version 3.44.0 and later */ sqlite3_get_clientdata, - sqlite3_set_clientdata + sqlite3_set_clientdata, + /* Version 3.50.0 and later */ + sqlite3_setlk_timeout }; /* True if x is the directory separator character @@ -139133,48 +140093,48 @@ static const char *const pragCName[] = { /* 13 */ "pk", /* 14 */ "hidden", /* table_info reuses 8 */ - /* 15 */ "schema", /* Used by: table_list */ - /* 16 */ "name", + /* 15 */ "name", /* Used by: function_list */ + /* 16 */ "builtin", /* 17 */ "type", - /* 18 */ "ncol", - /* 19 */ "wr", - /* 20 */ "strict", - /* 21 */ "seqno", /* Used by: index_xinfo */ - /* 22 */ "cid", - /* 23 */ "name", - /* 24 */ "desc", - /* 25 */ "coll", - /* 26 */ "key", - /* 27 */ "name", /* Used by: function_list */ - /* 28 */ "builtin", - /* 29 */ "type", - /* 30 */ "enc", - /* 31 */ "narg", - /* 32 */ "flags", - /* 33 */ "tbl", /* Used by: stats */ - /* 34 */ "idx", - /* 35 */ "wdth", - /* 36 */ "hght", - /* 37 */ "flgs", - /* 38 */ "seq", /* Used by: index_list */ - /* 39 */ "name", - /* 40 */ "unique", - /* 41 */ "origin", - /* 42 */ "partial", + /* 18 */ "enc", + /* 19 */ "narg", + /* 20 */ "flags", + /* 21 */ "schema", /* Used by: table_list */ + /* 22 */ "name", + /* 23 */ "type", + /* 24 */ "ncol", + /* 25 */ "wr", + /* 26 */ "strict", + /* 27 */ "seqno", /* Used by: index_xinfo */ + /* 28 */ "cid", + /* 29 */ "name", + /* 30 */ "desc", + /* 31 */ "coll", + /* 32 */ "key", + /* 33 */ "seq", /* Used by: index_list */ + /* 34 */ "name", + /* 35 */ "unique", + /* 36 */ "origin", + /* 37 */ "partial", + /* 38 */ "tbl", /* Used by: stats */ + /* 39 */ "idx", + /* 40 */ "wdth", + /* 41 */ "hght", 
+ /* 42 */ "flgs", /* 43 */ "table", /* Used by: foreign_key_check */ /* 44 */ "rowid", /* 45 */ "parent", /* 46 */ "fkid", - /* index_info reuses 21 */ - /* 47 */ "seq", /* Used by: database_list */ - /* 48 */ "name", - /* 49 */ "file", - /* 50 */ "busy", /* Used by: wal_checkpoint */ - /* 51 */ "log", - /* 52 */ "checkpointed", - /* collation_list reuses 38 */ + /* 47 */ "busy", /* Used by: wal_checkpoint */ + /* 48 */ "log", + /* 49 */ "checkpointed", + /* 50 */ "seq", /* Used by: database_list */ + /* 51 */ "name", + /* 52 */ "file", + /* index_info reuses 27 */ /* 53 */ "database", /* Used by: lock_status */ /* 54 */ "status", + /* collation_list reuses 33 */ /* 55 */ "cache_size", /* Used by: default_cache_size */ /* module_list pragma_list reuses 9 */ /* 56 */ "timeout", /* Used by: busy_timeout */ @@ -139267,7 +140227,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "collation_list", /* ePragTyp: */ PragTyp_COLLATION_LIST, /* ePragFlg: */ PragFlg_Result0, - /* ColNames: */ 38, 2, + /* ColNames: */ 33, 2, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_COMPILEOPTION_DIAGS) @@ -139302,7 +140262,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "database_list", /* ePragTyp: */ PragTyp_DATABASE_LIST, /* ePragFlg: */ PragFlg_Result0, - /* ColNames: */ 47, 3, + /* ColNames: */ 50, 3, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_PAGER_PRAGMAS) && !defined(SQLITE_OMIT_DEPRECATED) @@ -139382,7 +140342,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "function_list", /* ePragTyp: */ PragTyp_FUNCTION_LIST, /* ePragFlg: */ PragFlg_Result0, - /* ColNames: */ 27, 6, + /* ColNames: */ 15, 6, /* iArg: */ 0 }, #endif #endif @@ -139411,17 +140371,17 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "index_info", /* ePragTyp: */ PragTyp_INDEX_INFO, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt, - /* ColNames: */ 21, 3, + /* ColNames: */ 27, 3, /* iArg: */ 0 }, {/* zName: */ "index_list", /* ePragTyp: */ PragTyp_INDEX_LIST, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt, - /* ColNames: */ 38, 5, + /* ColNames: */ 33, 5, /* iArg: */ 0 }, {/* zName: */ "index_xinfo", /* ePragTyp: */ PragTyp_INDEX_INFO, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1|PragFlg_SchemaOpt, - /* ColNames: */ 21, 6, + /* ColNames: */ 27, 6, /* iArg: */ 1 }, #endif #if !defined(SQLITE_OMIT_INTEGRITY_CHECK) @@ -139600,7 +140560,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "stats", /* ePragTyp: */ PragTyp_STATS, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result0|PragFlg_SchemaReq, - /* ColNames: */ 33, 5, + /* ColNames: */ 38, 5, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_PAGER_PRAGMAS) @@ -139619,7 +140579,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "table_list", /* ePragTyp: */ PragTyp_TABLE_LIST, /* ePragFlg: */ PragFlg_NeedSchema|PragFlg_Result1, - /* ColNames: */ 15, 6, + /* ColNames: */ 21, 6, /* iArg: */ 0 }, {/* zName: */ "table_xinfo", /* ePragTyp: */ PragTyp_TABLE_INFO, @@ -139696,7 +140656,7 @@ static const PragmaName aPragmaName[] = { {/* zName: */ "wal_checkpoint", /* ePragTyp: */ PragTyp_WAL_CHECKPOINT, /* ePragFlg: */ PragFlg_NeedSchema, - /* ColNames: */ 50, 3, + /* ColNames: */ 47, 3, /* iArg: */ 0 }, #endif #if !defined(SQLITE_OMIT_FLAG_PRAGMAS) @@ -139718,7 +140678,7 @@ static const PragmaName aPragmaName[] = { ** the following macro or to the actual analysis_limit if it is non-zero, ** in order to prevent PRAGMA optimize from running for too long. 
** -** The value of 2000 is chosen emperically so that the worst-case run-time +** The value of 2000 is chosen empirically so that the worst-case run-time ** for PRAGMA optimize does not exceed 100 milliseconds against a variety ** of test databases on a RaspberryPI-4 compiled using -Os and without ** -DSQLITE_DEBUG. Of course, your mileage may vary. For the purpose of @@ -140835,7 +141795,10 @@ SQLITE_PRIVATE void sqlite3Pragma( } }else{ db->flags &= ~mask; - if( mask==SQLITE_DeferFKs ) db->nDeferredImmCons = 0; + if( mask==SQLITE_DeferFKs ){ + db->nDeferredImmCons = 0; + db->nDeferredCons = 0; + } if( (mask & SQLITE_WriteSchema)!=0 && sqlite3_stricmp(zRight, "reset")==0 ){ @@ -144004,7 +144967,7 @@ SQLITE_PRIVATE Select *sqlite3SelectNew( pNew->addrOpenEphm[0] = -1; pNew->addrOpenEphm[1] = -1; pNew->nSelectRow = 0; - if( pSrc==0 ) pSrc = sqlite3DbMallocZero(pParse->db, sizeof(*pSrc)); + if( pSrc==0 ) pSrc = sqlite3DbMallocZero(pParse->db, SZ_SRCLIST_1); pNew->pSrc = pSrc; pNew->pWhere = pWhere; pNew->pGroupBy = pGroupBy; @@ -144169,10 +145132,33 @@ SQLITE_PRIVATE int sqlite3JoinType(Parse *pParse, Token *pA, Token *pB, Token *p */ SQLITE_PRIVATE int sqlite3ColumnIndex(Table *pTab, const char *zCol){ int i; - u8 h = sqlite3StrIHash(zCol); - Column *pCol; - for(pCol=pTab->aCol, i=0; inCol; pCol++, i++){ - if( pCol->hName==h && sqlite3StrICmp(pCol->zCnName, zCol)==0 ) return i; + u8 h; + const Column *aCol; + int nCol; + + h = sqlite3StrIHash(zCol); + aCol = pTab->aCol; + nCol = pTab->nCol; + + /* See if the aHx gives us a lucky match */ + i = pTab->aHx[h % sizeof(pTab->aHx)]; + assert( i=nCol ) break; } return -1; } @@ -144423,7 +145409,7 @@ static int sqlite3ProcessJoin(Parse *pParse, Select *p){ } pE1 = sqlite3CreateColumnExpr(db, pSrc, iLeft, iLeftCol); sqlite3SrcItemColumnUsed(&pSrc->a[iLeft], iLeftCol); - if( (pSrc->a[0].fg.jointype & JT_LTORJ)!=0 ){ + if( (pSrc->a[0].fg.jointype & JT_LTORJ)!=0 && pParse->nErr==0 ){ /* This branch runs if the query contains one or more RIGHT or FULL ** JOINs. If only a single table on the left side of this join ** contains the zName column, then this branch is a no-op. @@ -144439,6 +145425,8 @@ static int sqlite3ProcessJoin(Parse *pParse, Select *p){ */ ExprList *pFuncArgs = 0; /* Arguments to the coalesce() */ static const Token tkCoalesce = { "coalesce", 8 }; + assert( pE1!=0 ); + ExprSetProperty(pE1, EP_CanBeNull); while( tableAndColumnIndex(pSrc, iLeft+1, i, zName, &iLeft, &iLeftCol, pRight->fg.isSynthUsing)!=0 ){ if( pSrc->a[iLeft].fg.isUsing==0 @@ -144455,7 +145443,13 @@ static int sqlite3ProcessJoin(Parse *pParse, Select *p){ if( pFuncArgs ){ pFuncArgs = sqlite3ExprListAppend(pParse, pFuncArgs, pE1); pE1 = sqlite3ExprFunction(pParse, pFuncArgs, &tkCoalesce, 0); + if( pE1 ){ + pE1->affExpr = SQLITE_AFF_DEFER; + } } + }else if( (pSrc->a[i+1].fg.jointype & JT_LEFT)!=0 && pParse->nErr==0 ){ + assert( pE1!=0 ); + ExprSetProperty(pE1, EP_CanBeNull); } pE2 = sqlite3CreateColumnExpr(db, pSrc, i+1, iRightCol); sqlite3SrcItemColumnUsed(pRight, iRightCol); @@ -145364,8 +146358,8 @@ static void selectInnerLoop( ** X extra columns. 
*/ SQLITE_PRIVATE KeyInfo *sqlite3KeyInfoAlloc(sqlite3 *db, int N, int X){ - int nExtra = (N+X)*(sizeof(CollSeq*)+1) - sizeof(CollSeq*); - KeyInfo *p = sqlite3DbMallocRawNN(db, sizeof(KeyInfo) + nExtra); + int nExtra = (N+X)*(sizeof(CollSeq*)+1); + KeyInfo *p = sqlite3DbMallocRawNN(db, SZ_KEYINFO(0) + nExtra); if( p ){ p->aSortFlags = (u8*)&p->aColl[N+X]; p->nKeyField = (u16)N; @@ -145373,7 +146367,7 @@ SQLITE_PRIVATE KeyInfo *sqlite3KeyInfoAlloc(sqlite3 *db, int N, int X){ p->enc = ENC(db); p->db = db; p->nRef = 1; - memset(&p[1], 0, nExtra); + memset(p->aColl, 0, nExtra); }else{ return (KeyInfo*)sqlite3OomFault(db); } @@ -147074,6 +148068,7 @@ static int multiSelect( multi_select_end: pDest->iSdst = dest.iSdst; pDest->nSdst = dest.nSdst; + pDest->iSDParm2 = dest.iSDParm2; if( pDelete ){ sqlite3ParserAddCleanup(pParse, sqlite3SelectDeleteGeneric, pDelete); } @@ -148062,9 +149057,9 @@ static int compoundHasDifferentAffinities(Select *p){ ** from 2015-02-09.) ** ** (3) If the subquery is the right operand of a LEFT JOIN then -** (3a) the subquery may not be a join and -** (3b) the FROM clause of the subquery may not contain a virtual -** table and +** (3a) the subquery may not be a join +** (**) Was (3b): "the FROM clause of the subquery may not contain +** a virtual table" ** (**) Was: "The outer query may not have a GROUP BY." This case ** is now managed correctly ** (3d) the outer query may not be DISTINCT. @@ -148280,7 +149275,7 @@ static int flattenSubquery( */ if( (pSubitem->fg.jointype & (JT_OUTER|JT_LTORJ))!=0 ){ if( pSubSrc->nSrc>1 /* (3a) */ - || IsVirtual(pSubSrc->a[0].pSTab) /* (3b) */ + /**** || IsVirtual(pSubSrc->a[0].pSTab) (3b)-omitted */ || (p->selFlags & SF_Distinct)!=0 /* (3d) */ || (pSubitem->fg.jointype & JT_RIGHT)!=0 /* (26) */ ){ @@ -148684,7 +149679,8 @@ static void constInsert( return; /* Already present. Return without doing anything. */ } } - if( sqlite3ExprAffinity(pColumn)==SQLITE_AFF_BLOB ){ + assert( SQLITE_AFF_NONEbHasAffBlob = 1; } @@ -148759,7 +149755,8 @@ static int propagateConstantExprRewriteOne( if( pColumn==pExpr ) continue; if( pColumn->iTable!=pExpr->iTable ) continue; if( pColumn->iColumn!=pExpr->iColumn ) continue; - if( bIgnoreAffBlob && sqlite3ExprAffinity(pColumn)==SQLITE_AFF_BLOB ){ + assert( SQLITE_AFF_NONEpWinDefn = 0; #endif - p->selFlags &= ~SF_Compound; + p->selFlags &= ~(u32)SF_Compound; assert( (p->selFlags & SF_Converted)==0 ); p->selFlags |= SF_Converted; assert( pNew->pPrior!=0 ); @@ -149889,7 +150886,7 @@ static int selectExpander(Walker *pWalker, Select *p){ pEList = p->pEList; if( pParse->pWith && (p->selFlags & SF_View) ){ if( p->pWith==0 ){ - p->pWith = (With*)sqlite3DbMallocZero(db, sizeof(With)); + p->pWith = (With*)sqlite3DbMallocZero(db, SZ_WITH(1) ); if( p->pWith==0 ){ return WRC_Abort; } @@ -151028,6 +152025,7 @@ static void agginfoFree(sqlite3 *db, void *pArg){ ** * There is no WHERE or GROUP BY or HAVING clauses on the subqueries ** * The outer query is a simple count(*) with no WHERE clause or other ** extraneous syntax. +** * None of the subqueries are DISTINCT (forumpost/a860f5fb2e 2025-03-10) ** ** Return TRUE if the optimization is undertaken. 
*/ @@ -151060,7 +152058,11 @@ static int countOfViewOptimization(Parse *pParse, Select *p){ if( pSub->op!=TK_ALL && pSub->pPrior ) return 0; /* Must be UNION ALL */ if( pSub->pWhere ) return 0; /* No WHERE clause */ if( pSub->pLimit ) return 0; /* No LIMIT clause */ - if( pSub->selFlags & SF_Aggregate ) return 0; /* Not an aggregate */ + if( pSub->selFlags & (SF_Aggregate|SF_Distinct) ){ + testcase( pSub->selFlags & SF_Aggregate ); + testcase( pSub->selFlags & SF_Distinct ); + return 0; /* Not an aggregate nor DISTINCT */ + } assert( pSub->pHaving==0 ); /* Due to the previous */ pSub = pSub->pPrior; /* Repeat over compound */ }while( pSub ); @@ -151072,14 +152074,14 @@ static int countOfViewOptimization(Parse *pParse, Select *p){ pExpr = 0; pSub = sqlite3SubqueryDetach(db, pFrom); sqlite3SrcListDelete(db, p->pSrc); - p->pSrc = sqlite3DbMallocZero(pParse->db, sizeof(*p->pSrc)); + p->pSrc = sqlite3DbMallocZero(pParse->db, SZ_SRCLIST_1); while( pSub ){ Expr *pTerm; pPrior = pSub->pPrior; pSub->pPrior = 0; pSub->pNext = 0; pSub->selFlags |= SF_Aggregate; - pSub->selFlags &= ~SF_Compound; + pSub->selFlags &= ~(u32)SF_Compound; pSub->nSelectRow = 0; sqlite3ParserAddCleanup(pParse, sqlite3ExprListDeleteGeneric, pSub->pEList); pTerm = pPrior ? sqlite3ExprDup(db, pCount, 0) : pCount; @@ -151094,7 +152096,7 @@ static int countOfViewOptimization(Parse *pParse, Select *p){ pSub = pPrior; } p->pEList->a[0].pExpr = pExpr; - p->selFlags &= ~SF_Aggregate; + p->selFlags &= ~(u32)SF_Aggregate; #if TREETRACE_ENABLED if( sqlite3TreeTrace & 0x200 ){ @@ -151301,7 +152303,7 @@ SQLITE_PRIVATE int sqlite3Select( testcase( pParse->earlyCleanup ); p->pOrderBy = 0; } - p->selFlags &= ~SF_Distinct; + p->selFlags &= ~(u32)SF_Distinct; p->selFlags |= SF_NoopOrderBy; } sqlite3SelectPrep(pParse, p, 0); @@ -151340,7 +152342,7 @@ SQLITE_PRIVATE int sqlite3Select( ** and leaving this flag set can cause errors if a compound sub-query ** in p->pSrc is flattened into this query and this function called ** again as part of compound SELECT processing. */ - p->selFlags &= ~SF_UFSrcCheck; + p->selFlags &= ~(u32)SF_UFSrcCheck; } if( pDest->eDest==SRT_Output ){ @@ -151829,7 +152831,7 @@ SQLITE_PRIVATE int sqlite3Select( && p->pWin==0 #endif ){ - p->selFlags &= ~SF_Distinct; + p->selFlags &= ~(u32)SF_Distinct; pGroupBy = p->pGroupBy = sqlite3ExprListDup(db, pEList, 0); if( pGroupBy ){ for(i=0; inExpr; i++){ @@ -151938,6 +152940,12 @@ SQLITE_PRIVATE int sqlite3Select( if( pWInfo==0 ) goto select_end; if( sqlite3WhereOutputRowCount(pWInfo) < p->nSelectRow ){ p->nSelectRow = sqlite3WhereOutputRowCount(pWInfo); + if( pDest->eDest<=SRT_DistQueue && pDest->eDest>=SRT_DistFifo ){ + /* TUNING: For a UNION CTE, because UNION is implies DISTINCT, + ** reduce the estimated output row count by 8 (LogEst 30). 
+ ** Search for tag-20250414a to see other cases */ + p->nSelectRow -= 30; + } } if( sDistinct.isTnct && sqlite3WhereIsDistinct(pWInfo) ){ sDistinct.eTnctType = sqlite3WhereIsDistinct(pWInfo); @@ -152311,6 +153319,10 @@ SQLITE_PRIVATE int sqlite3Select( if( iOrderByCol ){ Expr *pX = p->pEList->a[iOrderByCol-1].pExpr; Expr *pBase = sqlite3ExprSkipCollateAndLikely(pX); + while( ALWAYS(pBase!=0) && pBase->op==TK_IF_NULL_ROW ){ + pX = pBase->pLeft; + pBase = sqlite3ExprSkipCollateAndLikely(pX); + } if( ALWAYS(pBase!=0) && pBase->op!=TK_AGG_COLUMN && pBase->op!=TK_REGISTER @@ -152894,7 +153906,8 @@ SQLITE_PRIVATE Trigger *sqlite3TriggerList(Parse *pParse, Table *pTab){ assert( pParse->db->pVtabCtx==0 ); #endif assert( pParse->bReturning ); - assert( &(pParse->u1.pReturning->retTrig) == pTrig ); + assert( !pParse->isCreate ); + assert( &(pParse->u1.d.pReturning->retTrig) == pTrig ); pTrig->table = pTab->zName; pTrig->pTabSchema = pTab->pSchema; pTrig->pNext = pList; @@ -153862,7 +154875,8 @@ static void codeReturningTrigger( ExprList *pNew; Returning *pReturning; Select sSelect; - SrcList sFrom; + SrcList *pFrom; + u8 fromSpace[SZ_SRCLIST_1]; assert( v!=0 ); if( !pParse->bReturning ){ @@ -153871,19 +154885,21 @@ static void codeReturningTrigger( return; } assert( db->pParse==pParse ); - pReturning = pParse->u1.pReturning; + assert( !pParse->isCreate ); + pReturning = pParse->u1.d.pReturning; if( pTrigger != &(pReturning->retTrig) ){ /* This RETURNING trigger is for a different statement */ return; } memset(&sSelect, 0, sizeof(sSelect)); - memset(&sFrom, 0, sizeof(sFrom)); + pFrom = (SrcList*)fromSpace; + memset(pFrom, 0, SZ_SRCLIST_1); sSelect.pEList = sqlite3ExprListDup(db, pReturning->pReturnEL, 0); - sSelect.pSrc = &sFrom; - sFrom.nSrc = 1; - sFrom.a[0].pSTab = pTab; - sFrom.a[0].zName = pTab->zName; /* tag-20240424-1 */ - sFrom.a[0].iCursor = -1; + sSelect.pSrc = pFrom; + pFrom->nSrc = 1; + pFrom->a[0].pSTab = pTab; + pFrom->a[0].zName = pTab->zName; /* tag-20240424-1 */ + pFrom->a[0].iCursor = -1; sqlite3SelectPrep(pParse, &sSelect, 0); if( pParse->nErr==0 ){ assert( db->mallocFailed==0 ); @@ -154101,6 +155117,8 @@ static TriggerPrg *codeRowTrigger( sSubParse.eTriggerOp = pTrigger->op; sSubParse.nQueryLoop = pParse->nQueryLoop; sSubParse.prepFlags = pParse->prepFlags; + sSubParse.oldmask = 0; + sSubParse.newmask = 0; v = sqlite3GetVdbe(&sSubParse); if( v ){ @@ -154855,38 +155873,32 @@ SQLITE_PRIVATE void sqlite3Update( */ chngRowid = chngPk = 0; for(i=0; inExpr; i++){ - u8 hCol = sqlite3StrIHash(pChanges->a[i].zEName); /* If this is an UPDATE with a FROM clause, do not resolve expressions ** here. The call to sqlite3Select() below will do that. 
*/ if( nChangeFrom==0 && sqlite3ResolveExprNames(&sNC, pChanges->a[i].pExpr) ){ goto update_cleanup; } - for(j=0; jnCol; j++){ - if( pTab->aCol[j].hName==hCol - && sqlite3StrICmp(pTab->aCol[j].zCnName, pChanges->a[i].zEName)==0 - ){ - if( j==pTab->iPKey ){ - chngRowid = 1; - pRowidExpr = pChanges->a[i].pExpr; - iRowidExpr = i; - }else if( pPk && (pTab->aCol[j].colFlags & COLFLAG_PRIMKEY)!=0 ){ - chngPk = 1; - } -#ifndef SQLITE_OMIT_GENERATED_COLUMNS - else if( pTab->aCol[j].colFlags & COLFLAG_GENERATED ){ - testcase( pTab->aCol[j].colFlags & COLFLAG_VIRTUAL ); - testcase( pTab->aCol[j].colFlags & COLFLAG_STORED ); - sqlite3ErrorMsg(pParse, - "cannot UPDATE generated column \"%s\"", - pTab->aCol[j].zCnName); - goto update_cleanup; - } -#endif - aXRef[j] = i; - break; + j = sqlite3ColumnIndex(pTab, pChanges->a[i].zEName); + if( j>=0 ){ + if( j==pTab->iPKey ){ + chngRowid = 1; + pRowidExpr = pChanges->a[i].pExpr; + iRowidExpr = i; + }else if( pPk && (pTab->aCol[j].colFlags & COLFLAG_PRIMKEY)!=0 ){ + chngPk = 1; } - } - if( j>=pTab->nCol ){ +#ifndef SQLITE_OMIT_GENERATED_COLUMNS + else if( pTab->aCol[j].colFlags & COLFLAG_GENERATED ){ + testcase( pTab->aCol[j].colFlags & COLFLAG_VIRTUAL ); + testcase( pTab->aCol[j].colFlags & COLFLAG_STORED ); + sqlite3ErrorMsg(pParse, + "cannot UPDATE generated column \"%s\"", + pTab->aCol[j].zCnName); + goto update_cleanup; + } +#endif + aXRef[j] = i; + }else{ if( pPk==0 && sqlite3IsRowid(pChanges->a[i].zEName) ){ j = -1; chngRowid = 1; @@ -156209,7 +157221,7 @@ SQLITE_PRIVATE void sqlite3Vacuum(Parse *pParse, Token *pNm, Expr *pInto){ #else /* When SQLITE_BUG_COMPATIBLE_20160819 is defined, unrecognized arguments ** to VACUUM are silently ignored. This is a back-out of a bug fix that - ** occurred on 2016-08-19 (https://www.sqlite.org/src/info/083f9e6270). + ** occurred on 2016-08-19 (https://sqlite.org/src/info/083f9e6270). ** The buggy behavior is required for binary compatibility with some ** legacy applications. */ iDb = sqlite3FindDb(pParse->db, pNm); @@ -156288,7 +157300,7 @@ SQLITE_PRIVATE SQLITE_NOINLINE int sqlite3RunVacuum( saved_nChange = db->nChange; saved_nTotalChange = db->nTotalChange; saved_mTrace = db->mTrace; - db->flags |= SQLITE_WriteSchema | SQLITE_IgnoreChecks; + db->flags |= SQLITE_WriteSchema | SQLITE_IgnoreChecks | SQLITE_Comments; db->mDbFlags |= DBFLAG_PreferBuiltin | DBFLAG_Vacuum; db->flags &= ~(u64)(SQLITE_ForeignKeys | SQLITE_ReverseOrder | SQLITE_Defensive | SQLITE_CountRows); @@ -156993,11 +158005,12 @@ SQLITE_PRIVATE void sqlite3VtabFinishParse(Parse *pParse, Token *pEnd){ ** schema table. We just need to update that slot with all ** the information we've collected. ** - ** The VM register number pParse->regRowid holds the rowid of an + ** The VM register number pParse->u1.cr.regRowid holds the rowid of an ** entry in the sqlite_schema table that was created for this vtab ** by sqlite3StartTable(). */ iDb = sqlite3SchemaToIndex(db, pTab->pSchema); + assert( pParse->isCreate ); sqlite3NestedParse(pParse, "UPDATE %Q." 
LEGACY_SCHEMA_TABLE " " "SET type='table', name=%Q, tbl_name=%Q, rootpage=0, sql=%Q " @@ -157006,7 +158019,7 @@ SQLITE_PRIVATE void sqlite3VtabFinishParse(Parse *pParse, Token *pEnd){ pTab->zName, pTab->zName, zStmt, - pParse->regRowid + pParse->u1.cr.regRowid ); v = sqlite3GetVdbe(pParse); sqlite3ChangeCookie(pParse, iDb); @@ -158416,9 +159429,14 @@ struct WhereInfo { Bitmask revMask; /* Mask of ORDER BY terms that need reversing */ WhereClause sWC; /* Decomposition of the WHERE clause */ WhereMaskSet sMaskSet; /* Map cursor numbers to bitmasks */ - WhereLevel a[1]; /* Information about each nest loop in WHERE */ + WhereLevel a[FLEXARRAY]; /* Information about each nest loop in WHERE */ }; +/* +** The size (in bytes) of a WhereInfo object that holds N WhereLevels. +*/ +#define SZ_WHEREINFO(N) ROUND8(offsetof(WhereInfo,a)+(N)*sizeof(WhereLevel)) + /* ** Private interfaces - callable only by other where.c routines. ** @@ -159098,7 +160116,7 @@ static void adjustOrderByCol(ExprList *pOrderBy, ExprList *pEList){ /* ** pX is an expression of the form: (vector) IN (SELECT ...) ** In other words, it is a vector IN operator with a SELECT clause on the -** LHS. But not all terms in the vector are indexable and the terms might +** RHS. But not all terms in the vector are indexable and the terms might ** not be in the correct order for indexing. ** ** This routine makes a copy of the input pX expression and then adjusts @@ -159154,7 +160172,9 @@ static Expr *removeUnindexableInClauseTerms( int iField; assert( (pLoop->aLTerm[i]->eOperator & (WO_OR|WO_AND))==0 ); iField = pLoop->aLTerm[i]->u.x.iField - 1; - if( pOrigRhs->a[iField].pExpr==0 ) continue; /* Duplicate PK column */ + if( NEVER(pOrigRhs->a[iField].pExpr==0) ){ + continue; /* Duplicate PK column */ + } pRhs = sqlite3ExprListAppend(pParse, pRhs, pOrigRhs->a[iField].pExpr); pOrigRhs->a[iField].pExpr = 0; if( pRhs ) pRhs->a[pRhs->nExpr-1].u.x.iOrderByCol = iField+1; @@ -159251,7 +160271,7 @@ static SQLITE_NOINLINE void codeINTerm( return; } } - for(i=iEq;inLTerm; i++){ + for(i=iEq; inLTerm; i++){ assert( pLoop->aLTerm[i]!=0 ); if( pLoop->aLTerm[i]->pExpr==pX ) nEq++; } @@ -159260,22 +160280,13 @@ static SQLITE_NOINLINE void codeINTerm( if( !ExprUseXSelect(pX) || pX->x.pSelect->pEList->nExpr==1 ){ eType = sqlite3FindInIndex(pParse, pX, IN_INDEX_LOOP, 0, 0, &iTab); }else{ - Expr *pExpr = pTerm->pExpr; - if( pExpr->iTable==0 || !ExprHasProperty(pExpr, EP_Subrtn) ){ - sqlite3 *db = pParse->db; - pX = removeUnindexableInClauseTerms(pParse, iEq, pLoop, pX); - if( !db->mallocFailed ){ - aiMap = (int*)sqlite3DbMallocZero(pParse->db, sizeof(int)*nEq); - eType = sqlite3FindInIndex(pParse, pX, IN_INDEX_LOOP, 0, aiMap,&iTab); - pExpr->iTable = iTab; - } - sqlite3ExprDelete(db, pX); - }else{ - int n = sqlite3ExprVectorSize(pX->pLeft); - aiMap = (int*)sqlite3DbMallocZero(pParse->db, sizeof(int)*MAX(nEq,n)); - eType = sqlite3FindInIndex(pParse, pX, IN_INDEX_LOOP, 0, aiMap, &iTab); + sqlite3 *db = pParse->db; + Expr *pXMod = removeUnindexableInClauseTerms(pParse, iEq, pLoop, pX); + if( !db->mallocFailed ){ + aiMap = (int*)sqlite3DbMallocZero(db, sizeof(int)*nEq); + eType = sqlite3FindInIndex(pParse, pXMod, IN_INDEX_LOOP, 0, aiMap, &iTab); } - pX = pExpr; + sqlite3ExprDelete(db, pXMod); } if( eType==IN_INDEX_INDEX_DESC ){ @@ -159305,7 +160316,7 @@ static SQLITE_NOINLINE void codeINTerm( if( pIn ){ int iMap = 0; /* Index in aiMap[] */ pIn += i; - for(i=iEq;inLTerm; i++){ + for(i=iEq; inLTerm; i++){ if( pLoop->aLTerm[i]->pExpr==pX ){ int iOut = iTarget + i - iEq; 
if( eType==IN_INDEX_ROWID ){ @@ -160164,6 +161175,9 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( } sqlite3VdbeAddOp2(v, OP_Integer, pLoop->u.vtab.idxNum, iReg); sqlite3VdbeAddOp2(v, OP_Integer, nConstraint, iReg+1); + /* The instruction immediately prior to OP_VFilter must be an OP_Integer + ** that sets the "argc" value for xVFilter. This is necessary for + ** resolveP2() to work correctly. See tag-20250207a. */ sqlite3VdbeAddOp4(v, OP_VFilter, iCur, addrNotFound, iReg, pLoop->u.vtab.idxStr, pLoop->u.vtab.needFree ? P4_DYNAMIC : P4_STATIC); @@ -160754,12 +161768,13 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( if( pLevel->iLeftJoin==0 ){ /* If a partial index is driving the loop, try to eliminate WHERE clause ** terms from the query that must be true due to the WHERE clause of - ** the partial index. + ** the partial index. This optimization does not work on an outer join, + ** as shown by: ** - ** 2019-11-02 ticket 623eff57e76d45f6: This optimization does not work - ** for a LEFT JOIN. + ** 2019-11-02 ticket 623eff57e76d45f6 (LEFT JOIN) + ** 2025-05-29 forum post 7dee41d32506c4ae (RIGHT JOIN) */ - if( pIdx->pPartIdxWhere ){ + if( pIdx->pPartIdxWhere && pLevel->pRJ==0 ){ whereApplyPartialIndexConstraints(pIdx->pPartIdxWhere, iCur, pWC); } }else{ @@ -160866,8 +161881,7 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( int nNotReady; /* The number of notReady tables */ SrcItem *origSrc; /* Original list of tables */ nNotReady = pWInfo->nLevel - iLevel - 1; - pOrTab = sqlite3DbMallocRawNN(db, - sizeof(*pOrTab)+ nNotReady*sizeof(pOrTab->a[0])); + pOrTab = sqlite3DbMallocRawNN(db, SZ_SRCLIST(nNotReady+1)); if( pOrTab==0 ) return notReady; pOrTab->nAlloc = (u8)(nNotReady + 1); pOrTab->nSrc = pOrTab->nAlloc; @@ -160918,7 +161932,7 @@ SQLITE_PRIVATE Bitmask sqlite3WhereCodeOneLoopStart( ** ** This optimization also only applies if the (x1 OR x2 OR ...) term ** is not contained in the ON clause of a LEFT JOIN. - ** See ticket http://www.sqlite.org/src/info/f2369304e4 + ** See ticket http://sqlite.org/src/info/f2369304e4 ** ** 2022-02-04: Do not push down slices of a row-value comparison. ** In other words, "w" or "y" may not be a slice of a vector. Otherwise, @@ -161410,7 +162424,8 @@ SQLITE_PRIVATE SQLITE_NOINLINE void sqlite3WhereRightJoinLoop( WhereInfo *pSubWInfo; WhereLoop *pLoop = pLevel->pWLoop; SrcItem *pTabItem = &pWInfo->pTabList->a[pLevel->iFrom]; - SrcList sFrom; + SrcList *pFrom; + u8 fromSpace[SZ_SRCLIST_1]; Bitmask mAll = 0; int k; @@ -161454,13 +162469,14 @@ SQLITE_PRIVATE SQLITE_NOINLINE void sqlite3WhereRightJoinLoop( sqlite3ExprDup(pParse->db, pTerm->pExpr, 0)); } } - sFrom.nSrc = 1; - sFrom.nAlloc = 1; - memcpy(&sFrom.a[0], pTabItem, sizeof(SrcItem)); - sFrom.a[0].fg.jointype = 0; + pFrom = (SrcList*)fromSpace; + pFrom->nSrc = 1; + pFrom->nAlloc = 1; + memcpy(&pFrom->a[0], pTabItem, sizeof(SrcItem)); + pFrom->a[0].fg.jointype = 0; assert( pParse->withinRJSubrtn < 100 ); pParse->withinRJSubrtn++; - pSubWInfo = sqlite3WhereBegin(pParse, &sFrom, pSubWhere, 0, 0, 0, + pSubWInfo = sqlite3WhereBegin(pParse, pFrom, pSubWhere, 0, 0, 0, WHERE_RIGHT_JOIN, 0); if( pSubWInfo ){ int iCur = pLevel->iTabCur; @@ -162431,30 +163447,42 @@ static void exprAnalyzeOrTerm( ** 1. The SQLITE_Transitive optimization must be enabled ** 2. Must be either an == or an IS operator ** 3. Not originating in the ON clause of an OUTER JOIN -** 4. The affinities of A and B must be compatible -** 5a. Both operands use the same collating sequence OR -** 5b. 
The overall collating sequence is BINARY +** 4. The operator is not IS or else the query does not contain RIGHT JOIN +** 5. The affinities of A and B must be compatible +** 6a. Both operands use the same collating sequence OR +** 6b. The overall collating sequence is BINARY ** If this routine returns TRUE, that means that the RHS can be substituted ** for the LHS anyplace else in the WHERE clause where the LHS column occurs. ** This is an optimization. No harm comes from returning 0. But if 1 is ** returned when it should not be, then incorrect answers might result. */ -static int termIsEquivalence(Parse *pParse, Expr *pExpr){ +static int termIsEquivalence(Parse *pParse, Expr *pExpr, SrcList *pSrc){ char aff1, aff2; CollSeq *pColl; - if( !OptimizationEnabled(pParse->db, SQLITE_Transitive) ) return 0; - if( pExpr->op!=TK_EQ && pExpr->op!=TK_IS ) return 0; - if( ExprHasProperty(pExpr, EP_OuterON) ) return 0; + if( !OptimizationEnabled(pParse->db, SQLITE_Transitive) ) return 0; /* (1) */ + if( pExpr->op!=TK_EQ && pExpr->op!=TK_IS ) return 0; /* (2) */ + if( ExprHasProperty(pExpr, EP_OuterON) ) return 0; /* (3) */ + assert( pSrc!=0 ); + if( pExpr->op==TK_IS + && pSrc->nSrc + && (pSrc->a[0].fg.jointype & JT_LTORJ)!=0 + ){ + return 0; /* (4) */ + } aff1 = sqlite3ExprAffinity(pExpr->pLeft); aff2 = sqlite3ExprAffinity(pExpr->pRight); if( aff1!=aff2 && (!sqlite3IsNumericAffinity(aff1) || !sqlite3IsNumericAffinity(aff2)) ){ - return 0; + return 0; /* (5) */ } pColl = sqlite3ExprCompareCollSeq(pParse, pExpr); - if( sqlite3IsBinary(pColl) ) return 1; - return sqlite3ExprCollSeqMatch(pParse, pExpr->pLeft, pExpr->pRight); + if( !sqlite3IsBinary(pColl) + && !sqlite3ExprCollSeqMatch(pParse, pExpr->pLeft, pExpr->pRight) + ){ + return 0; /* (6) */ + } + return 1; } /* @@ -162719,8 +163747,8 @@ static void exprAnalyze( if( op==TK_IS ) pNew->wtFlags |= TERM_IS; pTerm = &pWC->a[idxTerm]; pTerm->wtFlags |= TERM_COPIED; - - if( termIsEquivalence(pParse, pDup) ){ + assert( pWInfo->pTabList!=0 ); + if( termIsEquivalence(pParse, pDup, pWInfo->pTabList) ){ pTerm->eOperator |= WO_EQUIV; eExtraOp = WO_EQUIV; } @@ -163448,11 +164476,16 @@ struct HiddenIndexInfo { int eDistinct; /* Value to return from sqlite3_vtab_distinct() */ u32 mIn; /* Mask of terms that are IN (...) */ u32 mHandleIn; /* Terms that vtab will handle as IN (...) */ - sqlite3_value *aRhs[1]; /* RHS values for constraints. MUST BE LAST - ** because extra space is allocated to hold up - ** to nTerm such values */ + sqlite3_value *aRhs[FLEXARRAY]; /* RHS values for constraints. 
MUST BE LAST + ** Extra space is allocated to hold up + ** to nTerm such values */ }; +/* Size (in bytes) of a HiddenIndeInfo object sufficient to hold as +** many as N constraints */ +#define SZ_HIDDENINDEXINFO(N) \ + (offsetof(HiddenIndexInfo,aRhs) + (N)*sizeof(sqlite3_value*)) + /* Forward declaration of methods */ static int whereLoopResize(sqlite3*, WhereLoop*, int); @@ -164517,6 +165550,8 @@ static SQLITE_NOINLINE void constructAutomaticIndex( } /* Construct the Index object to describe this index */ + assert( nKeyCol <= pTable->nCol + MAX(0, pTable->nCol - BMS + 1) ); + /* ^-- This guarantees that the number of index columns will fit in the u16 */ pIdx = sqlite3AllocateIndexObject(pParse->db, nKeyCol+HasRowid(pTable), 0, &zNotUsed); if( pIdx==0 ) goto end_auto_index_create; @@ -164928,8 +165963,8 @@ static sqlite3_index_info *allocateIndexInfo( */ pIdxInfo = sqlite3DbMallocZero(pParse->db, sizeof(*pIdxInfo) + (sizeof(*pIdxCons) + sizeof(*pUsage))*nTerm - + sizeof(*pIdxOrderBy)*nOrderBy + sizeof(*pHidden) - + sizeof(sqlite3_value*)*nTerm ); + + sizeof(*pIdxOrderBy)*nOrderBy + + SZ_HIDDENINDEXINFO(nTerm) ); if( pIdxInfo==0 ){ sqlite3ErrorMsg(pParse, "out of memory"); return 0; @@ -166565,11 +167600,8 @@ static int whereLoopAddBtreeIndex( assert( pNew->u.btree.nBtm==0 ); opMask = WO_EQ|WO_IN|WO_GT|WO_GE|WO_LT|WO_LE|WO_ISNULL|WO_IS; } - if( pProbe->bUnordered || pProbe->bLowQual ){ - if( pProbe->bUnordered ) opMask &= ~(WO_GT|WO_GE|WO_LT|WO_LE); - if( pProbe->bLowQual && pSrc->fg.isIndexedBy==0 ){ - opMask &= ~(WO_EQ|WO_IN|WO_IS); - } + if( pProbe->bUnordered ){ + opMask &= ~(WO_GT|WO_GE|WO_LT|WO_LE); } assert( pNew->u.btree.nEqnColumn ); @@ -166642,6 +167674,7 @@ static int whereLoopAddBtreeIndex( if( ExprUseXSelect(pExpr) ){ /* "x IN (SELECT ...)": TUNING: the SELECT returns 25 rows */ int i; + int bRedundant = 0; nIn = 46; assert( 46==sqlite3LogEst(25) ); /* The expression may actually be of the form (x, y) IN (SELECT...). @@ -166650,7 +167683,20 @@ static int whereLoopAddBtreeIndex( ** for each such term. The following loop checks that pTerm is the ** first such term in use, and sets nIn back to 0 if it is not. */ for(i=0; inLTerm-1; i++){ - if( pNew->aLTerm[i] && pNew->aLTerm[i]->pExpr==pExpr ) nIn = 0; + if( pNew->aLTerm[i] && pNew->aLTerm[i]->pExpr==pExpr ){ + nIn = 0; + if( pNew->aLTerm[i]->u.x.iField == pTerm->u.x.iField ){ + /* Detect when two or more columns of an index match the same + ** column of a vector IN operater, and avoid adding the column + ** to the WhereLoop more than once. 
See tag-20250707-01 + ** in test/rowvalue.test */ + bRedundant = 1; + } + } + } + if( bRedundant ){ + pNew->nLTerm--; + continue; } }else if( ALWAYS(pExpr->x.pList && pExpr->x.pList->nExpr) ){ /* "x IN (value, value, ...)" */ @@ -166882,7 +167928,7 @@ static int whereLoopAddBtreeIndex( if( (pNew->wsFlags & WHERE_TOP_LIMIT)==0 && pNew->u.btree.nEqnColumn && (pNew->u.btree.nEqnKeyCol || - pProbe->idxType!=SQLITE_IDXTYPE_PRIMARYKEY) + pProbe->idxType!=SQLITE_IDXTYPE_PRIMARYKEY) ){ if( pNew->u.btree.nEq>3 ){ sqlite3ProgressCheck(pParse); @@ -167011,6 +168057,7 @@ static int whereUsablePartialIndex( if( (!ExprHasProperty(pExpr, EP_OuterON) || pExpr->w.iJoin==iTab) && ((jointype & JT_OUTER)==0 || ExprHasProperty(pExpr, EP_OuterON)) && sqlite3ExprImpliesExpr(pParse, pExpr, pWhere, iTab) + && !sqlite3ExprImpliesExpr(pParse, pExpr, pWhere, -1) && (pTerm->wtFlags & TERM_VNULL)==0 ){ return 1; @@ -167506,7 +168553,7 @@ static int whereLoopAddBtree( && (HasRowid(pTab) || pWInfo->pSelect!=0 || sqlite3FaultSim(700)) ){ WHERETRACE(0x200, - ("-> %s a covering index according to bitmasks\n", + ("-> %s is a covering index according to bitmasks\n", pProbe->zName, m==0 ? "is" : "is not")); pNew->wsFlags = WHERE_IDX_ONLY | WHERE_INDEXED; } @@ -170123,10 +171170,7 @@ SQLITE_PRIVATE WhereInfo *sqlite3WhereBegin( ** field (type Bitmask) it must be aligned on an 8-byte boundary on ** some architectures. Hence the ROUND8() below. */ - nByteWInfo = ROUND8P(sizeof(WhereInfo)); - if( nTabList>1 ){ - nByteWInfo = ROUND8P(nByteWInfo + (nTabList-1)*sizeof(WhereLevel)); - } + nByteWInfo = SZ_WHEREINFO(nTabList); pWInfo = sqlite3DbMallocRawNN(db, nByteWInfo + sizeof(WhereLoop)); if( db->mallocFailed ){ sqlite3DbFree(db, pWInfo); @@ -170343,7 +171387,8 @@ SQLITE_PRIVATE WhereInfo *sqlite3WhereBegin( } /* TUNING: Assume that a DISTINCT clause on a subquery reduces - ** the output size by a factor of 8 (LogEst -30). + ** the output size by a factor of 8 (LogEst -30). Search for + ** tag-20250414a to see other cases. */ if( (pWInfo->wctrlFlags & WHERE_WANT_DISTINCT)!=0 ){ WHERETRACE(0x0080,("nRowOut reduced from %d to %d due to DISTINCT\n", @@ -172078,7 +173123,7 @@ SQLITE_PRIVATE int sqlite3WindowRewrite(Parse *pParse, Select *p){ p->pWhere = 0; p->pGroupBy = 0; p->pHaving = 0; - p->selFlags &= ~SF_Aggregate; + p->selFlags &= ~(u32)SF_Aggregate; p->selFlags |= SF_WinRewrite; /* Create the ORDER BY clause for the sub-select. This is the concatenation @@ -174218,6 +175263,11 @@ SQLITE_PRIVATE void sqlite3WindowCodeStep( /* #include "sqliteInt.h" */ +/* +** Verify that the pParse->isCreate field is set +*/ +#define ASSERT_IS_CREATE assert(pParse->isCreate) + /* ** Disable all error recovery processing in the parser push-down ** automaton. 
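The hunks above replace one-element trailing arrays (for example WhereLevel a[1] in WhereInfo and aRhs[1] in HiddenIndexInfo) with FLEXARRAY members and pair each of them with an offsetof()-based sizing macro such as SZ_WHEREINFO(N) or SZ_HIDDENINDEXINFO(N). A minimal sketch of that idiom follows; the names Demo, SZ_DEMO and demoAlloc are illustrative only and do not appear in the patch.

#include <stddef.h>
#include <stdlib.h>

/* FLEXARRAY expands to nothing on C99 compilers (a true flexible array
** member) and to 1 on older compilers (one-element array fallback). */
#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)
# define FLEXARRAY
#else
# define FLEXARRAY 1
#endif

typedef struct Demo Demo;
struct Demo {
  int nItem;              /* Number of entries in aItem[] */
  int aItem[FLEXARRAY];   /* Trailing flexible array - must be last */
};

/* Size in bytes of a Demo object holding N items.  Using offsetof()
** instead of sizeof(Demo) gives a consistent result under either
** definition of FLEXARRAY. */
#define SZ_DEMO(N) (offsetof(Demo,aItem) + (size_t)(N)*sizeof(int))

static Demo *demoAlloc(int nItem){
  Demo *p = malloc( SZ_DEMO(nItem) );
  if( p ) p->nItem = nItem;
  return p;
}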
@@ -174281,6 +175331,10 @@ static void parserSyntaxError(Parse *pParse, Token *p){ static void disableLookaside(Parse *pParse){ sqlite3 *db = pParse->db; pParse->disableLookaside++; +#ifdef SQLITE_DEBUG + pParse->isCreate = 1; +#endif + memset(&pParse->u1.cr, 0, sizeof(pParse->u1.cr)); DisableLookaside; } @@ -177917,7 +178971,9 @@ static YYACTIONTYPE yy_reduce( } break; case 14: /* createkw ::= CREATE */ -{disableLookaside(pParse);} +{ + disableLookaside(pParse); +} break; case 15: /* ifnotexists ::= */ case 18: /* temp ::= */ yytestcase(yyruleno==18); @@ -178009,7 +179065,7 @@ static YYACTIONTYPE yy_reduce( break; case 32: /* ccons ::= CONSTRAINT nm */ case 67: /* tcons ::= CONSTRAINT nm */ yytestcase(yyruleno==67); -{pParse->constraintName = yymsp[0].minor.yy0;} +{ASSERT_IS_CREATE; pParse->u1.cr.constraintName = yymsp[0].minor.yy0;} break; case 33: /* ccons ::= DEFAULT scantok term */ {sqlite3AddDefaultValue(pParse,yymsp[0].minor.yy590,yymsp[-1].minor.yy0.z,&yymsp[-1].minor.yy0.z[yymsp[-1].minor.yy0.n]);} @@ -178119,7 +179175,7 @@ static YYACTIONTYPE yy_reduce( {yymsp[-1].minor.yy502 = 0;} break; case 66: /* tconscomma ::= COMMA */ -{pParse->constraintName.n = 0;} +{ASSERT_IS_CREATE; pParse->u1.cr.constraintName.n = 0;} break; case 68: /* tcons ::= PRIMARY KEY LP sortlist autoinc RP onconf */ {sqlite3AddPrimaryKey(pParse,yymsp[-3].minor.yy402,yymsp[0].minor.yy502,yymsp[-2].minor.yy502,0);} @@ -178206,8 +179262,8 @@ static YYACTIONTYPE yy_reduce( if( pRhs ){ pRhs->op = (u8)yymsp[-1].minor.yy502; pRhs->pPrior = pLhs; - if( ALWAYS(pLhs) ) pLhs->selFlags &= ~SF_MultiValue; - pRhs->selFlags &= ~SF_MultiValue; + if( ALWAYS(pLhs) ) pLhs->selFlags &= ~(u32)SF_MultiValue; + pRhs->selFlags &= ~(u32)SF_MultiValue; if( yymsp[-1].minor.yy502!=TK_ALL ) pParse->hasCompound = 1; }else{ sqlite3SelectDelete(pParse->db, pLhs); @@ -178847,12 +179903,21 @@ static YYACTIONTYPE yy_reduce( ** expr1 IN () ** expr1 NOT IN () ** - ** simplify to constants 0 (false) and 1 (true), respectively, - ** regardless of the value of expr1. + ** simplify to constants 0 (false) and 1 (true), respectively. + ** + ** Except, do not apply this optimization if expr1 contains a function + ** because that function might be an aggregate (we don't know yet whether + ** it is or not) and if it is an aggregate, that could change the meaning + ** of the whole query. */ - sqlite3ExprUnmapAndDelete(pParse, yymsp[-4].minor.yy590); - yymsp[-4].minor.yy590 = sqlite3Expr(pParse->db, TK_STRING, yymsp[-3].minor.yy502 ? "true" : "false"); - if( yymsp[-4].minor.yy590 ) sqlite3ExprIdToTrueFalse(yymsp[-4].minor.yy590); + Expr *pB = sqlite3Expr(pParse->db, TK_STRING, yymsp[-3].minor.yy502 ? "true" : "false"); + if( pB ) sqlite3ExprIdToTrueFalse(pB); + if( !ExprHasProperty(yymsp[-4].minor.yy590, EP_HasFunc) ){ + sqlite3ExprUnmapAndDelete(pParse, yymsp[-4].minor.yy590); + yymsp[-4].minor.yy590 = pB; + }else{ + yymsp[-4].minor.yy590 = sqlite3PExpr(pParse, yymsp[-3].minor.yy502 ? 
TK_OR : TK_AND, pB, yymsp[-4].minor.yy590); + } }else{ Expr *pRHS = yymsp[-1].minor.yy402->a[0].pExpr; if( yymsp[-1].minor.yy402->nExpr==1 && sqlite3ExprIsConstant(pParse,pRHS) && yymsp[-4].minor.yy590->op!=TK_VECTOR ){ @@ -179012,6 +180077,10 @@ static YYACTIONTYPE yy_reduce( { sqlite3BeginTrigger(pParse, &yymsp[-7].minor.yy0, &yymsp[-6].minor.yy0, yymsp[-5].minor.yy502, yymsp[-4].minor.yy28.a, yymsp[-4].minor.yy28.b, yymsp[-2].minor.yy563, yymsp[0].minor.yy590, yymsp[-10].minor.yy502, yymsp[-8].minor.yy502); yymsp[-10].minor.yy0 = (yymsp[-6].minor.yy0.n==0?yymsp[-7].minor.yy0:yymsp[-6].minor.yy0); /*A-overwrites-T*/ +#ifdef SQLITE_DEBUG + assert( pParse->isCreate ); /* Set by createkw reduce action */ + pParse->isCreate = 0; /* But, should not be set for CREATE TRIGGER */ +#endif } break; case 262: /* trigger_time ::= BEFORE|AFTER */ @@ -180454,7 +181523,7 @@ static int getToken(const unsigned char **pz){ int t; /* Token type to return */ do { z += sqlite3GetToken(z, &t); - }while( t==TK_SPACE ); + }while( t==TK_SPACE || t==TK_COMMENT ); if( t==TK_ID || t==TK_STRING || t==TK_JOIN_KW @@ -180947,7 +182016,11 @@ SQLITE_PRIVATE int sqlite3RunParser(Parse *pParse, const char *zSql){ assert( n==6 ); tokenType = analyzeFilterKeyword((const u8*)&zSql[6], lastTokenParsed); #endif /* SQLITE_OMIT_WINDOWFUNC */ - }else if( tokenType==TK_COMMENT && (db->flags & SQLITE_Comments)!=0 ){ + }else if( tokenType==TK_COMMENT + && (db->init.busy || (db->flags & SQLITE_Comments)!=0) + ){ + /* Ignore SQL comments if either (1) we are reparsing the schema or + ** (2) SQLITE_DBCONFIG_ENABLE_COMMENTS is turned on (the default). */ zSql += n; continue; }else if( tokenType!=TK_QNUMBER ){ @@ -181842,6 +182915,14 @@ SQLITE_API int sqlite3_initialize(void){ if( rc==SQLITE_OK ){ sqlite3PCacheBufferSetup( sqlite3GlobalConfig.pPage, sqlite3GlobalConfig.szPage, sqlite3GlobalConfig.nPage); +#ifdef SQLITE_EXTRA_INIT_MUTEXED + { + int SQLITE_EXTRA_INIT_MUTEXED(const char*); + rc = SQLITE_EXTRA_INIT_MUTEXED(0); + } +#endif + } + if( rc==SQLITE_OK ){ sqlite3MemoryBarrier(); sqlite3GlobalConfig.isInit = 1; #ifdef SQLITE_EXTRA_INIT @@ -182298,17 +183379,22 @@ SQLITE_API int sqlite3_config(int op, ...){ ** If lookaside is already active, return SQLITE_BUSY. ** ** The sz parameter is the number of bytes in each lookaside slot. -** The cnt parameter is the number of slots. If pStart is NULL the -** space for the lookaside memory is obtained from sqlite3_malloc(). -** If pStart is not NULL then it is sz*cnt bytes of memory to use for -** the lookaside memory. +** The cnt parameter is the number of slots. If pBuf is NULL the +** space for the lookaside memory is obtained from sqlite3_malloc() +** or similar. If pBuf is not NULL then it is sz*cnt bytes of memory +** to use for the lookaside memory. */ -static int setupLookaside(sqlite3 *db, void *pBuf, int sz, int cnt){ +static int setupLookaside( + sqlite3 *db, /* Database connection being configured */ + void *pBuf, /* Memory to use for lookaside. 
May be NULL */ + int sz, /* Desired size of each lookaside memory slot */ + int cnt /* Number of slots to allocate */ +){ #ifndef SQLITE_OMIT_LOOKASIDE - void *pStart; - sqlite3_int64 szAlloc; - int nBig; /* Number of full-size slots */ - int nSm; /* Number smaller LOOKASIDE_SMALL-byte slots */ + void *pStart; /* Start of the lookaside buffer */ + sqlite3_int64 szAlloc; /* Total space set aside for lookaside memory */ + int nBig; /* Number of full-size slots */ + int nSm; /* Number smaller LOOKASIDE_SMALL-byte slots */ if( sqlite3LookasideUsed(db,0)>0 ){ return SQLITE_BUSY; @@ -182321,19 +183407,22 @@ static int setupLookaside(sqlite3 *db, void *pBuf, int sz, int cnt){ sqlite3_free(db->lookaside.pStart); } /* The size of a lookaside slot after ROUNDDOWN8 needs to be larger - ** than a pointer to be useful. + ** than a pointer and small enough to fit in a u16. */ - sz = ROUNDDOWN8(sz); /* IMP: R-33038-09382 */ + sz = ROUNDDOWN8(sz); if( sz<=(int)sizeof(LookasideSlot*) ) sz = 0; if( sz>65528 ) sz = 65528; - if( cnt<0 ) cnt = 0; + /* Count must be at least 1 to be useful, but not so large as to use + ** more than 0x7fff0000 total bytes for lookaside. */ + if( cnt<1 ) cnt = 0; + if( sz>0 && cnt>(0x7fff0000/sz) ) cnt = 0x7fff0000/sz; szAlloc = (i64)sz*(i64)cnt; - if( sz==0 || cnt==0 ){ + if( szAlloc==0 ){ sz = 0; pStart = 0; }else if( pBuf==0 ){ sqlite3BeginBenignMalloc(); - pStart = sqlite3Malloc( szAlloc ); /* IMP: R-61949-35727 */ + pStart = sqlite3Malloc( szAlloc ); sqlite3EndBenignMalloc(); if( pStart ) szAlloc = sqlite3MallocSize(pStart); }else{ @@ -183310,6 +184399,9 @@ SQLITE_API int sqlite3_busy_handler( db->busyHandler.pBusyArg = pArg; db->busyHandler.nBusy = 0; db->busyTimeout = 0; +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + db->setlkTimeout = 0; +#endif sqlite3_mutex_leave(db->mutex); return SQLITE_OK; } @@ -183359,12 +184451,49 @@ SQLITE_API int sqlite3_busy_timeout(sqlite3 *db, int ms){ sqlite3_busy_handler(db, (int(*)(void*,int))sqliteDefaultBusyCallback, (void*)db); db->busyTimeout = ms; +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + db->setlkTimeout = ms; +#endif }else{ sqlite3_busy_handler(db, 0, 0); } return SQLITE_OK; } +/* +** Set the setlk timeout value. +*/ +SQLITE_API int sqlite3_setlk_timeout(sqlite3 *db, int ms, int flags){ +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + int iDb; + int bBOC = ((flags & SQLITE_SETLK_BLOCK_ON_CONNECT) ? 1 : 0); +#endif +#ifdef SQLITE_ENABLE_API_ARMOR + if( !sqlite3SafetyCheckOk(db) ) return SQLITE_MISUSE_BKPT; +#endif + if( ms<-1 ) return SQLITE_RANGE; +#ifdef SQLITE_ENABLE_SETLK_TIMEOUT + sqlite3_mutex_enter(db->mutex); + db->setlkTimeout = ms; + db->setlkFlags = flags; + sqlite3BtreeEnterAll(db); + for(iDb=0; iDbnDb; iDb++){ + Btree *pBt = db->aDb[iDb].pBt; + if( pBt ){ + sqlite3_file *fd = sqlite3PagerFile(sqlite3BtreePager(pBt)); + sqlite3OsFileControlHint(fd, SQLITE_FCNTL_BLOCK_ON_CONNECT, (void*)&bBOC); + } + } + sqlite3BtreeLeaveAll(db); + sqlite3_mutex_leave(db->mutex); +#endif +#if !defined(SQLITE_ENABLE_API_ARMOR) && !defined(SQLITE_ENABLE_SETLK_TIMEOUT) + UNUSED_PARAMETER(db); + UNUSED_PARAMETER(flags); +#endif + return SQLITE_OK; +} + /* ** Cause any pending operation to stop at its earliest opportunity. 
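The new sqlite3_setlk_timeout() entry point above only has an effect when SQLite is compiled with SQLITE_ENABLE_SETLK_TIMEOUT; in that configuration it records the timeout and flags and pushes the SQLITE_FCNTL_BLOCK_ON_CONNECT hint down to each open database file, while in other builds it simply validates its arguments and returns SQLITE_OK. A brief usage sketch, assuming such a build and a header that declares the new API:

#include <sqlite3.h>

/* Ask the lock layer to wait up to two seconds for file locks, and to
** block while first connecting to a database held by another process.
** Passing ms < -1 is rejected with SQLITE_RANGE. */
static int configureLockWait(sqlite3 *db){
  return sqlite3_setlk_timeout(db, 2000, SQLITE_SETLK_BLOCK_ON_CONNECT);
}

Note that, as the sqlite3_busy_timeout() hunk above shows, setting a busy timeout afterwards also overwrites the stored setlk timeout value.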
*/ @@ -185330,7 +186459,7 @@ SQLITE_API int sqlite3_set_clientdata( return SQLITE_OK; }else{ size_t n = strlen(zName); - p = sqlite3_malloc64( sizeof(DbClientData)+n+1 ); + p = sqlite3_malloc64( SZ_DBCLIENTDATA(n+1) ); if( p==0 ){ if( xDestructor ) xDestructor(pData); sqlite3_mutex_leave(db->mutex); @@ -185484,13 +186613,10 @@ SQLITE_API int sqlite3_table_column_metadata( if( zColumnName==0 ){ /* Query for existence of table only */ }else{ - for(iCol=0; iColnCol; iCol++){ + iCol = sqlite3ColumnIndex(pTab, zColumnName); + if( iCol>=0 ){ pCol = &pTab->aCol[iCol]; - if( 0==sqlite3StrICmp(pCol->zCnName, zColumnName) ){ - break; - } - } - if( iCol==pTab->nCol ){ + }else{ if( HasRowid(pTab) && sqlite3IsRowid(zColumnName) ){ iCol = pTab->iPKey; pCol = iCol>=0 ? &pTab->aCol[iCol] : 0; @@ -185699,8 +186825,8 @@ SQLITE_API int sqlite3_test_control(int op, ...){ /* sqlite3_test_control(SQLITE_TESTCTRL_FK_NO_ACTION, sqlite3 *db, int b); ** ** If b is true, then activate the SQLITE_FkNoAction setting. If b is - ** false then clearn that setting. If the SQLITE_FkNoAction setting is - ** abled, all foreign key ON DELETE and ON UPDATE actions behave as if + ** false then clear that setting. If the SQLITE_FkNoAction setting is + ** enabled, all foreign key ON DELETE and ON UPDATE actions behave as if ** they were NO ACTION, regardless of how they are defined. ** ** NB: One must usually run "PRAGMA writable_schema=RESET" after @@ -187047,7 +188173,7 @@ SQLITE_PRIVATE void sqlite3ConnectionClosed(sqlite3 *db){ ** Here, array { X } means zero or more occurrences of X, adjacent in ** memory. A "position" is an index of a token in the token stream ** generated by the tokenizer. Note that POS_END and POS_COLUMN occur -** in the same logical place as the position element, and act as sentinals +** in the same logical place as the position element, and act as sentinels ** ending a position list array. POS_END is 0. POS_COLUMN is 1. ** The positions numbers are not stored literally but rather as two more ** than the difference from the prior position, or the just the position plus @@ -187266,6 +188392,13 @@ SQLITE_PRIVATE void sqlite3ConnectionClosed(sqlite3 *db){ #ifndef _FTSINT_H #define _FTSINT_H +/* #include */ +/* #include */ +/* #include */ +/* #include */ +/* #include */ +/* #include */ + #if !defined(NDEBUG) && !defined(SQLITE_DEBUG) # define NDEBUG 1 #endif @@ -187735,6 +188868,19 @@ typedef sqlite3_int64 i64; /* 8-byte signed integer */ #define deliberate_fall_through +/* +** Macros needed to provide flexible arrays in a portable way +*/ +#ifndef offsetof +# define offsetof(STRUCTURE,FIELD) ((size_t)((char*)&((STRUCTURE*)0)->FIELD)) +#endif +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) +# define FLEXARRAY +#else +# define FLEXARRAY 1 +#endif + + #endif /* SQLITE_AMALGAMATION */ #ifdef SQLITE_DEBUG @@ -187839,7 +188985,7 @@ struct Fts3Table { #endif #if defined(SQLITE_DEBUG) || defined(SQLITE_TEST) - /* True to disable the incremental doclist optimization. This is controled + /* True to disable the incremental doclist optimization. This is controlled ** by special insert command 'test-no-incr-doclist'. */ int bNoIncrDoclist; @@ -187891,7 +189037,7 @@ struct Fts3Cursor { /* ** The Fts3Cursor.eSearch member is always set to one of the following. -** Actualy, Fts3Cursor.eSearch can be greater than or equal to +** Actually, Fts3Cursor.eSearch can be greater than or equal to ** FTS3_FULLTEXT_SEARCH. If so, then Fts3Cursor.eSearch - 2 is the index ** of the column to be searched. 
For example, in ** @@ -187964,9 +189110,13 @@ struct Fts3Phrase { */ int nToken; /* Number of tokens in the phrase */ int iColumn; /* Index of column this phrase must match */ - Fts3PhraseToken aToken[1]; /* One entry for each token in the phrase */ + Fts3PhraseToken aToken[FLEXARRAY]; /* One for each token in the phrase */ }; +/* Size (in bytes) of an Fts3Phrase object large enough to hold N tokens */ +#define SZ_FTS3PHRASE(N) \ + (offsetof(Fts3Phrase,aToken)+(N)*sizeof(Fts3PhraseToken)) + /* ** A tree of these objects forms the RHS of a MATCH operator. ** @@ -188200,12 +189350,6 @@ SQLITE_PRIVATE int sqlite3Fts3IntegrityCheck(Fts3Table *p, int *pbOk); # define SQLITE_CORE 1 #endif -/* #include */ -/* #include */ -/* #include */ -/* #include */ -/* #include */ -/* #include */ /* #include "fts3.h" */ #ifndef SQLITE_CORE @@ -190544,7 +191688,7 @@ static int fts3DoclistOrMerge( ** sizes of the two inputs, plus enough space for exactly one of the input ** docids to grow. ** - ** A symetric argument may be made if the doclists are in descending + ** A symmetric argument may be made if the doclists are in descending ** order. */ aOut = sqlite3_malloc64((i64)n1+n2+FTS3_VARINT_MAX-1+FTS3_BUFFER_PADDING); @@ -192343,7 +193487,7 @@ static int fts3EvalDeferredPhrase(Fts3Cursor *pCsr, Fts3Phrase *pPhrase){ nDistance = iPrev - nMaxUndeferred; } - aOut = (char *)sqlite3Fts3MallocZero(nPoslist+FTS3_BUFFER_PADDING); + aOut = (char *)sqlite3Fts3MallocZero(((i64)nPoslist)+FTS3_BUFFER_PADDING); if( !aOut ){ sqlite3_free(aPoslist); return SQLITE_NOMEM; @@ -192642,7 +193786,7 @@ static int incrPhraseTokenNext( ** ** * does not contain any deferred tokens. ** -** Advance it to the next matching documnent in the database and populate +** Advance it to the next matching document in the database and populate ** the Fts3Doclist.pList and nList fields. ** ** If there is no "next" entry and no error occurs, then *pbEof is set to @@ -193649,7 +194793,7 @@ static int fts3EvalNext(Fts3Cursor *pCsr){ } /* -** Restart interation for expression pExpr so that the next call to +** Restart iteration for expression pExpr so that the next call to ** fts3EvalNext() visits the first row. Do not allow incremental ** loading or merging of phrase doclists for this iteration. ** @@ -194841,6 +195985,23 @@ SQLITE_PRIVATE int sqlite3Fts3OpenTokenizer( */ static int fts3ExprParse(ParseContext *, const char *, int, Fts3Expr **, int *); +/* +** Search buffer z[], size n, for a '"' character. Or, if enable_parenthesis +** is defined, search for '(' and ')' as well. Return the index of the first +** such character in the buffer. If there is no such character, return -1. +*/ +static int findBarredChar(const char *z, int n){ + int ii; + for(ii=0; iiiLangid, z, i, &pCursor); + *pnConsumed = n; + rc = sqlite3Fts3OpenTokenizer(pTokenizer, pParse->iLangid, z, n, &pCursor); if( rc==SQLITE_OK ){ const char *zToken; int nToken = 0, iStart = 0, iEnd = 0, iPosition = 0; @@ -194882,7 +196036,18 @@ static int getNextToken( rc = pModule->xNext(pCursor, &zToken, &nToken, &iStart, &iEnd, &iPosition); if( rc==SQLITE_OK ){ - nByte = sizeof(Fts3Expr) + sizeof(Fts3Phrase) + nToken; + /* Check that this tokenization did not gobble up any " characters. Or, + ** if enable_parenthesis is true, that it did not gobble up any + ** open or close parenthesis characters either. If it did, call + ** getNextToken() again, but pass only that part of the input buffer + ** up to the first such character. 
*/ + int iBarred = findBarredChar(z, iEnd); + if( iBarred>=0 ){ + pModule->xClose(pCursor); + return getNextToken(pParse, iCol, z, iBarred, ppExpr, pnConsumed); + } + + nByte = sizeof(Fts3Expr) + SZ_FTS3PHRASE(1) + nToken; pRet = (Fts3Expr *)sqlite3Fts3MallocZero(nByte); if( !pRet ){ rc = SQLITE_NOMEM; @@ -194892,7 +196057,7 @@ static int getNextToken( pRet->pPhrase->nToken = 1; pRet->pPhrase->iColumn = iCol; pRet->pPhrase->aToken[0].n = nToken; - pRet->pPhrase->aToken[0].z = (char *)&pRet->pPhrase[1]; + pRet->pPhrase->aToken[0].z = (char*)&pRet->pPhrase->aToken[1]; memcpy(pRet->pPhrase->aToken[0].z, zToken, nToken); if( iEnd=0 ){ + *pnConsumed = iBarred; + } rc = SQLITE_OK; } @@ -194963,9 +196132,9 @@ static int getNextString( Fts3Expr *p = 0; sqlite3_tokenizer_cursor *pCursor = 0; char *zTemp = 0; - int nTemp = 0; + i64 nTemp = 0; - const int nSpace = sizeof(Fts3Expr) + sizeof(Fts3Phrase); + const int nSpace = sizeof(Fts3Expr) + SZ_FTS3PHRASE(1); int nToken = 0; /* The final Fts3Expr data structure, including the Fts3Phrase, @@ -195337,7 +196506,7 @@ static int fts3ExprParse( /* The isRequirePhrase variable is set to true if a phrase or ** an expression contained in parenthesis is required. If a - ** binary operator (AND, OR, NOT or NEAR) is encounted when + ** binary operator (AND, OR, NOT or NEAR) is encountered when ** isRequirePhrase is set, this is a syntax error. */ if( !isPhrase && isRequirePhrase ){ @@ -195919,7 +197088,6 @@ static void fts3ExprTestCommon( } if( rc!=SQLITE_OK && rc!=SQLITE_NOMEM ){ - sqlite3Fts3ExprFree(pExpr); sqlite3_result_error(context, "Error parsing expression", -1); }else if( rc==SQLITE_NOMEM || !(zBuf = exprToString(pExpr, 0)) ){ sqlite3_result_error_nomem(context); @@ -196162,7 +197330,7 @@ static void fts3HashInsertElement( } -/* Resize the hash table so that it cantains "new_size" buckets. +/* Resize the hash table so that it contains "new_size" buckets. ** "new_size" must be a power of 2. The hash table might fail ** to resize if sqliteMalloc() fails. ** @@ -196617,7 +197785,7 @@ static int star_oh(const char *z){ /* ** If the word ends with zFrom and xCond() is true for the stem -** of the word that preceeds the zFrom ending, then change the +** of the word that precedes the zFrom ending, then change the ** ending to zTo. ** ** The input word *pz and zFrom are both in reverse order. zTo @@ -198128,7 +199296,7 @@ static int fts3tokFilterMethod( fts3tokResetCursor(pCsr); if( idxNum==1 ){ const char *zByte = (const char *)sqlite3_value_text(apVal[0]); - int nByte = sqlite3_value_bytes(apVal[0]); + sqlite3_int64 nByte = sqlite3_value_bytes(apVal[0]); pCsr->zInput = sqlite3_malloc64(nByte+1); if( pCsr->zInput==0 ){ rc = SQLITE_NOMEM; @@ -202200,7 +203368,7 @@ static int fts3IncrmergePush( ** ** It is assumed that the buffer associated with pNode is already large ** enough to accommodate the new entry. The buffer associated with pPrev -** is extended by this function if requrired. +** is extended by this function if required. ** ** If an error (i.e. OOM condition) occurs, an SQLite error code is ** returned. Otherwise, SQLITE_OK. @@ -203863,7 +205031,7 @@ SQLITE_PRIVATE int sqlite3Fts3DeferToken( /* ** SQLite value pRowid contains the rowid of a row that may or may not be ** present in the FTS3 table. If it is, delete it and adjust the contents -** of subsiduary data structures accordingly. +** of subsidiary data structures accordingly. 
*/ static int fts3DeleteByRowid( Fts3Table *p, @@ -204189,9 +205357,13 @@ struct MatchinfoBuffer { int nElem; int bGlobal; /* Set if global data is loaded */ char *zMatchinfo; - u32 aMatchinfo[1]; + u32 aMI[FLEXARRAY]; }; +/* Size (in bytes) of a MatchinfoBuffer sufficient for N elements */ +#define SZ_MATCHINFOBUFFER(N) \ + (offsetof(MatchinfoBuffer,aMI)+(((N)+1)/2)*sizeof(u64)) + /* ** The snippet() and offsets() functions both return text values. An instance @@ -204216,13 +205388,13 @@ struct StrBuffer { static MatchinfoBuffer *fts3MIBufferNew(size_t nElem, const char *zMatchinfo){ MatchinfoBuffer *pRet; sqlite3_int64 nByte = sizeof(u32) * (2*(sqlite3_int64)nElem + 1) - + sizeof(MatchinfoBuffer); + + SZ_MATCHINFOBUFFER(1); sqlite3_int64 nStr = strlen(zMatchinfo); pRet = sqlite3Fts3MallocZero(nByte + nStr+1); if( pRet ){ - pRet->aMatchinfo[0] = (u8*)(&pRet->aMatchinfo[1]) - (u8*)pRet; - pRet->aMatchinfo[1+nElem] = pRet->aMatchinfo[0] + pRet->aMI[0] = (u8*)(&pRet->aMI[1]) - (u8*)pRet; + pRet->aMI[1+nElem] = pRet->aMI[0] + sizeof(u32)*((int)nElem+1); pRet->nElem = (int)nElem; pRet->zMatchinfo = ((char*)pRet) + nByte; @@ -204236,10 +205408,10 @@ static MatchinfoBuffer *fts3MIBufferNew(size_t nElem, const char *zMatchinfo){ static void fts3MIBufferFree(void *p){ MatchinfoBuffer *pBuf = (MatchinfoBuffer*)((u8*)p - ((u32*)p)[-1]); - assert( (u32*)p==&pBuf->aMatchinfo[1] - || (u32*)p==&pBuf->aMatchinfo[pBuf->nElem+2] + assert( (u32*)p==&pBuf->aMI[1] + || (u32*)p==&pBuf->aMI[pBuf->nElem+2] ); - if( (u32*)p==&pBuf->aMatchinfo[1] ){ + if( (u32*)p==&pBuf->aMI[1] ){ pBuf->aRef[1] = 0; }else{ pBuf->aRef[2] = 0; @@ -204256,18 +205428,18 @@ static void (*fts3MIBufferAlloc(MatchinfoBuffer *p, u32 **paOut))(void*){ if( p->aRef[1]==0 ){ p->aRef[1] = 1; - aOut = &p->aMatchinfo[1]; + aOut = &p->aMI[1]; xRet = fts3MIBufferFree; } else if( p->aRef[2]==0 ){ p->aRef[2] = 1; - aOut = &p->aMatchinfo[p->nElem+2]; + aOut = &p->aMI[p->nElem+2]; xRet = fts3MIBufferFree; }else{ aOut = (u32*)sqlite3_malloc64(p->nElem * sizeof(u32)); if( aOut ){ xRet = sqlite3_free; - if( p->bGlobal ) memcpy(aOut, &p->aMatchinfo[1], p->nElem*sizeof(u32)); + if( p->bGlobal ) memcpy(aOut, &p->aMI[1], p->nElem*sizeof(u32)); } } @@ -204277,7 +205449,7 @@ static void (*fts3MIBufferAlloc(MatchinfoBuffer *p, u32 **paOut))(void*){ static void fts3MIBufferSetGlobal(MatchinfoBuffer *p){ p->bGlobal = 1; - memcpy(&p->aMatchinfo[2+p->nElem], &p->aMatchinfo[1], p->nElem*sizeof(u32)); + memcpy(&p->aMI[2+p->nElem], &p->aMI[1], p->nElem*sizeof(u32)); } /* @@ -204692,7 +205864,7 @@ static int fts3StringAppend( } /* If there is insufficient space allocated at StrBuffer.z, use realloc() - ** to grow the buffer until so that it is big enough to accomadate the + ** to grow the buffer until so that it is big enough to accommodate the ** appended data. 
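Several FTS3 hunks above widen byte counts before arithmetic rather than after it: nTemp and nByte become 64-bit integers, and nPoslist gains a cast so the allocation size is computed as ((i64)nPoslist)+FTS3_BUFFER_PADDING. Widening one operand is enough, because the rest of the expression is then evaluated in the wider type. A small self-contained illustration of the difference (the function and parameter names are illustrative, not from the patch):

#include <stdint.h>

/* The addition is performed in int and can wrap before the result is
** converted to the 64-bit return type. */
static int64_t sizeNarrow(int nPoslist, int nPad){
  return nPoslist + nPad;
}

/* Casting one operand first makes the addition itself 64-bit, so the
** computed allocation size cannot wrap for any int inputs. */
static int64_t sizeWide(int nPoslist, int nPad){
  return (int64_t)nPoslist + nPad;
}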
*/ if( pStr->n+nAppend+1>=pStr->nAlloc ){ @@ -205104,16 +206276,16 @@ static size_t fts3MatchinfoSize(MatchInfo *pInfo, char cArg){ break; case FTS3_MATCHINFO_LHITS: - nVal = pInfo->nCol * pInfo->nPhrase; + nVal = (size_t)pInfo->nCol * pInfo->nPhrase; break; case FTS3_MATCHINFO_LHITS_BM: - nVal = pInfo->nPhrase * ((pInfo->nCol + 31) / 32); + nVal = (size_t)pInfo->nPhrase * ((pInfo->nCol + 31) / 32); break; default: assert( cArg==FTS3_MATCHINFO_HITS ); - nVal = pInfo->nCol * pInfo->nPhrase * 3; + nVal = (size_t)pInfo->nCol * pInfo->nPhrase * 3; break; } @@ -206671,8 +207843,8 @@ SQLITE_PRIVATE int sqlite3FtsUnicodeFold(int c, int eRemoveDiacritic){ ** Beginning with version 3.45.0 (circa 2024-01-01), these routines also ** accept BLOB values that have JSON encoded using a binary representation ** called "JSONB". The name JSONB comes from PostgreSQL, however the on-disk -** format SQLite JSONB is completely different and incompatible with -** PostgreSQL JSONB. +** format for SQLite-JSONB is completely different and incompatible with +** PostgreSQL-JSONB. ** ** Decoding and interpreting JSONB is still O(N) where N is the size of ** the input, the same as text JSON. However, the constant of proportionality @@ -206729,7 +207901,7 @@ SQLITE_PRIVATE int sqlite3FtsUnicodeFold(int c, int eRemoveDiacritic){ ** ** The payload size need not be expressed in its minimal form. For example, ** if the payload size is 10, the size can be expressed in any of 5 different -** ways: (1) (X>>4)==10, (2) (X>>4)==12 following by on 0x0a byte, +** ways: (1) (X>>4)==10, (2) (X>>4)==12 following by one 0x0a byte, ** (3) (X>>4)==13 followed by 0x00 and 0x0a, (4) (X>>4)==14 followed by ** 0x00 0x00 0x00 0x0a, or (5) (X>>4)==15 followed by 7 bytes of 0x00 and ** a single byte of 0x0a. The shorter forms are preferred, of course, but @@ -206739,7 +207911,7 @@ SQLITE_PRIVATE int sqlite3FtsUnicodeFold(int c, int eRemoveDiacritic){ ** the size when it becomes known, resulting in a non-minimal encoding. ** ** The value (X>>4)==15 is not actually used in the current implementation -** (as SQLite is currently unable handle BLOBs larger than about 2GB) +** (as SQLite is currently unable to handle BLOBs larger than about 2GB) ** but is included in the design to allow for future enhancements. ** ** The payload follows the header. NULL, TRUE, and FALSE have no payload and @@ -206799,23 +207971,47 @@ static const char * const jsonbType[] = { ** increase for the text-JSON parser. (Ubuntu14.10 gcc 4.8.4 x64 with -Os). 
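The JSONB header layout described above keeps the element type in the low nibble of the first byte and a payload-size selector in the high nibble: selectors 0 through 11 store the size directly, while 12, 13, 14 and 15 mean that the size follows in 1, 2, 4 or 8 big-endian bytes. That rule can be condensed into a small decoder. The sketch below mirrors the shape of jsonbPayloadSize() in this patch but is a simplified illustration with no bounds or minimal-encoding checks:

#include <stdint.h>

/* Decode the header of the JSONB element that starts at a[0].  Returns
** the number of header bytes consumed and writes the payload size to
** *pSz.  Sketch only: the caller must guarantee enough input bytes. */
static int jsonbHeaderDecode(const uint8_t *a, uint64_t *pSz){
  uint8_t x = a[0]>>4;                   /* high nibble: size selector */
  if( x<=11 ){ *pSz = x;    return 1; }  /* size stored in the nibble  */
  if( x==12 ){ *pSz = a[1]; return 2; }  /* one trailing size byte     */
  if( x==13 ){ *pSz = ((uint32_t)a[1]<<8) | a[2]; return 3; }
  if( x==14 ){
    *pSz = ((uint32_t)a[1]<<24) | ((uint32_t)a[2]<<16)
         | ((uint32_t)a[3]<<8)  |  a[4];
    return 5;
  }
  *pSz = 0;                              /* x==15: eight size bytes,   */
  for(int i=1; i<=8; i++){               /* reserved for future use    */
    *pSz = (*pSz<<8) | a[i];
  }
  return 9;
}

For example, a 10-byte payload is minimally written with a single header byte whose high nibble is 10, but the same element may legally use selector 12 followed by an 0x0a size byte, exactly as the comment above spells out.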
*/ static const char jsonIsSpace[] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +#ifdef SQLITE_ASCII +/*0 1 2 3 4 5 6 7 8 9 a b c d e f */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, /* 0 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 1 */ + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 2 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 3 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 4 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 5 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 6 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 7 */ + + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 8 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 9 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* a */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* b */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* c */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* d */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* e */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* f */ +#endif +#ifdef SQLITE_EBCDIC +/*0 1 2 3 4 5 6 7 8 9 a b c d e f */ + 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, /* 0 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 1 */ + 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 2 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 3 */ + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 4 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 5 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 6 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 7 */ + + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 8 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 9 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* a */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* b */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* c */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* d */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* e */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* f */ +#endif - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }; #define jsonIsspace(x) (jsonIsSpace[(unsigned char)x]) @@ -206823,7 +208019,13 @@ static const char jsonIsSpace[] = { ** The set of all space characters recognized by jsonIsspace(). ** Useful as the second argument to strspn(). */ +#ifdef SQLITE_ASCII static const char jsonSpaces[] = "\011\012\015\040"; +#endif +#ifdef SQLITE_EBCDIC +static const char jsonSpaces[] = "\005\045\015\100"; +#endif + /* ** Characters that are special to JSON. Control characters, @@ -206832,23 +208034,46 @@ static const char jsonSpaces[] = "\011\012\015\040"; ** it in the set of special characters. 
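As the comment above notes, jsonSpaces[] exists so that a run of JSON whitespace can be skipped with a single strspn() call instead of a byte-by-byte loop. A tiny sketch of that use, assuming the ASCII build where the set is tab, newline, carriage return and space:

#include <string.h>

/* Return a pointer to the first non-whitespace character in z, using
** the same four characters as the ASCII jsonSpaces[] table above. */
static const char *skipJsonSpace(const char *z){
  return z + strspn(z, "\011\012\015\040");
}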
*/ static const char jsonIsOk[256] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +#ifdef SQLITE_ASCII +/*0 1 2 3 4 5 6 7 8 9 a b c d e f */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 0 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 1 */ + 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, /* 2 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 3 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 4 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, /* 5 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 6 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 7 */ - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 8 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 9 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* a */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* b */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* c */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* d */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* e */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 /* f */ +#endif +#ifdef SQLITE_EBCDIC +/*0 1 2 3 4 5 6 7 8 9 a b c d e f */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 0 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 1 */ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, /* 2 */ + 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, /* 3 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 4 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 5 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 6 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, /* 7 */ + + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 8 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* 9 */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* a */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* b */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* c */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* d */ + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, /* e */ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 /* f */ +#endif }; /* Objects */ @@ -206993,7 +208218,7 @@ struct JsonParse { ** Forward references **************************************************************************/ static void jsonReturnStringAsBlob(JsonString*); -static int jsonFuncArgMightBeBinary(sqlite3_value *pJson); +static int jsonArgIsJsonb(sqlite3_value *pJson, JsonParse *p); static u32 jsonTranslateBlobToText(const JsonParse*,u32,JsonString*); static void jsonReturnParse(sqlite3_context*,JsonParse*); static JsonParse *jsonParseFuncArg(sqlite3_context*,sqlite3_value*,u32); @@ -207067,7 +208292,7 @@ static int jsonCacheInsert( ** most-recently used entry if it isn't so already. 
** ** The JsonParse object returned still belongs to the Cache and might -** be deleted at any moment. If the caller whants the JsonParse to +** be deleted at any moment. If the caller wants the JsonParse to ** linger, it needs to increment the nPJRef reference counter. */ static JsonParse *jsonCacheSearch( @@ -207411,11 +208636,9 @@ static void jsonAppendSqlValue( break; } default: { - if( jsonFuncArgMightBeBinary(pValue) ){ - JsonParse px; - memset(&px, 0, sizeof(px)); - px.aBlob = (u8*)sqlite3_value_blob(pValue); - px.nBlob = sqlite3_value_bytes(pValue); + JsonParse px; + memset(&px, 0, sizeof(px)); + if( jsonArgIsJsonb(pValue, &px) ){ jsonTranslateBlobToText(&px, 0, p); }else if( p->eErr==0 ){ sqlite3_result_error(p->pCtx, "JSON cannot hold BLOB values", -1); @@ -207734,7 +208957,7 @@ static void jsonWrongNumArgs( */ static int jsonBlobExpand(JsonParse *pParse, u32 N){ u8 *aNew; - u32 t; + u64 t; assert( N>pParse->nBlobAlloc ); if( pParse->nBlobAlloc==0 ){ t = 100; @@ -207744,8 +208967,9 @@ static int jsonBlobExpand(JsonParse *pParse, u32 N){ if( tdb, pParse->aBlob, t); if( aNew==0 ){ pParse->oom = 1; return 1; } + assert( t<0x7fffffff ); pParse->aBlob = aNew; - pParse->nBlobAlloc = t; + pParse->nBlobAlloc = (u32)t; return 0; } @@ -207812,7 +209036,7 @@ static SQLITE_NOINLINE void jsonBlobExpandAndAppendNode( } -/* Append an node type byte together with the payload size and +/* Append a node type byte together with the payload size and ** possibly also the payload. ** ** If aPayload is not NULL, then it is a pointer to the payload which @@ -207881,8 +209105,10 @@ static int jsonBlobChangePayloadSize( nExtra = 1; }else if( szType==13 ){ nExtra = 2; - }else{ + }else if( szType==14 ){ nExtra = 4; + }else{ + nExtra = 8; } if( szPayload<=11 ){ nNeeded = 0; @@ -208352,7 +209578,12 @@ json_parse_restart: || c=='n' || c=='r' || c=='t' || (c=='u' && jsonIs4Hex(&z[j+1])) ){ if( opcode==JSONB_TEXT ) opcode = JSONB_TEXTJ; - }else if( c=='\'' || c=='0' || c=='v' || c=='\n' + }else if( c=='\'' || c=='v' || c=='\n' +#ifdef SQLITE_BUG_COMPATIBLE_20250510 + || (c=='0') /* Legacy bug compatible */ +#else + || (c=='0' && !sqlite3Isdigit(z[j+1])) /* Correct implementation */ +#endif || (0xe2==(u8)c && 0x80==(u8)z[j+1] && (0xa8==(u8)z[j+2] || 0xa9==(u8)z[j+2])) || (c=='x' && jsonIs2Hex(&z[j+1])) ){ @@ -208702,10 +209933,7 @@ static u32 jsonbPayloadSize(const JsonParse *pParse, u32 i, u32 *pSz){ u8 x; u32 sz; u32 n; - if( NEVER(i>pParse->nBlob) ){ - *pSz = 0; - return 0; - } + assert( i<=pParse->nBlob ); x = pParse->aBlob[i]>>4; if( x<=11 ){ sz = x; @@ -208742,15 +209970,15 @@ static u32 jsonbPayloadSize(const JsonParse *pParse, u32 i, u32 *pSz){ *pSz = 0; return 0; } - sz = (pParse->aBlob[i+5]<<24) + (pParse->aBlob[i+6]<<16) + + sz = ((u32)pParse->aBlob[i+5]<<24) + (pParse->aBlob[i+6]<<16) + (pParse->aBlob[i+7]<<8) + pParse->aBlob[i+8]; n = 9; } if( (i64)i+sz+n > pParse->nBlob && (i64)i+sz+n > pParse->nBlob-pParse->delta ){ - sz = 0; - n = 0; + *pSz = 0; + return 0; } *pSz = sz; return n; @@ -208847,9 +210075,12 @@ static u32 jsonTranslateBlobToText( } case JSONB_TEXT: case JSONB_TEXTJ: { - jsonAppendChar(pOut, '"'); - jsonAppendRaw(pOut, (const char*)&pParse->aBlob[i+n], sz); - jsonAppendChar(pOut, '"'); + if( pOut->nUsed+sz+2<=pOut->nAlloc || jsonStringGrow(pOut, sz+2)==0 ){ + pOut->zBuf[pOut->nUsed] = '"'; + memcpy(pOut->zBuf+pOut->nUsed+1,(const char*)&pParse->aBlob[i+n],sz); + pOut->zBuf[pOut->nUsed+sz+1] = '"'; + pOut->nUsed += sz+2; + } break; } case JSONB_TEXT5: { @@ -209088,33 +210319,6 @@ static 
u32 jsonTranslateBlobToPrettyText( return i; } - -/* Return true if the input pJson -** -** For performance reasons, this routine does not do a detailed check of the -** input BLOB to ensure that it is well-formed. Hence, false positives are -** possible. False negatives should never occur, however. -*/ -static int jsonFuncArgMightBeBinary(sqlite3_value *pJson){ - u32 sz, n; - const u8 *aBlob; - int nBlob; - JsonParse s; - if( sqlite3_value_type(pJson)!=SQLITE_BLOB ) return 0; - aBlob = sqlite3_value_blob(pJson); - nBlob = sqlite3_value_bytes(pJson); - if( nBlob<1 ) return 0; - if( NEVER(aBlob==0) || (aBlob[0] & 0x0f)>JSONB_OBJECT ) return 0; - memset(&s, 0, sizeof(s)); - s.aBlob = (u8*)aBlob; - s.nBlob = nBlob; - n = jsonbPayloadSize(&s, 0, &sz); - if( n==0 ) return 0; - if( sz+n!=(u32)nBlob ) return 0; - if( (aBlob[0] & 0x0f)<=JSONB_FALSE && sz>0 ) return 0; - return sz+n==(u32)nBlob; -} - /* ** Given that a JSONB_ARRAY object starts at offset i, return ** the number of entries in that array. @@ -209147,6 +210351,82 @@ static void jsonAfterEditSizeAdjust(JsonParse *pParse, u32 iRoot){ pParse->delta += jsonBlobChangePayloadSize(pParse, iRoot, sz); } +/* +** If the JSONB at aIns[0..nIns-1] can be expanded (by denormalizing the +** size field) by d bytes, then write the expansion into aOut[] and +** return true. In this way, an overwrite happens without changing the +** size of the JSONB, which reduces memcpy() operations and also make it +** faster and easier to update the B-Tree entry that contains the JSONB +** in the database. +** +** If the expansion of aIns[] by d bytes cannot be (easily) accomplished +** then return false. +** +** The d parameter is guaranteed to be between 1 and 8. +** +** This routine is an optimization. A correct answer is obtained if it +** always leaves the output unchanged and returns false. 
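The comment above is the whole contract of jsonBlobOverwrite(), which follows: when the replacement element is between 1 and 8 bytes shorter than the slot it must occupy, the difference is absorbed by re-encoding the element's payload size in a deliberately non-minimal form, so nothing after the slot has to be shifted. An illustrative sketch of the two encodings involved, using the header layout documented earlier in this file (eType stands for any JSONB type nibble; the helper name is not from the patch):

#include <string.h>

/* Encode the same 5-byte payload two ways: minimally (6 bytes total)
** and with size selector 12, "size follows in one byte" (7 bytes
** total).  Both forms decode to the same element; the wider one lets a
** 7-byte slot be overwritten in place, absorbing a difference of d==1. */
static void encodeBothWays(unsigned char eType, const unsigned char *aPayload){
  unsigned char aMin[1+5];
  unsigned char aWide[2+5];
  aMin[0]  = (unsigned char)((5<<4)  | eType);   /* high nibble 5 = size 5 */
  memcpy(&aMin[1], aPayload, 5);
  aWide[0] = (unsigned char)((12<<4) | eType);   /* selector 12 (0xc-)     */
  aWide[1] = 5;                                  /* one-byte payload size  */
  memcpy(&aWide[2], aPayload, 5);
  (void)aMin; (void)aWide;                       /* sketch only            */
}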
+*/ +static int jsonBlobOverwrite( + u8 *aOut, /* Overwrite here */ + const u8 *aIns, /* New content */ + u32 nIns, /* Bytes of new content */ + u32 d /* Need to expand new content by this much */ +){ + u32 szPayload; /* Bytes of payload */ + u32 i; /* New header size, after expansion & a loop counter */ + u8 szHdr; /* Size of header before expansion */ + + /* Lookup table for finding the upper 4 bits of the first byte of the + ** expanded aIns[], based on the size of the expanded aIns[] header: + ** + ** 2 3 4 5 6 7 8 9 */ + static const u8 aType[] = { 0xc0, 0xd0, 0, 0xe0, 0, 0, 0, 0xf0 }; + + if( (aIns[0]&0x0f)<=2 ) return 0; /* Cannot enlarge NULL, true, false */ + switch( aIns[0]>>4 ){ + default: { /* aIns[] header size 1 */ + if( ((1<=2 && i<=9 && aType[i-2]!=0 ); + aOut[0] = (aIns[0] & 0x0f) | aType[i-2]; + memcpy(&aOut[i], &aIns[szHdr], nIns-szHdr); + szPayload = nIns - szHdr; + while( 1/*edit-by-break*/ ){ + i--; + aOut[i] = szPayload & 0xff; + if( i==1 ) break; + szPayload >>= 8; + } + assert( (szPayload>>8)==0 ); + return 1; +} + /* ** Modify the JSONB blob at pParse->aBlob by removing nDel bytes of ** content beginning at iDel, and replacing them with nIns bytes of @@ -209168,6 +210448,11 @@ static void jsonBlobEdit( u32 nIns /* Bytes of content to insert */ ){ i64 d = (i64)nIns - (i64)nDel; + if( d<0 && d>=(-8) && aIns!=0 + && jsonBlobOverwrite(&pParse->aBlob[iDel], aIns, nIns, (int)-d) + ){ + return; + } if( d!=0 ){ if( pParse->nBlob + d > pParse->nBlobAlloc ){ jsonBlobExpand(pParse, pParse->nBlob+d); @@ -209179,7 +210464,9 @@ static void jsonBlobEdit( pParse->nBlob += d; pParse->delta += d; } - if( nIns && aIns ) memcpy(&pParse->aBlob[iDel], aIns, nIns); + if( nIns && aIns ){ + memcpy(&pParse->aBlob[iDel], aIns, nIns); + } } /* @@ -209264,7 +210551,21 @@ static u32 jsonUnescapeOneChar(const char *z, u32 n, u32 *piOut){ case 'r': { *piOut = '\r'; return 2; } case 't': { *piOut = '\t'; return 2; } case 'v': { *piOut = '\v'; return 2; } - case '0': { *piOut = 0; return 2; } + case '0': { + /* JSON5 requires that the \0 escape not be followed by a digit. + ** But SQLite did not enforce this restriction in versions 3.42.0 + ** through 3.49.2. That was a bug. But some applications might have + ** come to depend on that bug. Use the SQLITE_BUG_COMPATIBLE_20250510 + ** option to restore the old buggy behavior. */ +#ifdef SQLITE_BUG_COMPATIBLE_20250510 + /* Legacy bug-compatible behavior */ + *piOut = 0; +#else + /* Correct behavior */ + *piOut = (n>2 && sqlite3Isdigit(z[2])) ? JSON_INVALID_CHAR : 0; +#endif + return 2; + } case '\'': case '"': case '/': @@ -209764,7 +211065,7 @@ static void jsonReturnFromBlob( char *zOut; u32 nOut = sz; z = (const char*)&pParse->aBlob[i+n]; - zOut = sqlite3DbMallocRaw(db, nOut+1); + zOut = sqlite3DbMallocRaw(db, ((u64)nOut)+1); if( zOut==0 ) goto returnfromblob_oom; for(iIn=iOut=0; iInaBlob = (u8*)sqlite3_value_blob(pArg); - pParse->nBlob = sqlite3_value_bytes(pArg); - }else{ + if( !jsonArgIsJsonb(pArg, pParse) ){ sqlite3_result_error(ctx, "JSON cannot hold BLOB values", -1); return 1; } @@ -209942,7 +211240,7 @@ static char *jsonBadPathError( } /* argv[0] is a BLOB that seems likely to be a JSONB. Subsequent -** arguments come in parse where each pair contains a JSON path and +** arguments come in pairs where each pair contains a JSON path and ** content to insert or set at that patch. Do the updates ** and return the result. 
** @@ -210013,27 +211311,46 @@ jsonInsertIntoBlob_patherror: /* ** If pArg is a blob that seems like a JSONB blob, then initialize ** p to point to that JSONB and return TRUE. If pArg does not seem like -** a JSONB blob, then return FALSE; +** a JSONB blob, then return FALSE. ** -** This routine is only called if it is already known that pArg is a -** blob. The only open question is whether or not the blob appears -** to be a JSONB blob. +** For small BLOBs (having no more than 7 bytes of payload) a full +** validity check is done. So for small BLOBs this routine only returns +** true if the value is guaranteed to be a valid JSONB. For larger BLOBs +** (8 byte or more of payload) only the size of the outermost element is +** checked to verify that the BLOB is superficially valid JSONB. +** +** A full JSONB validation is done on smaller BLOBs because those BLOBs might +** also be text JSON that has been incorrectly cast into a BLOB. +** (See tag-20240123-a and https://sqlite.org/forum/forumpost/012136abd5) +** If the BLOB is 9 bytes are larger, then it is not possible for the +** superficial size check done here to pass if the input is really text +** JSON so we do not need to look deeper in that case. +** +** Why we only need to do full JSONB validation for smaller BLOBs: +** +** The first byte of valid JSON text must be one of: '{', '[', '"', ' ', '\n', +** '\r', '\t', '-', or a digit '0' through '9'. Of these, only a subset +** can also be the first byte of JSONB: '{', '[', and digits '3' +** through '9'. In every one of those cases, the payload size is 7 bytes +** or less. So if we do full JSONB validation for every BLOB where the +** payload is less than 7 bytes, we will never get a false positive for +** JSONB on an input that is really text JSON. */ static int jsonArgIsJsonb(sqlite3_value *pArg, JsonParse *p){ u32 n, sz = 0; + u8 c; + if( sqlite3_value_type(pArg)!=SQLITE_BLOB ) return 0; p->aBlob = (u8*)sqlite3_value_blob(pArg); p->nBlob = (u32)sqlite3_value_bytes(pArg); - if( p->nBlob==0 ){ - p->aBlob = 0; - return 0; - } - if( NEVER(p->aBlob==0) ){ - return 0; - } - if( (p->aBlob[0] & 0x0f)<=JSONB_OBJECT + if( p->nBlob>0 + && ALWAYS(p->aBlob!=0) + && ((c = p->aBlob[0]) & 0x0f)<=JSONB_OBJECT && (n = jsonbPayloadSize(p, 0, &sz))>0 && sz+n==p->nBlob - && ((p->aBlob[0] & 0x0f)>JSONB_FALSE || sz==0) + && ((c & 0x0f)>JSONB_FALSE || sz==0) + && (sz>7 + || (c!=0x7b && c!=0x5b && !sqlite3Isdigit(c)) + || jsonbValidityCheck(p, 0, p->nBlob, 1)==0) ){ return 1; } @@ -210111,7 +211428,7 @@ rebuild_from_cache: ** JSON functions were suppose to work. From the beginning, blob was ** reserved for expansion and a blob value should have raised an error. ** But it did not, due to a bug. And many applications came to depend - ** upon this buggy behavior, espeically when using the CLI and reading + ** upon this buggy behavior, especially when using the CLI and reading ** JSON text using readfile(), which returns a blob. For this reason ** we will continue to support the bug moving forward. ** See for example https://sqlite.org/forum/forumpost/012136abd5292b8d @@ -211126,21 +212443,17 @@ static void jsonValidFunc( return; } case SQLITE_BLOB: { - if( jsonFuncArgMightBeBinary(argv[0]) ){ + JsonParse py; + memset(&py, 0, sizeof(py)); + if( jsonArgIsJsonb(argv[0], &py) ){ if( flags & 0x04 ){ /* Superficial checking only - accomplished by the - ** jsonFuncArgMightBeBinary() call above. */ + ** jsonArgIsJsonb() call above. */ res = 1; }else if( flags & 0x08 ){ /* Strict checking. 
Check by translating BLOB->TEXT->BLOB. If ** no errors occur, call that a "strict check". */ - JsonParse px; - u32 iErr; - memset(&px, 0, sizeof(px)); - px.aBlob = (u8*)sqlite3_value_blob(argv[0]); - px.nBlob = sqlite3_value_bytes(argv[0]); - iErr = jsonbValidityCheck(&px, 0, px.nBlob, 1); - res = iErr==0; + res = 0==jsonbValidityCheck(&py, 0, py.nBlob, 1); } break; } @@ -211198,9 +212511,7 @@ static void jsonErrorFunc( UNUSED_PARAMETER(argc); memset(&s, 0, sizeof(s)); s.db = sqlite3_context_db_handle(ctx); - if( jsonFuncArgMightBeBinary(argv[0]) ){ - s.aBlob = (u8*)sqlite3_value_blob(argv[0]); - s.nBlob = sqlite3_value_bytes(argv[0]); + if( jsonArgIsJsonb(argv[0], &s) ){ iErrPos = (i64)jsonbValidityCheck(&s, 0, s.nBlob, 1); }else{ s.zJson = (char*)sqlite3_value_text(argv[0]); @@ -211361,18 +212672,20 @@ static void jsonObjectStep( UNUSED_PARAMETER(argc); pStr = (JsonString*)sqlite3_aggregate_context(ctx, sizeof(*pStr)); if( pStr ){ + z = (const char*)sqlite3_value_text(argv[0]); + n = sqlite3Strlen30(z); if( pStr->zBuf==0 ){ jsonStringInit(pStr, ctx); jsonAppendChar(pStr, '{'); - }else if( pStr->nUsed>1 ){ + }else if( pStr->nUsed>1 && z!=0 ){ jsonAppendChar(pStr, ','); } pStr->pCtx = ctx; - z = (const char*)sqlite3_value_text(argv[0]); - n = sqlite3Strlen30(z); - jsonAppendString(pStr, z, n); - jsonAppendChar(pStr, ':'); - jsonAppendSqlValue(pStr, argv[1]); + if( z!=0 ){ + jsonAppendString(pStr, z, n); + jsonAppendChar(pStr, ':'); + jsonAppendSqlValue(pStr, argv[1]); + } } } static void jsonObjectCompute(sqlite3_context *ctx, int isFinal){ @@ -211885,9 +213198,8 @@ static int jsonEachFilter( memset(&p->sParse, 0, sizeof(p->sParse)); p->sParse.nJPRef = 1; p->sParse.db = p->db; - if( jsonFuncArgMightBeBinary(argv[0]) ){ - p->sParse.nBlob = sqlite3_value_bytes(argv[0]); - p->sParse.aBlob = (u8*)sqlite3_value_blob(argv[0]); + if( jsonArgIsJsonb(argv[0], &p->sParse) ){ + /* We have JSONB */ }else{ p->sParse.zJson = (char*)sqlite3_value_text(argv[0]); p->sParse.nJson = sqlite3_value_bytes(argv[0]); @@ -212181,6 +213493,8 @@ SQLITE_PRIVATE int sqlite3JsonTableFunctions(sqlite3 *db){ #endif SQLITE_PRIVATE int sqlite3GetToken(const unsigned char*,int*); /* In the SQLite core */ +/* #include */ + /* ** If building separately, we will need some setup that is normally ** found in sqliteInt.h @@ -212211,6 +213525,14 @@ typedef unsigned int u32; # define ALWAYS(X) (X) # define NEVER(X) (X) #endif +#ifndef offsetof +#define offsetof(STRUCTURE,FIELD) ((size_t)((char*)&((STRUCTURE*)0)->FIELD)) +#endif +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) +# define FLEXARRAY +#else +# define FLEXARRAY 1 +#endif #endif /* !defined(SQLITE_AMALGAMATION) */ /* Macro to check for 4-byte alignment. Only used inside of assert() */ @@ -212531,9 +213853,13 @@ struct RtreeMatchArg { RtreeGeomCallback cb; /* Info about the callback functions */ int nParam; /* Number of parameters to the SQL function */ sqlite3_value **apSqlParam; /* Original SQL parameter values */ - RtreeDValue aParam[1]; /* Values for parameters to the SQL function */ + RtreeDValue aParam[FLEXARRAY]; /* Values for parameters to the SQL function */ }; +/* Size of an RtreeMatchArg object with N parameters */ +#define SZ_RTREEMATCHARG(N) \ + (offsetof(RtreeMatchArg,aParam)+(N)*sizeof(RtreeDValue)) + #ifndef MAX # define MAX(x,y) ((x) < (y) ? (y) : (x)) #endif @@ -214222,7 +215548,7 @@ static int rtreeBestIndex(sqlite3_vtab *tab, sqlite3_index_info *pIdxInfo){ } /* -** Return the N-dimensional volumn of the cell stored in *p. 
+** Return the N-dimensional volume of the cell stored in *p. */ static RtreeDValue cellArea(Rtree *pRtree, RtreeCell *p){ RtreeDValue area = (RtreeDValue)1; @@ -215988,7 +217314,7 @@ static sqlite3_stmt *rtreeCheckPrepare( /* ** The second and subsequent arguments to this function are a printf() ** style format string and arguments. This function formats the string and -** appends it to the report being accumuated in pCheck. +** appends it to the report being accumulated in pCheck. */ static void rtreeCheckAppendMsg(RtreeCheck *pCheck, const char *zFmt, ...){ va_list ap; @@ -217176,7 +218502,7 @@ static void geopolyBBoxFinal( ** Determine if point (x0,y0) is beneath line segment (x1,y1)->(x2,y2). ** Returns: ** -** +2 x0,y0 is on the line segement +** +2 x0,y0 is on the line segment ** ** +1 x0,y0 is beneath line segment ** @@ -217282,7 +218608,7 @@ static void geopolyWithinFunc( sqlite3_free(p2); } -/* Objects used by the overlap algorihm. */ +/* Objects used by the overlap algorithm. */ typedef struct GeoEvent GeoEvent; typedef struct GeoSegment GeoSegment; typedef struct GeoOverlap GeoOverlap; @@ -218329,8 +219655,7 @@ static void geomCallback(sqlite3_context *ctx, int nArg, sqlite3_value **aArg){ sqlite3_int64 nBlob; int memErr = 0; - nBlob = sizeof(RtreeMatchArg) + (nArg-1)*sizeof(RtreeDValue) - + nArg*sizeof(sqlite3_value*); + nBlob = SZ_RTREEMATCHARG(nArg) + nArg*sizeof(sqlite3_value*); pBlob = (RtreeMatchArg *)sqlite3_malloc64(nBlob); if( !pBlob ){ sqlite3_result_error_nomem(ctx); @@ -219425,7 +220750,7 @@ SQLITE_PRIVATE void sqlite3Fts3IcuTokenizerModule( ** ** "RBU" stands for "Resumable Bulk Update". As in a large database update ** transmitted via a wireless network to a mobile device. A transaction -** applied using this extension is hence refered to as an "RBU update". +** applied using this extension is hence referred to as an "RBU update". ** ** ** LIMITATIONS @@ -219722,7 +221047,7 @@ SQLITE_API sqlite3rbu *sqlite3rbu_open( ** the next call to sqlite3rbu_vacuum() opens a handle that starts a ** new RBU vacuum operation. ** -** As with sqlite3rbu_open(), Zipvfs users should rever to the comment +** As with sqlite3rbu_open(), Zipvfs users should refer to the comment ** describing the sqlite3rbu_create_vfs() API function below for ** a description of the complications associated with using RBU with ** zipvfs databases. @@ -219818,7 +221143,7 @@ SQLITE_API int sqlite3rbu_savestate(sqlite3rbu *pRbu); ** ** If the RBU update has been completely applied, mark the RBU database ** as fully applied. Otherwise, assuming no error has occurred, save the -** current state of the RBU update appliation to the RBU database. +** current state of the RBU update application to the RBU database. ** ** If an error has already occurred as part of an sqlite3rbu_step() ** or sqlite3rbu_open() call, or if one occurs within this function, an @@ -224744,7 +226069,7 @@ static int rbuVfsFileSize(sqlite3_file *pFile, sqlite_int64 *pSize){ /* If this is an RBU vacuum operation and this is the target database, ** pretend that it has at least one page. Otherwise, SQLite will not - ** check for the existance of a *-wal file. rbuVfsRead() contains + ** check for the existence of a *-wal file. rbuVfsRead() contains ** similar logic. */ if( rc==SQLITE_OK && *pSize==0 && p->pRbu && rbuIsVacuum(p->pRbu) @@ -226676,8 +228001,8 @@ static int dbpageUpdate( /* "INSERT INTO dbpage($PGNO,NULL)" causes page number $PGNO and ** all subsequent pages to be deleted. 
*/ pTab->iDbTrunc = iDb; - pgno--; - pTab->pgnoTrunc = pgno; + pTab->pgnoTrunc = pgno-1; + pgno = 1; }else{ zErr = "bad page value"; goto update_fail; @@ -227974,7 +229299,7 @@ static int sessionTableInfo( /* ** This function is called to initialize the SessionTable.nCol, azCol[] ** abPK[] and azDflt[] members of SessionTable object pTab. If these -** fields are already initilialized, this function is a no-op. +** fields are already initialized, this function is a no-op. ** ** If an error occurs, an error code is stored in sqlite3_session.rc and ** non-zero returned. Or, if no error occurs but the table has no primary @@ -227993,6 +229318,8 @@ static int sessionInitTable( if( pTab->nCol==0 ){ u8 *abPK; assert( pTab->azCol==0 || pTab->abPK==0 ); + sqlite3_free(pTab->azCol); + pTab->abPK = 0; rc = sessionTableInfo(pSession, db, zDb, pTab->zName, &pTab->nCol, &pTab->nTotalCol, 0, &pTab->azCol, &pTab->azDflt, &pTab->aiIdx, &abPK, @@ -229000,7 +230327,9 @@ SQLITE_API int sqlite3session_diff( SessionTable *pTo; /* Table zTbl */ /* Locate and if necessary initialize the target table object */ + pSession->bAutoAttach++; rc = sessionFindTable(pSession, zTbl, &pTo); + pSession->bAutoAttach--; if( pTo==0 ) goto diff_out; if( sessionInitTable(pSession, pTo, pSession->db, pSession->zDb) ){ rc = pSession->rc; @@ -229011,17 +230340,43 @@ SQLITE_API int sqlite3session_diff( if( rc==SQLITE_OK ){ int bHasPk = 0; int bMismatch = 0; - int nCol; /* Columns in zFrom.zTbl */ + int nCol = 0; /* Columns in zFrom.zTbl */ int bRowid = 0; - u8 *abPK; + u8 *abPK = 0; const char **azCol = 0; - rc = sessionTableInfo(0, db, zFrom, zTbl, - &nCol, 0, 0, &azCol, 0, 0, &abPK, - pSession->bImplicitPK ? &bRowid : 0 - ); + char *zDbExists = 0; + + /* Check that database zFrom is attached. */ + zDbExists = sqlite3_mprintf("SELECT * FROM %Q.sqlite_schema", zFrom); + if( zDbExists==0 ){ + rc = SQLITE_NOMEM; + }else{ + sqlite3_stmt *pDbExists = 0; + rc = sqlite3_prepare_v2(db, zDbExists, -1, &pDbExists, 0); + if( rc==SQLITE_ERROR ){ + rc = SQLITE_OK; + nCol = -1; + } + sqlite3_finalize(pDbExists); + sqlite3_free(zDbExists); + } + + if( rc==SQLITE_OK && nCol==0 ){ + rc = sessionTableInfo(0, db, zFrom, zTbl, + &nCol, 0, 0, &azCol, 0, 0, &abPK, + pSession->bImplicitPK ? &bRowid : 0 + ); + } if( rc==SQLITE_OK ){ if( pTo->nCol!=nCol ){ - bMismatch = 1; + if( nCol<=0 ){ + rc = SQLITE_SCHEMA; + if( pzErrMsg ){ + *pzErrMsg = sqlite3_mprintf("no such table: %s.%s", zFrom, zTbl); + } + }else{ + bMismatch = 1; + } }else{ int i; for(i=0; idb; /* Source database handle */ SessionTable *pTab; /* Used to iterate through attached tables */ - SessionBuffer buf = {0,0,0}; /* Buffer in which to accumlate changeset */ + SessionBuffer buf = {0,0,0}; /* Buffer in which to accumulate changeset */ int rc; /* Return code */ assert( xOutput==0 || (pnChangeset==0 && ppChangeset==0) ); @@ -230150,14 +231505,15 @@ SQLITE_API int sqlite3changeset_start_v2_strm( ** object and the buffer is full, discard some data to free up space. 
*/ static void sessionDiscardData(SessionInput *pIn){ - if( pIn->xInput && pIn->iNext>=sessions_strm_chunk_size ){ - int nMove = pIn->buf.nBuf - pIn->iNext; + if( pIn->xInput && pIn->iCurrent>=sessions_strm_chunk_size ){ + int nMove = pIn->buf.nBuf - pIn->iCurrent; assert( nMove>=0 ); if( nMove>0 ){ - memmove(pIn->buf.aBuf, &pIn->buf.aBuf[pIn->iNext], nMove); + memmove(pIn->buf.aBuf, &pIn->buf.aBuf[pIn->iCurrent], nMove); } - pIn->buf.nBuf -= pIn->iNext; - pIn->iNext = 0; + pIn->buf.nBuf -= pIn->iCurrent; + pIn->iNext -= pIn->iCurrent; + pIn->iCurrent = 0; pIn->nData = pIn->buf.nBuf; } } @@ -230511,8 +231867,8 @@ static int sessionChangesetNextOne( p->rc = sessionInputBuffer(&p->in, 2); if( p->rc!=SQLITE_OK ) return p->rc; - sessionDiscardData(&p->in); p->in.iCurrent = p->in.iNext; + sessionDiscardData(&p->in); /* If the iterator is already at the end of the changeset, return DONE. */ if( p->in.iNext>=p->in.nData ){ @@ -232871,14 +234227,19 @@ SQLITE_API int sqlite3changegroup_add_change( sqlite3_changegroup *pGrp, sqlite3_changeset_iter *pIter ){ + int rc = SQLITE_OK; + if( pIter->in.iCurrent==pIter->in.iNext || pIter->rc!=SQLITE_OK || pIter->bInvert ){ /* Iterator does not point to any valid entry or is an INVERT iterator. */ - return SQLITE_ERROR; + rc = SQLITE_ERROR; + }else{ + pIter->in.bNoDiscard = 1; + rc = sessionOneChangeToHash(pGrp, pIter, 0); } - return sessionOneChangeToHash(pGrp, pIter, 0); + return rc; } /* @@ -234176,6 +235537,7 @@ SQLITE_EXTENSION_INIT1 /* #include */ /* #include */ +/* #include */ #ifndef SQLITE_AMALGAMATION @@ -234231,6 +235593,18 @@ typedef sqlite3_uint64 u64; # define EIGHT_BYTE_ALIGNMENT(X) ((((uptr)(X) - (uptr)0)&7)==0) #endif +/* +** Macros needed to provide flexible arrays in a portable way +*/ +#ifndef offsetof +# define offsetof(STRUCTURE,FIELD) ((size_t)((char*)&((STRUCTURE*)0)->FIELD)) +#endif +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) +# define FLEXARRAY +#else +# define FLEXARRAY 1 +#endif + #endif /* Truncate very long tokens to this many bytes. Hard limit is @@ -234303,10 +235677,11 @@ typedef struct Fts5Colset Fts5Colset; */ struct Fts5Colset { int nCol; - int aiCol[1]; + int aiCol[FLEXARRAY]; }; - +/* Size (int bytes) of a complete Fts5Colset object with N columns. */ +#define SZ_FTS5COLSET(N) (sizeof(i64)*((N+2)/2)) /************************************************************************** ** Interface to code in fts5_config.c. fts5_config.c contains contains code @@ -235135,7 +236510,7 @@ static void sqlite3Fts5UnicodeAscii(u8*, u8*); ** ** The "lemon" program processes an LALR(1) input grammar file, then uses ** this template to construct a parser. The "lemon" program inserts text -** at each "%%" line. Also, any "P-a-r-s-e" identifer prefix (without the +** at each "%%" line. Also, any "P-a-r-s-e" identifier prefix (without the ** interstitial "-" characters) contained in this template is changed into ** the value of the %name directive from the grammar. Otherwise, the content ** of this template is copied straight through into the generate parser @@ -237289,7 +238664,7 @@ static int fts5Bm25GetData( ** under consideration. ** ** The problem with this is that if (N < 2*nHit), the IDF is - ** negative. Which is undesirable. So the mimimum allowable IDF is + ** negative. Which is undesirable. So the minimum allowable IDF is ** (1e-6) - roughly the same as a term that appears in just over ** half of set of 5,000,000 documents. 
*/ double idf = log( (nRow - nHit + 0.5) / (nHit + 0.5) ); @@ -237752,7 +239127,7 @@ static char *sqlite3Fts5Strndup(int *pRc, const char *pIn, int nIn){ ** * The 52 upper and lower case ASCII characters, and ** * The 10 integer ASCII characters. ** * The underscore character "_" (0x5F). -** * The unicode "subsitute" character (0x1A). +** * The unicode "substitute" character (0x1A). */ static int sqlite3Fts5IsBareword(char t){ u8 aBareword[128] = { @@ -239070,9 +240445,13 @@ struct Fts5ExprNode { /* Child nodes. For a NOT node, this array always contains 2 entries. For ** AND or OR nodes, it contains 2 or more entries. */ int nChild; /* Number of child nodes */ - Fts5ExprNode *apChild[1]; /* Array of child nodes */ + Fts5ExprNode *apChild[FLEXARRAY]; /* Array of child nodes */ }; +/* Size (in bytes) of an Fts5ExprNode object that holds up to N children */ +#define SZ_FTS5EXPRNODE(N) \ + (offsetof(Fts5ExprNode,apChild) + (N)*sizeof(Fts5ExprNode*)) + #define Fts5NodeIsString(p) ((p)->eType==FTS5_TERM || (p)->eType==FTS5_STRING) /* @@ -239103,9 +240482,13 @@ struct Fts5ExprPhrase { Fts5ExprNode *pNode; /* FTS5_STRING node this phrase is part of */ Fts5Buffer poslist; /* Current position list */ int nTerm; /* Number of entries in aTerm[] */ - Fts5ExprTerm aTerm[1]; /* Terms that make up this phrase */ + Fts5ExprTerm aTerm[FLEXARRAY]; /* Terms that make up this phrase */ }; +/* Size (in bytes) of an Fts5ExprPhrase object that holds up to N terms */ +#define SZ_FTS5EXPRPHRASE(N) \ + (offsetof(Fts5ExprPhrase,aTerm) + (N)*sizeof(Fts5ExprTerm)) + /* ** One or more phrases that must appear within a certain token distance of ** each other within each matching document. @@ -239114,9 +240497,12 @@ struct Fts5ExprNearset { int nNear; /* NEAR parameter */ Fts5Colset *pColset; /* Columns to search (NULL -> all columns) */ int nPhrase; /* Number of entries in aPhrase[] array */ - Fts5ExprPhrase *apPhrase[1]; /* Array of phrase pointers */ + Fts5ExprPhrase *apPhrase[FLEXARRAY]; /* Array of phrase pointers */ }; +/* Size (in bytes) of an Fts5ExprNearset object covering up to N phrases */ +#define SZ_FTS5EXPRNEARSET(N) \ + (offsetof(Fts5ExprNearset,apPhrase)+(N)*sizeof(Fts5ExprPhrase*)) /* ** Parse context. @@ -239276,7 +240662,7 @@ static int sqlite3Fts5ExprNew( /* If the LHS of the MATCH expression was a user column, apply the ** implicit column-filter. */ if( sParse.rc==SQLITE_OK && iColnCol ){ - int n = sizeof(Fts5Colset); + int n = SZ_FTS5COLSET(1); Fts5Colset *pColset = (Fts5Colset*)sqlite3Fts5MallocZero(&sParse.rc, n); if( pColset ){ pColset->nCol = 1; @@ -240634,7 +242020,7 @@ static Fts5ExprNearset *sqlite3Fts5ParseNearset( if( pParse->rc==SQLITE_OK ){ if( pNear==0 ){ sqlite3_int64 nByte; - nByte = sizeof(Fts5ExprNearset) + SZALLOC * sizeof(Fts5ExprPhrase*); + nByte = SZ_FTS5EXPRNEARSET(SZALLOC+1); pRet = sqlite3_malloc64(nByte); if( pRet==0 ){ pParse->rc = SQLITE_NOMEM; @@ -240645,7 +242031,7 @@ static Fts5ExprNearset *sqlite3Fts5ParseNearset( int nNew = pNear->nPhrase + SZALLOC; sqlite3_int64 nByte; - nByte = sizeof(Fts5ExprNearset) + nNew * sizeof(Fts5ExprPhrase*); + nByte = SZ_FTS5EXPRNEARSET(nNew+1); pRet = (Fts5ExprNearset*)sqlite3_realloc64(pNear, nByte); if( pRet==0 ){ pParse->rc = SQLITE_NOMEM; @@ -240736,12 +242122,12 @@ static int fts5ParseTokenize( int nNew = SZALLOC + (pPhrase ? 
pPhrase->nTerm : 0); pNew = (Fts5ExprPhrase*)sqlite3_realloc64(pPhrase, - sizeof(Fts5ExprPhrase) + sizeof(Fts5ExprTerm) * nNew + SZ_FTS5EXPRPHRASE(nNew+1) ); if( pNew==0 ){ rc = SQLITE_NOMEM; }else{ - if( pPhrase==0 ) memset(pNew, 0, sizeof(Fts5ExprPhrase)); + if( pPhrase==0 ) memset(pNew, 0, SZ_FTS5EXPRPHRASE(1)); pCtx->pPhrase = pPhrase = pNew; pNew->nTerm = nNew - SZALLOC; } @@ -240849,7 +242235,7 @@ static Fts5ExprPhrase *sqlite3Fts5ParseTerm( if( sCtx.pPhrase==0 ){ /* This happens when parsing a token or quoted phrase that contains ** no token characters at all. (e.g ... MATCH '""'). */ - sCtx.pPhrase = sqlite3Fts5MallocZero(&pParse->rc, sizeof(Fts5ExprPhrase)); + sCtx.pPhrase = sqlite3Fts5MallocZero(&pParse->rc, SZ_FTS5EXPRPHRASE(1)); }else if( sCtx.pPhrase->nTerm ){ sCtx.pPhrase->aTerm[sCtx.pPhrase->nTerm-1].bPrefix = (u8)bPrefix; } @@ -240884,19 +242270,18 @@ static int sqlite3Fts5ExprClonePhrase( sizeof(Fts5ExprPhrase*)); } if( rc==SQLITE_OK ){ - pNew->pRoot = (Fts5ExprNode*)sqlite3Fts5MallocZero(&rc, - sizeof(Fts5ExprNode)); + pNew->pRoot = (Fts5ExprNode*)sqlite3Fts5MallocZero(&rc, SZ_FTS5EXPRNODE(1)); } if( rc==SQLITE_OK ){ pNew->pRoot->pNear = (Fts5ExprNearset*)sqlite3Fts5MallocZero(&rc, - sizeof(Fts5ExprNearset) + sizeof(Fts5ExprPhrase*)); + SZ_FTS5EXPRNEARSET(2)); } if( rc==SQLITE_OK && ALWAYS(pOrig!=0) ){ Fts5Colset *pColsetOrig = pOrig->pNode->pNear->pColset; if( pColsetOrig ){ sqlite3_int64 nByte; Fts5Colset *pColset; - nByte = sizeof(Fts5Colset) + (pColsetOrig->nCol-1) * sizeof(int); + nByte = SZ_FTS5COLSET(pColsetOrig->nCol); pColset = (Fts5Colset*)sqlite3Fts5MallocZero(&rc, nByte); if( pColset ){ memcpy(pColset, pColsetOrig, (size_t)nByte); @@ -240924,7 +242309,7 @@ static int sqlite3Fts5ExprClonePhrase( }else{ /* This happens when parsing a token or quoted phrase that contains ** no token characters at all. (e.g ... MATCH '""'). 
*/ - sCtx.pPhrase = sqlite3Fts5MallocZero(&rc, sizeof(Fts5ExprPhrase)); + sCtx.pPhrase = sqlite3Fts5MallocZero(&rc, SZ_FTS5EXPRPHRASE(1)); } } @@ -240989,7 +242374,8 @@ static void sqlite3Fts5ParseSetDistance( ); return; } - nNear = nNear * 10 + (p->p[i] - '0'); + if( nNear<214748363 ) nNear = nNear * 10 + (p->p[i] - '0'); + /* ^^^^^^^^^^^^^^^--- Prevent integer overflow */ } }else{ nNear = FTS5_DEFAULT_NEARDIST; @@ -241018,7 +242404,7 @@ static Fts5Colset *fts5ParseColset( assert( pParse->rc==SQLITE_OK ); assert( iCol>=0 && iColpConfig->nCol ); - pNew = sqlite3_realloc64(p, sizeof(Fts5Colset) + sizeof(int)*nCol); + pNew = sqlite3_realloc64(p, SZ_FTS5COLSET(nCol+1)); if( pNew==0 ){ pParse->rc = SQLITE_NOMEM; }else{ @@ -241053,7 +242439,7 @@ static Fts5Colset *sqlite3Fts5ParseColsetInvert(Fts5Parse *pParse, Fts5Colset *p int nCol = pParse->pConfig->nCol; pRet = (Fts5Colset*)sqlite3Fts5MallocZero(&pParse->rc, - sizeof(Fts5Colset) + sizeof(int)*nCol + SZ_FTS5COLSET(nCol+1) ); if( pRet ){ int i; @@ -241114,7 +242500,7 @@ static Fts5Colset *sqlite3Fts5ParseColset( static Fts5Colset *fts5CloneColset(int *pRc, Fts5Colset *pOrig){ Fts5Colset *pRet; if( pOrig ){ - sqlite3_int64 nByte = sizeof(Fts5Colset) + (pOrig->nCol-1) * sizeof(int); + sqlite3_int64 nByte = SZ_FTS5COLSET(pOrig->nCol); pRet = (Fts5Colset*)sqlite3Fts5MallocZero(pRc, nByte); if( pRet ){ memcpy(pRet, pOrig, (size_t)nByte); @@ -241282,7 +242668,7 @@ static Fts5ExprNode *fts5ParsePhraseToAnd( assert( pNear->nPhrase==1 ); assert( pParse->bPhraseToAnd ); - nByte = sizeof(Fts5ExprNode) + nTerm*sizeof(Fts5ExprNode*); + nByte = SZ_FTS5EXPRNODE(nTerm+1); pRet = (Fts5ExprNode*)sqlite3Fts5MallocZero(&pParse->rc, nByte); if( pRet ){ pRet->eType = FTS5_AND; @@ -241292,7 +242678,7 @@ static Fts5ExprNode *fts5ParsePhraseToAnd( pParse->nPhrase--; for(ii=0; iirc, sizeof(Fts5ExprPhrase) + &pParse->rc, SZ_FTS5EXPRPHRASE(1) ); if( pPhrase ){ if( parseGrowPhraseArray(pParse) ){ @@ -241361,7 +242747,7 @@ static Fts5ExprNode *sqlite3Fts5ParseNode( if( pRight->eType==eType ) nChild += pRight->nChild-1; } - nByte = sizeof(Fts5ExprNode) + sizeof(Fts5ExprNode*)*(nChild-1); + nByte = SZ_FTS5EXPRNODE(nChild); pRet = (Fts5ExprNode*)sqlite3Fts5MallocZero(&pParse->rc, nByte); if( pRet ){ @@ -242236,7 +243622,7 @@ static int sqlite3Fts5ExprInstToken( } /* -** Clear the token mappings for all Fts5IndexIter objects mannaged by +** Clear the token mappings for all Fts5IndexIter objects managed by ** the expression passed as the only argument. */ static void sqlite3Fts5ExprClearTokens(Fts5Expr *pExpr){ @@ -242271,7 +243657,7 @@ typedef struct Fts5HashEntry Fts5HashEntry; /* ** This file contains the implementation of an in-memory hash table used -** to accumuluate "term -> doclist" content before it is flused to a level-0 +** to accumulate "term -> doclist" content before it is flushed to a level-0 ** segment. 
*/ @@ -242328,7 +243714,7 @@ struct Fts5HashEntry { }; /* -** Eqivalent to: +** Equivalent to: ** ** char *fts5EntryKey(Fts5HashEntry *pEntry){ return zKey; } */ @@ -243264,9 +244650,13 @@ struct Fts5Structure { u64 nOriginCntr; /* Origin value for next top-level segment */ int nSegment; /* Total segments in this structure */ int nLevel; /* Number of levels in this index */ - Fts5StructureLevel aLevel[1]; /* Array of nLevel level objects */ + Fts5StructureLevel aLevel[FLEXARRAY]; /* Array of nLevel level objects */ }; +/* Size (in bytes) of an Fts5Structure object holding up to N levels */ +#define SZ_FTS5STRUCTURE(N) \ + (offsetof(Fts5Structure,aLevel) + (N)*sizeof(Fts5StructureLevel)) + /* ** An object of type Fts5SegWriter is used to write to segments. */ @@ -243396,11 +244786,15 @@ struct Fts5SegIter { ** Array of tombstone pages. Reference counted. */ struct Fts5TombstoneArray { - int nRef; /* Number of pointers to this object */ + int nRef; /* Number of pointers to this object */ int nTombstone; - Fts5Data *apTombstone[1]; /* Array of tombstone pages */ + Fts5Data *apTombstone[FLEXARRAY]; /* Array of tombstone pages */ }; +/* Size (in bytes) of an Fts5TombstoneArray holding up to N tombstones */ +#define SZ_FTS5TOMBSTONEARRAY(N) \ + (offsetof(Fts5TombstoneArray,apTombstone)+(N)*sizeof(Fts5Data*)) + /* ** Argument is a pointer to an Fts5Data structure that contains a ** leaf page. @@ -243469,9 +244863,12 @@ struct Fts5Iter { i64 iSwitchRowid; /* Firstest rowid of other than aFirst[1] */ Fts5CResult *aFirst; /* Current merge state (see above) */ - Fts5SegIter aSeg[1]; /* Array of segment iterators */ + Fts5SegIter aSeg[FLEXARRAY]; /* Array of segment iterators */ }; +/* Size (in bytes) of an Fts5Iter object holding up to N segment iterators */ +#define SZ_FTS5ITER(N) (offsetof(Fts5Iter,aSeg)+(N)*sizeof(Fts5SegIter)) + /* ** An instance of the following type is used to iterate through the contents ** of a doclist-index record. 
@@ -243498,9 +244895,13 @@ struct Fts5DlidxLvl { struct Fts5DlidxIter { int nLvl; int iSegid; - Fts5DlidxLvl aLvl[1]; + Fts5DlidxLvl aLvl[FLEXARRAY]; }; +/* Size (in bytes) of an Fts5DlidxIter object with up to N levels */ +#define SZ_FTS5DLIDXITER(N) \ + (offsetof(Fts5DlidxIter,aLvl)+(N)*sizeof(Fts5DlidxLvl)) + static void fts5PutU16(u8 *aOut, u16 iVal){ aOut[0] = (iVal>>8); aOut[1] = (iVal&0xFF); @@ -243868,7 +245269,7 @@ static int sqlite3Fts5StructureTest(Fts5Index *p, void *pStruct){ static void fts5StructureMakeWritable(int *pRc, Fts5Structure **pp){ Fts5Structure *p = *pp; if( *pRc==SQLITE_OK && p->nRef>1 ){ - i64 nByte = sizeof(Fts5Structure)+(p->nLevel-1)*sizeof(Fts5StructureLevel); + i64 nByte = SZ_FTS5STRUCTURE(p->nLevel); Fts5Structure *pNew; pNew = (Fts5Structure*)sqlite3Fts5MallocZero(pRc, nByte); if( pNew ){ @@ -243942,10 +245343,7 @@ static int fts5StructureDecode( ){ return FTS5_CORRUPT; } - nByte = ( - sizeof(Fts5Structure) + /* Main structure */ - sizeof(Fts5StructureLevel) * (nLevel-1) /* aLevel[] array */ - ); + nByte = SZ_FTS5STRUCTURE(nLevel); pRet = (Fts5Structure*)sqlite3Fts5MallocZero(&rc, nByte); if( pRet ){ @@ -244025,10 +245423,7 @@ static void fts5StructureAddLevel(int *pRc, Fts5Structure **ppStruct){ if( *pRc==SQLITE_OK ){ Fts5Structure *pStruct = *ppStruct; int nLevel = pStruct->nLevel; - sqlite3_int64 nByte = ( - sizeof(Fts5Structure) + /* Main structure */ - sizeof(Fts5StructureLevel) * (nLevel+1) /* aLevel[] array */ - ); + sqlite3_int64 nByte = SZ_FTS5STRUCTURE(nLevel+2); pStruct = sqlite3_realloc64(pStruct, nByte); if( pStruct ){ @@ -244567,7 +245962,7 @@ static Fts5DlidxIter *fts5DlidxIterInit( int bDone = 0; for(i=0; p->rc==SQLITE_OK && bDone==0; i++){ - sqlite3_int64 nByte = sizeof(Fts5DlidxIter) + i * sizeof(Fts5DlidxLvl); + sqlite3_int64 nByte = SZ_FTS5DLIDXITER(i+1); Fts5DlidxIter *pNew; pNew = (Fts5DlidxIter*)sqlite3_realloc64(pIter, nByte); @@ -244783,9 +246178,9 @@ static void fts5SegIterSetNext(Fts5Index *p, Fts5SegIter *pIter){ ** leave an error in the Fts5Index object. 
*/ static void fts5SegIterAllocTombstone(Fts5Index *p, Fts5SegIter *pIter){ - const int nTomb = pIter->pSeg->nPgTombstone; + const i64 nTomb = (i64)pIter->pSeg->nPgTombstone; if( nTomb>0 ){ - int nByte = nTomb * sizeof(Fts5Data*) + sizeof(Fts5TombstoneArray); + i64 nByte = SZ_FTS5TOMBSTONEARRAY(nTomb+1); Fts5TombstoneArray *pNew; pNew = (Fts5TombstoneArray*)sqlite3Fts5MallocZero(&p->rc, nByte); if( pNew ){ @@ -246246,8 +247641,7 @@ static Fts5Iter *fts5MultiIterAlloc( for(nSlot=2; nSlotaSeg[] */ + SZ_FTS5ITER(nSlot) + /* pNew + pNew->aSeg[] */ sizeof(Fts5CResult) * nSlot /* pNew->aFirst[] */ ); if( pNew ){ @@ -248048,7 +249442,7 @@ static void fts5DoSecureDelete( int iDelKeyOff = 0; /* Offset of deleted key, if any */ nIdx = nPg-iPgIdx; - aIdx = sqlite3Fts5MallocZero(&p->rc, nIdx+16); + aIdx = sqlite3Fts5MallocZero(&p->rc, ((i64)nIdx)+16); if( p->rc ) return; memcpy(aIdx, &aPg[iPgIdx], nIdx); @@ -248613,7 +250007,7 @@ static Fts5Structure *fts5IndexOptimizeStruct( Fts5Structure *pStruct ){ Fts5Structure *pNew = 0; - sqlite3_int64 nByte = sizeof(Fts5Structure); + sqlite3_int64 nByte = SZ_FTS5STRUCTURE(1); int nSeg = pStruct->nSegment; int i; @@ -248642,7 +250036,8 @@ static Fts5Structure *fts5IndexOptimizeStruct( assert( pStruct->aLevel[i].nMerge<=nThis ); } - nByte += (pStruct->nLevel+1) * sizeof(Fts5StructureLevel); + nByte += (((i64)pStruct->nLevel)+1) * sizeof(Fts5StructureLevel); + assert( nByte==SZ_FTS5STRUCTURE(pStruct->nLevel+2) ); pNew = (Fts5Structure*)sqlite3Fts5MallocZero(&p->rc, nByte); if( pNew ){ @@ -249219,9 +250614,13 @@ struct Fts5TokenDataIter { int nIterAlloc; Fts5PoslistReader *aPoslistReader; int *aPoslistToIter; - Fts5Iter *apIter[1]; + Fts5Iter *apIter[FLEXARRAY]; }; +/* Size in bytes of an Fts5TokenDataIter object holding up to N iterators */ +#define SZ_FTS5TOKENDATAITER(N) \ + (offsetof(Fts5TokenDataIter,apIter) + (N)*sizeof(Fts5Iter)) + /* ** The two input arrays - a1[] and a2[] - are in sorted order. This function ** merges the two arrays together and writes the result to output array @@ -249293,7 +250692,7 @@ static void fts5TokendataIterAppendMap( /* ** Sort the contents of the pT->aMap[] array. ** -** The sorting algorithm requries a malloc(). If this fails, an error code +** The sorting algorithm requires a malloc(). If this fails, an error code ** is left in Fts5Index.rc before returning. */ static void fts5TokendataIterSortMap(Fts5Index *p, Fts5TokenDataIter *pT){ @@ -249484,7 +250883,7 @@ static void fts5SetupPrefixIter( && p->pConfig->bPrefixInsttoken ){ s.pTokendata = &s2; - s2.pT = (Fts5TokenDataIter*)fts5IdxMalloc(p, sizeof(*s2.pT)); + s2.pT = (Fts5TokenDataIter*)fts5IdxMalloc(p, SZ_FTS5TOKENDATAITER(1)); } if( p->pConfig->eDetail==FTS5_DETAIL_NONE ){ @@ -249530,7 +250929,8 @@ static void fts5SetupPrefixIter( } } - pData = fts5IdxMalloc(p, sizeof(*pData)+s.doclist.n+FTS5_DATA_ZERO_PADDING); + pData = fts5IdxMalloc(p, sizeof(*pData) + + ((i64)s.doclist.n)+FTS5_DATA_ZERO_PADDING); assert( pData!=0 || p->rc!=SQLITE_OK ); if( pData ){ pData->p = (u8*)&pData[1]; @@ -249611,15 +251011,17 @@ static int sqlite3Fts5IndexRollback(Fts5Index *p){ ** and the initial version of the "averages" record (a zero-byte blob). 
*/ static int sqlite3Fts5IndexReinit(Fts5Index *p){ - Fts5Structure s; + Fts5Structure *pTmp; + u8 tmpSpace[SZ_FTS5STRUCTURE(1)]; fts5StructureInvalidate(p); fts5IndexDiscardData(p); - memset(&s, 0, sizeof(Fts5Structure)); + pTmp = (Fts5Structure*)tmpSpace; + memset(pTmp, 0, SZ_FTS5STRUCTURE(1)); if( p->pConfig->bContentlessDelete ){ - s.nOriginCntr = 1; + pTmp->nOriginCntr = 1; } fts5DataWrite(p, FTS5_AVERAGES_ROWID, (const u8*)"", 0); - fts5StructureWrite(p, &s); + fts5StructureWrite(p, pTmp); return fts5IndexReturn(p); } @@ -249827,7 +251229,7 @@ static Fts5TokenDataIter *fts5AppendTokendataIter( if( p->rc==SQLITE_OK ){ if( pIn==0 || pIn->nIter==pIn->nIterAlloc ){ int nAlloc = pIn ? pIn->nIterAlloc*2 : 16; - int nByte = nAlloc * sizeof(Fts5Iter*) + sizeof(Fts5TokenDataIter); + int nByte = SZ_FTS5TOKENDATAITER(nAlloc+1); Fts5TokenDataIter *pNew = (Fts5TokenDataIter*)sqlite3_realloc(pIn, nByte); if( pNew==0 ){ @@ -250343,7 +251745,8 @@ static int fts5SetupPrefixIterTokendata( fts5BufferGrow(&p->rc, &token, nToken+1); assert( token.p!=0 || p->rc!=SQLITE_OK ); - ctx.pT = (Fts5TokenDataIter*)sqlite3Fts5MallocZero(&p->rc, sizeof(*ctx.pT)); + ctx.pT = (Fts5TokenDataIter*)sqlite3Fts5MallocZero(&p->rc, + SZ_FTS5TOKENDATAITER(1)); if( p->rc==SQLITE_OK ){ @@ -250474,7 +251877,8 @@ static int sqlite3Fts5IndexIterWriteTokendata( if( pIter->nSeg>0 ){ /* This is a prefix term iterator. */ if( pT==0 ){ - pT = (Fts5TokenDataIter*)sqlite3Fts5MallocZero(&p->rc, sizeof(*pT)); + pT = (Fts5TokenDataIter*)sqlite3Fts5MallocZero(&p->rc, + SZ_FTS5TOKENDATAITER(1)); pIter->pTokenDataIter = pT; } if( pT ){ @@ -251508,7 +252912,7 @@ static void fts5DecodeRowid( #if defined(SQLITE_TEST) || defined(SQLITE_FTS5_DEBUG) static void fts5DebugRowid(int *pRc, Fts5Buffer *pBuf, i64 iKey){ - int iSegid, iHeight, iPgno, bDlidx, bTomb; /* Rowid compenents */ + int iSegid, iHeight, iPgno, bDlidx, bTomb; /* Rowid components */ fts5DecodeRowid(iKey, &bTomb, &iSegid, &bDlidx, &iHeight, &iPgno); if( iSegid==0 ){ @@ -251754,7 +253158,7 @@ static void fts5DecodeFunction( ** buffer overreads even if the record is corrupt. */ n = sqlite3_value_bytes(apVal[1]); aBlob = sqlite3_value_blob(apVal[1]); - nSpace = n + FTS5_DATA_ZERO_PADDING; + nSpace = ((i64)n) + FTS5_DATA_ZERO_PADDING; a = (u8*)sqlite3Fts5MallocZero(&rc, nSpace); if( a==0 ) goto decode_out; if( n>0 ) memcpy(a, aBlob, n); @@ -252469,9 +253873,11 @@ struct Fts5Sorter { i64 iRowid; /* Current rowid */ const u8 *aPoslist; /* Position lists for current row */ int nIdx; /* Number of entries in aIdx[] */ - int aIdx[1]; /* Offsets into aPoslist for current row */ + int aIdx[FLEXARRAY]; /* Offsets into aPoslist for current row */ }; +/* Size (int bytes) of an Fts5Sorter object with N indexes */ +#define SZ_FTS5SORTER(N) (offsetof(Fts5Sorter,nIdx)+((N+2)/2)*sizeof(i64)) /* ** Virtual-table cursor object. 
@@ -253349,7 +254755,7 @@ static int fts5CursorFirstSorted( const char *zRankArgs = pCsr->zRankArgs; nPhrase = sqlite3Fts5ExprPhraseCount(pCsr->pExpr); - nByte = sizeof(Fts5Sorter) + sizeof(int) * (nPhrase-1); + nByte = SZ_FTS5SORTER(nPhrase); pSorter = (Fts5Sorter*)sqlite3_malloc64(nByte); if( pSorter==0 ) return SQLITE_NOMEM; memset(pSorter, 0, (size_t)nByte); @@ -255875,7 +257281,7 @@ static void fts5SourceIdFunc( ){ assert( nArg==0 ); UNUSED_PARAM2(nArg, apUnused); - sqlite3_result_text(pCtx, "fts5: 2025-02-18 13:38:58 873d4e274b4988d260ba8354a9718324a1c26187a4ab4c1cc0227c03d0f10e70", -1, SQLITE_TRANSIENT); + sqlite3_result_text(pCtx, "fts5: 2025-07-17 13:25:10 3ce993b8657d6d9deda380a93cdd6404a8c8ba1b185b2bc423703e41ae5f2543", -1, SQLITE_TRANSIENT); } /* @@ -256100,8 +257506,8 @@ static int fts5Init(sqlite3 *db){ ** its entry point to enable the matchinfo() demo. */ #ifdef SQLITE_FTS5_ENABLE_TEST_MI if( rc==SQLITE_OK ){ - extern int sqlite3Fts5TestRegisterMatchinfo(sqlite3*); - rc = sqlite3Fts5TestRegisterMatchinfo(db); + extern int sqlite3Fts5TestRegisterMatchinfoAPI(fts5_api*); + rc = sqlite3Fts5TestRegisterMatchinfoAPI(&pGlobal->api); } #endif @@ -256690,6 +258096,7 @@ static int fts5StorageDeleteFromIndex( for(iCol=1; rc==SQLITE_OK && iCol<=pConfig->nCol; iCol++){ if( pConfig->abUnindexed[iCol-1]==0 ){ sqlite3_value *pVal = 0; + sqlite3_value *pFree = 0; const char *pText = 0; int nText = 0; const char *pLoc = 0; @@ -256706,11 +258113,22 @@ static int fts5StorageDeleteFromIndex( if( pConfig->bLocale && sqlite3Fts5IsLocaleValue(pConfig, pVal) ){ rc = sqlite3Fts5DecodeLocaleValue(pVal, &pText, &nText, &pLoc, &nLoc); }else{ - pText = (const char*)sqlite3_value_text(pVal); - nText = sqlite3_value_bytes(pVal); - if( pConfig->bLocale && pSeek ){ - pLoc = (const char*)sqlite3_column_text(pSeek, iCol + pConfig->nCol); - nLoc = sqlite3_column_bytes(pSeek, iCol + pConfig->nCol); + if( sqlite3_value_type(pVal)!=SQLITE_TEXT ){ + /* Make a copy of the value to work with. This is because the call + ** to sqlite3_value_text() below forces the type of the value to + ** SQLITE_TEXT, and we may need to use it again later. */ + pFree = pVal = sqlite3_value_dup(pVal); + if( pVal==0 ){ + rc = SQLITE_NOMEM; + } + } + if( rc==SQLITE_OK ){ + pText = (const char*)sqlite3_value_text(pVal); + nText = sqlite3_value_bytes(pVal); + if( pConfig->bLocale && pSeek ){ + pLoc = (const char*)sqlite3_column_text(pSeek, iCol+pConfig->nCol); + nLoc = sqlite3_column_bytes(pSeek, iCol + pConfig->nCol); + } } } @@ -256726,6 +258144,7 @@ static int fts5StorageDeleteFromIndex( } sqlite3Fts5ClearLocale(pConfig); } + sqlite3_value_free(pFree); } } if( rc==SQLITE_OK && p->nTotalRow<1 ){ @@ -259939,7 +261358,6 @@ static void sqlite3Fts5UnicodeAscii(u8 *aArray, u8 *aAscii){ aAscii[0] = 0; /* 0x00 is never a token character */ } - /* ** 2015 May 30 ** @@ -260480,12 +261898,12 @@ static int fts5VocabInitVtab( *pzErr = sqlite3_mprintf("wrong number of vtable arguments"); rc = SQLITE_ERROR; }else{ - int nByte; /* Bytes of space to allocate */ + i64 nByte; /* Bytes of space to allocate */ const char *zDb = bDb ? argv[3] : argv[1]; const char *zTab = bDb ? argv[4] : argv[3]; const char *zType = bDb ? 
argv[5] : argv[4]; - int nDb = (int)strlen(zDb)+1; - int nTab = (int)strlen(zTab)+1; + i64 nDb = strlen(zDb)+1; + i64 nTab = strlen(zTab)+1; int eType = 0; rc = fts5VocabTableType(zType, pzErr, &eType); diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h index 5e07ce68..d65d949a 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.h @@ -134,7 +134,7 @@ extern "C" { ** ** Since [version 3.6.18] ([dateof:3.6.18]), ** SQLite source code has been stored in the -** Fossil configuration management +** Fossil configuration management ** system. ^The SQLITE_SOURCE_ID macro evaluates to ** a string which identifies a particular check-in of SQLite ** within its configuration management system. ^The SQLITE_SOURCE_ID @@ -147,9 +147,9 @@ extern "C" { ** [sqlite3_libversion_number()], [sqlite3_sourceid()], ** [sqlite_version()] and [sqlite_source_id()]. */ -#define SQLITE_VERSION "3.49.1" -#define SQLITE_VERSION_NUMBER 3049001 -#define SQLITE_SOURCE_ID "2025-02-18 13:38:58 873d4e274b4988d260ba8354a9718324a1c26187a4ab4c1cc0227c03d0f10e70" +#define SQLITE_VERSION "3.50.3" +#define SQLITE_VERSION_NUMBER 3050003 +#define SQLITE_SOURCE_ID "2025-07-17 13:25:10 3ce993b8657d6d9deda380a93cdd6404a8c8ba1b185b2bc423703e41ae5f2543" /* ** CAPI3REF: Run-Time Library Version Numbers @@ -1164,6 +1164,12 @@ struct sqlite3_io_methods { ** the value that M is to be set to. Before returning, the 32-bit signed ** integer is overwritten with the previous value of M. ** +**
        • [[SQLITE_FCNTL_BLOCK_ON_CONNECT]] +** The [SQLITE_FCNTL_BLOCK_ON_CONNECT] opcode is used to configure the +** VFS to block when taking a SHARED lock to connect to a wal mode database. +** This is used to implement the functionality associated with +** SQLITE_SETLK_BLOCK_ON_CONNECT. +** **
        • [[SQLITE_FCNTL_DATA_VERSION]] ** The [SQLITE_FCNTL_DATA_VERSION] opcode is used to detect changes to ** a database file. The argument is a pointer to a 32-bit unsigned integer. @@ -1260,6 +1266,7 @@ struct sqlite3_io_methods { #define SQLITE_FCNTL_CKSM_FILE 41 #define SQLITE_FCNTL_RESET_CACHE 42 #define SQLITE_FCNTL_NULL_IO 43 +#define SQLITE_FCNTL_BLOCK_ON_CONNECT 44 /* deprecated names */ #define SQLITE_GET_LOCKPROXYFILE SQLITE_FCNTL_GET_LOCKPROXYFILE @@ -1990,13 +1997,16 @@ struct sqlite3_mem_methods { ** ** [[SQLITE_CONFIG_LOOKASIDE]]
          SQLITE_CONFIG_LOOKASIDE
          **
          ^(The SQLITE_CONFIG_LOOKASIDE option takes two arguments that determine -** the default size of lookaside memory on each [database connection]. +** the default size of [lookaside memory] on each [database connection]. ** The first argument is the -** size of each lookaside buffer slot and the second is the number of -** slots allocated to each database connection.)^ ^(SQLITE_CONFIG_LOOKASIDE -** sets the default lookaside size. The [SQLITE_DBCONFIG_LOOKASIDE] -** option to [sqlite3_db_config()] can be used to change the lookaside -** configuration on individual connections.)^
          +** size of each lookaside buffer slot ("sz") and the second is the number of +** slots allocated to each database connection ("cnt").)^ +** ^(SQLITE_CONFIG_LOOKASIDE sets the default lookaside size. +** The [SQLITE_DBCONFIG_LOOKASIDE] option to [sqlite3_db_config()] can +** be used to change the lookaside configuration on individual connections.)^ +** The [-DSQLITE_DEFAULT_LOOKASIDE] option can be used to change the +** default lookaside configuration at compile-time. +**
        • ** ** [[SQLITE_CONFIG_PCACHE2]]
          SQLITE_CONFIG_PCACHE2
          **
          ^(The SQLITE_CONFIG_PCACHE2 option takes a single argument which is @@ -2233,31 +2243,50 @@ struct sqlite3_mem_methods { ** [[SQLITE_DBCONFIG_LOOKASIDE]] **
          SQLITE_DBCONFIG_LOOKASIDE
          **
          The SQLITE_DBCONFIG_LOOKASIDE option is used to adjust the -** configuration of the lookaside memory allocator within a database +** configuration of the [lookaside memory allocator] within a database ** connection. ** The arguments to the SQLITE_DBCONFIG_LOOKASIDE option are not ** in the [DBCONFIG arguments|usual format]. ** The SQLITE_DBCONFIG_LOOKASIDE option takes three arguments, not two, ** so that a call to [sqlite3_db_config()] that uses SQLITE_DBCONFIG_LOOKASIDE ** should have a total of five parameters. -** ^The first argument (the third parameter to [sqlite3_db_config()] is a +**
            +**
          1. The first argument ("buf") is a ** pointer to a memory buffer to use for lookaside memory. -** ^The first argument after the SQLITE_DBCONFIG_LOOKASIDE verb -** may be NULL in which case SQLite will allocate the -** lookaside buffer itself using [sqlite3_malloc()]. ^The second argument is the -** size of each lookaside buffer slot. ^The third argument is the number of -** slots. The size of the buffer in the first argument must be greater than -** or equal to the product of the second and third arguments. The buffer -** must be aligned to an 8-byte boundary. ^If the second argument to -** SQLITE_DBCONFIG_LOOKASIDE is not a multiple of 8, it is internally -** rounded down to the next smaller multiple of 8. ^(The lookaside memory +** The first argument may be NULL in which case SQLite will allocate the +** lookaside buffer itself using [sqlite3_malloc()]. +**

          2. The second argument ("sz") is the +** size of each lookaside buffer slot. Lookaside is disabled if "sz" +** is less than 8. The "sz" argument should be a multiple of 8 less than +** 65536. If "sz" does not meet this constraint, it is reduced in size until +** it does. +**

          3. The third argument ("cnt") is the number of slots. Lookaside is disabled +** if "cnt"is less than 1. The "cnt" value will be reduced, if necessary, so +** that the product of "sz" and "cnt" does not exceed 2,147,418,112. The "cnt" +** parameter is usually chosen so that the product of "sz" and "cnt" is less +** than 1,000,000. +**

          +**

          If the "buf" argument is not NULL, then it must +** point to a memory buffer with a size that is greater than +** or equal to the product of "sz" and "cnt". +** The buffer must be aligned to an 8-byte boundary. +** The lookaside memory ** configuration for a database connection can only be changed when that ** connection is not currently using lookaside memory, or in other words -** when the "current value" returned by -** [sqlite3_db_status](D,[SQLITE_DBSTATUS_LOOKASIDE_USED],...) is zero. +** when the value returned by [SQLITE_DBSTATUS_LOOKASIDE_USED] is zero. ** Any attempt to change the lookaside memory configuration when lookaside ** memory is in use leaves the configuration unchanged and returns -** [SQLITE_BUSY].)^

          +** [SQLITE_BUSY]. +** If the "buf" argument is NULL and an attempt +** to allocate memory based on "sz" and "cnt" fails, then +** lookaside is silently disabled. +**

          +** The [SQLITE_CONFIG_LOOKASIDE] configuration option can be used to set the +** default lookaside configuration at initialization. The +** [-DSQLITE_DEFAULT_LOOKASIDE] option can be used to set the default lookaside +** configuration at compile-time. Typical values for lookaside are 1200 for +** "sz" and 40 to 100 for "cnt". +** ** ** [[SQLITE_DBCONFIG_ENABLE_FKEY]] **

          SQLITE_DBCONFIG_ENABLE_FKEY
          @@ -2994,6 +3023,44 @@ SQLITE_API int sqlite3_busy_handler(sqlite3*,int(*)(void*,int),void*); */ SQLITE_API int sqlite3_busy_timeout(sqlite3*, int ms); +/* +** CAPI3REF: Set the Setlk Timeout +** METHOD: sqlite3 +** +** This routine is only useful in SQLITE_ENABLE_SETLK_TIMEOUT builds. If +** the VFS supports blocking locks, it sets the timeout in ms used by +** eligible locks taken on wal mode databases by the specified database +** handle. In non-SQLITE_ENABLE_SETLK_TIMEOUT builds, or if the VFS does +** not support blocking locks, this function is a no-op. +** +** Passing 0 to this function disables blocking locks altogether. Passing +** -1 to this function requests that the VFS blocks for a long time - +** indefinitely if possible. The results of passing any other negative value +** are undefined. +** +** Internally, each SQLite database handle store two timeout values - the +** busy-timeout (used for rollback mode databases, or if the VFS does not +** support blocking locks) and the setlk-timeout (used for blocking locks +** on wal-mode databases). The sqlite3_busy_timeout() method sets both +** values, this function sets only the setlk-timeout value. Therefore, +** to configure separate busy-timeout and setlk-timeout values for a single +** database handle, call sqlite3_busy_timeout() followed by this function. +** +** Whenever the number of connections to a wal mode database falls from +** 1 to 0, the last connection takes an exclusive lock on the database, +** then checkpoints and deletes the wal file. While it is doing this, any +** new connection that tries to read from the database fails with an +** SQLITE_BUSY error. Or, if the SQLITE_SETLK_BLOCK_ON_CONNECT flag is +** passed to this API, the new connection blocks until the exclusive lock +** has been released. +*/ +SQLITE_API int sqlite3_setlk_timeout(sqlite3*, int ms, int flags); + +/* +** CAPI3REF: Flags for sqlite3_setlk_timeout() +*/ +#define SQLITE_SETLK_BLOCK_ON_CONNECT 0x01 + /* ** CAPI3REF: Convenience Routines For Running Queries ** METHOD: sqlite3 @@ -4013,7 +4080,7 @@ SQLITE_API sqlite3_file *sqlite3_database_file_object(const char*); ** ** The sqlite3_create_filename(D,J,W,N,P) allocates memory to hold a version of ** database filename D with corresponding journal file J and WAL file W and -** with N URI parameters key/values pairs in the array P. The result from +** an array P of N URI Key/Value pairs. The result from ** sqlite3_create_filename(D,J,W,N,P) is a pointer to a database filename that ** is safe to pass to routines like: **
            @@ -4694,7 +4761,7 @@ typedef struct sqlite3_context sqlite3_context; ** METHOD: sqlite3_stmt ** ** ^(In the SQL statement text input to [sqlite3_prepare_v2()] and its variants, -** literals may be replaced by a [parameter] that matches one of following +** literals may be replaced by a [parameter] that matches one of the following ** templates: ** **
              @@ -4739,7 +4806,7 @@ typedef struct sqlite3_context sqlite3_context; ** ** [[byte-order determination rules]] ^The byte-order of ** UTF16 input text is determined by the byte-order mark (BOM, U+FEFF) -** found in first character, which is removed, or in the absence of a BOM +** found in the first character, which is removed, or in the absence of a BOM ** the byte order is the native byte order of the host ** machine for sqlite3_bind_text16() or the byte order specified in ** the 6th parameter for sqlite3_bind_text64().)^ @@ -4759,7 +4826,7 @@ typedef struct sqlite3_context sqlite3_context; ** or sqlite3_bind_text16() or sqlite3_bind_text64() then ** that parameter must be the byte offset ** where the NUL terminator would occur assuming the string were NUL -** terminated. If any NUL characters occurs at byte offsets less than +** terminated. If any NUL characters occur at byte offsets less than ** the value of the fourth parameter then the resulting string value will ** contain embedded NULs. The result of expressions involving strings ** with embedded NULs is undefined. @@ -4971,7 +5038,7 @@ SQLITE_API const void *sqlite3_column_name16(sqlite3_stmt*, int N); ** METHOD: sqlite3_stmt ** ** ^These routines provide a means to determine the database, table, and -** table column that is the origin of a particular result column in +** table column that is the origin of a particular result column in a ** [SELECT] statement. ** ^The name of the database or table or column can be returned as ** either a UTF-8 or UTF-16 string. ^The _database_ routines return @@ -5109,7 +5176,7 @@ SQLITE_API const void *sqlite3_column_decltype16(sqlite3_stmt*,int); ** other than [SQLITE_ROW] before any subsequent invocation of ** sqlite3_step(). Failure to reset the prepared statement using ** [sqlite3_reset()] would result in an [SQLITE_MISUSE] return from -** sqlite3_step(). But after [version 3.6.23.1] ([dateof:3.6.23.1], +** sqlite3_step(). But after [version 3.6.23.1] ([dateof:3.6.23.1]), ** sqlite3_step() began ** calling [sqlite3_reset()] automatically in this circumstance rather ** than returning [SQLITE_MISUSE]. This is not considered a compatibility @@ -5540,8 +5607,8 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt); ** ** For best security, the [SQLITE_DIRECTONLY] flag is recommended for ** all application-defined SQL functions that do not need to be -** used inside of triggers, view, CHECK constraints, or other elements of -** the database schema. This flags is especially recommended for SQL +** used inside of triggers, views, CHECK constraints, or other elements of +** the database schema. This flag is especially recommended for SQL ** functions that have side effects or reveal internal application state. ** Without this flag, an attacker might be able to modify the schema of ** a database file to include invocations of the function with parameters @@ -5572,7 +5639,7 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt); ** [user-defined window functions|available here]. ** ** ^(If the final parameter to sqlite3_create_function_v2() or -** sqlite3_create_window_function() is not NULL, then it is destructor for +** sqlite3_create_window_function() is not NULL, then it is the destructor for ** the application data pointer. 
The destructor is invoked when the function ** is deleted, either by being overloaded or when the database connection ** closes.)^ ^The destructor is also invoked if the call to @@ -5972,7 +6039,7 @@ SQLITE_API unsigned int sqlite3_value_subtype(sqlite3_value*); ** METHOD: sqlite3_value ** ** ^The sqlite3_value_dup(V) interface makes a copy of the [sqlite3_value] -** object D and returns a pointer to that copy. ^The [sqlite3_value] returned +** object V and returns a pointer to that copy. ^The [sqlite3_value] returned ** is a [protected sqlite3_value] object even if the input is not. ** ^The sqlite3_value_dup(V) interface returns NULL if V is NULL or if a ** memory allocation fails. ^If V is a [pointer value], then the result @@ -6010,7 +6077,7 @@ SQLITE_API void sqlite3_value_free(sqlite3_value*); ** allocation error occurs. ** ** ^(The amount of space allocated by sqlite3_aggregate_context(C,N) is -** determined by the N parameter on first successful call. Changing the +** determined by the N parameter on the first successful call. Changing the ** value of N in any subsequent call to sqlite3_aggregate_context() within ** the same aggregate function instance will not resize the memory ** allocation.)^ Within the xFinal callback, it is customary to set @@ -6172,7 +6239,7 @@ SQLITE_API void sqlite3_set_auxdata(sqlite3_context*, int N, void*, void (*)(voi ** ** Security Warning: These interfaces should not be exposed in scripting ** languages or in other circumstances where it might be possible for an -** an attacker to invoke them. Any agent that can invoke these interfaces +** attacker to invoke them. Any agent that can invoke these interfaces ** can probably also take control of the process. ** ** Database connection client data is only available for SQLite @@ -6286,7 +6353,7 @@ typedef void (*sqlite3_destructor_type)(void*); ** pointed to by the 2nd parameter are taken as the application-defined ** function result. If the 3rd parameter is non-negative, then it ** must be the byte offset into the string where the NUL terminator would -** appear if the string where NUL terminated. If any NUL characters occur +** appear if the string were NUL terminated. If any NUL characters occur ** in the string at a byte offset that is less than the value of the 3rd ** parameter, then the resulting string will contain embedded NULs and the ** result of expressions operating on strings with embedded NULs is undefined. @@ -6344,7 +6411,7 @@ typedef void (*sqlite3_destructor_type)(void*); ** string and preferably a string literal. The sqlite3_result_pointer() ** routine is part of the [pointer passing interface] added for SQLite 3.20.0. ** -** If these routines are called from within the different thread +** If these routines are called from within a different thread ** than the one containing the application-defined function that received ** the [sqlite3_context] pointer, the results are undefined. */ @@ -6750,7 +6817,7 @@ SQLITE_API sqlite3 *sqlite3_db_handle(sqlite3_stmt*); ** METHOD: sqlite3 ** ** ^The sqlite3_db_name(D,N) interface returns a pointer to the schema name -** for the N-th database on database connection D, or a NULL pointer of N is +** for the N-th database on database connection D, or a NULL pointer if N is ** out of range. An N value of 0 means the main database file. An N of 1 is ** the "temp" schema. Larger values of N correspond to various ATTACH-ed ** databases. @@ -6845,7 +6912,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema); **
              The SQLITE_TXN_READ state means that the database is currently ** in a read transaction. Content has been read from the database file ** but nothing in the database file has changed. The transaction state -** will advanced to SQLITE_TXN_WRITE if any changes occur and there are +** will be advanced to SQLITE_TXN_WRITE if any changes occur and there are ** no other conflicting concurrent write transactions. The transaction ** state will revert to SQLITE_TXN_NONE following a [ROLLBACK] or ** [COMMIT].
              @@ -6854,7 +6921,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema); **
              The SQLITE_TXN_WRITE state means that the database is currently ** in a write transaction. Content has been written to the database file ** but has not yet committed. The transaction state will change to -** to SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT].
              +** SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT]. */ #define SQLITE_TXN_NONE 0 #define SQLITE_TXN_READ 1 @@ -7005,6 +7072,8 @@ SQLITE_API int sqlite3_autovacuum_pages( ** ** ^The second argument is a pointer to the function to invoke when a ** row is updated, inserted or deleted in a rowid table. +** ^The update hook is disabled by invoking sqlite3_update_hook() +** with a NULL pointer as the second parameter. ** ^The first argument to the callback is a copy of the third argument ** to sqlite3_update_hook(). ** ^The second callback argument is one of [SQLITE_INSERT], [SQLITE_DELETE], @@ -7133,7 +7202,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*); ** CAPI3REF: Impose A Limit On Heap Size ** ** These interfaces impose limits on the amount of heap memory that will be -** by all database connections within a single process. +** used by all database connections within a single process. ** ** ^The sqlite3_soft_heap_limit64() interface sets and/or queries the ** soft limit on the amount of heap memory that may be allocated by SQLite. @@ -7191,7 +7260,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*); **
            )^ ** ** The circumstances under which SQLite will enforce the heap limits may -** changes in future releases of SQLite. +** change in future releases of SQLite. */ SQLITE_API sqlite3_int64 sqlite3_soft_heap_limit64(sqlite3_int64 N); SQLITE_API sqlite3_int64 sqlite3_hard_heap_limit64(sqlite3_int64 N); @@ -7306,8 +7375,8 @@ SQLITE_API int sqlite3_table_column_metadata( ** ^The entry point is zProc. ** ^(zProc may be 0, in which case SQLite will try to come up with an ** entry point name on its own. It first tries "sqlite3_extension_init". -** If that does not work, it constructs a name "sqlite3_X_init" where the -** X is consists of the lower-case equivalent of all ASCII alphabetic +** If that does not work, it constructs a name "sqlite3_X_init" where +** X consists of the lower-case equivalent of all ASCII alphabetic ** characters in the filename from the last "/" to the first following ** "." and omitting any initial "lib".)^ ** ^The sqlite3_load_extension() interface returns @@ -7378,7 +7447,7 @@ SQLITE_API int sqlite3_enable_load_extension(sqlite3 *db, int onoff); ** ^(Even though the function prototype shows that xEntryPoint() takes ** no arguments and returns void, SQLite invokes xEntryPoint() with three ** arguments and expects an integer result as if the signature of the -** entry point where as follows: +** entry point were as follows: ** **
             **    int xEntryPoint(
            @@ -7542,7 +7611,7 @@ struct sqlite3_module {
             ** virtual table and might not be checked again by the byte code.)^ ^(The
             ** aConstraintUsage[].omit flag is an optimization hint. When the omit flag
             ** is left in its default setting of false, the constraint will always be
            -** checked separately in byte code.  If the omit flag is change to true, then
            +** checked separately in byte code.  If the omit flag is changed to true, then
             ** the constraint may or may not be checked in byte code.  In other words,
             ** when the omit flag is true there is no guarantee that the constraint will
             ** not be checked again using byte code.)^
            @@ -7568,7 +7637,7 @@ struct sqlite3_module {
             ** The xBestIndex method may optionally populate the idxFlags field with a
             ** mask of SQLITE_INDEX_SCAN_* flags. One such flag is
             ** [SQLITE_INDEX_SCAN_HEX], which if set causes the [EXPLAIN QUERY PLAN]
            -** output to show the idxNum has hex instead of as decimal.  Another flag is
            +** output to show the idxNum as hex instead of as decimal.  Another flag is
             ** SQLITE_INDEX_SCAN_UNIQUE, which if set indicates that the query plan will
             ** return at most one row.
             **
            @@ -7709,7 +7778,7 @@ struct sqlite3_index_info {
             ** the implementation of the [virtual table module].   ^The fourth
             ** parameter is an arbitrary client data pointer that is passed through
             ** into the [xCreate] and [xConnect] methods of the virtual table module
            -** when a new virtual table is be being created or reinitialized.
            +** when a new virtual table is being created or reinitialized.
             **
             ** ^The sqlite3_create_module_v2() interface has a fifth parameter which
             ** is a pointer to a destructor for the pClientData.  ^SQLite will
            @@ -7874,7 +7943,7 @@ typedef struct sqlite3_blob sqlite3_blob;
             ** in *ppBlob. Otherwise an [error code] is returned and, unless the error
             ** code is SQLITE_MISUSE, *ppBlob is set to NULL.)^ ^This means that, provided
             ** the API is not misused, it is always safe to call [sqlite3_blob_close()]
            -** on *ppBlob after this function it returns.
            +** on *ppBlob after this function returns.
             **
             ** This function fails with SQLITE_ERROR if any of the following are true:
             ** 
              @@ -7994,7 +8063,7 @@ SQLITE_API int sqlite3_blob_close(sqlite3_blob *); ** ** ^Returns the size in bytes of the BLOB accessible via the ** successfully opened [BLOB handle] in its only argument. ^The -** incremental blob I/O routines can only read or overwriting existing +** incremental blob I/O routines can only read or overwrite existing ** blob content; they cannot change the size of a blob. ** ** This routine only works on a [BLOB handle] which has been created @@ -8144,7 +8213,7 @@ SQLITE_API int sqlite3_vfs_unregister(sqlite3_vfs*); ** ^The sqlite3_mutex_alloc() routine allocates a new ** mutex and returns a pointer to it. ^The sqlite3_mutex_alloc() ** routine returns NULL if it is unable to allocate the requested -** mutex. The argument to sqlite3_mutex_alloc() must one of these +** mutex. The argument to sqlite3_mutex_alloc() must be one of these ** integer constants: ** **
                @@ -8377,7 +8446,7 @@ SQLITE_API int sqlite3_mutex_notheld(sqlite3_mutex*); ** CAPI3REF: Retrieve the mutex for a database connection ** METHOD: sqlite3 ** -** ^This interface returns a pointer the [sqlite3_mutex] object that +** ^This interface returns a pointer to the [sqlite3_mutex] object that ** serializes access to the [database connection] given in the argument ** when the [threading mode] is Serialized. ** ^If the [threading mode] is Single-thread or Multi-thread then this @@ -8500,7 +8569,7 @@ SQLITE_API int sqlite3_test_control(int op, ...); ** CAPI3REF: SQL Keyword Checking ** ** These routines provide access to the set of SQL language keywords -** recognized by SQLite. Applications can uses these routines to determine +** recognized by SQLite. Applications can use these routines to determine ** whether or not a specific identifier needs to be escaped (for example, ** by enclosing in double-quotes) so as not to confuse the parser. ** @@ -8668,7 +8737,7 @@ SQLITE_API void sqlite3_str_reset(sqlite3_str*); ** content of the dynamic string under construction in X. The value ** returned by [sqlite3_str_value(X)] is managed by the sqlite3_str object X ** and might be freed or altered by any subsequent method on the same -** [sqlite3_str] object. Applications must not used the pointer returned +** [sqlite3_str] object. Applications must not use the pointer returned by ** [sqlite3_str_value(X)] after any subsequent method call on the same ** object. ^Applications may change the content of the string returned ** by [sqlite3_str_value(X)] as long as they do not write into any bytes @@ -8754,7 +8823,7 @@ SQLITE_API int sqlite3_status64( ** allocation which could not be satisfied by the [SQLITE_CONFIG_PAGECACHE] ** buffer and where forced to overflow to [sqlite3_malloc()]. The ** returned value includes allocations that overflowed because they -** where too large (they were larger than the "sz" parameter to +** were too large (they were larger than the "sz" parameter to ** [SQLITE_CONFIG_PAGECACHE]) and allocations that overflowed because ** no space was left in the page cache.)^ ** @@ -8838,28 +8907,29 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** [[SQLITE_DBSTATUS_LOOKASIDE_HIT]] ^(
                SQLITE_DBSTATUS_LOOKASIDE_HIT
                **
                This parameter returns the number of malloc attempts that were ** satisfied using lookaside memory. Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
                )^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE]] ** ^(
                SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE
                -**
                This parameter returns the number malloc attempts that might have +**
                This parameter returns the number of malloc attempts that might have ** been satisfied using lookaside memory but failed due to the amount of ** memory requested being larger than the lookaside slot size. ** Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
                )^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL]] ** ^(
                SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL
                -**
                This parameter returns the number malloc attempts that might have +**
                This parameter returns the number of malloc attempts that might have ** been satisfied using lookaside memory but failed due to all lookaside ** memory already being in use. ** Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
                )^ ** ** [[SQLITE_DBSTATUS_CACHE_USED]] ^(
                SQLITE_DBSTATUS_CACHE_USED
                **
                This parameter returns the approximate number of bytes of heap ** memory used by all pager caches associated with the database connection.)^ ** ^The highwater mark associated with SQLITE_DBSTATUS_CACHE_USED is always 0. +**
                ** ** [[SQLITE_DBSTATUS_CACHE_USED_SHARED]] ** ^(
                SQLITE_DBSTATUS_CACHE_USED_SHARED
                @@ -8868,10 +8938,10 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** memory used by that pager cache is divided evenly between the attached ** connections.)^ In other words, if none of the pager caches associated ** with the database connection are shared, this request returns the same -** value as DBSTATUS_CACHE_USED. Or, if one or more or the pager caches are +** value as DBSTATUS_CACHE_USED. Or, if one or more of the pager caches are ** shared, the value returned by this call will be smaller than that returned ** by DBSTATUS_CACHE_USED. ^The highwater mark associated with -** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0. +** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0. ** ** [[SQLITE_DBSTATUS_SCHEMA_USED]] ^(
                SQLITE_DBSTATUS_SCHEMA_USED
                **
                This parameter returns the approximate number of bytes of heap @@ -8881,6 +8951,7 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** schema memory is shared with other database connections due to ** [shared cache mode] being enabled. ** ^The highwater mark associated with SQLITE_DBSTATUS_SCHEMA_USED is always 0. +**
                ** ** [[SQLITE_DBSTATUS_STMT_USED]] ^(
                SQLITE_DBSTATUS_STMT_USED
                **
                This parameter returns the approximate number of bytes of heap @@ -8917,7 +8988,7 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** been written to disk in the middle of a transaction due to the page ** cache overflowing. Transactions are more efficient if they are written ** to disk all at once. When pages spill mid-transaction, that introduces -** additional overhead. This parameter can be used help identify +** additional overhead. This parameter can be used to help identify ** inefficiencies that can be resolved by increasing the cache size. **
                ** @@ -8988,13 +9059,13 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** [[SQLITE_STMTSTATUS_SORT]]
                SQLITE_STMTSTATUS_SORT
                **
                ^This is the number of sort operations that have occurred. ** A non-zero value in this counter may indicate an opportunity to -** improvement performance through careful use of indices.
                +** improve performance through careful use of indices. ** ** [[SQLITE_STMTSTATUS_AUTOINDEX]]
                SQLITE_STMTSTATUS_AUTOINDEX
                **
                ^This is the number of rows inserted into transient indices that ** were created automatically in order to help joins run faster. ** A non-zero value in this counter may indicate an opportunity to -** improvement performance by adding permanent indices that do not +** improve performance by adding permanent indices that do not ** need to be reinitialized each time the statement is run.
                ** ** [[SQLITE_STMTSTATUS_VM_STEP]]
                SQLITE_STMTSTATUS_VM_STEP
                @@ -9003,19 +9074,19 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** to 2147483647. The number of virtual machine operations can be ** used as a proxy for the total work done by the prepared statement. ** If the number of virtual machine operations exceeds 2147483647 -** then the value returned by this statement status code is undefined. +** then the value returned by this statement status code is undefined. ** ** [[SQLITE_STMTSTATUS_REPREPARE]]
                SQLITE_STMTSTATUS_REPREPARE
                **
                ^This is the number of times that the prepare statement has been ** automatically regenerated due to schema changes or changes to -** [bound parameters] that might affect the query plan. +** [bound parameters] that might affect the query plan.
                ** ** [[SQLITE_STMTSTATUS_RUN]]
                SQLITE_STMTSTATUS_RUN
                **
                ^This is the number of times that the prepared statement has ** been run. A single "run" for the purposes of this counter is one ** or more calls to [sqlite3_step()] followed by a call to [sqlite3_reset()]. ** The counter is incremented on the first [sqlite3_step()] call of each -** cycle. +** cycle.
                ** ** [[SQLITE_STMTSTATUS_FILTER_MISS]] ** [[SQLITE_STMTSTATUS_FILTER HIT]] @@ -9025,7 +9096,7 @@ SQLITE_API int sqlite3_stmt_status(sqlite3_stmt*, int op,int resetFlg); ** step was bypassed because a Bloom filter returned not-found. The ** corresponding SQLITE_STMTSTATUS_FILTER_MISS value is the number of ** times that the Bloom filter returned a find, and thus the join step -** had to be processed as normal. +** had to be processed as normal. ** ** [[SQLITE_STMTSTATUS_MEMUSED]]
                SQLITE_STMTSTATUS_MEMUSED
                **
                ^This is the approximate number of bytes of heap memory @@ -9130,9 +9201,9 @@ struct sqlite3_pcache_page { ** SQLite will typically create one cache instance for each open database file, ** though this is not guaranteed. ^The ** first parameter, szPage, is the size in bytes of the pages that must -** be allocated by the cache. ^szPage will always a power of two. ^The +** be allocated by the cache. ^szPage will always be a power of two. ^The ** second parameter szExtra is a number of bytes of extra storage -** associated with each page cache entry. ^The szExtra parameter will +** associated with each page cache entry. ^The szExtra parameter will be ** a number less than 250. SQLite will use the ** extra szExtra bytes on each page to store metadata about the underlying ** database page on disk. The value passed into szExtra depends @@ -9140,17 +9211,17 @@ struct sqlite3_pcache_page { ** ^The third argument to xCreate(), bPurgeable, is true if the cache being ** created will be used to cache database pages of a file stored on disk, or ** false if it is used for an in-memory database. The cache implementation -** does not have to do anything special based with the value of bPurgeable; +** does not have to do anything special based upon the value of bPurgeable; ** it is purely advisory. ^On a cache where bPurgeable is false, SQLite will ** never invoke xUnpin() except to deliberately delete a page. ** ^In other words, calls to xUnpin() on a cache with bPurgeable set to ** false will always have the "discard" flag set to true. -** ^Hence, a cache created with bPurgeable false will +** ^Hence, a cache created with bPurgeable set to false will ** never contain any unpinned pages. ** ** [[the xCachesize() page cache method]] ** ^(The xCachesize() method may be called at any time by SQLite to set the -** suggested maximum cache-size (number of pages stored by) the cache +** suggested maximum cache-size (number of pages stored) for the cache ** instance passed as the first argument. This is the value configured using ** the SQLite "[PRAGMA cache_size]" command.)^ As with the bPurgeable ** parameter, the implementation is not required to do anything with this @@ -9177,12 +9248,12 @@ struct sqlite3_pcache_page { ** implementation must return a pointer to the page buffer with its content ** intact. If the requested page is not already in the cache, then the ** cache implementation should use the value of the createFlag -** parameter to help it determined what action to take: +** parameter to help it determine what action to take: ** ** **
                createFlag Behavior when page is not already in cache **
                0 Do not allocate a new page. Return NULL. -**
                1 Allocate a new page if it easy and convenient to do so. +**
                1 Allocate a new page if it is easy and convenient to do so. ** Otherwise return NULL. **
                2 Make every effort to allocate a new page. Only return ** NULL if allocating a new page is effectively impossible. @@ -9199,7 +9270,7 @@ struct sqlite3_pcache_page { ** as its second argument. If the third parameter, discard, is non-zero, ** then the page must be evicted from the cache. ** ^If the discard parameter is -** zero, then the page may be discarded or retained at the discretion of +** zero, then the page may be discarded or retained at the discretion of the ** page cache implementation. ^The page cache implementation ** may choose to evict unpinned pages at any time. ** @@ -9217,7 +9288,7 @@ struct sqlite3_pcache_page { ** When SQLite calls the xTruncate() method, the cache must discard all ** existing cache entries with page numbers (keys) greater than or equal ** to the value of the iLimit parameter passed to xTruncate(). If any -** of these pages are pinned, they are implicitly unpinned, meaning that +** of these pages are pinned, they become implicitly unpinned, meaning that ** they can be safely discarded. ** ** [[the xDestroy() page cache method]] @@ -9397,7 +9468,7 @@ typedef struct sqlite3_backup sqlite3_backup; ** external process or via a database connection other than the one being ** used by the backup operation, then the backup will be automatically ** restarted by the next call to sqlite3_backup_step(). ^If the source -** database is modified by the using the same database connection as is used +** database is modified by using the same database connection as is used ** by the backup operation, then the backup database is automatically ** updated at the same time. ** @@ -9414,7 +9485,7 @@ typedef struct sqlite3_backup sqlite3_backup; ** and may not be used following a call to sqlite3_backup_finish(). ** ** ^The value returned by sqlite3_backup_finish is [SQLITE_OK] if no -** sqlite3_backup_step() errors occurred, regardless or whether or not +** sqlite3_backup_step() errors occurred, regardless of whether or not ** sqlite3_backup_step() completed. ** ^If an out-of-memory condition or IO error occurred during any prior ** sqlite3_backup_step() call on the same [sqlite3_backup] object, then @@ -9516,7 +9587,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p); ** application receives an SQLITE_LOCKED error, it may call the ** sqlite3_unlock_notify() method with the blocked connection handle as ** the first argument to register for a callback that will be invoked -** when the blocking connections current transaction is concluded. ^The +** when the blocking connection's current transaction is concluded. ^The ** callback is invoked from within the [sqlite3_step] or [sqlite3_close] ** call that concludes the blocking connection's transaction. ** @@ -9536,7 +9607,7 @@ SQLITE_API int sqlite3_backup_pagecount(sqlite3_backup *p); ** blocked connection already has a registered unlock-notify callback, ** then the new callback replaces the old.)^ ^If sqlite3_unlock_notify() is ** called with a NULL pointer as its second argument, then any existing -** unlock-notify callback is canceled. ^The blocked connections +** unlock-notify callback is canceled. ^The blocked connection's ** unlock-notify callback may also be canceled by closing the blocked ** connection using [sqlite3_close()]. ** @@ -9934,7 +10005,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...); ** support constraints. 
In this configuration (which is the default) if ** a call to the [xUpdate] method returns [SQLITE_CONSTRAINT], then the entire ** statement is rolled back as if [ON CONFLICT | OR ABORT] had been -** specified as part of the users SQL statement, regardless of the actual +** specified as part of the user's SQL statement, regardless of the actual ** ON CONFLICT mode specified. ** ** If X is non-zero, then the virtual table implementation guarantees @@ -9968,7 +10039,7 @@ SQLITE_API int sqlite3_vtab_config(sqlite3*, int op, ...); ** [[SQLITE_VTAB_INNOCUOUS]]
                SQLITE_VTAB_INNOCUOUS
                **
                Calls of the form ** [sqlite3_vtab_config](db,SQLITE_VTAB_INNOCUOUS) from within the -** the [xConnect] or [xCreate] methods of a [virtual table] implementation +** [xConnect] or [xCreate] methods of a [virtual table] implementation ** identify that virtual table as being safe to use from within triggers ** and views. Conceptually, the SQLITE_VTAB_INNOCUOUS tag means that the ** virtual table can do no serious harm even if it is controlled by a @@ -10136,7 +10207,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int); **
                ** ** ^For the purposes of comparing virtual table output values to see if the -** values are same value for sorting purposes, two NULL values are considered +** values are the same value for sorting purposes, two NULL values are considered ** to be the same. In other words, the comparison operator is "IS" ** (or "IS NOT DISTINCT FROM") and not "==". ** @@ -10146,7 +10217,7 @@ SQLITE_API const char *sqlite3_vtab_collation(sqlite3_index_info*,int); ** ** ^A virtual table implementation is always free to return rows in any order ** it wants, as long as the "orderByConsumed" flag is not set. ^When the -** the "orderByConsumed" flag is unset, the query planner will add extra +** "orderByConsumed" flag is unset, the query planner will add extra ** [bytecode] to ensure that the final results returned by the SQL query are ** ordered correctly. The use of the "orderByConsumed" flag and the ** sqlite3_vtab_distinct() interface is merely an optimization. ^Careful @@ -10243,7 +10314,7 @@ SQLITE_API int sqlite3_vtab_in(sqlite3_index_info*, int iCons, int bHandle); ** sqlite3_vtab_in_next(X,P) should be one of the parameters to the ** xFilter method which invokes these routines, and specifically ** a parameter that was previously selected for all-at-once IN constraint -** processing use the [sqlite3_vtab_in()] interface in the +** processing using the [sqlite3_vtab_in()] interface in the ** [xBestIndex|xBestIndex method]. ^(If the X parameter is not ** an xFilter argument that was selected for all-at-once IN constraint ** processing, then these routines return [SQLITE_ERROR].)^ @@ -10298,7 +10369,7 @@ SQLITE_API int sqlite3_vtab_in_next(sqlite3_value *pVal, sqlite3_value **ppOut); ** and only if *V is set to a value. ^The sqlite3_vtab_rhs_value(P,J,V) ** inteface returns SQLITE_NOTFOUND if the right-hand side of the J-th ** constraint is not available. ^The sqlite3_vtab_rhs_value() interface -** can return an result code other than SQLITE_OK or SQLITE_NOTFOUND if +** can return a result code other than SQLITE_OK or SQLITE_NOTFOUND if ** something goes wrong. ** ** The sqlite3_vtab_rhs_value() interface is usually only successful if @@ -10326,8 +10397,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** KEYWORDS: {conflict resolution mode} ** ** These constants are returned by [sqlite3_vtab_on_conflict()] to -** inform a [virtual table] implementation what the [ON CONFLICT] mode -** is for the SQL statement being evaluated. +** inform a [virtual table] implementation of the [ON CONFLICT] mode +** for the SQL statement being evaluated. ** ** Note that the [SQLITE_IGNORE] constant is also used as a potential ** return value from the [sqlite3_set_authorizer()] callback and that @@ -10367,39 +10438,39 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** [[SQLITE_SCANSTAT_EST]]
                SQLITE_SCANSTAT_EST
                **
                ^The "double" variable pointed to by the V parameter will be set to the ** query planner's estimate for the average number of rows output from each -** iteration of the X-th loop. If the query planner's estimates was accurate, +** iteration of the X-th loop. If the query planner's estimate was accurate, ** then this value will approximate the quotient NVISIT/NLOOP and the ** product of this value for all prior loops with the same SELECTID will -** be the NLOOP value for the current loop. +** be the NLOOP value for the current loop.
                ** ** [[SQLITE_SCANSTAT_NAME]]
                SQLITE_SCANSTAT_NAME
                **
                ^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the name of the index or table -** used for the X-th loop. +** used for the X-th loop.
                ** ** [[SQLITE_SCANSTAT_EXPLAIN]]
                SQLITE_SCANSTAT_EXPLAIN
                **
                ^The "const char *" variable pointed to by the V parameter will be set ** to a zero-terminated UTF-8 string containing the [EXPLAIN QUERY PLAN] -** description for the X-th loop. +** description for the X-th loop.
                ** ** [[SQLITE_SCANSTAT_SELECTID]]
                SQLITE_SCANSTAT_SELECTID
                **
                ^The "int" variable pointed to by the V parameter will be set to the ** id for the X-th query plan element. The id value is unique within the ** statement. The select-id is the same value as is output in the first -** column of an [EXPLAIN QUERY PLAN] query. +** column of an [EXPLAIN QUERY PLAN] query.
                ** ** [[SQLITE_SCANSTAT_PARENTID]]
                SQLITE_SCANSTAT_PARENTID
                **
                The "int" variable pointed to by the V parameter will be set to the -** the id of the parent of the current query element, if applicable, or +** id of the parent of the current query element, if applicable, or ** to zero if the query element has no parent. This is the same value as -** returned in the second column of an [EXPLAIN QUERY PLAN] query. +** returned in the second column of an [EXPLAIN QUERY PLAN] query.
                ** ** [[SQLITE_SCANSTAT_NCYCLE]]
                SQLITE_SCANSTAT_NCYCLE
                **
                The sqlite3_int64 output value is set to the number of cycles, ** according to the processor time-stamp counter, that elapsed while the ** query element was being processed. This value is not available for ** all query elements - if it is unavailable the output variable is -** set to -1. +** set to -1.
                ** */ #define SQLITE_SCANSTAT_NLOOP 0 @@ -10440,8 +10511,8 @@ SQLITE_API int sqlite3_vtab_rhs_value(sqlite3_index_info*, int, sqlite3_value ** ** sqlite3_stmt_scanstatus_v2() with a zeroed flags parameter. ** ** Parameter "idx" identifies the specific query element to retrieve statistics -** for. Query elements are numbered starting from zero. A value of -1 may be -** to query for statistics regarding the entire query. ^If idx is out of range +** for. Query elements are numbered starting from zero. A value of -1 may +** retrieve statistics for the entire query. ^If idx is out of range ** - less than -1 or greater than or equal to the total number of query ** elements used to implement the statement - a non-zero value is returned and ** the variable that pOut points to is unchanged. @@ -10484,7 +10555,7 @@ SQLITE_API void sqlite3_stmt_scanstatus_reset(sqlite3_stmt*); ** METHOD: sqlite3 ** ** ^If a write-transaction is open on [database connection] D when the -** [sqlite3_db_cacheflush(D)] interface invoked, any dirty +** [sqlite3_db_cacheflush(D)] interface is invoked, any dirty ** pages in the pager-cache that are not currently in use are written out ** to disk. A dirty page may be in use if a database cursor created by an ** active SQL statement is reading from it, or if it is page 1 of a database @@ -10598,8 +10669,8 @@ SQLITE_API int sqlite3_db_cacheflush(sqlite3*); ** triggers; and so forth. ** ** When the [sqlite3_blob_write()] API is used to update a blob column, -** the pre-update hook is invoked with SQLITE_DELETE. This is because the -** in this case the new values are not available. In this case, when a +** the pre-update hook is invoked with SQLITE_DELETE, because +** the new values are not yet available. In this case, when a ** callback made with op==SQLITE_DELETE is actually a write using the ** sqlite3_blob_write() API, the [sqlite3_preupdate_blobwrite()] returns ** the index of the column being written. In other cases, where the @@ -10852,7 +10923,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c ** For an ordinary on-disk database file, the serialization is just a ** copy of the disk file. For an in-memory database or a "TEMP" database, ** the serialization is the same sequence of bytes which would be written -** to disk if that database where backed up to disk. +** to disk if that database were backed up to disk. ** ** The usual case is that sqlite3_serialize() copies the serialization of ** the database into memory obtained from [sqlite3_malloc64()] and returns @@ -10861,7 +10932,7 @@ SQLITE_API SQLITE_EXPERIMENTAL int sqlite3_snapshot_recover(sqlite3 *db, const c ** contains the SQLITE_SERIALIZE_NOCOPY bit, then no memory allocations ** are made, and the sqlite3_serialize() function will return a pointer ** to the contiguous memory representation of the database that SQLite -** is currently using for that database, or NULL if the no such contiguous +** is currently using for that database, or NULL if no such contiguous ** memory representation of the database exists. A contiguous memory ** representation of the database will usually only exist if there has ** been a prior call to [sqlite3_deserialize(D,S,...)] with the same @@ -10932,7 +11003,7 @@ SQLITE_API unsigned char *sqlite3_serialize( ** database is currently in a read transaction or is involved in a backup ** operation. ** -** It is not possible to deserialized into the TEMP database. If the +** It is not possible to deserialize into the TEMP database. 
If the ** S argument to sqlite3_deserialize(D,S,P,N,M,F) is "temp" then the ** function returns SQLITE_ERROR. ** @@ -10954,7 +11025,7 @@ SQLITE_API int sqlite3_deserialize( sqlite3 *db, /* The database connection */ const char *zSchema, /* Which DB to reopen with the deserialization */ unsigned char *pData, /* The serialized database content */ - sqlite3_int64 szDb, /* Number bytes in the deserialization */ + sqlite3_int64 szDb, /* Number of bytes in the deserialization */ sqlite3_int64 szBuf, /* Total size of buffer pData[] */ unsigned mFlags /* Zero or more SQLITE_DESERIALIZE_* flags */ ); @@ -10962,7 +11033,7 @@ SQLITE_API int sqlite3_deserialize( /* ** CAPI3REF: Flags for sqlite3_deserialize() ** -** The following are allowed values for 6th argument (the F argument) to +** The following are allowed values for the 6th argument (the F argument) to ** the [sqlite3_deserialize(D,S,P,N,M,F)] interface. ** ** The SQLITE_DESERIALIZE_FREEONCLOSE means that the database serialization @@ -11487,9 +11558,10 @@ SQLITE_API void sqlite3session_table_filter( ** is inserted while a session object is enabled, then later deleted while ** the same session object is disabled, no INSERT record will appear in the ** changeset, even though the delete took place while the session was disabled. -** Or, if one field of a row is updated while a session is disabled, and -** another field of the same row is updated while the session is enabled, the -** resulting changeset will contain an UPDATE change that updates both fields. +** Or, if one field of a row is updated while a session is enabled, and +** then another field of the same row is updated while the session is disabled, +** the resulting changeset will contain an UPDATE change that updates both +** fields. */ SQLITE_API int sqlite3session_changeset( sqlite3_session *pSession, /* Session object */ @@ -11561,8 +11633,9 @@ SQLITE_API sqlite3_int64 sqlite3session_changeset_size(sqlite3_session *pSession ** database zFrom the contents of the two compatible tables would be ** identical. ** -** It an error if database zFrom does not exist or does not contain the -** required compatible table. +** Unless the call to this function is a no-op as described above, it is an +** error if database zFrom does not exist or does not contain the required +** compatible table. ** ** If the operation is successful, SQLITE_OK is returned. Otherwise, an SQLite ** error code. In this case, if argument pzErrMsg is not NULL, *pzErrMsg @@ -11697,7 +11770,7 @@ SQLITE_API int sqlite3changeset_start_v2( ** The following flags may passed via the 4th parameter to ** [sqlite3changeset_start_v2] and [sqlite3changeset_start_v2_strm]: ** -**
                SQLITE_CHANGESETAPPLY_INVERT
                +**
                SQLITE_CHANGESETSTART_INVERT
                ** Invert the changeset while iterating through it. This is equivalent to ** inverting a changeset using sqlite3changeset_invert() before applying it. ** It is an error to specify this flag with a patchset. @@ -12012,19 +12085,6 @@ SQLITE_API int sqlite3changeset_concat( void **ppOut /* OUT: Buffer containing output changeset */ ); - -/* -** CAPI3REF: Upgrade the Schema of a Changeset/Patchset -*/ -SQLITE_API int sqlite3changeset_upgrade( - sqlite3 *db, - const char *zDb, - int nIn, const void *pIn, /* Input changeset */ - int *pnOut, void **ppOut /* OUT: Inverse of input */ -); - - - /* ** CAPI3REF: Changegroup Handle ** diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go index 76d84016..5a492766 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3_opt_userauth.go @@ -16,53 +16,10 @@ package sqlite3 #else #include #endif -#include - -static int -_sqlite3_user_authenticate(sqlite3* db, const char* zUsername, const char* aPW, int nPW) -{ - return sqlite3_user_authenticate(db, zUsername, aPW, nPW); -} - -static int -_sqlite3_user_add(sqlite3* db, const char* zUsername, const char* aPW, int nPW, int isAdmin) -{ - return sqlite3_user_add(db, zUsername, aPW, nPW, isAdmin); -} - -static int -_sqlite3_user_change(sqlite3* db, const char* zUsername, const char* aPW, int nPW, int isAdmin) -{ - return sqlite3_user_change(db, zUsername, aPW, nPW, isAdmin); -} - -static int -_sqlite3_user_delete(sqlite3* db, const char* zUsername) -{ - return sqlite3_user_delete(db, zUsername); -} - -static int -_sqlite3_auth_enabled(sqlite3* db) -{ - int exists = -1; - - sqlite3_stmt *stmt; - sqlite3_prepare_v2(db, "select count(type) from sqlite_master WHERE type='table' and name='sqlite_user';", -1, &stmt, NULL); - - while ( sqlite3_step(stmt) == SQLITE_ROW) { - exists = sqlite3_column_int(stmt, 0); - } - - sqlite3_finalize(stmt); - - return exists; -} */ import "C" import ( "errors" - "unsafe" ) const ( @@ -70,8 +27,9 @@ const ( ) var ( - ErrUnauthorized = errors.New("SQLITE_AUTH: Unauthorized") - ErrAdminRequired = errors.New("SQLITE_AUTH: Unauthorized; Admin Privileges Required") + ErrUnauthorized = errors.New("SQLITE_AUTH: Unauthorized") + ErrAdminRequired = errors.New("SQLITE_AUTH: Unauthorized; Admin Privileges Required") + errUserAuthNoLongerSupported = errors.New("sqlite3: the sqlite_userauth tag is no longer supported as the userauth extension is no longer supported by the SQLite authors, see https://github.com/mattn/go-sqlite3/issues/1341") ) // Authenticate will perform an authentication of the provided username @@ -88,15 +46,7 @@ var ( // If the SQLITE_USER table is not present in the database file, then // this interface is a harmless no-op returning SQLITE_OK. func (c *SQLiteConn) Authenticate(username, password string) error { - rv := c.authenticate(username, password) - switch rv { - case C.SQLITE_ERROR, C.SQLITE_AUTH: - return ErrUnauthorized - case C.SQLITE_OK: - return nil - default: - return c.lastError() - } + return errUserAuthNoLongerSupported } // authenticate provides the actual authentication to SQLite. 
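The hunk above (and the ones that follow) replaces the cgo-backed user-authentication calls with stubs: `Authenticate` now unconditionally returns `errUserAuthNoLongerSupported` instead of calling into the removed userauth extension. A minimal sketch of what a caller would observe after this change, assuming a build that still uses the `sqlite_userauth` tag; the driver name and the `ConnectHook` probe are illustrative and not part of this patch:

```go
package main

import (
	"database/sql"
	"fmt"
	"log"

	sqlite3 "github.com/mattn/go-sqlite3"
)

func main() {
	// Hypothetical probe: with this change, Authenticate no longer touches the
	// userauth extension and always returns an error, even for credentials
	// that previously would have been accepted.
	sql.Register("sqlite3_userauth_probe", &sqlite3.SQLiteDriver{
		ConnectHook: func(conn *sqlite3.SQLiteConn) error {
			if err := conn.Authenticate("admin", "admin"); err != nil {
				fmt.Println("userauth unavailable:", err)
			}
			return nil
		},
	})

	db, err := sql.Open("sqlite3_userauth_probe", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Ping forces a connection to be opened, which runs the ConnectHook above.
	if err := db.Ping(); err != nil {
		log.Fatal(err)
	}
}
```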
@@ -109,17 +59,7 @@ func (c *SQLiteConn) Authenticate(username, password string) error { // C.SQLITE_ERROR (1) // C.SQLITE_AUTH (23) func (c *SQLiteConn) authenticate(username, password string) int { - // Allocate C Variables - cuser := C.CString(username) - cpass := C.CString(password) - - // Free C Variables - defer func() { - C.free(unsafe.Pointer(cuser)) - C.free(unsafe.Pointer(cpass)) - }() - - return int(C._sqlite3_user_authenticate(c.db, cuser, cpass, C.int(len(password)))) + return 1 } // AuthUserAdd can be used (by an admin user only) @@ -131,20 +71,7 @@ func (c *SQLiteConn) authenticate(username, password string) int { // for any ATTACH-ed databases. Any call to AuthUserAdd by a // non-admin user results in an error. func (c *SQLiteConn) AuthUserAdd(username, password string, admin bool) error { - isAdmin := 0 - if admin { - isAdmin = 1 - } - - rv := c.authUserAdd(username, password, isAdmin) - switch rv { - case C.SQLITE_ERROR, C.SQLITE_AUTH: - return ErrAdminRequired - case C.SQLITE_OK: - return nil - default: - return c.lastError() - } + return errUserAuthNoLongerSupported } // authUserAdd enables the User Authentication if not enabled. @@ -162,17 +89,7 @@ func (c *SQLiteConn) AuthUserAdd(username, password string, admin bool) error { // C.SQLITE_ERROR (1) // C.SQLITE_AUTH (23) func (c *SQLiteConn) authUserAdd(username, password string, admin int) int { - // Allocate C Variables - cuser := C.CString(username) - cpass := C.CString(password) - - // Free C Variables - defer func() { - C.free(unsafe.Pointer(cuser)) - C.free(unsafe.Pointer(cpass)) - }() - - return int(C._sqlite3_user_add(c.db, cuser, cpass, C.int(len(password)), C.int(admin))) + return 1 } // AuthUserChange can be used to change a users @@ -181,20 +98,7 @@ func (c *SQLiteConn) authUserAdd(username, password string, admin int) int { // credentials or admin privilege setting. No user may change their own // admin privilege setting. func (c *SQLiteConn) AuthUserChange(username, password string, admin bool) error { - isAdmin := 0 - if admin { - isAdmin = 1 - } - - rv := c.authUserChange(username, password, isAdmin) - switch rv { - case C.SQLITE_ERROR, C.SQLITE_AUTH: - return ErrAdminRequired - case C.SQLITE_OK: - return nil - default: - return c.lastError() - } + return errUserAuthNoLongerSupported } // authUserChange allows to modify a user. @@ -215,17 +119,7 @@ func (c *SQLiteConn) AuthUserChange(username, password string, admin bool) error // C.SQLITE_ERROR (1) // C.SQLITE_AUTH (23) func (c *SQLiteConn) authUserChange(username, password string, admin int) int { - // Allocate C Variables - cuser := C.CString(username) - cpass := C.CString(password) - - // Free C Variables - defer func() { - C.free(unsafe.Pointer(cuser)) - C.free(unsafe.Pointer(cpass)) - }() - - return int(C._sqlite3_user_change(c.db, cuser, cpass, C.int(len(password)), C.int(admin))) + return 1 } // AuthUserDelete can be used (by an admin user only) @@ -234,15 +128,7 @@ func (c *SQLiteConn) authUserChange(username, password string, admin int) int { // the database cannot be converted into a no-authentication-required // database. func (c *SQLiteConn) AuthUserDelete(username string) error { - rv := c.authUserDelete(username) - switch rv { - case C.SQLITE_ERROR, C.SQLITE_AUTH: - return ErrAdminRequired - case C.SQLITE_OK: - return nil - default: - return c.lastError() - } + return errUserAuthNoLongerSupported } // authUserDelete can be used to delete a user. 
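The hunk below applies the same treatment to the management side of the API: `authUserDelete`, `AuthEnabled` and `authEnabled` stop calling into SQLite and return fixed values. In particular, code that used `AuthEnabled` to detect password-protected database files will now always see `false`. A hedged sketch of the check that becomes a no-op; the helper name is hypothetical and the connection is assumed to come from a `ConnectHook` as in the previous sketch:

```go
package userauthcheck

import (
	"fmt"

	sqlite3 "github.com/mattn/go-sqlite3"
)

// reportUserAuth assumes conn was obtained via a ConnectHook and that the
// build still uses the sqlite_userauth tag.
func reportUserAuth(conn *sqlite3.SQLiteConn) {
	// After this change AuthEnabled is hard-wired to false, so this branch is
	// never taken, even for databases that contain an sqlite_user table.
	if conn.AuthEnabled() {
		fmt.Println("database requires user authentication")
		return
	}
	fmt.Println("user authentication not available (userauth support removed)")
}
```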
@@ -258,25 +144,12 @@ func (c *SQLiteConn) AuthUserDelete(username string) error { // C.SQLITE_ERROR (1) // C.SQLITE_AUTH (23) func (c *SQLiteConn) authUserDelete(username string) int { - // Allocate C Variables - cuser := C.CString(username) - - // Free C Variables - defer func() { - C.free(unsafe.Pointer(cuser)) - }() - - return int(C._sqlite3_user_delete(c.db, cuser)) + return 1 } // AuthEnabled checks if the database is protected by user authentication func (c *SQLiteConn) AuthEnabled() (exists bool) { - rv := c.authEnabled() - if rv == 1 { - exists = true - } - - return + return false } // authEnabled perform the actual check for user authentication. @@ -289,7 +162,7 @@ func (c *SQLiteConn) AuthEnabled() (exists bool) { // 0 - Disabled // 1 - Enabled func (c *SQLiteConn) authEnabled() int { - return int(C._sqlite3_auth_enabled(c.db)) + return 0 } // EOF diff --git a/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h b/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h index 935437bb..3a5e0a4e 100644 --- a/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h +++ b/vendor/github.com/mattn/go-sqlite3/sqlite3ext.h @@ -371,6 +371,8 @@ struct sqlite3_api_routines { /* Version 3.44.0 and later */ void *(*get_clientdata)(sqlite3*,const char*); int (*set_clientdata)(sqlite3*, const char*, void*, void(*)(void*)); + /* Version 3.50.0 and later */ + int (*setlk_timeout)(sqlite3*,int,int); }; /* @@ -704,6 +706,8 @@ typedef int (*sqlite3_loadext_entry)( /* Version 3.44.0 and later */ #define sqlite3_get_clientdata sqlite3_api->get_clientdata #define sqlite3_set_clientdata sqlite3_api->set_clientdata +/* Version 3.50.0 and later */ +#define sqlite3_setlk_timeout sqlite3_api->setlk_timeout #endif /* !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) */ #if !defined(SQLITE_CORE) && !defined(SQLITE_OMIT_LOAD_EXTENSION) diff --git a/vendor/github.com/spf13/pflag/README.md b/vendor/github.com/spf13/pflag/README.md index 7eacc5bd..388c4e5e 100644 --- a/vendor/github.com/spf13/pflag/README.md +++ b/vendor/github.com/spf13/pflag/README.md @@ -284,6 +284,33 @@ func main() { } ``` +### Using pflag with go test +`pflag` does not parse the shorthand versions of go test's built-in flags (i.e., those starting with `-test.`). +For more context, see issues [#63](https://github.com/spf13/pflag/issues/63) and [#238](https://github.com/spf13/pflag/issues/238) for more details. + +For example, if you use pflag in your `TestMain` function and call `pflag.Parse()` after defining your custom flags, running a test like this: +```bash +go test /your/tests -run ^YourTest -v --your-test-pflags +``` +will result in the `-v` flag being ignored. This happens because of the way pflag handles flag parsing, skipping over go test's built-in shorthand flags. +To work around this, you can use the `ParseSkippedFlags` function, which ensures that go test's flags are parsed separately using the standard flag package. 
+ +**Example**: You want to parse go test flags that are otherwise ignore by `pflag.Parse()` +```go +import ( + goflag "flag" + flag "github.com/spf13/pflag" +) + +var ip *int = flag.Int("flagname", 1234, "help message for flagname") + +func main() { + flag.CommandLine.AddGoFlagSet(goflag.CommandLine) + flag.ParseSkippedFlags(os.Args[1:], goflag.CommandLine) + flag.Parse() +} +``` + ## More info You can see the full reference documentation of the pflag package diff --git a/vendor/github.com/spf13/pflag/bool_func.go b/vendor/github.com/spf13/pflag/bool_func.go new file mode 100644 index 00000000..83d77afa --- /dev/null +++ b/vendor/github.com/spf13/pflag/bool_func.go @@ -0,0 +1,40 @@ +package pflag + +// -- func Value +type boolfuncValue func(string) error + +func (f boolfuncValue) Set(s string) error { return f(s) } + +func (f boolfuncValue) Type() string { return "boolfunc" } + +func (f boolfuncValue) String() string { return "" } // same behavior as stdlib 'flag' package + +func (f boolfuncValue) IsBoolFlag() bool { return true } + +// BoolFunc defines a func flag with specified name, callback function and usage string. +// +// The callback function will be called every time "--{name}" (or any form that matches the flag) is parsed +// on the command line. +func (f *FlagSet) BoolFunc(name string, usage string, fn func(string) error) { + f.BoolFuncP(name, "", usage, fn) +} + +// BoolFuncP is like BoolFunc, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BoolFuncP(name, shorthand string, usage string, fn func(string) error) { + var val Value = boolfuncValue(fn) + flag := f.VarPF(val, name, shorthand, usage) + flag.NoOptDefVal = "true" +} + +// BoolFunc defines a func flag with specified name, callback function and usage string. +// +// The callback function will be called every time "--{name}" (or any form that matches the flag) is parsed +// on the command line. +func BoolFunc(name string, usage string, fn func(string) error) { + CommandLine.BoolFuncP(name, "", usage, fn) +} + +// BoolFuncP is like BoolFunc, but accepts a shorthand letter that can be used after a single dash. +func BoolFuncP(name, shorthand string, usage string, fn func(string) error) { + CommandLine.BoolFuncP(name, shorthand, usage, fn) +} diff --git a/vendor/github.com/spf13/pflag/count.go b/vendor/github.com/spf13/pflag/count.go index a0b2679f..d49c0143 100644 --- a/vendor/github.com/spf13/pflag/count.go +++ b/vendor/github.com/spf13/pflag/count.go @@ -85,7 +85,7 @@ func (f *FlagSet) CountP(name, shorthand string, usage string) *int { // Count defines a count flag with specified name, default value, and usage string. // The return value is the address of an int variable that stores the value of the flag. -// A count flag will add 1 to its value evey time it is found on the command line +// A count flag will add 1 to its value every time it is found on the command line func Count(name string, usage string) *int { return CommandLine.CountP(name, "", usage) } diff --git a/vendor/github.com/spf13/pflag/errors.go b/vendor/github.com/spf13/pflag/errors.go new file mode 100644 index 00000000..ff11b66b --- /dev/null +++ b/vendor/github.com/spf13/pflag/errors.go @@ -0,0 +1,149 @@ +package pflag + +import "fmt" + +// notExistErrorMessageType specifies which flavor of "flag does not exist" +// is printed by NotExistError. This allows the related errors to be grouped +// under a single NotExistError struct without making a breaking change to +// the error message text. 
+type notExistErrorMessageType int + +const ( + flagNotExistMessage notExistErrorMessageType = iota + flagNotDefinedMessage + flagNoSuchFlagMessage + flagUnknownFlagMessage + flagUnknownShorthandFlagMessage +) + +// NotExistError is the error returned when trying to access a flag that +// does not exist in the FlagSet. +type NotExistError struct { + name string + specifiedShorthands string + messageType notExistErrorMessageType +} + +// Error implements error. +func (e *NotExistError) Error() string { + switch e.messageType { + case flagNotExistMessage: + return fmt.Sprintf("flag %q does not exist", e.name) + + case flagNotDefinedMessage: + return fmt.Sprintf("flag accessed but not defined: %s", e.name) + + case flagNoSuchFlagMessage: + return fmt.Sprintf("no such flag -%v", e.name) + + case flagUnknownFlagMessage: + return fmt.Sprintf("unknown flag: --%s", e.name) + + case flagUnknownShorthandFlagMessage: + c := rune(e.name[0]) + return fmt.Sprintf("unknown shorthand flag: %q in -%s", c, e.specifiedShorthands) + } + + panic(fmt.Errorf("unknown flagNotExistErrorMessageType: %v", e.messageType)) +} + +// GetSpecifiedName returns the name of the flag (without dashes) as it +// appeared in the parsed arguments. +func (e *NotExistError) GetSpecifiedName() string { + return e.name +} + +// GetSpecifiedShortnames returns the group of shorthand arguments +// (without dashes) that the flag appeared within. If the flag was not in a +// shorthand group, this will return an empty string. +func (e *NotExistError) GetSpecifiedShortnames() string { + return e.specifiedShorthands +} + +// ValueRequiredError is the error returned when a flag needs an argument but +// no argument was provided. +type ValueRequiredError struct { + flag *Flag + specifiedName string + specifiedShorthands string +} + +// Error implements error. +func (e *ValueRequiredError) Error() string { + if len(e.specifiedShorthands) > 0 { + c := rune(e.specifiedName[0]) + return fmt.Sprintf("flag needs an argument: %q in -%s", c, e.specifiedShorthands) + } + + return fmt.Sprintf("flag needs an argument: --%s", e.specifiedName) +} + +// GetFlag returns the flag for which the error occurred. +func (e *ValueRequiredError) GetFlag() *Flag { + return e.flag +} + +// GetSpecifiedName returns the name of the flag (without dashes) as it +// appeared in the parsed arguments. +func (e *ValueRequiredError) GetSpecifiedName() string { + return e.specifiedName +} + +// GetSpecifiedShortnames returns the group of shorthand arguments +// (without dashes) that the flag appeared within. If the flag was not in a +// shorthand group, this will return an empty string. +func (e *ValueRequiredError) GetSpecifiedShortnames() string { + return e.specifiedShorthands +} + +// InvalidValueError is the error returned when an invalid value is used +// for a flag. +type InvalidValueError struct { + flag *Flag + value string + cause error +} + +// Error implements error. +func (e *InvalidValueError) Error() string { + flag := e.flag + var flagName string + if flag.Shorthand != "" && flag.ShorthandDeprecated == "" { + flagName = fmt.Sprintf("-%s, --%s", flag.Shorthand, flag.Name) + } else { + flagName = fmt.Sprintf("--%s", flag.Name) + } + return fmt.Sprintf("invalid argument %q for %q flag: %v", e.value, flagName, e.cause) +} + +// Unwrap implements errors.Unwrap. +func (e *InvalidValueError) Unwrap() error { + return e.cause +} + +// GetFlag returns the flag for which the error occurred. 
+func (e *InvalidValueError) GetFlag() *Flag { + return e.flag +} + +// GetValue returns the invalid value that was provided. +func (e *InvalidValueError) GetValue() string { + return e.value +} + +// InvalidSyntaxError is the error returned when a bad flag name is passed on +// the command line. +type InvalidSyntaxError struct { + specifiedFlag string +} + +// Error implements error. +func (e *InvalidSyntaxError) Error() string { + return fmt.Sprintf("bad flag syntax: %s", e.specifiedFlag) +} + +// GetSpecifiedName returns the exact flag (with dashes) as it +// appeared in the parsed arguments. +func (e *InvalidSyntaxError) GetSpecifiedFlag() string { + return e.specifiedFlag +} diff --git a/vendor/github.com/spf13/pflag/flag.go b/vendor/github.com/spf13/pflag/flag.go index 7c058de3..d4dfbc5e 100644 --- a/vendor/github.com/spf13/pflag/flag.go +++ b/vendor/github.com/spf13/pflag/flag.go @@ -27,23 +27,32 @@ unaffected. Define flags using flag.String(), Bool(), Int(), etc. This declares an integer flag, -flagname, stored in the pointer ip, with type *int. + var ip = flag.Int("flagname", 1234, "help message for flagname") + If you like, you can bind the flag to a variable using the Var() functions. + var flagvar int func init() { flag.IntVar(&flagvar, "flagname", 1234, "help message for flagname") } + Or you can create custom flags that satisfy the Value interface (with pointer receivers) and couple them to flag parsing by + flag.Var(&flagVal, "name", "help message for flagname") + For such flags, the default value is just the initial value of the variable. After all flags are defined, call + flag.Parse() + to parse the command line into the defined flags. Flags may then be used directly. If you're using the flags themselves, they are all pointers; if you bind to variables, they're values. + fmt.Println("ip has value ", *ip) fmt.Println("flagvar has value ", flagvar) @@ -54,22 +63,26 @@ The arguments are indexed from 0 through flag.NArg()-1. The pflag package also defines some new functions that are not in flag, that give one-letter shorthands for flags. You can use these by appending 'P' to the name of any function that defines a flag. + var ip = flag.IntP("flagname", "f", 1234, "help message") var flagvar bool func init() { flag.BoolVarP(&flagvar, "boolname", "b", true, "help message") } flag.VarP(&flagval, "varname", "v", "help message") + Shorthand letters can be used with single dashes on the command line. Boolean shorthand flags can be combined with other shorthand flags. Command line flag syntax: + --flag // boolean flags only --flag=x Unlike the flag package, a single dash before an option means something different than a double dash. Single dashes signify a series of shorthand letters for flags. All but the last shorthand letter must be boolean flags. 
+ // boolean flags -f -abc @@ -381,7 +394,7 @@ func (f *FlagSet) lookup(name NormalizedName) *Flag { func (f *FlagSet) getFlagType(name string, ftype string, convFunc func(sval string) (interface{}, error)) (interface{}, error) { flag := f.Lookup(name) if flag == nil { - err := fmt.Errorf("flag accessed but not defined: %s", name) + err := &NotExistError{name: name, messageType: flagNotDefinedMessage} return nil, err } @@ -411,7 +424,7 @@ func (f *FlagSet) ArgsLenAtDash() int { func (f *FlagSet) MarkDeprecated(name string, usageMessage string) error { flag := f.Lookup(name) if flag == nil { - return fmt.Errorf("flag %q does not exist", name) + return &NotExistError{name: name, messageType: flagNotExistMessage} } if usageMessage == "" { return fmt.Errorf("deprecated message for flag %q must be set", name) @@ -427,7 +440,7 @@ func (f *FlagSet) MarkDeprecated(name string, usageMessage string) error { func (f *FlagSet) MarkShorthandDeprecated(name string, usageMessage string) error { flag := f.Lookup(name) if flag == nil { - return fmt.Errorf("flag %q does not exist", name) + return &NotExistError{name: name, messageType: flagNotExistMessage} } if usageMessage == "" { return fmt.Errorf("deprecated message for flag %q must be set", name) @@ -441,7 +454,7 @@ func (f *FlagSet) MarkShorthandDeprecated(name string, usageMessage string) erro func (f *FlagSet) MarkHidden(name string) error { flag := f.Lookup(name) if flag == nil { - return fmt.Errorf("flag %q does not exist", name) + return &NotExistError{name: name, messageType: flagNotExistMessage} } flag.Hidden = true return nil @@ -464,18 +477,16 @@ func (f *FlagSet) Set(name, value string) error { normalName := f.normalizeFlagName(name) flag, ok := f.formal[normalName] if !ok { - return fmt.Errorf("no such flag -%v", name) + return &NotExistError{name: name, messageType: flagNoSuchFlagMessage} } err := flag.Value.Set(value) if err != nil { - var flagName string - if flag.Shorthand != "" && flag.ShorthandDeprecated == "" { - flagName = fmt.Sprintf("-%s, --%s", flag.Shorthand, flag.Name) - } else { - flagName = fmt.Sprintf("--%s", flag.Name) + return &InvalidValueError{ + flag: flag, + value: value, + cause: err, } - return fmt.Errorf("invalid argument %q for %q flag: %v", value, flagName, err) } if !flag.Changed { @@ -501,7 +512,7 @@ func (f *FlagSet) SetAnnotation(name, key string, values []string) error { normalName := f.normalizeFlagName(name) flag, ok := f.formal[normalName] if !ok { - return fmt.Errorf("no such flag -%v", name) + return &NotExistError{name: name, messageType: flagNoSuchFlagMessage} } if flag.Annotations == nil { flag.Annotations = map[string][]string{} @@ -538,7 +549,7 @@ func (f *FlagSet) PrintDefaults() { func (f *Flag) defaultIsZeroValue() bool { switch f.Value.(type) { case boolFlag: - return f.DefValue == "false" + return f.DefValue == "false" || f.DefValue == "" case *durationValue: // Beginning in Go 1.7, duration zero values are "0s" return f.DefValue == "0" || f.DefValue == "0s" @@ -551,7 +562,7 @@ func (f *Flag) defaultIsZeroValue() bool { case *intSliceValue, *stringSliceValue, *stringArrayValue: return f.DefValue == "[]" default: - switch f.Value.String() { + switch f.DefValue { case "false": return true case "": @@ -588,8 +599,10 @@ func UnquoteUsage(flag *Flag) (name string, usage string) { name = flag.Value.Type() switch name { - case "bool": + case "bool", "boolfunc": name = "" + case "func": + name = "value" case "float64": name = "float" case "int64": @@ -707,7 +720,7 @@ func (f *FlagSet) 
FlagUsagesWrapped(cols int) string { switch flag.Value.Type() { case "string": line += fmt.Sprintf("[=\"%s\"]", flag.NoOptDefVal) - case "bool": + case "bool", "boolfunc": if flag.NoOptDefVal != "true" { line += fmt.Sprintf("[=%s]", flag.NoOptDefVal) } @@ -911,10 +924,9 @@ func VarP(value Value, name, shorthand, usage string) { CommandLine.VarP(value, name, shorthand, usage) } -// failf prints to standard error a formatted error and usage message and +// fail prints an error message and usage message to standard error and // returns the error. -func (f *FlagSet) failf(format string, a ...interface{}) error { - err := fmt.Errorf(format, a...) +func (f *FlagSet) fail(err error) error { if f.errorHandling != ContinueOnError { fmt.Fprintln(f.Output(), err) f.usage() @@ -934,9 +946,9 @@ func (f *FlagSet) usage() { } } -//--unknown (args will be empty) -//--unknown --next-flag ... (args will be --next-flag ...) -//--unknown arg ... (args will be arg ...) +// --unknown (args will be empty) +// --unknown --next-flag ... (args will be --next-flag ...) +// --unknown arg ... (args will be arg ...) func stripUnknownFlagValue(args []string) []string { if len(args) == 0 { //--unknown @@ -960,7 +972,7 @@ func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []strin a = args name := s[2:] if len(name) == 0 || name[0] == '-' || name[0] == '=' { - err = f.failf("bad flag syntax: %s", s) + err = f.fail(&InvalidSyntaxError{specifiedFlag: s}) return } @@ -982,7 +994,7 @@ func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []strin return stripUnknownFlagValue(a), nil default: - err = f.failf("unknown flag: --%s", name) + err = f.fail(&NotExistError{name: name, messageType: flagUnknownFlagMessage}) return } } @@ -1000,13 +1012,16 @@ func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []strin a = a[1:] } else { // '--flag' (arg was required) - err = f.failf("flag needs an argument: %s", s) + err = f.fail(&ValueRequiredError{ + flag: flag, + specifiedName: name, + }) return } err = fn(flag, value) if err != nil { - f.failf(err.Error()) + f.fail(err) } return } @@ -1014,7 +1029,7 @@ func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []strin func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parseFunc) (outShorts string, outArgs []string, err error) { outArgs = args - if strings.HasPrefix(shorthands, "test.") { + if isGotestShorthandFlag(shorthands) { return } @@ -1039,7 +1054,11 @@ func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parse outArgs = stripUnknownFlagValue(outArgs) return default: - err = f.failf("unknown shorthand flag: %q in -%s", c, shorthands) + err = f.fail(&NotExistError{ + name: string(c), + specifiedShorthands: shorthands, + messageType: flagUnknownShorthandFlagMessage, + }) return } } @@ -1062,7 +1081,11 @@ func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parse outArgs = args[1:] } else { // '-f' (arg was required) - err = f.failf("flag needs an argument: %q in -%s", c, shorthands) + err = f.fail(&ValueRequiredError{ + flag: flag, + specifiedName: string(c), + specifiedShorthands: shorthands, + }) return } @@ -1072,7 +1095,7 @@ func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parse err = fn(flag, value) if err != nil { - f.failf(err.Error()) + f.fail(err) } return } @@ -1135,7 +1158,7 @@ func (f *FlagSet) Parse(arguments []string) error { } f.parsed = true - if len(arguments) < 0 { + if len(arguments) == 0 { 
return nil } diff --git a/vendor/github.com/spf13/pflag/func.go b/vendor/github.com/spf13/pflag/func.go new file mode 100644 index 00000000..9f4d88f2 --- /dev/null +++ b/vendor/github.com/spf13/pflag/func.go @@ -0,0 +1,37 @@ +package pflag + +// -- func Value +type funcValue func(string) error + +func (f funcValue) Set(s string) error { return f(s) } + +func (f funcValue) Type() string { return "func" } + +func (f funcValue) String() string { return "" } // same behavior as stdlib 'flag' package + +// Func defines a func flag with specified name, callback function and usage string. +// +// The callback function will be called every time "--{name}={value}" (or equivalent) is +// parsed on the command line, with "{value}" as an argument. +func (f *FlagSet) Func(name string, usage string, fn func(string) error) { + f.FuncP(name, "", usage, fn) +} + +// FuncP is like Func, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) FuncP(name string, shorthand string, usage string, fn func(string) error) { + var val Value = funcValue(fn) + f.VarP(val, name, shorthand, usage) +} + +// Func defines a func flag with specified name, callback function and usage string. +// +// The callback function will be called every time "--{name}={value}" (or equivalent) is +// parsed on the command line, with "{value}" as an argument. +func Func(name string, usage string, fn func(string) error) { + CommandLine.FuncP(name, "", usage, fn) +} + +// FuncP is like Func, but accepts a shorthand letter that can be used after a single dash. +func FuncP(name, shorthand string, usage string, fn func(string) error) { + CommandLine.FuncP(name, shorthand, usage, fn) +} diff --git a/vendor/github.com/spf13/pflag/golangflag.go b/vendor/github.com/spf13/pflag/golangflag.go index d3dd72b7..f563907e 100644 --- a/vendor/github.com/spf13/pflag/golangflag.go +++ b/vendor/github.com/spf13/pflag/golangflag.go @@ -10,6 +10,15 @@ import ( "strings" ) +// go test flags prefixes +func isGotestFlag(flag string) bool { + return strings.HasPrefix(flag, "-test.") +} + +func isGotestShorthandFlag(flag string) bool { + return strings.HasPrefix(flag, "test.") +} + // flagValueWrapper implements pflag.Value around a flag.Value. The main // difference here is the addition of the Type method that returns a string // name of the type. As this is generally unknown, we approximate that with @@ -103,3 +112,16 @@ func (f *FlagSet) AddGoFlagSet(newSet *goflag.FlagSet) { } f.addedGoFlagSets = append(f.addedGoFlagSets, newSet) } + +// ParseSkippedFlags explicitly Parses go test flags (i.e. the one starting with '-test.') with goflag.Parse(), +// since by default those are skipped by pflag.Parse(). 
+// Typical usage example: `ParseGoTestFlags(os.Args[1:], goflag.CommandLine)` +func ParseSkippedFlags(osArgs []string, goFlagSet *goflag.FlagSet) error { + var skippedFlags []string + for _, f := range osArgs { + if isGotestFlag(f) { + skippedFlags = append(skippedFlags, f) + } + } + return goFlagSet.Parse(skippedFlags) +} diff --git a/vendor/github.com/spf13/pflag/ipnet_slice.go b/vendor/github.com/spf13/pflag/ipnet_slice.go index 6b541aa8..c6e89da1 100644 --- a/vendor/github.com/spf13/pflag/ipnet_slice.go +++ b/vendor/github.com/spf13/pflag/ipnet_slice.go @@ -73,7 +73,7 @@ func (s *ipNetSliceValue) String() string { func ipNetSliceConv(val string) (interface{}, error) { val = strings.Trim(val, "[]") - // Emtpy string would cause a slice with one (empty) entry + // Empty string would cause a slice with one (empty) entry if len(val) == 0 { return []net.IPNet{}, nil } diff --git a/vendor/github.com/spf13/pflag/text.go b/vendor/github.com/spf13/pflag/text.go new file mode 100644 index 00000000..886d5a3d --- /dev/null +++ b/vendor/github.com/spf13/pflag/text.go @@ -0,0 +1,81 @@ +package pflag + +import ( + "encoding" + "fmt" + "reflect" +) + +// following is copied from go 1.23.4 flag.go +type textValue struct{ p encoding.TextUnmarshaler } + +func newTextValue(val encoding.TextMarshaler, p encoding.TextUnmarshaler) textValue { + ptrVal := reflect.ValueOf(p) + if ptrVal.Kind() != reflect.Ptr { + panic("variable value type must be a pointer") + } + defVal := reflect.ValueOf(val) + if defVal.Kind() == reflect.Ptr { + defVal = defVal.Elem() + } + if defVal.Type() != ptrVal.Type().Elem() { + panic(fmt.Sprintf("default type does not match variable type: %v != %v", defVal.Type(), ptrVal.Type().Elem())) + } + ptrVal.Elem().Set(defVal) + return textValue{p} +} + +func (v textValue) Set(s string) error { + return v.p.UnmarshalText([]byte(s)) +} + +func (v textValue) Get() interface{} { + return v.p +} + +func (v textValue) String() string { + if m, ok := v.p.(encoding.TextMarshaler); ok { + if b, err := m.MarshalText(); err == nil { + return string(b) + } + } + return "" +} + +//end of copy + +func (v textValue) Type() string { + return reflect.ValueOf(v.p).Type().Name() +} + +// GetText set out, which implements encoding.UnmarshalText, to the value of a flag with given name +func (f *FlagSet) GetText(name string, out encoding.TextUnmarshaler) error { + flag := f.Lookup(name) + if flag == nil { + return fmt.Errorf("flag accessed but not defined: %s", name) + } + if flag.Value.Type() != reflect.TypeOf(out).Name() { + return fmt.Errorf("trying to get %s value of flag of type %s", reflect.TypeOf(out).Name(), flag.Value.Type()) + } + return out.UnmarshalText([]byte(flag.Value.String())) +} + +// TextVar defines a flag with a specified name, default value, and usage string. The argument p must be a pointer to a variable that will hold the value of the flag, and p must implement encoding.TextUnmarshaler. If the flag is used, the flag value will be passed to p's UnmarshalText method. The type of the default value must be the same as the type of p. +func (f *FlagSet) TextVar(p encoding.TextUnmarshaler, name string, value encoding.TextMarshaler, usage string) { + f.VarP(newTextValue(value, p), name, "", usage) +} + +// TextVarP is like TextVar, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) TextVarP(p encoding.TextUnmarshaler, name, shorthand string, value encoding.TextMarshaler, usage string) { + f.VarP(newTextValue(value, p), name, shorthand, usage) +} + +// TextVar defines a flag with a specified name, default value, and usage string. The argument p must be a pointer to a variable that will hold the value of the flag, and p must implement encoding.TextUnmarshaler. If the flag is used, the flag value will be passed to p's UnmarshalText method. The type of the default value must be the same as the type of p. +func TextVar(p encoding.TextUnmarshaler, name string, value encoding.TextMarshaler, usage string) { + CommandLine.VarP(newTextValue(value, p), name, "", usage) +} + +// TextVarP is like TextVar, but accepts a shorthand letter that can be used after a single dash. +func TextVarP(p encoding.TextUnmarshaler, name, shorthand string, value encoding.TextMarshaler, usage string) { + CommandLine.VarP(newTextValue(value, p), name, shorthand, usage) +} diff --git a/vendor/github.com/spf13/pflag/time.go b/vendor/github.com/spf13/pflag/time.go new file mode 100644 index 00000000..dc024807 --- /dev/null +++ b/vendor/github.com/spf13/pflag/time.go @@ -0,0 +1,118 @@ +package pflag + +import ( + "fmt" + "strings" + "time" +) + +// TimeValue adapts time.Time for use as a flag. +type timeValue struct { + *time.Time + formats []string +} + +func newTimeValue(val time.Time, p *time.Time, formats []string) *timeValue { + *p = val + return &timeValue{ + Time: p, + formats: formats, + } +} + +// Set time.Time value from string based on accepted formats. +func (d *timeValue) Set(s string) error { + s = strings.TrimSpace(s) + for _, f := range d.formats { + v, err := time.Parse(f, s) + if err != nil { + continue + } + *d.Time = v + return nil + } + + formatsString := "" + for i, f := range d.formats { + if i > 0 { + formatsString += ", " + } + formatsString += fmt.Sprintf("`%s`", f) + } + + return fmt.Errorf("invalid time format `%s` must be one of: %s", s, formatsString) +} + +// Type name for time.Time flags. +func (d *timeValue) Type() string { + return "time" +} + +func (d *timeValue) String() string { return d.Time.Format(time.RFC3339Nano) } + +// GetTime return the time value of a flag with the given name +func (f *FlagSet) GetTime(name string) (time.Time, error) { + flag := f.Lookup(name) + if flag == nil { + err := fmt.Errorf("flag accessed but not defined: %s", name) + return time.Time{}, err + } + + if flag.Value.Type() != "time" { + err := fmt.Errorf("trying to get %s value of flag of type %s", "time", flag.Value.Type()) + return time.Time{}, err + } + + val, ok := flag.Value.(*timeValue) + if !ok { + return time.Time{}, fmt.Errorf("value %s is not a time", flag.Value) + } + + return *val.Time, nil +} + +// TimeVar defines a time.Time flag with specified name, default value, and usage string. +// The argument p points to a time.Time variable in which to store the value of the flag. +func (f *FlagSet) TimeVar(p *time.Time, name string, value time.Time, formats []string, usage string) { + f.TimeVarP(p, name, "", value, formats, usage) +} + +// TimeVarP is like TimeVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) TimeVarP(p *time.Time, name, shorthand string, value time.Time, formats []string, usage string) { + f.VarP(newTimeValue(value, p, formats), name, shorthand, usage) +} + +// TimeVar defines a time.Time flag with specified name, default value, and usage string. 
+// The argument p points to a time.Time variable in which to store the value of the flag. +func TimeVar(p *time.Time, name string, value time.Time, formats []string, usage string) { + CommandLine.TimeVarP(p, name, "", value, formats, usage) +} + +// TimeVarP is like TimeVar, but accepts a shorthand letter that can be used after a single dash. +func TimeVarP(p *time.Time, name, shorthand string, value time.Time, formats []string, usage string) { + CommandLine.VarP(newTimeValue(value, p, formats), name, shorthand, usage) +} + +// Time defines a time.Time flag with specified name, default value, and usage string. +// The return value is the address of a time.Time variable that stores the value of the flag. +func (f *FlagSet) Time(name string, value time.Time, formats []string, usage string) *time.Time { + return f.TimeP(name, "", value, formats, usage) +} + +// TimeP is like Time, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) TimeP(name, shorthand string, value time.Time, formats []string, usage string) *time.Time { + p := new(time.Time) + f.TimeVarP(p, name, shorthand, value, formats, usage) + return p +} + +// Time defines a time.Time flag with specified name, default value, and usage string. +// The return value is the address of a time.Time variable that stores the value of the flag. +func Time(name string, value time.Time, formats []string, usage string) *time.Time { + return CommandLine.TimeP(name, "", value, formats, usage) +} + +// TimeP is like Time, but accepts a shorthand letter that can be used after a single dash. +func TimeP(name, shorthand string, value time.Time, formats []string, usage string) *time.Time { + return CommandLine.TimeP(name, shorthand, value, formats, usage) +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 257d4f95..f090ee01 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -55,7 +55,7 @@ github.com/go-openapi/analysis/internal/flatten/sortref # github.com/go-openapi/errors v0.22.2 ## explicit; go 1.20 github.com/go-openapi/errors -# github.com/go-openapi/jsonpointer v0.21.1 +# github.com/go-openapi/jsonpointer v0.21.2 ## explicit; go 1.20 github.com/go-openapi/jsonpointer # github.com/go-openapi/jsonreference v0.21.0 @@ -160,7 +160,7 @@ github.com/mattn/go-isatty # github.com/mattn/go-runewidth v0.0.16 ## explicit; go 1.9 github.com/mattn/go-runewidth -# github.com/mattn/go-sqlite3 v1.14.28 +# github.com/mattn/go-sqlite3 v1.14.31 ## explicit; go 1.19 github.com/mattn/go-sqlite3 # github.com/minio/sio v0.4.1 @@ -223,7 +223,7 @@ github.com/rivo/uniseg # github.com/spf13/cobra v1.9.1 ## explicit; go 1.15 github.com/spf13/cobra -# github.com/spf13/pflag v1.0.6 +# github.com/spf13/pflag v1.0.7 ## explicit; go 1.12 github.com/spf13/pflag # github.com/stretchr/objx v0.5.2 diff --git a/webapp/.env.development b/webapp/.env.development new file mode 100644 index 00000000..17e74eaa --- /dev/null +++ b/webapp/.env.development @@ -0,0 +1,2 @@ +VITE_GARM_API_URL=http://localhost:9997 +NODE_ENV=development diff --git a/webapp/.env.example b/webapp/.env.example new file mode 100644 index 00000000..2e2f8dbb --- /dev/null +++ b/webapp/.env.example @@ -0,0 +1,8 @@ +# Development Environment Variables + +# GARM Backend API URL (for development only) +# When set, the frontend will connect to this URL instead of using proxy +# VITE_GARM_API_URL=http://localhost:9997 + +# Node Environment (automatically set by npm scripts) +# NODE_ENV=development \ No newline at end of file diff --git a/webapp/DEV_SETUP.md 
b/webapp/DEV_SETUP.md
new file mode 100644
index 00000000..2eb778f3
--- /dev/null
+++ b/webapp/DEV_SETUP.md
@@ -0,0 +1,79 @@
+# Development Setup
+
+The web app can be started with the `npm run dev` command, which starts a development server with hot reloading. For this to work properly, there are a number of prerequisites you need to have and some GARM settings you need to tweak.
+
+## Prerequisites
+
+To have a full development setup, you will need the following prerequisites:
+
+- **Node.js 24+** and **npm**
+- **Go 1.24+** (for building the GARM backend)
+- **openapi-generator-cli** in your PATH (for API client generation)
+
+The `openapi-generator-cli` also requires Java to be installed. If you're running on Ubuntu, running:
+
+```bash
+sudo apt-get install default-jre
+```
+
+should be enough. Other distros should have an equivalent package available.
+
+>[!NOTE]
+>If you don't need to change the web app, you don't need to rebuild it. There is already a pre-built version in the repo.
+
+## Necessary GARM settings
+
+GARM has strict origin checks for websockets and API calls. To allow your local development server to communicate with the GARM backend, you need to configure the following settings:
+
+```toml
+[apiserver]
+cors_origins = ["https://garm.example.com", "http://127.0.0.1:5173"]
+```
+
+>[!IMPORTANT]
+> You must include the port in the origin URL.
+
+>[!IMPORTANT]
+> Omitting the `cors_origins` option will automatically fall back to same-host origin checks.
+
+## Development Server
+
+Your GARM server can be started and hosted anywhere. As long as you set the proper `cors_origins` URLs, your web UI development server can run separately from your GARM server. To point the web app at the GARM server, you will need to create a `.env.development` file in the `webapp/` directory:
+
+```bash
+cd /home/ubuntu/garm/webapp
+echo "VITE_GARM_API_URL=http://localhost:9997" > .env.development
+echo "NODE_ENV=development" >> .env.development
+npm run dev
+```
+
+## Asset Management
+
+During development:
+- SVG icons are served from `static/assets/`
+- Favicons are served from `static/`
+- All static assets are copied from `assets/assets/` to `static/assets/`
+
+## Building for Production
+
+For production deployments, the web app is embedded into the GARM binary. You don't need to serve it separately. To build the web app and embed it into the binary, run the following two commands:
+
+```bash
+# Build the static webapp
+make build-webui
+# Build the garm binary with the webapp embedded
+make build
+```
+
+This creates the production build with:
+- Base path set to `/ui`
+- All assets embedded for Go to serve
+- Optimized bundles
+
+>[!IMPORTANT]
+>The web UI is an optional feature in GARM. For the `/ui` URL to be available, you will need to enable it in the garm config file under:
+>```toml
+>[apiserver.webui]
+> enable=true
+>```
+>See the sample config file in the `testdata/config.toml` file.
\ No newline at end of file
diff --git a/webapp/README.md b/webapp/README.md
new file mode 100644
index 00000000..4b63b2a8
--- /dev/null
+++ b/webapp/README.md
@@ -0,0 +1,102 @@
+# GARM SPA (SvelteKit)
+
+This is a Single Page Application (SPA) implementation of the GARM web interface using SvelteKit.
+
+## Features
+
+- **Lightweight**: Uses SvelteKit for minimal bundle size and fast performance
+- **Modern**: TypeScript-first development with full type safety
+- **Responsive**: Mobile-first design using Tailwind CSS
+- **Real-time**: WebSocket integration for live updates
+- **API-driven**: Uses the existing GARM REST API endpoints
+
+### Quick Start
+
+1. 
**Clone the repository** (if not already done)
+
+```bash
+git clone https://github.com/cloudbase/garm.git
+cd garm
+```
+
+2. **Build and test GARM with embedded webapp**
+
+```bash
+# You can skip this command if you made no changes to the webapp.
+make build-webui
+# builds the binary, with the web UI embedded.
+make build
+```
+
+Make sure you enable the webui in the config:
+
+```toml
+[apiserver.webui]
+ enable=true
+```
+
+3. **Access the webapp**
+   - Navigate to `http://localhost:9997/ui/` (or your configured FQDN and port)
+
+### Development Workflow
+
+See the [DEV_SETUP.md](DEV_SETUP.md) file.
+
+### Git Workflow
+
+**DO NOT commit** the following directories:
+- `webapp/node_modules/` - Dependencies (managed by package-lock.json)
+- `webapp/.svelte-kit/` - Build cache and generated files
+- `webapp/build/` - Production build output
+
+These are already included in `.gitignore`. Only commit source files in `webapp/src/` and configuration files.
+
+### API Client Generation
+
+The webapp uses auto-generated TypeScript clients from the GARM OpenAPI spec using `go generate`. To regenerate the clients, mocks and everything else, run:
+
+```bash
+go generate ./...
+```
+
+Run this command in the root folder of the project.
+
+>[!NOTE]
+> See [DEV_SETUP.md](DEV_SETUP.md) for prerequisites, before you try to generate the files.
+
+### Asset Serving
+
+The webapp is embedded using Go's `embed` package in `webapp/assets/assets.go`:
+
+```go
+//go:embed all:*
+var EmbeddedSPA embed.FS
+```
+
+This allows GARM to serve the entire webapp with zero external dependencies. The webapp assets are compiled into the Go binary at build time.
+
+## Running GARM behind a reverse proxy
+
+In production, GARM will serve the web UI and assets from the embedded files inside the binary. The web UI also relies on the [events](/doc/events.md) API for real-time updates.
+
+To have a fully working experience, you will need to configure your reverse proxy to allow websocket upgrades. For an `nginx` example, see [the sample config in the testdata folder](/testdata/nginx-server.conf).
+
+Additionally, in production you can also override the default web UI that is embedded in GARM, without updating the garm binary. To do that, build the webapp, place it in the document root of `nginx` and create a new `location /ui` config in nginx. Something like the following should work:
+
+```
+    # Place this before the proxy_pass location
+    location ~ ^/ui(/.*)?$ {
+        root /var/www/html/garm-webui/;
+    }
+
+    location / {
+        proxy_set_header X-Forwarded-For $remote_addr;
+        proxy_set_header X-Forwarded-Host $http_host;
+
+        proxy_pass http://garm_backend;
+        proxy_set_header Host $host;
+        proxy_redirect off;
+    }
+```
+
+This should allow you to override the default web UI embedded in GARM without updating the GARM binary.
diff --git a/webapp/assets/_app/env.js b/webapp/assets/_app/env.js
new file mode 100644
index 00000000..f5427da6
--- /dev/null
+++ b/webapp/assets/_app/env.js
@@ -0,0 +1 @@
+export const env={}
\ No newline at end of file
diff --git a/webapp/assets/_app/immutable/assets/0.BPrCR_r7.css b/webapp/assets/_app/immutable/assets/0.BPrCR_r7.css
new file mode 100644
index 00000000..17f0a1ed
--- /dev/null
+++ b/webapp/assets/_app/immutable/assets/0.BPrCR_r7.css
@@ -0,0 +1 @@
+/*! 
tailwindcss v4.1.11 | MIT License | https://tailwindcss.com */@layer properties{@supports ((-webkit-hyphens:none) and (not (margin-trim:inline))) or ((-moz-orient:inline) and (not (color:rgb(from red r g b)))){*,:before,:after,::backdrop{--tw-translate-x:0;--tw-translate-y:0;--tw-translate-z:0;--tw-rotate-x:initial;--tw-rotate-y:initial;--tw-rotate-z:initial;--tw-skew-x:initial;--tw-skew-y:initial;--tw-space-y-reverse:0;--tw-space-x-reverse:0;--tw-divide-y-reverse:0;--tw-border-style:solid;--tw-gradient-position:initial;--tw-gradient-from:#0000;--tw-gradient-via:#0000;--tw-gradient-to:#0000;--tw-gradient-stops:initial;--tw-gradient-via-stops:initial;--tw-gradient-from-position:0%;--tw-gradient-via-position:50%;--tw-gradient-to-position:100%;--tw-leading:initial;--tw-font-weight:initial;--tw-tracking:initial;--tw-shadow:0 0 #0000;--tw-shadow-color:initial;--tw-shadow-alpha:100%;--tw-inset-shadow:0 0 #0000;--tw-inset-shadow-color:initial;--tw-inset-shadow-alpha:100%;--tw-ring-color:initial;--tw-ring-shadow:0 0 #0000;--tw-inset-ring-color:initial;--tw-inset-ring-shadow:0 0 #0000;--tw-ring-inset:initial;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-offset-shadow:0 0 #0000;--tw-blur:initial;--tw-brightness:initial;--tw-contrast:initial;--tw-grayscale:initial;--tw-hue-rotate:initial;--tw-invert:initial;--tw-opacity:initial;--tw-saturate:initial;--tw-sepia:initial;--tw-drop-shadow:initial;--tw-drop-shadow-color:initial;--tw-drop-shadow-alpha:100%;--tw-drop-shadow-size:initial;--tw-duration:initial;--tw-ease:initial;--tw-scale-x:1;--tw-scale-y:1;--tw-scale-z:1}}}@layer theme{:root,:host{--font-sans:ui-sans-serif,system-ui,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";--font-mono:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;--color-red-50:oklch(97.1% .013 17.38);--color-red-100:oklch(93.6% .032 17.717);--color-red-200:oklch(88.5% .062 18.334);--color-red-300:oklch(80.8% .114 19.571);--color-red-400:oklch(70.4% .191 22.216);--color-red-500:oklch(63.7% .237 25.331);--color-red-600:oklch(57.7% .245 27.325);--color-red-700:oklch(50.5% .213 27.518);--color-red-800:oklch(44.4% .177 26.899);--color-red-900:oklch(39.6% .141 25.723);--color-orange-50:oklch(98% .016 73.684);--color-orange-400:oklch(75% .183 55.934);--color-orange-500:oklch(70.5% .213 47.604);--color-orange-600:oklch(64.6% .222 41.116);--color-orange-700:oklch(55.3% .195 38.402);--color-yellow-50:oklch(98.7% .026 102.212);--color-yellow-100:oklch(97.3% .071 103.193);--color-yellow-200:oklch(94.5% .129 101.54);--color-yellow-300:oklch(90.5% .182 98.111);--color-yellow-400:oklch(85.2% .199 91.936);--color-yellow-500:oklch(79.5% .184 86.047);--color-yellow-600:oklch(68.1% .162 75.834);--color-yellow-700:oklch(55.4% .135 66.442);--color-yellow-800:oklch(47.6% .114 61.907);--color-yellow-900:oklch(42.1% .095 57.708);--color-green-50:oklch(98.2% .018 155.826);--color-green-100:oklch(96.2% .044 156.743);--color-green-200:oklch(92.5% .084 155.995);--color-green-300:oklch(87.1% .15 154.449);--color-green-400:oklch(79.2% .209 151.711);--color-green-500:oklch(72.3% .219 149.579);--color-green-600:oklch(62.7% .194 149.214);--color-green-700:oklch(52.7% .154 150.069);--color-green-800:oklch(44.8% .119 151.328);--color-green-900:oklch(39.3% .095 152.535);--color-blue-50:oklch(97% .014 254.604);--color-blue-100:oklch(93.2% .032 255.585);--color-blue-200:oklch(88.2% .059 254.128);--color-blue-300:oklch(80.9% .105 251.813);--color-blue-400:oklch(70.7% .165 
254.624);--color-blue-500:oklch(62.3% .214 259.815);--color-blue-600:oklch(54.6% .245 262.881);--color-blue-700:oklch(48.8% .243 264.376);--color-blue-800:oklch(42.4% .199 265.638);--color-blue-900:oklch(37.9% .146 265.522);--color-indigo-300:oklch(78.5% .115 274.713);--color-indigo-400:oklch(67.3% .182 276.935);--color-indigo-500:oklch(58.5% .233 277.117);--color-indigo-600:oklch(51.1% .262 276.966);--color-indigo-900:oklch(35.9% .144 278.697);--color-purple-50:oklch(97.7% .014 308.299);--color-purple-400:oklch(71.4% .203 305.504);--color-purple-500:oklch(62.7% .265 303.9);--color-purple-600:oklch(55.8% .288 302.321);--color-purple-700:oklch(49.6% .265 301.924);--color-gray-50:oklch(98.5% .002 247.839);--color-gray-100:oklch(96.7% .003 264.542);--color-gray-200:oklch(92.8% .006 264.531);--color-gray-300:oklch(87.2% .01 258.338);--color-gray-400:oklch(70.7% .022 261.325);--color-gray-500:oklch(55.1% .027 264.364);--color-gray-600:oklch(44.6% .03 256.802);--color-gray-700:oklch(37.3% .034 259.733);--color-gray-800:oklch(27.8% .033 256.848);--color-gray-900:oklch(21% .034 264.665);--color-black:#000;--color-white:#fff;--spacing:.25rem;--container-xs:20rem;--container-sm:24rem;--container-md:28rem;--container-xl:36rem;--container-2xl:42rem;--container-6xl:72rem;--container-7xl:80rem;--text-xs:.75rem;--text-xs--line-height:calc(1/.75);--text-sm:.875rem;--text-sm--line-height:calc(1.25/.875);--text-base:1rem;--text-base--line-height: 1.5 ;--text-lg:1.125rem;--text-lg--line-height:calc(1.75/1.125);--text-xl:1.25rem;--text-xl--line-height:calc(1.75/1.25);--text-2xl:1.5rem;--text-2xl--line-height:calc(2/1.5);--text-3xl:1.875rem;--text-3xl--line-height: 1.2 ;--font-weight-medium:500;--font-weight-semibold:600;--font-weight-bold:700;--font-weight-extrabold:800;--tracking-wide:.025em;--tracking-wider:.05em;--radius-md:.375rem;--radius-lg:.5rem;--ease-in-out:cubic-bezier(.4,0,.2,1);--animate-spin:spin 1s linear infinite;--animate-pulse:pulse 2s cubic-bezier(.4,0,.6,1)infinite;--default-transition-duration:.15s;--default-transition-timing-function:cubic-bezier(.4,0,.2,1);--default-font-family:var(--font-sans);--default-mono-font-family:var(--font-mono)}}@layer base{*,:after,:before,::backdrop{box-sizing:border-box;border:0 solid;margin:0;padding:0}::file-selector-button{box-sizing:border-box;border:0 solid;margin:0;padding:0}html,:host{-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;line-height:1.5;font-family:var(--default-font-family,ui-sans-serif,system-ui,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji");font-feature-settings:var(--default-font-feature-settings,normal);font-variation-settings:var(--default-font-variation-settings,normal);-webkit-tap-highlight-color:transparent}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;-webkit-text-decoration:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,samp,pre{font-family:var(--default-mono-font-family,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier 
New",monospace);font-feature-settings:var(--default-mono-font-feature-settings,normal);font-variation-settings:var(--default-mono-font-variation-settings,normal);font-size:1em}small{font-size:80%}sub,sup{vertical-align:baseline;font-size:75%;line-height:0;position:relative}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}:-moz-focusring{outline:auto}progress{vertical-align:baseline}summary{display:list-item}ol,ul,menu{list-style:none}img,svg,video,canvas,audio,iframe,embed,object{vertical-align:middle;display:block}img,video{max-width:100%;height:auto}button,input,select,optgroup,textarea{font:inherit;font-feature-settings:inherit;font-variation-settings:inherit;letter-spacing:inherit;color:inherit;opacity:1;background-color:#0000;border-radius:0}::file-selector-button{font:inherit;font-feature-settings:inherit;font-variation-settings:inherit;letter-spacing:inherit;color:inherit;opacity:1;background-color:#0000;border-radius:0}:where(select:is([multiple],[size])) optgroup{font-weight:bolder}:where(select:is([multiple],[size])) optgroup option{padding-inline-start:20px}::file-selector-button{margin-inline-end:4px}::-moz-placeholder{opacity:1}::placeholder{opacity:1}@supports (not (-webkit-appearance:-apple-pay-button)) or (contain-intrinsic-size:1px){::-moz-placeholder{color:currentColor}::placeholder{color:currentColor}@supports (color:color-mix(in lab,red,red)){::-moz-placeholder{color:color-mix(in oklab,currentcolor 50%,transparent)}::placeholder{color:color-mix(in oklab,currentcolor 50%,transparent)}}}textarea{resize:vertical}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-date-and-time-value{min-height:1lh;text-align:inherit}::-webkit-datetime-edit{display:inline-flex}::-webkit-datetime-edit-fields-wrapper{padding:0}::-webkit-datetime-edit{padding-block:0}::-webkit-datetime-edit-year-field{padding-block:0}::-webkit-datetime-edit-month-field{padding-block:0}::-webkit-datetime-edit-day-field{padding-block:0}::-webkit-datetime-edit-hour-field{padding-block:0}::-webkit-datetime-edit-minute-field{padding-block:0}::-webkit-datetime-edit-second-field{padding-block:0}::-webkit-datetime-edit-millisecond-field{padding-block:0}::-webkit-datetime-edit-meridiem-field{padding-block:0}:-moz-ui-invalid{box-shadow:none}button,input:where([type=button],[type=reset],[type=submit]){-webkit-appearance:button;-moz-appearance:button;appearance:button}::file-selector-button{-webkit-appearance:button;-moz-appearance:button;appearance:button}::-webkit-inner-spin-button{height:auto}::-webkit-outer-spin-button{height:auto}[hidden]:where(:not([hidden=until-found])){display:none!important}html{font-family:ui-sans-serif,system-ui,-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Helvetica Neue,Arial,Noto Sans,sans-serif}}@layer components;@layer 
utilities{.pointer-events-none{pointer-events:none}.invisible{visibility:hidden}.sr-only{clip:rect(0,0,0,0);white-space:nowrap;border-width:0;width:1px;height:1px;margin:-1px;padding:0;position:absolute;overflow:hidden}.absolute{position:absolute}.fixed{position:fixed}.relative{position:relative}.static{position:static}.inset-0{inset:calc(var(--spacing)*0)}.inset-y-0{inset-block:calc(var(--spacing)*0)}.top-0{top:calc(var(--spacing)*0)}.top-1\/2{top:50%}.top-2{top:calc(var(--spacing)*2)}.top-4{top:calc(var(--spacing)*4)}.top-full{top:100%}.right-0{right:calc(var(--spacing)*0)}.right-2{right:calc(var(--spacing)*2)}.right-4{right:calc(var(--spacing)*4)}.right-full{right:100%}.bottom-full{bottom:100%}.left-0{left:calc(var(--spacing)*0)}.left-1\/2{left:50%}.left-full{left:100%}.z-0{z-index:0}.z-10{z-index:10}.z-40{z-index:40}.z-50{z-index:50}.z-\[60\]{z-index:60}.container{width:100%}@media (min-width:640px){.container{max-width:640px}}@media (min-width:768px){.container{max-width:768px}}@media (min-width:1024px){.container{max-width:1024px}}@media (min-width:1280px){.container{max-width:1280px}}@media (min-width:1536px){.container{max-width:1536px}}.mx-1{margin-inline:calc(var(--spacing)*1)}.mx-2{margin-inline:calc(var(--spacing)*2)}.mx-4{margin-inline:calc(var(--spacing)*4)}.mx-auto{margin-inline:auto}.mt-0\.5{margin-top:calc(var(--spacing)*.5)}.mt-1{margin-top:calc(var(--spacing)*1)}.mt-2{margin-top:calc(var(--spacing)*2)}.mt-3{margin-top:calc(var(--spacing)*3)}.mt-4{margin-top:calc(var(--spacing)*4)}.mt-5{margin-top:calc(var(--spacing)*5)}.mt-6{margin-top:calc(var(--spacing)*6)}.mt-8{margin-top:calc(var(--spacing)*8)}.-mr-0\.5{margin-right:calc(var(--spacing)*-.5)}.-mr-1{margin-right:calc(var(--spacing)*-1)}.-mr-12{margin-right:calc(var(--spacing)*-12)}.mr-2{margin-right:calc(var(--spacing)*2)}.mr-2\.5{margin-right:calc(var(--spacing)*2.5)}.mr-3{margin-right:calc(var(--spacing)*3)}.mr-4{margin-right:calc(var(--spacing)*4)}.mb-1{margin-bottom:calc(var(--spacing)*1)}.mb-2{margin-bottom:calc(var(--spacing)*2)}.mb-3{margin-bottom:calc(var(--spacing)*3)}.mb-4{margin-bottom:calc(var(--spacing)*4)}.mb-6{margin-bottom:calc(var(--spacing)*6)}.-ml-0\.5{margin-left:calc(var(--spacing)*-.5)}.-ml-1{margin-left:calc(var(--spacing)*-1)}.ml-1{margin-left:calc(var(--spacing)*1)}.ml-2{margin-left:calc(var(--spacing)*2)}.ml-3{margin-left:calc(var(--spacing)*3)}.ml-4{margin-left:calc(var(--spacing)*4)}.ml-5{margin-left:calc(var(--spacing)*5)}.ml-6{margin-left:calc(var(--spacing)*6)}.block{display:block}.contents{display:contents}.flex{display:flex}.grid{display:grid}.hidden{display:none}.inline{display:inline}.inline-block{display:inline-block}.inline-flex{display:inline-flex}.table{display:table}.h-0{height:calc(var(--spacing)*0)}.h-2{height:calc(var(--spacing)*2)}.h-2\.5{height:calc(var(--spacing)*2.5)}.h-3{height:calc(var(--spacing)*3)}.h-4{height:calc(var(--spacing)*4)}.h-5{height:calc(var(--spacing)*5)}.h-6{height:calc(var(--spacing)*6)}.h-8{height:calc(var(--spacing)*8)}.h-10{height:calc(var(--spacing)*10)}.h-12{height:calc(var(--spacing)*12)}.h-16{height:calc(var(--spacing)*16)}.h-24{height:calc(var(--spacing)*24)}.h-48{height:calc(var(--spacing)*48)}.h-full{height:100%}.max-h-96{max-height:calc(var(--spacing)*96)}.max-h-\[90vh\]{max-height:90vh}.max-h-screen{max-height:100vh}.min-h-0{min-height:calc(var(--spacing)*0)}.min-h-\[38px\]{min-height:38px}.min-h-screen{min-height:100vh}.w-0{width:calc(var(--spacing)*0)}.w-2{width:calc(var(--spacing)*2)}.w-2\.5{width:calc(var(--spacing)*2.5)}.w-3{width:calc(var
(--spacing)*3)}.w-4{width:calc(var(--spacing)*4)}.w-5{width:calc(var(--spacing)*5)}.w-6{width:calc(var(--spacing)*6)}.w-8{width:calc(var(--spacing)*8)}.w-10{width:calc(var(--spacing)*10)}.w-12{width:calc(var(--spacing)*12)}.w-16{width:calc(var(--spacing)*16)}.w-20{width:calc(var(--spacing)*20)}.w-64{width:calc(var(--spacing)*64)}.w-80{width:calc(var(--spacing)*80)}.w-auto{width:auto}.w-full{width:100%}.max-w-2xl{max-width:var(--container-2xl)}.max-w-6xl{max-width:var(--container-6xl)}.max-w-7xl{max-width:var(--container-7xl)}.max-w-full{max-width:100%}.max-w-md{max-width:var(--container-md)}.max-w-sm{max-width:var(--container-sm)}.max-w-xl{max-width:var(--container-xl)}.max-w-xs{max-width:var(--container-xs)}.min-w-0{min-width:calc(var(--spacing)*0)}.flex-1{flex:1}.flex-shrink-0{flex-shrink:0}.-translate-x-1\/2{--tw-translate-x: -50% ;translate:var(--tw-translate-x)var(--tw-translate-y)}.-translate-y-1\/2{--tw-translate-y: -50% ;translate:var(--tw-translate-x)var(--tw-translate-y)}.rotate-90{rotate:90deg}.transform{transform:var(--tw-rotate-x,)var(--tw-rotate-y,)var(--tw-rotate-z,)var(--tw-skew-x,)var(--tw-skew-y,)}.animate-pulse{animation:var(--animate-pulse)}.animate-spin{animation:var(--animate-spin)}.cursor-default{cursor:default}.cursor-help{cursor:help}.cursor-not-allowed{cursor:not-allowed}.cursor-pointer{cursor:pointer}.resize-none{resize:none}.list-inside{list-style-position:inside}.list-disc{list-style-type:disc}.appearance-none{-webkit-appearance:none;-moz-appearance:none;appearance:none}.grid-cols-1{grid-template-columns:repeat(1,minmax(0,1fr))}.grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.flex-col{flex-direction:column}.flex-wrap{flex-wrap:wrap}.items-center{align-items:center}.items-start{align-items:flex-start}.justify-between{justify-content:space-between}.justify-center{justify-content:center}.justify-end{justify-content:flex-end}.gap-0{gap:calc(var(--spacing)*0)}.gap-2{gap:calc(var(--spacing)*2)}.gap-4{gap:calc(var(--spacing)*4)}.gap-5{gap:calc(var(--spacing)*5)}.gap-6{gap:calc(var(--spacing)*6)}:where(.-space-y-px>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(-1px*var(--tw-space-y-reverse));margin-block-end:calc(-1px*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-1>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*1)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*1)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-2>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*2)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*2)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-3>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*3)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*3)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-4>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*4)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*4)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-6>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*6)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*6)*calc(1 - 
var(--tw-space-y-reverse)))}:where(.space-y-8>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*8)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*8)*calc(1 - var(--tw-space-y-reverse)))}.gap-x-4{-moz-column-gap:calc(var(--spacing)*4);column-gap:calc(var(--spacing)*4)}:where(.-space-x-px>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(-1px*var(--tw-space-x-reverse));margin-inline-end:calc(-1px*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-1>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*1)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*1)*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-2>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*2)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*2)*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-3>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*3)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*3)*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-4>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*4)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*4)*calc(1 - var(--tw-space-x-reverse)))}.gap-y-6{row-gap:calc(var(--spacing)*6)}:where(.divide-y>:not(:last-child)){--tw-divide-y-reverse:0;border-bottom-style:var(--tw-border-style);border-top-style:var(--tw-border-style);border-top-width:calc(1px*var(--tw-divide-y-reverse));border-bottom-width:calc(1px*calc(1 - var(--tw-divide-y-reverse)))}:where(.divide-gray-200>:not(:last-child)){border-color:var(--color-gray-200)}.truncate{text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.overflow-hidden{overflow:hidden}.overflow-x-auto{overflow-x:auto}.overflow-y-auto{overflow-y:auto}.scroll-smooth{scroll-behavior:smooth}.rounded{border-radius:.25rem}.rounded-full{border-radius:3.40282e38px}.rounded-lg{border-radius:var(--radius-lg)}.rounded-md{border-radius:var(--radius-md)}.rounded-none{border-radius:0}.rounded-t-md{border-top-left-radius:var(--radius-md);border-top-right-radius:var(--radius-md)}.rounded-l-md{border-top-left-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.rounded-l-none{border-top-left-radius:0;border-bottom-left-radius:0}.rounded-r-md{border-top-right-radius:var(--radius-md);border-bottom-right-radius:var(--radius-md)}.rounded-r-none{border-top-right-radius:0;border-bottom-right-radius:0}.rounded-b-md{border-bottom-right-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.border{border-style:var(--tw-border-style);border-width:1px}.border-2{border-style:var(--tw-border-style);border-width:2px}.border-4{border-style:var(--tw-border-style);border-width:4px}.border-t{border-top-style:var(--tw-border-style);border-top-width:1px}.border-r{border-right-style:var(--tw-border-style);border-right-width:1px}.border-b{border-bottom-style:var(--tw-border-style);border-bottom-width:1px}.border-b-2{border-bottom-style:var(--tw-border-style);border-bottom-width:2px}.border-l-0{border-left-style:var(--tw-border-style);border-left-width:0}.border-dashed{--tw-border-style:dashed;border-style:dashed}.border-blue-200{border-color:var(--color-blue-200)}.border-blue-500{border-color:var(--color-blue-500)}.border-blue-600{border-color:var(--color-blue-600)}.border-gray-100{border-color:var(--color-gray-100)}.border-gray-200{border-color:var(--c
olor-gray-200)}.border-gray-300{border-color:var(--color-gray-300)}.border-green-200{border-color:var(--color-green-200)}.border-red-200{border-color:var(--color-red-200)}.border-red-300{border-color:var(--color-red-300)}.border-transparent{border-color:#0000}.border-white{border-color:var(--color-white)}.border-yellow-200{border-color:var(--color-yellow-200)}.border-t-gray-900{border-top-color:var(--color-gray-900)}.border-r-gray-900{border-right-color:var(--color-gray-900)}.border-b-gray-900{border-bottom-color:var(--color-gray-900)}.border-l-gray-900{border-left-color:var(--color-gray-900)}.bg-black\/30{background-color:#0000004d}@supports (color:color-mix(in lab,red,red)){.bg-black\/30{background-color:color-mix(in oklab,var(--color-black)30%,transparent)}}.bg-blue-50{background-color:var(--color-blue-50)}.bg-blue-100{background-color:var(--color-blue-100)}.bg-blue-500{background-color:var(--color-blue-500)}.bg-blue-600{background-color:var(--color-blue-600)}.bg-gray-50{background-color:var(--color-gray-50)}.bg-gray-100{background-color:var(--color-gray-100)}.bg-gray-200{background-color:var(--color-gray-200)}.bg-gray-400{background-color:var(--color-gray-400)}.bg-gray-500{background-color:var(--color-gray-500)}.bg-gray-900{background-color:var(--color-gray-900)}.bg-green-50{background-color:var(--color-green-50)}.bg-green-100{background-color:var(--color-green-100)}.bg-green-500{background-color:var(--color-green-500)}.bg-orange-50{background-color:var(--color-orange-50)}.bg-purple-50{background-color:var(--color-purple-50)}.bg-purple-500{background-color:var(--color-purple-500)}.bg-red-50{background-color:var(--color-red-50)}.bg-red-100{background-color:var(--color-red-100)}.bg-red-500{background-color:var(--color-red-500)}.bg-red-600{background-color:var(--color-red-600)}.bg-white{background-color:var(--color-white)}.bg-yellow-50{background-color:var(--color-yellow-50)}.bg-yellow-100{background-color:var(--color-yellow-100)}.bg-yellow-500{background-color:var(--color-yellow-500)}.bg-gradient-to-r{--tw-gradient-position:to right in 
oklab;background-image:linear-gradient(var(--tw-gradient-stops))}.from-gray-50{--tw-gradient-from:var(--color-gray-50);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.to-white{--tw-gradient-to:var(--color-white);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.p-1{padding:calc(var(--spacing)*1)}.p-2{padding:calc(var(--spacing)*2)}.p-3{padding:calc(var(--spacing)*3)}.p-4{padding:calc(var(--spacing)*4)}.p-5{padding:calc(var(--spacing)*5)}.p-6{padding:calc(var(--spacing)*6)}.px-2{padding-inline:calc(var(--spacing)*2)}.px-2\.5{padding-inline:calc(var(--spacing)*2.5)}.px-3{padding-inline:calc(var(--spacing)*3)}.px-4{padding-inline:calc(var(--spacing)*4)}.px-6{padding-inline:calc(var(--spacing)*6)}.py-0\.5{padding-block:calc(var(--spacing)*.5)}.py-1{padding-block:calc(var(--spacing)*1)}.py-2{padding-block:calc(var(--spacing)*2)}.py-3{padding-block:calc(var(--spacing)*3)}.py-4{padding-block:calc(var(--spacing)*4)}.py-5{padding-block:calc(var(--spacing)*5)}.py-6{padding-block:calc(var(--spacing)*6)}.py-8{padding-block:calc(var(--spacing)*8)}.py-12{padding-block:calc(var(--spacing)*12)}.pt-2{padding-top:calc(var(--spacing)*2)}.pt-4{padding-top:calc(var(--spacing)*4)}.pt-5{padding-top:calc(var(--spacing)*5)}.pt-6{padding-top:calc(var(--spacing)*6)}.pt-20{padding-top:calc(var(--spacing)*20)}.pr-3{padding-right:calc(var(--spacing)*3)}.pr-8{padding-right:calc(var(--spacing)*8)}.pb-2{padding-bottom:calc(var(--spacing)*2)}.pb-4{padding-bottom:calc(var(--spacing)*4)}.pl-2{padding-left:calc(var(--spacing)*2)}.pl-3{padding-left:calc(var(--spacing)*3)}.pl-10{padding-left:calc(var(--spacing)*10)}.text-center{text-align:center}.text-left{text-align:left}.text-right{text-align:right}.font-mono{font-family:var(--font-mono)}.text-2xl{font-size:var(--text-2xl);line-height:var(--tw-leading,var(--text-2xl--line-height))}.text-3xl{font-size:var(--text-3xl);line-height:var(--tw-leading,var(--text-3xl--line-height))}.text-base{font-size:var(--text-base);line-height:var(--tw-leading,var(--text-base--line-height))}.text-lg{font-size:var(--text-lg);line-height:var(--tw-leading,var(--text-lg--line-height))}.text-sm{font-size:var(--text-sm);line-height:var(--tw-leading,var(--text-sm--line-height))}.text-xl{font-size:var(--text-xl);line-height:var(--tw-leading,var(--text-xl--line-height))}.text-xs{font-size:var(--text-xs);line-height:var(--tw-leading,var(--text-xs--line-height))}.leading-5{--tw-leading:calc(var(--spacing)*5);line-height:calc(var(--spacing)*5)}.leading-6{--tw-leading:calc(var(--spacing)*6);line-height:calc(var(--spacing)*6)}.font-bold{--tw-font-weight:var(--font-weight-bold);font-weight:var(--font-weight-bold)}.font-extrabold{--tw-font-weight:var(--font-weight-extrabold);font-weight:var(--font-weight-extrabold)}.font-medium{--tw-font-weight:var(--font-weight-medium);font-weight:var(--font-weight-medium)}.font-semibold{--tw-font-weight:var(--font-weight-semibold);font-weight:var(--font-weight-semibold)}.tracking-wide{--tw-tracking:var(--tracking-wide);letter-spacing:var(--tracking-wide)}.tracking-wider{--tw-tracking:var(--tracking-wider);letter-spacing:var(--tracking-wider)}.break-all{word-break:break-all}.text-black{color:var(--color-black)}.text-blue-400{color:var(--color-blue-400)}.text-blue-600{color:var(--color-blue-600)}.text-blue-700{
color:var(--color-blue-700)}.text-blue-800{color:var(--color-blue-800)}.text-gray-300{color:var(--color-gray-300)}.text-gray-400{color:var(--color-gray-400)}.text-gray-500{color:var(--color-gray-500)}.text-gray-600{color:var(--color-gray-600)}.text-gray-700{color:var(--color-gray-700)}.text-gray-800{color:var(--color-gray-800)}.text-gray-900{color:var(--color-gray-900)}.text-green-400{color:var(--color-green-400)}.text-green-500{color:var(--color-green-500)}.text-green-600{color:var(--color-green-600)}.text-green-700{color:var(--color-green-700)}.text-green-800{color:var(--color-green-800)}.text-indigo-600{color:var(--color-indigo-600)}.text-orange-700{color:var(--color-orange-700)}.text-purple-600{color:var(--color-purple-600)}.text-purple-700{color:var(--color-purple-700)}.text-red-400{color:var(--color-red-400)}.text-red-500{color:var(--color-red-500)}.text-red-600{color:var(--color-red-600)}.text-red-700{color:var(--color-red-700)}.text-red-800{color:var(--color-red-800)}.text-red-900{color:var(--color-red-900)}.text-white{color:var(--color-white)}.text-yellow-400{color:var(--color-yellow-400)}.text-yellow-600{color:var(--color-yellow-600)}.text-yellow-700{color:var(--color-yellow-700)}.text-yellow-800{color:var(--color-yellow-800)}.capitalize{text-transform:capitalize}.uppercase{text-transform:uppercase}.italic{font-style:italic}.placeholder-gray-400::-moz-placeholder{color:var(--color-gray-400)}.placeholder-gray-400::placeholder{color:var(--color-gray-400)}.placeholder-gray-500::-moz-placeholder{color:var(--color-gray-500)}.placeholder-gray-500::placeholder{color:var(--color-gray-500)}.opacity-0{opacity:0}.opacity-25{opacity:.25}.opacity-50{opacity:.5}.opacity-75{opacity:.75}.shadow{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-lg{--tw-shadow:0 10px 15px -3px var(--tw-shadow-color,#0000001a),0 4px 6px -4px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-sm{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-xl{--tw-shadow:0 20px 25px -5px var(--tw-shadow-color,#0000001a),0 8px 10px -6px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.ring,.ring-1{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(1px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.ring-blue-600\/20{--tw-ring-color:#155dfc33}@supports (color:color-mix(in lab,red,red)){.ring-blue-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-600)20%,transparent)}}.ring-gray-500\/20{--tw-ring-color:#6a728233}@supports (color:color-mix(in lab,red,red)){.ring-gray-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-500)20%,transparent)}}.ring-gray-600\/20{--tw-ring-color:#4a556533}@supports (color:color-mix(in lab,red,red)){.ring-gray-600\/20{--tw-ring-color:color-mix(in 
oklab,var(--color-gray-600)20%,transparent)}}.ring-green-600\/20{--tw-ring-color:#00a54433}@supports (color:color-mix(in lab,red,red)){.ring-green-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-600)20%,transparent)}}.ring-orange-600\/20{--tw-ring-color:#f0510033}@supports (color:color-mix(in lab,red,red)){.ring-orange-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-orange-600)20%,transparent)}}.ring-purple-600\/20{--tw-ring-color:#9810fa33}@supports (color:color-mix(in lab,red,red)){.ring-purple-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-purple-600)20%,transparent)}}.ring-red-600\/20{--tw-ring-color:#e4001433}@supports (color:color-mix(in lab,red,red)){.ring-red-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-600)20%,transparent)}}.ring-yellow-600\/20{--tw-ring-color:#cd890033}@supports (color:color-mix(in lab,red,red)){.ring-yellow-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-yellow-600)20%,transparent)}}.filter{filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}.transition-all{transition-property:all;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-colors{transition-property:color,background-color,border-color,outline-color,text-decoration-color,fill,stroke,--tw-gradient-from,--tw-gradient-via,--tw-gradient-to;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-shadow{transition-property:box-shadow;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-transform{transition-property:transform,translate,scale,rotate;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.duration-200{--tw-duration:.2s;transition-duration:.2s}.duration-300{--tw-duration:.3s;transition-duration:.3s}.ease-in-out{--tw-ease:var(--ease-in-out);transition-timing-function:var(--ease-in-out)}.ring-inset{--tw-ring-inset:inset}@media (hover:hover){.group-hover\:visible:is(:where(.group):hover *){visibility:visible}.group-hover\:opacity-100:is(:where(.group):hover *){opacity:1}}.first\:rounded-l-md:first-child{border-top-left-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.first\:border-l:first-child{border-left-style:var(--tw-border-style);border-left-width:1px}@media 
(hover:hover){.hover\:scale-105:hover{--tw-scale-x:105%;--tw-scale-y:105%;--tw-scale-z:105%;scale:var(--tw-scale-x)var(--tw-scale-y)}.hover\:border-blue-400:hover{border-color:var(--color-blue-400)}.hover\:border-gray-400:hover{border-color:var(--color-gray-400)}.hover\:bg-blue-200:hover{background-color:var(--color-blue-200)}.hover\:bg-blue-700:hover{background-color:var(--color-blue-700)}.hover\:bg-gray-50:hover{background-color:var(--color-gray-50)}.hover\:bg-gray-200:hover{background-color:var(--color-gray-200)}.hover\:bg-red-200:hover{background-color:var(--color-red-200)}.hover\:bg-red-700:hover{background-color:var(--color-red-700)}.hover\:text-blue-500:hover{color:var(--color-blue-500)}.hover\:text-blue-600:hover{color:var(--color-blue-600)}.hover\:text-gray-600:hover{color:var(--color-gray-600)}.hover\:text-gray-700:hover{color:var(--color-gray-700)}.hover\:text-gray-800:hover{color:var(--color-gray-800)}.hover\:text-gray-900:hover{color:var(--color-gray-900)}.hover\:text-green-500:hover{color:var(--color-green-500)}.hover\:text-green-900:hover{color:var(--color-green-900)}.hover\:text-indigo-900:hover{color:var(--color-indigo-900)}.hover\:text-red-500:hover{color:var(--color-red-500)}.hover\:text-red-900:hover{color:var(--color-red-900)}.hover\:text-yellow-300:hover{color:var(--color-yellow-300)}.hover\:text-yellow-500:hover{color:var(--color-yellow-500)}.hover\:underline:hover{text-decoration-line:underline}.hover\:shadow-md:hover{--tw-shadow:0 4px 6px -1px var(--tw-shadow-color,#0000001a),0 2px 4px -2px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.hover\:shadow-sm:hover{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}}.focus\:z-10:focus{z-index:10}.focus\:border-blue-500:focus{border-color:var(--color-blue-500)}.focus\:bg-red-200:focus{background-color:var(--color-red-200)}.focus\:placeholder-gray-400:focus::-moz-placeholder{color:var(--color-gray-400)}.focus\:placeholder-gray-400:focus::placeholder{color:var(--color-gray-400)}.focus\:ring-1:focus{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(1px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.focus\:ring-2:focus{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(2px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.focus\:ring-blue-500:focus{--tw-ring-color:var(--color-blue-500)}.focus\:ring-gray-500:focus{--tw-ring-color:var(--color-gray-500)}.focus\:ring-green-500:focus{--tw-ring-color:var(--color-green-500)}.focus\:ring-indigo-500:focus{--tw-ring-color:var(--color-indigo-500)}.focus\:ring-red-500:focus{--tw-ring-color:var(--color-red-500)}.focus\:ring-white:focus{--tw-ring-color:var(--color-white)}.focus\:ring-yellow-500:focus{--tw-ring-color:var(--color-yellow-500)}.focus\:ring-offset-2:focus{--tw-ring-offset-width:2px;--tw-ring-offset-shadow:var(--tw-ring-inset,)0 0 0 
var(--tw-ring-offset-width)var(--tw-ring-offset-color)}.focus\:outline-none:focus{--tw-outline-style:none;outline-style:none}.focus\:ring-inset:focus{--tw-ring-inset:inset}.disabled\:cursor-not-allowed:disabled{cursor:not-allowed}.disabled\:bg-gray-400:disabled{background-color:var(--color-gray-400)}.disabled\:opacity-50:disabled{opacity:.5}@media (hover:hover){.disabled\:hover\:bg-gray-400:disabled:hover{background-color:var(--color-gray-400)}}@media (min-width:640px){.sm\:mx-auto{margin-inline:auto}.sm\:mt-0{margin-top:calc(var(--spacing)*0)}.sm\:ml-4{margin-left:calc(var(--spacing)*4)}.sm\:block{display:block}.sm\:flex{display:flex}.sm\:hidden{display:none}.sm\:w-full{width:100%}.sm\:max-w-md{max-width:var(--container-md)}.sm\:flex-1{flex:1}.sm\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.sm\:items-center{align-items:center}.sm\:justify-between{justify-content:space-between}.sm\:rounded-lg{border-radius:var(--radius-lg)}.sm\:p-6{padding:calc(var(--spacing)*6)}.sm\:px-6{padding-inline:calc(var(--spacing)*6)}.sm\:px-10{padding-inline:calc(var(--spacing)*10)}.sm\:text-sm{font-size:var(--text-sm);line-height:var(--tw-leading,var(--text-sm--line-height))}}@media (min-width:768px){.md\:ml-2{margin-left:calc(var(--spacing)*2)}.md\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.md\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}:where(.md\:space-x-3>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*3)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*3)*calc(1 - var(--tw-space-x-reverse)))}}@media (min-width:1024px){.lg\:fixed{position:fixed}.lg\:inset-y-0{inset-block:calc(var(--spacing)*0)}.lg\:flex{display:flex}.lg\:hidden{display:none}.lg\:w-64{width:calc(var(--spacing)*64)}.lg\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.lg\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.lg\:grid-cols-4{grid-template-columns:repeat(4,minmax(0,1fr))}.lg\:flex-col{flex-direction:column}.lg\:px-8{padding-inline:calc(var(--spacing)*8)}.lg\:pt-6{padding-top:calc(var(--spacing)*6)}.lg\:pl-64{padding-left:calc(var(--spacing)*64)}}.dark .dark\:block{display:block}.dark .dark\:hidden{display:none}:where(.dark .dark\:divide-gray-700>:not(:last-child)){border-color:var(--color-gray-700)}.dark .dark\:border-blue-700{border-color:var(--color-blue-700)}.dark .dark\:border-blue-800{border-color:var(--color-blue-800)}.dark .dark\:border-gray-600{border-color:var(--color-gray-600)}.dark .dark\:border-gray-700{border-color:var(--color-gray-700)}.dark .dark\:border-green-700{border-color:var(--color-green-700)}.dark .dark\:border-red-600{border-color:var(--color-red-600)}.dark .dark\:border-red-700{border-color:var(--color-red-700)}.dark .dark\:border-red-800{border-color:var(--color-red-800)}.dark .dark\:border-yellow-700{border-color:var(--color-yellow-700)}.dark .dark\:border-yellow-800{border-color:var(--color-yellow-800)}.dark .dark\:bg-black\/50{background-color:#00000080}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-black\/50{background-color:color-mix(in oklab,var(--color-black)50%,transparent)}}.dark .dark\:bg-blue-500\/10{background-color:#3080ff1a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-blue-500\/10{background-color:color-mix(in oklab,var(--color-blue-500)10%,transparent)}}.dark .dark\:bg-blue-900{background-color:var(--color-blue-900)}.dark .dark\:bg-blue-900\/20{background-color:#1c398e33}@supports (color:color-mix(in lab,red,red)){.dark 
.dark\:bg-blue-900\/20{background-color:color-mix(in oklab,var(--color-blue-900)20%,transparent)}}.dark .dark\:bg-blue-900\/50{background-color:#1c398e80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-blue-900\/50{background-color:color-mix(in oklab,var(--color-blue-900)50%,transparent)}}.dark .dark\:bg-gray-500\/10{background-color:#6a72821a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-500\/10{background-color:color-mix(in oklab,var(--color-gray-500)10%,transparent)}}.dark .dark\:bg-gray-600{background-color:var(--color-gray-600)}.dark .dark\:bg-gray-700{background-color:var(--color-gray-700)}.dark .dark\:bg-gray-800{background-color:var(--color-gray-800)}.dark .dark\:bg-gray-800\/50{background-color:#1e293980}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-800\/50{background-color:color-mix(in oklab,var(--color-gray-800)50%,transparent)}}.dark .dark\:bg-gray-900{background-color:var(--color-gray-900)}.dark .dark\:bg-gray-900\/50{background-color:#10182880}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-900\/50{background-color:color-mix(in oklab,var(--color-gray-900)50%,transparent)}}.dark .dark\:bg-green-500\/10{background-color:#00c7581a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-green-500\/10{background-color:color-mix(in oklab,var(--color-green-500)10%,transparent)}}.dark .dark\:bg-green-900{background-color:var(--color-green-900)}.dark .dark\:bg-green-900\/50{background-color:#0d542b80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-green-900\/50{background-color:color-mix(in oklab,var(--color-green-900)50%,transparent)}}.dark .dark\:bg-orange-500\/10{background-color:#fe6e001a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-orange-500\/10{background-color:color-mix(in oklab,var(--color-orange-500)10%,transparent)}}.dark .dark\:bg-purple-500\/10{background-color:#ac4bff1a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-purple-500\/10{background-color:color-mix(in oklab,var(--color-purple-500)10%,transparent)}}.dark .dark\:bg-red-500\/10{background-color:#fb2c361a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-500\/10{background-color:color-mix(in oklab,var(--color-red-500)10%,transparent)}}.dark .dark\:bg-red-700{background-color:var(--color-red-700)}.dark .dark\:bg-red-800{background-color:var(--color-red-800)}.dark .dark\:bg-red-900{background-color:var(--color-red-900)}.dark .dark\:bg-red-900\/20{background-color:#82181a33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-900\/20{background-color:color-mix(in oklab,var(--color-red-900)20%,transparent)}}.dark .dark\:bg-red-900\/50{background-color:#82181a80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-900\/50{background-color:color-mix(in oklab,var(--color-red-900)50%,transparent)}}.dark .dark\:bg-yellow-500\/10{background-color:#edb2001a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-yellow-500\/10{background-color:color-mix(in oklab,var(--color-yellow-500)10%,transparent)}}.dark .dark\:bg-yellow-900{background-color:var(--color-yellow-900)}.dark .dark\:bg-yellow-900\/20{background-color:#733e0a33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-yellow-900\/20{background-color:color-mix(in oklab,var(--color-yellow-900)20%,transparent)}}.dark 
.dark\:from-gray-800{--tw-gradient-from:var(--color-gray-800);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.dark .dark\:to-gray-700{--tw-gradient-to:var(--color-gray-700);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.dark .dark\:text-blue-200{color:var(--color-blue-200)}.dark .dark\:text-blue-300{color:var(--color-blue-300)}.dark .dark\:text-blue-400{color:var(--color-blue-400)}.dark .dark\:text-gray-200{color:var(--color-gray-200)}.dark .dark\:text-gray-300{color:var(--color-gray-300)}.dark .dark\:text-gray-400{color:var(--color-gray-400)}.dark .dark\:text-gray-500{color:var(--color-gray-500)}.dark .dark\:text-green-200{color:var(--color-green-200)}.dark .dark\:text-green-300{color:var(--color-green-300)}.dark .dark\:text-green-400{color:var(--color-green-400)}.dark .dark\:text-indigo-400{color:var(--color-indigo-400)}.dark .dark\:text-orange-400{color:var(--color-orange-400)}.dark .dark\:text-purple-400{color:var(--color-purple-400)}.dark .dark\:text-red-100{color:var(--color-red-100)}.dark .dark\:text-red-200{color:var(--color-red-200)}.dark .dark\:text-red-300{color:var(--color-red-300)}.dark .dark\:text-red-400{color:var(--color-red-400)}.dark .dark\:text-white{color:var(--color-white)}.dark .dark\:text-yellow-200{color:var(--color-yellow-200)}.dark .dark\:text-yellow-300{color:var(--color-yellow-300)}.dark .dark\:text-yellow-400{color:var(--color-yellow-400)}.dark .dark\:placeholder-gray-400::-moz-placeholder{color:var(--color-gray-400)}.dark .dark\:placeholder-gray-400::placeholder{color:var(--color-gray-400)}.dark .dark\:placeholder-gray-500::-moz-placeholder{color:var(--color-gray-500)}.dark .dark\:placeholder-gray-500::placeholder{color:var(--color-gray-500)}.dark .dark\:ring-blue-400\/20{--tw-ring-color:#54a2ff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-400)20%,transparent)}}.dark .dark\:ring-blue-400\/30{--tw-ring-color:#54a2ff4d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-blue-400)30%,transparent)}}.dark .dark\:ring-blue-500\/20{--tw-ring-color:#3080ff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-500)20%,transparent)}}.dark .dark\:ring-gray-400\/20{--tw-ring-color:#99a1af33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-400)20%,transparent)}}.dark .dark\:ring-gray-400\/30{--tw-ring-color:#99a1af4d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-gray-400)30%,transparent)}}.dark .dark\:ring-gray-500\/20{--tw-ring-color:#6a728233}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-500)20%,transparent)}}.dark .dark\:ring-green-400\/20{--tw-ring-color:#05df7233}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-green-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-400)20%,transparent)}}.dark .dark\:ring-green-400\/30{--tw-ring-color:#05df724d}@supports (color:color-mix(in lab,red,red)){.dark 
.dark\:ring-green-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-green-400)30%,transparent)}}.dark .dark\:ring-green-500\/20{--tw-ring-color:#00c75833}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-green-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-500)20%,transparent)}}.dark .dark\:ring-orange-500\/20{--tw-ring-color:#fe6e0033}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-orange-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-orange-500)20%,transparent)}}.dark .dark\:ring-purple-500\/20{--tw-ring-color:#ac4bff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-purple-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-purple-500)20%,transparent)}}.dark .dark\:ring-red-400\/20{--tw-ring-color:#ff656833}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-400)20%,transparent)}}.dark .dark\:ring-red-400\/30{--tw-ring-color:#ff65684d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-red-400)30%,transparent)}}.dark .dark\:ring-red-500\/20{--tw-ring-color:#fb2c3633}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-500)20%,transparent)}}.dark .dark\:ring-yellow-400\/30{--tw-ring-color:#fac8004d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-yellow-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-yellow-400)30%,transparent)}}.dark .dark\:ring-yellow-500\/20{--tw-ring-color:#edb20033}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-yellow-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-yellow-500)20%,transparent)}}@media (hover:hover){.dark .dark\:hover\:border-blue-400:hover{border-color:var(--color-blue-400)}.dark .dark\:hover\:border-gray-500:hover{border-color:var(--color-gray-500)}.dark .dark\:hover\:bg-blue-800:hover{background-color:var(--color-blue-800)}.dark .dark\:hover\:bg-gray-500:hover{background-color:var(--color-gray-500)}.dark .dark\:hover\:bg-gray-600:hover{background-color:var(--color-gray-600)}.dark .dark\:hover\:bg-gray-700:hover{background-color:var(--color-gray-700)}.dark .dark\:hover\:bg-gray-800:hover{background-color:var(--color-gray-800)}.dark .dark\:hover\:bg-red-700:hover{background-color:var(--color-red-700)}.dark .dark\:hover\:bg-red-800:hover{background-color:var(--color-red-800)}.dark .dark\:hover\:text-blue-300:hover{color:var(--color-blue-300)}.dark .dark\:hover\:text-gray-100:hover{color:var(--color-gray-100)}.dark .dark\:hover\:text-gray-300:hover{color:var(--color-gray-300)}.dark .dark\:hover\:text-green-300:hover{color:var(--color-green-300)}.dark .dark\:hover\:text-indigo-300:hover{color:var(--color-indigo-300)}.dark .dark\:hover\:text-red-300:hover{color:var(--color-red-300)}.dark .dark\:hover\:text-white:hover{color:var(--color-white)}}.dark .dark\:focus\:bg-red-700:focus{background-color:var(--color-red-700)}.dark .dark\:focus\:ring-offset-gray-900:focus{--tw-ring-offset-color:var(--color-gray-900)}}@property --tw-translate-x{syntax:"*";inherits:false;initial-value:0}@property --tw-translate-y{syntax:"*";inherits:false;initial-value:0}@property --tw-translate-z{syntax:"*";inherits:false;initial-value:0}@property --tw-rotate-x{syntax:"*";inherits:false}@property --tw-rotate-y{syntax:"*";inherits:false}@property --tw-rotate-z{syntax:"*";inherits:false}@property --tw-skew-x{syntax:"*";inherits:false}@property 
--tw-skew-y{syntax:"*";inherits:false}@property --tw-space-y-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-space-x-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-divide-y-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-border-style{syntax:"*";inherits:false;initial-value:solid}@property --tw-gradient-position{syntax:"*";inherits:false}@property --tw-gradient-from{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-via{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-to{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-stops{syntax:"*";inherits:false}@property --tw-gradient-via-stops{syntax:"*";inherits:false}@property --tw-gradient-from-position{syntax:"";inherits:false;initial-value:0%}@property --tw-gradient-via-position{syntax:"";inherits:false;initial-value:50%}@property --tw-gradient-to-position{syntax:"";inherits:false;initial-value:100%}@property --tw-leading{syntax:"*";inherits:false}@property --tw-font-weight{syntax:"*";inherits:false}@property --tw-tracking{syntax:"*";inherits:false}@property --tw-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-shadow-color{syntax:"*";inherits:false}@property --tw-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-inset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-inset-shadow-color{syntax:"*";inherits:false}@property --tw-inset-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-ring-color{syntax:"*";inherits:false}@property --tw-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-inset-ring-color{syntax:"*";inherits:false}@property --tw-inset-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-ring-inset{syntax:"*";inherits:false}@property --tw-ring-offset-width{syntax:"";inherits:false;initial-value:0}@property --tw-ring-offset-color{syntax:"*";inherits:false;initial-value:#fff}@property --tw-ring-offset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-blur{syntax:"*";inherits:false}@property --tw-brightness{syntax:"*";inherits:false}@property --tw-contrast{syntax:"*";inherits:false}@property --tw-grayscale{syntax:"*";inherits:false}@property --tw-hue-rotate{syntax:"*";inherits:false}@property --tw-invert{syntax:"*";inherits:false}@property --tw-opacity{syntax:"*";inherits:false}@property --tw-saturate{syntax:"*";inherits:false}@property --tw-sepia{syntax:"*";inherits:false}@property --tw-drop-shadow{syntax:"*";inherits:false}@property --tw-drop-shadow-color{syntax:"*";inherits:false}@property --tw-drop-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-drop-shadow-size{syntax:"*";inherits:false}@property --tw-duration{syntax:"*";inherits:false}@property --tw-ease{syntax:"*";inherits:false}@property --tw-scale-x{syntax:"*";inherits:false;initial-value:1}@property --tw-scale-y{syntax:"*";inherits:false;initial-value:1}@property --tw-scale-z{syntax:"*";inherits:false;initial-value:1}@keyframes spin{to{transform:rotate(360deg)}}@keyframes pulse{50%{opacity:.5}} diff --git a/webapp/assets/_app/immutable/assets/_layout.BPrCR_r7.css b/webapp/assets/_app/immutable/assets/_layout.BPrCR_r7.css new file mode 100644 index 00000000..17f0a1ed --- /dev/null +++ b/webapp/assets/_app/immutable/assets/_layout.BPrCR_r7.css @@ -0,0 +1 @@ +/*! 
tailwindcss v4.1.11 | MIT License | https://tailwindcss.com */@layer properties{@supports ((-webkit-hyphens:none) and (not (margin-trim:inline))) or ((-moz-orient:inline) and (not (color:rgb(from red r g b)))){*,:before,:after,::backdrop{--tw-translate-x:0;--tw-translate-y:0;--tw-translate-z:0;--tw-rotate-x:initial;--tw-rotate-y:initial;--tw-rotate-z:initial;--tw-skew-x:initial;--tw-skew-y:initial;--tw-space-y-reverse:0;--tw-space-x-reverse:0;--tw-divide-y-reverse:0;--tw-border-style:solid;--tw-gradient-position:initial;--tw-gradient-from:#0000;--tw-gradient-via:#0000;--tw-gradient-to:#0000;--tw-gradient-stops:initial;--tw-gradient-via-stops:initial;--tw-gradient-from-position:0%;--tw-gradient-via-position:50%;--tw-gradient-to-position:100%;--tw-leading:initial;--tw-font-weight:initial;--tw-tracking:initial;--tw-shadow:0 0 #0000;--tw-shadow-color:initial;--tw-shadow-alpha:100%;--tw-inset-shadow:0 0 #0000;--tw-inset-shadow-color:initial;--tw-inset-shadow-alpha:100%;--tw-ring-color:initial;--tw-ring-shadow:0 0 #0000;--tw-inset-ring-color:initial;--tw-inset-ring-shadow:0 0 #0000;--tw-ring-inset:initial;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-offset-shadow:0 0 #0000;--tw-blur:initial;--tw-brightness:initial;--tw-contrast:initial;--tw-grayscale:initial;--tw-hue-rotate:initial;--tw-invert:initial;--tw-opacity:initial;--tw-saturate:initial;--tw-sepia:initial;--tw-drop-shadow:initial;--tw-drop-shadow-color:initial;--tw-drop-shadow-alpha:100%;--tw-drop-shadow-size:initial;--tw-duration:initial;--tw-ease:initial;--tw-scale-x:1;--tw-scale-y:1;--tw-scale-z:1}}}@layer theme{:root,:host{--font-sans:ui-sans-serif,system-ui,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";--font-mono:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;--color-red-50:oklch(97.1% .013 17.38);--color-red-100:oklch(93.6% .032 17.717);--color-red-200:oklch(88.5% .062 18.334);--color-red-300:oklch(80.8% .114 19.571);--color-red-400:oklch(70.4% .191 22.216);--color-red-500:oklch(63.7% .237 25.331);--color-red-600:oklch(57.7% .245 27.325);--color-red-700:oklch(50.5% .213 27.518);--color-red-800:oklch(44.4% .177 26.899);--color-red-900:oklch(39.6% .141 25.723);--color-orange-50:oklch(98% .016 73.684);--color-orange-400:oklch(75% .183 55.934);--color-orange-500:oklch(70.5% .213 47.604);--color-orange-600:oklch(64.6% .222 41.116);--color-orange-700:oklch(55.3% .195 38.402);--color-yellow-50:oklch(98.7% .026 102.212);--color-yellow-100:oklch(97.3% .071 103.193);--color-yellow-200:oklch(94.5% .129 101.54);--color-yellow-300:oklch(90.5% .182 98.111);--color-yellow-400:oklch(85.2% .199 91.936);--color-yellow-500:oklch(79.5% .184 86.047);--color-yellow-600:oklch(68.1% .162 75.834);--color-yellow-700:oklch(55.4% .135 66.442);--color-yellow-800:oklch(47.6% .114 61.907);--color-yellow-900:oklch(42.1% .095 57.708);--color-green-50:oklch(98.2% .018 155.826);--color-green-100:oklch(96.2% .044 156.743);--color-green-200:oklch(92.5% .084 155.995);--color-green-300:oklch(87.1% .15 154.449);--color-green-400:oklch(79.2% .209 151.711);--color-green-500:oklch(72.3% .219 149.579);--color-green-600:oklch(62.7% .194 149.214);--color-green-700:oklch(52.7% .154 150.069);--color-green-800:oklch(44.8% .119 151.328);--color-green-900:oklch(39.3% .095 152.535);--color-blue-50:oklch(97% .014 254.604);--color-blue-100:oklch(93.2% .032 255.585);--color-blue-200:oklch(88.2% .059 254.128);--color-blue-300:oklch(80.9% .105 251.813);--color-blue-400:oklch(70.7% .165 
254.624);--color-blue-500:oklch(62.3% .214 259.815);--color-blue-600:oklch(54.6% .245 262.881);--color-blue-700:oklch(48.8% .243 264.376);--color-blue-800:oklch(42.4% .199 265.638);--color-blue-900:oklch(37.9% .146 265.522);--color-indigo-300:oklch(78.5% .115 274.713);--color-indigo-400:oklch(67.3% .182 276.935);--color-indigo-500:oklch(58.5% .233 277.117);--color-indigo-600:oklch(51.1% .262 276.966);--color-indigo-900:oklch(35.9% .144 278.697);--color-purple-50:oklch(97.7% .014 308.299);--color-purple-400:oklch(71.4% .203 305.504);--color-purple-500:oklch(62.7% .265 303.9);--color-purple-600:oklch(55.8% .288 302.321);--color-purple-700:oklch(49.6% .265 301.924);--color-gray-50:oklch(98.5% .002 247.839);--color-gray-100:oklch(96.7% .003 264.542);--color-gray-200:oklch(92.8% .006 264.531);--color-gray-300:oklch(87.2% .01 258.338);--color-gray-400:oklch(70.7% .022 261.325);--color-gray-500:oklch(55.1% .027 264.364);--color-gray-600:oklch(44.6% .03 256.802);--color-gray-700:oklch(37.3% .034 259.733);--color-gray-800:oklch(27.8% .033 256.848);--color-gray-900:oklch(21% .034 264.665);--color-black:#000;--color-white:#fff;--spacing:.25rem;--container-xs:20rem;--container-sm:24rem;--container-md:28rem;--container-xl:36rem;--container-2xl:42rem;--container-6xl:72rem;--container-7xl:80rem;--text-xs:.75rem;--text-xs--line-height:calc(1/.75);--text-sm:.875rem;--text-sm--line-height:calc(1.25/.875);--text-base:1rem;--text-base--line-height: 1.5 ;--text-lg:1.125rem;--text-lg--line-height:calc(1.75/1.125);--text-xl:1.25rem;--text-xl--line-height:calc(1.75/1.25);--text-2xl:1.5rem;--text-2xl--line-height:calc(2/1.5);--text-3xl:1.875rem;--text-3xl--line-height: 1.2 ;--font-weight-medium:500;--font-weight-semibold:600;--font-weight-bold:700;--font-weight-extrabold:800;--tracking-wide:.025em;--tracking-wider:.05em;--radius-md:.375rem;--radius-lg:.5rem;--ease-in-out:cubic-bezier(.4,0,.2,1);--animate-spin:spin 1s linear infinite;--animate-pulse:pulse 2s cubic-bezier(.4,0,.6,1)infinite;--default-transition-duration:.15s;--default-transition-timing-function:cubic-bezier(.4,0,.2,1);--default-font-family:var(--font-sans);--default-mono-font-family:var(--font-mono)}}@layer base{*,:after,:before,::backdrop{box-sizing:border-box;border:0 solid;margin:0;padding:0}::file-selector-button{box-sizing:border-box;border:0 solid;margin:0;padding:0}html,:host{-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;line-height:1.5;font-family:var(--default-font-family,ui-sans-serif,system-ui,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji");font-feature-settings:var(--default-font-feature-settings,normal);font-variation-settings:var(--default-font-variation-settings,normal);-webkit-tap-highlight-color:transparent}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;-webkit-text-decoration:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,samp,pre{font-family:var(--default-mono-font-family,ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier 
New",monospace);font-feature-settings:var(--default-mono-font-feature-settings,normal);font-variation-settings:var(--default-mono-font-variation-settings,normal);font-size:1em}small{font-size:80%}sub,sup{vertical-align:baseline;font-size:75%;line-height:0;position:relative}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}:-moz-focusring{outline:auto}progress{vertical-align:baseline}summary{display:list-item}ol,ul,menu{list-style:none}img,svg,video,canvas,audio,iframe,embed,object{vertical-align:middle;display:block}img,video{max-width:100%;height:auto}button,input,select,optgroup,textarea{font:inherit;font-feature-settings:inherit;font-variation-settings:inherit;letter-spacing:inherit;color:inherit;opacity:1;background-color:#0000;border-radius:0}::file-selector-button{font:inherit;font-feature-settings:inherit;font-variation-settings:inherit;letter-spacing:inherit;color:inherit;opacity:1;background-color:#0000;border-radius:0}:where(select:is([multiple],[size])) optgroup{font-weight:bolder}:where(select:is([multiple],[size])) optgroup option{padding-inline-start:20px}::file-selector-button{margin-inline-end:4px}::-moz-placeholder{opacity:1}::placeholder{opacity:1}@supports (not (-webkit-appearance:-apple-pay-button)) or (contain-intrinsic-size:1px){::-moz-placeholder{color:currentColor}::placeholder{color:currentColor}@supports (color:color-mix(in lab,red,red)){::-moz-placeholder{color:color-mix(in oklab,currentcolor 50%,transparent)}::placeholder{color:color-mix(in oklab,currentcolor 50%,transparent)}}}textarea{resize:vertical}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-date-and-time-value{min-height:1lh;text-align:inherit}::-webkit-datetime-edit{display:inline-flex}::-webkit-datetime-edit-fields-wrapper{padding:0}::-webkit-datetime-edit{padding-block:0}::-webkit-datetime-edit-year-field{padding-block:0}::-webkit-datetime-edit-month-field{padding-block:0}::-webkit-datetime-edit-day-field{padding-block:0}::-webkit-datetime-edit-hour-field{padding-block:0}::-webkit-datetime-edit-minute-field{padding-block:0}::-webkit-datetime-edit-second-field{padding-block:0}::-webkit-datetime-edit-millisecond-field{padding-block:0}::-webkit-datetime-edit-meridiem-field{padding-block:0}:-moz-ui-invalid{box-shadow:none}button,input:where([type=button],[type=reset],[type=submit]){-webkit-appearance:button;-moz-appearance:button;appearance:button}::file-selector-button{-webkit-appearance:button;-moz-appearance:button;appearance:button}::-webkit-inner-spin-button{height:auto}::-webkit-outer-spin-button{height:auto}[hidden]:where(:not([hidden=until-found])){display:none!important}html{font-family:ui-sans-serif,system-ui,-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Helvetica Neue,Arial,Noto Sans,sans-serif}}@layer components;@layer 
utilities{.pointer-events-none{pointer-events:none}.invisible{visibility:hidden}.sr-only{clip:rect(0,0,0,0);white-space:nowrap;border-width:0;width:1px;height:1px;margin:-1px;padding:0;position:absolute;overflow:hidden}.absolute{position:absolute}.fixed{position:fixed}.relative{position:relative}.static{position:static}.inset-0{inset:calc(var(--spacing)*0)}.inset-y-0{inset-block:calc(var(--spacing)*0)}.top-0{top:calc(var(--spacing)*0)}.top-1\/2{top:50%}.top-2{top:calc(var(--spacing)*2)}.top-4{top:calc(var(--spacing)*4)}.top-full{top:100%}.right-0{right:calc(var(--spacing)*0)}.right-2{right:calc(var(--spacing)*2)}.right-4{right:calc(var(--spacing)*4)}.right-full{right:100%}.bottom-full{bottom:100%}.left-0{left:calc(var(--spacing)*0)}.left-1\/2{left:50%}.left-full{left:100%}.z-0{z-index:0}.z-10{z-index:10}.z-40{z-index:40}.z-50{z-index:50}.z-\[60\]{z-index:60}.container{width:100%}@media (min-width:640px){.container{max-width:640px}}@media (min-width:768px){.container{max-width:768px}}@media (min-width:1024px){.container{max-width:1024px}}@media (min-width:1280px){.container{max-width:1280px}}@media (min-width:1536px){.container{max-width:1536px}}.mx-1{margin-inline:calc(var(--spacing)*1)}.mx-2{margin-inline:calc(var(--spacing)*2)}.mx-4{margin-inline:calc(var(--spacing)*4)}.mx-auto{margin-inline:auto}.mt-0\.5{margin-top:calc(var(--spacing)*.5)}.mt-1{margin-top:calc(var(--spacing)*1)}.mt-2{margin-top:calc(var(--spacing)*2)}.mt-3{margin-top:calc(var(--spacing)*3)}.mt-4{margin-top:calc(var(--spacing)*4)}.mt-5{margin-top:calc(var(--spacing)*5)}.mt-6{margin-top:calc(var(--spacing)*6)}.mt-8{margin-top:calc(var(--spacing)*8)}.-mr-0\.5{margin-right:calc(var(--spacing)*-.5)}.-mr-1{margin-right:calc(var(--spacing)*-1)}.-mr-12{margin-right:calc(var(--spacing)*-12)}.mr-2{margin-right:calc(var(--spacing)*2)}.mr-2\.5{margin-right:calc(var(--spacing)*2.5)}.mr-3{margin-right:calc(var(--spacing)*3)}.mr-4{margin-right:calc(var(--spacing)*4)}.mb-1{margin-bottom:calc(var(--spacing)*1)}.mb-2{margin-bottom:calc(var(--spacing)*2)}.mb-3{margin-bottom:calc(var(--spacing)*3)}.mb-4{margin-bottom:calc(var(--spacing)*4)}.mb-6{margin-bottom:calc(var(--spacing)*6)}.-ml-0\.5{margin-left:calc(var(--spacing)*-.5)}.-ml-1{margin-left:calc(var(--spacing)*-1)}.ml-1{margin-left:calc(var(--spacing)*1)}.ml-2{margin-left:calc(var(--spacing)*2)}.ml-3{margin-left:calc(var(--spacing)*3)}.ml-4{margin-left:calc(var(--spacing)*4)}.ml-5{margin-left:calc(var(--spacing)*5)}.ml-6{margin-left:calc(var(--spacing)*6)}.block{display:block}.contents{display:contents}.flex{display:flex}.grid{display:grid}.hidden{display:none}.inline{display:inline}.inline-block{display:inline-block}.inline-flex{display:inline-flex}.table{display:table}.h-0{height:calc(var(--spacing)*0)}.h-2{height:calc(var(--spacing)*2)}.h-2\.5{height:calc(var(--spacing)*2.5)}.h-3{height:calc(var(--spacing)*3)}.h-4{height:calc(var(--spacing)*4)}.h-5{height:calc(var(--spacing)*5)}.h-6{height:calc(var(--spacing)*6)}.h-8{height:calc(var(--spacing)*8)}.h-10{height:calc(var(--spacing)*10)}.h-12{height:calc(var(--spacing)*12)}.h-16{height:calc(var(--spacing)*16)}.h-24{height:calc(var(--spacing)*24)}.h-48{height:calc(var(--spacing)*48)}.h-full{height:100%}.max-h-96{max-height:calc(var(--spacing)*96)}.max-h-\[90vh\]{max-height:90vh}.max-h-screen{max-height:100vh}.min-h-0{min-height:calc(var(--spacing)*0)}.min-h-\[38px\]{min-height:38px}.min-h-screen{min-height:100vh}.w-0{width:calc(var(--spacing)*0)}.w-2{width:calc(var(--spacing)*2)}.w-2\.5{width:calc(var(--spacing)*2.5)}.w-3{width:calc(var
(--spacing)*3)}.w-4{width:calc(var(--spacing)*4)}.w-5{width:calc(var(--spacing)*5)}.w-6{width:calc(var(--spacing)*6)}.w-8{width:calc(var(--spacing)*8)}.w-10{width:calc(var(--spacing)*10)}.w-12{width:calc(var(--spacing)*12)}.w-16{width:calc(var(--spacing)*16)}.w-20{width:calc(var(--spacing)*20)}.w-64{width:calc(var(--spacing)*64)}.w-80{width:calc(var(--spacing)*80)}.w-auto{width:auto}.w-full{width:100%}.max-w-2xl{max-width:var(--container-2xl)}.max-w-6xl{max-width:var(--container-6xl)}.max-w-7xl{max-width:var(--container-7xl)}.max-w-full{max-width:100%}.max-w-md{max-width:var(--container-md)}.max-w-sm{max-width:var(--container-sm)}.max-w-xl{max-width:var(--container-xl)}.max-w-xs{max-width:var(--container-xs)}.min-w-0{min-width:calc(var(--spacing)*0)}.flex-1{flex:1}.flex-shrink-0{flex-shrink:0}.-translate-x-1\/2{--tw-translate-x: -50% ;translate:var(--tw-translate-x)var(--tw-translate-y)}.-translate-y-1\/2{--tw-translate-y: -50% ;translate:var(--tw-translate-x)var(--tw-translate-y)}.rotate-90{rotate:90deg}.transform{transform:var(--tw-rotate-x,)var(--tw-rotate-y,)var(--tw-rotate-z,)var(--tw-skew-x,)var(--tw-skew-y,)}.animate-pulse{animation:var(--animate-pulse)}.animate-spin{animation:var(--animate-spin)}.cursor-default{cursor:default}.cursor-help{cursor:help}.cursor-not-allowed{cursor:not-allowed}.cursor-pointer{cursor:pointer}.resize-none{resize:none}.list-inside{list-style-position:inside}.list-disc{list-style-type:disc}.appearance-none{-webkit-appearance:none;-moz-appearance:none;appearance:none}.grid-cols-1{grid-template-columns:repeat(1,minmax(0,1fr))}.grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.flex-col{flex-direction:column}.flex-wrap{flex-wrap:wrap}.items-center{align-items:center}.items-start{align-items:flex-start}.justify-between{justify-content:space-between}.justify-center{justify-content:center}.justify-end{justify-content:flex-end}.gap-0{gap:calc(var(--spacing)*0)}.gap-2{gap:calc(var(--spacing)*2)}.gap-4{gap:calc(var(--spacing)*4)}.gap-5{gap:calc(var(--spacing)*5)}.gap-6{gap:calc(var(--spacing)*6)}:where(.-space-y-px>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(-1px*var(--tw-space-y-reverse));margin-block-end:calc(-1px*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-1>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*1)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*1)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-2>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*2)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*2)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-3>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*3)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*3)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-4>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*4)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*4)*calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-6>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*6)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*6)*calc(1 - 
var(--tw-space-y-reverse)))}:where(.space-y-8>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing)*8)*var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing)*8)*calc(1 - var(--tw-space-y-reverse)))}.gap-x-4{-moz-column-gap:calc(var(--spacing)*4);column-gap:calc(var(--spacing)*4)}:where(.-space-x-px>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(-1px*var(--tw-space-x-reverse));margin-inline-end:calc(-1px*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-1>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*1)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*1)*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-2>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*2)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*2)*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-3>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*3)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*3)*calc(1 - var(--tw-space-x-reverse)))}:where(.space-x-4>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*4)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*4)*calc(1 - var(--tw-space-x-reverse)))}.gap-y-6{row-gap:calc(var(--spacing)*6)}:where(.divide-y>:not(:last-child)){--tw-divide-y-reverse:0;border-bottom-style:var(--tw-border-style);border-top-style:var(--tw-border-style);border-top-width:calc(1px*var(--tw-divide-y-reverse));border-bottom-width:calc(1px*calc(1 - var(--tw-divide-y-reverse)))}:where(.divide-gray-200>:not(:last-child)){border-color:var(--color-gray-200)}.truncate{text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.overflow-hidden{overflow:hidden}.overflow-x-auto{overflow-x:auto}.overflow-y-auto{overflow-y:auto}.scroll-smooth{scroll-behavior:smooth}.rounded{border-radius:.25rem}.rounded-full{border-radius:3.40282e38px}.rounded-lg{border-radius:var(--radius-lg)}.rounded-md{border-radius:var(--radius-md)}.rounded-none{border-radius:0}.rounded-t-md{border-top-left-radius:var(--radius-md);border-top-right-radius:var(--radius-md)}.rounded-l-md{border-top-left-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.rounded-l-none{border-top-left-radius:0;border-bottom-left-radius:0}.rounded-r-md{border-top-right-radius:var(--radius-md);border-bottom-right-radius:var(--radius-md)}.rounded-r-none{border-top-right-radius:0;border-bottom-right-radius:0}.rounded-b-md{border-bottom-right-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.border{border-style:var(--tw-border-style);border-width:1px}.border-2{border-style:var(--tw-border-style);border-width:2px}.border-4{border-style:var(--tw-border-style);border-width:4px}.border-t{border-top-style:var(--tw-border-style);border-top-width:1px}.border-r{border-right-style:var(--tw-border-style);border-right-width:1px}.border-b{border-bottom-style:var(--tw-border-style);border-bottom-width:1px}.border-b-2{border-bottom-style:var(--tw-border-style);border-bottom-width:2px}.border-l-0{border-left-style:var(--tw-border-style);border-left-width:0}.border-dashed{--tw-border-style:dashed;border-style:dashed}.border-blue-200{border-color:var(--color-blue-200)}.border-blue-500{border-color:var(--color-blue-500)}.border-blue-600{border-color:var(--color-blue-600)}.border-gray-100{border-color:var(--color-gray-100)}.border-gray-200{border-color:var(--c
olor-gray-200)}.border-gray-300{border-color:var(--color-gray-300)}.border-green-200{border-color:var(--color-green-200)}.border-red-200{border-color:var(--color-red-200)}.border-red-300{border-color:var(--color-red-300)}.border-transparent{border-color:#0000}.border-white{border-color:var(--color-white)}.border-yellow-200{border-color:var(--color-yellow-200)}.border-t-gray-900{border-top-color:var(--color-gray-900)}.border-r-gray-900{border-right-color:var(--color-gray-900)}.border-b-gray-900{border-bottom-color:var(--color-gray-900)}.border-l-gray-900{border-left-color:var(--color-gray-900)}.bg-black\/30{background-color:#0000004d}@supports (color:color-mix(in lab,red,red)){.bg-black\/30{background-color:color-mix(in oklab,var(--color-black)30%,transparent)}}.bg-blue-50{background-color:var(--color-blue-50)}.bg-blue-100{background-color:var(--color-blue-100)}.bg-blue-500{background-color:var(--color-blue-500)}.bg-blue-600{background-color:var(--color-blue-600)}.bg-gray-50{background-color:var(--color-gray-50)}.bg-gray-100{background-color:var(--color-gray-100)}.bg-gray-200{background-color:var(--color-gray-200)}.bg-gray-400{background-color:var(--color-gray-400)}.bg-gray-500{background-color:var(--color-gray-500)}.bg-gray-900{background-color:var(--color-gray-900)}.bg-green-50{background-color:var(--color-green-50)}.bg-green-100{background-color:var(--color-green-100)}.bg-green-500{background-color:var(--color-green-500)}.bg-orange-50{background-color:var(--color-orange-50)}.bg-purple-50{background-color:var(--color-purple-50)}.bg-purple-500{background-color:var(--color-purple-500)}.bg-red-50{background-color:var(--color-red-50)}.bg-red-100{background-color:var(--color-red-100)}.bg-red-500{background-color:var(--color-red-500)}.bg-red-600{background-color:var(--color-red-600)}.bg-white{background-color:var(--color-white)}.bg-yellow-50{background-color:var(--color-yellow-50)}.bg-yellow-100{background-color:var(--color-yellow-100)}.bg-yellow-500{background-color:var(--color-yellow-500)}.bg-gradient-to-r{--tw-gradient-position:to right in 
oklab;background-image:linear-gradient(var(--tw-gradient-stops))}.from-gray-50{--tw-gradient-from:var(--color-gray-50);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.to-white{--tw-gradient-to:var(--color-white);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.p-1{padding:calc(var(--spacing)*1)}.p-2{padding:calc(var(--spacing)*2)}.p-3{padding:calc(var(--spacing)*3)}.p-4{padding:calc(var(--spacing)*4)}.p-5{padding:calc(var(--spacing)*5)}.p-6{padding:calc(var(--spacing)*6)}.px-2{padding-inline:calc(var(--spacing)*2)}.px-2\.5{padding-inline:calc(var(--spacing)*2.5)}.px-3{padding-inline:calc(var(--spacing)*3)}.px-4{padding-inline:calc(var(--spacing)*4)}.px-6{padding-inline:calc(var(--spacing)*6)}.py-0\.5{padding-block:calc(var(--spacing)*.5)}.py-1{padding-block:calc(var(--spacing)*1)}.py-2{padding-block:calc(var(--spacing)*2)}.py-3{padding-block:calc(var(--spacing)*3)}.py-4{padding-block:calc(var(--spacing)*4)}.py-5{padding-block:calc(var(--spacing)*5)}.py-6{padding-block:calc(var(--spacing)*6)}.py-8{padding-block:calc(var(--spacing)*8)}.py-12{padding-block:calc(var(--spacing)*12)}.pt-2{padding-top:calc(var(--spacing)*2)}.pt-4{padding-top:calc(var(--spacing)*4)}.pt-5{padding-top:calc(var(--spacing)*5)}.pt-6{padding-top:calc(var(--spacing)*6)}.pt-20{padding-top:calc(var(--spacing)*20)}.pr-3{padding-right:calc(var(--spacing)*3)}.pr-8{padding-right:calc(var(--spacing)*8)}.pb-2{padding-bottom:calc(var(--spacing)*2)}.pb-4{padding-bottom:calc(var(--spacing)*4)}.pl-2{padding-left:calc(var(--spacing)*2)}.pl-3{padding-left:calc(var(--spacing)*3)}.pl-10{padding-left:calc(var(--spacing)*10)}.text-center{text-align:center}.text-left{text-align:left}.text-right{text-align:right}.font-mono{font-family:var(--font-mono)}.text-2xl{font-size:var(--text-2xl);line-height:var(--tw-leading,var(--text-2xl--line-height))}.text-3xl{font-size:var(--text-3xl);line-height:var(--tw-leading,var(--text-3xl--line-height))}.text-base{font-size:var(--text-base);line-height:var(--tw-leading,var(--text-base--line-height))}.text-lg{font-size:var(--text-lg);line-height:var(--tw-leading,var(--text-lg--line-height))}.text-sm{font-size:var(--text-sm);line-height:var(--tw-leading,var(--text-sm--line-height))}.text-xl{font-size:var(--text-xl);line-height:var(--tw-leading,var(--text-xl--line-height))}.text-xs{font-size:var(--text-xs);line-height:var(--tw-leading,var(--text-xs--line-height))}.leading-5{--tw-leading:calc(var(--spacing)*5);line-height:calc(var(--spacing)*5)}.leading-6{--tw-leading:calc(var(--spacing)*6);line-height:calc(var(--spacing)*6)}.font-bold{--tw-font-weight:var(--font-weight-bold);font-weight:var(--font-weight-bold)}.font-extrabold{--tw-font-weight:var(--font-weight-extrabold);font-weight:var(--font-weight-extrabold)}.font-medium{--tw-font-weight:var(--font-weight-medium);font-weight:var(--font-weight-medium)}.font-semibold{--tw-font-weight:var(--font-weight-semibold);font-weight:var(--font-weight-semibold)}.tracking-wide{--tw-tracking:var(--tracking-wide);letter-spacing:var(--tracking-wide)}.tracking-wider{--tw-tracking:var(--tracking-wider);letter-spacing:var(--tracking-wider)}.break-all{word-break:break-all}.text-black{color:var(--color-black)}.text-blue-400{color:var(--color-blue-400)}.text-blue-600{color:var(--color-blue-600)}.text-blue-700{
color:var(--color-blue-700)}.text-blue-800{color:var(--color-blue-800)}.text-gray-300{color:var(--color-gray-300)}.text-gray-400{color:var(--color-gray-400)}.text-gray-500{color:var(--color-gray-500)}.text-gray-600{color:var(--color-gray-600)}.text-gray-700{color:var(--color-gray-700)}.text-gray-800{color:var(--color-gray-800)}.text-gray-900{color:var(--color-gray-900)}.text-green-400{color:var(--color-green-400)}.text-green-500{color:var(--color-green-500)}.text-green-600{color:var(--color-green-600)}.text-green-700{color:var(--color-green-700)}.text-green-800{color:var(--color-green-800)}.text-indigo-600{color:var(--color-indigo-600)}.text-orange-700{color:var(--color-orange-700)}.text-purple-600{color:var(--color-purple-600)}.text-purple-700{color:var(--color-purple-700)}.text-red-400{color:var(--color-red-400)}.text-red-500{color:var(--color-red-500)}.text-red-600{color:var(--color-red-600)}.text-red-700{color:var(--color-red-700)}.text-red-800{color:var(--color-red-800)}.text-red-900{color:var(--color-red-900)}.text-white{color:var(--color-white)}.text-yellow-400{color:var(--color-yellow-400)}.text-yellow-600{color:var(--color-yellow-600)}.text-yellow-700{color:var(--color-yellow-700)}.text-yellow-800{color:var(--color-yellow-800)}.capitalize{text-transform:capitalize}.uppercase{text-transform:uppercase}.italic{font-style:italic}.placeholder-gray-400::-moz-placeholder{color:var(--color-gray-400)}.placeholder-gray-400::placeholder{color:var(--color-gray-400)}.placeholder-gray-500::-moz-placeholder{color:var(--color-gray-500)}.placeholder-gray-500::placeholder{color:var(--color-gray-500)}.opacity-0{opacity:0}.opacity-25{opacity:.25}.opacity-50{opacity:.5}.opacity-75{opacity:.75}.shadow{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-lg{--tw-shadow:0 10px 15px -3px var(--tw-shadow-color,#0000001a),0 4px 6px -4px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-sm{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.shadow-xl{--tw-shadow:0 20px 25px -5px var(--tw-shadow-color,#0000001a),0 8px 10px -6px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.ring,.ring-1{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(1px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.ring-blue-600\/20{--tw-ring-color:#155dfc33}@supports (color:color-mix(in lab,red,red)){.ring-blue-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-600)20%,transparent)}}.ring-gray-500\/20{--tw-ring-color:#6a728233}@supports (color:color-mix(in lab,red,red)){.ring-gray-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-500)20%,transparent)}}.ring-gray-600\/20{--tw-ring-color:#4a556533}@supports (color:color-mix(in lab,red,red)){.ring-gray-600\/20{--tw-ring-color:color-mix(in 
oklab,var(--color-gray-600)20%,transparent)}}.ring-green-600\/20{--tw-ring-color:#00a54433}@supports (color:color-mix(in lab,red,red)){.ring-green-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-600)20%,transparent)}}.ring-orange-600\/20{--tw-ring-color:#f0510033}@supports (color:color-mix(in lab,red,red)){.ring-orange-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-orange-600)20%,transparent)}}.ring-purple-600\/20{--tw-ring-color:#9810fa33}@supports (color:color-mix(in lab,red,red)){.ring-purple-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-purple-600)20%,transparent)}}.ring-red-600\/20{--tw-ring-color:#e4001433}@supports (color:color-mix(in lab,red,red)){.ring-red-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-600)20%,transparent)}}.ring-yellow-600\/20{--tw-ring-color:#cd890033}@supports (color:color-mix(in lab,red,red)){.ring-yellow-600\/20{--tw-ring-color:color-mix(in oklab,var(--color-yellow-600)20%,transparent)}}.filter{filter:var(--tw-blur,)var(--tw-brightness,)var(--tw-contrast,)var(--tw-grayscale,)var(--tw-hue-rotate,)var(--tw-invert,)var(--tw-saturate,)var(--tw-sepia,)var(--tw-drop-shadow,)}.transition-all{transition-property:all;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-colors{transition-property:color,background-color,border-color,outline-color,text-decoration-color,fill,stroke,--tw-gradient-from,--tw-gradient-via,--tw-gradient-to;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-shadow{transition-property:box-shadow;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-transform{transition-property:transform,translate,scale,rotate;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.duration-200{--tw-duration:.2s;transition-duration:.2s}.duration-300{--tw-duration:.3s;transition-duration:.3s}.ease-in-out{--tw-ease:var(--ease-in-out);transition-timing-function:var(--ease-in-out)}.ring-inset{--tw-ring-inset:inset}@media (hover:hover){.group-hover\:visible:is(:where(.group):hover *){visibility:visible}.group-hover\:opacity-100:is(:where(.group):hover *){opacity:1}}.first\:rounded-l-md:first-child{border-top-left-radius:var(--radius-md);border-bottom-left-radius:var(--radius-md)}.first\:border-l:first-child{border-left-style:var(--tw-border-style);border-left-width:1px}@media 
(hover:hover){.hover\:scale-105:hover{--tw-scale-x:105%;--tw-scale-y:105%;--tw-scale-z:105%;scale:var(--tw-scale-x)var(--tw-scale-y)}.hover\:border-blue-400:hover{border-color:var(--color-blue-400)}.hover\:border-gray-400:hover{border-color:var(--color-gray-400)}.hover\:bg-blue-200:hover{background-color:var(--color-blue-200)}.hover\:bg-blue-700:hover{background-color:var(--color-blue-700)}.hover\:bg-gray-50:hover{background-color:var(--color-gray-50)}.hover\:bg-gray-200:hover{background-color:var(--color-gray-200)}.hover\:bg-red-200:hover{background-color:var(--color-red-200)}.hover\:bg-red-700:hover{background-color:var(--color-red-700)}.hover\:text-blue-500:hover{color:var(--color-blue-500)}.hover\:text-blue-600:hover{color:var(--color-blue-600)}.hover\:text-gray-600:hover{color:var(--color-gray-600)}.hover\:text-gray-700:hover{color:var(--color-gray-700)}.hover\:text-gray-800:hover{color:var(--color-gray-800)}.hover\:text-gray-900:hover{color:var(--color-gray-900)}.hover\:text-green-500:hover{color:var(--color-green-500)}.hover\:text-green-900:hover{color:var(--color-green-900)}.hover\:text-indigo-900:hover{color:var(--color-indigo-900)}.hover\:text-red-500:hover{color:var(--color-red-500)}.hover\:text-red-900:hover{color:var(--color-red-900)}.hover\:text-yellow-300:hover{color:var(--color-yellow-300)}.hover\:text-yellow-500:hover{color:var(--color-yellow-500)}.hover\:underline:hover{text-decoration-line:underline}.hover\:shadow-md:hover{--tw-shadow:0 4px 6px -1px var(--tw-shadow-color,#0000001a),0 2px 4px -2px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.hover\:shadow-sm:hover{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a),0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}}.focus\:z-10:focus{z-index:10}.focus\:border-blue-500:focus{border-color:var(--color-blue-500)}.focus\:bg-red-200:focus{background-color:var(--color-red-200)}.focus\:placeholder-gray-400:focus::-moz-placeholder{color:var(--color-gray-400)}.focus\:placeholder-gray-400:focus::placeholder{color:var(--color-gray-400)}.focus\:ring-1:focus{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(1px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.focus\:ring-2:focus{--tw-ring-shadow:var(--tw-ring-inset,)0 0 0 calc(2px + var(--tw-ring-offset-width))var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow),var(--tw-inset-ring-shadow),var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow)}.focus\:ring-blue-500:focus{--tw-ring-color:var(--color-blue-500)}.focus\:ring-gray-500:focus{--tw-ring-color:var(--color-gray-500)}.focus\:ring-green-500:focus{--tw-ring-color:var(--color-green-500)}.focus\:ring-indigo-500:focus{--tw-ring-color:var(--color-indigo-500)}.focus\:ring-red-500:focus{--tw-ring-color:var(--color-red-500)}.focus\:ring-white:focus{--tw-ring-color:var(--color-white)}.focus\:ring-yellow-500:focus{--tw-ring-color:var(--color-yellow-500)}.focus\:ring-offset-2:focus{--tw-ring-offset-width:2px;--tw-ring-offset-shadow:var(--tw-ring-inset,)0 0 0 
var(--tw-ring-offset-width)var(--tw-ring-offset-color)}.focus\:outline-none:focus{--tw-outline-style:none;outline-style:none}.focus\:ring-inset:focus{--tw-ring-inset:inset}.disabled\:cursor-not-allowed:disabled{cursor:not-allowed}.disabled\:bg-gray-400:disabled{background-color:var(--color-gray-400)}.disabled\:opacity-50:disabled{opacity:.5}@media (hover:hover){.disabled\:hover\:bg-gray-400:disabled:hover{background-color:var(--color-gray-400)}}@media (min-width:640px){.sm\:mx-auto{margin-inline:auto}.sm\:mt-0{margin-top:calc(var(--spacing)*0)}.sm\:ml-4{margin-left:calc(var(--spacing)*4)}.sm\:block{display:block}.sm\:flex{display:flex}.sm\:hidden{display:none}.sm\:w-full{width:100%}.sm\:max-w-md{max-width:var(--container-md)}.sm\:flex-1{flex:1}.sm\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.sm\:items-center{align-items:center}.sm\:justify-between{justify-content:space-between}.sm\:rounded-lg{border-radius:var(--radius-lg)}.sm\:p-6{padding:calc(var(--spacing)*6)}.sm\:px-6{padding-inline:calc(var(--spacing)*6)}.sm\:px-10{padding-inline:calc(var(--spacing)*10)}.sm\:text-sm{font-size:var(--text-sm);line-height:var(--tw-leading,var(--text-sm--line-height))}}@media (min-width:768px){.md\:ml-2{margin-left:calc(var(--spacing)*2)}.md\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.md\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}:where(.md\:space-x-3>:not(:last-child)){--tw-space-x-reverse:0;margin-inline-start:calc(calc(var(--spacing)*3)*var(--tw-space-x-reverse));margin-inline-end:calc(calc(var(--spacing)*3)*calc(1 - var(--tw-space-x-reverse)))}}@media (min-width:1024px){.lg\:fixed{position:fixed}.lg\:inset-y-0{inset-block:calc(var(--spacing)*0)}.lg\:flex{display:flex}.lg\:hidden{display:none}.lg\:w-64{width:calc(var(--spacing)*64)}.lg\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.lg\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.lg\:grid-cols-4{grid-template-columns:repeat(4,minmax(0,1fr))}.lg\:flex-col{flex-direction:column}.lg\:px-8{padding-inline:calc(var(--spacing)*8)}.lg\:pt-6{padding-top:calc(var(--spacing)*6)}.lg\:pl-64{padding-left:calc(var(--spacing)*64)}}.dark .dark\:block{display:block}.dark .dark\:hidden{display:none}:where(.dark .dark\:divide-gray-700>:not(:last-child)){border-color:var(--color-gray-700)}.dark .dark\:border-blue-700{border-color:var(--color-blue-700)}.dark .dark\:border-blue-800{border-color:var(--color-blue-800)}.dark .dark\:border-gray-600{border-color:var(--color-gray-600)}.dark .dark\:border-gray-700{border-color:var(--color-gray-700)}.dark .dark\:border-green-700{border-color:var(--color-green-700)}.dark .dark\:border-red-600{border-color:var(--color-red-600)}.dark .dark\:border-red-700{border-color:var(--color-red-700)}.dark .dark\:border-red-800{border-color:var(--color-red-800)}.dark .dark\:border-yellow-700{border-color:var(--color-yellow-700)}.dark .dark\:border-yellow-800{border-color:var(--color-yellow-800)}.dark .dark\:bg-black\/50{background-color:#00000080}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-black\/50{background-color:color-mix(in oklab,var(--color-black)50%,transparent)}}.dark .dark\:bg-blue-500\/10{background-color:#3080ff1a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-blue-500\/10{background-color:color-mix(in oklab,var(--color-blue-500)10%,transparent)}}.dark .dark\:bg-blue-900{background-color:var(--color-blue-900)}.dark .dark\:bg-blue-900\/20{background-color:#1c398e33}@supports (color:color-mix(in lab,red,red)){.dark 
.dark\:bg-blue-900\/20{background-color:color-mix(in oklab,var(--color-blue-900)20%,transparent)}}.dark .dark\:bg-blue-900\/50{background-color:#1c398e80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-blue-900\/50{background-color:color-mix(in oklab,var(--color-blue-900)50%,transparent)}}.dark .dark\:bg-gray-500\/10{background-color:#6a72821a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-500\/10{background-color:color-mix(in oklab,var(--color-gray-500)10%,transparent)}}.dark .dark\:bg-gray-600{background-color:var(--color-gray-600)}.dark .dark\:bg-gray-700{background-color:var(--color-gray-700)}.dark .dark\:bg-gray-800{background-color:var(--color-gray-800)}.dark .dark\:bg-gray-800\/50{background-color:#1e293980}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-800\/50{background-color:color-mix(in oklab,var(--color-gray-800)50%,transparent)}}.dark .dark\:bg-gray-900{background-color:var(--color-gray-900)}.dark .dark\:bg-gray-900\/50{background-color:#10182880}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-gray-900\/50{background-color:color-mix(in oklab,var(--color-gray-900)50%,transparent)}}.dark .dark\:bg-green-500\/10{background-color:#00c7581a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-green-500\/10{background-color:color-mix(in oklab,var(--color-green-500)10%,transparent)}}.dark .dark\:bg-green-900{background-color:var(--color-green-900)}.dark .dark\:bg-green-900\/50{background-color:#0d542b80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-green-900\/50{background-color:color-mix(in oklab,var(--color-green-900)50%,transparent)}}.dark .dark\:bg-orange-500\/10{background-color:#fe6e001a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-orange-500\/10{background-color:color-mix(in oklab,var(--color-orange-500)10%,transparent)}}.dark .dark\:bg-purple-500\/10{background-color:#ac4bff1a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-purple-500\/10{background-color:color-mix(in oklab,var(--color-purple-500)10%,transparent)}}.dark .dark\:bg-red-500\/10{background-color:#fb2c361a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-500\/10{background-color:color-mix(in oklab,var(--color-red-500)10%,transparent)}}.dark .dark\:bg-red-700{background-color:var(--color-red-700)}.dark .dark\:bg-red-800{background-color:var(--color-red-800)}.dark .dark\:bg-red-900{background-color:var(--color-red-900)}.dark .dark\:bg-red-900\/20{background-color:#82181a33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-900\/20{background-color:color-mix(in oklab,var(--color-red-900)20%,transparent)}}.dark .dark\:bg-red-900\/50{background-color:#82181a80}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-red-900\/50{background-color:color-mix(in oklab,var(--color-red-900)50%,transparent)}}.dark .dark\:bg-yellow-500\/10{background-color:#edb2001a}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-yellow-500\/10{background-color:color-mix(in oklab,var(--color-yellow-500)10%,transparent)}}.dark .dark\:bg-yellow-900{background-color:var(--color-yellow-900)}.dark .dark\:bg-yellow-900\/20{background-color:#733e0a33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:bg-yellow-900\/20{background-color:color-mix(in oklab,var(--color-yellow-900)20%,transparent)}}.dark 
.dark\:from-gray-800{--tw-gradient-from:var(--color-gray-800);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.dark .dark\:to-gray-700{--tw-gradient-to:var(--color-gray-700);--tw-gradient-stops:var(--tw-gradient-via-stops,var(--tw-gradient-position),var(--tw-gradient-from)var(--tw-gradient-from-position),var(--tw-gradient-to)var(--tw-gradient-to-position))}.dark .dark\:text-blue-200{color:var(--color-blue-200)}.dark .dark\:text-blue-300{color:var(--color-blue-300)}.dark .dark\:text-blue-400{color:var(--color-blue-400)}.dark .dark\:text-gray-200{color:var(--color-gray-200)}.dark .dark\:text-gray-300{color:var(--color-gray-300)}.dark .dark\:text-gray-400{color:var(--color-gray-400)}.dark .dark\:text-gray-500{color:var(--color-gray-500)}.dark .dark\:text-green-200{color:var(--color-green-200)}.dark .dark\:text-green-300{color:var(--color-green-300)}.dark .dark\:text-green-400{color:var(--color-green-400)}.dark .dark\:text-indigo-400{color:var(--color-indigo-400)}.dark .dark\:text-orange-400{color:var(--color-orange-400)}.dark .dark\:text-purple-400{color:var(--color-purple-400)}.dark .dark\:text-red-100{color:var(--color-red-100)}.dark .dark\:text-red-200{color:var(--color-red-200)}.dark .dark\:text-red-300{color:var(--color-red-300)}.dark .dark\:text-red-400{color:var(--color-red-400)}.dark .dark\:text-white{color:var(--color-white)}.dark .dark\:text-yellow-200{color:var(--color-yellow-200)}.dark .dark\:text-yellow-300{color:var(--color-yellow-300)}.dark .dark\:text-yellow-400{color:var(--color-yellow-400)}.dark .dark\:placeholder-gray-400::-moz-placeholder{color:var(--color-gray-400)}.dark .dark\:placeholder-gray-400::placeholder{color:var(--color-gray-400)}.dark .dark\:placeholder-gray-500::-moz-placeholder{color:var(--color-gray-500)}.dark .dark\:placeholder-gray-500::placeholder{color:var(--color-gray-500)}.dark .dark\:ring-blue-400\/20{--tw-ring-color:#54a2ff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-400)20%,transparent)}}.dark .dark\:ring-blue-400\/30{--tw-ring-color:#54a2ff4d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-blue-400)30%,transparent)}}.dark .dark\:ring-blue-500\/20{--tw-ring-color:#3080ff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-blue-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-blue-500)20%,transparent)}}.dark .dark\:ring-gray-400\/20{--tw-ring-color:#99a1af33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-400)20%,transparent)}}.dark .dark\:ring-gray-400\/30{--tw-ring-color:#99a1af4d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-gray-400)30%,transparent)}}.dark .dark\:ring-gray-500\/20{--tw-ring-color:#6a728233}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-gray-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-gray-500)20%,transparent)}}.dark .dark\:ring-green-400\/20{--tw-ring-color:#05df7233}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-green-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-400)20%,transparent)}}.dark .dark\:ring-green-400\/30{--tw-ring-color:#05df724d}@supports (color:color-mix(in lab,red,red)){.dark 
.dark\:ring-green-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-green-400)30%,transparent)}}.dark .dark\:ring-green-500\/20{--tw-ring-color:#00c75833}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-green-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-green-500)20%,transparent)}}.dark .dark\:ring-orange-500\/20{--tw-ring-color:#fe6e0033}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-orange-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-orange-500)20%,transparent)}}.dark .dark\:ring-purple-500\/20{--tw-ring-color:#ac4bff33}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-purple-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-purple-500)20%,transparent)}}.dark .dark\:ring-red-400\/20{--tw-ring-color:#ff656833}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-400\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-400)20%,transparent)}}.dark .dark\:ring-red-400\/30{--tw-ring-color:#ff65684d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-red-400)30%,transparent)}}.dark .dark\:ring-red-500\/20{--tw-ring-color:#fb2c3633}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-red-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-red-500)20%,transparent)}}.dark .dark\:ring-yellow-400\/30{--tw-ring-color:#fac8004d}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-yellow-400\/30{--tw-ring-color:color-mix(in oklab,var(--color-yellow-400)30%,transparent)}}.dark .dark\:ring-yellow-500\/20{--tw-ring-color:#edb20033}@supports (color:color-mix(in lab,red,red)){.dark .dark\:ring-yellow-500\/20{--tw-ring-color:color-mix(in oklab,var(--color-yellow-500)20%,transparent)}}@media (hover:hover){.dark .dark\:hover\:border-blue-400:hover{border-color:var(--color-blue-400)}.dark .dark\:hover\:border-gray-500:hover{border-color:var(--color-gray-500)}.dark .dark\:hover\:bg-blue-800:hover{background-color:var(--color-blue-800)}.dark .dark\:hover\:bg-gray-500:hover{background-color:var(--color-gray-500)}.dark .dark\:hover\:bg-gray-600:hover{background-color:var(--color-gray-600)}.dark .dark\:hover\:bg-gray-700:hover{background-color:var(--color-gray-700)}.dark .dark\:hover\:bg-gray-800:hover{background-color:var(--color-gray-800)}.dark .dark\:hover\:bg-red-700:hover{background-color:var(--color-red-700)}.dark .dark\:hover\:bg-red-800:hover{background-color:var(--color-red-800)}.dark .dark\:hover\:text-blue-300:hover{color:var(--color-blue-300)}.dark .dark\:hover\:text-gray-100:hover{color:var(--color-gray-100)}.dark .dark\:hover\:text-gray-300:hover{color:var(--color-gray-300)}.dark .dark\:hover\:text-green-300:hover{color:var(--color-green-300)}.dark .dark\:hover\:text-indigo-300:hover{color:var(--color-indigo-300)}.dark .dark\:hover\:text-red-300:hover{color:var(--color-red-300)}.dark .dark\:hover\:text-white:hover{color:var(--color-white)}}.dark .dark\:focus\:bg-red-700:focus{background-color:var(--color-red-700)}.dark .dark\:focus\:ring-offset-gray-900:focus{--tw-ring-offset-color:var(--color-gray-900)}}@property --tw-translate-x{syntax:"*";inherits:false;initial-value:0}@property --tw-translate-y{syntax:"*";inherits:false;initial-value:0}@property --tw-translate-z{syntax:"*";inherits:false;initial-value:0}@property --tw-rotate-x{syntax:"*";inherits:false}@property --tw-rotate-y{syntax:"*";inherits:false}@property --tw-rotate-z{syntax:"*";inherits:false}@property --tw-skew-x{syntax:"*";inherits:false}@property 
--tw-skew-y{syntax:"*";inherits:false}@property --tw-space-y-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-space-x-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-divide-y-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-border-style{syntax:"*";inherits:false;initial-value:solid}@property --tw-gradient-position{syntax:"*";inherits:false}@property --tw-gradient-from{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-via{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-to{syntax:"";inherits:false;initial-value:#0000}@property --tw-gradient-stops{syntax:"*";inherits:false}@property --tw-gradient-via-stops{syntax:"*";inherits:false}@property --tw-gradient-from-position{syntax:"";inherits:false;initial-value:0%}@property --tw-gradient-via-position{syntax:"";inherits:false;initial-value:50%}@property --tw-gradient-to-position{syntax:"";inherits:false;initial-value:100%}@property --tw-leading{syntax:"*";inherits:false}@property --tw-font-weight{syntax:"*";inherits:false}@property --tw-tracking{syntax:"*";inherits:false}@property --tw-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-shadow-color{syntax:"*";inherits:false}@property --tw-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-inset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-inset-shadow-color{syntax:"*";inherits:false}@property --tw-inset-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-ring-color{syntax:"*";inherits:false}@property --tw-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-inset-ring-color{syntax:"*";inherits:false}@property --tw-inset-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-ring-inset{syntax:"*";inherits:false}@property --tw-ring-offset-width{syntax:"";inherits:false;initial-value:0}@property --tw-ring-offset-color{syntax:"*";inherits:false;initial-value:#fff}@property --tw-ring-offset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-blur{syntax:"*";inherits:false}@property --tw-brightness{syntax:"*";inherits:false}@property --tw-contrast{syntax:"*";inherits:false}@property --tw-grayscale{syntax:"*";inherits:false}@property --tw-hue-rotate{syntax:"*";inherits:false}@property --tw-invert{syntax:"*";inherits:false}@property --tw-opacity{syntax:"*";inherits:false}@property --tw-saturate{syntax:"*";inherits:false}@property --tw-sepia{syntax:"*";inherits:false}@property --tw-drop-shadow{syntax:"*";inherits:false}@property --tw-drop-shadow-color{syntax:"*";inherits:false}@property --tw-drop-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-drop-shadow-size{syntax:"*";inherits:false}@property --tw-duration{syntax:"*";inherits:false}@property --tw-ease{syntax:"*";inherits:false}@property --tw-scale-x{syntax:"*";inherits:false;initial-value:1}@property --tw-scale-y{syntax:"*";inherits:false;initial-value:1}@property --tw-scale-z{syntax:"*";inherits:false;initial-value:1}@keyframes spin{to{transform:rotate(360deg)}}@keyframes pulse{50%{opacity:.5}} diff --git a/webapp/assets/_app/immutable/chunks/5WA7h8uK.js b/webapp/assets/_app/immutable/chunks/5WA7h8uK.js new file mode 100644 index 00000000..04989407 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/5WA7h8uK.js @@ -0,0 +1 @@ +import{M as K,K as T,L as j,N as C,_ as F,a0 as q,a1 as $,Y as z,a2 as x,O as G,P as A,Q as H,at as J,R as Z,aa as Q,U as V,T as W,au as D,m as X,av as k,s as U,J 
as ee,g as m,aw as re,ax as ne,ay as w,az as se,aA as M,ar as ae,q as ie,aB as te,aj as R,aC as ue,a6 as fe,aD as le,u as oe,aE as ce,aF as de,aG as _e,aH as N,aI as L,aJ as pe,aK as ve,S as Y,aL as B,aM as S}from"./D8EpLgQ1.js";function Ie(e,r,s=!1){T&&j();var n=e,a=null,i=null,l=J,d=s?C:0,p=!1;const P=(o,u=!0)=>{p=!0,_(u,o)};var f=null;function I(){f!==null&&(f.lastChild.remove(),n.before(f),f=null);var o=l?a:i,u=l?i:a;o&&Q(o),u&&V(u,()=>{l?i=null:a=null})}const _=(o,u)=>{if(l===(l=o))return;let g=!1;if(T){const E=F(n)===q;!!l===E&&(n=$(),z(n),x(!1),g=!0)}var b=Z(),c=n;if(b&&(f=document.createDocumentFragment(),f.append(c=G())),l?a??=u&&A(()=>u(c)):i??=u&&A(()=>u(c)),b){var h=H,t=l?a:i,v=l?i:a;t&&h.skipped_effects.delete(t),v&&h.skipped_effects.add(v),h.add_callback(I)}else I();g&&x(!0)};K(()=>{p=!1,r(P),p||_(null,null)},d),T&&(n=W)}let O=!1,y=Symbol();function ge(e,r,s){const n=s[r]??={store:null,source:X(void 0),unsubscribe:D};if(n.store!==e&&!(y in s))if(n.unsubscribe(),n.store=e??null,e==null)n.source.v=void 0,n.unsubscribe=D;else{var a=!0;n.unsubscribe=k(e,i=>{a?n.source.v=i:U(n.source,i)}),a=!1}return e&&y in s?ee(e):m(n.source)}function Ee(){const e={};function r(){re(()=>{for(var s in e)e[s].unsubscribe();ne(e,y,{enumerable:!1,value:!0})})}return[e,r]}function be(e){var r=O;try{return O=!1,[e(),O]}finally{O=r}}const he={get(e,r){if(!e.exclude.includes(r))return m(e.version),r in e.special?e.special[r]():e.props[r]},set(e,r,s){if(!(r in e.special)){var n=R;try{L(e.parent_effect),e.special[r]=me({get[r](){return e.props[r]}},r,M)}finally{L(n)}}return e.special[r](s),N(e.version),!0},getOwnPropertyDescriptor(e,r){if(!e.exclude.includes(r)&&r in e.props)return{enumerable:!0,configurable:!0,value:e.props[r]}},deleteProperty(e,r){return e.exclude.includes(r)||(e.exclude.push(r),N(e.version)),!0},has(e,r){return e.exclude.includes(r)?!1:r in e.props},ownKeys(e){return Reflect.ownKeys(e.props).filter(r=>!e.exclude.includes(r))}};function Oe(e,r){return new Proxy({props:e,exclude:r,special:{},version:fe(0),parent_effect:R},he)}const Se={get(e,r){let s=e.props.length;for(;s--;){let n=e.props[s];if(S(n)&&(n=n()),typeof n=="object"&&n!==null&&r in n)return n[r]}},set(e,r,s){let n=e.props.length;for(;n--;){let a=e.props[n];S(a)&&(a=a());const i=w(a,r);if(i&&i.set)return i.set(s),!0}return!1},getOwnPropertyDescriptor(e,r){let s=e.props.length;for(;s--;){let n=e.props[s];if(S(n)&&(n=n()),typeof n=="object"&&n!==null&&r in n){const a=w(n,r);return a&&!a.configurable&&(a.configurable=!0),a}}},has(e,r){if(r===Y||r===B)return!1;for(let s of e.props)if(S(s)&&(s=s()),s!=null&&r in s)return!0;return!1},ownKeys(e){const r=[];for(let s of e.props)if(S(s)&&(s=s()),!!s){for(const n in s)r.includes(n)||r.push(n);for(const n of Object.getOwnPropertySymbols(s))r.includes(n)||r.push(n)}return r}};function Te(...e){return new Proxy({props:e},Se)}function me(e,r,s,n){var a=!ce||(s&de)!==0,i=(s&le)!==0,l=(s&pe)!==0,d=n,p=!0,P=()=>(p&&(p=!1,d=l?oe(n):n),d),f;if(i){var I=Y in e||B in e;f=w(e,r)?.set??(I&&r in e?t=>e[r]=t:void 0)}var _,o=!1;i?[_,o]=be(()=>e[r]):_=e[r],_===void 0&&n!==void 0&&(_=P(),f&&(a&&se(),f(_)));var u;if(a?u=()=>{var t=e[r];return t===void 0?P():(p=!0,t)}:u=()=>{var t=e[r];return t!==void 0&&(d=void 0),t===void 0?d:t},a&&(s&M)===0)return u;if(f){var g=e.$$legacy;return function(t,v){return arguments.length>0?((!a||!v||g||o)&&f(v?u():t),t):u()}}var b=!1,c=((s&_e)!==0?ae:ie)(()=>(b=!1,u()));i&&m(c);var h=R;return function(t,v){if(arguments.length>0){const E=v?m(c):a&&i?te(t):t;return 
U(c,E),b=!0,d!==void 0&&(d=E),t}return ve&&b||(h.f&ue)!==0?c.v:m(c)}}export{ge as a,Te as b,Ie as i,Oe as l,me as p,Ee as s}; diff --git a/webapp/assets/_app/immutable/chunks/B3Pzt0F_.js b/webapp/assets/_app/immutable/chunks/B3Pzt0F_.js new file mode 100644 index 00000000..dd5dc33f --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/B3Pzt0F_.js @@ -0,0 +1 @@ +import{am as g,an as d,ao as c,u as m,ap as b,aq as i,g as p,n as v,ar as h,as as k}from"./D8EpLgQ1.js";function x(n=!1){const s=g,e=s.l.u;if(!e)return;let f=()=>v(s.s);if(n){let t=0,a={};const _=h(()=>{let l=!1;const r=s.s;for(const o in r)r[o]!==a[o]&&(a[o]=r[o],l=!0);return l&&t++,t});f=()=>p(_)}e.b.length&&d(()=>{u(s,f),i(e.b)}),c(()=>{const t=m(()=>e.m.map(b));return()=>{for(const a of t)typeof a=="function"&&a()}}),e.a.length&&c(()=>{u(s,f),i(e.a)})}function u(n,s){if(n.l.s)for(const e of n.l.s)p(e);s()}k();export{x as i}; diff --git a/webapp/assets/_app/immutable/chunks/B7ITzBt8.js b/webapp/assets/_app/immutable/chunks/B7ITzBt8.js new file mode 100644 index 00000000..8b2103d3 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/B7ITzBt8.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as j}from"./B3Pzt0F_.js";import{p as R,l as w,a as q,f as g,t as v,c as k,d as A,k as B,j as u,s as _,m as y,r as m,n as f,u as b,g as d,v as h}from"./D8EpLgQ1.js";import{p as o,i as D}from"./5WA7h8uK.js";import{c as U,s as F}from"./CiE1LlKV.js";import{b as r}from"./CoIRRsD9.js";var G=g('
                '),H=g('');function V(x,n){R(n,!1);const i=y(),p=y();let e=o(n,"item",8),s=o(n,"entityType",8,"repository"),$=o(n,"showOwner",8,!1),E=o(n,"showId",8,!1),I=o(n,"fontMono",8,!1);function z(){if(!e())return"Unknown";switch(s()){case"repository":return $()?`${e().owner||"Unknown"}/${e().name||"Unknown"}`:e().name||"Unknown";case"organization":case"enterprise":return e().name||"Unknown";case"pool":return E()?e().id||"Unknown":e().name||"Unknown";case"scaleset":return e().name||"Unknown";case"instance":return e().name||"Unknown";default:return e().name||e().id||"Unknown"}}function C(){if(!e())return"#";let t;switch(s()){case"instance":t=e().name;break;default:t=e().id||e().name;break}if(!t)return"#";switch(s()){case"repository":return`${r}/repositories/${t}`;case"organization":return`${r}/organizations/${t}`;case"enterprise":return`${r}/enterprises/${t}`;case"pool":return`${r}/pools/${t}`;case"scaleset":return`${r}/scalesets/${t}`;case"instance":return`${r}/instances/${encodeURIComponent(t)}`;default:return"#"}}w(()=>{},()=>{_(i,z())}),w(()=>{},()=>{_(p,C())}),q(),j();var c=H(),a=u(c),M=u(a,!0);m(a);var N=B(a,2);{var O=t=>{var l=G(),T=u(l,!0);m(l),v(()=>h(T,(f(e()),b(()=>e().provider_id)))),k(t,l)};D(N,t=>{f(s()),f(e()),b(()=>s()==="instance"&&e()?.provider_id)&&t(O)})}m(c),v(()=>{U(a,"href",d(p)),F(a,1,`block w-full truncate text-blue-600 dark:text-blue-400 hover:text-blue-500 dark:hover:text-blue-300 ${I()?"font-mono":""}`),U(a,"title",d(i)),h(M,d(i))}),k(x,c),A()}export{V as E}; diff --git a/webapp/assets/_app/immutable/chunks/BAg1iRPq.js b/webapp/assets/_app/immutable/chunks/BAg1iRPq.js new file mode 100644 index 00000000..9bf97768 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/BAg1iRPq.js @@ -0,0 +1 @@ +import{F as t,G as S,u as b,H as h,S as k}from"./D8EpLgQ1.js";function u(r,i){return r===i||r?.[k]===i}function d(r={},i,a,T){return t(()=>{var f,s;return S(()=>{f=s,s=[],b(()=>{r!==a(...s)&&(i(r,...s),f&&u(a(...f),r)&&i(null,...f))})}),()=>{h(()=>{s&&u(a(...s),r)&&i(null,...s)})}}),r}export{d as b}; diff --git a/webapp/assets/_app/immutable/chunks/BE4wujub.js b/webapp/assets/_app/immutable/chunks/BE4wujub.js new file mode 100644 index 00000000..6b6a2146 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/BE4wujub.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as v}from"./B3Pzt0F_.js";import{p as w,l as m,n as s,g as r,m as g,a as x,B as h,b as T,c as B,d as S,s as k,u}from"./D8EpLgQ1.js";import{k as A}from"./C9DJVOi1.js";import{p as d}from"./5WA7h8uK.js";import{k as b,B as C}from"./BGVHQGl-.js";import{f as E}from"./ow_oMtSd.js";function q(_,i){w(i,!1);const c=g(),n=g();let e=d(i,"item",8),l=d(i,"statusType",8,"entity"),a=d(i,"statusField",8,"status");m(()=>(s(e()),s(a())),()=>{k(c,e()?.[a()]||"unknown")}),m(()=>(s(e()),s(l()),r(c),s(a())),()=>{k(n,(()=>{if(!e())return{variant:"error",text:"Unknown"};switch(l()){case"entity":return b(e());case"instance":let 
t="secondary";switch(r(c).toLowerCase()){case"running":t="success";break;case"stopped":t="info";break;case"creating":case"pending_create":t="warning";break;case"deleting":case"pending_delete":case"pending_force_delete":t="warning";break;case"error":case"deleted":t="error";break;case"active":case"online":t="success";break;case"idle":t="info";break;case"pending":case"installing":t="warning";break;case"failed":case"terminated":case"offline":t="error";break;case"unknown":default:t="secondary";break}return{variant:t,text:E(r(c))};case"enabled":return{variant:e().enabled?"success":"error",text:e().enabled?"Enabled":"Disabled"};case"custom":const o=e()[a()]||"Unknown";if(a()==="auth-type"){const f=o==="pat"||!o?"pat":"app";return{variant:f==="pat"?"success":"info",text:f==="pat"?"PAT":"App"}}return{variant:"info",text:o};default:return b(e())}})())}),x(),v();var p=h(),y=T(p);A(y,()=>(s(e()),s(a()),u(()=>`${e()?.name||"item"}-${e()?.[a()]||"status"}-${e()?.updated_at||"time"}`)),t=>{C(t,{get variant(){return r(n),u(()=>r(n).variant)},get text(){return r(n),u(()=>r(n).text)}})}),B(_,p),S()}export{q as S}; diff --git a/webapp/assets/_app/immutable/chunks/BEkVdVE1.js b/webapp/assets/_app/immutable/chunks/BEkVdVE1.js new file mode 100644 index 00000000..1eb2c1f7 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/BEkVdVE1.js @@ -0,0 +1 @@ +import{I as u}from"./D8EpLgQ1.js";function c(){const{subscribe:s,set:i,update:o}=u([]),n={subscribe:s,add:e=>{const t=Math.random().toString(36).substr(2,9),r={...e,id:t,duration:e.duration??5e3};return o(a=>[...a,r]),r.duration&&r.duration>0&&setTimeout(()=>{o(a=>a.filter(d=>d.id!==t))},r.duration),t},remove:e=>{o(t=>t.filter(r=>r.id!==e))},clear:()=>{i([])},success:(e,t="",r)=>n.add({type:"success",title:e,message:t,duration:r}),error:(e,t="",r)=>n.add({type:"error",title:e,message:t,duration:r}),info:(e,t="",r)=>n.add({type:"info",title:e,message:t,duration:r}),warning:(e,t="",r)=>n.add({type:"warning",title:e,message:t,duration:r})};return n}const p=c();export{p as t}; diff --git a/webapp/assets/_app/immutable/chunks/BGVHQGl-.js b/webapp/assets/_app/immutable/chunks/BGVHQGl-.js new file mode 100644 index 00000000..e53186fd --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/BGVHQGl-.js @@ -0,0 +1,4 @@ +import"./DsnmJJEf.js";import{i as m}from"./B3Pzt0F_.js";import{p as w,l as x,n as d,a as k,f as _,t as b,c as v,d as y,s as h,m as E,j as B,r as z,g as L,v as M}from"./D8EpLgQ1.js";import{s as j,e as $}from"./CiE1LlKV.js";import{p as o}from"./5WA7h8uK.js";function S(e){if(!e)return"N/A";try{return(typeof e=="string"?new Date(e):e).toLocaleString()}catch{return"Invalid Date"}}function A(e,r="w-4 h-4"){return e==="gitea"?``:e==="github"?`
                `:` + + + `}function C(e,r){if(e.repo_name)return e.repo_name;if(e.org_name)return e.org_name;if(e.enterprise_name)return e.enterprise_name;if(e.repo_id&&!e.repo_name&&r?.repositories){const n=r.repositories.find(t=>t.id===e.repo_id);return n?`${n.owner}/${n.name}`:"Unknown Entity"}if(e.org_id&&!e.org_name&&r?.organizations){const n=r.organizations.find(t=>t.id===e.org_id);return n&&n.name?n.name:"Unknown Entity"}if(e.enterprise_id&&!e.enterprise_name&&r?.enterprises){const n=r.enterprises.find(t=>t.id===e.enterprise_id);return n&&n.name?n.name:"Unknown Entity"}return"Unknown Entity"}function H(e){return e.repo_id?"repository":e.org_id?"organization":e.enterprise_id?"enterprise":"unknown"}function P(e,r=""){return e.repo_id?`${r}/repositories/${e.repo_id}`:e.org_id?`${r}/organizations/${e.org_id}`:e.enterprise_id?`${r}/enterprises/${e.enterprise_id}`:"#"}function V(e){e&&(e.scrollTop=e.scrollHeight)}function W(e){return{newPerPage:e,newCurrentPage:1}}function q(e){return e.pool_manager_status?.running?{text:"Running",variant:"success"}:{text:"Stopped",variant:"error"}}function G(e){switch(e.toLowerCase()){case"error":return{text:"Error",variant:"error"};case"warning":return{text:"Warning",variant:"warning"};case"info":return{text:"Info",variant:"info"};default:return{text:e,variant:"info"}}}function l(e,r,n){if(!r.trim())return e;const t=r.toLowerCase();return e.filter(a=>typeof n=="function"?n(a).toLowerCase().includes(t):n.some(i=>a[i]?.toString().toLowerCase().includes(t)))}function J(e,r){return l(e,r,["name","owner"])}function K(e,r){return l(e,r,["name"])}function O(e,r){return l(e,r,n=>[n.name||"",n.description||"",n.endpoint?.name||""].join(" "))}function Q(e,r){return l(e,r,["name","description","base_url","api_base_url"])}function X(e,r,n){return e.slice((r-1)*n,r*n)}var T=_(" ");function Y(e,r){w(r,!1);const n=E();let t=o(r,"variant",8,"gray"),a=o(r,"size",8,"sm"),i=o(r,"text",8),g=o(r,"ring",8,!1);const c={success:"bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200",error:"bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200",warning:"bg-yellow-100 dark:bg-yellow-900 text-yellow-800 dark:text-yellow-200",info:"bg-blue-100 dark:bg-blue-900 text-blue-800 dark:text-blue-200",gray:"bg-gray-100 dark:bg-gray-700 text-gray-800 dark:text-gray-200",blue:"bg-blue-100 dark:bg-blue-900 text-blue-800 dark:text-blue-200",green:"bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200",red:"bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200",yellow:"bg-yellow-100 dark:bg-yellow-900 text-yellow-800 dark:text-yellow-200",secondary:"bg-gray-100 dark:bg-gray-700 text-gray-800 dark:text-gray-200"},u={success:"ring-green-600/20 dark:ring-green-400/30",error:"ring-red-600/20 dark:ring-red-400/30",warning:"ring-yellow-600/20 dark:ring-yellow-400/30",info:"ring-blue-600/20 dark:ring-blue-400/30",gray:"ring-gray-500/20 dark:ring-gray-400/30",blue:"ring-blue-600/20 dark:ring-blue-400/30",green:"ring-green-600/20 dark:ring-green-400/30",red:"ring-red-600/20 dark:ring-red-400/30",yellow:"ring-yellow-600/20 dark:ring-yellow-400/30",secondary:"ring-gray-500/20 dark:ring-gray-400/30"},f={sm:"px-2 py-1 text-xs",md:"px-2.5 py-0.5 text-xs"};x(()=>(d(t()),d(a()),d(g())),()=>{h(n,["inline-flex items-center rounded-full font-semibold",c[t()],f[a()],g()?`ring-1 ring-inset ${u[t()]}`:""].filter(Boolean).join(" "))}),k(),m();var s=T(),p=B(s,!0);z(s),b(()=>{j(s,1,$(L(n))),M(p,i())}),v(e,s),y()}export{Y as B,Q as a,S as b,W as c,G as d,C as e,O as f,A as g,l as h,H as 
i,P as j,q as k,K as l,J as m,X as p,V as s}; diff --git a/webapp/assets/_app/immutable/chunks/BmGWMSQm.js b/webapp/assets/_app/immutable/chunks/BmGWMSQm.js new file mode 100644 index 00000000..7e21970e --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/BmGWMSQm.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as K}from"./B3Pzt0F_.js";import{p as O,f as U,j as e,r as t,k as r,n as m,u as o,z as et,t as q,v as p,c as h,d as Q,E as ct,D as mt,B as Y,b as Z,g as P}from"./D8EpLgQ1.js";import{p as T,i as G,s as ut,a as pt}from"./5WA7h8uK.js";import{c as at,B as gt}from"./CiE1LlKV.js";import{b as R,B as H,e as xt}from"./BGVHQGl-.js";import{b as ft}from"./CoIRRsD9.js";import{e as yt}from"./wyaP0EDu.js";import{D as ht,G as tt}from"./C9DJVOi1.js";import{E as _t}from"./B7ITzBt8.js";import{S as bt}from"./BE4wujub.js";import{e as kt,i as wt}from"./u94nIB4-.js";import{b as Ct}from"./BAg1iRPq.js";var Pt=U('

                ID
                Created At
                Updated At
                Status
                Pool Balancer Type
                ');function Wt(L,v){O(v,!1);let a=T(v,"entity",8),g=T(v,"entityType",8);function N(){return`${g().charAt(0).toUpperCase()+g().slice(1)} Information`}function n(){if(!a().endpoint?.base_url)return"#";switch(g()){case"repository":const d=a();return`${a().endpoint.base_url}/${d.owner}/${a().name}`;case"organization":return`${a().endpoint.base_url}/${a().name}`;case"enterprise":return`${a().endpoint.base_url}/enterprises/${a().name}`;default:return"#"}}function E(){return`${g().charAt(0).toUpperCase()+g().slice(1)} URL`}function V(){const d=a().pool_balancing_type;if(!d||d===""||d==="none")return"Round Robin (default)";switch(d){case"roundrobin":return"Round Robin";case"pack":return"Pack";default:return d}}K();var c=Pt(),x=e(c),_=e(x),I=e(_,!0);t(_);var b=r(_,2),i=e(b),f=r(e(i),2),k=e(f,!0);t(f),t(i);var u=r(i,2),D=r(e(u),2),M=e(D,!0);t(D),t(u);var w=r(u,2),S=r(e(w),2),$=e(S,!0);t(S),t(w);var s=r(w,2),C=r(e(s),2),l=e(C);{var j=d=>{H(d,{variant:"success",text:"Running"})},z=d=>{H(d,{variant:"error",text:"Stopped"})};G(l,d=>{m(a()),o(()=>a().pool_manager_status?.running)?d(j):d(z,!1)})}t(C),t(s);var B=r(s,2),A=r(e(B),2),y=e(A,!0);t(A),t(B);var W=r(B,2),F=e(W),rt=e(F,!0);t(F);var X=r(F,2),J=e(X),st=e(J);et(),t(J),t(X),t(W),t(b),t(x),t(c),q((d,ot,it,dt,nt,lt,vt)=>{p(I,d),p(k,(m(a()),o(()=>a().id))),p(M,ot),p($,it),p(y,dt),p(rt,nt),at(J,"href",lt),p(st,`${vt??""} `)},[()=>o(N),()=>(m(R),m(a()),o(()=>R(a().created_at))),()=>(m(R),m(a()),o(()=>R(a().updated_at))),()=>o(V),()=>o(E),()=>o(n),()=>o(n)]),h(L,c),Q()}var Tt=U('

                No pools configured

                '),Et=U('');function qt(L,v){O(v,!1);const[a,g]=ut(),N=()=>pt(yt,"$eagerCache",a);let n=T(v,"pools",8),E=T(v,"entityType",8),V=T(v,"entityId",8,""),c=T(v,"entityName",8,"");const x=ct();function _(){x("addPool",{entityType:E(),entityId:V(),entityName:c()})}const I=[{key:"id",title:"ID",flexible:!0,cellComponent:_t,cellProps:{entityType:"pool",showId:!0,fontMono:!0}},{key:"image",title:"Image",flexible:!0,cellComponent:tt,cellProps:{field:"image",type:"code",showTitle:!0}},{key:"provider",title:"Provider",cellComponent:tt,cellProps:{field:"provider_name"}},{key:"status",title:"Status",cellComponent:bt,cellProps:{statusType:"enabled"}}],b={entityType:"pool",primaryText:{field:"id",isClickable:!0,href:"/pools/{id}",useId:!0,isMonospace:!0},secondaryText:{field:"entity_name",computedValue:s=>xt(s,N())},badges:[{type:"custom",value:s=>({variant:s.enabled?"success":"error",text:s.enabled?"Enabled":"Disabled"})}]};K();var i=Et(),f=e(i),k=e(f),u=e(k),D=e(u);t(u);var M=r(u,2);t(k);var w=r(k,2);{var S=s=>{var C=Tt(),l=r(e(C),4),j=e(l);t(l);var z=r(l,2),B=e(z);gt(B,{variant:"primary",size:"sm",$$events:{click:_},children:(A,y)=>{et();var W=mt("Add Pool");h(A,W)},$$slots:{default:!0}}),t(z),t(C),q(()=>p(j,`No pools configured for this ${E()??""}.`)),h(s,C)},$=s=>{ht(s,{get columns(){return I},get data(){return n()},loading:!1,error:"",searchTerm:"",showSearch:!1,showPagination:!1,currentPage:1,get perPage(){return m(n()),o(()=>n().length)},totalPages:1,get totalItems(){return m(n()),o(()=>n().length)},itemName:"pools",emptyTitle:"No pools configured",get emptyMessage(){return`No pools configured for this ${E()??""}.`},emptyIconType:"cog",get mobileCardConfig(){return b}})};G(w,s=>{m(n()),o(()=>n().length===0)?s(S):s($,!1)})}t(f),t(i),q(()=>{p(D,`Pools (${m(n()),o(()=>n().length)??""})`),at(M,"href",`${ft}/pools`)}),h(L,i),Q(),g()}var It=U('

                '),Bt=U('

                Events

                '),Nt=U('

                Events

                No events available

                ');function Ft(L,v){O(v,!1);let a=T(v,"events",8),g=T(v,"eventsContainer",12,void 0);K();var N=Y(),n=Z(N);{var E=c=>{var x=Bt(),_=e(x),I=r(e(_),2);kt(I,5,a,wt,(b,i)=>{var f=It(),k=e(f),u=e(k),D=e(u,!0);t(u);var M=r(u,2),w=e(M);{var S=l=>{H(l,{variant:"error",text:"Error"})},$=l=>{var j=Y(),z=Z(j);{var B=y=>{H(y,{variant:"warning",text:"Warning"})},A=y=>{H(y,{variant:"info",text:"Info"})};G(z,y=>{P(i),o(()=>(P(i).event_level||"info").toLowerCase()==="warning")?y(B):y(A,!1)},!0)}h(l,j)};G(w,l=>{P(i),o(()=>(P(i).event_level||"info").toLowerCase()==="error")?l(S):l($,!1)})}var s=r(w,2),C=e(s,!0);t(s),t(M),t(k),t(f),q(l=>{p(D,(P(i),o(()=>P(i).message))),p(C,l)},[()=>(m(R),P(i),o(()=>R(P(i).created_at)))]),h(b,f)}),t(I),Ct(I,b=>g(b),()=>g()),t(_),t(x),h(c,x)},V=c=>{var x=Nt();h(c,x)};G(n,c=>{m(a()),o(()=>a()&&a().length>0)?c(E):c(V,!1)})}h(L,N),Q()}export{Wt as E,qt as P,Ft as a}; diff --git a/webapp/assets/_app/immutable/chunks/C41YH50Q.js b/webapp/assets/_app/immutable/chunks/C41YH50Q.js new file mode 100644 index 00000000..9f4c7cfe --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/C41YH50Q.js @@ -0,0 +1 @@ +import{s as e}from"./CTf6mQoE.js";const r=()=>{const s=e;return{page:{subscribe:s.page.subscribe},navigating:{subscribe:s.navigating.subscribe},updated:s.updated}},b={subscribe(s){return r().page.subscribe(s)}};export{b as p}; diff --git a/webapp/assets/_app/immutable/chunks/C6k1Q4We.js b/webapp/assets/_app/immutable/chunks/C6k1Q4We.js new file mode 100644 index 00000000..8d095aaf --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/C6k1Q4We.js @@ -0,0 +1 @@ +import{V as b,W as o,u as h,G as _,K as t,Q as f,X as m}from"./D8EpLgQ1.js";function y(e,a,c=a){var v=b(),d=new WeakSet;o(e,"input",r=>{var l=r?e.defaultValue:e.value;if(l=n(e)?s(l):l,c(l),f!==null&&d.add(f),v&&l!==(l=a())){var k=e.selectionStart,u=e.selectionEnd;e.value=l??"",u!==null&&(e.selectionStart=k,e.selectionEnd=Math.min(u,e.value.length))}}),(t&&e.defaultValue!==e.value||h(a)==null&&e.value)&&(c(n(e)?s(e.value):e.value),f!==null&&d.add(f)),_(()=>{var r=a();if(e===document.activeElement){var l=m??f;if(d.has(l))return}n(e)&&r===s(e.value)||e.type==="date"&&!r&&!e.value||r!==e.value&&(e.value=r??"")})}function E(e,a,c=a){o(e,"change",v=>{var d=v?e.defaultChecked:e.checked;c(d)}),(t&&e.defaultChecked!==e.checked||h(a)==null)&&c(e.checked),_(()=>{var v=a();e.checked=!!v})}function n(e){var a=e.type;return a==="number"||a==="range"}function s(e){return e===""?null:+e}export{E as a,y as b}; diff --git a/webapp/assets/_app/immutable/chunks/C89fcOde.js b/webapp/assets/_app/immutable/chunks/C89fcOde.js new file mode 100644 index 00000000..481fd468 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/C89fcOde.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as dr}from"./B3Pzt0F_.js";import{p as or,E as sr,m as i,o as ir,s,f as le,j as d,r as o,k as t,g as r,n as c,u as m,t as f,x as Ge,z as nr,v as _,e as je,c as u,D as R,B as Be,b as $e,d as lr}from"./D8EpLgQ1.js";import{p as ur,i as E}from"./5WA7h8uK.js";import{r as b,b as ze}from"./CiE1LlKV.js";import{b as v,a as br}from"./C6k1Q4We.js";import{p as gr}from"./D4Caz1gY.js";import{M as cr}from"./qB7B8uiS.js";import{J as mr}from"./DZblzgqm.js";var vr=le('

                '),pr=le('
                Updating...
                '),fr=le('

                Scale Set Information

                Provider:
                Entity:

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Extra Specs (JSON)
                ');function Or(Ce,ue){or(ue,!1);let e=ur(ue,"scaleSet",8);const $=sr();let O=i(!1),J=i(""),T=i(e().name||""),I=i(e().image||""),M=i(e().flavor||""),N=i(e().max_runners),A=i(e().min_idle_runners),P=i(e().runner_bootstrap_timeout),U=i(e().runner_prefix||""),h=i(e().os_type||"linux"),k=i(e().os_arch||"amd64"),D=i(e()["github-runner-group"]||""),G=i(e().enabled),g=i("{}");ir(()=>{if(e().extra_specs)try{if(typeof e().extra_specs=="object")s(g,JSON.stringify(e().extra_specs,null,2));else{const l=JSON.parse(e().extra_specs);s(g,JSON.stringify(l,null,2))}}catch{s(g,e().extra_specs||"{}")}});async function Fe(){try{s(O,!0),s(J,"");let l={};if(r(g).trim())try{l=JSON.parse(r(g))}catch{throw new Error("Invalid JSON in extra specs")}const w={name:r(T)!==e().name?r(T):void 0,image:r(I)!==e().image?r(I):void 0,flavor:r(M)!==e().flavor?r(M):void 0,max_runners:r(N)!==e().max_runners?r(N):void 0,min_idle_runners:r(A)!==e().min_idle_runners?r(A):void 0,runner_bootstrap_timeout:r(P)!==e().runner_bootstrap_timeout?r(P):void 0,runner_prefix:r(U)!==e().runner_prefix?r(U):void 0,os_type:r(h)!==e().os_type?r(h):void 0,os_arch:r(k)!==e().os_arch?r(k):void 0,"github-runner-group":r(D)!==e()["github-runner-group"]&&r(D)||void 0,enabled:r(G)!==e().enabled?r(G):void 0,extra_specs:r(g).trim()!==JSON.stringify(e().extra_specs||{},null,2).trim()?l:void 0};Object.keys(w).forEach(p=>{w[p]===void 0&&delete w[p]}),$("submit",w)}catch(l){s(J,l instanceof Error?l.message:"Failed to update scale set")}finally{s(O,!1)}}dr(),cr(Ce,{$$events:{close:()=>$("close")},children:(l,w)=>{var p=fr(),z=d(p),be=d(z),Le=d(be);o(be),o(z);var C=t(z,2),ge=d(C);{var He=a=>{var n=vr(),j=d(n),ne=d(j,!0);o(j),o(n),f(()=>_(ne,r(J))),u(a,n)};E(ge,a=>{r(J)&&a(He)})}var F=t(ge,2),ce=t(d(F),2),L=d(ce),me=t(d(L),2),We=d(me,!0);o(me),o(L);var ve=t(L,2),pe=t(d(ve),2),qe=d(pe);{var Ke=a=>{var n=R();f(()=>_(n,`Repository: ${c(e()),m(()=>e().repo_name)??""}`)),u(a,n)},Qe=a=>{var n=Be(),j=$e(n);{var ne=x=>{var S=R();f(()=>_(S,`Organization: ${c(e()),m(()=>e().org_name)??""}`)),u(x,S)},er=x=>{var S=Be(),rr=$e(S);{var ar=y=>{var B=R();f(()=>_(B,`Enterprise: ${c(e()),m(()=>e().enterprise_name)??""}`)),u(y,B)},tr=y=>{var B=R("Unknown Entity");u(y,B)};E(rr,y=>{c(e()),m(()=>e().enterprise_name)?y(ar):y(tr,!1)},!0)}u(x,S)};E(j,x=>{c(e()),m(()=>e().org_name)?x(ne):x(er,!1)},!0)}u(a,n)};E(qe,a=>{c(e()),m(()=>e().repo_name)?a(Ke):a(Qe,!1)})}o(pe),o(ve),o(ce),o(F);var H=t(F,2),fe=t(d(H),2);b(fe),o(H);var W=t(H,2),xe=t(d(W),2),q=d(xe),ye=t(d(q),2);b(ye),o(q);var K=t(q,2),_e=t(d(K),2);b(_e),o(K);var Q=t(K,2),V=t(d(Q),2);f(()=>{r(h),Ge(()=>{})});var X=d(V);X.value=X.__value="linux";var he=t(X);he.value=he.__value="windows",o(V),o(Q);var ke=t(Q,2),Y=t(d(ke),2);f(()=>{r(k),Ge(()=>{})});var Z=d(Y);Z.value=Z.__value="amd64";var we=t(Z);we.value=we.__value="arm64",o(Y),o(ke),o(xe),o(W);var ee=t(W,2),Se=t(d(ee),2),re=d(Se),Re=t(d(re),2);b(Re),o(re);var ae=t(re,2),Ee=t(d(ae),2);b(Ee),o(ae);var Oe=t(ae,2),Je=t(d(Oe),2);b(Je),o(Oe),o(Se),o(ee);var te=t(ee,2),de=t(d(te),2),oe=d(de),Te=t(d(oe),2);b(Te),o(oe);var Ie=t(oe,2),Me=t(d(Ie),2);b(Me),o(Ie),o(de);var se=t(de,2),Ne=d(se),Ve=t(d(Ne),2);mr(Ve,{rows:4,placeholder:"{}",get value(){return r(g)},set value(a){s(g,a)},$$legacy:!0}),o(Ne),o(se);var Ae=t(se,2),Pe=d(Ae);b(Pe),nr(2),o(Ae),o(te);var Ue=t(te,2),De=d(Ue),ie=t(De,2),Xe=d(ie);{var Ye=a=>{var n=pr();u(a,n)},Ze=a=>{var n=R("Update Scale Set");u(a,n)};E(Xe,a=>{r(O)?a(Ye):a(Ze,!1)})}o(ie),o(Ue),o(C),o(p),f(()=>{_(Le,`Update Scale Set 
${c(e()),m(()=>e().name)??""}`),_(We,(c(e()),m(()=>e().provider_name))),ie.disabled=r(O)}),v(fe,()=>r(T),a=>s(T,a)),v(ye,()=>r(I),a=>s(I,a)),v(_e,()=>r(M),a=>s(M,a)),ze(V,()=>r(h),a=>s(h,a)),ze(Y,()=>r(k),a=>s(k,a)),v(Re,()=>r(A),a=>s(A,a)),v(Ee,()=>r(N),a=>s(N,a)),v(Je,()=>r(P),a=>s(P,a)),v(Te,()=>r(U),a=>s(U,a)),v(Me,()=>r(D),a=>s(D,a)),br(Pe,()=>r(G),a=>s(G,a)),je("click",De,()=>$("close")),je("submit",C,gr(Fe)),u(l,p)},$$slots:{default:!0}}),lr()}export{Or as U}; diff --git a/webapp/assets/_app/immutable/chunks/C9DJVOi1.js b/webapp/assets/_app/immutable/chunks/C9DJVOi1.js new file mode 100644 index 00000000..3375b79c --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/C9DJVOi1.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as ke}from"./B3Pzt0F_.js";import{V as ut,aU as gt,aV as ft,M as ht,O as mt,P as pt,Q as kt,R as xt,at as _t,K as Xe,L as yt,U as bt,T as wt,a8 as Mt,p as fe,l as $,s as ee,m as te,n as u,a as Le,C as ye,j as s,g as e,r as n,t as R,c as i,d as he,E as be,f as L,e as Ge,u as v,k as j,v as O,z as Se,D as Te,B as Q,b as q,x as Pt,q as X}from"./D8EpLgQ1.js";import{p as o,l as Ye,i as I,b as Ct}from"./5WA7h8uK.js";import{e as de,i as ge}from"./u94nIB4-.js";import{h as Ae,s as pe,f as jt,e as zt,r as Tt,c as Be,B as Ce,b as Lt,d as $e,i as Ht}from"./CiE1LlKV.js";import{c as It}from"./CCSWcuVN.js";import{b as St}from"./C6k1Q4We.js";import{b as At}from"./CoIRRsD9.js";import{B as Bt,g as et,b as Et}from"./BGVHQGl-.js";function tt(S,r,g){Xe&&yt();var a=S,t=_t,p,h,m=null,f=ut()?gt:ft;function M(){p&&bt(p),m!==null&&(m.lastChild.remove(),a.before(m),m=null),p=h}ht(()=>{if(f(t,t=r())){var k=a,A=xt();A&&(m=document.createDocumentFragment(),m.append(k=mt())),h=pt(()=>g(k)),A?kt.add_callback(M):M()}}),Xe&&(a=wt)}function Fe(S,r){var g=S.$$events?.[r.type],a=Mt(g)?g.slice():g==null?[]:[g];for(var t of a)t.call(this,r)}var Dt=ye('');function Vt(S,r){fe(r,!1);const g=te();let a=o(r,"name",8),t=o(r,"class",8,"h-5 w-5");const p={plus:'',edit:'',delete:'',view:'',close:'',check:'',x:'',"chevron-left":'',"chevron-right":'',"chevron-down":'',"chevron-up":'',search:'',refresh:'',menu:'',settings:'',"check-circle":'',"x-circle":'',"exclamation-circle":'',"information-circle":'',loading:'',sun:'',moon:'',document:'',folder:''};$(()=>u(a()),()=>{ee(g,p[a()]||"")}),Le();var h=Dt(),m=s(h);Ae(m,()=>e(g),!0),n(h),R(()=>pe(h,0,`${t()}`)),i(S,h),he()}var Nt=L('');function rt(S,r){const g=Ye(r,["children","$$slots","$$events","$$legacy"]),a=Ye(g,["action","disabled","title","ariaLabel","size"]);fe(r,!1);const t=te(),p=te(),h=te(),m=te(),f=te(),M=te(),k=te(),A=te(),U=te(),V=be();let P=o(r,"action",8,"edit"),Z=o(r,"disabled",8,!1),B=o(r,"title",8,""),x=o(r,"ariaLabel",8,""),H=o(r,"size",8,"md");function D(){Z()||V("click")}$(()=>{},()=>{ee(t,"transition-colors focus:outline-none focus:ring-2 focus:ring-offset-2 dark:focus:ring-offset-gray-900 cursor-pointer disabled:cursor-not-allowed disabled:opacity-50")}),$(()=>u(H()),()=>{ee(p,{sm:"p-1",md:"p-2"}[H()])}),$(()=>u(P()),()=>{ee(h,{edit:"text-indigo-600 dark:text-indigo-400 hover:text-indigo-900 dark:hover:text-indigo-300 focus:ring-indigo-500",delete:"text-red-600 dark:text-red-400 hover:text-red-900 dark:hover:text-red-300 focus:ring-red-500",view:"text-gray-600 dark:text-gray-400 hover:text-gray-900 dark:hover:text-gray-300 focus:ring-gray-500",add:"text-green-600 dark:text-green-400 hover:text-green-900 dark:hover:text-green-300 focus:ring-green-500"}[P()])}),$(()=>u(H()),()=>{ee(m,H()==="sm"?"h-4 w-4":"h-5 
w-5")}),$(()=>(e(t),e(p),e(h)),()=>{ee(f,[e(t),e(p),e(h)].join(" "))}),$(()=>{},()=>{ee(M,{edit:'',delete:'',view:'',add:''})}),$(()=>{},()=>{ee(k,{edit:"Edit",delete:"Delete",view:"View",add:"Add"})}),$(()=>(u(B()),e(k),u(P())),()=>{ee(A,B()||e(k)[P()])}),$(()=>(u(x()),e(k),u(P())),()=>{ee(U,x()||`${e(k)[P()]} item`)}),Le(),ke();var F=Nt();jt(F,()=>({type:"button",class:e(f),disabled:Z(),title:e(A),"aria-label":e(U),...a}));var J=s(F),l=s(J);Ae(l,()=>(e(M),u(P()),v(()=>e(M)[P()])),!0),n(J),n(F),R(()=>pe(J,0,zt(e(m)))),Ge("click",F,D),i(S,F),he()}var Rt=L('
                ');function Ut(S,r){fe(r,!1);let g=o(r,"value",12,""),a=o(r,"placeholder",8,"Search..."),t=o(r,"disabled",8,!1);const p=be();function h(){p("input",g())}ke();var m=Rt(),f=s(m),M=s(f);Vt(M,{name:"search",class:"h-5 w-5 text-gray-400"}),n(f);var k=j(f,2);Tt(k),n(m),R(()=>{Be(k,"placeholder",a()),k.disabled=t()}),St(k,g),Ge("input",k,h),i(S,m),he()}var qt=L('

                ');function Ot(S,r){let g=o(r,"message",8,"Loading...");var a=qt(),t=j(s(a),2),p=s(t,!0);n(t),n(a),R(()=>O(p,g())),i(S,a)}var Ft=L('
                '),Gt=L('

                ');function Kt(S,r){let g=o(r,"title",8,"Error"),a=o(r,"message",8),t=o(r,"showRetry",8,!1),p=o(r,"onRetry",8,void 0);var h=Gt(),m=s(h),f=s(m),M=j(s(f),2),k=s(M),A=s(k,!0);n(k);var U=j(k,2),V=s(U,!0);n(U);var P=j(U,2);{var Z=B=>{var x=Ft(),H=s(x);Ce(H,{variant:"secondary",size:"sm",icon:"",class:"text-red-700 dark:text-red-200 bg-red-100 dark:bg-red-800 hover:bg-red-200 dark:hover:bg-red-700 focus:outline-none focus:bg-red-200 dark:focus:bg-red-700",$$events:{click(...D){p()?.apply(this,D)}},children:(D,F)=>{Se();var J=Te("Retry");i(D,J)},$$slots:{default:!0}}),n(x),i(B,x)};I(P,B=>{t()&&p()&&B(Z)})}n(M),n(f),n(m),n(h),R(()=>{O(A,g()),O(V,a())}),i(S,h)}var Qt=ye(''),Zt=ye(''),Jt=ye(''),Wt=ye(''),Xt=ye(''),Yt=ye(''),$t=L('

                ');function er(S,r){let g=o(r,"title",8),a=o(r,"message",8),t=o(r,"iconType",8,"document");var p=$t(),h=s(p);{var m=V=>{var P=Qt();i(V,P)},f=V=>{var P=Q(),Z=q(P);{var B=H=>{var D=Zt();i(H,D)},x=H=>{var D=Q(),F=q(D);{var J=d=>{var c=Jt();i(d,c)},l=d=>{var c=Q(),y=q(c);{var N=w=>{var E=Wt();i(w,E)},W=w=>{var E=Q(),z=q(E);{var C=T=>{var G=Xt();i(T,G)},_=T=>{var G=Q(),Y=q(G);{var re=ae=>{var ce=Yt();i(ae,ce)};I(Y,ae=>{t()==="settings"&&ae(re)},!0)}i(T,G)};I(z,T=>{t()==="key"?T(C):T(_,!1)},!0)}i(w,E)};I(y,w=>{t()==="cog"?w(N):w(W,!1)},!0)}i(d,c)};I(F,d=>{t()==="users"?d(J):d(l,!1)},!0)}i(H,D)};I(Z,H=>{t()==="building"?H(B):H(x,!1)},!0)}i(V,P)};I(h,V=>{t()==="document"?V(m):V(f,!1)})}var M=j(h,2),k=s(M,!0);n(M);var A=j(M,2),U=s(A,!0);n(A),n(p),R(()=>{O(k,g()),O(U,a())}),i(S,p)}var tr=L(""),rr=L('
                '),ar=L('
                ');function nr(S,r){fe(r,!1);let g=o(r,"searchTerm",12,""),a=o(r,"perPage",12,25),t=o(r,"placeholder",8,"Search..."),p=o(r,"showPerPageSelector",8,!0),h=o(r,"perPageOptions",24,()=>[25,50,100]);const m=be();function f(){m("search",{term:g()})}function M(){m("perPageChange",{perPage:a()})}ke();var k=ar(),A=s(k),U=s(A),V=s(U),P=j(s(V),2);Ut(P,{get placeholder(){return t()},get value(){return g()},set value(x){g(x)},$$events:{input:f},$$legacy:!0}),n(V),n(U);var Z=j(U,2);{var B=x=>{var H=rr(),D=s(H),F=j(s(D),2);R(()=>{a(),Pt(()=>{h()})}),de(F,5,h,ge,(J,l)=>{var d=tr(),c=s(d,!0);n(d);var y={};R(()=>{O(c,e(l)),y!==(y=e(l))&&(d.value=(d.__value=e(l))??"")}),i(J,d)}),n(F),n(D),n(H),Lt(F,a),Ge("change",F,M),i(x,H)};I(Z,x=>{p()&&x(B)})}n(A),n(k),i(S,k),he()}var ir=L('Showing to of ',1),or=L('
                ');function sr(S,r){fe(r,!1);const g=te(),a=te();let t=o(r,"currentPage",8,1),p=o(r,"totalPages",8,1),h=o(r,"perPage",8,25),m=o(r,"totalItems",8,0),f=o(r,"itemName",8,"results");const M=be();function k(P){P>=1&&P<=p()&&P!==t()&&M("pageChange",{page:P})}$(()=>(u(m()),u(t()),u(h())),()=>{ee(g,m()===0?0:(t()-1)*h()+1)}),$(()=>(u(t()),u(h()),u(m())),()=>{ee(a,Math.min(t()*h(),m()))}),Le(),ke();var A=Q(),U=q(A);{var V=P=>{var Z=or(),B=s(Z),x=s(B);{let z=X(()=>t()===1);Ce(x,{variant:"secondary",get disabled(){return e(z)},$$events:{click:()=>k(t()-1)},children:(C,_)=>{Se();var T=Te("Previous");i(C,T)},$$slots:{default:!0}})}var H=j(x,2);{let z=X(()=>t()===p());Ce(H,{variant:"secondary",get disabled(){return e(z)},class:"ml-3",$$events:{click:()=>k(t()+1)},children:(C,_)=>{Se();var T=Te("Next");i(C,T)},$$slots:{default:!0}})}n(B);var D=j(B,2),F=s(D),J=s(F),l=s(J);{var d=z=>{var C=Te();R(()=>O(C,`No ${f()??""}`)),i(z,C)},c=z=>{var C=ir(),_=j(q(C)),T=s(_,!0);n(_);var G=j(_,2),Y=s(G,!0);n(G);var re=j(G,2),ae=s(re,!0);n(re);var ce=j(re);R(()=>{O(T,e(g)),O(Y,e(a)),O(ae,m()),O(ce,` ${f()??""}`)}),i(z,C)};I(l,z=>{m()===0?z(d):z(c,!1)})}n(J),n(F);var y=j(F,2),N=s(y),W=s(N);{let z=X(()=>t()===1);Ce(W,{variant:"secondary",size:"sm",get disabled(){return e(z)},class:"rounded-r-none","aria-label":"Previous page",icon:"",$$events:{click:()=>k(t()-1)}})}var w=j(W,2);de(w,1,()=>(u(p()),v(()=>Array(p()))),ge,(z,C,_)=>{const T=X(()=>_+1);{let G=X(()=>e(T)===t()?"primary":"secondary");Ce(z,{get variant(){return e(G)},size:"sm",class:"rounded-none border-l-0 first:border-l first:rounded-l-md",$$events:{click:()=>k(e(T))},children:(Y,re)=>{Se();var ae=Te();R(()=>O(ae,e(T))),i(Y,ae)},$$slots:{default:!0}})}});var E=j(w,2);{let z=X(()=>t()===p());Ce(E,{variant:"secondary",size:"sm",get disabled(){return e(z)},class:"rounded-l-none","aria-label":"Next page",icon:"",$$events:{click:()=>k(t()+1)}})}n(N),n(y),n(D),n(Z),i(P,Z)};I(U,P=>{p()>1&&P(V)})}i(S,A),he()}var lr=L('

                '),dr=L('

                '),cr=L('

                '),vr=L('

                '),ur=L('
                '),gr=L('
                '),fr=L('
                '),hr=L(" "),mr=L('
                '),pr=L('
                ');function kr(S,r){fe(r,!1);const g=be();let a=o(r,"item",8),t=o(r,"config",8);function p(){if(!a())return"Unknown";const{field:l,useId:d,showOwner:c}=t().primaryText,y=a()[l];return d&&y?`${y.slice(0,8)}...`:c&&a().owner&&a().name?`${a().owner}/${a().name}`:y||"Unknown"}function h(){if(!t().secondaryText)return"";const{field:l,computedValue:d}=t().secondaryText;return d!==void 0?typeof d=="function"?d(a()):d:a()?.[l]||""}function m(){if(!t().primaryText.href||!a())return"#";let l=t().primaryText.href;return l=l.replace("{id}",a().id||""),l=l.replace("{name}",encodeURIComponent(a().name||"")),`${At}${l}`}function f(l){if(!a())return;const d=t().actions?.find(c=>c.type===l);d&&d.handler(a()),l==="edit"?g("edit",{item:a()}):l==="delete"?g("delete",{item:a()}):g("action",{type:l,item:a()})}function M(l){switch(l.type){case"status":if(t().entityType==="instance"){const c=a()?.[l.field]||"unknown";let y="neutral",N=c.charAt(0).toUpperCase()+c.slice(1);return l.field==="status"?y=c==="running"?"success":c==="pending"||c==="creating"?"info":c==="failed"||c==="error"?"error":"neutral":l.field==="runner_status"&&(y=c==="idle"?"info":c==="active"||c==="running"?"success":c==="failed"||c==="error"?"error":"neutral"),{variant:y,text:N}}return{variant:"neutral",text:a()?.[l.field]||"Unknown"};case"forge":return{variant:"neutral",text:a()?.[l.field]||"unknown"};case"auth":const d=a()?.[l.field]||"pat";return{variant:d==="pat"?"success":"info",text:d.toUpperCase()};case"custom":if(typeof l.value=="function"){const c=l.value(a());return{variant:c?.variant||"neutral",text:c?.text||""}}return{variant:l.value?.variant||"neutral",text:l.value?.text||""};default:return{variant:"neutral",text:""}}}ke();var k=pr(),A=s(k),U=s(A);{var V=l=>{var d=dr(),c=s(d),y=s(c,!0);n(c);var N=j(c,2);{var W=w=>{var E=lr(),z=s(E,!0);n(E),R(C=>O(z,C),[()=>v(h)]),i(w,E)};I(N,w=>{u(t()),v(()=>t().secondaryText)&&w(W)})}n(d),R((w,E)=>{Be(d,"href",w),pe(c,1,`text-sm font-medium text-blue-600 dark:text-blue-400 hover:text-blue-500 dark:hover:text-blue-300 truncate ${u(t()),v(()=>t().primaryText.isMonospace?"font-mono":"")??""}`),O(y,E)},[()=>v(m),()=>v(p)]),i(l,d)},P=l=>{var d=vr(),c=s(d),y=s(c,!0);n(c);var N=j(c,2);{var W=w=>{var E=cr(),z=s(E,!0);n(E),R(C=>O(z,C),[()=>v(h)]),i(w,E)};I(N,w=>{u(t()),v(()=>t().secondaryText)&&w(W)})}n(d),R(w=>O(y,w),[()=>v(p)]),i(l,d)};I(U,l=>{u(t()),v(()=>t().primaryText.isClickable)?l(V):l(P,!1)})}var Z=j(U,2);{var B=l=>{var d=fr(),c=s(d);{var y=w=>{var E=Q(),z=q(E);de(z,1,()=>(u(t()),v(()=>t().customInfo)),ge,(C,_)=>{const T=X(()=>(e(_),u(a()),v(()=>typeof e(_).icon=="function"?e(_).icon(a()):e(_).icon))),G=X(()=>(e(_),u(a()),v(()=>typeof e(_).text=="function"?e(_).text(a()):e(_).text)));var Y=ur(),re=s(Y);{var ae=je=>{var He=Q(),De=q(He);Ae(De,()=>e(T)),i(je,He)};I(re,je=>{e(T)&&je(ae)})}var ce=j(re,2),Ee=s(ce,!0);n(ce),n(Y),R(()=>O(Ee,e(G))),i(C,Y)}),i(w,E)};I(c,w=>{u(t()),v(()=>t().customInfo)&&w(y)})}var N=j(c,2);{var W=w=>{var E=Q(),z=q(E);de(z,1,()=>(u(t()),v(()=>t().badges.filter(C=>C.type==="forge"))),ge,(C,_)=>{var T=gr(),G=s(T);Ae(G,()=>(u(et),e(_),u(a()),v(()=>et(e(_).field?a()?.[e(_).field]||"unknown":a()?.endpoint?.endpoint_type||"unknown"))));var Y=j(G,2),re=s(Y,!0);n(Y),n(T),R(()=>O(re,(u(a()),v(()=>a()?.endpoint?.name||"Unknown")))),i(C,T)}),i(w,E)};I(N,w=>{u(t()),v(()=>t().badges)&&w(W)})}n(d),i(l,d)};I(Z,l=>{u(t()),v(()=>t().customInfo||t().badges?.some(d=>d.type==="forge"))&&l(B)})}n(A);var x=j(A,2),H=s(x);{var D=l=>{var 
d=Q(),c=q(d);de(c,1,()=>(u(t()),v(()=>t().badges.filter(y=>y.type!=="forge"))),ge,(y,N)=>{var W=Q(),w=q(W);{var E=C=>{const _=X(()=>(e(N),v(()=>M(e(N)))));var T=hr(),G=s(T,!0);n(T),R(()=>{pe(T,1,`inline-flex items-center rounded-full px-2 py-1 text-xs font-medium ring-1 ring-inset ${u(e(_)),v(()=>e(_).variant==="success"?"bg-green-50 text-green-700 ring-green-600/20 dark:bg-green-900/50 dark:text-green-300 dark:ring-green-400/20":e(_).variant==="info"?"bg-blue-50 text-blue-700 ring-blue-600/20 dark:bg-blue-900/50 dark:text-blue-300 dark:ring-blue-400/20":e(_).variant==="error"?"bg-red-50 text-red-700 ring-red-600/20 dark:bg-red-900/50 dark:text-red-300 dark:ring-red-400/20":"bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-900/50 dark:text-gray-300 dark:ring-gray-400/20")??""}`),O(G,(u(e(_)),v(()=>e(_).text)))}),i(C,T)},z=C=>{const _=X(()=>(e(N),v(()=>M(e(N)))));Bt(C,{get variant(){return u(e(_)),v(()=>e(_).variant)},get text(){return u(e(_)),v(()=>e(_).text)}})};I(w,C=>{e(N),v(()=>e(N).type==="status")?C(E):C(z,!1)})}i(y,W)}),i(l,d)};I(H,l=>{u(t()),v(()=>t().badges)&&l(D)})}var F=j(H,2);{var J=l=>{var d=mr();de(d,5,()=>(u(t()),v(()=>t().actions)),ge,(c,y)=>{{let N=X(()=>(e(y),u(t()),v(()=>e(y).type==="edit"?`Edit ${t().entityType}`:`Delete ${t().entityType}`))),W=X(()=>(e(y),u(t()),v(()=>e(y).type==="edit"?`Edit ${t().entityType}`:`Delete ${t().entityType}`)));rt(c,{get action(){return e(y),v(()=>e(y).type)},size:"sm",get title(){return e(N)},get ariaLabel(){return e(W)},$$events:{click:()=>f(e(y).type)}})}}),n(d),i(l,d)};I(F,l=>{u(t()),v(()=>t().actions)&&l(J)})}n(x),n(k),i(S,k),he()}var xr=L('
                '),_r=L('
                '),yr=L("
                "),br=L("
                "),wr=L(' ',1),Mr=L('
                ');function Vr(S,r){fe(r,!1);const g=te();let a=o(r,"columns",24,()=>[]),t=o(r,"data",24,()=>[]),p=o(r,"loading",8,!1),h=o(r,"error",8,""),m=o(r,"totalItems",8,0),f=o(r,"itemName",8,"results"),M=o(r,"searchTerm",12,""),k=o(r,"searchPlaceholder",8,"Search..."),A=o(r,"showSearch",8,!0),U=o(r,"currentPage",8,1),V=o(r,"perPage",12,25),P=o(r,"totalPages",8,1),Z=o(r,"showPagination",8,!0),B=o(r,"showPerPageSelector",8,!0),x=o(r,"emptyTitle",8,"No items found"),H=o(r,"emptyMessage",8,""),D=o(r,"emptyIconType",8,"document"),F=o(r,"errorTitle",8,"Error loading data"),J=o(r,"showRetry",8,!1),l=o(r,"showMobileCards",8,!0),d=o(r,"mobileCardConfig",8,null);const c=be();function y(b){c("search",b.detail)}function N(b){c("pageChange",b.detail)}function W(b){c("perPageChange",b.detail)}function w(){c("retry")}function E(b){c("edit",b.detail)}function z(b){c("delete",b.detail)}function C(b){c("action",b.detail)}function _(b){const ve="px-6 py-4 text-sm",Ve=b.align==="right"?"text-right":b.align==="center"?"text-center":"text-left",Ne=b.key==="actions"?"font-medium":"text-gray-900 dark:text-white",Re=b.flexible?"min-w-0":"";return`${ve} ${Ve} ${Ne} ${Re}`.trim()}function T(){return a().map(b=>b.flexible?`${b.flexRatio||1}fr`:"auto").join(" ")}$(()=>(u(H()),u(M()),u(f())),()=>{ee(g,H()||(M()?`No items found matching "${M()}"`:`No ${f()} found`))}),Le(),ke();var G=Mr(),Y=s(G);{var re=b=>{nr(b,{get placeholder(){return k()},get showPerPageSelector(){return B()},get searchTerm(){return M()},set searchTerm(ve){M(ve)},get perPage(){return V()},set perPage(ve){V(ve)},$$events:{search:y,perPageChange:W},$$legacy:!0})};I(Y,b=>{A()&&b(re)})}var ae=j(Y,2),ce=s(ae);{var Ee=b=>{Ot(b,{get message(){return`Loading ${f()??""}...`}})},je=b=>{var ve=Q(),Ve=q(ve);{var Ne=we=>{{let Ie=X(()=>J()?w:void 0);Kt(we,{get title(){return F()},get message(){return h()},get showRetry(){return J()},get onRetry(){return e(Ie)}})}},Re=we=>{var Ie=Q(),at=q(Ie);{var nt=Me=>{er(Me,{get title(){return x()},get message(){return e(g)},get iconType(){return D()}})},it=Me=>{var Ke=wr(),Qe=q(Ke);{var ot=oe=>{var K=_r();de(K,7,t,(le,ne)=>le.id||le.name||ne,(le,ne,qe)=>{var ze=xr(),ie=s(ze);{var Pe=me=>{var xe=Q(),se=q(xe);tt(se,()=>(e(ne),v(()=>`${e(ne).id||e(ne).name}-${e(ne).updated_at}-mobile`)),_e=>{kr(_e,{get item(){return e(ne)},get config(){return d()},$$events:{edit(ue){Fe.call(this,r,ue)},delete(ue){Fe.call(this,r,ue)},action(ue){Fe.call(this,r,ue)}}})}),i(me,xe)},Oe=me=>{var xe=Q(),se=q(xe);$e(se,r,"mobile-card",{get item(){return e(ne)},get index(){return e(qe)}}),i(me,xe)};I(ie,me=>{d()?me(Pe):me(Oe,!1)})}n(ze),i(le,ze)}),n(K),i(oe,K)};I(Qe,oe=>{l()&&oe(ot)})}var Ze=j(Qe,2),Ue=s(Ze),Je=s(Ue);de(Je,1,a,ge,(oe,K)=>{var le=yr(),ne=s(le,!0);n(le),R(()=>{pe(le,1,`px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-300 uppercase tracking-wider bg-gray-50 dark:bg-gray-700 border-b border-gray-200 dark:border-gray-600 ${e(K),v(()=>e(K).align==="right"?"text-right":e(K).align==="center"?"text-center":"text-left")??""}`),O(ne,(e(K),v(()=>e(K).title)))}),i(oe,le)});var st=j(Je,2);de(st,3,t,(oe,K)=>oe.id||oe.name||K,(oe,K,le)=>{var ne=Q(),qe=q(ne);de(qe,1,a,ge,(ze,ie)=>{var Pe=br(),Oe=s(Pe);{var me=se=>{var _e=Q(),ue=q(_e);tt(ue,()=>(e(K),e(ie),v(()=>`${e(K).id||e(K).name}-${e(K).updated_at}-${e(ie).key}`)),lt=>{var We=Q(),dt=q(We);It(dt,()=>e(ie).cellComponent,(ct,vt)=>{vt(ct,Ct({get item(){return e(K)}},()=>e(ie).cellProps,{$$events:{edit:E,delete:z,action:C}}))}),i(lt,We)}),i(se,_e)},xe=se=>{var 
_e=Q(),ue=q(_e);$e(ue,r,"cell",{get item(){return e(K)},get column(){return e(ie)},get index(){return e(le)},get value(){return e(K),e(ie),v(()=>e(K)[e(ie).key])}}),i(se,_e)};I(Oe,se=>{e(ie),v(()=>e(ie).cellComponent)?se(me):se(xe,!1)})}n(Pe),R(se=>pe(Pe,1,`${se??""} border-b border-gray-200 dark:border-gray-700`),[()=>(e(ie),v(()=>_(e(ie))))]),i(ze,Pe)}),i(oe,ne)}),n(Ue),n(Ze),R(oe=>Ht(Ue,`grid-template-columns: ${oe??""}`),[()=>v(T)]),i(Me,Ke)};I(at,Me=>{u(t()),v(()=>t().length===0)?Me(nt):Me(it,!1)},!0)}i(we,Ie)};I(Ve,we=>{h()?we(Ne):we(Re,!1)},!0)}i(b,ve)};I(ce,b=>{p()?b(Ee):b(je,!1)})}var He=j(ce,2);{var De=b=>{sr(b,{get currentPage(){return U()},get totalPages(){return P()},get perPage(){return V()},get totalItems(){return m()},get itemName(){return f()},$$events:{pageChange:N}})};I(He,b=>{u(Z()),u(p()),u(h()),u(t()),v(()=>Z()&&!p()&&!h()&&t().length>0)&&b(De)})}n(ae),n(G),i(S,G),he()}var Pr=L('
                ');function Nr(S,r){fe(r,!1);const g=be();let a=o(r,"item",8),t=o(r,"actions",24,()=>[{type:"edit",title:"Edit",ariaLabel:"Edit item",action:"edit"},{type:"delete",title:"Delete",ariaLabel:"Delete item",action:"delete"}]);function p(m){a()&&(m==="edit"?g("edit",{item:a()}):m==="delete"?g("delete",{item:a()}):g("action",{type:m,item:a()}))}ke();var h=Pr();de(h,5,t,ge,(m,f)=>{{let M=X(()=>(e(f),v(()=>e(f).action||(e(f).type==="edit"?"edit":e(f).type==="delete"?"delete":"view")))),k=X(()=>(e(f),v(()=>e(f).title||(e(f).type==="edit"?"Edit":e(f).type==="delete"?"Delete":e(f).label)))),A=X(()=>(e(f),v(()=>e(f).ariaLabel||(e(f).type==="edit"?"Edit item":e(f).type==="delete"?"Delete item":e(f).label))));rt(m,{get action(){return e(M)},get title(){return e(k)},get ariaLabel(){return e(A)},$$events:{click:()=>p(e(f).type)}})}}),n(h),i(S,h),he()}var Cr=L(" "),jr=L(" ");function Rr(S,r){fe(r,!1);const g=te(),a=te();let t=o(r,"item",8),p=o(r,"field",8),h=o(r,"type",8,"text"),m=o(r,"truncateLength",8,50),f=o(r,"showTitle",8,!1);function M(){return t()&&p().split(".").reduce((B,x)=>B?.[x],t())||""}function k(){return h()==="date"?Et(e(g)):h()==="truncated"&&e(g).length>m()?`${e(g).slice(0,m())}...`:e(g)}function A(){switch(h()){case"code":return"inline-block max-w-full truncate bg-gray-100 dark:bg-gray-700 px-2 py-1 rounded text-xs font-mono";case"description":return"block w-full truncate text-sm text-gray-500 dark:text-gray-300";case"date":return"block w-full truncate text-sm text-gray-900 dark:text-white font-mono";default:return"block w-full truncate text-sm text-gray-900 dark:text-white"}}$(()=>{},()=>{ee(g,M())}),$(()=>{},()=>{ee(a,k())}),Le(),ke();var U=Q(),V=q(U);{var P=B=>{var x=Cr(),H=s(x,!0);n(x),R(D=>{pe(x,1,`${D??""} ${f()?"cursor-default":""}`),Be(x,"title",f()?e(g):""),O(H,e(a))},[()=>v(A)]),i(B,x)},Z=B=>{var x=jr(),H=s(x,!0);n(x),R(D=>{pe(x,1,`${D??""} ${f()?"cursor-default":""}`),Be(x,"title",f()?e(g):""),O(H,e(a))},[()=>v(A)]),i(B,x)};I(V,B=>{h()==="code"?B(P):B(Z,!1)})}i(S,U),he()}export{rt as A,Vr as D,Rr as G,Nr as a,tt as k}; diff --git a/webapp/assets/_app/immutable/chunks/CCSWcuVN.js b/webapp/assets/_app/immutable/chunks/CCSWcuVN.js new file mode 100644 index 00000000..2c651009 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CCSWcuVN.js @@ -0,0 +1 @@ +import{K as l,L as u,M as m,N as _,O as p,P as h,Q as v,R as b,T,U as g}from"./D8EpLgQ1.js";function y(s,i,d){l&&u();var r=s,a,n,e=null,t=null;function f(){n&&(g(n),n=null),e&&(e.lastChild.remove(),r.before(e),e=null),n=t,t=null}m(()=>{if(a!==(a=i())){var c=b();if(a){var o=r;c&&(e=document.createDocumentFragment(),e.append(o=p())),t=h(()=>d(o,a))}c?v.add_callback(f):f()}},_),l&&(r=T)}export{y as c}; diff --git a/webapp/assets/_app/immutable/chunks/CGpPw4EW.js b/webapp/assets/_app/immutable/chunks/CGpPw4EW.js new file mode 100644 index 00000000..14941294 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CGpPw4EW.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as _}from"./B3Pzt0F_.js";import{p as h,f as x,t as u,c as g,d as k,k as w,j as o,u as m,n as e,r,v as y}from"./D8EpLgQ1.js";import{h as b}from"./CiE1LlKV.js";import{p}from"./5WA7h8uK.js";import{g as v}from"./BGVHQGl-.js";var z=x('
                ');function U(l,i){h(i,!1);let t=p(i,"item",8),s=p(i,"iconSize",8,"w-5 h-5");_();var a=z(),n=o(a),f=o(n);b(f,()=>(e(v),e(t()),e(s()),m(()=>v(t()?.endpoint?.endpoint_type||t()?.endpoint_type||"unknown",s())))),r(n);var d=w(n,2),c=o(d,!0);r(d),r(a),u(()=>y(c,(e(t()),m(()=>t()?.endpoint?.name||t()?.endpoint_name||t()?.github_endpoint_name||"Unknown")))),g(l,a),k()}export{U as E}; diff --git a/webapp/assets/_app/immutable/chunks/CLYUNKnN.js b/webapp/assets/_app/immutable/chunks/CLYUNKnN.js new file mode 100644 index 00000000..e6432af4 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CLYUNKnN.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as b}from"./B3Pzt0F_.js";import{p as k,f as E,t as C,u as i,n as t,v as n,c as j,d as P,k as z,j as l,r as o}from"./D8EpLgQ1.js";import{c as N}from"./CiE1LlKV.js";import{p as f}from"./5WA7h8uK.js";import"./CoIRRsD9.js";import{j as x,e as c,i as u}from"./BGVHQGl-.js";var T=E('');function G(d,r){k(r,!1);let e=f(r,"item",8),m=f(r,"eagerCache",8,null);b();var s=T(),a=l(s),v=l(a,!0);o(a);var p=z(a,2),g=l(p,!0);o(p),o(s),C((h,y,_)=>{N(a,"href",h),n(v,y),n(g,_)},[()=>(t(x),t(e()),i(()=>x(e()))),()=>(t(c),t(e()),t(m()),i(()=>c(e(),m()))),()=>(t(u),t(e()),i(()=>u(e())))]),j(d,s),P()}export{G as P}; diff --git a/webapp/assets/_app/immutable/chunks/CNMHKIIK.js b/webapp/assets/_app/immutable/chunks/CNMHKIIK.js new file mode 100644 index 00000000..c1bc0085 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CNMHKIIK.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as j}from"./B3Pzt0F_.js";import{p as E,E as G,f as S,j as t,r,k as g,u,n as p,z as m,t as z,v as D,e as f,c as H,d as I}from"./D8EpLgQ1.js";import{h as y,s as v}from"./CiE1LlKV.js";import{p as h}from"./5WA7h8uK.js";import{g as o}from"./BGVHQGl-.js";var q=S('
                ');function M(x,s){E(s,!1);const k=G();let d=h(s,"selectedForgeType",12,""),_=h(s,"label",8,"Select Forge Type");function n(c){d(c),k("select",c)}j();var i=q(),l=t(i),F=t(l,!0);r(l);var b=g(l,2),e=t(b),w=t(e);y(w,()=>(p(o),u(()=>o("github","w-8 h-8")))),m(2),r(e);var a=g(e,2),T=t(a);y(T,()=>(p(o),u(()=>o("gitea","w-8 h-8")))),m(2),r(a),r(b),r(i),z(()=>{D(F,_()),v(e,1,`flex flex-col items-center justify-center p-6 border-2 rounded-lg transition-colors cursor-pointer ${d()==="github"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),v(a,1,`flex flex-col items-center justify-center p-6 border-2 rounded-lg transition-colors cursor-pointer ${d()==="gitea"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`)}),f("click",e,()=>n("github")),f("click",a,()=>n("gitea")),H(x,i),I()}export{M as F}; diff --git a/webapp/assets/_app/immutable/chunks/CO4LUyTP.js b/webapp/assets/_app/immutable/chunks/CO4LUyTP.js new file mode 100644 index 00000000..8559a85a --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CO4LUyTP.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as E}from"./B3Pzt0F_.js";import{p as H,E as L,f as h,t as f,c,d as z,j as e,r as a,k as x,v as d,z as M,D as q}from"./D8EpLgQ1.js";import{p as i,i as C}from"./5WA7h8uK.js";import{B as F}from"./CiE1LlKV.js";var G=h('
                '),I=h('

                ');function S(u,t){H(t,!1);const _=L();let k=i(t,"title",8),b=i(t,"description",8),v=i(t,"actionLabel",8,null),g=i(t,"showAction",8,!0);function w(){_("action")}E();var r=I(),s=e(r),o=e(s),y=e(o,!0);a(o);var m=x(o,2),j=e(m,!0);a(m),a(s);var A=x(s,2);{var P=n=>{var l=G(),B=e(l);F(B,{variant:"primary",icon:'',$$events:{click:w},children:(D,J)=>{M();var p=q();f(()=>d(p,v())),c(D,p)},$$slots:{default:!0}}),a(l),c(n,l)};C(A,n=>{g()&&v()&&n(P)})}a(r),f(()=>{d(y,k()),d(j,b())}),c(u,r),z()}export{S as P}; diff --git a/webapp/assets/_app/immutable/chunks/CTf6mQoE.js b/webapp/assets/_app/immutable/chunks/CTf6mQoE.js new file mode 100644 index 00000000..15ee7bf7 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CTf6mQoE.js @@ -0,0 +1,3 @@ +import{I as Ee,o as De,aR as T,g as x,s as P,bf as yt,aS as Be}from"./D8EpLgQ1.js";import{a as wt,b as L}from"./CoIRRsD9.js";class le{constructor(t,n){this.status=t,typeof n=="string"?this.body={message:n}:n?this.body=n:this.body={message:`Error: ${t}`}}toString(){return JSON.stringify(this.body)}}class Se{constructor(t,n){this.status=t,this.location=n}}class Re extends Error{constructor(t,n,r){super(r),this.status=t,this.text=n}}new URL("sveltekit-internal://");function vt(e,t){return e==="/"||t==="ignore"?e:t==="never"?e.endsWith("/")?e.slice(0,-1):e:t==="always"&&!e.endsWith("/")?e+"/":e}function bt(e){return e.split("%25").map(decodeURI).join("%25")}function kt(e){for(const t in e)e[t]=decodeURIComponent(e[t]);return e}function me({href:e}){return e.split("#")[0]}function At(e,t,n,r=!1){const a=new URL(e);Object.defineProperty(a,"searchParams",{value:new Proxy(a.searchParams,{get(i,o){if(o==="get"||o==="getAll"||o==="has")return f=>(n(f),i[o](f));t();const c=Reflect.get(i,o);return typeof c=="function"?c.bind(i):c}}),enumerable:!0,configurable:!0});const s=["href","pathname","search","toString","toJSON"];r&&s.push("hash");for(const i of s)Object.defineProperty(a,i,{get(){return t(),e[i]},enumerable:!0,configurable:!0});return a}function Et(...e){let t=5381;for(const n of e)if(typeof n=="string"){let r=n.length;for(;r;)t=t*33^n.charCodeAt(--r)}else if(ArrayBuffer.isView(n)){const r=new Uint8Array(n.buffer,n.byteOffset,n.byteLength);let a=r.length;for(;a;)t=t*33^r[--a]}else throw new TypeError("value must be a string or TypedArray");return(t>>>0).toString(36)}function St(e){const t=atob(e),n=new Uint8Array(t.length);for(let r=0;r((e instanceof Request?e.method:t?.method||"GET")!=="GET"&&G.delete(Ie(e)),Rt(e,t));const G=new Map;function It(e,t){const n=Ie(e,t),r=document.querySelector(n);if(r?.textContent){let{body:a,...s}=JSON.parse(r.textContent);const i=r.getAttribute("data-ttl");return i&&G.set(n,{body:a,init:s,ttl:1e3*Number(i)}),r.getAttribute("data-b64")!==null&&(a=St(a)),Promise.resolve(new Response(a,s))}return window.fetch(e,t)}function Ut(e,t,n){if(G.size>0){const r=Ie(e,n),a=G.get(r);if(a){if(performance.now(){const a=/^\[\.\.\.(\w+)(?:=(\w+))?\]$/.exec(r);if(a)return t.push({name:a[1],matcher:a[2],optional:!1,rest:!0,chained:!0}),"(?:/([^]*))?";const s=/^\[\[(\w+)(?:=(\w+))?\]\]$/.exec(r);if(s)return t.push({name:s[1],matcher:s[2],optional:!0,rest:!1,chained:!0}),"(?:/([^/]+))?";if(!r)return;const i=r.split(/\[(.+?)\](?!\])/);return"/"+i.map((c,f)=>{if(f%2){if(c.startsWith("x+"))return _e(String.fromCharCode(parseInt(c.slice(2),16)));if(c.startsWith("u+"))return _e(String.fromCharCode(...c.slice(2).split("-").map(_=>parseInt(_,16))));const d=Lt.exec(c),[,h,u,l,p]=d;return 
t.push({name:l,matcher:p,optional:!!h,rest:!!u,chained:u?f===1&&i[0]==="":!1}),u?"([^]*?)":h?"([^/]*)?":"([^/]+?)"}return _e(c)}).join("")}).join("")}/?$`),params:t}}function xt(e){return e!==""&&!/^\([^)]+\)$/.test(e)}function Pt(e){return e.slice(1).split("/").filter(xt)}function Ct(e,t,n){const r={},a=e.slice(1),s=a.filter(o=>o!==void 0);let i=0;for(let o=0;od).join("/"),i=0),f===void 0){c.rest&&(r[c.name]="");continue}if(!c.matcher||n[c.matcher](f)){r[c.name]=f;const d=t[o+1],h=a[o+1];d&&!d.rest&&d.optional&&h&&c.chained&&(i=0),!d&&!h&&Object.keys(r).length===s.length&&(i=0);continue}if(c.optional&&c.chained){i++;continue}return}if(!i)return r}function _e(e){return e.normalize().replace(/[[\]]/g,"\\$&").replace(/%/g,"%25").replace(/\//g,"%2[Ff]").replace(/\?/g,"%3[Ff]").replace(/#/g,"%23").replace(/[.*+?^${}()|\\]/g,"\\$&")}function Ot({nodes:e,server_loads:t,dictionary:n,matchers:r}){const a=new Set(t);return Object.entries(n).map(([o,[c,f,d]])=>{const{pattern:h,params:u}=Tt(o),l={id:o,exec:p=>{const _=h.exec(p);if(_)return Ct(_,u,r)},errors:[1,...d||[]].map(p=>e[p]),layouts:[0,...f||[]].map(i),leaf:s(c)};return l.errors.length=l.layouts.length=Math.max(l.errors.length,l.layouts.length),l});function s(o){const c=o<0;return c&&(o=~o),[c,e[o]]}function i(o){return o===void 0?o:[a.has(o),e[o]]}}function ze(e,t=JSON.parse){try{return t(sessionStorage[e])}catch{}}function Fe(e,t,n=JSON.stringify){const r=n(t);try{sessionStorage[e]=r}catch{}}const Nt="1755334486454",Xe="sveltekit:snapshot",Ze="sveltekit:scroll",Qe="sveltekit:states",jt="sveltekit:pageurl",F="sveltekit:history",Y="sveltekit:navigation",j={tap:1,hover:2,viewport:3,eager:4,off:-1,false:-1},Z=location.origin;function Ue(e){if(e instanceof URL)return e;let t=document.baseURI;if(!t){const n=document.getElementsByTagName("base");t=n.length?n[0].href:document.URL}return new URL(e,t)}function fe(){return{x:pageXOffset,y:pageYOffset}}function B(e,t){return e.getAttribute(`data-sveltekit-${t}`)}const Ve={...j,"":j.hover};function et(e){let t=e.assignedSlot??e.parentNode;return t?.nodeType===11&&(t=t.host),t}function tt(e,t){for(;e&&e!==t;){if(e.nodeName.toUpperCase()==="A"&&e.hasAttribute("href"))return e;e=et(e)}}function ve(e,t,n){let r;try{if(r=new URL(e instanceof SVGAElement?e.href.baseVal:e.href,document.baseURI),n&&r.hash.match(/^#[^/]/)){const o=location.hash.split("#")[1]||"/";r.hash=`#${o}${r.hash}`}}catch{}const a=e instanceof SVGAElement?e.target.baseVal:e.target,s=!r||!!a||ue(r,t,n)||(e.getAttribute("rel")||"").split(/\s+/).includes("external"),i=r?.origin===Z&&e.hasAttribute("download");return{url:r,external:s,target:a,download:i}}function te(e){let t=null,n=null,r=null,a=null,s=null,i=null,o=e;for(;o&&o!==document.documentElement;)r===null&&(r=B(o,"preload-code")),a===null&&(a=B(o,"preload-data")),t===null&&(t=B(o,"keepfocus")),n===null&&(n=B(o,"noscroll")),s===null&&(s=B(o,"reload")),i===null&&(i=B(o,"replacestate")),o=et(o);function c(f){switch(f){case"":case"true":return!0;case"off":case"false":return!1;default:return}}return{preload_code:Ve[r??"off"],preload_data:Ve[a??"off"],keepfocus:c(t),noscroll:c(n),reload:c(s),replace_state:c(i)}}function qe(e){const t=Ee(e);let n=!0;function r(){n=!0,t.update(i=>i)}function a(i){n=!1,t.set(i)}function s(i){let o;return t.subscribe(c=>{(o===void 0||n&&c!==o)&&i(o=c)})}return{notify:r,set:a,subscribe:s}}const nt={v:()=>{}};function $t(){const{set:e,subscribe:t}=Ee(!1);let n;async function r(){clearTimeout(n);try{const a=await 
fetch(`${wt}/_app/version.json`,{headers:{pragma:"no-cache","cache-control":"no-cache"}});if(!a.ok)return!1;const i=(await a.json()).version!==Nt;return i&&(e(!0),nt.v(),clearTimeout(n)),i}catch{return!1}}return{subscribe:t,check:r}}function ue(e,t,n){return e.origin!==Z||!e.pathname.startsWith(t)?!0:n?!(e.pathname===t+"/"||e.pathname===t+"/index.html"||e.protocol==="file:"&&e.pathname.replace(/\/[^/]+\.html?$/,"")===t):!1}function kn(e){}function Me(e){const t=Bt(e),n=new ArrayBuffer(t.length),r=new DataView(n);for(let a=0;a>16),t+=String.fromCharCode((n&65280)>>8),t+=String.fromCharCode(n&255),n=r=0);return r===12?(n>>=4,t+=String.fromCharCode(n)):r===18&&(n>>=2,t+=String.fromCharCode((n&65280)>>8),t+=String.fromCharCode(n&255)),t}const Ft=-1,Vt=-2,qt=-3,Mt=-4,Gt=-5,Ht=-6;function Kt(e,t){if(typeof e=="number")return a(e,!0);if(!Array.isArray(e)||e.length===0)throw new Error("Invalid input");const n=e,r=Array(n.length);function a(s,i=!1){if(s===Ft)return;if(s===qt)return NaN;if(s===Mt)return 1/0;if(s===Gt)return-1/0;if(s===Ht)return-0;if(i)throw new Error("Invalid input");if(s in r)return r[s];const o=n[s];if(!o||typeof o!="object")r[s]=o;else if(Array.isArray(o))if(typeof o[0]=="string"){const c=o[0],f=t?.[c];if(f)return r[s]=f(a(o[1]));switch(c){case"Date":r[s]=new Date(o[1]);break;case"Set":const d=new Set;r[s]=d;for(let l=1;lt!=null)}const Jt="x-sveltekit-invalidated",zt="x-sveltekit-trailing-slash";function ne(e){return e instanceof le||e instanceof Re?e.status:500}function Xt(e){return e instanceof Re?e.text:"Internal Error"}let E,J,ye;const Zt=De.toString().includes("$$")||/function \w+\(\) \{\}/.test(De.toString());Zt?(E={data:{},form:null,error:null,params:{},route:{id:null},state:{},status:-1,url:new URL("https://example.com")},J={current:null},ye={current:!1}):(E=new class{#e=T({});get data(){return x(this.#e)}set data(t){P(this.#e,t)}#t=T(null);get form(){return x(this.#t)}set form(t){P(this.#t,t)}#n=T(null);get error(){return x(this.#n)}set error(t){P(this.#n,t)}#r=T({});get params(){return x(this.#r)}set params(t){P(this.#r,t)}#a=T({id:null});get route(){return x(this.#a)}set route(t){P(this.#a,t)}#o=T({});get state(){return x(this.#o)}set state(t){P(this.#o,t)}#s=T(-1);get status(){return x(this.#s)}set status(t){P(this.#s,t)}#i=T(new URL("https://example.com"));get url(){return x(this.#i)}set url(t){P(this.#i,t)}},J=new class{#e=T(null);get current(){return x(this.#e)}set current(t){P(this.#e,t)}},ye=new class{#e=T(!1);get current(){return x(this.#e)}set current(t){P(this.#e,t)}},nt.v=()=>ye.current=!0);function Qt(e){Object.assign(E,e)}const en="/__data.json",tn=".html__data.json";function nn(e){return e.endsWith(".html")?e.replace(/\.html$/,tn):e.replace(/\/$/,"")+en}const{tick:rn}=yt,an=new Set(["icon","shortcut icon","apple-touch-icon"]),D=ze(Ze)??{},z=ze(Xe)??{},N={url:qe({}),page:qe({}),navigating:Ee(null),updated:$t()};function Le(e){D[e]=fe()}function on(e,t){let n=e+1;for(;D[n];)delete D[n],n+=1;for(n=t+1;z[n];)delete z[n],n+=1}function q(e){return location.href=e.href,new Promise(()=>{})}async function at(){if("serviceWorker"in navigator){const e=await navigator.serviceWorker.getRegistration(L||"/");e&&await e.update()}}function Ge(){}let Te,be,re,C,ke,v;globalThis.__sveltekit_13hoftk.data;const ae=[],oe=[];let O=null;const ee=new Map,ot=new Set,sn=new Set,H=new Set;let w={branch:[],error:null,url:null},xe=!1,se=!1,He=!0,X=!1,M=!1,st=!1,Pe=!1,it,k,I,$;const K=new Set,Ke=new Map;async function 
Rn(e,t,n){document.URL!==location.href&&(location.href=location.href),v=e,await e.hooks.init?.(),Te=Ot(e),C=document.documentElement,ke=t,be=e.nodes[0],re=e.nodes[1],be(),re(),k=history.state?.[F],I=history.state?.[Y],k||(k=I=Date.now(),history.replaceState({...history.state,[F]:k,[Y]:I},""));const r=D[k];function a(){r&&(history.scrollRestoration="manual",scrollTo(r.x,r.y))}n?(a(),await _n(ke,n)):(await W({type:"enter",url:Ue(v.hash?wn(new URL(location.href)):location.href),replace_state:!0}),a()),mn()}function cn(){ae.length=0,Pe=!1}function ct(e){oe.some(t=>t?.snapshot)&&(z[e]=oe.map(t=>t?.snapshot?.capture()))}function lt(e){z[e]?.forEach((t,n)=>{oe[n]?.snapshot?.restore(t)})}function We(){Le(k),Fe(Ze,D),ct(I),Fe(Xe,z)}async function Ce(e,t,n,r){let a;const s=await W({type:"goto",url:Ue(e),keepfocus:t.keepFocus,noscroll:t.noScroll,replace_state:t.replaceState,state:t.state,redirect_count:n,nav_token:r,accept:()=>{t.invalidateAll&&(Pe=!0,a=[...Ke.keys()]),t.invalidate&&t.invalidate.forEach(gn)}});return t.invalidateAll&&Be().then(Be).then(()=>{Ke.forEach(({resource:i},o)=>{a?.includes(o)&&i.refresh?.()})}),s}async function ln(e){if(e.id!==O?.id){const t={};K.add(t),O={id:e.id,token:t,promise:dt({...e,preload:t}).then(n=>(K.delete(t),n.type==="loaded"&&n.state.error&&(O=null),n))}}return O.promise}async function we(e){const t=(await he(e,!1))?.route;t&&await Promise.all([...t.layouts,t.leaf].map(n=>n?.[1]()))}function ft(e,t,n){w=e.state;const r=document.querySelector("style[data-sveltekit]");if(r&&r.remove(),Object.assign(E,e.props.page),it=new v.root({target:t,props:{...e.props,stores:N,components:oe},hydrate:n,sync:!1}),lt(I),n){const a={from:null,to:{params:w.params,route:{id:w.route?.id??null},url:new URL(location.href)},willUnload:!1,type:"enter",complete:Promise.resolve()};H.forEach(s=>s(a))}se=!0}function ie({url:e,params:t,branch:n,status:r,error:a,route:s,form:i}){let o="never";if(L&&(e.pathname===L||e.pathname===L+"/"))o="always";else for(const l of n)l?.slash!==void 0&&(o=l.slash);e.pathname=vt(e.pathname,o),e.search=e.search;const c={type:"loaded",state:{url:e,params:t,branch:n,error:a,route:s},props:{constructors:Yt(n).map(l=>l.node.component),page:$e(E)}};i!==void 0&&(c.props.form=i);let f={},d=!E,h=0;for(let l=0;l(o&&(c.route=!0),u[l])}),params:new Proxy(r,{get:(u,l)=>(o&&c.params.add(l),u[l])}),data:s?.data??null,url:At(n,()=>{o&&(c.url=!0)},u=>{o&&c.search_params.add(u)},v.hash),async fetch(u,l){u instanceof Request&&(l={body:u.method==="GET"||u.method==="HEAD"?void 0:await u.blob(),cache:u.cache,credentials:u.credentials,headers:[...u.headers].length>0?u?.headers:void 0,integrity:u.integrity,keepalive:u.keepalive,method:u.method,mode:u.mode,redirect:u.redirect,referrer:u.referrer,referrerPolicy:u.referrerPolicy,signal:u.signal,...l});const{resolved:p,promise:_}=ut(u,l,n);return o&&d(p.href),_},setHeaders:()=>{},depends:d,parent(){return o&&(c.parent=!0),t()},untrack(u){o=!1;try{return u()}finally{o=!0}}};i=await f.universal.load.call(null,h)??null}return{node:f,loader:e,server:s,universal:f.universal?.load?{type:"data",data:i,uses:c}:null,data:i??s?.data??null,slash:f.universal?.trailingSlash??s?.slash}}function ut(e,t,n){let r=e instanceof Request?e.url:e;const a=new URL(r,n);a.origin===n.origin&&(r=a.href.slice(n.origin.length));const s=se?Ut(r,a.href,t):It(r,t);return{resolved:a,promise:s}}function Ye(e,t,n,r,a,s){if(Pe)return!0;if(!a)return!1;if(a.parent&&e||a.route&&t||a.url&&n)return!0;for(const i of a.search_params)if(r.has(i))return!0;for(const i of 
a.params)if(s[i]!==w.params[i])return!0;for(const i of a.dependencies)if(ae.some(o=>o(new URL(i))))return!0;return!1}function Ne(e,t){return e?.type==="data"?e:e?.type==="skip"?t??null:null}function fn(e,t){if(!e)return new Set(t.searchParams.keys());const n=new Set([...e.searchParams.keys(),...t.searchParams.keys()]);for(const r of n){const a=e.searchParams.getAll(r),s=t.searchParams.getAll(r);a.every(i=>s.includes(i))&&s.every(i=>a.includes(i))&&n.delete(r)}return n}function Je({error:e,url:t,route:n,params:r}){return{type:"loaded",state:{error:e,url:t,route:n,params:r,branch:[]},props:{page:$e(E),constructors:[]}}}async function dt({id:e,invalidating:t,url:n,params:r,route:a,preload:s}){if(O?.id===e)return K.delete(O.token),O.promise;const{errors:i,layouts:o,leaf:c}=a,f=[...o,c];i.forEach(g=>g?.().catch(()=>{})),f.forEach(g=>g?.[1]().catch(()=>{}));let d=null;const h=w.url?e!==ce(w.url):!1,u=w.route?a.id!==w.route.id:!1,l=fn(w.url,n);let p=!1;const _=f.map((g,y)=>{const b=w.branch[y],A=!!g?.[0]&&(b?.loader!==g[1]||Ye(p,u,h,l,b.server?.uses,r));return A&&(p=!0),A});if(_.some(Boolean)){try{d=await gt(n,_)}catch(g){const y=await V(g,{url:n,params:r,route:{id:e}});return K.has(s)?Je({error:y,url:n,params:r,route:a}):de({status:ne(g),error:y,url:n,route:a})}if(d.type==="redirect")return d}const m=d?.nodes;let R=!1;const S=f.map(async(g,y)=>{if(!g)return;const b=w.branch[y],A=m?.[y];if((!A||A.type==="skip")&&g[1]===b?.loader&&!Ye(R,u,h,l,b.universal?.uses,r))return b;if(R=!0,A?.type==="error")throw A;return Oe({loader:g[1],url:n,params:r,route:a,parent:async()=>{const pe={};for(let ge=0;ge{});const U=[];for(let g=0;gPromise.resolve({}),server_data_node:Ne(s)}),c={node:await re(),loader:re,universal:null,server:null,data:null};return ie({url:n,params:a,branch:[o,c],status:e,error:t,route:null})}catch(o){if(o instanceof Se)return Ce(new URL(o.location,location.href),{},0);throw o}}async function dn(e){const t=e.href;if(ee.has(t))return ee.get(t);let n;try{const r=(async()=>{let a=await v.hooks.reroute({url:new URL(e),fetch:async(s,i)=>ut(s,i,e).promise})??e;if(typeof a=="string"){const s=new URL(e);v.hash?s.hash=a:s.pathname=a,a=s}return a})();ee.set(t,r),n=await r}catch{ee.delete(t);return}return n}async function he(e,t){if(e&&!ue(e,L,v.hash)){const n=await dn(e);if(!n)return;const r=hn(n);for(const a of Te){const s=a.exec(r);if(s)return{id:ce(e),invalidating:t,route:a,params:kt(s),url:e}}}}function hn(e){return bt(v.hash?e.hash.replace(/^#/,"").replace(/[?#].+/,""):e.pathname.slice(L.length))||"/"}function ce(e){return(v.hash?e.hash.replace(/^#/,""):e.pathname)+e.search}function ht({url:e,type:t,intent:n,delta:r}){let a=!1;const s=je(w,n,e,t);r!==void 0&&(s.navigation.delta=r);const i={...s.navigation,cancel:()=>{a=!0,s.reject(new Error("navigation cancelled"))}};return X||ot.forEach(o=>o(i)),a?null:s}async function W({type:e,url:t,popped:n,keepfocus:r,noscroll:a,replace_state:s,state:i={},redirect_count:o=0,nav_token:c={},accept:f=Ge,block:d=Ge}){const h=$;$=c;const u=await he(t,!1),l=e==="enter"?je(w,u,t,e):ht({url:t,type:e,delta:n?.delta,intent:u});if(!l){d(),$===c&&($=h);return}const p=k,_=I;f(),X=!0,se&&l.navigation.type!=="enter"&&N.navigating.set(J.current=l.navigation);let m=u&&await dt(u);if(!m){if(ue(t,L,v.hash))return await q(t);m=await pt(t,{id:null},await V(new Re(404,"Not Found",`Not found: ${t.pathname}`),{url:t,params:{},route:{id:null}}),404)}if(t=u?.url||t,$!==c)return l.reject(new Error("navigation aborted")),!1;if(m.type==="redirect")if(o>=20)m=await 
de({status:500,error:await V(new Error("Redirect loop"),{url:t,params:{},route:{id:null}}),url:t,route:{id:null}});else return await Ce(new URL(m.location,t).href,{},o+1,c),!1;else m.props.page.status>=400&&await N.updated.check()&&(await at(),await q(t));if(cn(),Le(p),ct(_),m.props.page.url.pathname!==t.pathname&&(t.pathname=m.props.page.url.pathname),i=n?n.state:i,!n){const g=s?0:1,y={[F]:k+=g,[Y]:I+=g,[Qe]:i};(s?history.replaceState:history.pushState).call(history,y,"",t),s||on(k,I)}if(O=null,m.props.page.state=i,se){const g=(await Promise.all(Array.from(sn,y=>y(l.navigation)))).filter(y=>typeof y=="function");if(g.length>0){let y=function(){g.forEach(b=>{H.delete(b)})};g.push(y),g.forEach(b=>{H.add(b)})}w=m.state,m.props.page&&(m.props.page.url=t),it.$set(m.props),Qt(m.props.page),st=!0}else ft(m,ke,!1);const{activeElement:R}=document;await rn();const S=n?n.scroll:a?fe():null;if(He){const g=t.hash&&document.getElementById(_t(t));S?scrollTo(S.x,S.y):g?g.scrollIntoView():scrollTo(0,0)}const U=document.activeElement!==R&&document.activeElement!==document.body;!r&&!U&&yn(t),He=!0,m.props.page&&Object.assign(E,m.props.page),X=!1,e==="popstate"&<(I),l.fulfil(void 0),H.forEach(g=>g(l.navigation)),N.navigating.set(J.current=null)}async function pt(e,t,n,r){return e.origin===Z&&e.pathname===location.pathname&&!xe?await de({status:r,error:n,url:e,route:t}):await q(e)}function pn(){let e,t,n;C.addEventListener("mousemove",o=>{const c=o.target;clearTimeout(e),e=setTimeout(()=>{s(c,j.hover)},20)});function r(o){o.defaultPrevented||s(o.composedPath()[0],j.tap)}C.addEventListener("mousedown",r),C.addEventListener("touchstart",r,{passive:!0});const a=new IntersectionObserver(o=>{for(const c of o)c.isIntersecting&&(we(new URL(c.target.href)),a.unobserve(c.target))},{threshold:0});async function s(o,c){const f=tt(o,C),d=f===t&&c>=n;if(!f||d)return;const{url:h,external:u,download:l}=ve(f,L,v.hash);if(u||l)return;const p=te(f),_=h&&ce(w.url)===ce(h);if(!(p.reload||_))if(c<=p.preload_data){t=f,n=j.tap;const m=await he(h,!1);if(!m)return;ln(m)}else c<=p.preload_code&&(t=f,n=c,we(h))}function i(){a.disconnect();for(const o of C.querySelectorAll("a")){const{url:c,external:f,download:d}=ve(o,L,v.hash);if(f||d)continue;const h=te(o);h.reload||(h.preload_code===j.viewport&&a.observe(o),h.preload_code===j.eager&&we(c))}}H.add(i),i()}function V(e,t){if(e instanceof le)return e.body;const n=ne(e),r=Xt(e);return v.hooks.handleError({error:e,event:t,status:n,message:r})??{message:r}}function In(e,t={}){return e=new URL(Ue(e)),e.origin!==Z?Promise.reject(new Error("goto: invalid URL")):Ce(e,t,0)}function gn(e){if(typeof e=="function")ae.push(e);else{const{href:t}=new URL(e,location.href);ae.push(n=>n.href===t)}}function mn(){history.scrollRestoration="manual",addEventListener("beforeunload",t=>{let n=!1;if(We(),!X){const r=je(w,void 0,null,"leave"),a={...r.navigation,cancel:()=>{n=!0,r.reject(new Error("navigation cancelled"))}};ot.forEach(s=>s(a))}n?(t.preventDefault(),t.returnValue=""):history.scrollRestoration="auto"}),addEventListener("visibilitychange",()=>{document.visibilityState==="hidden"&&We()}),navigator.connection?.saveData||pn(),C.addEventListener("click",async t=>{if(t.button||t.which!==1||t.metaKey||t.ctrlKey||t.shiftKey||t.altKey||t.defaultPrevented)return;const n=tt(t.composedPath()[0],C);if(!n)return;const{url:r,external:a,target:s,download:i}=ve(n,L,v.hash);if(!r)return;if(s==="_parent"||s==="_top"){if(window.parent!==window)return}else if(s&&s!=="_self")return;const o=te(n);if(!(n instanceof 
SVGAElement)&&r.protocol!==location.protocol&&!(r.protocol==="https:"||r.protocol==="http:")||i)return;const[f,d]=(v.hash?r.hash.replace(/^#/,""):r.href).split("#"),h=f===me(location);if(a||o.reload&&(!h||!d)){ht({url:r,type:"link"})?X=!0:t.preventDefault();return}if(d!==void 0&&h){const[,u]=w.url.href.split("#");if(u===d){if(t.preventDefault(),d===""||d==="top"&&n.ownerDocument.getElementById("top")===null)window.scrollTo({top:0});else{const l=n.ownerDocument.getElementById(decodeURIComponent(d));l&&(l.scrollIntoView(),l.focus())}return}if(M=!0,Le(k),e(r),!o.replace_state)return;M=!1}t.preventDefault(),await new Promise(u=>{requestAnimationFrame(()=>{setTimeout(u,0)}),setTimeout(u,100)}),await W({type:"link",url:r,keepfocus:o.keepfocus,noscroll:o.noscroll,replace_state:o.replace_state??r.href===location.href})}),C.addEventListener("submit",t=>{if(t.defaultPrevented)return;const n=HTMLFormElement.prototype.cloneNode.call(t.target),r=t.submitter;if((r?.formTarget||n.target)==="_blank"||(r?.formMethod||n.method)!=="get")return;const i=new URL(r?.hasAttribute("formaction")&&r?.formAction||n.action);if(ue(i,L,!1))return;const o=t.target,c=te(o);if(c.reload)return;t.preventDefault(),t.stopPropagation();const f=new FormData(o),d=r?.getAttribute("name");d&&f.append(d,r?.getAttribute("value")??""),i.search=new URLSearchParams(f).toString(),W({type:"form",url:i,keepfocus:c.keepfocus,noscroll:c.noscroll,replace_state:c.replace_state??i.href===location.href})}),addEventListener("popstate",async t=>{if(!Ae){if(t.state?.[F]){const n=t.state[F];if($={},n===k)return;const r=D[n],a=t.state[Qe]??{},s=new URL(t.state[jt]??location.href),i=t.state[Y],o=w.url?me(location)===me(w.url):!1;if(i===I&&(st||o)){a!==E.state&&(E.state=a),e(s),D[k]=fe(),r&&scrollTo(r.x,r.y),k=n;return}const f=n-k;await W({type:"popstate",url:s,popped:{state:a,scroll:r,delta:f},accept:()=>{k=n,I=i},block:()=>{history.go(-f)},nav_token:$})}else if(!M){const n=new URL(location.href);e(n),v.hash&&location.reload()}}}),addEventListener("hashchange",()=>{M&&(M=!1,history.replaceState({...history.state,[F]:++k,[Y]:I},"",location.href))});for(const t of document.querySelectorAll("link"))an.has(t.rel)&&(t.href=t.href);addEventListener("pageshow",t=>{t.persisted&&N.navigating.set(J.current=null)});function e(t){w.url=E.url=t,N.page.set($e(E)),N.page.notify()}}async function _n(e,{status:t=200,error:n,node_ids:r,params:a,route:s,server_route:i,data:o,form:c}){xe=!0;const f=new URL(location.href);let d;({params:a={},route:s={id:null}}=await he(f,!1)||{}),d=Te.find(({id:l})=>l===s.id);let h,u=!0;try{const l=r.map(async(_,m)=>{const R=o[m];return R?.uses&&(R.uses=mt(R.uses)),Oe({loader:v.nodes[_],url:f,params:a,route:s,parent:async()=>{const S={};for(let U=0;Us?"1":"0").join(""));const r=window.fetch,a=await r(n.href,{});if(!a.ok){let s;throw a.headers.get("content-type")?.includes("application/json")?s=await a.json():a.status===404?s="Not Found":a.status===500&&(s="Internal Error"),new le(a.status,s)}return new Promise(async s=>{const i=new Map,o=a.body.getReader(),c=new TextDecoder;function f(h){return Kt(h,{...v.decoders,Promise:u=>new Promise((l,p)=>{i.set(u,{fulfil:l,reject:p})})})}let d="";for(;;){const{done:h,value:u}=await o.read();if(h&&!d)break;for(d+=!u&&d?` +`:c.decode(u,{stream:!0});;){const l=d.indexOf(` +`);if(l===-1)break;const p=JSON.parse(d.slice(0,l));if(d=d.slice(l+1),p.type==="redirect")return s(p);if(p.type==="data")p.nodes?.forEach(_=>{_?.type==="data"&&(_.uses=mt(_.uses),_.data=f(_.data))}),s(p);else 
if(p.type==="chunk"){const{id:_,data:m,error:R}=p,S=i.get(_);i.delete(_),R?S.reject(f(R)):S.fulfil(f(m))}}}})}function mt(e){return{dependencies:new Set(e?.dependencies??[]),params:new Set(e?.params??[]),parent:!!e?.parent,route:!!e?.route,url:!!e?.url,search_params:new Set(e?.search_params??[])}}let Ae=!1;function yn(e){const t=document.querySelector("[autofocus]");if(t)t.focus();else{const n=_t(e);if(n&&document.getElementById(n)){const{x:a,y:s}=fe();setTimeout(()=>{const i=history.state;Ae=!0,location.replace(`#${n}`),v.hash&&location.replace(e.hash),history.replaceState(i,"",e.hash),scrollTo(a,s),Ae=!1})}else{const a=document.body,s=a.getAttribute("tabindex");a.tabIndex=-1,a.focus({preventScroll:!0,focusVisible:!1}),s!==null?a.setAttribute("tabindex",s):a.removeAttribute("tabindex")}const r=getSelection();if(r&&r.type!=="None"){const a=[];for(let s=0;s{if(r.rangeCount===a.length){for(let s=0;s{a=c,s=f});return i.catch(()=>{}),{navigation:{from:{params:e.params,route:{id:e.route?.id??null},url:e.url},to:n&&{params:t?.params??null,route:{id:t?.route?.id??null},url:n},willUnload:!t,type:r,complete:i},fulfil:a,reject:s}}function $e(e){return{data:e.data,error:e.error,form:e.form,params:e.params,route:e.route,state:e.state,status:e.status,url:e.url}}function wn(e){const t=new URL(e);return t.hash=decodeURIComponent(e.hash),t}function _t(e){let t;if(v.hash){const[,,n]=e.hash.split("#",3);t=n??""}else t=e.hash.slice(1);return decodeURIComponent(t)}export{Rn as a,In as g,kn as l,E as p,N as s}; diff --git a/webapp/assets/_app/immutable/chunks/CclkODgu.js b/webapp/assets/_app/immutable/chunks/CclkODgu.js new file mode 100644 index 00000000..f3c6b3c0 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CclkODgu.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as Oe}from"./B3Pzt0F_.js";import{p as qe,E as Ie,o as Ke,f as x,j as t,r,k as o,g as e,m,z as B,t as y,x as ue,u as c,v,n as T,s as i,e as be,c as u,D as Ge,d as He}from"./D8EpLgQ1.js";import{p as ge,i as $}from"./5WA7h8uK.js";import{e as Je,i as Qe}from"./u94nIB4-.js";import{r as me,b as ye,g as Ve}from"./CiE1LlKV.js";import{a as Xe,b as Ye}from"./C6k1Q4We.js";import{p as Ze}from"./D4Caz1gY.js";import{M as ea}from"./qB7B8uiS.js";var aa=x('

                '),ta=x('
                Owner:
                '),ra=x('
                '),sa=x(""),oa=x(''),na=x('

                Leave empty to auto-generate a new secret

                '),ia=x('
                Updating...
                '),da=x('

                Name:
                Endpoint:
                Current Credentials:
                Current Pool Balancer:

                Leave unchanged to keep current credentials

                Round Robin distributes jobs evenly across pools, Pack fills pools in order

                ');function xa(xe,D){qe(D,!1);let d=ge(D,"entity",8),k=ge(D,"entityType",8);const P=Ie();let C=m(!1),w=m(""),M=m([]),R=m(!1),f=m(""),_=m(""),h=m(""),b=m(!1);function fe(){if(k()==="repository"){const l=d();return`${l.owner}/${l.name}`}return d().name||""}function W(){return k().charAt(0).toUpperCase()+k().slice(1)}function _e(){return k()==="repository"&&d().owner||""}async function he(){try{i(R,!0),i(M,await Ve.listCredentials())}catch(l){i(w,l instanceof Error?l.message:"Failed to load credentials")}finally{i(R,!1)}}function ke(){i(f,d().credentials_name||""),i(_,d().pool_balancing_type||"roundrobin"),i(h,""),i(b,!1)}async function we(){try{i(C,!0),i(w,"");const l={};let E=!1;if(e(f)&&e(f)!==d().credentials_name&&(l.credentials_name=e(f),E=!0),e(_)&&e(_)!==d().pool_balancing_type&&(l.pool_balancer_type=e(_),E=!0),e(b)){if(!e(h).trim()){i(w,"Please enter a webhook secret or uncheck the option to change it");return}l.webhook_secret=e(h),E=!0}if(!E){P("close");return}P("submit",l)}catch(l){i(w,l instanceof Error?l.message:`Failed to update ${k()}`)}finally{i(C,!1)}}Ke(()=>{he(),ke()}),Oe(),ea(xe,{$$events:{close:()=>P("close")},children:(l,E)=>{var j=da(),F=t(j),N=t(F),Ce=t(N);r(N);var Y=o(N,2),Ee=t(Y,!0);r(Y),r(F);var z=o(F,2),Z=t(z);{var Se=a=>{var s=aa(),n=t(s),p=t(n,!0);r(n),r(s),y(()=>v(p,e(w))),u(a,s)};$(Z,a=>{e(w)&&a(Se)})}var A=o(Z,2),L=t(A),Ue=t(L);r(L);var ee=o(L,2),ae=t(ee);{var $e=a=>{var s=ta(),n=o(t(s),2),p=t(n,!0);r(n),r(s),y(S=>v(p,S),[()=>c(_e)]),u(a,s)};$(ae,a=>{k()==="repository"&&a($e)})}var O=o(ae,2),te=o(t(O),2),Pe=t(te,!0);r(te),r(O);var q=o(O,2),re=o(t(q),2),Be=t(re,!0);r(re),r(q);var I=o(q,2),se=o(t(I),2),Te=t(se,!0);r(se),r(I);var oe=o(I,2),ne=o(t(oe),2),De=t(ne,!0);r(ne),r(oe),r(ee),r(A);var K=o(A,2),G=t(K),Me=o(t(G),2);{var Re=a=>{var s=ra();u(a,s)},We=a=>{var s=oa();y(()=>{e(f),ue(()=>{e(M)})});var n=t(s);n.value=n.__value="";var p=o(n);Je(p,1,()=>e(M),Qe,(S,g)=>{var U=sa(),Le=t(U);r(U);var pe={};y(()=>{v(Le,`${e(g),c(()=>e(g).name)??""} (${e(g),c(()=>e(g).endpoint?.name||"Unknown")??""})`),pe!==(pe=(e(g),c(()=>e(g).name)))&&(U.value=(U.__value=(e(g),c(()=>e(g).name)))??"")}),u(S,U)}),r(s),ye(s,()=>e(f),S=>i(f,S)),u(a,s)};$(Me,a=>{e(R)?a(Re):a(We,!1)})}B(2),r(G);var H=o(G,2),J=o(t(H),2);y(()=>{e(_),ue(()=>{})});var Q=t(J);Q.value=Q.__value="roundrobin";var ie=o(Q);ie.value=ie.__value="pack",r(J),B(2),r(H);var de=o(H,2),V=t(de),le=t(V);me(le),B(2),r(V);var je=o(V,2);{var Fe=a=>{var s=na(),n=o(t(s),2);me(n),B(2),r(s),y(()=>n.required=e(b)),Ye(n,()=>e(h),p=>i(h,p)),u(a,s)};$(je,a=>{e(b)&&a(Fe)})}r(de),r(K);var ce=o(K,2),ve=t(ce),X=o(ve,2),Ne=t(X);{var ze=a=>{var s=ia();u(a,s)},Ae=a=>{var s=Ge();y(n=>v(s,`Update ${n??""}`),[()=>c(W)]),u(a,s)};$(Ne,a=>{e(C)?a(ze):a(Ae,!1)})}r(X),r(ce),r(z),r(j),y((a,s,n,p)=>{v(Ce,`Update ${a??""}`),v(Ee,s),v(Ue,`${n??""} Information`),v(Pe,(T(d()),c(()=>d().name))),v(Be,(T(d()),c(()=>d().endpoint?.name))),v(Te,(T(d()),c(()=>d().credentials_name))),v(De,(T(d()),c(()=>d().pool_balancing_type||"roundrobin"))),X.disabled=p},[()=>c(W),()=>c(fe),()=>c(W),()=>(e(C),e(b),e(h),c(()=>e(C)||e(b)&&!e(h).trim()))]),ye(J,()=>e(_),a=>i(_,a)),Xe(le,()=>e(b),a=>i(b,a)),be("click",ve,()=>P("close")),be("submit",z,Ze(we)),u(l,j)},$$slots:{default:!0}}),He()}export{xa as U}; diff --git a/webapp/assets/_app/immutable/chunks/CiE1LlKV.js b/webapp/assets/_app/immutable/chunks/CiE1LlKV.js new file mode 100644 index 00000000..89e2bf89 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CiE1LlKV.js @@ -0,0 +1,7 @@ 
+import"./DsnmJJEf.js";import{i as Tr}from"./B3Pzt0F_.js";import{t as Ie,K as Y,L as Ke,aj as Br,aW as Dr,T as ht,a3 as Lr,af as jr,aX as kr,aY as zr,aZ as ut,Y as Fr,a_ as Gr,Z as we,M as Jt,ae as Xe,P as Kt,F as ot,a8 as _r,a$ as qr,b0 as Hr,aw as $r,W as Nr,b1 as Wr,b2 as Qr,b3 as Mr,g as k,b4 as Jr,b5 as Kr,a2 as Ot,b6 as Xr,b7 as Yr,b8 as Zr,b9 as es,ba as ts,at as rs,bb as ss,bc as as,bd as os,be as ns,p as ls,E as is,l as ee,a as cs,f as ps,e as ds,c as ue,d as hs,n as oe,m as te,s as re,j as qe,k as Pt,r as He,C as nt,B as us,b as Os}from"./D8EpLgQ1.js";import{l as bt,p as se,i as $e}from"./5WA7h8uK.js";function mt(t,e,r=!1,s=!1,o=!1){var a=t,n="";Ie(()=>{var l=Br;if(n===(n=e()??"")){Y&&Ke();return}if(l.nodes_start!==null&&(Dr(l.nodes_start,l.nodes_end),l.nodes_start=l.nodes_end=null),n!==""){if(Y){ht.data;for(var i=Ke(),c=i;i!==null&&(i.nodeType!==Lr||i.data!=="");)c=i,i=jr(i);if(i===null)throw kr(),zr;ut(ht,c),a=Fr(i);return}var p=n+"";r?p=`${p}`:s&&(p=`${p}`);var R=Gr(p);if((r||s)&&(R=we(R)),ut(we(R),R.lastChild),r||s)for(;we(R);)a.before(we(R));else a.before(R)}})}function Ps(t,e,r,s,o){Y&&Ke();var a=e.$$slots?.[r],n=!1;a===!0&&(a=e[r==="default"?"children":r],n=!0),a===void 0||a(t,n?()=>s:s)}function bs(t,e){var r=void 0,s;Jt(()=>{r!==(r=e())&&(s&&(Xe(s),s=null),r&&(s=Kt(()=>{ot(()=>r(t))})))})}function Xt(t){var e,r,s="";if(typeof t=="string"||typeof t=="number")s+=t;else if(typeof t=="object")if(Array.isArray(t)){var o=t.length;for(e=0;e=0;){var l=n+a;(n===0||Vt.includes(s[n-1]))&&(l===s.length||Vt.includes(s[l]))?s=(n===0?"":s.substring(0,n))+s.substring(l+1):n=l}}return s===""?null:s}function St(t,e=!1){var r=e?" !important;":";",s="";for(var o in t){var a=t[o];a!=null&&a!==""&&(s+=" "+o+": "+a+r)}return s}function Ne(t){return t[0]!=="-"||t[1]!=="-"?t.toLowerCase():t}function As(t,e){if(e){var r="",s,o;if(Array.isArray(e)?(s=e[0],o=e[1]):s=e,t){t=String(t).replaceAll(/\s*\/\*.*?\*\/\s*/g,"").trim();var a=!1,n=0,l=!1,i=[];s&&i.push(...Object.keys(s).map(Ne)),o&&i.push(...Object.keys(o).map(Ne));var c=0,p=-1;const y=t.length;for(var R=0;R{Ce(t,t.__value)});e.observe(t,{childList:!0,subtree:!0,attributes:!0,attributeFilter:["value"]}),$r(()=>{e.disconnect()})}function Uo(t,e,r=e){var s=!0;Nr(t,"change",o=>{var a=o?"[selected]":":checked",n;if(t.multiple)n=[].map.call(t.querySelectorAll(a),me);else{var l=t.querySelector(a)??t.querySelector("option:not([disabled])");n=l&&me(l)}r(n)}),ot(()=>{var o=e();if(Ce(t,o,s),s&&o===void 0){var a=t.querySelector(":checked");a!==null&&(o=me(a),r(o))}t.__value=o,s=!1}),Yt(t)}function me(t){return"__value"in t?t.__value:t.value}const Oe=Symbol("class"),Pe=Symbol("style"),Zt=Symbol("is custom element"),er=Symbol("is html");function To(t){if(Y){var e=!1,r=()=>{if(!e){if(e=!0,t.hasAttribute("value")){var s=t.value;Ue(t,"value",null),t.value=s}if(t.hasAttribute("checked")){var o=t.checked;Ue(t,"checked",null),t.checked=o}}};t.__on_r=r,ss(r),as()}}function Bo(t,e){var r=lt(t);r.value===(r.value=e??void 0)||t.value===e&&(e!==0||t.nodeName!=="PROGRESS")||(t.value=e??"")}function fs(t,e){e?t.hasAttribute("selected")||t.setAttribute("selected",""):t.removeAttribute("selected")}function Ue(t,e,r,s){var o=lt(t);Y&&(o[e]=t.getAttribute(e),e==="src"||e==="srcset"||e==="href"&&t.nodeName==="LINK")||o[e]!==(o[e]=r)&&(e==="loading"&&(t[os]=r),r==null?t.removeAttribute(e):typeof r!="string"&&tr(t).includes(e)?t[e]=r:t.setAttribute(e,r))}function ys(t,e,r,s,o=!1){var a=lt(t),n=a[Zt],l=!a[er];let i=Y&&n;i&&Ot(!1);var c=e||{},p=t.tagName==="OPTION";for(var R in 
e)R in r||(r[R]=null);r.class?r.class=Vs(r.class):r[Oe]&&(r.class=null),r[Pe]&&(r.style??=null);var I=tr(t);for(const E in r){let v=r[E];if(p&&E==="value"&&v==null){t.value=t.__value="",c[E]=v;continue}if(E==="class"){var T=t.namespaceURI==="http://www.w3.org/1999/xhtml";Ee(t,T,v,s,e?.[Oe],r[Oe]),c[E]=v,c[Oe]=r[Oe];continue}if(E==="style"){Rs(t,v,e?.[Pe],r[Pe]),c[E]=v,c[Pe]=r[Pe];continue}var f=c[E];if(!(v===f&&!(v===void 0&&t.hasAttribute(E)))){c[E]=v;var y=E[0]+E[1];if(y!=="$$")if(y==="on"){const U={},L="$$"+E;let B=E.slice(2);var w=ns(B);if(Xr(B)&&(B=B.slice(0,-7),U.capture=!0),!w&&f){if(v!=null)continue;t.removeEventListener(B,c[L],U),c[L]=null}if(v!=null)if(w)t[`__${B}`]=v,Zr([B]);else{let Z=function(ce){c[E].call(this,ce)};c[L]=Yr(B,t,Z,U)}else w&&(t[`__${B}`]=void 0)}else if(E==="style")Ue(t,E,v);else if(E==="autofocus")es(t,!!v);else if(!n&&(E==="__value"||E==="value"&&v!=null))t.value=t.__value=v;else if(E==="selected"&&p)fs(t,v);else{var C=E;l||(C=ts(C));var D=C==="defaultValue"||C==="defaultChecked";if(v==null&&!n&&!D)if(a[E]=null,C==="value"||C==="checked"){let U=t;const L=e===void 0;if(C==="value"){let B=U.defaultValue;U.removeAttribute(C),U.defaultValue=B,U.value=U.__value=L?B:null}else{let B=U.defaultChecked;U.removeAttribute(C),U.defaultChecked=B,U.checked=L?B:!1}}else t.removeAttribute(E);else D||I.includes(C)&&(n||typeof v!="string")?(t[C]=v,C in a&&(a[C]=rs)):typeof v!="function"&&Ue(t,C,v)}}}return i&&Ot(!0),c}function ws(t,e,r=[],s=[],o,a=!1){Wr(r,s,n=>{var l=void 0,i={},c=t.nodeName==="SELECT",p=!1;if(Jt(()=>{var I=e(...n.map(k)),T=ys(t,l,I,o,a);p&&c&&"value"in I&&Ce(t,I.value);for(let y of Object.getOwnPropertySymbols(i))I[y]||Xe(i[y]);for(let y of Object.getOwnPropertySymbols(I)){var f=I[y];y.description===Jr&&(!l||f!==l[y])&&(i[y]&&Xe(i[y]),i[y]=Kt(()=>bs(t,()=>f))),T[y]=f}l=T}),c){var R=t;ot(()=>{Ce(R,l.value,!0),Yt(R)})}p=!0})}function lt(t){return t.__attributes??={[Zt]:t.nodeName.includes("-"),[er]:t.namespaceURI===Qr}}var At=new Map;function tr(t){var e=At.get(t.nodeName);if(e)return e;At.set(t.nodeName,e=[]);for(var r,s=t,o=Element.prototype;o!==s;){r=Kr(s);for(var a in r)r[a].set&&e.push(a);s=Mr(s)}return e}function rr(t,e){return function(){return t.apply(e,arguments)}}const{toString:Is}=Object.prototype,{getPrototypeOf:it}=Object,{iterator:De,toStringTag:sr}=Symbol,Le=(t=>e=>{const r=Is.call(e);return t[r]||(t[r]=r.slice(8,-1).toLowerCase())})(Object.create(null)),N=t=>(t=t.toLowerCase(),e=>Le(e)===t),je=t=>e=>typeof e===t,{isArray:de}=Array,Ve=je("undefined");function Se(t){return t!==null&&!Ve(t)&&t.constructor!==null&&!Ve(t.constructor)&&_(t.constructor.isBuffer)&&t.constructor.isBuffer(t)}const ar=N("ArrayBuffer");function Es(t){let e;return typeof ArrayBuffer<"u"&&ArrayBuffer.isView?e=ArrayBuffer.isView(t):e=t&&t.buffer&&ar(t.buffer),e}const gs=je("string"),_=je("function"),or=je("number"),Ae=t=>t!==null&&typeof t=="object",xs=t=>t===!0||t===!1,ge=t=>{if(Le(t)!=="object")return!1;const e=it(t);return(e===null||e===Object.prototype||Object.getPrototypeOf(e)===null)&&!(sr in t)&&!(De in t)},vs=t=>{if(!Ae(t)||Se(t))return!1;try{return Object.keys(t).length===0&&Object.getPrototypeOf(t)===Object.prototype}catch{return!1}},Cs=N("Date"),Us=N("File"),Ts=N("Blob"),Bs=N("FileList"),Ds=t=>Ae(t)&&_(t.pipe),Ls=t=>{let e;return t&&(typeof FormData=="function"&&t instanceof FormData||_(t.append)&&((e=Le(t))==="formdata"||e==="object"&&_(t.toString)&&t.toString()==="[object 
FormData]"))},js=N("URLSearchParams"),[ks,zs,Fs,Gs]=["ReadableStream","Request","Response","Headers"].map(N),_s=t=>t.trim?t.trim():t.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,"");function Re(t,e,{allOwnKeys:r=!1}={}){if(t===null||typeof t>"u")return;let s,o;if(typeof t!="object"&&(t=[t]),de(t))for(s=0,o=t.length;s0;)if(o=r[s],e===o.toLowerCase())return o;return null}const ne=typeof globalThis<"u"?globalThis:typeof self<"u"?self:typeof window<"u"?window:global,lr=t=>!Ve(t)&&t!==ne;function Ye(){const{caseless:t}=lr(this)&&this||{},e={},r=(s,o)=>{const a=t&&nr(e,o)||o;ge(e[a])&&ge(s)?e[a]=Ye(e[a],s):ge(s)?e[a]=Ye({},s):de(s)?e[a]=s.slice():e[a]=s};for(let s=0,o=arguments.length;s(Re(e,(o,a)=>{r&&_(o)?t[a]=rr(o,r):t[a]=o},{allOwnKeys:s}),t),Hs=t=>(t.charCodeAt(0)===65279&&(t=t.slice(1)),t),$s=(t,e,r,s)=>{t.prototype=Object.create(e.prototype,s),t.prototype.constructor=t,Object.defineProperty(t,"super",{value:e.prototype}),r&&Object.assign(t.prototype,r)},Ns=(t,e,r,s)=>{let o,a,n;const l={};if(e=e||{},t==null)return e;do{for(o=Object.getOwnPropertyNames(t),a=o.length;a-- >0;)n=o[a],(!s||s(n,t,e))&&!l[n]&&(e[n]=t[n],l[n]=!0);t=r!==!1&&it(t)}while(t&&(!r||r(t,e))&&t!==Object.prototype);return e},Ws=(t,e,r)=>{t=String(t),(r===void 0||r>t.length)&&(r=t.length),r-=e.length;const s=t.indexOf(e,r);return s!==-1&&s===r},Qs=t=>{if(!t)return null;if(de(t))return t;let e=t.length;if(!or(e))return null;const r=new Array(e);for(;e-- >0;)r[e]=t[e];return r},Ms=(t=>e=>t&&e instanceof t)(typeof Uint8Array<"u"&&it(Uint8Array)),Js=(t,e)=>{const s=(t&&t[De]).call(t);let o;for(;(o=s.next())&&!o.done;){const a=o.value;e.call(t,a[0],a[1])}},Ks=(t,e)=>{let r;const s=[];for(;(r=t.exec(e))!==null;)s.push(r);return s},Xs=N("HTMLFormElement"),Ys=t=>t.toLowerCase().replace(/[-_\s]([a-z\d])(\w*)/g,function(r,s,o){return s.toUpperCase()+o}),Rt=(({hasOwnProperty:t})=>(e,r)=>t.call(e,r))(Object.prototype),Zs=N("RegExp"),ir=(t,e)=>{const r=Object.getOwnPropertyDescriptors(t),s={};Re(r,(o,a)=>{let n;(n=e(o,a,t))!==!1&&(s[a]=n||o)}),Object.defineProperties(t,s)},ea=t=>{ir(t,(e,r)=>{if(_(t)&&["arguments","caller","callee"].indexOf(r)!==-1)return!1;const s=t[r];if(_(s)){if(e.enumerable=!1,"writable"in e){e.writable=!1;return}e.set||(e.set=()=>{throw Error("Can not rewrite read-only method '"+r+"'")})}})},ta=(t,e)=>{const r={},s=o=>{o.forEach(a=>{r[a]=!0})};return de(t)?s(t):s(String(t).split(e)),r},ra=()=>{},sa=(t,e)=>t!=null&&Number.isFinite(t=+t)?t:e;function aa(t){return!!(t&&_(t.append)&&t[sr]==="FormData"&&t[De])}const oa=t=>{const e=new Array(10),r=(s,o)=>{if(Ae(s)){if(e.indexOf(s)>=0)return;if(Se(s))return s;if(!("toJSON"in s)){e[o]=s;const a=de(s)?[]:{};return Re(s,(n,l)=>{const i=r(n,o+1);!Ve(i)&&(a[l]=i)}),e[o]=void 0,a}}return s};return r(t,0)},na=N("AsyncFunction"),la=t=>t&&(Ae(t)||_(t))&&_(t.then)&&_(t.catch),cr=((t,e)=>t?setImmediate:e?((r,s)=>(ne.addEventListener("message",({source:o,data:a})=>{o===ne&&a===r&&s.length&&s.shift()()},!1),o=>{s.push(o),ne.postMessage(r,"*")}))(`axios@${Math.random()}`,[]):r=>setTimeout(r))(typeof setImmediate=="function",_(ne.postMessage)),ia=typeof queueMicrotask<"u"?queueMicrotask.bind(ne):typeof 
process<"u"&&process.nextTick||cr,ca=t=>t!=null&&_(t[De]),h={isArray:de,isArrayBuffer:ar,isBuffer:Se,isFormData:Ls,isArrayBufferView:Es,isString:gs,isNumber:or,isBoolean:xs,isObject:Ae,isPlainObject:ge,isEmptyObject:vs,isReadableStream:ks,isRequest:zs,isResponse:Fs,isHeaders:Gs,isUndefined:Ve,isDate:Cs,isFile:Us,isBlob:Ts,isRegExp:Zs,isFunction:_,isStream:Ds,isURLSearchParams:js,isTypedArray:Ms,isFileList:Bs,forEach:Re,merge:Ye,extend:qs,trim:_s,stripBOM:Hs,inherits:$s,toFlatObject:Ns,kindOf:Le,kindOfTest:N,endsWith:Ws,toArray:Qs,forEachEntry:Js,matchAll:Ks,isHTMLForm:Xs,hasOwnProperty:Rt,hasOwnProp:Rt,reduceDescriptors:ir,freezeMethods:ea,toObjectSet:ta,toCamelCase:Ys,noop:ra,toFiniteNumber:sa,findKey:nr,global:ne,isContextDefined:lr,isSpecCompliantForm:aa,toJSONObject:oa,isAsyncFn:na,isThenable:la,setImmediate:cr,asap:ia,isIterable:ca};function g(t,e,r,s,o){Error.call(this),Error.captureStackTrace?Error.captureStackTrace(this,this.constructor):this.stack=new Error().stack,this.message=t,this.name="AxiosError",e&&(this.code=e),r&&(this.config=r),s&&(this.request=s),o&&(this.response=o,this.status=o.status?o.status:null)}h.inherits(g,Error,{toJSON:function(){return{message:this.message,name:this.name,description:this.description,number:this.number,fileName:this.fileName,lineNumber:this.lineNumber,columnNumber:this.columnNumber,stack:this.stack,config:h.toJSONObject(this.config),code:this.code,status:this.status}}});const pr=g.prototype,dr={};["ERR_BAD_OPTION_VALUE","ERR_BAD_OPTION","ECONNABORTED","ETIMEDOUT","ERR_NETWORK","ERR_FR_TOO_MANY_REDIRECTS","ERR_DEPRECATED","ERR_BAD_RESPONSE","ERR_BAD_REQUEST","ERR_CANCELED","ERR_NOT_SUPPORT","ERR_INVALID_URL"].forEach(t=>{dr[t]={value:t}});Object.defineProperties(g,dr);Object.defineProperty(pr,"isAxiosError",{value:!0});g.from=(t,e,r,s,o,a)=>{const n=Object.create(pr);return h.toFlatObject(t,n,function(i){return i!==Error.prototype},l=>l!=="isAxiosError"),g.call(n,t.message,e,r,s,o),n.cause=t,n.name=t.name,a&&Object.assign(n,a),n};const pa=null;function Ze(t){return h.isPlainObject(t)||h.isArray(t)}function hr(t){return h.endsWith(t,"[]")?t.slice(0,-2):t}function ft(t,e,r){return t?t.concat(e).map(function(o,a){return o=hr(o),!r&&a?"["+o+"]":o}).join(r?".":""):e}function da(t){return h.isArray(t)&&!t.some(Ze)}const ha=h.toFlatObject(h,{},null,function(e){return/^is[A-Z]/.test(e)});function ke(t,e,r){if(!h.isObject(t))throw new TypeError("target must be an object");e=e||new FormData,r=h.toFlatObject(r,{metaTokens:!0,dots:!1,indexes:!1},!1,function(y,w){return!h.isUndefined(w[y])});const s=r.metaTokens,o=r.visitor||p,a=r.dots,n=r.indexes,i=(r.Blob||typeof Blob<"u"&&Blob)&&h.isSpecCompliantForm(e);if(!h.isFunction(o))throw new TypeError("visitor must be a function");function c(f){if(f===null)return"";if(h.isDate(f))return f.toISOString();if(h.isBoolean(f))return f.toString();if(!i&&h.isBlob(f))throw new g("Blob is not supported. 
Use a Buffer instead.");return h.isArrayBuffer(f)||h.isTypedArray(f)?i&&typeof Blob=="function"?new Blob([f]):Buffer.from(f):f}function p(f,y,w){let C=f;if(f&&!w&&typeof f=="object"){if(h.endsWith(y,"{}"))y=s?y:y.slice(0,-2),f=JSON.stringify(f);else if(h.isArray(f)&&da(f)||(h.isFileList(f)||h.endsWith(y,"[]"))&&(C=h.toArray(f)))return y=hr(y),C.forEach(function(E,v){!(h.isUndefined(E)||E===null)&&e.append(n===!0?ft([y],v,a):n===null?y:y+"[]",c(E))}),!1}return Ze(f)?!0:(e.append(ft(w,y,a),c(f)),!1)}const R=[],I=Object.assign(ha,{defaultVisitor:p,convertValue:c,isVisitable:Ze});function T(f,y){if(!h.isUndefined(f)){if(R.indexOf(f)!==-1)throw Error("Circular reference detected in "+y.join("."));R.push(f),h.forEach(f,function(C,D){(!(h.isUndefined(C)||C===null)&&o.call(e,C,h.isString(D)?D.trim():D,y,I))===!0&&T(C,y?y.concat(D):[D])}),R.pop()}}if(!h.isObject(t))throw new TypeError("data must be an object");return T(t),e}function yt(t){const e={"!":"%21","'":"%27","(":"%28",")":"%29","~":"%7E","%20":"+","%00":"\0"};return encodeURIComponent(t).replace(/[!'()~]|%20|%00/g,function(s){return e[s]})}function ct(t,e){this._pairs=[],t&&ke(t,this,e)}const ur=ct.prototype;ur.append=function(e,r){this._pairs.push([e,r])};ur.toString=function(e){const r=e?function(s){return e.call(this,s,yt)}:yt;return this._pairs.map(function(o){return r(o[0])+"="+r(o[1])},"").join("&")};function ua(t){return encodeURIComponent(t).replace(/%3A/gi,":").replace(/%24/g,"$").replace(/%2C/gi,",").replace(/%20/g,"+").replace(/%5B/gi,"[").replace(/%5D/gi,"]")}function Or(t,e,r){if(!e)return t;const s=r&&r.encode||ua;h.isFunction(r)&&(r={serialize:r});const o=r&&r.serialize;let a;if(o?a=o(e,r):a=h.isURLSearchParams(e)?e.toString():new ct(e,r).toString(s),a){const n=t.indexOf("#");n!==-1&&(t=t.slice(0,n)),t+=(t.indexOf("?")===-1?"?":"&")+a}return t}class wt{constructor(){this.handlers=[]}use(e,r,s){return this.handlers.push({fulfilled:e,rejected:r,synchronous:s?s.synchronous:!1,runWhen:s?s.runWhen:null}),this.handlers.length-1}eject(e){this.handlers[e]&&(this.handlers[e]=null)}clear(){this.handlers&&(this.handlers=[])}forEach(e){h.forEach(this.handlers,function(s){s!==null&&e(s)})}}const Pr={silentJSONParsing:!0,forcedJSONParsing:!0,clarifyTimeoutError:!1},Oa=typeof URLSearchParams<"u"?URLSearchParams:ct,Pa=typeof FormData<"u"?FormData:null,ba=typeof Blob<"u"?Blob:null,ma={isBrowser:!0,classes:{URLSearchParams:Oa,FormData:Pa,Blob:ba},protocols:["http","https","file","blob","url","data"]},pt=typeof window<"u"&&typeof document<"u",et=typeof navigator=="object"&&navigator||void 0,Va=pt&&(!et||["ReactNative","NativeScript","NS"].indexOf(et.product)<0),Sa=typeof WorkerGlobalScope<"u"&&self instanceof WorkerGlobalScope&&typeof self.importScripts=="function",Aa=pt&&window.location.href||"http://localhost",Ra=Object.freeze(Object.defineProperty({__proto__:null,hasBrowserEnv:pt,hasStandardBrowserEnv:Va,hasStandardBrowserWebWorkerEnv:Sa,navigator:et,origin:Aa},Symbol.toStringTag,{value:"Module"})),G={...Ra,...ma};function fa(t,e){return ke(t,new G.classes.URLSearchParams,{visitor:function(r,s,o,a){return G.isNode&&h.isBuffer(r)?(this.append(s,r.toString("base64")),!1):a.defaultVisitor.apply(this,arguments)},...e})}function ya(t){return h.matchAll(/\w+|\[(\w*)]/g,t).map(e=>e[0]==="[]"?"":e[1]||e[0])}function wa(t){const e={},r=Object.keys(t);let s;const o=r.length;let a;for(s=0;s=r.length;return 
n=!n&&h.isArray(o)?o.length:n,i?(h.hasOwnProp(o,n)?o[n]=[o[n],s]:o[n]=s,!l):((!o[n]||!h.isObject(o[n]))&&(o[n]=[]),e(r,s,o[n],a)&&h.isArray(o[n])&&(o[n]=wa(o[n])),!l)}if(h.isFormData(t)&&h.isFunction(t.entries)){const r={};return h.forEachEntry(t,(s,o)=>{e(ya(s),o,r,0)}),r}return null}function Ia(t,e,r){if(h.isString(t))try{return(e||JSON.parse)(t),h.trim(t)}catch(s){if(s.name!=="SyntaxError")throw s}return(r||JSON.stringify)(t)}const fe={transitional:Pr,adapter:["xhr","http","fetch"],transformRequest:[function(e,r){const s=r.getContentType()||"",o=s.indexOf("application/json")>-1,a=h.isObject(e);if(a&&h.isHTMLForm(e)&&(e=new FormData(e)),h.isFormData(e))return o?JSON.stringify(br(e)):e;if(h.isArrayBuffer(e)||h.isBuffer(e)||h.isStream(e)||h.isFile(e)||h.isBlob(e)||h.isReadableStream(e))return e;if(h.isArrayBufferView(e))return e.buffer;if(h.isURLSearchParams(e))return r.setContentType("application/x-www-form-urlencoded;charset=utf-8",!1),e.toString();let l;if(a){if(s.indexOf("application/x-www-form-urlencoded")>-1)return fa(e,this.formSerializer).toString();if((l=h.isFileList(e))||s.indexOf("multipart/form-data")>-1){const i=this.env&&this.env.FormData;return ke(l?{"files[]":e}:e,i&&new i,this.formSerializer)}}return a||o?(r.setContentType("application/json",!1),Ia(e)):e}],transformResponse:[function(e){const r=this.transitional||fe.transitional,s=r&&r.forcedJSONParsing,o=this.responseType==="json";if(h.isResponse(e)||h.isReadableStream(e))return e;if(e&&h.isString(e)&&(s&&!this.responseType||o)){const n=!(r&&r.silentJSONParsing)&&o;try{return JSON.parse(e)}catch(l){if(n)throw l.name==="SyntaxError"?g.from(l,g.ERR_BAD_RESPONSE,this,null,this.response):l}}return e}],timeout:0,xsrfCookieName:"XSRF-TOKEN",xsrfHeaderName:"X-XSRF-TOKEN",maxContentLength:-1,maxBodyLength:-1,env:{FormData:G.classes.FormData,Blob:G.classes.Blob},validateStatus:function(e){return e>=200&&e<300},headers:{common:{Accept:"application/json, text/plain, */*","Content-Type":void 0}}};h.forEach(["delete","get","head","post","put","patch"],t=>{fe.headers[t]={}});const Ea=h.toObjectSet(["age","authorization","content-length","content-type","etag","expires","from","host","if-modified-since","if-unmodified-since","last-modified","location","max-forwards","proxy-authorization","referer","retry-after","user-agent"]),ga=t=>{const e={};let r,s,o;return t&&t.split(` +`).forEach(function(n){o=n.indexOf(":"),r=n.substring(0,o).trim().toLowerCase(),s=n.substring(o+1).trim(),!(!r||e[r]&&Ea[r])&&(r==="set-cookie"?e[r]?e[r].push(s):e[r]=[s]:e[r]=e[r]?e[r]+", "+s:s)}),e},It=Symbol("internals");function be(t){return t&&String(t).trim().toLowerCase()}function xe(t){return t===!1||t==null?t:h.isArray(t)?t.map(xe):String(t)}function xa(t){const e=Object.create(null),r=/([^\s,;=]+)\s*(?:=\s*([^,;]+))?/g;let s;for(;s=r.exec(t);)e[s[1]]=s[2];return e}const va=t=>/^[-_a-zA-Z0-9^`|~,!#$%&'*+.]+$/.test(t.trim());function Qe(t,e,r,s,o){if(h.isFunction(s))return s.call(this,e,r);if(o&&(e=r),!!h.isString(e)){if(h.isString(s))return e.indexOf(s)!==-1;if(h.isRegExp(s))return s.test(e)}}function Ca(t){return t.trim().toLowerCase().replace(/([a-z\d])(\w*)/g,(e,r,s)=>r.toUpperCase()+s)}function Ua(t,e){const r=h.toCamelCase(" "+e);["get","set","has"].forEach(s=>{Object.defineProperty(t,s+r,{value:function(o,a,n){return this[s].call(this,e,o,a,n)},configurable:!0})})}let q=class{constructor(e){e&&this.set(e)}set(e,r,s){const o=this;function a(l,i,c){const p=be(i);if(!p)throw new Error("header name must be a non-empty string");const 
R=h.findKey(o,p);(!R||o[R]===void 0||c===!0||c===void 0&&o[R]!==!1)&&(o[R||i]=xe(l))}const n=(l,i)=>h.forEach(l,(c,p)=>a(c,p,i));if(h.isPlainObject(e)||e instanceof this.constructor)n(e,r);else if(h.isString(e)&&(e=e.trim())&&!va(e))n(ga(e),r);else if(h.isObject(e)&&h.isIterable(e)){let l={},i,c;for(const p of e){if(!h.isArray(p))throw TypeError("Object iterator must return a key-value pair");l[c=p[0]]=(i=l[c])?h.isArray(i)?[...i,p[1]]:[i,p[1]]:p[1]}n(l,r)}else e!=null&&a(r,e,s);return this}get(e,r){if(e=be(e),e){const s=h.findKey(this,e);if(s){const o=this[s];if(!r)return o;if(r===!0)return xa(o);if(h.isFunction(r))return r.call(this,o,s);if(h.isRegExp(r))return r.exec(o);throw new TypeError("parser must be boolean|regexp|function")}}}has(e,r){if(e=be(e),e){const s=h.findKey(this,e);return!!(s&&this[s]!==void 0&&(!r||Qe(this,this[s],s,r)))}return!1}delete(e,r){const s=this;let o=!1;function a(n){if(n=be(n),n){const l=h.findKey(s,n);l&&(!r||Qe(s,s[l],l,r))&&(delete s[l],o=!0)}}return h.isArray(e)?e.forEach(a):a(e),o}clear(e){const r=Object.keys(this);let s=r.length,o=!1;for(;s--;){const a=r[s];(!e||Qe(this,this[a],a,e,!0))&&(delete this[a],o=!0)}return o}normalize(e){const r=this,s={};return h.forEach(this,(o,a)=>{const n=h.findKey(s,a);if(n){r[n]=xe(o),delete r[a];return}const l=e?Ca(a):String(a).trim();l!==a&&delete r[a],r[l]=xe(o),s[l]=!0}),this}concat(...e){return this.constructor.concat(this,...e)}toJSON(e){const r=Object.create(null);return h.forEach(this,(s,o)=>{s!=null&&s!==!1&&(r[o]=e&&h.isArray(s)?s.join(", "):s)}),r}[Symbol.iterator](){return Object.entries(this.toJSON())[Symbol.iterator]()}toString(){return Object.entries(this.toJSON()).map(([e,r])=>e+": "+r).join(` +`)}getSetCookie(){return this.get("set-cookie")||[]}get[Symbol.toStringTag](){return"AxiosHeaders"}static from(e){return e instanceof this?e:new this(e)}static concat(e,...r){const s=new this(e);return r.forEach(o=>s.set(o)),s}static accessor(e){const s=(this[It]=this[It]={accessors:{}}).accessors,o=this.prototype;function a(n){const l=be(n);s[l]||(Ua(o,n),s[l]=!0)}return h.isArray(e)?e.forEach(a):a(e),this}};q.accessor(["Content-Type","Content-Length","Accept","Accept-Encoding","User-Agent","Authorization"]);h.reduceDescriptors(q.prototype,({value:t},e)=>{let r=e[0].toUpperCase()+e.slice(1);return{get:()=>t,set(s){this[r]=s}}});h.freezeMethods(q);function Me(t,e){const r=this||fe,s=e||r,o=q.from(s.headers);let a=s.data;return h.forEach(t,function(l){a=l.call(r,a,o.normalize(),e?e.status:void 0)}),o.normalize(),a}function mr(t){return!!(t&&t.__CANCEL__)}function he(t,e,r){g.call(this,t??"canceled",g.ERR_CANCELED,e,r),this.name="CanceledError"}h.inherits(he,g,{__CANCEL__:!0});function Vr(t,e,r){const s=r.config.validateStatus;!r.status||!s||s(r.status)?t(r):e(new g("Request failed with status code "+r.status,[g.ERR_BAD_REQUEST,g.ERR_BAD_RESPONSE][Math.floor(r.status/100)-4],r.config,r.request,r))}function Ta(t){const e=/^([-+\w]{1,25})(:?\/\/|:)/.exec(t);return e&&e[1]||""}function Ba(t,e){t=t||10;const r=new Array(t),s=new Array(t);let o=0,a=0,n;return e=e!==void 0?e:1e3,function(i){const c=Date.now(),p=s[a];n||(n=c),r[o]=i,s[o]=c;let R=a,I=0;for(;R!==o;)I+=r[R++],R=R%t;if(o=(o+1)%t,o===a&&(a=(a+1)%t),c-n{r=p,o=null,a&&(clearTimeout(a),a=null),t(...c)};return[(...c)=>{const p=Date.now(),R=p-r;R>=s?n(c,p):(o=c,a||(a=setTimeout(()=>{a=null,n(o)},s-R)))},()=>o&&n(o)]}const Te=(t,e,r=3)=>{let s=0;const o=Ba(50,250);return Da(a=>{const n=a.loaded,l=a.lengthComputable?a.total:void 0,i=n-s,c=o(i),p=n<=l;s=n;const 
R={loaded:n,total:l,progress:l?n/l:void 0,bytes:i,rate:c||void 0,estimated:c&&l&&p?(l-n)/c:void 0,event:a,lengthComputable:l!=null,[e?"download":"upload"]:!0};t(R)},r)},Et=(t,e)=>{const r=t!=null;return[s=>e[0]({lengthComputable:r,total:t,loaded:s}),e[1]]},gt=t=>(...e)=>h.asap(()=>t(...e)),La=G.hasStandardBrowserEnv?((t,e)=>r=>(r=new URL(r,G.origin),t.protocol===r.protocol&&t.host===r.host&&(e||t.port===r.port)))(new URL(G.origin),G.navigator&&/(msie|trident)/i.test(G.navigator.userAgent)):()=>!0,ja=G.hasStandardBrowserEnv?{write(t,e,r,s,o,a){const n=[t+"="+encodeURIComponent(e)];h.isNumber(r)&&n.push("expires="+new Date(r).toGMTString()),h.isString(s)&&n.push("path="+s),h.isString(o)&&n.push("domain="+o),a===!0&&n.push("secure"),document.cookie=n.join("; ")},read(t){const e=document.cookie.match(new RegExp("(^|;\\s*)("+t+")=([^;]*)"));return e?decodeURIComponent(e[3]):null},remove(t){this.write(t,"",Date.now()-864e5)}}:{write(){},read(){return null},remove(){}};function ka(t){return/^([a-z][a-z\d+\-.]*:)?\/\//i.test(t)}function za(t,e){return e?t.replace(/\/?\/$/,"")+"/"+e.replace(/^\/+/,""):t}function Sr(t,e,r){let s=!ka(e);return t&&(s||r==!1)?za(t,e):e}const xt=t=>t instanceof q?{...t}:t;function ie(t,e){e=e||{};const r={};function s(c,p,R,I){return h.isPlainObject(c)&&h.isPlainObject(p)?h.merge.call({caseless:I},c,p):h.isPlainObject(p)?h.merge({},p):h.isArray(p)?p.slice():p}function o(c,p,R,I){if(h.isUndefined(p)){if(!h.isUndefined(c))return s(void 0,c,R,I)}else return s(c,p,R,I)}function a(c,p){if(!h.isUndefined(p))return s(void 0,p)}function n(c,p){if(h.isUndefined(p)){if(!h.isUndefined(c))return s(void 0,c)}else return s(void 0,p)}function l(c,p,R){if(R in e)return s(c,p);if(R in t)return s(void 0,c)}const i={url:a,method:a,data:a,baseURL:n,transformRequest:n,transformResponse:n,paramsSerializer:n,timeout:n,timeoutMessage:n,withCredentials:n,withXSRFToken:n,adapter:n,responseType:n,xsrfCookieName:n,xsrfHeaderName:n,onUploadProgress:n,onDownloadProgress:n,decompress:n,maxContentLength:n,maxBodyLength:n,beforeRedirect:n,transport:n,httpAgent:n,httpsAgent:n,cancelToken:n,socketPath:n,responseEncoding:n,validateStatus:l,headers:(c,p,R)=>o(xt(c),xt(p),R,!0)};return h.forEach(Object.keys({...t,...e}),function(p){const R=i[p]||o,I=R(t[p],e[p],p);h.isUndefined(I)&&R!==l||(r[p]=I)}),r}const Ar=t=>{const e=ie({},t);let{data:r,withXSRFToken:s,xsrfHeaderName:o,xsrfCookieName:a,headers:n,auth:l}=e;e.headers=n=q.from(n),e.url=Or(Sr(e.baseURL,e.url,e.allowAbsoluteUrls),t.params,t.paramsSerializer),l&&n.set("Authorization","Basic "+btoa((l.username||"")+":"+(l.password?unescape(encodeURIComponent(l.password)):"")));let i;if(h.isFormData(r)){if(G.hasStandardBrowserEnv||G.hasStandardBrowserWebWorkerEnv)n.setContentType(void 0);else if((i=n.getContentType())!==!1){const[c,...p]=i?i.split(";").map(R=>R.trim()).filter(Boolean):[];n.setContentType([c||"multipart/form-data",...p].join("; "))}}if(G.hasStandardBrowserEnv&&(s&&h.isFunction(s)&&(s=s(e)),s||s!==!1&&La(e.url))){const c=o&&a&&ja.read(a);c&&n.set(o,c)}return e},Fa=typeof XMLHttpRequest<"u",Ga=Fa&&function(t){return new Promise(function(r,s){const o=Ar(t);let a=o.data;const n=q.from(o.headers).normalize();let{responseType:l,onUploadProgress:i,onDownloadProgress:c}=o,p,R,I,T,f;function y(){T&&T(),f&&f(),o.cancelToken&&o.cancelToken.unsubscribe(p),o.signal&&o.signal.removeEventListener("abort",p)}let w=new XMLHttpRequest;w.open(o.method.toUpperCase(),o.url,!0),w.timeout=o.timeout;function C(){if(!w)return;const E=q.from("getAllResponseHeaders"in 
w&&w.getAllResponseHeaders()),U={data:!l||l==="text"||l==="json"?w.responseText:w.response,status:w.status,statusText:w.statusText,headers:E,config:t,request:w};Vr(function(B){r(B),y()},function(B){s(B),y()},U),w=null}"onloadend"in w?w.onloadend=C:w.onreadystatechange=function(){!w||w.readyState!==4||w.status===0&&!(w.responseURL&&w.responseURL.indexOf("file:")===0)||setTimeout(C)},w.onabort=function(){w&&(s(new g("Request aborted",g.ECONNABORTED,t,w)),w=null)},w.onerror=function(){s(new g("Network Error",g.ERR_NETWORK,t,w)),w=null},w.ontimeout=function(){let v=o.timeout?"timeout of "+o.timeout+"ms exceeded":"timeout exceeded";const U=o.transitional||Pr;o.timeoutErrorMessage&&(v=o.timeoutErrorMessage),s(new g(v,U.clarifyTimeoutError?g.ETIMEDOUT:g.ECONNABORTED,t,w)),w=null},a===void 0&&n.setContentType(null),"setRequestHeader"in w&&h.forEach(n.toJSON(),function(v,U){w.setRequestHeader(U,v)}),h.isUndefined(o.withCredentials)||(w.withCredentials=!!o.withCredentials),l&&l!=="json"&&(w.responseType=o.responseType),c&&([I,f]=Te(c,!0),w.addEventListener("progress",I)),i&&w.upload&&([R,T]=Te(i),w.upload.addEventListener("progress",R),w.upload.addEventListener("loadend",T)),(o.cancelToken||o.signal)&&(p=E=>{w&&(s(!E||E.type?new he(null,t,w):E),w.abort(),w=null)},o.cancelToken&&o.cancelToken.subscribe(p),o.signal&&(o.signal.aborted?p():o.signal.addEventListener("abort",p)));const D=Ta(o.url);if(D&&G.protocols.indexOf(D)===-1){s(new g("Unsupported protocol "+D+":",g.ERR_BAD_REQUEST,t));return}w.send(a||null)})},_a=(t,e)=>{const{length:r}=t=t?t.filter(Boolean):[];if(e||r){let s=new AbortController,o;const a=function(c){if(!o){o=!0,l();const p=c instanceof Error?c:this.reason;s.abort(p instanceof g?p:new he(p instanceof Error?p.message:p))}};let n=e&&setTimeout(()=>{n=null,a(new g(`timeout ${e} of ms exceeded`,g.ETIMEDOUT))},e);const l=()=>{t&&(n&&clearTimeout(n),n=null,t.forEach(c=>{c.unsubscribe?c.unsubscribe(a):c.removeEventListener("abort",a)}),t=null)};t.forEach(c=>c.addEventListener("abort",a));const{signal:i}=s;return i.unsubscribe=()=>h.asap(l),i}},qa=function*(t,e){let r=t.byteLength;if(r{const o=Ha(t,e);let a=0,n,l=i=>{n||(n=!0,s&&s(i))};return new ReadableStream({async pull(i){try{const{done:c,value:p}=await o.next();if(c){l(),i.close();return}let R=p.byteLength;if(r){let I=a+=R;r(I)}i.enqueue(new Uint8Array(p))}catch(c){throw l(c),c}},cancel(i){return l(i),o.return()}},{highWaterMark:2})},ze=typeof fetch=="function"&&typeof Request=="function"&&typeof Response=="function",Rr=ze&&typeof ReadableStream=="function",Na=ze&&(typeof TextEncoder=="function"?(t=>e=>t.encode(e))(new TextEncoder):async t=>new Uint8Array(await new Response(t).arrayBuffer())),fr=(t,...e)=>{try{return!!t(...e)}catch{return!1}},Wa=Rr&&fr(()=>{let t=!1;const e=new Request(G.origin,{body:new ReadableStream,method:"POST",get duplex(){return t=!0,"half"}}).headers.has("Content-Type");return t&&!e}),Ct=64*1024,tt=Rr&&fr(()=>h.isReadableStream(new Response("").body)),Be={stream:tt&&(t=>t.body)};ze&&(t=>{["text","arrayBuffer","blob","formData","stream"].forEach(e=>{!Be[e]&&(Be[e]=h.isFunction(t[e])?r=>r[e]():(r,s)=>{throw new g(`Response type '${e}' is not supported`,g.ERR_NOT_SUPPORT,s)})})})(new Response);const Qa=async t=>{if(t==null)return 0;if(h.isBlob(t))return t.size;if(h.isSpecCompliantForm(t))return(await new Request(G.origin,{method:"POST",body:t}).arrayBuffer()).byteLength;if(h.isArrayBufferView(t)||h.isArrayBuffer(t))return t.byteLength;if(h.isURLSearchParams(t)&&(t=t+""),h.isString(t))return(await 
Na(t)).byteLength},Ma=async(t,e)=>{const r=h.toFiniteNumber(t.getContentLength());return r??Qa(e)},Ja=ze&&(async t=>{let{url:e,method:r,data:s,signal:o,cancelToken:a,timeout:n,onDownloadProgress:l,onUploadProgress:i,responseType:c,headers:p,withCredentials:R="same-origin",fetchOptions:I}=Ar(t);c=c?(c+"").toLowerCase():"text";let T=_a([o,a&&a.toAbortSignal()],n),f;const y=T&&T.unsubscribe&&(()=>{T.unsubscribe()});let w;try{if(i&&Wa&&r!=="get"&&r!=="head"&&(w=await Ma(p,s))!==0){let U=new Request(e,{method:"POST",body:s,duplex:"half"}),L;if(h.isFormData(s)&&(L=U.headers.get("content-type"))&&p.setContentType(L),U.body){const[B,Z]=Et(w,Te(gt(i)));s=vt(U.body,Ct,B,Z)}}h.isString(R)||(R=R?"include":"omit");const C="credentials"in Request.prototype;f=new Request(e,{...I,signal:T,method:r.toUpperCase(),headers:p.normalize().toJSON(),body:s,duplex:"half",credentials:C?R:void 0});let D=await fetch(f,I);const E=tt&&(c==="stream"||c==="response");if(tt&&(l||E&&y)){const U={};["status","statusText","headers"].forEach(ce=>{U[ce]=D[ce]});const L=h.toFiniteNumber(D.headers.get("content-length")),[B,Z]=l&&Et(L,Te(gt(l),!0))||[];D=new Response(vt(D.body,Ct,B,()=>{Z&&Z(),y&&y()}),U)}c=c||"text";let v=await Be[h.findKey(Be,c)||"text"](D,t);return!E&&y&&y(),await new Promise((U,L)=>{Vr(U,L,{data:v,headers:q.from(D.headers),status:D.status,statusText:D.statusText,config:t,request:f})})}catch(C){throw y&&y(),C&&C.name==="TypeError"&&/Load failed|fetch/i.test(C.message)?Object.assign(new g("Network Error",g.ERR_NETWORK,t,f),{cause:C.cause||C}):g.from(C,C&&C.code,t,f)}}),rt={http:pa,xhr:Ga,fetch:Ja};h.forEach(rt,(t,e)=>{if(t){try{Object.defineProperty(t,"name",{value:e})}catch{}Object.defineProperty(t,"adapterName",{value:e})}});const Ut=t=>`- ${t}`,Ka=t=>h.isFunction(t)||t===null||t===!1,yr={getAdapter:t=>{t=h.isArray(t)?t:[t];const{length:e}=t;let r,s;const o={};for(let a=0;a`adapter ${l} `+(i===!1?"is not supported by the environment":"is not available in the build"));let n=e?a.length>1?`since : +`+a.map(Ut).join(` +`):" "+Ut(a[0]):"as no adapter specified";throw new g("There is no suitable adapter to dispatch the request "+n,"ERR_NOT_SUPPORT")}return s},adapters:rt};function Je(t){if(t.cancelToken&&t.cancelToken.throwIfRequested(),t.signal&&t.signal.aborted)throw new he(null,t)}function Tt(t){return Je(t),t.headers=q.from(t.headers),t.data=Me.call(t,t.transformRequest),["post","put","patch"].indexOf(t.method)!==-1&&t.headers.setContentType("application/x-www-form-urlencoded",!1),yr.getAdapter(t.adapter||fe.adapter)(t).then(function(s){return Je(t),s.data=Me.call(t,t.transformResponse,s),s.headers=q.from(s.headers),s},function(s){return mr(s)||(Je(t),s&&s.response&&(s.response.data=Me.call(t,t.transformResponse,s.response),s.response.headers=q.from(s.response.headers))),Promise.reject(s)})}const wr="1.11.0",Fe={};["object","boolean","number","function","string","symbol"].forEach((t,e)=>{Fe[t]=function(s){return typeof s===t||"a"+(e<1?"n ":" ")+t}});const Bt={};Fe.transitional=function(e,r,s){function o(a,n){return"[Axios v"+wr+"] Transitional option '"+a+"'"+n+(s?". "+s:"")}return(a,n,l)=>{if(e===!1)throw new g(o(n," has been removed"+(r?" 
in "+r:"")),g.ERR_DEPRECATED);return r&&!Bt[n]&&(Bt[n]=!0,console.warn(o(n," has been deprecated since v"+r+" and will be removed in the near future"))),e?e(a,n,l):!0}};Fe.spelling=function(e){return(r,s)=>(console.warn(`${s} is likely a misspelling of ${e}`),!0)};function Xa(t,e,r){if(typeof t!="object")throw new g("options must be an object",g.ERR_BAD_OPTION_VALUE);const s=Object.keys(t);let o=s.length;for(;o-- >0;){const a=s[o],n=e[a];if(n){const l=t[a],i=l===void 0||n(l,a,t);if(i!==!0)throw new g("option "+a+" must be "+i,g.ERR_BAD_OPTION_VALUE);continue}if(r!==!0)throw new g("Unknown option "+a,g.ERR_BAD_OPTION)}}const ve={assertOptions:Xa,validators:Fe},M=ve.validators;let le=class{constructor(e){this.defaults=e||{},this.interceptors={request:new wt,response:new wt}}async request(e,r){try{return await this._request(e,r)}catch(s){if(s instanceof Error){let o={};Error.captureStackTrace?Error.captureStackTrace(o):o=new Error;const a=o.stack?o.stack.replace(/^.+\n/,""):"";try{s.stack?a&&!String(s.stack).endsWith(a.replace(/^.+\n.+\n/,""))&&(s.stack+=` +`+a):s.stack=a}catch{}}throw s}}_request(e,r){typeof e=="string"?(r=r||{},r.url=e):r=e||{},r=ie(this.defaults,r);const{transitional:s,paramsSerializer:o,headers:a}=r;s!==void 0&&ve.assertOptions(s,{silentJSONParsing:M.transitional(M.boolean),forcedJSONParsing:M.transitional(M.boolean),clarifyTimeoutError:M.transitional(M.boolean)},!1),o!=null&&(h.isFunction(o)?r.paramsSerializer={serialize:o}:ve.assertOptions(o,{encode:M.function,serialize:M.function},!0)),r.allowAbsoluteUrls!==void 0||(this.defaults.allowAbsoluteUrls!==void 0?r.allowAbsoluteUrls=this.defaults.allowAbsoluteUrls:r.allowAbsoluteUrls=!0),ve.assertOptions(r,{baseUrl:M.spelling("baseURL"),withXsrfToken:M.spelling("withXSRFToken")},!0),r.method=(r.method||this.defaults.method||"get").toLowerCase();let n=a&&h.merge(a.common,a[r.method]);a&&h.forEach(["delete","get","head","post","put","patch","common"],f=>{delete a[f]}),r.headers=q.concat(n,a);const l=[];let i=!0;this.interceptors.request.forEach(function(y){typeof y.runWhen=="function"&&y.runWhen(r)===!1||(i=i&&y.synchronous,l.unshift(y.fulfilled,y.rejected))});const c=[];this.interceptors.response.forEach(function(y){c.push(y.fulfilled,y.rejected)});let p,R=0,I;if(!i){const f=[Tt.bind(this),void 0];for(f.unshift(...l),f.push(...c),I=f.length,p=Promise.resolve(r);R{if(!s._listeners)return;let a=s._listeners.length;for(;a-- >0;)s._listeners[a](o);s._listeners=null}),this.promise.then=o=>{let a;const n=new Promise(l=>{s.subscribe(l),a=l}).then(o);return n.cancel=function(){s.unsubscribe(a)},n},e(function(a,n,l){s.reason||(s.reason=new he(a,n,l),r(s.reason))})}throwIfRequested(){if(this.reason)throw this.reason}subscribe(e){if(this.reason){e(this.reason);return}this._listeners?this._listeners.push(e):this._listeners=[e]}unsubscribe(e){if(!this._listeners)return;const r=this._listeners.indexOf(e);r!==-1&&this._listeners.splice(r,1)}toAbortSignal(){const e=new AbortController,r=s=>{e.abort(s)};return this.subscribe(r),e.signal.unsubscribe=()=>this.unsubscribe(r),e.signal}static source(){let e;return{token:new Ir(function(o){e=o}),cancel:e}}};function Za(t){return function(r){return t.apply(null,r)}}function eo(t){return h.isObject(t)&&t.isAxiosError===!0}const 
st={Continue:100,SwitchingProtocols:101,Processing:102,EarlyHints:103,Ok:200,Created:201,Accepted:202,NonAuthoritativeInformation:203,NoContent:204,ResetContent:205,PartialContent:206,MultiStatus:207,AlreadyReported:208,ImUsed:226,MultipleChoices:300,MovedPermanently:301,Found:302,SeeOther:303,NotModified:304,UseProxy:305,Unused:306,TemporaryRedirect:307,PermanentRedirect:308,BadRequest:400,Unauthorized:401,PaymentRequired:402,Forbidden:403,NotFound:404,MethodNotAllowed:405,NotAcceptable:406,ProxyAuthenticationRequired:407,RequestTimeout:408,Conflict:409,Gone:410,LengthRequired:411,PreconditionFailed:412,PayloadTooLarge:413,UriTooLong:414,UnsupportedMediaType:415,RangeNotSatisfiable:416,ExpectationFailed:417,ImATeapot:418,MisdirectedRequest:421,UnprocessableEntity:422,Locked:423,FailedDependency:424,TooEarly:425,UpgradeRequired:426,PreconditionRequired:428,TooManyRequests:429,RequestHeaderFieldsTooLarge:431,UnavailableForLegalReasons:451,InternalServerError:500,NotImplemented:501,BadGateway:502,ServiceUnavailable:503,GatewayTimeout:504,HttpVersionNotSupported:505,VariantAlsoNegotiates:506,InsufficientStorage:507,LoopDetected:508,NotExtended:510,NetworkAuthenticationRequired:511};Object.entries(st).forEach(([t,e])=>{st[e]=t});function Er(t){const e=new le(t),r=rr(le.prototype.request,e);return h.extend(r,le.prototype,e,{allOwnKeys:!0}),h.extend(r,e,null,{allOwnKeys:!0}),r.create=function(o){return Er(ie(t,o))},r}const u=Er(fe);u.Axios=le;u.CanceledError=he;u.CancelToken=Ya;u.isCancel=mr;u.VERSION=wr;u.toFormData=ke;u.AxiosError=g;u.Cancel=u.CanceledError;u.all=function(e){return Promise.all(e)};u.spread=Za;u.isAxiosError=eo;u.mergeConfig=ie;u.AxiosHeaders=q;u.formToJSON=t=>br(h.isHTMLForm(t)?new FormData(t):t);u.getAdapter=yr.getAdapter;u.HttpStatusCode=st;u.default=u;const{Axios:jo,AxiosError:ko,CanceledError:zo,isCancel:Fo,CancelToken:Go,VERSION:_o,all:qo,Cancel:Ho,isAxiosError:$o,spread:No,toFormData:Wo,AxiosHeaders:Qo,HttpStatusCode:Mo,formToJSON:Jo,getAdapter:Ko,mergeConfig:Xo}=u,O="/api/v1".replace(/\/+$/,"");class H{constructor(e,r=O,s=u){this.basePath=r,this.axios=s,e&&(this.configuration=e,this.basePath=e.basePath??r)}configuration}class to extends Error{constructor(e,r){super(r),this.field=e,this.name="RequiredError"}}const P={},b="https://example.com",d=function(t,e,r){if(r==null)throw new to(e,`Required parameter ${e} was null or undefined when calling ${t}.`)},m=async function(t,e,r){if(r&&r.apiKey){const s=typeof r.apiKey=="function"?await r.apiKey(e):await r.apiKey;t[e]=s}};function at(t,e,r=""){e!=null&&(typeof e=="object"?Array.isArray(e)?e.forEach(s=>at(t,s,r)):Object.keys(e).forEach(s=>at(t,e[s],`${r}${r!==""?".":""}${s}`)):t.has(r)?t.append(r,e):t.set(r,e))}const V=function(t,...e){const r=new URLSearchParams(t.search);at(r,e),t.search=r.toString()},x=function(t,e,r){const s=typeof t!="string";return(s&&r&&r.isJsonMime?r.isJsonMime(e.headers["Content-Type"]):s)?JSON.stringify(t!==void 0?t:{}):t||""},S=function(t){return t.pathname+t.search+t.hash},A=function(t,e,r,s){return(o=e,a=r)=>{const n={...t.options,url:(o.defaults.baseURL?"":s?.basePath??a)+t.url};return o.request(n)}},ro=function(t){return{updateController:async(e,r={})=>{d("updateController","body",e);const s="/controller",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"PUT",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}}}},so=function(t){const 
e=ro(t);return{async updateController(r,s){const o=await e.updateController(r,s),a=t?.serverIndex??0,n=P["ControllerApi.updateController"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)}}};class Dt extends H{updateController(e,r){return so(this.configuration).updateController(e,r).then(s=>s(this.axios,this.basePath))}}const ao=function(t){return{controllerInfo:async(e={})=>{const r="/controller-info",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}}}},oo=function(t){const e=ao(t);return{async controllerInfo(r){const s=await e.controllerInfo(r),o=t?.serverIndex??0,a=P["ControllerInfoApi.controllerInfo"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)}}};class Lt extends H{controllerInfo(e){return oo(this.configuration).controllerInfo(e).then(r=>r(this.axios,this.basePath))}}const no=function(t){return{createCredentials:async(e,r={})=>{d("createCredentials","body",e);const s="/github/credentials",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},createGiteaCredentials:async(e,r={})=>{d("createGiteaCredentials","body",e);const s="/gitea/credentials",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},deleteCredentials:async(e,r={})=>{d("deleteCredentials","id",e);const s="/github/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},deleteGiteaCredentials:async(e,r={})=>{d("deleteGiteaCredentials","id",e);const s="/gitea/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getCredentials:async(e,r={})=>{d("getCredentials","id",e);const s="/github/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getGiteaCredentials:async(e,r={})=>{d("getGiteaCredentials","id",e);const s="/gitea/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listCredentials:async(e={})=>{const r="/github/credentials",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},listGiteaCredentials:async(e={})=>{const r="/gitea/credentials",s=new URL(r,b);let 
o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},updateCredentials:async(e,r,s={})=>{d("updateCredentials","id",e),d("updateCredentials","body",r);const o="/github/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateGiteaCredentials:async(e,r,s={})=>{d("updateGiteaCredentials","id",e),d("updateGiteaCredentials","body",r);const o="/gitea/credentials/{id}".replace("{id}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}}}},J=function(t){const e=no(t);return{async createCredentials(r,s){const o=await e.createCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.createCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async createGiteaCredentials(r,s){const o=await e.createGiteaCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.createGiteaCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteCredentials(r,s){const o=await e.deleteCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.deleteCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteGiteaCredentials(r,s){const o=await e.deleteGiteaCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.deleteGiteaCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getCredentials(r,s){const o=await e.getCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.getCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getGiteaCredentials(r,s){const o=await e.getGiteaCredentials(r,s),a=t?.serverIndex??0,n=P["CredentialsApi.getGiteaCredentials"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listCredentials(r){const s=await e.listCredentials(r),o=t?.serverIndex??0,a=P["CredentialsApi.listCredentials"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async listGiteaCredentials(r){const s=await e.listGiteaCredentials(r),o=t?.serverIndex??0,a=P["CredentialsApi.listGiteaCredentials"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async updateCredentials(r,s,o){const a=await e.updateCredentials(r,s,o),n=t?.serverIndex??0,l=P["CredentialsApi.updateCredentials"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateGiteaCredentials(r,s,o){const a=await e.updateGiteaCredentials(r,s,o),n=t?.serverIndex??0,l=P["CredentialsApi.updateGiteaCredentials"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)}}};class jt extends H{createCredentials(e,r){return J(this.configuration).createCredentials(e,r).then(s=>s(this.axios,this.basePath))}createGiteaCredentials(e,r){return J(this.configuration).createGiteaCredentials(e,r).then(s=>s(this.axios,this.basePath))}deleteCredentials(e,r){return J(this.configuration).deleteCredentials(e,r).then(s=>s(this.axios,this.basePath))}deleteGiteaCredentials(e,r){return J(this.configuration).deleteGiteaCredentials(e,r).then(s=>s(this.axios,this.basePath))}getCredentials(e,r){return J(this.configuration).getCredentials(e,r).then(s=>s(this.axios,this.basePath))}getGiteaCredentials(e,r){return 
J(this.configuration).getGiteaCredentials(e,r).then(s=>s(this.axios,this.basePath))}listCredentials(e){return J(this.configuration).listCredentials(e).then(r=>r(this.axios,this.basePath))}listGiteaCredentials(e){return J(this.configuration).listGiteaCredentials(e).then(r=>r(this.axios,this.basePath))}updateCredentials(e,r,s){return J(this.configuration).updateCredentials(e,r,s).then(o=>o(this.axios,this.basePath))}updateGiteaCredentials(e,r,s){return J(this.configuration).updateGiteaCredentials(e,r,s).then(o=>o(this.axios,this.basePath))}}const lo=function(t){return{createGiteaEndpoint:async(e,r={})=>{d("createGiteaEndpoint","body",e);const s="/gitea/endpoints",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},createGithubEndpoint:async(e,r={})=>{d("createGithubEndpoint","body",e);const s="/github/endpoints",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},deleteGiteaEndpoint:async(e,r={})=>{d("deleteGiteaEndpoint","name",e);const s="/gitea/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},deleteGithubEndpoint:async(e,r={})=>{d("deleteGithubEndpoint","name",e);const s="/github/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getGiteaEndpoint:async(e,r={})=>{d("getGiteaEndpoint","name",e);const s="/gitea/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getGithubEndpoint:async(e,r={})=>{d("getGithubEndpoint","name",e);const s="/github/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listGiteaEndpoints:async(e={})=>{const r="/gitea/endpoints",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},listGithubEndpoints:async(e={})=>{const r="/github/endpoints",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},updateGiteaEndpoint:async(e,r,s={})=>{d("updateGiteaEndpoint","name",e),d("updateGiteaEndpoint","body",r);const o="/gitea/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),a=new URL(o,b);let 
n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateGithubEndpoint:async(e,r,s={})=>{d("updateGithubEndpoint","name",e),d("updateGithubEndpoint","body",r);const o="/github/endpoints/{name}".replace("{name}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}}}},K=function(t){const e=lo(t);return{async createGiteaEndpoint(r,s){const o=await e.createGiteaEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.createGiteaEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async createGithubEndpoint(r,s){const o=await e.createGithubEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.createGithubEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteGiteaEndpoint(r,s){const o=await e.deleteGiteaEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.deleteGiteaEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteGithubEndpoint(r,s){const o=await e.deleteGithubEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.deleteGithubEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getGiteaEndpoint(r,s){const o=await e.getGiteaEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.getGiteaEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getGithubEndpoint(r,s){const o=await e.getGithubEndpoint(r,s),a=t?.serverIndex??0,n=P["EndpointsApi.getGithubEndpoint"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listGiteaEndpoints(r){const s=await e.listGiteaEndpoints(r),o=t?.serverIndex??0,a=P["EndpointsApi.listGiteaEndpoints"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async listGithubEndpoints(r){const s=await e.listGithubEndpoints(r),o=t?.serverIndex??0,a=P["EndpointsApi.listGithubEndpoints"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async updateGiteaEndpoint(r,s,o){const a=await e.updateGiteaEndpoint(r,s,o),n=t?.serverIndex??0,l=P["EndpointsApi.updateGiteaEndpoint"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateGithubEndpoint(r,s,o){const a=await e.updateGithubEndpoint(r,s,o),n=t?.serverIndex??0,l=P["EndpointsApi.updateGithubEndpoint"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)}}};class kt extends H{createGiteaEndpoint(e,r){return K(this.configuration).createGiteaEndpoint(e,r).then(s=>s(this.axios,this.basePath))}createGithubEndpoint(e,r){return K(this.configuration).createGithubEndpoint(e,r).then(s=>s(this.axios,this.basePath))}deleteGiteaEndpoint(e,r){return K(this.configuration).deleteGiteaEndpoint(e,r).then(s=>s(this.axios,this.basePath))}deleteGithubEndpoint(e,r){return K(this.configuration).deleteGithubEndpoint(e,r).then(s=>s(this.axios,this.basePath))}getGiteaEndpoint(e,r){return K(this.configuration).getGiteaEndpoint(e,r).then(s=>s(this.axios,this.basePath))}getGithubEndpoint(e,r){return K(this.configuration).getGithubEndpoint(e,r).then(s=>s(this.axios,this.basePath))}listGiteaEndpoints(e){return K(this.configuration).listGiteaEndpoints(e).then(r=>r(this.axios,this.basePath))}listGithubEndpoints(e){return K(this.configuration).listGithubEndpoints(e).then(r=>r(this.axios,this.basePath))}updateGiteaEndpoint(e,r,s){return 
K(this.configuration).updateGiteaEndpoint(e,r,s).then(o=>o(this.axios,this.basePath))}updateGithubEndpoint(e,r,s){return K(this.configuration).updateGithubEndpoint(e,r,s).then(o=>o(this.axios,this.basePath))}}const io=function(t){return{createEnterprise:async(e,r={})=>{d("createEnterprise","body",e);const s="/enterprises",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},createEnterprisePool:async(e,r,s={})=>{d("createEnterprisePool","enterpriseID",e),d("createEnterprisePool","body",r);const o="/enterprises/{enterpriseID}/pools".replace("{enterpriseID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createEnterpriseScaleSet:async(e,r,s={})=>{d("createEnterpriseScaleSet","enterpriseID",e),d("createEnterpriseScaleSet","body",r);const o="/enterprises/{enterpriseID}/scalesets".replace("{enterpriseID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},deleteEnterprise:async(e,r={})=>{d("deleteEnterprise","enterpriseID",e);const s="/enterprises/{enterpriseID}".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},deleteEnterprisePool:async(e,r,s={})=>{d("deleteEnterprisePool","enterpriseID",e),d("deleteEnterprisePool","poolID",r);const o="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getEnterprise:async(e,r={})=>{d("getEnterprise","enterpriseID",e);const s="/enterprises/{enterpriseID}".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getEnterprisePool:async(e,r,s={})=>{d("getEnterprisePool","enterpriseID",e),d("getEnterprisePool","poolID",r);const o="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},listEnterpriseInstances:async(e,r={})=>{d("listEnterpriseInstances","enterpriseID",e);const s="/enterprises/{enterpriseID}/instances".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let 
a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listEnterprisePools:async(e,r={})=>{d("listEnterprisePools","enterpriseID",e);const s="/enterprises/{enterpriseID}/pools".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listEnterpriseScaleSets:async(e,r={})=>{d("listEnterpriseScaleSets","enterpriseID",e);const s="/enterprises/{enterpriseID}/scalesets".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listEnterprises:async(e,r,s={})=>{const o="/enterprises",a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),e!==void 0&&(c.name=e),r!==void 0&&(c.endpoint=r),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},updateEnterprise:async(e,r,s={})=>{d("updateEnterprise","enterpriseID",e),d("updateEnterprise","body",r);const o="/enterprises/{enterpriseID}".replace("{enterpriseID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateEnterprisePool:async(e,r,s,o={})=>{d("updateEnterprisePool","enterpriseID",e),d("updateEnterprisePool","poolID",r),d("updateEnterprisePool","body",s);const a="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}}}},$=function(t){const e=io(t);return{async createEnterprise(r,s){const o=await e.createEnterprise(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.createEnterprise"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async createEnterprisePool(r,s,o){const a=await e.createEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.createEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createEnterpriseScaleSet(r,s,o){const a=await e.createEnterpriseScaleSet(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.createEnterpriseScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteEnterprise(r,s){const o=await e.deleteEnterprise(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.deleteEnterprise"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteEnterprisePool(r,s,o){const a=await e.deleteEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.deleteEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getEnterprise(r,s){const o=await e.getEnterprise(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.getEnterprise"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getEnterprisePool(r,s,o){const a=await 
e.getEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.getEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async listEnterpriseInstances(r,s){const o=await e.listEnterpriseInstances(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.listEnterpriseInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listEnterprisePools(r,s){const o=await e.listEnterprisePools(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.listEnterprisePools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listEnterpriseScaleSets(r,s){const o=await e.listEnterpriseScaleSets(r,s),a=t?.serverIndex??0,n=P["EnterprisesApi.listEnterpriseScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listEnterprises(r,s,o){const a=await e.listEnterprises(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.listEnterprises"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateEnterprise(r,s,o){const a=await e.updateEnterprise(r,s,o),n=t?.serverIndex??0,l=P["EnterprisesApi.updateEnterprise"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateEnterprisePool(r,s,o,a){const n=await e.updateEnterprisePool(r,s,o,a),l=t?.serverIndex??0,i=P["EnterprisesApi.updateEnterprisePool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)}}};class zt extends H{createEnterprise(e,r){return $(this.configuration).createEnterprise(e,r).then(s=>s(this.axios,this.basePath))}createEnterprisePool(e,r,s){return $(this.configuration).createEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}createEnterpriseScaleSet(e,r,s){return $(this.configuration).createEnterpriseScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}deleteEnterprise(e,r){return $(this.configuration).deleteEnterprise(e,r).then(s=>s(this.axios,this.basePath))}deleteEnterprisePool(e,r,s){return $(this.configuration).deleteEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}getEnterprise(e,r){return $(this.configuration).getEnterprise(e,r).then(s=>s(this.axios,this.basePath))}getEnterprisePool(e,r,s){return $(this.configuration).getEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}listEnterpriseInstances(e,r){return $(this.configuration).listEnterpriseInstances(e,r).then(s=>s(this.axios,this.basePath))}listEnterprisePools(e,r){return $(this.configuration).listEnterprisePools(e,r).then(s=>s(this.axios,this.basePath))}listEnterpriseScaleSets(e,r){return $(this.configuration).listEnterpriseScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listEnterprises(e,r,s){return $(this.configuration).listEnterprises(e,r,s).then(o=>o(this.axios,this.basePath))}updateEnterprise(e,r,s){return $(this.configuration).updateEnterprise(e,r,s).then(o=>o(this.axios,this.basePath))}updateEnterprisePool(e,r,s,o){return $(this.configuration).updateEnterprisePool(e,r,s,o).then(a=>a(this.axios,this.basePath))}}const co=function(t){return{firstRun:async(e,r={})=>{d("firstRun","body",e);const s="/first-run",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}}}},po=function(t){const e=co(t);return{async firstRun(r,s){const o=await e.firstRun(r,s),a=t?.serverIndex??0,n=P["FirstRunApi.firstRun"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)}}};class Ft extends H{firstRun(e,r){return po(this.configuration).firstRun(e,r).then(s=>s(this.axios,this.basePath))}}const ho=function(t){return{getOrgWebhookInfo:async(e,r={})=>{d("getOrgWebhookInfo","orgID",e);const 
s="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getRepoWebhookInfo:async(e,r={})=>{d("getRepoWebhookInfo","repoID",e);const s="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},installOrgWebhook:async(e,r,s={})=>{d("installOrgWebhook","orgID",e),d("installOrgWebhook","body",r);const o="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},installRepoWebhook:async(e,r,s={})=>{d("installRepoWebhook","repoID",e),d("installRepoWebhook","body",r);const o="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},uninstallOrgWebhook:async(e,r={})=>{d("uninstallOrgWebhook","orgID",e);const s="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},uninstallRepoWebhook:async(e,r={})=>{d("uninstallRepoWebhook","repoID",e);const s="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}}}},pe=function(t){const e=ho(t);return{async getOrgWebhookInfo(r,s){const o=await e.getOrgWebhookInfo(r,s),a=t?.serverIndex??0,n=P["HooksApi.getOrgWebhookInfo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getRepoWebhookInfo(r,s){const o=await e.getRepoWebhookInfo(r,s),a=t?.serverIndex??0,n=P["HooksApi.getRepoWebhookInfo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async installOrgWebhook(r,s,o){const a=await e.installOrgWebhook(r,s,o),n=t?.serverIndex??0,l=P["HooksApi.installOrgWebhook"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async installRepoWebhook(r,s,o){const a=await e.installRepoWebhook(r,s,o),n=t?.serverIndex??0,l=P["HooksApi.installRepoWebhook"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async uninstallOrgWebhook(r,s){const o=await e.uninstallOrgWebhook(r,s),a=t?.serverIndex??0,n=P["HooksApi.uninstallOrgWebhook"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async uninstallRepoWebhook(r,s){const o=await e.uninstallRepoWebhook(r,s),a=t?.serverIndex??0,n=P["HooksApi.uninstallRepoWebhook"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)}}};class Gt extends H{getOrgWebhookInfo(e,r){return 
pe(this.configuration).getOrgWebhookInfo(e,r).then(s=>s(this.axios,this.basePath))}getRepoWebhookInfo(e,r){return pe(this.configuration).getRepoWebhookInfo(e,r).then(s=>s(this.axios,this.basePath))}installOrgWebhook(e,r,s){return pe(this.configuration).installOrgWebhook(e,r,s).then(o=>o(this.axios,this.basePath))}installRepoWebhook(e,r,s){return pe(this.configuration).installRepoWebhook(e,r,s).then(o=>o(this.axios,this.basePath))}uninstallOrgWebhook(e,r){return pe(this.configuration).uninstallOrgWebhook(e,r).then(s=>s(this.axios,this.basePath))}uninstallRepoWebhook(e,r){return pe(this.configuration).uninstallRepoWebhook(e,r).then(s=>s(this.axios,this.basePath))}}const uo=function(t){return{deleteInstance:async(e,r,s,o={})=>{d("deleteInstance","instanceName",e);const a="/instances/{instanceName}".replace("{instanceName}",encodeURIComponent(String(e))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"DELETE",...l,...o},c={},p={};await m(c,"Authorization",t),r!==void 0&&(p.forceRemove=r),s!==void 0&&(p.bypassGHUnauthorized=s),V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},{url:S(n),options:i}},getInstance:async(e,r={})=>{d("getInstance","instanceName",e);const s="/instances/{instanceName}".replace("{instanceName}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listEnterpriseInstances:async(e,r={})=>{d("listEnterpriseInstances","enterpriseID",e);const s="/enterprises/{enterpriseID}/instances".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listInstances:async(e={})=>{const r="/instances",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},listOrgInstances:async(e,r={})=>{d("listOrgInstances","orgID",e);const s="/organizations/{orgID}/instances".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listPoolInstances:async(e,r={})=>{d("listPoolInstances","poolID",e);const s="/pools/{poolID}/instances".replace("{poolID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listRepoInstances:async(e,r={})=>{d("listRepoInstances","repoID",e);const s="/repositories/{repoID}/instances".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listScaleSetInstances:async(e,r={})=>{d("listScaleSetInstances","scalesetID",e);const s="/scalesets/{scalesetID}/instances".replace("{scalesetID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const 
n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}}}},ae=function(t){const e=uo(t);return{async deleteInstance(r,s,o,a){const n=await e.deleteInstance(r,s,o,a),l=t?.serverIndex??0,i=P["InstancesApi.deleteInstance"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)},async getInstance(r,s){const o=await e.getInstance(r,s),a=t?.serverIndex??0,n=P["InstancesApi.getInstance"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listEnterpriseInstances(r,s){const o=await e.listEnterpriseInstances(r,s),a=t?.serverIndex??0,n=P["InstancesApi.listEnterpriseInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listInstances(r){const s=await e.listInstances(r),o=t?.serverIndex??0,a=P["InstancesApi.listInstances"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async listOrgInstances(r,s){const o=await e.listOrgInstances(r,s),a=t?.serverIndex??0,n=P["InstancesApi.listOrgInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listPoolInstances(r,s){const o=await e.listPoolInstances(r,s),a=t?.serverIndex??0,n=P["InstancesApi.listPoolInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listRepoInstances(r,s){const o=await e.listRepoInstances(r,s),a=t?.serverIndex??0,n=P["InstancesApi.listRepoInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listScaleSetInstances(r,s){const o=await e.listScaleSetInstances(r,s),a=t?.serverIndex??0,n=P["InstancesApi.listScaleSetInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)}}};class _t extends H{deleteInstance(e,r,s,o){return ae(this.configuration).deleteInstance(e,r,s,o).then(a=>a(this.axios,this.basePath))}getInstance(e,r){return ae(this.configuration).getInstance(e,r).then(s=>s(this.axios,this.basePath))}listEnterpriseInstances(e,r){return ae(this.configuration).listEnterpriseInstances(e,r).then(s=>s(this.axios,this.basePath))}listInstances(e){return ae(this.configuration).listInstances(e).then(r=>r(this.axios,this.basePath))}listOrgInstances(e,r){return ae(this.configuration).listOrgInstances(e,r).then(s=>s(this.axios,this.basePath))}listPoolInstances(e,r){return ae(this.configuration).listPoolInstances(e,r).then(s=>s(this.axios,this.basePath))}listRepoInstances(e,r){return ae(this.configuration).listRepoInstances(e,r).then(s=>s(this.axios,this.basePath))}listScaleSetInstances(e,r){return ae(this.configuration).listScaleSetInstances(e,r).then(s=>s(this.axios,this.basePath))}}const Oo=function(t){return{login:async(e,r={})=>{d("login","body",e);const s="/auth/login",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}}}},Po=function(t){const e=Oo(t);return{async login(r,s){const o=await e.login(r,s),a=t?.serverIndex??0,n=P["LoginApi.login"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)}}};class qt extends H{login(e,r){return Po(this.configuration).login(e,r).then(s=>s(this.axios,this.basePath))}}const bo=function(t){return{createOrg:async(e,r={})=>{d("createOrg","body",e);const s="/organizations",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return 
n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},createOrgPool:async(e,r,s={})=>{d("createOrgPool","orgID",e),d("createOrgPool","body",r);const o="/organizations/{orgID}/pools".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createOrgScaleSet:async(e,r,s={})=>{d("createOrgScaleSet","orgID",e),d("createOrgScaleSet","body",r);const o="/organizations/{orgID}/scalesets".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},deleteOrg:async(e,r,s={})=>{d("deleteOrg","orgID",e);const o="/organizations/{orgID}".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),r!==void 0&&(c.keepWebhook=r),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},deleteOrgPool:async(e,r,s={})=>{d("deleteOrgPool","orgID",e),d("deleteOrgPool","poolID",r);const o="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getOrg:async(e,r={})=>{d("getOrg","orgID",e);const s="/organizations/{orgID}".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getOrgPool:async(e,r,s={})=>{d("getOrgPool","orgID",e),d("getOrgPool","poolID",r);const o="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getOrgWebhookInfo:async(e,r={})=>{d("getOrgWebhookInfo","orgID",e);const s="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},installOrgWebhook:async(e,r,s={})=>{d("installOrgWebhook","orgID",e),d("installOrgWebhook","body",r);const o="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},listOrgInstances:async(e,r={})=>{d("listOrgInstances","orgID",e);const 
s="/organizations/{orgID}/instances".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listOrgPools:async(e,r={})=>{d("listOrgPools","orgID",e);const s="/organizations/{orgID}/pools".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listOrgScaleSets:async(e,r={})=>{d("listOrgScaleSets","orgID",e);const s="/organizations/{orgID}/scalesets".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listOrgs:async(e,r,s={})=>{const o="/organizations",a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),e!==void 0&&(c.name=e),r!==void 0&&(c.endpoint=r),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},uninstallOrgWebhook:async(e,r={})=>{d("uninstallOrgWebhook","orgID",e);const s="/organizations/{orgID}/webhook".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},updateOrg:async(e,r,s={})=>{d("updateOrg","orgID",e),d("updateOrg","body",r);const o="/organizations/{orgID}".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateOrgPool:async(e,r,s,o={})=>{d("updateOrgPool","orgID",e),d("updateOrgPool","poolID",r),d("updateOrgPool","body",s);const a="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}}}},z=function(t){const e=bo(t);return{async createOrg(r,s){const o=await e.createOrg(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.createOrg"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async createOrgPool(r,s,o){const a=await e.createOrgPool(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.createOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createOrgScaleSet(r,s,o){const a=await e.createOrgScaleSet(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.createOrgScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteOrg(r,s,o){const a=await e.deleteOrg(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.deleteOrg"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteOrgPool(r,s,o){const a=await 
e.deleteOrgPool(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.deleteOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getOrg(r,s){const o=await e.getOrg(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.getOrg"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getOrgPool(r,s,o){const a=await e.getOrgPool(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.getOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getOrgWebhookInfo(r,s){const o=await e.getOrgWebhookInfo(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.getOrgWebhookInfo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async installOrgWebhook(r,s,o){const a=await e.installOrgWebhook(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.installOrgWebhook"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async listOrgInstances(r,s){const o=await e.listOrgInstances(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.listOrgInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listOrgPools(r,s){const o=await e.listOrgPools(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.listOrgPools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listOrgScaleSets(r,s){const o=await e.listOrgScaleSets(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.listOrgScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listOrgs(r,s,o){const a=await e.listOrgs(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.listOrgs"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async uninstallOrgWebhook(r,s){const o=await e.uninstallOrgWebhook(r,s),a=t?.serverIndex??0,n=P["OrganizationsApi.uninstallOrgWebhook"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async updateOrg(r,s,o){const a=await e.updateOrg(r,s,o),n=t?.serverIndex??0,l=P["OrganizationsApi.updateOrg"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateOrgPool(r,s,o,a){const n=await e.updateOrgPool(r,s,o,a),l=t?.serverIndex??0,i=P["OrganizationsApi.updateOrgPool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)}}};class Ht extends H{createOrg(e,r){return z(this.configuration).createOrg(e,r).then(s=>s(this.axios,this.basePath))}createOrgPool(e,r,s){return z(this.configuration).createOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}createOrgScaleSet(e,r,s){return z(this.configuration).createOrgScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}deleteOrg(e,r,s){return z(this.configuration).deleteOrg(e,r,s).then(o=>o(this.axios,this.basePath))}deleteOrgPool(e,r,s){return z(this.configuration).deleteOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}getOrg(e,r){return z(this.configuration).getOrg(e,r).then(s=>s(this.axios,this.basePath))}getOrgPool(e,r,s){return z(this.configuration).getOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}getOrgWebhookInfo(e,r){return z(this.configuration).getOrgWebhookInfo(e,r).then(s=>s(this.axios,this.basePath))}installOrgWebhook(e,r,s){return z(this.configuration).installOrgWebhook(e,r,s).then(o=>o(this.axios,this.basePath))}listOrgInstances(e,r){return z(this.configuration).listOrgInstances(e,r).then(s=>s(this.axios,this.basePath))}listOrgPools(e,r){return z(this.configuration).listOrgPools(e,r).then(s=>s(this.axios,this.basePath))}listOrgScaleSets(e,r){return z(this.configuration).listOrgScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listOrgs(e,r,s){return z(this.configuration).listOrgs(e,r,s).then(o=>o(this.axios,this.basePath))}uninstallOrgWebhook(e,r){return z(this.configuration).uninstallOrgWebhook(e,r).then(s=>s(this.axios,this.basePath))}updateOrg(e,r,s){return z(this.configuration).updateOrg(e,r,s).then(o=>o(this.axios,this.basePath))}updateOrgPool(e,r,s,o){return 
z(this.configuration).updateOrgPool(e,r,s,o).then(a=>a(this.axios,this.basePath))}}const mo=function(t){return{createEnterprisePool:async(e,r,s={})=>{d("createEnterprisePool","enterpriseID",e),d("createEnterprisePool","body",r);const o="/enterprises/{enterpriseID}/pools".replace("{enterpriseID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createOrgPool:async(e,r,s={})=>{d("createOrgPool","orgID",e),d("createOrgPool","body",r);const o="/organizations/{orgID}/pools".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createRepoPool:async(e,r,s={})=>{d("createRepoPool","repoID",e),d("createRepoPool","body",r);const o="/repositories/{repoID}/pools".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},deleteEnterprisePool:async(e,r,s={})=>{d("deleteEnterprisePool","enterpriseID",e),d("deleteEnterprisePool","poolID",r);const o="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},deleteOrgPool:async(e,r,s={})=>{d("deleteOrgPool","orgID",e),d("deleteOrgPool","poolID",r);const o="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},deletePool:async(e,r={})=>{d("deletePool","poolID",e);const s="/pools/{poolID}".replace("{poolID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},deleteRepoPool:async(e,r,s={})=>{d("deleteRepoPool","repoID",e),d("deleteRepoPool","poolID",r);const o="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getEnterprisePool:async(e,r,s={})=>{d("getEnterprisePool","enterpriseID",e),d("getEnterprisePool","poolID",r);const o="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new 
URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getOrgPool:async(e,r,s={})=>{d("getOrgPool","orgID",e),d("getOrgPool","poolID",r);const o="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getPool:async(e,r={})=>{d("getPool","poolID",e);const s="/pools/{poolID}".replace("{poolID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getRepoPool:async(e,r,s={})=>{d("getRepoPool","repoID",e),d("getRepoPool","poolID",r);const o="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},listEnterprisePools:async(e,r={})=>{d("listEnterprisePools","enterpriseID",e);const s="/enterprises/{enterpriseID}/pools".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listOrgPools:async(e,r={})=>{d("listOrgPools","orgID",e);const s="/organizations/{orgID}/pools".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listPools:async(e={})=>{const r="/pools",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},listRepoPools:async(e,r={})=>{d("listRepoPools","repoID",e);const s="/repositories/{repoID}/pools".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},updateEnterprisePool:async(e,r,s,o={})=>{d("updateEnterprisePool","enterpriseID",e),d("updateEnterprisePool","poolID",r),d("updateEnterprisePool","body",s);const a="/enterprises/{enterpriseID}/pools/{poolID}".replace("{enterpriseID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}},updateOrgPool:async(e,r,s,o={})=>{d("updateOrgPool","orgID",e),d("updateOrgPool","poolID",r),d("updateOrgPool","body",s);const 
a="/organizations/{orgID}/pools/{poolID}".replace("{orgID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}},updatePool:async(e,r,s={})=>{d("updatePool","poolID",e),d("updatePool","body",r);const o="/pools/{poolID}".replace("{poolID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateRepoPool:async(e,r,s,o={})=>{d("updateRepoPool","repoID",e),d("updateRepoPool","poolID",r),d("updateRepoPool","body",s);const a="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}}}},j=function(t){const e=mo(t);return{async createEnterprisePool(r,s,o){const a=await e.createEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.createEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createOrgPool(r,s,o){const a=await e.createOrgPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.createOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createRepoPool(r,s,o){const a=await e.createRepoPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.createRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteEnterprisePool(r,s,o){const a=await e.deleteEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.deleteEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteOrgPool(r,s,o){const a=await e.deleteOrgPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.deleteOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deletePool(r,s){const o=await e.deletePool(r,s),a=t?.serverIndex??0,n=P["PoolsApi.deletePool"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async deleteRepoPool(r,s,o){const a=await e.deleteRepoPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.deleteRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getEnterprisePool(r,s,o){const a=await e.getEnterprisePool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.getEnterprisePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getOrgPool(r,s,o){const a=await e.getOrgPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.getOrgPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getPool(r,s){const o=await e.getPool(r,s),a=t?.serverIndex??0,n=P["PoolsApi.getPool"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getRepoPool(r,s,o){const a=await e.getRepoPool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.getRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async listEnterprisePools(r,s){const o=await e.listEnterprisePools(r,s),a=t?.serverIndex??0,n=P["PoolsApi.listEnterprisePools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listOrgPools(r,s){const o=await e.listOrgPools(r,s),a=t?.serverIndex??0,n=P["PoolsApi.listOrgPools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listPools(r){const s=await 
e.listPools(r),o=t?.serverIndex??0,a=P["PoolsApi.listPools"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async listRepoPools(r,s){const o=await e.listRepoPools(r,s),a=t?.serverIndex??0,n=P["PoolsApi.listRepoPools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async updateEnterprisePool(r,s,o,a){const n=await e.updateEnterprisePool(r,s,o,a),l=t?.serverIndex??0,i=P["PoolsApi.updateEnterprisePool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)},async updateOrgPool(r,s,o,a){const n=await e.updateOrgPool(r,s,o,a),l=t?.serverIndex??0,i=P["PoolsApi.updateOrgPool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)},async updatePool(r,s,o){const a=await e.updatePool(r,s,o),n=t?.serverIndex??0,l=P["PoolsApi.updatePool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateRepoPool(r,s,o,a){const n=await e.updateRepoPool(r,s,o,a),l=t?.serverIndex??0,i=P["PoolsApi.updateRepoPool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)}}};class $t extends H{createEnterprisePool(e,r,s){return j(this.configuration).createEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}createOrgPool(e,r,s){return j(this.configuration).createOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}createRepoPool(e,r,s){return j(this.configuration).createRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}deleteEnterprisePool(e,r,s){return j(this.configuration).deleteEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}deleteOrgPool(e,r,s){return j(this.configuration).deleteOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}deletePool(e,r){return j(this.configuration).deletePool(e,r).then(s=>s(this.axios,this.basePath))}deleteRepoPool(e,r,s){return j(this.configuration).deleteRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}getEnterprisePool(e,r,s){return j(this.configuration).getEnterprisePool(e,r,s).then(o=>o(this.axios,this.basePath))}getOrgPool(e,r,s){return j(this.configuration).getOrgPool(e,r,s).then(o=>o(this.axios,this.basePath))}getPool(e,r){return j(this.configuration).getPool(e,r).then(s=>s(this.axios,this.basePath))}getRepoPool(e,r,s){return j(this.configuration).getRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}listEnterprisePools(e,r){return j(this.configuration).listEnterprisePools(e,r).then(s=>s(this.axios,this.basePath))}listOrgPools(e,r){return j(this.configuration).listOrgPools(e,r).then(s=>s(this.axios,this.basePath))}listPools(e){return j(this.configuration).listPools(e).then(r=>r(this.axios,this.basePath))}listRepoPools(e,r){return j(this.configuration).listRepoPools(e,r).then(s=>s(this.axios,this.basePath))}updateEnterprisePool(e,r,s,o){return j(this.configuration).updateEnterprisePool(e,r,s,o).then(a=>a(this.axios,this.basePath))}updateOrgPool(e,r,s,o){return j(this.configuration).updateOrgPool(e,r,s,o).then(a=>a(this.axios,this.basePath))}updatePool(e,r,s){return j(this.configuration).updatePool(e,r,s).then(o=>o(this.axios,this.basePath))}updateRepoPool(e,r,s,o){return j(this.configuration).updateRepoPool(e,r,s,o).then(a=>a(this.axios,this.basePath))}}const Vo=function(t){return{listProviders:async(e={})=>{const r="/providers",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}}}},So=function(t){const e=Vo(t);return{async listProviders(r){const s=await e.listProviders(r),o=t?.serverIndex??0,a=P["ProvidersApi.listProviders"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)}}};class Nt extends H{listProviders(e){return 
So(this.configuration).listProviders(e).then(r=>r(this.axios,this.basePath))}}const Ao=function(t){return{createRepo:async(e,r={})=>{d("createRepo","body",e);const s="/repositories",o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"POST",...a,...r},l={},i={};await m(l,"Authorization",t),l["Content-Type"]="application/json",V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},n.data=x(e,n,t),{url:S(o),options:n}},createRepoPool:async(e,r,s={})=>{d("createRepoPool","repoID",e),d("createRepoPool","body",r);const o="/repositories/{repoID}/pools".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createRepoScaleSet:async(e,r,s={})=>{d("createRepoScaleSet","repoID",e),d("createRepoScaleSet","body",r);const o="/repositories/{repoID}/scalesets".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},deleteRepo:async(e,r,s={})=>{d("deleteRepo","repoID",e);const o="/repositories/{repoID}".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),r!==void 0&&(c.keepWebhook=r),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},deleteRepoPool:async(e,r,s={})=>{d("deleteRepoPool","repoID",e),d("deleteRepoPool","poolID",r);const o="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"DELETE",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getRepo:async(e,r={})=>{d("getRepo","repoID",e);const s="/repositories/{repoID}".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getRepoPool:async(e,r,s={})=>{d("getRepoPool","repoID",e),d("getRepoPool","poolID",r);const o="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"GET",...n,...s},i={},c={};await m(i,"Authorization",t),V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},{url:S(a),options:l}},getRepoWebhookInfo:async(e,r={})=>{d("getRepoWebhookInfo","repoID",e);const s="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},installRepoWebhook:async(e,r,s={})=>{d("installRepoWebhook","repoID",e),d("installRepoWebhook","body",r);const 
o="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},listRepoInstances:async(e,r={})=>{d("listRepoInstances","repoID",e);const s="/repositories/{repoID}/instances".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listRepoPools:async(e,r={})=>{d("listRepoPools","repoID",e);const s="/repositories/{repoID}/pools".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listRepoScaleSets:async(e,r={})=>{d("listRepoScaleSets","repoID",e);const s="/repositories/{repoID}/scalesets".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listRepos:async(e,r,s,o={})=>{const a="/repositories",n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"GET",...l,...o},c={},p={};await m(c,"Authorization",t),e!==void 0&&(p.owner=e),r!==void 0&&(p.name=r),s!==void 0&&(p.endpoint=s),V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},{url:S(n),options:i}},uninstallRepoWebhook:async(e,r={})=>{d("uninstallRepoWebhook","repoID",e);const s="/repositories/{repoID}/webhook".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},updateRepo:async(e,r,s={})=>{d("updateRepo","repoID",e),d("updateRepo","body",r);const o="/repositories/{repoID}".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},updateRepoPool:async(e,r,s,o={})=>{d("updateRepoPool","repoID",e),d("updateRepoPool","poolID",r),d("updateRepoPool","body",s);const a="/repositories/{repoID}/pools/{poolID}".replace("{repoID}",encodeURIComponent(String(e))).replace("{poolID}",encodeURIComponent(String(r))),n=new URL(a,b);let l;t&&(l=t.baseOptions);const i={method:"PUT",...l,...o},c={},p={};await m(c,"Authorization",t),c["Content-Type"]="application/json",V(n,p);let R=l&&l.headers?l.headers:{};return i.headers={...c,...R,...o.headers},i.data=x(s,i,t),{url:S(n),options:i}}}},F=function(t){const e=Ao(t);return{async createRepo(r,s){const o=await e.createRepo(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.createRepo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async createRepoPool(r,s,o){const a=await e.createRepoPool(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.createRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async 
createRepoScaleSet(r,s,o){const a=await e.createRepoScaleSet(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.createRepoScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteRepo(r,s,o){const a=await e.deleteRepo(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.deleteRepo"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteRepoPool(r,s,o){const a=await e.deleteRepoPool(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.deleteRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getRepo(r,s){const o=await e.getRepo(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.getRepo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getRepoPool(r,s,o){const a=await e.getRepoPool(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.getRepoPool"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async getRepoWebhookInfo(r,s){const o=await e.getRepoWebhookInfo(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.getRepoWebhookInfo"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async installRepoWebhook(r,s,o){const a=await e.installRepoWebhook(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.installRepoWebhook"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async listRepoInstances(r,s){const o=await e.listRepoInstances(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.listRepoInstances"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listRepoPools(r,s){const o=await e.listRepoPools(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.listRepoPools"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listRepoScaleSets(r,s){const o=await e.listRepoScaleSets(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.listRepoScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listRepos(r,s,o,a){const n=await e.listRepos(r,s,o,a),l=t?.serverIndex??0,i=P["RepositoriesApi.listRepos"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)},async uninstallRepoWebhook(r,s){const o=await e.uninstallRepoWebhook(r,s),a=t?.serverIndex??0,n=P["RepositoriesApi.uninstallRepoWebhook"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async updateRepo(r,s,o){const a=await e.updateRepo(r,s,o),n=t?.serverIndex??0,l=P["RepositoriesApi.updateRepo"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async updateRepoPool(r,s,o,a){const n=await e.updateRepoPool(r,s,o,a),l=t?.serverIndex??0,i=P["RepositoriesApi.updateRepoPool"]?.[l]?.url;return(c,p)=>A(n,u,O,t)(c,i||p)}}};class Wt extends H{createRepo(e,r){return F(this.configuration).createRepo(e,r).then(s=>s(this.axios,this.basePath))}createRepoPool(e,r,s){return F(this.configuration).createRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}createRepoScaleSet(e,r,s){return F(this.configuration).createRepoScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}deleteRepo(e,r,s){return F(this.configuration).deleteRepo(e,r,s).then(o=>o(this.axios,this.basePath))}deleteRepoPool(e,r,s){return F(this.configuration).deleteRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}getRepo(e,r){return F(this.configuration).getRepo(e,r).then(s=>s(this.axios,this.basePath))}getRepoPool(e,r,s){return F(this.configuration).getRepoPool(e,r,s).then(o=>o(this.axios,this.basePath))}getRepoWebhookInfo(e,r){return F(this.configuration).getRepoWebhookInfo(e,r).then(s=>s(this.axios,this.basePath))}installRepoWebhook(e,r,s){return F(this.configuration).installRepoWebhook(e,r,s).then(o=>o(this.axios,this.basePath))}listRepoInstances(e,r){return F(this.configuration).listRepoInstances(e,r).then(s=>s(this.axios,this.basePath))}listRepoPools(e,r){return F(this.configuration).listRepoPools(e,r).then(s=>s(this.axios,this.basePath))}listRepoScaleSets(e,r){return 
F(this.configuration).listRepoScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listRepos(e,r,s,o){return F(this.configuration).listRepos(e,r,s,o).then(a=>a(this.axios,this.basePath))}uninstallRepoWebhook(e,r){return F(this.configuration).uninstallRepoWebhook(e,r).then(s=>s(this.axios,this.basePath))}updateRepo(e,r,s){return F(this.configuration).updateRepo(e,r,s).then(o=>o(this.axios,this.basePath))}updateRepoPool(e,r,s,o){return F(this.configuration).updateRepoPool(e,r,s,o).then(a=>a(this.axios,this.basePath))}}const Ro=function(t){return{createEnterpriseScaleSet:async(e,r,s={})=>{d("createEnterpriseScaleSet","enterpriseID",e),d("createEnterpriseScaleSet","body",r);const o="/enterprises/{enterpriseID}/scalesets".replace("{enterpriseID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createOrgScaleSet:async(e,r,s={})=>{d("createOrgScaleSet","orgID",e),d("createOrgScaleSet","body",r);const o="/organizations/{orgID}/scalesets".replace("{orgID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},createRepoScaleSet:async(e,r,s={})=>{d("createRepoScaleSet","repoID",e),d("createRepoScaleSet","body",r);const o="/repositories/{repoID}/scalesets".replace("{repoID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"POST",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}},deleteScaleSet:async(e,r={})=>{d("deleteScaleSet","scalesetID",e);const s="/scalesets/{scalesetID}".replace("{scalesetID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"DELETE",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},getScaleSet:async(e,r={})=>{d("getScaleSet","scalesetID",e);const s="/scalesets/{scalesetID}".replace("{scalesetID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listEnterpriseScaleSets:async(e,r={})=>{d("listEnterpriseScaleSets","enterpriseID",e);const s="/enterprises/{enterpriseID}/scalesets".replace("{enterpriseID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listOrgScaleSets:async(e,r={})=>{d("listOrgScaleSets","orgID",e);const s="/organizations/{orgID}/scalesets".replace("{orgID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return 
n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listRepoScaleSets:async(e,r={})=>{d("listRepoScaleSets","repoID",e);const s="/repositories/{repoID}/scalesets".replace("{repoID}",encodeURIComponent(String(e))),o=new URL(s,b);let a;t&&(a=t.baseOptions);const n={method:"GET",...a,...r},l={},i={};await m(l,"Authorization",t),V(o,i);let c=a&&a.headers?a.headers:{};return n.headers={...l,...c,...r.headers},{url:S(o),options:n}},listScalesets:async(e={})=>{const r="/scalesets",s=new URL(r,b);let o;t&&(o=t.baseOptions);const a={method:"GET",...o,...e},n={},l={};await m(n,"Authorization",t),V(s,l);let i=o&&o.headers?o.headers:{};return a.headers={...n,...i,...e.headers},{url:S(s),options:a}},updateScaleSet:async(e,r,s={})=>{d("updateScaleSet","scalesetID",e),d("updateScaleSet","body",r);const o="/scalesets/{scalesetID}".replace("{scalesetID}",encodeURIComponent(String(e))),a=new URL(o,b);let n;t&&(n=t.baseOptions);const l={method:"PUT",...n,...s},i={},c={};await m(i,"Authorization",t),i["Content-Type"]="application/json",V(a,c);let p=n&&n.headers?n.headers:{};return l.headers={...i,...p,...s.headers},l.data=x(r,l,t),{url:S(a),options:l}}}},X=function(t){const e=Ro(t);return{async createEnterpriseScaleSet(r,s,o){const a=await e.createEnterpriseScaleSet(r,s,o),n=t?.serverIndex??0,l=P["ScalesetsApi.createEnterpriseScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createOrgScaleSet(r,s,o){const a=await e.createOrgScaleSet(r,s,o),n=t?.serverIndex??0,l=P["ScalesetsApi.createOrgScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async createRepoScaleSet(r,s,o){const a=await e.createRepoScaleSet(r,s,o),n=t?.serverIndex??0,l=P["ScalesetsApi.createRepoScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)},async deleteScaleSet(r,s){const o=await e.deleteScaleSet(r,s),a=t?.serverIndex??0,n=P["ScalesetsApi.deleteScaleSet"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async getScaleSet(r,s){const o=await e.getScaleSet(r,s),a=t?.serverIndex??0,n=P["ScalesetsApi.getScaleSet"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listEnterpriseScaleSets(r,s){const o=await e.listEnterpriseScaleSets(r,s),a=t?.serverIndex??0,n=P["ScalesetsApi.listEnterpriseScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listOrgScaleSets(r,s){const o=await e.listOrgScaleSets(r,s),a=t?.serverIndex??0,n=P["ScalesetsApi.listOrgScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listRepoScaleSets(r,s){const o=await e.listRepoScaleSets(r,s),a=t?.serverIndex??0,n=P["ScalesetsApi.listRepoScaleSets"]?.[a]?.url;return(l,i)=>A(o,u,O,t)(l,n||i)},async listScalesets(r){const s=await e.listScalesets(r),o=t?.serverIndex??0,a=P["ScalesetsApi.listScalesets"]?.[o]?.url;return(n,l)=>A(s,u,O,t)(n,a||l)},async updateScaleSet(r,s,o){const a=await e.updateScaleSet(r,s,o),n=t?.serverIndex??0,l=P["ScalesetsApi.updateScaleSet"]?.[n]?.url;return(i,c)=>A(a,u,O,t)(i,l||c)}}};class Qt extends H{createEnterpriseScaleSet(e,r,s){return X(this.configuration).createEnterpriseScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}createOrgScaleSet(e,r,s){return X(this.configuration).createOrgScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}createRepoScaleSet(e,r,s){return X(this.configuration).createRepoScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}deleteScaleSet(e,r){return X(this.configuration).deleteScaleSet(e,r).then(s=>s(this.axios,this.basePath))}getScaleSet(e,r){return X(this.configuration).getScaleSet(e,r).then(s=>s(this.axios,this.basePath))}listEnterpriseScaleSets(e,r){return 
X(this.configuration).listEnterpriseScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listOrgScaleSets(e,r){return X(this.configuration).listOrgScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listRepoScaleSets(e,r){return X(this.configuration).listRepoScaleSets(e,r).then(s=>s(this.axios,this.basePath))}listScalesets(e){return X(this.configuration).listScalesets(e).then(r=>r(this.axios,this.basePath))}updateScaleSet(e,r,s){return X(this.configuration).updateScaleSet(e,r,s).then(o=>o(this.axios,this.basePath))}}class Mt{apiKey;username;password;accessToken;basePath;serverIndex;baseOptions;formDataCtor;constructor(e={}){this.apiKey=e.apiKey,this.username=e.username,this.password=e.password,this.accessToken=e.accessToken,this.basePath=e.basePath,this.serverIndex=e.serverIndex,this.baseOptions={...e.baseOptions,headers:{...e.baseOptions?.headers}},this.formDataCtor=e.formDataCtor}isJsonMime(e){const r=new RegExp("^(application/json|[^;/ ]+/[^;/ ]+[+]json)[ ]*(;.*)?$","i");return e!==null&&(r.test(e)||e.toLowerCase()==="application/json-patch+json")}}class gr{baseUrl;token;config;isDevelopmentMode(){return typeof window>"u"?!1:window.location.port==="5173"}loginApi;controllerInfoApi;controllerApi;endpointsApi;credentialsApi;repositoriesApi;organizationsApi;enterprisesApi;poolsApi;scaleSetsApi;instancesApi;providersApi;firstRunApi;hooksApi;constructor(e=""){this.baseUrl=e||window.location.origin;const r=this.isDevelopmentMode();this.config=new Mt({basePath:`${this.baseUrl}/api/v1`,accessToken:()=>this.token||"",baseOptions:{withCredentials:!r}}),this.loginApi=new qt(this.config),this.controllerInfoApi=new Lt(this.config),this.controllerApi=new Dt(this.config),this.endpointsApi=new kt(this.config),this.credentialsApi=new jt(this.config),this.repositoriesApi=new Wt(this.config),this.organizationsApi=new Ht(this.config),this.enterprisesApi=new zt(this.config),this.poolsApi=new $t(this.config),this.scaleSetsApi=new Qt(this.config),this.instancesApi=new _t(this.config),this.providersApi=new Nt(this.config),this.firstRunApi=new Ft(this.config),this.hooksApi=new Gt(this.config)}setToken(e){this.token=e;const r=this.isDevelopmentMode();this.config=new Mt({basePath:`${this.baseUrl}/api/v1`,accessToken:()=>e,baseOptions:{withCredentials:!r}}),this.loginApi=new qt(this.config),this.controllerInfoApi=new Lt(this.config),this.controllerApi=new Dt(this.config),this.endpointsApi=new kt(this.config),this.credentialsApi=new jt(this.config),this.repositoriesApi=new Wt(this.config),this.organizationsApi=new Ht(this.config),this.enterprisesApi=new zt(this.config),this.poolsApi=new $t(this.config),this.scaleSetsApi=new Qt(this.config),this.instancesApi=new _t(this.config),this.providersApi=new Nt(this.config),this.firstRunApi=new Ft(this.config),this.hooksApi=new Gt(this.config)}async login(e){const r={username:e.username,password:e.password},o=(await this.loginApi.login(r)).data.token;if(o)return this.setToken(o),{token:o};throw new Error("Login failed")}async getControllerInfo(){return(await this.controllerInfoApi.controllerInfo()).data}async listGithubEndpoints(){return(await this.endpointsApi.listGithubEndpoints()).data||[]}async getGithubEndpoint(e){return(await this.endpointsApi.getGithubEndpoint(e)).data}async createGithubEndpoint(e){return(await this.endpointsApi.createGithubEndpoint(e)).data}async updateGithubEndpoint(e,r){return(await this.endpointsApi.updateGithubEndpoint(e,r)).data}async deleteGithubEndpoint(e){await this.endpointsApi.deleteGithubEndpoint(e)}async listGiteaEndpoints(){return(await 
this.endpointsApi.listGiteaEndpoints()).data||[]}async getGiteaEndpoint(e){return(await this.endpointsApi.getGiteaEndpoint(e)).data}async createGiteaEndpoint(e){return(await this.endpointsApi.createGiteaEndpoint(e)).data}async updateGiteaEndpoint(e,r){return(await this.endpointsApi.updateGiteaEndpoint(e,r)).data}async deleteGiteaEndpoint(e){await this.endpointsApi.deleteGiteaEndpoint(e)}async listAllEndpoints(){const[e,r]=await Promise.all([this.listGithubEndpoints().catch(()=>[]),this.listGiteaEndpoints().catch(()=>[])]);return[...e.map(s=>({...s,endpoint_type:"github"})),...r.map(s=>({...s,endpoint_type:"gitea"}))]}async listGithubCredentials(){return(await this.credentialsApi.listCredentials()).data||[]}async getGithubCredentials(e){return(await this.credentialsApi.getCredentials(e)).data}async createGithubCredentials(e){return(await this.credentialsApi.createCredentials(e)).data}async updateGithubCredentials(e,r){return(await this.credentialsApi.updateCredentials(e,r)).data}async deleteGithubCredentials(e){await this.credentialsApi.deleteCredentials(e)}async listGiteaCredentials(){return(await this.credentialsApi.listGiteaCredentials()).data||[]}async getGiteaCredentials(e){return(await this.credentialsApi.getGiteaCredentials(e)).data}async createGiteaCredentials(e){return(await this.credentialsApi.createGiteaCredentials(e)).data}async updateGiteaCredentials(e,r){return(await this.credentialsApi.updateGiteaCredentials(e,r)).data}async deleteGiteaCredentials(e){await this.credentialsApi.deleteGiteaCredentials(e)}async listAllCredentials(){const[e,r]=await Promise.all([this.listGithubCredentials().catch(()=>[]),this.listGiteaCredentials().catch(()=>[])]);return[...e,...r]}async installRepositoryWebhook(e,r={}){await this.repositoriesApi.installRepoWebhook(e,r)}async uninstallRepositoryWebhook(e){await this.hooksApi.uninstallRepoWebhook(e)}async getRepositoryWebhookInfo(e){return(await this.hooksApi.getRepoWebhookInfo(e)).data}async listRepositories(){return(await this.repositoriesApi.listRepos()).data||[]}async getRepository(e){return(await this.repositoriesApi.getRepo(e)).data}async createRepository(e){return(await this.repositoriesApi.createRepo(e)).data}async updateRepository(e,r){return(await this.repositoriesApi.updateRepo(e,r)).data}async deleteRepository(e){await this.repositoriesApi.deleteRepo(e)}async installRepoWebhook(e){await this.repositoriesApi.installRepoWebhook(e,{})}async listRepositoryPools(e){return(await this.repositoriesApi.listRepoPools(e)).data||[]}async listRepositoryInstances(e){return(await this.repositoriesApi.listRepoInstances(e)).data||[]}async createRepositoryPool(e,r){return(await this.repositoriesApi.createRepoPool(e,r)).data}async installOrganizationWebhook(e,r={}){await this.organizationsApi.installOrgWebhook(e,r)}async uninstallOrganizationWebhook(e){await this.hooksApi.uninstallOrgWebhook(e)}async getOrganizationWebhookInfo(e){return(await this.hooksApi.getOrgWebhookInfo(e)).data}async listOrganizations(){return(await this.organizationsApi.listOrgs()).data||[]}async getOrganization(e){return(await this.organizationsApi.getOrg(e)).data}async createOrganization(e){return(await this.organizationsApi.createOrg(e)).data}async updateOrganization(e,r){return(await this.organizationsApi.updateOrg(e,r)).data}async deleteOrganization(e){await this.organizationsApi.deleteOrg(e)}async listOrganizationPools(e){return(await this.organizationsApi.listOrgPools(e)).data||[]}async listOrganizationInstances(e){return(await 
this.organizationsApi.listOrgInstances(e)).data||[]}async createOrganizationPool(e,r){return(await this.organizationsApi.createOrgPool(e,r)).data}async listEnterprises(){return(await this.enterprisesApi.listEnterprises()).data||[]}async getEnterprise(e){return(await this.enterprisesApi.getEnterprise(e)).data}async createEnterprise(e){return(await this.enterprisesApi.createEnterprise(e)).data}async updateEnterprise(e,r){return(await this.enterprisesApi.updateEnterprise(e,r)).data}async deleteEnterprise(e){await this.enterprisesApi.deleteEnterprise(e)}async listEnterprisePools(e){return(await this.enterprisesApi.listEnterprisePools(e)).data||[]}async listEnterpriseInstances(e){return(await this.enterprisesApi.listEnterpriseInstances(e)).data||[]}async createEnterprisePool(e,r){return(await this.enterprisesApi.createEnterprisePool(e,r)).data}async createRepositoryScaleSet(e,r){return(await this.repositoriesApi.createRepoScaleSet(e,r)).data}async listRepositoryScaleSets(e){return(await this.repositoriesApi.listRepoScaleSets(e)).data||[]}async createOrganizationScaleSet(e,r){return(await this.organizationsApi.createOrgScaleSet(e,r)).data}async listOrganizationScaleSets(e){return(await this.organizationsApi.listOrgScaleSets(e)).data||[]}async createEnterpriseScaleSet(e,r){return(await this.enterprisesApi.createEnterpriseScaleSet(e,r)).data}async listEnterpriseScaleSets(e){return(await this.enterprisesApi.listEnterpriseScaleSets(e)).data||[]}async listPools(){return(await this.poolsApi.listPools()).data||[]}async listAllPools(){return this.listPools()}async getPool(e){return(await this.poolsApi.getPool(e)).data}async updatePool(e,r){return(await this.poolsApi.updatePool(e,r)).data}async deletePool(e){await this.poolsApi.deletePool(e)}async listScaleSets(){return(await this.scaleSetsApi.listScalesets()).data||[]}async getScaleSet(e){return(await this.scaleSetsApi.getScaleSet(e.toString())).data}async updateScaleSet(e,r){return(await this.scaleSetsApi.updateScaleSet(e.toString(),r)).data}async deleteScaleSet(e){await this.scaleSetsApi.deleteScaleSet(e.toString())}async listInstances(){return(await this.instancesApi.listInstances()).data||[]}async getInstance(e){return(await this.instancesApi.getInstance(e)).data}async deleteInstance(e){await this.instancesApi.deleteInstance(e)}async listProviders(){return(await this.providersApi.listProviders()).data||[]}async listCredentials(){return this.listAllCredentials()}async listEndpoints(){return this.listAllEndpoints()}async firstRun(e){return(await this.firstRunApi.firstRun(e)).data}async updateController(e){return(await this.controllerApi.updateController(e)).data}}new gr;class fo extends gr{constructor(e=""){super(e)}}const Yo=new fo;var yo=nt(''),wo=nt(''),Io=nt(''),Eo=ps("");function Zo(t,e){const r=bt(e,["children","$$slots","$$events","$$legacy"]),s=bt(r,["variant","size","disabled","loading","type","fullWidth","icon","iconPosition"]);ls(e,!1);const o=te(),a=te(),n=te(),l=te(),i=te(),c=te(),p=te(),R=te(),I=is();let T=se(e,"variant",8,"primary"),f=se(e,"size",8,"md"),y=se(e,"disabled",8,!1),w=se(e,"loading",8,!1),C=se(e,"type",8,"button"),D=se(e,"fullWidth",8,!1),E=se(e,"icon",8,null),v=se(e,"iconPosition",8,"left");function U(){!y()&&!w()&&I("click")}ee(()=>{},()=>{re(o,"inline-flex items-center justify-center font-medium rounded-md transition-colors focus:outline-none focus:ring-2 focus:ring-offset-2 dark:focus:ring-offset-gray-900 cursor-pointer disabled:cursor-not-allowed")}),ee(()=>oe(f()),()=>{re(a,{sm:"px-3 py-2 text-sm",md:"px-4 py-2 
text-sm",lg:"px-6 py-3 text-base"}[f()])}),ee(()=>oe(T()),()=>{re(n,{primary:"text-white bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 disabled:bg-gray-400 disabled:hover:bg-gray-400",secondary:"text-gray-700 dark:text-gray-300 bg-white dark:bg-gray-700 border border-gray-300 dark:border-gray-600 hover:bg-gray-50 dark:hover:bg-gray-600 focus:ring-blue-500",danger:"text-white bg-red-600 hover:bg-red-700 focus:ring-red-500 disabled:bg-gray-400 disabled:hover:bg-gray-400",ghost:"text-gray-700 dark:text-gray-300 hover:bg-gray-50 dark:hover:bg-gray-800 focus:ring-blue-500"}[T()])}),ee(()=>oe(D()),()=>{re(l,D()?"w-full":"")}),ee(()=>oe(y()),()=>{re(i,y()?"opacity-50":"")}),ee(()=>(k(o),k(a),k(n),k(l),k(i)),()=>{re(c,[k(o),k(a),k(n),k(l),k(i)].filter(Boolean).join(" "))}),ee(()=>oe(f()),()=>{re(p,{sm:"h-4 w-4",md:"h-5 w-5",lg:"h-6 w-6"}[f()])}),ee(()=>(oe(v()),oe(f())),()=>{re(R,{sm:v()==="left"?"-ml-0.5 mr-2":"ml-2 -mr-0.5",md:v()==="left"?"-ml-1 mr-2":"ml-2 -mr-1",lg:v()==="left"?"-ml-1 mr-3":"ml-3 -mr-1"}[f()])}),cs(),Tr();var L=Eo();ws(L,()=>({type:C(),disabled:y(),class:k(c),...s}));var B=qe(L);{var Z=W=>{var Q=yo();Ie(()=>Ee(Q,0,`animate-spin ${k(p)??""} ${v()==="left"?"-ml-1 mr-2":"ml-2 -mr-1"}`)),ue(W,Q)},ce=W=>{var Q=us(),Ge=Os(Q);{var Cr=_e=>{var ye=wo(),Ur=qe(ye);mt(Ur,E,!0),He(ye),Ie(()=>Ee(ye,0,`${k(p)??""} ${k(R)??""}`)),ue(_e,ye)};$e(Ge,_e=>{E()&&v()==="left"&&_e(Cr)},!0)}ue(W,Q)};$e(B,W=>{w()?W(Z):W(ce,!1)})}var dt=Pt(B,2);Ps(dt,e,"default",{});var xr=Pt(dt,2);{var vr=W=>{var Q=Io(),Ge=qe(Q);mt(Ge,E,!0),He(Q),Ie(()=>Ee(Q,0,`${k(p)??""} ${k(R)??""}`)),ue(W,Q)};$e(xr,W=>{E()&&v()==="right"&&!w()&&W(vr)})}He(L),ds("click",L,U),ue(t,L),hs()}export{Zo as B,Bo as a,Uo as b,Ue as c,Ps as d,Vs as e,ws as f,Yo as g,mt as h,Rs as i,To as r,Ee as s}; diff --git a/webapp/assets/_app/immutable/chunks/CoIRRsD9.js b/webapp/assets/_app/immutable/chunks/CoIRRsD9.js new file mode 100644 index 00000000..8cbc8c4b --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CoIRRsD9.js @@ -0,0 +1 @@ +const s=globalThis.__sveltekit_13hoftk?.base??"/ui",t=globalThis.__sveltekit_13hoftk?.assets??s;export{t as a,s as b}; diff --git a/webapp/assets/_app/immutable/chunks/CwqI2jFH.js b/webapp/assets/_app/immutable/chunks/CwqI2jFH.js new file mode 100644 index 00000000..4cf31304 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CwqI2jFH.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as Dr}from"./B3Pzt0F_.js";import{p as Lr,E as qr,m as s,o as Gr,f as m,k as r,j as o,g as e,r as a,t as v,e as M,c as b,v as T,b as Jr,z as gr,x as W,u as p,s as d,D as Ie,d as Fr}from"./D8EpLgQ1.js";import{p as vr,i as z}from"./5WA7h8uK.js";import{e as Ae,i as $e}from"./u94nIB4-.js";import{s as Oe,r as h,b as Q,g as C,c as Nr}from"./CiE1LlKV.js";import{b as E,a as Vr}from"./C6k1Q4We.js";import{p as Kr}from"./D4Caz1gY.js";import{M as Ur}from"./qB7B8uiS.js";import{J as Wr}from"./DZblzgqm.js";var Qr=m('

                '),Xr=m('
                '),Yr=m(""),Zr=m(''),et=m('
                '),rt=m(""),tt=m(''),at=m(' '),ot=m('
                '),dt=m('

                Entity & Provider Configuration

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Extra Specs (JSON)
                ',1),it=m('
                Creating...
                '),st=m('

                Create New Pool

                Entity Level *
                ');function yt(pr,X){Lr(X,!1);const Y=qr();let He=vr(X,"initialEntityType",8,""),mr=vr(X,"initialEntityId",8,""),G=s(!1),P=s(""),n=s(He()),S=s([]),Z=s([]),ee=s(!1),re=s(!1),k=s(mr()),j=s(""),B=s(""),D=s(""),te=s(void 0),ae=s(void 0),oe=s(void 0),de=s(100),ie=s("garm"),J=s("linux"),F=s("amd64"),se=s(""),le=s(!0),_=s([]),I=s(""),L=s("{}");async function fr(){try{d(re,!0),d(Z,await C.listProviders())}catch(l){d(P,l instanceof Error?l.message:"Failed to load providers")}finally{d(re,!1)}}async function Be(){if(e(n))try{switch(d(ee,!0),d(S,[]),e(n)){case"repository":d(S,await C.listRepositories());break;case"organization":d(S,await C.listOrganizations());break;case"enterprise":d(S,await C.listEnterprises());break}}catch(l){d(P,l instanceof Error?l.message:`Failed to load ${e(n)}s`)}finally{d(ee,!1)}}function ne(l){e(n)!==l&&(d(n,l),d(k,""),Be())}function De(){e(I).trim()&&!e(_).includes(e(I).trim())&&(d(_,[...e(_),e(I).trim()]),d(I,""))}function yr(l){d(_,e(_).filter((A,w)=>w!==l))}function xr(l){l.key==="Enter"&&(l.preventDefault(),De())}async function hr(){if(!e(n)||!e(k)||!e(j)||!e(B)||!e(D)){d(P,"Please fill in all required fields");return}try{d(G,!0),d(P,"");let l={};if(e(L).trim())try{l=JSON.parse(e(L))}catch{throw new Error("Invalid JSON in extra specs")}const A={provider_name:e(j),image:e(B),flavor:e(D),max_runners:e(te)||10,min_idle_runners:e(ae)||0,runner_bootstrap_timeout:e(oe)||20,priority:e(de),runner_prefix:e(ie),os_type:e(J),os_arch:e(F),"github-runner-group":e(se)||void 0,enabled:e(le),tags:e(_),extra_specs:e(L).trim()?l:void 0};let w;switch(e(n)){case"repository":w=await C.createRepositoryPool(e(k),A);break;case"organization":w=await C.createOrganizationPool(e(k),A);break;case"enterprise":w=await C.createEnterprisePool(e(k),A);break;default:throw new Error("Invalid entity level")}Y("submit",A)}catch(l){d(P,l instanceof Error?l.message:"Failed to create pool")}finally{d(G,!1)}}Gr(()=>{fr(),He()&&Be()}),Dr(),Ur(pr,{$$events:{close:()=>Y("close")},children:(l,A)=>{var w=st(),N=r(o(w),2),Le=o(N);{var kr=c=>{var y=Qr(),$=o(y),V=o($,!0);a($),a(y),v(()=>T(V,e(P))),b(c,y)};z(Le,c=>{e(P)&&c(kr)})}var ue=r(Le,2),qe=r(o(ue),2),be=o(qe),ce=r(be,2),Ge=r(ce,2);a(qe),a(ue);var Je=r(ue,2);{var _r=c=>{var y=dt(),$=Jr(y),V=r(o($),2),ve=o(V),pe=o(ve),Pr=o(pe);gr(),a(pe);var Rr=r(pe,2);{var Tr=t=>{var u=Xr();b(t,u)},zr=t=>{var u=Zr();v(()=>{e(k),W(()=>{e(n),e(S)})});var f=o(u),O=o(f);a(f),f.value=f.__value="";var R=r(f);Ae(R,1,()=>e(S),$e,(g,i)=>{var x=Yr(),U=o(x);{var Hr=H=>{var q=Ie();v(()=>T(q,`${e(i),p(()=>e(i).owner)??""}/${e(i),p(()=>e(i).name)??""} (${e(i),p(()=>e(i).endpoint?.name)??""})`)),b(H,q)},Br=H=>{var q=Ie();v(()=>T(q,`${e(i),p(()=>e(i).name)??""} (${e(i),p(()=>e(i).endpoint?.name)??""})`)),b(H,q)};z(U,H=>{e(n)==="repository"?H(Hr):H(Br,!1)})}a(x);var cr={};v(()=>{cr!==(cr=(e(i),p(()=>e(i).id)))&&(x.value=(x.__value=(e(i),p(()=>e(i).id)))??"")}),b(g,x)}),a(u),v(()=>T(O,`Select a ${e(n)??""}`)),Q(u,()=>e(k),g=>d(k,g)),b(t,u)};z(Rr,t=>{e(ee)?t(Tr):t(zr,!1)})}a(ve);var Ve=r(ve,2),Cr=r(o(Ve),2);{var Sr=t=>{var u=et();b(t,u)},jr=t=>{var u=tt();v(()=>{e(j),W(()=>{e(Z)})});var f=o(u);f.value=f.__value="";var O=r(f);Ae(O,1,()=>e(Z),$e,(R,g)=>{var i=rt(),x=o(i,!0);a(i);var U={};v(()=>{T(x,(e(g),p(()=>e(g).name))),U!==(U=(e(g),p(()=>e(g).name)))&&(i.value=(i.__value=(e(g),p(()=>e(g).name)))??"")}),b(R,i)}),a(u),Q(u,()=>e(j),R=>d(j,R)),b(t,u)};z(Cr,t=>{e(re)?t(Sr):t(jr,!1)})}a(Ve),a(V),a($);var me=r($,2),Ke=r(o(me),2),fe=o(Ke),Ue=r(o(fe),2);h(Ue),a(fe);var 
ye=r(fe,2),We=r(o(ye),2);h(We),a(ye);var xe=r(ye,2),he=r(o(xe),2);v(()=>{e(J),W(()=>{})});var ke=o(he);ke.value=ke.__value="linux";var Qe=r(ke);Qe.value=Qe.__value="windows",a(he),a(xe);var Xe=r(xe,2),_e=r(o(Xe),2);v(()=>{e(F),W(()=>{})});var we=o(_e);we.value=we.__value="amd64";var Ye=r(we);Ye.value=Ye.__value="arm64",a(_e),a(Xe),a(Ke),a(me);var Ee=r(me,2),Ze=r(o(Ee),2),Me=o(Ze),er=r(o(Me),2);h(er),a(Me);var Pe=r(Me,2),rr=r(o(Pe),2);h(rr),a(Pe);var tr=r(Pe,2),ar=r(o(tr),2);h(ar),a(tr),a(Ze),a(Ee);var or=r(Ee,2),Re=r(o(or),2),Te=o(Re),dr=r(o(Te),2);h(dr),a(Te);var ze=r(Te,2),ir=r(o(ze),2);h(ir),a(ze);var sr=r(ze,2),lr=r(o(sr),2);h(lr),a(sr),a(Re);var Ce=r(Re,2),nr=r(o(Ce),2),Se=o(nr),K=o(Se);h(K);var Ir=r(K,2);a(Se);var Ar=r(Se,2);{var $r=t=>{var u=ot();Ae(u,5,()=>e(_),$e,(f,O,R)=>{var g=at(),i=o(g),x=r(i);a(g),v(()=>{T(i,`${e(O)??""} `),Nr(x,"aria-label",`Remove tag ${e(O)}`)}),M("click",x,()=>yr(R)),b(f,g)}),a(u),b(t,u)};z(Ar,t=>{e(_),p(()=>e(_).length>0)&&t($r)})}a(nr),a(Ce);var je=r(Ce,2),Or=r(o(je),2);Wr(Or,{rows:4,placeholder:"{}",get value(){return e(L)},set value(t){d(L,t)},$$legacy:!0}),a(je);var ur=r(je,2),br=o(ur);h(br),gr(2),a(ur),a(or),v(t=>T(Pr,`${t??""} `),[()=>(e(n),p(()=>e(n).charAt(0).toUpperCase()+e(n).slice(1)))]),E(Ue,()=>e(B),t=>d(B,t)),E(We,()=>e(D),t=>d(D,t)),Q(he,()=>e(J),t=>d(J,t)),Q(_e,()=>e(F),t=>d(F,t)),E(er,()=>e(ae),t=>d(ae,t)),E(rr,()=>e(te),t=>d(te,t)),E(ar,()=>e(oe),t=>d(oe,t)),E(dr,()=>e(ie),t=>d(ie,t)),E(ir,()=>e(de),t=>d(de,t)),E(lr,()=>e(se),t=>d(se,t)),E(K,()=>e(I),t=>d(I,t)),M("keydown",K,xr),M("click",Ir,De),Vr(br,()=>e(le),t=>d(le,t)),b(c,y)};z(Je,c=>{e(n)&&c(_r)})}var Fe=r(Je,2),Ne=o(Fe),ge=r(Ne,2),wr=o(ge);{var Er=c=>{var y=it();b(c,y)},Mr=c=>{var y=Ie("Create Pool");b(c,y)};z(wr,c=>{e(G)?c(Er):c(Mr,!1)})}a(ge),a(Fe),a(N),a(w),v(()=>{Oe(be,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="repository"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Oe(ce,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="organization"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Oe(Ge,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="enterprise"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),ge.disabled=e(G)||!e(n)||!e(k)||!e(j)||!e(B)||!e(D)}),M("click",be,()=>ne("repository")),M("click",ce,()=>ne("organization")),M("click",Ge,()=>ne("enterprise")),M("click",Ne,()=>Y("close")),M("submit",N,Kr(hr)),b(l,w)},$$slots:{default:!0}}),Fr()}export{yt as C}; diff --git a/webapp/assets/_app/immutable/chunks/D4Caz1gY.js b/webapp/assets/_app/immutable/chunks/D4Caz1gY.js new file mode 100644 index 00000000..85ca9d43 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/D4Caz1gY.js @@ -0,0 +1 @@ +function r(t){return function(...e){var n=e[0];return n.preventDefault(),t?.apply(this,e)}}export{r as p}; diff --git a/webapp/assets/_app/immutable/chunks/D8EpLgQ1.js b/webapp/assets/_app/immutable/chunks/D8EpLgQ1.js new file mode 100644 index 00000000..b9d9b59c --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/D8EpLgQ1.js @@ -0,0 +1,2 @@ +var 
Je=Array.isArray,wn=Array.prototype.indexOf,yn=Array.from,$e=Object.defineProperty,be=Object.getOwnPropertyDescriptor,gn=Object.getOwnPropertyDescriptors,bn=Object.prototype,mn=Array.prototype,At=Object.getPrototypeOf,dt=Object.isExtensible;function Mr(e){return typeof e=="function"}const Ne=()=>{};function Lr(e){return e()}function St(e){for(var t=0;t{e=r,t=s});return{promise:n,resolve:e,reject:t}}const k=2,Qe=4,qe=8,Ae=16,Y=32,ae=64,kt=128,O=256,De=512,m=1024,P=2048,H=4096,W=8192,de=16384,et=32768,Ct=65536,ht=1<<17,Rt=1<<18,tt=1<<19,nt=1<<20,Be=1<<21,rt=1<<22,X=1<<23,Z=Symbol("$state"),Fr=Symbol("legacy props"),qr=Symbol(""),at=new class extends Error{name="StaleReactionError";message="The reaction that called `getAbortSignal()` was re-run or destroyed"},st=3,me=8;function Tn(){throw new Error("https://svelte.dev/e/await_outside_boundary")}function he(e){throw new Error("https://svelte.dev/e/lifecycle_outside_component")}function xn(){throw new Error("https://svelte.dev/e/async_derived_orphan")}function An(e){throw new Error("https://svelte.dev/e/effect_in_teardown")}function Sn(){throw new Error("https://svelte.dev/e/effect_in_unowned_derived")}function kn(e){throw new Error("https://svelte.dev/e/effect_orphan")}function Cn(){throw new Error("https://svelte.dev/e/effect_update_depth_exceeded")}function Rn(){throw new Error("https://svelte.dev/e/get_abort_signal_outside_reaction")}function On(){throw new Error("https://svelte.dev/e/hydration_failed")}function Ot(e){throw new Error("https://svelte.dev/e/lifecycle_legacy_only")}function Vr(e){throw new Error("https://svelte.dev/e/props_invalid_value")}function Nn(){throw new Error("https://svelte.dev/e/state_descriptors_fixed")}function Pn(){throw new Error("https://svelte.dev/e/state_prototype_fixed")}function Dn(){throw new Error("https://svelte.dev/e/state_unsafe_mutation")}const Yr=1,Hr=2,Ur=4,$r=8,Br=16,Wr=1,zr=2,Gr=4,Kr=8,Xr=16,Nt=1,In=2,it="[",Mn="[!",Pt="]",oe={},E=Symbol(),Zr="http://www.w3.org/1999/xhtml",Jr="@attach";function je(e){console.warn("https://svelte.dev/e/hydration_mismatch")}function Qr(){console.warn("https://svelte.dev/e/select_multiple_invalid_value")}let g=!1;function fe(e){g=e}let w;function I(e){if(e===null)throw je(),oe;return w=e}function ut(){return I(M(w))}function ea(e){if(g){if(M(w)!==null)throw je(),oe;w=e}}function ta(e=1){if(g){for(var t=e,n=w;t--;)n=M(n);w=n}}function na(){for(var e=0,t=w;;){if(t.nodeType===me){var n=t.data;if(n===Pt){if(e===0)return t;e-=1}else(n===it||n===Mn)&&(e+=1)}var r=M(t);t.remove(),t=r}}function ra(e){if(!e||e.nodeType!==me)throw je(),oe;return e.data}function Dt(e){return e===this.v}function It(e,t){return e!=e?t==t:e!==t||e!==null&&typeof e=="object"||typeof e=="function"}function aa(e,t){return e!==t}function Mt(e){return!It(e,this.v)}let Se=!1;function sa(){Se=!0}let p=null;function Ie(e){p=e}function Ln(e){return Ve().get(e)}function Fn(e,t){return Ve().set(e,t),t}function qn(e){return Ve().has(e)}function jn(){return Ve()}function Vn(e,t=!1,n){p={p,c:null,e:null,s:e,x:null,l:Se&&!t?{s:null,u:null,$:[]}:null}}function Yn(e){var t=p,n=t.e;if(n!==null){t.e=null;for(var r of n)Jt(r)}return p=t.p,{}}function ke(){return!Se||p!==null&&p.l===null}function Ve(e){return p===null&&he(),p.c??=new Map(Hn(p)||void 0)}function Hn(e){let t=e.p;for(;t!==null;){const n=t.c;if(n!==null)return n;t=t.p}return null}const Un=new WeakMap;function $n(e){var t=h;if(t===null)return d.f|=X,e;if((t.f&et)===0){if((t.f&kt)===0)throw!t.parent&&e instanceof Error&&Lt(e),e;t.b.error(e)}else 
lt(e,t)}function lt(e,t){for(;t!==null;){if((t.f&kt)!==0)try{t.b.error(e);return}catch(n){e=n}t=t.parent}throw e instanceof Error&&Lt(e),e}function Lt(e){const t=Un.get(e);t&&($e(e,"message",{value:t.message}),$e(e,"stack",{value:t.stack}))}const Bn=typeof requestIdleCallback>"u"?e=>setTimeout(e,1):requestIdleCallback;let Ee=[],Te=[];function Ft(){var e=Ee;Ee=[],St(e)}function qt(){var e=Te;Te=[],St(e)}function jt(e){Ee.length===0&&queueMicrotask(Ft),Ee.push(e)}function ia(e){Te.length===0&&Bn(qt),Te.push(e)}function Wn(){Ee.length>0&&Ft(),Te.length>0&&qt()}function zn(){for(var e=h.b;e!==null&&!e.has_pending_snippet();)e=e.parent;return e===null&&Tn(),e}function ft(e){var t=k|P,n=d!==null&&(d.f&k)!==0?d:null;return h===null||n!==null&&(n.f&O)!==0?t|=O:h.f|=tt,{ctx:p,deps:null,effects:null,equals:Dt,f:t,fn:e,reactions:null,rv:0,v:E,wv:0,parent:n??h,ac:null}}function Gn(e,t){let n=h;n===null&&xn();var r=n.b,s=void 0,a=ct(E),i=null,f=!d;return ar(()=>{try{var u=e()}catch(_){u=Promise.reject(_)}var l=()=>u;s=i?.then(l,l)??Promise.resolve(u),i=s;var o=y,v=r.pending;f&&(r.update_pending_count(1),v||o.increment());const c=(_,b=void 0)=>{i=null,v||o.activate(),b?b!==at&&(a.f|=X,Le(a,b)):((a.f&X)!==0&&(a.f^=X),Le(a,_)),f&&(r.update_pending_count(-1),v||o.decrement()),Ht()};if(s.then(c,_=>c(null,_||"unknown")),o)return()=>{queueMicrotask(()=>o.neuter())}}),new Promise(u=>{function l(o){function v(){o===s?u(a):l(s)}o.then(v,v)}l(s)})}function ua(e){const t=ft(e);return sn(t),t}function Kn(e){const t=ft(e);return t.equals=Mt,t}function Vt(e){var t=e.effects;if(t!==null){e.effects=null;for(var n=0;nGn(u))).then(u=>{s?.activate(),i();try{n([...e.map(r),...u])}catch(l){(a.f&de)===0&<(l,a)}s?.deactivate(),Ht()}).catch(u=>{f.error(u)})}function Jn(){var e=h,t=d,n=p;return function(){G(e),L(t),Ie(n)}}function Ht(){G(null),L(null),Ie(null)}const we=new Set;let y=null,He=null,ve=null,pt=new Set,Me=[];function Ut(){const e=Me.shift();Me.length>0&&queueMicrotask(Ut),e()}let ee=[],Ye=null,We=!1,Pe=!1;class te{current=new Map;#a=new Map;#s=new Set;#e=0;#f=null;#o=!1;#n=[];#i=[];#r=[];#t=[];#u=[];#c=[];#_=[];skipped_effects=new Set;process(t){ee=[],He=null;var n=null;if(we.size>1){n=new Map,ve=new Map;for(const[a,i]of this.current)n.set(a,{v:a.v,wv:a.wv}),a.v=i;for(const a of we)if(a!==this)for(const[i,f]of a.#a)n.has(i)||(n.set(i,{v:i.v,wv:i.wv}),i.v=f)}for(const a of t)this.#d(a);if(this.#n.length===0&&this.#e===0){this.#v();var r=this.#r,s=this.#t;this.#r=[],this.#t=[],this.#u=[],He=y,y=null,wt(r),wt(s),y===null?y=this:we.delete(this),this.#f?.resolve()}else this.#l(this.#r),this.#l(this.#t),this.#l(this.#u);if(n){for(const[a,{v:i,wv:f}]of n)a.wv<=f&&(a.v=i);ve=null}for(const a of this.#n)_e(a);for(const a of this.#i)_e(a);this.#n=[],this.#i=[]}#d(t){t.f^=m;for(var n=t.first;n!==null;){var r=n.f,s=(r&(Y|ae))!==0,a=s&&(r&m)!==0,i=a||(r&W)!==0||this.skipped_effects.has(n);if(!i&&n.fn!==null){if(s)n.f^=m;else if((r&m)===0)if((r&Qe)!==0)this.#t.push(n);else if((r&rt)!==0){var f=n.b?.pending?this.#i:this.#n;f.push(n)}else Ce(n)&&((n.f&Ae)!==0&&this.#u.push(n),_e(n));var u=n.first;if(u!==null){n=u;continue}}var l=n.parent;for(n=n.next;n===null&&l!==null;)n=l.next,l=l.parent}}#l(t){for(const n of t)((n.f&P)!==0?this.#c:this.#_).push(n),x(n,m);t.length=0}capture(t,n){this.#a.has(t)||this.#a.set(t,n),this.current.set(t,t.v)}activate(){y=this}deactivate(){y=null,He=null;for(const t of 
pt)if(pt.delete(t),t(),y!==null)break}neuter(){this.#o=!0}flush(){ee.length>0?ze():this.#v(),y===this&&(this.#e===0&&we.delete(this),this.deactivate())}#v(){if(!this.#o)for(const t of this.#s)t();this.#s.clear()}increment(){this.#e+=1}decrement(){if(this.#e-=1,this.#e===0){for(const t of this.#c)x(t,P),ne(t);for(const t of this.#_)x(t,H),ne(t);this.#r=[],this.#t=[],this.flush()}else this.deactivate()}add_callback(t){this.#s.add(t)}settled(){return(this.#f??=En()).promise}static ensure(){if(y===null){const t=y=new te;we.add(y),Pe||te.enqueue(()=>{y===t&&t.flush()})}return y}static enqueue(t){Me.length===0&&queueMicrotask(Ut),Me.unshift(t)}}function $t(e){var t=Pe;Pe=!0;try{var n;for(e&&(ze(),n=e());;){if(Wn(),ee.length===0&&(y?.flush(),ee.length===0))return Ye=null,n;ze()}}finally{Pe=t}}function ze(){var e=ce;We=!0;try{var t=0;for(bt(!0);ee.length>0;){var n=te.ensure();if(t++>1e3){var r,s;Qn()}n.process(ee),J.clear()}}finally{We=!1,bt(e),Ye=null}}function Qn(){try{Cn()}catch(e){lt(e,Ye)}}function wt(e){var t=e.length;if(t!==0){for(var n=0;ns&&(r.f&nt)!==0)break}}for(;nK(e))),t}function j(e,t,n=!1){d!==null&&(!D||(d.f&ht)!==0)&&ke()&&(d.f&(k|Ae|rt|ht))!==0&&!V?.includes(e)&&Dn();let r=n?ye(t):t;return Le(e,r)}function Le(e,t){if(!e.equals(t)){var n=e.v;pe?J.set(e,t):J.set(e,n),e.v=t;var r=te.ensure();r.capture(e,n),(e.f&k)!==0&&((e.f&P)!==0&&ot(e),x(e,(e.f&O)===0?m:H)),e.wv=ln(),Bt(e,P),ke()&&h!==null&&(h.f&m)!==0&&(h.f&(Y|ae))===0&&(R===null?_r([e]):R.push(e))}return t}function oa(e,t=1){var n=K(e),r=t===1?n++:n--;return j(e,n),r}function Ue(e){j(e,e.v+1)}function Bt(e,t){var n=e.reactions;if(n!==null)for(var r=ke(),s=n.length,a=0;a{if(Q===a)return f();var u=d,l=Q;L(null),Et(a);var o=f();return L(u),Et(l),o};return r&&n.set("length",$(e.length)),new Proxy(e,{defineProperty(f,u,l){(!("value"in l)||l.configurable===!1||l.enumerable===!1||l.writable===!1)&&Nn();var o=n.get(u);return o===void 0?o=i(()=>{var v=$(l.value);return n.set(u,v),v}):j(o,l.value,!0),!0},deleteProperty(f,u){var l=n.get(u);if(l===void 0){if(u in f){const o=i(()=>$(E));n.set(u,o),Ue(s)}}else j(l,E),Ue(s);return!0},get(f,u,l){if(u===Z)return e;var o=n.get(u),v=u in f;if(o===void 0&&(!v||be(f,u)?.writable)&&(o=i(()=>{var _=ye(v?f[u]:E),b=$(_);return b}),n.set(u,o)),o!==void 0){var c=K(o);return c===E?void 0:c}return Reflect.get(f,u,l)},getOwnPropertyDescriptor(f,u){var l=Reflect.getOwnPropertyDescriptor(f,u);if(l&&"value"in l){var o=n.get(u);o&&(l.value=K(o))}else if(l===void 0){var v=n.get(u),c=v?.v;if(v!==void 0&&c!==E)return{enumerable:!0,configurable:!0,value:c,writable:!0}}return l},has(f,u){if(u===Z)return!0;var l=n.get(u),o=l!==void 0&&l.v!==E||Reflect.has(f,u);if(l!==void 0||h!==null&&(!o||be(f,u)?.writable)){l===void 0&&(l=i(()=>{var c=o?ye(f[u]):E,_=$(c);return _}),n.set(u,l));var v=K(l);if(v===E)return!1}return o},set(f,u,l,o){var v=n.get(u),c=u in f;if(r&&u==="length")for(var _=l;_$(E)),n.set(_+"",b))}if(v===void 0)(!c||be(f,u)?.writable)&&(v=i(()=>$(void 0)),j(v,ye(l)),n.set(u,v));else{c=v.v!==E;var q=i(()=>ye(l));j(v,q)}var Re=Reflect.getOwnPropertyDescriptor(f,u);if(Re?.set&&Re.set.call(o,l),!c){if(r&&typeof u=="string"){var Oe=n.get("length"),U=Number(u);Number.isInteger(U)&&U>=Oe.v&&j(Oe,U+1)}Ue(s)}return!0},ownKeys(f){K(s);var u=Reflect.ownKeys(f).filter(v=>{var c=n.get(v);return c===void 0||c.v!==E});for(var[l,o]of n)o.v!==E&&!(l in f)&&u.push(l);return u},setPrototypeOf(){Pn()}})}function yt(e){try{if(e!==null&&typeof e=="object"&&Z in e)return e[Z]}catch{}return e}function ca(e,t){return 
Object.is(yt(e),yt(t))}var gt,er,Wt,zt,Gt;function Ge(){if(gt===void 0){gt=window,er=document,Wt=/Firefox/.test(navigator.userAgent);var e=Element.prototype,t=Node.prototype,n=Text.prototype;zt=be(t,"firstChild").get,Gt=be(t,"nextSibling").get,dt(e)&&(e.__click=void 0,e.__className=void 0,e.__attributes=null,e.__style=void 0,e.__e=void 0),dt(n)&&(n.__t=void 0)}}function z(e=""){return document.createTextNode(e)}function A(e){return zt.call(e)}function M(e){return Gt.call(e)}function _a(e,t){if(!g)return A(e);var n=A(w);if(n===null)n=w.appendChild(z());else if(t&&n.nodeType!==st){var r=z();return n?.before(r),I(r),r}return I(n),n}function va(e,t){if(!g){var n=A(e);return n instanceof Comment&&n.data===""?M(n):n}return w}function da(e,t=1,n=!1){let r=g?w:e;for(var s;t--;)s=r,r=M(r);if(!g)return r;if(n&&r?.nodeType!==st){var a=z();return r===null?s?.after(a):r.before(a),I(a),a}return I(r),r}function Kt(e){e.textContent=""}function ha(){return!1}function Xt(e){h===null&&d===null&&kn(),d!==null&&(d.f&O)!==0&&h===null&&Sn(),pe&&An()}function tr(e,t){var n=t.last;n===null?t.last=t.first=e:(n.next=e,e.prev=n,t.last=e)}function F(e,t,n,r=!0){var s=h;s!==null&&(s.f&W)!==0&&(e|=W);var a={ctx:p,deps:null,nodes_start:null,nodes_end:null,f:e|P,first:null,fn:t,last:null,next:null,parent:s,b:s&&s.b,prev:null,teardown:null,transitions:null,wv:0,ac:null};if(n)try{_e(a),a.f|=et}catch(u){throw re(a),u}else t!==null&&ne(a);var i=n&&a.deps===null&&a.first===null&&a.nodes_start===null&&a.teardown===null&&(a.f&tt)===0;if(!i&&r&&(s!==null&&tr(a,s),d!==null&&(d.f&k)!==0&&(e&ae)===0)){var f=d;(f.effects??=[]).push(a)}return a}function Zt(e){const t=F(qe,null,!1);return x(t,m),t.teardown=e,t}function nr(e){Xt();var t=h.f,n=!d&&(t&Y)!==0&&(t&et)===0;if(n){var r=p;(r.e??=[]).push(e)}else return Jt(e)}function Jt(e){return F(Qe|nt,e,!1)}function pa(e){return Xt(),F(qe|nt,e,!0)}function rr(e){te.ensure();const t=F(ae,e,!0);return(n={})=>new Promise(r=>{n.outro?fr(t,()=>{re(t),r(void 0)}):(re(t),r(void 0))})}function wa(e){return F(Qe,e,!1)}function ya(e,t){var n=p,r={effect:null,ran:!1,deps:e};n.l.$.push(r),r.effect=Qt(()=>{e(),!r.ran&&(r.ran=!0,se(t))})}function ga(){var e=p;Qt(()=>{for(var t of e.l.$){t.deps();var n=t.effect;(n.f&m)!==0&&x(n,H),Ce(n)&&_e(n),t.ran=!1}})}function ar(e){return F(rt|tt,e,!0)}function Qt(e,t=0){return F(qe|t,e,!0)}function ba(e,t=[],n=[]){Zn(t,n,r=>{F(qe,()=>e(...r.map(K)),!0)})}function sr(e,t=0){var n=F(Ae|t,e,!0);return n}function ir(e,t=!0){return F(Y,e,!0,t)}function en(e){var t=e.teardown;if(t!==null){const n=pe,r=d;mt(!0),L(null);try{t.call(null)}finally{mt(n),L(r)}}}function tn(e,t=!1){var n=e.first;for(e.first=e.last=null;n!==null;){n.ac?.abort(at);var r=n.next;(n.f&ae)!==0?n.parent=null:re(n,t),n=r}}function ur(e){for(var t=e.first;t!==null;){var n=t.next;(t.f&Y)===0&&re(t),t=n}}function re(e,t=!0){var n=!1;(t||(e.f&Rt)!==0)&&e.nodes_start!==null&&e.nodes_end!==null&&(lr(e.nodes_start,e.nodes_end),n=!0),tn(e,t&&!n),Fe(e,0),x(e,de);var r=e.transitions;if(r!==null)for(const a of r)a.stop();en(e);var s=e.parent;s!==null&&s.first!==null&&nn(e),e.next=e.prev=e.teardown=e.ctx=e.deps=e.fn=e.nodes_start=e.nodes_end=e.ac=null}function lr(e,t){for(;e!==null;){var n=e===t?null:M(e);e.remove(),e=n}}function nn(e){var t=e.parent,n=e.prev,r=e.next;n!==null&&(n.next=r),r!==null&&(r.prev=n),t!==null&&(t.first===e&&(t.first=r),t.last===e&&(t.last=n))}function fr(e,t){var n=[];rn(e,n,!0),or(n,()=>{re(e),t&&t()})}function or(e,t){var n=e.length;if(n>0){var r=()=>--n||t();for(var s of e)s.out(r)}else 
t()}function rn(e,t,n){if((e.f&W)===0){if(e.f^=W,e.transitions!==null)for(const i of e.transitions)(i.is_global||n)&&t.push(i);for(var r=e.first;r!==null;){var s=r.next,a=(r.f&Ct)!==0||(r.f&Y)!==0;rn(r,t,a?n:!1),r=s}}}function ma(e){an(e,!0)}function an(e,t){if((e.f&W)!==0){e.f^=W,(e.f&m)===0&&(x(e,P),ne(e));for(var n=e.first;n!==null;){var r=n.next,s=(n.f&Ct)!==0||(n.f&Y)!==0;an(n,s?t:!1),n=r}if(e.transitions!==null)for(const a of e.transitions)(a.is_global||t)&&a.in()}}let le=null;function cr(e){var t=le;try{if(le=new Set,se(e),t!==null)for(var n of le)t.add(n);return le}finally{le=t}}function Ea(e){for(var t of cr(e))Le(t,t.v)}let ce=!1;function bt(e){ce=e}let pe=!1;function mt(e){pe=e}let d=null,D=!1;function L(e){d=e}let h=null;function G(e){h=e}let V=null;function sn(e){d!==null&&(V===null?V=[e]:V.push(e))}let T=null,S=0,R=null;function _r(e){R=e}let un=1,xe=0,Q=xe;function Et(e){Q=e}let B=!1;function ln(){return++un}function Ce(e){var t=e.f;if((t&P)!==0)return!0;if((t&H)!==0){var n=e.deps,r=(t&O)!==0;if(n!==null){var s,a,i=(t&De)!==0,f=r&&h!==null&&!B,u=n.length;if((i||f)&&(h===null||(h.f&de)===0)){var l=e,o=l.parent;for(s=0;se.wv)return!0}(!r||h!==null&&!B)&&x(e,m)}return!1}function fn(e,t,n=!0){var r=e.reactions;if(r!==null&&!V?.includes(e))for(var s=0;s0)for(c.length=S+T.length,_=0;_{document.activeElement===n&&e.focus()})}}function Ca(e){g&&A(e)!==null&&Kt(e)}let Tt=!1;function mr(){Tt||(Tt=!0,document.addEventListener("reset",e=>{Promise.resolve().then(()=>{if(!e.defaultPrevented)for(const t of e.target.elements)t.__on_r?.()})},{capture:!0}))}function _n(e){var t=d,n=h;L(null),G(null);try{return e()}finally{L(t),G(n)}}function Ra(e,t,n,r=n){e.addEventListener(t,()=>_n(n));const s=e.__on_r;s?e.__on_r=()=>{s(),r(!0)}:e.__on_r=()=>r(!0),mr()}const vn=new Set,Xe=new Set;function Er(e,t,n,r={}){function s(a){if(r.capture||ge.call(t,a),!a.cancelBubble)return _n(()=>n?.call(this,a))}return e.startsWith("pointer")||e.startsWith("touch")||e==="wheel"?jt(()=>{t.addEventListener(e,s,r)}):t.addEventListener(e,s,r),s}function Oa(e,t,n,r,s){var a={capture:r,passive:s},i=Er(e,t,n,a);(t===document.body||t===window||t===document||t instanceof HTMLMediaElement)&&Zt(()=>{t.removeEventListener(e,i,a)})}function Na(e){for(var t=0;t{throw U});throw c}}finally{e.__root=t,delete e.currentTarget,L(o),G(v)}}}let C;function Tr(){C=void 0}function Pa(e){let t=null,n=g;var r;if(g){for(t=w,C===void 0&&(C=A(document.head));C!==null&&(C.nodeType!==me||C.data!==it);)C=M(C);C===null?fe(!1):C=I(M(C))}g||(r=document.head.appendChild(z()));try{sr(()=>e(r),Rt)}finally{n&&(fe(!0),C=w,I(t))}}function _t(e){var t=document.createElement("template");return t.innerHTML=e.replaceAll("",""),t.content}function N(e,t){var n=h;n.nodes_start===null&&(n.nodes_start=e,n.nodes_end=t)}function Da(e,t){var n=(t&Nt)!==0,r=(t&In)!==0,s,a=!e.startsWith("");return()=>{if(g)return N(w,null),w;s===void 0&&(s=_t(a?e:""+e),n||(s=A(s)));var i=r||Wt?document.importNode(s,!0):s.cloneNode(!0);if(n){var f=A(i),u=i.lastChild;N(f,u)}else N(i,i);return i}}function xr(e,t,n="svg"){var r=!e.startsWith(""),s=(t&Nt)!==0,a=`<${n}>${r?e:""+e}`,i;return()=>{if(g)return N(w,null),w;if(!i){var f=_t(a),u=A(f);if(s)for(i=document.createDocumentFragment();A(u);)i.appendChild(A(u));else i=A(u)}var l=i.cloneNode(!0);if(s){var o=A(l),v=l.lastChild;N(o,v)}else N(l,l);return l}}function Ia(e,t){return xr(e,t,"svg")}function Ma(e=""){if(!g){var t=z(e+"");return N(t,t),t}var n=w;return n.nodeType!==st&&(n.before(n=z()),I(n)),N(n,n),n}function La(){if(g)return 
N(w,null),w;var e=document.createDocumentFragment(),t=document.createComment(""),n=z();return e.append(t,n),N(t,n),e}function Fa(e,t){if(g){h.nodes_end=w,ut();return}e!==null&&e.before(t)}function qa(e,t){var n=t==null?"":typeof t=="object"?t+"":t;n!==(e.__t??=e.nodeValue)&&(e.__t=n,e.nodeValue=n+"")}function dn(e,t){return hn(e,t)}function Ar(e,t){Ge(),t.intro=t.intro??!1;const n=t.target,r=g,s=w;try{for(var a=A(n);a&&(a.nodeType!==me||a.data!==it);)a=M(a);if(!a)throw oe;fe(!0),I(a),ut();const i=hn(e,{...t,anchor:a});if(w===null||w.nodeType!==me||w.data!==Pt)throw je(),oe;return fe(!1),i}catch(i){if(i instanceof Error&&i.message.split(` +`).some(f=>f.startsWith("https://svelte.dev/e/")))throw i;return i!==oe&&console.warn("Failed to hydrate: ",i),t.recover===!1&&On(),Ge(),Kt(n),fe(!1),dn(e,t)}finally{fe(r),I(s),Tr()}}const ie=new Map;function hn(e,{target:t,anchor:n,props:r={},events:s,context:a,intro:i=!0}){Ge();var f=new Set,u=v=>{for(var c=0;c{var v=n??t.appendChild(z());return ir(()=>{if(a){Vn({});var c=p;c.c=a}s&&(r.$$events=s),g&&N(v,null),l=e(v,r)||{},g&&(h.nodes_end=w),a&&Yn()}),()=>{for(var c of f){t.removeEventListener(c,ge);var _=ie.get(c);--_===0?(document.removeEventListener(c,ge),ie.delete(c)):ie.set(c,_)}Xe.delete(u),v!==n&&v.parentNode?.removeChild(v)}});return Ze.set(l,o),l}let Ze=new WeakMap;function Sr(e,t){const n=Ze.get(e);return n?(Ze.delete(e),n(t)):Promise.resolve()}function kr(e){return(t,...n)=>{var r=e(...n),s;if(g)s=w,ut();else{var a=r.render().trim(),i=_t(a);s=A(i),t.before(s)}const f=r.setup?.(s);N(s,s),typeof f=="function"&&Zt(f)}}function Cr(e,t,n){if(e==null)return t(void 0),Ne;const r=se(()=>e.subscribe(t,n));return r.unsubscribe?()=>r.unsubscribe():r}const ue=[];function ja(e,t=Ne){let n=null;const r=new Set;function s(f){if(It(e,f)&&(e=f,n)){const u=!ue.length;for(const l of r)l[1](),ue.push(l,e);if(u){for(let l=0;l{r.delete(l),r.size===0&&n&&(n(),n=null)}}return{set:s,update:a,subscribe:i}}function Va(e){let t;return Cr(e,n=>t=n)(),t}function Rr(){return d===null&&Rn(),(d.ac??=new AbortController).signal}function pn(e){p===null&&he(),Se&&p.l!==null?vt(p).m.push(e):nr(()=>{const t=se(e);if(typeof t=="function")return t})}function Or(e){p===null&&he(),pn(()=>()=>se(e))}function Nr(e,t,{bubbles:n=!1,cancelable:r=!1}={}){return new CustomEvent(e,{detail:t,bubbles:n,cancelable:r})}function Pr(){const e=p;return e===null&&he(),(t,n,r)=>{const s=e.s.$$events?.[t];if(s){const a=Je(s)?s.slice():[s],i=Nr(t,n,r);for(const f of a)f.call(e.x,i);return!i.defaultPrevented}return!0}}function Dr(e){p===null&&he(),p.l===null&&Ot(),vt(p).b.push(e)}function Ir(e){p===null&&he(),p.l===null&&Ot(),vt(p).a.push(e)}function vt(e){var t=e.l;return t.u??={a:[],b:[],m:[]}}const Ya=Object.freeze(Object.defineProperty({__proto__:null,afterUpdate:Ir,beforeUpdate:Dr,createEventDispatcher:Pr,createRawSnippet:kr,flushSync:$t,getAbortSignal:Rr,getAllContexts:jn,getContext:Ln,hasContext:qn,hydrate:Ar,mount:dn,onDestroy:Or,onMount:pn,setContext:Fn,settled:hr,tick:dr,unmount:Sr,untrack:se},Symbol.toStringTag,{value:"Module"}));export{er as $,Or as A,La as B,Ia as C,Ma as D,Pr as E,wa as F,Qt as G,jt as H,ja as I,Va as J,g as K,ut as L,sr as M,Ct as N,z as O,ir as P,y as Q,ha as R,Z as S,w as T,fr as U,ke as V,Ra as W,He as X,I as Y,A as Z,ra as _,ga as a,Qr as a$,Mn as a0,na as a1,fe as a2,me as a3,Pt as a4,Le as a5,ct as a6,yn as a7,Je as a8,Hr as a9,Gr as aA,ye as aB,de as aC,Kr as aD,Se as aE,zr as aF,Wr as aG,oa as aH,G as aI,Xr as aJ,pe as aK,Fr as aL,Mr as aM,Ar as aN,dn as aO,$t as 
aP,Sr as aQ,$ as aR,dr as aS,ua as aT,aa as aU,It as aV,lr as aW,je as aX,oe as aY,N as aZ,_t as a_,ma as aa,Yr as ab,Br as ac,W as ad,re as ae,M as af,rn as ag,Kt as ah,or as ai,h as aj,Ur as ak,$r as al,p as am,pa as an,nr as ao,Lr as ap,St as aq,ft as ar,sa as as,E as at,Ne as au,Cr as av,Zt as aw,$e as ax,be as ay,Vr as az,va as b,ca as b0,Zn as b1,Zr as b2,At as b3,Jr as b4,gn as b5,xa as b6,Er as b7,Na as b8,ka as b9,Sa as ba,ia as bb,mr as bc,qr as bd,Aa as be,Ya as bf,Fa as c,Yn as d,Oa as e,Da as f,K as g,Pa as h,gt as i,_a as j,da as k,ya as l,la as m,Ta as n,pn as o,Vn as p,Kn as q,ea as r,j as s,ba as t,se as u,qa as v,Ca as w,Ea as x,fa as y,ta as z}; diff --git a/webapp/assets/_app/immutable/chunks/DDhBTdDt.js b/webapp/assets/_app/immutable/chunks/DDhBTdDt.js new file mode 100644 index 00000000..7f2ab6df --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/DDhBTdDt.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as U}from"./B3Pzt0F_.js";import{f as I,j as t,k as p,r as a,t as P,v as b,c as u,z as N,D as A,p as W,u as z,n as H,d as X}from"./D8EpLgQ1.js";import{p as s,i as T}from"./5WA7h8uK.js";import{s as Y,h as Z,B as F,c as $}from"./CiE1LlKV.js";import{b as ee}from"./CoIRRsD9.js";import{D as te,G as ae,a as se}from"./C9DJVOi1.js";import{E as ne}from"./B7ITzBt8.js";import{S as B}from"./BE4wujub.js";var le=I('
                '),ie=I('
                '),re=I('

                ');function ye(L,e){let n=s(e,"title",8),S=s(e,"subtitle",8),_=s(e,"forgeIcon",8,""),f=s(e,"onEdit",8,null),h=s(e,"onDelete",8,null),k=s(e,"editLabel",8,"Edit"),j=s(e,"deleteLabel",8,"Delete"),g=s(e,"titleClass",8,"");var c=re(),v=t(c),m=t(v),y=t(m),C=t(y);{var E=i=>{var r=le(),w=t(r);Z(w,_),a(r),u(i,r)};T(C,i=>{_()&&i(E)})}var l=p(C,2),D=t(l),G=t(D,!0);a(D);var M=p(D,2),V=t(M,!0);a(M),a(l),a(y);var R=p(y,2);{var q=i=>{var r=ie(),w=t(r);{var J=o=>{F(o,{variant:"secondary",size:"md",icon:"",$$events:{click(...d){f()?.apply(this,d)}},children:(d,Q)=>{N();var x=A();P(()=>b(x,k())),u(d,x)},$$slots:{default:!0}})};T(w,o=>{f()&&o(J)})}var K=p(w,2);{var O=o=>{F(o,{variant:"danger",size:"md",icon:"",$$events:{click(...d){h()?.apply(this,d)}},children:(d,Q)=>{N();var x=A();P(()=>b(x,j())),u(d,x)},$$slots:{default:!0}})};T(K,o=>{h()&&o(O)})}a(r),u(i,r)};T(R,i=>{(f()||h())&&i(q)})}a(m),a(v),a(c),P(()=>{Y(D,1,`text-2xl font-bold text-gray-900 dark:text-white ${g()??""}`),b(G,n()),b(V,S())}),u(L,c)}var oe=I('');function xe(L,e){W(e,!1);let n=s(e,"instances",8),S=s(e,"entityType",8),_=s(e,"onDeleteInstance",8);const f=[{key:"name",title:"Name",cellComponent:ne,cellProps:{entityType:"instance",nameField:"name"}},{key:"status",title:"Status",cellComponent:B,cellProps:{statusType:"instance",statusField:"status"}},{key:"runner_status",title:"Runner Status",cellComponent:B,cellProps:{statusType:"instance",statusField:"runner_status"}},{key:"created",title:"Created",cellComponent:ae,cellProps:{field:"created_at",type:"date"}},{key:"actions",title:"Actions",align:"right",cellComponent:se,cellProps:{actions:[{type:"delete",label:"Delete",title:"Delete instance",ariaLabel:"Delete instance",action:"delete"}]}}],h={entityType:"instance",primaryText:{field:"name",isClickable:!0,href:"/instances/{name}"},secondaryText:{field:"provider_id"},badges:[{type:"status",field:"status"}],actions:[{type:"delete",handler:l=>k(l)}]};function k(l){_()(l)}function j(l){k(l.detail.item)}U();var g=oe(),c=t(g),v=t(c),m=t(v),y=t(m);a(m);var C=p(m,2);a(v);var E=p(v,2);te(E,{get columns(){return f},get data(){return n()},loading:!1,error:"",searchTerm:"",showSearch:!1,showPagination:!1,currentPage:1,get perPage(){return H(n()),z(()=>n().length)},totalPages:1,get totalItems(){return H(n()),z(()=>n().length)},itemName:"instances",emptyTitle:"No instances running",get emptyMessage(){return`No instances running for this ${S()??""}.`},emptyIconType:"cog",get mobileCardConfig(){return h},$$events:{delete:j}}),a(c),a(g),P(()=>{b(y,`Instances (${H(n()),z(()=>n().length)??""})`),$(C,"href",`${ee}/instances`)}),u(L,g),X()}export{ye as D,xe as I}; diff --git a/webapp/assets/_app/immutable/chunks/DQP15tlf.js b/webapp/assets/_app/immutable/chunks/DQP15tlf.js new file mode 100644 index 00000000..73a0c7e8 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/DQP15tlf.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as cr}from"./B3Pzt0F_.js";import{p as pr,E as vr,m as u,o as fr,s as n,f as E,j as d,r as t,k as a,g as e,t as _,x as He,u as h,z as mr,n as D,v as k,e as w,c as m,D as yr,d as xr}from"./D8EpLgQ1.js";import{p as _r,i as ge,s as hr,a as kr}from"./5WA7h8uK.js";import{e as wr,i as Er}from"./u94nIB4-.js";import{r as c,b as Ke,c as Rr}from"./CiE1LlKV.js";import{b as p,a as $r}from"./C6k1Q4We.js";import{p as Sr}from"./D4Caz1gY.js";import{M as Tr}from"./qB7B8uiS.js";import{J as Or}from"./DZblzgqm.js";import{e as Pr}from"./wyaP0EDu.js";var Jr=E('

                '),Mr=E(' '),Nr=E('
                '),Ur=E('
                Updating...
                '),Ar=E('

                Pool Information (Read-only)

                Provider:
                Entity:

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Tags
                Extra Specs (JSON)
                ');function Wr(We,ce){pr(ce,!1);const[qe,Qe]=hr(),B=()=>kr(Pr,"$eagerCache",qe);let o=_r(ce,"pool",8);const G=vr();let R=u(!1),$=u(""),S=u(o().image||""),T=u(o().flavor||""),O=u(o().max_runners),P=u(o().min_idle_runners),J=u(o().runner_bootstrap_timeout),M=u(o().priority),N=u(o().runner_prefix||""),y=u(o().os_type||"linux"),x=u(o().os_arch||"amd64"),U=u(o()["github-runner-group"]||""),A=u(o().enabled),g=u((o().tags||[]).map(i=>i.name||"").filter(Boolean)),f=u(""),v=u("{}");function Ve(i){if(i.repo_id){const s=B().repositories.find(l=>l.id===i.repo_id);return s?`${s.owner}/${s.name}`:"Unknown Entity"}if(i.org_id){const s=B().organizations.find(l=>l.id===i.org_id);return s&&s.name?s.name:"Unknown Entity"}if(i.enterprise_id){const s=B().enterprises.find(l=>l.id===i.enterprise_id);return s&&s.name?s.name:"Unknown Entity"}return"Unknown Entity"}function Xe(i){return i.repo_id?"Repository":i.org_id?"Organization":i.enterprise_id?"Enterprise":"Unknown"}fr(()=>{if(o().extra_specs)try{if(typeof o().extra_specs=="object")n(v,JSON.stringify(o().extra_specs,null,2));else{const i=JSON.parse(o().extra_specs);n(v,JSON.stringify(i,null,2))}}catch{n(v,o().extra_specs||"{}")}});function pe(){e(f).trim()&&!e(g).includes(e(f).trim())&&(n(g,[...e(g),e(f).trim()]),n(f,""))}function Ye(i){n(g,e(g).filter((s,l)=>l!==i))}function Ze(i){i.key==="Enter"&&(i.preventDefault(),pe())}async function er(){try{n(R,!0),n($,"");let i={};if(e(v).trim())try{i=JSON.parse(e(v))}catch{throw new Error("Invalid JSON in extra specs")}const s={image:e(S)!==o().image?e(S):void 0,flavor:e(T)!==o().flavor?e(T):void 0,max_runners:e(O)!==o().max_runners?e(O):void 0,min_idle_runners:e(P)!==o().min_idle_runners?e(P):void 0,runner_bootstrap_timeout:e(J)!==o().runner_bootstrap_timeout?e(J):void 0,priority:e(M)!==o().priority?e(M):void 0,runner_prefix:e(N)!==o().runner_prefix?e(N):void 0,os_type:e(y)!==o().os_type?e(y):void 0,os_arch:e(x)!==o().os_arch?e(x):void 0,"github-runner-group":e(U)!==o()["github-runner-group"]&&e(U)||void 0,enabled:e(A)!==o().enabled?e(A):void 0,tags:JSON.stringify(e(g))!==JSON.stringify((o().tags||[]).map(l=>l.name||"").filter(Boolean))?e(g):void 0,extra_specs:e(v).trim()!==JSON.stringify(o().extra_specs||{},null,2).trim()?i:void 0};Object.keys(s).forEach(l=>{s[l]===void 0&&delete s[l]}),G("submit",s)}catch(i){n($,i instanceof Error?i.message:"Failed to update pool")}finally{n(R,!1)}}cr(),Tr(We,{$$events:{close:()=>G("close")},children:(i,s)=>{var l=Ar(),z=d(l),ve=d(z),rr=d(ve);t(ve),t(z);var L=a(z,2),fe=d(L);{var tr=r=>{var b=Jr(),j=d(b),C=d(j,!0);t(j),t(b),_(()=>k(C,e($))),m(r,b)};ge(fe,r=>{e($)&&r(tr)})}var F=a(fe,2),me=a(d(F),2),H=d(me),ye=a(d(H),2),ar=d(ye,!0);t(ye),t(H);var xe=a(H,2),_e=a(d(xe),2),dr=d(_e);t(_e),t(xe),t(me),t(F);var K=a(F,2),he=a(d(K),2),W=d(he),ke=a(d(W),2);c(ke),t(W);var q=a(W,2),we=a(d(q),2);c(we),t(q);var Q=a(q,2),V=a(d(Q),2);_(()=>{e(y),He(()=>{})});var X=d(V);X.value=X.__value="linux";var Ee=a(X);Ee.value=Ee.__value="windows",t(V),t(Q);var Re=a(Q,2),Y=a(d(Re),2);_(()=>{e(x),He(()=>{})});var Z=d(Y);Z.value=Z.__value="amd64";var $e=a(Z);$e.value=$e.__value="arm64",t(Y),t(Re),t(he),t(K);var ee=a(K,2),Se=a(d(ee),2),re=d(Se),Te=a(d(re),2);c(Te),t(re);var te=a(re,2),Oe=a(d(te),2);c(Oe),t(te);var Pe=a(te,2),Je=a(d(Pe),2);c(Je),t(Pe),t(Se),t(ee);var ae=a(ee,2),de=a(d(ae),2),oe=d(de),Me=a(d(oe),2);c(Me),t(oe);var ie=a(oe,2),Ne=a(d(ie),2);c(Ne),t(ie);var Ue=a(ie,2),Ae=a(d(Ue),2);c(Ae),t(Ue),t(de);var ne=a(de,2),Ie=d(ne),je=a(d(Ie),2),se=d(je),I=d(se);c(I);var or=a(I,2);t(se);var 
ir=a(se,2);{var nr=r=>{var b=Nr();wr(b,5,()=>e(g),Er,(j,C,gr)=>{var be=Mr(),Le=d(be),Fe=a(Le);t(be),_(()=>{k(Le,`${e(C)??""} `),Rr(Fe,"aria-label",`Remove tag ${e(C)??""}`)}),w("click",Fe,()=>Ye(gr)),m(j,be)}),t(b),m(r,b)};ge(ir,r=>{e(g),h(()=>e(g).length>0)&&r(nr)})}t(je),t(Ie),t(ne);var le=a(ne,2),Ce=d(le),sr=a(d(Ce),2);Or(sr,{rows:4,placeholder:"{}",get value(){return e(v)},set value(r){n(v,r)},$$legacy:!0}),t(Ce),t(le);var De=a(le,2),Be=d(De);c(Be),mr(2),t(De),t(ae);var Ge=a(ae,2),ze=d(Ge),ue=a(ze,2),lr=d(ue);{var ur=r=>{var b=Ur();m(r,b)},br=r=>{var b=yr("Update Pool");m(r,b)};ge(lr,r=>{e(R)?r(ur):r(br,!1)})}t(ue),t(Ge),t(L),t(l),_((r,b)=>{k(rr,`Update Pool ${D(o()),h(()=>o().id)??""}`),k(ar,(D(o()),h(()=>o().provider_name))),k(dr,`${r??""}: ${b??""}`),ue.disabled=e(R)},[()=>(D(o()),h(()=>Xe(o()))),()=>(D(o()),h(()=>Ve(o())))]),p(ke,()=>e(S),r=>n(S,r)),p(we,()=>e(T),r=>n(T,r)),Ke(V,()=>e(y),r=>n(y,r)),Ke(Y,()=>e(x),r=>n(x,r)),p(Te,()=>e(P),r=>n(P,r)),p(Oe,()=>e(O),r=>n(O,r)),p(Je,()=>e(J),r=>n(J,r)),p(Me,()=>e(N),r=>n(N,r)),p(Ne,()=>e(M),r=>n(M,r)),p(Ae,()=>e(U),r=>n(U,r)),p(I,()=>e(f),r=>n(f,r)),w("keydown",I,Ze),w("click",or,pe),$r(Be,()=>e(A),r=>n(A,r)),w("click",ze,()=>G("close")),w("submit",L,Sr(er)),m(i,l)},$$slots:{default:!0}}),xr(),Qe()}export{Wr as U}; diff --git a/webapp/assets/_app/immutable/chunks/DZblzgqm.js b/webapp/assets/_app/immutable/chunks/DZblzgqm.js new file mode 100644 index 00000000..3b77c940 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/DZblzgqm.js @@ -0,0 +1,4 @@ +import"./DsnmJJEf.js";import{i as g}from"./B3Pzt0F_.js";import{p as k,l as x,s as d,m as w,n as y,a as J,f as m,j as z,w as j,k as L,g as c,r as B,t as C,c as n,d as E}from"./D8EpLgQ1.js";import{p as o,i as M}from"./5WA7h8uK.js";import{c as f,s as N}from"./CiE1LlKV.js";import{b as O}from"./C6k1Q4We.js";var S=m('
                '),V=m('
                ');function I(p,r){k(r,!1);let t=o(r,"value",12,""),u=o(r,"placeholder",8,"{}"),b=o(r,"rows",8,4),i=o(r,"disabled",8,!1),a=w(!0);x(()=>y(t()),()=>{if(t().trim())try{JSON.parse(t()),d(a,!0)}catch{d(a,!1)}else d(a,!0)}),J(),g();var l=V(),e=z(l);j(e);var v=L(e,2);{var h=s=>{var _=S();n(s,_)};M(v,s=>{c(a)||s(h)})}B(l),C(()=>{f(e,"placeholder",u()),f(e,"rows",b()),e.disabled=i(),N(e,1,`w-full px-3 py-2 border rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 font-mono text-sm resize-none + ${c(a)?"border-gray-300 dark:border-gray-600 bg-white dark:bg-gray-700 text-gray-900 dark:text-white":"border-red-300 dark:border-red-600 bg-red-50 dark:bg-red-900/20 text-red-900 dark:text-red-100"} + ${i()?"opacity-50 cursor-not-allowed":""} + `)}),O(e,t),n(p,l),E()}export{I as J}; diff --git a/webapp/assets/_app/immutable/chunks/Dbd6PPbz.js b/webapp/assets/_app/immutable/chunks/Dbd6PPbz.js new file mode 100644 index 00000000..cca64cf0 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/Dbd6PPbz.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as ae}from"./B3Pzt0F_.js";import{p as se,E as re,l as P,n as ie,s as r,g as t,m as k,a as le,f as p,j as v,k as U,r as f,c as l,d as oe,B as T,b as $,z as V,D as q,t as E,v as N,u as ne}from"./D8EpLgQ1.js";import{p as R,i as m}from"./5WA7h8uK.js";import{g as u,B as G}from"./CiE1LlKV.js";import{t as y}from"./BEkVdVE1.js";var de=p('
                Checking...
                '),ce=p('
                '),ve=p('
                Webhook installed
                ',1),fe=p('
                No webhook installed
                '),ue=p('

                Webhook Status

                ');function ye(H,g){se(g,!1);const x=k();let h=R(g,"entityType",8),s=R(g,"entityId",8),j=R(g,"entityName",8),i=k(null),o=k(!1),b=k(!0);const O=re();async function _(){if(s())try{r(b,!0),h()==="repository"?r(i,await u.getRepositoryWebhookInfo(s())):r(i,await u.getOrganizationWebhookInfo(s()))}catch(e){e&&typeof e=="object"&&"response"in e&&e.response?.status===404?r(i,null):(console.warn("Failed to check webhook status:",e),r(i,null))}finally{r(b,!1)}}async function J(){if(s())try{r(o,!0),h()==="repository"?await u.installRepositoryWebhook(s()):await u.installOrganizationWebhook(s()),y.success("Webhook Installed",`Webhook for ${h()} ${j()} has been installed successfully.`),await _(),O("webhookStatusChanged",{installed:!0})}catch(e){y.error("Webhook Installation Failed",e instanceof Error?e.message:"Failed to install webhook.")}finally{r(o,!1)}}async function K(){if(s())try{r(o,!0),h()==="repository"?await u.uninstallRepositoryWebhook(s()):await u.uninstallOrganizationWebhook(s()),y.success("Webhook Uninstalled",`Webhook for ${h()} ${j()} has been uninstalled successfully.`),await _(),O("webhookStatusChanged",{installed:!1})}catch(e){y.error("Webhook Uninstall Failed",e instanceof Error?e.message:"Failed to uninstall webhook.")}finally{r(o,!1)}}P(()=>ie(s()),()=>{s()&&_()}),P(()=>t(i),()=>{r(x,t(i)&&t(i).active)}),le(),ae();var w=ue(),A=v(w),D=v(A),W=v(D),L=U(v(W),2),Q=v(L);{var X=e=>{var d=de();l(e,d)},Y=e=>{var d=T(),z=$(d);{var I=a=>{var n=ve(),B=U($(n),2);{var c=C=>{var F=ce(),te=v(F);f(F),E(()=>N(te,`URL: ${t(i),ne(()=>t(i).url||"N/A")??""}`)),l(C,F)};m(B,C=>{t(i)&&C(c)})}l(a,n)},S=a=>{var n=fe();l(a,n)};m(z,a=>{t(x)?a(I):a(S,!1)},!0)}l(e,d)};m(Q,e=>{t(b)?e(X):e(Y,!1)})}f(L),f(W);var M=U(W,2),Z=v(M);{var ee=e=>{var d=T(),z=$(d);{var I=a=>{G(a,{variant:"danger",size:"sm",get disabled(){return t(o)},$$events:{click:K},children:(n,B)=>{V();var c=q();E(()=>N(c,t(o)?"Uninstalling...":"Uninstall")),l(n,c)},$$slots:{default:!0}})},S=a=>{G(a,{variant:"primary",size:"sm",get disabled(){return t(o)},$$events:{click:J},children:(n,B)=>{V();var c=q();E(()=>N(c,t(o)?"Installing...":"Install Webhook")),l(n,c)},$$slots:{default:!0}})};m(z,a=>{t(x)?a(I):a(S,!1)})}l(e,d)};m(Z,e=>{t(b)||e(ee)})}f(M),f(D),f(A),f(w),l(H,w),oe()}export{ye as W}; diff --git a/webapp/assets/_app/immutable/chunks/DsnmJJEf.js b/webapp/assets/_app/immutable/chunks/DsnmJJEf.js new file mode 100644 index 00000000..ca27dc73 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/DsnmJJEf.js @@ -0,0 +1 @@ +typeof window<"u"&&((window.__svelte??={}).v??=new Set).add("5"); diff --git a/webapp/assets/_app/immutable/chunks/KQ2xQpA3.js b/webapp/assets/_app/immutable/chunks/KQ2xQpA3.js new file mode 100644 index 00000000..c94aef62 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/KQ2xQpA3.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as q}from"./B3Pzt0F_.js";import{p as A,E as F,f as y,k as l,j as e,r as a,z as $,D as b,c as o,t as p,v as n,d as G}from"./D8EpLgQ1.js";import{p as v,i as H}from"./5WA7h8uK.js";import{M as I}from"./qB7B8uiS.js";import{B as w}from"./CiE1LlKV.js";var J=y('

                '),K=y('

                ');function W(D,s){A(s,!1);let j=v(s,"title",8),M=v(s,"message",8),g=v(s,"itemName",8,""),d=v(s,"loading",8,!1);const c=F();function B(){c("confirm")}q(),I(D,{$$events:{close:()=>c("close")},children:(C,O)=>{var m=K(),f=l(e(m),2),u=e(f),P=e(u,!0);a(u);var h=l(u,2),x=e(h),z=e(x,!0);a(x);var E=l(x,2);{var L=t=>{var i=J(),r=e(i,!0);a(i),p(()=>n(r,g())),o(t,i)};H(E,t=>{g()&&t(L)})}a(h),a(f);var _=l(f,2),k=e(_);w(k,{variant:"secondary",get disabled(){return d()},$$events:{click:()=>c("close")},children:(t,i)=>{$();var r=b("Cancel");o(t,r)},$$slots:{default:!0}});var N=l(k,2);w(N,{variant:"danger",get disabled(){return d()},get loading(){return d()},$$events:{click:B},children:(t,i)=>{$();var r=b();p(()=>n(r,d()?"Deleting...":"Delete")),o(t,r)},$$slots:{default:!0}}),a(_),a(m),p(()=>{n(P,j()),n(z,M())}),o(C,m)},$$slots:{default:!0}}),G()}export{W as D}; diff --git a/webapp/assets/_app/immutable/chunks/duD3WMbl.js b/webapp/assets/_app/immutable/chunks/duD3WMbl.js new file mode 100644 index 00000000..50fdf414 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/duD3WMbl.js @@ -0,0 +1 @@ +import{I as w}from"./D8EpLgQ1.js";import{g as r}from"./CiE1LlKV.js";const m=!0,z=m,I=()=>window.location.port==="5173",b={isAuthenticated:!1,user:null,loading:!0,needsInitialization:!1},n=w(b);function f(t,a,e=7){const i=new Date;i.setTime(i.getTime()+e*24*60*60*1e3),document.cookie=`${t}=${a};expires=${i.toUTCString()};path=/;SameSite=Lax`}function d(t){const a=t+"=",e=document.cookie.split(";");for(let i=0;i({...i,loading:!0}));const e=await r.login({username:t,password:a});z&&(f("garm_token",e.token),f("garm_user",t)),r.setToken(e.token),n.set({isAuthenticated:!0,user:t,loading:!1,needsInitialization:!1})}catch(e){throw n.update(i=>({...i,loading:!1})),e}},logout(){g("garm_token"),g("garm_user"),n.set({isAuthenticated:!1,user:null,loading:!1,needsInitialization:!1})},async init(){try{n.update(e=>({...e,loading:!0})),await c.checkInitializationStatus();const t=d("garm_token"),a=d("garm_user");if(t&&a&&(r.setToken(t),await c.checkAuth())){n.set({isAuthenticated:!0,user:a,loading:!1,needsInitialization:!1});return}n.update(e=>({...e,loading:!1,needsInitialization:!1}))}catch{n.update(a=>({...a,loading:!1}))}},async checkInitializationStatus(){try{const t={Accept:"application/json"},a=d("garm_token"),e=I();e&&a&&(t.Authorization=`Bearer ${a}`);const i=await fetch("/api/v1/login",{method:"GET",headers:t,credentials:e?"omit":"include"});if(!i.ok){if(i.status===409&&(await i.json()).error==="init_required")throw n.update(s=>({...s,needsInitialization:!0,loading:!1})),new Error("Initialization required");return}return}catch(t){if(t instanceof Error&&t.message==="Initialization required")throw t;return}},async checkAuth(){try{return await c.checkInitializationStatus(),await r.getControllerInfo(),!0}catch(t){return t instanceof Error&&t.message==="Initialization required"?!1:t?.response?.status===409&&t?.response?.data?.error==="init_required"?(n.update(a=>({...a,needsInitialization:!0,loading:!1})),!1):(c.logout(),!1)}},async initialize(t,a,e,i,o){try{n.update(u=>({...u,loading:!0}));const s=await r.firstRun({username:t,email:a,password:e,full_name:i||t});await c.login(t,e);const l=window.location.origin,h=o?.metadataUrl||`${l}/api/v1/metadata`,p=o?.callbackUrl||`${l}/api/v1/callbacks`,k=o?.webhookUrl||`${l}/webhooks`;await r.updateController({metadata_url:h,callback_url:p,webhook_url:k}),n.update(u=>({...u,needsInitialization:!1}))}catch(s){throw n.update(l=>({...l,loading:!1})),s}}};export{n as a,c 
as b}; diff --git a/webapp/assets/_app/immutable/chunks/ow_oMtSd.js b/webapp/assets/_app/immutable/chunks/ow_oMtSd.js new file mode 100644 index 00000000..a2bd2eaf --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/ow_oMtSd.js @@ -0,0 +1 @@ +function a(e){return e?e.replace(/_/g," ").toLowerCase().split(" ").map(r=>r.charAt(0).toUpperCase()+r.slice(1)).join(" "):""}function g(e){if(!e)return"bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20";switch(e.toLowerCase()){case"running":case"online":return"bg-green-50 text-green-700 ring-green-600/20 dark:bg-green-500/10 dark:text-green-400 dark:ring-green-500/20";case"idle":case"stopped":return"bg-blue-50 text-blue-700 ring-blue-600/20 dark:bg-blue-500/10 dark:text-blue-400 dark:ring-blue-500/20";case"active":return"bg-yellow-50 text-yellow-700 ring-yellow-600/20 dark:bg-yellow-500/10 dark:text-yellow-400 dark:ring-yellow-500/20";case"creating":case"installing":case"pending_create":case"provisioning":return"bg-purple-50 text-purple-700 ring-purple-600/20 dark:bg-purple-500/10 dark:text-purple-400 dark:ring-purple-500/20 animate-pulse";case"deleting":case"terminating":case"pending_delete":case"destroying":return"bg-orange-50 text-orange-700 ring-orange-600/20 dark:bg-orange-500/10 dark:text-orange-400 dark:ring-orange-500/20 animate-pulse";case"failed":case"error":case"terminated":case"offline":return"bg-red-50 text-red-700 ring-red-600/20 dark:bg-red-500/10 dark:text-red-400 dark:ring-red-500/20";case"pending":case"unknown":return"bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20 animate-pulse";default:return"bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20"}}export{a as f,g}; diff --git a/webapp/assets/_app/immutable/chunks/qB7B8uiS.js b/webapp/assets/_app/immutable/chunks/qB7B8uiS.js new file mode 100644 index 00000000..b4282e10 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/qB7B8uiS.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as u}from"./B3Pzt0F_.js";import{p as v,E as m,f as h,j as r,r as d,e as t,c as k,d as g}from"./D8EpLgQ1.js";import{d as b}from"./CiE1LlKV.js";var w=h('');function j(s,i){v(i,!1);const l=m();function n(){l("close")}function c(o){o.stopPropagation()}function f(o){o.key==="Escape"&&l("close")}u();var a=w(),e=r(a),p=r(e);b(p,i,"default",{}),d(e),d(a),t("click",e,c),t("click",a,n),t("keydown",a,f),k(s,a),g()}export{j as M}; diff --git a/webapp/assets/_app/immutable/chunks/u94nIB4-.js b/webapp/assets/_app/immutable/chunks/u94nIB4-.js new file mode 100644 index 00000000..0d15509c --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/u94nIB4-.js @@ -0,0 +1 @@ +import{O as K,M as te,Y as F,K as M,Z as ae,L as re,g as $,q as oe,_ as ie,a0 as se,a1 as J,a2 as G,T as L,a3 as le,a4 as ce,P as q,R as fe,Q as ue,a5 as V,m as de,a6 as B,a7 as Q,a8 as ve,a9 as D,aa as X,U as he,ab as R,ac as Se,ad as P,ae as Z,af as me,ag as _e,ah as we,ai as Ee,aj as pe,ak as z,H as be,al as ge,I as Ie}from"./D8EpLgQ1.js";function Te(c,o){return o}function Ne(c,o,n){for(var t=c.items,u=[],S=o.length,s=0;s0&&u.length===0&&n!==null;if(d){var N=n.parentNode;we(N),N.append(n),t.clear(),T(c,o[0].prev,o[S-1].next)}Ee(u,()=>{for(var m=0;m{var _=n();return ve(_)?_:_==null?[]:Q(_)}),r,v;function f(){ke(v,r,d,E,s,u,o,t,n),S!==null&&(r.length===0?i?X(i):i=q(()=>S(s)):i!==null&&he(i,()=>{i=null}))}te(()=>{v??=pe,r=$(x);var _=r.length;if(g&&_===0)return;g=_===0;let p=!1;if(M){var 
k=ie(s)===se;k!==(_===0)&&(s=J(),F(s),G(!1),p=!0)}if(M){for(var C=null,b,e=0;e<_;e++){if(L.nodeType===le&&L.data===ce){s=L,p=!0,G(!1);break}var a=r[e],l=t(a,e);b=Y(L,d,C,null,a,l,e,u,o,n),d.items.set(l,b),C=b}_>0&&F(J())}if(M)_===0&&S&&(i=q(()=>S(s)));else if(fe()){var w=new Set,O=ue;for(e=0;e<_;e+=1){a=r[e],l=t(a,e);var h=d.items.get(l)??E.get(l);h?(o&(R|D))!==0&&j(h,a,e,o):(b=Y(null,d,null,null,a,l,e,u,o,n,!0),E.set(l,b)),w.add(l)}for(const[I,W]of d.items)w.has(I)||O.skipped_effects.add(W.e);O.add_callback(f)}else f();p&&G(!0),$(x)}),M&&(s=L)}function ke(c,o,n,t,u,S,s,d,N){var m=(s&ge)!==0,i=(s&(R|D))!==0,g=o.length,E=n.items,x=n.first,r=x,v,f=null,_,p=[],k=[],C,b,e,a;if(m)for(a=0;a0){var ee=(s&z)!==0&&g===0?u:null;if(m){for(a=0;a{if(_!==void 0)for(e of _)e.a?.apply()}),c.first=n.first&&n.first.e,c.last=f&&f.e;for(var ne of t.values())Z(ne.e);t.clear()}function j(c,o,n,t){(t&R)!==0&&V(c.v,o),(t&D)!==0?V(c.i,n):c.i=n}function Y(c,o,n,t,u,S,s,d,N,m,i){var g=(N&R)!==0,E=(N&Se)===0,x=g?E?de(u,!1,!1):B(u):u,r=(N&D)===0?s:B(s),v={i:r,v:x,k:S,a:null,e:null,prev:n,next:t};try{if(c===null){var f=document.createDocumentFragment();f.append(c=K())}return v.e=q(()=>d(c,x,r,m),M),v.e.prev=n&&n.e,v.e.next=t&&t.e,n===null?i||(o.first=v):(n.next=v,n.e.next=v.e),t!==null&&(t.prev=v,t.e.prev=v.e),v}finally{}}function U(c,o,n){for(var t=c.next?c.next.e.nodes_start:n,u=o?o.e.nodes_start:n,S=c.e.nodes_start;S!==null&&S!==t;){var s=me(S);u.before(S),S=s}}function T(c,o,n){o===null?c.first=n:(o.next=n,o.e.next=n&&n.e),n!==null&&(n.prev=o,n.e.prev=o&&o.e)}function Ce(){const{subscribe:c,set:o,update:n}=Ie({connected:!1,connecting:!1,error:null,lastEvent:null});let t=null,u=0,S=50,s=1e3,d=1e3,N=3e4,m=null,i=[],g=!1;const E=new Map;function x(){const e=window.location.protocol==="https:"?"wss:":"ws:",a=window.location.host;return`${e}//${a}/api/v1/ws/events`}function r(){if(!(t&&(t.readyState===WebSocket.CONNECTING||t.readyState===WebSocket.OPEN))){g=!1,n(e=>({...e,connecting:!0,error:null}));try{const e=x();t=new WebSocket(e);const a=setTimeout(()=>{t&&t.readyState===WebSocket.CONNECTING&&t.close()},1e4);t.onopen=()=>{clearTimeout(a),u=0,d=s,n(l=>({...l,connected:!0,connecting:!1,error:null})),i.length>0&&p(i)},t.onmessage=l=>{try{const w=JSON.parse(l.data);n(h=>({...h,lastEvent:w})),(E.get(w["entity-type"])||[]).forEach(h=>{try{h(w)}catch(I){console.error("[WebSocket] Error in event callback:",I)}})}catch(w){console.error("[WebSocket] Error parsing message:",w)}},t.onclose=l=>{clearTimeout(a);const w=l.code===1e3&&g,O=l.code!==1e3?`Connection closed: ${l.reason||"Unknown reason"}`:null;n(h=>({...h,connected:!1,connecting:!1,error:O})),w||_()},t.onerror=l=>{clearTimeout(a),n(w=>({...w,connected:!1,connecting:!1,error:"WebSocket connection error"})),g||_()}}catch(e){n(a=>({...a,connected:!1,connecting:!1,error:e instanceof Error?e.message:"Failed to connect"}))}}}function v(){}function f(){}function _(){if(g)return;m&&clearTimeout(m),u++,u>S&&(u=1,d=s);const e=Math.min(d,N);m=window.setTimeout(()=>{if(!g){r();const a=Math.random()*1e3;d=Math.min(d*1.5+a,N)}},e)}function p(e){if(t&&t.readyState===WebSocket.OPEN){const a={"send-everything":!1,filters:e};t.send(JSON.stringify(a)),i=[...e]}}function k(){g=!0,m&&(clearTimeout(m),m=null),t&&(t.close(1e3,"Manual disconnect"),t=null),E.clear(),i=[],n(e=>({...e,connected:!1,connecting:!1,error:null,lastEvent:null}))}function C(){navigator.onLine&&!g&&setTimeout(()=>{(!t||t.readyState===WebSocket.CLOSED||t.readyState===WebSocket.CLOSING)&&(u=0,d=s,r())},2e3)}typeof 
window<"u"&&(window.addEventListener("online",C),window.addEventListener("offline",()=>{n(e=>({...e,error:"Network offline"}))}),setInterval(()=>{g||(!t||t.readyState===WebSocket.CLOSED||t.readyState===WebSocket.CLOSING)&&r()},1e4));function b(e,a,l){E.has(e)||E.set(e,[]),E.get(e).push(l);const w=i.findIndex(h=>h["entity-type"]===e),O={"entity-type":e,operations:a};if(w>=0){const h=i[w].operations;O.operations=Array.from(new Set([...h,...a])),i[w]=O}else i.push(O);return t&&t.readyState===WebSocket.OPEN&&p(i),(!t||t.readyState===WebSocket.CLOSED||t.readyState===WebSocket.CLOSING)&&r(),()=>{const h=E.get(e);if(h){const I=h.indexOf(l);if(I>-1&&h.splice(I,1),h.length===0){E.delete(e);const W=i.findIndex(A=>A["entity-type"]===e);W>-1&&(i.splice(W,1),t&&t.readyState===WebSocket.OPEN&&p(i))}}}}return typeof window<"u"&&r(),{subscribe:c,connect:r,disconnect:k,subscribeToEntity:b}}const We=Ce();export{xe as e,Te as i,We as w}; diff --git a/webapp/assets/_app/immutable/chunks/wyaP0EDu.js b/webapp/assets/_app/immutable/chunks/wyaP0EDu.js new file mode 100644 index 00000000..7650f5b9 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/wyaP0EDu.js @@ -0,0 +1 @@ +import{I as p,J as l}from"./D8EpLgQ1.js";import{g as d}from"./CiE1LlKV.js";import{w as r}from"./u94nIB4-.js";const f={repositories:[],organizations:[],enterprises:[],pools:[],scalesets:[],credentials:[],endpoints:[],controllerInfo:null,loading:{repositories:!1,organizations:!1,enterprises:!1,pools:!1,scalesets:!1,credentials:!1,endpoints:!1,controllerInfo:!1},loaded:{repositories:!1,organizations:!1,enterprises:!1,pools:!1,scalesets:!1,credentials:!1,endpoints:!1,controllerInfo:!1},errorMessages:{repositories:"",organizations:"",enterprises:"",pools:"",scalesets:"",credentials:"",endpoints:"",controllerInfo:""}},a=p(f);class u{unsubscribers=[];loadingPromises=new Map;retryAttempts=new Map;MAX_RETRIES=3;RETRY_DELAY_MS=1e3;websocketStatusUnsubscriber=null;async loadResource(e,t=!1){if(this.loadingPromises.has(e))return this.loadingPromises.get(e);a.update(o=>({...o,loading:{...o.loading,[e]:!0},errorMessages:{...o.errorMessages,[e]:""}}));const s=this.attemptLoad(e);this.loadingPromises.set(e,s);try{const o=await s;return a.update(n=>({...n,[e]:o,loading:{...n.loading,[e]:!1},loaded:{...n.loaded,[e]:!0},errorMessages:{...n.errorMessages,[e]:""}})),this.retryAttempts.delete(e),t&&this.startBackgroundLoading(e),o}catch(o){const n=o instanceof Error?o.message:"Failed to load data";throw a.update(i=>({...i,loading:{...i.loading,[e]:!1},errorMessages:{...i.errorMessages,[e]:n}})),console.error(`Failed to load ${e}:`,o),o}finally{this.loadingPromises.delete(e)}}async attemptLoad(e){const t=(this.retryAttempts.get(e)||0)+1;this.retryAttempts.set(e,t);try{let s;switch(e){case"repositories":s=d.listRepositories();break;case"organizations":s=d.listOrganizations();break;case"enterprises":s=d.listEnterprises();break;case"pools":s=d.listAllPools();break;case"scalesets":s=d.listScaleSets();break;case"credentials":s=d.listAllCredentials();break;case"endpoints":s=d.listAllEndpoints();break;case"controllerInfo":s=d.getControllerInfo();break;default:throw new Error(`Unknown resource type: ${e}`)}return await s}catch(s){if(tsetTimeout(n,o)),this.attemptLoad(e)}else throw console.error(`All ${this.MAX_RETRIES} attempts failed for ${e}:`,s),s}}async startBackgroundLoading(e){const s=["repositories","organizations","enterprises","pools","scalesets","credentials","endpoints"].filter(o=>o!==e);for(const o of 
s)setTimeout(()=>{this.loadResource(o,!1).catch(n=>{console.warn(`Background loading failed for ${o}:`,n)})},100*s.indexOf(o))}retryResource(e){return this.retryAttempts.delete(e),this.loadResource(e,!0)}setupWebSocketSubscriptions(){this.cleanup();const e=[r.subscribeToEntity("repository",["create","update","delete"],this.handleRepositoryEvent.bind(this)),r.subscribeToEntity("organization",["create","update","delete"],this.handleOrganizationEvent.bind(this)),r.subscribeToEntity("enterprise",["create","update","delete"],this.handleEnterpriseEvent.bind(this)),r.subscribeToEntity("pool",["create","update","delete"],this.handlePoolEvent.bind(this)),r.subscribeToEntity("scaleset",["create","update","delete"],this.handleScaleSetEvent.bind(this)),r.subscribeToEntity("controller",["update"],this.handleControllerEvent.bind(this)),r.subscribeToEntity("github_credentials",["create","update","delete"],this.handleCredentialsEvent.bind(this)),r.subscribeToEntity("gitea_credentials",["create","update","delete"],this.handleCredentialsEvent.bind(this)),r.subscribeToEntity("github_endpoint",["create","update","delete"],this.handleEndpointEvent.bind(this))];this.unsubscribers=e,this.setupWebSocketStatusMonitoring()}setupWebSocketStatusMonitoring(){this.websocketStatusUnsubscriber&&this.websocketStatusUnsubscriber();let e=!1;this.websocketStatusUnsubscriber=r.subscribe(t=>{t.connected&&!e&&(console.log("[EagerCache] WebSocket connected - reinitializing cache"),this.initializeAllResources()),e=t.connected})}async initializeAllResources(){const t=["repositories","organizations","enterprises","pools","scalesets","credentials","endpoints","controllerInfo"].map(s=>this.loadResource(s,!0).catch(o=>{console.warn(`Failed to reload ${s} on WebSocket reconnect:`,o)}));await Promise.allSettled(t)}handleRepositoryEvent(e){a.update(t=>{if(!t.loaded.repositories)return t;const s=[...t.repositories],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,repositories:s}})}handleOrganizationEvent(e){a.update(t=>{if(!t.loaded.organizations)return t;const s=[...t.organizations],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,organizations:s}})}handleEnterpriseEvent(e){a.update(t=>{if(!t.loaded.enterprises)return t;const s=[...t.enterprises],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,enterprises:s}})}handlePoolEvent(e){a.update(t=>{if(!t.loaded.pools)return t;const s=[...t.pools],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,pools:s}})}handleScaleSetEvent(e){a.update(t=>{if(!t.loaded.scalesets)return t;const s=[...t.scalesets],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const 
n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,scalesets:s}})}handleCredentialsEvent(e){a.update(t=>{if(!t.loaded.credentials)return t;const s=[...t.credentials],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.id===o.id);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.id:o,i=s.findIndex(c=>c.id===n);i!==-1&&s.splice(i,1)}return{...t,credentials:s}})}handleEndpointEvent(e){a.update(t=>{if(!t.loaded.endpoints)return t;const s=[...t.endpoints],o=e.payload;if(e.operation==="create")s.push(o);else if(e.operation==="update"){const n=s.findIndex(i=>i.name===o.name);n!==-1&&(s[n]=o)}else if(e.operation==="delete"){const n=typeof o=="object"?o.name:o,i=s.findIndex(c=>c.name===n);i!==-1&&s.splice(i,1)}return{...t,endpoints:s}})}cleanup(){this.unsubscribers.forEach(e=>e()),this.unsubscribers=[],this.websocketStatusUnsubscriber&&(this.websocketStatusUnsubscriber(),this.websocketStatusUnsubscriber=null)}shouldUseCache(){return l(r).connected}async getRepositories(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching repositories directly from API"),await d.listRepositories();const t=l(a);return t.loaded.repositories?t.repositories:this.loadResource("repositories",!0)}async getOrganizations(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching organizations directly from API"),await d.listOrganizations();const t=l(a);return t.loaded.organizations?t.organizations:this.loadResource("organizations",!0)}async getEnterprises(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching enterprises directly from API"),await d.listEnterprises();const t=l(a);return t.loaded.enterprises?t.enterprises:this.loadResource("enterprises",!0)}async getPools(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching pools directly from API"),await d.listAllPools();const t=l(a);return t.loaded.pools?t.pools:this.loadResource("pools",!0)}async getScaleSets(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching scalesets directly from API"),await d.listScaleSets();const t=l(a);return t.loaded.scalesets?t.scalesets:this.loadResource("scalesets",!0)}async getCredentials(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching credentials directly from API"),await d.listAllCredentials();const t=l(a);return t.loaded.credentials?t.credentials:this.loadResource("credentials",!0)}async getEndpoints(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching endpoints directly from API"),await d.listAllEndpoints();const t=l(a);return t.loaded.endpoints?t.endpoints:this.loadResource("endpoints",!0)}async getControllerInfo(){if(!l(r).connected)return console.log("[EagerCache] WebSocket disconnected - fetching controller info directly from API"),await d.getControllerInfo();const t=l(a);return t.loaded.controllerInfo?t.controllerInfo:this.loadResource("controllerInfo",!0)}handleControllerEvent(e){a.update(t=>{if(!t.loaded.controllerInfo)return t;const s=e.payload;return e.operation==="update"?{...t,controllerInfo:s}:t})}}const h=new u;typeof window<"u"&&h.setupWebSocketSubscriptions();export{h as a,a as e}; diff --git a/webapp/assets/_app/immutable/entry/app.kAVAdeq9.js b/webapp/assets/_app/immutable/entry/app.kAVAdeq9.js new file mode 100644 index 00000000..f6063c71 --- /dev/null +++ 
b/webapp/assets/_app/immutable/entry/app.kAVAdeq9.js @@ -0,0 +1,2 @@ +const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["../nodes/0.DINiyk_8.js","../chunks/DsnmJJEf.js","../chunks/B3Pzt0F_.js","../chunks/D8EpLgQ1.js","../chunks/5WA7h8uK.js","../chunks/CiE1LlKV.js","../chunks/C41YH50Q.js","../chunks/CTf6mQoE.js","../chunks/CoIRRsD9.js","../chunks/duD3WMbl.js","../chunks/u94nIB4-.js","../chunks/BEkVdVE1.js","../assets/0.BPrCR_r7.css","../nodes/1.DcR4nNsi.js","../nodes/2.CiT4lj0D.js","../chunks/wyaP0EDu.js","../chunks/C6k1Q4We.js","../chunks/D4Caz1gY.js","../chunks/qB7B8uiS.js","../nodes/3.BSFz0YHn.js","../chunks/CO4LUyTP.js","../chunks/CNMHKIIK.js","../chunks/BGVHQGl-.js","../chunks/C9DJVOi1.js","../chunks/CCSWcuVN.js","../chunks/CGpPw4EW.js","../chunks/BE4wujub.js","../chunks/ow_oMtSd.js","../nodes/4.XnVoh6ca.js","../nodes/5.rvsSG-AQ.js","../chunks/CclkODgu.js","../chunks/KQ2xQpA3.js","../chunks/B7ITzBt8.js","../nodes/6.CtGX0qgG.js","../chunks/BmGWMSQm.js","../chunks/BAg1iRPq.js","../chunks/DDhBTdDt.js","../chunks/CwqI2jFH.js","../chunks/DZblzgqm.js","../nodes/7.0w3i9VHx.js","../nodes/8.BiZNKYxk.js","../nodes/9.DpSfMRgo.js","../nodes/10.LnrIJgIa.js","../nodes/11.Bsn67lBa.js","../nodes/12.B-vC_cmu.js","../chunks/Dbd6PPbz.js","../nodes/13.Br7HzjXP.js","../chunks/DQP15tlf.js","../chunks/CLYUNKnN.js","../nodes/14.Cd0DOn96.js","../nodes/15.CkHQugXH.js","../nodes/16.B35VVkOd.js","../nodes/17.CCltcs-Z.js","../chunks/C89fcOde.js","../nodes/18.iVIhGVtu.js"])))=>i.map(i=>d[i]); +import{s as A,aL as z,g as f,aN as U,aO as G,aP as Q,ax as W,aQ as Y,m as F,p as H,an as J,ao as K,o as X,aR as b,aS as Z,f as C,b as L,k as $,c as g,d as tt,B as T,j as et,r as rt,aT as D,D as st,t as ot,v as at}from"../chunks/D8EpLgQ1.js";import"../chunks/DsnmJJEf.js";import{p as I,i as V}from"../chunks/5WA7h8uK.js";import{c as w}from"../chunks/CCSWcuVN.js";import{b as k}from"../chunks/BAg1iRPq.js";function nt(c){return class extends it{constructor(t){super({component:c,...t})}}}class it{#e;#t;constructor(t){var a=new Map,u=(r,e)=>{var s=F(e,!1,!1);return a.set(r,s),s};const l=new Proxy({...t.props||{},$$events:{}},{get(r,e){return f(a.get(e)??u(e,Reflect.get(r,e)))},has(r,e){return e===z?!0:(f(a.get(e)??u(e,Reflect.get(r,e))),Reflect.has(r,e))},set(r,e,s){return A(a.get(e)??u(e,s),s),Reflect.set(r,e,s)}});this.#t=(t.hydrate?U:G)(t.component,{target:t.target,anchor:t.anchor,props:l,context:t.context,intro:t.intro??!1,recover:t.recover}),(!t?.props?.$$host||t.sync===!1)&&Q(),this.#e=l.$$events;for(const r of Object.keys(this.#t))r==="$set"||r==="$destroy"||r==="$on"||W(this,r,{get(){return this.#t[r]},set(e){this.#t[r]=e},enumerable:!0});this.#t.$set=r=>{Object.assign(l,r)},this.#t.$destroy=()=>{Y(this.#t)}}$set(t){this.#t.$set(t)}$on(t,a){this.#e[t]=this.#e[t]||[];const u=(...l)=>a.call(this,...l);return this.#e[t].push(u),()=>{this.#e[t]=this.#e[t].filter(l=>l!==u)}}$destroy(){this.#t.$destroy()}}const ct="modulepreload",ut=function(c,t){return new URL(c,t).href},j={},o=function(t,a,u){let l=Promise.resolve();if(a&&a.length>0){let O=function(i){return Promise.all(i.map(d=>Promise.resolve(d).then(v=>({status:"fulfilled",value:v}),v=>({status:"rejected",reason:v}))))};const e=document.getElementsByTagName("link"),s=document.querySelector("meta[property=csp-nonce]"),y=s?.nonce||s?.getAttribute("nonce");l=O(a.map(i=>{if(i=ut(i,u),i in j)return;j[i]=!0;const d=i.endsWith(".css"),v=d?'[rel="stylesheet"]':"";if(!!u)for(let n=e.length-1;n>=0;n--){const _=e[n];if(_.href===i&&(!d||_.rel==="stylesheet"))return}else 
if(document.querySelector(`link[href="${i}"]${v}`))return;const m=document.createElement("link");if(m.rel=d?"stylesheet":ct,d||(m.as="script"),m.crossOrigin="",m.href=i,y&&m.setAttribute("nonce",y),document.head.appendChild(m),d)return new Promise((n,_)=>{m.addEventListener("load",n),m.addEventListener("error",()=>_(new Error(`Unable to preload CSS for ${i}`)))})}))}function r(e){const s=new Event("vite:preloadError",{cancelable:!0});if(s.payload=e,window.dispatchEvent(s),!s.defaultPrevented)throw e}return l.then(e=>{for(const s of e||[])s.status==="rejected"&&r(s.reason);return t().catch(r)})},Rt={};var lt=C('
                '),_t=C(" ",1);function mt(c,t){H(t,!0);let a=I(t,"components",23,()=>[]),u=I(t,"data_0",3,null),l=I(t,"data_1",3,null);J(()=>t.stores.page.set(t.page)),K(()=>{t.stores,t.page,t.constructors,a(),t.form,u(),l(),t.stores.page.notify()});let r=b(!1),e=b(!1),s=b(null);X(()=>{const n=t.stores.page.subscribe(()=>{f(r)&&(A(e,!0),Z().then(()=>{A(s,document.title||"untitled page",!0)}))});return A(r,!0),n});const y=D(()=>t.constructors[1]);var O=_t(),i=L(O);{var d=n=>{const _=D(()=>t.constructors[0]);var h=T(),P=L(h);w(P,()=>f(_),(E,p)=>{k(p(E,{get data(){return u()},get form(){return t.form},get params(){return t.page.params},children:(R,vt)=>{var S=T(),B=L(S);w(B,()=>f(y),(N,M)=>{k(M(N,{get data(){return l()},get form(){return t.form},get params(){return t.page.params}}),q=>a()[1]=q,()=>a()?.[1])}),g(R,S)},$$slots:{default:!0}}),R=>a()[0]=R,()=>a()?.[0])}),g(n,h)},v=n=>{const _=D(()=>t.constructors[0]);var h=T(),P=L(h);w(P,()=>f(_),(E,p)=>{k(p(E,{get data(){return u()},get form(){return t.form},get params(){return t.page.params}}),R=>a()[0]=R,()=>a()?.[0])}),g(n,h)};V(i,n=>{t.constructors[1]?n(d):n(v,!1)})}var x=$(i,2);{var m=n=>{var _=lt(),h=et(_);{var P=E=>{var p=st();ot(()=>at(p,f(s))),g(E,p)};V(h,E=>{f(e)&&E(P)})}rt(_),g(n,_)};V(x,n=>{f(r)&&n(m)})}g(c,O),tt()}const yt=nt(mt),Ot=[()=>o(()=>import("../nodes/0.DINiyk_8.js"),__vite__mapDeps([0,1,2,3,4,5,6,7,8,9,10,11,12]),import.meta.url),()=>o(()=>import("../nodes/1.DcR4nNsi.js"),__vite__mapDeps([13,1,2,3,7,8]),import.meta.url),()=>o(()=>import("../nodes/2.CiT4lj0D.js"),__vite__mapDeps([14,1,2,3,4,10,5,8,15,16,17,18,11]),import.meta.url),()=>o(()=>import("../nodes/3.BSFz0YHn.js"),__vite__mapDeps([19,1,2,3,4,10,5,16,17,20,21,22,23,24,8,15,11,25,26,27]),import.meta.url),()=>o(()=>import("../nodes/4.XnVoh6ca.js"),__vite__mapDeps([28,1,2,3,4,5,16,17,20,21,22,23,10,24,8,15,11,25]),import.meta.url),()=>o(()=>import("../nodes/5.rvsSG-AQ.js"),__vite__mapDeps([29,1,2,3,4,5,8,20,10,16,17,18,15,30,31,11,22,23,24,32,25,26,27]),import.meta.url),()=>o(()=>import("../nodes/6.CtGX0qgG.js"),__vite__mapDeps([33,1,2,3,4,5,6,7,8,30,10,16,17,18,31,34,22,15,23,24,32,26,27,35,36,11,37,38]),import.meta.url),()=>o(()=>import("../nodes/7.0w3i9VHx.js"),__vite__mapDeps([39,1,2,3,4,5,16,17,7,8,9,11]),import.meta.url),()=>o(()=>import("../nodes/8.BiZNKYxk.js"),__vite__mapDeps([40,1,2,3,4,5,31,18,20,10,11,23,24,16,8,22,32,26,27]),import.meta.url),()=>o(()=>import("../nodes/9.DpSfMRgo.js"),__vite__mapDeps([41,1,2,3,4,10,5,35,6,7,8,31,18,27,22]),import.meta.url),()=>o(()=>import("../nodes/10.LnrIJgIa.js"),__vite__mapDeps([42,1,2,3,4,5,16,17,7,8,9]),import.meta.url),()=>o(()=>import("../nodes/11.Bsn67lBa.js"),__vite__mapDeps([43,1,2,3,4,5,8,10,16,17,18,21,22,15,30,31,20,11,23,24,32,25,26,27]),import.meta.url),()=>o(()=>import("../nodes/12.B-vC_cmu.js"),__vite__mapDeps([44,1,2,3,4,5,6,7,8,30,10,16,17,18,31,34,22,15,23,24,32,26,27,35,36,45,11,37,38]),import.meta.url),()=>o(()=>import("../nodes/13.Br7HzjXP.js"),__vite__mapDeps([46,1,2,3,4,5,8,20,37,10,16,17,18,38,47,15,31,11,22,23,24,32,25,26,27,48]),import.meta.url),()=>o(()=>import("../nodes/14.Cd0DOn96.js"),__vite__mapDeps([49,1,2,3,4,10,5,6,7,8,47,16,17,18,38,15,31,36,23,24,22,32,26,27,11]),import.meta.url),()=>o(()=>import("../nodes/15.CkHQugXH.js"),__vite__mapDeps([50,1,2,3,4,5,10,16,17,18,21,22,15,30,31,20,11,23,24,8,32,25,26,27]),import.meta.url),()=>o(()=>import("../nodes/16.B35VVkOd.js"),__vite__mapDeps([51,1,2,3,4,5,6,7,8,30,10,16,17,18,31,34,22,15,23,24,32,26,27,35,36,45,11,37,38]),import.meta.url),()=>o(
()=>import("../nodes/17.CCltcs-Z.js"),__vite__mapDeps([52,1,2,3,4,5,8,20,10,16,17,18,38,53,31,15,11,22,23,24,32,25,26,27,48]),import.meta.url),()=>o(()=>import("../nodes/18.iVIhGVtu.js"),__vite__mapDeps([54,1,2,3,4,5,6,7,8,53,16,17,18,38,31,36,23,10,24,22,32,26,27,11]),import.meta.url)],Lt=[],At={"/":[2],"/credentials":[3],"/endpoints":[4],"/enterprises":[5],"/enterprises/[id]":[6],"/init":[7],"/instances":[8],"/instances/[id]":[9],"/login":[10],"/organizations":[11],"/organizations/[id]":[12],"/pools":[13],"/pools/[id]":[14],"/repositories":[15],"/repositories/[id]":[16],"/scalesets":[17],"/scalesets/[id]":[18]},dt={handleError:({error:c})=>{console.error(c)},reroute:()=>{},transport:{}},ft=Object.fromEntries(Object.entries(dt.transport).map(([c,t])=>[c,t.decode])),bt=!1,Tt=(c,t)=>ft[c](t);export{Tt as decode,ft as decoders,At as dictionary,bt as hash,dt as hooks,Rt as matchers,Ot as nodes,yt as root,Lt as server_loads}; diff --git a/webapp/assets/_app/immutable/entry/start.CI0Cdear.js b/webapp/assets/_app/immutable/entry/start.CI0Cdear.js new file mode 100644 index 00000000..b4e2a9f4 --- /dev/null +++ b/webapp/assets/_app/immutable/entry/start.CI0Cdear.js @@ -0,0 +1 @@ +import{l as o,a as r}from"../chunks/CTf6mQoE.js";export{o as load_css,r as start}; diff --git a/webapp/assets/_app/immutable/nodes/0.DINiyk_8.js b/webapp/assets/_app/immutable/nodes/0.DINiyk_8.js new file mode 100644 index 00000000..d50c7e62 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/0.DINiyk_8.js @@ -0,0 +1,13 @@ +import"../chunks/DsnmJJEf.js";import{i as He}from"../chunks/B3Pzt0F_.js";import{p as Se,o as De,s as h,m as F,g as e,l as X,a as Le,f as c,b as E,j as o,k as n,r as t,u as i,t as P,v as ge,c as s,B as U,C as Y,e as I,d as Be,q as cr,h as gr,$ as hr}from"../chunks/D8EpLgQ1.js";import{a as me,i as w,s as Ae}from"../chunks/5WA7h8uK.js";import{c as _,s as Q,h as ur,B as fr,d as Ge}from"../chunks/CiE1LlKV.js";import{p as qe}from"../chunks/C41YH50Q.js";import{g as fe}from"../chunks/CTf6mQoE.js";import{b as l}from"../chunks/CoIRRsD9.js";import{b as Ne,a as mr}from"../chunks/duD3WMbl.js";import{e as ne,i as ce,w as xr}from"../chunks/u94nIB4-.js";import{t as Oe}from"../chunks/BEkVdVE1.js";const pr=async({url:Z})=>({url:Z.pathname}),kr=!1,br=!1,va=Object.freeze(Object.defineProperty({__proto__:null,load:pr,prerender:kr,ssr:br},Symbol.toStringTag,{value:"Module"}));var yr=c('
                Live Updates
                '),_r=c('
                Connecting
                '),wr=c('
                Updates Unavailable
                '),Mr=c('
                Manual Refresh
                '),$r=Y(''),jr=Y(''),zr=Y(''),Cr=Y(''),Hr=c(' '),Sr=c(' '),Lr=c('
                '),Br=c('
                '),Ar=c('
                '),Vr=c('
                '),Ir=Y(''),Rr=Y(''),Tr=Y(''),Pr=Y(''),Er=c(' '),Gr=c(' '),Or=c('
                '),Dr=c('
                '),qr=c('
                GARM GARM

                GARM

                ',1);function Nr(Z,ee){Se(ee,!1);const[re,he]=Ae(),M=()=>me(xr,"$websocketStore",re),m=()=>me(qe,"$page",re),u=F(),y=F();let $=F(!1),G=F(!1),f=F(!1);De(()=>{j(),window.matchMedia("(prefers-color-scheme: dark)").addEventListener("change",v)});function j(){const a=localStorage.getItem("theme");a==="dark"?h(f,!0):a==="light"?h(f,!1):h(f,window.matchMedia("(prefers-color-scheme: dark)").matches),p()}function v(a){(!localStorage.getItem("theme")||localStorage.getItem("theme")==="system")&&(h(f,a.matches),p())}function O(){h(f,!e(f)),localStorage.setItem("theme",e(f)?"dark":"light"),p()}function p(){e(f)?document.documentElement.classList.add("dark"):document.documentElement.classList.remove("dark")}function z(){Ne.logout(),h(G,!1)}const le=[{href:`${l}/`,label:"Dashboard",icon:["M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2H5a2 2 0 00-2-2z","M8 5a2 2 0 012-2h4a2 2 0 012 2v2H8V5z"]},{href:`${l}/repositories`,label:"Repositories",icon:["M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2H5a2 2 0 00-2-2z","M8 5a2 2 0 012-2h4a2 2 0 012 2v2H8V5z"]},{href:`${l}/organizations`,label:"Organizations",icon:"M19 21V5a2 2 0 00-2-2H7a2 2 0 00-2 2v16m14 0h2m-2 0h-5m-9 0H3m2 0h5M9 7h1m-1 4h1m4-4h1m-1 4h1m-5 10v-5a1 1 0 011-1h2a1 1 0 011 1v5m-4 0h4"},{href:`${l}/enterprises`,label:"Enterprises",icon:"M19 21V5a2 2 0 00-2-2H7a2 2 0 00-2 2v16m14 0h2m-2 0h-5m-9 0H3m2 0h5M9 7h1m-1 4h1m4-4h1m-1 4h1m-5 10v-5a1 1 0 011-1h2a1 1 0 011 1v5m-4 0h4"},{href:`${l}/pools`,label:"Pools",icon:"M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"},{href:`${l}/scalesets`,label:"Scale Sets",icon:"M4 7v10c0 2.21 3.582 4 8 4s8-1.79 8-4V7M4 7c0 2.21 3.582 4 8 4s8-1.79 8-4M4 7c0-2.21 3.582-4 8-4s8 1.79 8 4m0 5c0 2.21-3.582 4-8 4s-8-1.79-8-4"},{href:`${l}/instances`,label:"Runners",icon:"M9 3v2m6-2v2M9 19v2m6-2v2M5 9H3m2 6H3m18-6h-2m2 6h-2M7 19h10a2 2 0 002-2V7a2 2 0 00-2-2H7a2 2 0 00-2 2v10a2 2 0 002 2zM9 9h6v6H9V9z"}],J=[{href:`${l}/credentials`,label:"Credentials",icon:"M15 7a2 2 0 012 2m4 0a6 6 0 01-7.743 5.743L11 17H9v2H7v2H4a1 1 0 01-1-1v-2.586a1 1 0 01.293-.707l5.964-5.964A6 6 0 1721 9z"},{href:`${l}/endpoints`,label:"Endpoints",icon:"M13 10V3L4 14h7v7l9-11h-7z"}];X(()=>M(),()=>{h(u,M())}),X(()=>m(),()=>{m().url.pathname&&h($,!1)}),X(()=>m(),()=>{h(y,m().url.pathname)}),Le(),He();var D=qr(),V=E(D),q=o(V),ae=o(q),K=o(ae),te=o(K),b=o(te),C=n(b,2);t(te),t(K);var N=n(K,2),oe=o(N),de=o(oe),xe=o(de);{var ke=a=>{var r=yr();s(a,r)},Ue=a=>{var r=U(),g=E(r);{var H=x=>{var S=_r();s(x,S)},R=x=>{var S=U(),W=E(S);{var L=k=>{var T=wr();s(k,T)},B=k=>{var T=Mr();s(k,T)};w(W,k=>{e(u),i(()=>e(u).error)?k(L):k(B,!1)},!0)}s(x,S)};w(g,x=>{e(u),i(()=>e(u).connecting)?x(H):x(R,!1)},!0)}s(a,r)};w(xe,a=>{e(u),i(()=>e(u).connected)?a(ke):a(Ue,!1)})}t(de);var pe=n(de,2),Qe=o(pe);{var Fe=a=>{var r=$r();s(a,r)},Je=a=>{var r=jr();s(a,r)};w(Qe,a=>{e(f)?a(Fe):a(Je,!1)})}t(pe),t(oe),t(N),t(ae);var Ve=n(ae,2),Ie=o(Ve);ne(Ie,1,()=>le,ce,(a,r)=>{var g=Hr(),H=o(g),R=o(H);{var x=L=>{var B=U(),k=E(B);ne(k,1,()=>(e(r),i(()=>e(r).icon)),ce,(T,se)=>{var d=zr();P(()=>_(d,"d",e(se))),s(T,d)}),s(L,B)},S=L=>{var B=Cr();P(()=>_(B,"d",(e(r),i(()=>e(r).icon)))),s(L,B)};w(R,L=>{e(r),i(()=>Array.isArray(e(r).icon))?L(x):L(S,!1)})}t(H);var W=n(H);t(g),P(()=>{_(g,"href",(e(r),i(()=>e(r).href))),Q(g,1,`group flex items-center px-2 py-2 text-sm font-medium rounded-md transition-colors duration-200 + ${e(y),e(r),i(()=>e(y)===e(r).href?"bg-gray-100 text-gray-900 dark:bg-gray-700 
dark:text-white":"text-gray-600 hover:bg-gray-50 hover:text-gray-900 dark:text-gray-300 dark:hover:bg-gray-700 dark:hover:text-white")??""}`),ge(W,` ${e(r),i(()=>e(r).label)??""}`)}),s(a,g)});var be=n(Ie,2);ne(be,5,()=>J,ce,(a,r)=>{var g=Sr(),H=o(g),R=o(H);t(H);var x=n(H);t(g),P(()=>{_(g,"href",(e(r),i(()=>e(r).href))),Q(g,1,`group flex items-center px-2 py-2 text-sm font-medium rounded-md transition-colors duration-200 + ${e(y),e(r),i(()=>e(y)===e(r).href?"bg-gray-100 text-gray-900 dark:bg-gray-700 dark:text-white":"text-gray-600 hover:bg-gray-50 hover:text-gray-900 dark:text-gray-300 dark:hover:bg-gray-700 dark:hover:text-white")??""}`),_(R,"d",(e(r),i(()=>e(r).icon))),ge(x,` ${e(r),i(()=>e(r).label)??""}`)}),s(a,g)}),t(be);var Re=n(be,2),Ke=o(Re);t(Re),t(Ve),t(q),t(V);var ye=n(V,2),_e=o(ye),Te=o(_e),we=n(Te,2),Me=o(we),$e=n(Me,2),Pe=n($e,4),We=o(Pe);{var Xe=a=>{var r=Lr();s(a,r)},Ye=a=>{var r=U(),g=E(r);{var H=x=>{var S=Br();s(x,S)},R=x=>{var S=U(),W=E(S);{var L=k=>{var T=Ar();s(k,T)},B=k=>{var T=Vr();s(k,T)};w(W,k=>{e(u),i(()=>e(u).error)?k(L):k(B,!1)},!0)}s(x,S)};w(g,x=>{e(u),i(()=>e(u).connecting)?x(H):x(R,!1)},!0)}s(a,r)};w(We,a=>{e(u),i(()=>e(u).connected)?a(Xe):a(Ye,!1)})}t(Pe),t(we);var je=n(we,2),Ze=o(je);{var er=a=>{var r=Ir();s(a,r)},rr=a=>{var r=Rr();s(a,r)};w(Ze,a=>{e(f)?a(er):a(rr,!1)})}t(je),t(_e);var ar=n(_e,2);{var tr=a=>{var r=Or(),g=o(r),H=n(g,2),R=o(H),x=o(R);t(R);var S=n(R,2),W=o(S),L=o(W);ne(L,1,()=>le,ce,(se,d)=>{var A=Er(),ie=o(A),ze=o(ie);{var Ce=ve=>{var ue=U(),lr=E(ue);ne(lr,1,()=>(e(d),i(()=>e(d).icon)),ce,(dr,vr)=>{var Ee=Tr();P(()=>_(Ee,"d",e(vr))),s(dr,Ee)}),s(ve,ue)},ir=ve=>{var ue=Pr();P(()=>_(ue,"d",(e(d),i(()=>e(d).icon)))),s(ve,ue)};w(ze,ve=>{e(d),i(()=>Array.isArray(e(d).icon))?ve(Ce):ve(ir,!1)})}t(ie);var nr=n(ie);t(A),P(()=>{_(A,"href",(e(d),i(()=>e(d).href))),Q(A,1,`group flex items-center px-2 py-2 text-base font-medium rounded-md transition-colors duration-200 + ${e(y),e(d),i(()=>e(y)===e(d).href?"bg-gray-100 dark:bg-gray-700 text-gray-900 dark:text-white":"text-gray-600 hover:bg-gray-50 hover:text-gray-900 dark:text-gray-300 dark:hover:bg-gray-700 dark:hover:text-white")??""}`),ge(nr,` ${e(d),i(()=>e(d).label)??""}`)}),I("click",A,()=>h($,!1)),s(se,A)});var B=n(L,2);ne(B,5,()=>J,ce,(se,d)=>{var A=Gr(),ie=o(A),ze=o(ie);t(ie);var Ce=n(ie);t(A),P(()=>{_(A,"href",(e(d),i(()=>e(d).href))),Q(A,1,`group flex items-center px-2 py-2 text-base font-medium rounded-md transition-colors duration-200 + ${e(y),e(d),i(()=>e(y)===e(d).href?"bg-gray-100 dark:bg-gray-700 text-gray-900 dark:text-white":"text-gray-600 hover:bg-gray-50 hover:text-gray-900 dark:text-gray-300 dark:hover:bg-gray-700 dark:hover:text-white")??""}`),_(ze,"d",(e(d),i(()=>e(d).icon))),ge(Ce,` ${e(d),i(()=>e(d).label)??""}`)}),I("click",A,()=>h($,!1)),s(se,A)}),t(B);var k=n(B,2),T=o(k);t(k),t(W),t(S),t(H),t(r),I("click",g,()=>h($,!1)),I("keydown",g,se=>{se.key==="Escape"&&h($,!1)}),I("click",x,()=>h($,!1)),I("click",T,z),s(a,r)};w(ar,a=>{e($)&&a(tr)})}t(ye);var or=n(ye,2);{var sr=a=>{var r=Dr();I("click",r,()=>h(G,!1)),I("keydown",r,g=>{g.key==="Escape"&&h(G,!1)}),s(a,r)};w(or,a=>{e(G)&&a(sr)})}P(()=>{_(te,"href",`${l}/`),_(b,"src",`${l??""}/assets/garm-light.svg`),_(C,"src",`${l??""}/assets/garm-dark.svg`),_(pe,"title",e(f)?"Switch to Light Mode":"Switch to Dark Mode"),_(Me,"src",`${l??""}/assets/garm-light.svg`),Q(Me,1,`${e(f)?"hidden":"block"} h-8 w-8`),_($e,"src",`${l??""}/assets/garm-dark.svg`),Q($e,1,`${e(f)?"block":"hidden"} h-8 
w-8`)}),I("click",pe,O),I("click",Ke,z),I("click",Te,()=>h($,!e($))),I("click",je,O),s(Z,D),Be(),he()}var Ur=c("
                "),Qr=c('

                '),Fr=c('
                ');function Jr(Z,ee){Se(ee,!1);const[re,he]=Ae(),M=()=>me(Oe,"$toastStore",re),m=F();function u(j){switch(j){case"success":return` + + `;case"error":return` + + `;case"warning":return` + + `;case"info":default:return` + + `}}function y(j){switch(j){case"success":return"bg-green-50 dark:bg-green-900 border-green-200 dark:border-green-700";case"error":return"bg-red-50 dark:bg-red-900 border-red-200 dark:border-red-700";case"warning":return"bg-yellow-50 dark:bg-yellow-900 border-yellow-200 dark:border-yellow-700";case"info":default:return"bg-blue-50 dark:bg-blue-900 border-blue-200 dark:border-blue-700"}}function $(j){switch(j){case"success":return"text-green-800 dark:text-green-200";case"error":return"text-red-800 dark:text-red-200";case"warning":return"text-yellow-800 dark:text-yellow-200";case"info":default:return"text-blue-800 dark:text-blue-200"}}function G(j){switch(j){case"success":return"text-green-700 dark:text-green-300";case"error":return"text-red-700 dark:text-red-300";case"warning":return"text-yellow-700 dark:text-yellow-300";case"info":default:return"text-blue-700 dark:text-blue-300"}}X(()=>M(),()=>{h(m,M())}),Le(),He();var f=Fr();ne(f,5,()=>e(m),j=>j.id,(j,v)=>{var O=Qr(),p=o(O),z=o(p),le=o(z);ur(le,()=>(e(v),i(()=>u(e(v).type)))),t(z);var J=n(z,2),D=o(J),V=o(D,!0);t(D);var q=n(D,2);{var ae=b=>{var C=Ur(),N=o(C,!0);t(C),P(oe=>{Q(C,1,`mt-1 text-sm ${oe??""}`),ge(N,(e(v),i(()=>e(v).message)))},[()=>(e(v),i(()=>G(e(v).type)))]),s(b,C)};w(q,b=>{e(v),i(()=>e(v).message)&&b(ae)})}t(J);var K=n(J,2),te=o(K);{let b=cr(()=>(e(v),i(()=>e(v).type==="success"?"text-green-400 hover:text-green-500 focus:ring-green-500":e(v).type==="error"?"text-red-400 hover:text-red-500 focus:ring-red-500":e(v).type==="warning"?"text-yellow-400 hover:text-yellow-500 focus:ring-yellow-500":"text-blue-400 hover:text-blue-500 focus:ring-blue-500")));fr(te,{variant:"ghost",size:"sm","aria-label":"Dismiss notification",icon:"",get class(){return e(b)},$$events:{click:()=>Oe.remove(e(v).id)}})}t(K),t(p),t(O),P((b,C)=>{Q(O,1,`relative rounded-lg border p-4 shadow-lg transition-all duration-300 ease-in-out ${b??""}`),Q(D,1,`text-sm font-medium ${C??""}`),ge(V,(e(v),i(()=>e(v).title)))},[()=>(e(v),i(()=>y(e(v).type))),()=>(e(v),i(()=>$(e(v).type)))]),s(j,O)}),t(f),s(Z,f),Be(),he()}var Kr=c('

                Loading...

                '),Wr=c('

                Redirecting to login...

                '),Xr=c('
                '),Yr=c(" ",1);function ca(Z,ee){Se(ee,!1);const[re,he]=Ae(),M=()=>me(qe,"$page",re),m=()=>me(mr,"$authStore",re),u=F(),y=F(),$=F();De(()=>{Ne.init(),setTimeout(()=>{const p=M().url.pathname===`${l}/login`,z=M().url.pathname===`${l}/init`;!p&&!z&&!m().isAuthenticated&&!m().loading&&(m().needsInitialization?fe(`${l}/init`):fe(`${l}/login`))},200)}),X(()=>(m(),M(),fe),()=>{if(!m().loading){const p=M().url.pathname===`${l}/login`,z=M().url.pathname===`${l}/init`;!p&&!z&&!m().isAuthenticated&&(m().needsInitialization?fe(`${l}/init`):fe(`${l}/login`))}}),X(()=>(M(),l),()=>{h(u,M().url.pathname===`${l}/login`)}),X(()=>(M(),l),()=>{h(y,M().url.pathname===`${l}/init`)}),X(()=>(e(u),e(y)),()=>{h($,!e(u)&&!e(y))}),Le(),He();var G=Yr();gr(p=>{hr.title="GARM - GitHub Actions Runner Manager"});var f=E(G);{var j=p=>{var z=Kr();s(p,z)},v=p=>{var z=U(),le=E(z);{var J=V=>{var q=Wr();s(V,q)},D=V=>{var q=U(),ae=E(q);{var K=b=>{var C=U(),N=E(C);Ge(N,ee,"default",{}),s(b,C)},te=b=>{var C=Xr(),N=o(C);Nr(N,{});var oe=n(N,2),de=o(oe),xe=o(de),ke=o(xe);Ge(ke,ee,"default",{}),t(xe),t(de),t(oe),t(C),s(b,C)};w(ae,b=>{e(u)||e(y)?b(K):b(te,!1)},!0)}s(V,q)};w(le,V=>{e($),m(),i(()=>e($)&&!m().isAuthenticated)?V(J):V(D,!1)},!0)}s(p,z)};w(f,p=>{m(),i(()=>m().loading)?p(j):p(v,!1)})}var O=n(f,2);Jr(O,{}),s(Z,G),Be(),he()}export{ca as component,va as universal}; diff --git a/webapp/assets/_app/immutable/nodes/1.DcR4nNsi.js b/webapp/assets/_app/immutable/nodes/1.DcR4nNsi.js new file mode 100644 index 00000000..820e3848 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/1.DcR4nNsi.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as u}from"../chunks/B3Pzt0F_.js";import{p as h,f as g,b as v,t as d,c as l,d as _,j as s,r as a,k as x,v as o}from"../chunks/D8EpLgQ1.js";import{s as k,p}from"../chunks/CTf6mQoE.js";const $={get error(){return p.error},get status(){return p.status}};k.updated.check;const i=$;var b=g("

                ",1);function y(m,c){h(c,!1),u();var r=b(),t=v(r),n=s(t,!0);a(t);var e=x(t,2),f=s(e,!0);a(e),d(()=>{o(n,i.status),o(f,i.error?.message)}),l(m,r),_()}export{y as component}; diff --git a/webapp/assets/_app/immutable/nodes/10.LnrIJgIa.js b/webapp/assets/_app/immutable/nodes/10.LnrIJgIa.js new file mode 100644 index 00000000..d1f479c8 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/10.LnrIJgIa.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as X}from"../chunks/B3Pzt0F_.js";import{p as Y,o as Z,l as ee,a as ae,f as H,h as re,t as _,g as a,e as k,c as w,d as te,$ as se,k as d,D as de,m as f,j as r,s as i,r as t,z as B,v as D}from"../chunks/D8EpLgQ1.js";import{i as oe,s as ie,a as le}from"../chunks/5WA7h8uK.js";import{B as ne,r as q,c as T}from"../chunks/CiE1LlKV.js";import{b as U}from"../chunks/C6k1Q4We.js";import{p as ce}from"../chunks/D4Caz1gY.js";import{g as C}from"../chunks/CTf6mQoE.js";import{b as c}from"../chunks/CoIRRsD9.js";import{a as me,b as ue}from"../chunks/duD3WMbl.js";var pe=H('

                '),ve=H('
                GARM

                Sign in to GARM

                GitHub Actions Runner Manager

                ');function Le(I,K){Y(K,!1);const[W,F]=ie(),$=()=>le(me,"$authStore",W);let m=f(""),u=f(""),o=f(!1),l=f("");Z(()=>{J()});function J(){const e=localStorage.getItem("theme");let s=!1;e==="dark"?s=!0:e==="light"?s=!1:s=window.matchMedia("(prefers-color-scheme: dark)").matches,s?document.documentElement.classList.add("dark"):document.documentElement.classList.remove("dark")}async function L(){if(!a(m)||!a(u)){i(l,"Please enter both username and password");return}i(o,!0),i(l,"");try{await ue.login(a(m),a(u)),C(`${c}/`)}catch(e){i(l,e instanceof Error?e.message:"Login failed")}finally{i(o,!1)}}function M(e){e.key==="Enter"&&L()}ee(()=>($(),c),()=>{$().isAuthenticated&&C(`${c}/`)}),ae(),X();var g=ve();re(e=>{se.title="Login - GARM"});var z=r(g),h=r(z),S=r(h),A=r(S),N=d(A,2);t(S),B(4),t(h);var b=d(h,2),x=r(b),y=r(x),p=d(r(y),2);q(p),t(y);var G=d(y,2),v=d(r(G),2);q(v),t(G),t(x);var P=d(x,2);{var O=e=>{var s=pe(),n=r(s),E=d(r(n),2),j=r(E),V=r(j,!0);t(j),t(E),t(n),t(s),_(()=>D(V,a(l))),w(e,s)};oe(P,e=>{a(l)&&e(O)})}var R=d(P,2),Q=r(R);ne(Q,{type:"submit",variant:"primary",size:"md",fullWidth:!0,get disabled(){return a(o)},get loading(){return a(o)},children:(e,s)=>{B();var n=de();_(()=>D(n,a(o)?"Signing in...":"Sign in")),w(e,n)},$$slots:{default:!0}}),t(R),t(b),t(z),t(g),_(()=>{T(A,"src",`${c??""}/assets/garm-light.svg`),T(N,"src",`${c??""}/assets/garm-dark.svg`),p.disabled=a(o),v.disabled=a(o)}),U(p,()=>a(m),e=>i(m,e)),k("keypress",p,M),U(v,()=>a(u),e=>i(u,e)),k("keypress",v,M),k("submit",b,ce(L)),w(I,g),te(),F()}export{Le as component}; diff --git a/webapp/assets/_app/immutable/nodes/11.Bsn67lBa.js b/webapp/assets/_app/immutable/nodes/11.Bsn67lBa.js new file mode 100644 index 00000000..2a8fb7cb --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/11.Bsn67lBa.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Ie}from"../chunks/B3Pzt0F_.js";import{p as Be,E as Ve,o as Le,l as x,s as a,m as s,g as e,y as W,a as Ne,f as T,k as i,j as o,r as n,c as M,t as V,v as se,x as De,u as v,z as Fe,e as $e,d as je,h as Ye,b as Ke,$ as Qe,n as E,q as ue}from"../chunks/D8EpLgQ1.js";import{a as qe,i as Y,s as Ge}from"../chunks/5WA7h8uK.js";import{r as ge,b as Se,h as Xe,c as Ze,g as me}from"../chunks/CiE1LlKV.js";import{b as Ae}from"../chunks/CoIRRsD9.js";import{e as et,i as tt}from"../chunks/u94nIB4-.js";import{b as Ue,a as Re}from"../chunks/C6k1Q4We.js";import{p as at}from"../chunks/D4Caz1gY.js";import{M as rt}from"../chunks/qB7B8uiS.js";import{F as ot}from"../chunks/CNMHKIIK.js";import{e as He,a as Pe}from"../chunks/wyaP0EDu.js";import{U as nt}from"../chunks/CclkODgu.js";import{D as it}from"../chunks/KQ2xQpA3.js";import{P as st}from"../chunks/CO4LUyTP.js";import{t as ie}from"../chunks/BEkVdVE1.js";import{B as lt,k as Ce,g as Oe,l as dt}from"../chunks/BGVHQGl-.js";import{D as ct,A as We,G as ut,a as gt}from"../chunks/C9DJVOi1.js";import{E as mt}from"../chunks/B7ITzBt8.js";import{E as pt}from"../chunks/CGpPw4EW.js";import{S as ft}from"../chunks/BE4wujub.js";var bt=T('

                '),vt=T('

                Loading...

                '),yt=T(""),ht=T(''),_t=T('

                Webhook secret will be automatically generated

                '),xt=T('
                '),kt=T('

                Create Organization

                ');function wt(pe,fe){Be(fe,!1);const[be,ve]=Ge(),p=()=>qe(He,"$eagerCache",be),D=s(),w=s(),z=s(),K=s(),$=Ve();let C=s(!1),f=s(""),y=s("github"),r=s({name:"",credentials_name:"",webhook_secret:"",pool_balancer_type:"roundrobin"}),k=s(!0),b=s(!0);async function I(){if(!p().loaded.credentials&&!p().loading.credentials)try{await Pe.getCredentials()}catch(d){a(f,d instanceof Error?d.message:"Failed to load credentials")}}function B(d){a(y,d.detail),W(r,e(r).credentials_name="")}function c(){if(e(r).credentials_name){const d=e(D).find(L=>L.name===e(r).credentials_name);d&&d.forge_type&&a(y,d.forge_type)}}function ye(){const d=new Uint8Array(32);return crypto.getRandomValues(d),Array.from(d,L=>L.toString(16).padStart(2,"0")).join("")}async function he(){if(!e(r).name?.trim()){a(f,"Organization name is required");return}if(!e(r).credentials_name){a(f,"Please select credentials");return}try{a(C,!0),a(f,"");const d={...e(r),install_webhook:e(k),auto_generate_secret:e(b)};$("submit",d)}catch(d){a(f,d instanceof Error?d.message:"Failed to create organization"),a(C,!1)}}Le(()=>{I()}),x(()=>p(),()=>{a(D,p().credentials)}),x(()=>p(),()=>{a(w,p().loading.credentials)}),x(()=>(e(D),e(y)),()=>{a(z,e(D).filter(d=>e(y)?d.forge_type===e(y):!0))}),x(()=>e(b),()=>{e(b)?W(r,e(r).webhook_secret=ye()):e(b)||W(r,e(r).webhook_secret="")}),x(()=>(e(r),e(b)),()=>{a(K,e(r).name?.trim()!==""&&e(r).credentials_name!==""&&(e(b)||e(r).webhook_secret&&e(r).webhook_secret.trim()!==""))}),Ne(),Ie(),rt(pe,{$$events:{close:()=>$("close")},children:(d,L)=>{var F=kt(),N=i(o(F),2);{var _e=h=>{var _=bt(),S=o(_),A=o(S,!0);n(S),n(_),V(()=>se(A,e(f))),M(h,_)};Y(N,h=>{e(f)&&h(_e)})}var xe=i(N,2);{var ke=h=>{var _=vt();M(h,_)},we=h=>{var _=xt(),S=o(_);ot(S,{get selectedForgeType(){return e(y)},set selectedForgeType(l){a(y,l)},$$events:{select:B},$$legacy:!0});var A=i(S,2),Q=i(o(A),2);ge(Q),n(A);var U=i(A,2),R=i(o(U),2);V(()=>{e(r),De(()=>{e(z)})});var X=o(R);X.value=X.__value="";var le=i(X);et(le,1,()=>e(z),tt,(l,m)=>{var P=yt(),ze=o(P);n(P);var ne={};V(()=>{se(ze,`${e(m),v(()=>e(m).name)??""} (${e(m),v(()=>e(m).endpoint?.name||"Unknown endpoint")??""})`),ne!==(ne=(e(m),v(()=>e(m).name)))&&(P.value=(P.__value=(e(m),v(()=>e(m).name)))??"")}),M(l,P)}),n(R),n(U);var Z=i(U,2),j=i(o(Z),2);V(()=>{e(r),De(()=>{})});var ee=o(j);ee.value=ee.__value="roundrobin";var de=i(ee);de.value=de.__value="pack",n(j),n(Z);var te=i(Z,2),t=o(te),g=o(t);ge(g),Fe(2),n(t);var q=i(t,2),O=o(q),u=o(O);ge(u),Fe(2),n(O);var G=i(O,2);{var ae=l=>{var m=ht();ge(m),Ue(m,()=>e(r).webhook_secret,P=>W(r,e(r).webhook_secret=P)),M(l,m)},re=l=>{var m=_t();M(l,m)};Y(G,l=>{e(b)?l(re,!1):l(ae)})}n(q),n(te);var H=i(te,2),J=o(H),oe=i(J,2),ce=o(oe,!0);n(oe),n(H),n(_),V(()=>{oe.disabled=e(C)||e(w)||!e(K),se(ce,e(C)?"Creating...":"Create Organization")}),Ue(Q,()=>e(r).name,l=>W(r,e(r).name=l)),Se(R,()=>e(r).credentials_name,l=>W(r,e(r).credentials_name=l)),$e("change",R,c),Se(j,()=>e(r).pool_balancer_type,l=>W(r,e(r).pool_balancer_type=l)),Re(g,()=>e(k),l=>a(k,l)),Re(u,()=>e(b),l=>a(b,l)),$e("click",J,()=>$("close")),$e("submit",_,at(he)),M(h,_)};Y(xe,h=>{e(C)?h(ke):h(we,!1)})}n(F),M(d,F)},$$slots:{default:!0}}),je(),ve()}var zt=T(''),$t=T('
                ',1);function Jt(pe,fe){Be(fe,!1);const[be,ve]=Ge(),p=()=>qe(He,"$eagerCache",be),D=s(),w=s(),z=s(),K=s();let $=s([]),C=s(!0),f=s(""),y=s(""),r=s(1),k=s(25),b=s(!1),I=s(!1),B=s(!1),c=s(null);async function ye(t){try{a(f,"");const g=t.detail,q={name:g.name,credentials_name:g.credentials_name,webhook_secret:g.webhook_secret,pool_balancer_type:g.pool_balancer_type},O=await me.createOrganization(q);if(g.install_webhook&&O.id)try{await me.installOrganizationWebhook(O.id),ie.success("Webhook Installed",`Webhook for organization ${O.name} has been installed successfully.`)}catch(u){console.warn("Organization created but webhook installation failed:",u),ie.error("Webhook Installation Failed",u instanceof Error?u.message:"Failed to install webhook. You can try installing it manually from the organization details page.")}ie.success("Organization Created",`Organization ${O.name} has been created successfully.`),a(b,!1)}catch(g){throw a(f,g instanceof Error?g.message:"Failed to create organization"),g}}async function he(t){if(e(c))try{await me.updateOrganization(e(c).id,t),ie.success("Organization Updated",`Organization ${e(c).name} has been updated successfully.`),a(I,!1),a(c,null)}catch(g){throw g}}async function d(){if(e(c))try{a(f,""),await me.deleteOrganization(e(c).id),ie.success("Organization Deleted",`Organization ${e(c).name} has been deleted successfully.`),a(B,!1),a(c,null)}catch(t){a(f,t instanceof Error?t.message:"Failed to delete organization")}}function L(){a(b,!0)}function F(t){a(c,t),a(I,!0)}function N(t){a(c,t),a(B,!0)}Le(async()=>{try{a(C,!0);const t=await Pe.getOrganizations();t&&Array.isArray(t)&&a($,t)}catch(t){console.error("Failed to load organizations:",t),a(f,t instanceof Error?t.message:"Failed to load organizations")}finally{a(C,!1)}});async function _e(){try{await Pe.retryResource("organizations")}catch(t){console.error("Retry failed:",t)}}const xe=[{key:"name",title:"Name",cellComponent:mt,cellProps:{entityType:"organization"}},{key:"endpoint",title:"Endpoint",cellComponent:pt},{key:"credentials",title:"Credentials",cellComponent:ut,cellProps:{field:"credentials_name"}},{key:"status",title:"Status",cellComponent:ft,cellProps:{statusType:"entity"}},{key:"actions",title:"Actions",align:"right",cellComponent:gt}],ke={entityType:"organization",primaryText:{field:"name",isClickable:!0,href:"/organizations/{id}"},customInfo:[{icon:t=>Oe(t?.endpoint?.endpoint_type||"unknown"),text:t=>t?.endpoint?.name||"Unknown"}],badges:[{type:"custom",value:t=>Ce(t)}],actions:[{type:"edit",handler:t=>F(t)},{type:"delete",handler:t=>N(t)}]};function we(t){a(y,t.detail.term),a(r,1)}function h(t){a(r,t.detail.page)}function _(t){a(k,t.detail.perPage),a(r,1)}function S(t){F(t.detail.item)}function A(t){N(t.detail.item)}x(()=>(e($),p()),()=>{(!e($).length||p().loaded.organizations)&&a($,p().organizations)}),x(()=>p(),()=>{a(C,p().loading.organizations)}),x(()=>p(),()=>{a(D,p().errorMessages.organizations)}),x(()=>(e($),e(y)),()=>{a(w,dt(e($),e(y)))}),x(()=>(e(w),e(k)),()=>{a(z,Math.ceil(e(w).length/e(k)))}),x(()=>(e(r),e(z)),()=>{e(r)>e(z)&&e(z)>0&&a(r,e(z))}),x(()=>(e(w),e(r),e(k)),()=>{a(K,e(w).slice((e(r)-1)*e(k),e(r)*e(k)))}),Ne(),Ie();var Q=$t();Ye(t=>{Qe.title="Organizations - GARM"});var U=Ke(Q),R=o(U);st(R,{title:"Organizations",description:"Manage GitHub and Gitea organizations",actionLabel:"Add Organization",$$events:{action:L}});var X=i(R,2);{let t=ue(()=>e(D)||e(f)),g=ue(()=>!!e(D));ct(X,{get columns(){return xe},get data(){return e(K)},get loading(){return e(C)},get 
error(){return e(t)},get searchTerm(){return e(y)},searchPlaceholder:"Search organizations...",get currentPage(){return e(r)},get perPage(){return e(k)},get totalPages(){return e(z)},get totalItems(){return e(w),v(()=>e(w).length)},itemName:"organizations",emptyIconType:"building",get showRetry(){return e(g)},get mobileCardConfig(){return ke},$$events:{search:we,pageChange:h,perPageChange:_,retry:_e,edit:S,delete:A},$$slots:{"mobile-card":(q,O)=>{const u=ue(()=>O.item),G=ue(()=>(E(Ce),E(e(u)),v(()=>Ce(e(u)))));var ae=zt(),re=o(ae),H=o(re),J=o(H),oe=o(J,!0);n(J);var ce=i(J,2),l=o(ce),m=o(l);Xe(m,()=>(E(Oe),E(e(u)),v(()=>Oe(e(u).endpoint?.endpoint_type||"unknown"))));var P=i(m,2),ze=o(P,!0);n(P),n(l),n(ce),n(H),n(re);var ne=i(re,2),Ee=o(ne);lt(Ee,{get variant(){return E(e(G)),v(()=>e(G).variant)},get text(){return E(e(G)),v(()=>e(G).text)}});var Me=i(Ee,2),Te=o(Me);We(Te,{action:"edit",size:"sm",title:"Edit organization",ariaLabel:"Edit organization",$$events:{click:()=>F(e(u))}});var Je=i(Te,2);We(Je,{action:"delete",size:"sm",title:"Delete organization",ariaLabel:"Delete organization",$$events:{click:()=>N(e(u))}}),n(Me),n(ne),n(ae),V(()=>{Ze(H,"href",(E(Ae),E(e(u)),v(()=>`${Ae}/organizations/${e(u).id}`))),se(oe,(E(e(u)),v(()=>e(u).name))),se(ze,(E(e(u)),v(()=>e(u).endpoint?.name||"Unknown")))}),M(q,ae)}}})}n(U);var le=i(U,2);{var Z=t=>{wt(t,{$$events:{close:()=>a(b,!1),submit:ye}})};Y(le,t=>{e(b)&&t(Z)})}var j=i(le,2);{var ee=t=>{nt(t,{get entity(){return e(c)},entityType:"organization",$$events:{close:()=>{a(I,!1),a(c,null)},submit:g=>he(g.detail)}})};Y(j,t=>{e(I)&&e(c)&&t(ee)})}var de=i(j,2);{var te=t=>{it(t,{title:"Delete Organization",message:"Are you sure you want to delete this organization? This action cannot be undone.",get itemName(){return e(c),v(()=>e(c).name)},$$events:{close:()=>{a(B,!1),a(c,null)},confirm:d}})};Y(de,t=>{e(B)&&e(c)&&t(te)})}M(pe,Q),je(),ve()}export{Jt as component}; diff --git a/webapp/assets/_app/immutable/nodes/12.B-vC_cmu.js b/webapp/assets/_app/immutable/nodes/12.B-vC_cmu.js new file mode 100644 index 00000000..2fef2926 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/12.B-vC_cmu.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Le}from"../chunks/B3Pzt0F_.js";import{p as Ne,o as We,A as qe,l as He,a as je,f as A,h as Ge,b as M,t as q,c as x,d as Re,g as e,m as l,s as o,u as s,$ as Ve,j as f,r as g,k as d,v as le,y as Je,B as de,q as m,n as Ke}from"../chunks/D8EpLgQ1.js";import{i as h,s as Qe,a as Xe}from"../chunks/5WA7h8uK.js";import{c as Ye,g as _}from"../chunks/CiE1LlKV.js";import{p as Ze}from"../chunks/C41YH50Q.js";import{g as ce}from"../chunks/CTf6mQoE.js";import{b as H}from"../chunks/CoIRRsD9.js";import{U as et}from"../chunks/CclkODgu.js";import{D as ue}from"../chunks/KQ2xQpA3.js";import{E as tt,P as at,a as nt}from"../chunks/BmGWMSQm.js";import{D as ot,I as rt}from"../chunks/DDhBTdDt.js";import{g as fe}from"../chunks/BGVHQGl-.js";import{W as it}from"../chunks/Dbd6PPbz.js";import{C as st}from"../chunks/CwqI2jFH.js";import{w as j}from"../chunks/u94nIB4-.js";import{t as C}from"../chunks/BEkVdVE1.js";var lt=A('

                Loading organization...

                '),dt=A('

                '),ct=A(" ",1),ut=A(' ',1);function Tt(ge,me){Ne(me,!1);const[pe,ve]=Qe(),G=()=>Xe(Ze,"$page",pe),w=l();let a=l(null),c=l([]),p=l([]),F=l(!0),I=l(""),O=l(!1),D=l(!1),E=l(!1),T=l(!1),u=l(null),P=null,b=l();async function R(){if(e(w))try{o(F,!0),o(I,"");const[t,n,r]=await Promise.all([_.getOrganization(e(w)),_.listOrganizationPools(e(w)).catch(()=>[]),_.listOrganizationInstances(e(w)).catch(()=>[])]);o(a,t),o(c,n),o(p,r)}catch(t){o(I,t instanceof Error?t.message:"Failed to load organization")}finally{o(F,!1)}}function ye(t,n){const{events:r}=t;return{...n,events:r}}async function he(t){if(e(a))try{await _.updateOrganization(e(a).id,t),await R(),C.success("Organization Updated",`Organization ${e(a).name} has been updated successfully.`),o(O,!1)}catch(n){throw n}}async function _e(){if(e(a)){try{await _.deleteOrganization(e(a).id),ce(`${H}/organizations`)}catch(t){o(I,t instanceof Error?t.message:"Failed to delete organization")}o(D,!1)}}async function be(){if(e(u))try{await _.deleteInstance(e(u).name),C.success("Instance Deleted",`Instance ${e(u).name} has been deleted successfully.`),o(E,!1),o(u,null)}catch(t){const n=t instanceof Error?t.message:"Failed to delete instance";C.error("Delete Failed",n),o(E,!1),o(u,null)}}function ze(t){o(u,t),o(E,!0)}function $e(){o(T,!0)}async function xe(t){try{if(!e(a))return;await _.createOrganizationPool(e(a).id,t.detail),C.success("Pool Created",`Pool has been created successfully for organization ${e(a).name}.`),o(T,!1)}catch(n){throw n}}function V(){e(b)&&Je(b,e(b).scrollTop=e(b).scrollHeight)}function we(t){if(t.operation==="update"){const n=t.payload;if(e(a)&&n.id===e(a).id){const r=e(a).events?.length||0,i=n.events?.length||0;o(a,ye(e(a),n)),i>r&&setTimeout(()=>{V()},100)}}else if(t.operation==="delete"){const n=t.payload.id||t.payload;e(a)&&e(a).id===n&&ce(`${H}/organizations`)}}function Ie(t){if(!e(a))return;const n=t.payload;if(n.org_id===e(a).id){if(t.operation==="create")o(c,[...e(c),n]);else if(t.operation==="update")o(c,e(c).map(r=>r.id===n.id?n:r));else if(t.operation==="delete"){const r=n.id||n;o(c,e(c).filter(i=>i.id!==r))}}}function Ee(t){if(!e(a)||!e(c))return;const n=t.payload;if(e(c).some(i=>i.id===n.pool_id)){if(t.operation==="create")o(p,[...e(p),n]);else if(t.operation==="update")o(p,e(p).map(i=>i.id===n.id?n:i));else if(t.operation==="delete"){const i=n.id||n;o(p,e(p).filter(L=>L.id!==i))}}}We(()=>{R().then(()=>{e(a)?.events?.length&&setTimeout(()=>{V()},100)});const t=j.subscribeToEntity("organization",["update","delete"],we),n=j.subscribeToEntity("pool",["create","update","delete"],Ie),r=j.subscribeToEntity("instance",["create","update","delete"],Ee);P=()=>{t(),n(),r()}}),qe(()=>{P&&(P(),P=null)}),He(()=>G(),()=>{o(w,G().params.id)}),je(),Le();var J=ut();Ge(t=>{q(()=>Ve.title=`${e(a),s(()=>e(a)?`${e(a).name} - Organization Details`:"Organization Details")??""} - GARM`)});var S=M(J),B=f(S),K=f(B),U=f(K),Oe=f(U);g(U);var Q=d(U,2),X=f(Q),Y=d(f(X),2),De=f(Y,!0);g(Y),g(X),g(Q),g(K),g(B);var Te=d(B,2);{var Pe=t=>{var n=lt();x(t,n)},ke=t=>{var n=de(),r=M(n);{var i=z=>{var $=dt(),k=f($),N=f(k,!0);g(k),g($),q(()=>le(N,e(I))),x(z,$)},L=z=>{var $=de(),k=M($);{var N=W=>{var ae=ct(),ne=M(ae);{let v=m(()=>(e(a),s(()=>e(a).name||"Organization"))),y=m(()=>(e(a),s(()=>e(a).endpoint?.name))),Ue=m(()=>(Ke(fe),e(a),s(()=>fe(e(a).endpoint?.endpoint_type||"unknown"))));ot(ne,{get title(){return e(v)},get subtitle(){return`Endpoint: ${e(y)??""}`},get forgeIcon(){return e(Ue)},onEdit:()=>o(O,!0),onDelete:()=>o(D,!0)})}var 
oe=d(ne,2);tt(oe,{get entity(){return e(a)},entityType:"organization"});var re=d(oe,2);{let v=m(()=>(e(a),s(()=>e(a).id||""))),y=m(()=>(e(a),s(()=>e(a).name||"")));it(re,{entityType:"organization",get entityId(){return e(v)},get entityName(){return e(y)}})}var ie=d(re,2);{let v=m(()=>(e(a),s(()=>e(a).id||""))),y=m(()=>(e(a),s(()=>e(a).name||"")));at(ie,{get pools(){return e(c)},entityType:"organization",get entityId(){return e(v)},get entityName(){return e(y)},$$events:{addPool:$e}})}var se=d(ie,2);rt(se,{get instances(){return e(p)},entityType:"organization",onDeleteInstance:ze});var Be=d(se,2);{let v=m(()=>(e(a),s(()=>e(a)?.events)));nt(Be,{get events(){return e(v)},get eventsContainer(){return e(b)},set eventsContainer(y){o(b,y)},$$legacy:!0})}x(W,ae)};h(k,W=>{e(a)&&W(N)},!0)}x(z,$)};h(r,z=>{e(I)?z(i):z(L,!1)},!0)}x(t,n)};h(Te,t=>{e(F)?t(Pe):t(ke,!1)})}g(S);var Z=d(S,2);{var Me=t=>{et(t,{get entity(){return e(a)},entityType:"organization",$$events:{close:()=>o(O,!1),submit:n=>he(n.detail)}})};h(Z,t=>{e(O)&&e(a)&&t(Me)})}var ee=d(Z,2);{var Ce=t=>{ue(t,{title:"Delete Organization",message:"Are you sure you want to delete this organization? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(a),s(()=>e(a).name)},$$events:{close:()=>o(D,!1),confirm:_e}})};h(ee,t=>{e(D)&&e(a)&&t(Ce)})}var te=d(ee,2);{var Ae=t=>{ue(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(u),s(()=>e(u).name)},$$events:{close:()=>{o(E,!1),o(u,null)},confirm:be}})};h(te,t=>{e(E)&&e(u)&&t(Ae)})}var Fe=d(te,2);{var Se=t=>{{let n=m(()=>(e(a),s(()=>e(a).id||"")));st(t,{initialEntityType:"organization",get initialEntityId(){return e(n)},$$events:{close:()=>o(T,!1),submit:xe}})}};h(Fe,t=>{e(T)&&e(a)&&t(Se)})}q(()=>{Ye(Oe,"href",`${H}/organizations`),le(De,(e(a),s(()=>e(a)?e(a).name:"Loading...")))}),x(ge,J),Re(),ve()}export{Tt as component}; diff --git a/webapp/assets/_app/immutable/nodes/13.Br7HzjXP.js b/webapp/assets/_app/immutable/nodes/13.Br7HzjXP.js new file mode 100644 index 00000000..7a557f43 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/13.Br7HzjXP.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as re}from"../chunks/B3Pzt0F_.js";import{p as se,o as ne,l as d,a as ie,f as ce,h as de,b as pe,c as me,d as ue,g as t,m as r,$ as fe,j as ge,q as E,u as S,k as v,s as o,r as ye,n as he}from"../chunks/D8EpLgQ1.js";import{i as w,s as Pe,a as Ce}from"../chunks/5WA7h8uK.js";import{g as N}from"../chunks/CiE1LlKV.js";import"../chunks/CoIRRsD9.js";import{P as ve}from"../chunks/CO4LUyTP.js";import{C as $e}from"../chunks/CwqI2jFH.js";import{U as be}from"../chunks/DQP15tlf.js";import{D as _e}from"../chunks/KQ2xQpA3.js";import{e as Me,a as R}from"../chunks/wyaP0EDu.js";import{t as $}from"../chunks/BEkVdVE1.js";import{e as b,h as Ee}from"../chunks/BGVHQGl-.js";import{D as we,G as D,a as De}from"../chunks/C9DJVOi1.js";import{E as Te}from"../chunks/B7ITzBt8.js";import{E as ke}from"../chunks/CGpPw4EW.js";import{S as Ae}from"../chunks/BE4wujub.js";import{P as Fe}from"../chunks/CLYUNKnN.js";var Ue=ce('
                ',1);function Xe(G,q){se(q,!1);const[L,j]=Pe(),s=()=>Ce(Me,"$eagerCache",L),_=r(),i=r(),p=r(),T=r();let m=r([]),g=r(!0),u=r(""),y=r(""),n=r(1),c=r(25),h=r(!1),P=r(!1),f=r(!1),a=r(null);async function H(e){try{o(u,""),o(h,!1)}catch(l){throw o(u,l instanceof Error?l.message:"Failed to create pool"),l}}async function V(e){if(t(a))try{await N.updatePool(t(a).id,e),o(P,!1),$.add({type:"success",title:"Pool Updated",message:`Pool ${t(a).id.slice(0,8)}... has been updated successfully.`}),o(a,null)}catch(l){const C=l instanceof Error?l.message:"Failed to update pool";throw $.add({type:"error",title:"Update Failed",message:C}),l}}async function z(){if(!t(a))return;const e=`Pool ${t(a).id.slice(0,8)}...`;try{await N.deletePool(t(a).id),o(f,!1),$.add({type:"success",title:"Pool Deleted",message:`${e} has been deleted successfully.`}),o(a,null)}catch(l){const C=l instanceof Error?l.message:"Failed to delete pool";o(u,C),$.add({type:"error",title:"Delete Failed",message:C})}o(f,!1),o(a,null)}function B(){o(h,!0)}function k(e){o(a,e),o(P,!0)}function A(e){o(a,e),o(f,!0)}ne(async()=>{try{o(g,!0);const e=await R.getPools();e&&Array.isArray(e)&&o(m,e)}catch(e){console.error("Failed to load pools:",e),o(u,e instanceof Error?e.message:"Failed to load pools")}finally{o(g,!1)}});async function J(){try{await R.retryResource("pools")}catch(e){console.error("Retry failed:",e)}}const K=[{key:"id",title:"ID",flexible:!0,cellComponent:Te,cellProps:{entityType:"pool",showId:!0,fontMono:!0}},{key:"image",title:"Image",flexible:!0,cellComponent:D,cellProps:{field:"image",type:"code",showTitle:!0}},{key:"provider",title:"Provider",cellComponent:D,cellProps:{field:"provider_name"}},{key:"flavor",title:"Flavor",cellComponent:D,cellProps:{field:"flavor"}},{key:"entity",title:"Entity",cellComponent:Fe},{key:"endpoint",title:"Endpoint",cellComponent:ke},{key:"status",title:"Status",cellComponent:Ae,cellProps:{statusType:"enabled"}},{key:"actions",title:"Actions",align:"right",cellComponent:De}],O={entityType:"pool",primaryText:{field:"id",isClickable:!0,href:"/pools/{id}",useId:!0,isMonospace:!0},secondaryText:{field:"entity_name",computedValue:e=>b(e,s())},badges:[{type:"custom",value:e=>({variant:e.enabled?"success":"error",text:e.enabled?"Enabled":"Disabled"})}],actions:[{type:"edit",handler:e=>k(e)},{type:"delete",handler:e=>A(e)}]};function Q(e){o(y,e.detail.term),o(n,1)}function W(e){o(n,e.detail.page)}function X(e){o(c,e.detail.perPage),o(n,1)}function Y(e){k(e.detail.item)}function Z(e){A(e.detail.item)}d(()=>(t(m),s()),()=>{(!t(m).length||s().loaded.pools)&&o(m,s().pools)}),d(()=>s(),()=>{o(g,s().loading.pools)}),d(()=>s(),()=>{o(_,s().errorMessages.pools)}),d(()=>(t(m),t(y),s()),()=>{o(i,Ee(t(m),t(y),e=>b(e,s())))}),d(()=>(t(i),t(c)),()=>{o(p,Math.ceil(t(i).length/t(c)))}),d(()=>(t(n),t(p)),()=>{t(n)>t(p)&&t(p)>0&&o(n,t(p))}),d(()=>(t(i),t(n),t(c)),()=>{o(T,t(i).slice((t(n)-1)*t(c),t(n)*t(c)))}),ie(),re();var F=Ue();de(e=>{fe.title="Pools - GARM"});var M=pe(F),U=ge(M);ve(U,{title:"Pools",description:"Manage runner pools across all entities",actionLabel:"Add Pool",$$events:{action:B}});var ee=v(U,2);{let e=E(()=>t(_)||t(u)),l=E(()=>!!t(_));we(ee,{get columns(){return K},get data(){return t(T)},get loading(){return t(g)},get error(){return t(e)},get searchTerm(){return t(y)},searchPlaceholder:"Search by entity name...",get currentPage(){return t(n)},get perPage(){return t(c)},get totalPages(){return t(p)},get totalItems(){return t(i),S(()=>t(i).length)},itemName:"pools",emptyIconType:"cog",get 
showRetry(){return t(l)},get mobileCardConfig(){return O},$$events:{search:Q,pageChange:W,perPageChange:X,retry:J,edit:Y,delete:Z}})}ye(M);var x=v(M,2);{var te=e=>{$e(e,{$$events:{close:()=>o(h,!1),submit:l=>H(l.detail)}})};w(x,e=>{t(h)&&e(te)})}var I=v(x,2);{var oe=e=>{be(e,{get pool(){return t(a)},$$events:{close:()=>{o(P,!1),o(a,null)},submit:l=>V(l.detail)}})};w(I,e=>{t(P)&&t(a)&&e(oe)})}var ae=v(I,2);{var le=e=>{{let l=E(()=>(t(a),he(b),s(),S(()=>`Pool ${t(a).id.slice(0,8)}... (${b(t(a),s())})`)));_e(e,{title:"Delete Pool",message:"Are you sure you want to delete this pool? This action cannot be undone and will remove all associated runners.",get itemName(){return t(l)},$$events:{close:()=>{o(f,!1),o(a,null)},confirm:z}})}};w(ae,e=>{t(f)&&t(a)&&e(le)})}me(G,F),ue(),j()}export{Xe as component}; diff --git a/webapp/assets/_app/immutable/nodes/14.Cd0DOn96.js b/webapp/assets/_app/immutable/nodes/14.Cd0DOn96.js new file mode 100644 index 00000000..c682684d --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/14.Cd0DOn96.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as ze}from"../chunks/B3Pzt0F_.js";import{p as He,o as Ve,A as We,l as Ke,a as Qe,f as h,h as Xe,b as S,t as b,c as u,d as Ye,s as n,m as y,u as i,$ as Ze,g as t,j as r,r as a,k as s,v as l,y as mt,B as Wt,q as B,n as f}from"../chunks/D8EpLgQ1.js";import{i as g,s as ta,a as ea}from"../chunks/5WA7h8uK.js";import{w as Kt,e as aa,i as ra}from"../chunks/u94nIB4-.js";import{c as Qt,g as N,s as da}from"../chunks/CiE1LlKV.js";import{p as sa}from"../chunks/C41YH50Q.js";import{g as Xt}from"../chunks/CTf6mQoE.js";import{b as xt}from"../chunks/CoIRRsD9.js";import{U as ia}from"../chunks/DQP15tlf.js";import{D as Yt}from"../chunks/KQ2xQpA3.js";import{D as oa,I as na}from"../chunks/DDhBTdDt.js";import{t as E}from"../chunks/BEkVdVE1.js";import{e as P,i as R,j as Zt,b as C,g as te}from"../chunks/BGVHQGl-.js";var la=h('

                Loading pool...

                '),va=h('

                '),ca=h('
                GitHub Runner Group
                '),ma=h(' '),xa=h('
                Tags
                '),ua=h('

                Extra Specifications

                 
                '),ga=h('

                Basic Information

                Pool ID
                Provider
                Image
                Flavor
                Status
                Entity
                Created At
                Updated At

                Configuration

                Max Runners
                Min Idle Runners
                Bootstrap Timeout
                Priority
                Runner Prefix
                OS Type / Architecture
                ',1),pa=h(' ',1);function Ta(ee,ae){He(ae,!1);const[re,de]=ta(),ut=()=>ea(sa,"$page",re),L=y();let e=y(null),O=y(!0),M=y(""),F=y(!1),T=y(!1),D=y(!1),x=y(null),A=null;async function se(){if(t(L))try{n(O,!0),n(M,""),n(e,await N.getPool(t(L)))}catch(d){n(M,d instanceof Error?d.message:"Failed to load pool")}finally{n(O,!1)}}async function ie(d){if(t(e))try{const o=await N.updatePool(t(e).id,d);n(e,o),n(F,!1),E.success("Pool Updated",`Pool ${t(e).id} has been updated successfully.`)}catch(o){const _=o instanceof Error?o.message:"Failed to update pool";E.error("Update Failed",_)}}async function oe(){if(t(e)){try{await N.deletePool(t(e).id),Xt(`${xt}/pools`)}catch(d){const o=d instanceof Error?d.message:"Failed to delete pool";E.error("Delete Failed",o)}n(T,!1)}}async function ne(){if(t(x)){try{await N.deleteInstance(t(x).name),E.success("Instance Deleted",`Instance ${t(x).name} has been deleted successfully.`),n(D,!1),n(x,null)}catch(d){const o=d instanceof Error?d.message:"Failed to delete instance";E.error("Delete Failed",o)}n(D,!1),n(x,null)}}function le(d){n(x,d),n(D,!0)}function ve(d){if(!d)return"{}";try{if(typeof d=="string"){const o=JSON.parse(d);return JSON.stringify(o,null,2)}return JSON.stringify(d,null,2)}catch{return d.toString()}}function ce(d){if(d.operation==="update"){const o=d.payload;t(e)&&o.id===t(e).id&&n(e,o)}else if(d.operation==="delete"){const o=d.payload.id||d.payload;t(e)&&t(e).id===o&&Xt(`${xt}/pools`)}}function me(d){if(!t(e)||!t(e).instances)return;const o=d.payload;if(o.pool_id===t(e).id){if(d.operation==="create")mt(e,t(e).instances=[...t(e).instances,o]);else if(d.operation==="update")mt(e,t(e).instances=t(e).instances.map(_=>_.id===o.id?o:_));else if(d.operation==="delete"){const _=o.id||o;mt(e,t(e).instances=t(e).instances.filter(q=>q.id!==_))}n(e,t(e))}}Ve(()=>{se();const d=Kt.subscribeToEntity("pool",["update","delete"],ce),o=Kt.subscribeToEntity("instance",["create","update","delete"],me);A=()=>{d(),o()}}),We(()=>{A&&(A(),A=null)}),Ke(()=>ut(),()=>{n(L,ut().params.id)}),Qe(),ze();var gt=pa();Xe(d=>{b(()=>Ze.title=`${t(e),i(()=>t(e)?`Pool ${t(e).id} - Pool Details`:"Pool Details")??""} - GARM`)});var G=S(gt),J=r(G),pt=r(J),j=r(pt),xe=r(j);a(j);var ft=s(j,2),_t=r(ft),yt=s(r(_t),2),ue=r(yt,!0);a(yt),a(_t),a(ft),a(pt),a(J);var ge=s(J,2);{var pe=d=>{var o=la();u(d,o)},fe=d=>{var o=Wt(),_=S(o);{var q=k=>{var w=va(),U=r(w),z=r(U,!0);a(U),a(w),b(()=>l(z,t(M))),u(k,w)},ke=k=>{var w=Wt(),U=S(w);{var z=H=>{var kt=ga(),wt=S(kt);{let v=B(()=>(f(P),t(e),i(()=>P(t(e))))),c=B(()=>(f(R),t(e),i(()=>R(t(e))))),m=B(()=>(f(te),t(e),i(()=>te(t(e).endpoint?.endpoint_type||"unknown"))));oa(wt,{get title(){return t(e),i(()=>t(e).id)},get subtitle(){return`Pool for ${t(v)??""} (${t(c)??""})`},get forgeIcon(){return t(m)},onEdit:()=>n(F,!0),onDelete:()=>n(T,!0)})}var V=s(wt,2),W=r(V),$t=r(W),Pt=s(r($t),2),K=r(Pt),Dt=s(r(K),2),we=r(Dt,!0);a(Dt),a(K);var Q=s(K,2),It=s(r(Q),2),$e=r(It,!0);a(It),a(Q);var X=s(Q,2),Et=s(r(X),2),Mt=r(Et),Pe=r(Mt,!0);a(Mt),a(Et),a(X);var Y=s(X,2),Ft=s(r(Y),2),De=r(Ft,!0);a(Ft),a(Y);var Z=s(Y,2),Tt=s(r(Z),2),tt=r(Tt),Ie=r(tt,!0);a(tt),a(Tt),a(Z);var et=s(Z,2),At=s(r(et),2),Ut=r(At),at=r(Ut),Ee=r(at,!0);a(at);var rt=s(at,2),Me=r(rt,!0);a(rt),a(Ut),a(At),a(et);var dt=s(et,2),St=s(r(dt),2),Fe=r(St,!0);a(St),a(dt);var Bt=s(dt,2),Nt=s(r(Bt),2),Te=r(Nt,!0);a(Nt),a(Bt),a(Pt),a($t),a(W);var Rt=s(W,2),Ct=r(Rt),Lt=s(r(Ct),2),st=r(Lt),Ot=s(r(st),2),Ae=r(Ot,!0);a(Ot),a(st);var it=s(st,2),Gt=s(r(it),2),Ue=r(Gt,!0);a(Gt),a(it);var 
ot=s(it,2),Jt=s(r(ot),2),Se=r(Jt);a(Jt),a(ot);var nt=s(ot,2),jt=s(r(nt),2),Be=r(jt,!0);a(jt),a(nt);var lt=s(nt,2),qt=s(r(lt),2),Ne=r(qt,!0);a(qt),a(lt);var vt=s(lt,2),zt=s(r(vt),2),Re=r(zt);a(zt),a(vt);var Ht=s(vt,2);{var Ce=v=>{var c=ca(),m=s(r(c),2),p=r(m,!0);a(m),a(c),b(()=>l(p,(t(e),i(()=>t(e)["github-runner-group"])))),u(v,c)};g(Ht,v=>{t(e),i(()=>t(e)["github-runner-group"])&&v(Ce)})}var Le=s(Ht,2);{var Oe=v=>{var c=xa(),m=s(r(c),2),p=r(m);aa(p,5,()=>(t(e),i(()=>t(e).tags)),ra,(I,$)=>{var ct=ma(),qe=r(ct,!0);a(ct),b(()=>l(qe,(t($),i(()=>typeof t($)=="string"?t($):t($).name)))),u(I,ct)}),a(p),a(m),a(c),u(v,c)};g(Le,v=>{t(e),i(()=>t(e).tags&&t(e).tags.length>0)&&v(Oe)})}a(Lt),a(Ct),a(Rt),a(V);var Vt=s(V,2);{var Ge=v=>{var c=ua(),m=r(c),p=s(r(m),2),I=r(p,!0);a(p),a(m),a(c),b($=>l(I,$),[()=>(t(e),i(()=>ve(t(e).extra_specs)))]),u(v,c)};g(Vt,v=>{t(e),i(()=>t(e).extra_specs)&&v(Ge)})}var Je=s(Vt,2);{var je=v=>{na(v,{get instances(){return t(e),i(()=>t(e).instances)},entityType:"repository",onDeleteInstance:le})};g(Je,v=>{t(e),i(()=>t(e).instances)&&v(je)})}b((v,c,m,p,I)=>{l(we,(t(e),i(()=>t(e).id))),l($e,(t(e),i(()=>t(e).provider_name))),l(Pe,(t(e),i(()=>t(e).image))),l(De,(t(e),i(()=>t(e).flavor))),da(tt,1,`inline-flex px-2 py-1 text-xs font-medium rounded-full ${t(e),i(()=>t(e).enabled?"bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200":"bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200")??""}`),l(Ie,(t(e),i(()=>t(e).enabled?"Enabled":"Disabled"))),l(Ee,v),Qt(rt,"href",c),l(Me,m),l(Fe,p),l(Te,I),l(Ae,(t(e),i(()=>t(e).max_runners))),l(Ue,(t(e),i(()=>t(e).min_idle_runners))),l(Se,`${t(e),i(()=>t(e).runner_bootstrap_timeout)??""} minutes`),l(Be,(t(e),i(()=>t(e).priority))),l(Ne,(t(e),i(()=>t(e).runner_prefix||"garm"))),l(Re,`${t(e),i(()=>t(e).os_type)??""} / ${t(e),i(()=>t(e).os_arch)??""}`)},[()=>(f(R),t(e),i(()=>R(t(e)))),()=>(f(Zt),t(e),i(()=>Zt(t(e)))),()=>(f(P),t(e),i(()=>P(t(e)))),()=>(f(C),t(e),i(()=>C(t(e).created_at||""))),()=>(f(C),t(e),i(()=>C(t(e).updated_at||"")))]),u(H,kt)};g(U,H=>{t(e)&&H(z)},!0)}u(k,w)};g(_,k=>{t(M)?k(q):k(ke,!1)},!0)}u(d,o)};g(ge,d=>{t(O)?d(pe):d(fe,!1)})}a(G);var ht=s(G,2);{var _e=d=>{ia(d,{get pool(){return t(e)},$$events:{close:()=>n(F,!1),submit:o=>ie(o.detail)}})};g(ht,d=>{t(F)&&t(e)&&d(_e)})}var bt=s(ht,2);{var ye=d=>{{let o=B(()=>(t(e),f(P),i(()=>`Pool ${t(e).id} (${P(t(e))})`)));Yt(d,{title:"Delete Pool",message:"Are you sure you want to delete this pool? This action cannot be undone and will remove all associated runners.",get itemName(){return t(o)},$$events:{close:()=>n(T,!1),confirm:oe}})}};g(bt,d=>{t(T)&&t(e)&&d(ye)})}var he=s(bt,2);{var be=d=>{Yt(d,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return t(x),i(()=>t(x).name)},$$events:{close:()=>{n(D,!1),n(x,null)},confirm:ne}})};g(he,d=>{t(D)&&t(x)&&d(be)})}b(()=>{Qt(xe,"href",`${xt}/pools`),l(ue,(t(e),i(()=>t(e)?t(e).id:"Loading...")))}),u(ee,gt),Ye(),de()}export{Ta as component}; diff --git a/webapp/assets/_app/immutable/nodes/15.CkHQugXH.js b/webapp/assets/_app/immutable/nodes/15.CkHQugXH.js new file mode 100644 index 00000000..2e6c5196 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/15.CkHQugXH.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Ee}from"../chunks/B3Pzt0F_.js";import{p as Me,E as Le,o as Te,l as w,s as r,m as a,g as e,y as T,a as Fe,f as S,k as l,j as c,r as u,c as F,t as K,v as fe,x as xe,u as j,z as $e,e as be,d as Se,h as Ne,b as ze,$ as Be,q as Re}from"../chunks/D8EpLgQ1.js";import{a as De,i as J,s as Ae}from"../chunks/5WA7h8uK.js";import{r as Q,b as Ce,g as oe}from"../chunks/CiE1LlKV.js";import{e as Oe,i as He}from"../chunks/u94nIB4-.js";import{b as ye,a as Pe}from"../chunks/C6k1Q4We.js";import{p as Je}from"../chunks/D4Caz1gY.js";import{M as Ve}from"../chunks/qB7B8uiS.js";import{F as Ye}from"../chunks/CNMHKIIK.js";import{e as Ie,a as ve}from"../chunks/wyaP0EDu.js";import{U as Ke}from"../chunks/CclkODgu.js";import{D as Qe}from"../chunks/KQ2xQpA3.js";import{P as Xe}from"../chunks/CO4LUyTP.js";import{t as X}from"../chunks/BEkVdVE1.js";import{k as Ze,g as et,c as tt,m as rt,p as ot}from"../chunks/BGVHQGl-.js";import{D as at,G as st,a as nt}from"../chunks/C9DJVOi1.js";import{E as it}from"../chunks/B7ITzBt8.js";import{E as lt}from"../chunks/CGpPw4EW.js";import{S as dt}from"../chunks/BE4wujub.js";import"../chunks/CoIRRsD9.js";var ct=S('

                '),ut=S('

                Loading...

                '),pt=S(""),mt=S(''),gt=S('

                Webhook secret will be automatically generated

                '),ft=S('
                '),bt=S('

                Create Repository

                ');function yt(ae,se){Me(se,!1);const[ne,ie]=Ae(),p=()=>De(Ie,"$eagerCache",ne),R=a(),k=a(),G=a(),x=a(),C=Le();let g=a(!1),f=a(""),b=a("github"),o=a({name:"",owner:"",credentials_name:"",webhook_secret:"",pool_balancer_type:"roundrobin"}),D=a(!0),d=a(!0);async function y(){if(!p().loaded.credentials&&!p().loading.credentials)try{await ve.getCredentials()}catch(s){r(f,s instanceof Error?s.message:"Failed to load credentials")}}function _(s){r(b,s.detail),T(o,e(o).credentials_name="")}function P(){if(e(o).credentials_name){const s=e(R).find(A=>A.name===e(o).credentials_name);s&&s.forge_type&&r(b,s.forge_type)}}function E(){const s=new Uint8Array(32);return crypto.getRandomValues(s),Array.from(s,A=>A.toString(16).padStart(2,"0")).join("")}Te(()=>{y()});async function le(){if(!e(o).name?.trim()){r(f,"Repository name is required");return}if(!e(o).owner?.trim()){r(f,"Repository owner is required");return}if(!e(o).credentials_name){r(f,"Please select credentials");return}try{r(g,!0),r(f,"");const s={...e(o),install_webhook:e(D),auto_generate_secret:e(d)};C("submit",s)}catch(s){r(f,s instanceof Error?s.message:"Failed to create repository"),r(g,!1)}}w(()=>p(),()=>{r(R,p().credentials)}),w(()=>p(),()=>{r(k,p().loading.credentials)}),w(()=>(e(R),e(b)),()=>{r(G,e(R).filter(s=>e(b)?s.forge_type===e(b):!0))}),w(()=>e(d),()=>{e(d)?T(o,e(o).webhook_secret=E()):e(d)||T(o,e(o).webhook_secret="")}),w(()=>(e(o),e(d)),()=>{r(x,e(o).name?.trim()!==""&&e(o).owner?.trim()!==""&&e(o).credentials_name!==""&&(e(d)||e(o).webhook_secret?.trim()!==""))}),Fe(),Ee(),Ve(ae,{$$events:{close:()=>C("close")},children:(s,A)=>{var M=bt(),Z=l(c(M),2);{var de=v=>{var h=ct(),I=c(h),W=c(I,!0);u(I),u(h),K(()=>fe(W,e(f))),F(v,h)};J(Z,v=>{e(f)&&v(de)})}var ce=l(Z,2);{var ue=v=>{var h=ut();F(v,h)},pe=v=>{var h=ft(),I=c(h);Ye(I,{get selectedForgeType(){return e(b)},set selectedForgeType(i){r(b,i)},$$events:{select:_},$$legacy:!0});var W=l(I,2),ee=l(c(W),2);Q(ee),u(W);var L=l(W,2),N=l(c(L),2);Q(N),u(L);var z=l(L,2),B=l(c(z),2);K(()=>{e(o),xe(()=>{e(G)})});var O=c(B);O.value=O.__value="";var me=l(O);Oe(me,1,()=>e(G),He,(i,m)=>{var U=pt(),Ge=c(U);u(U);var ke={};K(()=>{fe(Ge,`${e(m),j(()=>e(m).name)??""} (${e(m),j(()=>e(m).endpoint?.name)??""})`),ke!==(ke=(e(m),j(()=>e(m).name)))&&(U.value=(U.__value=(e(m),j(()=>e(m).name)))??"")}),F(i,U)}),u(B),u(z);var H=l(z,2),V=l(c(H),2);K(()=>{e(o),xe(()=>{})});var Y=c(V);Y.value=Y.__value="roundrobin";var te=l(Y);te.value=te.__value="pack",u(V),u(H);var t=l(H,2),n=c(t),re=c(n);Q(re),$e(2),u(n);var $=l(n,2),q=c($),he=c(q);Q(he),$e(2),u(q);var We=l(q,2);{var qe=i=>{var m=mt();Q(m),ye(m,()=>e(o).webhook_secret,U=>T(o,e(o).webhook_secret=U)),F(i,m)},Ue=i=>{var m=gt();F(i,m)};J(We,i=>{e(d)?i(Ue,!1):i(qe)})}u($),u(t);var _e=l(t,2),we=c(_e),ge=l(we,2),je=c(ge,!0);u(ge),u(_e),u(h),K(()=>{ge.disabled=e(g)||e(k)||!e(x),fe(je,e(g)?"Creating...":"Create Repository")}),ye(ee,()=>e(o).name,i=>T(o,e(o).name=i)),ye(N,()=>e(o).owner,i=>T(o,e(o).owner=i)),Ce(B,()=>e(o).credentials_name,i=>T(o,e(o).credentials_name=i)),be("change",B,P),Ce(V,()=>e(o).pool_balancer_type,i=>T(o,e(o).pool_balancer_type=i)),Pe(re,()=>e(D),i=>r(D,i)),Pe(he,()=>e(d),i=>r(d,i)),be("click",we,()=>C("close")),be("submit",h,Je(le)),F(v,h)};J(ce,v=>{e(g)?v(ue):v(pe,!1)})}u(M),F(s,M)},$$slots:{default:!0}}),Se(),ie()}var vt=S('
                ',1);function Gt(ae,se){Me(se,!1);const[ne,ie]=Ae(),p=()=>De(Ie,"$eagerCache",ne),R=a(),k=a(),G=a();let x=a([]),C=a(!0),g=a(""),f=a(""),b=a(!1),o=a(!1),D=a(!1),d=a(null),y=a(null),_=a(1),P=a(25),E=a(1);Te(async()=>{try{r(C,!0);const t=await ve.getRepositories();t&&Array.isArray(t)&&r(x,t)}catch(t){console.error("Failed to load repositories:",t),r(g,t instanceof Error?t.message:"Failed to load repositories")}finally{r(C,!1)}});async function le(){try{await ve.retryResource("repositories")}catch(t){console.error("Retry failed:",t)}}function s(t){r(d,t),r(o,!0)}function A(t){r(y,t),r(D,!0)}function M(){r(b,!1),r(o,!1),r(D,!1),r(d,null),r(y,null),r(g,"")}async function Z(t){try{r(g,"");const n=t.detail,re={name:n.name,owner:n.owner,credentials_name:n.credentials_name,webhook_secret:n.webhook_secret},$=await oe.createRepository(re);if(n.install_webhook&&$.id)try{await oe.installRepoWebhook($.id),X.success("Webhook Installed",`Webhook for repository ${$.owner}/${$.name} has been installed successfully.`)}catch(q){console.warn("Repository created but webhook installation failed:",q),X.error("Webhook Installation Failed",q instanceof Error?q.message:"Failed to install webhook. You can try installing it manually from the repository details page.")}r(b,!1),X.success("Repository Created",`Repository ${$.owner}/${$.name} has been created successfully.`)}catch(n){throw r(g,n instanceof Error?n.message:"Failed to create repository"),n}}async function de(t){if(e(d))try{await oe.updateRepository(e(d).id,t),X.success("Repository Updated",`Repository ${e(d).owner}/${e(d).name} has been updated successfully.`),M()}catch(n){throw n}}async function ce(){if(e(y))try{r(g,""),await oe.deleteRepository(e(y).id),X.success("Repository Deleted",`Repository ${e(y).owner}/${e(y).name} has been deleted successfully.`),M()}catch(t){r(g,t instanceof Error?t.message:"Failed to delete repository")}}const ue=[{key:"repository",title:"Repository",cellComponent:it,cellProps:{entityType:"repository",showOwner:!0}},{key:"endpoint",title:"Endpoint",cellComponent:lt},{key:"credentials",title:"Credentials",cellComponent:st,cellProps:{field:"credentials_name"}},{key:"status",title:"Status",cellComponent:dt,cellProps:{statusType:"entity"}},{key:"actions",title:"Actions",align:"right",cellComponent:nt}],pe={entityType:"repository",primaryText:{field:"name",isClickable:!0,href:"/repositories/{id}",showOwner:!0},customInfo:[{icon:t=>et(t?.endpoint?.endpoint_type||"unknown"),text:t=>t?.endpoint?.name||"Unknown"}],badges:[{type:"custom",value:t=>Ze(t)}],actions:[{type:"edit",handler:t=>s(t)},{type:"delete",handler:t=>A(t)}]};function v(t){r(f,t.detail.term),r(_,1)}function h(t){r(_,t.detail.page)}function I(t){const n=tt(t.detail.perPage);r(P,n.newPerPage),r(_,n.newCurrentPage)}function W(t){s(t.detail.item)}function ee(t){A(t.detail.item)}w(()=>(e(x),p()),()=>{(!e(x).length||p().loaded.repositories)&&r(x,p().repositories)}),w(()=>p(),()=>{r(C,p().loading.repositories)}),w(()=>p(),()=>{r(R,p().errorMessages.repositories)}),w(()=>(e(x),e(f)),()=>{r(k,rt(e(x),e(f)))}),w(()=>(e(E),e(k),e(P),e(_)),()=>{r(E,Math.ceil(e(k).length/e(P))),e(_)>e(E)&&e(E)>0&&r(_,e(E))}),w(()=>(e(k),e(_),e(P)),()=>{r(G,ot(e(k),e(_),e(P)))}),Fe(),Ee();var L=vt();Ne(t=>{Be.title="Repositories - GARM"});var N=ze(L),z=c(N);Xe(z,{title:"Repositories",description:"Manage your GitHub repositories and their runners",actionLabel:"Add Repository",$$events:{action:()=>{r(b,!0)}}});var B=l(z,2);{let t=Re(()=>e(R)||e(g)),n=Re(()=>!!e(R));at(B,{get 
columns(){return ue},get data(){return e(G)},get loading(){return e(C)},get error(){return e(t)},get searchTerm(){return e(f)},searchPlaceholder:"Search repositories by name or owner...",get currentPage(){return e(_)},get perPage(){return e(P)},get totalPages(){return e(E)},get totalItems(){return e(k),j(()=>e(k).length)},itemName:"repositories",emptyIconType:"building",get showRetry(){return e(n)},get mobileCardConfig(){return pe},$$events:{search:v,pageChange:h,perPageChange:I,retry:le,edit:W,delete:ee}})}u(N);var O=l(N,2);{var me=t=>{yt(t,{$$events:{close:()=>r(b,!1),submit:Z}})};J(O,t=>{e(b)&&t(me)})}var H=l(O,2);{var V=t=>{Ke(t,{get entity(){return e(d)},entityType:"repository",$$events:{close:M,submit:n=>de(n.detail)}})};J(H,t=>{e(o)&&e(d)&&t(V)})}var Y=l(H,2);{var te=t=>{Qe(t,{title:"Delete Repository",message:"Are you sure you want to delete this repository? This action cannot be undone and will remove all associated pools and runners.",get itemName(){return`${e(y),j(()=>e(y).owner)??""}/${e(y),j(()=>e(y).name)??""}`},$$events:{close:M,confirm:ce}})};J(Y,t=>{e(D)&&e(y)&&t(te)})}F(ae,L),Se(),ie()}export{Gt as component}; diff --git a/webapp/assets/_app/immutable/nodes/16.B35VVkOd.js b/webapp/assets/_app/immutable/nodes/16.B35VVkOd.js new file mode 100644 index 00000000..5ee0941c --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/16.B35VVkOd.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as We}from"../chunks/B3Pzt0F_.js";import{p as qe,o as ze,A as He,l as je,a as Ge,f as A,h as Oe,b as M,t as z,c as w,d as Ve,g as e,m as l,s as r,u as s,$ as Je,j as u,r as f,k as d,v as le,y as Ke,B as de,q as m,n as Qe}from"../chunks/D8EpLgQ1.js";import{i as g,s as Xe,a as Ye}from"../chunks/5WA7h8uK.js";import{c as Ze,g as h}from"../chunks/CiE1LlKV.js";import{p as et}from"../chunks/C41YH50Q.js";import{g as ce}from"../chunks/CTf6mQoE.js";import{b as H}from"../chunks/CoIRRsD9.js";import{U as tt}from"../chunks/CclkODgu.js";import{D as pe}from"../chunks/KQ2xQpA3.js";import{E as ot,P as at,a as rt}from"../chunks/BmGWMSQm.js";import{D as st,I as nt}from"../chunks/DDhBTdDt.js";import{g as ue}from"../chunks/BGVHQGl-.js";import{W as it}from"../chunks/Dbd6PPbz.js";import{C as lt}from"../chunks/CwqI2jFH.js";import{w as j}from"../chunks/u94nIB4-.js";import{t as C}from"../chunks/BEkVdVE1.js";var dt=A('

                Loading repository...

                '),ct=A('

                '),pt=A(" ",1),ut=A(' ',1);function kt(fe,me){qe(me,!1);const[ye,ve]=Xe(),G=()=>Ye(et,"$page",ye),x=l();let t=l(null),c=l([]),y=l([]),F=l(!0),I=l(""),R=l(!1),D=l(!1),E=l(!1),T=l(!1),p=l(null),P=null,_=l();async function O(){if(e(x))try{r(F,!0),r(I,"");const[o,a,n]=await Promise.all([h.getRepository(e(x)),h.listRepositoryPools(e(x)).catch(()=>[]),h.listRepositoryInstances(e(x)).catch(()=>[])]);r(t,o),r(c,a),r(y,n)}catch(o){r(I,o instanceof Error?o.message:"Failed to load repository")}finally{r(F,!1)}}function ge(o,a){const{events:n}=o;return{...a,events:n}}async function he(o){if(e(t))try{await h.updateRepository(e(t).id,o),await O(),C.success("Repository Updated",`Repository ${e(t).owner}/${e(t).name} has been updated successfully.`),r(R,!1)}catch(a){throw a}}async function _e(){if(e(t)){try{await h.deleteRepository(e(t).id),ce(`${H}/repositories`)}catch(o){r(I,o instanceof Error?o.message:"Failed to delete repository")}r(D,!1)}}async function $e(){if(e(p))try{await h.deleteInstance(e(p).name),C.success("Instance Deleted",`Instance ${e(p).name} has been deleted successfully.`),r(E,!1),r(p,null)}catch(o){const a=o instanceof Error?o.message:"Failed to delete instance";C.error("Delete Failed",a),r(E,!1),r(p,null)}}function be(o){r(p,o),r(E,!0)}function we(){r(T,!0)}async function xe(o){try{if(!e(t))return;await h.createRepositoryPool(e(t).id,o.detail),C.success("Pool Created",`Pool has been created successfully for repository ${e(t).owner}/${e(t).name}.`),r(T,!1)}catch(a){throw a}}function V(){e(_)&&Ke(_,e(_).scrollTop=e(_).scrollHeight)}function Ie(o){if(o.operation==="update"){const a=o.payload;if(e(t)&&a.id===e(t).id){const n=e(t).events?.length||0,i=a.events?.length||0;r(t,ge(e(t),a)),i>n&&setTimeout(()=>{V()},100)}}else if(o.operation==="delete"){const a=o.payload.id||o.payload;e(t)&&e(t).id===a&&ce(`${H}/repositories`)}}function Ee(o){if(!e(t))return;const a=o.payload;if(a.repo_id===e(t).id){if(o.operation==="create")r(c,[...e(c),a]);else if(o.operation==="update")r(c,e(c).map(n=>n.id===a.id?a:n));else if(o.operation==="delete"){const n=a.id||a;r(c,e(c).filter(i=>i.id!==n))}}}function Re(o){if(!e(t)||!e(c))return;const a=o.payload;if(e(c).some(i=>i.id===a.pool_id)){if(o.operation==="create")r(y,[...e(y),a]);else if(o.operation==="update")r(y,e(y).map(i=>i.id===a.id?a:i));else if(o.operation==="delete"){const i=a.id||a;r(y,e(y).filter(L=>L.id!==i))}}}ze(()=>{O().then(()=>{e(t)?.events?.length&&setTimeout(()=>{V()},100)});const o=j.subscribeToEntity("repository",["update","delete"],Ie),a=j.subscribeToEntity("pool",["create","update","delete"],Ee),n=j.subscribeToEntity("instance",["create","update","delete"],Re);P=()=>{o(),a(),n()}}),He(()=>{P&&(P(),P=null)}),je(()=>G(),()=>{r(x,G().params.id)}),Ge(),We();var J=ut();Oe(o=>{z(()=>Je.title=`${e(t),s(()=>e(t)?`${e(t).name} - Repository Details`:"Repository Details")??""} - GARM`)});var S=M(J),B=u(S),K=u(B),U=u(K),De=u(U);f(U);var Q=d(U,2),X=u(Q),Y=d(u(X),2),Te=u(Y,!0);f(Y),f(X),f(Q),f(K),f(B);var Pe=d(B,2);{var ke=o=>{var a=dt();w(o,a)},Me=o=>{var a=de(),n=M(a);{var i=$=>{var b=ct(),k=u(b),N=u(k,!0);f(k),f(b),z(()=>le(N,e(I))),w($,b)},L=$=>{var b=de(),k=M(b);{var N=W=>{var oe=pt(),ae=M(oe);{let v=m(()=>(e(t),s(()=>e(t).name||"Repository"))),q=m(()=>(e(t),s(()=>e(t).owner))),Le=m(()=>(e(t),s(()=>e(t).endpoint?.name))),Ne=m(()=>(Qe(ue),e(t),s(()=>ue(e(t).endpoint?.endpoint_type||"unknown"))));st(ae,{get title(){return e(v)},get subtitle(){return`Owner: ${e(q)??""} • Endpoint: ${e(Le)??""}`},get forgeIcon(){return 
e(Ne)},onEdit:()=>r(R,!0),onDelete:()=>r(D,!0)})}var re=d(ae,2);ot(re,{get entity(){return e(t)},entityType:"repository"});var se=d(re,2);{let v=m(()=>(e(t),s(()=>e(t).id||"")));it(se,{entityType:"repository",get entityId(){return e(v)},get entityName(){return`${e(t),s(()=>e(t).owner)??""}/${e(t),s(()=>e(t).name)??""}`}})}var ne=d(se,2);{let v=m(()=>(e(t),s(()=>e(t).id||"")));at(ne,{get pools(){return e(c)},entityType:"repository",get entityId(){return e(v)},get entityName(){return`${e(t),s(()=>e(t).owner)??""}/${e(t),s(()=>e(t).name)??""}`},$$events:{addPool:we}})}var ie=d(ne,2);nt(ie,{get instances(){return e(y)},entityType:"repository",onDeleteInstance:be});var Ue=d(ie,2);{let v=m(()=>(e(t),s(()=>e(t)?.events)));rt(Ue,{get events(){return e(v)},get eventsContainer(){return e(_)},set eventsContainer(q){r(_,q)},$$legacy:!0})}w(W,oe)};g(k,W=>{e(t)&&W(N)},!0)}w($,b)};g(n,$=>{e(I)?$(i):$(L,!1)},!0)}w(o,a)};g(Pe,o=>{e(F)?o(ke):o(Me,!1)})}f(S);var Z=d(S,2);{var Ce=o=>{tt(o,{get entity(){return e(t)},entityType:"repository",$$events:{close:()=>r(R,!1),submit:a=>he(a.detail)}})};g(Z,o=>{e(R)&&e(t)&&o(Ce)})}var ee=d(Z,2);{var Ae=o=>{{let a=m(()=>(e(t),s(()=>`${e(t).owner}/${e(t).name}`)));pe(o,{title:"Delete Repository",message:"Are you sure you want to delete this repository? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(a)},$$events:{close:()=>r(D,!1),confirm:_e}})}};g(ee,o=>{e(D)&&e(t)&&o(Ae)})}var te=d(ee,2);{var Fe=o=>{pe(o,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(p),s(()=>e(p).name)},$$events:{close:()=>{r(E,!1),r(p,null)},confirm:$e}})};g(te,o=>{e(E)&&e(p)&&o(Fe)})}var Se=d(te,2);{var Be=o=>{{let a=m(()=>(e(t),s(()=>e(t).id||"")));lt(o,{initialEntityType:"repository",get initialEntityId(){return e(a)},$$events:{close:()=>r(T,!1),submit:xe}})}};g(Se,o=>{e(T)&&e(t)&&o(Be)})}z(()=>{Ze(De,"href",`${H}/repositories`),le(Te,(e(t),s(()=>e(t)?e(t).name:"Loading...")))}),w(fe,J),Ve(),ve()}export{kt as component}; diff --git a/webapp/assets/_app/immutable/nodes/17.CCltcs-Z.js b/webapp/assets/_app/immutable/nodes/17.CCltcs-Z.js new file mode 100644 index 00000000..731517fe --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/17.CCltcs-Z.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as yr}from"../chunks/B3Pzt0F_.js";import{p as hr,E as Gr,o as xr,f as w,k as a,j as o,g as e,m as i,r as s,t as k,s as r,e as de,c as f,v as X,b as kr,z as br,x as ye,D as qe,d as _r,l as U,a as Hr,h as jr,$ as Fr,q as Oe,u as mr,n as Nr}from"../chunks/D8EpLgQ1.js";import{i as D,s as Ur,a as qr}from"../chunks/5WA7h8uK.js";import{r as T,s as Le,b as he,g as R}from"../chunks/CiE1LlKV.js";import"../chunks/CoIRRsD9.js";import{P as Or}from"../chunks/CO4LUyTP.js";import{e as pr,i as vr}from"../chunks/u94nIB4-.js";import{b as H,a as Lr}from"../chunks/C6k1Q4We.js";import{p as Br}from"../chunks/D4Caz1gY.js";import{M as Jr}from"../chunks/qB7B8uiS.js";import{J as Vr}from"../chunks/DZblzgqm.js";import{U as Wr}from"../chunks/C89fcOde.js";import{D as Kr}from"../chunks/KQ2xQpA3.js";import{e as Qr,a as fr}from"../chunks/wyaP0EDu.js";import{t as Be}from"../chunks/BEkVdVE1.js";import{e as ne,h as Xr}from"../chunks/BGVHQGl-.js";import{D as Yr,G as Je,a as Zr}from"../chunks/C9DJVOi1.js";import{E as et}from"../chunks/B7ITzBt8.js";import{E as rt}from"../chunks/CGpPw4EW.js";import{S as tt}from"../chunks/BE4wujub.js";import{P as at}from"../chunks/CLYUNKnN.js";var ot=w('

                '),st=w('
                '),lt=w(""),it=w(''),dt=w('
                '),nt=w(""),ct=w(''),ut=w('

                Entity & Provider Configuration

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Extra Specs (JSON)
                ',1),gt=w('
                Creating...
                '),bt=w('

                Create New Scale Set

                Scale sets are only available for GitHub endpoints

                Entity Level *
                ');function mt(xe,ke){hr(ke,!1);const Y=Gr();let q=i(!1),m=i(""),n=i(""),v=i([]),S=i([]),O=i(!1),C=i(!1),$=i(""),p=i(""),_=i(""),u=i(""),y=i(""),A=i(void 0),z=i(void 0),I=i(void 0),d=i("garm"),L=i("linux"),B=i("amd64"),Z=i(""),ee=i(!0),G=i("{}");async function ce(){try{r(C,!0),r(S,await R.listProviders())}catch(c){r(m,c instanceof Error?c.message:"Failed to load providers")}finally{r(C,!1)}}async function _e(){if(e(n))try{switch(r(O,!0),r(v,[]),e(n)){case"repository":r(v,await R.listRepositories());break;case"organization":r(v,await R.listOrganizations());break;case"enterprise":r(v,await R.listEnterprises());break}}catch(c){r(m,c instanceof Error?c.message:`Failed to load ${e(n)}s`)}finally{r(O,!1)}}function re(c){e(n)!==c&&(r(n,c),r(p,""),_e())}async function we(){if(!e($)||!e(n)||!e(p)||!e(_)||!e(u)||!e(y)){r(m,"Please fill in all required fields");return}try{r(q,!0),r(m,"");let c={};if(e(G).trim())try{c=JSON.parse(e(G))}catch{throw new Error("Invalid JSON in extra specs")}const J={name:e($),provider_name:e(_),image:e(u),flavor:e(y),max_runners:e(A)||10,min_idle_runners:e(z)||0,runner_bootstrap_timeout:e(I)||20,runner_prefix:e(d),os_type:e(L),os_arch:e(B),"github-runner-group":e(Z)||void 0,enabled:e(ee),extra_specs:e(G).trim()?c:void 0};let E;switch(e(n)){case"repository":E=await R.createRepositoryScaleSet(e(p),J);break;case"organization":E=await R.createOrganizationScaleSet(e(p),J);break;case"enterprise":E=await R.createEnterpriseScaleSet(e(p),J);break;default:throw new Error("Invalid entity level selected")}Y("submit",E)}catch(c){r(m,c instanceof Error?c.message:"Failed to create scale set")}finally{r(q,!1)}}xr(()=>{ce()}),yr(),Jr(xe,{$$events:{close:()=>Y("close")},children:(c,J)=>{var E=bt(),V=a(o(E),2),ue=o(V);{var ge=b=>{var P=ot(),K=o(P),ve=o(K,!0);s(K),s(P),k(()=>X(ve,e(m))),f(b,P)};D(ue,b=>{e(m)&&b(ge)})}var j=a(ue,2),te=a(o(j),2);T(te),s(j);var ae=a(j,2),oe=o(ae),be=a(o(oe),2),W=o(be),se=a(W,2),me=a(se,2);s(be),s(oe),s(ae);var pe=a(ae,2);{var t=b=>{var P=ut(),K=kr(P),ve=a(o(K),2),Ce=o(ve),$e=o(Ce),$r=o($e);br(),s($e);var Er=a($e,2);{var Pr=l=>{var h=st();f(l,h)},Mr=l=>{var h=it();k(()=>{e(p),ye(()=>{e(n),e(v)})});var M=o(h),Ue=o(M);s(M),M.value=M.__value="";var le=a(M);pr(le,1,()=>e(v),vr,(F,x)=>{var N=lt(),fe=o(N);{var zr=Q=>{var ie=qe();k(()=>X(ie,`${e(x).owner??""}/${e(x).name??""} (${e(x).endpoint?.name||"Unknown endpoint"})`)),f(Q,ie)},Ir=Q=>{var ie=qe();k(()=>X(ie,`${e(x).name??""} (${e(x).endpoint?.name||"Unknown endpoint"})`)),f(Q,ie)};D(fe,Q=>{e(n)==="repository"?Q(zr):Q(Ir,!1)})}s(N);var gr={};k(()=>{gr!==(gr=e(x).id)&&(N.value=(N.__value=e(x).id)??"")}),f(F,N)}),s(h),k(()=>X(Ue,`Select a ${e(n)??""}`)),he(h,()=>e(p),F=>r(p,F)),f(l,h)};D(Er,l=>{e(O)?l(Pr):l(Mr,!1)})}s(Ce);var We=a(Ce,2),Tr=a(o(We),2);{var Rr=l=>{var h=dt();f(l,h)},Dr=l=>{var h=ct();k(()=>{e(_),ye(()=>{e(S)})});var M=o(h);M.value=M.__value="";var Ue=a(M);pr(Ue,1,()=>e(S),vr,(le,F)=>{var x=nt(),N=o(x,!0);s(x);var fe={};k(()=>{X(N,e(F).name),fe!==(fe=e(F).name)&&(x.value=(x.__value=e(F).name)??"")}),f(le,x)}),s(h),he(h,()=>e(_),le=>r(_,le)),f(l,h)};D(Tr,l=>{e(C)?l(Rr):l(Dr,!1)})}s(We),s(ve),s(K);var Ee=a(K,2),Ke=a(o(Ee),2),Pe=o(Ke),Qe=a(o(Pe),2);T(Qe),s(Pe);var Me=a(Pe,2),Xe=a(o(Me),2);T(Xe),s(Me);var Te=a(Me,2),Re=a(o(Te),2);k(()=>{e(L),ye(()=>{})});var De=o(Re);De.value=De.__value="linux";var Ye=a(De);Ye.value=Ye.__value="windows",s(Re),s(Te);var Ze=a(Te,2),Ae=a(o(Ze),2);k(()=>{e(B),ye(()=>{})});var ze=o(Ae);ze.value=ze.__value="amd64";var 
er=a(ze);er.value=er.__value="arm64",s(Ae),s(Ze),s(Ke),s(Ee);var Ie=a(Ee,2),rr=a(o(Ie),2),Ge=o(rr),tr=a(o(Ge),2);T(tr),s(Ge);var He=a(Ge,2),ar=a(o(He),2);T(ar),s(He);var or=a(He,2),sr=a(o(or),2);T(sr),s(or),s(rr),s(Ie);var lr=a(Ie,2),je=a(o(lr),2),Fe=o(je),ir=a(o(Fe),2);T(ir),s(Fe);var dr=a(Fe,2),nr=a(o(dr),2);T(nr),s(dr),s(je);var Ne=a(je,2),Ar=a(o(Ne),2);Vr(Ar,{rows:4,placeholder:"{}",get value(){return e(G)},set value(l){r(G,l)},$$legacy:!0}),s(Ne);var cr=a(Ne,2),ur=o(cr);T(ur),br(2),s(cr),s(lr),k(l=>X($r,`${l??""} `),[()=>e(n).charAt(0).toUpperCase()+e(n).slice(1)]),H(Qe,()=>e(u),l=>r(u,l)),H(Xe,()=>e(y),l=>r(y,l)),he(Re,()=>e(L),l=>r(L,l)),he(Ae,()=>e(B),l=>r(B,l)),H(tr,()=>e(z),l=>r(z,l)),H(ar,()=>e(A),l=>r(A,l)),H(sr,()=>e(I),l=>r(I,l)),H(ir,()=>e(d),l=>r(d,l)),H(nr,()=>e(Z),l=>r(Z,l)),Lr(ur,()=>e(ee),l=>r(ee,l)),f(b,P)};D(pe,b=>{e(n)&&b(t)})}var g=a(pe,2),Ve=o(g),Se=a(Ve,2),wr=o(Se);{var Sr=b=>{var P=gt();f(b,P)},Cr=b=>{var P=qe("Create Scale Set");f(b,P)};D(wr,b=>{e(q)?b(Sr):b(Cr,!1)})}s(Se),s(g),s(V),s(E),k(()=>{Le(W,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="repository"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Le(se,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="organization"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Le(me,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="enterprise"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Se.disabled=e(q)||!e($)||!e(n)||!e(p)||!e(_)||!e(u)||!e(y)}),H(te,()=>e($),b=>r($,b)),de("click",W,()=>re("repository")),de("click",se,()=>re("organization")),de("click",me,()=>re("enterprise")),de("click",Ve,()=>Y("close")),de("submit",V,Br(we)),f(c,E)},$$slots:{default:!0}}),_r()}var pt=w('
                ',1);function jt(xe,ke){hr(ke,!1);const[Y,q]=Ur(),m=()=>qr(Qr,"$eagerCache",Y),n=i(),v=i(),S=i(),O=i();let C=i([]),$=i(!0),p=i(""),_=i(""),u=i(1),y=i(25),A=i(!1),z=i(!1),I=i(!1),d=i(null);async function L(t){try{r(p,""),r(A,!1),Be.success("Scale Set Created","Scale set has been created successfully.")}catch(g){throw r(p,g instanceof Error?g.message:"Failed to create scale set"),g}}async function B(t){if(e(d))try{await R.updateScaleSet(e(d).id,t),Be.success("Scale Set Updated",`Scale set ${e(d).name} has been updated successfully.`),r(z,!1),r(d,null)}catch(g){throw g}}async function Z(){if(e(d))try{await R.deleteScaleSet(e(d).id),Be.success("Scale Set Deleted",`Scale set ${e(d).name} has been deleted successfully.`),r(I,!1),r(d,null)}catch(t){r(p,t instanceof Error?t.message:"Failed to delete scale set")}}function ee(){r(A,!0)}function G(t){r(d,t),r(z,!0)}function ce(t){r(d,t),r(I,!0)}xr(async()=>{try{r($,!0);const t=await fr.getScaleSets();t&&Array.isArray(t)&&r(C,t)}catch(t){console.error("Failed to load scale sets:",t),r(p,t instanceof Error?t.message:"Failed to load scale sets")}finally{r($,!1)}});async function _e(){try{await fr.retryResource("scalesets")}catch(t){console.error("Retry failed:",t)}}const re=[{key:"name",title:"Name",cellComponent:et,cellProps:{entityType:"scaleset"}},{key:"image",title:"Image",cellComponent:Je,cellProps:{field:"image",type:"code",showTitle:!0}},{key:"provider",title:"Provider",cellComponent:Je,cellProps:{field:"provider_name"}},{key:"flavor",title:"Flavor",cellComponent:Je,cellProps:{field:"flavor"}},{key:"entity",title:"Entity",cellComponent:at},{key:"endpoint",title:"Endpoint",cellComponent:rt},{key:"status",title:"Status",cellComponent:tt,cellProps:{statusType:"enabled"}},{key:"actions",title:"Actions",align:"right",cellComponent:Zr}],we={entityType:"scaleset",primaryText:{field:"name",isClickable:!0,href:"/scalesets/{id}"},secondaryText:{field:"entity_name",computedValue:t=>ne(t)},badges:[{type:"custom",value:t=>({variant:t.enabled?"success":"error",text:t.enabled?"Enabled":"Disabled"})}],actions:[{type:"edit",handler:t=>G(t)},{type:"delete",handler:t=>ce(t)}]};function c(t){r(_,t.detail.term),r(u,1)}function J(t){r(u,t.detail.page)}function E(t){r(y,t.detail.perPage),r(u,1)}function V(t){G(t.detail.item)}function ue(t){ce(t.detail.item)}U(()=>(e(C),m()),()=>{(!e(C).length||m().loaded.scalesets)&&r(C,m().scalesets)}),U(()=>m(),()=>{r($,m().loading.scalesets)}),U(()=>m(),()=>{r(n,m().errorMessages.scalesets)}),U(()=>(e(C),e(_),ne),()=>{r(v,Xr(e(C),e(_),t=>ne(t)))}),U(()=>(e(v),e(y)),()=>{r(S,Math.ceil(e(v).length/e(y)))}),U(()=>(e(u),e(S)),()=>{e(u)>e(S)&&e(S)>0&&r(u,e(S))}),U(()=>(e(v),e(u),e(y)),()=>{r(O,e(v).slice((e(u)-1)*e(y),e(u)*e(y)))}),Hr(),yr();var ge=pt();jr(t=>{Fr.title="Scale Sets - GARM"});var j=kr(ge),te=o(j);Or(te,{title:"Scale Sets",description:"Manage GitHub runner scale sets",actionLabel:"Add Scale Set",$$events:{action:ee}});var ae=a(te,2);{let t=Oe(()=>e(n)||e(p)),g=Oe(()=>!!e(n));Yr(ae,{get columns(){return re},get data(){return e(O)},get loading(){return e($)},get error(){return e(t)},get searchTerm(){return e(_)},searchPlaceholder:"Search by entity name...",get currentPage(){return e(u)},get perPage(){return e(y)},get totalPages(){return e(S)},get totalItems(){return e(v),mr(()=>e(v).length)},itemName:"scale sets",emptyIconType:"cog",get showRetry(){return e(g)},get mobileCardConfig(){return we},$$events:{search:c,pageChange:J,perPageChange:E,retry:_e,edit:V,delete:ue}})}s(j);var oe=a(j,2);{var 
be=t=>{mt(t,{$$events:{close:()=>r(A,!1),submit:g=>L(g.detail)}})};D(oe,t=>{e(A)&&t(be)})}var W=a(oe,2);{var se=t=>{Wr(t,{get scaleSet(){return e(d)},$$events:{close:()=>{r(z,!1),r(d,null)},submit:g=>B(g.detail)}})};D(W,t=>{e(z)&&e(d)&&t(se)})}var me=a(W,2);{var pe=t=>{{let g=Oe(()=>(e(d),Nr(ne),mr(()=>`Scale Set ${e(d).name} (${ne(e(d))})`)));Kr(t,{title:"Delete Scale Set",message:"Are you sure you want to delete this scale set? This action cannot be undone and will remove all associated runners.",get itemName(){return e(g)},$$events:{close:()=>{r(I,!1),r(d,null)},confirm:Z}})}};D(me,t=>{e(I)&&e(d)&&t(pe)})}f(xe,ge),_r(),q()}export{jt as component}; diff --git a/webapp/assets/_app/immutable/nodes/18.iVIhGVtu.js b/webapp/assets/_app/immutable/nodes/18.iVIhGVtu.js new file mode 100644 index 00000000..ad26b0dc --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/18.iVIhGVtu.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Ht}from"../chunks/B3Pzt0F_.js";import{p as Jt,o as jt,A as qt,l as zt,a as Pt,f as w,h as Vt,b as F,t as k,c as p,d as Wt,s as l,m as _,u as i,$ as Kt,g as e,j as a,r,k as d,v as o,y as ve,B as Pe,q as I,n as x}from"../chunks/D8EpLgQ1.js";import{i as g,s as Qt,a as Xt}from"../chunks/5WA7h8uK.js";import{c as Ve,g as U,s as Yt}from"../chunks/CiE1LlKV.js";import{p as Zt}from"../chunks/C41YH50Q.js";import{g as We}from"../chunks/CTf6mQoE.js";import{b as me}from"../chunks/CoIRRsD9.js";import{U as ea}from"../chunks/C89fcOde.js";import{D as Ke}from"../chunks/KQ2xQpA3.js";import{D as ta,I as aa}from"../chunks/DDhBTdDt.js";import{w as Qe}from"../chunks/u94nIB4-.js";import{t as B}from"../chunks/BEkVdVE1.js";import{e as S,i as R,j as Xe,b as C,g as Ye}from"../chunks/BGVHQGl-.js";var ra=w('

                Loading scale set...

                '),sa=w('

                '),da=w('
                GitHub Runner Group
                '),ia=w('

                Extra Specifications

                 
                '),na=w('

                Basic Information

                Scale Set ID
                Name
                Provider
                Image
                Flavor
                Status
                Entity
                Created At
                Updated At

                Configuration

                Max Runners
                Min Idle Runners
                Bootstrap Timeout
                Runner Prefix
                OS Type / Architecture
                ',1),la=w(' ',1);function Sa(Ze,et){Jt(et,!1);const[tt,at]=Qt(),ue=()=>Xt(Zt,"$page",tt),D=_();let t=_(null),G=_(!0),E=_(""),M=_(!1),N=_(!1),$=_(!1),m=_(null),A=null;async function L(){if(!(!e(D)||isNaN(e(D))))try{l(G,!0),l(E,""),l(t,await U.getScaleSet(e(D)))}catch(s){l(E,s instanceof Error?s.message:"Failed to load scale set")}finally{l(G,!1)}}async function rt(s){if(e(t))try{await U.updateScaleSet(e(t).id,s),await L(),B.success("Scale Set Updated",`Scale Set ${e(t).name} has been updated successfully.`),l(M,!1)}catch(n){throw n}}async function st(){if(e(t)){try{await U.deleteScaleSet(e(t).id),We(`${me}/scalesets`)}catch(s){const n=s instanceof Error?s.message:"Failed to delete scale set";B.error("Delete Failed",n)}l(N,!1)}}async function dt(){if(e(m)){try{await U.deleteInstance(e(m).name),B.success("Instance Deleted",`Instance ${e(m).name} has been deleted successfully.`),await L(),l($,!1),l(m,null)}catch(s){const n=s instanceof Error?s.message:"Failed to delete instance";B.error("Delete Failed",n)}l($,!1),l(m,null)}}function it(s){l(m,s),l($,!0)}function nt(s){if(!s)return"{}";try{if(typeof s=="string"){const n=JSON.parse(s);return JSON.stringify(n,null,2)}return JSON.stringify(s,null,2)}catch{return s.toString()}}function lt(s){if(s.operation==="update"){const n=s.payload;e(t)&&n.id===e(t).id&&l(t,n)}else if(s.operation==="delete"){const n=s.payload.id||s.payload;e(t)&&e(t).id===n&&We(`${me}/scalesets`)}}function ot(s){if(!e(t)||!e(t).instances)return;const n=s.payload;if(n.scale_set_id===e(t).id){if(s.operation==="create")ve(t,e(t).instances=[...e(t).instances,n]);else if(s.operation==="update")ve(t,e(t).instances=e(t).instances.map(y=>y.id===n.id?n:y));else if(s.operation==="delete"){const y=n.id||n;ve(t,e(t).instances=e(t).instances.filter(j=>j.id!==y))}l(t,e(t))}}jt(()=>{L();const s=Qe.subscribeToEntity("scaleset",["update","delete"],lt),n=Qe.subscribeToEntity("instance",["create","update","delete"],ot);A=()=>{s(),n()}}),qt(()=>{A&&(A(),A=null)}),zt(()=>ue(),()=>{l(D,parseInt(ue().params.id||"0"))}),Pt(),Ht();var xe=la();Vt(s=>{k(()=>Kt.title=`${e(t),i(()=>e(t)?`${e(t).name} - Scale Set Details`:"Scale Set Details")??""} - GARM`)});var O=F(xe),H=a(O),ge=a(H),J=a(ge),ct=a(J);r(J);var fe=d(J,2),pe=a(fe),_e=d(a(pe),2),vt=a(_e,!0);r(_e),r(pe),r(fe),r(ge),r(H);var mt=d(H,2);{var ut=s=>{var n=ra();p(s,n)},xt=s=>{var n=Pe(),y=F(n);{var j=h=>{var b=sa(),T=a(b),q=a(T,!0);r(T),r(b),k(()=>o(q,e(E))),p(h,b)},yt=h=>{var b=Pe(),T=F(b);{var q=z=>{var be=na(),ke=F(be);{let c=I(()=>(e(t),i(()=>e(t).name||"Scale Set"))),v=I(()=>(x(S),e(t),i(()=>S(e(t))))),u=I(()=>(x(R),e(t),i(()=>R(e(t))))),f=I(()=>(x(Ye),i(()=>Ye("github"))));ta(ke,{get title(){return e(c)},get subtitle(){return`Scale set for ${e(v)??""} (${e(u)??""}) • GitHub Runner Scale Set`},get forgeIcon(){return e(f)},onEdit:()=>l(M,!0),onDelete:()=>l(N,!0)})}var P=d(ke,2),V=a(P),Se=a(V),we=d(a(Se),2),W=a(we),$e=d(a(W),2),ht=a($e,!0);r($e),r(W);var K=d(W,2),Ie=d(a(K),2),bt=a(Ie,!0);r(Ie),r(K);var Q=d(K,2),De=d(a(Q),2),kt=a(De,!0);r(De),r(Q);var X=d(Q,2),Ee=d(a(X),2),Me=a(Ee),St=a(Me,!0);r(Me),r(Ee),r(X);var Y=d(X,2),Ne=d(a(Y),2),wt=a(Ne,!0);r(Ne),r(Y);var Z=d(Y,2),Ae=d(a(Z),2),ee=a(Ae),$t=a(ee,!0);r(ee),r(Ae),r(Z);var te=d(Z,2),Te=d(a(te),2),Fe=a(Te),ae=a(Fe),It=a(ae,!0);r(ae);var re=d(ae,2),Dt=a(re,!0);r(re),r(Fe),r(Te),r(te);var se=d(te,2),Ue=d(a(se),2),Et=a(Ue,!0);r(Ue),r(se);var Be=d(se,2),Re=d(a(Be),2),Mt=a(Re,!0);r(Re),r(Be),r(we),r(Se),r(V);var 
Ce=d(V,2),Ge=a(Ce),Le=d(a(Ge),2),de=a(Le),Oe=d(a(de),2),Nt=a(Oe,!0);r(Oe),r(de);var ie=d(de,2),He=d(a(ie),2),At=a(He,!0);r(He),r(ie);var ne=d(ie,2),Je=d(a(ne),2),Tt=a(Je);r(Je),r(ne);var le=d(ne,2),je=d(a(le),2),Ft=a(je,!0);r(je),r(le);var oe=d(le,2),qe=d(a(oe),2),Ut=a(qe);r(qe),r(oe);var Bt=d(oe,2);{var Rt=c=>{var v=da(),u=d(a(v),2),f=a(u,!0);r(u),r(v),k(()=>o(f,(e(t),i(()=>e(t)["github-runner-group"])))),p(c,v)};g(Bt,c=>{e(t),i(()=>e(t)["github-runner-group"])&&c(Rt)})}r(Le),r(Ge),r(Ce),r(P);var ze=d(P,2);{var Ct=c=>{var v=ia(),u=a(v),f=d(a(u),2),ce=a(f,!0);r(f),r(u),r(v),k(Ot=>o(ce,Ot),[()=>(e(t),i(()=>nt(e(t).extra_specs)))]),p(c,v)};g(ze,c=>{e(t),i(()=>e(t).extra_specs)&&c(Ct)})}var Gt=d(ze,2);{var Lt=c=>{aa(c,{get instances(){return e(t),i(()=>e(t).instances)},entityType:"scaleset",onDeleteInstance:it})};g(Gt,c=>{e(t),i(()=>e(t).instances)&&c(Lt)})}k((c,v,u,f,ce)=>{o(ht,(e(t),i(()=>e(t).id))),o(bt,(e(t),i(()=>e(t).name))),o(kt,(e(t),i(()=>e(t).provider_name))),o(St,(e(t),i(()=>e(t).image))),o(wt,(e(t),i(()=>e(t).flavor))),Yt(ee,1,`inline-flex px-2 py-1 text-xs font-medium rounded-full ${e(t),i(()=>e(t).enabled?"bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200":"bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200")??""}`),o($t,(e(t),i(()=>e(t).enabled?"Enabled":"Disabled"))),o(It,c),Ve(re,"href",v),o(Dt,u),o(Et,f),o(Mt,ce),o(Nt,(e(t),i(()=>e(t).max_runners))),o(At,(e(t),i(()=>e(t).min_idle_runners))),o(Tt,`${e(t),i(()=>e(t).runner_bootstrap_timeout)??""} minutes`),o(Ft,(e(t),i(()=>e(t).runner_prefix||"garm"))),o(Ut,`${e(t),i(()=>e(t).os_type)??""} / ${e(t),i(()=>e(t).os_arch)??""}`)},[()=>(x(R),e(t),i(()=>R(e(t)))),()=>(x(Xe),e(t),i(()=>Xe(e(t)))),()=>(x(S),e(t),i(()=>S(e(t)))),()=>(x(C),e(t),i(()=>C(e(t).created_at||""))),()=>(x(C),e(t),i(()=>C(e(t).updated_at||"")))]),p(z,be)};g(T,z=>{e(t)&&z(q)},!0)}p(h,b)};g(y,h=>{e(E)?h(j):h(yt,!1)},!0)}p(s,n)};g(mt,s=>{e(G)?s(ut):s(xt,!1)})}r(O);var ye=d(O,2);{var gt=s=>{ea(s,{get scaleSet(){return e(t)},$$events:{close:()=>l(M,!1),submit:n=>rt(n.detail)}})};g(ye,s=>{e(M)&&e(t)&&s(gt)})}var he=d(ye,2);{var ft=s=>{{let n=I(()=>(e(t),x(S),i(()=>`Scale Set ${e(t).name} (${S(e(t))})`)));Ke(s,{title:"Delete Scale Set",message:"Are you sure you want to delete this scale set? This action cannot be undone and will remove all associated runners.",get itemName(){return e(n)},$$events:{close:()=>l(N,!1),confirm:st}})}};g(he,s=>{e(N)&&e(t)&&s(ft)})}var pt=d(he,2);{var _t=s=>{Ke(s,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return e(m),i(()=>e(m).name)},$$events:{close:()=>{l($,!1),l(m,null)},confirm:dt}})};g(pt,s=>{e($)&&e(m)&&s(_t)})}k(()=>{Ve(ct,"href",`${me}/scalesets`),o(vt,(e(t),i(()=>e(t)?e(t).name:"Loading...")))}),p(Ze,xe),Wt(),at()}export{Sa as component}; diff --git a/webapp/assets/_app/immutable/nodes/2.CiT4lj0D.js b/webapp/assets/_app/immutable/nodes/2.CiT4lj0D.js new file mode 100644 index 00000000..b08a6de1 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/2.CiT4lj0D.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Je}from"../chunks/B3Pzt0F_.js";import{f as h,k as a,j as r,r as t,t as T,v as z,c as m,B as He,b as ze,p as Oe,E as vt,l as Le,s as v,m as I,g as e,a as Ne,C as ut,z as ve,n as B,u,d as Qe,e as Ge,o as mt,A as gt,h as bt,$ as pt,y as N}from"../chunks/D8EpLgQ1.js";import{p as fe,i as C,s as ft,a as xt}from"../chunks/5WA7h8uK.js";import{e as ht,w as Me,i as kt}from"../chunks/u94nIB4-.js";import{s as xe,B as yt,r as Ue,g as Ke,c as ue}from"../chunks/CiE1LlKV.js";import{b as Q}from"../chunks/CoIRRsD9.js";import{e as _t,a as Ce}from"../chunks/wyaP0EDu.js";import{b as Re}from"../chunks/C6k1Q4We.js";import{p as wt}from"../chunks/D4Caz1gY.js";import{M as Mt}from"../chunks/qB7B8uiS.js";import{t as Fe}from"../chunks/BEkVdVE1.js";var Ut=h('
                '),Ct=h('
                '),Rt=h('
                '),zt=h('
                '),Lt=h('
                ');function pe(me,D){let x=fe(D,"title",8),re=fe(D,"content",8),s=fe(D,"position",8,"top"),ae=fe(D,"width",8,"w-80");var i=Lt(),k=a(r(i),2),p=r(k),y=r(p,!0);t(p);var d=a(p,2),R=r(d,!0);t(d);var oe=a(d,2);{var ge=E=>{var q=Ut();m(E,q)},se=E=>{var q=He(),W=ze(q);{var F=L=>{var P=Ct();m(L,P)},K=L=>{var P=He(),ie=ze(P);{var le=$=>{var H=Rt();m($,H)},ne=$=>{var H=He(),X=ze(H);{var de=l=>{var o=zt();m(l,o)};C(X,l=>{s()==="right"&&l(de)},!0)}m($,H)};C(ie,$=>{s()==="left"?$(le):$(ne,!1)},!0)}m(L,P)};C(W,L=>{s()==="bottom"?L(F):L(K,!1)},!0)}m(E,q)};C(oe,E=>{s()==="top"?E(ge):E(se,!1)})}t(k),t(i),T(()=>{xe(k,1,`absolute ${s()==="top"?"bottom-full":s()==="bottom"?"top-full":s()==="left"?"right-full top-1/2 -translate-y-1/2":"left-full top-1/2 -translate-y-1/2"} left-1/2 transform -translate-x-1/2 ${s()==="top"?"mb-2":s()==="bottom"?"mt-2":"mx-2"} ${ae()??""} p-3 bg-gray-900 text-white text-xs rounded-lg shadow-lg opacity-0 invisible group-hover:opacity-100 group-hover:visible transition-all duration-200 z-50`),z(y,x()),z(R,re())}),m(me,i)}var $t=ut(' Settings',1),St=h('
                Metadata
                '),jt=h('
                Callback
                '),Bt=h('
                Webhook
                '),At=h('

                No URLs configured

                '),It=h('
                Controller Webhook URL

                Use this URL in your GitHub organization/repository webhook settings

                '),Et=h('

                Please enter a valid URL

                '),Ht=h('

                Please enter a valid URL

                '),Gt=h('

                Please enter a valid URL

                '),qt=h('

                Controller Settings

                URL where runners can fetch metadata and setup information

                URL where runners send status updates and lifecycle events

                URL where GitHub/Gitea will send webhook events for job notifications

                Time to wait before spinning up a runner for a new job (0 = immediate)

                '),Pt=h('

                Controller Information

                Identity

                Controller ID
                Hostname
                Job Age Backoff

                Integration URLs

                ',1);function Tt(me,D){Oe(D,!1);const x=I(),re=I();let s=fe(D,"controllerInfo",12);const ae=vt();let i=I(!1),k=I(!1),p=I(""),y=I(""),d=I(""),R=I(null);function oe(){v(p,s().metadata_url||""),v(y,s().callback_url||""),v(d,s().webhook_url||""),v(R,s().minimum_job_age_backoff||null),v(i,!0)}async function ge(){try{v(k,!0);const n={};e(p).trim()&&(n.metadata_url=e(p).trim()),e(y).trim()&&(n.callback_url=e(y).trim()),e(d).trim()&&(n.webhook_url=e(d).trim()),e(R)!==null&&e(R)>=0&&(n.minimum_job_age_backoff=e(R));const c=await Ke.updateController(n);Fe.success("Settings Updated","Controller settings have been updated successfully."),v(i,!1),s(c),ae("updated",c)}catch(n){Fe.error("Update Failed",n instanceof Error?n.message:"Failed to update controller settings")}finally{v(k,!1)}}function se(){v(i,!1),v(p,""),v(y,""),v(d,""),v(R,null)}Le(()=>{},()=>{v(x,n=>{if(!n.trim())return!0;try{return new URL(n),!0}catch{return!1}})}),Le(()=>(e(x),e(p),e(y),e(d),e(R)),()=>{v(re,e(x)(e(p))&&e(x)(e(y))&&e(x)(e(d))&&(e(R)===null||e(R)>=0))}),Ne(),Je();var E=Pt(),q=ze(E),W=r(q),F=r(W),K=r(F),L=a(r(K),2),P=a(r(L),2),ie=r(P),le=r(ie);t(ie),t(P),t(L),t(K);var ne=a(K,2);yt(ne,{variant:"secondary",size:"sm",$$events:{click:oe},children:(n,c)=>{var b=$t();ve(),m(n,b)},$$slots:{default:!0}}),t(F);var $=a(F,2),H=r($),X=r(H),de=a(r(X),2),l=r(de),o=a(r(l),2),S=r(o,!0);t(o),t(l);var _=a(l,2),f=a(r(_),2),w=r(f,!0);t(f),t(_);var A=a(_,2),V=r(A),Y=a(r(V),2),Z=r(Y);pe(Z,{title:"Job Age Backoff",content:"Time in seconds GARM waits after receiving a new job before spinning up a runner. This delay allows existing idle runners to pick up jobs first, preventing unnecessary runner creation. Set to 0 for immediate response."}),t(Y),t(V);var J=a(V,2),O=r(J);t(J),t(A),t(de),t(X),t(H);var ee=a(H,2),ce=r(ee),he=a(r(ce),2),ke=r(he);{var $e=n=>{var c=St(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Metadata URL",content:"URL where runners retrieve setup information and metadata. Runners must be able to connect to this URL during their initialization process. Usually accessible at /api/v1/metadata endpoint."}),t(M),t(b);var U=a(b,2),j=r(U,!0);t(U),t(c),T(()=>z(j,(B(s()),u(()=>s().metadata_url)))),m(n,c)};C(ke,n=>{B(s()),u(()=>s().metadata_url)&&n($e)})}var qe=a(ke,2);{var Xe=n=>{var c=jt(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Callback URL",content:"URL where runners send status updates and system information (OS version, runner agent ID, etc.) to the controller. Runners must be able to connect to this URL. Usually accessible at /api/v1/callbacks endpoint."}),t(M),t(b);var U=a(b,2),j=r(U,!0);t(U),t(c),T(()=>z(j,(B(s()),u(()=>s().callback_url)))),m(n,c)};C(qe,n=>{B(s()),u(()=>s().callback_url)&&n(Xe)})}var Pe=a(qe,2);{var Ye=n=>{var c=Bt(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Webhook Base URL",content:"Base URL for webhooks where GitHub sends job notifications. GARM needs to receive these webhooks to know when to create new runners for jobs. GitHub must be able to connect to this URL. Usually accessible at /webhooks endpoint."}),t(M),t(b);var U=a(b,2),j=r(U,!0);t(U),t(c),T(()=>z(j,(B(s()),u(()=>s().webhook_url)))),m(n,c)};C(Pe,n=>{B(s()),u(()=>s().webhook_url)&&n(Ye)})}var Ze=a(Pe,2);{var et=n=>{var c=At(),b=a(r(c),4);t(c),Ge("click",b,oe),m(n,c)};C(Ze,n=>{B(s()),u(()=>!s().metadata_url&&!s().callback_url&&!s().webhook_url)&&n(et)})}t(he),t(ce),t(ee),t($);var tt=a($,2);{var rt=n=>{var c=It(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Controller Webhook URL",content:"Unique webhook URL for this GARM controller. 
This is the preferred URL to use in GitHub webhook settings as it's controller-specific and allows multiple GARM controllers to work with the same repository. Automatically combines the webhook base URL with the controller ID."}),t(M),t(b);var U=a(b,2),j=r(U),be=a(r(j),2),ye=r(be),Se=r(ye,!0);t(ye),ve(2),t(be),t(j),t(U),t(c),T(()=>z(Se,(B(s()),u(()=>s().controller_webhook_url)))),m(n,c)};C(tt,n=>{B(s()),u(()=>s().controller_webhook_url)&&n(rt)})}t(W),t(q);var at=a(q,2);{var ot=n=>{Mt(n,{$$events:{close:se},children:(c,b)=>{var M=qt(),G=a(r(M),2),U=r(G),j=a(r(U),2);Ue(j);let be;var ye=a(j,2);{var Se=g=>{var te=Et();m(g,te)};C(ye,g=>{e(x),e(p),u(()=>!e(x)(e(p)))&&g(Se)})}ve(2),t(U);var je=a(U,2),_e=a(r(je),2);Ue(_e);let Te;var st=a(_e,2);{var it=g=>{var te=Ht();m(g,te)};C(st,g=>{e(x),e(y),u(()=>!e(x)(e(y)))&&g(it)})}ve(2),t(je);var Be=a(je,2),we=a(r(Be),2);Ue(we);let De;var lt=a(we,2);{var nt=g=>{var te=Gt();m(g,te)};C(lt,g=>{e(x),e(d),u(()=>!e(x)(e(d)))&&g(nt)})}ve(2),t(Be);var Ae=a(Be,2),Ve=a(r(Ae),2);Ue(Ve),ve(2),t(Ae);var We=a(Ae,2),Ie=r(We),Ee=a(Ie,2),dt=r(Ee,!0);t(Ee),t(We),t(G),t(M),T((g,te,ct)=>{be=xe(j,1,"block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 bg-white dark:bg-gray-700 text-gray-900 dark:text-white sm:text-sm",null,be,g),Te=xe(_e,1,"block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 bg-white dark:bg-gray-700 text-gray-900 dark:text-white sm:text-sm",null,Te,te),De=xe(we,1,"block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 bg-white dark:bg-gray-700 text-gray-900 dark:text-white sm:text-sm",null,De,ct),Ie.disabled=e(k),Ee.disabled=!e(re)||e(k),z(dt,e(k)?"Saving...":"Save Changes")},[()=>({"border-red-300":!e(x)(e(p))}),()=>({"border-red-300":!e(x)(e(y))}),()=>({"border-red-300":!e(x)(e(d))})]),Re(j,()=>e(p),g=>v(p,g)),Re(_e,()=>e(y),g=>v(y,g)),Re(we,()=>e(d),g=>v(d,g)),Re(Ve,()=>e(R),g=>v(R,g)),Ge("click",Ie,se),Ge("submit",G,wt(ge)),m(c,M)},$$slots:{default:!0}})};C(at,n=>{e(i)&&n(ot)})}T(n=>{z(le,`v${n??""}`),z(S,(B(s()),u(()=>s().controller_id))),z(w,(B(s()),u(()=>s().hostname||"Unknown"))),z(O,`${B(s()),u(()=>s().minimum_job_age_backoff||30)??""}s`)},[()=>(B(s()),u(()=>s().version?.replace(/^v/,"")||"Unknown"))]),m(me,E),Qe()}var Dt=h('

                Error loading dashboard

                '),Vt=h('
                '),Wt=h('

                Dashboard

                Welcome to GARM - GitHub Actions Runner Manager

                ');function ar(me,D){Oe(D,!1);const[x,re]=ft(),s=()=>xt(_t,"$eagerCache",x),ae=I();let i=I({repositories:0,organizations:0,pools:0,instances:0}),k=I(null),p=I(""),y=[];function d(l,o,S=1e3){const _=parseInt(l.textContent||"0"),f=(o-_)/(S/16);let w=_;const A=()=>{if(w+=f,f>0&&w>=o||f<0&&w<=o){l.textContent=o.toString();return}l.textContent=Math.floor(w).toString(),requestAnimationFrame(A)};_!==o&&requestAnimationFrame(A)}mt(async()=>{try{const[f,w,A,V,Y]=await Promise.all([Ce.getRepositories(),Ce.getOrganizations(),Ce.getPools(),Ke.listInstances(),Ce.getControllerInfo()]);setTimeout(()=>{const Z=document.querySelector('[data-stat="repositories"]'),J=document.querySelector('[data-stat="organizations"]'),O=document.querySelector('[data-stat="pools"]'),ee=document.querySelector('[data-stat="instances"]');Z&&d(Z,f.length),J&&d(J,w.length),O&&d(O,A.length),ee&&d(ee,V.length)},100),v(i,{repositories:f.length,organizations:w.length,pools:A.length,instances:V.length}),Y&&v(k,Y)}catch(f){v(p,f instanceof Error?f.message:"Failed to load dashboard data"),console.error("Dashboard error:",f)}const l=Me.subscribeToEntity("repository",["create","delete"],R),o=Me.subscribeToEntity("organization",["create","delete"],oe),S=Me.subscribeToEntity("pool",["create","delete"],ge),_=Me.subscribeToEntity("instance",["create","delete"],se);y=[l,o,S,_]}),gt(()=>{y.forEach(l=>l())});function R(l){const o=document.querySelector('[data-stat="repositories"]');l.operation==="create"?(N(i,e(i).repositories++),o&&d(o,e(i).repositories,500)):l.operation==="delete"&&(N(i,e(i).repositories=Math.max(0,e(i).repositories-1)),o&&d(o,e(i).repositories,500))}function oe(l){const o=document.querySelector('[data-stat="organizations"]');l.operation==="create"?(N(i,e(i).organizations++),o&&d(o,e(i).organizations,500)):l.operation==="delete"&&(N(i,e(i).organizations=Math.max(0,e(i).organizations-1)),o&&d(o,e(i).organizations,500))}function ge(l){const o=document.querySelector('[data-stat="pools"]');l.operation==="create"?(N(i,e(i).pools++),o&&d(o,e(i).pools,500)):l.operation==="delete"&&(N(i,e(i).pools=Math.max(0,e(i).pools-1)),o&&d(o,e(i).pools,500))}function se(l){const o=document.querySelector('[data-stat="instances"]');l.operation==="create"?(N(i,e(i).instances++),o&&d(o,e(i).instances,500)):l.operation==="delete"&&(N(i,e(i).instances=Math.max(0,e(i).instances-1)),o&&d(o,e(i).instances,500))}function E(l){v(k,l.detail)}function q(l){return{blue:"bg-blue-500 text-white",green:"bg-green-500 text-white",purple:"bg-purple-500 text-white",yellow:"bg-yellow-500 text-white"}[l]||"bg-gray-500 text-white"}Le(()=>(e(k),s()),()=>{(!e(k)||s().loaded.controllerInfo)&&v(k,s().controllerInfo)}),Le(()=>(e(i),Q),()=>{v(ae,[{title:"Repositories",value:e(i).repositories,icon:"M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z",color:"blue",href:`${Q}/repositories`},{title:"Organizations",value:e(i).organizations,icon:"M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z",color:"green",href:`${Q}/organizations`},{title:"Pools",value:e(i).pools,icon:"M4.318 6.318a4.5 4.5 0 000 6.364L12 20.364l7.682-7.682a4.5 4.5 0 00-6.364-6.364L12 7.636l-1.318-1.318a4.5 4.5 0 00-6.364 0z",color:"purple",href:`${Q}/pools`},{title:"Instances",value:e(i).instances,icon:"M9 3v2m6-2v2M9 
19v2m6-2v2M5 9H3m2 6H3m18-6h-2m2 6h-2M7 19h10a2 2 0 002-2V7a2 2 0 00-2-2H7a2 2 0 00-2 2v10a2 2 0 002 2zM9 9h6v6H9V9z",color:"yellow",href:`${Q}/instances`}])}),Ne(),Je();var W=Wt();bt(l=>{pt.title="Dashboard - GARM"});var F=a(r(W),2);{var K=l=>{var o=Dt(),S=r(o),_=a(r(S),2),f=a(r(_),2),w=r(f,!0);t(f),t(_),t(S),t(o),T(()=>z(w,e(p))),m(l,o)};C(F,l=>{e(p)&&l(K)})}var L=a(F,2);ht(L,5,()=>e(ae),kt,(l,o)=>{var S=Vt(),_=r(S),f=r(_),w=r(f),A=r(w),V=r(A),Y=r(V);t(V),t(A),t(w);var Z=a(w,2),J=r(Z),O=r(J),ee=r(O,!0);t(O);var ce=a(O,2),he=r(ce,!0);t(ce),t(J),t(Z),t(f),t(_),t(S),T((ke,$e)=>{ue(S,"href",(e(o),u(()=>e(o).href))),xe(A,1,`w-8 h-8 rounded-md ${ke??""} flex items-center justify-center`),ue(Y,"d",(e(o),u(()=>e(o).icon))),z(ee,(e(o),u(()=>e(o).title))),ue(ce,"data-stat",$e),z(he,(e(o),u(()=>e(o).value)))},[()=>(e(o),u(()=>q(e(o).color))),()=>(e(o),u(()=>e(o).title.toLowerCase()))]),m(l,S)}),t(L);var P=a(L,2);{var ie=l=>{Tt(l,{get controllerInfo(){return e(k)},$$events:{updated:E}})};C(P,l=>{e(k)&&l(ie)})}var le=a(P,2),ne=r(le),$=a(r(ne),4),H=r($),X=a(H,2),de=a(X,2);t($),t(ne),t(le),t(W),T(()=>{ue(H,"href",`${Q??""}/repositories`),ue(X,"href",`${Q??""}/pools`),ue(de,"href",`${Q??""}/instances`)}),m(me,W),Qe(),re()}export{ar as component}; diff --git a/webapp/assets/_app/immutable/nodes/3.BSFz0YHn.js b/webapp/assets/_app/immutable/nodes/3.BSFz0YHn.js new file mode 100644 index 00000000..f89efad2 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/3.BSFz0YHn.js @@ -0,0 +1,7 @@ +import"../chunks/DsnmJJEf.js";import{i as It}from"../chunks/B3Pzt0F_.js";import{p as jt,g as e,o as Gt,l as Q,a as Ut,f as E,e as f,h as qt,b as Fe,c as $,d as zt,$ as Bt,m as b,s as n,i as St,j as i,k as a,r as o,u,n as ye,q as Le,t as se,v as W,w as tt,x as Lt,y as _,z as X}from"../chunks/D8EpLgQ1.js";import{i as G,s as Nt,a as Kt}from"../chunks/5WA7h8uK.js";import{e as Vt,i as Ht}from"../chunks/u94nIB4-.js";import{h as Rt,r as L,s as Ie,b as Yt,a as Ot,g as ve}from"../chunks/CiE1LlKV.js";import{b as N,a as Jt}from"../chunks/C6k1Q4We.js";import{p as rt}from"../chunks/D4Caz1gY.js";import{P as Qt}from"../chunks/CO4LUyTP.js";import{F as Wt}from"../chunks/CNMHKIIK.js";import{D as Xt,A as at,G as it,a as Zt}from"../chunks/C9DJVOi1.js";import{e as er,a as Ne}from"../chunks/wyaP0EDu.js";import{t as je}from"../chunks/BEkVdVE1.js";import{f as tr,p as rr,g as Ke,B as ot,c as ar}from"../chunks/BGVHQGl-.js";import"../chunks/CoIRRsD9.js";import{E as ir}from"../chunks/CGpPw4EW.js";import{S as or}from"../chunks/BE4wujub.js";var nr=E('

                '),dr=E(""),sr=E('

                '),lr=E('

                Gitea only supports PAT authentication

                '),cr=E('
                '),ur=E('

                or drag and drop

                PEM, KEY files only

                ',1),pr=E(''),gr=E('
                '),br=E('

                or drag and drop

                PEM, KEY files only. Upload new private key.

                ',1),yr=E(" ",1),vr=E(''),fr=E(''),mr=E('
                ',1);function Ur(nt,dt){jt(dt,!1);const[st,lt]=Nt(),U=()=>Kt(er,"$eagerCache",st),Ge=b(),Z=b(),Ve=b(),Ue=b(),p={PAT:"pat",APP:"app"};let we=b(!0),le=b([]),J=b([]),fe=b(""),Ce=b(""),K=b(1),ce=b(25),ue=b(1),Pe=b(!1),Ae=b(!1),Te=b(!1),D=b(p.PAT),g=b(null),M=b(null),r=b({name:"",description:"",endpoint:"",auth_type:p.PAT,oauth2_token:"",app_id:"",installation_id:"",private_key_bytes:""}),$e={...e(r)},ee=b(!1);function ct(t){t.key==="Escape"&&(e(Pe)||e(Ae)||e(Te))&&P()}Gt(async()=>{try{n(we,!0);const[t,s]=await Promise.all([Ne.getCredentials(),Ne.getEndpoints()]);t&&Array.isArray(t)&&n(le,t),s&&Array.isArray(s)&&n(J,s)}catch(t){console.error("Failed to load credentials:",t),n(fe,t instanceof Error?t.message:"Failed to load credentials")}finally{n(we,!1)}});async function ut(){try{await Ne.retryResource("credentials")}catch(t){console.error("Retry failed:",t)}}async function pt(){He(),n(Pe,!0),n(x,"github")}let x=b("");function gt(t){n(x,t.detail),_(r,e(r).auth_type=p.PAT),n(D,p.PAT)}async function qe(t){n(g,t),n(r,{name:t.name||"",description:t.description||"",endpoint:t.endpoint?.name||"",auth_type:t["auth-type"]||p.PAT,oauth2_token:"",app_id:"",installation_id:"",private_key_bytes:""}),n(D,t["auth-type"]||p.PAT),$e={...e(r)},n(ee,!1),n(Ae,!0)}function ze(t){n(M,t),n(Te,!0)}function He(){n(r,{name:"",description:"",endpoint:"",auth_type:p.PAT,oauth2_token:"",app_id:"",installation_id:"",private_key_bytes:""}),$e={...e(r)},n(D,p.PAT),n(ee,!1)}function P(){n(Pe,!1),n(Ae,!1),n(Te,!1),n(g,null),n(M,null),n(x,""),He()}function Re(t){n(D,t),_(r,e(r).auth_type=t)}function bt(){const t={};if(e(r).name!==$e.name&&e(r).name.trim()!==""&&(t.name=e(r).name.trim()),e(r).description!==$e.description&&e(r).description.trim()!==""&&(t.description=e(r).description.trim()),e(ee)&&e(g))if(e(g)["auth-type"]===p.PAT)e(r).oauth2_token.trim()!==""&&(t.pat={oauth2_token:e(r).oauth2_token.trim()});else{const s={};let y=!1;if(e(r).app_id.trim()!==""&&(s.app_id=parseInt(e(r).app_id.trim()),y=!0),e(r).installation_id.trim()!==""&&(s.installation_id=parseInt(e(r).installation_id.trim()),y=!0),e(r).private_key_bytes!=="")try{const m=atob(e(r).private_key_bytes);s.private_key_bytes=Array.from(m,l=>l.charCodeAt(0)),y=!0}catch{}y&&(t.app=s)}return t}async function yt(){try{if(e(x)==="github")await ve.createGithubCredentials(e(r));else if(e(x)==="gitea")await ve.createGiteaCredentials(e(r));else throw new Error("Please select a forge type");je.success("Credentials Created",`Credentials ${e(r).name} have been created successfully.`),P()}catch(t){n(fe,t instanceof Error?t.message:"Failed to create credentials")}}async function vt(){if(!(!e(g)||!e(g).id))try{const t=bt();if(Object.keys(t).length===0){je.info("No Changes","No fields were modified."),P();return}e(g).forge_type==="github"?await ve.updateGithubCredentials(e(g).id,t):await ve.updateGiteaCredentials(e(g).id,t),je.success("Credentials Updated",`Credentials ${e(g)?.name||"Unknown"} have been updated successfully.`),P()}catch(t){n(fe,t instanceof Error?t.message:"Failed to update credentials")}}async function ft(){if(!(!e(M)||!e(M).id))try{e(M).forge_type==="github"?await ve.deleteGithubCredentials(e(M).id):await ve.deleteGiteaCredentials(e(M).id),je.success("Credentials Deleted",`Credentials ${e(M)?.name||"Unknown"} have been deleted successfully.`),P()}catch(t){n(fe,t instanceof Error?t.message:"Failed to delete credentials")}}function Ye(t){const y=t.target.files?.[0];if(!y){_(r,e(r).private_key_bytes="");return}const m=new FileReader;m.onload=l=>{const 
h=l.target?.result;_(r,e(r).private_key_bytes=btoa(h))},m.readAsText(y)}function Oe(){return!e(r).name||!e(r).description||!e(r).endpoint?!1:e(r).auth_type===p.PAT?!!e(r).oauth2_token:!!e(r).app_id&&!!e(r).installation_id&&!!e(r).private_key_bytes}function Je(){return!e(r).name.trim()||!e(r).description.trim()?!1:e(ee)&&e(g)?e(g)["auth-type"]===p.PAT?!!e(r).oauth2_token.trim():!!e(r).app_id.trim()&&!!e(r).installation_id.trim()&&!!e(r).private_key_bytes:!0}function mt(t){return e(J).find(y=>y.name===t)?.endpoint_type||""}function xt(t){return mt(t)==="gitea"}const _t=[{key:"name",title:"Name",cellComponent:it,cellProps:{field:"name"}},{key:"description",title:"Description",cellComponent:it,cellProps:{field:"description",type:"description"}},{key:"endpoint",title:"Endpoint",cellComponent:ir},{key:"auth_type",title:"Auth Type",cellComponent:or,cellProps:{statusType:"custom",statusField:"auth-type"}},{key:"actions",title:"Actions",align:"right",cellComponent:Zt}],ht={entityType:"credential",primaryText:{field:"name",isClickable:!1},secondaryText:{field:"description"},customInfo:[{icon:t=>Ke(t?.forge_type||"unknown"),text:t=>t?.endpoint?.name||"Unknown"}],badges:[{type:"auth",field:"auth-type"}],actions:[{type:"edit",handler:t=>qe(t)},{type:"delete",handler:t=>ze(t)}]};function kt(t){n(Ce,t.detail.term),n(K,1)}function wt(t){n(K,t.detail.page)}function Ct(t){const s=ar(t.detail.perPage);n(ce,s.newPerPage),n(K,s.newCurrentPage)}function Pt(t){qe(t.detail.item)}function At(t){ze(t.detail.item)}Q(()=>(e(le),U()),()=>{(!e(le).length||U().loaded.credentials)&&n(le,U().credentials)}),Q(()=>U(),()=>{n(we,U().loading.credentials)}),Q(()=>U(),()=>{n(Ge,U().errorMessages.credentials)}),Q(()=>(e(J),U()),()=>{(!e(J).length||U().loaded.endpoints)&&n(J,U().endpoints)}),Q(()=>(e(le),e(Ce)),()=>{n(Z,tr(e(le),e(Ce)))}),Q(()=>(e(ue),e(Z),e(ce),e(K)),()=>{n(ue,Math.ceil(e(Z).length/e(ce))),e(K)>e(ue)&&e(ue)>0&&n(K,e(ue))}),Q(()=>(e(Z),e(K),e(ce)),()=>{n(Ve,rr(e(Z),e(K),e(ce)))}),Q(()=>(e(x),e(J)),()=>{n(Ue,e(x)?e(J).filter(t=>t.endpoint_type===e(x)):e(J))}),Ut(),It();var Qe=mr();f("keydown",St,ct),qt(t=>{Bt.title="Credentials - GARM"});var Be=Fe(Qe),We=i(Be);Qt(We,{title:"Credentials",description:"Manage authentication credentials for your GitHub and Gitea endpoints.",actionLabel:"Add Credentials",$$events:{action:pt}});var Tt=a(We,2);{let t=Le(()=>e(Ge)||e(fe)),s=Le(()=>!!e(Ge));Xt(Tt,{get columns(){return _t},get data(){return e(Ve)},get loading(){return e(we)},get error(){return e(t)},get searchTerm(){return e(Ce)},searchPlaceholder:"Search credentials by name, description, or endpoint...",get currentPage(){return e(K)},get perPage(){return e(ce)},get totalPages(){return e(ue)},get totalItems(){return e(Z),u(()=>e(Z).length)},itemName:"credentials",emptyIconType:"key",get showRetry(){return e(s)},get mobileCardConfig(){return ht},$$events:{search:kt,pageChange:wt,perPageChange:Ct,retry:ut,edit:Pt,delete:At},$$slots:{"mobile-card":(y,m)=>{const l=Le(()=>m.item);var h=nr(),A=i(h),q=i(A),F=i(q),I=i(F,!0);o(F);var k=a(F,2),V=i(k,!0);o(k);var z=a(k,2),B=i(z),S=i(B);Rt(S,()=>(ye(Ke),ye(e(l)),u(()=>Ke(e(l).forge_type||"unknown"))));var te=a(S,2),re=i(te,!0);o(te),o(B),o(z),o(q),o(A);var ae=a(A,2),H=i(ae);{var ie=j=>{ot(j,{variant:"success",text:"PAT"})},R=j=>{ot(j,{variant:"info",text:"App"})};G(H,j=>{ye(e(l)),u(()=>(e(l)["auth-type"]||"pat")==="pat")?j(ie):j(R,!1)})}var Y=a(H,2),oe=i(Y);at(oe,{action:"edit",size:"sm",title:"Edit credentials",ariaLabel:"Edit credentials",$$events:{click:()=>qe(e(l))}});var 
pe=a(oe,2);at(pe,{action:"delete",size:"sm",title:"Delete credentials",ariaLabel:"Delete credentials",$$events:{click:()=>ze(e(l))}}),o(Y),o(ae),o(h),se(()=>{W(I,(ye(e(l)),u(()=>e(l).name))),W(V,(ye(e(l)),u(()=>e(l).description))),W(re,(ye(e(l)),u(()=>e(l).endpoint?.name||"Unknown")))}),$(y,h)}}})}o(Be);var Xe=a(Be,2);{var $t=t=>{var s=pr(),y=i(s),m=a(y,2),l=i(m),h=a(i(l),2);o(l);var A=a(l,2),q=i(A);Wt(q,{get selectedForgeType(){return e(x)},set selectedForgeType(d){n(x,d)},$$events:{select:gt},$$legacy:!0});var F=a(q,2),I=a(i(F),2);L(I),o(F);var k=a(F,2),V=a(i(k),2);tt(V),o(k);var z=a(k,2),B=a(i(z),2);se(()=>{e(r),Lt(()=>{e(Ue)})});var S=i(B);S.value=S.__value="";var te=a(S);Vt(te,1,()=>e(Ue),Ht,(d,c)=>{var v=dr(),C=i(v);o(v);var T={};se(()=>{W(C,`${e(c),u(()=>e(c).name)??""} (${e(c),u(()=>e(c).endpoint_type)??""})`),T!==(T=(e(c),u(()=>e(c).name)))&&(v.value=(v.__value=(e(c),u(()=>e(c).name)))??"")}),$(d,v)}),o(B);var re=a(B,2);{var ae=d=>{var c=sr(),v=i(c);o(c),se(()=>W(v,`Showing only ${e(x)??""} endpoints`)),$(d,c)};G(re,d=>{e(x)&&d(ae)})}o(z);var H=a(z,2),ie=a(i(H),2),R=i(ie),Y=a(R,2);o(ie);var oe=a(ie,2);{var pe=d=>{var c=lr();$(d,c)};G(oe,d=>{e(x)==="gitea"&&d(pe)})}o(H);var j=a(H,2);{var Ee=d=>{var c=cr(),v=a(i(c),2);L(v),o(c),N(v,()=>e(r).oauth2_token,C=>_(r,e(r).oauth2_token=C)),$(d,c)};G(j,d=>{e(D),u(()=>e(D)===p.PAT)&&d(Ee)})}var me=a(j,2);{var De=d=>{var c=ur(),v=Fe(c),C=a(i(v),2);L(C),o(v);var T=a(v,2),O=a(i(T),2);L(O),o(T);var de=a(T,2),ge=a(i(de),2),_e=i(ge),he=a(_e,2),ke=a(i(he),2),Me=i(ke);X(),o(ke),X(2),o(he),o(ge),o(de),N(C,()=>e(r).app_id,be=>_(r,e(r).app_id=be)),N(O,()=>e(r).installation_id,be=>_(r,e(r).installation_id=be)),f("change",_e,Ye),f("click",Me,()=>document.getElementById("private_key")?.click()),$(d,c)};G(me,d=>{e(D),u(()=>e(D)===p.APP)&&d(De)})}var w=a(me,2),ne=i(w),xe=a(ne,2);o(w),o(A),o(m),o(s),se((d,c,v)=>{Ie(R,1,`flex-1 py-2 px-4 text-sm font-medium rounded-md border focus:outline-none focus:ring-2 focus:ring-blue-500 cursor-pointer + ${e(D),u(()=>e(D)===p.PAT?"bg-blue-600 text-white border-blue-600":"bg-white dark:bg-gray-700 text-gray-700 dark:text-gray-300 border-gray-300 dark:border-gray-600 hover:bg-gray-50 dark:hover:bg-gray-600")??""} + ${d??""}`),Y.disabled=e(x)==="gitea",Ie(Y,1,`flex-1 py-2 px-4 text-sm font-medium rounded-md border focus:outline-none focus:ring-2 focus:ring-blue-500 + ${e(D),u(()=>e(D)===p.APP?"bg-blue-600 text-white border-blue-600":"bg-white dark:bg-gray-700 text-gray-700 dark:text-gray-300 border-gray-300 dark:border-gray-600 hover:bg-gray-50 dark:hover:bg-gray-600")??""} + ${e(x)==="gitea"?"opacity-50 cursor-not-allowed":"cursor-pointer"}`),xe.disabled=c,Ie(xe,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors + ${v??""}`)},[()=>(e(r),u(()=>(e(r).endpoint&&xt(e(r).endpoint),""))),()=>u(()=>!Oe()),()=>u(()=>Oe()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),f("click",y,P),f("click",h,P),N(I,()=>e(r).name,d=>_(r,e(r).name=d)),N(V,()=>e(r).description,d=>_(r,e(r).description=d)),Yt(B,()=>e(r).endpoint,d=>_(r,e(r).endpoint=d)),f("click",R,()=>Re(p.PAT)),f("click",Y,()=>Re(p.APP)),f("click",ne,P),f("submit",A,rt(yt)),$(t,s)};G(Xe,t=>{e(Pe)&&t($t)})}var Ze=a(Xe,2);{var Et=t=>{var s=vr(),y=i(s),m=a(y,2),l=i(m),h=i(l),A=a(i(h),2),q=i(A);o(A),o(h);var F=a(h,2);o(l);var I=a(l,2),k=i(I),V=a(i(k),2);L(V),o(k);var z=a(k,2),B=a(i(z),2);tt(B),o(z);var S=a(z,2),te=a(i(S),2);L(te),X(2),o(S);var 
re=a(S,2),ae=a(i(re),2),H=i(ae),ie=i(H,!0);o(H),o(ae),X(2),o(re);var R=a(re,2),Y=i(R),oe=i(Y);L(oe),X(2),o(Y),X(2),o(R);var pe=a(R,2);{var j=w=>{var ne=yr(),xe=Fe(ne);{var d=C=>{var T=gr(),O=a(i(T),2);L(O),o(T),N(O,()=>e(r).oauth2_token,de=>_(r,e(r).oauth2_token=de)),$(C,T)};G(xe,C=>{e(g),u(()=>e(g)["auth-type"]===p.PAT)&&C(d)})}var c=a(xe,2);{var v=C=>{var T=br(),O=Fe(T),de=a(i(O),2);L(de),o(O);var ge=a(O,2),_e=a(i(ge),2);L(_e),o(ge);var he=a(ge,2),ke=a(i(he),2),Me=i(ke),be=a(Me,2),et=a(i(be),2),Ft=i(et);X(),o(et),X(2),o(be),o(ke),o(he),N(de,()=>e(r).app_id,Se=>_(r,e(r).app_id=Se)),N(_e,()=>e(r).installation_id,Se=>_(r,e(r).installation_id=Se)),f("change",Me,Ye),f("click",Ft,()=>document.getElementById("edit_private_key")?.click()),$(C,T)};G(c,C=>{e(g),u(()=>e(g)["auth-type"]===p.APP)&&C(v)})}$(w,ne)};G(pe,w=>{e(ee)&&w(j)})}var Ee=a(pe,2),me=i(Ee),De=a(me,2);o(Ee),o(I),o(m),o(s),se((w,ne)=>{W(q,`Update credentials for ${e(g),u(()=>e(g)?.name||"Unknown")??""}`),Ot(te,(e(r),u(()=>e(r).endpoint))),W(ie,(e(g),u(()=>(e(g)?.["auth-type"]||p.PAT)===p.PAT?"Personal Access Token (PAT)":"GitHub App"))),De.disabled=w,Ie(De,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors + ${ne??""}`)},[()=>u(()=>!Je()),()=>u(()=>Je()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),f("click",y,P),f("click",F,P),N(V,()=>e(r).name,w=>_(r,e(r).name=w)),N(B,()=>e(r).description,w=>_(r,e(r).description=w)),Jt(oe,()=>e(ee),w=>n(ee,w)),f("click",me,P),f("submit",I,rt(vt)),$(t,s)};G(Ze,t=>{e(Ae)&&e(g)&&t(Et)})}var Dt=a(Ze,2);{var Mt=t=>{var s=fr(),y=i(s),m=a(y,2),l=i(m),h=i(l),A=a(i(h),2),q=a(i(A),2),F=i(q);o(q),o(A),o(h),o(l);var I=a(l,2),k=i(I),V=a(k,2);o(I),o(m),o(s),se(()=>W(F,`Are you sure you want to delete the credentials "${e(M),u(()=>e(M)?.name||"Unknown")??""}"? This action cannot be undone.`)),f("click",y,P),f("click",k,P),f("click",V,ft),$(t,s)};G(Dt,t=>{e(Te)&&e(M)&&t(Mt)})}$(nt,Qe),zt(),lt()}export{Ur as component}; diff --git a/webapp/assets/_app/immutable/nodes/4.XnVoh6ca.js b/webapp/assets/_app/immutable/nodes/4.XnVoh6ca.js new file mode 100644 index 00000000..36e9bd97 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/4.XnVoh6ca.js @@ -0,0 +1,3 @@ +import"../chunks/DsnmJJEf.js";import{i as pt}from"../chunks/B3Pzt0F_.js";import{p as bt,g as e,o as gt,l as Z,a as ft,f as I,e as b,h as _t,b as Ee,c as j,d as mt,m as p,i as yt,$ as vt,j as o,q as Ce,k as a,t as me,u as E,s,r as i,n as se,v as ne,w as De,z as V,y as g}from"../chunks/D8EpLgQ1.js";import{i as de,s as xt,a as ht}from"../chunks/5WA7h8uK.js";import{h as kt,r as U,c as je,s as Ie,g as ee}from"../chunks/CiE1LlKV.js";import{b as C}from"../chunks/C6k1Q4We.js";import{p as Ge}from"../chunks/D4Caz1gY.js";import{P as wt}from"../chunks/CO4LUyTP.js";import{F as Et}from"../chunks/CNMHKIIK.js";import{D as Ct,A as ze,G as Pe,a as Pt}from"../chunks/C9DJVOi1.js";import{e as $t,a as qe}from"../chunks/wyaP0EDu.js";import{t as ye}from"../chunks/BEkVdVE1.js";import{g as $e,c as Rt,a as At,p as Bt}from"../chunks/BGVHQGl-.js";import"../chunks/CoIRRsD9.js";import{E as Ut}from"../chunks/CGpPw4EW.js";var Lt=I('

                '),Mt=I('
                ',1),Tt=I('

                If empty, Base URL will be used as API Base URL

                '),Ft=I(''),Dt=I('
                ',1),jt=I('

                If empty, Base URL will be used as API Base URL

                '),It=I(''),Gt=I(''),zt=I('
                ',1);function rr(He,Ne){bt(Ne,!1);const[Se,Ve]=xt(),q=()=>ht($t,"$eagerCache",Se),ve=p(),H=p(),Re=p();let le=p(!0),O=p([]),te=p(""),ue=p(""),L=p(1),K=p(25),J=p(1),ce=p(!1),pe=p(!1),be=p(!1),G=p("github"),m=p(null),R=p(null),r=p({name:"",description:"",endpoint_type:"",base_url:"",api_base_url:"",upload_base_url:"",ca_cert_bundle:""}),k={...e(r)};gt(async()=>{try{s(le,!0);const t=await qe.getEndpoints();t&&Array.isArray(t)&&s(O,t)}catch(t){console.error("Failed to load endpoints:",t),s(te,t instanceof Error?t.message:"Failed to load endpoints")}finally{s(le,!1)}});async function Oe(){try{await qe.retryResource("endpoints")}catch(t){console.error("Retry failed:",t)}}const Ke=[{key:"name",title:"Name",cellComponent:Pe,cellProps:{field:"name"}},{key:"description",title:"Description",cellComponent:Pe,cellProps:{field:"description"}},{key:"api_url",title:"API URL",cellComponent:Pe,cellProps:{field:"api_base_url",fallbackField:"base_url"}},{key:"forge_type",title:"Forge Type",cellComponent:Ut},{key:"actions",title:"Actions",align:"right",cellComponent:Pt}],Je={entityType:"endpoint",primaryText:{field:"name",isClickable:!1},secondaryText:{field:"description"},customInfo:[{icon:t=>$e(t?.endpoint_type||"unknown"),text:t=>t?.api_base_url||"Unknown"}],actions:[{type:"edit",handler:t=>xe(t)},{type:"delete",handler:t=>he(t)}]};function Qe(t){s(ue,t.detail.term),s(L,1)}function We(t){s(L,t.detail.page)}function Xe(t){const d=Rt(t.detail.perPage);s(K,d.newPerPage),s(L,d.newCurrentPage)}function Ye(t){xe(t.detail.item)}function Ze(t){he(t.detail.item)}function et(){s(G,"github"),Ae(),s(ce,!0)}function tt(t){s(G,t.detail),g(r,e(r).endpoint_type=t.detail)}function xe(t){s(m,t),s(r,{name:t.name||"",description:t.description||"",endpoint_type:t.endpoint_type||"",base_url:t.base_url||"",api_base_url:t.api_base_url||"",upload_base_url:t.upload_base_url||"",ca_cert_bundle:typeof t.ca_cert_bundle=="string"?t.ca_cert_bundle:""}),k={...e(r)},s(pe,!0)}function he(t){s(R,t),s(be,!0)}function Ae(){s(r,{name:"",description:"",endpoint_type:"",base_url:"",api_base_url:"",upload_base_url:"",ca_cert_bundle:""}),k={...e(r)}}function rt(t){t.key==="Escape"&&(e(ce)||e(pe)||e(be))&&y()}function y(){s(ce,!1),s(pe,!1),s(be,!1),s(G,"github"),s(m,null),s(R,null),Ae()}function at(){const t={};if(e(r).description!==k.description&&(e(r).description.trim()!==""||k.description!=="")&&(t.description=e(r).description.trim()),e(r).base_url!==k.base_url&&e(r).base_url.trim()!==""&&(t.base_url=e(r).base_url.trim()),e(r).api_base_url!==k.api_base_url&&(e(r).api_base_url.trim()!==""||k.api_base_url!=="")&&(t.api_base_url=e(r).api_base_url.trim()),e(m)?.endpoint_type==="github"&&e(r).upload_base_url!==k.upload_base_url&&(e(r).upload_base_url.trim()!==""||k.upload_base_url!=="")&&(t.upload_base_url=e(r).upload_base_url.trim()),e(r).ca_cert_bundle!==k.ca_cert_bundle)if(e(r).ca_cert_bundle!=="")try{const d=atob(e(r).ca_cert_bundle);t.ca_cert_bundle=Array.from(d,c=>c.charCodeAt(0))}catch{k.ca_cert_bundle!==""&&(t.ca_cert_bundle=[])}else k.ca_cert_bundle!==""&&(t.ca_cert_bundle=[]);return t}async function ot(){try{const t={name:e(r).name,description:e(r).description,endpoint_type:e(r).endpoint_type,base_url:e(r).base_url,api_base_url:e(r).api_base_url,upload_base_url:e(r).upload_base_url};if(e(r).ca_cert_bundle&&e(r).ca_cert_bundle.trim()!=="")try{const d=atob(e(r).ca_cert_bundle);t.ca_cert_bundle=Array.from(d,c=>c.charCodeAt(0))}catch{}e(r).endpoint_type==="github"?await ee.createGithubEndpoint(t):await 
ee.createGiteaEndpoint(t),ye.success("Endpoint Created",`Endpoint ${e(r).name} has been created successfully.`),y()}catch(t){s(te,t instanceof Error?t.message:"Failed to create endpoint")}}async function it(){if(e(m))try{const t=at();if(Object.keys(t).length===0){ye.info("No Changes","No fields were modified."),y();return}e(m).endpoint_type==="github"?await ee.updateGithubEndpoint(e(m).name,t):await ee.updateGiteaEndpoint(e(m).name,t),ye.success("Endpoint Updated",`Endpoint ${e(m).name} has been updated successfully.`),y()}catch(t){s(te,t instanceof Error?t.message:"Failed to update endpoint")}}async function st(){if(e(R))try{e(R).endpoint_type==="github"?await ee.deleteGithubEndpoint(e(R).name):await ee.deleteGiteaEndpoint(e(R).name),ye.success("Endpoint Deleted",`Endpoint ${e(R).name} has been deleted successfully.`),y()}catch(t){s(te,t instanceof Error?t.message:"Failed to delete endpoint")}}function Be(t){const c=t.target.files?.[0];if(!c){g(r,e(r).ca_cert_bundle="");return}const v=new FileReader;v.onload=n=>{const f=n.target?.result;g(r,e(r).ca_cert_bundle=btoa(f))},v.readAsText(c)}function ge(){return!(!e(r).name||!e(r).description||!e(r).base_url||e(r).endpoint_type==="github"&&!e(r).api_base_url)}Z(()=>(e(O),q()),()=>{(!e(O).length||q().loaded.endpoints)&&s(O,q().endpoints)}),Z(()=>q(),()=>{s(le,q().loading.endpoints)}),Z(()=>q(),()=>{s(ve,q().errorMessages.endpoints)}),Z(()=>(e(O),e(ue)),()=>{s(H,At(e(O),e(ue)))}),Z(()=>(e(J),e(H),e(K),e(L)),()=>{s(J,Math.ceil(e(H).length/e(K))),e(L)>e(J)&&e(J)>0&&s(L,e(J))}),Z(()=>(e(H),e(L),e(K)),()=>{s(Re,Bt(e(H),e(L),e(K)))}),ft(),pt();var Ue=zt();b("keydown",yt,rt),_t(t=>{vt.title="Endpoints - GARM"});var ke=Ee(Ue),Le=o(ke);wt(Le,{title:"Endpoints",description:"Manage your GitHub and Gitea endpoints for runner management.",actionLabel:"Add Endpoint",$$events:{action:et}});var nt=a(Le,2);{let t=Ce(()=>e(ve)||e(te)),d=Ce(()=>!!e(ve));Ct(nt,{get columns(){return Ke},get data(){return e(Re)},get loading(){return e(le)},get error(){return e(t)},get searchTerm(){return e(ue)},searchPlaceholder:"Search endpoints by name, description, or URL...",get currentPage(){return e(L)},get perPage(){return e(K)},get totalPages(){return e(J)},get totalItems(){return e(H),E(()=>e(H).length)},itemName:"endpoints",emptyIconType:"settings",get showRetry(){return e(d)},get mobileCardConfig(){return Je},$$events:{search:Qe,pageChange:We,perPageChange:Xe,retry:Oe,edit:Ye,delete:Ze},$$slots:{"mobile-card":(c,v)=>{const n=Ce(()=>v.item);var f=Lt(),x=o(f),A=o(x),P=o(A),w=o(P,!0);i(P);var _=a(P,2),M=o(_,!0);i(_);var B=a(_,2),T=o(B);kt(T,()=>(se($e),se(e(n)),E(()=>$e(e(n).endpoint_type||"","w-5 h-5"))));var F=a(T,2),Q=o(F,!0);i(F),i(B),i(A),i(x);var N=a(x,2),z=o(N);ze(z,{action:"edit",size:"sm",title:"Edit endpoint",ariaLabel:"Edit endpoint",$$events:{click:()=>xe(e(n))}});var W=a(z,2);ze(W,{action:"delete",size:"sm",title:"Delete endpoint",ariaLabel:"Delete endpoint",$$events:{click:()=>he(e(n))}}),i(N),i(f),me(()=>{ne(w,(se(e(n)),E(()=>e(n).name))),ne(M,(se(e(n)),E(()=>e(n).description))),ne(Q,(se(e(n)),E(()=>e(n).endpoint_type)))}),j(c,f)}}})}i(ke);var Me=a(ke,2);{var dt=t=>{var d=Ft(),c=o(d),v=a(c,2),n=o(v),f=a(o(n),2);i(n);var x=a(n,2),A=o(x);Et(A,{get selectedForgeType(){return e(G)},set selectedForgeType(u){s(G,u)},$$events:{select:tt},$$legacy:!0});var P=a(A,2),w=a(o(P),2);U(w),i(P);var _=a(P,2),M=a(o(_),2);De(M),i(_);var B=a(_,2),T=a(o(B),2);U(T),i(B);var F=a(B,2);{var Q=u=>{var $=Mt(),l=Ee($),h=a(o(l),2);U(h),i(l);var 
D=a(l,2),S=a(o(D),2);U(S),i(D),C(h,()=>e(r).api_base_url,Y=>g(r,e(r).api_base_url=Y)),C(S,()=>e(r).upload_base_url,Y=>g(r,e(r).upload_base_url=Y)),j(u,$)},N=u=>{var $=Tt(),l=a(o($),2);U(l),V(2),i($),C(l,()=>e(r).api_base_url,h=>g(r,e(r).api_base_url=h)),j(u,$)};de(F,u=>{e(G)==="github"?u(Q):u(N,!1)})}var z=a(F,2),W=a(o(z),2),X=o(W),re=a(X,2),ae=a(o(re),2),fe=o(ae);V(),i(ae),V(2),i(re),i(W),i(z);var oe=a(z,2),_e=o(oe),ie=a(_e,2);i(oe),i(x),i(v),i(d),me((u,$)=>{je(w,"placeholder",e(G)==="github"?"e.g., github-enterprise or github-com":"e.g., gitea-main or my-gitea"),je(T,"placeholder",e(G)==="github"?"https://github.com or https://github.example.com":"https://gitea.example.com"),ie.disabled=u,Ie(ie,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors + ${$??""}`)},[()=>E(()=>!ge()),()=>E(()=>ge()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),b("click",c,y),b("click",f,y),C(w,()=>e(r).name,u=>g(r,e(r).name=u)),C(M,()=>e(r).description,u=>g(r,e(r).description=u)),C(T,()=>e(r).base_url,u=>g(r,e(r).base_url=u)),b("change",X,Be),b("click",fe,()=>document.getElementById("ca_cert_file")?.click()),b("click",_e,y),b("submit",x,Ge(ot)),j(t,d)};de(Me,t=>{e(ce)&&t(dt)})}var Te=a(Me,2);{var lt=t=>{var d=It(),c=o(d),v=a(c,2),n=o(v),f=o(n),x=o(f),A=o(x);i(x),V(2),i(f);var P=a(f,2);i(n);var w=a(n,2),_=o(w),M=a(o(_),2);U(M),i(_);var B=a(_,2),T=a(o(B),2);De(T),i(B);var F=a(B,2),Q=a(o(F),2);U(Q),i(F);var N=a(F,2);{var z=l=>{var h=Dt(),D=Ee(h),S=a(o(D),2);U(S),i(D);var Y=a(D,2),Fe=a(o(Y),2);U(Fe),i(Y),C(S,()=>e(r).api_base_url,we=>g(r,e(r).api_base_url=we)),C(Fe,()=>e(r).upload_base_url,we=>g(r,e(r).upload_base_url=we)),j(l,h)},W=l=>{var h=jt(),D=a(o(h),2);U(D),V(2),i(h),C(D,()=>e(r).api_base_url,S=>g(r,e(r).api_base_url=S)),j(l,h)};de(N,l=>{e(m),E(()=>e(m).endpoint_type==="github")?l(z):l(W,!1)})}var X=a(N,2),re=a(o(X),2),ae=o(re),fe=a(ae,2),oe=a(o(fe),2),_e=o(oe);V(),i(oe),V(2),i(fe),i(re),i(X);var ie=a(X,2),u=o(ie),$=a(u,2);i(ie),i(w),i(v),i(d),me((l,h)=>{ne(A,`Edit ${e(m),E(()=>e(m).endpoint_type==="github"?"GitHub":"Gitea")??""} Endpoint`),$.disabled=l,Ie($,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors + ${h??""}`)},[()=>E(()=>!ge()),()=>E(()=>ge()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),b("click",c,y),b("click",P,y),C(M,()=>e(r).name,l=>g(r,e(r).name=l)),C(T,()=>e(r).description,l=>g(r,e(r).description=l)),C(Q,()=>e(r).base_url,l=>g(r,e(r).base_url=l)),b("change",ae,Be),b("click",_e,()=>document.getElementById("edit_ca_cert_file")?.click()),b("click",u,y),b("submit",w,Ge(it)),j(t,d)};de(Te,t=>{e(pe)&&e(m)&&t(lt)})}var ut=a(Te,2);{var ct=t=>{var d=Gt(),c=o(d),v=a(c,2),n=o(v),f=o(n),x=a(o(f),2),A=a(o(x),2),P=o(A);i(A),i(x),i(f),i(n);var w=a(n,2),_=o(w),M=a(_,2);i(w),i(v),i(d),me(()=>ne(P,`Are you sure you want to delete the endpoint "${e(R),E(()=>e(R).name)??""}"? 
This action cannot be undone.`)),b("click",c,y),b("click",_,y),b("click",M,st),j(t,d)};de(ut,t=>{e(be)&&e(R)&&t(ct)})}j(He,Ue),mt(),Ve()}export{rr as component}; diff --git a/webapp/assets/_app/immutable/nodes/5.rvsSG-AQ.js b/webapp/assets/_app/immutable/nodes/5.rvsSG-AQ.js new file mode 100644 index 00000000..5f4f52cc --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/5.rvsSG-AQ.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Ae}from"../chunks/B3Pzt0F_.js";import{p as Ge,E as je,o as Se,l as w,s as r,m as n,g as e,a as Be,f as M,k as o,j as i,r as l,c as $,t as L,v as K,x as Ee,u as y,z as Ie,y as Z,e as $e,d as He,B as Je,b as Re,h as Ve,$ as We,n as A,q as ee}from"../chunks/D8EpLgQ1.js";import{a as Ue,i as B,s as Fe}from"../chunks/5WA7h8uK.js";import{r as Ce,b as Pe,c as Ye,g as ge}from"../chunks/CiE1LlKV.js";import{b as Me}from"../chunks/CoIRRsD9.js";import{P as Ke}from"../chunks/CO4LUyTP.js";import{e as Oe,i as Qe}from"../chunks/u94nIB4-.js";import{b as De}from"../chunks/C6k1Q4We.js";import{p as Xe}from"../chunks/D4Caz1gY.js";import{M as Ze}from"../chunks/qB7B8uiS.js";import{e as Le,a as ye}from"../chunks/wyaP0EDu.js";import{U as et}from"../chunks/CclkODgu.js";import{D as tt}from"../chunks/KQ2xQpA3.js";import{t as be}from"../chunks/BEkVdVE1.js";import{B as rt,k as ve,l as at}from"../chunks/BGVHQGl-.js";import{D as st,A as Te,G as ot,a as nt}from"../chunks/C9DJVOi1.js";import{E as it}from"../chunks/B7ITzBt8.js";import{E as lt}from"../chunks/CGpPw4EW.js";import{S as dt}from"../chunks/BE4wujub.js";var ct=M('

                '),ut=M('

                Loading...

                '),pt=M(""),mt=M('

                Loading credentials...

                '),ft=M('

                No GitHub credentials found. Please create GitHub credentials first.

                '),gt=M(`

                You'll need to manually configure this secret in GitHub's enterprise webhook settings.

                `),bt=M('

                Create Enterprise

                Enterprises are only available for GitHub endpoints.

                ');function vt(te,re){Ge(re,!1);const[ae,se]=Fe(),p=()=>Ue(Le,"$eagerCache",ae),G=n(),h=n(),m=n(),H=n(),E=je();let x=n(!1),f=n(""),a=n({name:"",credentials_name:"",webhook_secret:"",pool_balancer_type:"roundrobin"});async function _(){if(!p().loaded.credentials&&!p().loading.credentials)try{await ye.getCredentials()}catch(c){r(f,c instanceof Error?c.message:"Failed to load credentials")}}async function C(){if(!e(a).name||!e(a).name.trim()){r(f,"Enterprise name is required");return}if(!e(a).credentials_name){r(f,"Please select credentials");return}try{r(x,!0),r(f,"");const c={...e(a)};E("submit",c)}catch(c){r(f,c instanceof Error?c.message:"Failed to create enterprise"),r(x,!1)}}Se(()=>{_()}),w(()=>p(),()=>{r(G,p().credentials)}),w(()=>p(),()=>{r(h,p().loading.credentials)}),w(()=>e(G),()=>{r(m,e(G).filter(c=>c.forge_type==="github"))}),w(()=>e(a),()=>{r(H,e(a).name&&e(a).name.trim()!==""&&e(a).credentials_name!==""&&e(a).webhook_secret&&e(a).webhook_secret.trim()!=="")}),Be(),Ae(),Ze(te,{$$events:{close:()=>E("close")},children:(c,N)=>{var D=bt(),d=o(i(D),4);{var oe=g=>{var b=ct(),T=i(b),R=i(T,!0);l(T),l(b),L(()=>K(R,e(f))),$(g,b)};B(d,g=>{e(f)&&g(oe)})}var ne=o(d,2);{var ie=g=>{var b=ut();$(g,b)},le=g=>{var b=gt(),T=i(b),R=o(i(T),2);Ce(R),l(T);var q=o(T,2),U=o(i(q),2);L(()=>{e(a),Ee(()=>{e(m)})});var z=i(U);z.value=z.__value="";var de=o(z);Oe(de,1,()=>e(m),Qe,(s,u)=>{var P=pt(),v=i(P);l(P);var k={};L(()=>{K(v,`${e(u),y(()=>e(u).name)??""} (${e(u),y(()=>e(u).endpoint?.name||"Unknown endpoint")??""})`),k!==(k=(e(u),y(()=>e(u).name)))&&(P.value=(P.__value=(e(u),y(()=>e(u).name)))??"")}),$(s,P)}),l(U);var ce=o(U,2);{var ue=s=>{var u=mt();$(s,u)},O=s=>{var u=Je(),P=Re(u);{var v=k=>{var Y=ft();$(k,Y)};B(P,k=>{e(m),y(()=>e(m).length===0)&&k(v)},!0)}$(s,u)};B(ce,s=>{e(h)?s(ue):s(O,!1)})}l(q);var S=o(q,2),F=o(i(S),2);L(()=>{e(a),Ee(()=>{})});var j=i(F);j.value=j.__value="roundrobin";var I=o(j);I.value=I.__value="pack",l(F),l(S);var J=o(S,2),V=o(i(J),2);Ce(V),Ie(2),l(J);var Q=o(J,2),X=i(Q),W=o(X,2),t=i(W,!0);l(W),l(Q),l(b),L(()=>{W.disabled=(e(x),e(h),e(H),e(m),y(()=>e(x)||e(h)||!e(H)||e(m).length===0)),K(t,e(x)?"Creating...":"Create Enterprise")}),De(R,()=>e(a).name,s=>Z(a,e(a).name=s)),Pe(U,()=>e(a).credentials_name,s=>Z(a,e(a).credentials_name=s)),Pe(F,()=>e(a).pool_balancer_type,s=>Z(a,e(a).pool_balancer_type=s)),De(V,()=>e(a).webhook_secret,s=>Z(a,e(a).webhook_secret=s)),$e("click",X,()=>E("close")),$e("submit",b,Xe(C)),$(g,b)};B(ne,g=>{e(x)?g(ie):g(le,!1)})}l(D),$(c,D)},$$slots:{default:!0}}),He(),se()}var yt=M(''),ht=M('
                ',1);function Nt(te,re){Ge(re,!1);const[ae,se]=Fe(),p=()=>Ue(Le,"$eagerCache",ae),G=n(),h=n(),m=n(),H=n();let E=n([]),x=n(!0),f=n(""),a=n(""),_=n(1),C=n(25),c=n(!1),N=n(!1),D=n(!1),d=n(null);async function oe(t){try{r(f,""),await ge.createEnterprise(t),be.success("Enterprise Created",`Enterprise ${t.name} has been created successfully.`),r(c,!1)}catch(s){throw r(f,s instanceof Error?s.message:"Failed to create enterprise"),s}}async function ne(t){if(e(d))try{await ge.updateEnterprise(e(d).id,t),be.success("Enterprise Updated",`Enterprise ${e(d).name} has been updated successfully.`),r(N,!1),r(d,null)}catch(s){throw s}}async function ie(){if(e(d))try{r(f,""),await ge.deleteEnterprise(e(d).id),be.success("Enterprise Deleted",`Enterprise ${e(d).name} has been deleted successfully.`),r(D,!1),r(d,null)}catch(t){r(f,t instanceof Error?t.message:"Failed to delete enterprise")}}function le(){r(c,!0)}function g(t){r(d,t),r(N,!0)}function b(t){r(d,t),r(D,!0)}Se(async()=>{try{r(x,!0);const t=await ye.getEnterprises();t&&Array.isArray(t)&&r(E,t)}catch(t){console.error("Failed to load enterprises:",t),r(f,t instanceof Error?t.message:"Failed to load enterprises")}finally{r(x,!1)}});async function T(){try{await ye.retryResource("enterprises")}catch(t){console.error("Retry failed:",t)}}const R=[{key:"name",title:"Name",cellComponent:it,cellProps:{entityType:"enterprise"}},{key:"endpoint",title:"Endpoint",cellComponent:lt},{key:"credentials",title:"Credentials",cellComponent:ot,cellProps:{field:"credentials_name"}},{key:"status",title:"Status",cellComponent:dt,cellProps:{statusType:"entity"}},{key:"actions",title:"Actions",align:"right",cellComponent:nt}],q={entityType:"enterprise",primaryText:{field:"name",isClickable:!0,href:"/enterprises/{id}"},secondaryText:{field:"credentials_name"},badges:[{type:"custom",value:t=>ve(t)}],actions:[{type:"edit",handler:t=>g(t)},{type:"delete",handler:t=>b(t)}]};function U(t){r(a,t.detail.term),r(_,1)}function z(t){r(_,t.detail.page)}function de(t){r(C,t.detail.perPage),r(_,1)}function ce(t){g(t.detail.item)}function ue(t){b(t.detail.item)}w(()=>(e(E),p()),()=>{(!e(E).length||p().loaded.enterprises)&&r(E,p().enterprises)}),w(()=>p(),()=>{r(x,p().loading.enterprises)}),w(()=>p(),()=>{r(G,p().errorMessages.enterprises)}),w(()=>(e(E),e(a)),()=>{r(h,at(e(E),e(a)))}),w(()=>(e(h),e(C)),()=>{r(m,Math.ceil(e(h).length/e(C)))}),w(()=>(e(_),e(m)),()=>{e(_)>e(m)&&e(m)>0&&r(_,e(m))}),w(()=>(e(h),e(_),e(C)),()=>{r(H,e(h).slice((e(_)-1)*e(C),e(_)*e(C)))}),Be(),Ae();var O=ht();Ve(t=>{We.title="Enterprises - GARM"});var S=Re(O),F=i(S);Ke(F,{title:"Enterprises",description:"Manage GitHub enterprises",actionLabel:"Add Enterprise",$$events:{action:le}});var j=o(F,2);{let t=ee(()=>e(G)||e(f)),s=ee(()=>!!e(G));st(j,{get columns(){return R},get data(){return e(H)},get loading(){return e(x)},get error(){return e(t)},get searchTerm(){return e(a)},searchPlaceholder:"Search enterprises...",get currentPage(){return e(_)},get perPage(){return e(C)},get totalPages(){return e(m)},get totalItems(){return e(h),y(()=>e(h).length)},itemName:"enterprises",emptyIconType:"building",get showRetry(){return e(s)},get mobileCardConfig(){return q},$$events:{search:U,pageChange:z,perPageChange:de,retry:T,edit:ce,delete:ue},$$slots:{"mobile-card":(u,P)=>{const v=ee(()=>P.item),k=ee(()=>(A(ve),A(e(v)),y(()=>ve(e(v)))));var Y=yt(),pe=i(Y),me=i(pe),fe=i(me),Ne=i(fe,!0);l(fe);var he=o(fe,2),qe=i(he,!0);l(he),l(me),l(pe);var _e=o(pe,2),xe=i(_e);rt(xe,{get variant(){return 
A(e(k)),y(()=>e(k).variant)},get text(){return A(e(k)),y(()=>e(k).text)}});var ke=o(xe,2),we=i(ke);Te(we,{action:"edit",size:"sm",title:"Edit enterprise",ariaLabel:"Edit enterprise",$$events:{click:()=>g(e(v))}});var ze=o(we,2);Te(ze,{action:"delete",size:"sm",title:"Delete enterprise",ariaLabel:"Delete enterprise",$$events:{click:()=>b(e(v))}}),l(ke),l(_e),l(Y),L(()=>{Ye(me,"href",(A(Me),A(e(v)),y(()=>`${Me}/enterprises/${e(v).id}`))),K(Ne,(A(e(v)),y(()=>e(v).name))),K(qe,(A(e(v)),y(()=>e(v).credentials_name)))}),$(u,Y)}}})}l(S);var I=o(S,2);{var J=t=>{vt(t,{$$events:{close:()=>r(c,!1),submit:s=>oe(s.detail)}})};B(I,t=>{e(c)&&t(J)})}var V=o(I,2);{var Q=t=>{et(t,{get entity(){return e(d)},entityType:"enterprise",$$events:{close:()=>{r(N,!1),r(d,null)},submit:s=>ne(s.detail)}})};B(V,t=>{e(N)&&e(d)&&t(Q)})}var X=o(V,2);{var W=t=>{tt(t,{title:"Delete Enterprise",message:"Are you sure you want to delete this enterprise? This action cannot be undone.",get itemName(){return e(d),y(()=>e(d).name)},$$events:{close:()=>{r(D,!1),r(d,null)},confirm:ie}})};B(X,t=>{e(D)&&e(d)&&t(W)})}$(te,O),He(),se()}export{Nt as component}; diff --git a/webapp/assets/_app/immutable/nodes/6.CtGX0qgG.js b/webapp/assets/_app/immutable/nodes/6.CtGX0qgG.js new file mode 100644 index 00000000..03f28a9f --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/6.CtGX0qgG.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as He}from"../chunks/B3Pzt0F_.js";import{p as Ne,o as qe,A as ze,l as Ge,a as je,f as B,h as Re,b as A,t as G,c as E,d as Ve,g as e,m as o,s,u as l,$ as We,j as u,r as f,k as d,v as oe,y as Je,B as le,q as v,n as Ke}from"../chunks/D8EpLgQ1.js";import{i as g,s as Oe,a as Qe}from"../chunks/5WA7h8uK.js";import{c as Xe,g as y}from"../chunks/CiE1LlKV.js";import{p as Ye}from"../chunks/C41YH50Q.js";import{g as de}from"../chunks/CTf6mQoE.js";import{b as j}from"../chunks/CoIRRsD9.js";import{U as Ze}from"../chunks/CclkODgu.js";import{D as ce}from"../chunks/KQ2xQpA3.js";import{E as et,P as tt,a as at}from"../chunks/BmGWMSQm.js";import{D as rt,I as st}from"../chunks/DDhBTdDt.js";import{g as pe}from"../chunks/BGVHQGl-.js";import{w as R}from"../chunks/u94nIB4-.js";import{t as F}from"../chunks/BEkVdVE1.js";import{C as nt}from"../chunks/CwqI2jFH.js";var it=B('

                Loading enterprise...

                '),ot=B('

                '),lt=B(" ",1),dt=B(' ',1);function Dt(ue,fe){Ne(fe,!1);const[me,ve]=Oe(),V=()=>Qe(Ye,"$page",me),$=o();let a=o(null),c=o([]),m=o([]),S=o(!0),x=o(""),T=o(!1),P=o(!1),w=o(!1),M=o(!1),p=o(null),C=null,h=o();async function W(){if(e($))try{s(S,!0),s(x,"");const[t,r,n]=await Promise.all([y.getEnterprise(e($)),y.listEnterprisePools(e($)).catch(()=>[]),y.listEnterpriseInstances(e($)).catch(()=>[])]);s(a,t),s(c,r),s(m,n)}catch(t){s(x,t instanceof Error?t.message:"Failed to load enterprise")}finally{s(S,!1)}}function ge(t,r){const{events:n}=t;return{...r,events:n}}async function ye(t){if(e(a))try{await y.updateEnterprise(e(a).id,t),await W(),F.success("Enterprise Updated",`Enterprise ${e(a).name} has been updated successfully.`),s(T,!1)}catch(r){throw r}}async function he(){if(e(a)){try{await y.deleteEnterprise(e(a).id),de(`${j}/enterprises`)}catch(t){s(x,t instanceof Error?t.message:"Failed to delete enterprise")}s(P,!1)}}async function be(){if(e(p))try{await y.deleteInstance(e(p).name),F.success("Instance Deleted",`Instance ${e(p).name} has been deleted successfully.`),s(w,!1),s(p,null)}catch(t){const r=t instanceof Error?t.message:"Failed to delete instance";F.error("Delete Failed",r),s(w,!1),s(p,null)}}function _e(t){s(p,t),s(w,!0)}function Ee(){s(M,!0)}async function $e(t){try{if(!e(a))return;await y.createEnterprisePool(e(a).id,t.detail),F.success("Pool Created",`Pool has been created successfully for enterprise ${e(a).name}.`),s(M,!1)}catch(r){throw r}}function J(){e(h)&&Je(h,e(h).scrollTop=e(h).scrollHeight)}function xe(t){if(t.operation==="update"){const r=t.payload;if(e(a)&&r.id===e(a).id){const n=e(a).events?.length||0,i=r.events?.length||0;s(a,ge(e(a),r)),i>n&&setTimeout(()=>{J()},100)}}else if(t.operation==="delete"){const r=t.payload.id||t.payload;e(a)&&e(a).id===r&&de(`${j}/enterprises`)}}function we(t){if(!e(a))return;const r=t.payload;if(r.enterprise_id===e(a).id){if(t.operation==="create")s(c,[...e(c),r]);else if(t.operation==="update")s(c,e(c).map(n=>n.id===r.id?r:n));else if(t.operation==="delete"){const n=r.id||r;s(c,e(c).filter(i=>i.id!==n))}}}function Ie(t){if(!e(a)||!e(c))return;const r=t.payload;if(e(c).some(i=>i.id===r.pool_id)){if(t.operation==="create")s(m,[...e(m),r]);else if(t.operation==="update")s(m,e(m).map(i=>i.id===r.id?r:i));else if(t.operation==="delete"){const i=r.id||r;s(m,e(m).filter(N=>N.id!==i))}}}qe(()=>{W().then(()=>{e(a)?.events?.length&&setTimeout(()=>{J()},100)});const t=R.subscribeToEntity("enterprise",["update","delete"],xe),r=R.subscribeToEntity("pool",["create","update","delete"],we),n=R.subscribeToEntity("instance",["create","update","delete"],Ie);C=()=>{t(),r(),n()}}),ze(()=>{C&&(C(),C=null)}),Ge(()=>V(),()=>{s($,V().params.id)}),je(),He();var K=dt();Re(t=>{G(()=>We.title=`${e(a),l(()=>e(a)?`${e(a).name} - Enterprise Details`:"Enterprise Details")??""} - GARM`)});var U=A(K),L=u(U),O=u(L),H=u(O),De=u(H);f(H);var Q=d(H,2),X=u(Q),Y=d(u(X),2),Te=u(Y,!0);f(Y),f(X),f(Q),f(O),f(L);var Pe=d(L,2);{var Me=t=>{var r=it();E(t,r)},Ce=t=>{var r=le(),n=A(r);{var i=b=>{var _=ot(),k=u(_),q=u(k,!0);f(k),f(_),G(()=>oe(q,e(x))),E(b,_)},N=b=>{var _=le(),k=A(_);{var q=z=>{var ae=lt(),re=A(ae);{let I=v(()=>(e(a),l(()=>e(a).name||"Enterprise"))),D=v(()=>(e(a),l(()=>e(a).endpoint?.name))),Le=v(()=>(Ke(pe),l(()=>pe("github"))));rt(re,{get title(){return e(I)},get subtitle(){return`Endpoint: ${e(D)??""} • GitHub Enterprise`},get forgeIcon(){return e(Le)},onEdit:()=>s(T,!0),onDelete:()=>s(P,!0)})}var se=d(re,2);et(se,{get entity(){return 
e(a)},entityType:"enterprise"});var ne=d(se,2);{let I=v(()=>(e(a),l(()=>e(a).id||""))),D=v(()=>(e(a),l(()=>e(a).name||"")));tt(ne,{get pools(){return e(c)},entityType:"enterprise",get entityId(){return e(I)},get entityName(){return e(D)},$$events:{addPool:Ee}})}var ie=d(ne,2);st(ie,{get instances(){return e(m)},entityType:"enterprise",onDeleteInstance:_e});var Ue=d(ie,2);{let I=v(()=>(e(a),l(()=>e(a)?.events)));at(Ue,{get events(){return e(I)},get eventsContainer(){return e(h)},set eventsContainer(D){s(h,D)},$$legacy:!0})}E(z,ae)};g(k,z=>{e(a)&&z(q)},!0)}E(b,_)};g(n,b=>{e(x)?b(i):b(N,!1)},!0)}E(t,r)};g(Pe,t=>{e(S)?t(Me):t(Ce,!1)})}f(U);var Z=d(U,2);{var ke=t=>{Ze(t,{get entity(){return e(a)},entityType:"enterprise",$$events:{close:()=>s(T,!1),submit:r=>ye(r.detail)}})};g(Z,t=>{e(T)&&e(a)&&t(ke)})}var ee=d(Z,2);{var Ae=t=>{ce(t,{title:"Delete Enterprise",message:"Are you sure you want to delete this enterprise? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(a),l(()=>e(a).name)},$$events:{close:()=>s(P,!1),confirm:he}})};g(ee,t=>{e(P)&&e(a)&&t(Ae)})}var te=d(ee,2);{var Fe=t=>{ce(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(p),l(()=>e(p).name)},$$events:{close:()=>{s(w,!1),s(p,null)},confirm:be}})};g(te,t=>{e(w)&&e(p)&&t(Fe)})}var Be=d(te,2);{var Se=t=>{{let r=v(()=>(e(a),l(()=>e(a).id||"")));nt(t,{initialEntityType:"enterprise",get initialEntityId(){return e(r)},$$events:{close:()=>s(M,!1),submit:$e}})}};g(Be,t=>{e(M)&&e(a)&&t(Se)})}G(()=>{Xe(De,"href",`${j}/enterprises`),oe(Te,(e(a),l(()=>e(a)?e(a).name:"Loading...")))}),E(ue,K),Ve(),ve()}export{Dt as component}; diff --git a/webapp/assets/_app/immutable/nodes/7.0w3i9VHx.js b/webapp/assets/_app/immutable/nodes/7.0w3i9VHx.js new file mode 100644 index 00000000..3b5d5acd --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/7.0w3i9VHx.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Xe}from"../chunks/B3Pzt0F_.js";import{p as Ye,o as Ze,l as R,a as er,f as m,h as rr,t as re,g as e,e as ar,c as n,d as tr,m as i,$ as dr,j as a,k as d,s,r as t,z as L,u as h,C as lr,b as sr,D as or,v as $e,q as ir}from"../chunks/D8EpLgQ1.js";import{i as u,s as nr,a as vr}from"../chunks/5WA7h8uK.js";import{B as Ue,r as A,s as F,c as ze}from"../chunks/CiE1LlKV.js";import{b as P}from"../chunks/C6k1Q4We.js";import{p as mr}from"../chunks/D4Caz1gY.js";import{g as W}from"../chunks/CTf6mQoE.js";import{b as q}from"../chunks/CoIRRsD9.js";import{a as cr,b as ur}from"../chunks/duD3WMbl.js";import{t as pr}from"../chunks/BEkVdVE1.js";var br=m('

                Username is required

                '),gr=m('

                Please enter a valid email address

                '),fr=m('

                Full name is required

                '),xr=m('

                Password must be at least 8 characters long

                '),hr=m('

                Passwords do not match

                '),yr=lr(' Advanced Configuration (Optional)',1),kr=m('

                URL where runners can fetch metadata and setup information.

                URL where runners send status updates and lifecycle events.

                URL where GitHub/Gitea will send webhook events for job notifications.

                '),_r=m("
              • Enter a username
              • "),wr=m("
              • Enter a valid email address
              • "),$r=m("
              • Enter your full name
              • "),Ur=m("
              • Enter a password with at least 8 characters
              • "),zr=m("
              • Confirm your password
              • "),Mr=m('

                Please complete all required fields

                '),Rr=m('

                '),Ar=m('
                GARM

                Welcome to GARM

                Complete the first-run setup to get started

                First-Run Initialization

                GARM needs to be initialized before first use. This will create the admin user and generate a unique controller ID for this installation.

                This will create the admin user, generate a unique controller ID, and configure the required URLs for your GARM installation.
                Make sure to remember these credentials as they cannot be recovered.

                ');function Sr(Me,Re){Ye(Re,!1);const[Ae,Pe]=nr(),C=()=>vr(cr,"$authStore",Ae),k=i(),_=i(),w=i(),$=i(),U=i(),N=i();let g=i("admin"),p=i("admin@garm.local"),c=i(""),f=i(""),x=i("Administrator"),S=i(!1),H=i(""),T=i(!1),E=i(""),I=i(""),V=i("");async function qe(){if(e(N))try{s(S,!0),s(H,""),await ur.initialize(e(g).trim(),e(p).trim(),e(c),e(x).trim(),{callbackUrl:e(E).trim()||void 0,metadataUrl:e(I).trim()||void 0,webhookUrl:e(V).trim()||void 0}),pr.success("GARM Initialized","GARM has been successfully initialized. Welcome!"),W(`${q}/`)}catch(r){s(H,r instanceof Error?r.message:"Failed to initialize GARM")}finally{s(S,!1)}}Ze(()=>{if(C().isAuthenticated){W(`${q}/`);return}!C().needsInitialization&&!C().loading&&W(`${q}/login`)}),R(()=>(e(E),e(I),e(V)),()=>{if(typeof window<"u"){const r=window.location.origin;e(E)||s(E,`${r}/api/v1/callbacks`),e(I)||s(I,`${r}/api/v1/metadata`),e(V)||s(V,`${r}/webhooks`)}}),R(()=>e(p),()=>{s(k,e(p).trim()!==""&&e(p).includes("@"))}),R(()=>e(c),()=>{s(_,e(c).length>=8)}),R(()=>(e(f),e(c)),()=>{s(w,e(f).length>0&&e(c)===e(f))}),R(()=>e(g),()=>{s($,e(g).trim()!=="")}),R(()=>e(x),()=>{s(U,e(x).trim()!=="")}),R(()=>(e($),e(k),e(U),e(_),e(w)),()=>{s(N,e($)&&e(k)&&e(U)&&e(_)&&e(w))}),R(()=>(C(),q),()=>{C().isAuthenticated?W(`${q}/`):!C().needsInitialization&&!C().loading&&W(`${q}/login`)}),er(),Xe();var ae=Ar();rr(r=>{dr.title="Initialize GARM - First Run Setup"});var te=a(ae),me=a(te),ce=a(me),Ce=d(ce,2);t(me),L(4),t(te);var ue=d(te,2),pe=d(a(ue),2),de=a(pe),le=a(de),be=d(a(le),2),O=a(be);A(O);var Ge=d(O,2);{var Le=r=>{var l=br();n(r,l)};u(Ge,r=>{e($),e(g),h(()=>!e($)&&e(g).length>0)&&r(Le)})}t(be),t(le);var se=d(le,2),ge=d(a(se),2),J=a(ge);A(J);var Ee=d(J,2);{var Ie=r=>{var l=gr();n(r,l)};u(Ee,r=>{e(k),e(p),h(()=>!e(k)&&e(p).length>0)&&r(Ie)})}t(ge),t(se);var oe=d(se,2),fe=d(a(oe),2),K=a(fe);A(K);var Ve=d(K,2);{var Be=r=>{var l=fr();n(r,l)};u(Ve,r=>{e(U),e(x),h(()=>!e(U)&&e(x).length>0)&&r(Be)})}t(fe),t(oe);var ie=d(oe,2),xe=d(a(ie),2),Q=a(xe);A(Q);var Fe=d(Q,2);{var Ne=r=>{var l=xr();n(r,l)};u(Fe,r=>{e(_),e(c),h(()=>!e(_)&&e(c).length>0)&&r(Ne)})}t(xe),t(ie);var ne=d(ie,2),he=d(a(ne),2),X=a(he);A(X);var Se=d(X,2);{var je=r=>{var l=hr();n(r,l)};u(Se,r=>{e(w),e(f),h(()=>!e(w)&&e(f).length>0)&&r(je)})}t(he),t(ne);var ve=d(ne,2),ye=a(ve);Ue(ye,{type:"button",variant:"ghost",size:"sm",$$events:{click:()=>s(T,!e(T))},children:(r,l)=>{var b=yr(),v=sr(b);L(),re(()=>F(v,0,`w-4 h-4 mr-2 transition-transform ${e(T)?"rotate-90":""}`)),n(r,b)},$$slots:{default:!0}});var De=d(ye,2);{var We=r=>{var l=kr(),b=a(l),v=a(b),z=d(a(v),2),G=a(z);A(G),L(2),t(z),t(v);var B=d(v,2),Y=d(a(B),2),j=a(Y);A(j),L(2),t(Y),t(B);var Z=d(B,2),D=d(a(Z),2),ee=a(D);A(ee),L(2),t(D),t(Z),t(b),t(l),P(G,()=>e(I),M=>s(I,M)),P(j,()=>e(E),M=>s(E,M)),P(ee,()=>e(V),M=>s(V,M)),n(r,l)};u(De,r=>{e(T)&&r(We)})}t(ve);var ke=d(ve,2);{var He=r=>{var l=Mr(),b=a(l),v=d(a(b),2),z=d(a(v),2),G=a(z),B=a(G);{var Y=o=>{var y=_r();n(o,y)};u(B,o=>{e($)||o(Y)})}var j=d(B,2);{var Z=o=>{var y=wr();n(o,y)};u(j,o=>{e(k)||o(Z)})}var D=d(j,2);{var ee=o=>{var y=$r();n(o,y)};u(D,o=>{e(U)||o(ee)})}var M=d(D,2);{var Je=o=>{var y=Ur();n(o,y)};u(M,o=>{e(_)||o(Je)})}var Ke=d(M,2);{var Qe=o=>{var y=zr();n(o,y)};u(Ke,o=>{e(w)||o(Qe)})}t(G),t(z),t(v),t(b),t(l),n(r,l)};u(ke,r=>{e(N),e(g),e(p),e(x),e(c),e(f),h(()=>!e(N)&&(e(g).length>0||e(p).length>0||e(x).length>0||e(c).length>0||e(f).length>0))&&r(He)})}var _e=d(ke,2);{var Te=r=>{var 
l=Rr(),b=a(l),v=d(a(b),2),z=a(v),G=a(z,!0);t(z),t(v),t(b),t(l),re(()=>$e(G,e(H))),n(r,l)};u(_e,r=>{e(H)&&r(Te)})}var we=d(_e,2),Oe=a(we);{let r=ir(()=>!e(N)||e(S));Ue(Oe,{type:"submit",variant:"primary",size:"lg",fullWidth:!0,get loading(){return e(S)},get disabled(){return e(r)},children:(l,b)=>{L();var v=or();re(()=>$e(v,e(S)?"Initializing...":"Initialize GARM")),n(l,v)},$$slots:{default:!0}})}t(we),t(de),L(2),t(pe),t(ue),t(ae),re(()=>{ze(ce,"src",`${q??""}/assets/garm-light.svg`),ze(Ce,"src",`${q??""}/assets/garm-dark.svg`),F(O,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e($),e(g),h(()=>!e($)&&e(g).length>0?"border-red-300 dark:border-red-600":"")??""}`),F(J,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(k),e(p),h(()=>!e(k)&&e(p).length>0?"border-red-300 dark:border-red-600":"")??""}`),F(K,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(U),e(x),h(()=>!e(U)&&e(x).length>0?"border-red-300 dark:border-red-600":"")??""}`),F(Q,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(_),e(c),h(()=>!e(_)&&e(c).length>0?"border-red-300 dark:border-red-600":"")??""}`),F(X,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(w),e(f),h(()=>!e(w)&&e(f).length>0?"border-red-300 dark:border-red-600":"")??""}`)}),P(O,()=>e(g),r=>s(g,r)),P(J,()=>e(p),r=>s(p,r)),P(K,()=>e(x),r=>s(x,r)),P(Q,()=>e(c),r=>s(c,r)),P(X,()=>e(f),r=>s(f,r)),ar("submit",de,mr(qe)),n(Me,ae),tr(),Pe()}export{Sr as component}; diff --git a/webapp/assets/_app/immutable/nodes/8.BiZNKYxk.js b/webapp/assets/_app/immutable/nodes/8.BiZNKYxk.js new file mode 100644 index 00000000..38d013cb --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/8.BiZNKYxk.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as z}from"../chunks/B3Pzt0F_.js";import{p as J,f as C,c as x,d as K,j as y,n as _,u as f,r as b,t as A,v as L,B as ce,b as O,o as de,A as ue,l as w,a as me,h as pe,g as e,m as d,$ as fe,q as ge,k as S,s}from"../chunks/D8EpLgQ1.js";import{p as ve,i as D}from"../chunks/5WA7h8uK.js";import{c as T,g as B}from"../chunks/CiE1LlKV.js";import{D as he}from"../chunks/KQ2xQpA3.js";import{P as _e}from"../chunks/CO4LUyTP.js";import{w as ye}from"../chunks/u94nIB4-.js";import{t as be}from"../chunks/BEkVdVE1.js";import{D as xe,G as Ce,a as ke}from"../chunks/C9DJVOi1.js";import{E as Pe}from"../chunks/B7ITzBt8.js";import{S as H}from"../chunks/BE4wujub.js";import{b as W}from"../chunks/CoIRRsD9.js";var Ie=C(' '),we=C(' '),Se=C('-'),Te=C('
                ');function De($,P){J(P,!1);let a=ve(P,"item",8);z();var p=Te(),I=y(p);{var u=r=>{var n=Ie(),h=y(n);b(n),A(()=>{T(n,"href",`${W??""}/pools/${_(a()),f(()=>a().pool_id)??""}`),T(n,"title",`Pool: ${_(a()),f(()=>a().pool_id)??""}`),L(h,`Pool: ${_(a()),f(()=>a().pool_id)??""}`)}),x(r,n)},k=r=>{var n=ce(),h=O(n);{var o=l=>{var i=we(),m=y(i);b(i),A(()=>{T(i,"href",`${W??""}/scalesets/${_(a()),f(()=>a().scale_set_id)??""}`),T(i,"title",`Scale Set: ${_(a()),f(()=>a().scale_set_id)??""}`),L(m,`Scale Set: ${_(a()),f(()=>a().scale_set_id)??""}`)}),x(l,i)},g=l=>{var i=Se();x(l,i)};D(h,l=>{_(a()),f(()=>a()?.scale_set_id)?l(o):l(g,!1)},!0)}x(r,n)};D(I,r=>{_(a()),f(()=>a()?.pool_id)?r(u):r(k,!1)})}b(p),x($,p),K()}var $e=C('

                Error

                '),Ee=C('
                ',1);function Je($,P){J(P,!1);const a=d(),p=d(),I=d();let u=d([]),k=d(!0),r=d(""),n=d(""),h=null,o=d(1),g=d(25),l=d(""),i=d(!1),m=d(null);async function M(){try{s(k,!0),s(r,""),s(u,await B.listInstances())}catch(t){s(r,t instanceof Error?t.message:"Failed to load instances")}finally{s(k,!1)}}function F(t){s(m,t),s(i,!0)}async function Q(){if(e(m))try{await B.deleteInstance(e(m).name),be.success("Instance Deleted",`Instance ${e(m).name} has been deleted successfully.`),s(i,!1),s(m,null)}catch(t){s(r,t instanceof Error?t.message:"Failed to delete instance")}}const U=[{key:"name",title:"Name",cellComponent:Pe,cellProps:{entityType:"instance",showId:!0}},{key:"pool_scale_set",title:"Pool/Scale Set",flexible:!0,cellComponent:De},{key:"created",title:"Created",cellComponent:Ce,cellProps:{field:"created_at",type:"date"}},{key:"status",title:"Status",cellComponent:H,cellProps:{statusType:"instance",statusField:"status"}},{key:"runner_status",title:"Runner Status",cellComponent:H,cellProps:{statusType:"instance",statusField:"runner_status"}},{key:"actions",title:"Actions",align:"right",cellComponent:ke,cellProps:{actions:[{type:"delete",title:"Delete",ariaLabel:"Delete instance",action:"delete"}]}}],V={entityType:"instance",primaryText:{field:"name",isClickable:!0,href:"/instances/{name}"},secondaryText:{field:"provider_id"},badges:[{type:"status",field:"status"},{type:"status",field:"runner_status"}],actions:[{type:"delete",handler:t=>F(t)}]};function X(t){s(l,t.detail.term),s(o,1)}function Y(t){s(o,t.detail.page)}function Z(t){s(g,t.detail.perPage),s(o,1)}async function ee(){try{await M()}catch(t){console.error("Retry failed:",t)}}function te(t){}function ae(t){F(t.detail.item)}function se(t){if(t.operation==="create"){const c=t.payload;s(u,[...e(u),c])}else if(t.operation==="update"){const c=t.payload;s(u,e(u).map(v=>v.name===c.name?c:v))}else if(t.operation==="delete"){const c=t.payload.name||t.payload;s(u,e(u).filter(v=>v.name!==c))}}de(()=>{M(),h=ye.subscribeToEntity("instance",["create","update","delete"],se)}),ue(()=>{h&&(h(),h=null)}),w(()=>(e(u),e(l),e(n)),()=>{s(a,e(u).filter(t=>{const c=e(l)===""||t.name?.toLowerCase().includes(e(l).toLowerCase())||t.provider_id?.toLowerCase().includes(e(l).toLowerCase()),v=e(n)===""||t.status===e(n)||t.runner_status===e(n);return c&&v}))}),w(()=>(e(a),e(g)),()=>{s(p,Math.ceil(e(a).length/e(g)))}),w(()=>(e(o),e(p)),()=>{e(o)>e(p)&&e(p)>0&&s(o,e(p))}),w(()=>(e(a),e(o),e(g)),()=>{s(I,e(a).slice((e(o)-1)*e(g),e(o)*e(g)))}),me(),z();var R=Ee();pe(t=>{fe.title="Instances - GARM"});var E=O(R),N=y(E);_e(N,{title:"Runner Instances",description:"Monitor your running instances",showAction:!1});var G=S(N,2);{var re=t=>{var c=$e(),v=y(c),q=y(v),j=S(y(q),2),ie=y(j,!0);b(j),b(q),b(v),b(c),A(()=>L(ie,e(r))),x(t,c)};D(G,t=>{e(r)&&t(re)})}var ne=S(G,2);{let t=ge(()=>!!e(r));xe(ne,{get columns(){return U},get data(){return e(I)},get loading(){return e(k)},get error(){return e(r)},get searchTerm(){return e(l)},searchPlaceholder:"Search instances...",get currentPage(){return e(o)},get perPage(){return e(g)},get totalPages(){return e(p)},get totalItems(){return e(a),f(()=>e(a).length)},itemName:"instances",emptyIconType:"cog",get showRetry(){return e(t)},get mobileCardConfig(){return V},$$events:{search:X,pageChange:Y,perPageChange:Z,retry:ee,edit:te,delete:ae}})}b(E);var le=S(E,2);{var oe=t=>{he(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return e(m),f(()=>e(m).name)},$$events:{close:()=>{s(i,!1),s(m,null)},confirm:Q}})};D(le,t=>{e(i)&&e(m)&&t(oe)})}x($,R),K()}export{Je as component}; diff --git a/webapp/assets/_app/immutable/nodes/9.DpSfMRgo.js b/webapp/assets/_app/immutable/nodes/9.DpSfMRgo.js new file mode 100644 index 00000000..54eb6b8a --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/9.DpSfMRgo.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Te}from"../chunks/B3Pzt0F_.js";import{p as Ue,o as Le,A as Oe,l as Ve,a as ze,f as g,h as Pe,b as O,t as f,c as v,d as Re,g as t,m as M,s as p,u as i,$ as qe,j as s,r as a,k as r,v as c,B as Et,n as b,e as Fe,q as Tt,D as Ut}from"../chunks/D8EpLgQ1.js";import{i as _,s as Ge,a as He}from"../chunks/5WA7h8uK.js";import{w as We,e as Lt,i as Ot}from"../chunks/u94nIB4-.js";import{c as nt,g as Vt,s as zt}from"../chunks/CiE1LlKV.js";import{b as Je}from"../chunks/BAg1iRPq.js";import{p as Ke}from"../chunks/C41YH50Q.js";import{g as Pt}from"../chunks/CTf6mQoE.js";import{b as A}from"../chunks/CoIRRsD9.js";import{D as Qe}from"../chunks/KQ2xQpA3.js";import{g as V,f as z}from"../chunks/ow_oMtSd.js";import{s as Rt,b as B,B as qt,d as Ft}from"../chunks/BGVHQGl-.js";var Xe=g('

                Error

                '),Ye=g('

                Loading instance details...

                '),Ze=g(' '),ta=g(' '),ea=g('-'),aa=g('
                Updated At:
                '),sa=g('
                '),ra=g('
                Network Addresses:
                '),da=g('
                Network Addresses:
                No addresses available
                '),ia=g('
                OS Type:
                '),na=g('
                OS Name:
                '),oa=g('
                OS Version:
                '),la=g('
                OS Architecture:
                '),va=g('

                '),xa=g('

                Status Messages

                '),ca=g('

                Status Messages

                No status messages available

                '),ma=g('

                Instance Information

                ID:
                Name:
                Provider ID:
                Provider:
                Pool/Scale Set:
                Agent ID:
                Created At:

                Status & Network

                Instance Status:
                Runner Status:
                ',1),ga=g('
                Instance not found.
                '),_a=g(' ',1);function Ba(Gt,Ht){Ue(Ht,!1);const[Wt,Jt]=Ge(),ot=()=>He(Ke,"$page",Wt),P=M();let e=M(null),R=M(!0),$=M(""),N=M(!1),C=null,E=M();async function Kt(){if(t(P))try{p(R,!0),p($,""),p(e,await Vt.getInstance(t(P)))}catch(o){p($,o instanceof Error?o.message:"Failed to load instance")}finally{p(R,!1)}}async function Qt(){if(t(e)){try{await Vt.deleteInstance(t(e).name),Pt(`${A}/instances`)}catch(o){p($,o instanceof Error?o.message:"Failed to delete instance")}p(N,!1)}}function Xt(o){if(t(e))if(o.operation==="update"&&o.payload.id===t(e).id){const h=t(e).status_messages?.length||0,j={...t(e),...o.payload},S=j.status_messages?.length||0;p(e,j),S>h&&setTimeout(()=>{Rt(t(E))},100)}else o.operation==="delete"&&(o.payload.id||o.payload)===t(e).id&&Pt(`${A}/instances`)}Le(()=>{Kt().then(()=>{t(e)?.status_messages?.length&&setTimeout(()=>{Rt(t(E))},100)}),C=We.subscribeToEntity("instance",["update","delete"],Xt)}),Oe(()=>{C&&(C(),C=null)}),Ve(()=>ot(),()=>{p(P,decodeURIComponent(ot().params.id||""))}),ze(),Te();var lt=_a();Pe(o=>{f(()=>qe.title=`${t(e),i(()=>t(e)?`${t(e).name} - Instance Details`:"Instance Details")??""} - GARM`)});var q=O(lt),F=s(q),vt=s(F),G=s(vt),Yt=s(G);a(G);var xt=r(G,2),ct=s(xt),mt=r(s(ct),2),Zt=s(mt,!0);a(mt),a(ct),a(xt),a(vt),a(F);var gt=r(F,2);{var te=o=>{var h=Xe(),j=s(h),S=s(j),T=r(s(S),2),I=s(T,!0);a(T),a(S),a(j),a(h),f(()=>c(I,t($))),v(o,h)};_(gt,o=>{t($)&&o(te)})}var ee=r(gt,2);{var ae=o=>{var h=Ye();v(o,h)},se=o=>{var h=Et(),j=O(h);{var S=I=>{var U=ma(),H=O(U),W=s(H),J=s(W),_t=r(s(J),2),ie=s(_t);a(_t),a(J);var ut=r(J,2),K=s(ut),ft=r(s(K),2),ne=s(ft,!0);a(ft),a(K);var Q=r(K,2),yt=r(s(Q),2),oe=s(yt,!0);a(yt),a(Q);var X=r(Q,2),pt=r(s(X),2),le=s(pt,!0);a(pt),a(X);var Y=r(X,2),ht=r(s(Y),2),ve=s(ht,!0);a(ht),a(Y);var Z=r(Y,2),kt=r(s(Z),2),xe=s(kt);{var ce=d=>{var n=Ze(),l=s(n,!0);a(n),f(()=>{nt(n,"href",`${A??""}/pools/${t(e),i(()=>t(e).pool_id)??""}`),c(l,(t(e),i(()=>t(e).pool_id)))}),v(d,n)},me=d=>{var n=Et(),l=O(n);{var m=u=>{var y=ta(),D=s(y,!0);a(y),f(()=>{nt(y,"href",`${A??""}/scalesets/${t(e),i(()=>t(e).scale_set_id)??""}`),c(D,(t(e),i(()=>t(e).scale_set_id)))}),v(u,y)},x=u=>{var y=ea();v(u,y)};_(l,u=>{t(e),i(()=>t(e).scale_set_id)?u(m):u(x,!1)},!0)}v(d,n)};_(xe,d=>{t(e),i(()=>t(e).pool_id)?d(ce):d(me,!1)})}a(kt),a(Z);var tt=r(Z,2),bt=r(s(tt),2),ge=s(bt,!0);a(bt),a(tt);var et=r(tt,2),wt=r(s(et),2),_e=s(wt,!0);a(wt),a(et);var ue=r(et,2);{var fe=d=>{var n=aa(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(x=>c(m,x),[()=>(b(B),t(e),i(()=>B(t(e).updated_at)))]),v(d,n)};_(ue,d=>{t(e),i(()=>t(e).updated_at&&t(e).updated_at!==t(e).created_at)&&d(fe)})}a(ut),a(W);var It=r(W,2),jt=r(s(It),2),at=s(jt),Dt=r(s(at),2),st=s(Dt),ye=s(st,!0);a(st),a(Dt),a(at);var rt=r(at,2),St=r(s(rt),2),dt=s(St),pe=s(dt,!0);a(dt),a(St),a(rt);var Mt=r(rt,2);{var he=d=>{var n=ra(),l=r(s(n),2);Lt(l,5,()=>(t(e),i(()=>t(e).addresses)),Ot,(m,x)=>{var u=sa(),y=s(u),D=s(y,!0);a(y);var it=r(y,2);{let L=Tt(()=>(t(x),i(()=>t(x).type||"Unknown")));qt(it,{variant:"info",get text(){return t(L)}})}a(u),f(()=>c(D,(t(x),i(()=>t(x).address)))),v(m,u)}),a(l),a(n),v(d,n)},ke=d=>{var n=da();v(d,n)};_(Mt,d=>{t(e),i(()=>t(e).addresses&&t(e).addresses.length>0)?d(he):d(ke,!1)})}var Bt=r(Mt,2);{var be=d=>{var n=ia(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_type)))),v(d,n)};_(Bt,d=>{t(e),i(()=>t(e).os_type)&&d(be)})}var $t=r(Bt,2);{var we=d=>{var n=na(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_name)))),v(d,n)};_($t,d=>{t(e),i(()=>t(e).os_name)&&d(we)})}var 
At=r($t,2);{var Ie=d=>{var n=oa(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_version)))),v(d,n)};_(At,d=>{t(e),i(()=>t(e).os_version)&&d(Ie)})}var je=r(At,2);{var De=d=>{var n=la(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_arch)))),v(d,n)};_(je,d=>{t(e),i(()=>t(e).os_arch)&&d(De)})}a(jt),a(It),a(H);var Se=r(H,2);{var Me=d=>{var n=xa(),l=r(s(n),2);Lt(l,5,()=>(t(e),i(()=>t(e).status_messages)),Ot,(m,x)=>{var u=va(),y=s(u),D=s(y),it=s(D,!0);a(D);var L=r(D,2),Nt=s(L);{var $e=k=>{const w=Tt(()=>(b(Ft),t(x),i(()=>Ft(t(x).event_level))));qt(k,{get variant(){return b(t(w)),i(()=>t(w).variant)},get text(){return b(t(w)),i(()=>t(w).text)}})};_(Nt,k=>{t(x),i(()=>t(x).event_level)&&k($e)})}var Ct=r(Nt,2),Ae=s(Ct);{var Ne=k=>{var w=Ut();f(Ee=>c(w,Ee),[()=>(b(B),t(x),i(()=>B(t(x).created_at)))]),v(k,w)},Ce=k=>{var w=Ut("Unknown date");v(k,w)};_(Ae,k=>{t(x),i(()=>t(x).created_at)?k(Ne):k(Ce,!1)})}a(Ct),a(L),a(y),a(u),f(()=>c(it,(t(x),i(()=>t(x).message)))),v(m,u)}),a(l),Je(l,m=>p(E,m),()=>t(E)),a(n),v(d,n)},Be=d=>{var n=ca();v(d,n)};_(Se,d=>{t(e),i(()=>t(e).status_messages&&t(e).status_messages.length>0)?d(Me):d(Be,!1)})}f((d,n,l,m,x)=>{c(ne,(t(e),i(()=>t(e).id))),c(oe,(t(e),i(()=>t(e).name))),c(le,(t(e),i(()=>t(e).provider_id))),c(ve,(t(e),i(()=>t(e).provider_name||"Unknown"))),c(ge,(t(e),i(()=>t(e).agent_id||"Not assigned"))),c(_e,d),zt(st,1,`inline-flex px-2 py-1 text-xs font-semibold rounded-full ring-1 ring-inset ${n??""}`),c(ye,l),zt(dt,1,`inline-flex px-2 py-1 text-xs font-semibold rounded-full ring-1 ring-inset ${m??""}`),c(pe,x)},[()=>(b(B),t(e),i(()=>B(t(e).created_at))),()=>(b(V),t(e),i(()=>V(t(e).status||"unknown"))),()=>(b(z),t(e),i(()=>z(t(e).status||"unknown"))),()=>(b(V),t(e),i(()=>V(t(e).runner_status||"unknown"))),()=>(b(z),t(e),i(()=>z(t(e).runner_status||"unknown")))]),Fe("click",ie,()=>p(N,!0)),v(I,U)},T=I=>{var U=ga();v(I,U)};_(j,I=>{t(e)?I(S):I(T,!1)},!0)}v(o,h)};_(ee,o=>{t(R)?o(ae):o(se,!1)})}a(q);var re=r(q,2);{var de=o=>{Qe(o,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return t(e),i(()=>t(e).name)},$$events:{close:()=>p(N,!1),confirm:Qt}})};_(re,o=>{t(N)&&t(e)&&o(de)})}f(()=>{nt(Yt,"href",`${A}/instances`),c(Zt,(t(e),i(()=>t(e)?t(e).name:"Instance Details")))}),v(Gt,lt),Re(),Jt()}export{Ba as component}; diff --git a/webapp/assets/_app/version.json b/webapp/assets/_app/version.json new file mode 100644 index 00000000..6268ff48 --- /dev/null +++ b/webapp/assets/_app/version.json @@ -0,0 +1 @@ +{"version":"1755334486454"} \ No newline at end of file diff --git a/webapp/assets/assets.go b/webapp/assets/assets.go new file mode 100644 index 00000000..613baa0d --- /dev/null +++ b/webapp/assets/assets.go @@ -0,0 +1,83 @@ +package assets + +import ( + "embed" + "net/http" + "path/filepath" + "strings" +) + +//go:generate go run github.com/go-swagger/go-swagger/cmd/swagger@v0.31.0 generate spec --output=../swagger.yaml --scan-models --work-dir=../../ +//go:generate go run github.com/go-swagger/go-swagger/cmd/swagger@v0.31.0 validate ../swagger.yaml +//go:generate rm -rf ../src/lib/api/generated +//go:generate openapi-generator-cli generate --skip-validate-spec -i ../swagger.yaml -g typescript-axios -o ../src/lib/api/generated + +//go:embed all:* +var EmbeddedSPA embed.FS + +// GetSPAFileSystem returns the embedded SPA file system for use with http.FileServer +func GetSPAFileSystem() http.FileSystem { + return http.FS(EmbeddedSPA) +} + +// ServeSPA serves the embedded SPA with proper content types and SPA routing +// This is kept for backward compatibility +func ServeSPA(w http.ResponseWriter, r *http.Request) { + ServeSPAWithPath(w, r, "/ui/") +} + +// ServeSPAWithPath serves the embedded SPA with a custom webapp path +func ServeSPAWithPath(w http.ResponseWriter, r *http.Request, webappPath string) { + filename := strings.TrimPrefix(r.URL.Path, webappPath) + + // Handle root path and SPA routing - serve index.html for all routes + if filename == "" || !strings.Contains(filename, ".") { + filename = "index.html" + } + + // Security check - prevent directory traversal + if strings.Contains(filename, "..") { + http.NotFound(w, r) + return + } + + // Read file from embedded filesystem + content, err := EmbeddedSPA.ReadFile(filename) + if err != nil { + // If file not found, serve index.html for SPA routing + content, err = EmbeddedSPA.ReadFile("index.html") + if err != nil { + http.NotFound(w, r) + return + } + filename = "index.html" + } + + // Set appropriate content type based on file extension + ext := strings.ToLower(filepath.Ext(filename)) + switch ext { + case ".html": + w.Header().Set("Content-Type", "text/html; charset=utf-8") + case ".js": + w.Header().Set("Content-Type", "application/javascript") + case ".css": + w.Header().Set("Content-Type", "text/css") + case ".json": + w.Header().Set("Content-Type", "application/json") + case ".svg": + w.Header().Set("Content-Type", "image/svg+xml") + case ".png": + w.Header().Set("Content-Type", "image/png") + default: + w.Header().Set("Content-Type", "text/plain") + } + + // Set cache headers for static assets (but not for HTML to ensure fresh content) + if ext != ".html" { + w.Header().Set("Cache-Control", "public, max-age=3600") + } else { + w.Header().Set("Cache-Control", "no-cache, must-revalidate") + } + + w.Write(content) +} diff --git a/webapp/assets/assets/garm-dark.svg b/webapp/assets/assets/garm-dark.svg new file mode 100644 index 00000000..f0a0c564 --- /dev/null +++ b/webapp/assets/assets/garm-dark.svg @@ -0,0 +1,37 @@ + + + + + + + + + + + diff --git 
a/webapp/assets/assets/garm-light.svg b/webapp/assets/assets/garm-light.svg new file mode 100644 index 00000000..2495959d --- /dev/null +++ b/webapp/assets/assets/garm-light.svg @@ -0,0 +1,36 @@ + + + + + + + + + + diff --git a/webapp/assets/assets/gitea.svg b/webapp/assets/assets/gitea.svg new file mode 100644 index 00000000..e4643ce3 --- /dev/null +++ b/webapp/assets/assets/gitea.svg @@ -0,0 +1 @@ + diff --git a/webapp/assets/assets/github-mark-white.svg b/webapp/assets/assets/github-mark-white.svg new file mode 100644 index 00000000..d5e64918 --- /dev/null +++ b/webapp/assets/assets/github-mark-white.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/webapp/assets/assets/github-mark.svg b/webapp/assets/assets/github-mark.svg new file mode 100644 index 00000000..37fa923d --- /dev/null +++ b/webapp/assets/assets/github-mark.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/webapp/assets/favicon-dark.png b/webapp/assets/favicon-dark.png new file mode 100644 index 0000000000000000000000000000000000000000..d16186d1d64f35f2676d6bb462e4824345f811de GIT binary patch literal 3506 zcmeHK`8O197k|dsjeTus8v7{BkRoKsjAaI8u1 zT>wP=a+@u+0bts(Ftfoi0($3m{*iwe`2S&m@#opag&3^>=p-w1YXBfHerArVUBekj zx7}ags<>F|{M4q_RqBs_-1MUVSUD&P-YO6jozUWz)WsUueD>8pbLyxt6DD?XZ-0CB zU9kl_abWQM_{>hJKkX2o#L4I{LK@2?Hk0g1PM^2%-*%d49a$(rT~VR^XC#Z!-1!Re za=LnvE{STnU`N-&yD?GX3hwY-3VM3Qq{7RtaU8cz4X))ZW|UlBIJwE<&o| z>5aqr#}E(tCi_F>rJL}%VS1hmpT2L5T%ev6P=0M2cTen5z>$kB=WdaCgUX&=JAGL$ z^Mp*!&9?M6F0DP!jU9)g*5Yqp?|A&;rK*!do_2S2T1|MC*f7+PK?CH9sSy^$vm7>N z7;GdH0*-Bkotsrt@ZOQM&kV9WOtAVko-=EA!!N!4Al!L6+G04)XyGtTpR?F1p8ZT3 zv%*8c5pWcJOjbc0g_l;-7IL=3L7ZVvUmILxf#56`PAE9F?85t&zm^rReYJa<>3g*B zW3%s^GKx z#FgWn=h(@jp;>mGJ}zqDhTP=jgq8-JcP{4HAd{%a=j^^r{{Yc;S2aNbRFL;z)@VqL${1HT7F)@pQde%&LzlE+*V^UzKt1er>xfeM?2xlmLytT{!lY7YsJ9Y~fbG zs=DjBwuek%U&UW49$oDW zzku88V!T}inYBxV@@dE#@{Hny(MR&p~#CcVRQb6)}pQOiCbp z#CcXLZ2biJ;Fm9(oCS15yf%SIZiCHVt(ve4-xz)r+>yD%L)5ggKO2fT(Pthv*By_S z{jDbfr?I{cO6J%m5PJQqN%tK5bNt_|rViO2szL(YlVwf2F+6Dzbxoz`+#AE7`Yq;p zkFu#9#%yT~h6@GR6Jw9o5yLC%%RRjd^6qU5unLuH5uQL!I~SjW3}=?PfK z@9EzVsq{V%{T+JgUG0}h@;k(3k>j=?TNBeJeW7Z%R+q_9txG^phX28eM21*H?SqE( zTsoWZ5fQ;|(9yHP3fVXGx{e?RIF~~?t6yg|o2E5~j3+HfU91unXR<)bOsK6{aP$;3 zW*`LXZ)`7O+!ly*lk$X%FaR|4(uLn`TVBjUY&URJ9Bn4vC30n{CzxH$M{CI67NI{liu>=8XT)p#g&hBf8y=M{7CI> zk)PFPx_{Yv%iP1Uqh0-s*W-K+i2}r@3AKBj9~P4>%EhUH%&0iOL5zY6(wBqR_2;a% zf&a3$dx&IHjn~IdJH8X2D?YvA*Si!OTcF(%rJ<`gbl#~~S z6MCpfgi#0Wc&Wv0p&8|JY}z_F5o%Jjq7t+tknS!--VF9T0q-+u@|VzrgCJa+dD#egmGr5VQS!po?+w95 zoepu*Lqut@p2(pYEfvr+1qY`WpREf)o7)Pu_X(^Jm>J9|0t0b^YdZzl0RAB<&ogBE zBG-9gzlzP{B610wB|2eCU(P}}pT~eYB7Q-9BG$`!!}^7_p6L=6fVcLwt(0!_TMzyc zJ6UB;k)0TRaf6&U@^iTAvF&K4aR**P-V0GMtN7kK zoccKTcxro-dW8C^_az$ce8WG~)n$}6S#p(vt%-UpAHM3xrois5&Y9bk zU|`Scf-^#g)EtDIl@NUt=uSc36tfU8fP% ze{LjGKk&Chif3k;rXJ!;MeF%Yvu%g2K0Z+e%YCtHW60)6WLmK)1oo?Vm?GhN>jOSr ze$mo3#8E+>C8K$;!c`7w*uP3M63nIvvM2Qh1eU1?$G=wtnLG6#_qzDj-%WM|89LMU#!~tb#9efGIKxv`UPhl^Q*@_i2lirT(>em+` zKR3Jd-W)Ie0Uk>PcO`r(pXua9ha{TL{{c-*w>TsV6)(|IOFK{Yu0FmUn8Y)TDmZ@eGDsE za1~NMD%Png(AMrhu=<5%qeCt^tHf**Es1~mHons2lnjo&gI|#BSR%5K?9NiIMlKz= zR*ahB9$9zRxh>?Sk%l_#`K{xLO$$?xh#Jzq2jtqoIxi2D&24cS zR-K-2l}x2eJSDft!JLeH!dOVsi@FDcYpsDzs2!Gi^#H!b7KZ~>#AXilbhu6tgspnY zumo~aGbW2lO-T($eY%ot*un;y?i%sB2 z$bdsb0j=3TBKmORDw_NUnI)V{yuq=Vbcg?{u%0@{UV!sye1@_*UWIwn4{bd-y){nK zs$!SlX+5!!u~xaWeUN%rfl#K}Upkdx_UcdDzUYyS`Q2g7v=PP>B1^)ZBe^=0+%#SM z-53g}VpLBkVKkIfRBcpLHPtj2atwpf#9+$tIK~F_UjQGXtCxG|{|1bS9vmV8ED@D`scdCe z3E4v=SxSbX-ub@g{o(!P{R`gjIrq7rbMJjV&pqdJo*(YLZDDT24TVDi0Nf_VSStoP 
                + + diff --git a/webapp/assets/openapitools.json b/webapp/assets/openapitools.json new file mode 100644 index 00000000..a82623d6 --- /dev/null +++ b/webapp/assets/openapitools.json @@ -0,0 +1,7 @@ +{ + "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json", + "spaces": 2, + "generator-cli": { + "version": "7.14.0" + } +} diff --git a/webapp/openapitools.json b/webapp/openapitools.json new file mode 100644 index 00000000..a82623d6 --- /dev/null +++ b/webapp/openapitools.json @@ -0,0 +1,7 @@ +{ + "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json", + "spaces": 2, + "generator-cli": { + "version": "7.14.0" + } +} diff --git a/webapp/package-lock.json b/webapp/package-lock.json new file mode 100644 index 00000000..c6e47eb5 --- /dev/null +++ b/webapp/package-lock.json @@ -0,0 +1,5603 @@ +{ + "name": "garm-webapp", + "version": "0.0.1", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "garm-webapp", + "version": "0.0.1", + "license": "ISC", + "dependencies": { + "@codemirror/lang-json": "^6.0.2", + "@codemirror/state": "^6.5.2", + "@codemirror/theme-one-dark": "^6.1.3", + "@codemirror/view": "^6.38.1", + "@tailwindcss/typography": "^0.5.10", + "codemirror": "^6.0.2" + }, + "devDependencies": { + "@openapitools/openapi-generator-cli": "^2.21.4", + "@sveltejs/adapter-static": "^3.0.1", + "@sveltejs/kit": "^2.0.0", + "@sveltejs/vite-plugin-svelte": "^6.1.0", + "@tailwindcss/forms": "^0.5.7", + "@tailwindcss/postcss": "^4.1.11", + "@types/node": "^24.2.0", + "autoprefixer": "^10.4.16", + "postcss": "^8.4.32", + "svelte": "^5.38.0", + "svelte-check": "^4.3.1", + "swagger-typescript-api": "^13.2.7", + "tailwindcss": "^4.1.11", + "typescript": "^5.0.0", + "vite": "^7.1.1" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@biomejs/js-api": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@biomejs/js-api/-/js-api-1.0.0.tgz", + "integrity": "sha512-69OfQ7+09AtiCIg+k+aU3rEsGit5o/SJWCS3BeBH/2nJYdJGi0cIx+ybka8i1EK69aNcZxYO1y1iAAEmYMq1HA==", + "dev": true, + "license": "MIT OR Apache-2.0", + "peerDependencies": { + "@biomejs/wasm-bundler": "^2.0.0", + "@biomejs/wasm-nodejs": "^2.0.0", + "@biomejs/wasm-web": "^2.0.0" + }, + "peerDependenciesMeta": { + "@biomejs/wasm-bundler": { + "optional": true + }, + "@biomejs/wasm-nodejs": { + "optional": true + }, + "@biomejs/wasm-web": { + "optional": true + } + } + }, + "node_modules/@biomejs/wasm-nodejs": { + "version": "2.0.5", + "dev": true, + "license": "MIT OR Apache-2.0" + }, + "node_modules/@codemirror/autocomplete": { + "version": "6.18.6", + "resolved": 
"https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.18.6.tgz", + "integrity": "sha512-PHHBXFomUs5DF+9tCOM/UoW6XQ4R44lLNNhRaW9PKPTU0D7lIjRg3ElxaJnTwsl/oHiR93WSXDBrekhoUGCPtg==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@codemirror/autocomplete/node_modules/@lezer/common": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.3.tgz", + "integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==", + "license": "MIT" + }, + "node_modules/@codemirror/commands": { + "version": "6.8.1", + "resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.8.1.tgz", + "integrity": "sha512-KlGVYufHMQzxbdQONiLyGQDUW0itrLZwq3CcY7xpv9ZLRHqzkBSoteocBHtMCoY7/Ci4xhzSrToIeLg7FxHuaw==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.4.0", + "@codemirror/view": "^6.27.0", + "@lezer/common": "^1.1.0" + } + }, + "node_modules/@codemirror/commands/node_modules/@lezer/common": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.3.tgz", + "integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==", + "license": "MIT" + }, + "node_modules/@codemirror/lang-json": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-json/-/lang-json-6.0.2.tgz", + "integrity": "sha512-x2OtO+AvwEHrEwR0FyyPtfDUiloG3rnVTSZV1W8UteaLL8/MajQd8DpvUb2YVzC+/T18aSDv0H9mu+xw0EStoQ==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/json": "^1.0.0" + } + }, + "node_modules/@codemirror/language": { + "version": "6.11.2", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.11.2.tgz", + "integrity": "sha512-p44TsNArL4IVXDTbapUmEkAlvWs2CFQbcfc0ymDsis1kH2wh0gcY96AS29c/vp2d0y2Tquk1EDSaawpzilUiAw==", + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.23.0", + "@lezer/common": "^1.1.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "node_modules/@codemirror/language/node_modules/@lezer/common": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.3.tgz", + "integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==", + "license": "MIT" + }, + "node_modules/@codemirror/language/node_modules/@lezer/lr": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz", + "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@codemirror/language/node_modules/style-mod": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz", + "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==", + "license": "MIT" + }, + "node_modules/@codemirror/lint": { + "version": "6.8.5", + "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.8.5.tgz", + "integrity": "sha512-s3n3KisH7dx3vsoeGMxsbRAgKe4O1vbrnKBClm99PU0fWxmxsx5rR2PfqQgIt+2MMJBHbiJ5rfIdLYfB9NNvsA==", + "license": "MIT", + "dependencies": { + 
"@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.35.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/lint/node_modules/crelt": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", + "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==", + "license": "MIT" + }, + "node_modules/@codemirror/search": { + "version": "6.5.11", + "resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.5.11.tgz", + "integrity": "sha512-KmWepDE6jUdL6n8cAAqIpRmLPBZ5ZKnicE8oGU/s3QrAVID+0VhLFrzUucVKHG5035/BSykhExDL/Xm7dHthiA==", + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/search/node_modules/crelt": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", + "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==", + "license": "MIT" + }, + "node_modules/@codemirror/state": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.5.2.tgz", + "integrity": "sha512-FVqsPqtPWKVVL3dPSxy8wEF/ymIEuVzF1PK3VbUgrxXpJUSHQWWZz4JMToquRxnkw+36LTamCZG2iua2Ptq0fA==", + "license": "MIT", + "dependencies": { + "@marijn/find-cluster-break": "^1.0.0" + } + }, + "node_modules/@codemirror/theme-one-dark": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/@codemirror/theme-one-dark/-/theme-one-dark-6.1.3.tgz", + "integrity": "sha512-NzBdIvEJmx6fjeremiGp3t/okrLPYT0d9orIc7AFun8oZcRk58aejkqhv6spnz4MLAevrKNPMQYXEWMg4s+sKA==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/highlight": "^1.0.0" + } + }, + "node_modules/@codemirror/view": { + "version": "6.38.1", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.38.1.tgz", + "integrity": "sha512-RmTOkE7hRU3OVREqFVITWHz6ocgBjv08GoePscAakgVQfciA3SGCEk7mb9IzwW61cKKmlTpHXG6DUE5Ubx+MGQ==", + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.5.0", + "crelt": "^1.0.6", + "style-mod": "^4.1.0", + "w3c-keyname": "^2.2.4" + } + }, + "node_modules/@codemirror/view/node_modules/crelt": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", + "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==", + "license": "MIT" + }, + "node_modules/@codemirror/view/node_modules/style-mod": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz", + "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==", + "license": "MIT" + }, + "node_modules/@codemirror/view/node_modules/w3c-keyname": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", + "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", + "license": "MIT" + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.8.tgz", + "integrity": "sha512-urAvrUedIqEiFR3FYSLTWQgLu5tb+m0qZw0NBEasUeo6wuqatkMDaRT+1uABiGXEu5vqgPd7FGE1BhsAIy9QVA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + 
}, + "node_modules/@esbuild/android-arm": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.8.tgz", + "integrity": "sha512-RONsAvGCz5oWyePVnLdZY/HHwA++nxYWIX1atInlaW6SEkwq6XkP3+cb825EUcRs5Vss/lGh/2YxAb5xqc07Uw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.8.tgz", + "integrity": "sha512-OD3p7LYzWpLhZEyATcTSJ67qB5D+20vbtr6vHlHWSQYhKtzUYrETuWThmzFpZtFsBIxRvhO07+UgVA9m0i/O1w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.8.tgz", + "integrity": "sha512-yJAVPklM5+4+9dTeKwHOaA+LQkmrKFX96BM0A/2zQrbS6ENCmxc4OVoBs5dPkCCak2roAD+jKCdnmOqKszPkjA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.8.tgz", + "integrity": "sha512-Jw0mxgIaYX6R8ODrdkLLPwBqHTtYHJSmzzd+QeytSugzQ0Vg4c5rDky5VgkoowbZQahCbsv1rT1KW72MPIkevw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.8.tgz", + "integrity": "sha512-Vh2gLxxHnuoQ+GjPNvDSDRpoBCUzY4Pu0kBqMBDlK4fuWbKgGtmDIeEC081xi26PPjn+1tct+Bh8FjyLlw1Zlg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.8.tgz", + "integrity": "sha512-YPJ7hDQ9DnNe5vxOm6jaie9QsTwcKedPvizTVlqWG9GBSq+BuyWEDazlGaDTC5NGU4QJd666V0yqCBL2oWKPfA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.8.tgz", + "integrity": "sha512-MmaEXxQRdXNFsRN/KcIimLnSJrk2r5H8v+WVafRWz5xdSVmWLoITZQXcgehI2ZE6gioE6HirAEToM/RvFBeuhw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.8.tgz", + "integrity": "sha512-FuzEP9BixzZohl1kLf76KEVOsxtIBFwCaLupVuk4eFVnOZfU+Wsn+x5Ryam7nILV2pkq2TqQM9EZPsOBuMC+kg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.8.tgz", + "integrity": 
"sha512-WIgg00ARWv/uYLU7lsuDK00d/hHSfES5BzdWAdAig1ioV5kaFNrtK8EqGcUBJhYqotlUByUKz5Qo6u8tt7iD/w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.8.tgz", + "integrity": "sha512-A1D9YzRX1i+1AJZuFFUMP1E9fMaYY+GnSQil9Tlw05utlE86EKTUA7RjwHDkEitmLYiFsRd9HwKBPEftNdBfjg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.8.tgz", + "integrity": "sha512-O7k1J/dwHkY1RMVvglFHl1HzutGEFFZ3kNiDMSOyUrB7WcoHGf96Sh+64nTRT26l3GMbCW01Ekh/ThKM5iI7hQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.8.tgz", + "integrity": "sha512-uv+dqfRazte3BzfMp8PAQXmdGHQt2oC/y2ovwpTteqrMx2lwaksiFZ/bdkXJC19ttTvNXBuWH53zy/aTj1FgGw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.8.tgz", + "integrity": "sha512-GyG0KcMi1GBavP5JgAkkstMGyMholMDybAf8wF5A70CALlDM2p/f7YFE7H92eDeH/VBtFJA5MT4nRPDGg4JuzQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.8.tgz", + "integrity": "sha512-rAqDYFv3yzMrq7GIcen3XP7TUEG/4LK86LUPMIz6RT8A6pRIDn0sDcvjudVZBiiTcZCY9y2SgYX2lgK3AF+1eg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.8.tgz", + "integrity": "sha512-Xutvh6VjlbcHpsIIbwY8GVRbwoviWT19tFhgdA7DlenLGC/mbc3lBoVb7jxj9Z+eyGqvcnSyIltYUrkKzWqSvg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.8.tgz", + "integrity": "sha512-ASFQhgY4ElXh3nDcOMTkQero4b1lgubskNlhIfJrsH5OKZXDpUAKBlNS0Kx81jwOBp+HCeZqmoJuihTv57/jvQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.8.tgz", + "integrity": "sha512-d1KfruIeohqAi6SA+gENMuObDbEjn22olAR7egqnkCD9DGBG0wsEARotkLgXDu6c4ncgWTZJtN5vcgxzWRMzcw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + 
"engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.8.tgz", + "integrity": "sha512-nVDCkrvx2ua+XQNyfrujIG38+YGyuy2Ru9kKVNyh5jAys6n+l44tTtToqHjino2My8VAY6Lw9H7RI73XFi66Cg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.8.tgz", + "integrity": "sha512-j8HgrDuSJFAujkivSMSfPQSAa5Fxbvk4rgNAS5i3K+r8s1X0p1uOO2Hl2xNsGFppOeHOLAVgYwDVlmxhq5h+SQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.8.tgz", + "integrity": "sha512-1h8MUAwa0VhNCDp6Af0HToI2TJFAn1uqT9Al6DJVzdIBAd21m/G0Yfc77KDM3uF3T/YaOgQq3qTJHPbTOInaIQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.8.tgz", + "integrity": "sha512-r2nVa5SIK9tSWd0kJd9HCffnDHKchTGikb//9c7HX+r+wHYCpQrSgxhlY6KWV1nFo1l4KFbsMlHk+L6fekLsUg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.8.tgz", + "integrity": "sha512-zUlaP2S12YhQ2UzUfcCuMDHQFJyKABkAjvO5YSndMiIkMimPmxA+BYSBikWgsRpvyxuRnow4nS5NPnf9fpv41w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.8.tgz", + "integrity": "sha512-YEGFFWESlPva8hGL+zvj2z/SaK+pH0SwOM0Nc/d+rVnW7GSTFlLBGzZkuSU9kFIGIo8q9X3ucpZhu8PDN5A2sQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.8.tgz", + "integrity": "sha512-hiGgGC6KZ5LZz58OL/+qVVoZiuZlUYlYHNAmczOm7bs2oE1XriPFi5ZHHrS8ACpV5EjySrnoCKmcbQMN+ojnHg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.8.tgz", + "integrity": "sha512-cn3Yr7+OaaZq1c+2pe+8yxC8E144SReCQjN6/2ynubzYjvyqZjTXfQJpAcQpsdJq3My7XADANiYGHoFC69pLQw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@isaacs/fs-minipass": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", + "integrity": 
"sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.4" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.12", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.12.tgz", + "integrity": "sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", + "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.29", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz", + "integrity": "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@lezer/highlight": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz", + "integrity": "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@lezer/highlight/node_modules/@lezer/common": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.3.tgz", + "integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==", + "license": "MIT" + }, + "node_modules/@lezer/json": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@lezer/json/-/json-1.0.3.tgz", + "integrity": "sha512-BP9KzdF9Y35PDpv04r0VeSTKDeox5vVr3efE7eBbx3r4s3oNLfunchejZhjArmeieBH+nVOpgIiBJpEAv8ilqQ==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/json/node_modules/@lezer/common": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.3.tgz", + "integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==", + "license": "MIT" + }, + "node_modules/@lezer/json/node_modules/@lezer/lr": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz", + "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@marijn/find-cluster-break": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz", + "integrity": "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==", + "license": "MIT" + }, + "node_modules/@nestjs/axios": { + "version": "4.0.1", + 
"resolved": "https://registry.npmjs.org/@nestjs/axios/-/axios-4.0.1.tgz", + "integrity": "sha512-68pFJgu+/AZbWkGu65Z3r55bTsCPlgyKaV4BSG8yUAD72q1PPuyVRgUwFv6BxdnibTUHlyxm06FmYWNC+bjN7A==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@nestjs/common": "^10.0.0 || ^11.0.0", + "axios": "^1.3.1", + "rxjs": "^7.0.0" + } + }, + "node_modules/@nestjs/common": { + "version": "11.1.5", + "resolved": "https://registry.npmjs.org/@nestjs/common/-/common-11.1.5.tgz", + "integrity": "sha512-DQpWdr3ShO0BHWkHl3I4W/jR6R3pDtxyBlmrpTuZF+PXxQyBXNvsUne0Wyo6QHPEDi+pAz9XchBFoKbqOhcdTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "file-type": "21.0.0", + "iterare": "1.2.1", + "load-esm": "1.0.2", + "tslib": "2.8.1", + "uid": "2.0.2" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/nest" + }, + "peerDependencies": { + "class-transformer": ">=0.4.1", + "class-validator": ">=0.13.2", + "reflect-metadata": "^0.1.12 || ^0.2.0", + "rxjs": "^7.1.0" + }, + "peerDependenciesMeta": { + "class-transformer": { + "optional": true + }, + "class-validator": { + "optional": true + } + } + }, + "node_modules/@nestjs/common/node_modules/@lukeed/csprng": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@lukeed/csprng/-/csprng-1.1.0.tgz", + "integrity": "sha512-Z7C/xXCiGWsg0KuKsHTKJxbWhpI3Vs5GwLfOean7MGyVFGqdRgBbAjOCh6u4bbjPc/8MJ2pZmK/0DLdCbivLDA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@nestjs/common/node_modules/@tokenizer/inflate": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/@tokenizer/inflate/-/inflate-0.2.7.tgz", + "integrity": "sha512-MADQgmZT1eKjp06jpI2yozxaU9uVs4GzzgSL+uEq7bVcJ9V1ZXQkeGNql1fsSI0gMy1vhvNTNbUqrx+pZfJVmg==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "fflate": "^0.8.2", + "token-types": "^6.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/@nestjs/common/node_modules/fflate": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", + "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", + "dev": true, + "license": "MIT" + }, + "node_modules/@nestjs/common/node_modules/file-type": { + "version": "21.0.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-21.0.0.tgz", + "integrity": "sha512-ek5xNX2YBYlXhiUXui3D/BXa3LdqPmoLJ7rqEx2bKJ7EAUEfmXgW0Das7Dc6Nr9MvqaOnIqiPV0mZk/r/UpNAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tokenizer/inflate": "^0.2.7", + "strtok3": "^10.2.2", + "token-types": "^6.0.0", + "uint8array-extras": "^1.4.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sindresorhus/file-type?sponsor=1" + } + }, + "node_modules/@nestjs/common/node_modules/strtok3": { + "version": "10.3.4", + "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-10.3.4.tgz", + "integrity": "sha512-KIy5nylvC5le1OdaaoCJ07L+8iQzJHGH6pWDuzS+d07Cu7n1MZ2x26P8ZKIWfbK02+XIL8Mp4RkWeqdUCrDMfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tokenizer/token": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/@nestjs/common/node_modules/uid": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/uid/-/uid-2.0.2.tgz", + "integrity": 
"sha512-u3xV3X7uzvi5b1MncmZo3i2Aw222Zk1keqLA1YkHldREkAhAqi65wuPfe7lHx8H/Wzy+8CE7S7uS3jekIM5s8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@lukeed/csprng": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@nestjs/core": { + "version": "11.1.5", + "resolved": "https://registry.npmjs.org/@nestjs/core/-/core-11.1.5.tgz", + "integrity": "sha512-Qr25MEY9t8VsMETy7eXQ0cNXqu0lzuFrrTr+f+1G57ABCtV5Pogm7n9bF71OU2bnkDD32Bi4hQLeFR90cku3Tw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "@nuxt/opencollective": "0.4.1", + "fast-safe-stringify": "2.1.1", + "iterare": "1.2.1", + "path-to-regexp": "8.2.0", + "tslib": "2.8.1", + "uid": "2.0.2" + }, + "engines": { + "node": ">= 20" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/nest" + }, + "peerDependencies": { + "@nestjs/common": "^11.0.0", + "@nestjs/microservices": "^11.0.0", + "@nestjs/platform-express": "^11.0.0", + "@nestjs/websockets": "^11.0.0", + "reflect-metadata": "^0.1.12 || ^0.2.0", + "rxjs": "^7.1.0" + }, + "peerDependenciesMeta": { + "@nestjs/microservices": { + "optional": true + }, + "@nestjs/platform-express": { + "optional": true + }, + "@nestjs/websockets": { + "optional": true + } + } + }, + "node_modules/@nestjs/core/node_modules/@lukeed/csprng": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@lukeed/csprng/-/csprng-1.1.0.tgz", + "integrity": "sha512-Z7C/xXCiGWsg0KuKsHTKJxbWhpI3Vs5GwLfOean7MGyVFGqdRgBbAjOCh6u4bbjPc/8MJ2pZmK/0DLdCbivLDA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@nestjs/core/node_modules/@nuxt/opencollective": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@nuxt/opencollective/-/opencollective-0.4.1.tgz", + "integrity": "sha512-GXD3wy50qYbxCJ652bDrDzgMr3NFEkIS374+IgFQKkCvk9yiYcLvX2XDYr7UyQxf4wK0e+yqDYRubZ0DtOxnmQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "consola": "^3.2.3" + }, + "bin": { + "opencollective": "bin/opencollective.js" + }, + "engines": { + "node": "^14.18.0 || >=16.10.0", + "npm": ">=5.10.0" + } + }, + "node_modules/@nestjs/core/node_modules/consola": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", + "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.18.0 || >=16.10.0" + } + }, + "node_modules/@nestjs/core/node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@nestjs/core/node_modules/uid": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/uid/-/uid-2.0.2.tgz", + "integrity": "sha512-u3xV3X7uzvi5b1MncmZo3i2Aw222Zk1keqLA1YkHldREkAhAqi65wuPfe7lHx8H/Wzy+8CE7S7uS3jekIM5s8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@lukeed/csprng": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@nuxtjs/opencollective": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@nuxtjs/opencollective/-/opencollective-0.3.2.tgz", + "integrity": "sha512-um0xL3fO7Mf4fDxcqx9KryrB7zgRM5JSlvGN5AGkP6JLM5XEKyjeAiPbNxdXVXQ16isuAhYpvP88NgL2BGd6aA==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": 
"^4.1.0", + "consola": "^2.15.0", + "node-fetch": "^2.6.1" + }, + "bin": { + "opencollective": "bin/opencollective.js" + }, + "engines": { + "node": ">=8.0.0", + "npm": ">=5.0.0" + } + }, + "node_modules/@nuxtjs/opencollective/node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/@openapitools/openapi-generator-cli": { + "version": "2.21.4", + "resolved": "https://registry.npmjs.org/@openapitools/openapi-generator-cli/-/openapi-generator-cli-2.21.4.tgz", + "integrity": "sha512-s2OBgiNml0DL0ebkvAMQxZi7c8SUQMHssTUJwWsFDv4kVtBVDV4UzsCh9gQEXlNjuEcEgZoa5BIOai2sT0sE8g==", + "dev": true, + "hasInstallScript": true, + "license": "Apache-2.0", + "dependencies": { + "@nestjs/axios": "4.0.1", + "@nestjs/common": "11.1.5", + "@nestjs/core": "11.1.5", + "@nuxtjs/opencollective": "0.3.2", + "axios": "1.11.0", + "chalk": "4.1.2", + "commander": "8.3.0", + "compare-versions": "4.1.4", + "concurrently": "9.2.0", + "console.table": "0.10.0", + "fs-extra": "11.3.0", + "glob": "11.0.3", + "inquirer": "8.2.6", + "lodash": "4.17.21", + "proxy-agent": "6.5.0", + "reflect-metadata": "0.2.2", + "rxjs": "7.8.2", + "tslib": "2.8.1" + }, + "bin": { + "openapi-generator-cli": "main.js" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/openapi_generator" + } + }, + "node_modules/@polka/url": { + "version": "1.0.0-next.29", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.46.2", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@sveltejs/acorn-typescript": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@sveltejs/acorn-typescript/-/acorn-typescript-1.0.5.tgz", + "integrity": "sha512-IwQk4yfwLdibDlrXVE04jTZYlLnwsTT2PIOQQGNLWfjavGifnk1JD1LcZjZaBTRcxZu2FfPfNLOE04DSu9lqtQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^8.9.0" + } + }, + "node_modules/@sveltejs/adapter-static": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-static/-/adapter-static-3.0.9.tgz", + "integrity": "sha512-aytHXcMi7lb9ljsWUzXYQ0p5X1z9oWud2olu/EpmH7aCu4m84h7QLvb5Wp+CFirKcwoNnYvYWhyP/L8Vh1ztdw==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@sveltejs/kit": "^2.0.0" + } + }, + "node_modules/@sveltejs/kit": { + "version": "2.27.3", + "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.27.3.tgz", + "integrity": "sha512-jiG3NGZ8RRpi+ncjVnX+oR7uWEgzy//3YLGcTU5mHtjGraeGyNDr7GJFHlk7z0vi8bMXpXIUkEXj6p70FJmHvw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@sveltejs/acorn-typescript": "^1.0.5", + "@types/cookie": "^0.6.0", + "acorn": "^8.14.1", + "cookie": "^0.6.0", + "devalue": "^5.1.0", + "esm-env": "^1.2.2", + "kleur": "^4.1.5", + "magic-string": "^0.30.5", + "mrmime": "^2.0.0", + "sade": "^1.8.1", + "set-cookie-parser": "^2.6.0", + "sirv": "^3.0.0" + }, + "bin": { + "svelte-kit": "svelte-kit.js" + }, + "engines": { + "node": ">=18.13" + }, + 
"peerDependencies": { + "@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0", + "svelte": "^4.0.0 || ^5.0.0-next.0", + "vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0" + } + }, + "node_modules/@sveltejs/kit/node_modules/@standard-schema/spec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sveltejs/kit/node_modules/devalue": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/devalue/-/devalue-5.1.1.tgz", + "integrity": "sha512-maua5KUiapvEwiEAe+XnlZ3Rh0GD+qI1J/nb9vrJc3muPXvcF/8gXYTWF76+5DAqHyDUtOIImEuo0YKE9mshVw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sveltejs/vite-plugin-svelte": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-6.1.0.tgz", + "integrity": "sha512-+U6lz1wvGEG/BvQyL4z/flyNdQ9xDNv5vrh+vWBWTHaebqT0c9RNggpZTo/XSPoHsSCWBlYaTlRX8pZ9GATXCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sveltejs/vite-plugin-svelte-inspector": "^5.0.0-next.1", + "debug": "^4.4.1", + "deepmerge": "^4.3.1", + "kleur": "^4.1.5", + "magic-string": "^0.30.17", + "vitefu": "^1.1.1" + }, + "engines": { + "node": "^20.19 || ^22.12 || >=24" + }, + "peerDependencies": { + "svelte": "^5.0.0", + "vite": "^6.3.0 || ^7.0.0" + } + }, + "node_modules/@sveltejs/vite-plugin-svelte-inspector": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-5.0.0.tgz", + "integrity": "sha512-iwQ8Z4ET6ZFSt/gC+tVfcsSBHwsqc6RumSaiLUkAurW3BCpJam65cmHw0oOlDMTO0u+PZi9hilBRYN+LZNHTUQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.4.1" + }, + "engines": { + "node": "^20.19 || ^22.12 || >=24" + }, + "peerDependencies": { + "@sveltejs/vite-plugin-svelte": "^6.0.0-next.0", + "svelte": "^5.0.0", + "vite": "^6.3.0 || ^7.0.0" + } + }, + "node_modules/@tailwindcss/forms": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.10.tgz", + "integrity": "sha512-utI1ONF6uf/pPNO68kmN1b8rEwNXv3czukalo8VtJH8ksIkZXr3Q3VYudZLkCsDd4Wku120uF02hYK25XGPorw==", + "dev": true, + "license": "MIT", + "dependencies": { + "mini-svg-data-uri": "^1.2.3" + }, + "peerDependencies": { + "tailwindcss": ">=3.0.0 || >= 3.0.0-alpha.1 || >= 4.0.0-alpha.20 || >= 4.0.0-beta.1" + } + }, + "node_modules/@tailwindcss/node": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.11.tgz", + "integrity": "sha512-yzhzuGRmv5QyU9qLNg4GTlYI6STedBWRE7NjxP45CsFYYq9taI0zJXZBMqIC/c8fViNLhmrbpSFS57EoxUmD6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.3.0", + "enhanced-resolve": "^5.18.1", + "jiti": "^2.4.2", + "lightningcss": "1.30.1", + "magic-string": "^0.30.17", + "source-map-js": "^1.2.1", + "tailwindcss": "4.1.11" + } + }, + "node_modules/@tailwindcss/oxide": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.11.tgz", + "integrity": "sha512-Q69XzrtAhuyfHo+5/HMgr1lAiPP/G40OMFAnws7xcFEYqcypZmdW8eGXaOUIeOl1dzPJBPENXgbjsOyhg2nkrg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "detect-libc": "^2.0.4", + "tar": "^7.4.3" + }, + "engines": { + "node": ">= 10" + }, + "optionalDependencies": { + 
"@tailwindcss/oxide-android-arm64": "4.1.11", + "@tailwindcss/oxide-darwin-arm64": "4.1.11", + "@tailwindcss/oxide-darwin-x64": "4.1.11", + "@tailwindcss/oxide-freebsd-x64": "4.1.11", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.11", + "@tailwindcss/oxide-linux-arm64-gnu": "4.1.11", + "@tailwindcss/oxide-linux-arm64-musl": "4.1.11", + "@tailwindcss/oxide-linux-x64-gnu": "4.1.11", + "@tailwindcss/oxide-linux-x64-musl": "4.1.11", + "@tailwindcss/oxide-wasm32-wasi": "4.1.11", + "@tailwindcss/oxide-win32-arm64-msvc": "4.1.11", + "@tailwindcss/oxide-win32-x64-msvc": "4.1.11" + } + }, + "node_modules/@tailwindcss/oxide-android-arm64": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.11.tgz", + "integrity": "sha512-3IfFuATVRUMZZprEIx9OGDjG3Ou3jG4xQzNTvjDoKmU9JdmoCohQJ83MYd0GPnQIu89YoJqvMM0G3uqLRFtetg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-arm64": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.11.tgz", + "integrity": "sha512-ESgStEOEsyg8J5YcMb1xl8WFOXfeBmrhAwGsFxxB2CxY9evy63+AtpbDLAyRkJnxLy2WsD1qF13E97uQyP1lfQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-x64": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.11.tgz", + "integrity": "sha512-EgnK8kRchgmgzG6jE10UQNaH9Mwi2n+yw1jWmof9Vyg2lpKNX2ioe7CJdf9M5f8V9uaQxInenZkOxnTVL3fhAw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-freebsd-x64": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.11.tgz", + "integrity": "sha512-xdqKtbpHs7pQhIKmqVpxStnY1skuNh4CtbcyOHeX1YBE0hArj2romsFGb6yUmzkq/6M24nkxDqU8GYrKrz+UcA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.11.tgz", + "integrity": "sha512-ryHQK2eyDYYMwB5wZL46uoxz2zzDZsFBwfjssgB7pzytAeCCa6glsiJGjhTEddq/4OsIjsLNMAiMlHNYnkEEeg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.11.tgz", + "integrity": "sha512-mYwqheq4BXF83j/w75ewkPJmPZIqqP1nhoghS9D57CLjsh3Nfq0m4ftTotRYtGnZd3eCztgbSPJ9QhfC91gDZQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-musl": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.11.tgz", + "integrity": 
"sha512-m/NVRFNGlEHJrNVk3O6I9ggVuNjXHIPoD6bqay/pubtYC9QIdAMpS+cswZQPBLvVvEF6GtSNONbDkZrjWZXYNQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-gnu": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.11.tgz", + "integrity": "sha512-YW6sblI7xukSD2TdbbaeQVDysIm/UPJtObHJHKxDEcW2exAtY47j52f8jZXkqE1krdnkhCMGqP3dbniu1Te2Fg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-musl": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.11.tgz", + "integrity": "sha512-e3C/RRhGunWYNC3aSF7exsQkdXzQ/M+aYuZHKnw4U7KQwTJotnWsGOIVih0s2qQzmEzOFIJ3+xt7iq67K/p56Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.11.tgz", + "integrity": "sha512-Xo1+/GU0JEN/C/dvcammKHzeM6NqKovG+6921MR6oadee5XPBaKOumrJCXvopJ/Qb5TH7LX/UAywbqrP4lax0g==", + "bundleDependencies": [ + "@napi-rs/wasm-runtime", + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util", + "@emnapi/wasi-threads", + "tslib" + ], + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@emnapi/wasi-threads": "^1.0.2", + "@napi-rs/wasm-runtime": "^0.2.11", + "@tybys/wasm-util": "^0.9.0", + "tslib": "^2.8.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.11.tgz", + "integrity": "sha512-UgKYx5PwEKrac3GPNPf6HVMNhUIGuUh4wlDFR2jYYdkX6pL/rn73zTq/4pzUm8fOjAn5L8zDeHp9iXmUGOXZ+w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-win32-x64-msvc": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.11.tgz", + "integrity": "sha512-YfHoggn1j0LK7wR82TOucWc5LDCguHnoS879idHekmmiR7g9HUtMw9MI0NHatS28u/Xlkfi9w5RJWgz2Dl+5Qg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/postcss": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.1.11.tgz", + "integrity": "sha512-q/EAIIpF6WpLhKEuQSEVMZNMIY8KhWoAemZ9eylNAih9jxMGAYPPWBn3I9QL/2jZ+e7OEz/tZkX5HwbBR4HohA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "@tailwindcss/node": "4.1.11", + "@tailwindcss/oxide": "4.1.11", + "postcss": "^8.4.41", + "tailwindcss": "4.1.11" + } + }, + "node_modules/@tailwindcss/typography": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.16.tgz", + "integrity": 
"sha512-0wDLwCVF5V3x3b1SGXPCDcdsbDHMBe+lkFzBRaHeLvNi+nrrnZ1lA18u+OTWO8iSWU2GxUOCvlXtDuqftc1oiA==", + "license": "MIT", + "dependencies": { + "lodash.castarray": "^4.4.0", + "lodash.isplainobject": "^4.0.6", + "lodash.merge": "^4.6.2", + "postcss-selector-parser": "6.0.10" + }, + "peerDependencies": { + "tailwindcss": ">=3.0.0 || insiders || >=4.0.0-alpha.20 || >=4.0.0-beta.1" + } + }, + "node_modules/@tokenizer/token": { + "version": "0.3.0", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/cookie": { + "version": "0.6.0", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.2.0.tgz", + "integrity": "sha512-3xyG3pMCq3oYCNg7/ZP+E1ooTaGB4cG8JWRsqqOYQdbWNY4zbaV0Ennrd7stjiJEFZCaybcIgpTjJWHRfBSIDw==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.10.0" + } + }, + "node_modules/@types/swagger-schema-official": { + "version": "2.0.25", + "dev": true, + "license": "MIT" + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/autoprefixer": { + "version": "10.4.21", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.21.tgz", + "integrity": "sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "browserslist": "^4.24.4", + "caniuse-lite": "^1.0.30001702", + "fraction.js": "^4.3.7", + "normalize-range": "^0.1.2", + "picocolors": "^1.1.1", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/axios": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz", + "integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/axios/node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/axios/node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/axios/node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/axios/node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/axios/node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/axios/node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/axios/node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/axios/node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "dev": true, + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + 
"combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/axios/node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/axios/node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/axios/node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/axios/node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/axios/node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/axios/node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/axios/node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/axios/node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": 
"sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true, + "license": "MIT" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/browserslist": { + "version": "4.25.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz", + "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "caniuse-lite": "^1.0.30001726", + "electron-to-chromium": "^1.5.173", + "node-releases": "^2.0.19", + "update-browserslist-db": "^1.1.3" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/browserslist/node_modules/electron-to-chromium": { + "version": "1.5.199", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.199.tgz", + "integrity": "sha512-3gl0S7zQd88kCAZRO/DnxtBKuhMO4h0EaQIN3YgZfV6+pW+5+bf2AdQeHNESCoaQqo/gjGVYEf2YM4O5HJQqpQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/browserslist/node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/browserslist/node_modules/node-releases": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", + "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "dev": true, + "license": "MIT" + }, + "node_modules/browserslist/node_modules/update-browserslist-db": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", + "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": 
"sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/c12": { + "version": "3.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "chokidar": "^4.0.3", + "confbox": "^0.2.2", + "defu": "^6.1.4", + "dotenv": "^17.2.1", + "exsolve": "^1.0.7", + "giget": "^2.0.0", + "jiti": "^2.5.1", + "ohash": "^2.0.11", + "pathe": "^2.0.3", + "perfect-debounce": "^1.0.0", + "pkg-types": "^2.2.0", + "rc9": "^2.1.2" + }, + "peerDependencies": { + "magicast": "^0.3.5" + }, + "peerDependenciesMeta": { + "magicast": { + "optional": true + } + } + }, + "node_modules/c12/node_modules/chokidar": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/c12/node_modules/defu": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", + "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", + "dev": true, + "license": "MIT" + }, + "node_modules/c12/node_modules/dotenv": { + "version": "17.2.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.1.tgz", + "integrity": "sha512-kQhDYKZecqnM0fCnzI5eIv5L4cAe/iRI+HqMbO/hbRdTAeXDG+M9FjipUxNfbARuEg4iHIbhnhs78BCHNbSxEQ==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/c12/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/c12/node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/call-me-maybe": { + "version": "1.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001731", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001731.tgz", + "integrity": "sha512-lDdp2/wrOmTRWuoB5DpfNkC0rJDU8DqRa6nYL6HK6sytw70QMopt/NIc/9SM7ylItlBWfACXk0tEn37UWM/+mg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chardet": { + "version": "0.7.0", + "dev": true, + "license": "MIT" + }, + "node_modules/chownr": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", + "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/citty": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/citty/-/citty-0.1.6.tgz", + "integrity": "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "consola": "^3.2.3" + } + }, + "node_modules/citty/node_modules/consola": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", + "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.18.0 || >=16.10.0" + } + }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-spinners": { + "version": "2.9.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/clone": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/codemirror": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.2.tgz", + "integrity": "sha512-VhydHotNW5w1UGK0Qj96BwSk/Zqbp9WbnyK2W/eVMv4QyF41INRGpjUhFJY7/uDNuudSc33a/PKr4iDqRduvHw==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/commands": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/lint": "^6.0.0", + "@codemirror/search": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "dev": true, + "license": "MIT" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "dev": true, + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "dev": true, + "license": 
"MIT", + "engines": { + "node": ">= 12" + } + }, + "node_modules/compare-versions": { + "version": "4.1.4", + "dev": true, + "license": "MIT" + }, + "node_modules/concurrently": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/concurrently/-/concurrently-9.2.0.tgz", + "integrity": "sha512-IsB/fiXTupmagMW4MNp2lx2cdSN2FfZq78vF90LBB+zZHArbIQZjQtzXCiXnvTxCZSvXanTqFLWBjw2UkLx1SQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.2", + "lodash": "^4.17.21", + "rxjs": "^7.8.1", + "shell-quote": "^1.8.1", + "supports-color": "^8.1.1", + "tree-kill": "^1.2.2", + "yargs": "^17.7.2" + }, + "bin": { + "conc": "dist/bin/concurrently.js", + "concurrently": "dist/bin/concurrently.js" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/open-cli-tools/concurrently?sponsor=1" + } + }, + "node_modules/concurrently/node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/concurrently/node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/concurrently/node_modules/shell-quote": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz", + "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/concurrently/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/concurrently/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/concurrently/node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/concurrently/node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": 
"sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/concurrently/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/confbox": { + "version": "0.2.2", + "dev": true, + "license": "MIT" + }, + "node_modules/consola": { + "version": "2.15.3", + "resolved": "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz", + "integrity": "sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/console.table": { + "version": "0.10.0", + "dev": true, + "license": "MIT", + "dependencies": { + "easy-table": "1.1.0" + }, + "engines": { + "node": "> 0.10" + } + }, + "node_modules/cookie": { + "version": "0.6.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/defaults": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "clone": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/detect-libc": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz", + "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/easy-table": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "optionalDependencies": { + "wcwidth": ">=1.0.1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/enhanced-resolve": { + "version": "5.18.3", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", + "integrity": 
"sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/es6-promise": { + "version": "3.3.1", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.8.tgz", + "integrity": "sha512-vVC0USHGtMi8+R4Kz8rt6JhEWLxsv9Rnu/lGYbPR8u47B+DCBksq9JarW0zOO7bs37hyOK1l2/oqtbciutL5+Q==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.8", + "@esbuild/android-arm": "0.25.8", + "@esbuild/android-arm64": "0.25.8", + "@esbuild/android-x64": "0.25.8", + "@esbuild/darwin-arm64": "0.25.8", + "@esbuild/darwin-x64": "0.25.8", + "@esbuild/freebsd-arm64": "0.25.8", + "@esbuild/freebsd-x64": "0.25.8", + "@esbuild/linux-arm": "0.25.8", + "@esbuild/linux-arm64": "0.25.8", + "@esbuild/linux-ia32": "0.25.8", + "@esbuild/linux-loong64": "0.25.8", + "@esbuild/linux-mips64el": "0.25.8", + "@esbuild/linux-ppc64": "0.25.8", + "@esbuild/linux-riscv64": "0.25.8", + "@esbuild/linux-s390x": "0.25.8", + "@esbuild/linux-x64": "0.25.8", + "@esbuild/netbsd-arm64": "0.25.8", + "@esbuild/netbsd-x64": "0.25.8", + "@esbuild/openbsd-arm64": "0.25.8", + "@esbuild/openbsd-x64": "0.25.8", + "@esbuild/openharmony-arm64": "0.25.8", + "@esbuild/sunos-x64": "0.25.8", + "@esbuild/win32-arm64": "0.25.8", + "@esbuild/win32-ia32": "0.25.8", + "@esbuild/win32-x64": "0.25.8" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/esm-env": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/esm-env/-/esm-env-1.2.2.tgz", + "integrity": "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/esprima": { + "version": "4.0.1", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esrap": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/esrap/-/esrap-2.1.0.tgz", + "integrity": "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eta": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/eta/-/eta-2.2.0.tgz", + "integrity": "sha512-UVQ72Rqjy/ZKQalzV5dCCJP80GrmPrMxh6NlNf+erV6ObL0ZFkhCstWRawS85z3smdr3d2wXPsZEY7rDPfGd2g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + }, + "funding": { + "url": "https://github.com/eta-dev/eta?sponsor=1" + } + }, + "node_modules/exsolve": { + "version": "1.0.7", + "dev": true, + "license": "MIT" + }, + "node_modules/external-editor": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/fdir": { + "version": "6.4.6", 
+ "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz", + "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/figures": { + "version": "3.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fraction.js": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", + "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "patreon", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fs-extra": { + "version": "11.3.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.0.tgz", + "integrity": "sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/giget": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.0", + "defu": "^6.1.4", + "node-fetch-native": "^1.6.6", + "nypm": "^0.6.0", + "pathe": "^2.0.3" + }, + "bin": { + "giget": "dist/cli.mjs" + } + }, + "node_modules/giget/node_modules/consola": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", + "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.18.0 || >=16.10.0" + } + }, + "node_modules/giget/node_modules/defu": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", + "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", + "dev": true, + "license": "MIT" + }, + "node_modules/giget/node_modules/node-fetch-native": { + "version": "1.6.7", + "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.7.tgz", + "integrity": "sha512-g9yhqoedzIUm0nTnTqAQvueMPVOuIY16bqgAJJC8XOOubYFNwz6IER9qs0Gq2Xd0+CecCKFjtdDTMA4u4xG06Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/giget/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/glob": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz", + "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.3.1", + "jackspeak": "^4.1.1", + "minimatch": "^10.0.3", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + 
"path-scurry": "^2.0.0" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob/node_modules/@isaacs/balanced-match": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz", + "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/glob/node_modules/@isaacs/brace-expansion": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz", + "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@isaacs/balanced-match": "^4.0.1" + }, + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/glob/node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/glob/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/glob/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/glob/node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/glob/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/glob/node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, 
+ "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob/node_modules/jackspeak": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz", + "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob/node_modules/lru-cache": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.1.0.tgz", + "integrity": "sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==", + "dev": true, + "license": "ISC", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "10.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz", + "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==", + "dev": true, + "license": "ISC", + "dependencies": { + "@isaacs/brace-expansion": "^5.0.0" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob/node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true, + "license": "BlueOak-1.0.0" + }, + "node_modules/glob/node_modules/path-scurry": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz", + "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^11.0.0", + "minipass": "^7.1.2" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + 
}, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/glob/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hasown/node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/inquirer": { + "version": "8.2.6", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.6.tgz", + "integrity": "sha512-M1WuAmb7pn9zdFRtQYk26ZBoY043Sse0wVDdk4Bppr+JOXyQYybdtvK+l9wUibhtjdjvtoiNy8tk+EgsYIUqKg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-escapes": "^4.2.1", + "chalk": "^4.1.1", + "cli-cursor": "^3.1.0", + "cli-width": "^3.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.21", + "mute-stream": "0.0.8", + "ora": "^5.4.1", + "run-async": "^2.4.0", + "rxjs": "^7.5.5", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0", + "through": "^2.3.6", + "wrap-ansi": "^6.0.1" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/inquirer/node_modules/cli-width": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", + "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 10" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + 
"node": ">=8" + } + }, + "node_modules/is-interactive": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-reference": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.3.tgz", + "integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.6" + } + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/iterare": { + "version": "1.2.1", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=6" + } + }, + "node_modules/jiti": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.5.1.tgz", + "integrity": "sha512-twQoecYPiVA5K/h6SxtORw/Bs3ar+mLUtoPSc7iMXzQzK8d7eJ/R09wmTwAjiamETn1cXYPGfNnu7DMoHgu12w==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/js-yaml/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/jsbn": { + "version": "1.1.0", + "dev": true, + "license": "MIT" + }, + "node_modules/jsonfile": { + "version": "6.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/kleur": { + "version": "4.1.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/lightningcss": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.1.tgz", + "integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-darwin-arm64": "1.30.1", + "lightningcss-darwin-x64": "1.30.1", + "lightningcss-freebsd-x64": "1.30.1", + "lightningcss-linux-arm-gnueabihf": "1.30.1", + "lightningcss-linux-arm64-gnu": "1.30.1", + "lightningcss-linux-arm64-musl": "1.30.1", + "lightningcss-linux-x64-gnu": "1.30.1", + "lightningcss-linux-x64-musl": "1.30.1", + "lightningcss-win32-arm64-msvc": "1.30.1", + "lightningcss-win32-x64-msvc": "1.30.1" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.1.tgz", + "integrity": "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==", + 
"cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.1.tgz", + "integrity": "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.1.tgz", + "integrity": "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.1.tgz", + "integrity": "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.1.tgz", + "integrity": "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.1.tgz", + "integrity": "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.1.tgz", + "integrity": "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + 
"node_modules/lightningcss-linux-x64-musl": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.1.tgz", + "integrity": "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.1.tgz", + "integrity": "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.1.tgz", + "integrity": "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/load-esm": { + "version": "1.0.2", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + }, + { + "type": "buymeacoffee", + "url": "https://buymeacoffee.com/borewit" + } + ], + "license": "MIT", + "engines": { + "node": ">=13.2.0" + } + }, + "node_modules/locate-character": { + "version": "3.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.castarray": { + "version": "4.4.0", + "license": "MIT" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "license": "MIT" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "license": "MIT" + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/magic-string": { + "version": "0.30.17", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", + "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types/node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": 
"sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/mini-svg-data-uri": { + "version": "1.4.4", + "dev": true, + "license": "MIT", + "bin": { + "mini-svg-data-uri": "cli.js" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/minizlib": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz", + "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==", + "dev": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.1.2" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/mkdirp": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", + "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", + "dev": true, + "license": "MIT", + "bin": { + "mkdirp": "dist/cjs/src/bin.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/mri": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/mrmime": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/mute-stream": { + "version": "0.0.8", + "dev": true, + "license": "ISC" + }, + "node_modules/node-fetch-h2": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "http2-client": "^1.2.5" + }, + "engines": { + "node": "4.x || >=6.0.0" + } + }, + "node_modules/node-fetch-h2/node_modules/http2-client": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/http2-client/-/http2-client-1.3.5.tgz", + "integrity": "sha512-EC2utToWl4RKfs5zd36Mxq7nzHHBuomZboI0yYL6Y0RmBgT7Sgkq4rQ0ezFTYoIsSs7Tm9SJe+o2FcAg6GBhGA==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-readfiles": { + "version": "0.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "es6-promise": "^3.2.1" + } + }, + "node_modules/normalize-range": { + "version": "0.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/nypm": { + "version": "0.6.1", + "dev": true, + "license": "MIT", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.2", + "pathe": "^2.0.3", + "pkg-types": "^2.2.0", + "tinyexec": "^1.0.1" + }, + "bin": { + "nypm": "dist/cli.mjs" + }, + "engines": { + "node": "^14.16.0 || >=16.10.0" + } + }, + "node_modules/nypm/node_modules/consola": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", + "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.18.0 || >=16.10.0" + } + }, + "node_modules/nypm/node_modules/pathe": { + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/nypm/node_modules/tinyexec": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz", + "integrity": "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==", + "dev": true, + "license": "MIT" + }, + "node_modules/oas-kit-common": { + "version": "1.0.8", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "fast-safe-stringify": "^2.0.7" + } + }, + "node_modules/oas-kit-common/node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "dev": true, + "license": "MIT" + }, + "node_modules/oas-linter": { + "version": "3.2.2", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@exodus/schemasafe": "^1.0.0-rc.2", + "should": "^13.2.1", + "yaml": "^1.10.0" + }, + "funding": { + "url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/oas-linter/node_modules/@exodus/schemasafe": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@exodus/schemasafe/-/schemasafe-1.3.0.tgz", + "integrity": "sha512-5Aap/GaRupgNx/feGBwLLTVv8OQFfv3pq2lPRzPg9R+IOBnDgghTGW7l7EuVXOvg5cc/xSAlRW8rBrjIC3Nvqw==", + "dev": true, + "license": "MIT" + }, + "node_modules/oas-linter/node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 6" + } + }, + "node_modules/oas-schema-walker": { + "version": "1.1.5", + "dev": true, + "license": "BSD-3-Clause", + "funding": { + "url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/ohash": { + "version": "2.0.11", + "dev": true, + "license": "MIT" + }, + "node_modules/onetime": { + "version": "5.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora": { + "version": "5.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "bl": "^4.1.0", + "chalk": "^4.1.0", + "cli-cursor": "^3.1.0", + "cli-spinners": "^2.5.0", + "is-interactive": "^1.0.0", + "is-unicode-supported": "^0.1.0", + "log-symbols": "^4.1.0", + "strip-ansi": "^6.0.0", + "wcwidth": "^1.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora/node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/os-tmpdir": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + 
"node_modules/path-to-regexp": { + "version": "8.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/perfect-debounce": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pkg-types": { + "version": "2.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "confbox": "^0.2.2", + "exsolve": "^1.0.7", + "pathe": "^2.0.3" + } + }, + "node_modules/pkg-types/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.0.10", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", + "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/postcss/node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/proxy-agent": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.5.0.tgz", + "integrity": "sha512-TmatMXdr2KlRiA2CyDu8GqR8EjahTG3aY3nXjdzFyoZbmB8hrBsTyMezhULIXKnC0jpfjlmiZ3+EaCzoInSu/A==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "http-proxy-agent": "^7.0.1", + "https-proxy-agent": "^7.0.6", + "lru-cache": 
"^7.14.1", + "pac-proxy-agent": "^7.1.0", + "proxy-from-env": "^1.1.0", + "socks-proxy-agent": "^8.0.5" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/@tootallnate/quickjs-emscripten": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz", + "integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==", + "dev": true, + "license": "MIT" + }, + "node_modules/proxy-agent/node_modules/agent-base": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/ast-types": { + "version": "0.13.4", + "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.13.4.tgz", + "integrity": "sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/proxy-agent/node_modules/basic-ftp": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/basic-ftp/-/basic-ftp-5.0.5.tgz", + "integrity": "sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/proxy-agent/node_modules/data-uri-to-buffer": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz", + "integrity": "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/degenerator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/degenerator/-/degenerator-5.0.1.tgz", + "integrity": "sha512-TllpMR/t0M5sqCXfj85i4XaAzxmS5tVA16dqvdkMwGmzI+dXLXnw3J+3Vdv7VKw+ThlTMboK6i9rnZ6Nntj5CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ast-types": "^0.13.4", + "escodegen": "^2.1.0", + "esprima": "^4.0.1" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/escodegen": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", + "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=6.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/proxy-agent/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/proxy-agent/node_modules/get-uri": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/get-uri/-/get-uri-6.0.5.tgz", + "integrity": 
"sha512-b1O07XYq8eRuVzBNgJLstU6FYc1tS6wnMtF1I1D9lE8LxZSOGZ7LhxN54yPP6mGw5f2CkXY2BQUL9Fx41qvcIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "basic-ftp": "^5.0.2", + "data-uri-to-buffer": "^6.0.2", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/ip-address": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", + "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "jsbn": "1.1.0", + "sprintf-js": "^1.1.3" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/proxy-agent/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/proxy-agent/node_modules/netmask": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/netmask/-/netmask-2.0.2.tgz", + "integrity": "sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/proxy-agent/node_modules/pac-proxy-agent": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.2.0.tgz", + "integrity": "sha512-TEB8ESquiLMc0lV8vcd5Ql/JAKAoyzHFXaStwjkzpOpC5Yv+pIzLfHvjTSdf3vpa2bMiUQrg9i6276yn8666aA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tootallnate/quickjs-emscripten": "^0.23.0", + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "get-uri": "^6.0.1", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.6", + "pac-resolver": "^7.0.1", + "socks-proxy-agent": "^8.0.5" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/pac-resolver": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pac-resolver/-/pac-resolver-7.0.1.tgz", + "integrity": "sha512-5NPgf87AT2STgwa2ntRMr45jTKrYBGkVU36yT0ig/n/GMAa3oPqhZfIQ2kMEimReg0+t9kZViDVZ83qfVUlckg==", + "dev": true, + "license": "MIT", + "dependencies": { + "degenerator": "^5.0.0", + "netmask": "^2.0.2" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": 
"sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true, + "license": "MIT" + }, + "node_modules/proxy-agent/node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/proxy-agent/node_modules/socks": { + "version": "2.8.6", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.6.tgz", + "integrity": "sha512-pe4Y2yzru68lXCb38aAqRf5gvN8YdjP1lok5o0J7BOHljkyCGKVz7H3vpVIXKD27rj2giOJ7DwVyk/GWrPHDWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ip-address": "^9.0.5", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/proxy-agent/node_modules/socks-proxy-agent": { + "version": "8.0.5", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz", + "integrity": "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "socks": "^2.8.3" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/proxy-agent/node_modules/sprintf-js": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/rc9": { + "version": "2.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "defu": "^6.1.4", + "destr": "^2.0.3" + } + }, + "node_modules/rc9/node_modules/defu": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", + "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", + "dev": true, + "license": "MIT" + }, + "node_modules/rc9/node_modules/destr": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/destr/-/destr-2.0.5.tgz", + "integrity": "sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA==", + "dev": true, + "license": "MIT" + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/reflect-metadata": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.2.tgz", + "integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/require-directory": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/restore-cursor": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/restore-cursor/node_modules/signal-exit": { + "version": "3.0.7", + "dev": true, + "license": "ISC" + }, + "node_modules/run-async": { + "version": "2.4.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/rxjs": { + "version": "7.8.2", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", + "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/sade": { + "version": "1.8.1", + "dev": true, + "license": "MIT", + "dependencies": { + "mri": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/set-cookie-parser": { + "version": "2.7.1", + "dev": true, + "license": "MIT" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/should": { + "version": "13.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "should-equal": "^2.0.0", + "should-format": "^3.0.3", + "should-type": "^1.4.0", + "should-type-adaptors": "^1.0.1", + "should-util": "^1.0.0" + } + }, + "node_modules/should-equal": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "should-type": "^1.4.0" + } + }, + "node_modules/should-format": { + "version": "3.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "should-type": "^1.3.0", + "should-type-adaptors": "^1.0.1" + } + }, + "node_modules/should-type": { + "version": "1.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/should-type-adaptors": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "should-type": "^1.3.0", + "should-util": "^1.0.0" + } + }, + "node_modules/should-util": { + "version": "1.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/sirv": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", + "totalist": "^3.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/sirv/node_modules/totalist": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", + "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "dev": true, + "license": "BSD-3-Clause", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": 
"https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/svelte": { + "version": "5.38.0", + "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.38.0.tgz", + "integrity": "sha512-cWF1Oc2IM/QbktdK89u5lt9MdKxRtQnRKnf2tq6KOhYuhLOd2hbMuTiJ+vWMzAeMDe81AzbCgLd4GVtOJ4fDRg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.3.0", + "@jridgewell/sourcemap-codec": "^1.5.0", + "@sveltejs/acorn-typescript": "^1.0.5", + "@types/estree": "^1.0.5", + "acorn": "^8.12.1", + "aria-query": "^5.3.1", + "axobject-query": "^4.1.0", + "clsx": "^2.1.1", + "esm-env": "^1.2.1", + "esrap": "^2.1.0", + "is-reference": "^3.0.3", + "locate-character": "^3.0.0", + "magic-string": "^0.30.11", + "zimmerframe": "^1.1.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/svelte-check": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-4.3.1.tgz", + "integrity": "sha512-lkh8gff5gpHLjxIV+IaApMxQhTGnir2pNUAqcNgeKkvK5bT/30Ey/nzBxNLDlkztCH4dP7PixkMt9SWEKFPBWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "chokidar": "^4.0.1", + "fdir": "^6.2.0", + "picocolors": "^1.0.0", + "sade": "^1.7.4" + }, + "bin": { + "svelte-check": "bin/svelte-check" + }, + "engines": { + "node": ">= 18.0.0" + }, + "peerDependencies": { + "svelte": "^4.0.0 || ^5.0.0-next.0", + "typescript": ">=5.0.0" + } + }, + "node_modules/svelte-check/node_modules/chokidar": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + 
"node_modules/svelte-check/node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/svelte/node_modules/aria-query": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz", + "integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/svelte/node_modules/axobject-query": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", + "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/swagger-schema-official": { + "version": "2.0.0-bab6bed", + "dev": true, + "license": "ISC" + }, + "node_modules/swagger-typescript-api": { + "version": "13.2.7", + "resolved": "https://registry.npmjs.org/swagger-typescript-api/-/swagger-typescript-api-13.2.7.tgz", + "integrity": "sha512-rfqqoRFpZJPl477M/snMJPM90EvI8WqhuUHSF5ecC2r/w376T29+QXNJFVPsJmbFu5rBc/8m3vhArtMctjONdw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@biomejs/js-api": "1.0.0", + "@biomejs/wasm-nodejs": "2.0.5", + "@types/swagger-schema-official": "^2.0.25", + "c12": "^3.0.4", + "citty": "^0.1.6", + "consola": "^3.4.2", + "eta": "^2.2.0", + "js-yaml": "^4.1.0", + "lodash": "^4.17.21", + "nanoid": "^5.1.5", + "swagger-schema-official": "2.0.0-bab6bed", + "swagger2openapi": "^7.0.8", + "typescript": "~5.8.3" + }, + "bin": { + "sta": "dist/cli.js", + "swagger-typescript-api": "dist/cli.js" + }, + "engines": { + "node": ">=20" + } + }, + "node_modules/swagger-typescript-api/node_modules/consola": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", + "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.18.0 || >=16.10.0" + } + }, + "node_modules/swagger-typescript-api/node_modules/nanoid": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.5.tgz", + "integrity": "sha512-Ir/+ZpE9fDsNH0hQ3C68uyThDXzYcim2EqcZ8zn8Chtt1iylPT9xXJB0kPCnqzgcEGikO9RxSrh63MsmVCU7Fw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.js" + }, + "engines": { + "node": "^18 || >=20" + } + }, + "node_modules/swagger-typescript-api/node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/swagger2openapi": { + "version": "7.0.8", + "resolved": "https://registry.npmjs.org/swagger2openapi/-/swagger2openapi-7.0.8.tgz", + "integrity": 
"sha512-upi/0ZGkYgEcLeGieoz8gT74oWHA0E7JivX7aN9mAf+Tc7BQoRBvnIGHoPDw+f9TXTW4s6kGYCZJtauP6OYp7g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "call-me-maybe": "^1.0.1", + "node-fetch": "^2.6.1", + "node-fetch-h2": "^2.3.0", + "node-readfiles": "^0.2.0", + "oas-kit-common": "^1.0.8", + "oas-resolver": "^2.5.6", + "oas-schema-walker": "^1.1.5", + "oas-validator": "^5.0.8", + "reftools": "^1.1.9", + "yaml": "^1.10.0", + "yargs": "^17.0.1" + }, + "bin": { + "boast": "boast.js", + "oas-validate": "oas-validate.js", + "swagger2openapi": "swagger2openapi.js" + }, + "funding": { + "url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/swagger2openapi/node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/swagger2openapi/node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/swagger2openapi/node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/swagger2openapi/node_modules/oas-resolver": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/oas-resolver/-/oas-resolver-2.5.6.tgz", + "integrity": "sha512-Yx5PWQNZomfEhPPOphFbZKi9W93CocQj18NlD2Pa4GWZzdZpSJvYwoiuurRI7m3SpcChrnO08hkuQDL3FGsVFQ==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "node-fetch-h2": "^2.3.0", + "oas-kit-common": "^1.0.8", + "reftools": "^1.1.9", + "yaml": "^1.10.0", + "yargs": "^17.0.1" + }, + "bin": { + "resolve": "resolve.js" + }, + "funding": { + "url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/swagger2openapi/node_modules/oas-validator": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/oas-validator/-/oas-validator-5.0.8.tgz", + "integrity": "sha512-cu20/HE5N5HKqVygs3dt94eYJfBi0TsZvPVXDhbXQHiEityDN+RROTleefoKRKKJ9dFAF2JBkDHgvWj0sjKGmw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "call-me-maybe": "^1.0.1", + "oas-kit-common": "^1.0.8", + "oas-linter": "^3.2.2", + "oas-resolver": "^2.5.6", + "oas-schema-walker": "^1.1.5", + "reftools": "^1.1.9", + "should": "^13.2.1", + "yaml": "^1.10.0" + }, + "funding": { + "url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/swagger2openapi/node_modules/reftools": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/reftools/-/reftools-1.1.9.tgz", + "integrity": "sha512-OVede/NQE13xBQ+ob5CKd5KyeJYU2YInb1bmV4nRoOfquZPkAkxuOXicSe1PvqIuZZ4kD13sPKBbR7UFDmli6w==", + "dev": true, + "license": "BSD-3-Clause", + "funding": { + 
"url": "https://github.com/Mermade/oas-kit?sponsor=1" + } + }, + "node_modules/swagger2openapi/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/swagger2openapi/node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/swagger2openapi/node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 6" + } + }, + "node_modules/swagger2openapi/node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/swagger2openapi/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/tailwindcss": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.11.tgz", + "integrity": "sha512-2E9TBm6MDD/xKYe+dvJZAmg3yxIEDNRc0jwlNyDg/4Fil2QcSLjFKGVff0lAf1jjeaArlG/M75Ey/EYr/OJtBA==", + "license": "MIT" + }, + "node_modules/tapable": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.2.tgz", + "integrity": "sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/tar": { + "version": "7.4.3", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", + "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", + "dev": true, + "license": "ISC", + "dependencies": { + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.0.1", + "mkdirp": "^3.0.1", + "yallist": "^5.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/through": { + "version": "2.3.8", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyglobby": { + "version": "0.2.14", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz", + "integrity": 
"sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tmp": { + "version": "0.0.33", + "dev": true, + "license": "MIT", + "dependencies": { + "os-tmpdir": "~1.0.2" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/token-types": { + "version": "6.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@tokenizer/token": "^0.3.0", + "ieee754": "^1.2.1" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "dev": true, + "license": "MIT" + }, + "node_modules/tree-kill": { + "version": "1.2.2", + "dev": true, + "license": "MIT", + "bin": { + "tree-kill": "cli.js" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD" + }, + "node_modules/type-fest": { + "version": "0.21.3", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.9.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz", + "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/uint8array-extras": { + "version": "1.4.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/undici-types": { + "version": "7.10.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz", + "integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==", + "dev": true, + "license": "MIT" + }, + "node_modules/universalify": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "license": "MIT" + }, + "node_modules/vite": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.1.tgz", + "integrity": "sha512-yJ+Mp7OyV+4S+afWo+QyoL9jFWD11QFH0i5i7JypnfTcA1rmgxCbiA8WwAICDEtZ1Z1hzrVhN8R8rGTqkTY8ZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.4.6", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.14" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" 
+ }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.46.2.tgz", + "integrity": "sha512-Zj3Hl6sN34xJtMv7Anwb5Gu01yujyE/cLBDB2gnHTAHaWS1Z38L7kuSG+oAh0giZMqG060f/YBStXtMH6FvPMA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-android-arm64": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.46.2.tgz", + "integrity": "sha512-nTeCWY83kN64oQ5MGz3CgtPx8NSOhC5lWtsjTs+8JAJNLcP3QbLCtDDgUKQc/Ro/frpMq4SHUaHN6AMltcEoLQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.46.2.tgz", + "integrity": "sha512-HV7bW2Fb/F5KPdM/9bApunQh68YVDU8sO8BvcW9OngQVN3HHHkw99wFupuUJfGR9pYLLAjcAOA6iO+evsbBaPQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-darwin-x64": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.46.2.tgz", + "integrity": "sha512-SSj8TlYV5nJixSsm/y3QXfhspSiLYP11zpfwp6G/YDXctf3Xkdnk4woJIF5VQe0of2OjzTt8EsxnJDCdHd2xMA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.46.2.tgz", + "integrity": "sha512-ZyrsG4TIT9xnOlLsSSi9w/X29tCbK1yegE49RYm3tu3wF1L/B6LVMqnEWyDB26d9Ecx9zrmXCiPmIabVuLmNSg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.46.2.tgz", + "integrity": "sha512-pCgHFoOECwVCJ5GFq8+gR8SBKnMO+xe5UEqbemxBpCKYQddRQMgomv1104RnLSg7nNvgKy05sLsY51+OVRyiVw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.46.2.tgz", + "integrity": "sha512-EtP8aquZ0xQg0ETFcxUbU71MZlHaw9MChwrQzatiE8U/bvi5uv/oChExXC4mWhjiqK7azGJBqU0tt5H123SzVA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.46.2", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.46.2.tgz", + "integrity": "sha512-qO7F7U3u1nfxYRPM8HqFtLd+raev2K137dsV08q/LRKRLEc7RsiDWihUnrINdsWQxPR9jqZ8DIIZ1zJJAm5PjQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.46.2.tgz", + "integrity": "sha512-3dRaqLfcOXYsfvw5xMrxAk9Lb1f395gkoBYzSFcc/scgRFptRXL9DOaDpMiehf9CO8ZDRJW2z45b6fpU5nwjng==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.46.2.tgz", + "integrity": "sha512-fhHFTutA7SM+IrR6lIfiHskxmpmPTJUXpWIsBXpeEwNgZzZZSg/q4i6FU4J8qOGyJ0TR+wXBwx/L7Ho9z0+uDg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-loongarch64-gnu": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.46.2.tgz", + "integrity": "sha512-i7wfGFXu8x4+FRqPymzjD+Hyav8l95UIZ773j7J7zRYc3Xsxy2wIn4x+llpunexXe6laaO72iEjeeGyUFmjKeA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.46.2.tgz", + "integrity": "sha512-B/l0dFcHVUnqcGZWKcWBSV2PF01YUt0Rvlurci5P+neqY/yMKchGU8ullZvIv5e8Y1C6wOn+U03mrDylP5q9Yw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.46.2.tgz", + "integrity": "sha512-32k4ENb5ygtkMwPMucAb8MtV8olkPT03oiTxJbgkJa7lJ7dZMr0GCFJlyvy+K8iq7F/iuOr41ZdUHaOiqyR3iQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.46.2.tgz", + "integrity": "sha512-t5B2loThlFEauloaQkZg9gxV05BYeITLvLkWOkRXogP4qHXLkWSbSHKM9S6H1schf/0YGP/qNKtiISlxvfmmZw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.46.2.tgz", + "integrity": "sha512-YKjekwTEKgbB7n17gmODSmJVUIvj8CX7q5442/CK80L8nqOUbMtf8b01QkG3jOqyr1rotrAnW6B/qiHwfcuWQA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.46.2", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.46.2.tgz", + "integrity": "sha512-7kX69DIrBeD7yNp4A5b81izs8BqoZkCIaxQaOpumcJ1S/kmqNFjPhDu1LHeVXv0SexfHQv5cqHsxLOjETuqDuA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.46.2.tgz", + "integrity": "sha512-wiJWMIpeaak/jsbaq2HMh/rzZxHVW1rU6coyeNNpMwk5isiPjSTx0a4YLSlYDwBH/WBvLz+EtsNqQScZTLJy3g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.46.2.tgz", + "integrity": "sha512-gBgaUDESVzMgWZhcyjfs9QFK16D8K6QZpwAaVNJxYDLHWayOta4ZMjGm/vsAEy3hvlS2GosVFlBlP9/Wb85DqQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.46.2.tgz", + "integrity": "sha512-CvUo2ixeIQGtF6WvuB87XWqPQkoFAFqW+HUo/WzHwuHDvIwZCtjdWXoYCcr06iKGydiqTclC4jU/TNObC/xKZg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/vite/node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/vite/node_modules/rollup": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.46.2.tgz", + "integrity": "sha512-WMmLFI+Boh6xbop+OAGo9cQ3OgX9MIg7xOQjn+pTCwOkk+FNDAeAemXkJ3HzDJrVXleLOFVa1ipuc1AmEx1Dwg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.46.2", + "@rollup/rollup-android-arm64": "4.46.2", + "@rollup/rollup-darwin-arm64": "4.46.2", + "@rollup/rollup-darwin-x64": "4.46.2", + "@rollup/rollup-freebsd-arm64": "4.46.2", + "@rollup/rollup-freebsd-x64": "4.46.2", + "@rollup/rollup-linux-arm-gnueabihf": "4.46.2", + "@rollup/rollup-linux-arm-musleabihf": "4.46.2", + "@rollup/rollup-linux-arm64-gnu": "4.46.2", + "@rollup/rollup-linux-arm64-musl": "4.46.2", + "@rollup/rollup-linux-loongarch64-gnu": "4.46.2", + "@rollup/rollup-linux-ppc64-gnu": "4.46.2", + "@rollup/rollup-linux-riscv64-gnu": "4.46.2", + "@rollup/rollup-linux-riscv64-musl": "4.46.2", + "@rollup/rollup-linux-s390x-gnu": "4.46.2", + "@rollup/rollup-linux-x64-gnu": "4.46.2", + "@rollup/rollup-linux-x64-musl": "4.46.2", + "@rollup/rollup-win32-arm64-msvc": "4.46.2", + "@rollup/rollup-win32-ia32-msvc": "4.46.2", + "@rollup/rollup-win32-x64-msvc": "4.46.2", + "fsevents": "~2.3.2" + } + }, + "node_modules/vitefu": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/vitefu/-/vitefu-1.1.1.tgz", + "integrity": "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ==", + "dev": true, + "license": "MIT", + "workspaces": [ + "tests/deps/*", + "tests/projects/*", + "tests/projects/workspace/packages/*" + ], + "peerDependencies": { + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" + }, + "peerDependenciesMeta": { + "vite": { + "optional": true + } + } + }, + "node_modules/wcwidth": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "defaults": "^1.0.3" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrap-ansi": { + "version": "6.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/yallist": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", + "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/zimmerframe": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/zimmerframe/-/zimmerframe-1.1.2.tgz", + "integrity": "sha512-rAbqEGa8ovJy4pyBxZM70hg4pE6gDgaQ0Sl9M3enG3I0d6H4XSAM3GeNGLKnsBpuijUow064sf7ww1nutC5/3w==", + "dev": true, + "license": "MIT" + } + } +} diff --git a/webapp/package.json b/webapp/package.json new file mode 100644 index 00000000..93b9ef61 --- /dev/null +++ b/webapp/package.json @@ -0,0 +1,43 @@ +{ + "name": "garm-webapp", + "version": "0.0.1", + "private": true, + "scripts": { + "dev": "NODE_ENV=development vite dev --host 0.0.0.0 --port 5173", + "build": "vite build", + "preview": "vite preview", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch" + }, + "devDependencies": { + "@openapitools/openapi-generator-cli": "^2.21.4", + "@sveltejs/adapter-static": "^3.0.1", + "@sveltejs/kit": "^2.0.0", + "@sveltejs/vite-plugin-svelte": "^6.1.0", + "@tailwindcss/forms": "^0.5.7", + "@tailwindcss/postcss": "^4.1.11", + "@types/node": "^24.2.0", + "autoprefixer": "^10.4.16", + "postcss": "^8.4.32", + "svelte": "^5.38.0", + "svelte-check": "^4.3.1", + "swagger-typescript-api": "^13.2.7", + "tailwindcss": "^4.1.11", + "typescript": "^5.0.0", + "vite": "^7.1.1" + }, + "type": "module", + "dependencies": { + "@codemirror/lang-json": "^6.0.2", + "@codemirror/state": "^6.5.2", + "@codemirror/theme-one-dark": "^6.1.3", + "@codemirror/view": "^6.38.1", + "@tailwindcss/typography": "^0.5.10", 
+ "codemirror": "^6.0.2" + }, + "description": "", + "main": "postcss.config.js", + "keywords": [], + "author": "", + "license": "ISC" +} diff --git a/webapp/postcss.config.js b/webapp/postcss.config.js new file mode 100644 index 00000000..571e3e11 --- /dev/null +++ b/webapp/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + '@tailwindcss/postcss': {}, + autoprefixer: {} + } +}; \ No newline at end of file diff --git a/webapp/src/app.css b/webapp/src/app.css new file mode 100644 index 00000000..5b1d339f --- /dev/null +++ b/webapp/src/app.css @@ -0,0 +1,18 @@ +@import "tailwindcss"; + +@theme { + --breakpoint-sm: 640px; + --breakpoint-md: 768px; + --breakpoint-lg: 1024px; + --breakpoint-xl: 1280px; + --breakpoint-2xl: 1536px; +} + +/* Configure dark mode to use class strategy in Tailwind v4 */ +@variant dark (.dark &); + +@layer base { + html { + font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif; + } +} \ No newline at end of file diff --git a/webapp/src/app.d.ts b/webapp/src/app.d.ts new file mode 100644 index 00000000..f451d6bf --- /dev/null +++ b/webapp/src/app.d.ts @@ -0,0 +1,10 @@ +declare global { + namespace App { + interface Error {} + interface Locals {} + interface PageData {} + interface Platform {} + } +} + +export {}; \ No newline at end of file diff --git a/webapp/src/app.html b/webapp/src/app.html new file mode 100644 index 00000000..44b27308 --- /dev/null +++ b/webapp/src/app.html @@ -0,0 +1,78 @@ + + + + + + + + + + + + + %sveltekit.head% + + +
                %sveltekit.body%
                + + diff --git a/webapp/src/lib/api/client.ts b/webapp/src/lib/api/client.ts new file mode 100644 index 00000000..4ceea8df --- /dev/null +++ b/webapp/src/lib/api/client.ts @@ -0,0 +1,77 @@ +// Importing from the generated client wrapper +import { + GeneratedGarmApiClient, + type Repository, + type Organization, + type Enterprise, + type Endpoint, + type Pool, + type ScaleSet, + type Instance, + type ForgeCredentials, + type Provider, + type ControllerInfo, + type CreateRepoParams, + type CreateOrgParams, + type CreateEnterpriseParams, + type CreatePoolParams, + type CreateScaleSetParams, + type UpdateEntityParams, + type UpdatePoolParams, + type LoginRequest, + type LoginResponse, +} from './generated-client.js'; + +// Import endpoint and credentials types directly +import type { + CreateGithubEndpointParams as CreateEndpointParams, + UpdateGithubEndpointParams as UpdateEndpointParams, + CreateGithubCredentialsParams as CreateCredentialsParams, + UpdateGithubCredentialsParams as UpdateCredentialsParams, +} from './generated/api'; + +// Re-export types for compatibility +export type { + Repository, + Organization, + Enterprise, + Endpoint, + Pool, + ScaleSet, + Instance, + ForgeCredentials, + Provider, + ControllerInfo, + CreateRepoParams, + CreateOrgParams, + CreateEnterpriseParams, + CreateEndpointParams, + UpdateEndpointParams, + CreateCredentialsParams, + UpdateCredentialsParams, + CreatePoolParams, + CreateScaleSetParams, + UpdateEntityParams, + UpdatePoolParams, + LoginRequest, + LoginResponse, +}; + +// Legacy APIError type for backward compatibility +export interface APIError { + error: string; + details?: string; +} + +// GarmApiClient now extends/wraps the generated client +export class GarmApiClient extends GeneratedGarmApiClient { + constructor(baseUrl: string = '') { + super(baseUrl); + } + + // All methods are inherited from GeneratedGarmApiClient + // This class now acts as a simple wrapper for backward compatibility +} + +// Create a singleton instance +export const garmApi = new GarmApiClient(); diff --git a/webapp/src/lib/api/generated-client.ts b/webapp/src/lib/api/generated-client.ts new file mode 100644 index 00000000..ce45d28b --- /dev/null +++ b/webapp/src/lib/api/generated-client.ts @@ -0,0 +1,596 @@ +// Generated API Client Wrapper for GARM +// This wraps the auto-generated OpenAPI client to match our existing interface + +import { + LoginApi, + ControllerInfoApi, + ControllerApi, + EndpointsApi, + CredentialsApi, + RepositoriesApi, + OrganizationsApi, + EnterprisesApi, + PoolsApi, + ScalesetsApi, + InstancesApi, + ProvidersApi, + FirstRunApi, + HooksApi, + type Repository, + type Organization, + type Enterprise, + type ForgeEndpoint, + type Pool, + type ScaleSet, + type Instance, + type ForgeCredentials, + type Provider, + type ControllerInfo, + type CreateRepoParams, + type CreateOrgParams, + type CreateEnterpriseParams, + type CreateGithubEndpointParams, + type CreateGiteaEndpointParams, + type UpdateGithubEndpointParams, + type UpdateGiteaEndpointParams, + type CreateGithubCredentialsParams, + type CreateGiteaCredentialsParams, + type UpdateGithubCredentialsParams, + type UpdateGiteaCredentialsParams, + type CreatePoolParams, + type CreateScaleSetParams, + type UpdateEntityParams, + type UpdatePoolParams, + type PasswordLoginParams, + type JWTResponse, + type NewUserParams, + type User, + type UpdateControllerParams, + type HookInfo, + Configuration +} from './generated/index'; + +// Re-export types for compatibility +export type { + Repository, 
+ Organization, + Enterprise, + ForgeEndpoint as Endpoint, + Pool, + ScaleSet, + Instance, + ForgeCredentials, + Provider, + ControllerInfo, + CreateRepoParams, + CreateOrgParams, + CreateEnterpriseParams, + CreateGithubEndpointParams as CreateEndpointParams, + UpdateGithubEndpointParams as UpdateEndpointParams, + CreateGithubCredentialsParams as CreateCredentialsParams, + UpdateGithubCredentialsParams as UpdateCredentialsParams, + CreatePoolParams, + CreateScaleSetParams, + UpdateEntityParams, + UpdatePoolParams, + PasswordLoginParams, + JWTResponse, + NewUserParams, + User, + UpdateControllerParams, +}; + +// Define common request types for compatibility +export interface LoginRequest { + username: string; + password: string; +} + +export interface LoginResponse { + token: string; +} + +export class GeneratedGarmApiClient { + private baseUrl: string; + private token?: string; + private config: Configuration; + + // Check if we're in development mode (cross-origin setup) + private isDevelopmentMode(): boolean { + if (typeof window === 'undefined') return false; + // Development mode: either VITE_GARM_API_URL is set OR we detect cross-origin + return !!(import.meta.env.VITE_GARM_API_URL) || window.location.port === '5173'; + } + + // Generated API client instances + private loginApi: LoginApi; + private controllerInfoApi: ControllerInfoApi; + private controllerApi: ControllerApi; + private endpointsApi: EndpointsApi; + private credentialsApi: CredentialsApi; + private repositoriesApi: RepositoriesApi; + private organizationsApi: OrganizationsApi; + private enterprisesApi: EnterprisesApi; + private poolsApi: PoolsApi; + private scaleSetsApi: ScalesetsApi; + private instancesApi: InstancesApi; + private providersApi: ProvidersApi; + private firstRunApi: FirstRunApi; + private hooksApi: HooksApi; + + constructor(baseUrl: string = '') { + this.baseUrl = baseUrl || window.location.origin; + + // Create configuration for the generated client + const isDevMode = this.isDevelopmentMode(); + this.config = new Configuration({ + basePath: `${this.baseUrl}/api/v1`, + accessToken: () => this.token || '', + baseOptions: { + // In development mode, don't send cookies (use Bearer token only) + // In production mode, include cookies for authentication + withCredentials: !isDevMode, + }, + }); + + // Initialize generated API clients + this.loginApi = new LoginApi(this.config); + this.controllerInfoApi = new ControllerInfoApi(this.config); + this.controllerApi = new ControllerApi(this.config); + this.endpointsApi = new EndpointsApi(this.config); + this.credentialsApi = new CredentialsApi(this.config); + this.repositoriesApi = new RepositoriesApi(this.config); + this.organizationsApi = new OrganizationsApi(this.config); + this.enterprisesApi = new EnterprisesApi(this.config); + this.poolsApi = new PoolsApi(this.config); + this.scaleSetsApi = new ScalesetsApi(this.config); + this.instancesApi = new InstancesApi(this.config); + this.providersApi = new ProvidersApi(this.config); + this.firstRunApi = new FirstRunApi(this.config); + this.hooksApi = new HooksApi(this.config); + } + + // Set authentication token + setToken(token: string) { + this.token = token; + + // Update configuration for all clients + const isDevMode = this.isDevelopmentMode(); + this.config = new Configuration({ + basePath: `${this.baseUrl}/api/v1`, + accessToken: () => token, + baseOptions: { + // In development mode, don't send cookies (use Bearer token only) + // In production mode, include cookies for authentication + withCredentials: 
!isDevMode, + }, + }); + + // Recreate all API instances with new config + this.loginApi = new LoginApi(this.config); + this.controllerInfoApi = new ControllerInfoApi(this.config); + this.controllerApi = new ControllerApi(this.config); + this.endpointsApi = new EndpointsApi(this.config); + this.credentialsApi = new CredentialsApi(this.config); + this.repositoriesApi = new RepositoriesApi(this.config); + this.organizationsApi = new OrganizationsApi(this.config); + this.enterprisesApi = new EnterprisesApi(this.config); + this.poolsApi = new PoolsApi(this.config); + this.scaleSetsApi = new ScalesetsApi(this.config); + this.instancesApi = new InstancesApi(this.config); + this.providersApi = new ProvidersApi(this.config); + this.firstRunApi = new FirstRunApi(this.config); + this.hooksApi = new HooksApi(this.config); + } + + // Authentication + async login(credentials: LoginRequest): Promise { + const params: PasswordLoginParams = { + username: credentials.username, + password: credentials.password, + }; + const response = await this.loginApi.login(params); + const token = response.data.token; + if (token) { + this.setToken(token); + return { token }; + } + throw new Error('Login failed'); + } + + async getControllerInfo(): Promise { + const response = await this.controllerInfoApi.controllerInfo(); + return response.data; + } + + // GitHub Endpoints + async listGithubEndpoints(): Promise { + const response = await this.endpointsApi.listGithubEndpoints(); + return response.data || []; + } + + async getGithubEndpoint(name: string): Promise { + const response = await this.endpointsApi.getGithubEndpoint(name); + return response.data; + } + + async createGithubEndpoint(params: CreateGithubEndpointParams): Promise { + const response = await this.endpointsApi.createGithubEndpoint(params); + return response.data; + } + + async updateGithubEndpoint(name: string, params: UpdateGithubEndpointParams): Promise { + const response = await this.endpointsApi.updateGithubEndpoint(name, params); + return response.data; + } + + async deleteGithubEndpoint(name: string): Promise { + await this.endpointsApi.deleteGithubEndpoint(name); + } + + // Gitea Endpoints + async listGiteaEndpoints(): Promise { + const response = await this.endpointsApi.listGiteaEndpoints(); + return response.data || []; + } + + async getGiteaEndpoint(name: string): Promise { + const response = await this.endpointsApi.getGiteaEndpoint(name); + return response.data; + } + + async createGiteaEndpoint(params: CreateGiteaEndpointParams): Promise { + const response = await this.endpointsApi.createGiteaEndpoint(params); + return response.data; + } + + async updateGiteaEndpoint(name: string, params: UpdateGiteaEndpointParams): Promise { + const response = await this.endpointsApi.updateGiteaEndpoint(name, params); + return response.data; + } + + async deleteGiteaEndpoint(name: string): Promise { + await this.endpointsApi.deleteGiteaEndpoint(name); + } + + // Combined Endpoints helper + async listAllEndpoints(): Promise { + const [githubEndpoints, giteaEndpoints] = await Promise.all([ + this.listGithubEndpoints().catch(() => []), + this.listGiteaEndpoints().catch(() => []) + ]); + + return [ + ...githubEndpoints.map(ep => ({ ...ep, endpoint_type: 'github' as const })), + ...giteaEndpoints.map(ep => ({ ...ep, endpoint_type: 'gitea' as const })) + ]; + } + + // GitHub Credentials + async listGithubCredentials(): Promise { + const response = await this.credentialsApi.listCredentials(); + return response.data || []; + } + + async getGithubCredentials(id: 
number): Promise { + const response = await this.credentialsApi.getCredentials(id); + return response.data; + } + + async createGithubCredentials(params: CreateGithubCredentialsParams): Promise { + const response = await this.credentialsApi.createCredentials(params); + return response.data; + } + + async updateGithubCredentials(id: number, params: UpdateGithubCredentialsParams): Promise { + const response = await this.credentialsApi.updateCredentials(id, params); + return response.data; + } + + async deleteGithubCredentials(id: number): Promise { + await this.credentialsApi.deleteCredentials(id); + } + + // Gitea Credentials + async listGiteaCredentials(): Promise { + const response = await this.credentialsApi.listGiteaCredentials(); + return response.data || []; + } + + async getGiteaCredentials(id: number): Promise { + const response = await this.credentialsApi.getGiteaCredentials(id); + return response.data; + } + + async createGiteaCredentials(params: CreateGiteaCredentialsParams): Promise { + const response = await this.credentialsApi.createGiteaCredentials(params); + return response.data; + } + + async updateGiteaCredentials(id: number, params: UpdateGiteaCredentialsParams): Promise { + const response = await this.credentialsApi.updateGiteaCredentials(id, params); + return response.data; + } + + async deleteGiteaCredentials(id: number): Promise { + await this.credentialsApi.deleteGiteaCredentials(id); + } + + // Combined Credentials helper + async listAllCredentials(): Promise { + const [githubCredentials, giteaCredentials] = await Promise.all([ + this.listGithubCredentials().catch(() => []), + this.listGiteaCredentials().catch(() => []) + ]); + + return [...githubCredentials, ...giteaCredentials]; + } + + // Repositories + async installRepositoryWebhook(repoId: string, params: any = {}): Promise { + await this.repositoriesApi.installRepoWebhook(repoId, params); + } + + async uninstallRepositoryWebhook(repoId: string): Promise { + await this.hooksApi.uninstallRepoWebhook(repoId); + } + + async getRepositoryWebhookInfo(repoId: string): Promise { + const response = await this.hooksApi.getRepoWebhookInfo(repoId); + return response.data; + } + async listRepositories(): Promise { + const response = await this.repositoriesApi.listRepos(); + return response.data || []; + } + + async getRepository(id: string): Promise { + const response = await this.repositoriesApi.getRepo(id); + return response.data; + } + + async createRepository(params: CreateRepoParams): Promise { + const response = await this.repositoriesApi.createRepo(params); + return response.data; + } + + async updateRepository(id: string, params: UpdateEntityParams): Promise { + const response = await this.repositoriesApi.updateRepo(id, params); + return response.data; + } + + async deleteRepository(id: string): Promise { + await this.repositoriesApi.deleteRepo(id); + } + + async installRepoWebhook(id: string): Promise { + await this.repositoriesApi.installRepoWebhook(id, {}); + } + + async listRepositoryPools(id: string): Promise { + const response = await this.repositoriesApi.listRepoPools(id); + return response.data || []; + } + + async listRepositoryInstances(id: string): Promise { + const response = await this.repositoriesApi.listRepoInstances(id); + return response.data || []; + } + + async createRepositoryPool(id: string, params: CreatePoolParams): Promise { + const response = await this.repositoriesApi.createRepoPool(id, params); + return response.data; + } + + // Organizations + async installOrganizationWebhook(orgId: 
string, params: any = {}): Promise { + await this.organizationsApi.installOrgWebhook(orgId, params); + } + + async uninstallOrganizationWebhook(orgId: string): Promise { + await this.hooksApi.uninstallOrgWebhook(orgId); + } + + async getOrganizationWebhookInfo(orgId: string): Promise { + const response = await this.hooksApi.getOrgWebhookInfo(orgId); + return response.data; + } + async listOrganizations(): Promise { + const response = await this.organizationsApi.listOrgs(); + return response.data || []; + } + + async getOrganization(id: string): Promise { + const response = await this.organizationsApi.getOrg(id); + return response.data; + } + + async createOrganization(params: CreateOrgParams): Promise { + const response = await this.organizationsApi.createOrg(params); + return response.data; + } + + async updateOrganization(id: string, params: UpdateEntityParams): Promise { + const response = await this.organizationsApi.updateOrg(id, params); + return response.data; + } + + async deleteOrganization(id: string): Promise { + await this.organizationsApi.deleteOrg(id); + } + + async listOrganizationPools(id: string): Promise { + const response = await this.organizationsApi.listOrgPools(id); + return response.data || []; + } + + async listOrganizationInstances(id: string): Promise { + const response = await this.organizationsApi.listOrgInstances(id); + return response.data || []; + } + + async createOrganizationPool(id: string, params: CreatePoolParams): Promise { + const response = await this.organizationsApi.createOrgPool(id, params); + return response.data; + } + + // Enterprises + async listEnterprises(): Promise { + const response = await this.enterprisesApi.listEnterprises(); + return response.data || []; + } + + async getEnterprise(id: string): Promise { + const response = await this.enterprisesApi.getEnterprise(id); + return response.data; + } + + async createEnterprise(params: CreateEnterpriseParams): Promise { + const response = await this.enterprisesApi.createEnterprise(params); + return response.data; + } + + async updateEnterprise(id: string, params: UpdateEntityParams): Promise { + const response = await this.enterprisesApi.updateEnterprise(id, params); + return response.data; + } + + async deleteEnterprise(id: string): Promise { + await this.enterprisesApi.deleteEnterprise(id); + } + + async listEnterprisePools(id: string): Promise { + const response = await this.enterprisesApi.listEnterprisePools(id); + return response.data || []; + } + + async listEnterpriseInstances(id: string): Promise { + const response = await this.enterprisesApi.listEnterpriseInstances(id); + return response.data || []; + } + + async createEnterprisePool(id: string, params: CreatePoolParams): Promise { + const response = await this.enterprisesApi.createEnterprisePool(id, params); + return response.data; + } + + // Scale sets for repositories, organizations, and enterprises + async createRepositoryScaleSet(id: string, params: CreateScaleSetParams): Promise { + const response = await this.repositoriesApi.createRepoScaleSet(id, params); + return response.data; + } + + async listRepositoryScaleSets(id: string): Promise { + const response = await this.repositoriesApi.listRepoScaleSets(id); + return response.data || []; + } + + async createOrganizationScaleSet(id: string, params: CreateScaleSetParams): Promise { + const response = await this.organizationsApi.createOrgScaleSet(id, params); + return response.data; + } + + async listOrganizationScaleSets(id: string): Promise { + const response = await 
this.organizationsApi.listOrgScaleSets(id); + return response.data || []; + } + + async createEnterpriseScaleSet(id: string, params: CreateScaleSetParams): Promise { + const response = await this.enterprisesApi.createEnterpriseScaleSet(id, params); + return response.data; + } + + async listEnterpriseScaleSets(id: string): Promise { + const response = await this.enterprisesApi.listEnterpriseScaleSets(id); + return response.data || []; + } + + // Pools + async listPools(): Promise { + const response = await this.poolsApi.listPools(); + return response.data || []; + } + + async listAllPools(): Promise { + return this.listPools(); + } + + async getPool(id: string): Promise { + const response = await this.poolsApi.getPool(id); + return response.data; + } + + async updatePool(id: string, params: UpdatePoolParams): Promise { + const response = await this.poolsApi.updatePool(id, params); + return response.data; + } + + async deletePool(id: string): Promise { + await this.poolsApi.deletePool(id); + } + + // Scale Sets + async listScaleSets(): Promise { + const response = await this.scaleSetsApi.listScalesets(); + return response.data || []; + } + + async getScaleSet(id: number): Promise { + const response = await this.scaleSetsApi.getScaleSet(id.toString()); + return response.data; + } + + async updateScaleSet(id: number, params: Partial): Promise { + const response = await this.scaleSetsApi.updateScaleSet(id.toString(), params); + return response.data; + } + + async deleteScaleSet(id: number): Promise { + await this.scaleSetsApi.deleteScaleSet(id.toString()); + } + + // Instances + async listInstances(): Promise { + const response = await this.instancesApi.listInstances(); + return response.data || []; + } + + async getInstance(name: string): Promise { + const response = await this.instancesApi.getInstance(name); + return response.data; + } + + async deleteInstance(name: string): Promise { + await this.instancesApi.deleteInstance(name); + } + + // Providers + async listProviders(): Promise { + const response = await this.providersApi.listProviders(); + return response.data || []; + } + + // Compatibility aliases + async listCredentials(): Promise { + return this.listAllCredentials(); + } + + async listEndpoints(): Promise { + return this.listAllEndpoints(); + } + + // First-run initialization + async firstRun(params: NewUserParams): Promise { + const response = await this.firstRunApi.firstRun(params); + return response.data; + } + + // Controller management + async updateController(params: UpdateControllerParams): Promise { + const response = await this.controllerApi.updateController(params); + return response.data; + } +} + +// Create a singleton instance +export const generatedGarmApi = new GeneratedGarmApiClient(); \ No newline at end of file diff --git a/webapp/src/lib/api/generated/.gitignore b/webapp/src/lib/api/generated/.gitignore new file mode 100644 index 00000000..149b5765 --- /dev/null +++ b/webapp/src/lib/api/generated/.gitignore @@ -0,0 +1,4 @@ +wwwroot/*.js +node_modules +typings +dist diff --git a/webapp/src/lib/api/generated/.npmignore b/webapp/src/lib/api/generated/.npmignore new file mode 100644 index 00000000..999d88df --- /dev/null +++ b/webapp/src/lib/api/generated/.npmignore @@ -0,0 +1 @@ +# empty npmignore to ensure all required files (e.g., in the dist folder) are published by npm \ No newline at end of file diff --git a/webapp/src/lib/api/generated/.openapi-generator-ignore b/webapp/src/lib/api/generated/.openapi-generator-ignore new file mode 100644 index 
00000000..7484ee59 --- /dev/null +++ b/webapp/src/lib/api/generated/.openapi-generator-ignore @@ -0,0 +1,23 @@ +# OpenAPI Generator Ignore +# Generated by openapi-generator https://github.com/openapitools/openapi-generator + +# Use this file to prevent files from being overwritten by the generator. +# The patterns follow closely to .gitignore or .dockerignore. + +# As an example, the C# client generator defines ApiClient.cs. +# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: +#ApiClient.cs + +# You can match any string of characters against a directory, file or extension with a single asterisk (*): +#foo/*/qux +# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux + +# You can recursively match patterns against a directory, file or extension with a double asterisk (**): +#foo/**/qux +# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux + +# You can also negate patterns with an exclamation (!). +# For example, you can ignore all files in a docs folder with the file extension .md: +#docs/*.md +# Then explicitly reverse the ignore rule for a single file: +#!docs/README.md diff --git a/webapp/src/lib/api/generated/.openapi-generator/FILES b/webapp/src/lib/api/generated/.openapi-generator/FILES new file mode 100644 index 00000000..5f89d927 --- /dev/null +++ b/webapp/src/lib/api/generated/.openapi-generator/FILES @@ -0,0 +1,70 @@ +.gitignore +.npmignore +.openapi-generator-ignore +api.ts +base.ts +common.ts +configuration.ts +docs/APIErrorResponse.md +docs/Address.md +docs/ControllerApi.md +docs/ControllerInfo.md +docs/ControllerInfoApi.md +docs/CreateEnterpriseParams.md +docs/CreateGiteaCredentialsParams.md +docs/CreateGiteaEndpointParams.md +docs/CreateGithubCredentialsParams.md +docs/CreateGithubEndpointParams.md +docs/CreateOrgParams.md +docs/CreatePoolParams.md +docs/CreateRepoParams.md +docs/CreateScaleSetParams.md +docs/CredentialsApi.md +docs/EndpointsApi.md +docs/Enterprise.md +docs/EnterprisesApi.md +docs/EntityEvent.md +docs/FirstRunApi.md +docs/ForgeCredentials.md +docs/ForgeEndpoint.md +docs/ForgeEntity.md +docs/GithubApp.md +docs/GithubPAT.md +docs/GithubRateLimit.md +docs/HookInfo.md +docs/HooksApi.md +docs/InstallWebhookParams.md +docs/Instance.md +docs/InstancesApi.md +docs/JWTResponse.md +docs/Job.md +docs/JobsApi.md +docs/LoginApi.md +docs/MetricsTokenApi.md +docs/NewUserParams.md +docs/Organization.md +docs/OrganizationsApi.md +docs/PasswordLoginParams.md +docs/Pool.md +docs/PoolManagerStatus.md +docs/PoolsApi.md +docs/Provider.md +docs/ProvidersApi.md +docs/RepositoriesApi.md +docs/Repository.md +docs/RunnerPrefix.md +docs/ScaleSet.md +docs/ScalesetsApi.md +docs/StatusMessage.md +docs/Tag.md +docs/UpdateControllerParams.md +docs/UpdateEntityParams.md +docs/UpdateGiteaCredentialsParams.md +docs/UpdateGiteaEndpointParams.md +docs/UpdateGithubCredentialsParams.md +docs/UpdateGithubEndpointParams.md +docs/UpdatePoolParams.md +docs/UpdateScaleSetParams.md +docs/User.md +git_push.sh +index.ts diff --git a/webapp/src/lib/api/generated/.openapi-generator/VERSION b/webapp/src/lib/api/generated/.openapi-generator/VERSION new file mode 100644 index 00000000..e465da43 --- /dev/null +++ b/webapp/src/lib/api/generated/.openapi-generator/VERSION @@ -0,0 +1 @@ +7.14.0 diff --git a/webapp/src/lib/api/generated/api.ts b/webapp/src/lib/api/generated/api.ts new file mode 100644 index 00000000..41d28ce1 --- /dev/null +++ b/webapp/src/lib/api/generated/api.ts @@ -0,0 +1,11684 @@ +/* tslint:disable */ +/* 
eslint-disable */ +/** + * Garm API. + * The Garm API generated using go-swagger. + * + * The version of the OpenAPI document: 1.0.0 + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. + */ + + +import type { Configuration } from './configuration'; +import type { AxiosPromise, AxiosInstance, RawAxiosRequestConfig } from 'axios'; +import globalAxios from 'axios'; +// Some imports not used depending on template conditions +// @ts-ignore +import { DUMMY_BASE_URL, assertParamExists, setApiKeyToObject, setBasicAuthToObject, setBearerAuthToObject, setOAuthToObject, setSearchParams, serializeDataIfNeeded, toPathString, createRequestFunction } from './common'; +import type { RequestArgs } from './base'; +// @ts-ignore +import { BASE_PATH, COLLECTION_FORMATS, BaseAPI, RequiredError, operationServerMap } from './base'; + +/** + * + * @export + * @interface APIErrorResponse + */ +export interface APIErrorResponse { + /** + * + * @type {string} + * @memberof APIErrorResponse + */ + 'details'?: string; + /** + * + * @type {string} + * @memberof APIErrorResponse + */ + 'error'?: string; +} +/** + * + * @export + * @interface Address + */ +export interface Address { + /** + * + * @type {string} + * @memberof Address + */ + 'address'?: string; + /** + * + * @type {string} + * @memberof Address + */ + 'type'?: string; +} +/** + * + * @export + * @interface ControllerInfo + */ +export interface ControllerInfo { + /** + * CallbackURL is the URL where instances can send updates back to the controller. This URL is used by instances to send status updates back to the controller. The URL itself may be made available to instances via a reverse proxy or a load balancer. That means that the user is responsible for telling GARM what the public URL is, by setting this field. + * @type {string} + * @memberof ControllerInfo + */ + 'callback_url'?: string; + /** + * ControllerID is the unique ID of this controller. This ID gets generated automatically on controller init. + * @type {string} + * @memberof ControllerInfo + */ + 'controller_id'?: string; + /** + * ControllerWebhookURL is the controller specific URL where webhooks will be received. This field holds the WebhookURL defined above to which we append the ControllerID. Functionally it is the same as WebhookURL, but it allows us to safely manage webhooks from GARM without accidentally removing webhooks from other services or GARM controllers. + * @type {string} + * @memberof ControllerInfo + */ + 'controller_webhook_url'?: string; + /** + * Hostname is the hostname of the machine that runs this controller. In the future, this field will be migrated to a separate table that will keep track of each the controller nodes that are part of a cluster. This will happen when we implement controller scale-out capability. + * @type {string} + * @memberof ControllerInfo + */ + 'hostname'?: string; + /** + * MetadataURL is the public metadata URL of the GARM instance. This URL is used by instances to fetch information they need to set themselves up. The URL itself may be made available to runners via a reverse proxy or a load balancer. That means that the user is responsible for telling GARM what the public URL is, by setting this field. 
+ * @type {string} + * @memberof ControllerInfo + */ + 'metadata_url'?: string; + /** + * MinimumJobAgeBackoff is the minimum time in seconds that a job must be in queued state before GARM will attempt to allocate a runner for it. When set to a non zero value, GARM will ignore the job until the job\'s age is greater than this value. When using the min_idle_runners feature of a pool, this gives enough time for potential idle runners to pick up the job before GARM attempts to allocate a new runner, thus avoiding the need to potentially scale down runners later. + * @type {number} + * @memberof ControllerInfo + */ + 'minimum_job_age_backoff'?: number; + /** + * Version is the version of the GARM controller. + * @type {string} + * @memberof ControllerInfo + */ + 'version'?: string; + /** + * WebhookURL is the base URL where the controller will receive webhooks from github. When webhook management is used, this URL is used as a base to which the controller UUID is appended and which will receive the webhooks. The URL itself may be made available to instances via a reverse proxy or a load balancer. That means that the user is responsible for telling GARM what the public URL is, by setting this field. + * @type {string} + * @memberof ControllerInfo + */ + 'webhook_url'?: string; +} +/** + * + * @export + * @interface CreateEnterpriseParams + */ +export interface CreateEnterpriseParams { + /** + * + * @type {string} + * @memberof CreateEnterpriseParams + */ + 'credentials_name'?: string; + /** + * + * @type {string} + * @memberof CreateEnterpriseParams + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof CreateEnterpriseParams + */ + 'pool_balancer_type'?: string; + /** + * + * @type {string} + * @memberof CreateEnterpriseParams + */ + 'webhook_secret'?: string; +} +/** + * + * @export + * @interface CreateGiteaCredentialsParams + */ +export interface CreateGiteaCredentialsParams { + /** + * + * @type {GithubApp} + * @memberof CreateGiteaCredentialsParams + */ + 'app'?: GithubApp; + /** + * + * @type {string} + * @memberof CreateGiteaCredentialsParams + */ + 'auth_type'?: string; + /** + * + * @type {string} + * @memberof CreateGiteaCredentialsParams + */ + 'description'?: string; + /** + * + * @type {string} + * @memberof CreateGiteaCredentialsParams + */ + 'endpoint'?: string; + /** + * + * @type {string} + * @memberof CreateGiteaCredentialsParams + */ + 'name'?: string; + /** + * + * @type {GithubPAT} + * @memberof CreateGiteaCredentialsParams + */ + 'pat'?: GithubPAT; +} +/** + * + * @export + * @interface CreateGiteaEndpointParams + */ +export interface CreateGiteaEndpointParams { + /** + * + * @type {string} + * @memberof CreateGiteaEndpointParams + */ + 'api_base_url'?: string; + /** + * + * @type {string} + * @memberof CreateGiteaEndpointParams + */ + 'base_url'?: string; + /** + * + * @type {Array} + * @memberof CreateGiteaEndpointParams + */ + 'ca_cert_bundle'?: Array; + /** + * + * @type {string} + * @memberof CreateGiteaEndpointParams + */ + 'description'?: string; + /** + * + * @type {string} + * @memberof CreateGiteaEndpointParams + */ + 'name'?: string; +} +/** + * + * @export + * @interface CreateGithubCredentialsParams + */ +export interface CreateGithubCredentialsParams { + /** + * + * @type {GithubApp} + * @memberof CreateGithubCredentialsParams + */ + 'app'?: GithubApp; + /** + * + * @type {string} + * @memberof CreateGithubCredentialsParams + */ + 'auth_type'?: string; + /** + * + * @type {string} + * @memberof CreateGithubCredentialsParams + */ + 'description'?: 
string; + /** + * + * @type {string} + * @memberof CreateGithubCredentialsParams + */ + 'endpoint'?: string; + /** + * + * @type {string} + * @memberof CreateGithubCredentialsParams + */ + 'name'?: string; + /** + * + * @type {GithubPAT} + * @memberof CreateGithubCredentialsParams + */ + 'pat'?: GithubPAT; +} +/** + * + * @export + * @interface CreateGithubEndpointParams + */ +export interface CreateGithubEndpointParams { + /** + * + * @type {string} + * @memberof CreateGithubEndpointParams + */ + 'api_base_url'?: string; + /** + * + * @type {string} + * @memberof CreateGithubEndpointParams + */ + 'base_url'?: string; + /** + * + * @type {Array} + * @memberof CreateGithubEndpointParams + */ + 'ca_cert_bundle'?: Array; + /** + * + * @type {string} + * @memberof CreateGithubEndpointParams + */ + 'description'?: string; + /** + * + * @type {string} + * @memberof CreateGithubEndpointParams + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof CreateGithubEndpointParams + */ + 'upload_base_url'?: string; +} +/** + * + * @export + * @interface CreateOrgParams + */ +export interface CreateOrgParams { + /** + * + * @type {string} + * @memberof CreateOrgParams + */ + 'credentials_name'?: string; + /** + * + * @type {string} + * @memberof CreateOrgParams + */ + 'forge_type'?: string; + /** + * + * @type {string} + * @memberof CreateOrgParams + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof CreateOrgParams + */ + 'pool_balancer_type'?: string; + /** + * + * @type {string} + * @memberof CreateOrgParams + */ + 'webhook_secret'?: string; +} +/** + * + * @export + * @interface CreatePoolParams + */ +export interface CreatePoolParams { + /** + * + * @type {boolean} + * @memberof CreatePoolParams + */ + 'enabled'?: boolean; + /** + * + * @type {object} + * @memberof CreatePoolParams + */ + 'extra_specs'?: object; + /** + * + * @type {string} + * @memberof CreatePoolParams + */ + 'flavor'?: string; + /** + * GithubRunnerGroup is the github runner group in which the runners of this pool will be added to. The runner group must be created by someone with access to the enterprise. 
+ * @type {string} + * @memberof CreatePoolParams + */ + 'github-runner-group'?: string; + /** + * + * @type {string} + * @memberof CreatePoolParams + */ + 'image'?: string; + /** + * + * @type {number} + * @memberof CreatePoolParams + */ + 'max_runners'?: number; + /** + * + * @type {number} + * @memberof CreatePoolParams + */ + 'min_idle_runners'?: number; + /** + * + * @type {string} + * @memberof CreatePoolParams + */ + 'os_arch'?: string; + /** + * + * @type {string} + * @memberof CreatePoolParams + */ + 'os_type'?: string; + /** + * + * @type {number} + * @memberof CreatePoolParams + */ + 'priority'?: number; + /** + * + * @type {string} + * @memberof CreatePoolParams + */ + 'provider_name'?: string; + /** + * + * @type {number} + * @memberof CreatePoolParams + */ + 'runner_bootstrap_timeout'?: number; + /** + * + * @type {string} + * @memberof CreatePoolParams + */ + 'runner_prefix'?: string; + /** + * + * @type {Array} + * @memberof CreatePoolParams + */ + 'tags'?: Array; +} +/** + * + * @export + * @interface CreateRepoParams + */ +export interface CreateRepoParams { + /** + * + * @type {string} + * @memberof CreateRepoParams + */ + 'credentials_name'?: string; + /** + * + * @type {string} + * @memberof CreateRepoParams + */ + 'forge_type'?: string; + /** + * + * @type {string} + * @memberof CreateRepoParams + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof CreateRepoParams + */ + 'owner'?: string; + /** + * + * @type {string} + * @memberof CreateRepoParams + */ + 'pool_balancer_type'?: string; + /** + * + * @type {string} + * @memberof CreateRepoParams + */ + 'webhook_secret'?: string; +} +/** + * + * @export + * @interface CreateScaleSetParams + */ +export interface CreateScaleSetParams { + /** + * + * @type {boolean} + * @memberof CreateScaleSetParams + */ + 'disable_update'?: boolean; + /** + * + * @type {boolean} + * @memberof CreateScaleSetParams + */ + 'enabled'?: boolean; + /** + * + * @type {object} + * @memberof CreateScaleSetParams + */ + 'extra_specs'?: object; + /** + * + * @type {string} + * @memberof CreateScaleSetParams + */ + 'flavor'?: string; + /** + * GithubRunnerGroup is the github runner group in which the runners of this pool will be added to. The runner group must be created by someone with access to the enterprise. 
+ * @type {string} + * @memberof CreateScaleSetParams + */ + 'github-runner-group'?: string; + /** + * + * @type {string} + * @memberof CreateScaleSetParams + */ + 'image'?: string; + /** + * + * @type {number} + * @memberof CreateScaleSetParams + */ + 'max_runners'?: number; + /** + * + * @type {number} + * @memberof CreateScaleSetParams + */ + 'min_idle_runners'?: number; + /** + * + * @type {string} + * @memberof CreateScaleSetParams + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof CreateScaleSetParams + */ + 'os_arch'?: string; + /** + * + * @type {string} + * @memberof CreateScaleSetParams + */ + 'os_type'?: string; + /** + * + * @type {string} + * @memberof CreateScaleSetParams + */ + 'provider_name'?: string; + /** + * + * @type {number} + * @memberof CreateScaleSetParams + */ + 'runner_bootstrap_timeout'?: number; + /** + * + * @type {string} + * @memberof CreateScaleSetParams + */ + 'runner_prefix'?: string; + /** + * + * @type {number} + * @memberof CreateScaleSetParams + */ + 'scale_set_id'?: number; + /** + * + * @type {Array} + * @memberof CreateScaleSetParams + */ + 'tags'?: Array; +} +/** + * + * @export + * @interface Enterprise + */ +export interface Enterprise { + /** + * + * @type {string} + * @memberof Enterprise + */ + 'created_at'?: string; + /** + * + * @type {ForgeCredentials} + * @memberof Enterprise + */ + 'credentials'?: ForgeCredentials; + /** + * + * @type {number} + * @memberof Enterprise + */ + 'credentials_id'?: number; + /** + * CredentialName is the name of the credentials associated with the enterprise. This field is now deprecated. Use CredentialsID instead. This field will be removed in v0.2.0. + * @type {string} + * @memberof Enterprise + */ + 'credentials_name'?: string; + /** + * + * @type {ForgeEndpoint} + * @memberof Enterprise + */ + 'endpoint'?: ForgeEndpoint; + /** + * + * @type {Array} + * @memberof Enterprise + */ + 'events'?: Array; + /** + * + * @type {string} + * @memberof Enterprise + */ + 'id'?: string; + /** + * + * @type {string} + * @memberof Enterprise + */ + 'name'?: string; + /** + * + * @type {Array} + * @memberof Enterprise + */ + 'pool'?: Array; + /** + * + * @type {string} + * @memberof Enterprise + */ + 'pool_balancing_type'?: string; + /** + * + * @type {PoolManagerStatus} + * @memberof Enterprise + */ + 'pool_manager_status'?: PoolManagerStatus; + /** + * + * @type {string} + * @memberof Enterprise + */ + 'updated_at'?: string; +} +/** + * + * @export + * @interface EntityEvent + */ +export interface EntityEvent { + /** + * + * @type {string} + * @memberof EntityEvent + */ + 'created_at'?: string; + /** + * + * @type {string} + * @memberof EntityEvent + */ + 'event_level'?: string; + /** + * + * @type {string} + * @memberof EntityEvent + */ + 'event_type'?: string; + /** + * + * @type {number} + * @memberof EntityEvent + */ + 'id'?: number; + /** + * + * @type {string} + * @memberof EntityEvent + */ + 'message'?: string; +} +/** + * + * @export + * @interface ForgeCredentials + */ +export interface ForgeCredentials { + /** + * + * @type {string} + * @memberof ForgeCredentials + */ + 'api_base_url'?: string; + /** + * + * @type {string} + * @memberof ForgeCredentials + */ + 'auth-type'?: string; + /** + * + * @type {string} + * @memberof ForgeCredentials + */ + 'base_url'?: string; + /** + * + * @type {Array} + * @memberof ForgeCredentials + */ + 'ca_bundle'?: Array; + /** + * + * @type {string} + * @memberof ForgeCredentials + */ + 'created_at'?: string; + /** + * + * @type {string} + * @memberof 
ForgeCredentials + */ + 'description'?: string; + /** + * + * @type {ForgeEndpoint} + * @memberof ForgeCredentials + */ + 'endpoint'?: ForgeEndpoint; + /** + * + * @type {Array} + * @memberof ForgeCredentials + */ + 'enterprises'?: Array; + /** + * + * @type {string} + * @memberof ForgeCredentials + */ + 'forge_type'?: string; + /** + * + * @type {number} + * @memberof ForgeCredentials + */ + 'id'?: number; + /** + * + * @type {string} + * @memberof ForgeCredentials + */ + 'name'?: string; + /** + * + * @type {Array} + * @memberof ForgeCredentials + */ + 'organizations'?: Array; + /** + * + * @type {GithubRateLimit} + * @memberof ForgeCredentials + */ + 'rate_limit'?: GithubRateLimit; + /** + * + * @type {Array} + * @memberof ForgeCredentials + */ + 'repositories'?: Array; + /** + * + * @type {string} + * @memberof ForgeCredentials + */ + 'updated_at'?: string; + /** + * + * @type {string} + * @memberof ForgeCredentials + */ + 'upload_base_url'?: string; +} +/** + * + * @export + * @interface ForgeEndpoint + */ +export interface ForgeEndpoint { + /** + * + * @type {string} + * @memberof ForgeEndpoint + */ + 'api_base_url'?: string; + /** + * + * @type {string} + * @memberof ForgeEndpoint + */ + 'base_url'?: string; + /** + * + * @type {Array} + * @memberof ForgeEndpoint + */ + 'ca_cert_bundle'?: Array; + /** + * + * @type {string} + * @memberof ForgeEndpoint + */ + 'created_at'?: string; + /** + * + * @type {string} + * @memberof ForgeEndpoint + */ + 'description'?: string; + /** + * + * @type {string} + * @memberof ForgeEndpoint + */ + 'endpoint_type'?: string; + /** + * + * @type {string} + * @memberof ForgeEndpoint + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof ForgeEndpoint + */ + 'updated_at'?: string; + /** + * + * @type {string} + * @memberof ForgeEndpoint + */ + 'upload_base_url'?: string; +} +/** + * + * @export + * @interface ForgeEntity + */ +export interface ForgeEntity { + /** + * + * @type {number} + * @memberof ForgeEntity + */ + 'agent_id'?: number; + /** + * + * @type {string} + * @memberof ForgeEntity + */ + 'os_name'?: string; + /** + * + * @type {string} + * @memberof ForgeEntity + */ + 'os_version'?: string; +} +/** + * + * @export + * @interface GithubApp + */ +export interface GithubApp { + /** + * + * @type {number} + * @memberof GithubApp + */ + 'app_id'?: number; + /** + * + * @type {number} + * @memberof GithubApp + */ + 'installation_id'?: number; + /** + * + * @type {Array} + * @memberof GithubApp + */ + 'private_key_bytes'?: Array; +} +/** + * + * @export + * @interface GithubPAT + */ +export interface GithubPAT { + /** + * + * @type {string} + * @memberof GithubPAT + */ + 'oauth2_token'?: string; +} +/** + * + * @export + * @interface GithubRateLimit + */ +export interface GithubRateLimit { + /** + * + * @type {number} + * @memberof GithubRateLimit + */ + 'limit'?: number; + /** + * + * @type {number} + * @memberof GithubRateLimit + */ + 'remaining'?: number; + /** + * + * @type {number} + * @memberof GithubRateLimit + */ + 'reset'?: number; + /** + * + * @type {number} + * @memberof GithubRateLimit + */ + 'used'?: number; +} +/** + * + * @export + * @interface HookInfo + */ +export interface HookInfo { + /** + * + * @type {boolean} + * @memberof HookInfo + */ + 'active'?: boolean; + /** + * + * @type {Array} + * @memberof HookInfo + */ + 'events'?: Array; + /** + * + * @type {number} + * @memberof HookInfo + */ + 'id'?: number; + /** + * + * @type {boolean} + * @memberof HookInfo + */ + 'insecure_ssl'?: boolean; + /** + * + * @type 
{string} + * @memberof HookInfo + */ + 'url'?: string; +} +/** + * + * @export + * @interface InstallWebhookParams + */ +export interface InstallWebhookParams { + /** + * + * @type {boolean} + * @memberof InstallWebhookParams + */ + 'insecure_ssl'?: boolean; + /** + * + * @type {string} + * @memberof InstallWebhookParams + */ + 'webhook_endpoint_type'?: string; +} +/** + * + * @export + * @interface Instance + */ +export interface Instance { + /** + * Addresses is a list of IP addresses the provider reports for this instance. + * @type {Array
<Address>} + * @memberof Instance + */ + 'addresses'?: Array<Address>
                ; + /** + * AgentID is the github runner agent ID. + * @type {number} + * @memberof Instance + */ + 'agent_id'?: number; + /** + * CreatedAt is the timestamp of the creation of this runner. + * @type {string} + * @memberof Instance + */ + 'created_at'?: string; + /** + * GithubRunnerGroup is the github runner group to which the runner belongs. The runner group must be created by someone with access to the enterprise. + * @type {string} + * @memberof Instance + */ + 'github-runner-group'?: string; + /** + * ID is the database ID of this instance. + * @type {string} + * @memberof Instance + */ + 'id'?: string; + /** + * + * @type {Job} + * @memberof Instance + */ + 'job'?: Job; + /** + * Name is the name associated with an instance. Depending on the provider, this may or may not be useful in the context of the provider, but we can use it internally to identify the instance. + * @type {string} + * @memberof Instance + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof Instance + */ + 'os_arch'?: string; + /** + * OSName is the name of the OS. Eg: ubuntu, centos, etc. + * @type {string} + * @memberof Instance + */ + 'os_name'?: string; + /** + * + * @type {string} + * @memberof Instance + */ + 'os_type'?: string; + /** + * OSVersion is the version of the operating system. + * @type {string} + * @memberof Instance + */ + 'os_version'?: string; + /** + * PoolID is the ID of the garm pool to which a runner belongs. + * @type {string} + * @memberof Instance + */ + 'pool_id'?: string; + /** + * ProviderFault holds any error messages captured from the IaaS provider that is responsible for managing the lifecycle of the runner. + * @type {Array} + * @memberof Instance + */ + 'provider_fault'?: Array; + /** + * PeoviderID is the unique ID the provider associated with the compute instance. We use this to identify the instance in the provider. + * @type {string} + * @memberof Instance + */ + 'provider_id'?: string; + /** + * ProviderName is the name of the IaaS where the instance was created. + * @type {string} + * @memberof Instance + */ + 'provider_name'?: string; + /** + * + * @type {string} + * @memberof Instance + */ + 'runner_status'?: string; + /** + * ScaleSetID is the ID of the scale set to which a runner belongs. + * @type {number} + * @memberof Instance + */ + 'scale_set_id'?: number; + /** + * + * @type {string} + * @memberof Instance + */ + 'status'?: string; + /** + * StatusMessages is a list of status messages sent back by the runner as it sets itself up. + * @type {Array} + * @memberof Instance + */ + 'status_messages'?: Array; + /** + * UpdatedAt is the timestamp of the last update to this runner. + * @type {string} + * @memberof Instance + */ + 'updated_at'?: string; +} +/** + * JWTResponse holds the JWT token returned as a result of a successful auth + * @export + * @interface JWTResponse + */ +export interface JWTResponse { + /** + * + * @type {string} + * @memberof JWTResponse + */ + 'token'?: string; +} +/** + * + * @export + * @interface Job + */ +export interface Job { + /** + * Action is the specific activity that triggered the event. + * @type {string} + * @memberof Job + */ + 'action'?: string; + /** + * + * @type {string} + * @memberof Job + */ + 'completed_at'?: string; + /** + * Conclusion is the outcome of the job. 
Possible values: \"success\", \"failure\", \"neutral\", \"cancelled\", \"skipped\", \"timed_out\", \"action_required\" + * @type {string} + * @memberof Job + */ + 'conclusion'?: string; + /** + * + * @type {string} + * @memberof Job + */ + 'created_at'?: string; + /** + * + * @type {string} + * @memberof Job + */ + 'enterprise_id'?: string; + /** + * ID is the ID of the job. + * @type {number} + * @memberof Job + */ + 'id'?: number; + /** + * + * @type {Array} + * @memberof Job + */ + 'labels'?: Array; + /** + * + * @type {string} + * @memberof Job + */ + 'locked_by'?: string; + /** + * Name is the name if the job that was triggered. + * @type {string} + * @memberof Job + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof Job + */ + 'org_id'?: string; + /** + * The entity that received the hook. Webhooks may be configured on the repo, the org and/or the enterprise. If we only configure a repo to use garm, we\'ll only ever receive a webhook from the repo. But if we configure the parent org of the repo and the parent enterprise of the org to use garm, a webhook will be sent for each entity type, in response to one workflow event. Thus, we will get 3 webhooks with the same run_id and job id. Record all involved entities in the same job if we have them configured in garm. + * @type {string} + * @memberof Job + */ + 'repo_id'?: string; + /** + * repository in which the job was triggered. + * @type {string} + * @memberof Job + */ + 'repository_name'?: string; + /** + * + * @type {string} + * @memberof Job + */ + 'repository_owner'?: string; + /** + * RunID is the ID of the workflow run. A run may have multiple jobs. + * @type {number} + * @memberof Job + */ + 'run_id'?: number; + /** + * + * @type {number} + * @memberof Job + */ + 'runner_group_id'?: number; + /** + * + * @type {string} + * @memberof Job + */ + 'runner_group_name'?: string; + /** + * + * @type {number} + * @memberof Job + */ + 'runner_id'?: number; + /** + * + * @type {string} + * @memberof Job + */ + 'runner_name'?: string; + /** + * ScaleSetJobID is the job ID when generated for a scale set. + * @type {string} + * @memberof Job + */ + 'scaleset_job_id'?: string; + /** + * + * @type {string} + * @memberof Job + */ + 'started_at'?: string; + /** + * Status is the phase of the lifecycle that the job is currently in. \"queued\", \"in_progress\" and \"completed\". + * @type {string} + * @memberof Job + */ + 'status'?: string; + /** + * + * @type {string} + * @memberof Job + */ + 'updated_at'?: string; + /** + * + * @type {number} + * @memberof Job + */ + 'workflow_job_id'?: number; +} +/** + * NewUserParams holds the needed information to create a new user + * @export + * @interface NewUserParams + */ +export interface NewUserParams { + /** + * + * @type {string} + * @memberof NewUserParams + */ + 'email'?: string; + /** + * + * @type {string} + * @memberof NewUserParams + */ + 'full_name'?: string; + /** + * + * @type {string} + * @memberof NewUserParams + */ + 'password'?: string; + /** + * + * @type {string} + * @memberof NewUserParams + */ + 'username'?: string; +} +/** + * + * @export + * @interface Organization + */ +export interface Organization { + /** + * + * @type {string} + * @memberof Organization + */ + 'created_at'?: string; + /** + * + * @type {ForgeCredentials} + * @memberof Organization + */ + 'credentials'?: ForgeCredentials; + /** + * + * @type {number} + * @memberof Organization + */ + 'credentials_id'?: number; + /** + * CredentialName is the name of the credentials associated with the enterprise. 
This field is now deprecated. Use CredentialsID instead. This field will be removed in v0.2.0. + * @type {string} + * @memberof Organization + */ + 'credentials_name'?: string; + /** + * + * @type {ForgeEndpoint} + * @memberof Organization + */ + 'endpoint'?: ForgeEndpoint; + /** + * + * @type {Array} + * @memberof Organization + */ + 'events'?: Array; + /** + * + * @type {string} + * @memberof Organization + */ + 'id'?: string; + /** + * + * @type {string} + * @memberof Organization + */ + 'name'?: string; + /** + * + * @type {Array} + * @memberof Organization + */ + 'pool'?: Array; + /** + * + * @type {string} + * @memberof Organization + */ + 'pool_balancing_type'?: string; + /** + * + * @type {PoolManagerStatus} + * @memberof Organization + */ + 'pool_manager_status'?: PoolManagerStatus; + /** + * + * @type {string} + * @memberof Organization + */ + 'updated_at'?: string; +} +/** + * + * @export + * @interface PasswordLoginParams + */ +export interface PasswordLoginParams { + /** + * + * @type {string} + * @memberof PasswordLoginParams + */ + 'password'?: string; + /** + * + * @type {string} + * @memberof PasswordLoginParams + */ + 'username'?: string; +} +/** + * + * @export + * @interface Pool + */ +export interface Pool { + /** + * + * @type {string} + * @memberof Pool + */ + 'created_at'?: string; + /** + * + * @type {boolean} + * @memberof Pool + */ + 'enabled'?: boolean; + /** + * + * @type {ForgeEndpoint} + * @memberof Pool + */ + 'endpoint'?: ForgeEndpoint; + /** + * + * @type {string} + * @memberof Pool + */ + 'enterprise_id'?: string; + /** + * + * @type {string} + * @memberof Pool + */ + 'enterprise_name'?: string; + /** + * ExtraSpecs is an opaque raw json that gets sent to the provider as part of the bootstrap params for instances. It can contain any kind of data needed by providers. The contents of this field means nothing to garm itself. We don\'t act on the information in this field at all. We only validate that it\'s a proper json. + * @type {object} + * @memberof Pool + */ + 'extra_specs'?: object; + /** + * + * @type {string} + * @memberof Pool + */ + 'flavor'?: string; + /** + * GithubRunnerGroup is the github runner group in which the runners will be added. The runner group must be created by someone with access to the enterprise. + * @type {string} + * @memberof Pool + */ + 'github-runner-group'?: string; + /** + * + * @type {string} + * @memberof Pool + */ + 'id'?: string; + /** + * + * @type {string} + * @memberof Pool + */ + 'image'?: string; + /** + * + * @type {Array} + * @memberof Pool + */ + 'instances'?: Array; + /** + * + * @type {number} + * @memberof Pool + */ + 'max_runners'?: number; + /** + * + * @type {number} + * @memberof Pool + */ + 'min_idle_runners'?: number; + /** + * + * @type {string} + * @memberof Pool + */ + 'org_id'?: string; + /** + * + * @type {string} + * @memberof Pool + */ + 'org_name'?: string; + /** + * + * @type {string} + * @memberof Pool + */ + 'os_arch'?: string; + /** + * + * @type {string} + * @memberof Pool + */ + 'os_type'?: string; + /** + * Priority is the priority of the pool. The higher the number, the higher the priority. When fetching matching pools for a set of tags, the result will be sorted in descending order of priority. 
+ * @type {number} + * @memberof Pool + */ + 'priority'?: number; + /** + * + * @type {string} + * @memberof Pool + */ + 'provider_name'?: string; + /** + * + * @type {string} + * @memberof Pool + */ + 'repo_id'?: string; + /** + * + * @type {string} + * @memberof Pool + */ + 'repo_name'?: string; + /** + * + * @type {number} + * @memberof Pool + */ + 'runner_bootstrap_timeout'?: number; + /** + * + * @type {string} + * @memberof Pool + */ + 'runner_prefix'?: string; + /** + * + * @type {Array} + * @memberof Pool + */ + 'tags'?: Array; + /** + * + * @type {string} + * @memberof Pool + */ + 'updated_at'?: string; +} +/** + * + * @export + * @interface PoolManagerStatus + */ +export interface PoolManagerStatus { + /** + * + * @type {string} + * @memberof PoolManagerStatus + */ + 'failure_reason'?: string; + /** + * + * @type {boolean} + * @memberof PoolManagerStatus + */ + 'running'?: boolean; +} +/** + * + * @export + * @interface Provider + */ +export interface Provider { + /** + * + * @type {string} + * @memberof Provider + */ + 'description'?: string; + /** + * + * @type {string} + * @memberof Provider + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof Provider + */ + 'type'?: string; +} +/** + * + * @export + * @interface Repository + */ +export interface Repository { + /** + * + * @type {string} + * @memberof Repository + */ + 'created_at'?: string; + /** + * + * @type {ForgeCredentials} + * @memberof Repository + */ + 'credentials'?: ForgeCredentials; + /** + * + * @type {number} + * @memberof Repository + */ + 'credentials_id'?: number; + /** + * CredentialName is the name of the credentials associated with the enterprise. This field is now deprecated. Use CredentialsID instead. This field will be removed in v0.2.0. + * @type {string} + * @memberof Repository + */ + 'credentials_name'?: string; + /** + * + * @type {ForgeEndpoint} + * @memberof Repository + */ + 'endpoint'?: ForgeEndpoint; + /** + * + * @type {Array} + * @memberof Repository + */ + 'events'?: Array; + /** + * + * @type {string} + * @memberof Repository + */ + 'id'?: string; + /** + * + * @type {string} + * @memberof Repository + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof Repository + */ + 'owner'?: string; + /** + * + * @type {Array} + * @memberof Repository + */ + 'pool'?: Array; + /** + * + * @type {string} + * @memberof Repository + */ + 'pool_balancing_type'?: string; + /** + * + * @type {PoolManagerStatus} + * @memberof Repository + */ + 'pool_manager_status'?: PoolManagerStatus; + /** + * + * @type {string} + * @memberof Repository + */ + 'updated_at'?: string; +} +/** + * + * @export + * @interface RunnerPrefix + */ +export interface RunnerPrefix { + /** + * + * @type {string} + * @memberof RunnerPrefix + */ + 'runner_prefix'?: string; +} +/** + * + * @export + * @interface ScaleSet + */ +export interface ScaleSet { + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'created_at'?: string; + /** + * + * @type {number} + * @memberof ScaleSet + */ + 'desired_runner_count'?: number; + /** + * + * @type {boolean} + * @memberof ScaleSet + */ + 'disable_update'?: boolean; + /** + * + * @type {boolean} + * @memberof ScaleSet + */ + 'enabled'?: boolean; + /** + * + * @type {ForgeEndpoint} + * @memberof ScaleSet + */ + 'endpoint'?: ForgeEndpoint; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'enterprise_id'?: string; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'enterprise_name'?: string; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 
'extended_state'?: string; + /** + * ExtraSpecs is an opaque raw json that gets sent to the provider as part of the bootstrap params for instances. It can contain any kind of data needed by providers. The contents of this field means nothing to garm itself. We don\'t act on the information in this field at all. We only validate that it\'s a proper json. + * @type {object} + * @memberof ScaleSet + */ + 'extra_specs'?: object; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'flavor'?: string; + /** + * GithubRunnerGroup is the github runner group in which the runners will be added. The runner group must be created by someone with access to the enterprise. + * @type {string} + * @memberof ScaleSet + */ + 'github-runner-group'?: string; + /** + * + * @type {number} + * @memberof ScaleSet + */ + 'id'?: number; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'image'?: string; + /** + * + * @type {Array} + * @memberof ScaleSet + */ + 'instances'?: Array; + /** + * + * @type {number} + * @memberof ScaleSet + */ + 'max_runners'?: number; + /** + * + * @type {number} + * @memberof ScaleSet + */ + 'min_idle_runners'?: number; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'org_id'?: string; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'org_name'?: string; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'os_arch'?: string; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'os_type'?: string; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'provider_name'?: string; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'repo_id'?: string; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'repo_name'?: string; + /** + * + * @type {number} + * @memberof ScaleSet + */ + 'runner_bootstrap_timeout'?: number; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'runner_prefix'?: string; + /** + * + * @type {number} + * @memberof ScaleSet + */ + 'scale_set_id'?: number; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'state'?: string; + /** + * + * @type {Array} + * @memberof ScaleSet + */ + 'status_messages'?: Array; + /** + * + * @type {string} + * @memberof ScaleSet + */ + 'updated_at'?: string; +} +/** + * + * @export + * @interface StatusMessage + */ +export interface StatusMessage { + /** + * + * @type {string} + * @memberof StatusMessage + */ + 'created_at'?: string; + /** + * + * @type {string} + * @memberof StatusMessage + */ + 'event_level'?: string; + /** + * + * @type {string} + * @memberof StatusMessage + */ + 'event_type'?: string; + /** + * + * @type {string} + * @memberof StatusMessage + */ + 'message'?: string; +} +/** + * + * @export + * @interface Tag + */ +export interface Tag { + /** + * + * @type {string} + * @memberof Tag + */ + 'id'?: string; + /** + * + * @type {string} + * @memberof Tag + */ + 'name'?: string; +} +/** + * + * @export + * @interface UpdateControllerParams + */ +export interface UpdateControllerParams { + /** + * + * @type {string} + * @memberof UpdateControllerParams + */ + 'callback_url'?: string; + /** + * + * @type {string} + * @memberof UpdateControllerParams + */ + 'metadata_url'?: string; + /** + * + * @type {number} + * @memberof UpdateControllerParams + */ + 'minimum_job_age_backoff'?: number; + /** + * + * @type {string} + * @memberof UpdateControllerParams + */ + 'webhook_url'?: string; +} +/** + * + * @export + * @interface UpdateEntityParams + */ +export interface 
UpdateEntityParams { + /** + * + * @type {string} + * @memberof UpdateEntityParams + */ + 'credentials_name'?: string; + /** + * + * @type {string} + * @memberof UpdateEntityParams + */ + 'pool_balancer_type'?: string; + /** + * + * @type {string} + * @memberof UpdateEntityParams + */ + 'webhook_secret'?: string; +} +/** + * + * @export + * @interface UpdateGiteaCredentialsParams + */ +export interface UpdateGiteaCredentialsParams { + /** + * + * @type {string} + * @memberof UpdateGiteaCredentialsParams + */ + 'description'?: string; + /** + * + * @type {string} + * @memberof UpdateGiteaCredentialsParams + */ + 'name'?: string; + /** + * + * @type {GithubPAT} + * @memberof UpdateGiteaCredentialsParams + */ + 'pat'?: GithubPAT; +} +/** + * + * @export + * @interface UpdateGiteaEndpointParams + */ +export interface UpdateGiteaEndpointParams { + /** + * + * @type {string} + * @memberof UpdateGiteaEndpointParams + */ + 'api_base_url'?: string; + /** + * + * @type {string} + * @memberof UpdateGiteaEndpointParams + */ + 'base_url'?: string; + /** + * + * @type {Array} + * @memberof UpdateGiteaEndpointParams + */ + 'ca_cert_bundle'?: Array; + /** + * + * @type {string} + * @memberof UpdateGiteaEndpointParams + */ + 'description'?: string; +} +/** + * + * @export + * @interface UpdateGithubCredentialsParams + */ +export interface UpdateGithubCredentialsParams { + /** + * + * @type {GithubApp} + * @memberof UpdateGithubCredentialsParams + */ + 'app'?: GithubApp; + /** + * + * @type {string} + * @memberof UpdateGithubCredentialsParams + */ + 'description'?: string; + /** + * + * @type {string} + * @memberof UpdateGithubCredentialsParams + */ + 'name'?: string; + /** + * + * @type {GithubPAT} + * @memberof UpdateGithubCredentialsParams + */ + 'pat'?: GithubPAT; +} +/** + * + * @export + * @interface UpdateGithubEndpointParams + */ +export interface UpdateGithubEndpointParams { + /** + * + * @type {string} + * @memberof UpdateGithubEndpointParams + */ + 'api_base_url'?: string; + /** + * + * @type {string} + * @memberof UpdateGithubEndpointParams + */ + 'base_url'?: string; + /** + * + * @type {Array} + * @memberof UpdateGithubEndpointParams + */ + 'ca_cert_bundle'?: Array; + /** + * + * @type {string} + * @memberof UpdateGithubEndpointParams + */ + 'description'?: string; + /** + * + * @type {string} + * @memberof UpdateGithubEndpointParams + */ + 'upload_base_url'?: string; +} +/** + * + * @export + * @interface UpdatePoolParams + */ +export interface UpdatePoolParams { + /** + * + * @type {boolean} + * @memberof UpdatePoolParams + */ + 'enabled'?: boolean; + /** + * + * @type {object} + * @memberof UpdatePoolParams + */ + 'extra_specs'?: object; + /** + * + * @type {string} + * @memberof UpdatePoolParams + */ + 'flavor'?: string; + /** + * GithubRunnerGroup is the github runner group in which the runners of this pool will be added to. The runner group must be created by someone with access to the enterprise. 
+ * @type {string} + * @memberof UpdatePoolParams + */ + 'github-runner-group'?: string; + /** + * + * @type {string} + * @memberof UpdatePoolParams + */ + 'image'?: string; + /** + * + * @type {number} + * @memberof UpdatePoolParams + */ + 'max_runners'?: number; + /** + * + * @type {number} + * @memberof UpdatePoolParams + */ + 'min_idle_runners'?: number; + /** + * + * @type {string} + * @memberof UpdatePoolParams + */ + 'os_arch'?: string; + /** + * + * @type {string} + * @memberof UpdatePoolParams + */ + 'os_type'?: string; + /** + * + * @type {number} + * @memberof UpdatePoolParams + */ + 'priority'?: number; + /** + * + * @type {number} + * @memberof UpdatePoolParams + */ + 'runner_bootstrap_timeout'?: number; + /** + * + * @type {string} + * @memberof UpdatePoolParams + */ + 'runner_prefix'?: string; + /** + * + * @type {Array} + * @memberof UpdatePoolParams + */ + 'tags'?: Array; +} +/** + * + * @export + * @interface UpdateScaleSetParams + */ +export interface UpdateScaleSetParams { + /** + * + * @type {boolean} + * @memberof UpdateScaleSetParams + */ + 'enabled'?: boolean; + /** + * + * @type {string} + * @memberof UpdateScaleSetParams + */ + 'extended_state'?: string; + /** + * + * @type {object} + * @memberof UpdateScaleSetParams + */ + 'extra_specs'?: object; + /** + * + * @type {string} + * @memberof UpdateScaleSetParams + */ + 'flavor'?: string; + /** + * + * @type {string} + * @memberof UpdateScaleSetParams + */ + 'image'?: string; + /** + * + * @type {number} + * @memberof UpdateScaleSetParams + */ + 'max_runners'?: number; + /** + * + * @type {number} + * @memberof UpdateScaleSetParams + */ + 'min_idle_runners'?: number; + /** + * + * @type {string} + * @memberof UpdateScaleSetParams + */ + 'name'?: string; + /** + * + * @type {string} + * @memberof UpdateScaleSetParams + */ + 'os_arch'?: string; + /** + * + * @type {string} + * @memberof UpdateScaleSetParams + */ + 'os_type'?: string; + /** + * + * @type {number} + * @memberof UpdateScaleSetParams + */ + 'runner_bootstrap_timeout'?: number; + /** + * GithubRunnerGroup is the github runner group in which the runners of this pool will be added to. The runner group must be created by someone with access to the enterprise. + * @type {string} + * @memberof UpdateScaleSetParams + */ + 'runner_group'?: string; + /** + * + * @type {string} + * @memberof UpdateScaleSetParams + */ + 'runner_prefix'?: string; + /** + * + * @type {string} + * @memberof UpdateScaleSetParams + */ + 'state'?: string; +} +/** + * Users holds information about a particular user + * @export + * @interface User + */ +export interface User { + /** + * + * @type {string} + * @memberof User + */ + 'created_at'?: string; + /** + * + * @type {string} + * @memberof User + */ + 'email'?: string; + /** + * + * @type {boolean} + * @memberof User + */ + 'enabled'?: boolean; + /** + * + * @type {string} + * @memberof User + */ + 'full_name'?: string; + /** + * + * @type {string} + * @memberof User + */ + 'id'?: string; + /** + * + * @type {boolean} + * @memberof User + */ + 'is_admin'?: boolean; + /** + * + * @type {string} + * @memberof User + */ + 'updated_at'?: string; + /** + * + * @type {string} + * @memberof User + */ + 'username'?: string; +} + +/** + * ControllerApi - axios parameter creator + * @export + */ +export const ControllerApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Update controller. + * @param {UpdateControllerParams} body Parameters used when updating the controller. 
+ * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateController: async (body: UpdateControllerParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'body' is not null or undefined + assertParamExists('updateController', 'body', body) + const localVarPath = `/controller`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * ControllerApi - functional programming interface + * @export + */ +export const ControllerApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = ControllerApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Update controller. + * @param {UpdateControllerParams} body Parameters used when updating the controller. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async updateController(body: UpdateControllerParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updateController(body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ControllerApi.updateController']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * ControllerApi - factory interface + * @export + */ +export const ControllerApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = ControllerApiFp(configuration) + return { + /** + * + * @summary Update controller. + * @param {UpdateControllerParams} body Parameters used when updating the controller. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateController(body: UpdateControllerParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateController(body, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * ControllerApi - object-oriented interface + * @export + * @class ControllerApi + * @extends {BaseAPI} + */ +export class ControllerApi extends BaseAPI { + /** + * + * @summary Update controller. + * @param {UpdateControllerParams} body Parameters used when updating the controller. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof ControllerApi + */ + public updateController(body: UpdateControllerParams, options?: RawAxiosRequestConfig) { + return ControllerApiFp(this.configuration).updateController(body, options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * ControllerInfoApi - axios parameter creator + * @export + */ +export const ControllerInfoApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Get controller info. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + controllerInfo: async (options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/controller-info`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * ControllerInfoApi - functional programming interface + * @export + */ +export const ControllerInfoApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = ControllerInfoApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Get controller info. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async controllerInfo(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.controllerInfo(options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ControllerInfoApi.controllerInfo']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * ControllerInfoApi - factory interface + * @export + */ +export const ControllerInfoApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = ControllerInfoApiFp(configuration) + return { + /** + * + * @summary Get controller info. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + controllerInfo(options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.controllerInfo(options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * ControllerInfoApi - object-oriented interface + * @export + * @class ControllerInfoApi + * @extends {BaseAPI} + */ +export class ControllerInfoApi extends BaseAPI { + /** + * + * @summary Get controller info. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof ControllerInfoApi + */ + public controllerInfo(options?: RawAxiosRequestConfig) { + return ControllerInfoApiFp(this.configuration).controllerInfo(options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * CredentialsApi - axios parameter creator + * @export + */ +export const CredentialsApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Create a GitHub credential. + * @param {CreateGithubCredentialsParams} body Parameters used when creating a GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createCredentials: async (body: CreateGithubCredentialsParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'body' is not null or undefined + assertParamExists('createCredentials', 'body', body) + const localVarPath = `/github/credentials`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Create a Gitea credential. + * @param {CreateGiteaCredentialsParams} body Parameters used when creating a Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createGiteaCredentials: async (body: CreateGiteaCredentialsParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'body' is not null or undefined + assertParamExists('createGiteaCredentials', 'body', body) + const localVarPath = `/gitea/credentials`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteCredentials: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('deleteCredentials', 'id', id) + const localVarPath = `/github/credentials/{id}` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteGiteaCredentials: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('deleteGiteaCredentials', 'id', id) + const localVarPath = `/gitea/credentials/{id}` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {*} [options] Override http request option. 
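+         *
+         * Like the other creators in this file, this resolves to the request arguments: the path
+         * built by toPathString plus the axios request options, which the functional layer below
+         * hands to createRequestFunction.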
+ * @throws {RequiredError} + */ + getCredentials: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('getCredentials', 'id', id) + const localVarPath = `/github/credentials/{id}` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getGiteaCredentials: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('getGiteaCredentials', 'id', id) + const localVarPath = `/gitea/credentials/{id}` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List all credentials. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listCredentials: async (options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/github/credentials`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List all credentials. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listGiteaCredentials: async (options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/gitea/credentials`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {UpdateGithubCredentialsParams} body Parameters used when updating a GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateCredentials: async (id: number, body: UpdateGithubCredentialsParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('updateCredentials', 'id', id) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateCredentials', 'body', body) + const localVarPath = `/github/credentials/{id}` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {UpdateGiteaCredentialsParams} body Parameters used when updating a Gitea credential. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + updateGiteaCredentials: async (id: number, body: UpdateGiteaCredentialsParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('updateGiteaCredentials', 'id', id) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateGiteaCredentials', 'body', body) + const localVarPath = `/gitea/credentials/{id}` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * CredentialsApi - functional programming interface + * @export + */ +export const CredentialsApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = CredentialsApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Create a GitHub credential. + * @param {CreateGithubCredentialsParams} body Parameters used when creating a GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createCredentials(body: CreateGithubCredentialsParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createCredentials(body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['CredentialsApi.createCredentials']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Create a Gitea credential. + * @param {CreateGiteaCredentialsParams} body Parameters used when creating a Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createGiteaCredentials(body: CreateGiteaCredentialsParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createGiteaCredentials(body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; + const localVarOperationServerBasePath = operationServerMap['CredentialsApi.createGiteaCredentials']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteCredentials(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteCredentials(id, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['CredentialsApi.deleteCredentials']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteGiteaCredentials(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteGiteaCredentials(id, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['CredentialsApi.deleteGiteaCredentials']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getCredentials(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getCredentials(id, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['CredentialsApi.getCredentials']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getGiteaCredentials(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getGiteaCredentials(id, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; + const localVarOperationServerBasePath = operationServerMap['CredentialsApi.getGiteaCredentials']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List all credentials. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listCredentials(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listCredentials(options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['CredentialsApi.listCredentials']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List all credentials. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listGiteaCredentials(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listGiteaCredentials(options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['CredentialsApi.listGiteaCredentials']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Update a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {UpdateGithubCredentialsParams} body Parameters used when updating a GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async updateCredentials(id: number, body: UpdateGithubCredentialsParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updateCredentials(id, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['CredentialsApi.updateCredentials']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Update a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {UpdateGiteaCredentialsParams} body Parameters used when updating a Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async updateGiteaCredentials(id: number, body: UpdateGiteaCredentialsParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updateGiteaCredentials(id, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; + const localVarOperationServerBasePath = operationServerMap['CredentialsApi.updateGiteaCredentials']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * CredentialsApi - factory interface + * @export + */ +export const CredentialsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = CredentialsApiFp(configuration) + return { + /** + * + * @summary Create a GitHub credential. + * @param {CreateGithubCredentialsParams} body Parameters used when creating a GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createCredentials(body: CreateGithubCredentialsParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createCredentials(body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Create a Gitea credential. + * @param {CreateGiteaCredentialsParams} body Parameters used when creating a Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createGiteaCredentials(body: CreateGiteaCredentialsParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createGiteaCredentials(body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteCredentials(id: number, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteCredentials(id, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteGiteaCredentials(id: number, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteGiteaCredentials(id, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getCredentials(id: number, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getCredentials(id, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getGiteaCredentials(id: number, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getGiteaCredentials(id, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List all credentials. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listCredentials(options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listCredentials(options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List all credentials. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + listGiteaCredentials(options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listGiteaCredentials(options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {UpdateGithubCredentialsParams} body Parameters used when updating a GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateCredentials(id: number, body: UpdateGithubCredentialsParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateCredentials(id, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {UpdateGiteaCredentialsParams} body Parameters used when updating a Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateGiteaCredentials(id: number, body: UpdateGiteaCredentialsParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateGiteaCredentials(id, body, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * CredentialsApi - object-oriented interface + * @export + * @class CredentialsApi + * @extends {BaseAPI} + */ +export class CredentialsApi extends BaseAPI { + /** + * + * @summary Create a GitHub credential. + * @param {CreateGithubCredentialsParams} body Parameters used when creating a GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof CredentialsApi + */ + public createCredentials(body: CreateGithubCredentialsParams, options?: RawAxiosRequestConfig) { + return CredentialsApiFp(this.configuration).createCredentials(body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Create a Gitea credential. + * @param {CreateGiteaCredentialsParams} body Parameters used when creating a Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof CredentialsApi + */ + public createGiteaCredentials(body: CreateGiteaCredentialsParams, options?: RawAxiosRequestConfig) { + return CredentialsApiFp(this.configuration).createGiteaCredentials(body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof CredentialsApi + */ + public deleteCredentials(id: number, options?: RawAxiosRequestConfig) { + return CredentialsApiFp(this.configuration).deleteCredentials(id, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof CredentialsApi + */ + public deleteGiteaCredentials(id: number, options?: RawAxiosRequestConfig) { + return CredentialsApiFp(this.configuration).deleteGiteaCredentials(id, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {*} [options] Override http request option. 
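+     *
+     * A minimal usage sketch; the credential ID and the config value (a Configuration with a
+     * basePath and Bearer apiKey, as sketched for ControllerInfoApi above) are assumptions:
+     * @example
+     * // const credsApi = new CredentialsApi(config);
+     * // const { data: credential } = await credsApi.getCredentials(1);
+     * // console.log(credential);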
+ * @throws {RequiredError} + * @memberof CredentialsApi + */ + public getCredentials(id: number, options?: RawAxiosRequestConfig) { + return CredentialsApiFp(this.configuration).getCredentials(id, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof CredentialsApi + */ + public getGiteaCredentials(id: number, options?: RawAxiosRequestConfig) { + return CredentialsApiFp(this.configuration).getGiteaCredentials(id, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List all credentials. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof CredentialsApi + */ + public listCredentials(options?: RawAxiosRequestConfig) { + return CredentialsApiFp(this.configuration).listCredentials(options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List all credentials. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof CredentialsApi + */ + public listGiteaCredentials(options?: RawAxiosRequestConfig) { + return CredentialsApiFp(this.configuration).listGiteaCredentials(options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update a GitHub credential. + * @param {number} id ID of the GitHub credential. + * @param {UpdateGithubCredentialsParams} body Parameters used when updating a GitHub credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof CredentialsApi + */ + public updateCredentials(id: number, body: UpdateGithubCredentialsParams, options?: RawAxiosRequestConfig) { + return CredentialsApiFp(this.configuration).updateCredentials(id, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update a Gitea credential. + * @param {number} id ID of the Gitea credential. + * @param {UpdateGiteaCredentialsParams} body Parameters used when updating a Gitea credential. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof CredentialsApi + */ + public updateGiteaCredentials(id: number, body: UpdateGiteaCredentialsParams, options?: RawAxiosRequestConfig) { + return CredentialsApiFp(this.configuration).updateGiteaCredentials(id, body, options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * EndpointsApi - axios parameter creator + * @export + */ +export const EndpointsApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Create a Gitea Endpoint. + * @param {CreateGiteaEndpointParams} body Parameters used when creating a Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createGiteaEndpoint: async (body: CreateGiteaEndpointParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'body' is not null or undefined + assertParamExists('createGiteaEndpoint', 'body', body) + const localVarPath = `/gitea/endpoints`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Create a GitHub Endpoint. + * @param {CreateGithubEndpointParams} body Parameters used when creating a GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createGithubEndpoint: async (body: CreateGithubEndpointParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'body' is not null or undefined + assertParamExists('createGithubEndpoint', 'body', body) + const localVarPath = `/github/endpoints`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteGiteaEndpoint: async (name: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'name' is not null or undefined + assertParamExists('deleteGiteaEndpoint', 'name', name) + const localVarPath = `/gitea/endpoints/{name}` + .replace(`{${"name"}}`, encodeURIComponent(String(name))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteGithubEndpoint: async (name: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'name' is not null or undefined + assertParamExists('deleteGithubEndpoint', 'name', name) + const localVarPath = `/github/endpoints/{name}` + .replace(`{${"name"}}`, encodeURIComponent(String(name))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getGiteaEndpoint: async (name: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'name' is not null or undefined + assertParamExists('getGiteaEndpoint', 'name', name) + const localVarPath = `/gitea/endpoints/{name}` + .replace(`{${"name"}}`, encodeURIComponent(String(name))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getGithubEndpoint: async (name: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'name' is not null or undefined + assertParamExists('getGithubEndpoint', 'name', name) + const localVarPath = `/github/endpoints/{name}` + .replace(`{${"name"}}`, encodeURIComponent(String(name))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List all Gitea Endpoints. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listGiteaEndpoints: async (options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/gitea/endpoints`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List all GitHub Endpoints. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listGithubEndpoints: async (options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/github/endpoints`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {UpdateGiteaEndpointParams} body Parameters used when updating a Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateGiteaEndpoint: async (name: string, body: UpdateGiteaEndpointParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'name' is not null or undefined + assertParamExists('updateGiteaEndpoint', 'name', name) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateGiteaEndpoint', 'body', body) + const localVarPath = `/gitea/endpoints/{name}` + .replace(`{${"name"}}`, encodeURIComponent(String(name))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {UpdateGithubEndpointParams} body Parameters used when updating a GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateGithubEndpoint: async (name: string, body: UpdateGithubEndpointParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'name' is not null or undefined + assertParamExists('updateGithubEndpoint', 'name', name) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateGithubEndpoint', 'body', body) + const localVarPath = `/github/endpoints/{name}` + .replace(`{${"name"}}`, encodeURIComponent(String(name))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * EndpointsApi - functional programming interface + * @export + */ +export const EndpointsApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = EndpointsApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Create a Gitea Endpoint. + * @param {CreateGiteaEndpointParams} body Parameters used when creating a Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createGiteaEndpoint(body: CreateGiteaEndpointParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createGiteaEndpoint(body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EndpointsApi.createGiteaEndpoint']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Create a GitHub Endpoint. + * @param {CreateGithubEndpointParams} body Parameters used when creating a GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createGithubEndpoint(body: CreateGithubEndpointParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createGithubEndpoint(body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EndpointsApi.createGithubEndpoint']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteGiteaEndpoint(name: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteGiteaEndpoint(name, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; + const localVarOperationServerBasePath = operationServerMap['EndpointsApi.deleteGiteaEndpoint']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteGithubEndpoint(name: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteGithubEndpoint(name, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EndpointsApi.deleteGithubEndpoint']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getGiteaEndpoint(name: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getGiteaEndpoint(name, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EndpointsApi.getGiteaEndpoint']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getGithubEndpoint(name: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getGithubEndpoint(name, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EndpointsApi.getGithubEndpoint']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List all Gitea Endpoints. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listGiteaEndpoints(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listGiteaEndpoints(options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EndpointsApi.listGiteaEndpoints']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List all GitHub Endpoints. 
+ * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listGithubEndpoints(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listGithubEndpoints(options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EndpointsApi.listGithubEndpoints']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Update a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {UpdateGiteaEndpointParams} body Parameters used when updating a Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async updateGiteaEndpoint(name: string, body: UpdateGiteaEndpointParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updateGiteaEndpoint(name, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EndpointsApi.updateGiteaEndpoint']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Update a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {UpdateGithubEndpointParams} body Parameters used when updating a GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async updateGithubEndpoint(name: string, body: UpdateGithubEndpointParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updateGithubEndpoint(name, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EndpointsApi.updateGithubEndpoint']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * EndpointsApi - factory interface + * @export + */ +export const EndpointsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = EndpointsApiFp(configuration) + return { + /** + * + * @summary Create a Gitea Endpoint. + * @param {CreateGiteaEndpointParams} body Parameters used when creating a Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createGiteaEndpoint(body: CreateGiteaEndpointParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createGiteaEndpoint(body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Create a GitHub Endpoint. + * @param {CreateGithubEndpointParams} body Parameters used when creating a GitHub endpoint. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + createGithubEndpoint(body: CreateGithubEndpointParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createGithubEndpoint(body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteGiteaEndpoint(name: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteGiteaEndpoint(name, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteGithubEndpoint(name: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteGithubEndpoint(name, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getGiteaEndpoint(name: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getGiteaEndpoint(name, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getGithubEndpoint(name: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getGithubEndpoint(name, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List all Gitea Endpoints. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listGiteaEndpoints(options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listGiteaEndpoints(options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List all GitHub Endpoints. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listGithubEndpoints(options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listGithubEndpoints(options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {UpdateGiteaEndpointParams} body Parameters used when updating a Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateGiteaEndpoint(name: string, body: UpdateGiteaEndpointParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateGiteaEndpoint(name, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {UpdateGithubEndpointParams} body Parameters used when updating a GitHub endpoint. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + updateGithubEndpoint(name: string, body: UpdateGithubEndpointParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateGithubEndpoint(name, body, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * EndpointsApi - object-oriented interface + * @export + * @class EndpointsApi + * @extends {BaseAPI} + */ +export class EndpointsApi extends BaseAPI { + /** + * + * @summary Create a Gitea Endpoint. + * @param {CreateGiteaEndpointParams} body Parameters used when creating a Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EndpointsApi + */ + public createGiteaEndpoint(body: CreateGiteaEndpointParams, options?: RawAxiosRequestConfig) { + return EndpointsApiFp(this.configuration).createGiteaEndpoint(body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Create a GitHub Endpoint. + * @param {CreateGithubEndpointParams} body Parameters used when creating a GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EndpointsApi + */ + public createGithubEndpoint(body: CreateGithubEndpointParams, options?: RawAxiosRequestConfig) { + return EndpointsApiFp(this.configuration).createGithubEndpoint(body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EndpointsApi + */ + public deleteGiteaEndpoint(name: string, options?: RawAxiosRequestConfig) { + return EndpointsApiFp(this.configuration).deleteGiteaEndpoint(name, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EndpointsApi + */ + public deleteGithubEndpoint(name: string, options?: RawAxiosRequestConfig) { + return EndpointsApiFp(this.configuration).deleteGithubEndpoint(name, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EndpointsApi + */ + public getGiteaEndpoint(name: string, options?: RawAxiosRequestConfig) { + return EndpointsApiFp(this.configuration).getGiteaEndpoint(name, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EndpointsApi + */ + public getGithubEndpoint(name: string, options?: RawAxiosRequestConfig) { + return EndpointsApiFp(this.configuration).getGithubEndpoint(name, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List all Gitea Endpoints. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof EndpointsApi + */ + public listGiteaEndpoints(options?: RawAxiosRequestConfig) { + return EndpointsApiFp(this.configuration).listGiteaEndpoints(options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List all GitHub Endpoints. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EndpointsApi + */ + public listGithubEndpoints(options?: RawAxiosRequestConfig) { + return EndpointsApiFp(this.configuration).listGithubEndpoints(options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update a Gitea Endpoint. + * @param {string} name The name of the Gitea endpoint. + * @param {UpdateGiteaEndpointParams} body Parameters used when updating a Gitea endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EndpointsApi + */ + public updateGiteaEndpoint(name: string, body: UpdateGiteaEndpointParams, options?: RawAxiosRequestConfig) { + return EndpointsApiFp(this.configuration).updateGiteaEndpoint(name, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update a GitHub Endpoint. + * @param {string} name The name of the GitHub endpoint. + * @param {UpdateGithubEndpointParams} body Parameters used when updating a GitHub endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EndpointsApi + */ + public updateGithubEndpoint(name: string, body: UpdateGithubEndpointParams, options?: RawAxiosRequestConfig) { + return EndpointsApiFp(this.configuration).updateGithubEndpoint(name, body, options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * EnterprisesApi - axios parameter creator + * @export + */ +export const EnterprisesApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Create enterprise with the given parameters. + * @param {CreateEnterpriseParams} body Parameters used to create the enterprise. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createEnterprise: async (body: CreateEnterpriseParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'body' is not null or undefined + assertParamExists('createEnterprise', 'body', body) + const localVarPath = `/enterprises`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {};
+ localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+ localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
+
+ return {
+ url: toPathString(localVarUrlObj),
+ options: localVarRequestOptions,
+ };
+ },
+ /**
+ *
+ * @summary Create enterprise pool with the parameters given.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {CreatePoolParams} body Parameters used when creating the enterprise pool.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ createEnterprisePool: async (enterpriseID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise => {
+ // verify required parameter 'enterpriseID' is not null or undefined
+ assertParamExists('createEnterprisePool', 'enterpriseID', enterpriseID)
+ // verify required parameter 'body' is not null or undefined
+ assertParamExists('createEnterprisePool', 'body', body)
+ const localVarPath = `/enterprises/{enterpriseID}/pools`
+ .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID)));
+ // use dummy base URL string because the URL constructor only accepts absolute URLs.
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+ let baseOptions;
+ if (configuration) {
+ baseOptions = configuration.baseOptions;
+ }
+
+ const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
+ const localVarHeaderParameter = {} as any;
+ const localVarQueryParameter = {} as any;
+
+ // authentication Bearer required
+ await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration)
+
+
+
+ localVarHeaderParameter['Content-Type'] = 'application/json';
+
+ setSearchParams(localVarUrlObj, localVarQueryParameter);
+ let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+ localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+ localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
+
+ return {
+ url: toPathString(localVarUrlObj),
+ options: localVarRequestOptions,
+ };
+ },
+ /**
+ *
+ * @summary Create enterprise scale set with the parameters given.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ createEnterpriseScaleSet: async (enterpriseID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise => {
+ // verify required parameter 'enterpriseID' is not null or undefined
+ assertParamExists('createEnterpriseScaleSet', 'enterpriseID', enterpriseID)
+ // verify required parameter 'body' is not null or undefined
+ assertParamExists('createEnterpriseScaleSet', 'body', body)
+ const localVarPath = `/enterprises/{enterpriseID}/scalesets`
+ .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID)));
+ // use dummy base URL string because the URL constructor only accepts absolute URLs.
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete enterprise by ID. + * @param {string} enterpriseID ID of the enterprise to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteEnterprise: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('deleteEnterprise', 'enterpriseID', enterpriseID) + const localVarPath = `/enterprises/{enterpriseID}` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteEnterprisePool: async (enterpriseID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('deleteEnterprisePool', 'enterpriseID', enterpriseID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('deleteEnterprisePool', 'poolID', poolID) + const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get enterprise by ID. + * @param {string} enterpriseID The ID of the enterprise to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getEnterprise: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('getEnterprise', 'enterpriseID', enterpriseID) + const localVarPath = `/enterprises/{enterpriseID}` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getEnterprisePool: async (enterpriseID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('getEnterprisePool', 'enterpriseID', enterpriseID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('getEnterprisePool', 'poolID', poolID) + const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List enterprise instances. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterpriseInstances: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('listEnterpriseInstances', 'enterpriseID', enterpriseID) + const localVarPath = `/enterprises/{enterpriseID}/instances` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List enterprise pools. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterprisePools: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('listEnterprisePools', 'enterpriseID', enterpriseID) + const localVarPath = `/enterprises/{enterpriseID}/pools` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List enterprise scale sets. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterpriseScaleSets: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('listEnterpriseScaleSets', 'enterpriseID', enterpriseID) + const localVarPath = `/enterprises/{enterpriseID}/scalesets` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List all enterprises. + * @param {string} [name] Exact enterprise name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterprises: async (name?: string, endpoint?: string, options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/enterprises`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + if (name !== undefined) { + localVarQueryParameter['name'] = name; + } + + if (endpoint !== undefined) { + localVarQueryParameter['endpoint'] = endpoint; + } + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update enterprise with the given parameters. + * @param {string} enterpriseID The ID of the enterprise to update. + * @param {UpdateEntityParams} body Parameters used when updating the enterprise. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + updateEnterprise: async (enterpriseID: string, body: UpdateEntityParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('updateEnterprise', 'enterpriseID', enterpriseID) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateEnterprise', 'body', body) + const localVarPath = `/enterprises/{enterpriseID}` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateEnterprisePool: async (enterpriseID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('updateEnterprisePool', 'enterpriseID', enterpriseID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('updateEnterprisePool', 'poolID', poolID) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateEnterprisePool', 'body', body) + const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {};
+ localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+ localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration)
+
+ return {
+ url: toPathString(localVarUrlObj),
+ options: localVarRequestOptions,
+ };
+ },
+ }
+};
+
+/**
+ * EnterprisesApi - functional programming interface
+ * @export
+ */
+export const EnterprisesApiFp = function(configuration?: Configuration) {
+ const localVarAxiosParamCreator = EnterprisesApiAxiosParamCreator(configuration)
+ return {
+ /**
+ *
+ * @summary Create enterprise with the given parameters.
+ * @param {CreateEnterpriseParams} body Parameters used to create the enterprise.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async createEnterprise(body: CreateEnterpriseParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.createEnterprise(body, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.createEnterprise']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary Create enterprise pool with the parameters given.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {CreatePoolParams} body Parameters used when creating the enterprise pool.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.createEnterprisePool(enterpriseID, body, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.createEnterprisePool']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary Create enterprise scale set with the parameters given.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.createEnterpriseScaleSet(enterpriseID, body, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.createEnterpriseScaleSet']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary Delete enterprise by ID.
+ * @param {string} enterpriseID ID of the enterprise to delete.
+ * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteEnterprise(enterpriseID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.deleteEnterprise']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteEnterprisePool(enterpriseID, poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.deleteEnterprisePool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get enterprise by ID. + * @param {string} enterpriseID The ID of the enterprise to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getEnterprise(enterpriseID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.getEnterprise']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getEnterprisePool(enterpriseID, poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.getEnterprisePool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List enterprise instances. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterpriseInstances(enterpriseID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.listEnterpriseInstances']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List enterprise pools. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterprisePools(enterpriseID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.listEnterprisePools']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List enterprise scale sets. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterpriseScaleSets(enterpriseID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.listEnterpriseScaleSets']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List all enterprises. + * @param {string} [name] Exact enterprise name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listEnterprises(name?: string, endpoint?: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterprises(name, endpoint, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.listEnterprises']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Update enterprise with the given parameters. + * @param {string} enterpriseID The ID of the enterprise to update. + * @param {UpdateEntityParams} body Parameters used when updating the enterprise. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError}
+ */
+ async updateEnterprise(enterpriseID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.updateEnterprise(enterpriseID, body, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.updateEnterprise']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary Update enterprise pool with the parameters given.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {string} poolID ID of the enterprise pool to update.
+ * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.updateEnterprisePool(enterpriseID, poolID, body, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['EnterprisesApi.updateEnterprisePool']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ }
+};
+
+/**
+ * EnterprisesApi - factory interface
+ * @export
+ */
+export const EnterprisesApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+ const localVarFp = EnterprisesApiFp(configuration)
+ return {
+ /**
+ *
+ * @summary Create enterprise with the given parameters.
+ * @param {CreateEnterpriseParams} body Parameters used to create the enterprise.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ createEnterprise(body: CreateEnterpriseParams, options?: RawAxiosRequestConfig): AxiosPromise {
+ return localVarFp.createEnterprise(body, options).then((request) => request(axios, basePath));
+ },
+ /**
+ *
+ * @summary Create enterprise pool with the parameters given.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {CreatePoolParams} body Parameters used when creating the enterprise pool.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise {
+ return localVarFp.createEnterprisePool(enterpriseID, body, options).then((request) => request(axios, basePath));
+ },
+ /**
+ *
+ * @summary Create enterprise scale set with the parameters given.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError} + */ + createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createEnterpriseScaleSet(enterpriseID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete enterprise by ID. + * @param {string} enterpriseID ID of the enterprise to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteEnterprise(enterpriseID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteEnterprisePool(enterpriseID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get enterprise by ID. + * @param {string} enterpriseID The ID of the enterprise to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getEnterprise(enterpriseID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getEnterprisePool(enterpriseID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List enterprise instances. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listEnterpriseInstances(enterpriseID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List enterprise pools. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listEnterprisePools(enterpriseID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List enterprise scale sets. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listEnterpriseScaleSets(enterpriseID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List all enterprises. + * @param {string} [name] Exact enterprise name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError}
+ */
+ listEnterprises(name?: string, endpoint?: string, options?: RawAxiosRequestConfig): AxiosPromise> {
+ return localVarFp.listEnterprises(name, endpoint, options).then((request) => request(axios, basePath));
+ },
+ /**
+ *
+ * @summary Update enterprise with the given parameters.
+ * @param {string} enterpriseID The ID of the enterprise to update.
+ * @param {UpdateEntityParams} body Parameters used when updating the enterprise.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ updateEnterprise(enterpriseID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): AxiosPromise {
+ return localVarFp.updateEnterprise(enterpriseID, body, options).then((request) => request(axios, basePath));
+ },
+ /**
+ *
+ * @summary Update enterprise pool with the parameters given.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {string} poolID ID of the enterprise pool to update.
+ * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise {
+ return localVarFp.updateEnterprisePool(enterpriseID, poolID, body, options).then((request) => request(axios, basePath));
+ },
+ };
+};
+
+/**
+ * EnterprisesApi - object-oriented interface
+ * @export
+ * @class EnterprisesApi
+ * @extends {BaseAPI}
+ */
+export class EnterprisesApi extends BaseAPI {
+ /**
+ *
+ * @summary Create enterprise with the given parameters.
+ * @param {CreateEnterpriseParams} body Parameters used to create the enterprise.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ * @memberof EnterprisesApi
+ */
+ public createEnterprise(body: CreateEnterpriseParams, options?: RawAxiosRequestConfig) {
+ return EnterprisesApiFp(this.configuration).createEnterprise(body, options).then((request) => request(this.axios, this.basePath));
+ }
+
+ /**
+ *
+ * @summary Create enterprise pool with the parameters given.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {CreatePoolParams} body Parameters used when creating the enterprise pool.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ * @memberof EnterprisesApi
+ */
+ public createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) {
+ return EnterprisesApiFp(this.configuration).createEnterprisePool(enterpriseID, body, options).then((request) => request(this.axios, this.basePath));
+ }
+
+ /**
+ *
+ * @summary Create enterprise scale set with the parameters given.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ * @memberof EnterprisesApi
+ */
+ public createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) {
+ return EnterprisesApiFp(this.configuration).createEnterpriseScaleSet(enterpriseID, body, options).then((request) => request(this.axios, this.basePath));
+ }
+
+ /**
+ *
+ * @summary Delete enterprise by ID.
+ * @param {string} enterpriseID ID of the enterprise to delete.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError} + * @memberof EnterprisesApi + */ + public deleteEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig) { + return EnterprisesApiFp(this.configuration).deleteEnterprise(enterpriseID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EnterprisesApi + */ + public deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig) { + return EnterprisesApiFp(this.configuration).deleteEnterprisePool(enterpriseID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get enterprise by ID. + * @param {string} enterpriseID The ID of the enterprise to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EnterprisesApi + */ + public getEnterprise(enterpriseID: string, options?: RawAxiosRequestConfig) { + return EnterprisesApiFp(this.configuration).getEnterprise(enterpriseID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EnterprisesApi + */ + public getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig) { + return EnterprisesApiFp(this.configuration).getEnterprisePool(enterpriseID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List enterprise instances. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EnterprisesApi + */ + public listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig) { + return EnterprisesApiFp(this.configuration).listEnterpriseInstances(enterpriseID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List enterprise pools. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EnterprisesApi + */ + public listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig) { + return EnterprisesApiFp(this.configuration).listEnterprisePools(enterpriseID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List enterprise scale sets. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EnterprisesApi + */ + public listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig) { + return EnterprisesApiFp(this.configuration).listEnterpriseScaleSets(enterpriseID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List all enterprises. + * @param {string} [name] Exact enterprise name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof EnterprisesApi + */ + public listEnterprises(name?: string, endpoint?: string, options?: RawAxiosRequestConfig) { + return EnterprisesApiFp(this.configuration).listEnterprises(name, endpoint, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update enterprise with the given parameters. + * @param {string} enterpriseID The ID of the enterprise to update. + * @param {UpdateEntityParams} body Parameters used when updating the enterprise. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EnterprisesApi + */ + public updateEnterprise(enterpriseID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig) { + return EnterprisesApiFp(this.configuration).updateEnterprise(enterpriseID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EnterprisesApi + */ + public updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) { + return EnterprisesApiFp(this.configuration).updateEnterprisePool(enterpriseID, poolID, body, options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * FirstRunApi - axios parameter creator + * @export + */ +export const FirstRunApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Initialize the first run of the controller. + * @param {NewUserParams} body Create a new user. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + firstRun: async (body: NewUserParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'body' is not null or undefined + assertParamExists('firstRun', 'body', body) + const localVarPath = `/first-run`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * FirstRunApi - functional programming interface + * @export + */ +export const FirstRunApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = FirstRunApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Initialize the first run of the controller. + * @param {NewUserParams} body Create a new user. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async firstRun(body: NewUserParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.firstRun(body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['FirstRunApi.firstRun']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * FirstRunApi - factory interface + * @export + */ +export const FirstRunApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = FirstRunApiFp(configuration) + return { + /** + * + * @summary Initialize the first run of the controller. + * @param {NewUserParams} body Create a new user. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + firstRun(body: NewUserParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.firstRun(body, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * FirstRunApi - object-oriented interface + * @export + * @class FirstRunApi + * @extends {BaseAPI} + */ +export class FirstRunApi extends BaseAPI { + /** + * + * @summary Initialize the first run of the controller. + * @param {NewUserParams} body Create a new user. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof FirstRunApi + */ + public firstRun(body: NewUserParams, options?: RawAxiosRequestConfig) { + return FirstRunApiFp(this.configuration).firstRun(body, options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * HooksApi - axios parameter creator + * @export + */ +export const HooksApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Get information about the GARM installed webhook on an organization. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getOrgWebhookInfo: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('getOrgWebhookInfo', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}/webhook` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get information about the GARM installed webhook on a repository. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getRepoWebhookInfo: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('getRepoWebhookInfo', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}/webhook` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} orgID Organization ID. + * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + installOrgWebhook: async (orgID: string, body: InstallWebhookParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('installOrgWebhook', 'orgID', orgID) + // verify required parameter 'body' is not null or undefined + assertParamExists('installOrgWebhook', 'body', body) + const localVarPath = `/organizations/{orgID}/webhook` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} repoID Repository ID. + * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + installRepoWebhook: async (repoID: string, body: InstallWebhookParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('installRepoWebhook', 'repoID', repoID) + // verify required parameter 'body' is not null or undefined + assertParamExists('installRepoWebhook', 'body', body) + const localVarPath = `/repositories/{repoID}/webhook` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Uninstall organization webhook. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + uninstallOrgWebhook: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('uninstallOrgWebhook', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}/webhook` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Uninstall organization webhook. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + uninstallRepoWebhook: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('uninstallRepoWebhook', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}/webhook` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * HooksApi - functional programming interface + * @export + */ +export const HooksApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = HooksApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Get information about the GARM installed webhook on an organization. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getOrgWebhookInfo(orgID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['HooksApi.getOrgWebhookInfo']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get information about the GARM installed webhook on a repository. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. 
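+ * Illustrative sketch of using the functional layer (the repoID value is a placeholder and
+ * `config` is assumed to be a Configuration carrying a valid Bearer token):
+ * @example
+ * const makeRequest = await HooksApiFp(config).getRepoWebhookInfo('repo-uuid');
+ * const response = await makeRequest(); // executes the prepared request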
+ * @throws {RequiredError} + */ + async getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getRepoWebhookInfo(repoID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['HooksApi.getRepoWebhookInfo']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} orgID Organization ID. + * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.installOrgWebhook(orgID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['HooksApi.installOrgWebhook']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} repoID Repository ID. + * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.installRepoWebhook(repoID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['HooksApi.installRepoWebhook']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Uninstall organization webhook. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.uninstallOrgWebhook(orgID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['HooksApi.uninstallOrgWebhook']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Uninstall organization webhook. 
+ * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.uninstallRepoWebhook(repoID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['HooksApi.uninstallRepoWebhook']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * HooksApi - factory interface + * @export + */ +export const HooksApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = HooksApiFp(configuration) + return { + /** + * + * @summary Get information about the GARM installed webhook on an organization. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getOrgWebhookInfo(orgID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get information about the GARM installed webhook on a repository. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getRepoWebhookInfo(repoID, options).then((request) => request(axios, basePath)); + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} orgID Organization ID. + * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.installOrgWebhook(orgID, body, options).then((request) => request(axios, basePath)); + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} repoID Repository ID. + * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.installRepoWebhook(repoID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Uninstall organization webhook. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.uninstallOrgWebhook(orgID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Uninstall organization webhook. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.uninstallRepoWebhook(repoID, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * HooksApi - object-oriented interface + * @export + * @class HooksApi + * @extends {BaseAPI} + */ +export class HooksApi extends BaseAPI { + /** + * + * @summary Get information about the GARM installed webhook on an organization. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof HooksApi + */ + public getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig) { + return HooksApiFp(this.configuration).getOrgWebhookInfo(orgID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get information about the GARM installed webhook on a repository. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof HooksApi + */ + public getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig) { + return HooksApiFp(this.configuration).getRepoWebhookInfo(repoID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} orgID Organization ID. + * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof HooksApi + */ + public installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig) { + return HooksApiFp(this.configuration).installOrgWebhook(orgID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} repoID Repository ID. + * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof HooksApi + */ + public installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig) { + return HooksApiFp(this.configuration).installRepoWebhook(repoID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Uninstall organization webhook. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof HooksApi + */ + public uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig) { + return HooksApiFp(this.configuration).uninstallOrgWebhook(orgID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Uninstall organization webhook. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. 
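+ * Illustrative lifecycle sketch (repoID and installParams are placeholders; `config` is an
+ * authenticated Configuration):
+ * @example
+ * const hooks = new HooksApi(config);
+ * await hooks.installRepoWebhook(repoID, installParams); // installParams: InstallWebhookParams
+ * const info = await hooks.getRepoWebhookInfo(repoID);   // inspect the installed hook
+ * await hooks.uninstallRepoWebhook(repoID);              // remove it again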
+ * @throws {RequiredError} + * @memberof HooksApi + */ + public uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig) { + return HooksApiFp(this.configuration).uninstallRepoWebhook(repoID, options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * InstancesApi - axios parameter creator + * @export + */ +export const InstancesApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Delete runner instance by name. + * @param {string} instanceName Runner instance name. + * @param {boolean} [forceRemove] If true GARM will ignore any provider error when removing the runner and will continue to remove the runner from github and the GARM database. + * @param {boolean} [bypassGHUnauthorized] If true GARM will ignore unauthorized errors returned by GitHub when removing a runner. This is useful if you want to clean up runners and your credentials have expired. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteInstance: async (instanceName: string, forceRemove?: boolean, bypassGHUnauthorized?: boolean, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'instanceName' is not null or undefined + assertParamExists('deleteInstance', 'instanceName', instanceName) + const localVarPath = `/instances/{instanceName}` + .replace(`{${"instanceName"}}`, encodeURIComponent(String(instanceName))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + if (forceRemove !== undefined) { + localVarQueryParameter['forceRemove'] = forceRemove; + } + + if (bypassGHUnauthorized !== undefined) { + localVarQueryParameter['bypassGHUnauthorized'] = bypassGHUnauthorized; + } + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get runner instance by name. + * @param {string} instanceName Runner instance name. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getInstance: async (instanceName: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'instanceName' is not null or undefined + assertParamExists('getInstance', 'instanceName', instanceName) + const localVarPath = `/instances/{instanceName}` + .replace(`{${"instanceName"}}`, encodeURIComponent(String(instanceName))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List enterprise instances. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterpriseInstances: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('listEnterpriseInstances', 'enterpriseID', enterpriseID) + const localVarPath = `/enterprises/{enterpriseID}/instances` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get all runners\' instances. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listInstances: async (options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/instances`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List organization instances. + * @param {string} orgID Organization ID. 
+ * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgInstances: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('listOrgInstances', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}/instances` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List runner instances in a pool. + * @param {string} poolID Runner pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listPoolInstances: async (poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'poolID' is not null or undefined + assertParamExists('listPoolInstances', 'poolID', poolID) + const localVarPath = `/pools/{poolID}/instances` + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List repository instances. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepoInstances: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('listRepoInstances', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}/instances` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List runner instances in a scale set. + * @param {string} scalesetID Runner scale set ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listScaleSetInstances: async (scalesetID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'scalesetID' is not null or undefined + assertParamExists('listScaleSetInstances', 'scalesetID', scalesetID) + const localVarPath = `/scalesets/{scalesetID}/instances` + .replace(`{${"scalesetID"}}`, encodeURIComponent(String(scalesetID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * InstancesApi - functional programming interface + * @export + */ +export const InstancesApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = InstancesApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Delete runner instance by name. + * @param {string} instanceName Runner instance name. + * @param {boolean} [forceRemove] If true GARM will ignore any provider error when removing the runner and will continue to remove the runner from github and the GARM database. + * @param {boolean} [bypassGHUnauthorized] If true GARM will ignore unauthorized errors returned by GitHub when removing a runner. This is useful if you want to clean up runners and your credentials have expired. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteInstance(instanceName: string, forceRemove?: boolean, bypassGHUnauthorized?: boolean, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteInstance(instanceName, forceRemove, bypassGHUnauthorized, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; + const localVarOperationServerBasePath = operationServerMap['InstancesApi.deleteInstance']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get runner instance by name. + * @param {string} instanceName Runner instance name. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getInstance(instanceName: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getInstance(instanceName, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['InstancesApi.getInstance']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List enterprise instances. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterpriseInstances(enterpriseID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['InstancesApi.listEnterpriseInstances']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get all runners\' instances. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listInstances(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listInstances(options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['InstancesApi.listInstances']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List organization instances. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listOrgInstances(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgInstances(orgID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['InstancesApi.listOrgInstances']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List runner instances in a pool. + * @param {string} poolID Runner pool ID. 
+ * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listPoolInstances(poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listPoolInstances(poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['InstancesApi.listPoolInstances']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List repository instances. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listRepoInstances(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoInstances(repoID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['InstancesApi.listRepoInstances']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List runner instances in a scale set. + * @param {string} scalesetID Runner scale set ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listScaleSetInstances(scalesetID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listScaleSetInstances(scalesetID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['InstancesApi.listScaleSetInstances']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * InstancesApi - factory interface + * @export + */ +export const InstancesApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = InstancesApiFp(configuration) + return { + /** + * + * @summary Delete runner instance by name. + * @param {string} instanceName Runner instance name. + * @param {boolean} [forceRemove] If true GARM will ignore any provider error when removing the runner and will continue to remove the runner from github and the GARM database. + * @param {boolean} [bypassGHUnauthorized] If true GARM will ignore unauthorized errors returned by GitHub when removing a runner. This is useful if you want to clean up runners and your credentials have expired. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteInstance(instanceName: string, forceRemove?: boolean, bypassGHUnauthorized?: boolean, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteInstance(instanceName, forceRemove, bypassGHUnauthorized, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get runner instance by name. 
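+ * Illustrative sketch (the runner name is a placeholder; `config` is an authenticated
+ * Configuration):
+ * @example
+ * const instances = InstancesApiFactory(config);
+ * const runner = await instances.getInstance('garm-runner-01');
+ * // runner.data describes the runner instance returned by GARM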
+ * @param {string} instanceName Runner instance name. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getInstance(instanceName: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getInstance(instanceName, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List enterprise instances. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listEnterpriseInstances(enterpriseID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get all runners\' instances. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listInstances(options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listInstances(options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List organization instances. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgInstances(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listOrgInstances(orgID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List runner instances in a pool. + * @param {string} poolID Runner pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listPoolInstances(poolID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listPoolInstances(poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List repository instances. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepoInstances(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listRepoInstances(repoID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List runner instances in a scale set. + * @param {string} scalesetID Runner scale set ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listScaleSetInstances(scalesetID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listScaleSetInstances(scalesetID, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * InstancesApi - object-oriented interface + * @export + * @class InstancesApi + * @extends {BaseAPI} + */ +export class InstancesApi extends BaseAPI { + /** + * + * @summary Delete runner instance by name. + * @param {string} instanceName Runner instance name. + * @param {boolean} [forceRemove] If true GARM will ignore any provider error when removing the runner and will continue to remove the runner from github and the GARM database. + * @param {boolean} [bypassGHUnauthorized] If true GARM will ignore unauthorized errors returned by GitHub when removing a runner. This is useful if you want to clean up runners and your credentials have expired. + * @param {*} [options] Override http request option. 
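+ * Illustrative sketch (the runner name is a placeholder; the two boolean flags are optional
+ * and shown only to document the parameter order):
+ * @example
+ * const api = new InstancesApi(config);
+ * await api.deleteInstance('garm-runner-01', true, false);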
+ * @throws {RequiredError} + * @memberof InstancesApi + */ + public deleteInstance(instanceName: string, forceRemove?: boolean, bypassGHUnauthorized?: boolean, options?: RawAxiosRequestConfig) { + return InstancesApiFp(this.configuration).deleteInstance(instanceName, forceRemove, bypassGHUnauthorized, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get runner instance by name. + * @param {string} instanceName Runner instance name. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof InstancesApi + */ + public getInstance(instanceName: string, options?: RawAxiosRequestConfig) { + return InstancesApiFp(this.configuration).getInstance(instanceName, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List enterprise instances. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof InstancesApi + */ + public listEnterpriseInstances(enterpriseID: string, options?: RawAxiosRequestConfig) { + return InstancesApiFp(this.configuration).listEnterpriseInstances(enterpriseID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get all runners\' instances. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof InstancesApi + */ + public listInstances(options?: RawAxiosRequestConfig) { + return InstancesApiFp(this.configuration).listInstances(options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List organization instances. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof InstancesApi + */ + public listOrgInstances(orgID: string, options?: RawAxiosRequestConfig) { + return InstancesApiFp(this.configuration).listOrgInstances(orgID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List runner instances in a pool. + * @param {string} poolID Runner pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof InstancesApi + */ + public listPoolInstances(poolID: string, options?: RawAxiosRequestConfig) { + return InstancesApiFp(this.configuration).listPoolInstances(poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List repository instances. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof InstancesApi + */ + public listRepoInstances(repoID: string, options?: RawAxiosRequestConfig) { + return InstancesApiFp(this.configuration).listRepoInstances(repoID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List runner instances in a scale set. + * @param {string} scalesetID Runner scale set ID. + * @param {*} [options] Override http request option. 
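+ * Illustrative sketch (the scale set ID is a placeholder):
+ * @example
+ * const res = await new InstancesApi(config).listScaleSetInstances('42');
+ * // res.data holds the runner instances currently part of the scale set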
+ * @throws {RequiredError} + * @memberof InstancesApi + */ + public listScaleSetInstances(scalesetID: string, options?: RawAxiosRequestConfig) { + return InstancesApiFp(this.configuration).listScaleSetInstances(scalesetID, options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * JobsApi - axios parameter creator + * @export + */ +export const JobsApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary List all jobs. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listJobs: async (options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/jobs`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * JobsApi - functional programming interface + * @export + */ +export const JobsApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = JobsApiAxiosParamCreator(configuration) + return { + /** + * + * @summary List all jobs. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listJobs(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listJobs(options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['JobsApi.listJobs']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * JobsApi - factory interface + * @export + */ +export const JobsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = JobsApiFp(configuration) + return { + /** + * + * @summary List all jobs. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listJobs(options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listJobs(options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * JobsApi - object-oriented interface + * @export + * @class JobsApi + * @extends {BaseAPI} + */ +export class JobsApi extends BaseAPI { + /** + * + * @summary List all jobs. + * @param {*} [options] Override http request option. 
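+ * Illustrative sketch (`config` is an authenticated Configuration):
+ * @example
+ * const jobs = await new JobsApi(config).listJobs();
+ * // jobs.data is the list of jobs GARM is currently tracking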
+ * @throws {RequiredError} + * @memberof JobsApi + */ + public listJobs(options?: RawAxiosRequestConfig) { + return JobsApiFp(this.configuration).listJobs(options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * LoginApi - axios parameter creator + * @export + */ +export const LoginApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Logs in a user and returns a JWT token. + * @param {PasswordLoginParams} body Login information. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + login: async (body: PasswordLoginParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'body' is not null or undefined + assertParamExists('login', 'body', body) + const localVarPath = `/auth/login`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * LoginApi - functional programming interface + * @export + */ +export const LoginApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = LoginApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Logs in a user and returns a JWT token. + * @param {PasswordLoginParams} body Login information. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async login(body: PasswordLoginParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.login(body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['LoginApi.login']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * LoginApi - factory interface + * @export + */ +export const LoginApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = LoginApiFp(configuration) + return { + /** + * + * @summary Logs in a user and returns a JWT token. + * @param {PasswordLoginParams} body Login information. + * @param {*} [options] Override http request option. 
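+ * Illustrative sketch (the credential values are placeholders and the PasswordLoginParams
+ * field names are assumed from the model):
+ * @example
+ * const resp = await LoginApiFactory(config).login({ username: 'admin', password: 'secret' });
+ * // resp.data carries the JWT to use as the Bearer credential on subsequent requests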
+ * @throws {RequiredError} + */ + login(body: PasswordLoginParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.login(body, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * LoginApi - object-oriented interface + * @export + * @class LoginApi + * @extends {BaseAPI} + */ +export class LoginApi extends BaseAPI { + /** + * + * @summary Logs in a user and returns a JWT token. + * @param {PasswordLoginParams} body Login information. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof LoginApi + */ + public login(body: PasswordLoginParams, options?: RawAxiosRequestConfig) { + return LoginApiFp(this.configuration).login(body, options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * MetricsTokenApi - axios parameter creator + * @export + */ +export const MetricsTokenApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Returns a JWT token that can be used to access the metrics endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getMetricsToken: async (options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/metrics-token`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * MetricsTokenApi - functional programming interface + * @export + */ +export const MetricsTokenApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = MetricsTokenApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Returns a JWT token that can be used to access the metrics endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getMetricsToken(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getMetricsToken(options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['MetricsTokenApi.getMetricsToken']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * MetricsTokenApi - factory interface + * @export + */ +export const MetricsTokenApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = MetricsTokenApiFp(configuration) + return { + /** + * + * @summary Returns a JWT token that can be used to access the metrics endpoint. 
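+ * Illustrative sketch (`config` must carry a valid controller JWT):
+ * @example
+ * const tok = await MetricsTokenApiFactory(config).getMetricsToken();
+ * // tok.data is the JWT accepted by the metrics endpoint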
+ * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getMetricsToken(options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getMetricsToken(options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * MetricsTokenApi - object-oriented interface + * @export + * @class MetricsTokenApi + * @extends {BaseAPI} + */ +export class MetricsTokenApi extends BaseAPI { + /** + * + * @summary Returns a JWT token that can be used to access the metrics endpoint. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof MetricsTokenApi + */ + public getMetricsToken(options?: RawAxiosRequestConfig) { + return MetricsTokenApiFp(this.configuration).getMetricsToken(options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * OrganizationsApi - axios parameter creator + * @export + */ +export const OrganizationsApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Create organization with the parameters given. + * @param {CreateOrgParams} body Parameters used when creating the organization. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createOrg: async (body: CreateOrgParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'body' is not null or undefined + assertParamExists('createOrg', 'body', body) + const localVarPath = `/organizations`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Create organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreatePoolParams} body Parameters used when creating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createOrgPool: async (orgID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('createOrgPool', 'orgID', orgID) + // verify required parameter 'body' is not null or undefined + assertParamExists('createOrgPool', 'body', body) + const localVarPath = `/organizations/{orgID}/pools` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Create organization scale set with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createOrgScaleSet: async (orgID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('createOrgScaleSet', 'orgID', orgID) + // verify required parameter 'body' is not null or undefined + assertParamExists('createOrgScaleSet', 'body', body) + const localVarPath = `/organizations/{orgID}/scalesets` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete organization by ID. + * @param {string} orgID ID of the organization to delete. + * @param {boolean} [keepWebhook] If true and a webhook is installed for this organization, it will not be removed. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + deleteOrg: async (orgID: string, keepWebhook?: boolean, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('deleteOrg', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + if (keepWebhook !== undefined) { + localVarQueryParameter['keepWebhook'] = keepWebhook; + } + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteOrgPool: async (orgID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('deleteOrgPool', 'orgID', orgID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('deleteOrgPool', 'poolID', poolID) + const localVarPath = `/organizations/{orgID}/pools/{poolID}` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get organization by ID. + * @param {string} orgID ID of the organization to fetch. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + getOrg: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('getOrg', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getOrgPool: async (orgID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('getOrgPool', 'orgID', orgID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('getOrgPool', 'poolID', poolID) + const localVarPath = `/organizations/{orgID}/pools/{poolID}` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get information about the GARM installed webhook on an organization. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getOrgWebhookInfo: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('getOrgWebhookInfo', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}/webhook` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} orgID Organization ID. + * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + installOrgWebhook: async (orgID: string, body: InstallWebhookParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('installOrgWebhook', 'orgID', orgID) + // verify required parameter 'body' is not null or undefined + assertParamExists('installOrgWebhook', 'body', body) + const localVarPath = `/organizations/{orgID}/webhook` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List organization instances. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgInstances: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('listOrgInstances', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}/instances` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List organization pools. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgPools: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('listOrgPools', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}/pools` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List organization scale sets. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgScaleSets: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('listOrgScaleSets', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}/scalesets` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List organizations. + * @param {string} [name] Exact organization name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgs: async (name?: string, endpoint?: string, options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/organizations`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + if (name !== undefined) { + localVarQueryParameter['name'] = name; + } + + if (endpoint !== undefined) { + localVarQueryParameter['endpoint'] = endpoint; + } + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Uninstall organization webhook. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + uninstallOrgWebhook: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('uninstallOrgWebhook', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}/webhook` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update organization with the parameters given. + * @param {string} orgID ID of the organization to update. + * @param {UpdateEntityParams} body Parameters used when updating the organization. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + updateOrg: async (orgID: string, body: UpdateEntityParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('updateOrg', 'orgID', orgID) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateOrg', 'body', body) + const localVarPath = `/organizations/{orgID}` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateOrgPool: async (orgID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('updateOrgPool', 'orgID', orgID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('updateOrgPool', 'poolID', poolID) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateOrgPool', 'body', body) + const localVarPath = `/organizations/{orgID}/pools/{poolID}` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * OrganizationsApi - functional programming interface + * @export + */ +export const OrganizationsApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = OrganizationsApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Create organization with the parameters given. + * @param {CreateOrgParams} body Parameters used when creating the organization. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createOrg(body: CreateOrgParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createOrg(body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.createOrg']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Create organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreatePoolParams} body Parameters used when creating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createOrgPool(orgID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.createOrgPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Create organization scale set with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createOrgScaleSet(orgID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.createOrgScaleSet']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete organization by ID. + * @param {string} orgID ID of the organization to delete. + * @param {boolean} [keepWebhook] If true and a webhook is installed for this organization, it will not be removed. 
+ * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteOrg(orgID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteOrg(orgID, keepWebhook, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.deleteOrg']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteOrgPool(orgID, poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.deleteOrgPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get organization by ID. + * @param {string} orgID ID of the organization to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getOrg(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getOrg(orgID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.getOrg']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getOrgPool(orgID, poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.getOrgPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get information about the GARM installed webhook on an organization. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getOrgWebhookInfo(orgID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.getOrgWebhookInfo']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} orgID Organization ID. + * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.installOrgWebhook(orgID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.installOrgWebhook']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List organization instances. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listOrgInstances(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgInstances(orgID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.listOrgInstances']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List organization pools. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listOrgPools(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgPools(orgID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.listOrgPools']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List organization scale sets. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgScaleSets(orgID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.listOrgScaleSets']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List organizations. + * @param {string} [name] Exact organization name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listOrgs(name?: string, endpoint?: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgs(name, endpoint, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.listOrgs']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Uninstall organization webhook. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.uninstallOrgWebhook(orgID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.uninstallOrgWebhook']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Update organization with the parameters given. + * @param {string} orgID ID of the organization to update. + * @param {UpdateEntityParams} body Parameters used when updating the organization. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async updateOrg(orgID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updateOrg(orgID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.updateOrg']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Update organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the organization pool. 
+ * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updateOrgPool(orgID, poolID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['OrganizationsApi.updateOrgPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * OrganizationsApi - factory interface + * @export + */ +export const OrganizationsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = OrganizationsApiFp(configuration) + return { + /** + * + * @summary Create organization with the parameters given. + * @param {CreateOrgParams} body Parameters used when creating the organization. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createOrg(body: CreateOrgParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createOrg(body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Create organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreatePoolParams} body Parameters used when creating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createOrgPool(orgID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Create organization scale set with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createOrgScaleSet(orgID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete organization by ID. + * @param {string} orgID ID of the organization to delete. + * @param {boolean} [keepWebhook] If true and a webhook is installed for this organization, it will not be removed. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteOrg(orgID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteOrg(orgID, keepWebhook, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteOrgPool(orgID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get organization by ID. + * @param {string} orgID ID of the organization to fetch. 
+ * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getOrg(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getOrg(orgID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getOrgPool(orgID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get information about the GARM installed webhook on an organization. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getOrgWebhookInfo(orgID, options).then((request) => request(axios, basePath)); + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} orgID Organization ID. + * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.installOrgWebhook(orgID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List organization instances. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgInstances(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listOrgInstances(orgID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List organization pools. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgPools(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listOrgPools(orgID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List organization scale sets. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listOrgScaleSets(orgID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List organizations. + * @param {string} [name] Exact organization name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgs(name?: string, endpoint?: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listOrgs(name, endpoint, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Uninstall organization webhook. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.uninstallOrgWebhook(orgID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update organization with the parameters given. + * @param {string} orgID ID of the organization to update. + * @param {UpdateEntityParams} body Parameters used when updating the organization. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateOrg(orgID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateOrg(orgID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateOrgPool(orgID, poolID, body, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * OrganizationsApi - object-oriented interface + * @export + * @class OrganizationsApi + * @extends {BaseAPI} + */ +export class OrganizationsApi extends BaseAPI { + /** + * + * @summary Create organization with the parameters given. + * @param {CreateOrgParams} body Parameters used when creating the organization. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public createOrg(body: CreateOrgParams, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).createOrg(body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Create organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreatePoolParams} body Parameters used when creating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).createOrgPool(orgID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Create organization scale set with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).createOrgScaleSet(orgID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete organization by ID. + * @param {string} orgID ID of the organization to delete. + * @param {boolean} [keepWebhook] If true and a webhook is installed for this organization, it will not be removed. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public deleteOrg(orgID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).deleteOrg(orgID, keepWebhook, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).deleteOrgPool(orgID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get organization by ID. + * @param {string} orgID ID of the organization to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public getOrg(orgID: string, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).getOrg(orgID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).getOrgPool(orgID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get information about the GARM installed webhook on an organization. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public getOrgWebhookInfo(orgID: string, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).getOrgWebhookInfo(orgID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} orgID Organization ID. + * @param {InstallWebhookParams} body Parameters used when creating the organization webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public installOrgWebhook(orgID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).installOrgWebhook(orgID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List organization instances. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public listOrgInstances(orgID: string, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).listOrgInstances(orgID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List organization pools. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public listOrgPools(orgID: string, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).listOrgPools(orgID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List organization scale sets. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).listOrgScaleSets(orgID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List organizations. + * @param {string} [name] Exact organization name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public listOrgs(name?: string, endpoint?: string, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).listOrgs(name, endpoint, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Uninstall organization webhook. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public uninstallOrgWebhook(orgID: string, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).uninstallOrgWebhook(orgID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update organization with the parameters given. + * @param {string} orgID ID of the organization to update. + * @param {UpdateEntityParams} body Parameters used when updating the organization. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public updateOrg(orgID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).updateOrg(orgID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof OrganizationsApi + */ + public updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) { + return OrganizationsApiFp(this.configuration).updateOrgPool(orgID, poolID, body, options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * PoolsApi - axios parameter creator + * @export + */ +export const PoolsApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Create enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + createEnterprisePool: async (enterpriseID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('createEnterprisePool', 'enterpriseID', enterpriseID) + // verify required parameter 'body' is not null or undefined + assertParamExists('createEnterprisePool', 'body', body) + const localVarPath = `/enterprises/{enterpriseID}/pools` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Create organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreatePoolParams} body Parameters used when creating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createOrgPool: async (orgID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('createOrgPool', 'orgID', orgID) + // verify required parameter 'body' is not null or undefined + assertParamExists('createOrgPool', 'body', body) + const localVarPath = `/organizations/{orgID}/pools` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Create repository pool with the parameters given. 
+ * @param {string} repoID Repository ID. + * @param {CreatePoolParams} body Parameters used when creating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createRepoPool: async (repoID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('createRepoPool', 'repoID', repoID) + // verify required parameter 'body' is not null or undefined + assertParamExists('createRepoPool', 'body', body) + const localVarPath = `/repositories/{repoID}/pools` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteEnterprisePool: async (enterpriseID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('deleteEnterprisePool', 'enterpriseID', enterpriseID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('deleteEnterprisePool', 'poolID', poolID) + const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteOrgPool: async (orgID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('deleteOrgPool', 'orgID', orgID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('deleteOrgPool', 'poolID', poolID) + const localVarPath = `/organizations/{orgID}/pools/{poolID}` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete pool by ID. + * @param {string} poolID ID of the pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deletePool: async (poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'poolID' is not null or undefined + assertParamExists('deletePool', 'poolID', poolID) + const localVarPath = `/pools/{poolID}` + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to delete. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + deleteRepoPool: async (repoID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('deleteRepoPool', 'repoID', repoID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('deleteRepoPool', 'poolID', poolID) + const localVarPath = `/repositories/{repoID}/pools/{poolID}` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getEnterprisePool: async (enterpriseID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('getEnterprisePool', 'enterpriseID', enterpriseID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('getEnterprisePool', 'poolID', poolID) + const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. 
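+ * @example
+ * // Hedged sketch (illustrative, not generated): the parameter creator only assembles
+ * // the request arguments ({ url, options }); it performs no HTTP call itself.
+ * // const args = await PoolsApiAxiosParamCreator(configuration).getOrgPool(orgID, poolID);
+ * // args.url is "/organizations/{orgID}/pools/{poolID}" with both IDs substituted.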
+ * @throws {RequiredError} + */ + getOrgPool: async (orgID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('getOrgPool', 'orgID', orgID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('getOrgPool', 'poolID', poolID) + const localVarPath = `/organizations/{orgID}/pools/{poolID}` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get pool by ID. + * @param {string} poolID ID of the pool to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getPool: async (poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'poolID' is not null or undefined + assertParamExists('getPool', 'poolID', poolID) + const localVarPath = `/pools/{poolID}` + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + getRepoPool: async (repoID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('getRepoPool', 'repoID', repoID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('getRepoPool', 'poolID', poolID) + const localVarPath = `/repositories/{repoID}/pools/{poolID}` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List enterprise pools. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterprisePools: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('listEnterprisePools', 'enterpriseID', enterpriseID) + const localVarPath = `/enterprises/{enterpriseID}/pools` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List organization pools. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgPools: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('listOrgPools', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}/pools` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List all pools. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listPools: async (options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/pools`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List repository pools. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepoPools: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('listRepoPools', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}/pools` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to update. 
+ * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateEnterprisePool: async (enterpriseID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('updateEnterprisePool', 'enterpriseID', enterpriseID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('updateEnterprisePool', 'poolID', poolID) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateEnterprisePool', 'body', body) + const localVarPath = `/enterprises/{enterpriseID}/pools/{poolID}` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateOrgPool: async (orgID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('updateOrgPool', 'orgID', orgID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('updateOrgPool', 'poolID', poolID) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateOrgPool', 'body', body) + const localVarPath = `/organizations/{orgID}/pools/{poolID}` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update pool by ID. + * @param {string} poolID ID of the pool to update. + * @param {UpdatePoolParams} body Parameters to update the pool with. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updatePool: async (poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'poolID' is not null or undefined + assertParamExists('updatePool', 'poolID', poolID) + // verify required parameter 'body' is not null or undefined + assertParamExists('updatePool', 'body', body) + const localVarPath = `/pools/{poolID}` + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the repository pool. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + updateRepoPool: async (repoID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('updateRepoPool', 'repoID', repoID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('updateRepoPool', 'poolID', poolID) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateRepoPool', 'body', body) + const localVarPath = `/repositories/{repoID}/pools/{poolID}` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * PoolsApi - functional programming interface + * @export + */ +export const PoolsApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = PoolsApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Create enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createEnterprisePool(enterpriseID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['PoolsApi.createEnterprisePool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Create organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreatePoolParams} body Parameters used when creating the organization pool. + * @param {*} [options] Override http request option. 
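+ * @example
+ * // Hedged sketch of the functional layer: each method resolves to a request function
+ * // which is then invoked with an (optional) axios instance and base path.
+ * // const send = await PoolsApiFp(configuration).createOrgPool(orgID, body);
+ * // const response = await send(); // falls back to the global axios instance and default base path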
+ * @throws {RequiredError} + */ + async createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createOrgPool(orgID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['PoolsApi.createOrgPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Create repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreatePoolParams} body Parameters used when creating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createRepoPool(repoID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['PoolsApi.createRepoPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteEnterprisePool(enterpriseID, poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['PoolsApi.deleteEnterprisePool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteOrgPool(orgID, poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['PoolsApi.deleteOrgPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete pool by ID. + * @param {string} poolID ID of the pool to delete. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async deletePool(poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deletePool(poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['PoolsApi.deletePool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteRepoPool(repoID, poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['PoolsApi.deleteRepoPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getEnterprisePool(enterpriseID, poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['PoolsApi.getEnterprisePool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getOrgPool(orgID, poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['PoolsApi.getOrgPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get pool by ID. + * @param {string} poolID ID of the pool to fetch. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError}
+ */
+ async getPool(poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Pool>> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.getPool(poolID, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['PoolsApi.getPool']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary Get repository pool by ID.
+ * @param {string} repoID Repository ID.
+ * @param {string} poolID Pool ID.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Pool>> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.getRepoPool(repoID, poolID, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['PoolsApi.getRepoPool']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary List enterprise pools.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Pool>>> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterprisePools(enterpriseID, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['PoolsApi.listEnterprisePools']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary List organization pools.
+ * @param {string} orgID Organization ID.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async listOrgPools(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Pool>>> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgPools(orgID, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['PoolsApi.listOrgPools']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary List all pools.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async listPools(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Pool>>> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.listPools(options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['PoolsApi.listPools']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary List repository pools.
+ * @param {string} repoID Repository ID.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async listRepoPools(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<Pool>>> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoPools(repoID, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['PoolsApi.listRepoPools']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary Update enterprise pool with the parameters given.
+ * @param {string} enterpriseID Enterprise ID.
+ * @param {string} poolID ID of the enterprise pool to update.
+ * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Pool>> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.updateEnterprisePool(enterpriseID, poolID, body, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['PoolsApi.updateEnterprisePool']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary Update organization pool with the parameters given.
+ * @param {string} orgID Organization ID.
+ * @param {string} poolID ID of the organization pool to update.
+ * @param {UpdatePoolParams} body Parameters used when updating the organization pool.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Pool>> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.updateOrgPool(orgID, poolID, body, options);
+ const localVarOperationServerIndex = configuration?.serverIndex ?? 0;
+ const localVarOperationServerBasePath = operationServerMap['PoolsApi.updateOrgPool']?.[localVarOperationServerIndex]?.url;
+ return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath);
+ },
+ /**
+ *
+ * @summary Update pool by ID.
+ * @param {string} poolID ID of the pool to update.
+ * @param {UpdatePoolParams} body Parameters to update the pool with.
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError} + */ + async updatePool(poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updatePool(poolID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['PoolsApi.updatePool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Update repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updateRepoPool(repoID, poolID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['PoolsApi.updateRepoPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * PoolsApi - factory interface + * @export + */ +export const PoolsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = PoolsApiFp(configuration) + return { + /** + * + * @summary Create enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createEnterprisePool(enterpriseID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Create organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreatePoolParams} body Parameters used when creating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createOrgPool(orgID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Create repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreatePoolParams} body Parameters used when creating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createRepoPool(repoID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. 
+ * @param {string} poolID ID of the enterprise pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteEnterprisePool(enterpriseID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteOrgPool(orgID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete pool by ID. + * @param {string} poolID ID of the pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deletePool(poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deletePool(poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteRepoPool(repoID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getEnterprisePool(enterpriseID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getOrgPool(orgID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get pool by ID. + * @param {string} poolID ID of the pool to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getPool(poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getPool(poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getRepoPool(repoID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List enterprise pools. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. 
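+ * @example
+ * // Hedged sketch: the factory wraps the functional layer in plain promise-based
+ * // methods bound to the supplied basePath and axios instance (both optional).
+ * // const pools = PoolsApiFactory(configuration, basePath, axiosInstance);
+ * // const result = await pools.listEnterprisePools(enterpriseID);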
+ * @throws {RequiredError} + */ + listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listEnterprisePools(enterpriseID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List organization pools. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgPools(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listOrgPools(orgID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List all pools. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listPools(options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listPools(options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List repository pools. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepoPools(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listRepoPools(repoID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateEnterprisePool(enterpriseID, poolID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateOrgPool(orgID, poolID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update pool by ID. + * @param {string} poolID ID of the pool to update. + * @param {UpdatePoolParams} body Parameters to update the pool with. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updatePool(poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updatePool(poolID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the repository pool. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateRepoPool(repoID, poolID, body, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * PoolsApi - object-oriented interface + * @export + * @class PoolsApi + * @extends {BaseAPI} + */ +export class PoolsApi extends BaseAPI { + /** + * + * @summary Create enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {CreatePoolParams} body Parameters used when creating the enterprise pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public createEnterprisePool(enterpriseID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).createEnterprisePool(enterpriseID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Create organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreatePoolParams} body Parameters used when creating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public createOrgPool(orgID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).createOrgPool(orgID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Create repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreatePoolParams} body Parameters used when creating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).createRepoPool(repoID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public deleteEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).deleteEnterprisePool(enterpriseID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public deleteOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).deleteOrgPool(orgID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete pool by ID. + * @param {string} poolID ID of the pool to delete. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof PoolsApi + */ + public deletePool(poolID: string, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).deletePool(poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).deleteRepoPool(repoID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get enterprise pool by ID. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public getEnterprisePool(enterpriseID: string, poolID: string, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).getEnterprisePool(enterpriseID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get organization pool by ID. + * @param {string} orgID Organization ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public getOrgPool(orgID: string, poolID: string, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).getOrgPool(orgID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get pool by ID. + * @param {string} poolID ID of the pool to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public getPool(poolID: string, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).getPool(poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).getRepoPool(repoID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List enterprise pools. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public listEnterprisePools(enterpriseID: string, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).listEnterprisePools(enterpriseID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List organization pools. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public listOrgPools(orgID: string, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).listOrgPools(orgID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List all pools. + * @param {*} [options] Override http request option. 
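+     *
+     * Example (illustrative only, not part of the generated client): a minimal sketch of
+     * how this object-oriented PoolsApi might be driven. The './api' import path, the
+     * base path and the GARM_TOKEN environment variable are assumptions.
+     *
+     *   import { Configuration, PoolsApi } from './api';
+     *
+     *   const config = new Configuration({
+     *     basePath: 'https://garm.example.com/api/v1',    // assumed GARM API endpoint
+     *     apiKey: `Bearer ${process.env.GARM_TOKEN}`,     // sent via the Authorization header
+     *   });
+     *   const pools = new PoolsApi(config);
+     *   const res = await pools.listPools();              // resolved axios response
+     *   console.log(res.data);                            // the list of pools
+     *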
+ * @throws {RequiredError} + * @memberof PoolsApi + */ + public listPools(options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).listPools(options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List repository pools. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public listRepoPools(repoID: string, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).listRepoPools(repoID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {string} poolID ID of the enterprise pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the enterprise pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public updateEnterprisePool(enterpriseID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).updateEnterprisePool(enterpriseID, poolID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update organization pool with the parameters given. + * @param {string} orgID Organization ID. + * @param {string} poolID ID of the organization pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the organization pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public updateOrgPool(orgID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).updateOrgPool(orgID, poolID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update pool by ID. + * @param {string} poolID ID of the pool to update. + * @param {UpdatePoolParams} body Parameters to update the pool with. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public updatePool(poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).updatePool(poolID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PoolsApi + */ + public updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) { + return PoolsApiFp(this.configuration).updateRepoPool(repoID, poolID, body, options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * ProvidersApi - axios parameter creator + * @export + */ +export const ProvidersApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary List all providers. + * @param {*} [options] Override http request option. 
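+         *
+         * Example (illustrative only, not part of the generated client): listing providers
+         * through the object-oriented ProvidersApi wrapper. The import path, base path and
+         * the `name` field on the returned objects are assumptions.
+         *
+         *   import { Configuration, ProvidersApi } from './api';
+         *
+         *   const providers = new ProvidersApi(new Configuration({
+         *     basePath: 'https://garm.example.com/api/v1',
+         *     apiKey: `Bearer ${process.env.GARM_TOKEN}`,
+         *   }));
+         *   const res = await providers.listProviders();
+         *   console.log(res.data.map((p: any) => p.name));   // provider names; field assumed
+         *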
+ * @throws {RequiredError} + */ + listProviders: async (options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/providers`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * ProvidersApi - functional programming interface + * @export + */ +export const ProvidersApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = ProvidersApiAxiosParamCreator(configuration) + return { + /** + * + * @summary List all providers. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listProviders(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listProviders(options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ProvidersApi.listProviders']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * ProvidersApi - factory interface + * @export + */ +export const ProvidersApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = ProvidersApiFp(configuration) + return { + /** + * + * @summary List all providers. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listProviders(options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listProviders(options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * ProvidersApi - object-oriented interface + * @export + * @class ProvidersApi + * @extends {BaseAPI} + */ +export class ProvidersApi extends BaseAPI { + /** + * + * @summary List all providers. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ProvidersApi + */ + public listProviders(options?: RawAxiosRequestConfig) { + return ProvidersApiFp(this.configuration).listProviders(options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * RepositoriesApi - axios parameter creator + * @export + */ +export const RepositoriesApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Create repository with the parameters given. + * @param {CreateRepoParams} body Parameters used when creating the repository. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + createRepo: async (body: CreateRepoParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'body' is not null or undefined + assertParamExists('createRepo', 'body', body) + const localVarPath = `/repositories`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Create repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreatePoolParams} body Parameters used when creating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createRepoPool: async (repoID: string, body: CreatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('createRepoPool', 'repoID', repoID) + // verify required parameter 'body' is not null or undefined + assertParamExists('createRepoPool', 'body', body) + const localVarPath = `/repositories/{repoID}/pools` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Create repository scale set with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set. + * @param {*} [options] Override http request option. 
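+         *
+         * Example (illustrative only, not part of the generated client): creating a scale set
+         * for a repository via the object-oriented wrapper. The CreateScaleSetParams field
+         * names below are assumptions; check the generated model for the exact shape.
+         *
+         *   import { Configuration, CreateScaleSetParams, RepositoriesApi } from './api';
+         *
+         *   const repos = new RepositoriesApi(new Configuration({
+         *     basePath: 'https://garm.example.com/api/v1',
+         *     apiKey: `Bearer ${process.env.GARM_TOKEN}`,
+         *   }));
+         *   const repoID = '<repository-uuid>';
+         *   const body = {
+         *     name: 'linux-amd64',              // assumed field
+         *     provider_name: 'lxd',             // assumed field
+         *     image: 'ubuntu:22.04',            // assumed field
+         *     max_runners: 5,                   // assumed field
+         *     min_idle_runners: 1,              // assumed field
+         *   } as CreateScaleSetParams;
+         *   const created = await repos.createRepoScaleSet(repoID, body);
+         *   console.log(created.data);
+         *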
+ * @throws {RequiredError} + */ + createRepoScaleSet: async (repoID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('createRepoScaleSet', 'repoID', repoID) + // verify required parameter 'body' is not null or undefined + assertParamExists('createRepoScaleSet', 'body', body) + const localVarPath = `/repositories/{repoID}/scalesets` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete repository by ID. + * @param {string} repoID ID of the repository to delete. + * @param {boolean} [keepWebhook] If true and a webhook is installed for this repo, it will not be removed. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteRepo: async (repoID: string, keepWebhook?: boolean, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('deleteRepo', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + if (keepWebhook !== undefined) { + localVarQueryParameter['keepWebhook'] = keepWebhook; + } + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to delete. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + deleteRepoPool: async (repoID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('deleteRepoPool', 'repoID', repoID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('deleteRepoPool', 'poolID', poolID) + const localVarPath = `/repositories/{repoID}/pools/{poolID}` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get repository by ID. + * @param {string} repoID ID of the repository to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getRepo: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('getRepo', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + getRepoPool: async (repoID: string, poolID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('getRepoPool', 'repoID', repoID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('getRepoPool', 'poolID', poolID) + const localVarPath = `/repositories/{repoID}/pools/{poolID}` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get information about the GARM installed webhook on a repository. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getRepoWebhookInfo: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('getRepoWebhookInfo', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}/webhook` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} repoID Repository ID. + * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. + * @param {*} [options] Override http request option. 
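+         *
+         * Example (illustrative only, not part of the generated client): installing the GARM
+         * webhook on a repository. The InstallWebhookParams shape is not shown here, so the
+         * empty body below is only an assumption standing in for the real parameters.
+         *
+         *   import { Configuration, InstallWebhookParams, RepositoriesApi } from './api';
+         *
+         *   const repos = new RepositoriesApi(new Configuration({
+         *     basePath: 'https://garm.example.com/api/v1',
+         *     apiKey: `Bearer ${process.env.GARM_TOKEN}`,
+         *   }));
+         *   const hook = await repos.installRepoWebhook('<repository-uuid>', {} as InstallWebhookParams);
+         *   console.log(hook.data);              // information about the installed webhook
+         *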
+ * @throws {RequiredError} + */ + installRepoWebhook: async (repoID: string, body: InstallWebhookParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('installRepoWebhook', 'repoID', repoID) + // verify required parameter 'body' is not null or undefined + assertParamExists('installRepoWebhook', 'body', body) + const localVarPath = `/repositories/{repoID}/webhook` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List repository instances. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepoInstances: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('listRepoInstances', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}/instances` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List repository pools. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + listRepoPools: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('listRepoPools', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}/pools` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List repository scale sets. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepoScaleSets: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('listRepoScaleSets', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}/scalesets` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List repositories. + * @param {string} [owner] Exact owner name to filter by + * @param {string} [name] Exact repository name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepos: async (owner?: string, name?: string, endpoint?: string, options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/repositories`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + if (owner !== undefined) { + localVarQueryParameter['owner'] = owner; + } + + if (name !== undefined) { + localVarQueryParameter['name'] = name; + } + + if (endpoint !== undefined) { + localVarQueryParameter['endpoint'] = endpoint; + } + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Uninstall organization webhook. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + uninstallRepoWebhook: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('uninstallRepoWebhook', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}/webhook` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update repository with the parameters given. + * @param {string} repoID ID of the repository to update. + * @param {UpdateEntityParams} body Parameters used when updating the repository. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateRepo: async (repoID: string, body: UpdateEntityParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('updateRepo', 'repoID', repoID) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateRepo', 'body', body) + const localVarPath = `/repositories/{repoID}` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateRepoPool: async (repoID: string, poolID: string, body: UpdatePoolParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('updateRepoPool', 'repoID', repoID) + // verify required parameter 'poolID' is not null or undefined + assertParamExists('updateRepoPool', 'poolID', poolID) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateRepoPool', 'body', body) + const localVarPath = `/repositories/{repoID}/pools/{poolID}` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))) + .replace(`{${"poolID"}}`, encodeURIComponent(String(poolID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * RepositoriesApi - functional programming interface + * @export + */ +export const RepositoriesApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = RepositoriesApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Create repository with the parameters given. + * @param {CreateRepoParams} body Parameters used when creating the repository. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async createRepo(body: CreateRepoParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createRepo(body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.createRepo']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Create repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreatePoolParams} body Parameters used when creating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createRepoPool(repoID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.createRepoPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Create repository scale set with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createRepoScaleSet(repoID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.createRepoScaleSet']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete repository by ID. + * @param {string} repoID ID of the repository to delete. + * @param {boolean} [keepWebhook] If true and a webhook is installed for this repo, it will not be removed. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteRepo(repoID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteRepo(repoID, keepWebhook, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.deleteRepo']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete repository pool by ID. + * @param {string} repoID Repository ID. 
+ * @param {string} poolID ID of the repository pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteRepoPool(repoID, poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.deleteRepoPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get repository by ID. + * @param {string} repoID ID of the repository to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getRepo(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getRepo(repoID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.getRepo']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getRepoPool(repoID, poolID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.getRepoPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get information about the GARM installed webhook on a repository. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getRepoWebhookInfo(repoID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.getRepoWebhookInfo']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} repoID Repository ID. + * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. 
+ * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.installRepoWebhook(repoID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.installRepoWebhook']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List repository instances. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listRepoInstances(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoInstances(repoID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.listRepoInstances']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List repository pools. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listRepoPools(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoPools(repoID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.listRepoPools']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List repository scale sets. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoScaleSets(repoID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.listRepoScaleSets']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List repositories. + * @param {string} [owner] Exact owner name to filter by + * @param {string} [name] Exact repository name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. 
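+         *
+         * Example (illustrative only, not part of the generated client), assuming a
+         * RepositoriesApi instance named `repos` configured as in the earlier sketches.
+         * The filters are plain positional arguments, so skipped ones are passed as undefined:
+         *
+         *   const mine = await repos.listRepos('my-org');                                   // by owner
+         *   const one = await repos.listRepos('my-org', 'my-repo');                         // owner + name
+         *   const ghes = await repos.listRepos(undefined, undefined, 'github.example.com'); // by endpoint
+         *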
+ * @throws {RequiredError} + */ + async listRepos(owner?: string, name?: string, endpoint?: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listRepos(owner, name, endpoint, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.listRepos']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Uninstall organization webhook. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.uninstallRepoWebhook(repoID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.uninstallRepoWebhook']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Update repository with the parameters given. + * @param {string} repoID ID of the repository to update. + * @param {UpdateEntityParams} body Parameters used when updating the repository. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async updateRepo(repoID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updateRepo(repoID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.updateRepo']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Update repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updateRepoPool(repoID, poolID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; + const localVarOperationServerBasePath = operationServerMap['RepositoriesApi.updateRepoPool']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * RepositoriesApi - factory interface + * @export + */ +export const RepositoriesApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = RepositoriesApiFp(configuration) + return { + /** + * + * @summary Create repository with the parameters given. + * @param {CreateRepoParams} body Parameters used when creating the repository. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createRepo(body: CreateRepoParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createRepo(body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Create repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreatePoolParams} body Parameters used when creating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createRepoPool(repoID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Create repository scale set with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createRepoScaleSet(repoID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete repository by ID. + * @param {string} repoID ID of the repository to delete. + * @param {boolean} [keepWebhook] If true and a webhook is installed for this repo, it will not be removed. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteRepo(repoID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteRepo(repoID, keepWebhook, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteRepoPool(repoID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get repository by ID. + * @param {string} repoID ID of the repository to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getRepo(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getRepo(repoID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getRepoPool(repoID, poolID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get information about the GARM installed webhook on a repository. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getRepoWebhookInfo(repoID, options).then((request) => request(axios, basePath)); + }, + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} repoID Repository ID. + * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.installRepoWebhook(repoID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List repository instances. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepoInstances(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listRepoInstances(repoID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List repository pools. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepoPools(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listRepoPools(repoID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List repository scale sets. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listRepoScaleSets(repoID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List repositories. + * @param {string} [owner] Exact owner name to filter by + * @param {string} [name] Exact repository name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepos(owner?: string, name?: string, endpoint?: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listRepos(owner, name, endpoint, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Uninstall organization webhook. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.uninstallRepoWebhook(repoID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update repository with the parameters given. + * @param {string} repoID ID of the repository to update. + * @param {UpdateEntityParams} body Parameters used when updating the repository. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + updateRepo(repoID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateRepo(repoID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateRepoPool(repoID, poolID, body, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * RepositoriesApi - object-oriented interface + * @export + * @class RepositoriesApi + * @extends {BaseAPI} + */ +export class RepositoriesApi extends BaseAPI { + /** + * + * @summary Create repository with the parameters given. + * @param {CreateRepoParams} body Parameters used when creating the repository. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public createRepo(body: CreateRepoParams, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).createRepo(body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Create repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreatePoolParams} body Parameters used when creating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public createRepoPool(repoID: string, body: CreatePoolParams, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).createRepoPool(repoID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Create repository scale set with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).createRepoScaleSet(repoID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete repository by ID. + * @param {string} repoID ID of the repository to delete. + * @param {boolean} [keepWebhook] If true and a webhook is installed for this repo, it will not be removed. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public deleteRepo(repoID: string, keepWebhook?: boolean, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).deleteRepo(repoID, keepWebhook, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to delete. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public deleteRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).deleteRepoPool(repoID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get repository by ID. + * @param {string} repoID ID of the repository to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public getRepo(repoID: string, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).getRepo(repoID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get repository pool by ID. + * @param {string} repoID Repository ID. + * @param {string} poolID Pool ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public getRepoPool(repoID: string, poolID: string, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).getRepoPool(repoID, poolID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get information about the GARM installed webhook on a repository. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public getRepoWebhookInfo(repoID: string, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).getRepoWebhookInfo(repoID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Install the GARM webhook for an organization. The secret configured on the organization will be used to validate the requests. + * @param {string} repoID Repository ID. + * @param {InstallWebhookParams} body Parameters used when creating the repository webhook. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public installRepoWebhook(repoID: string, body: InstallWebhookParams, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).installRepoWebhook(repoID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List repository instances. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public listRepoInstances(repoID: string, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).listRepoInstances(repoID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List repository pools. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public listRepoPools(repoID: string, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).listRepoPools(repoID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List repository scale sets. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).listRepoScaleSets(repoID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List repositories. + * @param {string} [owner] Exact owner name to filter by + * @param {string} [name] Exact repository name to filter by + * @param {string} [endpoint] Exact endpoint name to filter by + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public listRepos(owner?: string, name?: string, endpoint?: string, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).listRepos(owner, name, endpoint, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Uninstall organization webhook. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public uninstallRepoWebhook(repoID: string, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).uninstallRepoWebhook(repoID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update repository with the parameters given. + * @param {string} repoID ID of the repository to update. + * @param {UpdateEntityParams} body Parameters used when updating the repository. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public updateRepo(repoID: string, body: UpdateEntityParams, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).updateRepo(repoID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update repository pool with the parameters given. + * @param {string} repoID Repository ID. + * @param {string} poolID ID of the repository pool to update. + * @param {UpdatePoolParams} body Parameters used when updating the repository pool. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RepositoriesApi + */ + public updateRepoPool(repoID: string, poolID: string, body: UpdatePoolParams, options?: RawAxiosRequestConfig) { + return RepositoriesApiFp(this.configuration).updateRepoPool(repoID, poolID, body, options).then((request) => request(this.axios, this.basePath)); + } +} + + + +/** + * ScalesetsApi - axios parameter creator + * @export + */ +export const ScalesetsApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * + * @summary Create enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + createEnterpriseScaleSet: async (enterpriseID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('createEnterpriseScaleSet', 'enterpriseID', enterpriseID) + // verify required parameter 'body' is not null or undefined + assertParamExists('createEnterpriseScaleSet', 'body', body) + const localVarPath = `/enterprises/{enterpriseID}/scalesets` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Create organization scale set with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createOrgScaleSet: async (orgID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('createOrgScaleSet', 'orgID', orgID) + // verify required parameter 'body' is not null or undefined + assertParamExists('createOrgScaleSet', 'body', body) + const localVarPath = `/organizations/{orgID}/scalesets` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Create repository scale set with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createRepoScaleSet: async (repoID: string, body: CreateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('createRepoScaleSet', 'repoID', repoID) + // verify required parameter 'body' is not null or undefined + assertParamExists('createRepoScaleSet', 'body', body) + const localVarPath = `/repositories/{repoID}/scalesets` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Delete scale set by ID. + * @param {string} scalesetID ID of the scale set to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteScaleSet: async (scalesetID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'scalesetID' is not null or undefined + assertParamExists('deleteScaleSet', 'scalesetID', scalesetID) + const localVarPath = `/scalesets/{scalesetID}` + .replace(`{${"scalesetID"}}`, encodeURIComponent(String(scalesetID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Get scale set by ID. + * @param {string} scalesetID ID of the scale set to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getScaleSet: async (scalesetID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'scalesetID' is not null or undefined + assertParamExists('getScaleSet', 'scalesetID', scalesetID) + const localVarPath = `/scalesets/{scalesetID}` + .replace(`{${"scalesetID"}}`, encodeURIComponent(String(scalesetID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List enterprise scale sets. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterpriseScaleSets: async (enterpriseID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'enterpriseID' is not null or undefined + assertParamExists('listEnterpriseScaleSets', 'enterpriseID', enterpriseID) + const localVarPath = `/enterprises/{enterpriseID}/scalesets` + .replace(`{${"enterpriseID"}}`, encodeURIComponent(String(enterpriseID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List organization scale sets. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + listOrgScaleSets: async (orgID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'orgID' is not null or undefined + assertParamExists('listOrgScaleSets', 'orgID', orgID) + const localVarPath = `/organizations/{orgID}/scalesets` + .replace(`{${"orgID"}}`, encodeURIComponent(String(orgID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List repository scale sets. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepoScaleSets: async (repoID: string, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'repoID' is not null or undefined + assertParamExists('listRepoScaleSets', 'repoID', repoID) + const localVarPath = `/repositories/{repoID}/scalesets` + .replace(`{${"repoID"}}`, encodeURIComponent(String(repoID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary List all scalesets. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listScalesets: async (options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/scalesets`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary Update scale set by ID. + * @param {string} scalesetID ID of the scale set to update. + * @param {UpdateScaleSetParams} body Parameters to update the scale set with. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateScaleSet: async (scalesetID: string, body: UpdateScaleSetParams, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'scalesetID' is not null or undefined + assertParamExists('updateScaleSet', 'scalesetID', scalesetID) + // verify required parameter 'body' is not null or undefined + assertParamExists('updateScaleSet', 'body', body) + const localVarPath = `/scalesets/{scalesetID}` + .replace(`{${"scalesetID"}}`, encodeURIComponent(String(scalesetID))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * ScalesetsApi - functional programming interface + * @export + */ +export const ScalesetsApiFp = function(configuration?: Configuration) { + const localVarAxiosParamCreator = ScalesetsApiAxiosParamCreator(configuration) + return { + /** + * + * @summary Create enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createEnterpriseScaleSet(enterpriseID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.createEnterpriseScaleSet']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Create organization scale set with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createOrgScaleSet(orgID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.createOrgScaleSet']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Create repository scale set with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.createRepoScaleSet(repoID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.createRepoScaleSet']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Delete scale set by ID. + * @param {string} scalesetID ID of the scale set to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async deleteScaleSet(scalesetID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.deleteScaleSet(scalesetID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.deleteScaleSet']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Get scale set by ID. + * @param {string} scalesetID ID of the scale set to fetch. 
+ * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async getScaleSet(scalesetID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.getScaleSet(scalesetID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.getScaleSet']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List enterprise scale sets. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listEnterpriseScaleSets(enterpriseID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.listEnterpriseScaleSets']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List organization scale sets. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listOrgScaleSets(orgID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.listOrgScaleSets']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List repository scale sets. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listRepoScaleSets(repoID, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.listRepoScaleSets']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary List all scalesets. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async listScalesets(options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.listScalesets(options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; + const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.listScalesets']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * + * @summary Update scale set by ID. + * @param {string} scalesetID ID of the scale set to update. + * @param {UpdateScaleSetParams} body Parameters to update the scale set with. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async updateScaleSet(scalesetID: string, body: UpdateScaleSetParams, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.updateScaleSet(scalesetID, body, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ScalesetsApi.updateScaleSet']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + } +}; + +/** + * ScalesetsApi - factory interface + * @export + */ +export const ScalesetsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + const localVarFp = ScalesetsApiFp(configuration) + return { + /** + * + * @summary Create enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createEnterpriseScaleSet(enterpriseID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Create organization scale set with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createOrgScaleSet(orgID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Create repository scale set with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.createRepoScaleSet(repoID, body, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Delete scale set by ID. + * @param {string} scalesetID ID of the scale set to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteScaleSet(scalesetID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.deleteScaleSet(scalesetID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Get scale set by ID. 
+ * @param {string} scalesetID ID of the scale set to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getScaleSet(scalesetID: string, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.getScaleSet(scalesetID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List enterprise scale sets. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listEnterpriseScaleSets(enterpriseID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List organization scale sets. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listOrgScaleSets(orgID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List repository scale sets. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listRepoScaleSets(repoID, options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary List all scalesets. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listScalesets(options?: RawAxiosRequestConfig): AxiosPromise> { + return localVarFp.listScalesets(options).then((request) => request(axios, basePath)); + }, + /** + * + * @summary Update scale set by ID. + * @param {string} scalesetID ID of the scale set to update. + * @param {UpdateScaleSetParams} body Parameters to update the scale set with. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + updateScaleSet(scalesetID: string, body: UpdateScaleSetParams, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.updateScaleSet(scalesetID, body, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * ScalesetsApi - object-oriented interface + * @export + * @class ScalesetsApi + * @extends {BaseAPI} + */ +export class ScalesetsApi extends BaseAPI { + /** + * + * @summary Create enterprise pool with the parameters given. + * @param {string} enterpriseID Enterprise ID. + * @param {CreateScaleSetParams} body Parameters used when creating the enterprise scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ScalesetsApi + */ + public createEnterpriseScaleSet(enterpriseID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) { + return ScalesetsApiFp(this.configuration).createEnterpriseScaleSet(enterpriseID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Create organization scale set with the parameters given. + * @param {string} orgID Organization ID. + * @param {CreateScaleSetParams} body Parameters used when creating the organization scale set. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof ScalesetsApi + */ + public createOrgScaleSet(orgID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) { + return ScalesetsApiFp(this.configuration).createOrgScaleSet(orgID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Create repository scale set with the parameters given. + * @param {string} repoID Repository ID. + * @param {CreateScaleSetParams} body Parameters used when creating the repository scale set. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ScalesetsApi + */ + public createRepoScaleSet(repoID: string, body: CreateScaleSetParams, options?: RawAxiosRequestConfig) { + return ScalesetsApiFp(this.configuration).createRepoScaleSet(repoID, body, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Delete scale set by ID. + * @param {string} scalesetID ID of the scale set to delete. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ScalesetsApi + */ + public deleteScaleSet(scalesetID: string, options?: RawAxiosRequestConfig) { + return ScalesetsApiFp(this.configuration).deleteScaleSet(scalesetID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Get scale set by ID. + * @param {string} scalesetID ID of the scale set to fetch. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ScalesetsApi + */ + public getScaleSet(scalesetID: string, options?: RawAxiosRequestConfig) { + return ScalesetsApiFp(this.configuration).getScaleSet(scalesetID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List enterprise scale sets. + * @param {string} enterpriseID Enterprise ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ScalesetsApi + */ + public listEnterpriseScaleSets(enterpriseID: string, options?: RawAxiosRequestConfig) { + return ScalesetsApiFp(this.configuration).listEnterpriseScaleSets(enterpriseID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List organization scale sets. + * @param {string} orgID Organization ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ScalesetsApi + */ + public listOrgScaleSets(orgID: string, options?: RawAxiosRequestConfig) { + return ScalesetsApiFp(this.configuration).listOrgScaleSets(orgID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List repository scale sets. + * @param {string} repoID Repository ID. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ScalesetsApi + */ + public listRepoScaleSets(repoID: string, options?: RawAxiosRequestConfig) { + return ScalesetsApiFp(this.configuration).listRepoScaleSets(repoID, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary List all scalesets. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ScalesetsApi + */ + public listScalesets(options?: RawAxiosRequestConfig) { + return ScalesetsApiFp(this.configuration).listScalesets(options).then((request) => request(this.axios, this.basePath)); + } + + /** + * + * @summary Update scale set by ID. + * @param {string} scalesetID ID of the scale set to update. 
+ * @param {UpdateScaleSetParams} body Parameters to update the scale set with. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ScalesetsApi + */ + public updateScaleSet(scalesetID: string, body: UpdateScaleSetParams, options?: RawAxiosRequestConfig) { + return ScalesetsApiFp(this.configuration).updateScaleSet(scalesetID, body, options).then((request) => request(this.axios, this.basePath)); + } +} + + + diff --git a/webapp/src/lib/api/generated/base.ts b/webapp/src/lib/api/generated/base.ts new file mode 100644 index 00000000..2fa2314d --- /dev/null +++ b/webapp/src/lib/api/generated/base.ts @@ -0,0 +1,86 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * Garm API. + * The Garm API generated using go-swagger. + * + * The version of the OpenAPI document: 1.0.0 + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. + */ + + +import type { Configuration } from './configuration'; +// Some imports not used depending on template conditions +// @ts-ignore +import type { AxiosPromise, AxiosInstance, RawAxiosRequestConfig } from 'axios'; +import globalAxios from 'axios'; + +export const BASE_PATH = "/api/v1".replace(/\/+$/, ""); + +/** + * + * @export + */ +export const COLLECTION_FORMATS = { + csv: ",", + ssv: " ", + tsv: "\t", + pipes: "|", +}; + +/** + * + * @export + * @interface RequestArgs + */ +export interface RequestArgs { + url: string; + options: RawAxiosRequestConfig; +} + +/** + * + * @export + * @class BaseAPI + */ +export class BaseAPI { + protected configuration: Configuration | undefined; + + constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected axios: AxiosInstance = globalAxios) { + if (configuration) { + this.configuration = configuration; + this.basePath = configuration.basePath ?? basePath; + } + } +}; + +/** + * + * @export + * @class RequiredError + * @extends {Error} + */ +export class RequiredError extends Error { + constructor(public field: string, msg?: string) { + super(msg); + this.name = "RequiredError" + } +} + +interface ServerMap { + [key: string]: { + url: string, + description: string, + }[]; +} + +/** + * + * @export + */ +export const operationServerMap: ServerMap = { +} diff --git a/webapp/src/lib/api/generated/common.ts b/webapp/src/lib/api/generated/common.ts new file mode 100644 index 00000000..a1ef3fb4 --- /dev/null +++ b/webapp/src/lib/api/generated/common.ts @@ -0,0 +1,150 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * Garm API. + * The Garm API generated using go-swagger. + * + * The version of the OpenAPI document: 1.0.0 + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. 
+ */ + + +import type { Configuration } from "./configuration"; +import type { RequestArgs } from "./base"; +import type { AxiosInstance, AxiosResponse } from 'axios'; +import { RequiredError } from "./base"; + +/** + * + * @export + */ +export const DUMMY_BASE_URL = 'https://example.com' + +/** + * + * @throws {RequiredError} + * @export + */ +export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) { + if (paramValue === null || paramValue === undefined) { + throw new RequiredError(paramName, `Required parameter ${paramName} was null or undefined when calling ${functionName}.`); + } +} + +/** + * + * @export + */ +export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) { + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = typeof configuration.apiKey === 'function' + ? await configuration.apiKey(keyParamName) + : await configuration.apiKey; + object[keyParamName] = localVarApiKeyValue; + } +} + +/** + * + * @export + */ +export const setBasicAuthToObject = function (object: any, configuration?: Configuration) { + if (configuration && (configuration.username || configuration.password)) { + object["auth"] = { username: configuration.username, password: configuration.password }; + } +} + +/** + * + * @export + */ +export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) { + if (configuration && configuration.accessToken) { + const accessToken = typeof configuration.accessToken === 'function' + ? await configuration.accessToken() + : await configuration.accessToken; + object["Authorization"] = "Bearer " + accessToken; + } +} + +/** + * + * @export + */ +export const setOAuthToObject = async function (object: any, name: string, scopes: string[], configuration?: Configuration) { + if (configuration && configuration.accessToken) { + const localVarAccessTokenValue = typeof configuration.accessToken === 'function' + ? await configuration.accessToken(name, scopes) + : await configuration.accessToken; + object["Authorization"] = "Bearer " + localVarAccessTokenValue; + } +} + +function setFlattenedQueryParams(urlSearchParams: URLSearchParams, parameter: any, key: string = ""): void { + if (parameter == null) return; + if (typeof parameter === "object") { + if (Array.isArray(parameter)) { + (parameter as any[]).forEach(item => setFlattenedQueryParams(urlSearchParams, item, key)); + } + else { + Object.keys(parameter).forEach(currentKey => + setFlattenedQueryParams(urlSearchParams, parameter[currentKey], `${key}${key !== '' ? '.' : ''}${currentKey}`) + ); + } + } + else { + if (urlSearchParams.has(key)) { + urlSearchParams.append(key, parameter); + } + else { + urlSearchParams.set(key, parameter); + } + } +} + +/** + * + * @export + */ +export const setSearchParams = function (url: URL, ...objects: any[]) { + const searchParams = new URLSearchParams(url.search); + setFlattenedQueryParams(searchParams, objects); + url.search = searchParams.toString(); +} + +/** + * + * @export + */ +export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) { + const nonString = typeof value !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(requestOptions.headers['Content-Type']) + : nonString; + return needsSerialization + ? JSON.stringify(value !== undefined ? 
value : {}) + : (value || ""); +} + +/** + * + * @export + */ +export const toPathString = function (url: URL) { + return url.pathname + url.search + url.hash +} + +/** + * + * @export + */ +export const createRequestFunction = function (axiosArgs: RequestArgs, globalAxios: AxiosInstance, BASE_PATH: string, configuration?: Configuration) { + return >(axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...axiosArgs.options, url: (axios.defaults.baseURL ? '' : configuration?.basePath ?? basePath) + axiosArgs.url}; + return axios.request(axiosRequestArgs); + }; +} diff --git a/webapp/src/lib/api/generated/configuration.ts b/webapp/src/lib/api/generated/configuration.ts new file mode 100644 index 00000000..d71ed227 --- /dev/null +++ b/webapp/src/lib/api/generated/configuration.ts @@ -0,0 +1,115 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * Garm API. + * The Garm API generated using go-swagger. + * + * The version of the OpenAPI document: 1.0.0 + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. + */ + + +export interface ConfigurationParameters { + apiKey?: string | Promise | ((name: string) => string) | ((name: string) => Promise); + username?: string; + password?: string; + accessToken?: string | Promise | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise); + basePath?: string; + serverIndex?: number; + baseOptions?: any; + formDataCtor?: new () => any; +} + +export class Configuration { + /** + * parameter for apiKey security + * @param name security name + * @memberof Configuration + */ + apiKey?: string | Promise | ((name: string) => string) | ((name: string) => Promise); + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + username?: string; + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + password?: string; + /** + * parameter for oauth2 security + * @param name security name + * @param scopes oauth2 scope + * @memberof Configuration + */ + accessToken?: string | Promise | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise); + /** + * override base path + * + * @type {string} + * @memberof Configuration + */ + basePath?: string; + /** + * override server index + * + * @type {number} + * @memberof Configuration + */ + serverIndex?: number; + /** + * base options for axios calls + * + * @type {any} + * @memberof Configuration + */ + baseOptions?: any; + /** + * The FormData constructor that will be used to create multipart form data + * requests. You can inject this here so that execution environments that + * do not support the FormData class can still run the generated client. + * + * @type {new () => FormData} + */ + formDataCtor?: new () => any; + + constructor(param: ConfigurationParameters = {}) { + this.apiKey = param.apiKey; + this.username = param.username; + this.password = param.password; + this.accessToken = param.accessToken; + this.basePath = param.basePath; + this.serverIndex = param.serverIndex; + this.baseOptions = { + ...param.baseOptions, + headers: { + ...param.baseOptions?.headers, + }, + }; + this.formDataCtor = param.formDataCtor; + } + + /** + * Check if the given MIME is a JSON MIME. 
+ * JSON MIME examples: + * application/json + * application/json; charset=UTF8 + * APPLICATION/JSON + * application/vnd.company+json + * @param mime - MIME (Multipurpose Internet Mail Extensions) + * @return True if the given MIME is JSON, false otherwise. + */ + public isJsonMime(mime: string): boolean { + const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i'); + return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json'); + } +} diff --git a/webapp/src/lib/api/generated/index.ts b/webapp/src/lib/api/generated/index.ts new file mode 100644 index 00000000..c5c83e0c --- /dev/null +++ b/webapp/src/lib/api/generated/index.ts @@ -0,0 +1,18 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * Garm API. + * The Garm API generated using go-swagger. + * + * The version of the OpenAPI document: 1.0.0 + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. + */ + + +export * from "./api"; +export * from "./configuration"; + diff --git a/webapp/src/lib/components/ActionButton.svelte b/webapp/src/lib/components/ActionButton.svelte new file mode 100644 index 00000000..61236bd1 --- /dev/null +++ b/webapp/src/lib/components/ActionButton.svelte @@ -0,0 +1,68 @@ + + + \ No newline at end of file diff --git a/webapp/src/lib/components/Badge.svelte b/webapp/src/lib/components/Badge.svelte new file mode 100644 index 00000000..cd75174f --- /dev/null +++ b/webapp/src/lib/components/Badge.svelte @@ -0,0 +1,48 @@ + + + + {text} + \ No newline at end of file diff --git a/webapp/src/lib/components/Button.svelte b/webapp/src/lib/components/Button.svelte new file mode 100644 index 00000000..3f98d4dc --- /dev/null +++ b/webapp/src/lib/components/Button.svelte @@ -0,0 +1,82 @@ + + + \ No newline at end of file diff --git a/webapp/src/lib/components/ControllerInfoCard.svelte b/webapp/src/lib/components/ControllerInfoCard.svelte new file mode 100644 index 00000000..36533d64 --- /dev/null +++ b/webapp/src/lib/components/ControllerInfoCard.svelte @@ -0,0 +1,403 @@ + + +
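Taken together, base.ts, common.ts, configuration.ts and the per-tag API classes above form a self-contained axios client. As a rough illustration of how the webapp (or any TypeScript caller) might consume it — the import path, controller URL and token handling below are assumptions for the sketch, not part of this patch — a minimal usage example:

// Minimal sketch, assuming the generated package is re-exported from './generated'
// and that a GARM JWT has already been obtained by the caller.
import { Configuration, RepositoriesApi, ScalesetsApi } from './generated';

async function showScaleSets(token: string, repoID: string) {
  const config = new Configuration({
    basePath: 'https://garm.example.com/api/v1', // assumed controller URL
    // setApiKeyToObject() copies this value verbatim into the Authorization
    // header, so the "Bearer " prefix must be part of the string.
    apiKey: `Bearer ${token}`,
  });

  const repoApi = new RepositoriesApi(config);
  const scaleSetApi = new ScalesetsApi(config);

  // listRepos() accepts optional exact-match owner/name/endpoint filters;
  // calling it without arguments lists every repository known to GARM.
  const repos = await repoApi.listRepos();
  console.log('repositories:', repos.data);

  // Scale sets attached to a single repository.
  const scaleSets = await scaleSetApi.listRepoScaleSets(repoID);
  console.log('scale sets:', scaleSets.data);
}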
                +
                + +
                +
                +
                + + + +
                +
                +

                Controller Information

                +
                + + v{controllerInfo.version?.replace(/^v/, '') || 'Unknown'} + +
                +
                +
                + + +
                + + +
                + +
                +
                +

                Identity

                +
                + +
                +
                Controller ID
                +
                + {controllerInfo.controller_id} +
                +
                + + +
                +
                Hostname
                +
                + {controllerInfo.hostname || 'Unknown'} +
                +
                + + +
                +
                +
                Job Age Backoff
                +
                + +
                +
                +
                + {controllerInfo.minimum_job_age_backoff || 30}s +
                +
                +
                +
                +
                + + +
                +
                +

                Integration URLs

                +
                + + {#if controllerInfo.metadata_url} +
                +
                +
                Metadata
                +
                + +
                +
                +
                + {controllerInfo.metadata_url} +
                +
                + {/if} + + + {#if controllerInfo.callback_url} +
                +
                +
                Callback
                +
                + +
                +
                +
                + {controllerInfo.callback_url} +
                +
                + {/if} + + + {#if controllerInfo.webhook_url} +
                +
                +
                Webhook
                +
                + +
                +
                +
                + {controllerInfo.webhook_url} +
                +
                + {/if} + + + {#if !controllerInfo.metadata_url && !controllerInfo.callback_url && !controllerInfo.webhook_url} +
                + + + +

                No URLs configured

                + +
                + {/if} +
                +
                +
                +
                + + + {#if controllerInfo.controller_webhook_url} +
                +
                +
                Controller Webhook URL
                +
                + +
                +
                +
                +
                +
                + + + +
                +
                + + {controllerInfo.controller_webhook_url} + +

                + Use this URL in your GitHub organization/repository webhook settings +

                +
                +
                +
                +
                + {/if} +
                +
                + + +{#if showSettingsModal} + +
                +

                Controller Settings

                + +
                + +
                + + + {#if !isValidUrl(metadataUrl)} +

                Please enter a valid URL

                + {/if} +

                + URL where runners can fetch metadata and setup information +

                +
                + + +
                + + + {#if !isValidUrl(callbackUrl)} +

                Please enter a valid URL

                + {/if} +

                + URL where runners send status updates and lifecycle events +

                +
                + + +
                + + + {#if !isValidUrl(webhookUrl)} +

                Please enter a valid URL

                + {/if} +

                + URL where GitHub/Gitea will send webhook events for job notifications +

                +
                + + +
                + + +

                + Time to wait before spinning up a runner for a new job (0 = immediate) +

                +
                + + +
                + + +
                +
                +
                +
                +{/if} \ No newline at end of file diff --git a/webapp/src/lib/components/CreateEnterpriseModal.svelte b/webapp/src/lib/components/CreateEnterpriseModal.svelte new file mode 100644 index 00000000..7e1b2f35 --- /dev/null +++ b/webapp/src/lib/components/CreateEnterpriseModal.svelte @@ -0,0 +1,213 @@ + + + dispatch('close')}> +
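The controller settings modal above writes the metadata, callback and webhook URLs back through the API wrapper. A minimal sketch of the same call outside the component, assuming the garmApi wrapper and the updateController payload keys used by the first-run flow later in this patch; the $lib import alias is an assumption:

    import { garmApi } from '$lib/api/client';

    async function applyDefaultControllerUrls(): Promise<void> {
        // Same defaults the initialization flow derives from the origin
        // serving the web UI.
        const origin = window.location.origin;
        await garmApi.updateController({
            metadata_url: `${origin}/api/v1/metadata`,
            callback_url: `${origin}/api/v1/callbacks`,
            webhook_url: `${origin}/webhooks`,
        });
    }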
                +

                Create Enterprise

                +

                + Enterprises are only available for GitHub endpoints. +

                + + {#if error} +
                +

                {error}

                +
                + {/if} + + {#if loading} +
                +
                +

                Loading...

                +
                + {:else} +
                + +
                + + +
                + + +
                + + + {#if credentialsLoading} +

                + Loading credentials... +

                + {:else if filteredCredentials.length === 0} +

                + No GitHub credentials found. Please create GitHub credentials first. +

                + {/if} +
                + + +
                +
                + +
                + + + + +
                +
                + +
                + + +
                + + +

                + You'll need to manually configure this secret in GitHub's enterprise webhook settings. +

                +
                + + +
                + + +
                +
                + {/if} +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/CreateOrganizationModal.svelte b/webapp/src/lib/components/CreateOrganizationModal.svelte new file mode 100644 index 00000000..9a91abba --- /dev/null +++ b/webapp/src/lib/components/CreateOrganizationModal.svelte @@ -0,0 +1,271 @@ + + + dispatch('close')}> +
                +

                Create Organization

                + + {#if error} +
                +

                {error}

                +
                + {/if} + + {#if loading} +
                +
                +

                Loading...

                +
                + {:else} +
                + + + + +
                + + +
                + + +
                + + +
                + + +
                +
                + +
                + + + + +
                +
                + +
                + + +
                +
                + + +
                + +
                +
                + + +
                + + {#if !generateWebhookSecret} + + {:else} +

                + Webhook secret will be automatically generated +

                + {/if} +
                +
                + + +
                + + +
                + + {/if} +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/CreatePoolModal.svelte b/webapp/src/lib/components/CreatePoolModal.svelte new file mode 100644 index 00000000..36ce3179 --- /dev/null +++ b/webapp/src/lib/components/CreatePoolModal.svelte @@ -0,0 +1,541 @@ + + + dispatch('close')}> +
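The organization form above offers to auto-generate the webhook secret when the field is left empty. The generation itself is not visible in this hunk; one plausible client-side approach, using the browser's Web Crypto API (GARM may equally generate the secret server-side when none is provided):

    // Hypothetical helper, not taken from the component script.
    function generateWebhookSecret(bytes = 32): string {
        const buf = new Uint8Array(bytes);
        crypto.getRandomValues(buf);
        // Hex-encode the random bytes.
        return Array.from(buf, (b) => b.toString(16).padStart(2, '0')).join('');
    }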
                +
                +

                Create New Pool

                +
                + +
                + {#if error} +
                +

                {error}

                +
                + {/if} + + +
                + + Entity Level * + +
                + + + +
                +
                + + {#if entityLevel} + +
                +

                + Entity & Provider Configuration +

                +
                +
                + + {#if loadingEntities} +
                + {:else} + + {/if} +
                +
                + + {#if loadingProviders} +
                + {:else} + + {/if} +
                +
                +
                + + +
                +

                + Image & OS Configuration +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                +
                + + +
                +

                + Runner Limits & Timing +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                +
                + + +
                +

                + Advanced Settings +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                + + +
                + +
                +
                + + +
                + {#if tags.length > 0} +
                + {#each tags as tag, index} + + {tag} + + + {/each} +
                + {/if} +
                +
                + + +
                + + Extra Specs (JSON) + + +
                + + +
                + + +
                +
                + {/if} + + +
                + + +
                +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/CreateRepositoryModal.svelte b/webapp/src/lib/components/CreateRepositoryModal.svelte new file mode 100644 index 00000000..28ff4065 --- /dev/null +++ b/webapp/src/lib/components/CreateRepositoryModal.svelte @@ -0,0 +1,294 @@ + + + dispatch('close')}> +
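The pool form above keeps its labels in a tags array and renders a remove button next to each tag. Its script block is not part of this hunk; a sketch of the add/remove handlers such a form needs, with illustrative names:

    let tags: string[] = [];
    let newTag = '';

    function addTag(): void {
        const value = newTag.trim();
        if (value && !tags.includes(value)) {
            tags = [...tags, value]; // reassign so Svelte picks up the change
        }
        newTag = '';
    }

    function removeTag(index: number): void {
        tags = tags.filter((_, i) => i !== index);
    }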
                +

                Create Repository

                + + {#if error} +
                +

                {error}

                +
                + {/if} + + {#if loading} +
                +
                +

                Loading...

                +
                + {:else} +
                + + + + +
                + + +
                + + +
                + + +
                + + +
                + + +
                + + +
                +
                + +
                + + + + +
                +
                + +
                + + +
                +
                + + +
                + +
                +
                + + +
                + + {#if !generateWebhookSecret} + + {:else} +

                + Webhook secret will be automatically generated +

                + {/if} +
                +
                + + +
                + + +
                + + {/if} +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/CreateScaleSetModal.svelte b/webapp/src/lib/components/CreateScaleSetModal.svelte new file mode 100644 index 00000000..2711ba09 --- /dev/null +++ b/webapp/src/lib/components/CreateScaleSetModal.svelte @@ -0,0 +1,472 @@ + + + dispatch('close')}> +
                +
                +

                Create New Scale Set

                +

                Scale sets are only available for GitHub endpoints.

                +
                + +
                + {#if error} +
                +

                {error}

                +
                + {/if} + + +
                + + +
                + + +
                +
                + + Entity Level * + +
                + + + +
                +
                +
                + + {#if entityLevel} + +
                +

                + Entity & Provider Configuration +

                +
                +
                + + {#if loadingEntities} +
                + {:else} + + {/if} +
                +
                + + {#if loadingProviders} +
                + {:else} + + {/if} +
                +
                +
                + + +
                +

                + Image & OS Configuration +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                +
                + + +
                +

                + Runner Limits & Timing +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                +
                + + +
                +

                + Advanced Settings +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                + Extra Specs (JSON) +
                + +
                + + +
                + + +
                +
                + {/if} + + +
                + + +
                +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/DataTable.svelte b/webapp/src/lib/components/DataTable.svelte new file mode 100644 index 00000000..4723420c --- /dev/null +++ b/webapp/src/lib/components/DataTable.svelte @@ -0,0 +1,237 @@ + + +
                + {#if showSearch} + + {/if} + +
                + {#if loading} + + {:else if error} + + {:else if data.length === 0} + + {:else} + {#if showMobileCards} + +
                + {#each data as item, index (item.id || item.name || index)} +
                + {#if mobileCardConfig} + + {#key `${item.id || item.name}-${item.updated_at}-mobile`} + + {/key} + {:else} + + + {/if} +
                + {/each} +
                + {/if} + + + + {/if} + + {#if showPagination && !loading && !error && data.length > 0} + + {/if} +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/DeleteModal.svelte b/webapp/src/lib/components/DeleteModal.svelte new file mode 100644 index 00000000..88a0922d --- /dev/null +++ b/webapp/src/lib/components/DeleteModal.svelte @@ -0,0 +1,57 @@ + + + dispatch('close')}> +
                +
                + + + +
                + +
                +

                {title}

                +
                +

                {message}

                + {#if itemName} +

                {itemName}

                + {/if} +
                +
                + +
                + + +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/DetailHeader.svelte b/webapp/src/lib/components/DetailHeader.svelte new file mode 100644 index 00000000..3d675b4d --- /dev/null +++ b/webapp/src/lib/components/DetailHeader.svelte @@ -0,0 +1,56 @@ + + +
                +
                +
                +
                + {#if forgeIcon} +
                + {@html forgeIcon} +
                + {/if} +
                +

                {title}

                +

                + {subtitle} +

                +
                +
                + {#if onEdit || onDelete} +
                + {#if onEdit} + + {/if} + {#if onDelete} + + {/if} +
                + {/if} +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/EmptyState.svelte b/webapp/src/lib/components/EmptyState.svelte new file mode 100644 index 00000000..51eaee9d --- /dev/null +++ b/webapp/src/lib/components/EmptyState.svelte @@ -0,0 +1,37 @@ + + +
                + {#if iconType === 'document'} + + + + {:else if iconType === 'building'} + + + + {:else if iconType === 'users'} + + + + {:else if iconType === 'cog'} + + + + + {:else if iconType === 'key'} + + + + {:else if iconType === 'settings'} + + + + + {/if} +

                {title}

                +

                {message}

                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/EntityInformation.svelte b/webapp/src/lib/components/EntityInformation.svelte new file mode 100644 index 00000000..98977052 --- /dev/null +++ b/webapp/src/lib/components/EntityInformation.svelte @@ -0,0 +1,103 @@ + + +
                +
                +

                {getEntityTitle()}

                +
                +
                +
                ID
                +
                {entity.id}
                +
                +
                +
                Created At
                +
                {formatDate(entity.created_at)}
                +
                +
                +
                Updated At
                +
                {formatDate(entity.updated_at)}
                +
                +
                +
                Status
                +
                + {#if entity.pool_manager_status?.running} + + {:else} + + {/if} +
                +
                +
                +
                Pool Balancer Type
                +
                {getPoolBalancerDisplay()}
                +
                +
                +
                {getUrlLabel()}
                +
                + + {getEntityUrl()} + + + + +
                +
                +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/ErrorState.svelte b/webapp/src/lib/components/ErrorState.svelte new file mode 100644 index 00000000..ee42c73e --- /dev/null +++ b/webapp/src/lib/components/ErrorState.svelte @@ -0,0 +1,37 @@ + + +
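EntityInformation above renders created_at and updated_at through a formatDate helper whose body is not visible in this hunk. One common way such a helper is written; a sketch, not the component's actual code:

    function formatDate(value?: string): string {
        if (!value) return 'N/A';
        const date = new Date(value);
        if (Number.isNaN(date.getTime())) return value; // fall back to raw string
        return date.toLocaleString();
    }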
                +
                +
                +
                + + + +
                +
                +

                {title}

                +

                {message}

                + {#if showRetry && onRetry} +
                + +
                + {/if} +
                +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/EventsSection.svelte b/webapp/src/lib/components/EventsSection.svelte new file mode 100644 index 00000000..0c17ef65 --- /dev/null +++ b/webapp/src/lib/components/EventsSection.svelte @@ -0,0 +1,47 @@ + + +{#if events && events.length > 0} +
                +
                +

                Events

                +
                + {#each events as event} +
                +
                +

                {event.message}

                +
                + {#if (event.event_level || 'info').toLowerCase() === 'error'} + + {:else if (event.event_level || 'info').toLowerCase() === 'warning'} + + {:else} + + {/if} + {formatDate(event.created_at)} +
                +
                +
                + {/each} +
                +
                +
                +{:else} +
                +
                +

                Events

                +
                + + + +

                No events available

                +
                +
                +
                +{/if} \ No newline at end of file diff --git a/webapp/src/lib/components/ForgeTypeSelector.svelte b/webapp/src/lib/components/ForgeTypeSelector.svelte new file mode 100644 index 00000000..68dbe187 --- /dev/null +++ b/webapp/src/lib/components/ForgeTypeSelector.svelte @@ -0,0 +1,40 @@ + + +
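The events section above branches on (event.event_level || 'info').toLowerCase() to pick a badge. The same mapping, pulled out as a small helper for clarity:

    type EventLevel = 'error' | 'warning' | 'info';

    function normalizeEventLevel(level?: string): EventLevel {
        const normalized = (level || 'info').toLowerCase();
        if (normalized === 'error' || normalized === 'warning') return normalized;
        return 'info';
    }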
                + + {label} + +
                + + +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/Icons.svelte b/webapp/src/lib/components/Icons.svelte new file mode 100644 index 00000000..c38fadfe --- /dev/null +++ b/webapp/src/lib/components/Icons.svelte @@ -0,0 +1,51 @@ + + + + {@html iconPath} + \ No newline at end of file diff --git a/webapp/src/lib/components/InstancesSection.svelte b/webapp/src/lib/components/InstancesSection.svelte new file mode 100644 index 00000000..b17c4606 --- /dev/null +++ b/webapp/src/lib/components/InstancesSection.svelte @@ -0,0 +1,114 @@ + + +
                +
                +
                +

                Instances ({instances.length})

                + View all instances +
                + +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/JsonEditor.svelte b/webapp/src/lib/components/JsonEditor.svelte new file mode 100644 index 00000000..443c6e25 --- /dev/null +++ b/webapp/src/lib/components/JsonEditor.svelte @@ -0,0 +1,48 @@ + + +
                + + + {#if !isValidJson} +
                + + + +
                + {/if} +
                \ No newline at end of file diff --git a/webapp/src/lib/components/LoadingState.svelte b/webapp/src/lib/components/LoadingState.svelte new file mode 100644 index 00000000..0a9985c2 --- /dev/null +++ b/webapp/src/lib/components/LoadingState.svelte @@ -0,0 +1,8 @@ + + +
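The JSON editor above drives its error indicator from an isValidJson flag. A minimal validity check of that kind: empty input counts as valid, anything else must parse as JSON.

    function validateJson(value: string): boolean {
        if (!value.trim()) return true;
        try {
            JSON.parse(value);
            return true;
        } catch {
            return false;
        }
    }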
                +
                +

                {message}

                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/MobileCard.svelte b/webapp/src/lib/components/MobileCard.svelte new file mode 100644 index 00000000..d17ef66c --- /dev/null +++ b/webapp/src/lib/components/MobileCard.svelte @@ -0,0 +1,254 @@ + + +
                +
                + {#if config.primaryText.isClickable} + +

                + {getPrimaryText()} +

                + {#if config.secondaryText} +

                + {getSecondaryText()} +

                + {/if} +
                + {:else} +
                +

                + {getPrimaryText()} +

                + {#if config.secondaryText} +

                + {getSecondaryText()} +

                + {/if} +
                + {/if} + + {#if config.customInfo || config.badges?.some(b => b.type === 'forge')} +
                + {#if config.customInfo} + {#each config.customInfo as info} + {@const iconHtml = typeof info.icon === 'function' ? info.icon(item) : info.icon} + {@const text = typeof info.text === 'function' ? info.text(item) : info.text} +
                + {#if iconHtml} + {@html iconHtml} + {/if} + {text} +
                + {/each} + {/if} + + {#if config.badges} + {#each config.badges.filter(b => b.type === 'forge') as badge} +
                + {@html getForgeIcon(badge.field ? (item?.[badge.field] || 'unknown') : (item?.endpoint?.endpoint_type || 'unknown'))} + + {item?.endpoint?.name || 'Unknown'} + +
                + {/each} + {/if} +
                + {/if} +
                + +
                + {#if config.badges} + {#each config.badges.filter(b => b.type !== 'forge') as badge} + {#if badge.type === 'status'} + {@const badgeProps = getBadgeProps(badge)} + + {badgeProps.text} + + {:else} + {@const badgeProps = getBadgeProps(badge)} + + {/if} + {/each} + {/if} + + {#if config.actions} +
                + {#each config.actions as action} + handleAction(action.type)} + /> + {/each} +
                + {/if} +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/Modal.svelte b/webapp/src/lib/components/Modal.svelte new file mode 100644 index 00000000..b446f2ee --- /dev/null +++ b/webapp/src/lib/components/Modal.svelte @@ -0,0 +1,40 @@ + + + diff --git a/webapp/src/lib/components/Navigation.svelte b/webapp/src/lib/components/Navigation.svelte new file mode 100644 index 00000000..1b8b5b06 --- /dev/null +++ b/webapp/src/lib/components/Navigation.svelte @@ -0,0 +1,406 @@ + + + + + + +
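The mobile card config above lets icon and text entries be either literal values or functions of the row item (typeof info.icon === 'function' ? info.icon(item) : info.icon). A small helper capturing that pattern, as a sketch:

    type ItemValue<T> = string | ((item: T) => string);

    function resolveItemValue<T>(value: ItemValue<T>, item: T): string {
        return typeof value === 'function' ? value(item) : value;
    }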
                +
                + + + + +
                + GARM + GARM +

                GARM

                + + +
                + {#if wsState.connected} +
                +
                +
                + {:else if wsState.connecting} +
                +
                +
                + {:else if wsState.error} +
                +
                +
                + {:else} +
                +
                +
                + {/if} +
                +
                + + + +
                + + + {#if mobileMenuOpen} + + {/if} +
                + + +{#if userMenuOpen} +
                userMenuOpen = false} on:keydown={(e) => { if (e.key === 'Escape') userMenuOpen = false; }} role="button" tabindex="0" aria-label="Close user menu">
                +{/if} \ No newline at end of file diff --git a/webapp/src/lib/components/PageHeader.svelte b/webapp/src/lib/components/PageHeader.svelte new file mode 100644 index 00000000..4ac247af --- /dev/null +++ b/webapp/src/lib/components/PageHeader.svelte @@ -0,0 +1,39 @@ + + + +
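The navigation above shows a connection indicator based on the websocket store's connected, connecting and error flags. Inside a component script, the subscription could look like the sketch below; the $lib import path and the exact store shape are assumptions based on how the store is consumed elsewhere in this patch:

    import { onDestroy } from 'svelte';
    import { websocketStore } from '$lib/stores/websocket';

    let status = 'disconnected';

    const unsubscribe = websocketStore.subscribe((state) => {
        if (state.connected) status = 'connected';
        else if (state.connecting) status = 'connecting';
        else if (state.error) status = 'error';
        else status = 'disconnected';
    });

    // Release the subscription when the component is torn down.
    onDestroy(unsubscribe);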
                +
                +

                {title}

                +

                + {description} +

                +
                + {#if showAction && actionLabel} +
                + +
                + {/if} +
                \ No newline at end of file diff --git a/webapp/src/lib/components/PoolsSection.svelte b/webapp/src/lib/components/PoolsSection.svelte new file mode 100644 index 00000000..8ff740fa --- /dev/null +++ b/webapp/src/lib/components/PoolsSection.svelte @@ -0,0 +1,136 @@ + + +
                +
                +
                +

                Pools ({pools.length})

                + View all pools +
                + {#if pools.length === 0} + +
                + + + + +

                No pools configured

                +

                No pools configured for this {entityType}.

                +
                + +
                +
                + {:else} + + {/if} +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/SearchBar.svelte b/webapp/src/lib/components/SearchBar.svelte new file mode 100644 index 00000000..bc051b14 --- /dev/null +++ b/webapp/src/lib/components/SearchBar.svelte @@ -0,0 +1,30 @@ + + +
                +
                + +
                + +
                \ No newline at end of file diff --git a/webapp/src/lib/components/SearchFilterBar.svelte b/webapp/src/lib/components/SearchFilterBar.svelte new file mode 100644 index 00000000..fab0288c --- /dev/null +++ b/webapp/src/lib/components/SearchFilterBar.svelte @@ -0,0 +1,55 @@ + + +
                +
                +
                +
                + + +
                +
                + {#if showPerPageSelector} +
                +
                + + +
                +
                + {/if} +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/TablePagination.svelte b/webapp/src/lib/components/TablePagination.svelte new file mode 100644 index 00000000..f5cb0d96 --- /dev/null +++ b/webapp/src/lib/components/TablePagination.svelte @@ -0,0 +1,98 @@ + + +{#if totalPages > 1} +
                + +
                + + +
                + + + +
                +{/if} \ No newline at end of file diff --git a/webapp/src/lib/components/Toast.svelte b/webapp/src/lib/components/Toast.svelte new file mode 100644 index 00000000..02197e34 --- /dev/null +++ b/webapp/src/lib/components/Toast.svelte @@ -0,0 +1,107 @@ + + + +
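The table, search bar and pagination components above only render page controls when totalPages > 1. The pagination arithmetic they rely on, as a standalone sketch rather than the components' actual script:

    function paginate<T>(
        items: T[],
        page: number,
        perPage: number
    ): { pageItems: T[]; totalPages: number } {
        const totalPages = Math.max(1, Math.ceil(items.length / perPage));
        const currentPage = Math.min(Math.max(1, page), totalPages);
        const start = (currentPage - 1) * perPage;
        return { pageItems: items.slice(start, start + perPage), totalPages };
    }

    // e.g. const { pageItems, totalPages } = paginate(instances, 2, 25);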
                + {#each toasts as toast (toast.id)} +
                +
                +
                + {@html getToastIcon(toast.type)} +
                +
                +

                + {toast.title} +

                + {#if toast.message} +
                + {toast.message} +
                + {/if} +
                +
                + +
                +
                +
                + {/each} +
                \ No newline at end of file diff --git a/webapp/src/lib/components/Tooltip.svelte b/webapp/src/lib/components/Tooltip.svelte new file mode 100644 index 00000000..7071bf45 --- /dev/null +++ b/webapp/src/lib/components/Tooltip.svelte @@ -0,0 +1,29 @@ + + +
                + + + + + + +
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdateEnterpriseModal.svelte b/webapp/src/lib/components/UpdateEnterpriseModal.svelte new file mode 100644 index 00000000..839fd927 --- /dev/null +++ b/webapp/src/lib/components/UpdateEnterpriseModal.svelte @@ -0,0 +1,207 @@ + + + dispatch('close')}> +
                +
                +

                Update Enterprise

                +

                {enterprise.name}

                +
                + +
                + + {#if error} +
                +

                {error}

                +
                + {/if} + + {#if loading} +
                +
                +

                Loading...

                +
                + {:else} +
                + +
                + + +

                + Only showing credentials for GitHub endpoints +

                +
                + + +
                + + +
                + + +
                +
                + + +
                + + {#if changeWebhookSecret} +
                +
                + + +
                + {#if !generateWebhookSecret} + + {:else} +

                + A new webhook secret will be automatically generated +

                + {/if} +
                + {/if} +
                + + +
                + + +
                +
                + {/if} +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdateEntityModal.svelte b/webapp/src/lib/components/UpdateEntityModal.svelte new file mode 100644 index 00000000..c2b80c34 --- /dev/null +++ b/webapp/src/lib/components/UpdateEntityModal.svelte @@ -0,0 +1,265 @@ + + + dispatch('close')}> +
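The update forms above restrict the credentials dropdown to the entity's endpoint type ("Only showing credentials for GitHub endpoints"). A sketch of that filter; the endpoint.endpoint_type field on credentials is an assumption, mirroring how the endpoint cell reads other entities in this patch:

    interface CredentialLike {
        name?: string;
        endpoint?: { endpoint_type?: string };
    }

    function credentialsForEndpointType<T extends CredentialLike>(
        credentials: T[],
        endpointType: string
    ): T[] {
        return credentials.filter((c) => c.endpoint?.endpoint_type === endpointType);
    }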
                +
                +

                Update {getEntityTitle()}

                +

                {getEntityDisplayName()}

                +
                + +
                + {#if error} +
                +

                {error}

                +
                + {/if} + + +
                +

                {getEntityTitle()} Information

                +
                + {#if entityType === 'repository'} +
                + Owner: + {getOwner()} +
                + {/if} +
                + Name: + {entity.name} +
                +
                + Endpoint: + {entity.endpoint?.name} +
                +
                + Current Credentials: + {entity.credentials_name} +
                +
                + Current Pool Balancer: + {entity.pool_balancing_type || 'roundrobin'} +
                +
                +
                + +
                + +
                + + {#if loadingCredentials} +
                + {:else} + + {/if} +

                + Leave unchanged to keep current credentials +

                +
                + + +
                + + +

                + Round Robin distributes jobs evenly across pools; Pack fills pools in order +


                +
                + + +
                +
                + + +
                + + {#if changeWebhookSecret} +
                + + +

                + Leave empty to auto-generate a new secret +

                +
                + {/if} +
                +
                + + +
                + + +
                +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdateOrganizationModal.svelte b/webapp/src/lib/components/UpdateOrganizationModal.svelte new file mode 100644 index 00000000..848804f5 --- /dev/null +++ b/webapp/src/lib/components/UpdateOrganizationModal.svelte @@ -0,0 +1,210 @@ + + + dispatch('close')}> +
                +
                +

                Update Organization

                +

                {organization.name}

                +
                + +
                + + {#if error} +
                +

                {error}

                +
                + {/if} + + {#if loading} +
                +
                +

                Loading...

                +
                + {:else} +
                + +
                + + +

                + Only showing credentials for {organizationEndpointType} endpoints +

                +
                + + +
                + + +
                + + +
                +
                + + +
                + + {#if changeWebhookSecret} +
                +
                + + +
                + {#if !generateWebhookSecret} + + {:else} +

                + A new webhook secret will be automatically generated +

                + {/if} +
                + {/if} +
                + + +
                + + +
                +
                + {/if} +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdatePoolModal.svelte b/webapp/src/lib/components/UpdatePoolModal.svelte new file mode 100644 index 00000000..cbd85d8e --- /dev/null +++ b/webapp/src/lib/components/UpdatePoolModal.svelte @@ -0,0 +1,426 @@ + + + dispatch('close')}> +
                +
                +

                + Update Pool {pool.id} +

                +
                + +
                + {#if error} +
                +

                {error}

                +
                + {/if} + + +
                +

                Pool Information (Read-only)

                +
                +
                + Provider: + {pool.provider_name} +
                +
                + Entity: + + {getEntityType(pool)}: {getEntityName(pool)} + +
                +
                +
                + + +
                +

                + Image & OS Configuration +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                +
                + + +
                +

                + Runner Limits & Timing +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                +
                + + +
                +

                + Advanced Settings +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                + + Tags + +
                +
                + + +
                + {#if tags.length > 0} +
                + {#each tags as tag, index} + + {tag} + + + {/each} +
                + {/if} +
                +
                +
                + + +
                +
                + + Extra Specs (JSON) + + +
                +
                + + +
                + + +
                +
                + + +
                + + +
                +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdateRepositoryModal.svelte b/webapp/src/lib/components/UpdateRepositoryModal.svelte new file mode 100644 index 00000000..e5b209a9 --- /dev/null +++ b/webapp/src/lib/components/UpdateRepositoryModal.svelte @@ -0,0 +1,146 @@ + + + dispatch('close')}> +
                +
                +

                Update Repository

                +

                {repository.owner}/{repository.name}

                +
                + +
                + {#if error} +
                +

                {error}

                +
                + {/if} + + +
                +

                Repository Information

                +
                +
                + Owner: + {repository.owner} +
                +
                + Name: + {repository.name} +
                +
                + Endpoint: + {repository.endpoint?.name} +
                +
                + Credentials: + {repository.credentials_name} +
                +
                +
                + + +
                +
                + + +
                + + {#if changeWebhookSecret} +
                + + +

                + Leave empty to auto-generate a new secret +

                +
                + {/if} +
                + + +
                + + +
                +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/UpdateScaleSetModal.svelte b/webapp/src/lib/components/UpdateScaleSetModal.svelte new file mode 100644 index 00000000..73a31d63 --- /dev/null +++ b/webapp/src/lib/components/UpdateScaleSetModal.svelte @@ -0,0 +1,339 @@ + + + dispatch('close')}> +
                +
                +

                + Update Scale Set {scaleSet.name} +

                +
                + +
                + {#if error} +
                +

                {error}

                +
                + {/if} + + +
                +

                Scale Set Information

                +
                +
                + Provider: + {scaleSet.provider_name} +
                +
                + Entity: + + {#if scaleSet.repo_name}Repository: {scaleSet.repo_name} + {:else if scaleSet.org_name}Organization: {scaleSet.org_name} + {:else if scaleSet.enterprise_name}Enterprise: {scaleSet.enterprise_name} + {:else}Unknown Entity{/if} + +
                +
                +
                + + +
                + + +
                + + +
                +

                + Image & OS Configuration +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                +
                + + +
                +

                + Runner Limits & Timing +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                +
                + + +
                +

                + Advanced Settings +

                +
                +
                + + +
                +
                + + +
                +
                + + +
                +
                + + Extra Specs (JSON) + + +
                +
                + + +
                + + +
                +
                + + +
                + + +
                +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/WebhookSection.svelte b/webapp/src/lib/components/WebhookSection.svelte new file mode 100644 index 00000000..4d263935 --- /dev/null +++ b/webapp/src/lib/components/WebhookSection.svelte @@ -0,0 +1,172 @@ + + +
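The scale set modal above labels the parent entity by falling back through repo_name, org_name and enterprise_name. The same fallback as a helper; pools are assumed to expose the same fields:

    interface EntityRef {
        repo_name?: string;
        org_name?: string;
        enterprise_name?: string;
    }

    function entityLabel(item: EntityRef): string {
        if (item.repo_name) return `Repository: ${item.repo_name}`;
        if (item.org_name) return `Organization: ${item.org_name}`;
        if (item.enterprise_name) return `Enterprise: ${item.enterprise_name}`;
        return 'Unknown Entity';
    }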
                +
                +
                +
                +

                + Webhook Status +

                +
                + {#if checking} +
                +
                + Checking... +
                + {:else if isInstalled} +
                + + + + Webhook installed +
                + {#if webhookInfo} +
                + URL: {webhookInfo.url || 'N/A'} +
                + {/if} + {:else} +
                + + + + No webhook installed +
                + {/if} +
                +
                + +
                + {#if !checking} + {#if isInstalled} + + {:else} + + {/if} + {/if} +
                +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/cells/ActionsCell.svelte b/webapp/src/lib/components/cells/ActionsCell.svelte new file mode 100644 index 00000000..f79df481 --- /dev/null +++ b/webapp/src/lib/components/cells/ActionsCell.svelte @@ -0,0 +1,46 @@ + + +
                + {#each actions as action} + handleAction(action.type)} + /> + {/each} +
                \ No newline at end of file diff --git a/webapp/src/lib/components/cells/EndpointCell.svelte b/webapp/src/lib/components/cells/EndpointCell.svelte new file mode 100644 index 00000000..0978b249 --- /dev/null +++ b/webapp/src/lib/components/cells/EndpointCell.svelte @@ -0,0 +1,15 @@ + + +
                +
                + {@html getForgeIcon(item?.endpoint?.endpoint_type || item?.endpoint_type || 'unknown', iconSize)} +
                +
                + {item?.endpoint?.name || item?.endpoint_name || item?.github_endpoint_name || 'Unknown'} +
                +
                \ No newline at end of file diff --git a/webapp/src/lib/components/cells/EntityCell.svelte b/webapp/src/lib/components/cells/EntityCell.svelte new file mode 100644 index 00000000..f81d4cf9 --- /dev/null +++ b/webapp/src/lib/components/cells/EntityCell.svelte @@ -0,0 +1,84 @@ + + +
                + + {entityName} + + {#if entityType === 'instance' && item?.provider_id} +
                + {item.provider_id} +
                + {/if} +
                \ No newline at end of file diff --git a/webapp/src/lib/components/cells/GenericCell.svelte b/webapp/src/lib/components/cells/GenericCell.svelte new file mode 100644 index 00000000..edbca9f1 --- /dev/null +++ b/webapp/src/lib/components/cells/GenericCell.svelte @@ -0,0 +1,59 @@ + + +{#if type === 'code'} + + {displayValue} + +{:else} + + {displayValue} + +{/if} \ No newline at end of file diff --git a/webapp/src/lib/components/cells/InstancePoolCell.svelte b/webapp/src/lib/components/cells/InstancePoolCell.svelte new file mode 100644 index 00000000..cadd9c17 --- /dev/null +++ b/webapp/src/lib/components/cells/InstancePoolCell.svelte @@ -0,0 +1,19 @@ + + +
                +{#if item?.pool_id} + + Pool: {item.pool_id} + +{:else if item?.scale_set_id} + + Scale Set: {item.scale_set_id} + +{:else} + - +{/if} +
                diff --git a/webapp/src/lib/components/cells/PoolEntityCell.svelte b/webapp/src/lib/components/cells/PoolEntityCell.svelte new file mode 100644 index 00000000..0861d11c --- /dev/null +++ b/webapp/src/lib/components/cells/PoolEntityCell.svelte @@ -0,0 +1,16 @@ + + +
                + + {getEntityName(item, eagerCache)} + + + {getEntityType(item)} + +
                \ No newline at end of file diff --git a/webapp/src/lib/components/cells/StatusCell.svelte b/webapp/src/lib/components/cells/StatusCell.svelte new file mode 100644 index 00000000..5526d307 --- /dev/null +++ b/webapp/src/lib/components/cells/StatusCell.svelte @@ -0,0 +1,118 @@ + + +{#key `${item?.name || 'item'}-${item?.[statusField] || 'status'}-${item?.updated_at || 'time'}`} + +{/key} \ No newline at end of file diff --git a/webapp/src/lib/components/cells/index.ts b/webapp/src/lib/components/cells/index.ts new file mode 100644 index 00000000..b54e5f74 --- /dev/null +++ b/webapp/src/lib/components/cells/index.ts @@ -0,0 +1,8 @@ +export { default as EntityCell } from './EntityCell.svelte'; +export { default as EndpointCell } from './EndpointCell.svelte'; +export { default as StatusCell } from './StatusCell.svelte'; +export { default as ActionsCell } from './ActionsCell.svelte'; +export { default as GenericCell } from './GenericCell.svelte'; +export { default as PoolEntityCell } from './PoolEntityCell.svelte'; +export { default as InstancePoolCell } from './InstancePoolCell.svelte'; +export { default as MobileCard } from '../MobileCard.svelte'; \ No newline at end of file diff --git a/webapp/src/lib/stores/auth.ts b/webapp/src/lib/stores/auth.ts new file mode 100644 index 00000000..5e5befa4 --- /dev/null +++ b/webapp/src/lib/stores/auth.ts @@ -0,0 +1,281 @@ +import { writable } from 'svelte/store'; +import { browser } from '$app/environment'; +import { garmApi } from '../api/client.js'; + +// Check if we're in development mode (cross-origin setup) +const isDevelopmentMode = () => { + if (!browser) return false; + // Development mode: either VITE_GARM_API_URL is set OR we detect cross-origin + return !!(import.meta.env.VITE_GARM_API_URL) || window.location.port === '5173'; +}; + +interface AuthState { + isAuthenticated: boolean; + user: string | null; + loading: boolean; + needsInitialization: boolean; +} + +const initialState: AuthState = { + isAuthenticated: false, + user: null, + loading: true, + needsInitialization: false +}; + +// Keep using writable store for compatibility with existing API calls +// but enhance with Svelte 5 features where possible +export const authStore = writable(initialState); + +// Cookie utilities +function setCookie(name: string, value: string, days: number = 7): void { + if (!browser) return; + + const expires = new Date(); + expires.setTime(expires.getTime() + (days * 24 * 60 * 60 * 1000)); + document.cookie = `${name}=${value};expires=${expires.toUTCString()};path=/;SameSite=Lax`; +} + +function getCookie(name: string): string | null { + if (!browser) return null; + + const nameEQ = name + "="; + const ca = document.cookie.split(';'); + for (let i = 0; i < ca.length; i++) { + let c = ca[i]; + while (c.charAt(0) === ' ') c = c.substring(1, c.length); + if (c.indexOf(nameEQ) === 0) { + const value = c.substring(nameEQ.length, c.length); + return value; + } + } + return null; +} + +function deleteCookie(name: string): void { + if (!browser) return; + document.cookie = `${name}=;expires=Thu, 01 Jan 1970 00:00:01 GMT;path=/`; +} + +// Auth utilities +export const auth = { + async login(username: string, password: string): Promise { + try { + authStore.update(state => ({ ...state, loading: true })); + + const response = await garmApi.login({ username, password }); + + // Store JWT token in cookies for server authentication and set it in the API client + if (browser) { + setCookie('garm_token', response.token); + setCookie('garm_user', username); + 
} + + // Set the token in the API client for future requests + garmApi.setToken(response.token); + + authStore.set({ + isAuthenticated: true, + user: username, + loading: false, + needsInitialization: false + }); + } catch (error) { + authStore.update(state => ({ ...state, loading: false })); + throw error; + } + }, + + logout(): void { + if (browser) { + deleteCookie('garm_token'); + deleteCookie('garm_user'); + } + + authStore.set({ + isAuthenticated: false, + user: null, + loading: false, + needsInitialization: false + }); + }, + + async init(): Promise { + if (browser) { + try { + authStore.update(state => ({ ...state, loading: true })); + + // First, always check initialization status by doing GET /api/v1/login + await auth.checkInitializationStatus(); + + // If we get here without needsInitialization being set, check for existing auth + const token = getCookie('garm_token'); + const user = getCookie('garm_user'); + + if (token && user) { + // Set the token in the API client for future requests + garmApi.setToken(token); + + // Verify token is still valid + const isValid = await auth.checkAuth(); + if (isValid) { + // Token is valid, set authenticated state + authStore.set({ + isAuthenticated: true, + user, + loading: false, + needsInitialization: false + }); + return; + } + } + + // No valid token, user needs to login (but GARM is initialized) + authStore.update(state => ({ + ...state, + loading: false, + needsInitialization: false + })); + + } catch (error) { + // If checkInitializationStatus threw an error, it should have set needsInitialization + authStore.update(state => ({ ...state, loading: false })); + } + } else { + authStore.update(state => ({ ...state, loading: false })); + } + }, + + // Check initialization status by calling GET /api/v1/login + async checkInitializationStatus(): Promise { + try { + // Make a GET request to /api/v1/login to check status + const headers: Record = { + 'Accept': 'application/json', + }; + + // In development mode, always use Bearer token; in production, prefer cookies + const token = getCookie('garm_token'); + const isDevMode = isDevelopmentMode(); + + if (isDevMode && token) { + headers['Authorization'] = `Bearer ${token}`; + } + + const response = await fetch('/api/v1/login', { + method: 'GET', + headers, + // Only include credentials in production (same-origin) + credentials: isDevMode ? 
'omit' : 'include' + }); + + if (!response.ok) { + if (response.status === 409) { + const errorData = await response.json(); + if (errorData.error === 'init_required') { + // GARM needs initialization + authStore.update(state => ({ + ...state, + needsInitialization: true, + loading: false + })); + throw new Error('Initialization required'); + } + } + // For other 4xx/5xx errors, assume GARM is initialized + return; + } + + // GET /api/v1/login succeeded, GARM is initialized + return; + + } catch (error) { + // If it's our initialization error, re-throw it + if (error instanceof Error && error.message === 'Initialization required') { + throw error; + } + // For network errors or other issues, assume GARM is initialized + return; + } + }, + + // Check if token is still valid by making a test API call + async checkAuth(): Promise { + try { + // First check if initialization is still required + await auth.checkInitializationStatus(); + + // If we get here, GARM is initialized, now check if token is valid + await garmApi.getControllerInfo(); + return true; + } catch (error: any) { + // If it's initialization required, the checkInitializationStatus already handled it + if (error instanceof Error && error.message === 'Initialization required') { + return false; + } + + // Check if it's an initialization required error from the API call + if (error?.response?.status === 409 && + error?.response?.data?.error === 'init_required') { + authStore.update(state => ({ + ...state, + needsInitialization: true, + loading: false + })); + return false; + } + + // Token is invalid, logout + auth.logout(); + return false; + } + }, + + // Initialize GARM controller + async initialize( + username: string, + email: string, + password: string, + fullName?: string, + urls?: { + callbackUrl?: string; + metadataUrl?: string; + webhookUrl?: string; + } + ): Promise { + try { + authStore.update(state => ({ ...state, loading: true })); + + // Step 1: Create the admin user + const response = await garmApi.firstRun({ + username, + email, + password, + full_name: fullName || username + }); + + // Step 2: Login with the new credentials + await auth.login(username, password); + + // Step 3: Set controller URLs (similar to garm-cli init) + const currentUrl = window.location.origin; + const finalMetadataUrl = urls?.metadataUrl || `${currentUrl}/api/v1/metadata`; + const finalCallbackUrl = urls?.callbackUrl || `${currentUrl}/api/v1/callbacks`; + const finalWebhookUrl = urls?.webhookUrl || `${currentUrl}/webhooks`; + + await garmApi.updateController({ + metadata_url: finalMetadataUrl, + callback_url: finalCallbackUrl, + webhook_url: finalWebhookUrl + }); + + authStore.update(state => ({ + ...state, + needsInitialization: false + })); + } catch (error) { + authStore.update(state => ({ ...state, loading: false })); + throw error; + } + } +}; \ No newline at end of file diff --git a/webapp/src/lib/stores/eager-cache.ts b/webapp/src/lib/stores/eager-cache.ts new file mode 100644 index 00000000..37835e82 --- /dev/null +++ b/webapp/src/lib/stores/eager-cache.ts @@ -0,0 +1,609 @@ +import { writable, get } from 'svelte/store'; +import { garmApi } from '../api/client.js'; +import { websocketStore, type WebSocketEvent } from './websocket.js'; +import type { + Repository, + Organization, + Enterprise, + Pool, + ScaleSet, + ForgeCredentials, + ForgeEndpoint, + ControllerInfo +} from '../api/generated/api.js'; + +interface EagerCacheState { + repositories: Repository[]; + organizations: Organization[]; + enterprises: Enterprise[]; + pools: 
Pool[]; + scalesets: ScaleSet[]; + credentials: ForgeCredentials[]; + endpoints: ForgeEndpoint[]; + controllerInfo: ControllerInfo | null; + loading: { + repositories: boolean; + organizations: boolean; + enterprises: boolean; + pools: boolean; + scalesets: boolean; + credentials: boolean; + endpoints: boolean; + controllerInfo: boolean; + }; + loaded: { + repositories: boolean; + organizations: boolean; + enterprises: boolean; + pools: boolean; + scalesets: boolean; + credentials: boolean; + endpoints: boolean; + controllerInfo: boolean; + }; + errorMessages: { + repositories: string; + organizations: string; + enterprises: string; + pools: string; + scalesets: string; + credentials: string; + endpoints: string; + controllerInfo: string; + }; +} + +const initialState: EagerCacheState = { + repositories: [], + organizations: [], + enterprises: [], + pools: [], + scalesets: [], + credentials: [], + endpoints: [], + controllerInfo: null, + loading: { + repositories: false, + organizations: false, + enterprises: false, + pools: false, + scalesets: false, + credentials: false, + endpoints: false, + controllerInfo: false, + }, + loaded: { + repositories: false, + organizations: false, + enterprises: false, + pools: false, + scalesets: false, + credentials: false, + endpoints: false, + controllerInfo: false, + }, + errorMessages: { + repositories: '', + organizations: '', + enterprises: '', + pools: '', + scalesets: '', + credentials: '', + endpoints: '', + controllerInfo: '', + } +}; + +export const eagerCache = writable(initialState); + +class EagerCacheManager { + private unsubscribers: (() => void)[] = []; + private loadingPromises: Map> = new Map(); + private retryAttempts: Map = new Map(); + private readonly MAX_RETRIES = 3; + private readonly RETRY_DELAY_MS = 1000; + private websocketStatusUnsubscriber: (() => void) | null = null; + + async loadResource(resourceType: keyof Omit, priority: boolean = false) { + // Avoid duplicate loading + if (this.loadingPromises.has(resourceType)) { + return this.loadingPromises.get(resourceType); + } + + // Clear any previous error message and set loading state + eagerCache.update(state => ({ + ...state, + loading: { ...state.loading, [resourceType]: true }, + errorMessages: { ...state.errorMessages, [resourceType]: '' } + })); + + const loadPromise = this.attemptLoad(resourceType); + this.loadingPromises.set(resourceType, loadPromise); + + try { + const data = await loadPromise; + eagerCache.update(state => ({ + ...state, + [resourceType]: data, + loading: { ...state.loading, [resourceType]: false }, + loaded: { ...state.loaded, [resourceType]: true }, + errorMessages: { ...state.errorMessages, [resourceType]: '' } + })); + + // Reset retry attempts on success + this.retryAttempts.delete(resourceType); + + // If this is a priority load, start background loading of other resources + if (priority) { + this.startBackgroundLoading(resourceType); + } + + return data; + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : 'Failed to load data'; + eagerCache.update(state => ({ + ...state, + loading: { ...state.loading, [resourceType]: false }, + errorMessages: { ...state.errorMessages, [resourceType]: errorMessage } + })); + console.error(`Failed to load ${resourceType}:`, error); + throw error; + } finally { + this.loadingPromises.delete(resourceType); + } + } + + private async attemptLoad(resourceType: keyof Omit): Promise { + const currentAttempt = (this.retryAttempts.get(resourceType) || 0) + 1; + this.retryAttempts.set(resourceType, currentAttempt); + + try { + let loadPromise: Promise; + + switch (resourceType) { + case 'repositories': + loadPromise = garmApi.listRepositories(); + break; + case 'organizations': + loadPromise = garmApi.listOrganizations(); + break; + case 'enterprises': + loadPromise = garmApi.listEnterprises(); + break; + case 'pools': + loadPromise = garmApi.listAllPools(); + break; + case 'scalesets': + loadPromise = garmApi.listScaleSets(); + break; + case 'credentials': + loadPromise = garmApi.listAllCredentials(); + break; + case 'endpoints': + loadPromise = garmApi.listAllEndpoints(); + break; + case 'controllerInfo': + loadPromise = garmApi.getControllerInfo(); + break; + default: + throw new Error(`Unknown resource type: ${resourceType}`); + } + + return await loadPromise; + } catch (error) { + // If we haven't reached max retries, try again with exponential backoff + if (currentAttempt < this.MAX_RETRIES) { + const delay = this.RETRY_DELAY_MS * Math.pow(2, currentAttempt - 1); // Exponential backoff + console.warn(`Attempt ${currentAttempt} failed for ${resourceType}, retrying in ${delay}ms...`, error); + + await new Promise(resolve => setTimeout(resolve, delay)); + return this.attemptLoad(resourceType); + } else { + console.error(`All ${this.MAX_RETRIES} attempts failed for ${resourceType}:`, error); + throw error; + } + } + } + + private async startBackgroundLoading(excludeResource: string) { + const resourceTypes = ['repositories', 'organizations', 'enterprises', 'pools', 'scalesets', 'credentials', 'endpoints']; + const toLoad = resourceTypes.filter(type => type !== excludeResource); + + // Load in background with slight delays to avoid overwhelming the API + for (const resourceType of toLoad) { + setTimeout(() => { + this.loadResource(resourceType as any, false).catch(error => { + console.warn(`Background loading failed for ${resourceType}:`, error); + // Background loading failures are not critical, just log them + }); + }, 100 * toLoad.indexOf(resourceType)); + } + } + + // Public method to manually retry loading a resource + retryResource(resourceType: keyof Omit) { + // Clear any existing retry attempts to start fresh + this.retryAttempts.delete(resourceType); + return this.loadResource(resourceType, true); + } + + setupWebSocketSubscriptions() { + // Clean up existing subscriptions + this.cleanup(); + + // Subscribe to all resource types + const subscriptions = [ + websocketStore.subscribeToEntity('repository', ['create', 'update', 'delete'], this.handleRepositoryEvent.bind(this)), + websocketStore.subscribeToEntity('organization', ['create', 'update', 'delete'], this.handleOrganizationEvent.bind(this)), + websocketStore.subscribeToEntity('enterprise', ['create', 'update', 'delete'], this.handleEnterpriseEvent.bind(this)), + websocketStore.subscribeToEntity('pool', ['create', 'update', 'delete'], this.handlePoolEvent.bind(this)), + websocketStore.subscribeToEntity('scaleset', ['create', 'update', 'delete'], this.handleScaleSetEvent.bind(this)), + 
websocketStore.subscribeToEntity('controller', ['update'], this.handleControllerEvent.bind(this)), + websocketStore.subscribeToEntity('github_credentials', ['create', 'update', 'delete'], this.handleCredentialsEvent.bind(this)), + websocketStore.subscribeToEntity('gitea_credentials', ['create', 'update', 'delete'], this.handleCredentialsEvent.bind(this)), + websocketStore.subscribeToEntity('github_endpoint', ['create', 'update', 'delete'], this.handleEndpointEvent.bind(this)) + ]; + + this.unsubscribers = subscriptions; + + // Monitor WebSocket connection status + this.setupWebSocketStatusMonitoring(); + } + + private setupWebSocketStatusMonitoring() { + if (this.websocketStatusUnsubscriber) { + this.websocketStatusUnsubscriber(); + } + + let wasConnected = false; + + this.websocketStatusUnsubscriber = websocketStore.subscribe(state => { + // When WebSocket connects for the first time or reconnects after being disconnected + if (state.connected && !wasConnected) { + console.log('[EagerCache] WebSocket connected - reinitializing cache'); + // Reload all resources when WebSocket connects + this.initializeAllResources(); + } + wasConnected = state.connected; + }); + } + + // Reinitialize all resources when WebSocket connects + private async initializeAllResources() { + const resourceTypes: (keyof Omit)[] = [ + 'repositories', 'organizations', 'enterprises', 'pools', 'scalesets', + 'credentials', 'endpoints', 'controllerInfo' + ]; + + // Load all resources in parallel + const loadPromises = resourceTypes.map(resourceType => + this.loadResource(resourceType, true).catch(error => { + console.warn(`Failed to reload ${resourceType} on WebSocket reconnect:`, error); + }) + ); + + await Promise.allSettled(loadPromises); + } + + private handleRepositoryEvent(event: WebSocketEvent) { + eagerCache.update(state => { + if (!state.loaded.repositories) return state; + + const repositories = [...state.repositories]; + const repo = event.payload as Repository; + + if (event.operation === 'create') { + repositories.push(repo); + } else if (event.operation === 'update') { + const index = repositories.findIndex(r => r.id === repo.id); + if (index !== -1) repositories[index] = repo; + } else if (event.operation === 'delete') { + const repoId = typeof repo === 'object' ? repo.id : repo; + const index = repositories.findIndex(r => r.id === repoId); + if (index !== -1) repositories.splice(index, 1); + } + + return { ...state, repositories }; + }); + } + + private handleOrganizationEvent(event: WebSocketEvent) { + eagerCache.update(state => { + if (!state.loaded.organizations) return state; + + const organizations = [...state.organizations]; + const org = event.payload as Organization; + + if (event.operation === 'create') { + organizations.push(org); + } else if (event.operation === 'update') { + const index = organizations.findIndex(o => o.id === org.id); + if (index !== -1) organizations[index] = org; + } else if (event.operation === 'delete') { + const orgId = typeof org === 'object' ? 
org.id : org; + const index = organizations.findIndex(o => o.id === orgId); + if (index !== -1) organizations.splice(index, 1); + } + + return { ...state, organizations }; + }); + } + + private handleEnterpriseEvent(event: WebSocketEvent) { + eagerCache.update(state => { + if (!state.loaded.enterprises) return state; + + const enterprises = [...state.enterprises]; + const ent = event.payload as Enterprise; + + if (event.operation === 'create') { + enterprises.push(ent); + } else if (event.operation === 'update') { + const index = enterprises.findIndex(e => e.id === ent.id); + if (index !== -1) enterprises[index] = ent; + } else if (event.operation === 'delete') { + const entId = typeof ent === 'object' ? ent.id : ent; + const index = enterprises.findIndex(e => e.id === entId); + if (index !== -1) enterprises.splice(index, 1); + } + + return { ...state, enterprises }; + }); + } + + private handlePoolEvent(event: WebSocketEvent) { + eagerCache.update(state => { + if (!state.loaded.pools) return state; + + const pools = [...state.pools]; + const pool = event.payload as Pool; + + if (event.operation === 'create') { + pools.push(pool); + } else if (event.operation === 'update') { + const index = pools.findIndex(p => p.id === pool.id); + if (index !== -1) pools[index] = pool; + } else if (event.operation === 'delete') { + const poolId = typeof pool === 'object' ? pool.id : pool; + const index = pools.findIndex(p => p.id === poolId); + if (index !== -1) pools.splice(index, 1); + } + + return { ...state, pools }; + }); + } + + private handleScaleSetEvent(event: WebSocketEvent) { + eagerCache.update(state => { + if (!state.loaded.scalesets) return state; + + const scalesets = [...state.scalesets]; + const scaleset = event.payload as ScaleSet; + + if (event.operation === 'create') { + scalesets.push(scaleset); + } else if (event.operation === 'update') { + const index = scalesets.findIndex(s => s.id === scaleset.id); + if (index !== -1) scalesets[index] = scaleset; + } else if (event.operation === 'delete') { + const scalesetId = typeof scaleset === 'object' ? scaleset.id : scaleset; + const index = scalesets.findIndex(s => s.id === scalesetId); + if (index !== -1) scalesets.splice(index, 1); + } + + return { ...state, scalesets }; + }); + } + + private handleCredentialsEvent(event: WebSocketEvent) { + eagerCache.update(state => { + if (!state.loaded.credentials) return state; + + const credentials = [...state.credentials]; + const cred = event.payload as ForgeCredentials; + + if (event.operation === 'create') { + credentials.push(cred); + } else if (event.operation === 'update') { + const index = credentials.findIndex(c => c.id === cred.id); + if (index !== -1) credentials[index] = cred; + } else if (event.operation === 'delete') { + const credId = typeof cred === 'object' ? cred.id : cred; + const index = credentials.findIndex(c => c.id === credId); + if (index !== -1) credentials.splice(index, 1); + } + + return { ...state, credentials }; + }); + } + + private handleEndpointEvent(event: WebSocketEvent) { + eagerCache.update(state => { + if (!state.loaded.endpoints) return state; + + const endpoints = [...state.endpoints]; + const endpoint = event.payload as ForgeEndpoint; + + if (event.operation === 'create') { + endpoints.push(endpoint); + } else if (event.operation === 'update') { + const index = endpoints.findIndex(e => e.name === endpoint.name); + if (index !== -1) endpoints[index] = endpoint; + } else if (event.operation === 'delete') { + const endpointName = typeof endpoint === 'object' ? 
endpoint.name : endpoint; + const index = endpoints.findIndex(e => e.name === endpointName); + if (index !== -1) endpoints.splice(index, 1); + } + + return { ...state, endpoints }; + }); + } + + cleanup() { + this.unsubscribers.forEach(unsubscribe => unsubscribe()); + this.unsubscribers = []; + + if (this.websocketStatusUnsubscriber) { + this.websocketStatusUnsubscriber(); + this.websocketStatusUnsubscriber = null; + } + } + + // Helper method to check if we should use cache or direct API + private shouldUseCache(): boolean { + const wsState = get(websocketStore); + return wsState.connected; + } + + // Helper methods for components - check WebSocket status first + async getRepositories(): Promise { + const wsState = get(websocketStore); + + if (!wsState.connected) { + // WebSocket disconnected - fetch directly from API + console.log('[EagerCache] WebSocket disconnected - fetching repositories directly from API'); + return await garmApi.listRepositories(); + } + + const state = get(eagerCache); + if (state.loaded.repositories) { + return state.repositories; + } + + return this.loadResource('repositories', true); + } + + async getOrganizations(): Promise { + const wsState = get(websocketStore); + + if (!wsState.connected) { + console.log('[EagerCache] WebSocket disconnected - fetching organizations directly from API'); + return await garmApi.listOrganizations(); + } + + const state = get(eagerCache); + if (state.loaded.organizations) { + return state.organizations; + } + + return this.loadResource('organizations', true); + } + + async getEnterprises(): Promise { + const wsState = get(websocketStore); + + if (!wsState.connected) { + console.log('[EagerCache] WebSocket disconnected - fetching enterprises directly from API'); + return await garmApi.listEnterprises(); + } + + const state = get(eagerCache); + if (state.loaded.enterprises) { + return state.enterprises; + } + + return this.loadResource('enterprises', true); + } + + async getPools(): Promise { + const wsState = get(websocketStore); + + if (!wsState.connected) { + console.log('[EagerCache] WebSocket disconnected - fetching pools directly from API'); + return await garmApi.listAllPools(); + } + + const state = get(eagerCache); + if (state.loaded.pools) { + return state.pools; + } + + return this.loadResource('pools', true); + } + + async getScaleSets(): Promise { + const wsState = get(websocketStore); + + if (!wsState.connected) { + console.log('[EagerCache] WebSocket disconnected - fetching scalesets directly from API'); + return await garmApi.listScaleSets(); + } + + const state = get(eagerCache); + if (state.loaded.scalesets) { + return state.scalesets; + } + + return this.loadResource('scalesets', true); + } + + async getCredentials(): Promise { + const wsState = get(websocketStore); + + if (!wsState.connected) { + console.log('[EagerCache] WebSocket disconnected - fetching credentials directly from API'); + return await garmApi.listAllCredentials(); + } + + const state = get(eagerCache); + if (state.loaded.credentials) { + return state.credentials; + } + + return this.loadResource('credentials', true); + } + + async getEndpoints(): Promise { + const wsState = get(websocketStore); + + if (!wsState.connected) { + console.log('[EagerCache] WebSocket disconnected - fetching endpoints directly from API'); + return await garmApi.listAllEndpoints(); + } + + const state = get(eagerCache); + if (state.loaded.endpoints) { + return state.endpoints; + } + + return this.loadResource('endpoints', true); + } + + async getControllerInfo(): 
Promise { + const wsState = get(websocketStore); + + if (!wsState.connected) { + console.log('[EagerCache] WebSocket disconnected - fetching controller info directly from API'); + return await garmApi.getControllerInfo(); + } + + const state = get(eagerCache); + if (state.loaded.controllerInfo) { + return state.controllerInfo; + } + + return this.loadResource('controllerInfo', true); + } + + private handleControllerEvent(event: WebSocketEvent) { + eagerCache.update(state => { + if (!state.loaded.controllerInfo) return state; + + const controllerInfo = event.payload as ControllerInfo; + + // Controller info is a singleton, so we just replace it + if (event.operation === 'update') { + return { ...state, controllerInfo }; + } + + return state; + }); + } +} + +export const eagerCacheManager = new EagerCacheManager(); + +// Initialize websocket subscriptions when the module is loaded +if (typeof window !== 'undefined') { + eagerCacheManager.setupWebSocketSubscriptions(); +} \ No newline at end of file diff --git a/webapp/src/lib/stores/toast.ts b/webapp/src/lib/stores/toast.ts new file mode 100644 index 00000000..84619ec1 --- /dev/null +++ b/webapp/src/lib/stores/toast.ts @@ -0,0 +1,58 @@ +import { writable } from 'svelte/store'; + +export interface Toast { + id: string; + type: 'success' | 'error' | 'info' | 'warning'; + title: string; + message: string; + duration?: number; // milliseconds, 0 for manual dismiss +} + +function createToastStore() { + const { subscribe, set, update } = writable([]); + + const store = { + subscribe, + add: (toast: Omit) => { + const id = Math.random().toString(36).substr(2, 9); + const newToast: Toast = { + ...toast, + id, + duration: toast.duration ?? 5000 + }; + + update(toasts => [...toasts, newToast]); + + // Auto-remove after duration + if (newToast.duration && newToast.duration > 0) { + setTimeout(() => { + update(toasts => toasts.filter(t => t.id !== id)); + }, newToast.duration); + } + + return id; + }, + remove: (id: string) => { + update(toasts => toasts.filter(t => t.id !== id)); + }, + clear: () => { + set([]); + }, + success: (title: string, message: string = '', duration?: number) => { + return store.add({ type: 'success', title, message, duration }); + }, + error: (title: string, message: string = '', duration?: number) => { + return store.add({ type: 'error', title, message, duration }); + }, + info: (title: string, message: string = '', duration?: number) => { + return store.add({ type: 'info', title, message, duration }); + }, + warning: (title: string, message: string = '', duration?: number) => { + return store.add({ type: 'warning', title, message, duration }); + } + }; + + return store; +} + +export const toastStore = createToastStore(); \ No newline at end of file diff --git a/webapp/src/lib/stores/websocket.ts b/webapp/src/lib/stores/websocket.ts new file mode 100644 index 00000000..3938023b --- /dev/null +++ b/webapp/src/lib/stores/websocket.ts @@ -0,0 +1,367 @@ +import { writable, get } from 'svelte/store'; + +// Event types that match the websocket API +export type EntityType = + | 'repository' + | 'organization' + | 'enterprise' + | 'pool' + | 'user' + | 'instance' + | 'job' + | 'controller' + | 'github_credentials' + | 'gitea_credentials' + | 'github_endpoint' + | 'scaleset'; + +export type Operation = 'create' | 'update' | 'delete'; + +export interface EventFilter { + 'entity-type': EntityType; + operations: Operation[]; +} + +export interface FilterMessage { + 'send-everything'?: boolean; + filters?: EventFilter[]; +} + +export 
interface WebSocketEvent { + 'entity-type': EntityType; + operation: Operation; + payload: any; +} + +export interface WebSocketState { + connected: boolean; + connecting: boolean; + error: string | null; + lastEvent: WebSocketEvent | null; +} + +// Create the websocket store +function createWebSocketStore() { + const { subscribe, set, update } = writable({ + connected: false, + connecting: false, + error: null, + lastEvent: null + }); + + let ws: WebSocket | null = null; + let reconnectAttempts = 0; + let maxReconnectAttempts = 50; // Increased for more persistent reconnection + let baseReconnectInterval = 1000; // Base interval + let reconnectInterval = 1000; // Current interval + let maxReconnectInterval = 30000; // Max 30 seconds + let reconnectTimeout: number | null = null; + let currentFilters: EventFilter[] = []; + let manuallyDisconnected = false; + + // Event callbacks organized by entity type + const eventCallbacks = new Map void)[]>(); + + function getWebSocketUrl(): string { + const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'; + const host = window.location.host; + return `${protocol}//${host}/api/v1/ws/events`; + } + + function connect() { + if (ws && (ws.readyState === WebSocket.CONNECTING || ws.readyState === WebSocket.OPEN)) { + return; + } + + manuallyDisconnected = false; + update(state => ({ ...state, connecting: true, error: null })); + + try { + const wsUrl = getWebSocketUrl(); + + // Use cookie authentication - no need for Bearer token in protocol + ws = new WebSocket(wsUrl); + + // Set connection timeout + const connectionTimeout = setTimeout(() => { + if (ws && ws.readyState === WebSocket.CONNECTING) { + ws.close(); + } + }, 10000); // 10 second timeout + + ws.onopen = () => { + clearTimeout(connectionTimeout); + reconnectAttempts = 0; + reconnectInterval = baseReconnectInterval; + + update(state => ({ ...state, connected: true, connecting: false, error: null })); + + // Send current filters if any + if (currentFilters.length > 0) { + sendFilters(currentFilters); + } + + // Setup heartbeat (currently no-op, but ready for future use) + startHeartbeat(); + }; + + ws.onmessage = (event) => { + try { + const data = JSON.parse(event.data); + + // Update the store with the last event + update(state => ({ ...state, lastEvent: data })); + + // Call registered callbacks for this entity type + const callbacks = eventCallbacks.get(data['entity-type']) || []; + callbacks.forEach(callback => { + try { + callback(data); + } catch (err) { + console.error('[WebSocket] Error in event callback:', err); + } + }); + } catch (err) { + console.error('[WebSocket] Error parsing message:', err); + } + }; + + ws.onclose = (event) => { + clearTimeout(connectionTimeout); + cleanup(); + + const wasManualDisconnect = event.code === 1000 && manuallyDisconnected; + const errorMessage = event.code !== 1000 ? 
`Connection closed: ${event.reason || 'Unknown reason'}` : null; + + update(state => ({ + ...state, + connected: false, + connecting: false, + error: errorMessage + })); + + // Attempt to reconnect unless it was explicitly a manual disconnect + // This includes server restarts that result in clean closes (code 1000) + if (!wasManualDisconnect) { + scheduleReconnect(); + } + }; + + ws.onerror = (error) => { + clearTimeout(connectionTimeout); + cleanup(); + + update(state => ({ + ...state, + connected: false, + connecting: false, + error: 'WebSocket connection error' + })); + + // Schedule reconnect on error if not manually disconnected + if (!manuallyDisconnected) { + scheduleReconnect(); + } + }; + + } catch (err) { + update(state => ({ + ...state, + connected: false, + connecting: false, + error: err instanceof Error ? err.message : 'Failed to connect' + })); + } + } + + function startHeartbeat() { + // Clear any existing intervals + cleanup(); + + // No need for client-side heartbeat checks since: + // 1. Server handles ping/pong automatically (every ~54 seconds) + // 2. Browser WebSocket automatically responds to ping frames with pong frames + // 3. Server will close connection if it doesn't receive pong responses + // 4. Server may not send any messages if there are no events to stream + // 5. onclose/onerror handlers will trigger reconnection if needed + } + + function cleanup() { + // No intervals to clean up currently + } + + function scheduleReconnect() { + if (manuallyDisconnected) { + return; + } + + if (reconnectTimeout) { + clearTimeout(reconnectTimeout); + } + + reconnectAttempts++; + + // Reset attempts periodically to allow for long-term reconnection + if (reconnectAttempts > maxReconnectAttempts) { + reconnectAttempts = 1; + reconnectInterval = baseReconnectInterval; + } + + const actualInterval = Math.min(reconnectInterval, maxReconnectInterval); + + reconnectTimeout = window.setTimeout(() => { + if (!manuallyDisconnected) { + connect(); + // Exponential backoff with jitter to avoid thundering herd + const jitter = Math.random() * 1000; // 0-1 second jitter + reconnectInterval = Math.min(reconnectInterval * 1.5 + jitter, maxReconnectInterval); + } + }, actualInterval); + } + + function sendFilters(filters: EventFilter[]) { + if (ws && ws.readyState === WebSocket.OPEN) { + const message: FilterMessage = { + 'send-everything': false, + filters: filters + }; + ws.send(JSON.stringify(message)); + currentFilters = [...filters]; + } + } + + function disconnect() { + manuallyDisconnected = true; + + if (reconnectTimeout) { + clearTimeout(reconnectTimeout); + reconnectTimeout = null; + } + + cleanup(); + + if (ws) { + ws.close(1000, 'Manual disconnect'); + ws = null; + } + + // Clear all callbacks + eventCallbacks.clear(); + currentFilters = []; + + update(state => ({ + ...state, + connected: false, + connecting: false, + error: null, + lastEvent: null + })); + } + + // Handle network connectivity changes + function handleNetworkChange() { + if (navigator.onLine && !manuallyDisconnected) { + // Delay reconnection slightly to allow network to stabilize + setTimeout(() => { + if (!ws || ws.readyState === WebSocket.CLOSED || ws.readyState === WebSocket.CLOSING) { + reconnectAttempts = 0; // Reset attempts on network recovery + reconnectInterval = baseReconnectInterval; + connect(); + } + }, 2000); + } + } + + // Listen for network changes + if (typeof window !== 'undefined') { + window.addEventListener('online', handleNetworkChange); + window.addEventListener('offline', () => { + 
update(state => ({ ...state, error: 'Network offline' })); + }); + + // Periodic check to ensure connection is maintained + setInterval(() => { + // Always maintain connection unless manually disconnected + if (!manuallyDisconnected) { + // If we should be connected but aren't, attempt to reconnect + if (!ws || ws.readyState === WebSocket.CLOSED || ws.readyState === WebSocket.CLOSING) { + connect(); + } + } + }, 10000); // Check every 10 seconds + } + + // Subscribe to events for a specific entity type + function subscribeToEntity(entityType: EntityType, operations: Operation[], callback: (event: WebSocketEvent) => void) { + + // Add callback to the list for this entity type + if (!eventCallbacks.has(entityType)) { + eventCallbacks.set(entityType, []); + } + eventCallbacks.get(entityType)!.push(callback); + + // Add or update the filter for this entity type + const existingFilterIndex = currentFilters.findIndex(f => f['entity-type'] === entityType); + const newFilter: EventFilter = { + 'entity-type': entityType, + operations: operations + }; + + if (existingFilterIndex >= 0) { + // Merge operations with existing filter + const existingOps = currentFilters[existingFilterIndex].operations; + newFilter.operations = Array.from(new Set([...existingOps, ...operations])); + currentFilters[existingFilterIndex] = newFilter; + } else { + currentFilters.push(newFilter); + } + + // Send updated filters if connected + if (ws && ws.readyState === WebSocket.OPEN) { + sendFilters(currentFilters); + } + + // Ensure connection exists (should already be connected via auto-connect) + if (!ws || ws.readyState === WebSocket.CLOSED || ws.readyState === WebSocket.CLOSING) { + connect(); + } + + // Return unsubscribe function + return () => { + const callbacks = eventCallbacks.get(entityType); + if (callbacks) { + const index = callbacks.indexOf(callback); + if (index > -1) { + callbacks.splice(index, 1); + } + + // If no more callbacks for this entity type, remove the filter + if (callbacks.length === 0) { + eventCallbacks.delete(entityType); + const filterIndex = currentFilters.findIndex(f => f['entity-type'] === entityType); + if (filterIndex > -1) { + currentFilters.splice(filterIndex, 1); + if (ws && ws.readyState === WebSocket.OPEN) { + sendFilters(currentFilters); + } + } + } + } + }; + } + + // Auto-connect when store is created (browser environment only) + if (typeof window !== 'undefined') { + // Connect immediately + connect(); + } + + return { + subscribe, + connect, + disconnect, + subscribeToEntity + }; +} + +export const websocketStore = createWebSocketStore(); \ No newline at end of file diff --git a/webapp/src/lib/utils/common.ts b/webapp/src/lib/utils/common.ts new file mode 100644 index 00000000..9bac810d --- /dev/null +++ b/webapp/src/lib/utils/common.ts @@ -0,0 +1,296 @@ +/** + * Common utility functions shared across components and pages + */ + +/** + * Formats a date string or Date object to a human-readable format + */ +export function formatDate(date: string | Date | null | undefined): string { + if (!date) return 'N/A'; + try { + const d = typeof date === 'string' ? new Date(date) : date; + return d.toLocaleString(); + } catch { + return 'Invalid Date'; + } +} + +/** + * Returns the appropriate forge icon SVG for the given endpoint type + * @param endpointType - The type of endpoint ('github', 'gitea', etc.) + * @param sizeClasses - Optional size classes (e.g., 'w-4 h-4', 'w-8 h-8'). 
Defaults to 'w-4 h-4' + */ +export function getForgeIcon(endpointType: string, sizeClasses: string = 'w-4 h-4'): string { + if (endpointType === 'gitea') { + return ``; + } else if (endpointType === 'github') { + // GitHub (also used for GHES) + return `
                `; + } else { + // Return a generic placeholder icon if endpoint type is unknown + return ` + + + `; + } +} + +/** + * Truncates an image name to a specified length and indicates if it was truncated + */ +export function truncateImageName(imageName: string, maxLength: number = 25): { truncated: string, isTruncated: boolean } { + if (imageName.length <= maxLength) { + return { truncated: imageName, isTruncated: false }; + } + return { truncated: imageName.substring(0, maxLength) + '...', isTruncated: true }; +} + +/** + * Gets the entity name for a Pool or ScaleSet object + */ +export function getEntityName(entity: any, eagerCacheStores?: any): string { + // Both Pool and ScaleSet objects now include the name fields directly + if (entity.repo_name) return entity.repo_name; + if (entity.org_name) return entity.org_name; + if (entity.enterprise_name) return entity.enterprise_name; + + // Fallback to eager cache lookup if name fields are not available (older API or cached data) + if (entity.repo_id && !entity.repo_name && eagerCacheStores?.repositories) { + const repo = eagerCacheStores.repositories.find((r: any) => r.id === entity.repo_id); + return repo ? `${repo.owner}/${repo.name}` : 'Unknown Entity'; + } + if (entity.org_id && !entity.org_name && eagerCacheStores?.organizations) { + const org = eagerCacheStores.organizations.find((o: any) => o.id === entity.org_id); + return (org && org.name) ? org.name : 'Unknown Entity'; + } + if (entity.enterprise_id && !entity.enterprise_name && eagerCacheStores?.enterprises) { + const enterprise = eagerCacheStores.enterprises.find((e: any) => e.id === entity.enterprise_id); + return (enterprise && enterprise.name) ? enterprise.name : 'Unknown Entity'; + } + + return 'Unknown Entity'; +} + +/** + * Gets the entity type for a Pool or ScaleSet object + */ +export function getEntityType(entity: any): string { + if (entity.repo_id) return 'repository'; + if (entity.org_id) return 'organization'; + if (entity.enterprise_id) return 'enterprise'; + return 'unknown'; +} + +/** + * Gets the URL for an entity detail page + */ +export function getEntityUrl(entity: any, base: string = ''): string { + if (entity.repo_id) return `${base}/repositories/${entity.repo_id}`; + if (entity.org_id) return `${base}/organizations/${entity.org_id}`; + if (entity.enterprise_id) return `${base}/enterprises/${entity.enterprise_id}`; + return '#'; +} + +/** + * Updates entity fields, preserving events and other non-API fields + */ +export function updateEntityFields(currentEntity: any, updatedFields: any): any { + // Preserve only fields that are definitely not in the API response + const { events: originalEvents } = currentEntity; + + // Use the API response as the primary source, add back preserved fields + const result = { + ...updatedFields, + events: originalEvents // Always preserve events since they're managed by websockets + }; + + return result; +} + +/** + * Scrolls to bottom of events container + */ +export function scrollToBottomEvents(eventsContainer: HTMLElement | null): void { + if (eventsContainer) { + eventsContainer.scrollTop = eventsContainer.scrollHeight; + } +} + +/** + * Changes pagination page + */ +export function changePage(currentPage: number, targetPage: number, totalPages: number): number { + if (targetPage >= 1 && targetPage <= totalPages) { + return targetPage; + } + return currentPage; +} + +/** + * Changes items per page and resets to page 1 + */ +export function changePerPage(newPerPage: number): { newPerPage: number, 
newCurrentPage: number } { + return { newPerPage, newCurrentPage: 1 }; +} + +/** + * Gets entity status badge information based on pool_manager_status + */ +export function getEntityStatusBadge(entity: any): { text: string, variant: 'success' | 'error' } { + if (entity.pool_manager_status?.running) { + return { + text: 'Running', + variant: 'success' + }; + } else { + return { + text: 'Stopped', + variant: 'error' + }; + } +} + +/** + * Gets badge variant for enabled/disabled status + */ +export function getEnabledStatusBadge(enabled: boolean): { text: string, variant: 'success' | 'error' } { + return { + text: enabled ? 'Enabled' : 'Disabled', + variant: enabled ? 'success' : 'error' + }; +} + +/** + * Gets badge variant for authentication type + */ +export function getAuthTypeBadge(authType: string): { text: string, variant: 'success' | 'info' } { + return { + text: authType === 'pat' ? 'PAT' : 'App', + variant: authType === 'pat' ? 'success' : 'info' + }; +} + +/** + * Gets badge variant for event level + */ +export function getEventLevelBadge(level: string): { text: string, variant: 'success' | 'error' | 'warning' | 'info' } { + const normalizedLevel = level.toLowerCase(); + switch (normalizedLevel) { + case 'error': + return { text: 'Error', variant: 'error' }; + case 'warning': + return { text: 'Warning', variant: 'warning' }; + case 'info': + return { text: 'Info', variant: 'info' }; + default: + return { text: level, variant: 'info' }; + } +} + +/** + * Filters entities by search term, supporting different search field configurations + */ +export function filterEntities>( + entities: T[], + searchTerm: string, + searchFields: string[] | ((entity: T, eagerCache?: any) => string) +): T[] { + if (!searchTerm.trim()) return entities; + + const lowercaseSearch = searchTerm.toLowerCase(); + + return entities.filter(entity => { + if (typeof searchFields === 'function') { + // Custom search function (e.g., for pools/scalesets using getEntityName) + const searchText = searchFields(entity); + return searchText.toLowerCase().includes(lowercaseSearch); + } else { + // Field-based search + return searchFields.some(field => { + const value = entity[field]; + return value?.toString().toLowerCase().includes(lowercaseSearch); + }); + } + }); +} + +/** + * Convenience function for filtering repositories (searches name and owner) + */ +export function filterRepositories(repositories: T[], searchTerm: string): T[] { + return filterEntities(repositories, searchTerm, ['name', 'owner']); +} + +/** + * Convenience function for filtering organizations/enterprises (searches name only) + */ +export function filterByName(entities: T[], searchTerm: string): T[] { + return filterEntities(entities, searchTerm, ['name']); +} + +/** + * Convenience function for filtering credentials (searches name, description, and endpoint name) + */ +export function filterCredentials(credentials: T[], searchTerm: string): T[] { + return filterEntities(credentials, searchTerm, (credential) => { + const searchableText = [ + credential.name || '', + credential.description || '', + credential.endpoint?.name || '' + ].join(' '); + return searchableText; + }); +} + +/** + * Convenience function for filtering endpoints (searches name, description, base_url, and api_base_url) + */ +export function filterEndpoints(endpoints: T[], searchTerm: string): T[] { + return filterEntities(endpoints, searchTerm, ['name', 'description', 'base_url', 'api_base_url']); +} + +/** + * Pagination utility functions + */ +export interface PaginationState { 
+ currentPage: number; + perPage: number; + totalPages: number; +} + +/** + * Creates paginated slice of items + */ +export function paginateItems(items: T[], currentPage: number, perPage: number): T[] { + return items.slice( + (currentPage - 1) * perPage, + currentPage * perPage + ); +} + +/** + * Calculates total pages and adjusts current page if needed + */ +export function calculatePagination(totalItems: number, perPage: number, currentPage: number): PaginationState { + const totalPages = Math.ceil(totalItems / perPage); + const adjustedCurrentPage = (currentPage > totalPages && totalPages > 0) ? totalPages : currentPage; + + return { + currentPage: adjustedCurrentPage, + perPage, + totalPages + }; +} + +/** + * Creates pagination info text (e.g., "Showing 1 to 25 of 100 results") + */ +export function getPaginationInfo(currentPage: number, perPage: number, totalItems: number): string { + if (totalItems === 0) return 'No results'; + + const start = (currentPage - 1) * perPage + 1; + const end = Math.min(currentPage * perPage, totalItems); + + return `Showing ${start} to ${end} of ${totalItems} results`; +} + diff --git a/webapp/src/lib/utils/status.ts b/webapp/src/lib/utils/status.ts new file mode 100644 index 00000000..7f7f68a8 --- /dev/null +++ b/webapp/src/lib/utils/status.ts @@ -0,0 +1,90 @@ +/** + * Unified status formatting and styling utilities + * Provides consistent status display and color coding across all pages + */ + +/** + * Formats status text for display by replacing underscores with spaces + * and converting to proper case + */ +export function formatStatusText(status: string): string { + if (!status) return ''; + return status.replace(/_/g, ' ').toLowerCase() + .split(' ') + .map(word => word.charAt(0).toUpperCase() + word.slice(1)) + .join(' '); +} + +/** + * Returns Tailwind CSS classes for status badges based on industry-standard color conventions: + * - Green: Successfully running/active states + * - Blue: Idle/ready states + * - Yellow/Amber: Warning or transitional states + * - Purple: Creating/building states + * - Orange: Deletion/termination in progress + * - Red: Failed/error states + * - Gray: Unknown/pending states + */ +export function getStatusBadgeClass(status: string): string { + if (!status) { + return 'bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20'; + } + + const normalizedStatus = status.toLowerCase(); + + switch (normalizedStatus) { + // Successfully running states - Green + case 'running': + case 'online': + return 'bg-green-50 text-green-700 ring-green-600/20 dark:bg-green-500/10 dark:text-green-400 dark:ring-green-500/20'; + + // Idle/ready states - Blue + case 'idle': + case 'stopped': + return 'bg-blue-50 text-blue-700 ring-blue-600/20 dark:bg-blue-500/10 dark:text-blue-400 dark:ring-blue-500/20'; + + // Active/working states - Yellow + case 'active': + return 'bg-yellow-50 text-yellow-700 ring-yellow-600/20 dark:bg-yellow-500/10 dark:text-yellow-400 dark:ring-yellow-500/20'; + + // Creating/building states - Purple with pulse animation + case 'creating': + case 'installing': + case 'pending_create': + case 'provisioning': + return 'bg-purple-50 text-purple-700 ring-purple-600/20 dark:bg-purple-500/10 dark:text-purple-400 dark:ring-purple-500/20 animate-pulse'; + + // Deletion/termination states - Orange with pulse animation + case 'deleting': + case 'terminating': + case 'pending_delete': + case 'destroying': + return 'bg-orange-50 text-orange-700 ring-orange-600/20 
dark:bg-orange-500/10 dark:text-orange-400 dark:ring-orange-500/20 animate-pulse'; + + // Failed/error states - Red + case 'failed': + case 'error': + case 'terminated': + case 'offline': + return 'bg-red-50 text-red-700 ring-red-600/20 dark:bg-red-500/10 dark:text-red-400 dark:ring-red-500/20'; + + // General pending states - Gray with pulse animation + case 'pending': + case 'unknown': + return 'bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20 animate-pulse'; + + // Default - Gray + default: + return 'bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-500/10 dark:text-gray-400 dark:ring-gray-500/20'; + } +} + +/** + * Combined utility that returns both formatted text and CSS classes + */ +export function getFormattedStatus(status: string): { text: string; classes: string } { + return { + text: formatStatusText(status), + classes: getStatusBadgeClass(status) + }; +} \ No newline at end of file diff --git a/webapp/src/openapitools.json b/webapp/src/openapitools.json new file mode 100644 index 00000000..a82623d6 --- /dev/null +++ b/webapp/src/openapitools.json @@ -0,0 +1,7 @@ +{ + "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json", + "spaces": 2, + "generator-cli": { + "version": "7.14.0" + } +} diff --git a/webapp/src/routes/+layout.svelte b/webapp/src/routes/+layout.svelte new file mode 100644 index 00000000..0ce5770f --- /dev/null +++ b/webapp/src/routes/+layout.svelte @@ -0,0 +1,87 @@ + + + + GARM - GitHub Actions Runner Manager + + +{#if $authStore.loading} +
                +
                +
                +

                Loading...

                +
                +
                +{:else if requiresAuth && !$authStore.isAuthenticated} + +
                +
                +

                Redirecting to login...

                +
                +
                +{:else if isLoginPage || isInitPage} + + +{:else} + +
                + + +
                +
                +
                + +
                +
                +
                +
                +{/if} + + + diff --git a/webapp/src/routes/+layout.ts b/webapp/src/routes/+layout.ts new file mode 100644 index 00000000..bc675bc0 --- /dev/null +++ b/webapp/src/routes/+layout.ts @@ -0,0 +1,13 @@ +import type { LayoutLoad } from './$types'; + +export const load: LayoutLoad = async ({ url }) => { + // For now, we'll handle auth redirect in the component + // In a real app, you might check auth state here + + return { + url: url.pathname + }; +}; + +export const prerender = false; +export const ssr = false; \ No newline at end of file diff --git a/webapp/src/routes/+page.svelte b/webapp/src/routes/+page.svelte new file mode 100644 index 00000000..a38ff4fc --- /dev/null +++ b/webapp/src/routes/+page.svelte @@ -0,0 +1,321 @@ + + + + Dashboard - GARM + + +
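A minimal sketch of how a page such as the dashboard above might pull its data through the eager cache added in webapp/src/lib/stores/eager-cache.ts. The `$lib` import alias, the helper name and the error handling are illustrative assumptions, not part of this patch:

```typescript
import { eagerCacheManager } from '$lib/stores/eager-cache';
import { toastStore } from '$lib/stores/toast';

// Each getter below returns cached data when the WebSocket is connected and the
// resource is already loaded; otherwise it falls back to a direct API call.
export async function loadDashboardData() {
  try {
    const [repositories, pools, controllerInfo] = await Promise.all([
      eagerCacheManager.getRepositories(),
      eagerCacheManager.getPools(),
      eagerCacheManager.getControllerInfo(),
    ]);
    return { repositories, pools, controllerInfo };
  } catch (err) {
    toastStore.error('Failed to load dashboard', err instanceof Error ? err.message : String(err));
    throw err;
  }
}
```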
                + +
                +

                Dashboard

                +

                + Welcome to GARM - GitHub Actions Runner Manager +

                +
                + + {#if error} + +
                +
                +
                + + + +
                +
                +

                Error loading dashboard

                +

                {error}

                +
                +
                +
                + {/if} + + + + + + {#if controllerInfo} + + {/if} + + + +
                \ No newline at end of file diff --git a/webapp/src/routes/credentials/+page.svelte b/webapp/src/routes/credentials/+page.svelte new file mode 100644 index 00000000..d0433100 --- /dev/null +++ b/webapp/src/routes/credentials/+page.svelte @@ -0,0 +1,1022 @@ + + + + + + Credentials - GARM + + +
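The credentials page above filters and decorates its rows with the shared helpers from webapp/src/lib/utils/common.ts. A minimal sketch of that pattern; the `$lib` alias, the `CredentialRow` shape and the wrapper name are assumptions:

```typescript
import { filterCredentials, getAuthTypeBadge } from '$lib/utils/common';

// Illustrative shape only; the real type comes from the generated API client.
type CredentialRow = {
  name?: string;
  description?: string;
  endpoint?: { name?: string };
  'auth-type'?: string;
};

export function visibleCredentials(all: CredentialRow[], searchTerm: string) {
  // filterCredentials matches against name, description and endpoint name.
  return filterCredentials(all, searchTerm).map((cred) => ({
    credential: cred,
    // e.g. { text: 'PAT', variant: 'success' } or { text: 'App', variant: 'info' }
    authBadge: getAuthTypeBadge(cred['auth-type'] ?? 'pat'),
  }));
}
```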
                + + + + + + +
                +
                +
                +

                + {credential.name} +

                +

                + {credential.description} +

                +
                +
                + {@html getForgeIcon(credential.forge_type || 'unknown')} + {credential.endpoint?.name || 'Unknown'} +
                +
                +
                +
                +
                + {#if (credential['auth-type'] || 'pat') === 'pat'} + + {:else} + + {/if} +
                + showEditCredentialsModal(credential)} + /> + showDeleteCredentialsModal(credential)} + /> +
                +
                +
                +
                + + +
                +
                + + +{#if showCreateModal} + +{/if} + + +{#if showEditModal && editingCredential} + +{/if} + + +{#if showDeleteModal && deletingCredential} + +{/if} \ No newline at end of file diff --git a/webapp/src/routes/endpoints/+page.svelte b/webapp/src/routes/endpoints/+page.svelte new file mode 100644 index 00000000..50873282 --- /dev/null +++ b/webapp/src/routes/endpoints/+page.svelte @@ -0,0 +1,838 @@ + + + + + + Endpoints - GARM + + +
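The endpoints page above renders a forge icon per row via getForgeIcon, which returns raw SVG markup intended for Svelte's {@html ...}. A minimal sketch, assuming the `$lib` alias and an illustrative `EndpointRow` shape:

```typescript
import { filterEndpoints, getForgeIcon } from '$lib/utils/common';

// Illustrative shape only; the real type comes from the generated API client.
type EndpointRow = {
  name?: string;
  description?: string;
  base_url?: string;
  api_base_url?: string;
  endpoint_type?: string;
};

export function endpointRows(endpoints: EndpointRow[], searchTerm: string) {
  return filterEndpoints(endpoints, searchTerm).map((ep) => ({
    endpoint: ep,
    // 'w-5 h-5' mirrors the size used on the endpoints page; only feed known,
    // trusted endpoint types since the result is injected as raw HTML.
    iconSvg: getForgeIcon(ep.endpoint_type ?? '', 'w-5 h-5'),
  }));
}
```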
                + + + + + + +
                +
                +
                +

                + {endpoint.name} +

                +

                + {endpoint.description} +

                +
                + {@html getForgeIcon(endpoint.endpoint_type || '', 'w-5 h-5')} + {endpoint.endpoint_type} +
                +
                +
                +
                + showEditEndpointModal(endpoint)} + /> + showDeleteEndpointModal(endpoint)} + /> +
                +
                +
                + +
                +
                + + +{#if showCreateModal} + +{/if} + + +{#if showEditModal && editingEndpoint} + +{/if} + + +{#if showDeleteModal && deletingEndpoint} + +{/if} \ No newline at end of file diff --git a/webapp/src/routes/enterprises/+page.svelte b/webapp/src/routes/enterprises/+page.svelte new file mode 100644 index 00000000..75f4af74 --- /dev/null +++ b/webapp/src/routes/enterprises/+page.svelte @@ -0,0 +1,329 @@ + + + + Enterprises - GARM + + +
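The list pages (enterprises, organizations, pools, and so on) share the pagination helpers from webapp/src/lib/utils/common.ts. A minimal sketch of how they compose; the wrapper function and the `$lib` alias are assumptions:

```typescript
import { calculatePagination, paginateItems, getPaginationInfo } from '$lib/utils/common';

export function paginate<T>(items: T[], requestedPage: number, perPage: number) {
  // calculatePagination clamps the current page when the item count shrinks.
  const { currentPage, totalPages } = calculatePagination(items.length, perPage, requestedPage);
  return {
    currentPage,
    totalPages,
    visible: paginateItems(items, currentPage, perPage),
    info: getPaginationInfo(currentPage, perPage, items.length), // "Showing 1 to 25 of 100 results"
  };
}
```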
                + + + + + + + {@const status = getEntityStatusBadge(enterprise)} +
                + +
                + +
                + openUpdateModal(enterprise)} + /> + openDeleteModal(enterprise)} + /> +
                +
                +
                +
                + +
                +
                + + +{#if showCreateModal} + showCreateModal = false} + on:submit={(e) => handleCreateEnterprise(e.detail)} + /> +{/if} + +{#if showUpdateModal && selectedEnterprise} + { showUpdateModal = false; selectedEnterprise = null; }} + on:submit={(e) => handleUpdateEnterprise(e.detail)} + /> +{/if} + +{#if showDeleteModal && selectedEnterprise} + { showDeleteModal = false; selectedEnterprise = null; }} + on:confirm={handleDeleteEnterprise} + /> +{/if} \ No newline at end of file diff --git a/webapp/src/routes/enterprises/[id]/+page.svelte b/webapp/src/routes/enterprises/[id]/+page.svelte new file mode 100644 index 00000000..22c77bd7 --- /dev/null +++ b/webapp/src/routes/enterprises/[id]/+page.svelte @@ -0,0 +1,381 @@ + + + + {enterprise ? `${enterprise.name} - Enterprise Details` : 'Enterprise Details'} - GARM + + +
                + + + + {#if loading} +
                +
                +

                Loading enterprise...

                +
                + {:else if error} +
                +

                {error}

                +
                + {:else if enterprise} + + showUpdateModal = true} + onDelete={() => showDeleteModal = true} + /> + + + + + + + + + + + + + {/if} +
                + + +{#if showUpdateModal && enterprise} + showUpdateModal = false} + on:submit={(e) => handleUpdate(e.detail)} + /> +{/if} + +{#if showDeleteModal && enterprise} + showDeleteModal = false} + on:confirm={handleDelete} + /> +{/if} + +{#if showDeleteInstanceModal && selectedInstance} + { showDeleteInstanceModal = false; selectedInstance = null; }} + on:confirm={handleDeleteInstance} + /> +{/if} + +{#if showCreatePoolModal && enterprise} + showCreatePoolModal = false} + on:submit={handleCreatePool} + /> +{/if} \ No newline at end of file diff --git a/webapp/src/routes/init/+page.svelte b/webapp/src/routes/init/+page.svelte new file mode 100644 index 00000000..528e150c --- /dev/null +++ b/webapp/src/routes/init/+page.svelte @@ -0,0 +1,431 @@ + + + + Initialize GARM - First Run Setup + + +
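The init page above reports the outcome of the first-run setup, and the toast store in webapp/src/lib/stores/toast.ts is the natural place to surface that. A minimal sketch in which the `initialize` callback stands in for the actual API call (an assumption, not part of this patch):

```typescript
import { toastStore } from '$lib/stores/toast';

export async function runFirstTimeSetup(initialize: () => Promise<void>) {
  try {
    await initialize();
    // duration defaults to 5000 ms when omitted
    toastStore.success('GARM initialized', 'Admin user created and controller ID generated.');
  } catch (err) {
    // duration 0 keeps the toast on screen until it is dismissed manually
    toastStore.error('Initialization failed', err instanceof Error ? err.message : String(err), 0);
  }
}
```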
                +
                +
                + GARM + +
                +

                + Welcome to GARM +

                +

                + Complete the first-run setup to get started +

                +
                + +
                + +
                +
                +
                + + + +
                +
                +

                + First-Run Initialization +

                +
                +

                GARM needs to be initialized before first use. This will create the admin user and generate a unique controller ID for this installation.

                +
                +
                +
                +
                + + +
                +
                + +
                + +
                + + {#if !isValidUsername && username.length > 0} +

                + Username is required +

                + {/if} +
                +
                + + +
                + +
                + + {#if !isValidEmail && email.length > 0} +

                + Please enter a valid email address +

                + {/if} +
                +
                + + +
                + +
                + + {#if !isValidFullName && fullName.length > 0} +

                + Full name is required +

                + {/if} +
                +
                + + +
                + +
                + + {#if !isValidPassword && password.length > 0} +

                + Password must be at least 8 characters long +

                + {/if} +
                +
                + + +
                + +
                + + {#if !isValidConfirmPassword && confirmPassword.length > 0} +

                + Passwords do not match +

                + {/if} +
                +
                + + +
                + + + {#if showAdvanced} +
                +
                + +
                + +
                + +

                + URL where runners can fetch metadata and setup information. +

                +
                +
                + + +
                + +
                + +

                + URL where runners send status updates and lifecycle events. +

                +
                +
                + + +
                + +
                + +

                + URL where GitHub/Gitea will send webhook events for job notifications. +

                +
                +
                +
                +
                + {/if} +
                + + + {#if !isValid && (username.length > 0 || email.length > 0 || fullName.length > 0 || password.length > 0 || confirmPassword.length > 0)} +
                +
                +
                + + + +
                +
                +

                + Please complete all required fields +

                +
                +
                  + {#if !isValidUsername} +
                • Enter a username
                • + {/if} + {#if !isValidEmail} +
                • Enter a valid email address
                • + {/if} + {#if !isValidFullName} +
                • Enter your full name
                • + {/if} + {#if !isValidPassword} +
                • Enter a password with at least 8 characters
                • + {/if} + {#if !isValidConfirmPassword} +
                • Confirm your password
                • + {/if} +
                +
                +
                +
                +
                + {/if} + + + {#if error} +
                +
                +
                + + + +
                +
                +

                {error}

                +
                +
                +
                + {/if} + + +
                + +
                +
                + + +
                +
                +

                + This will create the admin user, generate a unique controller ID, and configure the required URLs for your GARM installation. +
                + Make sure to remember these credentials as they cannot be recovered. +

                +
                +
                +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/routes/instances/+page.svelte b/webapp/src/routes/instances/+page.svelte new file mode 100644 index 00000000..0710c711 --- /dev/null +++ b/webapp/src/routes/instances/+page.svelte @@ -0,0 +1,284 @@ + + + + Instances - GARM + + +
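The instances page above stays current through the websocket store in webapp/src/lib/stores/websocket.ts. A minimal sketch of subscribing to instance events; the wrapper name is an assumption, and the returned function should be called from the component's onDestroy:

```typescript
import { websocketStore, type WebSocketEvent } from '$lib/stores/websocket';

export function watchInstances(onChange: (event: WebSocketEvent) => void): () => void {
  // subscribeToEntity registers (or merges) the server-side filter for this
  // entity type and returns an unsubscribe function.
  return websocketStore.subscribeToEntity('instance', ['create', 'update', 'delete'], (event) => {
    // event.payload is the instance object (or just its id on delete)
    onChange(event);
  });
}
```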
                + + + {#if error} +
                +
                +
                +

                Error

                +
                {error}
                +
                +
                +
                + {/if} + + + +
                + + +{#if showDeleteModal && instanceToDelete} + { + showDeleteModal = false; + instanceToDelete = null; + }} + on:confirm={confirmDelete} + /> +{/if} \ No newline at end of file diff --git a/webapp/src/routes/instances/[id]/+page.svelte b/webapp/src/routes/instances/[id]/+page.svelte new file mode 100644 index 00000000..5589aa5e --- /dev/null +++ b/webapp/src/routes/instances/[id]/+page.svelte @@ -0,0 +1,344 @@ + + + + {instance ? `${instance.name} - Instance Details` : 'Instance Details'} - GARM + + +
                + + + + {#if error} +
                +
                +
                +

                Error

                +
                {error}
                +
                +
                +
                + {/if} + + {#if loading} +
                +
                +
                +

                Loading instance details...

                +
                +
                + {:else if instance} + +
                + +
                +
                +

                Instance Information

                +
                + +
                +
                +
                +
                +
                ID:
                +
                {instance.id}
                +
                +
                +
                Name:
                +
                {instance.name}
                +
                +
                +
                Provider ID:
                +
                {instance.provider_id}
                +
                +
                +
                Provider:
                +
                {instance.provider_name || 'Unknown'}
                +
                +
                +
                Pool/Scale Set:
                +
                + {#if instance.pool_id} + + {instance.pool_id} + + {:else if instance.scale_set_id} + + {instance.scale_set_id} + + {:else} + - + {/if} +
                +
                +
                +
                Agent ID:
                +
                {instance.agent_id || 'Not assigned'}
                +
                +
                +
                Created At:
                +
                {formatDate(instance.created_at!)}
                +
                + {#if instance.updated_at && instance.updated_at !== instance.created_at} +
                +
                Updated At:
                +
                {formatDate(instance.updated_at)}
                +
                + {/if} +
                +
                + + +
                +

                Status & Network

                +
                +
                +
                Instance Status:
                +
                + + {formatStatusText(instance.status || 'unknown')} + +
                +
                +
                +
                Runner Status:
                +
                + + {formatStatusText(instance.runner_status || 'unknown')} + +
                +
                + {#if instance.addresses && instance.addresses.length > 0} +
                +
                Network Addresses:
                +
                + {#each instance.addresses as address} +
                + {address.address} + +
                + {/each} +
                +
                + {:else} +
                +
                Network Addresses:
                +
                No addresses available
                +
                + {/if} + {#if instance.os_type} +
                +
                OS Type:
                +
                {instance.os_type}
                +
                + {/if} + {#if instance.os_name} +
                +
                OS Name:
                +
                {instance.os_name}
                +
                + {/if} + {#if instance.os_version} +
                +
                OS Version:
                +
                {instance.os_version}
                +
                + {/if} + {#if instance.os_arch} +
                +
                OS Architecture:
                +
                {instance.os_arch}
                +
                + {/if} +
                +
                +
                + + {#if instance.status_messages && instance.status_messages.length > 0} + +
                +

                Status Messages

                +
                + {#each instance.status_messages as message} +
                +
                +

                {message.message}

                +
                + {#if message.event_level} + {@const levelBadge = getEventLevelBadge(message.event_level)} + + {/if} + + {#if message.created_at} + {formatDate(message.created_at)} + {:else} + Unknown date + {/if} + +
                +
                +
                + {/each} +
                +
                + {:else} + +
                +

                Status Messages

                +
                + + + +

                No status messages available

                +
                +
                + {/if} + {:else} +
                +
                + Instance not found. +
                +
                + {/if} +
                + + +{#if showDeleteModal && instance} + showDeleteModal = false} + on:confirm={handleDelete} + /> +{/if} \ No newline at end of file diff --git a/webapp/src/routes/login/+page.svelte b/webapp/src/routes/login/+page.svelte new file mode 100644 index 00000000..c358ebf3 --- /dev/null +++ b/webapp/src/routes/login/+page.svelte @@ -0,0 +1,159 @@ + + + + Login - GARM + + +
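The instance details page above formats instance and runner state with the helpers from webapp/src/lib/utils/status.ts. A short illustration of what they return (the `$lib` alias is an assumption):

```typescript
import { getFormattedStatus, getStatusBadgeClass } from '$lib/utils/status';

const badge = getFormattedStatus('pending_create');
// badge.text    -> "Pending Create"
// badge.classes -> the purple "creating" classes, including animate-pulse

// The two halves can also be used independently, e.g. for runner status:
const runnerClasses = getStatusBadgeClass('idle'); // blue badge classes
```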
                +
                +
                +
                + GARM + +
                +

                + Sign in to GARM +

                +

                + GitHub Actions Runner Manager +

                +
                + +
                +
                +
                + + +
                +
                + + +
                +
                + + {#if error} +
                +
                +
                + + + +
                +
                +

                + {error} +

                +
                +
                +
                + {/if} + +
                + +
                +
                +
                +
                \ No newline at end of file diff --git a/webapp/src/routes/organizations/+page.svelte b/webapp/src/routes/organizations/+page.svelte new file mode 100644 index 00000000..978cb7e2 --- /dev/null +++ b/webapp/src/routes/organizations/+page.svelte @@ -0,0 +1,364 @@ + + + + Organizations - GARM + + +
                + + + + + + + {@const status = getEntityStatusBadge(organization)} + + + + +
                + + +{#if showCreateModal} + showCreateModal = false} + on:submit={handleCreateOrganization} + /> +{/if} + +{#if showUpdateModal && selectedOrganization} + { showUpdateModal = false; selectedOrganization = null; }} + on:submit={(e) => handleUpdateOrganization(e.detail)} + /> +{/if} + +{#if showDeleteModal && selectedOrganization} + { showDeleteModal = false; selectedOrganization = null; }} + on:confirm={handleDeleteOrganization} + /> +{/if} \ No newline at end of file diff --git a/webapp/src/routes/organizations/[id]/+page.svelte b/webapp/src/routes/organizations/[id]/+page.svelte new file mode 100644 index 00000000..dc69c856 --- /dev/null +++ b/webapp/src/routes/organizations/[id]/+page.svelte @@ -0,0 +1,392 @@ + + + + {organization ? `${organization.name} - Organization Details` : 'Organization Details'} - GARM + + +
                + + + + {#if loading} +
                +
                +

                Loading organization...

                +
                + {:else if error} +
                +

                {error}

                +
                + {:else if organization} + + showUpdateModal = true} + onDelete={() => showDeleteModal = true} + /> + + + + + + + + + + + + + + + + {/if} +
                + + +{#if showUpdateModal && organization} + showUpdateModal = false} + on:submit={(e) => handleUpdate(e.detail)} + /> +{/if} + +{#if showDeleteModal && organization} + showDeleteModal = false} + on:confirm={handleDelete} + /> +{/if} + +{#if showDeleteInstanceModal && selectedInstance} + { showDeleteInstanceModal = false; selectedInstance = null; }} + on:confirm={handleDeleteInstance} + /> +{/if} + +{#if showCreatePoolModal && organization} + showCreatePoolModal = false} + on:submit={handleCreatePool} + /> +{/if} \ No newline at end of file diff --git a/webapp/src/routes/pools/+page.svelte b/webapp/src/routes/pools/+page.svelte new file mode 100644 index 00000000..81b45c2a --- /dev/null +++ b/webapp/src/routes/pools/+page.svelte @@ -0,0 +1,339 @@ + + + + Pools - GARM + + +
                + + + + +
                + + +{#if showCreateModal} + showCreateModal = false} + on:submit={(e) => handleCreatePool(e.detail)} + /> +{/if} + +{#if showUpdateModal && selectedPool} + { showUpdateModal = false; selectedPool = null; }} + on:submit={(e) => handleUpdatePool(e.detail)} + /> +{/if} + +{#if showDeleteModal && selectedPool} + { showDeleteModal = false; selectedPool = null; }} + on:confirm={handleDeletePool} + /> +{/if} \ No newline at end of file diff --git a/webapp/src/routes/pools/[id]/+page.svelte b/webapp/src/routes/pools/[id]/+page.svelte new file mode 100644 index 00000000..d97bbe6a --- /dev/null +++ b/webapp/src/routes/pools/[id]/+page.svelte @@ -0,0 +1,398 @@ + + + + {pool ? `Pool ${pool.id} - Pool Details` : 'Pool Details'} - GARM + + +
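The pools pages above resolve the owning entity of a pool or scale set with getEntityType, getEntityName and getEntityUrl from webapp/src/lib/utils/common.ts. A minimal sketch; the `PoolLike` shape and the wrapper are assumptions:

```typescript
import { getEntityName, getEntityType, getEntityUrl } from '$lib/utils/common';

// Illustrative shape only; real Pool/ScaleSet objects come from the API client.
type PoolLike = { repo_id?: string; repo_name?: string; org_id?: string; org_name?: string };

export function entityLink(pool: PoolLike) {
  return {
    type: getEntityType(pool),   // 'repository' | 'organization' | 'enterprise' | 'unknown'
    label: getEntityName(pool),  // falls back to 'Unknown Entity' when nothing is resolvable
    href: getEntityUrl(pool),    // e.g. '/repositories/<id>'; pass a base path as the 2nd argument if needed
  };
}
```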
                + + + + {#if loading} +
                +
                +

                Loading pool...

                +
                + {:else if error} +
                +

                {error}

                +
                + {:else if pool} + + showUpdateModal = true} + onDelete={() => showDeleteModal = true} + /> + + +
                + +
                +
                +

                Basic Information

                +
                +
                +
                Pool ID
                +
                {pool.id}
                +
                +
                +
                Provider
                +
                {pool.provider_name}
                +
                +
                +
                Image
                +
                + {pool.image} +
                +
                +
                +
                Flavor
                +
                {pool.flavor}
                +
                +
                +
                Status
                +
                + + {pool.enabled ? 'Enabled' : 'Disabled'} + +
                +
                +
                +
                Entity
                +
                +
                + + {getEntityType(pool)} + + + {getEntityName(pool)} + +
                +
                +
                +
                +
                Created At
                +
                {formatDate(pool.created_at || '')}
                +
                +
                +
                Updated At
                +
                {formatDate(pool.updated_at || '')}
                +
                +
                +
                +
                + + +
                +
                +

                Configuration

                +
                +
                +
                Max Runners
                +
                {pool.max_runners}
                +
                +
                +
                Min Idle Runners
                +
                {pool.min_idle_runners}
                +
                +
                +
                Bootstrap Timeout
                +
                {pool.runner_bootstrap_timeout} minutes
                +
                +
                +
                Priority
                +
                {pool.priority}
                +
                +
                +
                Runner Prefix
                +
                {pool.runner_prefix || 'garm'}
                +
                +
                +
                OS Type / Architecture
                +
                {pool.os_type} / {pool.os_arch}
                +
                + {#if pool['github-runner-group']} +
                +
                GitHub Runner Group
                +
                {pool['github-runner-group']}
                +
                + {/if} + {#if pool.tags && pool.tags.length > 0} +
                +
                Tags
                +
                +
                + {#each pool.tags as tag} + + {typeof tag === 'string' ? tag : tag.name} + + {/each} +
                +
                +
                + {/if} +
                +
                +
                +
                + + + {#if pool.extra_specs} +
                +
                +

                Extra Specifications

                +
                {formatExtraSpecs(pool.extra_specs)}
                +
                +
                + {/if} + + + {#if pool.instances} + + {/if} + + {/if} +
                + + +{#if showUpdateModal && pool} + showUpdateModal = false} + on:submit={(e) => handleUpdate(e.detail)} + /> +{/if} + +{#if showDeleteModal && pool} + showDeleteModal = false} + on:confirm={handleDelete} + /> +{/if} + +{#if showDeleteInstanceModal && selectedInstance} + { showDeleteInstanceModal = false; selectedInstance = null; }} + on:confirm={handleDeleteInstance} + /> +{/if} \ No newline at end of file diff --git a/webapp/src/routes/repositories/+page.svelte b/webapp/src/routes/repositories/+page.svelte new file mode 100644 index 00000000..36e74e32 --- /dev/null +++ b/webapp/src/routes/repositories/+page.svelte @@ -0,0 +1,339 @@ + + + + Repositories - GARM + + +
                + + { showCreateModal = true; }} + /> + + +
                + + +{#if showCreateModal} + showCreateModal = false} + on:submit={handleCreateRepository} + /> +{/if} + +{#if showEditModal && editingRepository} + handleUpdateRepository(e.detail)} + /> +{/if} + +{#if showDeleteModal && deletingRepository} + +{/if} diff --git a/webapp/src/routes/repositories/[id]/+page.svelte b/webapp/src/routes/repositories/[id]/+page.svelte new file mode 100644 index 00000000..0bafe74e --- /dev/null +++ b/webapp/src/routes/repositories/[id]/+page.svelte @@ -0,0 +1,392 @@ + + + + {repository ? `${repository.name} - Repository Details` : 'Repository Details'} - GARM + + +
                + + + + {#if loading} +
                +
                +

                Loading repository...

                +
                + {:else if error} +
                +

                {error}

                +
                + {:else if repository} + + showUpdateModal = true} + onDelete={() => showDeleteModal = true} + /> + + + + + + + + + + + + + + + + {/if} +
                + + +{#if showUpdateModal && repository} + showUpdateModal = false} + on:submit={(e) => handleUpdate(e.detail)} + /> +{/if} + +{#if showDeleteModal && repository} + showDeleteModal = false} + on:confirm={handleDelete} + /> +{/if} + +{#if showDeleteInstanceModal && selectedInstance} + { showDeleteInstanceModal = false; selectedInstance = null; }} + on:confirm={handleDeleteInstance} + /> +{/if} + +{#if showCreatePoolModal && repository} + showCreatePoolModal = false} + on:submit={handleCreatePool} + /> +{/if} \ No newline at end of file diff --git a/webapp/src/routes/scalesets/+page.svelte b/webapp/src/routes/scalesets/+page.svelte new file mode 100644 index 00000000..1a6acb7b --- /dev/null +++ b/webapp/src/routes/scalesets/+page.svelte @@ -0,0 +1,316 @@ + + + + Scale Sets - GARM + + +
                + + + + +
                + + +{#if showCreateModal} + showCreateModal = false} + on:submit={(e) => handleCreateScaleSet(e.detail)} + /> +{/if} + +{#if showUpdateModal && selectedScaleSet} + { showUpdateModal = false; selectedScaleSet = null; }} + on:submit={(e) => handleUpdateScaleSet(e.detail)} + /> +{/if} + +{#if showDeleteModal && selectedScaleSet} + { showDeleteModal = false; selectedScaleSet = null; }} + on:confirm={handleDeleteScaleSet} + /> +{/if} \ No newline at end of file diff --git a/webapp/src/routes/scalesets/[id]/+page.svelte b/webapp/src/routes/scalesets/[id]/+page.svelte new file mode 100644 index 00000000..2ad9c320 --- /dev/null +++ b/webapp/src/routes/scalesets/[id]/+page.svelte @@ -0,0 +1,383 @@ + + + + {scaleSet ? `${scaleSet.name} - Scale Set Details` : 'Scale Set Details'} - GARM + + +
                + + + + {#if loading} +
                +
                +

                Loading scale set...

                +
                + {:else if error} +
                +

                {error}

                +
                + {:else if scaleSet} + + showUpdateModal = true} + onDelete={() => showDeleteModal = true} + /> + + +
                + +
                +
                +

                Basic Information

                +
                +
                +
                Scale Set ID
                +
                {scaleSet.id}
                +
                +
                +
                Name
                +
                {scaleSet.name}
                +
                +
                +
                Provider
                +
                {scaleSet.provider_name}
                +
                +
                +
                Image
                +
                + {scaleSet.image} +
                +
                +
                +
                Flavor
                +
                {scaleSet.flavor}
                +
                +
                +
                Status
                +
                + + {scaleSet.enabled ? 'Enabled' : 'Disabled'} + +
                +
                +
                +
                Entity
                +
                +
                + + {getEntityType(scaleSet)} + + + {getEntityName(scaleSet)} + +
                +
                +
                +
                +
                Created At
                +
                {formatDate(scaleSet.created_at || '')}
                +
                +
                +
                Updated At
                +
                {formatDate(scaleSet.updated_at|| '')}
                +
                +
                +
                +
                + + +
                +
                +

                Configuration

                +
                +
                +
                Max Runners
                +
                {scaleSet.max_runners}
                +
                +
                +
                Min Idle Runners
                +
                {scaleSet.min_idle_runners}
                +
                +
                +
                Bootstrap Timeout
                +
                {scaleSet.runner_bootstrap_timeout} minutes
                +
                + +
                +
                Runner Prefix
                +
                {scaleSet.runner_prefix || 'garm'}
                +
                +
                +
                OS Type / Architecture
                +
                {scaleSet.os_type} / {scaleSet.os_arch}
                +
                + {#if scaleSet['github-runner-group']} +
                +
                GitHub Runner Group
                +
                {scaleSet['github-runner-group']}
                +
                + {/if} + +
                +
                +
                +
                + + + + {#if scaleSet.extra_specs} +
                +
                +

                Extra Specifications

                +
                {formatExtraSpecs(scaleSet.extra_specs)}
                +
                +
                + {/if} + + + {#if scaleSet.instances} + + {/if} + + {/if} +
                + + +{#if showUpdateModal && scaleSet} + showUpdateModal = false} + on:submit={(e) => handleUpdate(e.detail)} + /> +{/if} + +{#if showDeleteModal && scaleSet} + showDeleteModal = false} + on:confirm={handleDelete} + /> +{/if} + +{#if showDeleteInstanceModal && selectedInstance} + { showDeleteInstanceModal = false; selectedInstance = null; }} + on:confirm={handleDeleteInstance} + /> +{/if} \ No newline at end of file diff --git a/webapp/static/assets/garm-dark.svg b/webapp/static/assets/garm-dark.svg new file mode 100644 index 00000000..f0a0c564 --- /dev/null +++ b/webapp/static/assets/garm-dark.svg @@ -0,0 +1,37 @@ + + + + + + + + + + + diff --git a/webapp/static/assets/garm-light.svg b/webapp/static/assets/garm-light.svg new file mode 100644 index 00000000..2495959d --- /dev/null +++ b/webapp/static/assets/garm-light.svg @@ -0,0 +1,36 @@ + + + + + + + + + + diff --git a/webapp/static/assets/gitea.svg b/webapp/static/assets/gitea.svg new file mode 100644 index 00000000..e4643ce3 --- /dev/null +++ b/webapp/static/assets/gitea.svg @@ -0,0 +1 @@ + diff --git a/webapp/static/assets/github-mark-white.svg b/webapp/static/assets/github-mark-white.svg new file mode 100644 index 00000000..d5e64918 --- /dev/null +++ b/webapp/static/assets/github-mark-white.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/webapp/static/assets/github-mark.svg b/webapp/static/assets/github-mark.svg new file mode 100644 index 00000000..37fa923d --- /dev/null +++ b/webapp/static/assets/github-mark.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/webapp/static/favicon-dark.png b/webapp/static/favicon-dark.png new file mode 100644 index 0000000000000000000000000000000000000000..d16186d1d64f35f2676d6bb462e4824345f811de GIT binary patch literal 3506 zcmeHK`8O197k|dsjeTus8v7{BkRoKsjAaI8u1 zT>wP=a+@u+0bts(Ftfoi0($3m{*iwe`2S&m@#opag&3^>=p-w1YXBfHerArVUBekj zx7}ags<>F|{M4q_RqBs_-1MUVSUD&P-YO6jozUWz)WsUueD>8pbLyxt6DD?XZ-0CB zU9kl_abWQM_{>hJKkX2o#L4I{LK@2?Hk0g1PM^2%-*%d49a$(rT~VR^XC#Z!-1!Re za=LnvE{STnU`N-&yD?GX3hwY-3VM3Qq{7RtaU8cz4X))ZW|UlBIJwE<&o| z>5aqr#}E(tCi_F>rJL}%VS1hmpT2L5T%ev6P=0M2cTen5z>$kB=WdaCgUX&=JAGL$ z^Mp*!&9?M6F0DP!jU9)g*5Yqp?|A&;rK*!do_2S2T1|MC*f7+PK?CH9sSy^$vm7>N z7;GdH0*-Bkotsrt@ZOQM&kV9WOtAVko-=EA!!N!4Al!L6+G04)XyGtTpR?F1p8ZT3 zv%*8c5pWcJOjbc0g_l;-7IL=3L7ZVvUmILxf#56`PAE9F?85t&zm^rReYJa<>3g*B zW3%s^GKx z#FgWn=h(@jp;>mGJ}zqDhTP=jgq8-JcP{4HAd{%a=j^^r{{Yc;S2aNbRFL;z)@VqL${1HT7F)@pQde%&LzlE+*V^UzKt1er>xfeM?2xlmLytT{!lY7YsJ9Y~fbG zs=DjBwuek%U&UW49$oDW zzku88V!T}inYBxV@@dE#@{Hny(MR&p~#CcVRQb6)}pQOiCbp z#CcXLZ2biJ;Fm9(oCS15yf%SIZiCHVt(ve4-xz)r+>yD%L)5ggKO2fT(Pthv*By_S z{jDbfr?I{cO6J%m5PJQqN%tK5bNt_|rViO2szL(YlVwf2F+6Dzbxoz`+#AE7`Yq;p zkFu#9#%yT~h6@GR6Jw9o5yLC%%RRjd^6qU5unLuH5uQL!I~SjW3}=?PfK z@9EzVsq{V%{T+JgUG0}h@;k(3k>j=?TNBeJeW7Z%R+q_9txG^phX28eM21*H?SqE( zTsoWZ5fQ;|(9yHP3fVXGx{e?RIF~~?t6yg|o2E5~j3+HfU91unXR<)bOsK6{aP$;3 zW*`LXZ)`7O+!ly*lk$X%FaR|4(uLn`TVBjUY&URJ9Bn4vC30n{CzxH$M{CI67NI{liu>=8XT)p#g&hBf8y=M{7CI> zk)PFPx_{Yv%iP1Uqh0-s*W-K+i2}r@3AKBj9~P4>%EhUH%&0iOL5zY6(wBqR_2;a% zf&a3$dx&IHjn~IdJH8X2D?YvA*Si!OTcF(%rJ<`gbl#~~S z6MCpfgi#0Wc&Wv0p&8|JY}z_F5o%Jjq7t+tknS!--VF9T0q-+u@|VzrgCJa+dD#egmGr5VQS!po?+w95 zoepu*Lqut@p2(pYEfvr+1qY`WpREf)o7)Pu_X(^Jm>J9|0t0b^YdZzl0RAB<&ogBE zBG-9gzlzP{B610wB|2eCU(P}}pT~eYB7Q-9BG$`!!}^7_p6L=6fVcLwt(0!_TMzyc zJ6UB;k)0TRaf6&U@^iTAvF&K4aR**P-V0GMtN7kK zoccKTcxro-dW8C^_az$ce8WG~)n$}6S#p(vt%-UpAHM3xrois5&Y9bk zU|`Scf-^#g)EtDIl@NUt=uSc36tfU8fP% ze{LjGKk&Chif3k;rXJ!;MeF%Yvu%g2K0Z+e%YCtHW60)6WLmK)1oo?Vm?GhN>jOSr 
[The remainder of the favicon-dark.png binary literal, together with any hunks between it and the Vite configuration, was garbled during extraction and is not recoverable. The patch continues with what appears to be webapp/vite.config.ts; its diff header and import lines were lost in the same span, and the file resumes mid-way through its default export, reconstructed here from context:]
+export default defineConfig(({ mode }) =>
{ + // Load env variables based on the current mode + // Third param '' means load all variables, not just those prefixed with VITE_ + const env = loadEnv(mode, process.cwd(), ''); + + console.log(env.VITE_GARM_API_URL); + return { + plugins: [sveltekit()], + server: { + proxy: { + // Proxy API calls to GARM backend + '/api': { + target: env.VITE_GARM_API_URL, + changeOrigin: true, + ws: true, + configure: (proxy, _options) => { + proxy.on('error', (err, _req, _res) => { + console.log('proxy error', err); + }); + proxy.on('proxyReq', (proxyReq, req, _res) => { + console.log('Sending Request to the Target:', req.method, req.url); + }); + proxy.on('proxyRes', (proxyRes, req, _res) => { + console.log('Received Response from the Target:', proxyRes.statusCode, req.url); + }); + }, + secure: false + } + } + } + }; +}); + diff --git a/workers/entity/worker.go b/workers/entity/worker.go index 1cb40ad5..d16c15f5 100644 --- a/workers/entity/worker.go +++ b/workers/entity/worker.go @@ -92,6 +92,14 @@ func (w *Worker) Start() (err error) { w.mux.Lock() defer w.mux.Unlock() + epType, err := w.Entity.GetForgeType() + if err != nil { + return fmt.Errorf("failed to get endpoint type: %w", err) + } + if epType != params.GithubEndpointType { + return nil + } + ghCli, err := github.Client(w.ctx, w.Entity) if err != nil { return fmt.Errorf("creating github client: %w", err) diff --git a/workers/scaleset/scaleset.go b/workers/scaleset/scaleset.go index 03a93387..48aa8508 100644 --- a/workers/scaleset/scaleset.go +++ b/workers/scaleset/scaleset.go @@ -101,7 +101,8 @@ func (w *Worker) Start() (err error) { } for _, instance := range instances { - if instance.Status == commonParams.InstanceCreating { + switch { + case instance.Status == commonParams.InstanceCreating: // We're just starting up. We found an instance stuck in creating. // When a provider creates an instance, it sets the db instance to // creating and then issues an API call to the IaaS to create the @@ -176,7 +177,7 @@ func (w *Worker) Start() (err error) { return fmt.Errorf("updating runner %s: %w", instance.Name, err) } } - } else if instance.Status == commonParams.InstanceDeleting { + case instance.Status == commonParams.InstanceDeleting: // Set the instance in deleting. It is assumed that the runner was already // removed from github either by github or by garm. Deleting status indicates // that it was already being handled by the provider. There should be no entry on @@ -193,7 +194,7 @@ func (w *Worker) Start() (err error) { return fmt.Errorf("updating runner %s: %w", instance.Name, err) } } - } else if instance.Status == commonParams.InstanceDeleted { + case instance.Status == commonParams.InstanceDeleted: if err := w.handleInstanceCleanup(instance); err != nil { locking.Unlock(instance.Name, false) return fmt.Errorf("failed to remove database entry for %s: %w", instance.Name, err) From b58bf4c895c0a815c69070e1831502ea396225b9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 16 Aug 2025 10:25:06 +0000 Subject: [PATCH 159/179] Bump the npm_and_yarn group across 1 directory with 2 updates Bumps the npm_and_yarn group with 2 updates in the /webapp directory: [tmp](https://github.com/raszi/node-tmp) and [@openapitools/openapi-generator-cli](https://github.com/OpenAPITools/openapi-generator-cli). 
Removes `tmp` Updates `@openapitools/openapi-generator-cli` from 2.21.4 to 2.22.0 - [Release notes](https://github.com/OpenAPITools/openapi-generator-cli/releases) - [Changelog](https://github.com/OpenAPITools/openapi-generator-cli/blob/master/.releaserc) - [Commits](https://github.com/OpenAPITools/openapi-generator-cli/compare/v2.21.4...v2.22.0) --- updated-dependencies: - dependency-name: tmp dependency-version: dependency-type: indirect dependency-group: npm_and_yarn - dependency-name: "@openapitools/openapi-generator-cli" dependency-version: 2.22.0 dependency-type: direct:development dependency-group: npm_and_yarn ... Signed-off-by: dependabot[bot] --- webapp/package-lock.json | 357 +++++++++++++++++++++++---------------- webapp/package.json | 2 +- 2 files changed, 213 insertions(+), 146 deletions(-) diff --git a/webapp/package-lock.json b/webapp/package-lock.json index c6e47eb5..4ed479a3 100644 --- a/webapp/package-lock.json +++ b/webapp/package-lock.json @@ -17,7 +17,7 @@ "codemirror": "^6.0.2" }, "devDependencies": { - "@openapitools/openapi-generator-cli": "^2.21.4", + "@openapitools/openapi-generator-cli": "^2.22.0", "@sveltejs/adapter-static": "^3.0.1", "@sveltejs/kit": "^2.0.0", "@sveltejs/vite-plugin-svelte": "^6.1.0", @@ -697,6 +697,28 @@ "node": ">=18" } }, + "node_modules/@inquirer/external-editor": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@inquirer/external-editor/-/external-editor-1.0.1.tgz", + "integrity": "sha512-Oau4yL24d2B5IL4ma4UpbQigkVhzPDXLoqy1ggK4gnHg/stmkffJE4oOXHXF3uz0UEpywG68KcyXsyYpA1Re/Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "chardet": "^2.1.0", + "iconv-lite": "^0.6.3" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, "node_modules/@isaacs/fs-minipass": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", @@ -788,6 +810,16 @@ "@lezer/common": "^1.0.0" } }, + "node_modules/@lukeed/csprng": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@lukeed/csprng/-/csprng-1.1.0.tgz", + "integrity": "sha512-Z7C/xXCiGWsg0KuKsHTKJxbWhpI3Vs5GwLfOean7MGyVFGqdRgBbAjOCh6u4bbjPc/8MJ2pZmK/0DLdCbivLDA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/@marijn/find-cluster-break": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz", @@ -807,9 +839,9 @@ } }, "node_modules/@nestjs/common": { - "version": "11.1.5", - "resolved": "https://registry.npmjs.org/@nestjs/common/-/common-11.1.5.tgz", - "integrity": "sha512-DQpWdr3ShO0BHWkHl3I4W/jR6R3pDtxyBlmrpTuZF+PXxQyBXNvsUne0Wyo6QHPEDi+pAz9XchBFoKbqOhcdTg==", + "version": "11.1.6", + "resolved": "https://registry.npmjs.org/@nestjs/common/-/common-11.1.6.tgz", + "integrity": "sha512-krKwLLcFmeuKDqngG2N/RuZHCs2ycsKcxWIDgcm7i1lf3sQ0iG03ci+DsP/r3FcT/eJDFsIHnKtNta2LIi7PzQ==", "dev": true, "license": "MIT", "dependencies": { @@ -838,16 +870,6 @@ } } }, - "node_modules/@nestjs/common/node_modules/@lukeed/csprng": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@lukeed/csprng/-/csprng-1.1.0.tgz", - "integrity": "sha512-Z7C/xXCiGWsg0KuKsHTKJxbWhpI3Vs5GwLfOean7MGyVFGqdRgBbAjOCh6u4bbjPc/8MJ2pZmK/0DLdCbivLDA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/@nestjs/common/node_modules/@tokenizer/inflate": { "version": "0.2.7", "resolved": 
"https://registry.npmjs.org/@tokenizer/inflate/-/inflate-0.2.7.tgz", @@ -910,23 +932,10 @@ "url": "https://github.com/sponsors/Borewit" } }, - "node_modules/@nestjs/common/node_modules/uid": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/uid/-/uid-2.0.2.tgz", - "integrity": "sha512-u3xV3X7uzvi5b1MncmZo3i2Aw222Zk1keqLA1YkHldREkAhAqi65wuPfe7lHx8H/Wzy+8CE7S7uS3jekIM5s8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@lukeed/csprng": "^1.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@nestjs/core": { - "version": "11.1.5", - "resolved": "https://registry.npmjs.org/@nestjs/core/-/core-11.1.5.tgz", - "integrity": "sha512-Qr25MEY9t8VsMETy7eXQ0cNXqu0lzuFrrTr+f+1G57ABCtV5Pogm7n9bF71OU2bnkDD32Bi4hQLeFR90cku3Tw==", + "version": "11.1.6", + "resolved": "https://registry.npmjs.org/@nestjs/core/-/core-11.1.6.tgz", + "integrity": "sha512-siWX7UDgErisW18VTeJA+x+/tpNZrJewjTBsRPF3JVxuWRuAB1kRoiJcxHgln8Lb5UY9NdvklITR84DUEXD0Cg==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -965,17 +974,7 @@ } } }, - "node_modules/@nestjs/core/node_modules/@lukeed/csprng": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@lukeed/csprng/-/csprng-1.1.0.tgz", - "integrity": "sha512-Z7C/xXCiGWsg0KuKsHTKJxbWhpI3Vs5GwLfOean7MGyVFGqdRgBbAjOCh6u4bbjPc/8MJ2pZmK/0DLdCbivLDA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/@nestjs/core/node_modules/@nuxt/opencollective": { + "node_modules/@nuxt/opencollective": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/@nuxt/opencollective/-/opencollective-0.4.1.tgz", "integrity": "sha512-GXD3wy50qYbxCJ652bDrDzgMr3NFEkIS374+IgFQKkCvk9yiYcLvX2XDYr7UyQxf4wK0e+yqDYRubZ0DtOxnmQ==", @@ -992,7 +991,7 @@ "npm": ">=5.10.0" } }, - "node_modules/@nestjs/core/node_modules/consola": { + "node_modules/@nuxt/opencollective/node_modules/consola": { "version": "3.4.2", "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", @@ -1002,26 +1001,6 @@ "node": "^14.18.0 || >=16.10.0" } }, - "node_modules/@nestjs/core/node_modules/fast-safe-stringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", - "dev": true, - "license": "MIT" - }, - "node_modules/@nestjs/core/node_modules/uid": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/uid/-/uid-2.0.2.tgz", - "integrity": "sha512-u3xV3X7uzvi5b1MncmZo3i2Aw222Zk1keqLA1YkHldREkAhAqi65wuPfe7lHx8H/Wzy+8CE7S7uS3jekIM5s8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@lukeed/csprng": "^1.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@nuxtjs/opencollective": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/@nuxtjs/opencollective/-/opencollective-0.3.2.tgz", @@ -1063,16 +1042,16 @@ } }, "node_modules/@openapitools/openapi-generator-cli": { - "version": "2.21.4", - "resolved": "https://registry.npmjs.org/@openapitools/openapi-generator-cli/-/openapi-generator-cli-2.21.4.tgz", - "integrity": "sha512-s2OBgiNml0DL0ebkvAMQxZi7c8SUQMHssTUJwWsFDv4kVtBVDV4UzsCh9gQEXlNjuEcEgZoa5BIOai2sT0sE8g==", + "version": "2.22.0", + "resolved": "https://registry.npmjs.org/@openapitools/openapi-generator-cli/-/openapi-generator-cli-2.22.0.tgz", + "integrity": 
"sha512-HdjSiKsXpbnXBcSCnft494fv5pFZxPKFAV1eR+yMjo3bt1ONLb7OGy1D/5SrbjRfy9b82JcYUJ3gssh49suWKg==", "dev": true, "hasInstallScript": true, "license": "Apache-2.0", "dependencies": { "@nestjs/axios": "4.0.1", - "@nestjs/common": "11.1.5", - "@nestjs/core": "11.1.5", + "@nestjs/common": "11.1.6", + "@nestjs/core": "11.1.6", "@nuxtjs/opencollective": "0.3.2", "axios": "1.11.0", "chalk": "4.1.2", @@ -1080,10 +1059,9 @@ "compare-versions": "4.1.4", "concurrently": "9.2.0", "console.table": "0.10.0", - "fs-extra": "11.3.0", + "fs-extra": "11.3.1", "glob": "11.0.3", - "inquirer": "8.2.6", - "lodash": "4.17.21", + "inquirer": "8.2.7", "proxy-agent": "6.5.0", "reflect-metadata": "0.2.2", "rxjs": "7.8.2", @@ -1465,6 +1443,66 @@ "node": ">=14.0.0" } }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/core": { + "version": "1.4.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.0.2", + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/runtime": { + "version": "1.4.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/wasi-threads": { + "version": "1.0.2", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.11", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@tybys/wasm-util": "^0.9.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@tybys/wasm-util": { + "version": "0.9.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/tslib": { + "version": "2.8.0", + "dev": true, + "inBundle": true, + "license": "0BSD", + "optional": true + }, "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { "version": "4.1.11", "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.11.tgz", @@ -1575,6 +1613,8 @@ }, "node_modules/ansi-escapes": { "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", "dev": true, "license": "MIT", "dependencies": { @@ -1909,6 +1949,18 @@ ], "license": "MIT" }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, "node_modules/browserslist": { "version": "4.25.1", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz", @@ -2150,7 +2202,9 @@ } }, "node_modules/chardet": { - "version": "0.7.0", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-2.1.0.tgz", + "integrity": "sha512-bNFETTG/pM5ryzQ9Ad0lJOTa6HWD/YsScAR3EnCPZRPlQh77JocYktSHOUHelyhm8IARL+o4c4F1bP5KVOjiRA==", "dev": true, "license": "MIT" }, @@ -2186,6 +2240,8 
@@ }, "node_modules/cli-cursor": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", "dev": true, "license": "MIT", "dependencies": { @@ -2197,6 +2253,8 @@ }, "node_modules/cli-spinners": { "version": "2.9.2", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", + "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", "dev": true, "license": "MIT", "engines": { @@ -2206,6 +2264,16 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/cli-width": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", + "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 10" + } + }, "node_modules/clone": { "version": "1.0.4", "dev": true, @@ -2597,6 +2665,8 @@ }, "node_modules/escape-string-regexp": { "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "dev": true, "license": "MIT", "engines": { @@ -2658,18 +2728,12 @@ "dev": true, "license": "MIT" }, - "node_modules/external-editor": { - "version": "3.1.0", + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", "dev": true, - "license": "MIT", - "dependencies": { - "chardet": "^0.7.0", - "iconv-lite": "^0.4.24", - "tmp": "^0.0.33" - }, - "engines": { - "node": ">=4" - } + "license": "MIT" }, "node_modules/fdir": { "version": "6.4.6", @@ -2688,6 +2752,8 @@ }, "node_modules/figures": { "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", "dev": true, "license": "MIT", "dependencies": { @@ -2715,9 +2781,9 @@ } }, "node_modules/fs-extra": { - "version": "11.3.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.0.tgz", - "integrity": "sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==", + "version": "11.3.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.1.tgz", + "integrity": "sha512-eXvGGwZ5CL17ZSwHWd3bbgk7UUpF6IFHtP57NYYakPvHOs8GDgDe5KJI36jIJzDkJ6eJjuzRA8eBQb6SkKue0g==", "dev": true, "license": "MIT", "dependencies": { @@ -3082,11 +3148,13 @@ } }, "node_modules/iconv-lite": { - "version": "0.4.24", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", "dev": true, "license": "MIT", "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" + "safer-buffer": ">= 2.1.2 < 3.0.0" }, "engines": { "node": ">=0.10.0" @@ -3119,17 +3187,17 @@ "license": "ISC" }, "node_modules/inquirer": { - "version": "8.2.6", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.6.tgz", - "integrity": 
"sha512-M1WuAmb7pn9zdFRtQYk26ZBoY043Sse0wVDdk4Bppr+JOXyQYybdtvK+l9wUibhtjdjvtoiNy8tk+EgsYIUqKg==", + "version": "8.2.7", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.7.tgz", + "integrity": "sha512-UjOaSel/iddGZJ5xP/Eixh6dY1XghiBw4XK13rCCIJcJfyhhoul/7KhLLUGtebEj6GDYM6Vnx/mVsjx2L/mFIA==", "dev": true, "license": "MIT", "dependencies": { + "@inquirer/external-editor": "^1.0.0", "ansi-escapes": "^4.2.1", "chalk": "^4.1.1", "cli-cursor": "^3.1.0", "cli-width": "^3.0.0", - "external-editor": "^3.0.3", "figures": "^3.0.0", "lodash": "^4.17.21", "mute-stream": "0.0.8", @@ -3145,16 +3213,6 @@ "node": ">=12.0.0" } }, - "node_modules/inquirer/node_modules/cli-width": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", - "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">= 10" - } - }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "dev": true, @@ -3165,6 +3223,8 @@ }, "node_modules/is-interactive": { "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz", + "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", "dev": true, "license": "MIT", "engines": { @@ -3183,6 +3243,8 @@ }, "node_modules/is-unicode-supported": { "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", "dev": true, "license": "MIT", "engines": { @@ -3241,7 +3303,9 @@ "license": "MIT" }, "node_modules/jsonfile": { - "version": "6.1.0", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", "dev": true, "license": "MIT", "dependencies": { @@ -3542,6 +3606,8 @@ }, "node_modules/log-symbols": { "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", "dev": true, "license": "MIT", "dependencies": { @@ -3588,6 +3654,8 @@ }, "node_modules/mimic-fn": { "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", "dev": true, "license": "MIT", "engines": { @@ -3664,6 +3732,8 @@ }, "node_modules/mute-stream": { "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", "dev": true, "license": "ISC" }, @@ -3751,13 +3821,6 @@ "fast-safe-stringify": "^2.0.7" } }, - "node_modules/oas-kit-common/node_modules/fast-safe-stringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", - "dev": true, - "license": "MIT" - }, "node_modules/oas-linter": { "version": "3.2.2", "dev": true, @@ -3803,6 +3866,8 @@ }, "node_modules/onetime": { "version": "5.1.2", + "resolved": 
"https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "dev": true, "license": "MIT", "dependencies": { @@ -3817,6 +3882,8 @@ }, "node_modules/ora": { "version": "5.4.1", + "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", + "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", "dev": true, "license": "MIT", "dependencies": { @@ -3837,26 +3904,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/ora/node_modules/bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/os-tmpdir": { - "version": "1.0.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/path-key": { "version": "3.1.1", "dev": true, @@ -3867,6 +3914,8 @@ }, "node_modules/path-to-regexp": { "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", "dev": true, "license": "MIT", "engines": { @@ -4319,6 +4368,8 @@ }, "node_modules/restore-cursor": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", "dev": true, "license": "MIT", "dependencies": { @@ -4329,13 +4380,10 @@ "node": ">=8" } }, - "node_modules/restore-cursor/node_modules/signal-exit": { - "version": "3.0.7", - "dev": true, - "license": "ISC" - }, "node_modules/run-async": { "version": "2.4.1", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", + "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", "dev": true, "license": "MIT", "engines": { @@ -4386,6 +4434,8 @@ }, "node_modules/safer-buffer": { "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "dev": true, "license": "MIT" }, @@ -4461,6 +4511,13 @@ "dev": true, "license": "MIT" }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, "node_modules/sirv": { "version": "3.0.1", "dev": true, @@ -4978,6 +5035,8 @@ }, "node_modules/through": { "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", "dev": true, "license": "MIT" }, @@ -4998,17 +5057,6 @@ "url": "https://github.com/sponsors/SuperchupuDev" } }, - "node_modules/tmp": { - "version": "0.0.33", - "dev": true, - "license": "MIT", - "dependencies": { - "os-tmpdir": "~1.0.2" - }, - "engines": { - "node": ">=0.6.0" - } - }, "node_modules/token-types": { "version": 
"6.0.4", "dev": true, @@ -5047,6 +5095,8 @@ }, "node_modules/type-fest": { "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", "dev": true, "license": "(MIT OR CC0-1.0)", "engines": { @@ -5070,6 +5120,19 @@ "node": ">=14.17" } }, + "node_modules/uid": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/uid/-/uid-2.0.2.tgz", + "integrity": "sha512-u3xV3X7uzvi5b1MncmZo3i2Aw222Zk1keqLA1YkHldREkAhAqi65wuPfe7lHx8H/Wzy+8CE7S7uS3jekIM5s8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@lukeed/csprng": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/uint8array-extras": { "version": "1.4.0", "dev": true, @@ -5090,6 +5153,8 @@ }, "node_modules/universalify": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", "dev": true, "license": "MIT", "engines": { @@ -5554,6 +5619,8 @@ }, "node_modules/wrap-ansi": { "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", "dev": true, "license": "MIT", "dependencies": { diff --git a/webapp/package.json b/webapp/package.json index 93b9ef61..764761f9 100644 --- a/webapp/package.json +++ b/webapp/package.json @@ -10,7 +10,7 @@ "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch" }, "devDependencies": { - "@openapitools/openapi-generator-cli": "^2.21.4", + "@openapitools/openapi-generator-cli": "^2.22.0", "@sveltejs/adapter-static": "^3.0.1", "@sveltejs/kit": "^2.0.0", "@sveltejs/vite-plugin-svelte": "^6.1.0", From 118319c7c1eade64f60f3e31e21598d9d42d04a1 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sat, 16 Aug 2025 19:31:58 +0000 Subject: [PATCH 160/179] Switch to fmt.Errorf Replace all instances of errors.Wrap() with fmt.Errorf. 
Signed-off-by: Gabriel Adrian Samfira --- apiserver/controllers/controllers.go | 19 +- auth/auth.go | 17 +- auth/instance_middleware.go | 3 +- cmd/garm-cli/cmd/init.go | 7 +- cmd/garm-cli/cmd/pool.go | 7 +- cmd/garm-cli/config/config.go | 18 +- cmd/garm-cli/config/home.go | 8 +- cmd/garm-cli/config/home_nix.go | 5 +- cmd/garm/main.go | 7 +- config/config.go | 11 +- config/config_test.go | 1 - database/sql/controller.go | 27 +- database/sql/enterprise.go | 57 +- database/sql/enterprise_test.go | 52 +- database/sql/gitea.go | 115 ++-- database/sql/gitea_test.go | 6 +- database/sql/github.go | 123 ++-- database/sql/github_test.go | 10 +- database/sql/instances.go | 61 +- database/sql/instances_test.go | 30 +- database/sql/jobs.go | 49 +- database/sql/models.go | 4 +- database/sql/organizations.go | 52 +- database/sql/organizations_test.go | 54 +- database/sql/pools.go | 64 +- database/sql/pools_test.go | 10 +- database/sql/repositories.go | 50 +- database/sql/repositories_test.go | 54 +- database/sql/scaleset_instances.go | 13 +- database/sql/scalesets.go | 68 +- database/sql/sql.go | 70 +- database/sql/users.go | 22 +- database/sql/users_test.go | 14 +- database/sql/util.go | 126 ++-- database/watcher/watcher.go | 5 +- database/watcher/watcher_test.go | 4 +- go.mod | 7 +- go.sum | 19 - internal/testing/testing.go | 2 +- params/requests.go | 6 +- runner/common.go | 8 +- runner/enterprises.go | 49 +- runner/enterprises_test.go | 12 +- runner/gitea_credentials.go | 17 +- runner/gitea_endpoints.go | 17 +- runner/github_credentials.go | 17 +- runner/github_endpoints.go | 17 +- runner/metadata.go | 37 +- runner/organizations.go | 73 +- runner/organizations_test.go | 12 +- runner/pool/common.go | 6 +- runner/pool/pool.go | 118 ++-- runner/pool/watcher.go | 4 +- runner/pools.go | 20 +- runner/pools_test.go | 6 +- runner/providers/providers.go | 7 +- runner/providers/v0.1.0/external.go | 7 +- runner/providers/v0.1.1/external.go | 21 +- runner/repositories.go | 77 ++- runner/repositories_test.go | 16 +- runner/runner.go | 158 +++-- runner/scalesets.go | 49 +- util/github/client.go | 22 +- util/github/gitea.go | 2 +- util/util.go | 7 +- vendor/github.com/juju/clock/.gitignore | 1 - vendor/github.com/juju/clock/LICENSE | 191 ------ vendor/github.com/juju/clock/Makefile | 20 - vendor/github.com/juju/clock/README.md | 7 - vendor/github.com/juju/clock/clock.go | 77 --- vendor/github.com/juju/clock/wall.go | 77 --- vendor/github.com/juju/errors/.gitignore | 23 - vendor/github.com/juju/errors/LICENSE | 191 ------ vendor/github.com/juju/errors/Makefile | 24 - vendor/github.com/juju/errors/README.md | 707 -------------------- vendor/github.com/juju/errors/doc.go | 79 --- vendor/github.com/juju/errors/error.go | 227 ------- vendor/github.com/juju/errors/errortypes.go | 473 ------------- vendor/github.com/juju/errors/functions.go | 454 ------------- vendor/github.com/juju/retry/.gitignore | 1 - vendor/github.com/juju/retry/LICENSE | 191 ------ vendor/github.com/juju/retry/Makefile | 15 - vendor/github.com/juju/retry/README.md | 277 -------- vendor/github.com/juju/retry/clock.go | 16 - vendor/github.com/juju/retry/doc.go | 90 --- vendor/github.com/juju/retry/retry.go | 260 ------- vendor/modules.txt | 13 - websocket/client.go | 4 +- 88 files changed, 1007 insertions(+), 4467 deletions(-) delete mode 100644 vendor/github.com/juju/clock/.gitignore delete mode 100644 vendor/github.com/juju/clock/LICENSE delete mode 100644 vendor/github.com/juju/clock/Makefile delete mode 100644 vendor/github.com/juju/clock/README.md 
delete mode 100644 vendor/github.com/juju/clock/clock.go delete mode 100644 vendor/github.com/juju/clock/wall.go delete mode 100644 vendor/github.com/juju/errors/.gitignore delete mode 100644 vendor/github.com/juju/errors/LICENSE delete mode 100644 vendor/github.com/juju/errors/Makefile delete mode 100644 vendor/github.com/juju/errors/README.md delete mode 100644 vendor/github.com/juju/errors/doc.go delete mode 100644 vendor/github.com/juju/errors/error.go delete mode 100644 vendor/github.com/juju/errors/errortypes.go delete mode 100644 vendor/github.com/juju/errors/functions.go delete mode 100644 vendor/github.com/juju/retry/.gitignore delete mode 100644 vendor/github.com/juju/retry/LICENSE delete mode 100644 vendor/github.com/juju/retry/Makefile delete mode 100644 vendor/github.com/juju/retry/README.md delete mode 100644 vendor/github.com/juju/retry/clock.go delete mode 100644 vendor/github.com/juju/retry/doc.go delete mode 100644 vendor/github.com/juju/retry/retry.go diff --git a/apiserver/controllers/controllers.go b/apiserver/controllers/controllers.go index 0c610c38..66aa6db9 100644 --- a/apiserver/controllers/controllers.go +++ b/apiserver/controllers/controllers.go @@ -17,6 +17,8 @@ package controllers import ( "context" "encoding/json" + "errors" + "fmt" "io" "log/slog" "net/http" @@ -25,7 +27,6 @@ import ( "github.com/gorilla/mux" "github.com/gorilla/websocket" - "github.com/pkg/errors" gErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm-provider-common/util" @@ -43,7 +44,7 @@ import ( func NewAPIController(r *runner.Runner, authenticator *auth.Authenticator, hub *wsWriter.Hub, apiCfg config.APIServer) (*APIController, error) { controllerInfo, err := r.GetControllerInfo(auth.GetAdminContext(context.Background())) if err != nil { - return nil, errors.Wrap(err, "failed to get controller info") + return nil, fmt.Errorf("failed to get controller info: %w", err) } var checkOrigin func(r *http.Request) bool if len(apiCfg.CORSOrigins) > 0 { @@ -91,24 +92,22 @@ type APIController struct { func handleError(ctx context.Context, w http.ResponseWriter, err error) { w.Header().Set("Content-Type", "application/json") - origErr := errors.Cause(err) apiErr := params.APIErrorResponse{ - Details: origErr.Error(), + Details: err.Error(), } - - switch origErr.(type) { - case *gErrors.NotFoundError: + switch { + case errors.Is(err, gErrors.ErrNotFound): w.WriteHeader(http.StatusNotFound) apiErr.Error = "Not Found" - case *gErrors.UnauthorizedError: + case errors.Is(err, gErrors.ErrUnauthorized): w.WriteHeader(http.StatusUnauthorized) apiErr.Error = "Not Authorized" // Don't include details on 401 errors. 
apiErr.Details = "" - case *gErrors.BadRequestError: + case errors.Is(err, gErrors.ErrBadRequest): w.WriteHeader(http.StatusBadRequest) apiErr.Error = "Bad Request" - case *gErrors.DuplicateUserError, *gErrors.ConflictError: + case errors.Is(err, gErrors.ErrDuplicateEntity), errors.Is(err, &gErrors.ConflictError{}): w.WriteHeader(http.StatusConflict) apiErr.Error = "Conflict" default: diff --git a/auth/auth.go b/auth/auth.go index 7dfabcf0..c5fa1ebd 100644 --- a/auth/auth.go +++ b/auth/auth.go @@ -16,11 +16,12 @@ package auth import ( "context" + "errors" + "fmt" "time" jwt "github.com/golang-jwt/jwt/v5" "github.com/nbutton23/zxcvbn-go" - "github.com/pkg/errors" "golang.org/x/crypto/bcrypt" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -49,7 +50,7 @@ func (a *Authenticator) IsInitialized() bool { func (a *Authenticator) GetJWTToken(ctx context.Context) (string, error) { tokenID, err := util.GetRandomString(16) if err != nil { - return "", errors.Wrap(err, "generating random string") + return "", fmt.Errorf("error generating random string: %w", err) } expireToken := time.Now().Add(a.cfg.TimeToLive.Duration()) expires := &jwt.NumericDate{ @@ -72,7 +73,7 @@ func (a *Authenticator) GetJWTToken(ctx context.Context) (string, error) { token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) tokenString, err := token.SignedString([]byte(a.cfg.Secret)) if err != nil { - return "", errors.Wrap(err, "fetching token string") + return "", fmt.Errorf("error fetching token string: %w", err) } return tokenString, nil @@ -87,7 +88,7 @@ func (a *Authenticator) GetJWTMetricsToken(ctx context.Context) (string, error) tokenID, err := util.GetRandomString(16) if err != nil { - return "", errors.Wrap(err, "generating random string") + return "", fmt.Errorf("error generating random string: %w", err) } // nolint:golangci-lint,godox // TODO: currently this is the same TTL as the normal Token @@ -111,7 +112,7 @@ func (a *Authenticator) GetJWTMetricsToken(ctx context.Context) (string, error) token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) tokenString, err := token.SignedString([]byte(a.cfg.Secret)) if err != nil { - return "", errors.Wrap(err, "fetching token string") + return "", fmt.Errorf("error fetching token string: %w", err) } return tokenString, nil @@ -121,7 +122,7 @@ func (a *Authenticator) InitController(ctx context.Context, param params.NewUser _, err := a.store.ControllerInfo() if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return params.User{}, errors.Wrap(err, "initializing controller") + return params.User{}, fmt.Errorf("error initializing controller: %w", err) } } if a.store.HasAdminUser(ctx) { @@ -151,7 +152,7 @@ func (a *Authenticator) InitController(ctx context.Context, param params.NewUser hashed, err := util.PaswsordToBcrypt(param.Password) if err != nil { - return params.User{}, errors.Wrap(err, "creating user") + return params.User{}, fmt.Errorf("error creating user: %w", err) } param.Password = hashed @@ -169,7 +170,7 @@ func (a *Authenticator) AuthenticateUser(ctx context.Context, info params.Passwo if errors.Is(err, runnerErrors.ErrNotFound) { return ctx, runnerErrors.ErrUnauthorized } - return ctx, errors.Wrap(err, "authenticating") + return ctx, fmt.Errorf("error authenticating: %w", err) } if !user.Enabled { diff --git a/auth/instance_middleware.go b/auth/instance_middleware.go index bcae0b0a..dc31327e 100644 --- a/auth/instance_middleware.go +++ b/auth/instance_middleware.go @@ -24,7 +24,6 @@ import ( "time" jwt 
"github.com/golang-jwt/jwt/v5" - "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" @@ -91,7 +90,7 @@ func (i *instanceToken) NewInstanceJWTToken(instance params.Instance, entity par token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) tokenString, err := token.SignedString([]byte(i.jwtSecret)) if err != nil { - return "", errors.Wrap(err, "signing token") + return "", fmt.Errorf("error signing token: %w", err) } return tokenString, nil diff --git a/cmd/garm-cli/cmd/init.go b/cmd/garm-cli/cmd/init.go index 6c6a6072..c544699e 100644 --- a/cmd/garm-cli/cmd/init.go +++ b/cmd/garm-cli/cmd/init.go @@ -21,7 +21,6 @@ import ( openapiRuntimeClient "github.com/go-openapi/runtime/client" "github.com/jedib0t/go-pretty/v6/table" - "github.com/pkg/errors" "github.com/spf13/cobra" apiClientController "github.com/cloudbase/garm/client/controller" @@ -80,7 +79,7 @@ garm-cli init --name=dev --url=https://runner.example.com --username=admin --pas response, err := apiCli.FirstRun.FirstRun(newUserReq, authToken) if err != nil { - return errors.Wrap(err, "initializing manager") + return fmt.Errorf("error initializing manager: %w", err) } newLoginParamsReq := apiClientLogin.NewLoginParams() @@ -91,7 +90,7 @@ garm-cli init --name=dev --url=https://runner.example.com --username=admin --pas token, err := apiCli.Login.Login(newLoginParamsReq, authToken) if err != nil { - return errors.Wrap(err, "authenticating") + return fmt.Errorf("error authenticating: %w", err) } cfg.Managers = append(cfg.Managers, config.Manager{ @@ -104,7 +103,7 @@ garm-cli init --name=dev --url=https://runner.example.com --username=admin --pas cfg.ActiveManager = loginProfileName if err := cfg.SaveConfig(); err != nil { - return errors.Wrap(err, "saving config") + return fmt.Errorf("error saving config: %w", err) } updateUrlsReq := apiClientController.NewUpdateControllerParams() diff --git a/cmd/garm-cli/cmd/pool.go b/cmd/garm-cli/cmd/pool.go index 445801a6..5b8cadf3 100644 --- a/cmd/garm-cli/cmd/pool.go +++ b/cmd/garm-cli/cmd/pool.go @@ -21,7 +21,6 @@ import ( "strings" "github.com/jedib0t/go-pretty/v6/table" - "github.com/pkg/errors" "github.com/spf13/cobra" commonParams "github.com/cloudbase/garm-provider-common/params" @@ -471,7 +470,7 @@ func init() { func extraSpecsFromFile(specsFile string) (json.RawMessage, error) { data, err := os.ReadFile(specsFile) if err != nil { - return nil, errors.Wrap(err, "opening specs file") + return nil, fmt.Errorf("error opening specs file: %w", err) } return asRawMessage(data) } @@ -481,14 +480,14 @@ func asRawMessage(data []byte) (json.RawMessage, error) { // have a valid json. 
var unmarshaled interface{} if err := json.Unmarshal(data, &unmarshaled); err != nil { - return nil, errors.Wrap(err, "decoding extra specs") + return nil, fmt.Errorf("error decoding extra specs: %w", err) } var asRawJSON json.RawMessage var err error asRawJSON, err = json.Marshal(unmarshaled) if err != nil { - return nil, errors.Wrap(err, "marshaling json") + return nil, fmt.Errorf("error marshaling json: %w", err) } return asRawJSON, nil } diff --git a/cmd/garm-cli/config/config.go b/cmd/garm-cli/config/config.go index 6f6b197c..cf1cf1d2 100644 --- a/cmd/garm-cli/config/config.go +++ b/cmd/garm-cli/config/config.go @@ -15,13 +15,13 @@ package config import ( + "errors" "fmt" "os" "path/filepath" "sync" "github.com/BurntSushi/toml" - "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" ) @@ -34,11 +34,11 @@ const ( func getConfigFilePath() (string, error) { configDir, err := getHomeDir() if err != nil { - return "", errors.Wrap(err, "fetching home folder") + return "", fmt.Errorf("error fetching home folder: %w", err) } if err := ensureHomeDir(configDir); err != nil { - return "", errors.Wrap(err, "ensuring config dir") + return "", fmt.Errorf("error ensuring config dir: %w", err) } cfgFile := filepath.Join(configDir, DefaultConfigFileName) @@ -48,7 +48,7 @@ func getConfigFilePath() (string, error) { func LoadConfig() (*Config, error) { cfgFile, err := getConfigFilePath() if err != nil { - return nil, errors.Wrap(err, "fetching config") + return nil, fmt.Errorf("error fetching config: %w", err) } if _, err := os.Stat(cfgFile); err != nil { @@ -56,12 +56,12 @@ func LoadConfig() (*Config, error) { // return empty config return &Config{}, nil } - return nil, errors.Wrap(err, "accessing config file") + return nil, fmt.Errorf("error accessing config file: %w", err) } var config Config if _, err := toml.DecodeFile(cfgFile, &config); err != nil { - return nil, errors.Wrap(err, "decoding toml") + return nil, fmt.Errorf("error decoding toml: %w", err) } return &config, nil @@ -157,17 +157,17 @@ func (c *Config) SaveConfig() error { cfgFile, err := getConfigFilePath() if err != nil { if !errors.Is(err, os.ErrNotExist) { - return errors.Wrap(err, "getting config") + return fmt.Errorf("error getting config: %w", err) } } cfgHandle, err := os.Create(cfgFile) if err != nil { - return errors.Wrap(err, "getting file handle") + return fmt.Errorf("error getting file handle: %w", err) } encoder := toml.NewEncoder(cfgHandle) if err := encoder.Encode(c); err != nil { - return errors.Wrap(err, "saving config") + return fmt.Errorf("error saving config: %w", err) } return nil diff --git a/cmd/garm-cli/config/home.go b/cmd/garm-cli/config/home.go index b6043289..11821e9c 100644 --- a/cmd/garm-cli/config/home.go +++ b/cmd/garm-cli/config/home.go @@ -15,19 +15,19 @@ package config import ( + "errors" + "fmt" "os" - - "github.com/pkg/errors" ) func ensureHomeDir(folder string) error { if _, err := os.Stat(folder); err != nil { if !errors.Is(err, os.ErrNotExist) { - return errors.Wrap(err, "checking home dir") + return fmt.Errorf("error checking home dir: %w", err) } if err := os.MkdirAll(folder, 0o710); err != nil { - return errors.Wrapf(err, "creating %s", folder) + return fmt.Errorf("error creating %s: %w", folder, err) } } diff --git a/cmd/garm-cli/config/home_nix.go b/cmd/garm-cli/config/home_nix.go index e9ffa521..323f29d7 100644 --- a/cmd/garm-cli/config/home_nix.go +++ b/cmd/garm-cli/config/home_nix.go @@ -17,16 +17,15 @@ package config import ( + "fmt" "os" "path/filepath" 
- - "github.com/pkg/errors" ) func getHomeDir() (string, error) { home, err := os.UserHomeDir() if err != nil { - return "", errors.Wrap(err, "fetching home dir") + return "", fmt.Errorf("error fetching home dir: %w", err) } return filepath.Join(home, ".local", "share", DefaultAppFolder), nil diff --git a/cmd/garm/main.go b/cmd/garm/main.go index 226a9e2a..bfc23d50 100644 --- a/cmd/garm/main.go +++ b/cmd/garm/main.go @@ -31,7 +31,6 @@ import ( "github.com/gorilla/handlers" "github.com/gorilla/mux" - "github.com/pkg/errors" lumberjack "gopkg.in/natefinch/lumberjack.v2" "github.com/cloudbase/garm-provider-common/util" @@ -73,7 +72,7 @@ func maybeInitController(db common.Store) (params.ControllerInfo, error) { info, err := db.InitController() if err != nil { - return params.ControllerInfo{}, errors.Wrap(err, "initializing controller") + return params.ControllerInfo{}, fmt.Errorf("error initializing controller: %w", err) } return info, nil @@ -152,7 +151,7 @@ func setupLogging(ctx context.Context, logCfg config.Logging, hub *websocket.Hub func maybeUpdateURLsFromConfig(cfg config.Config, store common.Store) error { info, err := store.ControllerInfo() if err != nil { - return errors.Wrap(err, "fetching controller info") + return fmt.Errorf("error fetching controller info: %w", err) } var updateParams params.UpdateControllerParams @@ -176,7 +175,7 @@ func maybeUpdateURLsFromConfig(cfg config.Config, store common.Store) error { _, err = store.UpdateController(updateParams) if err != nil { - return errors.Wrap(err, "updating controller info") + return fmt.Errorf("error updating controller info: %w", err) } return nil } diff --git a/config/config.go b/config/config.go index cdbec393..31a16ae2 100644 --- a/config/config.go +++ b/config/config.go @@ -31,7 +31,6 @@ import ( "github.com/BurntSushi/toml" "github.com/bradleyfalzon/ghinstallation/v2" zxcvbn "github.com/nbutton23/zxcvbn-go" - "github.com/pkg/errors" "golang.org/x/oauth2" "github.com/cloudbase/garm/params" @@ -84,10 +83,10 @@ const ( func NewConfig(cfgFile string) (*Config, error) { var config Config if _, err := toml.DecodeFile(cfgFile, &config); err != nil { - return nil, errors.Wrap(err, "decoding toml") + return nil, fmt.Errorf("error decoding toml: %w", err) } if err := config.Validate(); err != nil { - return nil, errors.Wrap(err, "validating config") + return nil, fmt.Errorf("error validating config: %w", err) } return &config, nil } @@ -496,19 +495,19 @@ type Database struct { // GormParams returns the database type and connection URI func (d *Database) GormParams() (dbType DBBackendType, uri string, err error) { if err := d.Validate(); err != nil { - return "", "", errors.Wrap(err, "validating database config") + return "", "", fmt.Errorf("error validating database config: %w", err) } dbType = d.DbBackend switch dbType { case MySQLBackend: uri, err = d.MySQL.ConnectionString() if err != nil { - return "", "", errors.Wrap(err, "fetching mysql connection string") + return "", "", fmt.Errorf("error fetching mysql connection string: %w", err) } case SQLiteBackend: uri, err = d.SQLite.ConnectionString() if err != nil { - return "", "", errors.Wrap(err, "fetching sqlite3 connection string") + return "", "", fmt.Errorf("error fetching sqlite3 connection string: %w", err) } default: return "", "", fmt.Errorf("invalid database backend: %s", dbType) diff --git a/config/config_test.go b/config/config_test.go index 52c2928e..bbf9e299 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -517,7 +517,6 @@ func TestJWTAuthConfig(t 
*testing.T) { func TestTimeToLiveDuration(t *testing.T) { cfg := JWTAuth{ - Secret: EncryptionPassphrase, TimeToLive: "48h", } diff --git a/database/sql/controller.go b/database/sql/controller.go index fb360e00..5bf60763 100644 --- a/database/sql/controller.go +++ b/database/sql/controller.go @@ -15,10 +15,11 @@ package sql import ( + "errors" + "fmt" "net/url" "github.com/google/uuid" - "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -30,7 +31,7 @@ import ( func dbControllerToCommonController(dbInfo ControllerInfo) (params.ControllerInfo, error) { url, err := url.JoinPath(dbInfo.WebhookBaseURL, dbInfo.ControllerID.String()) if err != nil { - return params.ControllerInfo{}, errors.Wrap(err, "joining webhook URL") + return params.ControllerInfo{}, fmt.Errorf("error joining webhook URL: %w", err) } return params.ControllerInfo{ @@ -49,14 +50,14 @@ func (s *sqlDatabase) ControllerInfo() (params.ControllerInfo, error) { q := s.conn.Model(&ControllerInfo{}).First(&info) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { - return params.ControllerInfo{}, errors.Wrap(runnerErrors.ErrNotFound, "fetching controller info") + return params.ControllerInfo{}, fmt.Errorf("error fetching controller info: %w", runnerErrors.ErrNotFound) } - return params.ControllerInfo{}, errors.Wrap(q.Error, "fetching controller info") + return params.ControllerInfo{}, fmt.Errorf("error fetching controller info: %w", q.Error) } paramInfo, err := dbControllerToCommonController(info) if err != nil { - return params.ControllerInfo{}, errors.Wrap(err, "converting controller info") + return params.ControllerInfo{}, fmt.Errorf("error converting controller info: %w", err) } return paramInfo, nil @@ -69,7 +70,7 @@ func (s *sqlDatabase) InitController() (params.ControllerInfo, error) { newID, err := uuid.NewRandom() if err != nil { - return params.ControllerInfo{}, errors.Wrap(err, "generating UUID") + return params.ControllerInfo{}, fmt.Errorf("error generating UUID: %w", err) } newInfo := ControllerInfo{ @@ -79,7 +80,7 @@ func (s *sqlDatabase) InitController() (params.ControllerInfo, error) { q := s.conn.Save(&newInfo) if q.Error != nil { - return params.ControllerInfo{}, errors.Wrap(q.Error, "saving controller info") + return params.ControllerInfo{}, fmt.Errorf("error saving controller info: %w", q.Error) } return params.ControllerInfo{ @@ -98,13 +99,13 @@ func (s *sqlDatabase) UpdateController(info params.UpdateControllerParams) (para q := tx.Model(&ControllerInfo{}).First(&dbInfo) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { - return errors.Wrap(runnerErrors.ErrNotFound, "fetching controller info") + return fmt.Errorf("error fetching controller info: %w", runnerErrors.ErrNotFound) } - return errors.Wrap(q.Error, "fetching controller info") + return fmt.Errorf("error fetching controller info: %w", q.Error) } if err := info.Validate(); err != nil { - return errors.Wrap(err, "validating controller info") + return fmt.Errorf("error validating controller info: %w", err) } if info.MetadataURL != nil { @@ -125,17 +126,17 @@ func (s *sqlDatabase) UpdateController(info params.UpdateControllerParams) (para q = tx.Save(&dbInfo) if q.Error != nil { - return errors.Wrap(q.Error, "saving controller info") + return fmt.Errorf("error saving controller info: %w", q.Error) } return nil }) if err != nil { - return params.ControllerInfo{}, errors.Wrap(err, "updating controller info") + return params.ControllerInfo{}, fmt.Errorf("error updating 
controller info: %w", err) } paramInfo, err = dbControllerToCommonController(dbInfo) if err != nil { - return params.ControllerInfo{}, errors.Wrap(err, "converting controller info") + return params.ControllerInfo{}, fmt.Errorf("error converting controller info: %w", err) } return paramInfo, nil } diff --git a/database/sql/enterprise.go b/database/sql/enterprise.go index fc273165..d201cd21 100644 --- a/database/sql/enterprise.go +++ b/database/sql/enterprise.go @@ -16,10 +16,11 @@ package sql import ( "context" + "errors" + "fmt" "log/slog" "github.com/google/uuid" - "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -33,12 +34,12 @@ func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name string, credent return params.Enterprise{}, errors.New("creating enterprise: missing secret") } if credentials.ForgeType != params.GithubEndpointType { - return params.Enterprise{}, errors.Wrap(runnerErrors.ErrBadRequest, "enterprises are not supported on this forge type") + return params.Enterprise{}, fmt.Errorf("enterprises are not supported on this forge type: %w", runnerErrors.ErrBadRequest) } secret, err := util.Seal([]byte(webhookSecret), []byte(s.cfg.Passphrase)) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "encoding secret") + return params.Enterprise{}, fmt.Errorf("error encoding secret: %w", err) } defer func() { @@ -57,22 +58,22 @@ func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name string, credent q := tx.Create(&newEnterprise) if q.Error != nil { - return errors.Wrap(q.Error, "creating enterprise") + return fmt.Errorf("error creating enterprise: %w", q.Error) } newEnterprise, err = s.getEnterpriseByID(ctx, tx, newEnterprise.ID.String(), "Pools", "Credentials", "Endpoint", "Credentials.Endpoint") if err != nil { - return errors.Wrap(err, "creating enterprise") + return fmt.Errorf("error creating enterprise: %w", err) } return nil }) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "creating enterprise") + return params.Enterprise{}, fmt.Errorf("error creating enterprise: %w", err) } ret, err := s.GetEnterpriseByID(ctx, newEnterprise.ID.String()) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "creating enterprise") + return params.Enterprise{}, fmt.Errorf("error creating enterprise: %w", err) } return ret, nil @@ -81,12 +82,12 @@ func (s *sqlDatabase) CreateEnterprise(ctx context.Context, name string, credent func (s *sqlDatabase) GetEnterprise(ctx context.Context, name, endpointName string) (params.Enterprise, error) { enterprise, err := s.getEnterprise(ctx, name, endpointName) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") + return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", err) } param, err := s.sqlToCommonEnterprise(enterprise, true) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") + return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", err) } return param, nil } @@ -101,12 +102,12 @@ func (s *sqlDatabase) GetEnterpriseByID(ctx context.Context, enterpriseID string } enterprise, err := s.getEnterpriseByID(ctx, s.conn, enterpriseID, preloadList...) 
if err != nil { - return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") + return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", err) } param, err := s.sqlToCommonEnterprise(enterprise, true) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") + return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", err) } return param, nil } @@ -125,7 +126,7 @@ func (s *sqlDatabase) ListEnterprises(_ context.Context, filter params.Enterpris } q = q.Find(&enterprises) if q.Error != nil { - return []params.Enterprise{}, errors.Wrap(q.Error, "fetching enterprises") + return []params.Enterprise{}, fmt.Errorf("error fetching enterprises: %w", q.Error) } ret := make([]params.Enterprise, len(enterprises)) @@ -133,7 +134,7 @@ func (s *sqlDatabase) ListEnterprises(_ context.Context, filter params.Enterpris var err error ret[idx], err = s.sqlToCommonEnterprise(val, true) if err != nil { - return nil, errors.Wrap(err, "fetching enterprises") + return nil, fmt.Errorf("error fetching enterprises: %w", err) } } @@ -143,7 +144,7 @@ func (s *sqlDatabase) ListEnterprises(_ context.Context, filter params.Enterpris func (s *sqlDatabase) DeleteEnterprise(ctx context.Context, enterpriseID string) error { enterprise, err := s.getEnterpriseByID(ctx, s.conn, enterpriseID, "Endpoint", "Credentials", "Credentials.Endpoint") if err != nil { - return errors.Wrap(err, "fetching enterprise") + return fmt.Errorf("error fetching enterprise: %w", err) } defer func(ent Enterprise) { @@ -159,7 +160,7 @@ func (s *sqlDatabase) DeleteEnterprise(ctx context.Context, enterpriseID string) q := s.conn.Unscoped().Delete(&enterprise) if q.Error != nil && !errors.Is(q.Error, gorm.ErrRecordNotFound) { - return errors.Wrap(q.Error, "deleting enterprise") + return fmt.Errorf("error deleting enterprise: %w", q.Error) } return nil @@ -177,31 +178,31 @@ func (s *sqlDatabase) UpdateEnterprise(ctx context.Context, enterpriseID string, var err error enterprise, err = s.getEnterpriseByID(ctx, tx, enterpriseID) if err != nil { - return errors.Wrap(err, "fetching enterprise") + return fmt.Errorf("error fetching enterprise: %w", err) } if enterprise.EndpointName == nil { - return errors.Wrap(runnerErrors.ErrUnprocessable, "enterprise has no endpoint") + return fmt.Errorf("error enterprise has no endpoint: %w", runnerErrors.ErrUnprocessable) } if param.CredentialsName != "" { creds, err = s.getGithubCredentialsByName(ctx, tx, param.CredentialsName, false) if err != nil { - return errors.Wrap(err, "fetching credentials") + return fmt.Errorf("error fetching credentials: %w", err) } if creds.EndpointName == nil { - return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") + return fmt.Errorf("error credentials have no endpoint: %w", runnerErrors.ErrUnprocessable) } if *creds.EndpointName != *enterprise.EndpointName { - return errors.Wrap(runnerErrors.ErrBadRequest, "endpoint mismatch") + return fmt.Errorf("error endpoint mismatch: %w", runnerErrors.ErrBadRequest) } enterprise.CredentialsID = &creds.ID } if param.WebhookSecret != "" { secret, err := util.Seal([]byte(param.WebhookSecret), []byte(s.cfg.Passphrase)) if err != nil { - return errors.Wrap(err, "encoding secret") + return fmt.Errorf("error encoding secret: %w", err) } enterprise.WebhookSecret = secret } @@ -212,22 +213,22 @@ func (s *sqlDatabase) UpdateEnterprise(ctx context.Context, enterpriseID string, q := tx.Save(&enterprise) if q.Error != nil { - return errors.Wrap(q.Error, "saving enterprise") + 
return fmt.Errorf("error saving enterprise: %w", q.Error) } return nil }) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "updating enterprise") + return params.Enterprise{}, fmt.Errorf("error updating enterprise: %w", err) } enterprise, err = s.getEnterpriseByID(ctx, s.conn, enterpriseID, "Endpoint", "Credentials", "Credentials.Endpoint") if err != nil { - return params.Enterprise{}, errors.Wrap(err, "updating enterprise") + return params.Enterprise{}, fmt.Errorf("error updating enterprise: %w", err) } newParams, err = s.sqlToCommonEnterprise(enterprise, true) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "updating enterprise") + return params.Enterprise{}, fmt.Errorf("error updating enterprise: %w", err) } return newParams, nil } @@ -244,7 +245,7 @@ func (s *sqlDatabase) getEnterprise(_ context.Context, name, endpointName string if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Enterprise{}, runnerErrors.ErrNotFound } - return Enterprise{}, errors.Wrap(q.Error, "fetching enterprise from database") + return Enterprise{}, fmt.Errorf("error fetching enterprise from database: %w", q.Error) } return enterprise, nil } @@ -252,7 +253,7 @@ func (s *sqlDatabase) getEnterprise(_ context.Context, name, endpointName string func (s *sqlDatabase) getEnterpriseByID(_ context.Context, tx *gorm.DB, id string, preload ...string) (Enterprise, error) { u, err := uuid.Parse(id) if err != nil { - return Enterprise{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return Enterprise{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } var enterprise Enterprise @@ -268,7 +269,7 @@ func (s *sqlDatabase) getEnterpriseByID(_ context.Context, tx *gorm.DB, id strin if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Enterprise{}, runnerErrors.ErrNotFound } - return Enterprise{}, errors.Wrap(q.Error, "fetching enterprise from database") + return Enterprise{}, fmt.Errorf("error fetching enterprise from database: %w", q.Error) } return enterprise, nil } diff --git a/database/sql/enterprise_test.go b/database/sql/enterprise_test.go index 056bb7fa..224c04aa 100644 --- a/database/sql/enterprise_test.go +++ b/database/sql/enterprise_test.go @@ -218,7 +218,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterpriseInvalidDBPassphrase() { params.PoolBalancerTypeRoundRobin) s.Require().NotNil(err) - s.Require().Equal("encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) + s.Require().Equal("error encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) } func (s *EnterpriseTestSuite) TestCreateEnterpriseDBCreateErr() { @@ -236,7 +236,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterpriseDBCreateErr() { params.PoolBalancerTypeRoundRobin) s.Require().NotNil(err) - s.Require().Equal("creating enterprise: creating enterprise: creating enterprise mock error", err.Error()) + s.Require().Equal("error creating enterprise: error creating enterprise: creating enterprise mock error", err.Error()) s.assertSQLMockExpectations() } @@ -259,7 +259,7 @@ func (s *EnterpriseTestSuite) TestGetEnterpriseNotFound() { _, err := s.Store.GetEnterprise(s.adminCtx, "dummy-name", "github.com") s.Require().NotNil(err) - s.Require().Equal("fetching enterprise: not found", err.Error()) + s.Require().Equal("error fetching enterprise: not found", err.Error()) } func (s *EnterpriseTestSuite) TestGetEnterpriseDBDecryptingErr() { @@ -271,7 +271,7 @@ func (s *EnterpriseTestSuite) TestGetEnterpriseDBDecryptingErr() { _, err := 
s.StoreSQLMocked.GetEnterprise(s.adminCtx, s.Fixtures.Enterprises[0].Name, s.Fixtures.Enterprises[0].Endpoint.Name) s.Require().NotNil(err) - s.Require().Equal("fetching enterprise: missing secret", err.Error()) + s.Require().Equal("error fetching enterprise: missing secret", err.Error()) s.assertSQLMockExpectations() } @@ -341,7 +341,7 @@ func (s *EnterpriseTestSuite) TestListEnterprisesDBFetchErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("fetching enterprises: fetching user from database mock error", err.Error()) + s.Require().Equal("error fetching enterprises: fetching user from database mock error", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterprise() { @@ -350,14 +350,14 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprise() { s.Require().Nil(err) _, err = s.Store.GetEnterpriseByID(s.adminCtx, s.Fixtures.Enterprises[0].ID) s.Require().NotNil(err) - s.Require().Equal("fetching enterprise: not found", err.Error()) + s.Require().Equal("error fetching enterprise: not found", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterpriseInvalidEnterpriseID() { err := s.Store.DeleteEnterprise(s.adminCtx, "dummy-enterprise-id") s.Require().NotNil(err) - s.Require().Equal("fetching enterprise: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching enterprise: error parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterpriseDBDeleteErr() { @@ -375,7 +375,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterpriseDBDeleteErr() { err := s.StoreSQLMocked.DeleteEnterprise(s.adminCtx, s.Fixtures.Enterprises[0].ID) s.Require().NotNil(err) - s.Require().Equal("deleting enterprise: mocked delete enterprise error", err.Error()) + s.Require().Equal("error deleting enterprise: mocked delete enterprise error", err.Error()) s.assertSQLMockExpectations() } @@ -391,7 +391,7 @@ func (s *EnterpriseTestSuite) TestUpdateEnterpriseInvalidEnterpriseID() { _, err := s.Store.UpdateEnterprise(s.adminCtx, "dummy-enterprise-id", s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("updating enterprise: fetching enterprise: parsing id: invalid request", err.Error()) + s.Require().Equal("error updating enterprise: error fetching enterprise: error parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestUpdateEnterpriseDBEncryptErr() { @@ -416,7 +416,7 @@ func (s *EnterpriseTestSuite) TestUpdateEnterpriseDBEncryptErr() { _, err := s.StoreSQLMocked.UpdateEnterprise(s.adminCtx, s.Fixtures.Enterprises[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("updating enterprise: encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) + s.Require().Equal("error updating enterprise: error encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) s.assertSQLMockExpectations() } @@ -444,7 +444,7 @@ func (s *EnterpriseTestSuite) TestUpdateEnterpriseDBSaveErr() { _, err := s.StoreSQLMocked.UpdateEnterprise(s.adminCtx, s.Fixtures.Enterprises[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("updating enterprise: saving enterprise: saving enterprise mock error", err.Error()) + s.Require().Equal("error updating enterprise: error saving enterprise: saving enterprise mock error", err.Error()) s.assertSQLMockExpectations() } @@ -472,7 +472,7 @@ func (s *EnterpriseTestSuite) TestUpdateEnterpriseDBDecryptingErr() { _, err := s.StoreSQLMocked.UpdateEnterprise(s.adminCtx, 
s.Fixtures.Enterprises[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("updating enterprise: encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) + s.Require().Equal("error updating enterprise: error encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) s.assertSQLMockExpectations() } @@ -487,7 +487,7 @@ func (s *EnterpriseTestSuite) TestGetEnterpriseByIDInvalidEnterpriseID() { _, err := s.Store.GetEnterpriseByID(s.adminCtx, "dummy-enterprise-id") s.Require().NotNil(err) - s.Require().Equal("fetching enterprise: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching enterprise: error parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestGetEnterpriseByIDDBDecryptingErr() { @@ -508,7 +508,7 @@ func (s *EnterpriseTestSuite) TestGetEnterpriseByIDDBDecryptingErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("fetching enterprise: missing secret", err.Error()) + s.Require().Equal("error fetching enterprise: missing secret", err.Error()) } func (s *EnterpriseTestSuite) TestCreateEnterprisePool() { @@ -547,7 +547,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolInvalidEnterpriseID() { _, err := s.Store.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("parsing id: invalid request", err.Error()) + s.Require().Equal("error parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestCreateEnterprisePoolDBFetchTagErr() { @@ -565,7 +565,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolDBFetchTagErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("creating tag: fetching tag from database: mocked fetching tag error", err.Error()) + s.Require().Equal("error creating tag: error fetching tag from database: mocked fetching tag error", err.Error()) s.assertSQLMockExpectations() } @@ -592,7 +592,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolDBAddingPoolErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("creating pool: mocked adding pool error", err.Error()) + s.Require().Equal("error creating pool: mocked adding pool error", err.Error()) s.assertSQLMockExpectations() } @@ -623,7 +623,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolDBSaveTagErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("associating tags: mocked saving tag error", err.Error()) + s.Require().Equal("error associating tags: mocked saving tag error", err.Error()) s.assertSQLMockExpectations() } @@ -663,7 +663,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolDBFetchPoolErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("fetching pool: not found", err.Error()) + s.Require().Equal("error fetching pool: not found", err.Error()) s.assertSQLMockExpectations() } @@ -694,7 +694,7 @@ func (s *EnterpriseTestSuite) TestListEnterprisePoolsInvalidEnterpriseID() { _, err := s.Store.ListEntityPools(s.adminCtx, entity) s.Require().NotNil(err) - s.Require().Equal("fetching pools: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching pools: error parsing id: 
invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestGetEnterprisePool() { @@ -719,7 +719,7 @@ func (s *EnterpriseTestSuite) TestGetEnterprisePoolInvalidEnterpriseID() { _, err := s.Store.GetEntityPool(s.adminCtx, entity, "dummy-pool-id") s.Require().NotNil(err) - s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) + s.Require().Equal("fetching pool: error parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterprisePool() { @@ -734,7 +734,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePool() { s.Require().Nil(err) _, err = s.Store.GetEntityPool(s.adminCtx, entity, pool.ID) - s.Require().Equal("fetching pool: finding pool: not found", err.Error()) + s.Require().Equal("fetching pool: error finding pool: not found", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolInvalidEnterpriseID() { @@ -745,7 +745,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolInvalidEnterpriseID() { err := s.Store.DeleteEntityPool(s.adminCtx, entity, "dummy-pool-id") s.Require().NotNil(err) - s.Require().Equal("parsing id: invalid request", err.Error()) + s.Require().Equal("error parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolDBDeleteErr() { @@ -765,7 +765,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolDBDeleteErr() { err = s.StoreSQLMocked.DeleteEntityPool(s.adminCtx, entity, pool.ID) s.Require().NotNil(err) - s.Require().Equal("removing pool: mocked deleting pool error", err.Error()) + s.Require().Equal("error removing pool: mocked deleting pool error", err.Error()) s.assertSQLMockExpectations() } @@ -800,7 +800,7 @@ func (s *EnterpriseTestSuite) TestListEnterpriseInstancesInvalidEnterpriseID() { _, err := s.Store.ListEntityInstances(s.adminCtx, entity) s.Require().NotNil(err) - s.Require().Equal("fetching entity: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching entity: error parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestUpdateEnterprisePool() { @@ -828,7 +828,7 @@ func (s *EnterpriseTestSuite) TestUpdateEnterprisePoolInvalidEnterpriseID() { _, err := s.Store.UpdateEntityPool(s.adminCtx, entity, "dummy-pool-id", s.Fixtures.UpdatePoolParams) s.Require().NotNil(err) - s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching pool: error parsing id: invalid request", err.Error()) } func (s *EnterpriseTestSuite) TestAddRepoEntityEvent() { diff --git a/database/sql/gitea.go b/database/sql/gitea.go index 45dc30e5..a9edde09 100644 --- a/database/sql/gitea.go +++ b/database/sql/gitea.go @@ -16,9 +16,10 @@ package sql import ( "context" + "errors" + "fmt" "log/slog" - "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -36,7 +37,7 @@ func (s *sqlDatabase) CreateGiteaEndpoint(_ context.Context, param params.Create var endpoint GithubEndpoint err = s.conn.Transaction(func(tx *gorm.DB) error { if err := tx.Where("name = ?", param.Name).First(&endpoint).Error; err == nil { - return errors.Wrap(runnerErrors.ErrDuplicateEntity, "gitea endpoint already exists") + return fmt.Errorf("gitea endpoint already exists: %w", runnerErrors.ErrDuplicateEntity) } endpoint = GithubEndpoint{ Name: param.Name, @@ -48,16 +49,16 @@ func (s *sqlDatabase) CreateGiteaEndpoint(_ context.Context, param params.Create } if err := tx.Create(&endpoint).Error; err != nil { - return errors.Wrap(err, "creating gitea 
endpoint") + return fmt.Errorf("error creating gitea endpoint: %w", err) } return nil }) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "creating gitea endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("error creating gitea endpoint: %w", err) } ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "converting gitea endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("error converting gitea endpoint: %w", err) } return ghEndpoint, nil } @@ -66,14 +67,14 @@ func (s *sqlDatabase) ListGiteaEndpoints(_ context.Context) ([]params.ForgeEndpo var endpoints []GithubEndpoint err := s.conn.Where("endpoint_type = ?", params.GiteaEndpointType).Find(&endpoints).Error if err != nil { - return nil, errors.Wrap(err, "fetching gitea endpoints") + return nil, fmt.Errorf("error fetching gitea endpoints: %w", err) } var ret []params.ForgeEndpoint for _, ep := range endpoints { commonEp, err := s.sqlToCommonGithubEndpoint(ep) if err != nil { - return nil, errors.Wrap(err, "converting gitea endpoint") + return nil, fmt.Errorf("error converting gitea endpoint: %w", err) } ret = append(ret, commonEp) } @@ -90,19 +91,19 @@ func (s *sqlDatabase) UpdateGiteaEndpoint(_ context.Context, name string, param err = s.conn.Transaction(func(tx *gorm.DB) error { if err := tx.Where("name = ? and endpoint_type = ?", name, params.GiteaEndpointType).First(&endpoint).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(runnerErrors.ErrNotFound, "gitea endpoint not found") + return runnerErrors.NewNotFoundError("gitea endpoint %q not found", name) } - return errors.Wrap(err, "fetching gitea endpoint") + return fmt.Errorf("error fetching gitea endpoint: %w", err) } var credsCount int64 if err := tx.Model(&GiteaCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "fetching gitea credentials") + return fmt.Errorf("error fetching gitea credentials: %w", err) } } if credsCount > 0 && (param.APIBaseURL != nil || param.BaseURL != nil) { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot update endpoint URLs with existing credentials") + return runnerErrors.NewBadRequestError("cannot update endpoint URLs with existing credentials") } if param.APIBaseURL != nil { @@ -122,17 +123,17 @@ func (s *sqlDatabase) UpdateGiteaEndpoint(_ context.Context, name string, param } if err := tx.Save(&endpoint).Error; err != nil { - return errors.Wrap(err, "updating gitea endpoint") + return fmt.Errorf("error updating gitea endpoint: %w", err) } return nil }) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "updating gitea endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("error updating gitea endpoint: %w", err) } ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "converting gitea endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("error converting gitea endpoint: %w", err) } return ghEndpoint, nil } @@ -142,9 +143,9 @@ func (s *sqlDatabase) GetGiteaEndpoint(_ context.Context, name string) (params.F err := s.conn.Where("name = ? 
and endpoint_type = ?", name, params.GiteaEndpointType).First(&endpoint).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return params.ForgeEndpoint{}, errors.Wrap(runnerErrors.ErrNotFound, "gitea endpoint not found") + return params.ForgeEndpoint{}, runnerErrors.NewNotFoundError("gitea endpoint %q not found", name) } - return params.ForgeEndpoint{}, errors.Wrap(err, "fetching gitea endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("error fetching gitea endpoint: %w", err) } return s.sqlToCommonGithubEndpoint(endpoint) @@ -162,41 +163,41 @@ func (s *sqlDatabase) DeleteGiteaEndpoint(_ context.Context, name string) (err e if errors.Is(err, gorm.ErrRecordNotFound) { return nil } - return errors.Wrap(err, "fetching gitea endpoint") + return fmt.Errorf("error fetching gitea endpoint: %w", err) } var credsCount int64 if err := tx.Model(&GiteaCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "fetching gitea credentials") + return fmt.Errorf("error fetching gitea credentials: %w", err) } } var repoCnt int64 if err := tx.Model(&Repository{}).Where("endpoint_name = ?", endpoint.Name).Count(&repoCnt).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "fetching gitea repositories") + return fmt.Errorf("error fetching gitea repositories: %w", err) } } var orgCnt int64 if err := tx.Model(&Organization{}).Where("endpoint_name = ?", endpoint.Name).Count(&orgCnt).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "fetching gitea organizations") + return fmt.Errorf("error fetching gitea organizations: %w", err) } } if credsCount > 0 || repoCnt > 0 || orgCnt > 0 { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete endpoint with associated entities") + return runnerErrors.NewBadRequestError("cannot delete endpoint with associated entities") } if err := tx.Unscoped().Delete(&endpoint).Error; err != nil { - return errors.Wrap(err, "deleting gitea endpoint") + return fmt.Errorf("error deleting gitea endpoint: %w", err) } return nil }) if err != nil { - return errors.Wrap(err, "deleting gitea endpoint") + return fmt.Errorf("error deleting gitea endpoint: %w", err) } return nil } @@ -204,10 +205,10 @@ func (s *sqlDatabase) DeleteGiteaEndpoint(_ context.Context, name string) (err e func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.CreateGiteaCredentialsParams) (gtCreds params.ForgeCredentials, err error) { userID, err := getUIDFromContext(ctx) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "creating gitea credentials") + return params.ForgeCredentials{}, fmt.Errorf("error creating gitea credentials: %w", err) } if param.Endpoint == "" { - return params.ForgeCredentials{}, errors.Wrap(runnerErrors.ErrBadRequest, "endpoint name is required") + return params.ForgeCredentials{}, runnerErrors.NewBadRequestError("endpoint name is required") } defer func() { @@ -220,13 +221,13 @@ func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.C var endpoint GithubEndpoint if err := tx.Where("name = ? 
and endpoint_type = ?", param.Endpoint, params.GiteaEndpointType).First(&endpoint).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(runnerErrors.ErrNotFound, "gitea endpoint not found") + return runnerErrors.NewNotFoundError("gitea endpoint %q not found", param.Endpoint) } - return errors.Wrap(err, "fetching gitea endpoint") + return fmt.Errorf("error fetching gitea endpoint: %w", err) } if err := tx.Where("name = ? and user_id = ?", param.Name, userID).First(&creds).Error; err == nil { - return errors.Wrap(runnerErrors.ErrDuplicateEntity, "gitea credentials already exists") + return fmt.Errorf("gitea credentials already exists: %w", runnerErrors.ErrDuplicateEntity) } var data []byte @@ -235,10 +236,10 @@ func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.C case params.ForgeAuthTypePAT: data, err = s.marshalAndSeal(param.PAT) default: - return errors.Wrap(runnerErrors.ErrBadRequest, "invalid auth type") + return runnerErrors.NewBadRequestError("invalid auth type %q", param.AuthType) } if err != nil { - return errors.Wrap(err, "marshaling and sealing credentials") + return fmt.Errorf("error marshaling and sealing credentials: %w", err) } creds = GiteaCredentials{ @@ -251,7 +252,7 @@ func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.C } if err := tx.Create(&creds).Error; err != nil { - return errors.Wrap(err, "creating gitea credentials") + return fmt.Errorf("error creating gitea credentials: %w", err) } // Skip making an extra query. creds.Endpoint = endpoint @@ -259,11 +260,11 @@ func (s *sqlDatabase) CreateGiteaCredentials(ctx context.Context, param params.C return nil }) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "creating gitea credentials") + return params.ForgeCredentials{}, fmt.Errorf("error creating gitea credentials: %w", err) } gtCreds, err = s.sqlGiteaToCommonForgeCredentials(creds) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "converting gitea credentials") + return params.ForgeCredentials{}, fmt.Errorf("error converting gitea credentials: %w", err) } return gtCreds, nil } @@ -284,16 +285,16 @@ func (s *sqlDatabase) getGiteaCredentialsByName(ctx context.Context, tx *gorm.DB userID, err := getUIDFromContext(ctx) if err != nil { - return GiteaCredentials{}, errors.Wrap(err, "fetching gitea credentials") + return GiteaCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) } q = q.Where("user_id = ?", userID) err = q.Where("name = ?", name).First(&creds).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return GiteaCredentials{}, errors.Wrap(runnerErrors.ErrNotFound, "gitea credentials not found") + return GiteaCredentials{}, runnerErrors.NewNotFoundError("gitea credentials %q not found", name) } - return GiteaCredentials{}, errors.Wrap(err, "fetching gitea credentials") + return GiteaCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) } return creds, nil @@ -302,7 +303,7 @@ func (s *sqlDatabase) getGiteaCredentialsByName(ctx context.Context, tx *gorm.DB func (s *sqlDatabase) GetGiteaCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) { creds, err := s.getGiteaCredentialsByName(ctx, s.conn, name, detailed) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "fetching gitea credentials") + return params.ForgeCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) } return s.sqlGiteaToCommonForgeCredentials(creds) 
@@ -325,7 +326,7 @@ func (s *sqlDatabase) GetGiteaCredentials(ctx context.Context, id uint, detailed if !auth.IsAdmin(ctx) { userID, err := getUIDFromContext(ctx) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "fetching gitea credentials") + return params.ForgeCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) } q = q.Where("user_id = ?", userID) } @@ -333,9 +334,9 @@ func (s *sqlDatabase) GetGiteaCredentials(ctx context.Context, id uint, detailed err := q.Where("id = ?", id).First(&creds).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return params.ForgeCredentials{}, errors.Wrap(runnerErrors.ErrNotFound, "gitea credentials not found") + return params.ForgeCredentials{}, runnerErrors.NewNotFoundError("gitea credentials not found") } - return params.ForgeCredentials{}, errors.Wrap(err, "fetching gitea credentials") + return params.ForgeCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) } return s.sqlGiteaToCommonForgeCredentials(creds) @@ -346,7 +347,7 @@ func (s *sqlDatabase) ListGiteaCredentials(ctx context.Context) ([]params.ForgeC if !auth.IsAdmin(ctx) { userID, err := getUIDFromContext(ctx) if err != nil { - return nil, errors.Wrap(err, "fetching gitea credentials") + return nil, fmt.Errorf("error fetching gitea credentials: %w", err) } q = q.Where("user_id = ?", userID) } @@ -354,14 +355,14 @@ func (s *sqlDatabase) ListGiteaCredentials(ctx context.Context) ([]params.ForgeC var creds []GiteaCredentials err := q.Preload("Endpoint").Find(&creds).Error if err != nil { - return nil, errors.Wrap(err, "fetching gitea credentials") + return nil, fmt.Errorf("error fetching gitea credentials: %w", err) } var ret []params.ForgeCredentials for _, c := range creds { commonCreds, err := s.sqlGiteaToCommonForgeCredentials(c) if err != nil { - return nil, errors.Wrap(err, "converting gitea credentials") + return nil, fmt.Errorf("error converting gitea credentials: %w", err) } ret = append(ret, commonCreds) } @@ -380,16 +381,16 @@ func (s *sqlDatabase) UpdateGiteaCredentials(ctx context.Context, id uint, param if !auth.IsAdmin(ctx) { userID, err := getUIDFromContext(ctx) if err != nil { - return errors.Wrap(err, "updating gitea credentials") + return fmt.Errorf("error updating gitea credentials: %w", err) } q = q.Where("user_id = ?", userID) } if err := q.Where("id = ?", id).First(&creds).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(runnerErrors.ErrNotFound, "gitea credentials not found") + return runnerErrors.NewNotFoundError("gitea credentials not found") } - return errors.Wrap(err, "fetching gitea credentials") + return fmt.Errorf("error fetching gitea credentials: %w", err) } if param.Name != nil { @@ -407,28 +408,28 @@ func (s *sqlDatabase) UpdateGiteaCredentials(ctx context.Context, id uint, param data, err = s.marshalAndSeal(param.PAT) } default: - return errors.Wrap(runnerErrors.ErrBadRequest, "invalid auth type") + return runnerErrors.NewBadRequestError("invalid auth type %q", creds.AuthType) } if err != nil { - return errors.Wrap(err, "marshaling and sealing credentials") + return fmt.Errorf("error marshaling and sealing credentials: %w", err) } if len(data) > 0 { creds.Payload = data } if err := tx.Save(&creds).Error; err != nil { - return errors.Wrap(err, "updating gitea credentials") + return fmt.Errorf("error updating gitea credentials: %w", err) } return nil }) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "updating gitea credentials") + 
return params.ForgeCredentials{}, fmt.Errorf("error updating gitea credentials: %w", err) } gtCreds, err = s.sqlGiteaToCommonForgeCredentials(creds) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "converting gitea credentials") + return params.ForgeCredentials{}, fmt.Errorf("error converting gitea credentials: %w", err) } return gtCreds, nil } @@ -454,7 +455,7 @@ func (s *sqlDatabase) DeleteGiteaCredentials(ctx context.Context, id uint) (err if !auth.IsAdmin(ctx) { userID, err := getUIDFromContext(ctx) if err != nil { - return errors.Wrap(err, "deleting gitea credentials") + return fmt.Errorf("error deleting gitea credentials: %w", err) } q = q.Where("user_id = ?", userID) } @@ -464,22 +465,22 @@ func (s *sqlDatabase) DeleteGiteaCredentials(ctx context.Context, id uint) (err if errors.Is(err, gorm.ErrRecordNotFound) { return nil } - return errors.Wrap(err, "fetching gitea credentials") + return fmt.Errorf("error fetching gitea credentials: %w", err) } if len(creds.Repositories) > 0 { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete credentials with repositories") + return runnerErrors.NewBadRequestError("cannot delete credentials with repositories") } if len(creds.Organizations) > 0 { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete credentials with organizations") + return runnerErrors.NewBadRequestError("cannot delete credentials with organizations") } if err := tx.Unscoped().Delete(&creds).Error; err != nil { - return errors.Wrap(err, "deleting gitea credentials") + return fmt.Errorf("error deleting gitea credentials: %w", err) } return nil }) if err != nil { - return errors.Wrap(err, "deleting gitea credentials") + return fmt.Errorf("error deleting gitea credentials: %w", err) } return nil } diff --git a/database/sql/gitea_test.go b/database/sql/gitea_test.go index 7ce6fb02..dff5c471 100644 --- a/database/sql/gitea_test.go +++ b/database/sql/gitea_test.go @@ -236,7 +236,7 @@ func (s *GiteaTestSuite) TestCreateCredentialsFailsWhenEndpointDoesNotExist() { _, err := s.db.CreateGiteaCredentials(ctx, params.CreateGiteaCredentialsParams{Endpoint: "non-existing"}) s.Require().Error(err) s.Require().ErrorIs(err, runnerErrors.ErrNotFound) - s.Require().Regexp("endpoint not found", err.Error()) + s.Require().Regexp("error creating gitea credentials: gitea endpoint \"non-existing\" not found", err.Error()) } func (s *GiteaTestSuite) TestCreateCredentialsFailsWhenAuthTypeIsInvalid() { @@ -807,7 +807,7 @@ func (s *GiteaTestSuite) TestUpdateEndpointURLsFailsIfCredentialsAreAssociated() _, err = s.db.UpdateGiteaEndpoint(ctx, testEndpointName, updateEpParams) s.Require().Error(err) s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "updating gitea endpoint: cannot update endpoint URLs with existing credentials: invalid request") + s.Require().EqualError(err, "error updating gitea endpoint: cannot update endpoint URLs with existing credentials") updateEpParams = params.UpdateGiteaEndpointParams{ APIBaseURL: &newAPIBaseURL, @@ -815,7 +815,7 @@ func (s *GiteaTestSuite) TestUpdateEndpointURLsFailsIfCredentialsAreAssociated() _, err = s.db.UpdateGiteaEndpoint(ctx, testEndpointName, updateEpParams) s.Require().Error(err) s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "updating gitea endpoint: cannot update endpoint URLs with existing credentials: invalid request") + s.Require().EqualError(err, "error updating gitea endpoint: cannot update endpoint URLs with existing credentials") 
updateEpParams = params.UpdateGiteaEndpointParams{ Description: &newDescription, diff --git a/database/sql/github.go b/database/sql/github.go index 0ad52049..626d138f 100644 --- a/database/sql/github.go +++ b/database/sql/github.go @@ -16,8 +16,9 @@ package sql import ( "context" + "errors" + "fmt" - "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -35,7 +36,7 @@ func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.Creat var endpoint GithubEndpoint err = s.conn.Transaction(func(tx *gorm.DB) error { if err := tx.Where("name = ?", param.Name).First(&endpoint).Error; err == nil { - return errors.Wrap(runnerErrors.ErrDuplicateEntity, "github endpoint already exists") + return fmt.Errorf("error github endpoint already exists: %w", runnerErrors.ErrDuplicateEntity) } endpoint = GithubEndpoint{ Name: param.Name, @@ -48,16 +49,16 @@ func (s *sqlDatabase) CreateGithubEndpoint(_ context.Context, param params.Creat } if err := tx.Create(&endpoint).Error; err != nil { - return errors.Wrap(err, "creating github endpoint") + return fmt.Errorf("error creating github endpoint: %w", err) } return nil }) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "creating github endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("error creating github endpoint: %w", err) } ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "converting github endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("error converting github endpoint: %w", err) } return ghEndpoint, nil } @@ -66,14 +67,14 @@ func (s *sqlDatabase) ListGithubEndpoints(_ context.Context) ([]params.ForgeEndp var endpoints []GithubEndpoint err := s.conn.Where("endpoint_type = ?", params.GithubEndpointType).Find(&endpoints).Error if err != nil { - return nil, errors.Wrap(err, "fetching github endpoints") + return nil, fmt.Errorf("error fetching github endpoints: %w", err) } var ret []params.ForgeEndpoint for _, ep := range endpoints { commonEp, err := s.sqlToCommonGithubEndpoint(ep) if err != nil { - return nil, errors.Wrap(err, "converting github endpoint") + return nil, fmt.Errorf("error converting github endpoint: %w", err) } ret = append(ret, commonEp) } @@ -90,19 +91,19 @@ func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param err = s.conn.Transaction(func(tx *gorm.DB) error { if err := tx.Where("name = ? 
and endpoint_type = ?", name, params.GithubEndpointType).First(&endpoint).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found") + return fmt.Errorf("error github endpoint not found: %w", runnerErrors.ErrNotFound) } - return errors.Wrap(err, "fetching github endpoint") + return fmt.Errorf("error fetching github endpoint: %w", err) } var credsCount int64 if err := tx.Model(&GithubCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "fetching github credentials") + return fmt.Errorf("error fetching github credentials: %w", err) } } if credsCount > 0 && (param.APIBaseURL != nil || param.BaseURL != nil || param.UploadBaseURL != nil) { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot update endpoint URLs with existing credentials") + return fmt.Errorf("cannot update endpoint URLs with existing credentials: %w", runnerErrors.ErrBadRequest) } if param.APIBaseURL != nil { @@ -126,17 +127,17 @@ func (s *sqlDatabase) UpdateGithubEndpoint(_ context.Context, name string, param } if err := tx.Save(&endpoint).Error; err != nil { - return errors.Wrap(err, "updating github endpoint") + return fmt.Errorf("error updating github endpoint: %w", err) } return nil }) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "updating github endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("error updating github endpoint: %w", err) } ghEndpoint, err = s.sqlToCommonGithubEndpoint(endpoint) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "converting github endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("error converting github endpoint: %w", err) } return ghEndpoint, nil } @@ -147,9 +148,9 @@ func (s *sqlDatabase) GetGithubEndpoint(_ context.Context, name string) (params. err := s.conn.Where("name = ? 
and endpoint_type = ?", name, params.GithubEndpointType).First(&endpoint).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return params.ForgeEndpoint{}, errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found") + return params.ForgeEndpoint{}, fmt.Errorf("github endpoint not found: %w", runnerErrors.ErrNotFound) } - return params.ForgeEndpoint{}, errors.Wrap(err, "fetching github endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("error fetching github endpoint: %w", err) } return s.sqlToCommonGithubEndpoint(endpoint) @@ -167,48 +168,48 @@ func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err if errors.Is(err, gorm.ErrRecordNotFound) { return nil } - return errors.Wrap(err, "fetching github endpoint") + return fmt.Errorf("error fetching github endpoint: %w", err) } var credsCount int64 if err := tx.Model(&GithubCredentials{}).Where("endpoint_name = ?", endpoint.Name).Count(&credsCount).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "fetching github credentials") + return fmt.Errorf("error fetching github credentials: %w", err) } } var repoCnt int64 if err := tx.Model(&Repository{}).Where("endpoint_name = ?", endpoint.Name).Count(&repoCnt).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "fetching github repositories") + return fmt.Errorf("error fetching github repositories: %w", err) } } var orgCnt int64 if err := tx.Model(&Organization{}).Where("endpoint_name = ?", endpoint.Name).Count(&orgCnt).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "fetching github organizations") + return fmt.Errorf("error fetching github organizations: %w", err) } } var entCnt int64 if err := tx.Model(&Enterprise{}).Where("endpoint_name = ?", endpoint.Name).Count(&entCnt).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "fetching github enterprises") + return fmt.Errorf("error fetching github enterprises: %w", err) } } if credsCount > 0 || repoCnt > 0 || orgCnt > 0 || entCnt > 0 { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete endpoint with associated entities") + return fmt.Errorf("cannot delete endpoint with associated entities: %w", runnerErrors.ErrBadRequest) } if err := tx.Unscoped().Delete(&endpoint).Error; err != nil { - return errors.Wrap(err, "deleting github endpoint") + return fmt.Errorf("error deleting github endpoint: %w", err) } return nil }) if err != nil { - return errors.Wrap(err, "deleting github endpoint") + return fmt.Errorf("error deleting github endpoint: %w", err) } return nil } @@ -216,10 +217,10 @@ func (s *sqlDatabase) DeleteGithubEndpoint(_ context.Context, name string) (err func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params.CreateGithubCredentialsParams) (ghCreds params.ForgeCredentials, err error) { userID, err := getUIDFromContext(ctx) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "creating github credentials") + return params.ForgeCredentials{}, fmt.Errorf("error creating github credentials: %w", err) } if param.Endpoint == "" { - return params.ForgeCredentials{}, errors.Wrap(runnerErrors.ErrBadRequest, "endpoint name is required") + return params.ForgeCredentials{}, fmt.Errorf("endpoint name is required: %w", runnerErrors.ErrBadRequest) } defer func() { @@ -232,13 +233,13 @@ func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params. 
var endpoint GithubEndpoint if err := tx.Where("name = ? and endpoint_type = ?", param.Endpoint, params.GithubEndpointType).First(&endpoint).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(runnerErrors.ErrNotFound, "github endpoint not found") + return fmt.Errorf("github endpoint not found: %w", runnerErrors.ErrNotFound) } - return errors.Wrap(err, "fetching github endpoint") + return fmt.Errorf("error fetching github endpoint: %w", err) } if err := tx.Where("name = ? and user_id = ?", param.Name, userID).First(&creds).Error; err == nil { - return errors.Wrap(runnerErrors.ErrDuplicateEntity, "github credentials already exists") + return fmt.Errorf("github credentials already exists: %w", runnerErrors.ErrDuplicateEntity) } var data []byte @@ -249,10 +250,10 @@ func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params. case params.ForgeAuthTypeApp: data, err = s.marshalAndSeal(param.App) default: - return errors.Wrap(runnerErrors.ErrBadRequest, "invalid auth type") + return fmt.Errorf("invalid auth type: %w", runnerErrors.ErrBadRequest) } if err != nil { - return errors.Wrap(err, "marshaling and sealing credentials") + return fmt.Errorf("error marshaling and sealing credentials: %w", err) } creds = GithubCredentials{ @@ -265,7 +266,7 @@ func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params. } if err := tx.Create(&creds).Error; err != nil { - return errors.Wrap(err, "creating github credentials") + return fmt.Errorf("error creating github credentials: %w", err) } // Skip making an extra query. creds.Endpoint = endpoint @@ -273,11 +274,11 @@ func (s *sqlDatabase) CreateGithubCredentials(ctx context.Context, param params. return nil }) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "creating github credentials") + return params.ForgeCredentials{}, fmt.Errorf("error creating github credentials: %w", err) } ghCreds, err = s.sqlToCommonForgeCredentials(creds) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "converting github credentials") + return params.ForgeCredentials{}, fmt.Errorf("error converting github credentials: %w", err) } return ghCreds, nil } @@ -298,16 +299,16 @@ func (s *sqlDatabase) getGithubCredentialsByName(ctx context.Context, tx *gorm.D userID, err := getUIDFromContext(ctx) if err != nil { - return GithubCredentials{}, errors.Wrap(err, "fetching github credentials") + return GithubCredentials{}, fmt.Errorf("error fetching github credentials: %w", err) } q = q.Where("user_id = ?", userID) err = q.Where("name = ?", name).First(&creds).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return GithubCredentials{}, errors.Wrap(runnerErrors.ErrNotFound, "github credentials not found") + return GithubCredentials{}, fmt.Errorf("github credentials not found: %w", runnerErrors.ErrNotFound) } - return GithubCredentials{}, errors.Wrap(err, "fetching github credentials") + return GithubCredentials{}, fmt.Errorf("error fetching github credentials: %w", err) } return creds, nil @@ -316,7 +317,7 @@ func (s *sqlDatabase) getGithubCredentialsByName(ctx context.Context, tx *gorm.D func (s *sqlDatabase) GetGithubCredentialsByName(ctx context.Context, name string, detailed bool) (params.ForgeCredentials, error) { creds, err := s.getGithubCredentialsByName(ctx, s.conn, name, detailed) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "fetching github credentials") + return params.ForgeCredentials{}, fmt.Errorf("error fetching github 
credentials: %w", err) } return s.sqlToCommonForgeCredentials(creds) } @@ -338,7 +339,7 @@ func (s *sqlDatabase) GetGithubCredentials(ctx context.Context, id uint, detaile if !auth.IsAdmin(ctx) { userID, err := getUIDFromContext(ctx) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "fetching github credentials") + return params.ForgeCredentials{}, fmt.Errorf("error fetching github credentials: %w", err) } q = q.Where("user_id = ?", userID) } @@ -346,9 +347,9 @@ func (s *sqlDatabase) GetGithubCredentials(ctx context.Context, id uint, detaile err := q.Where("id = ?", id).First(&creds).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return params.ForgeCredentials{}, errors.Wrap(runnerErrors.ErrNotFound, "github credentials not found") + return params.ForgeCredentials{}, fmt.Errorf("github credentials not found: %w", runnerErrors.ErrNotFound) } - return params.ForgeCredentials{}, errors.Wrap(err, "fetching github credentials") + return params.ForgeCredentials{}, fmt.Errorf("error fetching github credentials: %w", err) } return s.sqlToCommonForgeCredentials(creds) @@ -359,7 +360,7 @@ func (s *sqlDatabase) ListGithubCredentials(ctx context.Context) ([]params.Forge if !auth.IsAdmin(ctx) { userID, err := getUIDFromContext(ctx) if err != nil { - return nil, errors.Wrap(err, "fetching github credentials") + return nil, fmt.Errorf("error fetching github credentials: %w", err) } q = q.Where("user_id = ?", userID) } @@ -367,14 +368,14 @@ func (s *sqlDatabase) ListGithubCredentials(ctx context.Context) ([]params.Forge var creds []GithubCredentials err := q.Preload("Endpoint").Find(&creds).Error if err != nil { - return nil, errors.Wrap(err, "fetching github credentials") + return nil, fmt.Errorf("error fetching github credentials: %w", err) } var ret []params.ForgeCredentials for _, c := range creds { commonCreds, err := s.sqlToCommonForgeCredentials(c) if err != nil { - return nil, errors.Wrap(err, "converting github credentials") + return nil, fmt.Errorf("error converting github credentials: %w", err) } ret = append(ret, commonCreds) } @@ -393,16 +394,16 @@ func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, para if !auth.IsAdmin(ctx) { userID, err := getUIDFromContext(ctx) if err != nil { - return errors.Wrap(err, "updating github credentials") + return fmt.Errorf("error updating github credentials: %w", err) } q = q.Where("user_id = ?", userID) } if err := q.Where("id = ?", id).First(&creds).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(runnerErrors.ErrNotFound, "github credentials not found") + return fmt.Errorf("github credentials not found: %w", runnerErrors.ErrNotFound) } - return errors.Wrap(err, "fetching github credentials") + return fmt.Errorf("error fetching github credentials: %w", err) } if param.Name != nil { @@ -421,7 +422,7 @@ func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, para } if param.App != nil { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot update app credentials for PAT") + return fmt.Errorf("cannot update app credentials for PAT: %w", runnerErrors.ErrBadRequest) } case params.ForgeAuthTypeApp: if param.App != nil { @@ -429,33 +430,33 @@ func (s *sqlDatabase) UpdateGithubCredentials(ctx context.Context, id uint, para } if param.PAT != nil { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot update PAT credentials for app") + return fmt.Errorf("cannot update PAT credentials for app: %w", runnerErrors.ErrBadRequest) } default: // This 
should never happen, unless there was a bug in the DB migration code, // or the DB was manually modified. - return errors.Wrap(runnerErrors.ErrBadRequest, "invalid auth type") + return fmt.Errorf("invalid auth type: %w", runnerErrors.ErrBadRequest) } if err != nil { - return errors.Wrap(err, "marshaling and sealing credentials") + return fmt.Errorf("error marshaling and sealing credentials: %w", err) } if len(data) > 0 { creds.Payload = data } if err := tx.Save(&creds).Error; err != nil { - return errors.Wrap(err, "updating github credentials") + return fmt.Errorf("error updating github credentials: %w", err) } return nil }) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "updating github credentials") + return params.ForgeCredentials{}, fmt.Errorf("error updating github credentials: %w", err) } ghCreds, err = s.sqlToCommonForgeCredentials(creds) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "converting github credentials") + return params.ForgeCredentials{}, fmt.Errorf("error converting github credentials: %w", err) } return ghCreds, nil } @@ -475,7 +476,7 @@ func (s *sqlDatabase) DeleteGithubCredentials(ctx context.Context, id uint) (err if !auth.IsAdmin(ctx) { userID, err := getUIDFromContext(ctx) if err != nil { - return errors.Wrap(err, "deleting github credentials") + return fmt.Errorf("error deleting github credentials: %w", err) } q = q.Where("user_id = ?", userID) } @@ -486,27 +487,27 @@ func (s *sqlDatabase) DeleteGithubCredentials(ctx context.Context, id uint) (err if errors.Is(err, gorm.ErrRecordNotFound) { return nil } - return errors.Wrap(err, "fetching github credentials") + return fmt.Errorf("error fetching github credentials: %w", err) } name = creds.Name if len(creds.Repositories) > 0 { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete credentials with repositories") + return fmt.Errorf("cannot delete credentials with repositories: %w", runnerErrors.ErrBadRequest) } if len(creds.Organizations) > 0 { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete credentials with organizations") + return fmt.Errorf("cannot delete credentials with organizations: %w", runnerErrors.ErrBadRequest) } if len(creds.Enterprises) > 0 { - return errors.Wrap(runnerErrors.ErrBadRequest, "cannot delete credentials with enterprises") + return fmt.Errorf("cannot delete credentials with enterprises: %w", runnerErrors.ErrBadRequest) } if err := tx.Unscoped().Delete(&creds).Error; err != nil { - return errors.Wrap(err, "deleting github credentials") + return fmt.Errorf("error deleting github credentials: %w", err) } return nil }) if err != nil { - return errors.Wrap(err, "deleting github credentials") + return fmt.Errorf("error deleting github credentials: %w", err) } return nil } diff --git a/database/sql/github_test.go b/database/sql/github_test.go index cca58a50..ae3a3954 100644 --- a/database/sql/github_test.go +++ b/database/sql/github_test.go @@ -265,7 +265,7 @@ func (s *GithubTestSuite) TestUpdateEndpointURLsFailsIfCredentialsAreAssociated( _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) s.Require().Error(err) s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") + s.Require().EqualError(err, "error updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") updateEpParams = params.UpdateGithubEndpointParams{ UploadBaseURL: 
&newUploadBaseURL, @@ -274,7 +274,7 @@ func (s *GithubTestSuite) TestUpdateEndpointURLsFailsIfCredentialsAreAssociated( _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) s.Require().Error(err) s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") + s.Require().EqualError(err, "error updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") updateEpParams = params.UpdateGithubEndpointParams{ APIBaseURL: &newAPIBaseURL, @@ -282,7 +282,7 @@ func (s *GithubTestSuite) TestUpdateEndpointURLsFailsIfCredentialsAreAssociated( _, err = s.db.UpdateGithubEndpoint(ctx, testEndpointName, updateEpParams) s.Require().Error(err) s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") + s.Require().EqualError(err, "error updating github endpoint: cannot update endpoint URLs with existing credentials: invalid request") updateEpParams = params.UpdateGithubEndpointParams{ Description: &newDescription, @@ -737,7 +737,7 @@ func (s *GithubTestSuite) TestUpdateGithubCredentialsFailIfWrongCredentialTypeIs _, err = s.db.UpdateGithubCredentials(ctx, creds.ID, updateCredParams) s.Require().Error(err) s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "updating github credentials: cannot update app credentials for PAT: invalid request") + s.Require().EqualError(err, "error updating github credentials: cannot update app credentials for PAT: invalid request") credParamsWithApp := params.CreateGithubCredentialsParams{ Name: "test-credsApp", @@ -764,7 +764,7 @@ func (s *GithubTestSuite) TestUpdateGithubCredentialsFailIfWrongCredentialTypeIs _, err = s.db.UpdateGithubCredentials(ctx, credsApp.ID, updateCredParams) s.Require().Error(err) s.Require().ErrorIs(err, runnerErrors.ErrBadRequest) - s.Require().EqualError(err, "updating github credentials: cannot update PAT credentials for app: invalid request") + s.Require().EqualError(err, "error updating github credentials: cannot update PAT credentials for app: invalid request") } func (s *GithubTestSuite) TestUpdateCredentialsFailsForNonExistingCredentials() { diff --git a/database/sql/instances.go b/database/sql/instances.go index c6c2d204..92194c5e 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -17,10 +17,11 @@ package sql import ( "context" "encoding/json" + "errors" + "fmt" "log/slog" "github.com/google/uuid" - "github.com/pkg/errors" "gorm.io/datatypes" "gorm.io/gorm" "gorm.io/gorm/clause" @@ -33,7 +34,7 @@ import ( func (s *sqlDatabase) CreateInstance(_ context.Context, poolID string, param params.CreateInstanceParams) (instance params.Instance, err error) { pool, err := s.getPoolByID(s.conn, poolID) if err != nil { - return params.Instance{}, errors.Wrap(err, "fetching pool") + return params.Instance{}, fmt.Errorf("error fetching pool: %w", err) } defer func() { @@ -46,7 +47,7 @@ func (s *sqlDatabase) CreateInstance(_ context.Context, poolID string, param par if len(param.AditionalLabels) > 0 { labels, err = json.Marshal(param.AditionalLabels) if err != nil { - return params.Instance{}, errors.Wrap(err, "marshalling labels") + return params.Instance{}, fmt.Errorf("error marshalling labels: %w", err) } } @@ -54,7 +55,7 @@ func (s *sqlDatabase) CreateInstance(_ context.Context, poolID string, param 
par if len(param.JitConfiguration) > 0 { secret, err = s.marshalAndSeal(param.JitConfiguration) if err != nil { - return params.Instance{}, errors.Wrap(err, "marshalling jit config") + return params.Instance{}, fmt.Errorf("error marshalling jit config: %w", err) } } @@ -74,7 +75,7 @@ func (s *sqlDatabase) CreateInstance(_ context.Context, poolID string, param par } q := s.conn.Create(&newInstance) if q.Error != nil { - return params.Instance{}, errors.Wrap(q.Error, "creating instance") + return params.Instance{}, fmt.Errorf("error creating instance: %w", q.Error) } return s.sqlToParamsInstance(newInstance) @@ -83,7 +84,7 @@ func (s *sqlDatabase) CreateInstance(_ context.Context, poolID string, param par func (s *sqlDatabase) getPoolInstanceByName(poolID string, instanceName string) (Instance, error) { pool, err := s.getPoolByID(s.conn, poolID) if err != nil { - return Instance{}, errors.Wrap(err, "fetching pool") + return Instance{}, fmt.Errorf("error fetching pool: %w", err) } var instance Instance @@ -93,9 +94,9 @@ func (s *sqlDatabase) getPoolInstanceByName(poolID string, instanceName string) First(&instance) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { - return Instance{}, errors.Wrap(runnerErrors.ErrNotFound, "fetching pool instance by name") + return Instance{}, fmt.Errorf("error fetching pool instance by name: %w", runnerErrors.ErrNotFound) } - return Instance{}, errors.Wrap(q.Error, "fetching pool instance by name") + return Instance{}, fmt.Errorf("error fetching pool instance by name: %w", q.Error) } instance.Pool = pool @@ -119,9 +120,9 @@ func (s *sqlDatabase) getInstanceByName(_ context.Context, instanceName string, First(&instance) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { - return Instance{}, errors.Wrap(runnerErrors.ErrNotFound, "fetching instance by name") + return Instance{}, fmt.Errorf("error fetching instance by name: %w", runnerErrors.ErrNotFound) } - return Instance{}, errors.Wrap(q.Error, "fetching instance by name") + return Instance{}, fmt.Errorf("error fetching instance by name: %w", q.Error) } return instance, nil } @@ -129,7 +130,7 @@ func (s *sqlDatabase) getInstanceByName(_ context.Context, instanceName string, func (s *sqlDatabase) GetPoolInstanceByName(_ context.Context, poolID string, instanceName string) (params.Instance, error) { instance, err := s.getPoolInstanceByName(poolID, instanceName) if err != nil { - return params.Instance{}, errors.Wrap(err, "fetching instance") + return params.Instance{}, fmt.Errorf("error fetching instance: %w", err) } return s.sqlToParamsInstance(instance) @@ -138,7 +139,7 @@ func (s *sqlDatabase) GetPoolInstanceByName(_ context.Context, poolID string, in func (s *sqlDatabase) GetInstanceByName(ctx context.Context, instanceName string) (params.Instance, error) { instance, err := s.getInstanceByName(ctx, instanceName, "StatusMessages", "Pool", "ScaleSet") if err != nil { - return params.Instance{}, errors.Wrap(err, "fetching instance") + return params.Instance{}, fmt.Errorf("error fetching instance: %w", err) } return s.sqlToParamsInstance(instance) @@ -150,7 +151,7 @@ func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceN if errors.Is(err, runnerErrors.ErrNotFound) { return nil } - return errors.Wrap(err, "deleting instance") + return fmt.Errorf("error deleting instance: %w", err) } defer func() { @@ -182,7 +183,7 @@ func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceN if errors.Is(q.Error, gorm.ErrRecordNotFound) { return 
nil } - return errors.Wrap(q.Error, "deleting instance") + return fmt.Errorf("error deleting instance: %w", q.Error) } return nil } @@ -193,7 +194,7 @@ func (s *sqlDatabase) DeleteInstanceByName(ctx context.Context, instanceName str if errors.Is(err, runnerErrors.ErrNotFound) { return nil } - return errors.Wrap(err, "deleting instance") + return fmt.Errorf("error deleting instance: %w", err) } defer func() { @@ -224,7 +225,7 @@ func (s *sqlDatabase) DeleteInstanceByName(ctx context.Context, instanceName str if errors.Is(q.Error, gorm.ErrRecordNotFound) { return nil } - return errors.Wrap(q.Error, "deleting instance") + return fmt.Errorf("error deleting instance: %w", q.Error) } return nil } @@ -232,7 +233,7 @@ func (s *sqlDatabase) DeleteInstanceByName(ctx context.Context, instanceName str func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string, event params.EventType, eventLevel params.EventLevel, statusMessage string) error { instance, err := s.getInstanceByName(ctx, instanceName) if err != nil { - return errors.Wrap(err, "updating instance") + return fmt.Errorf("error updating instance: %w", err) } msg := InstanceStatusUpdate{ @@ -242,7 +243,7 @@ func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string, } if err := s.conn.Model(&instance).Association("StatusMessages").Append(&msg); err != nil { - return errors.Wrap(err, "adding status message") + return fmt.Errorf("error adding status message: %w", err) } return nil } @@ -250,7 +251,7 @@ func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string, func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, param params.UpdateInstanceParams) (params.Instance, error) { instance, err := s.getInstanceByName(ctx, instanceName, "Pool", "ScaleSet") if err != nil { - return params.Instance{}, errors.Wrap(err, "updating instance") + return params.Instance{}, fmt.Errorf("error updating instance: %w", err) } if param.AgentID != 0 { @@ -287,7 +288,7 @@ func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, p if param.JitConfiguration != nil { secret, err := s.marshalAndSeal(param.JitConfiguration) if err != nil { - return params.Instance{}, errors.Wrap(err, "marshalling jit config") + return params.Instance{}, fmt.Errorf("error marshalling jit config: %w", err) } instance.JitConfiguration = secret } @@ -296,7 +297,7 @@ func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, p q := s.conn.Save(&instance) if q.Error != nil { - return params.Instance{}, errors.Wrap(q.Error, "updating instance") + return params.Instance{}, fmt.Errorf("error updating instance: %w", q.Error) } if len(param.Addresses) > 0 { @@ -308,12 +309,12 @@ func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, p }) } if err := s.conn.Model(&instance).Association("Addresses").Replace(addrs); err != nil { - return params.Instance{}, errors.Wrap(err, "updating addresses") + return params.Instance{}, fmt.Errorf("error updating addresses: %w", err) } } inst, err := s.sqlToParamsInstance(instance) if err != nil { - return params.Instance{}, errors.Wrap(err, "converting instance") + return params.Instance{}, fmt.Errorf("error converting instance: %w", err) } s.sendNotify(common.InstanceEntityType, common.UpdateOperation, inst) return inst, nil @@ -322,7 +323,7 @@ func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, p func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]params.Instance, 
error) { u, err := uuid.Parse(poolID) if err != nil { - return nil, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return nil, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } var instances []Instance @@ -332,14 +333,14 @@ func (s *sqlDatabase) ListPoolInstances(_ context.Context, poolID string) ([]par Where("pool_id = ?", u) if err := query.Find(&instances); err.Error != nil { - return nil, errors.Wrap(err.Error, "fetching instances") + return nil, fmt.Errorf("error fetching instances: %w", err.Error) } ret := make([]params.Instance, len(instances)) for idx, inst := range instances { ret[idx], err = s.sqlToParamsInstance(inst) if err != nil { - return nil, errors.Wrap(err, "converting instance") + return nil, fmt.Errorf("error converting instance: %w", err) } } return ret, nil @@ -354,14 +355,14 @@ func (s *sqlDatabase) ListAllInstances(_ context.Context) ([]params.Instance, er Preload("Job"). Find(&instances) if q.Error != nil { - return nil, errors.Wrap(q.Error, "fetching instances") + return nil, fmt.Errorf("error fetching instances: %w", q.Error) } ret := make([]params.Instance, len(instances)) var err error for idx, instance := range instances { ret[idx], err = s.sqlToParamsInstance(instance) if err != nil { - return nil, errors.Wrap(err, "converting instance") + return nil, fmt.Errorf("error converting instance: %w", err) } } return ret, nil @@ -370,13 +371,13 @@ func (s *sqlDatabase) ListAllInstances(_ context.Context) ([]params.Instance, er func (s *sqlDatabase) PoolInstanceCount(_ context.Context, poolID string) (int64, error) { pool, err := s.getPoolByID(s.conn, poolID) if err != nil { - return 0, errors.Wrap(err, "fetching pool") + return 0, fmt.Errorf("error fetching pool: %w", err) } var cnt int64 q := s.conn.Model(&Instance{}).Where("pool_id = ?", pool.ID).Count(&cnt) if q.Error != nil { - return 0, errors.Wrap(q.Error, "fetching instance count") + return 0, fmt.Errorf("error fetching instance count: %w", q.Error) } return cnt, nil } diff --git a/database/sql/instances_test.go b/database/sql/instances_test.go index c70e35dd..c6093327 100644 --- a/database/sql/instances_test.go +++ b/database/sql/instances_test.go @@ -210,7 +210,7 @@ func (s *InstancesTestSuite) TestCreateInstance() { func (s *InstancesTestSuite) TestCreateInstanceInvalidPoolID() { _, err := s.Store.CreateInstance(s.adminCtx, "dummy-pool-id", params.CreateInstanceParams{}) - s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching pool: error parsing id: invalid request", err.Error()) } func (s *InstancesTestSuite) TestCreateInstanceDBCreateErr() { @@ -233,7 +233,7 @@ func (s *InstancesTestSuite) TestCreateInstanceDBCreateErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("creating instance: mocked insert instance error", err.Error()) + s.Require().Equal("error creating instance: mocked insert instance error", err.Error()) } func (s *InstancesTestSuite) TestGetPoolInstanceByName() { @@ -252,7 +252,7 @@ func (s *InstancesTestSuite) TestGetPoolInstanceByName() { func (s *InstancesTestSuite) TestGetPoolInstanceByNameNotFound() { _, err := s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, "not-existent-instance-name") - s.Require().Equal("fetching instance: fetching pool instance by name: not found", err.Error()) + s.Require().Equal("error fetching instance: error fetching pool instance by name: not found", err.Error()) } func (s *InstancesTestSuite) TestGetInstanceByName() { @@ -271,7 
+271,7 @@ func (s *InstancesTestSuite) TestGetInstanceByName() { func (s *InstancesTestSuite) TestGetInstanceByNameFetchInstanceFailed() { _, err := s.Store.GetInstanceByName(s.adminCtx, "not-existent-instance-name") - s.Require().Equal("fetching instance: fetching instance by name: not found", err.Error()) + s.Require().Equal("error fetching instance: error fetching instance by name: not found", err.Error()) } func (s *InstancesTestSuite) TestDeleteInstance() { @@ -282,7 +282,7 @@ func (s *InstancesTestSuite) TestDeleteInstance() { s.Require().Nil(err) _, err = s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) - s.Require().Equal("fetching instance: fetching pool instance by name: not found", err.Error()) + s.Require().Equal("error fetching instance: error fetching pool instance by name: not found", err.Error()) err = s.Store.DeleteInstance(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) s.Require().Nil(err) @@ -296,7 +296,7 @@ func (s *InstancesTestSuite) TestDeleteInstanceByName() { s.Require().Nil(err) _, err = s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) - s.Require().Equal("fetching instance: fetching pool instance by name: not found", err.Error()) + s.Require().Equal("error fetching instance: error fetching pool instance by name: not found", err.Error()) err = s.Store.DeleteInstanceByName(s.adminCtx, storeInstance.Name) s.Require().Nil(err) @@ -305,7 +305,7 @@ func (s *InstancesTestSuite) TestDeleteInstanceByName() { func (s *InstancesTestSuite) TestDeleteInstanceInvalidPoolID() { err := s.Store.DeleteInstance(s.adminCtx, "dummy-pool-id", "dummy-instance-name") - s.Require().Equal("deleting instance: fetching pool: parsing id: invalid request", err.Error()) + s.Require().Equal("error deleting instance: error fetching pool: error parsing id: invalid request", err.Error()) } func (s *InstancesTestSuite) TestDeleteInstanceDBRecordNotFoundErr() { @@ -380,7 +380,7 @@ func (s *InstancesTestSuite) TestDeleteInstanceDBDeleteErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("deleting instance: mocked delete instance error", err.Error()) + s.Require().Equal("error deleting instance: mocked delete instance error", err.Error()) } func (s *InstancesTestSuite) TestAddInstanceEvent() { @@ -431,7 +431,7 @@ func (s *InstancesTestSuite) TestAddInstanceEventDBUpdateErr() { err := s.StoreSQLMocked.AddInstanceEvent(s.adminCtx, instance.Name, params.StatusEvent, params.EventInfo, statusMsg) s.Require().NotNil(err) - s.Require().Equal("adding status message: mocked add status message error", err.Error()) + s.Require().Equal("error adding status message: mocked add status message error", err.Error()) s.assertSQLMockExpectations() } @@ -476,7 +476,7 @@ func (s *InstancesTestSuite) TestUpdateInstanceDBUpdateInstanceErr() { _, err := s.StoreSQLMocked.UpdateInstance(s.adminCtx, instance.Name, s.Fixtures.UpdateInstanceParams) s.Require().NotNil(err) - s.Require().Equal("updating instance: mocked update instance error", err.Error()) + s.Require().Equal("error updating instance: mocked update instance error", err.Error()) s.assertSQLMockExpectations() } @@ -522,7 +522,7 @@ func (s *InstancesTestSuite) TestUpdateInstanceDBUpdateAddressErr() { _, err := s.StoreSQLMocked.UpdateInstance(s.adminCtx, instance.Name, s.Fixtures.UpdateInstanceParams) s.Require().NotNil(err) - s.Require().Equal("updating addresses: update addresses mock error", err.Error()) + s.Require().Equal("error updating addresses: update addresses 
mock error", err.Error()) s.assertSQLMockExpectations() } @@ -536,7 +536,7 @@ func (s *InstancesTestSuite) TestListPoolInstances() { func (s *InstancesTestSuite) TestListPoolInstancesInvalidPoolID() { _, err := s.Store.ListPoolInstances(s.adminCtx, "dummy-pool-id") - s.Require().Equal("parsing id: invalid request", err.Error()) + s.Require().Equal("error parsing id: invalid request", err.Error()) } func (s *InstancesTestSuite) TestListAllInstances() { @@ -555,7 +555,7 @@ func (s *InstancesTestSuite) TestListAllInstancesDBFetchErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("fetching instances: fetch instances mock error", err.Error()) + s.Require().Equal("error fetching instances: fetch instances mock error", err.Error()) } func (s *InstancesTestSuite) TestPoolInstanceCount() { @@ -568,7 +568,7 @@ func (s *InstancesTestSuite) TestPoolInstanceCount() { func (s *InstancesTestSuite) TestPoolInstanceCountInvalidPoolID() { _, err := s.Store.PoolInstanceCount(s.adminCtx, "dummy-pool-id") - s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching pool: error parsing id: invalid request", err.Error()) } func (s *InstancesTestSuite) TestPoolInstanceCountDBCountErr() { @@ -587,7 +587,7 @@ func (s *InstancesTestSuite) TestPoolInstanceCountDBCountErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("fetching instance count: count mock error", err.Error()) + s.Require().Equal("error fetching instance count: count mock error", err.Error()) } func TestInstTestSuite(t *testing.T) { diff --git a/database/sql/jobs.go b/database/sql/jobs.go index ff19394f..f4d24e42 100644 --- a/database/sql/jobs.go +++ b/database/sql/jobs.go @@ -17,10 +17,11 @@ package sql import ( "context" "encoding/json" + "errors" + "fmt" "log/slog" "github.com/google/uuid" - "github.com/pkg/errors" "gorm.io/gorm" "gorm.io/gorm/clause" @@ -35,7 +36,7 @@ func sqlWorkflowJobToParamsJob(job WorkflowJob) (params.Job, error) { labels := []string{} if job.Labels != nil { if err := json.Unmarshal(job.Labels, &labels); err != nil { - return params.Job{}, errors.Wrap(err, "unmarshaling labels") + return params.Job{}, fmt.Errorf("error unmarshaling labels: %w", err) } } @@ -73,7 +74,7 @@ func sqlWorkflowJobToParamsJob(job WorkflowJob) (params.Job, error) { func (s *sqlDatabase) paramsJobToWorkflowJob(ctx context.Context, job params.Job) (WorkflowJob, error) { asJSON, err := json.Marshal(job.Labels) if err != nil { - return WorkflowJob{}, errors.Wrap(err, "marshaling labels") + return WorkflowJob{}, fmt.Errorf("error marshaling labels: %w", err) } workflofJob := WorkflowJob{ @@ -118,11 +119,11 @@ func (s *sqlDatabase) DeleteJob(_ context.Context, jobID int64) (err error) { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return nil } - return errors.Wrap(q.Error, "fetching job") + return fmt.Errorf("error fetching job: %w", q.Error) } removedJob, err := sqlWorkflowJobToParamsJob(workflowJob) if err != nil { - return errors.Wrap(err, "converting job") + return fmt.Errorf("error converting job: %w", err) } defer func() { @@ -137,7 +138,7 @@ func (s *sqlDatabase) DeleteJob(_ context.Context, jobID int64) (err error) { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return nil } - return errors.Wrap(q.Error, "deleting job") + return fmt.Errorf("error deleting job: %w", q.Error) } return nil } @@ -145,7 +146,7 @@ func (s *sqlDatabase) DeleteJob(_ context.Context, jobID int64) (err error) { func (s *sqlDatabase) LockJob(_ context.Context, 
jobID int64, entityID string) error { entityUUID, err := uuid.Parse(entityID) if err != nil { - return errors.Wrap(err, "parsing entity id") + return fmt.Errorf("error parsing entity id: %w", err) } var workflowJob WorkflowJob q := s.conn.Preload("Instance").Where("id = ?", jobID).First(&workflowJob) @@ -154,7 +155,7 @@ func (s *sqlDatabase) LockJob(_ context.Context, jobID int64, entityID string) e if errors.Is(q.Error, gorm.ErrRecordNotFound) { return runnerErrors.ErrNotFound } - return errors.Wrap(q.Error, "fetching job") + return fmt.Errorf("error fetching job: %w", q.Error) } if workflowJob.LockedBy.String() == entityID { @@ -169,12 +170,12 @@ func (s *sqlDatabase) LockJob(_ context.Context, jobID int64, entityID string) e workflowJob.LockedBy = entityUUID if err := s.conn.Save(&workflowJob).Error; err != nil { - return errors.Wrap(err, "saving job") + return fmt.Errorf("error saving job: %w", err) } asParams, err := sqlWorkflowJobToParamsJob(workflowJob) if err != nil { - return errors.Wrap(err, "converting job") + return fmt.Errorf("error converting job: %w", err) } s.sendNotify(common.JobEntityType, common.UpdateOperation, asParams) @@ -189,7 +190,7 @@ func (s *sqlDatabase) BreakLockJobIsQueued(_ context.Context, jobID int64) (err if errors.Is(q.Error, gorm.ErrRecordNotFound) { return nil } - return errors.Wrap(q.Error, "fetching job") + return fmt.Errorf("error fetching job: %w", q.Error) } if workflowJob.LockedBy == uuid.Nil { @@ -199,11 +200,11 @@ func (s *sqlDatabase) BreakLockJobIsQueued(_ context.Context, jobID int64) (err workflowJob.LockedBy = uuid.Nil if err := s.conn.Save(&workflowJob).Error; err != nil { - return errors.Wrap(err, "saving job") + return fmt.Errorf("error saving job: %w", err) } asParams, err := sqlWorkflowJobToParamsJob(workflowJob) if err != nil { - return errors.Wrap(err, "converting job") + return fmt.Errorf("error converting job: %w", err) } s.sendNotify(common.JobEntityType, common.UpdateOperation, asParams) return nil @@ -217,7 +218,7 @@ func (s *sqlDatabase) UnlockJob(_ context.Context, jobID int64, entityID string) if errors.Is(q.Error, gorm.ErrRecordNotFound) { return runnerErrors.ErrNotFound } - return errors.Wrap(q.Error, "fetching job") + return fmt.Errorf("error fetching job: %w", q.Error) } if workflowJob.LockedBy == uuid.Nil { @@ -231,12 +232,12 @@ func (s *sqlDatabase) UnlockJob(_ context.Context, jobID int64, entityID string) workflowJob.LockedBy = uuid.Nil if err := s.conn.Save(&workflowJob).Error; err != nil { - return errors.Wrap(err, "saving job") + return fmt.Errorf("error saving job: %w", err) } asParams, err := sqlWorkflowJobToParamsJob(workflowJob) if err != nil { - return errors.Wrap(err, "converting job") + return fmt.Errorf("error converting job: %w", err) } s.sendNotify(common.JobEntityType, common.UpdateOperation, asParams) return nil @@ -256,7 +257,7 @@ func (s *sqlDatabase) CreateOrUpdateJob(ctx context.Context, job params.Job) (pa if q.Error != nil { if !errors.Is(q.Error, gorm.ErrRecordNotFound) { - return params.Job{}, errors.Wrap(q.Error, "fetching job") + return params.Job{}, fmt.Errorf("error fetching job: %w", q.Error) } } var operation common.OperationType @@ -302,23 +303,23 @@ func (s *sqlDatabase) CreateOrUpdateJob(ctx context.Context, job params.Job) (pa workflowJob.EnterpriseID = job.EnterpriseID } if err := s.conn.Save(&workflowJob).Error; err != nil { - return params.Job{}, errors.Wrap(err, "saving job") + return params.Job{}, fmt.Errorf("error saving job: %w", err) } } else { operation = 
common.CreateOperation workflowJob, err = s.paramsJobToWorkflowJob(ctx, job) if err != nil { - return params.Job{}, errors.Wrap(err, "converting job") + return params.Job{}, fmt.Errorf("error converting job: %w", err) } if err := s.conn.Create(&workflowJob).Error; err != nil { - return params.Job{}, errors.Wrap(err, "creating job") + return params.Job{}, fmt.Errorf("error creating job: %w", err) } } asParams, err := sqlWorkflowJobToParamsJob(workflowJob) if err != nil { - return params.Job{}, errors.Wrap(err, "converting job") + return params.Job{}, fmt.Errorf("error converting job: %w", err) } s.sendNotify(common.JobEntityType, operation, asParams) @@ -338,7 +339,7 @@ func (s *sqlDatabase) ListJobsByStatus(_ context.Context, status params.JobStatu for idx, job := range jobs { jobParam, err := sqlWorkflowJobToParamsJob(job) if err != nil { - return nil, errors.Wrap(err, "converting job") + return nil, fmt.Errorf("error converting job: %w", err) } ret[idx] = jobParam } @@ -379,7 +380,7 @@ func (s *sqlDatabase) ListEntityJobsByStatus(_ context.Context, entityType param for idx, job := range jobs { jobParam, err := sqlWorkflowJobToParamsJob(job) if err != nil { - return nil, errors.Wrap(err, "converting job") + return nil, fmt.Errorf("error converting job: %w", err) } ret[idx] = jobParam } @@ -401,7 +402,7 @@ func (s *sqlDatabase) ListAllJobs(_ context.Context) ([]params.Job, error) { for idx, job := range jobs { jobParam, err := sqlWorkflowJobToParamsJob(job) if err != nil { - return nil, errors.Wrap(err, "converting job") + return nil, fmt.Errorf("error converting job: %w", err) } ret[idx] = jobParam } diff --git a/database/sql/models.go b/database/sql/models.go index 8944dee1..d3cb044a 100644 --- a/database/sql/models.go +++ b/database/sql/models.go @@ -15,10 +15,10 @@ package sql import ( + "fmt" "time" "github.com/google/uuid" - "github.com/pkg/errors" "gorm.io/datatypes" "gorm.io/gorm" @@ -40,7 +40,7 @@ func (b *Base) BeforeCreate(_ *gorm.DB) error { } newID, err := uuid.NewRandom() if err != nil { - return errors.Wrap(err, "generating id") + return fmt.Errorf("error generating id: %w", err) } b.ID = newID return nil diff --git a/database/sql/organizations.go b/database/sql/organizations.go index 3b1a05fa..22be6272 100644 --- a/database/sql/organizations.go +++ b/database/sql/organizations.go @@ -16,11 +16,11 @@ package sql import ( "context" + "errors" "fmt" "log/slog" "github.com/google/uuid" - "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -35,7 +35,7 @@ func (s *sqlDatabase) CreateOrganization(ctx context.Context, name string, crede } secret, err := util.Seal([]byte(webhookSecret), []byte(s.cfg.Passphrase)) if err != nil { - return params.Organization{}, errors.Wrap(err, "encoding secret") + return params.Organization{}, fmt.Errorf("error encoding secret: %w", err) } defer func() { @@ -56,23 +56,23 @@ func (s *sqlDatabase) CreateOrganization(ctx context.Context, name string, crede case params.GiteaEndpointType: newOrg.GiteaCredentialsID = &credentials.ID default: - return errors.Wrap(runnerErrors.ErrBadRequest, "unsupported credentials type") + return fmt.Errorf("unsupported credentials type: %w", runnerErrors.ErrBadRequest) } newOrg.EndpointName = &credentials.Endpoint.Name q := tx.Create(&newOrg) if q.Error != nil { - return errors.Wrap(q.Error, "creating org") + return fmt.Errorf("error creating org: %w", q.Error) } return nil }) if err != nil { - return params.Organization{}, errors.Wrap(err, "creating org") + return 
params.Organization{}, fmt.Errorf("error creating org: %w", err) } ret, err := s.GetOrganizationByID(ctx, newOrg.ID.String()) if err != nil { - return params.Organization{}, errors.Wrap(err, "creating org") + return params.Organization{}, fmt.Errorf("error creating org: %w", err) } return ret, nil @@ -81,12 +81,12 @@ func (s *sqlDatabase) CreateOrganization(ctx context.Context, name string, crede func (s *sqlDatabase) GetOrganization(ctx context.Context, name, endpointName string) (params.Organization, error) { org, err := s.getOrg(ctx, name, endpointName) if err != nil { - return params.Organization{}, errors.Wrap(err, "fetching org") + return params.Organization{}, fmt.Errorf("error fetching org: %w", err) } param, err := s.sqlToCommonOrganization(org, true) if err != nil { - return params.Organization{}, errors.Wrap(err, "fetching org") + return params.Organization{}, fmt.Errorf("error fetching org: %w", err) } return param, nil @@ -110,7 +110,7 @@ func (s *sqlDatabase) ListOrganizations(_ context.Context, filter params.Organiz } q = q.Find(&orgs) if q.Error != nil { - return []params.Organization{}, errors.Wrap(q.Error, "fetching org from database") + return []params.Organization{}, fmt.Errorf("error fetching org from database: %w", q.Error) } ret := make([]params.Organization, len(orgs)) @@ -118,7 +118,7 @@ func (s *sqlDatabase) ListOrganizations(_ context.Context, filter params.Organiz var err error ret[idx], err = s.sqlToCommonOrganization(val, true) if err != nil { - return nil, errors.Wrap(err, "fetching org") + return nil, fmt.Errorf("error fetching org: %w", err) } } @@ -128,7 +128,7 @@ func (s *sqlDatabase) ListOrganizations(_ context.Context, filter params.Organiz func (s *sqlDatabase) DeleteOrganization(ctx context.Context, orgID string) (err error) { org, err := s.getOrgByID(ctx, s.conn, orgID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") if err != nil { - return errors.Wrap(err, "fetching org") + return fmt.Errorf("error fetching org: %w", err) } defer func(org Organization) { @@ -144,7 +144,7 @@ func (s *sqlDatabase) DeleteOrganization(ctx context.Context, orgID string) (err q := s.conn.Unscoped().Delete(&org) if q.Error != nil && !errors.Is(q.Error, gorm.ErrRecordNotFound) { - return errors.Wrap(q.Error, "deleting org") + return fmt.Errorf("error deleting org: %w", q.Error) } return nil @@ -162,23 +162,23 @@ func (s *sqlDatabase) UpdateOrganization(ctx context.Context, orgID string, para var err error org, err = s.getOrgByID(ctx, tx, orgID) if err != nil { - return errors.Wrap(err, "fetching org") + return fmt.Errorf("error fetching org: %w", err) } if org.EndpointName == nil { - return errors.Wrap(runnerErrors.ErrUnprocessable, "org has no endpoint") + return fmt.Errorf("error org has no endpoint: %w", runnerErrors.ErrUnprocessable) } if param.CredentialsName != "" { creds, err = s.getGithubCredentialsByName(ctx, tx, param.CredentialsName, false) if err != nil { - return errors.Wrap(err, "fetching credentials") + return fmt.Errorf("error fetching credentials: %w", err) } if creds.EndpointName == nil { - return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") + return fmt.Errorf("error credentials have no endpoint: %w", runnerErrors.ErrUnprocessable) } if *creds.EndpointName != *org.EndpointName { - return errors.Wrap(runnerErrors.ErrBadRequest, "endpoint mismatch") + return fmt.Errorf("error endpoint mismatch: %w", runnerErrors.ErrBadRequest) } org.CredentialsID = &creds.ID } @@ -197,22 
+197,22 @@ func (s *sqlDatabase) UpdateOrganization(ctx context.Context, orgID string, para q := tx.Save(&org) if q.Error != nil { - return errors.Wrap(q.Error, "saving org") + return fmt.Errorf("error saving org: %w", q.Error) } return nil }) if err != nil { - return params.Organization{}, errors.Wrap(err, "saving org") + return params.Organization{}, fmt.Errorf("error saving org: %w", err) } org, err = s.getOrgByID(ctx, s.conn, orgID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") if err != nil { - return params.Organization{}, errors.Wrap(err, "updating enterprise") + return params.Organization{}, fmt.Errorf("error updating enterprise: %w", err) } paramOrg, err = s.sqlToCommonOrganization(org, true) if err != nil { - return params.Organization{}, errors.Wrap(err, "saving org") + return params.Organization{}, fmt.Errorf("error saving org: %w", err) } return paramOrg, nil } @@ -229,12 +229,12 @@ func (s *sqlDatabase) GetOrganizationByID(ctx context.Context, orgID string) (pa } org, err := s.getOrgByID(ctx, s.conn, orgID, preloadList...) if err != nil { - return params.Organization{}, errors.Wrap(err, "fetching org") + return params.Organization{}, fmt.Errorf("error fetching org: %w", err) } param, err := s.sqlToCommonOrganization(org, true) if err != nil { - return params.Organization{}, errors.Wrap(err, "fetching org") + return params.Organization{}, fmt.Errorf("error fetching org: %w", err) } return param, nil } @@ -242,7 +242,7 @@ func (s *sqlDatabase) GetOrganizationByID(ctx context.Context, orgID string) (pa func (s *sqlDatabase) getOrgByID(_ context.Context, db *gorm.DB, id string, preload ...string) (Organization, error) { u, err := uuid.Parse(id) if err != nil { - return Organization{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return Organization{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } var org Organization @@ -258,7 +258,7 @@ func (s *sqlDatabase) getOrgByID(_ context.Context, db *gorm.DB, id string, prel if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Organization{}, runnerErrors.ErrNotFound } - return Organization{}, errors.Wrap(q.Error, "fetching org from database") + return Organization{}, fmt.Errorf("error fetching org from database: %w", q.Error) } return org, nil } @@ -277,7 +277,7 @@ func (s *sqlDatabase) getOrg(_ context.Context, name, endpointName string) (Orga if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Organization{}, runnerErrors.ErrNotFound } - return Organization{}, errors.Wrap(q.Error, "fetching org from database") + return Organization{}, fmt.Errorf("error fetching org from database: %w", q.Error) } return org, nil } diff --git a/database/sql/organizations_test.go b/database/sql/organizations_test.go index df876ba1..651c2927 100644 --- a/database/sql/organizations_test.go +++ b/database/sql/organizations_test.go @@ -251,7 +251,7 @@ func (s *OrgTestSuite) TestCreateOrganizationInvalidForgeType() { s.Fixtures.CreateOrgParams.WebhookSecret, params.PoolBalancerTypeRoundRobin) s.Require().NotNil(err) - s.Require().Equal("creating org: unsupported credentials type: invalid request", err.Error()) + s.Require().Equal("error creating org: unsupported credentials type: invalid request", err.Error()) } func (s *OrgTestSuite) TestCreateOrganizationInvalidDBPassphrase() { @@ -275,7 +275,7 @@ func (s *OrgTestSuite) TestCreateOrganizationInvalidDBPassphrase() { params.PoolBalancerTypeRoundRobin) s.Require().NotNil(err) - s.Require().Equal("encoding secret: invalid 
passphrase length (expected length 32 characters)", err.Error()) + s.Require().Equal("error encoding secret: invalid passphrase length (expected length 32 characters)", err.Error()) } func (s *OrgTestSuite) TestCreateOrganizationDBCreateErr() { @@ -293,7 +293,7 @@ func (s *OrgTestSuite) TestCreateOrganizationDBCreateErr() { params.PoolBalancerTypeRoundRobin) s.Require().NotNil(err) - s.Require().Equal("creating org: creating org: creating org mock error", err.Error()) + s.Require().Equal("error creating org: error creating org: creating org mock error", err.Error()) s.assertSQLMockExpectations() } @@ -316,7 +316,7 @@ func (s *OrgTestSuite) TestGetOrganizationNotFound() { _, err := s.Store.GetOrganization(s.adminCtx, "dummy-name", "github.com") s.Require().NotNil(err) - s.Require().Equal("fetching org: not found", err.Error()) + s.Require().Equal("error fetching org: not found", err.Error()) } func (s *OrgTestSuite) TestGetOrganizationDBDecryptingErr() { @@ -328,7 +328,7 @@ func (s *OrgTestSuite) TestGetOrganizationDBDecryptingErr() { _, err := s.StoreSQLMocked.GetOrganization(s.adminCtx, s.Fixtures.Orgs[0].Name, s.Fixtures.Orgs[0].Endpoint.Name) s.Require().NotNil(err) - s.Require().Equal("fetching org: missing secret", err.Error()) + s.Require().Equal("error fetching org: missing secret", err.Error()) s.assertSQLMockExpectations() } @@ -404,7 +404,7 @@ func (s *OrgTestSuite) TestListOrganizationsDBFetchErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("fetching org from database: fetching user from database mock error", err.Error()) + s.Require().Equal("error fetching org from database: fetching user from database mock error", err.Error()) } func (s *OrgTestSuite) TestDeleteOrganization() { @@ -413,14 +413,14 @@ func (s *OrgTestSuite) TestDeleteOrganization() { s.Require().Nil(err) _, err = s.Store.GetOrganizationByID(s.adminCtx, s.Fixtures.Orgs[0].ID) s.Require().NotNil(err) - s.Require().Equal("fetching org: not found", err.Error()) + s.Require().Equal("error fetching org: not found", err.Error()) } func (s *OrgTestSuite) TestDeleteOrganizationInvalidOrgID() { err := s.Store.DeleteOrganization(s.adminCtx, "dummy-org-id") s.Require().NotNil(err) - s.Require().Equal("fetching org: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching org: error parsing id: invalid request", err.Error()) } func (s *OrgTestSuite) TestDeleteOrganizationDBDeleteErr() { @@ -439,7 +439,7 @@ func (s *OrgTestSuite) TestDeleteOrganizationDBDeleteErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("deleting org: mocked delete org error", err.Error()) + s.Require().Equal("error deleting org: mocked delete org error", err.Error()) } func (s *OrgTestSuite) TestUpdateOrganization() { @@ -454,7 +454,7 @@ func (s *OrgTestSuite) TestUpdateOrganizationInvalidOrgID() { _, err := s.Store.UpdateOrganization(s.adminCtx, "dummy-org-id", s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("saving org: fetching org: parsing id: invalid request", err.Error()) + s.Require().Equal("error saving org: error fetching org: error parsing id: invalid request", err.Error()) } func (s *OrgTestSuite) TestUpdateOrganizationDBEncryptErr() { @@ -479,7 +479,7 @@ func (s *OrgTestSuite) TestUpdateOrganizationDBEncryptErr() { _, err := s.StoreSQLMocked.UpdateOrganization(s.adminCtx, s.Fixtures.Orgs[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("saving org: saving org: failed to encrypt string: invalid 
passphrase length (expected length 32 characters)", err.Error()) + s.Require().Equal("error saving org: saving org: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error()) s.assertSQLMockExpectations() } @@ -507,7 +507,7 @@ func (s *OrgTestSuite) TestUpdateOrganizationDBSaveErr() { _, err := s.StoreSQLMocked.UpdateOrganization(s.adminCtx, s.Fixtures.Orgs[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("saving org: saving org: saving org mock error", err.Error()) + s.Require().Equal("error saving org: error saving org: saving org mock error", err.Error()) s.assertSQLMockExpectations() } @@ -535,7 +535,7 @@ func (s *OrgTestSuite) TestUpdateOrganizationDBDecryptingErr() { _, err := s.StoreSQLMocked.UpdateOrganization(s.adminCtx, s.Fixtures.Orgs[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("saving org: saving org: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error()) + s.Require().Equal("error saving org: saving org: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error()) s.assertSQLMockExpectations() } @@ -550,7 +550,7 @@ func (s *OrgTestSuite) TestGetOrganizationByIDInvalidOrgID() { _, err := s.Store.GetOrganizationByID(s.adminCtx, "dummy-org-id") s.Require().NotNil(err) - s.Require().Equal("fetching org: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching org: error parsing id: invalid request", err.Error()) } func (s *OrgTestSuite) TestGetOrganizationByIDDBDecryptingErr() { @@ -571,7 +571,7 @@ func (s *OrgTestSuite) TestGetOrganizationByIDDBDecryptingErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("fetching org: missing secret", err.Error()) + s.Require().Equal("error fetching org: missing secret", err.Error()) } func (s *OrgTestSuite) TestCreateOrganizationPool() { @@ -610,7 +610,7 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolInvalidOrgID() { _, err := s.Store.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("parsing id: invalid request", err.Error()) + s.Require().Equal("error parsing id: invalid request", err.Error()) } func (s *OrgTestSuite) TestCreateOrganizationPoolDBFetchTagErr() { @@ -628,7 +628,7 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolDBFetchTagErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("creating tag: fetching tag from database: mocked fetching tag error", err.Error()) + s.Require().Equal("error creating tag: error fetching tag from database: mocked fetching tag error", err.Error()) s.assertSQLMockExpectations() } @@ -656,7 +656,7 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolDBAddingPoolErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("creating pool: mocked adding pool error", err.Error()) + s.Require().Equal("error creating pool: mocked adding pool error", err.Error()) s.assertSQLMockExpectations() } @@ -687,7 +687,7 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolDBSaveTagErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("associating tags: mocked saving tag error", err.Error()) + s.Require().Equal("error associating tags: mocked saving tag error", 
err.Error()) s.assertSQLMockExpectations() } @@ -728,7 +728,7 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolDBFetchPoolErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("fetching pool: not found", err.Error()) + s.Require().Equal("error fetching pool: not found", err.Error()) s.assertSQLMockExpectations() } @@ -758,7 +758,7 @@ func (s *OrgTestSuite) TestListOrgPoolsInvalidOrgID() { _, err := s.Store.ListEntityPools(s.adminCtx, entity) s.Require().NotNil(err) - s.Require().Equal("fetching pools: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching pools: error parsing id: invalid request", err.Error()) } func (s *OrgTestSuite) TestGetOrganizationPool() { @@ -783,7 +783,7 @@ func (s *OrgTestSuite) TestGetOrganizationPoolInvalidOrgID() { _, err := s.Store.GetEntityPool(s.adminCtx, entity, "dummy-pool-id") s.Require().NotNil(err) - s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) + s.Require().Equal("fetching pool: error parsing id: invalid request", err.Error()) } func (s *OrgTestSuite) TestDeleteOrganizationPool() { @@ -798,7 +798,7 @@ func (s *OrgTestSuite) TestDeleteOrganizationPool() { s.Require().Nil(err) _, err = s.Store.GetEntityPool(s.adminCtx, entity, pool.ID) - s.Require().Equal("fetching pool: finding pool: not found", err.Error()) + s.Require().Equal("fetching pool: error finding pool: not found", err.Error()) } func (s *OrgTestSuite) TestDeleteOrganizationPoolInvalidOrgID() { @@ -809,7 +809,7 @@ func (s *OrgTestSuite) TestDeleteOrganizationPoolInvalidOrgID() { err := s.Store.DeleteEntityPool(s.adminCtx, entity, "dummy-pool-id") s.Require().NotNil(err) - s.Require().Equal("parsing id: invalid request", err.Error()) + s.Require().Equal("error parsing id: invalid request", err.Error()) } func (s *OrgTestSuite) TestDeleteOrganizationPoolDBDeleteErr() { @@ -831,7 +831,7 @@ func (s *OrgTestSuite) TestDeleteOrganizationPoolDBDeleteErr() { err = s.StoreSQLMocked.DeleteEntityPool(s.adminCtx, entity, pool.ID) s.Require().NotNil(err) - s.Require().Equal("removing pool: mocked deleting pool error", err.Error()) + s.Require().Equal("error removing pool: mocked deleting pool error", err.Error()) s.assertSQLMockExpectations() } @@ -866,7 +866,7 @@ func (s *OrgTestSuite) TestListOrgInstancesInvalidOrgID() { _, err := s.Store.ListEntityInstances(s.adminCtx, entity) s.Require().NotNil(err) - s.Require().Equal("fetching entity: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching entity: error parsing id: invalid request", err.Error()) } func (s *OrgTestSuite) TestUpdateOrganizationPool() { @@ -916,7 +916,7 @@ func (s *OrgTestSuite) TestUpdateOrganizationPoolInvalidOrgID() { _, err := s.Store.UpdateEntityPool(s.adminCtx, entity, "dummy-pool-id", s.Fixtures.UpdatePoolParams) s.Require().NotNil(err) - s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching pool: error parsing id: invalid request", err.Error()) } func TestOrgTestSuite(t *testing.T) { diff --git a/database/sql/pools.go b/database/sql/pools.go index 889cbc58..e873a150 100644 --- a/database/sql/pools.go +++ b/database/sql/pools.go @@ -16,10 +16,10 @@ package sql import ( "context" + "errors" "fmt" "github.com/google/uuid" - "github.com/pkg/errors" "gorm.io/datatypes" "gorm.io/gorm" @@ -48,7 +48,7 @@ func (s *sqlDatabase) ListAllPools(_ context.Context) ([]params.Pool, error) { Omit("extra_specs"). 
Find(&pools) if q.Error != nil { - return nil, errors.Wrap(q.Error, "fetching all pools") + return nil, fmt.Errorf("error fetching all pools: %w", q.Error) } ret := make([]params.Pool, len(pools)) @@ -56,7 +56,7 @@ func (s *sqlDatabase) ListAllPools(_ context.Context) ([]params.Pool, error) { for idx, val := range pools { ret[idx], err = s.sqlToCommonPool(val) if err != nil { - return nil, errors.Wrap(err, "converting pool") + return nil, fmt.Errorf("error converting pool: %w", err) } } return ret, nil @@ -75,7 +75,7 @@ func (s *sqlDatabase) GetPoolByID(_ context.Context, poolID string) (params.Pool } pool, err := s.getPoolByID(s.conn, poolID, preloadList...) if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching pool by ID") + return params.Pool{}, fmt.Errorf("error fetching pool by ID: %w", err) } return s.sqlToCommonPool(pool) } @@ -83,7 +83,7 @@ func (s *sqlDatabase) GetPoolByID(_ context.Context, poolID string) (params.Pool func (s *sqlDatabase) DeletePoolByID(_ context.Context, poolID string) (err error) { pool, err := s.getPoolByID(s.conn, poolID) if err != nil { - return errors.Wrap(err, "fetching pool by ID") + return fmt.Errorf("error fetching pool by ID: %w", err) } defer func() { @@ -93,7 +93,7 @@ func (s *sqlDatabase) DeletePoolByID(_ context.Context, poolID string) (err erro }() if q := s.conn.Unscoped().Delete(&pool); q.Error != nil { - return errors.Wrap(q.Error, "removing pool") + return fmt.Errorf("error removing pool: %w", q.Error) } return nil @@ -101,12 +101,12 @@ func (s *sqlDatabase) DeletePoolByID(_ context.Context, poolID string) (err erro func (s *sqlDatabase) getEntityPool(tx *gorm.DB, entityType params.ForgeEntityType, entityID, poolID string, preload ...string) (Pool, error) { if entityID == "" { - return Pool{}, errors.Wrap(runnerErrors.ErrBadRequest, "missing entity id") + return Pool{}, fmt.Errorf("error missing entity id: %w", runnerErrors.ErrBadRequest) } u, err := uuid.Parse(poolID) if err != nil { - return Pool{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return Pool{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } var fieldName string @@ -140,9 +140,9 @@ func (s *sqlDatabase) getEntityPool(tx *gorm.DB, entityType params.ForgeEntityTy First(&pool).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return Pool{}, errors.Wrap(runnerErrors.ErrNotFound, "finding pool") + return Pool{}, fmt.Errorf("error finding pool: %w", runnerErrors.ErrNotFound) } - return Pool{}, errors.Wrap(err, "fetching pool") + return Pool{}, fmt.Errorf("error fetching pool: %w", err) } return pool, nil @@ -150,11 +150,11 @@ func (s *sqlDatabase) getEntityPool(tx *gorm.DB, entityType params.ForgeEntityTy func (s *sqlDatabase) listEntityPools(tx *gorm.DB, entityType params.ForgeEntityType, entityID string, preload ...string) ([]Pool, error) { if _, err := uuid.Parse(entityID); err != nil { - return nil, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return nil, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } if err := s.hasGithubEntity(tx, entityType, entityID); err != nil { - return nil, errors.Wrap(err, "checking entity existence") + return nil, fmt.Errorf("error checking entity existence: %w", err) } var preloadEntity string @@ -191,7 +191,7 @@ func (s *sqlDatabase) listEntityPools(tx *gorm.DB, entityType params.ForgeEntity if errors.Is(err, gorm.ErrRecordNotFound) { return []Pool{}, nil } - return nil, errors.Wrap(err, "fetching pool") + return nil, fmt.Errorf("error fetching pool: %w", err) } 
return pools, nil @@ -203,7 +203,7 @@ func (s *sqlDatabase) findPoolByTags(id string, poolType params.ForgeEntityType, } u, err := uuid.Parse(id) if err != nil { - return nil, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return nil, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } var fieldName string @@ -233,7 +233,7 @@ func (s *sqlDatabase) findPoolByTags(id string, poolType params.ForgeEntityType, if errors.Is(q.Error, gorm.ErrRecordNotFound) { return nil, runnerErrors.ErrNotFound } - return nil, errors.Wrap(q.Error, "fetching pool") + return nil, fmt.Errorf("error fetching pool: %w", q.Error) } if len(pools) == 0 { @@ -244,7 +244,7 @@ func (s *sqlDatabase) findPoolByTags(id string, poolType params.ForgeEntityType, for idx, val := range pools { ret[idx], err = s.sqlToCommonPool(val) if err != nil { - return nil, errors.Wrap(err, "converting pool") + return nil, fmt.Errorf("error converting pool: %w", err) } } @@ -261,7 +261,7 @@ func (s *sqlDatabase) FindPoolsMatchingAllTags(_ context.Context, entityType par if errors.Is(err, runnerErrors.ErrNotFound) { return []params.Pool{}, nil } - return nil, errors.Wrap(err, "fetching pools") + return nil, fmt.Errorf("error fetching pools: %w", err) } return pools, nil @@ -298,7 +298,7 @@ func (s *sqlDatabase) CreateEntityPool(_ context.Context, entity params.ForgeEnt entityID, err := uuid.Parse(entity.ID) if err != nil { - return params.Pool{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return params.Pool{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } switch entity.EntityType { @@ -311,26 +311,26 @@ func (s *sqlDatabase) CreateEntityPool(_ context.Context, entity params.ForgeEnt } err = s.conn.Transaction(func(tx *gorm.DB) error { if err := s.hasGithubEntity(tx, entity.EntityType, entity.ID); err != nil { - return errors.Wrap(err, "checking entity existence") + return fmt.Errorf("error checking entity existence: %w", err) } tags := []Tag{} for _, val := range param.Tags { t, err := s.getOrCreateTag(tx, val) if err != nil { - return errors.Wrap(err, "creating tag") + return fmt.Errorf("error creating tag: %w", err) } tags = append(tags, t) } q := tx.Create(&newPool) if q.Error != nil { - return errors.Wrap(q.Error, "creating pool") + return fmt.Errorf("error creating pool: %w", q.Error) } for i := range tags { if err := tx.Model(&newPool).Association("Tags").Append(&tags[i]); err != nil { - return errors.Wrap(err, "associating tags") + return fmt.Errorf("error associating tags: %w", err) } } return nil @@ -341,7 +341,7 @@ func (s *sqlDatabase) CreateEntityPool(_ context.Context, entity params.ForgeEnt dbPool, err := s.getPoolByID(s.conn, newPool.ID.String(), "Tags", "Instances", "Enterprise", "Organization", "Repository") if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching pool") + return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) } return s.sqlToCommonPool(dbPool) @@ -358,7 +358,7 @@ func (s *sqlDatabase) GetEntityPool(_ context.Context, entity params.ForgeEntity func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.ForgeEntity, poolID string) (err error) { entityID, err := uuid.Parse(entity.ID) if err != nil { - return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } defer func() { @@ -372,7 +372,7 @@ func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.ForgeEnt poolUUID, err := uuid.Parse(poolID) if err != nil { - return 
errors.Wrap(runnerErrors.ErrBadRequest, "parsing pool id") + return fmt.Errorf("error parsing pool id: %w", runnerErrors.ErrBadRequest) } var fieldName string switch entity.EntityType { @@ -387,7 +387,7 @@ func (s *sqlDatabase) DeleteEntityPool(_ context.Context, entity params.ForgeEnt } condition := fmt.Sprintf("id = ? and %s = ?", fieldName) if err := s.conn.Unscoped().Where(condition, poolUUID, entityID).Delete(&Pool{}).Error; err != nil { - return errors.Wrap(err, "removing pool") + return fmt.Errorf("error removing pool: %w", err) } return nil } @@ -401,12 +401,12 @@ func (s *sqlDatabase) UpdateEntityPool(ctx context.Context, entity params.ForgeE err = s.conn.Transaction(func(tx *gorm.DB) error { pool, err := s.getEntityPool(tx, entity.EntityType, entity.ID, poolID, "Tags", "Instances") if err != nil { - return errors.Wrap(err, "fetching pool") + return fmt.Errorf("error fetching pool: %w", err) } updatedPool, err = s.updatePool(tx, pool, param) if err != nil { - return errors.Wrap(err, "updating pool") + return fmt.Errorf("error updating pool: %w", err) } return nil }) @@ -424,14 +424,14 @@ func (s *sqlDatabase) UpdateEntityPool(ctx context.Context, entity params.ForgeE func (s *sqlDatabase) ListEntityPools(_ context.Context, entity params.ForgeEntity) ([]params.Pool, error) { pools, err := s.listEntityPools(s.conn, entity.EntityType, entity.ID, "Tags") if err != nil { - return nil, errors.Wrap(err, "fetching pools") + return nil, fmt.Errorf("error fetching pools: %w", err) } ret := make([]params.Pool, len(pools)) for idx, pool := range pools { ret[idx], err = s.sqlToCommonPool(pool) if err != nil { - return nil, errors.Wrap(err, "fetching pool") + return nil, fmt.Errorf("error fetching pool: %w", err) } } @@ -441,7 +441,7 @@ func (s *sqlDatabase) ListEntityPools(_ context.Context, entity params.ForgeEnti func (s *sqlDatabase) ListEntityInstances(_ context.Context, entity params.ForgeEntity) ([]params.Instance, error) { pools, err := s.listEntityPools(s.conn, entity.EntityType, entity.ID, "Instances", "Instances.Job") if err != nil { - return nil, errors.Wrap(err, "fetching entity") + return nil, fmt.Errorf("error fetching entity: %w", err) } ret := []params.Instance{} for _, pool := range pools { @@ -451,7 +451,7 @@ func (s *sqlDatabase) ListEntityInstances(_ context.Context, entity params.Forge instance.Pool = pool paramsInstance, err := s.sqlToParamsInstance(instance) if err != nil { - return nil, errors.Wrap(err, "fetching instance") + return nil, fmt.Errorf("error fetching instance: %w", err) } ret = append(ret, paramsInstance) } diff --git a/database/sql/pools_test.go b/database/sql/pools_test.go index 9044bf18..297f4cdf 100644 --- a/database/sql/pools_test.go +++ b/database/sql/pools_test.go @@ -157,7 +157,7 @@ func (s *PoolsTestSuite) TestListAllPoolsDBFetchErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("fetching all pools: mocked fetching all pools error", err.Error()) + s.Require().Equal("error fetching all pools: mocked fetching all pools error", err.Error()) } func (s *PoolsTestSuite) TestGetPoolByID() { @@ -171,7 +171,7 @@ func (s *PoolsTestSuite) TestGetPoolByIDInvalidPoolID() { _, err := s.Store.GetPoolByID(s.adminCtx, "dummy-pool-id") s.Require().NotNil(err) - s.Require().Equal("fetching pool by ID: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching pool by ID: error parsing id: invalid request", err.Error()) } func (s *PoolsTestSuite) TestDeletePoolByID() { @@ -179,14 +179,14 @@ func (s *PoolsTestSuite) 
TestDeletePoolByID() { s.Require().Nil(err) _, err = s.Store.GetPoolByID(s.adminCtx, s.Fixtures.Pools[0].ID) - s.Require().Equal("fetching pool by ID: not found", err.Error()) + s.Require().Equal("error fetching pool by ID: not found", err.Error()) } func (s *PoolsTestSuite) TestDeletePoolByIDInvalidPoolID() { err := s.Store.DeletePoolByID(s.adminCtx, "dummy-pool-id") s.Require().NotNil(err) - s.Require().Equal("fetching pool by ID: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching pool by ID: error parsing id: invalid request", err.Error()) } func (s *PoolsTestSuite) TestDeletePoolByIDDBRemoveErr() { @@ -204,7 +204,7 @@ func (s *PoolsTestSuite) TestDeletePoolByIDDBRemoveErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("removing pool: mocked removing pool error", err.Error()) + s.Require().Equal("error removing pool: mocked removing pool error", err.Error()) } func (s *PoolsTestSuite) TestEntityPoolOperations() { diff --git a/database/sql/repositories.go b/database/sql/repositories.go index a18eb001..72b535e8 100644 --- a/database/sql/repositories.go +++ b/database/sql/repositories.go @@ -16,11 +16,11 @@ package sql import ( "context" + "errors" "fmt" "log/slog" "github.com/google/uuid" - "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -57,23 +57,23 @@ func (s *sqlDatabase) CreateRepository(ctx context.Context, owner, name string, case params.GiteaEndpointType: newRepo.GiteaCredentialsID = &credentials.ID default: - return errors.Wrap(runnerErrors.ErrBadRequest, "unsupported credentials type") + return runnerErrors.NewBadRequestError("unsupported credentials type") } newRepo.EndpointName = &credentials.Endpoint.Name q := tx.Create(&newRepo) if q.Error != nil { - return errors.Wrap(q.Error, "creating repository") + return fmt.Errorf("error creating repository: %w", q.Error) } return nil }) if err != nil { - return params.Repository{}, errors.Wrap(err, "creating repository") + return params.Repository{}, fmt.Errorf("error creating repository: %w", err) } ret, err := s.GetRepositoryByID(ctx, newRepo.ID.String()) if err != nil { - return params.Repository{}, errors.Wrap(err, "creating repository") + return params.Repository{}, fmt.Errorf("error creating repository: %w", err) } return ret, nil @@ -82,12 +82,12 @@ func (s *sqlDatabase) CreateRepository(ctx context.Context, owner, name string, func (s *sqlDatabase) GetRepository(ctx context.Context, owner, name, endpointName string) (params.Repository, error) { repo, err := s.getRepo(ctx, owner, name, endpointName) if err != nil { - return params.Repository{}, errors.Wrap(err, "fetching repo") + return params.Repository{}, fmt.Errorf("error fetching repo: %w", err) } param, err := s.sqlToCommonRepository(repo, true) if err != nil { - return params.Repository{}, errors.Wrap(err, "fetching repo") + return params.Repository{}, fmt.Errorf("error fetching repo: %w", err) } return param, nil @@ -112,7 +112,7 @@ func (s *sqlDatabase) ListRepositories(_ context.Context, filter params.Reposito } q = q.Find(&repos) if q.Error != nil { - return []params.Repository{}, errors.Wrap(q.Error, "fetching user from database") + return []params.Repository{}, fmt.Errorf("error fetching user from database: %w", q.Error) } ret := make([]params.Repository, len(repos)) @@ -120,7 +120,7 @@ func (s *sqlDatabase) ListRepositories(_ context.Context, filter params.Reposito var err error ret[idx], err = s.sqlToCommonRepository(val, true) if err != nil { - 
return nil, errors.Wrap(err, "fetching repositories") + return nil, fmt.Errorf("error fetching repositories: %w", err) } } @@ -130,7 +130,7 @@ func (s *sqlDatabase) ListRepositories(_ context.Context, filter params.Reposito func (s *sqlDatabase) DeleteRepository(ctx context.Context, repoID string) (err error) { repo, err := s.getRepoByID(ctx, s.conn, repoID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") if err != nil { - return errors.Wrap(err, "fetching repo") + return fmt.Errorf("error fetching repo: %w", err) } defer func(repo Repository) { @@ -146,7 +146,7 @@ func (s *sqlDatabase) DeleteRepository(ctx context.Context, repoID string) (err q := s.conn.Unscoped().Delete(&repo) if q.Error != nil && !errors.Is(q.Error, gorm.ErrRecordNotFound) { - return errors.Wrap(q.Error, "deleting repo") + return fmt.Errorf("error deleting repo: %w", q.Error) } return nil @@ -164,23 +164,23 @@ func (s *sqlDatabase) UpdateRepository(ctx context.Context, repoID string, param var err error repo, err = s.getRepoByID(ctx, tx, repoID) if err != nil { - return errors.Wrap(err, "fetching repo") + return fmt.Errorf("error fetching repo: %w", err) } if repo.EndpointName == nil { - return errors.Wrap(runnerErrors.ErrUnprocessable, "repository has no endpoint") + return runnerErrors.NewUnprocessableError("repository has no endpoint") } if param.CredentialsName != "" { creds, err = s.getGithubCredentialsByName(ctx, tx, param.CredentialsName, false) if err != nil { - return errors.Wrap(err, "fetching credentials") + return fmt.Errorf("error fetching credentials: %w", err) } if creds.EndpointName == nil { - return errors.Wrap(runnerErrors.ErrUnprocessable, "credentials have no endpoint") + return runnerErrors.NewUnprocessableError("credentials have no endpoint") } if *creds.EndpointName != *repo.EndpointName { - return errors.Wrap(runnerErrors.ErrBadRequest, "endpoint mismatch") + return runnerErrors.NewBadRequestError("endpoint mismatch") } repo.CredentialsID = &creds.ID } @@ -199,23 +199,23 @@ func (s *sqlDatabase) UpdateRepository(ctx context.Context, repoID string, param q := tx.Save(&repo) if q.Error != nil { - return errors.Wrap(q.Error, "saving repo") + return fmt.Errorf("error saving repo: %w", q.Error) } return nil }) if err != nil { - return params.Repository{}, errors.Wrap(err, "saving repo") + return params.Repository{}, fmt.Errorf("error saving repo: %w", err) } repo, err = s.getRepoByID(ctx, s.conn, repoID, "Endpoint", "Credentials", "Credentials.Endpoint", "GiteaCredentials", "GiteaCredentials.Endpoint") if err != nil { - return params.Repository{}, errors.Wrap(err, "updating enterprise") + return params.Repository{}, fmt.Errorf("error updating enterprise: %w", err) } newParams, err = s.sqlToCommonRepository(repo, true) if err != nil { - return params.Repository{}, errors.Wrap(err, "saving repo") + return params.Repository{}, fmt.Errorf("error saving repo: %w", err) } return newParams, nil } @@ -232,12 +232,12 @@ func (s *sqlDatabase) GetRepositoryByID(ctx context.Context, repoID string) (par } repo, err := s.getRepoByID(ctx, s.conn, repoID, preloadList...) 
if err != nil { - return params.Repository{}, errors.Wrap(err, "fetching repo") + return params.Repository{}, fmt.Errorf("error fetching repo: %w", err) } param, err := s.sqlToCommonRepository(repo, true) if err != nil { - return params.Repository{}, errors.Wrap(err, "fetching repo") + return params.Repository{}, fmt.Errorf("error fetching repo: %w", err) } return param, nil } @@ -259,7 +259,7 @@ func (s *sqlDatabase) getRepo(_ context.Context, owner, name, endpointName strin if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Repository{}, runnerErrors.ErrNotFound } - return Repository{}, errors.Wrap(q.Error, "fetching repository from database") + return Repository{}, fmt.Errorf("error fetching repository from database: %w", q.Error) } return repo, nil } @@ -267,7 +267,7 @@ func (s *sqlDatabase) getRepo(_ context.Context, owner, name, endpointName strin func (s *sqlDatabase) getRepoByID(_ context.Context, tx *gorm.DB, id string, preload ...string) (Repository, error) { u, err := uuid.Parse(id) if err != nil { - return Repository{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return Repository{}, runnerErrors.NewBadRequestError("error parsing id: %s", err) } var repo Repository @@ -283,7 +283,7 @@ func (s *sqlDatabase) getRepoByID(_ context.Context, tx *gorm.DB, id string, pre if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Repository{}, runnerErrors.ErrNotFound } - return Repository{}, errors.Wrap(q.Error, "fetching repository from database") + return Repository{}, fmt.Errorf("error fetching repository from database: %w", q.Error) } return repo, nil } diff --git a/database/sql/repositories_test.go b/database/sql/repositories_test.go index 4609a357..31591e89 100644 --- a/database/sql/repositories_test.go +++ b/database/sql/repositories_test.go @@ -284,7 +284,7 @@ func (s *RepoTestSuite) TestCreateRepositoryInvalidForgeType() { ) s.Require().NotNil(err) - s.Require().Equal("creating repository: unsupported credentials type: invalid request", err.Error()) + s.Require().Equal("error creating repository: unsupported credentials type", err.Error()) } func (s *RepoTestSuite) TestCreateRepositoryInvalidDBPassphrase() { @@ -330,7 +330,7 @@ func (s *RepoTestSuite) TestCreateRepositoryInvalidDBCreateErr() { ) s.Require().NotNil(err) - s.Require().Equal("creating repository: creating repository: creating repo mock error", err.Error()) + s.Require().Equal("error creating repository: error creating repository: creating repo mock error", err.Error()) s.assertSQLMockExpectations() } @@ -355,7 +355,7 @@ func (s *RepoTestSuite) TestGetRepositoryNotFound() { _, err := s.Store.GetRepository(s.adminCtx, "dummy-owner", "dummy-name", "github.com") s.Require().NotNil(err) - s.Require().Equal("fetching repo: not found", err.Error()) + s.Require().Equal("error fetching repo: not found", err.Error()) } func (s *RepoTestSuite) TestGetRepositoryDBDecryptingErr() { @@ -371,7 +371,7 @@ func (s *RepoTestSuite) TestGetRepositoryDBDecryptingErr() { _, err := s.StoreSQLMocked.GetRepository(s.adminCtx, s.Fixtures.Repos[0].Owner, s.Fixtures.Repos[0].Name, s.Fixtures.Repos[0].Endpoint.Name) s.Require().NotNil(err) - s.Require().Equal("fetching repo: missing secret", err.Error()) + s.Require().Equal("error fetching repo: missing secret", err.Error()) s.assertSQLMockExpectations() } @@ -471,7 +471,7 @@ func (s *RepoTestSuite) TestListRepositoriesDBFetchErr() { _, err := s.StoreSQLMocked.ListRepositories(s.adminCtx, params.RepositoryFilter{}) s.Require().NotNil(err) - s.Require().Equal("fetching user from 
database: fetching user from database mock error", err.Error()) + s.Require().Equal("error fetching user from database: fetching user from database mock error", err.Error()) s.assertSQLMockExpectations() } @@ -485,7 +485,7 @@ func (s *RepoTestSuite) TestListRepositoriesDBDecryptingErr() { _, err := s.StoreSQLMocked.ListRepositories(s.adminCtx, params.RepositoryFilter{}) s.Require().NotNil(err) - s.Require().Equal("fetching repositories: decrypting secret: invalid passphrase length (expected length 32 characters)", err.Error()) + s.Require().Equal("error fetching repositories: error decrypting secret: invalid passphrase length (expected length 32 characters)", err.Error()) s.assertSQLMockExpectations() } @@ -495,14 +495,14 @@ func (s *RepoTestSuite) TestDeleteRepository() { s.Require().Nil(err) _, err = s.Store.GetRepositoryByID(s.adminCtx, s.Fixtures.Repos[0].ID) s.Require().NotNil(err) - s.Require().Equal("fetching repo: not found", err.Error()) + s.Require().Equal("error fetching repo: not found", err.Error()) } func (s *RepoTestSuite) TestDeleteRepositoryInvalidRepoID() { err := s.Store.DeleteRepository(s.adminCtx, "dummy-repo-id") s.Require().NotNil(err) - s.Require().Equal("fetching repo: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching repo: error parsing id: invalid UUID length: 13", err.Error()) } func (s *RepoTestSuite) TestDeleteRepositoryDBRemoveErr() { @@ -520,7 +520,7 @@ func (s *RepoTestSuite) TestDeleteRepositoryDBRemoveErr() { err := s.StoreSQLMocked.DeleteRepository(s.adminCtx, s.Fixtures.Repos[0].ID) s.Require().NotNil(err) - s.Require().Equal("deleting repo: mocked deleting repo error", err.Error()) + s.Require().Equal("error deleting repo: mocked deleting repo error", err.Error()) s.assertSQLMockExpectations() } @@ -536,7 +536,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryInvalidRepoID() { _, err := s.Store.UpdateRepository(s.adminCtx, "dummy-repo-id", s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("saving repo: fetching repo: parsing id: invalid request", err.Error()) + s.Require().Equal("error saving repo: error fetching repo: error parsing id: invalid UUID length: 13", err.Error()) } func (s *RepoTestSuite) TestUpdateRepositoryDBEncryptErr() { @@ -561,7 +561,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryDBEncryptErr() { _, err := s.StoreSQLMocked.UpdateRepository(s.adminCtx, s.Fixtures.Repos[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("saving repo: saving repo: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error()) + s.Require().Equal("error saving repo: saving repo: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error()) s.assertSQLMockExpectations() } @@ -589,7 +589,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryDBSaveErr() { _, err := s.StoreSQLMocked.UpdateRepository(s.adminCtx, s.Fixtures.Repos[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("saving repo: saving repo: saving repo mock error", err.Error()) + s.Require().Equal("error saving repo: error saving repo: saving repo mock error", err.Error()) s.assertSQLMockExpectations() } @@ -616,7 +616,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryDBDecryptingErr() { _, err := s.StoreSQLMocked.UpdateRepository(s.adminCtx, s.Fixtures.Repos[0].ID, s.Fixtures.UpdateRepoParams) s.Require().NotNil(err) - s.Require().Equal("saving repo: saving repo: failed to encrypt string: invalid passphrase length 
(expected length 32 characters)", err.Error()) + s.Require().Equal("error saving repo: saving repo: failed to encrypt string: invalid passphrase length (expected length 32 characters)", err.Error()) s.assertSQLMockExpectations() } @@ -631,7 +631,7 @@ func (s *RepoTestSuite) TestGetRepositoryByIDInvalidRepoID() { _, err := s.Store.GetRepositoryByID(s.adminCtx, "dummy-repo-id") s.Require().NotNil(err) - s.Require().Equal("fetching repo: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching repo: error parsing id: invalid UUID length: 13", err.Error()) } func (s *RepoTestSuite) TestGetRepositoryByIDDBDecryptingErr() { @@ -651,7 +651,7 @@ func (s *RepoTestSuite) TestGetRepositoryByIDDBDecryptingErr() { _, err := s.StoreSQLMocked.GetRepositoryByID(s.adminCtx, s.Fixtures.Repos[0].ID) s.Require().NotNil(err) - s.Require().Equal("fetching repo: missing secret", err.Error()) + s.Require().Equal("error fetching repo: missing secret", err.Error()) s.assertSQLMockExpectations() } @@ -690,7 +690,7 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolInvalidRepoID() { _, err := s.Store.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("parsing id: invalid request", err.Error()) + s.Require().Equal("error parsing id: invalid request", err.Error()) } func (s *RepoTestSuite) TestCreateRepositoryPoolDBFetchTagErr() { @@ -709,7 +709,7 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolDBFetchTagErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("creating tag: fetching tag from database: mocked fetching tag error", err.Error()) + s.Require().Equal("error creating tag: error fetching tag from database: mocked fetching tag error", err.Error()) s.assertSQLMockExpectations() } @@ -738,7 +738,7 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolDBAddingPoolErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("creating pool: mocked adding pool error", err.Error()) + s.Require().Equal("error creating pool: mocked adding pool error", err.Error()) s.assertSQLMockExpectations() } @@ -769,7 +769,7 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolDBSaveTagErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("associating tags: mocked saving tag error", err.Error()) + s.Require().Equal("error associating tags: mocked saving tag error", err.Error()) s.assertSQLMockExpectations() } @@ -810,7 +810,7 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolDBFetchPoolErr() { _, err = s.StoreSQLMocked.CreateEntityPool(s.adminCtx, entity, s.Fixtures.CreatePoolParams) s.Require().NotNil(err) - s.Require().Equal("fetching pool: not found", err.Error()) + s.Require().Equal("error fetching pool: not found", err.Error()) s.assertSQLMockExpectations() } @@ -841,7 +841,7 @@ func (s *RepoTestSuite) TestListRepoPoolsInvalidRepoID() { _, err := s.Store.ListEntityPools(s.adminCtx, entity) s.Require().NotNil(err) - s.Require().Equal("fetching pools: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching pools: error parsing id: invalid request", err.Error()) } func (s *RepoTestSuite) TestGetRepositoryPool() { @@ -866,7 +866,7 @@ func (s *RepoTestSuite) TestGetRepositoryPoolInvalidRepoID() { _, err := s.Store.GetEntityPool(s.adminCtx, entity, "dummy-pool-id") 
s.Require().NotNil(err) - s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) + s.Require().Equal("fetching pool: error parsing id: invalid request", err.Error()) } func (s *RepoTestSuite) TestDeleteRepositoryPool() { @@ -881,7 +881,7 @@ func (s *RepoTestSuite) TestDeleteRepositoryPool() { s.Require().Nil(err) _, err = s.Store.GetEntityPool(s.adminCtx, entity, pool.ID) - s.Require().Equal("fetching pool: finding pool: not found", err.Error()) + s.Require().Equal("fetching pool: error finding pool: not found", err.Error()) } func (s *RepoTestSuite) TestDeleteRepositoryPoolInvalidRepoID() { @@ -892,7 +892,7 @@ func (s *RepoTestSuite) TestDeleteRepositoryPoolInvalidRepoID() { err := s.Store.DeleteEntityPool(s.adminCtx, entity, "dummy-pool-id") s.Require().NotNil(err) - s.Require().Equal("parsing id: invalid request", err.Error()) + s.Require().Equal("error parsing id: invalid request", err.Error()) } func (s *RepoTestSuite) TestDeleteRepositoryPoolDBDeleteErr() { @@ -913,7 +913,7 @@ func (s *RepoTestSuite) TestDeleteRepositoryPoolDBDeleteErr() { err = s.StoreSQLMocked.DeleteEntityPool(s.adminCtx, entity, pool.ID) s.Require().NotNil(err) - s.Require().Equal("removing pool: mocked deleting pool error", err.Error()) + s.Require().Equal("error removing pool: mocked deleting pool error", err.Error()) s.assertSQLMockExpectations() } @@ -948,7 +948,7 @@ func (s *RepoTestSuite) TestListRepoInstancesInvalidRepoID() { _, err := s.Store.ListEntityInstances(s.adminCtx, entity) s.Require().NotNil(err) - s.Require().Equal("fetching entity: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching entity: error parsing id: invalid request", err.Error()) } func (s *RepoTestSuite) TestUpdateRepositoryPool() { @@ -976,7 +976,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryPoolInvalidRepoID() { _, err := s.Store.UpdateEntityPool(s.adminCtx, entity, "dummy-repo-id", s.Fixtures.UpdatePoolParams) s.Require().NotNil(err) - s.Require().Equal("fetching pool: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching pool: error parsing id: invalid request", err.Error()) } func (s *RepoTestSuite) TestAddRepoEntityEvent() { diff --git a/database/sql/scaleset_instances.go b/database/sql/scaleset_instances.go index 61271e8b..457c99b5 100644 --- a/database/sql/scaleset_instances.go +++ b/database/sql/scaleset_instances.go @@ -16,8 +16,7 @@ package sql import ( "context" - - "github.com/pkg/errors" + "fmt" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" @@ -26,7 +25,7 @@ import ( func (s *sqlDatabase) CreateScaleSetInstance(_ context.Context, scaleSetID uint, param params.CreateInstanceParams) (instance params.Instance, err error) { scaleSet, err := s.getScaleSetByID(s.conn, scaleSetID) if err != nil { - return params.Instance{}, errors.Wrap(err, "fetching scale set") + return params.Instance{}, fmt.Errorf("error fetching scale set: %w", err) } defer func() { @@ -39,7 +38,7 @@ func (s *sqlDatabase) CreateScaleSetInstance(_ context.Context, scaleSetID uint, if len(param.JitConfiguration) > 0 { secret, err = s.marshalAndSeal(param.JitConfiguration) if err != nil { - return params.Instance{}, errors.Wrap(err, "marshalling jit config") + return params.Instance{}, fmt.Errorf("error marshalling jit config: %w", err) } } @@ -58,7 +57,7 @@ func (s *sqlDatabase) CreateScaleSetInstance(_ context.Context, scaleSetID uint, } q := s.conn.Create(&newInstance) if q.Error != nil { - return params.Instance{}, errors.Wrap(q.Error, "creating 
instance") + return params.Instance{}, fmt.Errorf("error creating instance: %w", q.Error) } return s.sqlToParamsInstance(newInstance) @@ -72,7 +71,7 @@ func (s *sqlDatabase) ListScaleSetInstances(_ context.Context, scalesetID uint) Where("scale_set_fk_id = ?", scalesetID) if err := query.Find(&instances); err.Error != nil { - return nil, errors.Wrap(err.Error, "fetching instances") + return nil, fmt.Errorf("error fetching instances: %w", err.Error) } var err error @@ -80,7 +79,7 @@ func (s *sqlDatabase) ListScaleSetInstances(_ context.Context, scalesetID uint) for idx, inst := range instances { ret[idx], err = s.sqlToParamsInstance(inst) if err != nil { - return nil, errors.Wrap(err, "converting instance") + return nil, fmt.Errorf("error converting instance: %w", err) } } return ret, nil diff --git a/database/sql/scalesets.go b/database/sql/scalesets.go index 4748ed66..b247b7a8 100644 --- a/database/sql/scalesets.go +++ b/database/sql/scalesets.go @@ -16,10 +16,10 @@ package sql import ( "context" + "errors" "fmt" "github.com/google/uuid" - "github.com/pkg/errors" "gorm.io/datatypes" "gorm.io/gorm" @@ -42,7 +42,7 @@ func (s *sqlDatabase) ListAllScaleSets(_ context.Context) ([]params.ScaleSet, er Omit("status_messages"). Find(&scaleSets) if q.Error != nil { - return nil, errors.Wrap(q.Error, "fetching all scale sets") + return nil, fmt.Errorf("error fetching all scale sets: %w", q.Error) } ret := make([]params.ScaleSet, len(scaleSets)) @@ -50,7 +50,7 @@ func (s *sqlDatabase) ListAllScaleSets(_ context.Context) ([]params.ScaleSet, er for idx, val := range scaleSets { ret[idx], err = s.sqlToCommonScaleSet(val) if err != nil { - return nil, errors.Wrap(err, "converting scale sets") + return nil, fmt.Errorf("error converting scale sets: %w", err) } } return ret, nil @@ -91,7 +91,7 @@ func (s *sqlDatabase) CreateEntityScaleSet(_ context.Context, entity params.Forg entityID, err := uuid.Parse(entity.ID) if err != nil { - return params.ScaleSet{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return params.ScaleSet{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } switch entity.EntityType { @@ -104,12 +104,12 @@ func (s *sqlDatabase) CreateEntityScaleSet(_ context.Context, entity params.Forg } err = s.conn.Transaction(func(tx *gorm.DB) error { if err := s.hasGithubEntity(tx, entity.EntityType, entity.ID); err != nil { - return errors.Wrap(err, "checking entity existence") + return fmt.Errorf("error checking entity existence: %w", err) } q := tx.Create(&newScaleSet) if q.Error != nil { - return errors.Wrap(q.Error, "creating scale set") + return fmt.Errorf("error creating scale set: %w", q.Error) } return nil @@ -120,7 +120,7 @@ func (s *sqlDatabase) CreateEntityScaleSet(_ context.Context, entity params.Forg dbScaleSet, err := s.getScaleSetByID(s.conn, newScaleSet.ID, "Instances", "Enterprise", "Organization", "Repository") if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "fetching scale set") + return params.ScaleSet{}, fmt.Errorf("error fetching scale set: %w", err) } return s.sqlToCommonScaleSet(dbScaleSet) @@ -128,11 +128,11 @@ func (s *sqlDatabase) CreateEntityScaleSet(_ context.Context, entity params.Forg func (s *sqlDatabase) listEntityScaleSets(tx *gorm.DB, entityType params.ForgeEntityType, entityID string, preload ...string) ([]ScaleSet, error) { if _, err := uuid.Parse(entityID); err != nil { - return nil, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return nil, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } if err := 
s.hasGithubEntity(tx, entityType, entityID); err != nil { - return nil, errors.Wrap(err, "checking entity existence") + return nil, fmt.Errorf("error checking entity existence: %w", err) } var preloadEntity string @@ -170,7 +170,7 @@ func (s *sqlDatabase) listEntityScaleSets(tx *gorm.DB, entityType params.ForgeEn if errors.Is(err, gorm.ErrRecordNotFound) { return []ScaleSet{}, nil } - return nil, errors.Wrap(err, "fetching scale sets") + return nil, fmt.Errorf("error fetching scale sets: %w", err) } return scaleSets, nil @@ -179,14 +179,14 @@ func (s *sqlDatabase) listEntityScaleSets(tx *gorm.DB, entityType params.ForgeEn func (s *sqlDatabase) ListEntityScaleSets(_ context.Context, entity params.ForgeEntity) ([]params.ScaleSet, error) { scaleSets, err := s.listEntityScaleSets(s.conn, entity.EntityType, entity.ID) if err != nil { - return nil, errors.Wrap(err, "fetching scale sets") + return nil, fmt.Errorf("error fetching scale sets: %w", err) } ret := make([]params.ScaleSet, len(scaleSets)) for idx, set := range scaleSets { ret[idx], err = s.sqlToCommonScaleSet(set) if err != nil { - return nil, errors.Wrap(err, "conbverting scale set") + return nil, fmt.Errorf("error converting scale set: %w", err) } } @@ -202,22 +202,22 @@ func (s *sqlDatabase) UpdateEntityScaleSet(ctx context.Context, entity params.Fo err = s.conn.Transaction(func(tx *gorm.DB) error { scaleSet, err := s.getEntityScaleSet(tx, entity.EntityType, entity.ID, scaleSetID, "Instances") if err != nil { - return errors.Wrap(err, "fetching scale set") + return fmt.Errorf("error fetching scale set: %w", err) } old, err := s.sqlToCommonScaleSet(scaleSet) if err != nil { - return errors.Wrap(err, "converting scale set") + return fmt.Errorf("error converting scale set: %w", err) } updatedScaleSet, err = s.updateScaleSet(tx, scaleSet, param) if err != nil { - return errors.Wrap(err, "updating scale set") + return fmt.Errorf("error updating scale set: %w", err) } if callback != nil { if err := callback(old, updatedScaleSet); err != nil { - return errors.Wrap(err, "executing update callback") + return fmt.Errorf("error executing update callback: %w", err) } } return nil @@ -235,11 +235,11 @@ func (s *sqlDatabase) UpdateEntityScaleSet(ctx context.Context, entity params.Fo func (s *sqlDatabase) getEntityScaleSet(tx *gorm.DB, entityType params.ForgeEntityType, entityID string, scaleSetID uint, preload ...string) (ScaleSet, error) { if entityID == "" { - return ScaleSet{}, errors.Wrap(runnerErrors.ErrBadRequest, "missing entity id") + return ScaleSet{}, fmt.Errorf("error missing entity id: %w", runnerErrors.ErrBadRequest) } if scaleSetID == 0 { - return ScaleSet{}, errors.Wrap(runnerErrors.ErrBadRequest, "missing scaleset id") + return ScaleSet{}, fmt.Errorf("error missing scaleset id: %w", runnerErrors.ErrBadRequest) } var fieldName string @@ -273,9 +273,9 @@ func (s *sqlDatabase) getEntityScaleSet(tx *gorm.DB, entityType params.ForgeEnti First(&scaleSet).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return ScaleSet{}, errors.Wrap(runnerErrors.ErrNotFound, "finding scale set") + return ScaleSet{}, fmt.Errorf("error finding scale set: %w", runnerErrors.ErrNotFound) } - return ScaleSet{}, errors.Wrap(err, "fetching scale set") + return ScaleSet{}, fmt.Errorf("error fetching scale set: %w", err) } return scaleSet, nil @@ -343,7 +343,7 @@ func (s *sqlDatabase) updateScaleSet(tx *gorm.DB, scaleSet ScaleSet, param param } if q := tx.Save(&scaleSet); q.Error != nil { - return params.ScaleSet{}, errors.Wrap(q.Error, "saving 
database entry") + return params.ScaleSet{}, fmt.Errorf("error saving database entry: %w", q.Error) } return s.sqlToCommonScaleSet(scaleSet) @@ -362,7 +362,7 @@ func (s *sqlDatabase) GetScaleSetByID(_ context.Context, scaleSet uint) (params. "Repository.Endpoint", ) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "fetching scale set by ID") + return params.ScaleSet{}, fmt.Errorf("error fetching scale set by ID: %w", err) } return s.sqlToCommonScaleSet(set) } @@ -377,7 +377,7 @@ func (s *sqlDatabase) DeleteScaleSetByID(_ context.Context, scaleSetID uint) (er err = s.conn.Transaction(func(tx *gorm.DB) error { dbSet, err := s.getScaleSetByID(tx, scaleSetID, "Instances", "Enterprise", "Organization", "Repository") if err != nil { - return errors.Wrap(err, "fetching scale set") + return fmt.Errorf("error fetching scale set: %w", err) } if len(dbSet.Instances) > 0 { @@ -385,16 +385,16 @@ func (s *sqlDatabase) DeleteScaleSetByID(_ context.Context, scaleSetID uint) (er } scaleSet, err = s.sqlToCommonScaleSet(dbSet) if err != nil { - return errors.Wrap(err, "converting scale set") + return fmt.Errorf("error converting scale set: %w", err) } if q := tx.Unscoped().Delete(&dbSet); q.Error != nil { - return errors.Wrap(q.Error, "deleting scale set") + return fmt.Errorf("error deleting scale set: %w", q.Error) } return nil }) if err != nil { - return errors.Wrap(err, "removing scale set") + return fmt.Errorf("error removing scale set: %w", err) } return nil } @@ -409,19 +409,19 @@ func (s *sqlDatabase) SetScaleSetLastMessageID(_ context.Context, scaleSetID uin if err := s.conn.Transaction(func(tx *gorm.DB) error { dbSet, err := s.getScaleSetByID(tx, scaleSetID, "Instances", "Enterprise", "Organization", "Repository") if err != nil { - return errors.Wrap(err, "fetching scale set") + return fmt.Errorf("error fetching scale set: %w", err) } dbSet.LastMessageID = lastMessageID if err := tx.Save(&dbSet).Error; err != nil { - return errors.Wrap(err, "saving database entry") + return fmt.Errorf("error saving database entry: %w", err) } scaleSet, err = s.sqlToCommonScaleSet(dbSet) if err != nil { - return errors.Wrap(err, "converting scale set") + return fmt.Errorf("error converting scale set: %w", err) } return nil }); err != nil { - return errors.Wrap(err, "setting last message ID") + return fmt.Errorf("error setting last message ID: %w", err) } return nil } @@ -436,19 +436,19 @@ func (s *sqlDatabase) SetScaleSetDesiredRunnerCount(_ context.Context, scaleSetI if err := s.conn.Transaction(func(tx *gorm.DB) error { dbSet, err := s.getScaleSetByID(tx, scaleSetID, "Instances", "Enterprise", "Organization", "Repository") if err != nil { - return errors.Wrap(err, "fetching scale set") + return fmt.Errorf("error fetching scale set: %w", err) } dbSet.DesiredRunnerCount = desiredRunnerCount if err := tx.Save(&dbSet).Error; err != nil { - return errors.Wrap(err, "saving database entry") + return fmt.Errorf("error saving database entry: %w", err) } scaleSet, err = s.sqlToCommonScaleSet(dbSet) if err != nil { - return errors.Wrap(err, "converting scale set") + return fmt.Errorf("error converting scale set: %w", err) } return nil }); err != nil { - return errors.Wrap(err, "setting desired runner count") + return fmt.Errorf("error setting desired runner count: %w", err) } return nil } diff --git a/database/sql/sql.go b/database/sql/sql.go index 16411364..7d1fc96c 100644 --- a/database/sql/sql.go +++ b/database/sql/sql.go @@ -16,12 +16,12 @@ package sql import ( "context" + "errors" "fmt" "log/slog" "net/url" 
"strings" - "github.com/pkg/errors" "gorm.io/driver/mysql" "gorm.io/driver/sqlite" "gorm.io/gorm" @@ -46,7 +46,7 @@ const ( func newDBConn(dbCfg config.Database) (conn *gorm.DB, err error) { dbType, connURI, err := dbCfg.GormParams() if err != nil { - return nil, errors.Wrap(err, "getting DB URI string") + return nil, fmt.Errorf("error getting DB URI string: %w", err) } gormConfig := &gorm.Config{} @@ -61,7 +61,7 @@ func newDBConn(dbCfg config.Database) (conn *gorm.DB, err error) { conn, err = gorm.Open(sqlite.Open(connURI), gormConfig) } if err != nil { - return nil, errors.Wrap(err, "connecting to database") + return nil, fmt.Errorf("error connecting to database: %w", err) } if dbCfg.Debug { @@ -73,11 +73,11 @@ func newDBConn(dbCfg config.Database) (conn *gorm.DB, err error) { func NewSQLDatabase(ctx context.Context, cfg config.Database) (common.Store, error) { conn, err := newDBConn(cfg) if err != nil { - return nil, errors.Wrap(err, "creating DB connection") + return nil, fmt.Errorf("error creating DB connection: %w", err) } producer, err := watcher.RegisterProducer(ctx, "sql") if err != nil { - return nil, errors.Wrap(err, "registering producer") + return nil, fmt.Errorf("error registering producer: %w", err) } db := &sqlDatabase{ conn: conn, @@ -87,7 +87,7 @@ func NewSQLDatabase(ctx context.Context, cfg config.Database) (common.Store, err } if err := db.migrateDB(); err != nil { - return nil, errors.Wrap(err, "migrating database") + return nil, fmt.Errorf("error migrating database: %w", err) } return db, nil } @@ -221,14 +221,14 @@ func (s *sqlDatabase) ensureGithubEndpoint() error { var epCount int64 if err := s.conn.Model(&GithubEndpoint{}).Count(&epCount).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "counting github endpoints") + return fmt.Errorf("error counting github endpoints: %w", err) } } if epCount == 0 { if _, err := s.CreateGithubEndpoint(context.Background(), createEndpointParams); err != nil { if !errors.Is(err, runnerErrors.ErrDuplicateEntity) { - return errors.Wrap(err, "creating default github endpoint") + return fmt.Errorf("error creating default github endpoint: %w", err) } } } @@ -246,7 +246,7 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { // Admin user doesn't exist. This is a new deploy. Nothing to migrate. return nil } - return errors.Wrap(err, "getting admin user") + return fmt.Errorf("error getting admin user: %w", err) } // Impersonate the admin user. 
We're migrating from config credentials to @@ -259,7 +259,7 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { slog.Info("migrating credentials to DB") slog.Info("creating github endpoints table") if err := s.conn.AutoMigrate(&GithubEndpoint{}); err != nil { - return errors.Wrap(err, "migrating github endpoints") + return fmt.Errorf("error migrating github endpoints: %w", err) } defer func() { @@ -271,7 +271,7 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { slog.Info("creating github credentials table") if err := s.conn.AutoMigrate(&GithubCredentials{}); err != nil { - return errors.Wrap(err, "migrating github credentials") + return fmt.Errorf("error migrating github credentials: %w", err) } defer func() { @@ -291,12 +291,12 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { slog.Info("importing credential", "name", cred.Name) parsed, err := url.Parse(cred.BaseEndpoint()) if err != nil { - return errors.Wrap(err, "parsing base URL") + return fmt.Errorf("error parsing base URL: %w", err) } certBundle, err := cred.CACertBundle() if err != nil { - return errors.Wrap(err, "getting CA cert bundle") + return fmt.Errorf("error getting CA cert bundle: %w", err) } hostname := parsed.Hostname() createParams := params.CreateGithubEndpointParams{ @@ -312,11 +312,11 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { endpoint, err = s.GetGithubEndpoint(adminCtx, hostname) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return errors.Wrap(err, "getting github endpoint") + return fmt.Errorf("error getting github endpoint: %w", err) } endpoint, err = s.CreateGithubEndpoint(adminCtx, createParams) if err != nil { - return errors.Wrap(err, "creating default github endpoint") + return fmt.Errorf("error creating default github endpoint: %w", err) } } @@ -330,7 +330,7 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { case params.ForgeAuthTypeApp: keyBytes, err := cred.App.PrivateKeyBytes() if err != nil { - return errors.Wrap(err, "getting private key bytes") + return fmt.Errorf("error getting private key bytes: %w", err) } credParams.App = params.GithubApp{ AppID: cred.App.AppID, @@ -339,7 +339,7 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { } if err := credParams.App.Validate(); err != nil { - return errors.Wrap(err, "validating app credentials") + return fmt.Errorf("error validating app credentials: %w", err) } case params.ForgeAuthTypePAT: token := cred.PAT.OAuth2Token @@ -356,19 +356,19 @@ func (s *sqlDatabase) migrateCredentialsToDB() (err error) { creds, err := s.CreateGithubCredentials(adminCtx, credParams) if err != nil { - return errors.Wrap(err, "creating github credentials") + return fmt.Errorf("error creating github credentials: %w", err) } if err := s.conn.Exec("update repositories set credentials_id = ?,endpoint_name = ? where credentials_name = ?", creds.ID, creds.Endpoint.Name, creds.Name).Error; err != nil { - return errors.Wrap(err, "updating repositories") + return fmt.Errorf("error updating repositories: %w", err) } if err := s.conn.Exec("update organizations set credentials_id = ?,endpoint_name = ? where credentials_name = ?", creds.ID, creds.Endpoint.Name, creds.Name).Error; err != nil { - return errors.Wrap(err, "updating organizations") + return fmt.Errorf("error updating organizations: %w", err) } if err := s.conn.Exec("update enterprises set credentials_id = ?,endpoint_name = ? 
where credentials_name = ?", creds.ID, creds.Endpoint.Name, creds.Name).Error; err != nil { - return errors.Wrap(err, "updating enterprises") + return fmt.Errorf("error updating enterprises: %w", err) } } return nil @@ -380,10 +380,10 @@ func (s *sqlDatabase) migrateWorkflow() error { // Remove jobs that are not in "queued" status. We really only care about queued jobs. Once they transition // to something else, we don't really consume them anyway. if err := s.conn.Exec("delete from workflow_jobs where status is not 'queued'").Error; err != nil { - return errors.Wrap(err, "updating workflow_jobs") + return fmt.Errorf("error updating workflow_jobs: %w", err) } if err := s.conn.Migrator().DropColumn(&WorkflowJob{}, "runner_name"); err != nil { - return errors.Wrap(err, "updating workflow_jobs") + return fmt.Errorf("error updating workflow_jobs: %w", err) } } } @@ -404,34 +404,34 @@ func (s *sqlDatabase) migrateDB() error { } if err := s.cascadeMigration(); err != nil { - return errors.Wrap(err, "running cascade migration") + return fmt.Errorf("error running cascade migration: %w", err) } if s.conn.Migrator().HasTable(&Pool{}) { if err := s.conn.Exec("update pools set repo_id=NULL where repo_id='00000000-0000-0000-0000-000000000000'").Error; err != nil { - return errors.Wrap(err, "updating pools") + return fmt.Errorf("error updating pools: %w", err) } if err := s.conn.Exec("update pools set org_id=NULL where org_id='00000000-0000-0000-0000-000000000000'").Error; err != nil { - return errors.Wrap(err, "updating pools") + return fmt.Errorf("error updating pools: %w", err) } if err := s.conn.Exec("update pools set enterprise_id=NULL where enterprise_id='00000000-0000-0000-0000-000000000000'").Error; err != nil { - return errors.Wrap(err, "updating pools") + return fmt.Errorf("error updating pools: %w", err) } } if err := s.migrateWorkflow(); err != nil { - return errors.Wrap(err, "migrating workflows") + return fmt.Errorf("error migrating workflows: %w", err) } if s.conn.Migrator().HasTable(&GithubEndpoint{}) { if !s.conn.Migrator().HasColumn(&GithubEndpoint{}, "endpoint_type") { if err := s.conn.Migrator().AutoMigrate(&GithubEndpoint{}); err != nil { - return errors.Wrap(err, "migrating github endpoints") + return fmt.Errorf("error migrating github endpoints: %w", err) } if err := s.conn.Exec("update github_endpoints set endpoint_type = 'github' where endpoint_type is null").Error; err != nil { - return errors.Wrap(err, "updating github endpoints") + return fmt.Errorf("error updating github endpoints: %w", err) } } } @@ -467,7 +467,7 @@ func (s *sqlDatabase) migrateDB() error { &WorkflowJob{}, &ScaleSet{}, ); err != nil { - return errors.Wrap(err, "running auto migrate") + return fmt.Errorf("error running auto migrate: %w", err) } s.conn.Exec("PRAGMA foreign_keys = ON") @@ -475,23 +475,23 @@ func (s *sqlDatabase) migrateDB() error { var controller ControllerInfo if err := s.conn.First(&controller).Error; err != nil { if !errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(err, "updating controller info") + return fmt.Errorf("error updating controller info: %w", err) } } else { controller.MinimumJobAgeBackoff = 30 if err := s.conn.Save(&controller).Error; err != nil { - return errors.Wrap(err, "updating controller info") + return fmt.Errorf("error updating controller info: %w", err) } } } if err := s.ensureGithubEndpoint(); err != nil { - return errors.Wrap(err, "ensuring github endpoint") + return fmt.Errorf("error ensuring github endpoint: %w", err) } if needsCredentialMigration { if 
err := s.migrateCredentialsToDB(); err != nil { - return errors.Wrap(err, "migrating credentials") + return fmt.Errorf("error migrating credentials: %w", err) } } return nil diff --git a/database/sql/users.go b/database/sql/users.go index 7d604a83..ca78c5e8 100644 --- a/database/sql/users.go +++ b/database/sql/users.go @@ -16,9 +16,9 @@ package sql import ( "context" + "errors" "fmt" - "github.com/pkg/errors" "gorm.io/gorm" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -39,7 +39,7 @@ func (s *sqlDatabase) getUserByUsernameOrEmail(tx *gorm.DB, user string) (User, if errors.Is(q.Error, gorm.ErrRecordNotFound) { return User{}, runnerErrors.ErrNotFound } - return User{}, errors.Wrap(q.Error, "fetching user") + return User{}, fmt.Errorf("error fetching user: %w", q.Error) } return dbUser, nil } @@ -51,7 +51,7 @@ func (s *sqlDatabase) getUserByID(tx *gorm.DB, userID string) (User, error) { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return User{}, runnerErrors.ErrNotFound } - return User{}, errors.Wrap(q.Error, "fetching user") + return User{}, fmt.Errorf("error fetching user: %w", q.Error) } return dbUser, nil } @@ -82,12 +82,12 @@ func (s *sqlDatabase) CreateUser(_ context.Context, user params.NewUserParams) ( q := tx.Save(&newUser) if q.Error != nil { - return errors.Wrap(q.Error, "creating user") + return fmt.Errorf("error creating user: %w", q.Error) } return nil }) if err != nil { - return params.User{}, errors.Wrap(err, "creating user") + return params.User{}, fmt.Errorf("error creating user: %w", err) } return s.sqlToParamsUser(newUser), nil } @@ -105,7 +105,7 @@ func (s *sqlDatabase) HasAdminUser(_ context.Context) bool { func (s *sqlDatabase) GetUser(_ context.Context, user string) (params.User, error) { dbUser, err := s.getUserByUsernameOrEmail(s.conn, user) if err != nil { - return params.User{}, errors.Wrap(err, "fetching user") + return params.User{}, fmt.Errorf("error fetching user: %w", err) } return s.sqlToParamsUser(dbUser), nil } @@ -113,7 +113,7 @@ func (s *sqlDatabase) GetUser(_ context.Context, user string) (params.User, erro func (s *sqlDatabase) GetUserByID(_ context.Context, userID string) (params.User, error) { dbUser, err := s.getUserByID(s.conn, userID) if err != nil { - return params.User{}, errors.Wrap(err, "fetching user") + return params.User{}, fmt.Errorf("error fetching user: %w", err) } return s.sqlToParamsUser(dbUser), nil } @@ -124,7 +124,7 @@ func (s *sqlDatabase) UpdateUser(_ context.Context, user string, param params.Up err = s.conn.Transaction(func(tx *gorm.DB) error { dbUser, err = s.getUserByUsernameOrEmail(tx, user) if err != nil { - return errors.Wrap(err, "fetching user") + return fmt.Errorf("error fetching user: %w", err) } if param.FullName != "" { @@ -141,12 +141,12 @@ func (s *sqlDatabase) UpdateUser(_ context.Context, user string, param params.Up } if q := tx.Save(&dbUser); q.Error != nil { - return errors.Wrap(q.Error, "saving user") + return fmt.Errorf("error saving user: %w", q.Error) } return nil }) if err != nil { - return params.User{}, errors.Wrap(err, "updating user") + return params.User{}, fmt.Errorf("error updating user: %w", err) } return s.sqlToParamsUser(dbUser), nil } @@ -159,7 +159,7 @@ func (s *sqlDatabase) GetAdminUser(_ context.Context) (params.User, error) { if errors.Is(q.Error, gorm.ErrRecordNotFound) { return params.User{}, runnerErrors.ErrNotFound } - return params.User{}, errors.Wrap(q.Error, "fetching admin user") + return params.User{}, fmt.Errorf("error fetching admin user: %w", q.Error) } 
return s.sqlToParamsUser(user), nil } diff --git a/database/sql/users_test.go b/database/sql/users_test.go index db24adc3..369abff3 100644 --- a/database/sql/users_test.go +++ b/database/sql/users_test.go @@ -161,7 +161,7 @@ func (s *UserTestSuite) TestCreateUserUsernameAlreadyExist() { _, err := s.Store.CreateUser(context.Background(), s.Fixtures.NewUserParams) s.Require().NotNil(err) - s.Require().Equal(("creating user: username already exists"), err.Error()) + s.Require().Equal(("error creating user: username already exists"), err.Error()) } func (s *UserTestSuite) TestCreateUserEmailAlreadyExist() { @@ -170,7 +170,7 @@ func (s *UserTestSuite) TestCreateUserEmailAlreadyExist() { _, err := s.Store.CreateUser(context.Background(), s.Fixtures.NewUserParams) s.Require().NotNil(err) - s.Require().Equal(("creating user: email already exists"), err.Error()) + s.Require().Equal(("error creating user: email already exists"), err.Error()) } func (s *UserTestSuite) TestCreateUserDBCreateErr() { @@ -191,7 +191,7 @@ func (s *UserTestSuite) TestCreateUserDBCreateErr() { _, err := s.StoreSQLMocked.CreateUser(context.Background(), s.Fixtures.NewUserParams) s.Require().NotNil(err) - s.Require().Equal("creating user: creating user: creating user mock error", err.Error()) + s.Require().Equal("error creating user: error creating user: creating user mock error", err.Error()) s.assertSQLMockExpectations() } @@ -230,7 +230,7 @@ func (s *UserTestSuite) TestGetUserNotFound() { _, err := s.Store.GetUser(context.Background(), "dummy-user") s.Require().NotNil(err) - s.Require().Equal("fetching user: not found", err.Error()) + s.Require().Equal("error fetching user: not found", err.Error()) } func (s *UserTestSuite) TestGetUserByID() { @@ -244,7 +244,7 @@ func (s *UserTestSuite) TestGetUserByIDNotFound() { _, err := s.Store.GetUserByID(context.Background(), "dummy-user-id") s.Require().NotNil(err) - s.Require().Equal("fetching user: not found", err.Error()) + s.Require().Equal("error fetching user: not found", err.Error()) } func (s *UserTestSuite) TestUpdateUser() { @@ -260,7 +260,7 @@ func (s *UserTestSuite) TestUpdateUserNotFound() { _, err := s.Store.UpdateUser(context.Background(), "dummy-user", s.Fixtures.UpdateUserParams) s.Require().NotNil(err) - s.Require().Equal("updating user: fetching user: not found", err.Error()) + s.Require().Equal("error updating user: error fetching user: not found", err.Error()) } func (s *UserTestSuite) TestUpdateUserDBSaveErr() { @@ -278,7 +278,7 @@ func (s *UserTestSuite) TestUpdateUserDBSaveErr() { s.assertSQLMockExpectations() s.Require().NotNil(err) - s.Require().Equal("updating user: saving user: saving user mock error", err.Error()) + s.Require().Equal("error updating user: error saving user: saving user mock error", err.Error()) } func TestUserTestSuite(t *testing.T) { diff --git a/database/sql/util.go b/database/sql/util.go index ebb3c57c..9509aacf 100644 --- a/database/sql/util.go +++ b/database/sql/util.go @@ -17,10 +17,10 @@ package sql import ( "context" "encoding/json" + "errors" "fmt" "github.com/google/uuid" - "github.com/pkg/errors" "gorm.io/datatypes" "gorm.io/gorm" @@ -41,14 +41,14 @@ func (s *sqlDatabase) sqlToParamsInstance(instance Instance) (params.Instance, e var labels []string if len(instance.AditionalLabels) > 0 { if err := json.Unmarshal(instance.AditionalLabels, &labels); err != nil { - return params.Instance{}, errors.Wrap(err, "unmarshalling labels") + return params.Instance{}, fmt.Errorf("error unmarshalling labels: %w", err) } } var jitConfig 
map[string]string if len(instance.JitConfiguration) > 0 { if err := s.unsealAndUnmarshal(instance.JitConfiguration, &jitConfig); err != nil { - return params.Instance{}, errors.Wrap(err, "unmarshalling jit configuration") + return params.Instance{}, fmt.Errorf("error unmarshalling jit configuration: %w", err) } } ret := params.Instance{ @@ -95,7 +95,7 @@ func (s *sqlDatabase) sqlToParamsInstance(instance Instance) (params.Instance, e if instance.Job != nil { paramJob, err := sqlWorkflowJobToParamsJob(*instance.Job) if err != nil { - return params.Instance{}, errors.Wrap(err, "converting job") + return params.Instance{}, fmt.Errorf("error converting job: %w", err) } ret.Job = ¶mJob } @@ -132,12 +132,12 @@ func (s *sqlDatabase) sqlToCommonOrganization(org Organization, detailed bool) ( } secret, err := util.Unseal(org.WebhookSecret, []byte(s.cfg.Passphrase)) if err != nil { - return params.Organization{}, errors.Wrap(err, "decrypting secret") + return params.Organization{}, fmt.Errorf("error decrypting secret: %w", err) } endpoint, err := s.sqlToCommonGithubEndpoint(org.Endpoint) if err != nil { - return params.Organization{}, errors.Wrap(err, "converting endpoint") + return params.Organization{}, fmt.Errorf("error converting endpoint: %w", err) } ret := params.Organization{ ID: org.ID.String(), @@ -163,7 +163,7 @@ func (s *sqlDatabase) sqlToCommonOrganization(org Organization, detailed bool) ( } if err != nil { - return params.Organization{}, errors.Wrap(err, "converting credentials") + return params.Organization{}, fmt.Errorf("error converting credentials: %w", err) } if len(org.Events) > 0 { @@ -191,7 +191,7 @@ func (s *sqlDatabase) sqlToCommonOrganization(org Organization, detailed bool) ( for idx, pool := range org.Pools { ret.Pools[idx], err = s.sqlToCommonPool(pool) if err != nil { - return params.Organization{}, errors.Wrap(err, "converting pool") + return params.Organization{}, fmt.Errorf("error converting pool: %w", err) } } @@ -204,12 +204,12 @@ func (s *sqlDatabase) sqlToCommonEnterprise(enterprise Enterprise, detailed bool } secret, err := util.Unseal(enterprise.WebhookSecret, []byte(s.cfg.Passphrase)) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "decrypting secret") + return params.Enterprise{}, fmt.Errorf("error decrypting secret: %w", err) } endpoint, err := s.sqlToCommonGithubEndpoint(enterprise.Endpoint) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "converting endpoint") + return params.Enterprise{}, fmt.Errorf("error converting endpoint: %w", err) } ret := params.Enterprise{ ID: enterprise.ID.String(), @@ -243,7 +243,7 @@ func (s *sqlDatabase) sqlToCommonEnterprise(enterprise Enterprise, detailed bool if detailed { creds, err := s.sqlToCommonForgeCredentials(enterprise.Credentials) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "converting credentials") + return params.Enterprise{}, fmt.Errorf("error converting credentials: %w", err) } ret.Credentials = creds } @@ -255,7 +255,7 @@ func (s *sqlDatabase) sqlToCommonEnterprise(enterprise Enterprise, detailed bool for idx, pool := range enterprise.Pools { ret.Pools[idx], err = s.sqlToCommonPool(pool) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "converting pool") + return params.Enterprise{}, fmt.Errorf("error converting pool: %w", err) } } @@ -309,7 +309,7 @@ func (s *sqlDatabase) sqlToCommonPool(pool Pool) (params.Pool, error) { endpoint, err := s.sqlToCommonGithubEndpoint(ep) if err != nil { - return params.Pool{}, errors.Wrap(err, "converting endpoint") 
+ return params.Pool{}, fmt.Errorf("error converting endpoint: %w", err) } ret.Endpoint = endpoint @@ -320,7 +320,7 @@ func (s *sqlDatabase) sqlToCommonPool(pool Pool) (params.Pool, error) { for idx, inst := range pool.Instances { ret.Instances[idx], err = s.sqlToParamsInstance(inst) if err != nil { - return params.Pool{}, errors.Wrap(err, "converting instance") + return params.Pool{}, fmt.Errorf("error converting instance: %w", err) } } @@ -380,14 +380,14 @@ func (s *sqlDatabase) sqlToCommonScaleSet(scaleSet ScaleSet) (params.ScaleSet, e endpoint, err := s.sqlToCommonGithubEndpoint(ep) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "converting endpoint") + return params.ScaleSet{}, fmt.Errorf("error converting endpoint: %w", err) } ret.Endpoint = endpoint for idx, inst := range scaleSet.Instances { ret.Instances[idx], err = s.sqlToParamsInstance(inst) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "converting instance") + return params.ScaleSet{}, fmt.Errorf("error converting instance: %w", err) } } @@ -407,11 +407,11 @@ func (s *sqlDatabase) sqlToCommonRepository(repo Repository, detailed bool) (par } secret, err := util.Unseal(repo.WebhookSecret, []byte(s.cfg.Passphrase)) if err != nil { - return params.Repository{}, errors.Wrap(err, "decrypting secret") + return params.Repository{}, fmt.Errorf("error decrypting secret: %w", err) } endpoint, err := s.sqlToCommonGithubEndpoint(repo.Endpoint) if err != nil { - return params.Repository{}, errors.Wrap(err, "converting endpoint") + return params.Repository{}, fmt.Errorf("error converting endpoint: %w", err) } ret := params.Repository{ ID: repo.ID.String(), @@ -442,7 +442,7 @@ func (s *sqlDatabase) sqlToCommonRepository(repo Repository, detailed bool) (par } if err != nil { - return params.Repository{}, errors.Wrap(err, "converting credentials") + return params.Repository{}, fmt.Errorf("error converting credentials: %w", err) } if len(repo.Events) > 0 { @@ -470,7 +470,7 @@ func (s *sqlDatabase) sqlToCommonRepository(repo Repository, detailed bool) (par for idx, pool := range repo.Pools { ret.Pools[idx], err = s.sqlToCommonPool(pool) if err != nil { - return params.Repository{}, errors.Wrap(err, "converting pool") + return params.Repository{}, fmt.Errorf("error converting pool: %w", err) } } @@ -499,14 +499,14 @@ func (s *sqlDatabase) getOrCreateTag(tx *gorm.DB, tagName string) (Tag, error) { return tag, nil } if !errors.Is(q.Error, gorm.ErrRecordNotFound) { - return Tag{}, errors.Wrap(q.Error, "fetching tag from database") + return Tag{}, fmt.Errorf("error fetching tag from database: %w", q.Error) } newTag := Tag{ Name: tagName, } if err := tx.Create(&newTag).Error; err != nil { - return Tag{}, errors.Wrap(err, "creating tag") + return Tag{}, fmt.Errorf("error creating tag: %w", err) } return newTag, nil } @@ -561,7 +561,7 @@ func (s *sqlDatabase) updatePool(tx *gorm.DB, pool Pool, param params.UpdatePool } if q := tx.Save(&pool); q.Error != nil { - return params.Pool{}, errors.Wrap(q.Error, "saving database entry") + return params.Pool{}, fmt.Errorf("error saving database entry: %w", q.Error) } tags := []Tag{} @@ -569,13 +569,13 @@ func (s *sqlDatabase) updatePool(tx *gorm.DB, pool Pool, param params.UpdatePool for _, val := range param.Tags { t, err := s.getOrCreateTag(tx, val) if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching tag") + return params.Pool{}, fmt.Errorf("error fetching tag: %w", err) } tags = append(tags, t) } if err := tx.Model(&pool).Association("Tags").Replace(&tags); err != nil 
{ - return params.Pool{}, errors.Wrap(err, "replacing tags") + return params.Pool{}, fmt.Errorf("error replacing tags: %w", err) } } @@ -585,7 +585,7 @@ func (s *sqlDatabase) updatePool(tx *gorm.DB, pool Pool, param params.UpdatePool func (s *sqlDatabase) getPoolByID(tx *gorm.DB, poolID string, preload ...string) (Pool, error) { u, err := uuid.Parse(poolID) if err != nil { - return Pool{}, errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return Pool{}, fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } var pool Pool q := tx.Model(&Pool{}) @@ -601,7 +601,7 @@ func (s *sqlDatabase) getPoolByID(tx *gorm.DB, poolID string, preload ...string) if errors.Is(q.Error, gorm.ErrRecordNotFound) { return Pool{}, runnerErrors.ErrNotFound } - return Pool{}, errors.Wrap(q.Error, "fetching org from database") + return Pool{}, fmt.Errorf("error fetching org from database: %w", q.Error) } return pool, nil } @@ -621,7 +621,7 @@ func (s *sqlDatabase) getScaleSetByID(tx *gorm.DB, scaleSetID uint, preload ...s if errors.Is(q.Error, gorm.ErrRecordNotFound) { return ScaleSet{}, runnerErrors.ErrNotFound } - return ScaleSet{}, errors.Wrap(q.Error, "fetching scale set from database") + return ScaleSet{}, fmt.Errorf("error fetching scale set from database: %w", q.Error) } return scaleSet, nil } @@ -629,7 +629,7 @@ func (s *sqlDatabase) getScaleSetByID(tx *gorm.DB, scaleSetID uint, preload ...s func (s *sqlDatabase) hasGithubEntity(tx *gorm.DB, entityType params.ForgeEntityType, entityID string) error { u, err := uuid.Parse(entityID) if err != nil { - return errors.Wrap(runnerErrors.ErrBadRequest, "parsing id") + return fmt.Errorf("error parsing id: %w", runnerErrors.ErrBadRequest) } var q *gorm.DB switch entityType { @@ -640,15 +640,15 @@ func (s *sqlDatabase) hasGithubEntity(tx *gorm.DB, entityType params.ForgeEntity case params.ForgeEntityTypeEnterprise: q = tx.Model(&Enterprise{}).Where("id = ?", u) default: - return errors.Wrap(runnerErrors.ErrBadRequest, "invalid entity type") + return fmt.Errorf("error invalid entity type: %w", runnerErrors.ErrBadRequest) } var entity interface{} if err := q.First(entity).Error; err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { - return errors.Wrap(runnerErrors.ErrNotFound, "entity not found") + return fmt.Errorf("error entity not found: %w", runnerErrors.ErrNotFound) } - return errors.Wrap(err, "fetching entity from database") + return fmt.Errorf("error fetching entity from database: %w", err) } return nil } @@ -656,7 +656,7 @@ func (s *sqlDatabase) hasGithubEntity(tx *gorm.DB, entityType params.ForgeEntity func (s *sqlDatabase) marshalAndSeal(data interface{}) ([]byte, error) { enc, err := json.Marshal(data) if err != nil { - return nil, errors.Wrap(err, "marshalling data") + return nil, fmt.Errorf("error marshalling data: %w", err) } return util.Seal(enc, []byte(s.cfg.Passphrase)) } @@ -664,10 +664,10 @@ func (s *sqlDatabase) marshalAndSeal(data interface{}) ([]byte, error) { func (s *sqlDatabase) unsealAndUnmarshal(data []byte, target interface{}) error { decrypted, err := util.Unseal(data, []byte(s.cfg.Passphrase)) if err != nil { - return errors.Wrap(err, "decrypting data") + return fmt.Errorf("error decrypting data: %w", err) } if err := json.Unmarshal(decrypted, target); err != nil { - return errors.Wrap(err, "unmarshalling data") + return fmt.Errorf("error unmarshalling data: %w", err) } return nil } @@ -699,15 +699,15 @@ func (s *sqlDatabase) GetForgeEntity(_ context.Context, entityType params.ForgeE case params.ForgeEntityTypeRepository: 
ghEntity, err = s.GetRepositoryByID(s.ctx, entityID) default: - return params.ForgeEntity{}, errors.Wrap(runnerErrors.ErrBadRequest, "invalid entity type") + return params.ForgeEntity{}, fmt.Errorf("error invalid entity type: %w", runnerErrors.ErrBadRequest) } if err != nil { - return params.ForgeEntity{}, errors.Wrap(err, "failed to get ") + return params.ForgeEntity{}, fmt.Errorf("error failed to get entity from db: %w", err) } entity, err := ghEntity.GetEntity() if err != nil { - return params.ForgeEntity{}, errors.Wrap(err, "failed to get entity") + return params.ForgeEntity{}, fmt.Errorf("error failed to get entity: %w", err) } return entity, nil } @@ -715,7 +715,7 @@ func (s *sqlDatabase) GetForgeEntity(_ context.Context, entityType params.ForgeE func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { repo, err := s.getRepoByID(ctx, s.conn, repoID) if err != nil { - return errors.Wrap(err, "updating instance") + return fmt.Errorf("error updating instance: %w", err) } msg := RepositoryEvent{ @@ -725,7 +725,7 @@ func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, eve } if err := s.conn.Model(&repo).Association("Events").Append(&msg); err != nil { - return errors.Wrap(err, "adding status message") + return fmt.Errorf("error adding status message: %w", err) } if maxEvents > 0 { @@ -734,12 +734,12 @@ func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, eve Limit(maxEvents).Order("id desc"). Where("repo_id = ?", repo.ID).Find(&latestEvents) if q.Error != nil { - return errors.Wrap(q.Error, "fetching latest events") + return fmt.Errorf("error fetching latest events: %w", q.Error) } if len(latestEvents) == maxEvents { lastInList := latestEvents[len(latestEvents)-1] if err := s.conn.Where("repo_id = ? and id < ?", repo.ID, lastInList.ID).Unscoped().Delete(&RepositoryEvent{}).Error; err != nil { - return errors.Wrap(err, "deleting old events") + return fmt.Errorf("error deleting old events: %w", err) } } } @@ -749,7 +749,7 @@ func (s *sqlDatabase) addRepositoryEvent(ctx context.Context, repoID string, eve func (s *sqlDatabase) addOrgEvent(ctx context.Context, orgID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { org, err := s.getOrgByID(ctx, s.conn, orgID) if err != nil { - return errors.Wrap(err, "updating instance") + return fmt.Errorf("error updating instance: %w", err) } msg := OrganizationEvent{ @@ -759,7 +759,7 @@ func (s *sqlDatabase) addOrgEvent(ctx context.Context, orgID string, event param } if err := s.conn.Model(&org).Association("Events").Append(&msg); err != nil { - return errors.Wrap(err, "adding status message") + return fmt.Errorf("error adding status message: %w", err) } if maxEvents > 0 { @@ -768,12 +768,12 @@ func (s *sqlDatabase) addOrgEvent(ctx context.Context, orgID string, event param Limit(maxEvents).Order("id desc"). Where("org_id = ?", org.ID).Find(&latestEvents) if q.Error != nil { - return errors.Wrap(q.Error, "fetching latest events") + return fmt.Errorf("error fetching latest events: %w", q.Error) } if len(latestEvents) == maxEvents { lastInList := latestEvents[len(latestEvents)-1] if err := s.conn.Where("org_id = ? 
and id < ?", org.ID, lastInList.ID).Unscoped().Delete(&OrganizationEvent{}).Error; err != nil { - return errors.Wrap(err, "deleting old events") + return fmt.Errorf("error deleting old events: %w", err) } } } @@ -783,7 +783,7 @@ func (s *sqlDatabase) addOrgEvent(ctx context.Context, orgID string, event param func (s *sqlDatabase) addEnterpriseEvent(ctx context.Context, entID string, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { ent, err := s.getEnterpriseByID(ctx, s.conn, entID) if err != nil { - return errors.Wrap(err, "updating instance") + return fmt.Errorf("error updating instance: %w", err) } msg := EnterpriseEvent{ @@ -793,7 +793,7 @@ func (s *sqlDatabase) addEnterpriseEvent(ctx context.Context, entID string, even } if err := s.conn.Model(&ent).Association("Events").Append(&msg); err != nil { - return errors.Wrap(err, "adding status message") + return fmt.Errorf("error adding status message: %w", err) } if maxEvents > 0 { @@ -802,12 +802,12 @@ func (s *sqlDatabase) addEnterpriseEvent(ctx context.Context, entID string, even Limit(maxEvents).Order("id desc"). Where("enterprise_id = ?", ent.ID).Find(&latestEvents) if q.Error != nil { - return errors.Wrap(q.Error, "fetching latest events") + return fmt.Errorf("error fetching latest events: %w", q.Error) } if len(latestEvents) == maxEvents { lastInList := latestEvents[len(latestEvents)-1] if err := s.conn.Where("enterprise_id = ? and id < ?", ent.ID, lastInList.ID).Unscoped().Delete(&EnterpriseEvent{}).Error; err != nil { - return errors.Wrap(err, "deleting old events") + return fmt.Errorf("error deleting old events: %w", err) } } } @@ -817,7 +817,7 @@ func (s *sqlDatabase) addEnterpriseEvent(ctx context.Context, entID string, even func (s *sqlDatabase) AddEntityEvent(ctx context.Context, entity params.ForgeEntity, event params.EventType, eventLevel params.EventLevel, statusMessage string, maxEvents int) error { if maxEvents == 0 { - return errors.Wrap(runnerErrors.ErrBadRequest, "max events cannot be 0") + return fmt.Errorf("max events cannot be 0: %w", runnerErrors.ErrBadRequest) } switch entity.EntityType { @@ -828,7 +828,7 @@ func (s *sqlDatabase) AddEntityEvent(ctx context.Context, entity params.ForgeEnt case params.ForgeEntityTypeEnterprise: return s.addEnterpriseEvent(ctx, entity.ID, event, eventLevel, statusMessage, maxEvents) default: - return errors.Wrap(runnerErrors.ErrBadRequest, "invalid entity type") + return fmt.Errorf("invalid entity type: %w", runnerErrors.ErrBadRequest) } } @@ -838,12 +838,12 @@ func (s *sqlDatabase) sqlToCommonForgeCredentials(creds GithubCredentials) (para } data, err := util.Unseal(creds.Payload, []byte(s.cfg.Passphrase)) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "unsealing credentials") + return params.ForgeCredentials{}, fmt.Errorf("error unsealing credentials: %w", err) } ep, err := s.sqlToCommonGithubEndpoint(creds.Endpoint) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "converting github endpoint") + return params.ForgeCredentials{}, fmt.Errorf("error converting github endpoint: %w", err) } commonCreds := params.ForgeCredentials{ @@ -865,7 +865,7 @@ func (s *sqlDatabase) sqlToCommonForgeCredentials(creds GithubCredentials) (para for _, repo := range creds.Repositories { commonRepo, err := s.sqlToCommonRepository(repo, false) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "converting github repository") + return params.ForgeCredentials{}, fmt.Errorf("error converting github 
repository: %w", err) } commonCreds.Repositories = append(commonCreds.Repositories, commonRepo) } @@ -873,7 +873,7 @@ func (s *sqlDatabase) sqlToCommonForgeCredentials(creds GithubCredentials) (para for _, org := range creds.Organizations { commonOrg, err := s.sqlToCommonOrganization(org, false) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "converting github organization") + return params.ForgeCredentials{}, fmt.Errorf("error converting github organization: %w", err) } commonCreds.Organizations = append(commonCreds.Organizations, commonOrg) } @@ -881,7 +881,7 @@ func (s *sqlDatabase) sqlToCommonForgeCredentials(creds GithubCredentials) (para for _, ent := range creds.Enterprises { commonEnt, err := s.sqlToCommonEnterprise(ent, false) if err != nil { - return params.ForgeCredentials{}, errors.Wrapf(err, "converting github enterprise: %s", ent.Name) + return params.ForgeCredentials{}, fmt.Errorf("error converting github enterprise %s: %w", ent.Name, err) } commonCreds.Enterprises = append(commonCreds.Enterprises, commonEnt) } @@ -895,12 +895,12 @@ func (s *sqlDatabase) sqlGiteaToCommonForgeCredentials(creds GiteaCredentials) ( } data, err := util.Unseal(creds.Payload, []byte(s.cfg.Passphrase)) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "unsealing credentials") + return params.ForgeCredentials{}, fmt.Errorf("error unsealing credentials: %w", err) } ep, err := s.sqlToCommonGithubEndpoint(creds.Endpoint) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "converting github endpoint") + return params.ForgeCredentials{}, fmt.Errorf("error converting github endpoint: %w", err) } commonCreds := params.ForgeCredentials{ @@ -921,7 +921,7 @@ func (s *sqlDatabase) sqlGiteaToCommonForgeCredentials(creds GiteaCredentials) ( for _, repo := range creds.Repositories { commonRepo, err := s.sqlToCommonRepository(repo, false) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "converting github repository") + return params.ForgeCredentials{}, fmt.Errorf("error converting github repository: %w", err) } commonCreds.Repositories = append(commonCreds.Repositories, commonRepo) } @@ -929,7 +929,7 @@ func (s *sqlDatabase) sqlGiteaToCommonForgeCredentials(creds GiteaCredentials) ( for _, org := range creds.Organizations { commonOrg, err := s.sqlToCommonOrganization(org, false) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "converting github organization") + return params.ForgeCredentials{}, fmt.Errorf("error converting github organization: %w", err) } commonCreds.Organizations = append(commonCreds.Organizations, commonOrg) } @@ -954,12 +954,12 @@ func (s *sqlDatabase) sqlToCommonGithubEndpoint(ep GithubEndpoint) (params.Forge func getUIDFromContext(ctx context.Context) (uuid.UUID, error) { userID := auth.UserID(ctx) if userID == "" { - return uuid.Nil, errors.Wrap(runnerErrors.ErrUnauthorized, "getting UID from context") + return uuid.Nil, fmt.Errorf("error getting UID from context: %w", runnerErrors.ErrUnauthorized) } asUUID, err := uuid.Parse(userID) if err != nil { - return uuid.Nil, errors.Wrap(runnerErrors.ErrUnauthorized, "parsing UID from context") + return uuid.Nil, fmt.Errorf("error parsing UID from context: %w", runnerErrors.ErrUnauthorized) } return asUUID, nil } diff --git a/database/watcher/watcher.go b/database/watcher/watcher.go index a7e1cd67..804dec70 100644 --- a/database/watcher/watcher.go +++ b/database/watcher/watcher.go @@ -16,11 +16,10 @@ package watcher import ( "context" + "fmt" 
"log/slog" "sync" - "github.com/pkg/errors" - "github.com/cloudbase/garm/database/common" garmUtil "github.com/cloudbase/garm/util" ) @@ -83,7 +82,7 @@ func (w *watcher) RegisterProducer(ctx context.Context, id string) (common.Produ defer w.mux.Unlock() if _, ok := w.producers[id]; ok { - return nil, errors.Wrapf(common.ErrProducerAlreadyRegistered, "producer_id: %s", id) + return nil, fmt.Errorf("producer_id %s: %w", id, common.ErrProducerAlreadyRegistered) } p := &producer{ id: id, diff --git a/database/watcher/watcher_test.go b/database/watcher/watcher_test.go index 5b7ecdce..fcbcc4eb 100644 --- a/database/watcher/watcher_test.go +++ b/database/watcher/watcher_test.go @@ -17,11 +17,11 @@ package watcher_test import ( "context" + "fmt" "testing" "time" "github.com/google/uuid" - "github.com/pkg/errors" "github.com/stretchr/testify/suite" commonParams "github.com/cloudbase/garm-provider-common/params" @@ -310,7 +310,7 @@ func maybeInitController(db common.Store) error { } if _, err := db.InitController(); err != nil { - return errors.Wrap(err, "initializing controller") + return fmt.Errorf("error initializing controller: %w", err) } return nil diff --git a/go.mod b/go.mod index da91a90d..9002cf2a 100644 --- a/go.mod +++ b/go.mod @@ -20,11 +20,8 @@ require ( github.com/gorilla/mux v1.8.1 github.com/gorilla/websocket v1.5.4-0.20240702125206-a62d9d2a8413 github.com/jedib0t/go-pretty/v6 v6.6.8 - github.com/juju/clock v1.1.1 - github.com/juju/retry v1.0.1 github.com/manifoldco/promptui v0.9.0 github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 - github.com/pkg/errors v0.9.1 github.com/prometheus/client_golang v1.23.0 github.com/spf13/cobra v1.9.1 github.com/stretchr/testify v1.10.0 @@ -62,9 +59,6 @@ require ( github.com/jinzhu/inflection v1.0.0 // indirect github.com/jinzhu/now v1.1.5 // indirect github.com/josharian/intern v1.0.0 // indirect - github.com/juju/errors v1.0.0 // indirect - github.com/juju/loggo v1.0.0 // indirect - github.com/juju/testing v1.0.2 // indirect github.com/mailru/easyjson v0.9.0 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-runewidth v0.0.16 // indirect @@ -74,6 +68,7 @@ require ( github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/oklog/ulid v1.3.1 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect + github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_model v0.6.2 // indirect github.com/prometheus/common v0.65.0 // indirect diff --git a/go.sum b/go.sum index 2008dff3..4cbbe8d3 100644 --- a/go.sum +++ b/go.sum @@ -95,19 +95,6 @@ github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/juju/ansiterm v0.0.0-20180109212912-720a0952cc2a/go.mod h1:UJSiEoRfvx3hP73CvoARgeLjaIOjybY9vj8PUPPFGeU= -github.com/juju/clock v1.1.1 h1:NvgHG9DQmOpBevgt6gzkyimdWBooLXDy1cQn89qJzBI= -github.com/juju/clock v1.1.1/go.mod h1:HIBvJ8kiV/n7UHwKuCkdYL4l/MDECztHR2sAvWDxxf0= -github.com/juju/errors v1.0.0 h1:yiq7kjCLll1BiaRuNY53MGI0+EQ3rF6GB+wvboZDefM= -github.com/juju/errors v1.0.0/go.mod h1:B5x9thDqx0wIMH3+aLIMP9HjItInYWObRovoCFM5Qe8= -github.com/juju/loggo v1.0.0 h1:Y6ZMQOGR9Aj3BGkiWx7HBbIx6zNwNkxhVNOHU2i1bl0= 
-github.com/juju/loggo v1.0.0/go.mod h1:NIXFioti1SmKAlKNuUwbMenNdef59IF52+ZzuOmHYkg= -github.com/juju/retry v1.0.1 h1:EVwOPq273wO1o0BCU7Ay7XE/bNb+bTNYsCK6y+BboAk= -github.com/juju/retry v1.0.1/go.mod h1:SssN1eYeK3A2qjnFGTiVMbdzGJ2BfluaJblJXvuvgqA= -github.com/juju/testing v1.0.2 h1:OR90RqCd9CJONxXamZAjLknpZdtqDyxqW8IwCbgw3i4= -github.com/juju/testing v1.0.2/go.mod h1:h3Vd2rzB57KrdsBEy6R7bmSKPzP76BnNavt7i8PerwQ= -github.com/juju/utils/v3 v3.0.0 h1:Gg3n63mGPbBuoXCo+EPJuMi44hGZfloI8nlCIebHu2Q= -github.com/juju/utils/v3 v3.0.0/go.mod h1:8csUcj1VRkfjNIRzBFWzLFCMLwLqsRWvkmhfVAUwbC4= github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -116,13 +103,10 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= -github.com/lunixbochs/vtclean v0.0.0-20160125035106-4fbf7632a2c6/go.mod h1:pHhQNgMf3btfWnGBVipUOjRYhoOsdGqdm/+2c2E2WMI= github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4= github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= -github.com/mattn/go-colorable v0.0.6/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-isatty v0.0.0-20160806122752-66b8e73f3f5c/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= @@ -213,13 +197,10 @@ google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/ gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 h1:FVCohIoYO7IJoDDVpV2pdq7SgrMH6wHnuTyrdrxJNoY= gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0/go.mod h1:OdE7CF6DbADk7lN8LIKRzRJTTZXIjtWgA5THM5lhBAw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20160105164936-4f90aeace3a2/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gorm.io/datatypes v1.2.6 h1:KafLdXvFUhzNeL2ncm03Gl3eTLONQfNKZ+wJ+9Y4Nck= diff --git a/internal/testing/testing.go b/internal/testing/testing.go index 98bfd34c..38725882 100644 --- a/internal/testing/testing.go +++ 
b/internal/testing/testing.go @@ -19,13 +19,13 @@ package testing import ( "context" + "errors" "fmt" "os" "path/filepath" "sort" "testing" - "github.com/pkg/errors" "github.com/stretchr/testify/require" runnerErrors "github.com/cloudbase/garm-provider-common/errors" diff --git a/params/requests.go b/params/requests.go index 3f4e1737..7bc17959 100644 --- a/params/requests.go +++ b/params/requests.go @@ -21,8 +21,6 @@ import ( "fmt" "net/url" - "github.com/pkg/errors" - runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" ) @@ -497,7 +495,7 @@ func (c CreateGithubCredentialsParams) Validate() error { if c.AuthType == ForgeAuthTypeApp { if err := c.App.Validate(); err != nil { - return errors.Wrap(err, "invalid app") + return fmt.Errorf("invalid app: %w", err) } } @@ -525,7 +523,7 @@ func (u UpdateGithubCredentialsParams) Validate() error { if u.App != nil { if err := u.App.Validate(); err != nil { - return errors.Wrap(err, "invalid app") + return fmt.Errorf("invalid app: %w", err) } } diff --git a/runner/common.go b/runner/common.go index 63d4887c..b1682c0c 100644 --- a/runner/common.go +++ b/runner/common.go @@ -2,8 +2,8 @@ package runner import ( "context" - - "github.com/pkg/errors" + "errors" + "fmt" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/params" @@ -12,11 +12,11 @@ import ( func (r *Runner) ResolveForgeCredentialByName(ctx context.Context, credentialsName string) (params.ForgeCredentials, error) { githubCred, err := r.store.GetGithubCredentialsByName(ctx, credentialsName, false) if err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { - return params.ForgeCredentials{}, errors.Wrap(err, "fetching github credentials") + return params.ForgeCredentials{}, fmt.Errorf("error fetching github credentials: %w", err) } giteaCred, err := r.store.GetGiteaCredentialsByName(ctx, credentialsName, false) if err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { - return params.ForgeCredentials{}, errors.Wrap(err, "fetching gitea credentials") + return params.ForgeCredentials{}, fmt.Errorf("error fetching gitea credentials: %w", err) } if githubCred.ID != 0 && giteaCred.ID != 0 { return params.ForgeCredentials{}, runnerErrors.NewBadRequestError("credentials %s are defined for both GitHub and Gitea, please specify the forge type", credentialsName) diff --git a/runner/enterprises.go b/runner/enterprises.go index 341cf5b9..6b393abd 100644 --- a/runner/enterprises.go +++ b/runner/enterprises.go @@ -16,12 +16,11 @@ package runner import ( "context" + "errors" "fmt" "log/slog" "strings" - "github.com/pkg/errors" - runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/params" @@ -36,7 +35,7 @@ func (r *Runner) CreateEnterprise(ctx context.Context, param params.CreateEnterp err = param.Validate() if err != nil { - return params.Enterprise{}, errors.Wrap(err, "validating params") + return params.Enterprise{}, fmt.Errorf("error validating params: %w", err) } creds, err := r.store.GetGithubCredentialsByName(ctx, param.CredentialsName, true) @@ -47,7 +46,7 @@ func (r *Runner) CreateEnterprise(ctx context.Context, param params.CreateEnterp _, err = r.store.GetEnterprise(ctx, param.Name, creds.Endpoint.Name) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") + return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", 
err) } } else { return params.Enterprise{}, runnerErrors.NewConflictError("enterprise %s already exists", param.Name) @@ -55,7 +54,7 @@ func (r *Runner) CreateEnterprise(ctx context.Context, param params.CreateEnterp enterprise, err = r.store.CreateEnterprise(ctx, param.Name, creds, param.WebhookSecret, param.PoolBalancerType) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "creating enterprise") + return params.Enterprise{}, fmt.Errorf("error creating enterprise: %w", err) } defer func() { @@ -73,7 +72,7 @@ func (r *Runner) CreateEnterprise(ctx context.Context, param params.CreateEnterp var poolMgr common.PoolManager poolMgr, err = r.poolManagerCtrl.CreateEnterprisePoolManager(r.ctx, enterprise, r.providers, r.store) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "creating enterprise pool manager") + return params.Enterprise{}, fmt.Errorf("error creating enterprise pool manager: %w", err) } if err := poolMgr.Start(); err != nil { if deleteErr := r.poolManagerCtrl.DeleteEnterprisePoolManager(enterprise); deleteErr != nil { @@ -81,7 +80,7 @@ func (r *Runner) CreateEnterprise(ctx context.Context, param params.CreateEnterp ctx, "failed to cleanup pool manager for enterprise", "enterprise_id", enterprise.ID) } - return params.Enterprise{}, errors.Wrap(err, "starting enterprise pool manager") + return params.Enterprise{}, fmt.Errorf("error starting enterprise pool manager: %w", err) } return enterprise, nil } @@ -93,7 +92,7 @@ func (r *Runner) ListEnterprises(ctx context.Context, filter params.EnterpriseFi enterprises, err := r.store.ListEnterprises(ctx, filter) if err != nil { - return nil, errors.Wrap(err, "listing enterprises") + return nil, fmt.Errorf("error listing enterprises: %w", err) } var allEnterprises []params.Enterprise @@ -119,7 +118,7 @@ func (r *Runner) GetEnterpriseByID(ctx context.Context, enterpriseID string) (pa enterprise, err := r.store.GetEnterpriseByID(ctx, enterpriseID) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "fetching enterprise") + return params.Enterprise{}, fmt.Errorf("error fetching enterprise: %w", err) } poolMgr, err := r.poolManagerCtrl.GetEnterprisePoolManager(enterprise) if err != nil { @@ -137,17 +136,17 @@ func (r *Runner) DeleteEnterprise(ctx context.Context, enterpriseID string) erro enterprise, err := r.store.GetEnterpriseByID(ctx, enterpriseID) if err != nil { - return errors.Wrap(err, "fetching enterprise") + return fmt.Errorf("error fetching enterprise: %w", err) } entity, err := enterprise.GetEntity() if err != nil { - return errors.Wrap(err, "getting entity") + return fmt.Errorf("error getting entity: %w", err) } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return errors.Wrap(err, "fetching enterprise pools") + return fmt.Errorf("error fetching enterprise pools: %w", err) } if len(pools) > 0 { @@ -161,7 +160,7 @@ func (r *Runner) DeleteEnterprise(ctx context.Context, enterpriseID string) erro scaleSets, err := r.store.ListEntityScaleSets(ctx, entity) if err != nil { - return errors.Wrap(err, "fetching enterprise scale sets") + return fmt.Errorf("error fetching enterprise scale sets: %w", err) } if len(scaleSets) > 0 { @@ -169,11 +168,11 @@ func (r *Runner) DeleteEnterprise(ctx context.Context, enterpriseID string) erro } if err := r.poolManagerCtrl.DeleteEnterprisePoolManager(enterprise); err != nil { - return errors.Wrap(err, "deleting enterprise pool manager") + return fmt.Errorf("error deleting enterprise pool manager: %w", err) } if err := r.store.DeleteEnterprise(ctx, 
enterpriseID); err != nil { - return errors.Wrapf(err, "removing enterprise %s", enterpriseID) + return fmt.Errorf("error removing enterprise %s: %w", enterpriseID, err) } return nil } @@ -194,7 +193,7 @@ func (r *Runner) UpdateEnterprise(ctx context.Context, enterpriseID string, para enterprise, err := r.store.UpdateEnterprise(ctx, enterpriseID, param) if err != nil { - return params.Enterprise{}, errors.Wrap(err, "updating enterprise") + return params.Enterprise{}, fmt.Errorf("error updating enterprise: %w", err) } poolMgr, err := r.poolManagerCtrl.GetEnterprisePoolManager(enterprise) @@ -243,7 +242,7 @@ func (r *Runner) GetEnterprisePoolByID(ctx context.Context, enterpriseID, poolID } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching pool") + return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) } return pool, nil } @@ -260,7 +259,7 @@ func (r *Runner) DeleteEnterprisePool(ctx context.Context, enterpriseID, poolID pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return errors.Wrap(err, "fetching pool") + return fmt.Errorf("error fetching pool: %w", err) } // nolint:golangci-lint,godox @@ -274,7 +273,7 @@ func (r *Runner) DeleteEnterprisePool(ctx context.Context, enterpriseID, poolID } if err := r.store.DeleteEntityPool(ctx, entity, poolID); err != nil { - return errors.Wrap(err, "deleting pool") + return fmt.Errorf("error deleting pool: %w", err) } return nil } @@ -290,7 +289,7 @@ func (r *Runner) ListEnterprisePools(ctx context.Context, enterpriseID string) ( } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return nil, errors.Wrap(err, "fetching pools") + return nil, fmt.Errorf("error fetching pools: %w", err) } return pools, nil } @@ -306,7 +305,7 @@ func (r *Runner) UpdateEnterprisePool(ctx context.Context, enterpriseID, poolID } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching pool") + return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) } maxRunners := pool.MaxRunners @@ -325,7 +324,7 @@ func (r *Runner) UpdateEnterprisePool(ctx context.Context, enterpriseID, poolID newPool, err := r.store.UpdateEntityPool(ctx, entity, poolID, param) if err != nil { - return params.Pool{}, errors.Wrap(err, "updating pool") + return params.Pool{}, fmt.Errorf("error updating pool: %w", err) } return newPool, nil } @@ -340,7 +339,7 @@ func (r *Runner) ListEnterpriseInstances(ctx context.Context, enterpriseID strin } instances, err := r.store.ListEntityInstances(ctx, entity) if err != nil { - return []params.Instance{}, errors.Wrap(err, "fetching instances") + return []params.Instance{}, fmt.Errorf("error fetching instances: %w", err) } return instances, nil } @@ -351,12 +350,12 @@ func (r *Runner) findEnterprisePoolManager(name, endpointName string) (common.Po enterprise, err := r.store.GetEnterprise(r.ctx, name, endpointName) if err != nil { - return nil, errors.Wrap(err, "fetching enterprise") + return nil, fmt.Errorf("error fetching enterprise: %w", err) } poolManager, err := r.poolManagerCtrl.GetEnterprisePoolManager(enterprise) if err != nil { - return nil, errors.Wrap(err, "fetching pool manager for enterprise") + return nil, fmt.Errorf("error fetching pool manager for enterprise: %w", err) } return poolManager, nil } diff --git a/runner/enterprises_test.go b/runner/enterprises_test.go index ce791e55..0724ccf9 100644 --- a/runner/enterprises_test.go +++ b/runner/enterprises_test.go 
@@ -16,10 +16,10 @@ package runner import ( "context" + "errors" "fmt" "testing" - "github.com/pkg/errors" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/suite" @@ -210,7 +210,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterprisePoolMgrFailed() { s.Fixtures.PoolMgrMock.AssertExpectations(s.T()) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("creating enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("error creating enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *EnterpriseTestSuite) TestCreateEnterpriseStartPoolMgrFailed() { @@ -222,7 +222,7 @@ func (s *EnterpriseTestSuite) TestCreateEnterpriseStartPoolMgrFailed() { s.Fixtures.PoolMgrMock.AssertExpectations(s.T()) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("starting enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("error starting enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *EnterpriseTestSuite) TestListEnterprises() { @@ -324,7 +324,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprise() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetEnterpriseByID(s.Fixtures.AdminContext, s.Fixtures.StoreEnterprises["test-enterprise-3"].ID) - s.Require().Equal("fetching enterprise: not found", err.Error()) + s.Require().Equal("error fetching enterprise: not found", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterpriseErrUnauthorized() { @@ -354,7 +354,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolMgrFailed() { err := s.Runner.DeleteEnterprise(s.Fixtures.AdminContext, s.Fixtures.StoreEnterprises["test-enterprise-1"].ID) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("deleting enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("error deleting enterprise pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *EnterpriseTestSuite) TestUpdateEnterprise() { @@ -477,7 +477,7 @@ func (s *EnterpriseTestSuite) TestDeleteEnterprisePool() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetEntityPool(s.Fixtures.AdminContext, entity, pool.ID) - s.Require().Equal("fetching pool: finding pool: not found", err.Error()) + s.Require().Equal("fetching pool: error finding pool: not found", err.Error()) } func (s *EnterpriseTestSuite) TestDeleteEnterprisePoolErrUnauthorized() { diff --git a/runner/gitea_credentials.go b/runner/gitea_credentials.go index 4fdad1d2..d66212f9 100644 --- a/runner/gitea_credentials.go +++ b/runner/gitea_credentials.go @@ -16,8 +16,7 @@ package runner import ( "context" - - "github.com/pkg/errors" + "fmt" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" @@ -35,7 +34,7 @@ func (r *Runner) ListGiteaCredentials(ctx context.Context) ([]params.ForgeCreden // there is a posibillity that not all creds will be in the cache. 
creds, err := r.store.ListGiteaCredentials(ctx) if err != nil { - return nil, errors.Wrap(err, "fetching gitea credentials") + return nil, fmt.Errorf("error fetching gitea credentials: %w", err) } return creds, nil } @@ -46,12 +45,12 @@ func (r *Runner) CreateGiteaCredentials(ctx context.Context, param params.Create } if err := param.Validate(); err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "failed to validate gitea credentials params") + return params.ForgeCredentials{}, fmt.Errorf("error failed to validate gitea credentials params: %w", err) } creds, err := r.store.CreateGiteaCredentials(ctx, param) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "failed to create gitea credentials") + return params.ForgeCredentials{}, fmt.Errorf("error failed to create gitea credentials: %w", err) } return creds, nil @@ -64,7 +63,7 @@ func (r *Runner) GetGiteaCredentials(ctx context.Context, id uint) (params.Forge creds, err := r.store.GetGiteaCredentials(ctx, id, true) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "failed to get gitea credentials") + return params.ForgeCredentials{}, fmt.Errorf("error failed to get gitea credentials: %w", err) } return creds, nil @@ -76,7 +75,7 @@ func (r *Runner) DeleteGiteaCredentials(ctx context.Context, id uint) error { } if err := r.store.DeleteGiteaCredentials(ctx, id); err != nil { - return errors.Wrap(err, "failed to delete gitea credentials") + return fmt.Errorf("error failed to delete gitea credentials: %w", err) } return nil @@ -88,12 +87,12 @@ func (r *Runner) UpdateGiteaCredentials(ctx context.Context, id uint, param para } if err := param.Validate(); err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "failed to validate gitea credentials params") + return params.ForgeCredentials{}, fmt.Errorf("error failed to validate gitea credentials params: %w", err) } newCreds, err := r.store.UpdateGiteaCredentials(ctx, id, param) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "failed to update gitea credentials") + return params.ForgeCredentials{}, fmt.Errorf("error failed to update gitea credentials: %w", err) } return newCreds, nil diff --git a/runner/gitea_endpoints.go b/runner/gitea_endpoints.go index 181f8e7e..4a7e32d9 100644 --- a/runner/gitea_endpoints.go +++ b/runner/gitea_endpoints.go @@ -16,8 +16,7 @@ package runner import ( "context" - - "github.com/pkg/errors" + "fmt" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" @@ -30,12 +29,12 @@ func (r *Runner) CreateGiteaEndpoint(ctx context.Context, param params.CreateGit } if err := param.Validate(); err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "failed to validate gitea endpoint params") + return params.ForgeEndpoint{}, fmt.Errorf("failed to validate gitea endpoint params: %w", err) } ep, err := r.store.CreateGiteaEndpoint(ctx, param) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "failed to create gitea endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("failed to create gitea endpoint: %w", err) } return ep, nil @@ -47,7 +46,7 @@ func (r *Runner) GetGiteaEndpoint(ctx context.Context, name string) (params.Forg } endpoint, err := r.store.GetGiteaEndpoint(ctx, name) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "failed to get gitea endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("failed to get gitea endpoint: %w", err) } return endpoint, nil @@ -60,7 +59,7 @@ func (r *Runner) DeleteGiteaEndpoint(ctx 
context.Context, name string) error { err := r.store.DeleteGiteaEndpoint(ctx, name) if err != nil { - return errors.Wrap(err, "failed to delete gitea endpoint") + return fmt.Errorf("failed to delete gitea endpoint: %w", err) } return nil @@ -72,12 +71,12 @@ func (r *Runner) UpdateGiteaEndpoint(ctx context.Context, name string, param par } if err := param.Validate(); err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "failed to validate gitea endpoint params") + return params.ForgeEndpoint{}, fmt.Errorf("failed to validate gitea endpoint params: %w", err) } newEp, err := r.store.UpdateGiteaEndpoint(ctx, name, param) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "failed to update gitea endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("failed to update gitea endpoint: %w", err) } return newEp, nil } @@ -89,7 +88,7 @@ func (r *Runner) ListGiteaEndpoints(ctx context.Context) ([]params.ForgeEndpoint endpoints, err := r.store.ListGiteaEndpoints(ctx) if err != nil { - return nil, errors.Wrap(err, "failed to list gitea endpoints") + return nil, fmt.Errorf("failed to list gitea endpoints: %w", err) } return endpoints, nil diff --git a/runner/github_credentials.go b/runner/github_credentials.go index ec524056..5e1291ff 100644 --- a/runner/github_credentials.go +++ b/runner/github_credentials.go @@ -16,8 +16,7 @@ package runner import ( "context" - - "github.com/pkg/errors" + "fmt" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" @@ -36,7 +35,7 @@ func (r *Runner) ListCredentials(ctx context.Context) ([]params.ForgeCredentials // there is a posibillity that not all creds will be in the cache. creds, err := r.store.ListGithubCredentials(ctx) if err != nil { - return nil, errors.Wrap(err, "fetching github credentials") + return nil, fmt.Errorf("error fetching github credentials: %w", err) } // If we do have cache, update the rate limit for each credential. 
The rate limits are queried @@ -57,12 +56,12 @@ func (r *Runner) CreateGithubCredentials(ctx context.Context, param params.Creat } if err := param.Validate(); err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "failed to validate github credentials params") + return params.ForgeCredentials{}, fmt.Errorf("failed to validate github credentials params: %w", err) } creds, err := r.store.CreateGithubCredentials(ctx, param) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "failed to create github credentials") + return params.ForgeCredentials{}, fmt.Errorf("failed to create github credentials: %w", err) } return creds, nil @@ -75,7 +74,7 @@ func (r *Runner) GetGithubCredentials(ctx context.Context, id uint) (params.Forg creds, err := r.store.GetGithubCredentials(ctx, id, true) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "failed to get github credentials") + return params.ForgeCredentials{}, fmt.Errorf("failed to get github credentials: %w", err) } cached, ok := cache.GetGithubCredentials((creds.ID)) @@ -92,7 +91,7 @@ func (r *Runner) DeleteGithubCredentials(ctx context.Context, id uint) error { } if err := r.store.DeleteGithubCredentials(ctx, id); err != nil { - return errors.Wrap(err, "failed to delete github credentials") + return fmt.Errorf("failed to delete github credentials: %w", err) } return nil @@ -104,12 +103,12 @@ func (r *Runner) UpdateGithubCredentials(ctx context.Context, id uint, param par } if err := param.Validate(); err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "failed to validate github credentials params") + return params.ForgeCredentials{}, fmt.Errorf("failed to validate github credentials params: %w", err) } newCreds, err := r.store.UpdateGithubCredentials(ctx, id, param) if err != nil { - return params.ForgeCredentials{}, errors.Wrap(err, "failed to update github credentials") + return params.ForgeCredentials{}, fmt.Errorf("failed to update github credentials: %w", err) } return newCreds, nil diff --git a/runner/github_endpoints.go b/runner/github_endpoints.go index 0e144447..29965081 100644 --- a/runner/github_endpoints.go +++ b/runner/github_endpoints.go @@ -16,8 +16,7 @@ package runner import ( "context" - - "github.com/pkg/errors" + "fmt" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" @@ -30,12 +29,12 @@ func (r *Runner) CreateGithubEndpoint(ctx context.Context, param params.CreateGi } if err := param.Validate(); err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "failed to validate github endpoint params") + return params.ForgeEndpoint{}, fmt.Errorf("error failed to validate github endpoint params: %w", err) } ep, err := r.store.CreateGithubEndpoint(ctx, param) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "failed to create github endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("failed to create github endpoint: %w", err) } return ep, nil @@ -47,7 +46,7 @@ func (r *Runner) GetGithubEndpoint(ctx context.Context, name string) (params.For } endpoint, err := r.store.GetGithubEndpoint(ctx, name) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "failed to get github endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("failed to get github endpoint: %w", err) } return endpoint, nil @@ -60,7 +59,7 @@ func (r *Runner) DeleteGithubEndpoint(ctx context.Context, name string) error { err := r.store.DeleteGithubEndpoint(ctx, name) if err != nil { - return errors.Wrap(err, "failed to delete github 
endpoint") + return fmt.Errorf("failed to delete github endpoint: %w", err) } return nil @@ -72,12 +71,12 @@ func (r *Runner) UpdateGithubEndpoint(ctx context.Context, name string, param pa } if err := param.Validate(); err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "failed to validate github endpoint params") + return params.ForgeEndpoint{}, fmt.Errorf("failed to validate github endpoint params: %w", err) } newEp, err := r.store.UpdateGithubEndpoint(ctx, name, param) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "failed to update github endpoint") + return params.ForgeEndpoint{}, fmt.Errorf("failed to update github endpoint: %w", err) } return newEp, nil } @@ -89,7 +88,7 @@ func (r *Runner) ListGithubEndpoints(ctx context.Context) ([]params.ForgeEndpoin endpoints, err := r.store.ListGithubEndpoints(ctx) if err != nil { - return nil, errors.Wrap(err, "failed to list github endpoints") + return nil, fmt.Errorf("failed to list github endpoints: %w", err) } return endpoints, nil diff --git a/runner/metadata.go b/runner/metadata.go index 2c917ea0..b309b96e 100644 --- a/runner/metadata.go +++ b/runner/metadata.go @@ -18,12 +18,11 @@ import ( "bytes" "context" "encoding/base64" + "errors" "fmt" "html/template" "log/slog" - "github.com/pkg/errors" - "github.com/cloudbase/garm-provider-common/defaults" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" @@ -92,7 +91,7 @@ func (r *Runner) getForgeEntityFromInstance(ctx context.Context, instance params slog.With(slog.Any("error", err)).ErrorContext( ctx, "failed to get entity getter", "instance", instance.Name) - return params.ForgeEntity{}, errors.Wrap(err, "fetching entity getter") + return params.ForgeEntity{}, fmt.Errorf("error fetching entity getter: %w", err) } poolEntity, err := entityGetter.GetEntity() @@ -100,7 +99,7 @@ func (r *Runner) getForgeEntityFromInstance(ctx context.Context, instance params slog.With(slog.Any("error", err)).ErrorContext( ctx, "failed to get entity", "instance", instance.Name) - return params.ForgeEntity{}, errors.Wrap(err, "fetching entity") + return params.ForgeEntity{}, fmt.Errorf("error fetching entity: %w", err) } entity, err := r.store.GetForgeEntity(r.ctx, poolEntity.EntityType, poolEntity.ID) @@ -108,7 +107,7 @@ func (r *Runner) getForgeEntityFromInstance(ctx context.Context, instance params slog.With(slog.Any("error", err)).ErrorContext( ctx, "failed to get entity", "instance", instance.Name) - return params.ForgeEntity{}, errors.Wrap(err, "fetching entity") + return params.ForgeEntity{}, fmt.Errorf("error fetching entity: %w", err) } return entity, nil } @@ -136,13 +135,13 @@ func (r *Runner) GetRunnerServiceName(ctx context.Context) (string, error) { entity, err := r.getForgeEntityFromInstance(ctx, instance) if err != nil { slog.ErrorContext(r.ctx, "failed to get entity", "error", err) - return "", errors.Wrap(err, "fetching entity") + return "", fmt.Errorf("error fetching entity: %w", err) } serviceName, err := r.getServiceNameForEntity(entity) if err != nil { slog.ErrorContext(r.ctx, "failed to get service name", "error", err) - return "", errors.Wrap(err, "fetching service name") + return "", fmt.Errorf("error fetching service name: %w", err) } return serviceName, nil } @@ -157,13 +156,13 @@ func (r *Runner) GenerateSystemdUnitFile(ctx context.Context, runAsUser string) entity, err := r.getForgeEntityFromInstance(ctx, instance) if err != nil { slog.ErrorContext(r.ctx, "failed to get entity", "error", err) - return nil, 
errors.Wrap(err, "fetching entity") + return nil, fmt.Errorf("error fetching entity: %w", err) } serviceName, err := r.getServiceNameForEntity(entity) if err != nil { slog.ErrorContext(r.ctx, "failed to get service name", "error", err) - return nil, errors.Wrap(err, "fetching service name") + return nil, fmt.Errorf("error fetching service name: %w", err) } var unitTemplate *template.Template @@ -178,7 +177,7 @@ func (r *Runner) GenerateSystemdUnitFile(ctx context.Context, runAsUser string) } if err != nil { slog.ErrorContext(r.ctx, "failed to parse template", "error", err) - return nil, errors.Wrap(err, "parsing template") + return nil, fmt.Errorf("error parsing template: %w", err) } if runAsUser == "" { @@ -196,14 +195,14 @@ func (r *Runner) GenerateSystemdUnitFile(ctx context.Context, runAsUser string) var unitFile bytes.Buffer if err := unitTemplate.Execute(&unitFile, data); err != nil { slog.ErrorContext(r.ctx, "failed to execute template", "error", err) - return nil, errors.Wrap(err, "executing template") + return nil, fmt.Errorf("error executing template: %w", err) } return unitFile.Bytes(), nil } func (r *Runner) GetJITConfigFile(ctx context.Context, file string) ([]byte, error) { if !auth.InstanceHasJITConfig(ctx) { - return nil, fmt.Errorf("instance not configured for JIT: %w", runnerErrors.ErrNotFound) + return nil, runnerErrors.NewNotFoundError("instance not configured for JIT") } instance, err := validateInstanceState(ctx) @@ -215,12 +214,12 @@ func (r *Runner) GetJITConfigFile(ctx context.Context, file string) ([]byte, err jitConfig := instance.JitConfiguration contents, ok := jitConfig[file] if !ok { - return nil, errors.Wrap(runnerErrors.ErrNotFound, "retrieving file") + return nil, runnerErrors.NewNotFoundError("could not find file %q", file) } decoded, err := base64.StdEncoding.DecodeString(contents) if err != nil { - return nil, errors.Wrap(err, "decoding file contents") + return nil, fmt.Errorf("error decoding file contents: %w", err) } return decoded, nil @@ -249,12 +248,12 @@ func (r *Runner) GetInstanceGithubRegistrationToken(ctx context.Context) (string poolMgr, err := r.getPoolManagerFromInstance(ctx, instance) if err != nil { - return "", errors.Wrap(err, "fetching pool manager for instance") + return "", fmt.Errorf("error fetching pool manager for instance: %w", err) } token, err := poolMgr.GithubRunnerRegistrationToken() if err != nil { - return "", errors.Wrap(err, "fetching runner token") + return "", fmt.Errorf("error fetching runner token: %w", err) } tokenFetched := true @@ -263,11 +262,11 @@ func (r *Runner) GetInstanceGithubRegistrationToken(ctx context.Context) (string } if _, err := r.store.UpdateInstance(r.ctx, instance.Name, updateParams); err != nil { - return "", errors.Wrap(err, "setting token_fetched for instance") + return "", fmt.Errorf("error setting token_fetched for instance: %w", err) } if err := r.store.AddInstanceEvent(ctx, instance.Name, params.FetchTokenEvent, params.EventInfo, "runner registration token was retrieved"); err != nil { - return "", errors.Wrap(err, "recording event") + return "", fmt.Errorf("error recording event: %w", err) } return token, nil @@ -283,7 +282,7 @@ func (r *Runner) GetRootCertificateBundle(ctx context.Context) (params.Certifica poolMgr, err := r.getPoolManagerFromInstance(ctx, instance) if err != nil { - return params.CertificateBundle{}, errors.Wrap(err, "fetching pool manager for instance") + return params.CertificateBundle{}, fmt.Errorf("error fetching pool manager for instance: %w", err) } bundle, err := 
poolMgr.RootCABundle() diff --git a/runner/organizations.go b/runner/organizations.go index 0ec4bfa2..ffdd1c6c 100644 --- a/runner/organizations.go +++ b/runner/organizations.go @@ -16,12 +16,11 @@ package runner import ( "context" + "errors" "fmt" "log/slog" "strings" - "github.com/pkg/errors" - runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/params" @@ -35,7 +34,7 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP } if err := param.Validate(); err != nil { - return params.Organization{}, errors.Wrap(err, "validating params") + return params.Organization{}, fmt.Errorf("error validating params: %w", err) } var creds params.ForgeCredentials @@ -57,7 +56,7 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP _, err = r.store.GetOrganization(ctx, param.Name, creds.Endpoint.Name) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return params.Organization{}, errors.Wrap(err, "fetching org") + return params.Organization{}, fmt.Errorf("error fetching org: %w", err) } } else { return params.Organization{}, runnerErrors.NewConflictError("organization %s already exists", param.Name) @@ -65,7 +64,7 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP org, err = r.store.CreateOrganization(ctx, param.Name, creds, param.WebhookSecret, param.PoolBalancerType) if err != nil { - return params.Organization{}, errors.Wrap(err, "creating organization") + return params.Organization{}, fmt.Errorf("error creating organization: %w", err) } defer func() { @@ -82,7 +81,7 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP // updating the store. poolMgr, err := r.poolManagerCtrl.CreateOrgPoolManager(r.ctx, org, r.providers, r.store) if err != nil { - return params.Organization{}, errors.Wrap(err, "creating org pool manager") + return params.Organization{}, fmt.Errorf("error creating org pool manager: %w", err) } if err := poolMgr.Start(); err != nil { if deleteErr := r.poolManagerCtrl.DeleteOrgPoolManager(org); deleteErr != nil { @@ -90,7 +89,7 @@ func (r *Runner) CreateOrganization(ctx context.Context, param params.CreateOrgP ctx, "failed to cleanup pool manager for org", "org_id", org.ID) } - return params.Organization{}, errors.Wrap(err, "starting org pool manager") + return params.Organization{}, fmt.Errorf("error starting org pool manager: %w", err) } return org, nil } @@ -102,7 +101,7 @@ func (r *Runner) ListOrganizations(ctx context.Context, filter params.Organizati orgs, err := r.store.ListOrganizations(ctx, filter) if err != nil { - return nil, errors.Wrap(err, "listing organizations") + return nil, fmt.Errorf("error listing organizations: %w", err) } var allOrgs []params.Organization @@ -129,7 +128,7 @@ func (r *Runner) GetOrganizationByID(ctx context.Context, orgID string) (params. 
org, err := r.store.GetOrganizationByID(ctx, orgID) if err != nil { - return params.Organization{}, errors.Wrap(err, "fetching organization") + return params.Organization{}, fmt.Errorf("error fetching organization: %w", err) } poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) @@ -148,17 +147,17 @@ func (r *Runner) DeleteOrganization(ctx context.Context, orgID string, keepWebho org, err := r.store.GetOrganizationByID(ctx, orgID) if err != nil { - return errors.Wrap(err, "fetching org") + return fmt.Errorf("error fetching org: %w", err) } entity, err := org.GetEntity() if err != nil { - return errors.Wrap(err, "getting entity") + return fmt.Errorf("error getting entity: %w", err) } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return errors.Wrap(err, "fetching org pools") + return fmt.Errorf("error fetching org pools: %w", err) } if len(pools) > 0 { @@ -172,7 +171,7 @@ func (r *Runner) DeleteOrganization(ctx context.Context, orgID string, keepWebho scaleSets, err := r.store.ListEntityScaleSets(ctx, entity) if err != nil { - return errors.Wrap(err, "fetching organization scale sets") + return fmt.Errorf("error fetching organization scale sets: %w", err) } if len(scaleSets) > 0 { @@ -182,7 +181,7 @@ func (r *Runner) DeleteOrganization(ctx context.Context, orgID string, keepWebho if !keepWebhook && r.config.Default.EnableWebhookManagement { poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) if err != nil { - return errors.Wrap(err, "fetching pool manager") + return fmt.Errorf("error fetching pool manager: %w", err) } if err := poolMgr.UninstallWebhook(ctx); err != nil { @@ -195,11 +194,11 @@ func (r *Runner) DeleteOrganization(ctx context.Context, orgID string, keepWebho } if err := r.poolManagerCtrl.DeleteOrgPoolManager(org); err != nil { - return errors.Wrap(err, "deleting org pool manager") + return fmt.Errorf("error deleting org pool manager: %w", err) } if err := r.store.DeleteOrganization(ctx, orgID); err != nil { - return errors.Wrapf(err, "removing organization %s", orgID) + return fmt.Errorf("error removing organization %s: %w", orgID, err) } return nil } @@ -220,7 +219,7 @@ func (r *Runner) UpdateOrganization(ctx context.Context, orgID string, param par org, err := r.store.UpdateOrganization(ctx, orgID, param) if err != nil { - return params.Organization{}, errors.Wrap(err, "updating org") + return params.Organization{}, fmt.Errorf("error updating org: %w", err) } poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) @@ -239,7 +238,7 @@ func (r *Runner) CreateOrgPool(ctx context.Context, orgID string, param params.C createPoolParams, err := r.appendTagsToCreatePoolParams(param) if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching pool params") + return params.Pool{}, fmt.Errorf("error fetching pool params: %w", err) } if param.RunnerBootstrapTimeout == 0 { @@ -253,7 +252,7 @@ func (r *Runner) CreateOrgPool(ctx context.Context, orgID string, param params.C pool, err := r.store.CreateEntityPool(ctx, entity, createPoolParams) if err != nil { - return params.Pool{}, errors.Wrap(err, "creating pool") + return params.Pool{}, fmt.Errorf("error creating pool: %w", err) } return pool, nil @@ -271,7 +270,7 @@ func (r *Runner) GetOrgPoolByID(ctx context.Context, orgID, poolID string) (para pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching pool") + return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) } return pool, nil @@ -290,7 +289,7 @@ func (r *Runner) 
DeleteOrgPool(ctx context.Context, orgID, poolID string) error pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return errors.Wrap(err, "fetching pool") + return fmt.Errorf("error fetching pool: %w", err) } return nil } @@ -306,7 +305,7 @@ func (r *Runner) DeleteOrgPool(ctx context.Context, orgID, poolID string) error } if err := r.store.DeleteEntityPool(ctx, entity, poolID); err != nil { - return errors.Wrap(err, "deleting pool") + return fmt.Errorf("error deleting pool: %w", err) } return nil } @@ -321,7 +320,7 @@ func (r *Runner) ListOrgPools(ctx context.Context, orgID string) ([]params.Pool, } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return nil, errors.Wrap(err, "fetching pools") + return nil, fmt.Errorf("error fetching pools: %w", err) } return pools, nil } @@ -338,7 +337,7 @@ func (r *Runner) UpdateOrgPool(ctx context.Context, orgID, poolID string, param pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching pool") + return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) } maxRunners := pool.MaxRunners @@ -357,7 +356,7 @@ func (r *Runner) UpdateOrgPool(ctx context.Context, orgID, poolID string, param newPool, err := r.store.UpdateEntityPool(ctx, entity, poolID, param) if err != nil { - return params.Pool{}, errors.Wrap(err, "updating pool") + return params.Pool{}, fmt.Errorf("error updating pool: %w", err) } return newPool, nil } @@ -374,7 +373,7 @@ func (r *Runner) ListOrgInstances(ctx context.Context, orgID string) ([]params.I instances, err := r.store.ListEntityInstances(ctx, entity) if err != nil { - return []params.Instance{}, errors.Wrap(err, "fetching instances") + return []params.Instance{}, fmt.Errorf("error fetching instances: %w", err) } return instances, nil } @@ -385,12 +384,12 @@ func (r *Runner) findOrgPoolManager(name, endpointName string) (common.PoolManag org, err := r.store.GetOrganization(r.ctx, name, endpointName) if err != nil { - return nil, errors.Wrap(err, "fetching org") + return nil, fmt.Errorf("error fetching org: %w", err) } poolManager, err := r.poolManagerCtrl.GetOrgPoolManager(org) if err != nil { - return nil, errors.Wrap(err, "fetching pool manager for org") + return nil, fmt.Errorf("error fetching pool manager for org: %w", err) } return poolManager, nil } @@ -402,17 +401,17 @@ func (r *Runner) InstallOrgWebhook(ctx context.Context, orgID string, param para org, err := r.store.GetOrganizationByID(ctx, orgID) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "fetching org") + return params.HookInfo{}, fmt.Errorf("error fetching org: %w", err) } poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "fetching pool manager for org") + return params.HookInfo{}, fmt.Errorf("error fetching pool manager for org: %w", err) } info, err := poolMgr.InstallWebhook(ctx, param) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "installing webhook") + return params.HookInfo{}, fmt.Errorf("error installing webhook: %w", err) } return info, nil } @@ -424,16 +423,16 @@ func (r *Runner) UninstallOrgWebhook(ctx context.Context, orgID string) error { org, err := r.store.GetOrganizationByID(ctx, orgID) if err != nil { - return errors.Wrap(err, "fetching org") + return fmt.Errorf("error fetching org: %w", err) } poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) if err != nil { - return errors.Wrap(err, 
"fetching pool manager for org") + return fmt.Errorf("error fetching pool manager for org: %w", err) } if err := poolMgr.UninstallWebhook(ctx); err != nil { - return errors.Wrap(err, "uninstalling webhook") + return fmt.Errorf("error uninstalling webhook: %w", err) } return nil } @@ -445,17 +444,17 @@ func (r *Runner) GetOrgWebhookInfo(ctx context.Context, orgID string) (params.Ho org, err := r.store.GetOrganizationByID(ctx, orgID) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "fetching org") + return params.HookInfo{}, fmt.Errorf("error fetching org: %w", err) } poolMgr, err := r.poolManagerCtrl.GetOrgPoolManager(org) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "fetching pool manager for org") + return params.HookInfo{}, fmt.Errorf("error fetching pool manager for org: %w", err) } info, err := poolMgr.GetWebhookInfo(ctx) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "fetching webhook info") + return params.HookInfo{}, fmt.Errorf("error fetching webhook info: %w", err) } return info, nil } diff --git a/runner/organizations_test.go b/runner/organizations_test.go index 9de6d2b4..8d2aa3f6 100644 --- a/runner/organizations_test.go +++ b/runner/organizations_test.go @@ -16,10 +16,10 @@ package runner import ( "context" + "errors" "fmt" "testing" - "github.com/pkg/errors" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/suite" @@ -224,7 +224,7 @@ func (s *OrgTestSuite) TestCreateOrganizationPoolMgrFailed() { s.Fixtures.PoolMgrMock.AssertExpectations(s.T()) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("creating org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("error creating org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *OrgTestSuite) TestCreateOrganizationStartPoolMgrFailed() { @@ -236,7 +236,7 @@ func (s *OrgTestSuite) TestCreateOrganizationStartPoolMgrFailed() { s.Fixtures.PoolMgrMock.AssertExpectations(s.T()) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("starting org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("error starting org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *OrgTestSuite) TestListOrganizations() { @@ -338,7 +338,7 @@ func (s *OrgTestSuite) TestDeleteOrganization() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetOrganizationByID(s.Fixtures.AdminContext, s.Fixtures.StoreOrgs["test-org-3"].ID) - s.Require().Equal("fetching org: not found", err.Error()) + s.Require().Equal("error fetching org: not found", err.Error()) } func (s *OrgTestSuite) TestDeleteOrganizationErrUnauthorized() { @@ -368,7 +368,7 @@ func (s *OrgTestSuite) TestDeleteOrganizationPoolMgrFailed() { err := s.Runner.DeleteOrganization(s.Fixtures.AdminContext, s.Fixtures.StoreOrgs["test-org-1"].ID, true) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("deleting org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("error deleting org pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *OrgTestSuite) TestUpdateOrganization() { @@ -502,7 +502,7 @@ func (s *OrgTestSuite) TestDeleteOrgPool() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetEntityPool(s.Fixtures.AdminContext, entity, pool.ID) - s.Require().Equal("fetching pool: finding pool: not found", err.Error()) + s.Require().Equal("fetching pool: error finding pool: not 
found", err.Error()) } func (s *OrgTestSuite) TestDeleteOrgPoolErrUnauthorized() { diff --git a/runner/pool/common.go b/runner/pool/common.go index 066866a2..fa2f7e5a 100644 --- a/runner/pool/common.go +++ b/runner/pool/common.go @@ -16,12 +16,12 @@ package pool import ( "context" + "fmt" "net/http" "net/url" "strings" "github.com/google/go-github/v72/github" - "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/params" @@ -30,7 +30,7 @@ import ( func validateHookRequest(controllerID, baseURL string, allHooks []*github.Hook, req *github.Hook) error { parsed, err := url.Parse(baseURL) if err != nil { - return errors.Wrap(err, "parsing webhook url") + return fmt.Errorf("error parsing webhook url: %w", err) } partialMatches := []string{} @@ -80,7 +80,7 @@ func (r *basePoolManager) listHooks(ctx context.Context) ([]*github.Hook, error) if ghResp != nil && ghResp.StatusCode == http.StatusNotFound { return nil, runnerErrors.NewBadRequestError("repository not found or your PAT does not have access to manage webhooks") } - return nil, errors.Wrap(err, "fetching hooks") + return nil, fmt.Errorf("error fetching hooks: %w", err) } allHooks = append(allHooks, hooks...) if ghResp.NextPage == 0 { diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 1afee56e..690fed93 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -17,6 +17,7 @@ package pool import ( "context" "crypto/rand" + "errors" "fmt" "log/slog" "math" @@ -29,7 +30,6 @@ import ( "github.com/google/go-github/v72/github" "github.com/google/uuid" - "github.com/pkg/errors" "golang.org/x/sync/errgroup" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -76,16 +76,16 @@ func NewEntityPoolManager(ctx context.Context, entity params.ForgeEntity, instan ) ghc, err := ghClient.Client(ctx, entity) if err != nil { - return nil, errors.Wrap(err, "getting github client") + return nil, fmt.Errorf("error getting github client: %w", err) } if entity.WebhookSecret == "" { - return nil, errors.New("webhook secret is empty") + return nil, fmt.Errorf("webhook secret is empty") } controllerInfo, err := store.ControllerInfo() if err != nil { - return nil, errors.Wrap(err, "getting controller info") + return nil, fmt.Errorf("error getting controller info: %w", err) } consumerID := fmt.Sprintf("pool-manager-%s-%s", entity.String(), entity.Credentials.Endpoint.Name) @@ -95,13 +95,13 @@ func NewEntityPoolManager(ctx context.Context, entity params.ForgeEntity, instan composeWatcherFilters(entity), ) if err != nil { - return nil, errors.Wrap(err, "registering consumer") + return nil, fmt.Errorf("error registering consumer: %w", err) } wg := &sync.WaitGroup{} backoff, err := locking.NewInstanceDeleteBackoff(ctx) if err != nil { - return nil, errors.Wrap(err, "creating backoff") + return nil, fmt.Errorf("error creating backoff: %w", err) } repo := &basePoolManager{ @@ -158,7 +158,7 @@ func (r *basePoolManager) getProviderBaseParams(pool params.Pool) common.Provide func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { if err := r.ValidateOwner(job); err != nil { slog.ErrorContext(r.ctx, "failed to validate owner", "error", err) - return errors.Wrap(err, "validating owner") + return fmt.Errorf("error validating owner: %w", err) } // we see events where the lables seem to be missing. 
We should ignore these @@ -171,7 +171,7 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { jobParams, err := r.paramsWorkflowJobToParamsJob(job) if err != nil { slog.ErrorContext(r.ctx, "failed to convert job to params", "error", err) - return errors.Wrap(err, "converting job to params") + return fmt.Errorf("error converting job to params: %w", err) } var triggeredBy int64 @@ -249,7 +249,7 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner status", "runner_name", util.SanitizeLogEntry(jobParams.RunnerName)) - return errors.Wrap(err, "updating runner") + return fmt.Errorf("error updating runner: %w", err) } slog.DebugContext( r.ctx, "marking instance as pending_delete", @@ -261,7 +261,7 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner status", "runner_name", util.SanitizeLogEntry(jobParams.RunnerName)) - return errors.Wrap(err, "updating runner") + return fmt.Errorf("error updating runner: %w", err) } case "in_progress": fromCache, ok := cache.GetInstanceCache(jobParams.RunnerName) @@ -284,7 +284,7 @@ func (r *basePoolManager) HandleWorkflowJob(job params.WorkflowJob) error { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner status", "runner_name", util.SanitizeLogEntry(jobParams.RunnerName)) - return errors.Wrap(err, "updating runner") + return fmt.Errorf("error updating runner: %w", err) } // Set triggeredBy here so we break the lock on any potential queued job. triggeredBy = jobIDFromLabels(instance.AditionalLabels) @@ -396,7 +396,7 @@ func (r *basePoolManager) updateTools() error { func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []*github.Runner) error { dbInstances, err := r.store.ListEntityInstances(r.ctx, r.entity) if err != nil { - return errors.Wrap(err, "fetching instances from db") + return fmt.Errorf("error fetching instances from db: %w", err) } runnerNames := map[string]bool{} @@ -435,7 +435,7 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []*github.Runne } pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) if err != nil { - return errors.Wrap(err, "fetching instance pool info") + return fmt.Errorf("error fetching instance pool info: %w", err) } switch instance.RunnerStatus { @@ -463,7 +463,7 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []*github.Runne slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner", "runner_name", instance.Name) - return errors.Wrap(err, "updating runner") + return fmt.Errorf("error updating runner: %w", err) } } } @@ -476,7 +476,7 @@ func (r *basePoolManager) cleanupOrphanedProviderRunners(runners []*github.Runne func (r *basePoolManager) reapTimedOutRunners(runners []*github.Runner) error { dbInstances, err := r.store.ListEntityInstances(r.ctx, r.entity) if err != nil { - return errors.Wrap(err, "fetching instances from db") + return fmt.Errorf("error fetching instances from db: %w", err) } runnersByName := map[string]*github.Runner{} @@ -510,7 +510,7 @@ func (r *basePoolManager) reapTimedOutRunners(runners []*github.Runner) error { pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) if err != nil { - return errors.Wrap(err, "fetching instance pool info") + return fmt.Errorf("error fetching instance pool info: %w", err) } if 
time.Since(instance.UpdatedAt).Minutes() < float64(pool.RunnerTimeout()) { continue @@ -529,7 +529,7 @@ func (r *basePoolManager) reapTimedOutRunners(runners []*github.Runner) error { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner status", "runner_name", instance.Name) - return errors.Wrap(err, "updating runner") + return fmt.Errorf("error updating runner: %w", err) } } } @@ -560,7 +560,7 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) dbInstance, err := r.store.GetInstanceByName(r.ctx, *runner.Name) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return errors.Wrap(err, "fetching instance from DB") + return fmt.Errorf("error fetching instance from DB: %w", err) } // We no longer have a DB entry for this instance, and the runner appears offline in github. // Previous forceful removal may have failed? @@ -572,7 +572,7 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) if errors.Is(err, runnerErrors.ErrNotFound) { continue } - return errors.Wrap(err, "removing runner") + return fmt.Errorf("error removing runner: %w", err) } continue } @@ -606,7 +606,7 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) pool, err := r.store.GetEntityPool(r.ctx, r.entity, dbInstance.PoolID) if err != nil { - return errors.Wrap(err, "fetching pool") + return fmt.Errorf("error fetching pool: %w", err) } // check if the provider still has the instance. @@ -628,7 +628,7 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) } poolInstances, err = provider.ListInstances(r.ctx, pool.ID, listInstancesParams) if err != nil { - return errors.Wrapf(err, "fetching instances for pool %s", dbInstance.PoolID) + return fmt.Errorf("error fetching instances for pool %s: %w", dbInstance.PoolID, err) } poolInstanceCache[dbInstance.PoolID] = poolInstances } @@ -662,7 +662,7 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) r.ctx, "runner disappeared from github", "runner_name", dbInstance.Name) } else { - return errors.Wrap(err, "removing runner from github") + return fmt.Errorf("error removing runner from github: %w", err) } } // Remove the database entry for the runner. 
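The hunks above keep the errors.Is(err, runnerErrors.ErrNotFound) checks while swapping errors.Wrap for fmt.Errorf with the %w verb. A minimal standalone sketch (not part of the patch; ErrNotFound below is a stand-in for runnerErrors.ErrNotFound) of why those sentinel checks keep working after the migration:

package main

import (
	"errors"
	"fmt"
)

// ErrNotFound is a stand-in for the runnerErrors.ErrNotFound sentinel.
var ErrNotFound = errors.New("not found")

// fetchInstance simulates a store lookup that fails with the sentinel error
// and wraps it the same way the patched code does.
func fetchInstance() error {
	return fmt.Errorf("error fetching instance from DB: %w", ErrNotFound)
}

func main() {
	err := fetchInstance()
	// errors.Is unwraps %w chains, so the sentinel comparison still succeeds.
	fmt.Println(errors.Is(err, ErrNotFound)) // prints: true
}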
@@ -670,7 +670,7 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) r.ctx, "Removing from database", "runner_name", dbInstance.Name) if err := r.store.DeleteInstance(ctx, dbInstance.PoolID, dbInstance.Name); err != nil { - return errors.Wrap(err, "removing runner from database") + return fmt.Errorf("error removing runner from database: %w", err) } deleteMux = true return nil @@ -696,13 +696,13 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) }, } if err := provider.Start(r.ctx, dbInstance.ProviderID, startParams); err != nil { - return errors.Wrapf(err, "starting instance %s", dbInstance.ProviderID) + return fmt.Errorf("error starting instance %s: %w", dbInstance.ProviderID, err) } return nil }) } if err := r.waitForErrorGroupOrContextCancelled(g); err != nil { - return errors.Wrap(err, "removing orphaned github runners") + return fmt.Errorf("error removing orphaned github runners: %w", err) } return nil } @@ -732,7 +732,7 @@ func (r *basePoolManager) setInstanceRunnerStatus(runnerName string, status para } instance, err := r.store.UpdateInstance(r.ctx, runnerName, updateParams) if err != nil { - return params.Instance{}, errors.Wrap(err, "updating runner state") + return params.Instance{}, fmt.Errorf("error updating runner state: %w", err) } return instance, nil } @@ -745,7 +745,7 @@ func (r *basePoolManager) setInstanceStatus(runnerName string, status commonPara instance, err := r.store.UpdateInstance(r.ctx, runnerName, updateParams) if err != nil { - return params.Instance{}, errors.Wrap(err, "updating runner state") + return params.Instance{}, fmt.Errorf("error updating runner state: %w", err) } return instance, nil } @@ -753,7 +753,7 @@ func (r *basePoolManager) setInstanceStatus(runnerName string, status commonPara func (r *basePoolManager) AddRunner(ctx context.Context, poolID string, aditionalLabels []string) (err error) { pool, err := r.store.GetEntityPool(r.ctx, r.entity, poolID) if err != nil { - return errors.Wrap(err, "fetching pool") + return fmt.Errorf("error fetching pool: %w", err) } provider, ok := r.providers[pool.ProviderName] @@ -796,7 +796,7 @@ func (r *basePoolManager) AddRunner(ctx context.Context, poolID string, aditiona instance, err := r.store.CreateInstance(r.ctx, poolID, createParams) if err != nil { - return errors.Wrap(err, "creating instance") + return fmt.Errorf("error creating instance: %w", err) } defer func() { @@ -864,7 +864,7 @@ func (r *basePoolManager) getLabelsForInstance(pool params.Pool) []string { func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error { pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) if err != nil { - return errors.Wrap(err, "fetching pool") + return fmt.Errorf("error fetching pool: %w", err) } provider, ok := r.providers[pool.ProviderName] @@ -876,7 +876,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error jwtToken, err := r.instanceTokenGetter.NewInstanceJWTToken(instance, r.entity, pool.PoolType(), jwtValidity) if err != nil { - return errors.Wrap(err, "fetching instance jwt token") + return fmt.Errorf("error fetching instance jwt token: %w", err) } hasJITConfig := len(instance.JitConfiguration) > 0 @@ -933,7 +933,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error providerInstance, err := provider.CreateInstance(r.ctx, bootstrapArgs, createInstanceParams) if err != nil { instanceIDToDelete = instance.Name - return errors.Wrap(err, "creating 
instance") + return fmt.Errorf("error creating instance: %w", err) } if providerInstance.Status == commonParams.InstanceError { @@ -945,7 +945,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error updateInstanceArgs := r.updateArgsFromProviderInstance(providerInstance) if _, err := r.store.UpdateInstance(r.ctx, instance.Name, updateInstanceArgs); err != nil { - return errors.Wrap(err, "updating instance") + return fmt.Errorf("error updating instance: %w", err) } return nil } @@ -966,7 +966,7 @@ func (r *basePoolManager) addInstanceToProvider(instance params.Instance) error func (r *basePoolManager) paramsWorkflowJobToParamsJob(job params.WorkflowJob) (params.Job, error) { asUUID, err := uuid.Parse(r.ID()) if err != nil { - return params.Job{}, errors.Wrap(err, "parsing pool ID as UUID") + return params.Job{}, fmt.Errorf("error parsing pool ID as UUID: %w", err) } jobParams := params.Job{ @@ -995,7 +995,7 @@ func (r *basePoolManager) paramsWorkflowJobToParamsJob(job params.WorkflowJob) ( case params.ForgeEntityTypeOrganization: jobParams.OrgID = &asUUID default: - return jobParams, errors.Errorf("unknown pool type: %s", r.entity.EntityType) + return jobParams, fmt.Errorf("unknown pool type: %s", r.entity.EntityType) } return jobParams, nil @@ -1101,7 +1101,7 @@ func (r *basePoolManager) scaleDownOnePool(ctx context.Context, pool params.Pool // instead of returning a bunch of results and filtering manually. queued, err := r.store.ListEntityJobsByStatus(r.ctx, r.entity.EntityType, r.entity.ID, params.JobStatusQueued) if err != nil && !errors.Is(err, runnerErrors.ErrNotFound) { - return errors.Wrap(err, "listing queued jobs") + return fmt.Errorf("error listing queued jobs: %w", err) } for _, job := range queued { @@ -1341,7 +1341,7 @@ func (r *basePoolManager) ensureMinIdleRunners() error { func (r *basePoolManager) deleteInstanceFromProvider(ctx context.Context, instance params.Instance) error { pool, err := r.store.GetEntityPool(r.ctx, r.entity, instance.PoolID) if err != nil { - return errors.Wrap(err, "fetching pool") + return fmt.Errorf("error fetching pool: %w", err) } provider, ok := r.providers[instance.ProviderName] @@ -1367,7 +1367,7 @@ func (r *basePoolManager) deleteInstanceFromProvider(ctx context.Context, instan }, } if err := provider.DeleteInstance(ctx, identifier, deleteInstanceParams); err != nil { - return errors.Wrap(err, "removing instance") + return fmt.Errorf("error removing instance: %w", err) } return nil @@ -1583,7 +1583,7 @@ func (r *basePoolManager) Wait() error { select { case <-done: case <-timer.C: - return errors.Wrap(runnerErrors.ErrTimeout, "waiting for pool to stop") + return runnerErrors.NewTimeoutError("waiting for pool to stop") } return nil } @@ -1609,11 +1609,11 @@ func (r *basePoolManager) runnerCleanup() (err error) { func (r *basePoolManager) cleanupOrphanedRunners(runners []*github.Runner) error { if err := r.cleanupOrphanedProviderRunners(runners); err != nil { - return errors.Wrap(err, "cleaning orphaned instances") + return fmt.Errorf("error cleaning orphaned instances: %w", err) } if err := r.cleanupOrphanedGithubRunners(runners); err != nil { - return errors.Wrap(err, "cleaning orphaned github runners") + return fmt.Errorf("error cleaning orphaned github runners: %w", err) } return nil @@ -1693,10 +1693,10 @@ func (r *basePoolManager) DeleteRunner(runner params.Instance, forceRemove, bypa if bypassGHUnauthorizedError { slog.Info("bypass github unauthorized error is set, marking runner for deletion") } else { - 
return errors.Wrap(err, "removing runner") + return fmt.Errorf("error removing runner: %w", err) } } else { - return errors.Wrap(err, "removing runner") + return fmt.Errorf("error removing runner: %w", err) } } } @@ -1714,7 +1714,7 @@ func (r *basePoolManager) DeleteRunner(runner params.Instance, forceRemove, bypa slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to update runner", "runner_name", runner.Name) - return errors.Wrap(err, "updating runner") + return fmt.Errorf("error updating runner: %w", err) } return nil } @@ -1745,7 +1745,7 @@ func (r *basePoolManager) DeleteRunner(runner params.Instance, forceRemove, bypa func (r *basePoolManager) consumeQueuedJobs() error { queued, err := r.store.ListEntityJobsByStatus(r.ctx, r.entity.EntityType, r.entity.ID, params.JobStatusQueued) if err != nil { - return errors.Wrap(err, "listing queued jobs") + return fmt.Errorf("error listing queued jobs: %w", err) } poolsCache := poolsForTags{ @@ -1860,7 +1860,7 @@ func (r *basePoolManager) consumeQueuedJobs() error { slog.With(slog.Any("error", err)).ErrorContext( r.ctx, "failed to unlock job", "job_id", job.WorkflowJobID) - return errors.Wrap(err, "unlocking job") + return fmt.Errorf("error unlocking job: %w", err) } } } @@ -1874,12 +1874,12 @@ func (r *basePoolManager) consumeQueuedJobs() error { func (r *basePoolManager) UninstallWebhook(ctx context.Context) error { if r.controllerInfo.ControllerWebhookURL == "" { - return errors.Wrap(runnerErrors.ErrBadRequest, "controller webhook url is empty") + return runnerErrors.NewBadRequestError("controller webhook url is empty") } allHooks, err := r.listHooks(ctx) if err != nil { - return errors.Wrap(err, "listing hooks") + return fmt.Errorf("error listing hooks: %w", err) } var controllerHookID int64 @@ -1917,16 +1917,16 @@ func (r *basePoolManager) UninstallWebhook(ctx context.Context) error { func (r *basePoolManager) InstallHook(ctx context.Context, req *github.Hook) (params.HookInfo, error) { allHooks, err := r.listHooks(ctx) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "listing hooks") + return params.HookInfo{}, fmt.Errorf("error listing hooks: %w", err) } if err := validateHookRequest(r.controllerInfo.ControllerID.String(), r.controllerInfo.WebhookURL, allHooks, req); err != nil { - return params.HookInfo{}, errors.Wrap(err, "validating hook request") + return params.HookInfo{}, fmt.Errorf("error validating hook request: %w", err) } hook, err := r.ghcli.CreateEntityHook(ctx, req) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "creating entity hook") + return params.HookInfo{}, fmt.Errorf("error creating entity hook: %w", err) } if _, err := r.ghcli.PingEntityHook(ctx, hook.GetID()); err != nil { @@ -1941,7 +1941,7 @@ func (r *basePoolManager) InstallHook(ctx context.Context, req *github.Hook) (pa func (r *basePoolManager) InstallWebhook(ctx context.Context, param params.InstallWebhookParams) (params.HookInfo, error) { if r.controllerInfo.ControllerWebhookURL == "" { - return params.HookInfo{}, errors.Wrap(runnerErrors.ErrBadRequest, "controller webhook url is empty") + return params.HookInfo{}, runnerErrors.NewBadRequestError("controller webhook url is empty") } insecureSSL := "0" @@ -1989,9 +1989,9 @@ func (r *basePoolManager) GithubRunnerRegistrationToken() (string, error) { tk, ghResp, err := r.ghcli.CreateEntityRegistrationToken(r.ctx) if err != nil { if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return "", errors.Wrap(runnerErrors.ErrUnauthorized, "fetching token") + return 
"", runnerErrors.NewUnauthorizedError("error fetching token") } - return "", errors.Wrap(err, "creating runner token") + return "", fmt.Errorf("error creating runner token: %w", err) } return *tk.Token, nil } @@ -2000,9 +2000,9 @@ func (r *basePoolManager) FetchTools() ([]commonParams.RunnerApplicationDownload tools, ghResp, err := r.ghcli.ListEntityRunnerApplicationDownloads(r.ctx) if err != nil { if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return nil, errors.Wrap(runnerErrors.ErrUnauthorized, "fetching tools") + return nil, runnerErrors.NewUnauthorizedError("error fetching tools") } - return nil, errors.Wrap(err, "fetching runner tools") + return nil, fmt.Errorf("error fetching runner tools: %w", err) } ret := []commonParams.RunnerApplicationDownload{} @@ -2027,9 +2027,9 @@ func (r *basePoolManager) GetGithubRunners() ([]*github.Runner, error) { runners, ghResp, err := r.ghcli.ListEntityRunners(r.ctx, &opts) if err != nil { if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return nil, errors.Wrap(runnerErrors.ErrUnauthorized, "fetching runners") + return nil, runnerErrors.NewUnauthorizedError("error fetching runners") } - return nil, errors.Wrap(err, "fetching runners") + return nil, fmt.Errorf("error fetching runners: %w", err) } allRunners = append(allRunners, runners.Runners...) if ghResp.NextPage == 0 { @@ -2044,7 +2044,7 @@ func (r *basePoolManager) GetGithubRunners() ([]*github.Runner, error) { func (r *basePoolManager) GetWebhookInfo(ctx context.Context) (params.HookInfo, error) { allHooks, err := r.listHooks(ctx) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "listing hooks") + return params.HookInfo{}, fmt.Errorf("error listing hooks: %w", err) } trimmedBase := strings.TrimRight(r.controllerInfo.WebhookURL, "/") trimmedController := strings.TrimRight(r.controllerInfo.ControllerWebhookURL, "/") diff --git a/runner/pool/watcher.go b/runner/pool/watcher.go index 324643ce..999b52c6 100644 --- a/runner/pool/watcher.go +++ b/runner/pool/watcher.go @@ -17,8 +17,6 @@ package pool import ( "log/slog" - "github.com/pkg/errors" - runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/database/common" "github.com/cloudbase/garm/params" @@ -46,7 +44,7 @@ func (r *basePoolManager) getClientOrStub() runnerCommon.GithubClient { if err != nil { slog.WarnContext(r.ctx, "failed to create github client", "error", err) ghc = &stubGithubClient{ - err: errors.Wrapf(runnerErrors.ErrUnauthorized, "failed to create github client; please update credentials: %v", err), + err: runnerErrors.NewUnauthorizedError("failed to create github client; please update credentials"), } } return ghc diff --git a/runner/pools.go b/runner/pools.go index 15aecb5e..ffd3b9c8 100644 --- a/runner/pools.go +++ b/runner/pools.go @@ -16,8 +16,8 @@ package runner import ( "context" - - "github.com/pkg/errors" + "errors" + "fmt" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" @@ -31,7 +31,7 @@ func (r *Runner) ListAllPools(ctx context.Context) ([]params.Pool, error) { pools, err := r.store.ListAllPools(ctx) if err != nil { - return nil, errors.Wrap(err, "fetching pools") + return nil, fmt.Errorf("error fetching pools: %w", err) } return pools, nil } @@ -43,7 +43,7 @@ func (r *Runner) GetPoolByID(ctx context.Context, poolID string) (params.Pool, e pool, err := r.store.GetPoolByID(ctx, poolID) if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching pool") + return params.Pool{}, 
fmt.Errorf("error fetching pool: %w", err) } return pool, nil } @@ -56,7 +56,7 @@ func (r *Runner) DeletePoolByID(ctx context.Context, poolID string) error { pool, err := r.store.GetPoolByID(ctx, poolID) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return errors.Wrap(err, "fetching pool") + return fmt.Errorf("error fetching pool: %w", err) } return nil } @@ -66,7 +66,7 @@ func (r *Runner) DeletePoolByID(ctx context.Context, poolID string) error { } if err := r.store.DeletePoolByID(ctx, poolID); err != nil { - return errors.Wrap(err, "deleting pool") + return fmt.Errorf("error deleting pool: %w", err) } return nil } @@ -78,7 +78,7 @@ func (r *Runner) UpdatePoolByID(ctx context.Context, poolID string, param params pool, err := r.store.GetPoolByID(ctx, poolID) if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching pool") + return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) } maxRunners := pool.MaxRunners @@ -101,12 +101,12 @@ func (r *Runner) UpdatePoolByID(ctx context.Context, poolID string, param params entity, err := pool.GetEntity() if err != nil { - return params.Pool{}, errors.Wrap(err, "getting entity") + return params.Pool{}, fmt.Errorf("error getting entity: %w", err) } newPool, err := r.store.UpdateEntityPool(ctx, entity, poolID, param) if err != nil { - return params.Pool{}, errors.Wrap(err, "updating pool") + return params.Pool{}, fmt.Errorf("error updating pool: %w", err) } return newPool, nil } @@ -118,7 +118,7 @@ func (r *Runner) ListAllJobs(ctx context.Context) ([]params.Job, error) { jobs, err := r.store.ListAllJobs(ctx) if err != nil { - return nil, errors.Wrap(err, "fetching jobs") + return nil, fmt.Errorf("error fetching jobs: %w", err) } return jobs, nil } diff --git a/runner/pools_test.go b/runner/pools_test.go index 3bc5d4b3..2a2aea5d 100644 --- a/runner/pools_test.go +++ b/runner/pools_test.go @@ -169,7 +169,7 @@ func (s *PoolTestSuite) TestGetPoolByIDNotFound() { s.Require().Nil(err) _, err = s.Runner.GetPoolByID(s.Fixtures.AdminContext, s.Fixtures.Pools[0].ID) s.Require().NotNil(err) - s.Require().Equal("fetching pool: fetching pool by ID: not found", err.Error()) + s.Require().Equal("error fetching pool: error fetching pool by ID: not found", err.Error()) } func (s *PoolTestSuite) TestDeletePoolByID() { @@ -178,7 +178,7 @@ func (s *PoolTestSuite) TestDeletePoolByID() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetPoolByID(s.Fixtures.AdminContext, s.Fixtures.Pools[0].ID) s.Require().NotNil(err) - s.Require().Equal("fetching pool by ID: not found", err.Error()) + s.Require().Equal("error fetching pool by ID: not found", err.Error()) } func (s *PoolTestSuite) TestDeletePoolByIDErrUnauthorized() { @@ -220,7 +220,7 @@ func (s *PoolTestSuite) TestTestUpdatePoolByIDInvalidPoolID() { _, err := s.Runner.UpdatePoolByID(s.Fixtures.AdminContext, "dummy-pool-id", s.Fixtures.UpdatePoolParams) s.Require().NotNil(err) - s.Require().Equal("fetching pool: fetching pool by ID: parsing id: invalid request", err.Error()) + s.Require().Equal("error fetching pool: error fetching pool by ID: error parsing id: invalid request", err.Error()) } func (s *PoolTestSuite) TestTestUpdatePoolByIDRunnerBootstrapTimeoutFailed() { diff --git a/runner/providers/providers.go b/runner/providers/providers.go index 165fb585..ada11729 100644 --- a/runner/providers/providers.go +++ b/runner/providers/providers.go @@ -16,10 +16,9 @@ package providers import ( "context" + "fmt" "log/slog" - "github.com/pkg/errors" - "github.com/cloudbase/garm/config" 
"github.com/cloudbase/garm/params" "github.com/cloudbase/garm/runner/common" @@ -39,11 +38,11 @@ func LoadProvidersFromConfig(ctx context.Context, cfg config.Config, controllerI conf := providerCfg provider, err := external.NewProvider(ctx, &conf, controllerID) if err != nil { - return nil, errors.Wrap(err, "creating provider") + return nil, fmt.Errorf("error creating provider: %w", err) } providers[providerCfg.Name] = provider default: - return nil, errors.Errorf("unknown provider type %s", providerCfg.ProviderType) + return nil, fmt.Errorf("unknown provider type %s", providerCfg.ProviderType) } } return providers, nil diff --git a/runner/providers/v0.1.0/external.go b/runner/providers/v0.1.0/external.go index 60c5ca1b..bb96f4d7 100644 --- a/runner/providers/v0.1.0/external.go +++ b/runner/providers/v0.1.0/external.go @@ -17,12 +17,11 @@ package v010 import ( "context" "encoding/json" + "errors" "fmt" "log/slog" "os/exec" - "github.com/pkg/errors" - garmErrors "github.com/cloudbase/garm-provider-common/errors" commonExecution "github.com/cloudbase/garm-provider-common/execution/common" commonParams "github.com/cloudbase/garm-provider-common/params" @@ -44,7 +43,7 @@ func NewProvider(ctx context.Context, cfg *config.Provider, controllerID string) execPath, err := cfg.External.ExecutablePath() if err != nil { - return nil, errors.Wrap(err, "fetching executable path") + return nil, fmt.Errorf("error fetching executable path: %w", err) } // Set GARM_INTERFACE_VERSION to the version of the interface that the external @@ -83,7 +82,7 @@ func (e *external) CreateInstance(ctx context.Context, bootstrapParams commonPar asJs, err := json.Marshal(bootstrapParams) if err != nil { - return commonParams.ProviderInstance{}, errors.Wrap(err, "serializing bootstrap params") + return commonParams.ProviderInstance{}, fmt.Errorf("error serializing bootstrap params: %w", err) } metrics.InstanceOperationCount.WithLabelValues( diff --git a/runner/providers/v0.1.1/external.go b/runner/providers/v0.1.1/external.go index 192f735d..6e43dce7 100644 --- a/runner/providers/v0.1.1/external.go +++ b/runner/providers/v0.1.1/external.go @@ -18,12 +18,11 @@ import ( "context" "encoding/base64" "encoding/json" + "errors" "fmt" "log/slog" "os/exec" - "github.com/pkg/errors" - garmErrors "github.com/cloudbase/garm-provider-common/errors" commonExecution "github.com/cloudbase/garm-provider-common/execution/common" commonParams "github.com/cloudbase/garm-provider-common/params" @@ -44,7 +43,7 @@ func NewProvider(ctx context.Context, cfg *config.Provider, controllerID string) execPath, err := cfg.External.ExecutablePath() if err != nil { - return nil, errors.Wrap(err, "fetching executable path") + return nil, fmt.Errorf("error fetching executable path: %w", err) } // Set GARM_INTERFACE_VERSION to the version of the interface that the external @@ -75,7 +74,7 @@ func (e *external) CreateInstance(ctx context.Context, bootstrapParams commonPar extraspecs := bootstrapParams.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return commonParams.ProviderInstance{}, errors.Wrap(err, "serializing extraspecs") + return commonParams.ProviderInstance{}, fmt.Errorf("error serializing extraspecs: %w", err) } // Encode the extraspecs as base64 to avoid issues with special characters. 
base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -90,7 +89,7 @@ func (e *external) CreateInstance(ctx context.Context, bootstrapParams commonPar asJs, err := json.Marshal(bootstrapParams) if err != nil { - return commonParams.ProviderInstance{}, errors.Wrap(err, "serializing bootstrap params") + return commonParams.ProviderInstance{}, fmt.Errorf("error serializing bootstrap params: %w", err) } metrics.InstanceOperationCount.WithLabelValues( @@ -136,7 +135,7 @@ func (e *external) DeleteInstance(ctx context.Context, instance string, deleteIn extraspecs := deleteInstanceParams.DeleteInstanceV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return errors.Wrap(err, "serializing extraspecs") + return fmt.Errorf("error serializing extraspecs: %w", err) } // Encode the extraspecs as base64 to avoid issues with special characters. base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -173,7 +172,7 @@ func (e *external) GetInstance(ctx context.Context, instance string, getInstance extraspecs := getInstanceParams.GetInstanceV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return commonParams.ProviderInstance{}, errors.Wrap(err, "serializing extraspecs") + return commonParams.ProviderInstance{}, fmt.Errorf("error serializing extraspecs: %w", err) } // Encode the extraspecs as base64 to avoid issues with special characters. base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -228,7 +227,7 @@ func (e *external) ListInstances(ctx context.Context, poolID string, listInstanc extraspecs := listInstancesParams.ListInstancesV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return []commonParams.ProviderInstance{}, errors.Wrap(err, "serializing extraspecs") + return []commonParams.ProviderInstance{}, fmt.Errorf("error serializing extraspecs: %w", err) } // Encode the extraspecs as base64 to avoid issues with special characters. base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -283,7 +282,7 @@ func (e *external) RemoveAllInstances(ctx context.Context, removeAllInstances co extraspecs := removeAllInstances.RemoveAllInstancesV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return errors.Wrap(err, "serializing extraspecs") + return fmt.Errorf("error serializing extraspecs: %w", err) } // Encode the extraspecs as base64 to avoid issues with special characters. base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -317,7 +316,7 @@ func (e *external) Stop(ctx context.Context, instance string, stopParams common. extraspecs := stopParams.StopV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return errors.Wrap(err, "serializing extraspecs") + return fmt.Errorf("error serializing extraspecs: %w", err) } // Encode the extraspecs as base64 to avoid issues with special characters. base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) @@ -351,7 +350,7 @@ func (e *external) Start(ctx context.Context, instance string, startParams commo extraspecs := startParams.StartV011.PoolInfo.ExtraSpecs extraspecsValue, err := json.Marshal(extraspecs) if err != nil { - return errors.Wrap(err, "serializing extraspecs") + return fmt.Errorf("error serializing extraspecs: %w", err) } // Encode the extraspecs as base64 to avoid issues with special characters. 
base64EncodedExtraSpecs := base64.StdEncoding.EncodeToString(extraspecsValue) diff --git a/runner/repositories.go b/runner/repositories.go index 24beaa07..0f21d882 100644 --- a/runner/repositories.go +++ b/runner/repositories.go @@ -16,12 +16,11 @@ package runner import ( "context" + "errors" "fmt" "log/slog" "strings" - "github.com/pkg/errors" - runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/params" @@ -35,7 +34,7 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa } if err := param.Validate(); err != nil { - return params.Repository{}, errors.Wrap(err, "validating params") + return params.Repository{}, fmt.Errorf("error validating params: %w", err) } var creds params.ForgeCredentials @@ -55,7 +54,7 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa _, err = r.store.GetRepository(ctx, param.Owner, param.Name, creds.Endpoint.Name) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return params.Repository{}, errors.Wrap(err, "fetching repo") + return params.Repository{}, fmt.Errorf("error fetching repo: %w", err) } } else { return params.Repository{}, runnerErrors.NewConflictError("repository %s/%s already exists", param.Owner, param.Name) @@ -63,7 +62,7 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa repo, err = r.store.CreateRepository(ctx, param.Owner, param.Name, creds, param.WebhookSecret, param.PoolBalancerType) if err != nil { - return params.Repository{}, errors.Wrap(err, "creating repository") + return params.Repository{}, fmt.Errorf("error creating repository: %w", err) } defer func() { @@ -80,7 +79,7 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa // updating the store. 
poolMgr, err := r.poolManagerCtrl.CreateRepoPoolManager(r.ctx, repo, r.providers, r.store) if err != nil { - return params.Repository{}, errors.Wrap(err, "creating repo pool manager") + return params.Repository{}, fmt.Errorf("error creating repo pool manager: %w", err) } if err := poolMgr.Start(); err != nil { if deleteErr := r.poolManagerCtrl.DeleteRepoPoolManager(repo); deleteErr != nil { @@ -88,7 +87,7 @@ func (r *Runner) CreateRepository(ctx context.Context, param params.CreateRepoPa ctx, "failed to cleanup pool manager for repo", "repository_id", repo.ID) } - return params.Repository{}, errors.Wrap(err, "starting repo pool manager") + return params.Repository{}, fmt.Errorf("error starting repo pool manager: %w", err) } return repo, nil } @@ -100,7 +99,7 @@ func (r *Runner) ListRepositories(ctx context.Context, filter params.RepositoryF repos, err := r.store.ListRepositories(ctx, filter) if err != nil { - return nil, errors.Wrap(err, "listing repositories") + return nil, fmt.Errorf("error listing repositories: %w", err) } var allRepos []params.Repository @@ -126,7 +125,7 @@ func (r *Runner) GetRepositoryByID(ctx context.Context, repoID string) (params.R repo, err := r.store.GetRepositoryByID(ctx, repoID) if err != nil { - return params.Repository{}, errors.Wrap(err, "fetching repository") + return params.Repository{}, fmt.Errorf("error fetching repository: %w", err) } poolMgr, err := r.poolManagerCtrl.GetRepoPoolManager(repo) @@ -145,17 +144,17 @@ func (r *Runner) DeleteRepository(ctx context.Context, repoID string, keepWebhoo repo, err := r.store.GetRepositoryByID(ctx, repoID) if err != nil { - return errors.Wrap(err, "fetching repo") + return fmt.Errorf("error fetching repo: %w", err) } entity, err := repo.GetEntity() if err != nil { - return errors.Wrap(err, "getting entity") + return fmt.Errorf("error getting entity: %w", err) } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return errors.Wrap(err, "fetching repo pools") + return fmt.Errorf("error fetching repo pools: %w", err) } if len(pools) > 0 { @@ -169,7 +168,7 @@ func (r *Runner) DeleteRepository(ctx context.Context, repoID string, keepWebhoo scaleSets, err := r.store.ListEntityScaleSets(ctx, entity) if err != nil { - return errors.Wrap(err, "fetching repo scale sets") + return fmt.Errorf("error fetching repo scale sets: %w", err) } if len(scaleSets) > 0 { @@ -179,7 +178,7 @@ func (r *Runner) DeleteRepository(ctx context.Context, repoID string, keepWebhoo if !keepWebhook && r.config.Default.EnableWebhookManagement { poolMgr, err := r.poolManagerCtrl.GetRepoPoolManager(repo) if err != nil { - return errors.Wrap(err, "fetching pool manager") + return fmt.Errorf("error fetching pool manager: %w", err) } if err := poolMgr.UninstallWebhook(ctx); err != nil { @@ -192,11 +191,11 @@ func (r *Runner) DeleteRepository(ctx context.Context, repoID string, keepWebhoo } if err := r.poolManagerCtrl.DeleteRepoPoolManager(repo); err != nil { - return errors.Wrap(err, "deleting repo pool manager") + return fmt.Errorf("error deleting repo pool manager: %w", err) } if err := r.store.DeleteRepository(ctx, repoID); err != nil { - return errors.Wrap(err, "removing repository") + return fmt.Errorf("error removing repository: %w", err) } return nil } @@ -218,12 +217,12 @@ func (r *Runner) UpdateRepository(ctx context.Context, repoID string, param para slog.InfoContext(ctx, "updating repository", "repo_id", repoID, "param", param) repo, err := r.store.UpdateRepository(ctx, repoID, param) if err != nil { - return 
params.Repository{}, errors.Wrap(err, "updating repo") + return params.Repository{}, fmt.Errorf("error updating repo: %w", err) } poolMgr, err := r.poolManagerCtrl.GetRepoPoolManager(repo) if err != nil { - return params.Repository{}, errors.Wrap(err, "getting pool manager") + return params.Repository{}, fmt.Errorf("error getting pool manager: %w", err) } repo.PoolManagerStatus = poolMgr.Status() @@ -237,7 +236,7 @@ func (r *Runner) CreateRepoPool(ctx context.Context, repoID string, param params createPoolParams, err := r.appendTagsToCreatePoolParams(param) if err != nil { - return params.Pool{}, errors.Wrap(err, "appending tags to create pool params") + return params.Pool{}, fmt.Errorf("error appending tags to create pool params: %w", err) } if createPoolParams.RunnerBootstrapTimeout == 0 { @@ -251,7 +250,7 @@ func (r *Runner) CreateRepoPool(ctx context.Context, repoID string, param params pool, err := r.store.CreateEntityPool(ctx, entity, createPoolParams) if err != nil { - return params.Pool{}, errors.Wrap(err, "creating pool") + return params.Pool{}, fmt.Errorf("error creating pool: %w", err) } return pool, nil @@ -269,7 +268,7 @@ func (r *Runner) GetRepoPoolByID(ctx context.Context, repoID, poolID string) (pa pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching pool") + return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) } return pool, nil @@ -286,7 +285,7 @@ func (r *Runner) DeleteRepoPool(ctx context.Context, repoID, poolID string) erro } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return errors.Wrap(err, "fetching pool") + return fmt.Errorf("error fetching pool: %w", err) } // nolint:golangci-lint,godox @@ -300,7 +299,7 @@ func (r *Runner) DeleteRepoPool(ctx context.Context, repoID, poolID string) erro } if err := r.store.DeleteEntityPool(ctx, entity, poolID); err != nil { - return errors.Wrap(err, "deleting pool") + return fmt.Errorf("error deleting pool: %w", err) } return nil } @@ -315,7 +314,7 @@ func (r *Runner) ListRepoPools(ctx context.Context, repoID string) ([]params.Poo } pools, err := r.store.ListEntityPools(ctx, entity) if err != nil { - return nil, errors.Wrap(err, "fetching pools") + return nil, fmt.Errorf("error fetching pools: %w", err) } return pools, nil } @@ -327,7 +326,7 @@ func (r *Runner) ListPoolInstances(ctx context.Context, poolID string) ([]params instances, err := r.store.ListPoolInstances(ctx, poolID) if err != nil { - return []params.Instance{}, errors.Wrap(err, "fetching instances") + return []params.Instance{}, fmt.Errorf("error fetching instances: %w", err) } return instances, nil } @@ -343,7 +342,7 @@ func (r *Runner) UpdateRepoPool(ctx context.Context, repoID, poolID string, para } pool, err := r.store.GetEntityPool(ctx, entity, poolID) if err != nil { - return params.Pool{}, errors.Wrap(err, "fetching pool") + return params.Pool{}, fmt.Errorf("error fetching pool: %w", err) } maxRunners := pool.MaxRunners @@ -362,7 +361,7 @@ func (r *Runner) UpdateRepoPool(ctx context.Context, repoID, poolID string, para newPool, err := r.store.UpdateEntityPool(ctx, entity, poolID, param) if err != nil { - return params.Pool{}, errors.Wrap(err, "updating pool") + return params.Pool{}, fmt.Errorf("error updating pool: %w", err) } return newPool, nil } @@ -377,7 +376,7 @@ func (r *Runner) ListRepoInstances(ctx context.Context, repoID string) ([]params } instances, err := r.store.ListEntityInstances(ctx, entity) if err != nil { - return 
[]params.Instance{}, errors.Wrap(err, "fetching instances") + return []params.Instance{}, fmt.Errorf("error fetching instances: %w", err) } return instances, nil } @@ -388,12 +387,12 @@ func (r *Runner) findRepoPoolManager(owner, name, endpointName string) (common.P repo, err := r.store.GetRepository(r.ctx, owner, name, endpointName) if err != nil { - return nil, errors.Wrap(err, "fetching repo") + return nil, fmt.Errorf("error fetching repo: %w", err) } poolManager, err := r.poolManagerCtrl.GetRepoPoolManager(repo) if err != nil { - return nil, errors.Wrap(err, "fetching pool manager for repo") + return nil, fmt.Errorf("error fetching pool manager for repo: %w", err) } return poolManager, nil } @@ -405,17 +404,17 @@ func (r *Runner) InstallRepoWebhook(ctx context.Context, repoID string, param pa repo, err := r.store.GetRepositoryByID(ctx, repoID) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "fetching repo") + return params.HookInfo{}, fmt.Errorf("error fetching repo: %w", err) } poolManager, err := r.poolManagerCtrl.GetRepoPoolManager(repo) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "fetching pool manager for repo") + return params.HookInfo{}, fmt.Errorf("error fetching pool manager for repo: %w", err) } info, err := poolManager.InstallWebhook(ctx, param) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "installing webhook") + return params.HookInfo{}, fmt.Errorf("error installing webhook: %w", err) } return info, nil } @@ -427,16 +426,16 @@ func (r *Runner) UninstallRepoWebhook(ctx context.Context, repoID string) error repo, err := r.store.GetRepositoryByID(ctx, repoID) if err != nil { - return errors.Wrap(err, "fetching repo") + return fmt.Errorf("error fetching repo: %w", err) } poolManager, err := r.poolManagerCtrl.GetRepoPoolManager(repo) if err != nil { - return errors.Wrap(err, "fetching pool manager for repo") + return fmt.Errorf("error fetching pool manager for repo: %w", err) } if err := poolManager.UninstallWebhook(ctx); err != nil { - return errors.Wrap(err, "uninstalling webhook") + return fmt.Errorf("error uninstalling webhook: %w", err) } return nil } @@ -448,17 +447,17 @@ func (r *Runner) GetRepoWebhookInfo(ctx context.Context, repoID string) (params.
repo, err := r.store.GetRepositoryByID(ctx, repoID) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "fetching repo") + return params.HookInfo{}, fmt.Errorf("error fetching repo: %w", err) } poolManager, err := r.poolManagerCtrl.GetRepoPoolManager(repo) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "fetching pool manager for repo") + return params.HookInfo{}, fmt.Errorf("error fetching pool manager for repo: %w", err) } info, err := poolManager.GetWebhookInfo(ctx) if err != nil { - return params.HookInfo{}, errors.Wrap(err, "getting webhook info") + return params.HookInfo{}, fmt.Errorf("error getting webhook info: %w", err) } return info, nil } diff --git a/runner/repositories_test.go b/runner/repositories_test.go index 53fe5869..8f195ae3 100644 --- a/runner/repositories_test.go +++ b/runner/repositories_test.go @@ -16,10 +16,10 @@ package runner import ( "context" + "errors" "fmt" "testing" - "github.com/pkg/errors" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/suite" @@ -240,7 +240,7 @@ func (s *RepoTestSuite) TestCreateRepositoryPoolMgrFailed() { s.Fixtures.PoolMgrMock.AssertExpectations(s.T()) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("creating repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("error creating repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *RepoTestSuite) TestCreateRepositoryStartPoolMgrFailed() { @@ -252,7 +252,7 @@ func (s *RepoTestSuite) TestCreateRepositoryStartPoolMgrFailed() { s.Fixtures.PoolMgrMock.AssertExpectations(s.T()) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("starting repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("error starting repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *RepoTestSuite) TestListRepositories() { @@ -361,7 +361,7 @@ func (s *RepoTestSuite) TestDeleteRepository() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetRepositoryByID(s.Fixtures.AdminContext, s.Fixtures.StoreRepos["test-repo-1"].ID) - s.Require().Equal("fetching repo: not found", err.Error()) + s.Require().Equal("error fetching repo: not found", err.Error()) } func (s *RepoTestSuite) TestDeleteRepositoryErrUnauthorized() { @@ -391,7 +391,7 @@ func (s *RepoTestSuite) TestDeleteRepositoryPoolMgrFailed() { err := s.Runner.DeleteRepository(s.Fixtures.AdminContext, s.Fixtures.StoreRepos["test-repo-1"].ID, true) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("deleting repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("error deleting repo pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *RepoTestSuite) TestUpdateRepository() { @@ -445,7 +445,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryPoolMgrFailed() { _, err := s.Runner.UpdateRepository(s.Fixtures.AdminContext, s.Fixtures.StoreRepos["test-repo-1"].ID, s.Fixtures.UpdateRepoParams) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("getting pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("error getting pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *RepoTestSuite) TestUpdateRepositoryCreateRepoPoolMgrFailed() { @@ -454,7 +454,7 @@ func (s *RepoTestSuite) TestUpdateRepositoryCreateRepoPoolMgrFailed() { _, err := 
s.Runner.UpdateRepository(s.Fixtures.AdminContext, s.Fixtures.StoreRepos["test-repo-1"].ID, s.Fixtures.UpdateRepoParams) s.Fixtures.PoolMgrCtrlMock.AssertExpectations(s.T()) - s.Require().Equal(fmt.Sprintf("getting pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) + s.Require().Equal(fmt.Sprintf("error getting pool manager: %s", s.Fixtures.ErrMock.Error()), err.Error()) } func (s *RepoTestSuite) TestCreateRepoPool() { @@ -527,7 +527,7 @@ func (s *RepoTestSuite) TestDeleteRepoPool() { s.Require().Nil(err) _, err = s.Fixtures.Store.GetEntityPool(s.Fixtures.AdminContext, entity, pool.ID) - s.Require().Equal("fetching pool: finding pool: not found", err.Error()) + s.Require().Equal("fetching pool: error finding pool: not found", err.Error()) } func (s *RepoTestSuite) TestDeleteRepoPoolErrUnauthorized() { diff --git a/runner/runner.go b/runner/runner.go index da3f35ea..2c12071d 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -21,6 +21,7 @@ import ( "crypto/sha256" "encoding/hex" "encoding/json" + "errors" "fmt" "hash" "log/slog" @@ -30,9 +31,6 @@ import ( "sync" "time" - "github.com/juju/clock" - "github.com/juju/retry" - "github.com/pkg/errors" "golang.org/x/sync/errgroup" runnerErrors "github.com/cloudbase/garm-provider-common/errors" @@ -52,12 +50,12 @@ import ( func NewRunner(ctx context.Context, cfg config.Config, db dbCommon.Store) (*Runner, error) { ctrlID, err := db.ControllerInfo() if err != nil { - return nil, errors.Wrap(err, "fetching controller info") + return nil, fmt.Errorf("error fetching controller info: %w", err) } providers, err := providers.LoadProvidersFromConfig(ctx, cfg, ctrlID.ControllerID.String()) if err != nil { - return nil, errors.Wrap(err, "loading providers") + return nil, fmt.Errorf("error loading providers: %w", err) } creds := map[string]config.Github{} @@ -82,7 +80,7 @@ func NewRunner(ctx context.Context, cfg config.Config, db dbCommon.Store) (*Runn } if err := runner.loadReposOrgsAndEnterprises(); err != nil { - return nil, errors.Wrap(err, "loading pool managers") + return nil, fmt.Errorf("error loading pool managers: %w", err) } return runner, nil @@ -105,16 +103,16 @@ func (p *poolManagerCtrl) CreateRepoPoolManager(ctx context.Context, repo params entity, err := repo.GetEntity() if err != nil { - return nil, errors.Wrap(err, "getting entity") + return nil, fmt.Errorf("error getting entity: %w", err) } instanceTokenGetter, err := auth.NewInstanceTokenGetter(p.config.JWTAuth.Secret) if err != nil { - return nil, errors.Wrap(err, "creating instance token getter") + return nil, fmt.Errorf("error creating instance token getter: %w", err) } poolManager, err := pool.NewEntityPoolManager(ctx, entity, instanceTokenGetter, providers, store) if err != nil { - return nil, errors.Wrap(err, "creating repo pool manager") + return nil, fmt.Errorf("error creating repo pool manager: %w", err) } p.repositories[repo.ID] = poolManager return poolManager, nil @@ -124,7 +122,7 @@ func (p *poolManagerCtrl) GetRepoPoolManager(repo params.Repository) (common.Poo if repoPoolMgr, ok := p.repositories[repo.ID]; ok { return repoPoolMgr, nil } - return nil, errors.Wrapf(runnerErrors.ErrNotFound, "repository %s/%s pool manager not loaded", repo.Owner, repo.Name) + return nil, fmt.Errorf("repository %s/%s pool manager not loaded: %w", repo.Owner, repo.Name, runnerErrors.ErrNotFound) } func (p *poolManagerCtrl) DeleteRepoPoolManager(repo params.Repository) error { @@ -134,7 +132,7 @@ func (p *poolManagerCtrl) DeleteRepoPoolManager(repo params.Repository) error { poolMgr, 
ok := p.repositories[repo.ID] if ok { if err := poolMgr.Stop(); err != nil { - return errors.Wrap(err, "stopping repo pool manager") + return fmt.Errorf("error stopping repo pool manager: %w", err) } delete(p.repositories, repo.ID) } @@ -151,16 +149,16 @@ func (p *poolManagerCtrl) CreateOrgPoolManager(ctx context.Context, org params.O entity, err := org.GetEntity() if err != nil { - return nil, errors.Wrap(err, "getting entity") + return nil, fmt.Errorf("error getting entity: %w", err) } instanceTokenGetter, err := auth.NewInstanceTokenGetter(p.config.JWTAuth.Secret) if err != nil { - return nil, errors.Wrap(err, "creating instance token getter") + return nil, fmt.Errorf("error creating instance token getter: %w", err) } poolManager, err := pool.NewEntityPoolManager(ctx, entity, instanceTokenGetter, providers, store) if err != nil { - return nil, errors.Wrap(err, "creating org pool manager") + return nil, fmt.Errorf("error creating org pool manager: %w", err) } p.organizations[org.ID] = poolManager return poolManager, nil @@ -170,7 +168,7 @@ func (p *poolManagerCtrl) GetOrgPoolManager(org params.Organization) (common.Poo if orgPoolMgr, ok := p.organizations[org.ID]; ok { return orgPoolMgr, nil } - return nil, errors.Wrapf(runnerErrors.ErrNotFound, "organization %s pool manager not loaded", org.Name) + return nil, fmt.Errorf("organization %s pool manager not loaded: %w", org.Name, runnerErrors.ErrNotFound) } func (p *poolManagerCtrl) DeleteOrgPoolManager(org params.Organization) error { @@ -180,7 +178,7 @@ func (p *poolManagerCtrl) DeleteOrgPoolManager(org params.Organization) error { poolMgr, ok := p.organizations[org.ID] if ok { if err := poolMgr.Stop(); err != nil { - return errors.Wrap(err, "stopping org pool manager") + return fmt.Errorf("error stopping org pool manager: %w", err) } delete(p.organizations, org.ID) } @@ -197,16 +195,16 @@ func (p *poolManagerCtrl) CreateEnterprisePoolManager(ctx context.Context, enter entity, err := enterprise.GetEntity() if err != nil { - return nil, errors.Wrap(err, "getting entity") + return nil, fmt.Errorf("error getting entity: %w", err) } instanceTokenGetter, err := auth.NewInstanceTokenGetter(p.config.JWTAuth.Secret) if err != nil { - return nil, errors.Wrap(err, "creating instance token getter") + return nil, fmt.Errorf("error creating instance token getter: %w", err) } poolManager, err := pool.NewEntityPoolManager(ctx, entity, instanceTokenGetter, providers, store) if err != nil { - return nil, errors.Wrap(err, "creating enterprise pool manager") + return nil, fmt.Errorf("error creating enterprise pool manager: %w", err) } p.enterprises[enterprise.ID] = poolManager return poolManager, nil @@ -216,7 +214,7 @@ func (p *poolManagerCtrl) GetEnterprisePoolManager(enterprise params.Enterprise) if enterprisePoolMgr, ok := p.enterprises[enterprise.ID]; ok { return enterprisePoolMgr, nil } - return nil, errors.Wrapf(runnerErrors.ErrNotFound, "enterprise %s pool manager not loaded", enterprise.Name) + return nil, fmt.Errorf("enterprise %s pool manager not loaded: %w", enterprise.Name, runnerErrors.ErrNotFound) } func (p *poolManagerCtrl) DeleteEnterprisePoolManager(enterprise params.Enterprise) error { @@ -226,7 +224,7 @@ func (p *poolManagerCtrl) DeleteEnterprisePoolManager(enterprise params.Enterpri poolMgr, ok := p.enterprises[enterprise.ID] if ok { if err := poolMgr.Stop(); err != nil { - return errors.Wrap(err, "stopping enterprise pool manager") + return fmt.Errorf("error stopping enterprise pool manager: %w", err) } delete(p.enterprises, 
enterprise.ID) } @@ -256,12 +254,12 @@ func (r *Runner) UpdateController(ctx context.Context, param params.UpdateContro } if err := param.Validate(); err != nil { - return params.ControllerInfo{}, errors.Wrap(err, "validating controller update params") + return params.ControllerInfo{}, fmt.Errorf("error validating controller update params: %w", err) } info, err := r.store.UpdateController(param) if err != nil { - return params.ControllerInfo{}, errors.Wrap(err, "updating controller info") + return params.ControllerInfo{}, fmt.Errorf("error updating controller info: %w", err) } return info, nil } @@ -281,26 +279,26 @@ func (r *Runner) GetControllerInfo(ctx context.Context) (params.ControllerInfo, // As a side note, Windows requires a reboot for the hostname change to take effect, // so if we'll ever support Windows as a target system, the hostname can be cached. var hostname string - err := retry.Call(retry.CallArgs{ - Func: func() error { - var err error - hostname, err = os.Hostname() - if err != nil { - return errors.Wrap(err, "fetching hostname") + var err error + for range 10 { + hostname, err = os.Hostname() + if err != nil { + select { + case <-time.After(10 * time.Millisecond): + continue + case <-ctx.Done(): } - return nil - }, - Attempts: 10, - Delay: 100 * time.Millisecond, - Clock: clock.WallClock, - }) + return params.ControllerInfo{}, fmt.Errorf("error fetching hostname: %w", err) + } + break + } if err != nil { - return params.ControllerInfo{}, errors.Wrap(err, "fetching hostname") + return params.ControllerInfo{}, fmt.Errorf("error fetching hostname: %w", err) } info, err := r.store.ControllerInfo() if err != nil { - return params.ControllerInfo{}, errors.Wrap(err, "fetching controller info") + return params.ControllerInfo{}, fmt.Errorf("error fetching controller info: %w", err) } // This is temporary. Right now, GARM is a single-instance deployment. 
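The hunk above drops the juju/retry and juju/clock dependencies in favor of a plain bounded loop around os.Hostname. As a minimal, self-contained sketch of the same pattern (the fetchHostname helper is a hypothetical stand-in; in the patch the loop is inlined in GetControllerInfo):

    // Retry a flaky call a fixed number of times, honouring context cancellation.
    package main

    import (
        "context"
        "fmt"
        "os"
        "time"
    )

    func fetchHostname(ctx context.Context) (string, error) {
        var (
            hostname string
            err      error
        )
        for i := 0; i < 10; i++ {
            hostname, err = os.Hostname()
            if err == nil {
                return hostname, nil
            }
            // Wait a short while before retrying, but bail out early if the
            // context is cancelled.
            select {
            case <-time.After(10 * time.Millisecond):
            case <-ctx.Done():
                return "", fmt.Errorf("error fetching hostname: %w", err)
            }
        }
        return "", fmt.Errorf("error fetching hostname: %w", err)
    }

    func main() {
        name, err := fetchHostname(context.Background())
        if err != nil {
            fmt.Println("lookup failed:", err)
            return
        }
        fmt.Println("hostname:", name)
    }

Compared to retry.Call, the trade-off is a few more lines of inline code in exchange for two fewer vendored dependencies.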
When we add the @@ -329,17 +327,17 @@ func (r *Runner) loadReposOrgsAndEnterprises() error { repos, err := r.store.ListRepositories(r.ctx, params.RepositoryFilter{}) if err != nil { - return errors.Wrap(err, "fetching repositories") + return fmt.Errorf("error fetching repositories: %w", err) } orgs, err := r.store.ListOrganizations(r.ctx, params.OrganizationFilter{}) if err != nil { - return errors.Wrap(err, "fetching organizations") + return fmt.Errorf("error fetching organizations: %w", err) } enterprises, err := r.store.ListEnterprises(r.ctx, params.EnterpriseFilter{}) if err != nil { - return errors.Wrap(err, "fetching enterprises") + return fmt.Errorf("error fetching enterprises: %w", err) } g, _ := errgroup.WithContext(r.ctx) @@ -384,17 +382,17 @@ func (r *Runner) Start() error { repositories, err := r.poolManagerCtrl.GetRepoPoolManagers() if err != nil { - return errors.Wrap(err, "fetch repo pool managers") + return fmt.Errorf("error fetching repo pool managers: %w", err) } organizations, err := r.poolManagerCtrl.GetOrgPoolManagers() if err != nil { - return errors.Wrap(err, "fetch org pool managers") + return fmt.Errorf("error fetching org pool managers: %w", err) } enterprises, err := r.poolManagerCtrl.GetEnterprisePoolManagers() if err != nil { - return errors.Wrap(err, "fetch enterprise pool managers") + return fmt.Errorf("error fetching enterprise pool managers: %w", err) } g, _ := errgroup.WithContext(r.ctx) @@ -450,17 +448,17 @@ func (r *Runner) Stop() error { repos, err := r.poolManagerCtrl.GetRepoPoolManagers() if err != nil { - return errors.Wrap(err, "fetch repo pool managers") + return fmt.Errorf("error fetching repo pool managers: %w", err) } orgs, err := r.poolManagerCtrl.GetOrgPoolManagers() if err != nil { - return errors.Wrap(err, "fetch org pool managers") + return fmt.Errorf("error fetching org pool managers: %w", err) } enterprises, err := r.poolManagerCtrl.GetEnterprisePoolManagers() if err != nil { - return errors.Wrap(err, "fetch enterprise pool managers") + return fmt.Errorf("error fetching enterprise pool managers: %w", err) } g, _ := errgroup.WithContext(r.ctx) @@ -512,17 +510,17 @@ func (r *Runner) Wait() error { repos, err := r.poolManagerCtrl.GetRepoPoolManagers() if err != nil { - return errors.Wrap(err, "fetch repo pool managers") + return fmt.Errorf("error fetching repo pool managers: %w", err) } orgs, err := r.poolManagerCtrl.GetOrgPoolManagers() if err != nil { - return errors.Wrap(err, "fetch org pool managers") + return fmt.Errorf("error fetching org pool managers: %w", err) } enterprises, err := r.poolManagerCtrl.GetEnterprisePoolManagers() if err != nil { - return errors.Wrap(err, "fetch enterprise pool managers") + return fmt.Errorf("error fetching enterprise pool managers: %w", err) } for poolID, repo := range repos { @@ -591,7 +589,7 @@ func (r *Runner) validateHookBody(signature, secret string, body []byte) error { mac := hmac.New(hashFunc, []byte(secret)) _, err := mac.Write(body) if err != nil { - return errors.Wrap(err, "failed to compute sha256") + return fmt.Errorf("failed to compute sha256: %w", err) } expectedMAC := hex.EncodeToString(mac.Sum(nil)) @@ -605,7 +603,7 @@ func (r *Runner) findEndpointForJob(job params.WorkflowJob, forgeType params.EndpointType) (params.ForgeEndpoint, error) { uri, err := url.ParseRequestURI(job.WorkflowJob.HTMLURL) if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "parsing job URL") + return params.ForgeEndpoint{},
fmt.Errorf("error parsing job URL: %w", err) } baseURI := fmt.Sprintf("%s://%s", uri.Scheme, uri.Host) @@ -625,7 +623,7 @@ func (r *Runner) findEndpointForJob(job params.WorkflowJob, forgeType params.End } if err != nil { - return params.ForgeEndpoint{}, errors.Wrap(err, "fetching github endpoints") + return params.ForgeEndpoint{}, fmt.Errorf("error fetching github endpoints: %w", err) } for _, ep := range endpoints { slog.DebugContext(r.ctx, "checking endpoint", "base_uri", baseURI, "endpoint", ep.BaseURL) @@ -647,13 +645,13 @@ func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, forgeType var job params.WorkflowJob if err := json.Unmarshal(jobData, &job); err != nil { slog.ErrorContext(r.ctx, "failed to unmarshal job data", "error", err) - return errors.Wrapf(runnerErrors.ErrBadRequest, "invalid job data: %s", err) + return fmt.Errorf("invalid job data %s: %w", err, runnerErrors.ErrBadRequest) } endpoint, err := r.findEndpointForJob(job, forgeType) if err != nil { slog.ErrorContext(r.ctx, "failed to find endpoint for job", "error", err) - return errors.Wrap(err, "finding endpoint for job") + return fmt.Errorf("error finding endpoint for job: %w", err) } var poolManager common.PoolManager @@ -687,7 +685,7 @@ func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, forgeType slog.ErrorContext(r.ctx, "failed to find pool manager", "error", err, "hook_target_type", hookTargetType) // We don't have a repository or organization configured that // can handle this workflow job. - return errors.Wrap(err, "fetching poolManager") + return fmt.Errorf("error fetching poolManager: %w", err) } // We found a pool. Validate the webhook job. If a secret is configured, @@ -695,12 +693,12 @@ func (r *Runner) DispatchWorkflowJob(hookTargetType, signature string, forgeType secret := poolManager.WebhookSecret() if err := r.validateHookBody(signature, secret, jobData); err != nil { slog.ErrorContext(r.ctx, "failed to validate webhook data", "error", err) - return errors.Wrap(err, "validating webhook data") + return fmt.Errorf("error validating webhook data: %w", err) } if err := poolManager.HandleWorkflowJob(job); err != nil { slog.ErrorContext(r.ctx, "failed to handle workflow job", "error", err) - return errors.Wrap(err, "handling workflow job") + return fmt.Errorf("error handling workflow job: %w", err) } return nil @@ -735,7 +733,7 @@ func (r *Runner) GetInstance(ctx context.Context, instanceName string) (params.I instance, err := r.store.GetInstanceByName(ctx, instanceName) if err != nil { - return params.Instance{}, errors.Wrap(err, "fetching instance") + return params.Instance{}, fmt.Errorf("error fetching instance: %w", err) } return instance, nil } @@ -747,7 +745,7 @@ func (r *Runner) ListAllInstances(ctx context.Context) ([]params.Instance, error instances, err := r.store.ListAllInstances(ctx) if err != nil { - return nil, errors.Wrap(err, "fetching instances") + return nil, fmt.Errorf("error fetching instances: %w", err) } return instances, nil } @@ -759,7 +757,7 @@ func (r *Runner) AddInstanceStatusMessage(ctx context.Context, param params.Inst } if err := r.store.AddInstanceEvent(ctx, instanceName, params.StatusEvent, params.EventInfo, param.Message); err != nil { - return errors.Wrap(err, "adding status update") + return fmt.Errorf("error adding status update: %w", err) } updateParams := params.UpdateInstanceParams{ @@ -771,7 +769,7 @@ func (r *Runner) AddInstanceStatusMessage(ctx context.Context, param params.Inst } if _, err := r.store.UpdateInstance(r.ctx, 
instanceName, updateParams); err != nil { - return errors.Wrap(err, "updating runner agent ID") + return fmt.Errorf("error updating runner agent ID: %w", err) } return nil @@ -799,7 +797,7 @@ func (r *Runner) UpdateSystemInfo(ctx context.Context, param params.UpdateSystem } if _, err := r.store.UpdateInstance(r.ctx, instanceName, updateParams); err != nil { - return errors.Wrap(err, "updating runner system info") + return fmt.Errorf("error updating runner system info: %w", err) } return nil @@ -808,7 +806,7 @@ func (r *Runner) UpdateSystemInfo(ctx context.Context, param params.UpdateSystem func (r *Runner) getPoolManagerFromInstance(ctx context.Context, instance params.Instance) (common.PoolManager, error) { pool, err := r.store.GetPoolByID(ctx, instance.PoolID) if err != nil { - return nil, errors.Wrap(err, "fetching pool") + return nil, fmt.Errorf("error fetching pool: %w", err) } var poolMgr common.PoolManager @@ -817,29 +815,29 @@ func (r *Runner) getPoolManagerFromInstance(ctx context.Context, instance params case pool.RepoID != "": repo, err := r.store.GetRepositoryByID(ctx, pool.RepoID) if err != nil { - return nil, errors.Wrap(err, "fetching repo") + return nil, fmt.Errorf("error fetching repo: %w", err) } poolMgr, err = r.findRepoPoolManager(repo.Owner, repo.Name, repo.Endpoint.Name) if err != nil { - return nil, errors.Wrapf(err, "fetching pool manager for repo %s", pool.RepoName) + return nil, fmt.Errorf("error fetching pool manager for repo %s: %w", pool.RepoName, err) } case pool.OrgID != "": org, err := r.store.GetOrganizationByID(ctx, pool.OrgID) if err != nil { - return nil, errors.Wrap(err, "fetching org") + return nil, fmt.Errorf("error fetching org: %w", err) } poolMgr, err = r.findOrgPoolManager(org.Name, org.Endpoint.Name) if err != nil { - return nil, errors.Wrapf(err, "fetching pool manager for org %s", pool.OrgName) + return nil, fmt.Errorf("error fetching pool manager for org %s: %w", pool.OrgName, err) } case pool.EnterpriseID != "": enterprise, err := r.store.GetEnterpriseByID(ctx, pool.EnterpriseID) if err != nil { - return nil, errors.Wrap(err, "fetching enterprise") + return nil, fmt.Errorf("error fetching enterprise: %w", err) } poolMgr, err = r.findEnterprisePoolManager(enterprise.Name, enterprise.Endpoint.Name) if err != nil { - return nil, errors.Wrapf(err, "fetching pool manager for enterprise %s", pool.EnterpriseName) + return nil, fmt.Errorf("error fetching pool manager for enterprise %s: %w", pool.EnterpriseName, err) } } @@ -856,7 +854,7 @@ func (r *Runner) DeleteRunner(ctx context.Context, instanceName string, forceDel instance, err := r.store.GetInstanceByName(ctx, instanceName) if err != nil { - return errors.Wrap(err, "fetching instance") + return fmt.Errorf("error fetching instance: %w", err) } switch instance.Status { @@ -874,7 +872,7 @@ func (r *Runner) DeleteRunner(ctx context.Context, instanceName string, forceDel ghCli, ssCli, err := r.getGHCliFromInstance(ctx, instance) if err != nil { - return errors.Wrap(err, "fetching github client") + return fmt.Errorf("error fetching github client: %w", err) } if instance.AgentID != 0 { @@ -892,12 +890,12 @@ func (r *Runner) DeleteRunner(ctx context.Context, instanceName string, forceDel if errors.Is(err, runnerErrors.ErrUnauthorized) && instance.PoolID != "" { poolMgr, err := r.getPoolManagerFromInstance(ctx, instance) if err != nil { - return errors.Wrap(err, "fetching pool manager for instance") + return fmt.Errorf("error fetching pool manager for instance: %w", err) } 
poolMgr.SetPoolRunningState(false, fmt.Sprintf("failed to remove runner: %q", err)) } if !bypassGithubUnauthorized { - return errors.Wrap(err, "removing runner from github") + return fmt.Errorf("error removing runner from github: %w", err) } } } @@ -918,7 +916,7 @@ func (r *Runner) DeleteRunner(ctx context.Context, instanceName string, forceDel } _, err = r.store.UpdateInstance(r.ctx, instance.Name, updateParams) if err != nil { - return errors.Wrap(err, "updating runner state") + return fmt.Errorf("error updating runner state: %w", err) } return nil @@ -934,12 +932,12 @@ func (r *Runner) getGHCliFromInstance(ctx context.Context, instance params.Insta case instance.PoolID != "": entityGetter, err = r.store.GetPoolByID(ctx, instance.PoolID) if err != nil { - return nil, nil, errors.Wrap(err, "fetching pool") + return nil, nil, fmt.Errorf("error fetching pool: %w", err) } case instance.ScaleSetID != 0: entityGetter, err = r.store.GetScaleSetByID(ctx, instance.ScaleSetID) if err != nil { - return nil, nil, errors.Wrap(err, "fetching scale set") + return nil, nil, fmt.Errorf("error fetching scale set: %w", err) } default: return nil, nil, errors.New("instance does not have a pool or scale set") @@ -947,23 +945,23 @@ func (r *Runner) getGHCliFromInstance(ctx context.Context, instance params.Insta entity, err := entityGetter.GetEntity() if err != nil { - return nil, nil, errors.Wrap(err, "fetching entity") + return nil, nil, fmt.Errorf("error fetching entity: %w", err) } // Fetching the entity from the database will populate all fields, including credentials. entity, err = r.store.GetForgeEntity(ctx, entity.EntityType, entity.ID) if err != nil { - return nil, nil, errors.Wrap(err, "fetching entity") + return nil, nil, fmt.Errorf("error fetching entity: %w", err) } ghCli, err := github.Client(ctx, entity) if err != nil { - return nil, nil, errors.Wrap(err, "creating github client") + return nil, nil, fmt.Errorf("error creating github client: %w", err) } scaleSetCli, err := scalesets.NewClient(ghCli) if err != nil { - return nil, nil, errors.Wrap(err, "creating scaleset client") + return nil, nil, fmt.Errorf("error creating scaleset client: %w", err) } return ghCli, scaleSetCli, nil } diff --git a/runner/scalesets.go b/runner/scalesets.go index e7af9c22..d9361698 100644 --- a/runner/scalesets.go +++ b/runner/scalesets.go @@ -16,11 +16,10 @@ package runner import ( "context" + "errors" "fmt" "log/slog" - "github.com/pkg/errors" - runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/params" @@ -36,7 +35,7 @@ func (r *Runner) ListAllScaleSets(ctx context.Context) ([]params.ScaleSet, error scalesets, err := r.store.ListAllScaleSets(ctx) if err != nil { - return nil, errors.Wrap(err, "fetching pools") + return nil, fmt.Errorf("error fetching pools: %w", err) } return scalesets, nil } @@ -48,7 +47,7 @@ func (r *Runner) GetScaleSetByID(ctx context.Context, scaleSet uint) (params.Sca set, err := r.store.GetScaleSetByID(ctx, scaleSet) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "fetching scale set") + return params.ScaleSet{}, fmt.Errorf("error fetching scale set: %w", err) } return set, nil } @@ -61,7 +60,7 @@ func (r *Runner) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error scaleSet, err := r.store.GetScaleSetByID(ctx, scaleSetID) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { - return errors.Wrap(err, "fetching scale set") + return fmt.Errorf("error fetching scale set: %w", err) } return 
nil } @@ -76,22 +75,22 @@ func (r *Runner) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error paramEntity, err := scaleSet.GetEntity() if err != nil { - return errors.Wrap(err, "getting entity") + return fmt.Errorf("error getting entity: %w", err) } entity, err := r.store.GetForgeEntity(ctx, paramEntity.EntityType, paramEntity.ID) if err != nil { - return errors.Wrap(err, "getting entity") + return fmt.Errorf("error getting entity: %w", err) } ghCli, err := github.Client(ctx, entity) if err != nil { - return errors.Wrap(err, "creating github client") + return fmt.Errorf("error creating github client: %w", err) } scalesetCli, err := scalesets.NewClient(ghCli) if err != nil { - return errors.Wrap(err, "getting scaleset client") + return fmt.Errorf("error getting scaleset client: %w", err) } slog.DebugContext(ctx, "deleting scale set", "scale_set_id", scaleSet.ScaleSetID) @@ -101,10 +100,10 @@ func (r *Runner) DeleteScaleSetByID(ctx context.Context, scaleSetID uint) error return nil } slog.With(slog.Any("error", err)).ErrorContext(ctx, "failed to delete scale set from github") - return errors.Wrap(err, "deleting scale set from github") + return fmt.Errorf("error deleting scale set from github: %w", err) } if err := r.store.DeleteScaleSetByID(ctx, scaleSetID); err != nil { - return errors.Wrap(err, "deleting scale set") + return fmt.Errorf("error deleting scale set: %w", err) } return nil } @@ -116,7 +115,7 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param scaleSet, err := r.store.GetScaleSetByID(ctx, scaleSetID) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "fetching scale set") + return params.ScaleSet{}, fmt.Errorf("error fetching scale set: %w", err) } maxRunners := scaleSet.MaxRunners @@ -139,22 +138,22 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param paramEntity, err := scaleSet.GetEntity() if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "getting entity") + return params.ScaleSet{}, fmt.Errorf("error getting entity: %w", err) } entity, err := r.store.GetForgeEntity(ctx, paramEntity.EntityType, paramEntity.ID) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "getting entity") + return params.ScaleSet{}, fmt.Errorf("error getting entity: %w", err) } ghCli, err := github.Client(ctx, entity) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "creating github client") + return params.ScaleSet{}, fmt.Errorf("error creating github client: %w", err) } scalesetCli, err := scalesets.NewClient(ghCli) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "getting scaleset client") + return params.ScaleSet{}, fmt.Errorf("error getting scaleset client: %w", err) } callback := func(old, newSet params.ScaleSet) error { @@ -190,7 +189,7 @@ func (r *Runner) UpdateScaleSetByID(ctx context.Context, scaleSetID uint, param newScaleSet, err := r.store.UpdateEntityScaleSet(ctx, entity, scaleSetID, param, callback) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "updating pool") + return params.ScaleSet{}, fmt.Errorf("error updating pool: %w", err) } return newScaleSet, nil } @@ -210,7 +209,7 @@ func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.For entity, err := r.store.GetForgeEntity(ctx, entityType, entityID) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "getting entity") + return params.ScaleSet{}, fmt.Errorf("error getting entity: %w", err) } if entity.Credentials.ForgeType != params.GithubEndpointType { @@ -219,18 
+218,18 @@ func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.For ghCli, err := github.Client(ctx, entity) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "creating github client") + return params.ScaleSet{}, fmt.Errorf("error creating github client: %w", err) } scalesetCli, err := scalesets.NewClient(ghCli) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "getting scaleset client") + return params.ScaleSet{}, fmt.Errorf("error getting scaleset client: %w", err) } var runnerGroupID int64 = 1 if param.GitHubRunnerGroup != "Default" { runnerGroup, err := scalesetCli.GetRunnerGroupByName(ctx, param.GitHubRunnerGroup) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "getting runner group") + return params.ScaleSet{}, fmt.Errorf("error getting runner group: %w", err) } runnerGroupID = runnerGroup.ID } @@ -253,7 +252,7 @@ func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.For runnerScaleSet, err := scalesetCli.CreateRunnerScaleSet(ctx, createParam) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "creating runner scale set") + return params.ScaleSet{}, fmt.Errorf("error creating runner scale set: %w", err) } defer func() { @@ -267,7 +266,7 @@ func (r *Runner) CreateEntityScaleSet(ctx context.Context, entityType params.For scaleSet, err := r.store.CreateEntityScaleSet(ctx, entity, param) if err != nil { - return params.ScaleSet{}, errors.Wrap(err, "creating scale set") + return params.ScaleSet{}, fmt.Errorf("error creating scale set: %w", err) } return scaleSet, nil @@ -280,7 +279,7 @@ func (r *Runner) ListScaleSetInstances(ctx context.Context, scalesetID uint) ([] instances, err := r.store.ListScaleSetInstances(ctx, scalesetID) if err != nil { - return []params.Instance{}, errors.Wrap(err, "fetching instances") + return []params.Instance{}, fmt.Errorf("error fetching instances: %w", err) } return instances, nil } @@ -295,7 +294,7 @@ func (r *Runner) ListEntityScaleSets(ctx context.Context, entityType params.Forg } scaleSets, err := r.store.ListEntityScaleSets(ctx, entity) if err != nil { - return nil, errors.Wrap(err, "fetching scale sets") + return nil, fmt.Errorf("error fetching scale sets: %w", err) } return scaleSets, nil } diff --git a/util/github/client.go b/util/github/client.go index 46b6a170..19380587 100644 --- a/util/github/client.go +++ b/util/github/client.go @@ -18,6 +18,7 @@ import ( "context" "encoding/base64" "encoding/json" + "errors" "fmt" "log/slog" "net/http" @@ -25,7 +26,6 @@ import ( "strings" "github.com/google/go-github/v72/github" - "github.com/pkg/errors" runnerErrors "github.com/cloudbase/garm-provider-common/errors" "github.com/cloudbase/garm/metrics" @@ -309,7 +309,7 @@ func (g *githubClient) RemoveEntityRunner(ctx context.Context, runnerID int64) e } if err := parseError(response, err); err != nil { - return errors.Wrapf(err, "removing runner %d", runnerID) + return fmt.Errorf("error removing runner %d: %w", runnerID, err) } return nil @@ -366,9 +366,9 @@ func (g *githubClient) getOrganizationRunnerGroupIDByName(ctx context.Context, e entity.LabelScope(), // label: scope ).Inc() if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return 0, errors.Wrap(runnerErrors.ErrUnauthorized, "fetching runners") + return 0, fmt.Errorf("error fetching runners: %w", runnerErrors.ErrUnauthorized) } - return 0, errors.Wrap(err, "fetching runners") + return 0, fmt.Errorf("error fetching runners: %w", err) } for _, runnerGroup := range runnerGroups.RunnerGroups { if 
runnerGroup.Name != nil && *runnerGroup.Name == rgName { @@ -402,9 +402,9 @@ func (g *githubClient) getEnterpriseRunnerGroupIDByName(ctx context.Context, ent entity.LabelScope(), // label: scope ).Inc() if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return 0, errors.Wrap(runnerErrors.ErrUnauthorized, "fetching runners") + return 0, fmt.Errorf("error fetching runners: %w", runnerErrors.ErrUnauthorized) } - return 0, errors.Wrap(err, "fetching runners") + return 0, fmt.Errorf("error fetching runners: %w", err) } for _, runnerGroup := range runnerGroups.RunnerGroups { if runnerGroup.Name != nil && *runnerGroup.Name == rgName { @@ -520,7 +520,7 @@ func (g *githubClient) GithubBaseURL() *url.URL { func NewRateLimitClient(ctx context.Context, credentials params.ForgeCredentials) (common.RateLimitClient, error) { httpClient, err := credentials.GetHTTPClient(ctx) if err != nil { - return nil, errors.Wrap(err, "fetching http client") + return nil, fmt.Errorf("error fetching http client: %w", err) } slog.DebugContext( @@ -531,7 +531,7 @@ func NewRateLimitClient(ctx context.Context, credentials params.ForgeCredentials ghClient, err := github.NewClient(httpClient).WithEnterpriseURLs( credentials.APIBaseURL, credentials.UploadBaseURL) if err != nil { - return nil, errors.Wrap(err, "fetching github client") + return nil, fmt.Errorf("error fetching github client: %w", err) } cli := &githubClient{ rateLimit: ghClient.RateLimit, @@ -552,7 +552,7 @@ func withGiteaURLs(client *github.Client, apiBaseURL string) (*github.Client, er parsedBaseURL, err := url.ParseRequestURI(apiBaseURL) if err != nil { - return nil, errors.Wrap(err, "parsing gitea base URL") + return nil, fmt.Errorf("error parsing gitea base URL: %w", err) } if !strings.HasSuffix(parsedBaseURL.Path, "/") { @@ -573,7 +573,7 @@ func Client(ctx context.Context, entity params.ForgeEntity) (common.GithubClient // func GithubClient(ctx context.Context, entity params.ForgeEntity) (common.GithubClient, error) { httpClient, err := entity.Credentials.GetHTTPClient(ctx) if err != nil { - return nil, errors.Wrap(err, "fetching http client") + return nil, fmt.Errorf("error fetching http client: %w", err) } slog.DebugContext( @@ -590,7 +590,7 @@ func Client(ctx context.Context, entity params.ForgeEntity) (common.GithubClient } if err != nil { - return nil, errors.Wrap(err, "fetching github client") + return nil, fmt.Errorf("error fetching github client: %w", err) } cli := &githubClient{ diff --git a/util/github/gitea.go b/util/github/gitea.go index 51f340b7..5d35190b 100644 --- a/util/github/gitea.go +++ b/util/github/gitea.go @@ -16,11 +16,11 @@ package github import ( "context" + "errors" "fmt" "net/http" "github.com/google/go-github/v72/github" - "github.com/pkg/errors" "github.com/cloudbase/garm/metrics" "github.com/cloudbase/garm/params" diff --git a/util/util.go b/util/util.go index 994e4637..dc92ce0e 100644 --- a/util/util.go +++ b/util/util.go @@ -16,11 +16,10 @@ package util import ( "context" + "fmt" "net/http" "unicode/utf8" - "github.com/pkg/errors" - runnerErrors "github.com/cloudbase/garm-provider-common/errors" commonParams "github.com/cloudbase/garm-provider-common/params" "github.com/cloudbase/garm/runner/common" @@ -30,9 +29,9 @@ func FetchTools(ctx context.Context, cli common.GithubClient) ([]commonParams.Ru tools, ghResp, err := cli.ListEntityRunnerApplicationDownloads(ctx) if err != nil { if ghResp != nil && ghResp.StatusCode == http.StatusUnauthorized { - return nil, errors.Wrap(runnerErrors.ErrUnauthorized, 
"fetching tools") + return nil, fmt.Errorf("error fetching tools: %w", runnerErrors.ErrUnauthorized) } - return nil, errors.Wrap(err, "fetching runner tools") + return nil, fmt.Errorf("error fetching runner tools: %w", err) } ret := []commonParams.RunnerApplicationDownload{} diff --git a/vendor/github.com/juju/clock/.gitignore b/vendor/github.com/juju/clock/.gitignore deleted file mode 100644 index 1d74e219..00000000 --- a/vendor/github.com/juju/clock/.gitignore +++ /dev/null @@ -1 +0,0 @@ -.vscode/ diff --git a/vendor/github.com/juju/clock/LICENSE b/vendor/github.com/juju/clock/LICENSE deleted file mode 100644 index ade9307b..00000000 --- a/vendor/github.com/juju/clock/LICENSE +++ /dev/null @@ -1,191 +0,0 @@ -All files in this repository are licensed as follows. If you contribute -to this repository, it is assumed that you license your contribution -under the same license unless you state otherwise. - -All files Copyright (C) 2015 Canonical Ltd. unless otherwise specified in the file. - -This software is licensed under the LGPLv3, included below. - -As a special exception to the GNU Lesser General Public License version 3 -("LGPL3"), the copyright holders of this Library give you permission to -convey to a third party a Combined Work that links statically or dynamically -to this Library without providing any Minimal Corresponding Source or -Minimal Application Code as set out in 4d or providing the installation -information set out in section 4e, provided that you comply with the other -provisions of LGPL3 and provided that you meet, for the Application the -terms and conditions of the license(s) which apply to the Application. - -Except as stated in this special exception, the provisions of LGPL3 will -continue to comply in full to this Library. If you modify this Library, you -may apply this exception to your version of this Library, but you are not -obliged to do so. If you do not wish to do so, delete this exception -statement from your version. This exception does not (and cannot) modify any -license terms which apply to the Application, with which you must still -comply. - - - GNU LESSER GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - - This version of the GNU Lesser General Public License incorporates -the terms and conditions of version 3 of the GNU General Public -License, supplemented by the additional permissions listed below. - - 0. Additional Definitions. - - As used herein, "this License" refers to version 3 of the GNU Lesser -General Public License, and the "GNU GPL" refers to version 3 of the GNU -General Public License. - - "The Library" refers to a covered work governed by this License, -other than an Application or a Combined Work as defined below. - - An "Application" is any work that makes use of an interface provided -by the Library, but which is not otherwise based on the Library. -Defining a subclass of a class defined by the Library is deemed a mode -of using an interface provided by the Library. - - A "Combined Work" is a work produced by combining or linking an -Application with the Library. The particular version of the Library -with which the Combined Work was made is also called the "Linked -Version". 
- - The "Minimal Corresponding Source" for a Combined Work means the -Corresponding Source for the Combined Work, excluding any source code -for portions of the Combined Work that, considered in isolation, are -based on the Application, and not on the Linked Version. - - The "Corresponding Application Code" for a Combined Work means the -object code and/or source code for the Application, including any data -and utility programs needed for reproducing the Combined Work from the -Application, but excluding the System Libraries of the Combined Work. - - 1. Exception to Section 3 of the GNU GPL. - - You may convey a covered work under sections 3 and 4 of this License -without being bound by section 3 of the GNU GPL. - - 2. Conveying Modified Versions. - - If you modify a copy of the Library, and, in your modifications, a -facility refers to a function or data to be supplied by an Application -that uses the facility (other than as an argument passed when the -facility is invoked), then you may convey a copy of the modified -version: - - a) under this License, provided that you make a good faith effort to - ensure that, in the event an Application does not supply the - function or data, the facility still operates, and performs - whatever part of its purpose remains meaningful, or - - b) under the GNU GPL, with none of the additional permissions of - this License applicable to that copy. - - 3. Object Code Incorporating Material from Library Header Files. - - The object code form of an Application may incorporate material from -a header file that is part of the Library. You may convey such object -code under terms of your choice, provided that, if the incorporated -material is not limited to numerical parameters, data structure -layouts and accessors, or small macros, inline functions and templates -(ten or fewer lines in length), you do both of the following: - - a) Give prominent notice with each copy of the object code that the - Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the object code with a copy of the GNU GPL and this license - document. - - 4. Combined Works. - - You may convey a Combined Work under terms of your choice that, -taken together, effectively do not restrict modification of the -portions of the Library contained in the Combined Work and reverse -engineering for debugging such modifications, if you also do each of -the following: - - a) Give prominent notice with each copy of the Combined Work that - the Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the Combined Work with a copy of the GNU GPL and this license - document. - - c) For a Combined Work that displays copyright notices during - execution, include the copyright notice for the Library among - these notices, as well as a reference directing the user to the - copies of the GNU GPL and this license document. - - d) Do one of the following: - - 0) Convey the Minimal Corresponding Source under the terms of this - License, and the Corresponding Application Code in a form - suitable for, and under terms that permit, the user to - recombine or relink the Application with a modified version of - the Linked Version to produce a modified Combined Work, in the - manner specified by section 6 of the GNU GPL for conveying - Corresponding Source. - - 1) Use a suitable shared library mechanism for linking with the - Library. 
A suitable mechanism is one that (a) uses at run time - a copy of the Library already present on the user's computer - system, and (b) will operate properly with a modified version - of the Library that is interface-compatible with the Linked - Version. - - e) Provide Installation Information, but only if you would otherwise - be required to provide such information under section 6 of the - GNU GPL, and only to the extent that such information is - necessary to install and execute a modified version of the - Combined Work produced by recombining or relinking the - Application with a modified version of the Linked Version. (If - you use option 4d0, the Installation Information must accompany - the Minimal Corresponding Source and Corresponding Application - Code. If you use option 4d1, you must provide the Installation - Information in the manner specified by section 6 of the GNU GPL - for conveying Corresponding Source.) - - 5. Combined Libraries. - - You may place library facilities that are a work based on the -Library side by side in a single library together with other library -facilities that are not Applications and are not covered by this -License, and convey such a combined library under terms of your -choice, if you do both of the following: - - a) Accompany the combined library with a copy of the same work based - on the Library, uncombined with any other library facilities, - conveyed under the terms of this License. - - b) Give prominent notice with the combined library that part of it - is a work based on the Library, and explaining where to find the - accompanying uncombined form of the same work. - - 6. Revised Versions of the GNU Lesser General Public License. - - The Free Software Foundation may publish revised and/or new versions -of the GNU Lesser General Public License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. - - Each version is given a distinguishing version number. If the -Library as you received it specifies that a certain numbered version -of the GNU Lesser General Public License "or any later version" -applies to it, you have the option of following the terms and -conditions either of that published version or of any later version -published by the Free Software Foundation. If the Library as you -received it does not specify a version number of the GNU Lesser -General Public License, you may choose any version of the GNU Lesser -General Public License ever published by the Free Software Foundation. - - If the Library as you received it specifies that a proxy can decide -whether future versions of the GNU Lesser General Public License shall -apply, that proxy's public statement of acceptance of any version is -permanent authorization for you to choose that version for the -Library. diff --git a/vendor/github.com/juju/clock/Makefile b/vendor/github.com/juju/clock/Makefile deleted file mode 100644 index 900ccf75..00000000 --- a/vendor/github.com/juju/clock/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -PROJECT := github.com/juju/clock - -.PHONY: check-licence check-go check - -check: check-licence check-go - go test $(PROJECT)/... - -check-licence: - @(fgrep -rl "Licensed under the LGPLv3" --exclude *.s .;\ - fgrep -rl "MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT" --exclude *.s .;\ - find . 
-name "*.go") | sed -e 's,\./,,' | sort | uniq -u | \ - xargs -I {} echo FAIL: licence missed: {} - -check-go: - $(eval GOFMT := $(strip $(shell gofmt -l .| sed -e "s/^/ /g"))) - @(if [ x$(GOFMT) != x"" ]; then \ - echo go fmt is sad: $(GOFMT); \ - exit 1; \ - fi ) - @(go vet -all -composites=false -copylocks=false .) diff --git a/vendor/github.com/juju/clock/README.md b/vendor/github.com/juju/clock/README.md deleted file mode 100644 index a5ac464d..00000000 --- a/vendor/github.com/juju/clock/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# clock - -An interface definition for a fully defined clock. - -An WallClock implementation of that interface using the time package. - -A testing clock. diff --git a/vendor/github.com/juju/clock/clock.go b/vendor/github.com/juju/clock/clock.go deleted file mode 100644 index 6303cf65..00000000 --- a/vendor/github.com/juju/clock/clock.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2015 Canonical Ltd. -// Licensed under the LGPLv3, see LICENCE file for details. - -package clock - -import "time" - -// Clock provides an interface for dealing with clocks. -type Clock interface { - // Now returns the current clock time. - Now() time.Time - - // After waits for the duration to elapse and then sends the - // current time on the returned channel. - After(time.Duration) <-chan time.Time - - // AfterFunc waits for the duration to elapse and then calls f in its own goroutine. - // It returns a Timer that can be used to cancel the call using its Stop method. - AfterFunc(d time.Duration, f func()) Timer - - // NewTimer creates a new Timer that will send the current time - // on its channel after at least duration d. - NewTimer(d time.Duration) Timer - - // At waits for the time to pass and then sends the - // current time on the returned channel. - At(t time.Time) <-chan time.Time - - // AtFunc waits for the time to pass and then calls f in its own goroutine. - // It returns an Alarm that can be used to cancel the call using its Stop method. - AtFunc(t time.Time, f func()) Alarm - - // NewAlarm creates a new Alarm that will send the current time - // on its channel at or after time t has passed. - NewAlarm(t time.Time) Alarm -} - -// Timer type represents a single event. -// Timers must be created with AfterFunc or NewTimer. -// This interface follows time.Timer's methods but provides easier mocking. -type Timer interface { - // When the timer expires, the current time will be sent on the - // channel returned from Chan, unless the timer was created by - // AfterFunc. - Chan() <-chan time.Time - - // Reset changes the timer to expire after duration d. - // It returns true if the timer had been active, false if - // the timer had expired or been stopped. - Reset(d time.Duration) bool - - // Stop prevents the Timer from firing. It returns true if - // the call stops the timer, false if the timer has already expired or been stopped. - // Stop does not close the channel, to prevent a read - // from the channel succeeding incorrectly. - Stop() bool -} - -// Alarm type represents a single event. -// Alarms must be created with AtFunc or NewAlarm. -type Alarm interface { - // When the alarm expires, the current time will be sent on the - // channel returned from Chan, unless the alarm was created by - // AtFunc. - Chan() <-chan time.Time - - // Reset changes the alarm to expire at or after time t. - // It returns true if the alarm had been active, false if - // the alarm had fired or been stopped. - Reset(t time.Time) bool - - // Stop prevents the alarm from firing. 
It returns true if - // the call stops the alarm, false if the alarm has already fired or been stopped. - // Stop does not close the channel, to prevent a read - // from the channel succeeding incorrectly. - Stop() bool -} diff --git a/vendor/github.com/juju/clock/wall.go b/vendor/github.com/juju/clock/wall.go deleted file mode 100644 index 1a4b021e..00000000 --- a/vendor/github.com/juju/clock/wall.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2015 Canonical Ltd. -// Licensed under the LGPLv3, see LICENCE file for details. - -package clock - -import ( - "time" -) - -// WallClock exposes wall-clock time via the Clock interface. -var WallClock wallClock - -// ensure that WallClock does actually implement the Clock interface. -var _ Clock = WallClock - -// WallClock exposes wall-clock time as returned by time.Now. -type wallClock struct{} - -// Now is part of the Clock interface. -func (wallClock) Now() time.Time { - return time.Now() -} - -// After implements Clock.After. -func (wallClock) After(d time.Duration) <-chan time.Time { - return time.After(d) -} - -// AfterFunc implements Clock.AfterFunc. -func (wallClock) AfterFunc(d time.Duration, f func()) Timer { - return wallTimer{time.AfterFunc(d, f)} -} - -// NewTimer implements Clock.NewTimer. -func (wallClock) NewTimer(d time.Duration) Timer { - return wallTimer{time.NewTimer(d)} -} - -// wallTimer implements the Timer interface. -type wallTimer struct { - *time.Timer -} - -// Chan implements Timer.Chan. -func (t wallTimer) Chan() <-chan time.Time { - return t.C -} - -// At implements Clock.At. -func (wallClock) At(t time.Time) <-chan time.Time { - return time.After(time.Until(t)) -} - -// AtFunc implements Clock.AtFunc. -func (wallClock) AtFunc(t time.Time, f func()) Alarm { - return wallAlarm{time.AfterFunc(time.Until(t), f)} -} - -// NewAlarm implements Clock.NewAlarm. -func (wallClock) NewAlarm(t time.Time) Alarm { - return wallAlarm{time.NewTimer(time.Until(t))} -} - -// wallAlarm implements the Alarm interface. -type wallAlarm struct { - *time.Timer -} - -// Chan implements Alarm.Chan. -func (a wallAlarm) Chan() <-chan time.Time { - return a.C -} - -// Reset implements Alarm.Reset -func (a wallAlarm) Reset(t time.Time) bool { - return a.Timer.Reset(time.Until(t)) -} diff --git a/vendor/github.com/juju/errors/.gitignore b/vendor/github.com/juju/errors/.gitignore deleted file mode 100644 index 83656241..00000000 --- a/vendor/github.com/juju/errors/.gitignore +++ /dev/null @@ -1,23 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test diff --git a/vendor/github.com/juju/errors/LICENSE b/vendor/github.com/juju/errors/LICENSE deleted file mode 100644 index ade9307b..00000000 --- a/vendor/github.com/juju/errors/LICENSE +++ /dev/null @@ -1,191 +0,0 @@ -All files in this repository are licensed as follows. If you contribute -to this repository, it is assumed that you license your contribution -under the same license unless you state otherwise. - -All files Copyright (C) 2015 Canonical Ltd. unless otherwise specified in the file. - -This software is licensed under the LGPLv3, included below. 
- -As a special exception to the GNU Lesser General Public License version 3 -("LGPL3"), the copyright holders of this Library give you permission to -convey to a third party a Combined Work that links statically or dynamically -to this Library without providing any Minimal Corresponding Source or -Minimal Application Code as set out in 4d or providing the installation -information set out in section 4e, provided that you comply with the other -provisions of LGPL3 and provided that you meet, for the Application the -terms and conditions of the license(s) which apply to the Application. - -Except as stated in this special exception, the provisions of LGPL3 will -continue to comply in full to this Library. If you modify this Library, you -may apply this exception to your version of this Library, but you are not -obliged to do so. If you do not wish to do so, delete this exception -statement from your version. This exception does not (and cannot) modify any -license terms which apply to the Application, with which you must still -comply. - - - GNU LESSER GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - - This version of the GNU Lesser General Public License incorporates -the terms and conditions of version 3 of the GNU General Public -License, supplemented by the additional permissions listed below. - - 0. Additional Definitions. - - As used herein, "this License" refers to version 3 of the GNU Lesser -General Public License, and the "GNU GPL" refers to version 3 of the GNU -General Public License. - - "The Library" refers to a covered work governed by this License, -other than an Application or a Combined Work as defined below. - - An "Application" is any work that makes use of an interface provided -by the Library, but which is not otherwise based on the Library. -Defining a subclass of a class defined by the Library is deemed a mode -of using an interface provided by the Library. - - A "Combined Work" is a work produced by combining or linking an -Application with the Library. The particular version of the Library -with which the Combined Work was made is also called the "Linked -Version". - - The "Minimal Corresponding Source" for a Combined Work means the -Corresponding Source for the Combined Work, excluding any source code -for portions of the Combined Work that, considered in isolation, are -based on the Application, and not on the Linked Version. - - The "Corresponding Application Code" for a Combined Work means the -object code and/or source code for the Application, including any data -and utility programs needed for reproducing the Combined Work from the -Application, but excluding the System Libraries of the Combined Work. - - 1. Exception to Section 3 of the GNU GPL. - - You may convey a covered work under sections 3 and 4 of this License -without being bound by section 3 of the GNU GPL. - - 2. Conveying Modified Versions. 
- - If you modify a copy of the Library, and, in your modifications, a -facility refers to a function or data to be supplied by an Application -that uses the facility (other than as an argument passed when the -facility is invoked), then you may convey a copy of the modified -version: - - a) under this License, provided that you make a good faith effort to - ensure that, in the event an Application does not supply the - function or data, the facility still operates, and performs - whatever part of its purpose remains meaningful, or - - b) under the GNU GPL, with none of the additional permissions of - this License applicable to that copy. - - 3. Object Code Incorporating Material from Library Header Files. - - The object code form of an Application may incorporate material from -a header file that is part of the Library. You may convey such object -code under terms of your choice, provided that, if the incorporated -material is not limited to numerical parameters, data structure -layouts and accessors, or small macros, inline functions and templates -(ten or fewer lines in length), you do both of the following: - - a) Give prominent notice with each copy of the object code that the - Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the object code with a copy of the GNU GPL and this license - document. - - 4. Combined Works. - - You may convey a Combined Work under terms of your choice that, -taken together, effectively do not restrict modification of the -portions of the Library contained in the Combined Work and reverse -engineering for debugging such modifications, if you also do each of -the following: - - a) Give prominent notice with each copy of the Combined Work that - the Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the Combined Work with a copy of the GNU GPL and this license - document. - - c) For a Combined Work that displays copyright notices during - execution, include the copyright notice for the Library among - these notices, as well as a reference directing the user to the - copies of the GNU GPL and this license document. - - d) Do one of the following: - - 0) Convey the Minimal Corresponding Source under the terms of this - License, and the Corresponding Application Code in a form - suitable for, and under terms that permit, the user to - recombine or relink the Application with a modified version of - the Linked Version to produce a modified Combined Work, in the - manner specified by section 6 of the GNU GPL for conveying - Corresponding Source. - - 1) Use a suitable shared library mechanism for linking with the - Library. A suitable mechanism is one that (a) uses at run time - a copy of the Library already present on the user's computer - system, and (b) will operate properly with a modified version - of the Library that is interface-compatible with the Linked - Version. - - e) Provide Installation Information, but only if you would otherwise - be required to provide such information under section 6 of the - GNU GPL, and only to the extent that such information is - necessary to install and execute a modified version of the - Combined Work produced by recombining or relinking the - Application with a modified version of the Linked Version. (If - you use option 4d0, the Installation Information must accompany - the Minimal Corresponding Source and Corresponding Application - Code. 
If you use option 4d1, you must provide the Installation - Information in the manner specified by section 6 of the GNU GPL - for conveying Corresponding Source.) - - 5. Combined Libraries. - - You may place library facilities that are a work based on the -Library side by side in a single library together with other library -facilities that are not Applications and are not covered by this -License, and convey such a combined library under terms of your -choice, if you do both of the following: - - a) Accompany the combined library with a copy of the same work based - on the Library, uncombined with any other library facilities, - conveyed under the terms of this License. - - b) Give prominent notice with the combined library that part of it - is a work based on the Library, and explaining where to find the - accompanying uncombined form of the same work. - - 6. Revised Versions of the GNU Lesser General Public License. - - The Free Software Foundation may publish revised and/or new versions -of the GNU Lesser General Public License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. - - Each version is given a distinguishing version number. If the -Library as you received it specifies that a certain numbered version -of the GNU Lesser General Public License "or any later version" -applies to it, you have the option of following the terms and -conditions either of that published version or of any later version -published by the Free Software Foundation. If the Library as you -received it does not specify a version number of the GNU Lesser -General Public License, you may choose any version of the GNU Lesser -General Public License ever published by the Free Software Foundation. - - If the Library as you received it specifies that a proxy can decide -whether future versions of the GNU Lesser General Public License shall -apply, that proxy's public statement of acceptance of any version is -permanent authorization for you to choose that version for the -Library. diff --git a/vendor/github.com/juju/errors/Makefile b/vendor/github.com/juju/errors/Makefile deleted file mode 100644 index a5bc81e6..00000000 --- a/vendor/github.com/juju/errors/Makefile +++ /dev/null @@ -1,24 +0,0 @@ -PROJECT := github.com/juju/errors - -.PHONY: check-licence check-go check docs - -check: check-licence check-go - go test $(PROJECT)/... - -check-licence: - @(fgrep -rl "Licensed under the LGPLv3" --exclude *.s .;\ - fgrep -rl "MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT" --exclude *.s .;\ - find . -name "*.go") | sed -e 's,\./,,' | sort | uniq -u | \ - xargs -I {} echo FAIL: licence missed: {} - -check-go: - $(eval GOFMT := $(strip $(shell gofmt -l .| sed -e "s/^/ /g"))) - @(if [ x$(GOFMT) != x"" ]; then \ - echo go fmt is sad: $(GOFMT); \ - exit 1; \ - fi ) - @(go vet -all -composites=false -copylocks=false .) 
- -docs: - godoc2md github.com/juju/errors > README.md - sed -i '5i[\[GoDoc](https://godoc.org/github.com/juju/errors?status.svg)](https://godoc.org/github.com/juju/errors)' README.md diff --git a/vendor/github.com/juju/errors/README.md b/vendor/github.com/juju/errors/README.md deleted file mode 100644 index 271aa78c..00000000 --- a/vendor/github.com/juju/errors/README.md +++ /dev/null @@ -1,707 +0,0 @@ - -# errors - import "github.com/juju/errors" - -[![GoDoc](https://godoc.org/github.com/juju/errors?status.svg)](https://godoc.org/github.com/juju/errors) - -The juju/errors provides an easy way to annotate errors without losing the -original error context. - -The exported `New` and `Errorf` functions are designed to replace the -`errors.New` and `fmt.Errorf` functions respectively. The same underlying -error is there, but the package also records the location at which the error -was created. - -A primary use case for this library is to add extra context any time an -error is returned from a function. - - - if err := SomeFunc(); err != nil { - return err - } - -This instead becomes: - - - if err := SomeFunc(); err != nil { - return errors.Trace(err) - } - -which just records the file and line number of the Trace call, or - - - if err := SomeFunc(); err != nil { - return errors.Annotate(err, "more context") - } - -which also adds an annotation to the error. - -When you want to check to see if an error is of a particular type, a helper -function is normally exported by the package that returned the error, like the -`os` package does. The underlying cause of the error is available using the -`Cause` function. - - - os.IsNotExist(errors.Cause(err)) - -The result of the `Error()` call on an annotated error is the annotations joined -with colons, then the result of the `Error()` method for the underlying error -that was the cause. - - - err := errors.Errorf("original") - err = errors.Annotatef(err, "context") - err = errors.Annotatef(err, "more context") - err.Error() -> "more context: context: original" - -Obviously recording the file, line and functions is not very useful if you -cannot get them back out again. - - - errors.ErrorStack(err) - -will return something like: - - - first error - github.com/juju/errors/annotation_test.go:193: - github.com/juju/errors/annotation_test.go:194: annotation - github.com/juju/errors/annotation_test.go:195: - github.com/juju/errors/annotation_test.go:196: more context - github.com/juju/errors/annotation_test.go:197: - -The first error was generated by an external system, so there was no location -associated. The second, fourth, and last lines were generated with Trace calls, -and the other two through Annotate. - -Sometimes when responding to an error you want to return a more specific error -for the situation. - - - if err := FindField(field); err != nil { - return errors.Wrap(err, errors.NotFoundf(field)) - } - -This returns an error where the complete error stack is still available, and -`errors.Cause()` will return the `NotFound` error. - - - - - - -## func AlreadyExistsf -``` go -func AlreadyExistsf(format string, args ...interface{}) error -``` -AlreadyExistsf returns an error which satisfies IsAlreadyExists(). - - -## func Annotate -``` go -func Annotate(other error, message string) error -``` -Annotate is used to add extra context to an existing error. The location of -the Annotate call is recorded with the annotations. The file, line and -function are also recorded. 
- -For example: - - - if err := SomeFunc(); err != nil { - return errors.Annotate(err, "failed to frombulate") - } - - -## func Annotatef -``` go -func Annotatef(other error, format string, args ...interface{}) error -``` -Annotatef is used to add extra context to an existing error. The location of -the Annotate call is recorded with the annotations. The file, line and -function are also recorded. - -For example: - - - if err := SomeFunc(); err != nil { - return errors.Annotatef(err, "failed to frombulate the %s", arg) - } - - -## func BadRequestf -``` go -func BadRequestf(format string, args ...interface{}) error -``` -BadRequestf returns an error which satisfies IsBadRequest(). - - -## func Cause -``` go -func Cause(err error) error -``` -Cause returns the cause of the given error. This will be either the -original error, or the result of a Wrap or Mask call. - -Cause is the usual way to diagnose errors that may have been wrapped by -the other errors functions. - - -## func DeferredAnnotatef -``` go -func DeferredAnnotatef(err *error, format string, args ...interface{}) -``` -DeferredAnnotatef annotates the given error (when it is not nil) with the given -format string and arguments (like fmt.Sprintf). If *err is nil, DeferredAnnotatef -does nothing. This method is used in a defer statement in order to annotate any -resulting error with the same message. - -For example: - - - defer DeferredAnnotatef(&err, "failed to frombulate the %s", arg) - - -## func Details -``` go -func Details(err error) string -``` -Details returns information about the stack of errors wrapped by err, in -the format: - - - [{filename:99: error one} {otherfile:55: cause of error one}] - -This is a terse alternative to ErrorStack as it returns a single line. - - -## func ErrorStack -``` go -func ErrorStack(err error) string -``` -ErrorStack returns a string representation of the annotated error. If the -error passed as the parameter is not an annotated error, the result is -simply the result of the Error() method on that error. - -If the error is an annotated error, a multi-line string is returned where -each line represents one entry in the annotation stack. The full filename -from the call stack is used in the output. - - - first error - github.com/juju/errors/annotation_test.go:193: - github.com/juju/errors/annotation_test.go:194: annotation - github.com/juju/errors/annotation_test.go:195: - github.com/juju/errors/annotation_test.go:196: more context - github.com/juju/errors/annotation_test.go:197: - - -## func Errorf -``` go -func Errorf(format string, args ...interface{}) error -``` -Errorf creates a new annotated error and records the location that the -error is created. This should be a drop in replacement for fmt.Errorf. - -For example: - - - return errors.Errorf("validation failed: %s", message) - - -## func Forbiddenf -``` go -func Forbiddenf(format string, args ...interface{}) error -``` -Forbiddenf returns an error which satistifes IsForbidden() - - -## func IsAlreadyExists -``` go -func IsAlreadyExists(err error) bool -``` -IsAlreadyExists reports whether the error was created with -AlreadyExistsf() or NewAlreadyExists(). - - -## func IsBadRequest -``` go -func IsBadRequest(err error) bool -``` -IsBadRequest reports whether err was created with BadRequestf() or -NewBadRequest(). - - -## func IsForbidden -``` go -func IsForbidden(err error) bool -``` -IsForbidden reports whether err was created with Forbiddenf() or -NewForbidden(). 
- - -## func IsMethodNotAllowed -``` go -func IsMethodNotAllowed(err error) bool -``` -IsMethodNotAllowed reports whether err was created with MethodNotAllowedf() or -NewMethodNotAllowed(). - - -## func IsNotAssigned -``` go -func IsNotAssigned(err error) bool -``` -IsNotAssigned reports whether err was created with NotAssignedf() or -NewNotAssigned(). - - -## func IsNotFound -``` go -func IsNotFound(err error) bool -``` -IsNotFound reports whether err was created with NotFoundf() or -NewNotFound(). - - -## func IsNotImplemented -``` go -func IsNotImplemented(err error) bool -``` -IsNotImplemented reports whether err was created with -NotImplementedf() or NewNotImplemented(). - - -## func IsNotProvisioned -``` go -func IsNotProvisioned(err error) bool -``` -IsNotProvisioned reports whether err was created with NotProvisionedf() or -NewNotProvisioned(). - - -## func IsNotSupported -``` go -func IsNotSupported(err error) bool -``` -IsNotSupported reports whether the error was created with -NotSupportedf() or NewNotSupported(). - - -## func IsNotValid -``` go -func IsNotValid(err error) bool -``` -IsNotValid reports whether the error was created with NotValidf() or -NewNotValid(). - - -## func IsUnauthorized -``` go -func IsUnauthorized(err error) bool -``` -IsUnauthorized reports whether err was created with Unauthorizedf() or -NewUnauthorized(). - - -## func IsUserNotFound -``` go -func IsUserNotFound(err error) bool -``` -IsUserNotFound reports whether err was created with UserNotFoundf() or -NewUserNotFound(). - - -## func Mask -``` go -func Mask(other error) error -``` -Mask hides the underlying error type, and records the location of the masking. - - -## func Maskf -``` go -func Maskf(other error, format string, args ...interface{}) error -``` -Mask masks the given error with the given format string and arguments (like -fmt.Sprintf), returning a new error that maintains the error stack, but -hides the underlying error type. The error string still contains the full -annotations. If you want to hide the annotations, call Wrap. - - -## func MethodNotAllowedf -``` go -func MethodNotAllowedf(format string, args ...interface{}) error -``` -MethodNotAllowedf returns an error which satisfies IsMethodNotAllowed(). - - -## func New -``` go -func New(message string) error -``` -New is a drop in replacement for the standard library errors module that records -the location that the error is created. - -For example: - - - return errors.New("validation failed") - - -## func NewAlreadyExists -``` go -func NewAlreadyExists(err error, msg string) error -``` -NewAlreadyExists returns an error which wraps err and satisfies -IsAlreadyExists(). - - -## func NewBadRequest -``` go -func NewBadRequest(err error, msg string) error -``` -NewBadRequest returns an error which wraps err that satisfies -IsBadRequest(). - - -## func NewForbidden -``` go -func NewForbidden(err error, msg string) error -``` -NewForbidden returns an error which wraps err that satisfies -IsForbidden(). - - -## func NewMethodNotAllowed -``` go -func NewMethodNotAllowed(err error, msg string) error -``` -NewMethodNotAllowed returns an error which wraps err that satisfies -IsMethodNotAllowed(). - - -## func NewNotAssigned -``` go -func NewNotAssigned(err error, msg string) error -``` -NewNotAssigned returns an error which wraps err that satisfies -IsNotAssigned(). - - -## func NewNotFound -``` go -func NewNotFound(err error, msg string) error -``` -NewNotFound returns an error which wraps err that satisfies -IsNotFound(). 
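The constructor/predicate pairs are normally used together: a function returns one of the typed errors, and callers test for it after any amount of annotation. A short sketch under that assumption (`findRunner` and its argument are made up for illustration):

```go
package main

import (
	"fmt"

	"github.com/juju/errors"
)

// findRunner is a hypothetical lookup used only to exercise the
// NotFound helpers.
func findRunner(name string) error {
	// NotFoundf records the call location and satisfies IsNotFound().
	return errors.NotFoundf("runner %q", name)
}

func main() {
	err := errors.Annotate(findRunner("runner-01"), "refreshing pool")
	// The check still succeeds after annotation, because IsNotFound
	// inspects the underlying cause rather than the outermost error.
	fmt.Println(errors.IsNotFound(err)) // true
	fmt.Println(err)                    // refreshing pool: runner "runner-01" not found
}
```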
- - -## func NewNotImplemented -``` go -func NewNotImplemented(err error, msg string) error -``` -NewNotImplemented returns an error which wraps err and satisfies -IsNotImplemented(). - - -## func NewNotProvisioned -``` go -func NewNotProvisioned(err error, msg string) error -``` -NewNotProvisioned returns an error which wraps err that satisfies -IsNotProvisioned(). - - -## func NewNotSupported -``` go -func NewNotSupported(err error, msg string) error -``` -NewNotSupported returns an error which wraps err and satisfies -IsNotSupported(). - - -## func NewNotValid -``` go -func NewNotValid(err error, msg string) error -``` -NewNotValid returns an error which wraps err and satisfies IsNotValid(). - - -## func NewUnauthorized -``` go -func NewUnauthorized(err error, msg string) error -``` -NewUnauthorized returns an error which wraps err and satisfies -IsUnauthorized(). - - -## func NewUserNotFound -``` go -func NewUserNotFound(err error, msg string) error -``` -NewUserNotFound returns an error which wraps err and satisfies -IsUserNotFound(). - - -## func NotAssignedf -``` go -func NotAssignedf(format string, args ...interface{}) error -``` -NotAssignedf returns an error which satisfies IsNotAssigned(). - - -## func NotFoundf -``` go -func NotFoundf(format string, args ...interface{}) error -``` -NotFoundf returns an error which satisfies IsNotFound(). - - -## func NotImplementedf -``` go -func NotImplementedf(format string, args ...interface{}) error -``` -NotImplementedf returns an error which satisfies IsNotImplemented(). - - -## func NotProvisionedf -``` go -func NotProvisionedf(format string, args ...interface{}) error -``` -NotProvisionedf returns an error which satisfies IsNotProvisioned(). - - -## func NotSupportedf -``` go -func NotSupportedf(format string, args ...interface{}) error -``` -NotSupportedf returns an error which satisfies IsNotSupported(). - - -## func NotValidf -``` go -func NotValidf(format string, args ...interface{}) error -``` -NotValidf returns an error which satisfies IsNotValid(). - - -## func Trace -``` go -func Trace(other error) error -``` -Trace adds the location of the Trace call to the stack. The Cause of the -resulting error is the same as the error parameter. If the other error is -nil, the result will be nil. - -For example: - - - if err := SomeFunc(); err != nil { - return errors.Trace(err) - } - - -## func Unauthorizedf -``` go -func Unauthorizedf(format string, args ...interface{}) error -``` -Unauthorizedf returns an error which satisfies IsUnauthorized(). - - -## func UserNotFoundf -``` go -func UserNotFoundf(format string, args ...interface{}) error -``` -UserNotFoundf returns an error which satisfies IsUserNotFound(). - - -## func Wrap -``` go -func Wrap(other, newDescriptive error) error -``` -Wrap changes the Cause of the error. The location of the Wrap call is also -stored in the error stack. - -For example: - - - if err := SomeFunc(); err != nil { - newErr := &packageError{"more context", private_value} - return errors.Wrap(err, newErr) - } - - -## func Wrapf -``` go -func Wrapf(other, newDescriptive error, format string, args ...interface{}) error -``` -Wrapf changes the Cause of the error, and adds an annotation. The location -of the Wrap call is also stored in the error stack. 
- -For example: - - - if err := SomeFunc(); err != nil { - return errors.Wrapf(err, simpleErrorType, "invalid value %q", value) - } - - - -## type Err -``` go -type Err struct { - // contains filtered or unexported fields -} -``` -Err holds a description of an error along with information about -where the error was created. - -It may be embedded in custom error types to add extra information that -this errors package can understand. - - - - - - - - - -### func NewErr -``` go -func NewErr(format string, args ...interface{}) Err -``` -NewErr is used to return an Err for the purpose of embedding in other -structures. The location is not specified, and needs to be set with a call -to SetLocation. - -For example: - - - type FooError struct { - errors.Err - code int - } - - func NewFooError(code int) error { - err := &FooError{errors.NewErr("foo"), code} - err.SetLocation(1) - return err - } - - -### func NewErrWithCause -``` go -func NewErrWithCause(other error, format string, args ...interface{}) Err -``` -NewErrWithCause is used to return an Err with cause by other error for the purpose of embedding in other -structures. The location is not specified, and needs to be set with a call -to SetLocation. - -For example: - - - type FooError struct { - errors.Err - code int - } - - func (e *FooError) Annotate(format string, args ...interface{}) error { - err := &FooError{errors.NewErrWithCause(e.Err, format, args...), e.code} - err.SetLocation(1) - return err - }) - - - - -### func (\*Err) Cause -``` go -func (e *Err) Cause() error -``` -The Cause of an error is the most recent error in the error stack that -meets one of these criteria: the original error that was raised; the new -error that was passed into the Wrap function; the most recently masked -error; or nil if the error itself is considered the Cause. Normally this -method is not invoked directly, but instead through the Cause stand alone -function. - - - -### func (\*Err) Error -``` go -func (e *Err) Error() string -``` -Error implements error.Error. - - - -### func (\*Err) Format -``` go -func (e *Err) Format(s fmt.State, verb rune) -``` -Format implements fmt.Formatter -When printing errors with %+v it also prints the stack trace. -%#v unsurprisingly will print the real underlying type. - - - -### func (\*Err) Location -``` go -func (e *Err) Location() (filename string, line int) -``` -Location is the file and line of where the error was most recently -created or annotated. - - - -### func (\*Err) Message -``` go -func (e *Err) Message() string -``` -Message returns the message stored with the most recent location. This is -the empty string if the most recent call was Trace, or the message stored -with Annotate or Mask. - - - -### func (\*Err) SetLocation -``` go -func (e *Err) SetLocation(callDepth int) -``` -SetLocation records the source location of the error at callDepth stack -frames above the call. - - - -### func (\*Err) StackTrace -``` go -func (e *Err) StackTrace() []string -``` -StackTrace returns one string for each location recorded in the stack of -errors. The first value is the originating error, with a line for each -other annotation or tracing of the error. - - - -### func (\*Err) Underlying -``` go -func (e *Err) Underlying() error -``` -Underlying returns the previous error in the error stack, if any. A client -should not ever really call this method. It is used to build the error -stack and should not be introspected by client calls. 
Or more -specifically, clients should not depend on anything but the `Cause` of an -error. - - - - - - - - - -- - - -Generated by [godoc2md](http://godoc.org/github.com/davecheney/godoc2md) diff --git a/vendor/github.com/juju/errors/doc.go b/vendor/github.com/juju/errors/doc.go deleted file mode 100644 index d4403662..00000000 --- a/vendor/github.com/juju/errors/doc.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2013, 2014 Canonical Ltd. -// Licensed under the LGPLv3, see LICENCE file for details. - -/* -Package errors provides an easy way to annotate errors without losing the -original error context. - -The exported `New` and `Errorf` functions are designed to replace the -`errors.New` and `fmt.Errorf` functions respectively. The same underlying -error is there, but the package also records the location at which the error -was created. - -A primary use case for this library is to add extra context any time an -error is returned from a function. - - if err := SomeFunc(); err != nil { - return err - } - -This instead becomes: - - if err := SomeFunc(); err != nil { - return errors.Trace(err) - } - -which just records the file and line number of the Trace call, or - - if err := SomeFunc(); err != nil { - return errors.Annotate(err, "more context") - } - -which also adds an annotation to the error. - -When you want to check to see if an error is of a particular type, a helper -function is normally exported by the package that returned the error, like the -`os` package does. The underlying cause of the error is available using the -`Cause` function. - - os.IsNotExist(errors.Cause(err)) - -The result of the `Error()` call on an annotated error is the annotations joined -with colons, then the result of the `Error()` method for the underlying error -that was the cause. - - err := errors.Errorf("original") - err = errors.Annotatef(err, "context") - err = errors.Annotatef(err, "more context") - err.Error() -> "more context: context: original" - -Obviously recording the file, line and functions is not very useful if you -cannot get them back out again. - - errors.ErrorStack(err) - -will return something like: - - first error - github.com/juju/errors/annotation_test.go:193: - github.com/juju/errors/annotation_test.go:194: annotation - github.com/juju/errors/annotation_test.go:195: - github.com/juju/errors/annotation_test.go:196: more context - github.com/juju/errors/annotation_test.go:197: - -The first error was generated by an external system, so there was no location -associated. The second, fourth, and last lines were generated with Trace calls, -and the other two through Annotate. - -Sometimes when responding to an error you want to return a more specific error -for the situation. - - if err := FindField(field); err != nil { - return errors.Wrap(err, errors.NotFoundf(field)) - } - -This returns an error where the complete error stack is still available, and -`errors.Cause()` will return the `NotFound` error. - -*/ -package errors diff --git a/vendor/github.com/juju/errors/error.go b/vendor/github.com/juju/errors/error.go deleted file mode 100644 index 326b917a..00000000 --- a/vendor/github.com/juju/errors/error.go +++ /dev/null @@ -1,227 +0,0 @@ -// Copyright 2014 Canonical Ltd. -// Licensed under the LGPLv3, see LICENCE file for details. - -package errors - -import ( - "fmt" - "reflect" -) - -// Err holds a description of an error along with information about -// where the error was created. 
-// -// It may be embedded in custom error types to add extra information that -// this errors package can understand. -type Err struct { - // message holds an annotation of the error. - message string - - // cause holds the cause of the error as returned - // by the Cause method. - cause error - - // previous holds the previous error in the error stack, if any. - previous error - - // function is the package path-qualified function name where the - // error was created. - function string - - // line is the line number the error was created on inside of function - line int -} - -// Locationer is an interface that represents a certain class of errors that -// contain the location information from where they were raised. -type Locationer interface { - // Location returns the path-qualified function name where the error was - // created and the line number - Location() (function string, line int) -} - -// locationError is the internal implementation of the Locationer interface. -type locationError struct { - error - - // function is the package path-qualified function name where the - // error was created. - function string - - // line is the line number the error was created on inside of function - line int -} - -// newLocationError constructs a new Locationer error from the supplied error -// with the location set to callDepth in the stack. If a nill error is provided -// to this function then a new empty error is constructed. -func newLocationError(err error, callDepth int) *locationError { - le := &locationError{error: err} - le.function, le.line = getLocation(callDepth + 1) - return le -} - -// Error implementes the error interface. -func (l *locationError) Error() string { - if l.error == nil { - return "" - } - return l.error.Error() -} - -// *locationError implements Locationer.Location interface -func (l *locationError) Location() (string, int) { - return l.function, l.line -} - -func (l *locationError) Unwrap() error { - return l.error -} - -// NewErr is used to return an Err for the purpose of embedding in other -// structures. The location is not specified, and needs to be set with a call -// to SetLocation. -// -// For example: -// type FooError struct { -// errors.Err -// code int -// } -// -// func NewFooError(code int) error { -// err := &FooError{errors.NewErr("foo"), code} -// err.SetLocation(1) -// return err -// } -func NewErr(format string, args ...interface{}) Err { - return Err{ - message: fmt.Sprintf(format, args...), - } -} - -// NewErrWithCause is used to return an Err with cause by other error for the purpose of embedding in other -// structures. The location is not specified, and needs to be set with a call -// to SetLocation. -// -// For example: -// type FooError struct { -// errors.Err -// code int -// } -// -// func (e *FooError) Annotate(format string, args ...interface{}) error { -// err := &FooError{errors.NewErrWithCause(e.Err, format, args...), e.code} -// err.SetLocation(1) -// return err -// }) -func NewErrWithCause(other error, format string, args ...interface{}) Err { - return Err{ - message: fmt.Sprintf(format, args...), - cause: Cause(other), - previous: other, - } -} - -// Location returns the package path-qualified function name and line of where -// the error was most recently created or annotated. -func (e *Err) Location() (function string, line int) { - return e.function, e.line -} - -// Underlying returns the previous error in the error stack, if any. A client -// should not ever really call this method. 
It is used to build the error -// stack and should not be introspected by client calls. Or more -// specifically, clients should not depend on anything but the `Cause` of an -// error. -func (e *Err) Underlying() error { - return e.previous -} - -// Cause returns the most recent error in the error stack that -// meets one of these criteria: the original error that was raised; the new -// error that was passed into the Wrap function; the most recently masked -// error; or nil if the error itself is considered the Cause. Normally this -// method is not invoked directly, but instead through the Cause stand alone -// function. -func (e *Err) Cause() error { - return e.cause -} - -// Message returns the message stored with the most recent location. This is -// the empty string if the most recent call was Trace, or the message stored -// with Annotate or Mask. -func (e *Err) Message() string { - return e.message -} - -// Error implements error.Error. -func (e *Err) Error() string { - // We want to walk up the stack of errors showing the annotations - // as long as the cause is the same. - err := e.previous - if !sameError(Cause(err), e.cause) && e.cause != nil { - err = e.cause - } - switch { - case err == nil: - return e.message - case e.message == "": - return err.Error() - } - return fmt.Sprintf("%s: %v", e.message, err) -} - -// Format implements fmt.Formatter -// When printing errors with %+v it also prints the stack trace. -// %#v unsurprisingly will print the real underlying type. -func (e *Err) Format(s fmt.State, verb rune) { - switch verb { - case 'v': - switch { - case s.Flag('+'): - fmt.Fprintf(s, "%s", ErrorStack(e)) - return - case s.Flag('#'): - // avoid infinite recursion by wrapping e into a type - // that doesn't implement Formatter. - fmt.Fprintf(s, "%#v", (*unformatter)(e)) - return - } - fallthrough - case 's': - fmt.Fprintf(s, "%s", e.Error()) - case 'q': - fmt.Fprintf(s, "%q", e.Error()) - default: - fmt.Fprintf(s, "%%!%c(%T=%s)", verb, e, e.Error()) - } -} - -// helper for Format -type unformatter Err - -func (unformatter) Format() { /* break the fmt.Formatter interface */ } - -// SetLocation records the package path-qualified function name of the error at -// callDepth stack frames above the call. -func (e *Err) SetLocation(callDepth int) { - e.function, e.line = getLocation(callDepth + 1) -} - -// StackTrace returns one string for each location recorded in the stack of -// errors. The first value is the originating error, with a line for each -// other annotation or tracing of the error. -func (e *Err) StackTrace() []string { - return errorStack(e) -} - -// Ideally we'd have a way to check identity, but deep equals will do. -func sameError(e1, e2 error) bool { - return reflect.DeepEqual(e1, e2) -} - -// Unwrap is a synonym for Underlying, which allows Err to be used with the -// Unwrap, Is and As functions in Go's standard `errors` library. -func (e *Err) Unwrap() error { - return e.previous -} diff --git a/vendor/github.com/juju/errors/errortypes.go b/vendor/github.com/juju/errors/errortypes.go deleted file mode 100644 index 0029f91d..00000000 --- a/vendor/github.com/juju/errors/errortypes.go +++ /dev/null @@ -1,473 +0,0 @@ -// Copyright 2014 Canonical Ltd. -// Licensed under the LGPLv3, see LICENCE file for details. 
- -package errors - -import ( - "errors" - stderror "errors" - "fmt" - "strings" -) - -// a ConstError is a prototype for a certain type of error -type ConstError string - -// ConstError implements error -func (e ConstError) Error() string { - return string(e) -} - -// Different types of errors -const ( - // Timeout represents an error on timeout. - Timeout = ConstError("timeout") - // NotFound represents an error when something has not been found. - NotFound = ConstError("not found") - // UserNotFound represents an error when a non-existent user is looked up. - UserNotFound = ConstError("user not found") - // Unauthorized represents an error when an operation is unauthorized. - Unauthorized = ConstError("unauthorized") - // NotImplemented represents an error when something is not - // implemented. - NotImplemented = ConstError("not implemented") - // AlreadyExists represents and error when something already exists. - AlreadyExists = ConstError("already exists") - // NotSupported represents an error when something is not supported. - NotSupported = ConstError("not supported") - // NotValid represents an error when something is not valid. - NotValid = ConstError("not valid") - // NotProvisioned represents an error when something is not yet provisioned. - NotProvisioned = ConstError("not provisioned") - // NotAssigned represents an error when something is not yet assigned to - // something else. - NotAssigned = ConstError("not assigned") - // BadRequest represents an error when a request has bad parameters. - BadRequest = ConstError("bad request") - // MethodNotAllowed represents an error when an HTTP request - // is made with an inappropriate method. - MethodNotAllowed = ConstError("method not allowed") - // Forbidden represents an error when a request cannot be completed because of - // missing privileges. - Forbidden = ConstError("forbidden") - // QuotaLimitExceeded is emitted when an action failed due to a quota limit check. - QuotaLimitExceeded = ConstError("quota limit exceeded") - // NotYetAvailable is the error returned when a resource is not yet available - // but it might be in the future. - NotYetAvailable = ConstError("not yet available") -) - -// errWithType is an Err bundled with its error type (a ConstError) -type errWithType struct { - error - errType ConstError -} - -// Is compares `target` with e's error type -func (e *errWithType) Is(target error) bool { - if &e.errType == nil { - return false - } - return target == e.errType -} - -// Unwrap an errWithType gives the underlying Err -func (e *errWithType) Unwrap() error { - return e.error -} - -func wrapErrorWithMsg(err error, msg string) error { - if err == nil { - return stderror.New(msg) - } - if msg == "" { - return err - } - return fmt.Errorf("%s: %w", msg, err) -} - -func makeWrappedConstError(err error, format string, args ...interface{}) error { - separator := " " - if err.Error() == "" || errors.Is(err, &fmtNoop{}) { - separator = "" - } - return fmt.Errorf(strings.Join([]string{format, "%w"}, separator), append(args, err)...) -} - -// WithType is responsible for annotating an already existing error so that it -// also satisfies that of a ConstError. The resultant error returned should -// satisfy Is(err, errType). If err is nil then a nil error will also be returned. -// -// Now with Go's Is, As and Unwrap support it no longer makes sense to Wrap() -// 2 errors as both of those errors could be chains of errors in their own right. 
-// WithType aims to solve some of the usefulness of Wrap with the ability to -// make a pre-existing error also satisfy a ConstError type. -func WithType(err error, errType ConstError) error { - if err == nil { - return nil - } - return &errWithType{ - error: err, - errType: errType, - } -} - -// Timeoutf returns an error which satisfies Is(err, Timeout) and the Locationer -// interface. -func Timeoutf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(Timeout, format, args...), - 1, - ) -} - -// NewTimeout returns an error which wraps err and satisfies Is(err, Timeout) -// and the Locationer interface. -func NewTimeout(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: Timeout, - } -} - -// Deprecated: IsTimeout reports whether err is a Timeout error. Use -// Is(err, Timeout). -func IsTimeout(err error) bool { - return Is(err, Timeout) -} - -// NotFoundf returns an error which satisfies Is(err, NotFound) and the -// Locationer interface. -func NotFoundf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(NotFound, format, args...), - 1, - ) -} - -// NewNotFound returns an error which wraps err and satisfies Is(err, NotFound) -// and the Locationer interface. -func NewNotFound(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: NotFound, - } -} - -// Deprecated: IsNotFound reports whether err is a NotFound error. Use -// Is(err, NotFound). -func IsNotFound(err error) bool { - return Is(err, NotFound) -} - -// UserNotFoundf returns an error which satisfies Is(err, UserNotFound) and the -// Locationer interface. -func UserNotFoundf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(UserNotFound, format, args...), - 1, - ) -} - -// NewUserNotFound returns an error which wraps err and satisfies -// Is(err, UserNotFound) and the Locationer interface. -func NewUserNotFound(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: UserNotFound, - } -} - -// Deprecated: IsUserNotFound reports whether err is a UserNotFound error. Use -// Is(err, UserNotFound). -func IsUserNotFound(err error) bool { - return Is(err, UserNotFound) -} - -// Unauthorizedf returns an error that satisfies Is(err, Unauthorized) and -// the Locationer interface. -func Unauthorizedf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(Hide(Unauthorized), format, args...), - 1, - ) -} - -// NewUnauthorized returns an error which wraps err and satisfies -// Is(err, Unathorized) and the Locationer interface. -func NewUnauthorized(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: Unauthorized, - } -} - -// Deprecated: IsUnauthorized reports whether err is a Unauthorized error. Use -// Is(err, Unauthorized). -func IsUnauthorized(err error) bool { - return Is(err, Unauthorized) -} - -// NotImplementedf returns an error which satisfies Is(err, NotImplemented) and -// the Locationer interface. -func NotImplementedf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(NotImplemented, format, args...), - 1, - ) -} - -// NewNotImplemented returns an error which wraps err and satisfies -// Is(err, NotImplemented) and the Locationer interface. 
-func NewNotImplemented(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: NotImplemented, - } -} - -// Deprecated: IsNotImplemented reports whether err is a NotImplemented error. -// Use Is(err, NotImplemented). -func IsNotImplemented(err error) bool { - return Is(err, NotImplemented) -} - -// AlreadyExistsf returns an error which satisfies Is(err, AlreadyExists) and -// the Locationer interface. -func AlreadyExistsf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(AlreadyExists, format, args...), - 1, - ) -} - -// NewAlreadyExists returns an error which wraps err and satisfies -// Is(err, AlreadyExists) and the Locationer interface. -func NewAlreadyExists(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: AlreadyExists, - } -} - -// Deprecated: IsAlreadyExists reports whether the err is a AlreadyExists -// error. Use Is(err, AlreadyExists). -func IsAlreadyExists(err error) bool { - return Is(err, AlreadyExists) -} - -// NotSupportedf returns an error which satisfies Is(err, NotSupported) and the -// Locationer interface. -func NotSupportedf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(NotSupported, format, args...), - 1, - ) -} - -// NewNotSupported returns an error which satisfies Is(err, NotSupported) and -// the Locationer interface. -func NewNotSupported(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: NotSupported, - } -} - -// Deprecated: IsNotSupported reports whether err is a NotSupported error. Use -// Is(err, NotSupported). -func IsNotSupported(err error) bool { - return Is(err, NotSupported) -} - -// NotValidf returns an error which satisfies Is(err, NotValid) and the -// Locationer interface. -func NotValidf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(NotValid, format, args...), - 1, - ) -} - -// NewNotValid returns an error which wraps err and satisfies Is(err, NotValid) -// and the Locationer interface. -func NewNotValid(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: NotValid, - } -} - -// Deprecated: IsNotValid reports whether err is a NotValid error. Use -// Is(err, NotValid). -func IsNotValid(err error) bool { - return Is(err, NotValid) -} - -// NotProvisionedf returns an error which satisfies Is(err, NotProvisioned) and -// the Locationer interface. -func NotProvisionedf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(NotProvisioned, format, args...), - 1, - ) -} - -// NewNotProvisioned returns an error which wraps err and satisfies -// Is(err, NotProvisioned) and the Locationer interface. -func NewNotProvisioned(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: NotProvisioned, - } -} - -// Deprecated: IsNotProvisioned reports whether err is a NotProvisioned error. -// Use Is(err, NotProvisioned). -func IsNotProvisioned(err error) bool { - return Is(err, NotProvisioned) -} - -// NotAssignedf returns an error which satisfies Is(err, NotAssigned) and the -// Locationer interface. 
-func NotAssignedf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(NotAssigned, format, args...), - 1, - ) -} - -// NewNotAssigned returns an error which wraps err and satisfies -// Is(err, NotAssigned) and the Locationer interface. -func NewNotAssigned(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: NotAssigned, - } -} - -// Deprecated: IsNotAssigned reports whether err is a NotAssigned error. -// Use Is(err, NotAssigned) -func IsNotAssigned(err error) bool { - return Is(err, NotAssigned) -} - -// BadRequestf returns an error which satisfies Is(err, BadRequest) and the -// Locationer interface. -func BadRequestf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(Hide(BadRequest), format, args...), - 1, - ) -} - -// NewBadRequest returns an error which wraps err and satisfies -// Is(err, BadRequest) and the Locationer interface. -func NewBadRequest(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: BadRequest, - } -} - -// Deprecated: IsBadRequest reports whether err is a BadRequest error. -// Use Is(err, BadRequest) -func IsBadRequest(err error) bool { - return Is(err, BadRequest) -} - -// MethodNotAllowedf returns an error which satisfies Is(err, MethodNotAllowed) -// and the Locationer interface. -func MethodNotAllowedf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(Hide(MethodNotAllowed), format, args...), - 1, - ) -} - -// NewMethodNotAllowed returns an error which wraps err and satisfies -// Is(err, MethodNotAllowed) and the Locationer interface. -func NewMethodNotAllowed(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: MethodNotAllowed, - } -} - -// Deprecated: IsMethodNotAllowed reports whether err is a MethodNotAllowed -// error. Use Is(err, MethodNotAllowed) -func IsMethodNotAllowed(err error) bool { - return Is(err, MethodNotAllowed) -} - -// Forbiddenf returns an error which satistifes Is(err, Forbidden) and the -// Locationer interface. -func Forbiddenf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(Hide(Forbidden), format, args...), - 1, - ) -} - -// NewForbidden returns an error which wraps err and satisfies -// Is(err, Forbidden) and the Locationer interface. -func NewForbidden(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: Forbidden, - } -} - -// Deprecated: IsForbidden reports whether err is a Forbidden error. Use -// Is(err, Forbidden). -func IsForbidden(err error) bool { - return Is(err, Forbidden) -} - -// QuotaLimitExceededf returns an error which satisfies -// Is(err, QuotaLimitExceeded) and the Locationer interface. -func QuotaLimitExceededf(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(Hide(QuotaLimitExceeded), format, args...), - 1, - ) -} - -// NewQuotaLimitExceeded returns an error which wraps err and satisfies -// Is(err, QuotaLimitExceeded) and the Locationer interface. -func NewQuotaLimitExceeded(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: QuotaLimitExceeded, - } -} - -// Deprecated: IsQuotaLimitExceeded reports whether err is a QuoteLimitExceeded -// err. 
Use Is(err, QuotaLimitExceeded). -func IsQuotaLimitExceeded(err error) bool { - return Is(err, QuotaLimitExceeded) -} - -// NotYetAvailablef returns an error which satisfies Is(err, NotYetAvailable) -// and the Locationer interface. -func NotYetAvailablef(format string, args ...interface{}) error { - return newLocationError( - makeWrappedConstError(Hide(NotYetAvailable), format, args...), - 1, - ) -} - -// NewNotYetAvailable returns an error which wraps err and satisfies -// Is(err, NotYetAvailable) and the Locationer interface. -func NewNotYetAvailable(err error, msg string) error { - return &errWithType{ - error: newLocationError(wrapErrorWithMsg(err, msg), 1), - errType: NotYetAvailable, - } -} - -// Deprecated: IsNotYetAvailable reports whether err is a NotYetAvailable err. -// Use Is(err, NotYetAvailable) -func IsNotYetAvailable(err error) bool { - return Is(err, NotYetAvailable) -} diff --git a/vendor/github.com/juju/errors/functions.go b/vendor/github.com/juju/errors/functions.go deleted file mode 100644 index 952a6739..00000000 --- a/vendor/github.com/juju/errors/functions.go +++ /dev/null @@ -1,454 +0,0 @@ -// Copyright 2014 Canonical Ltd. -// Licensed under the LGPLv3, see LICENCE file for details. - -package errors - -import ( - stderrors "errors" - "fmt" - "runtime" - "strings" -) - -// New is a drop in replacement for the standard library errors module that records -// the location that the error is created. -// -// For example: -// return errors.New("validation failed") -// -func New(message string) error { - err := &Err{message: message} - err.SetLocation(1) - return err -} - -// Errorf creates a new annotated error and records the location that the -// error is created. This should be a drop in replacement for fmt.Errorf. -// -// For example: -// return errors.Errorf("validation failed: %s", message) -// -func Errorf(format string, args ...interface{}) error { - err := &Err{message: fmt.Sprintf(format, args...)} - err.SetLocation(1) - return err -} - -// getLocation records the package path-qualified function name of the error at -// callDepth stack frames above the call. -func getLocation(callDepth int) (string, int) { - rpc := make([]uintptr, 1) - n := runtime.Callers(callDepth+2, rpc[:]) - if n < 1 { - return "", 0 - } - frame, _ := runtime.CallersFrames(rpc).Next() - return frame.Function, frame.Line -} - -// Trace adds the location of the Trace call to the stack. The Cause of the -// resulting error is the same as the error parameter. If the other error is -// nil, the result will be nil. -// -// For example: -// if err := SomeFunc(); err != nil { -// return errors.Trace(err) -// } -// -func Trace(other error) error { - //return SetLocation(other, 2) - if other == nil { - return nil - } - err := &Err{previous: other, cause: Cause(other)} - err.SetLocation(1) - return err -} - -// Annotate is used to add extra context to an existing error. The location of -// the Annotate call is recorded with the annotations. The file, line and -// function are also recorded. -// -// For example: -// if err := SomeFunc(); err != nil { -// return errors.Annotate(err, "failed to frombulate") -// } -// -func Annotate(other error, message string) error { - if other == nil { - return nil - } - err := &Err{ - previous: other, - cause: Cause(other), - message: message, - } - err.SetLocation(1) - return err -} - -// Annotatef is used to add extra context to an existing error. The location of -// the Annotate call is recorded with the annotations. 
The file, line and -// function are also recorded. -// -// For example: -// if err := SomeFunc(); err != nil { -// return errors.Annotatef(err, "failed to frombulate the %s", arg) -// } -// -func Annotatef(other error, format string, args ...interface{}) error { - if other == nil { - return nil - } - err := &Err{ - previous: other, - cause: Cause(other), - message: fmt.Sprintf(format, args...), - } - err.SetLocation(1) - return err -} - -// DeferredAnnotatef annotates the given error (when it is not nil) with the given -// format string and arguments (like fmt.Sprintf). If *err is nil, DeferredAnnotatef -// does nothing. This method is used in a defer statement in order to annotate any -// resulting error with the same message. -// -// For example: -// -// defer DeferredAnnotatef(&err, "failed to frombulate the %s", arg) -// -func DeferredAnnotatef(err *error, format string, args ...interface{}) { - if *err == nil { - return - } - newErr := &Err{ - message: fmt.Sprintf(format, args...), - cause: Cause(*err), - previous: *err, - } - newErr.SetLocation(1) - *err = newErr -} - -// Wrap changes the Cause of the error. The location of the Wrap call is also -// stored in the error stack. -// -// For example: -// if err := SomeFunc(); err != nil { -// newErr := &packageError{"more context", private_value} -// return errors.Wrap(err, newErr) -// } -// -func Wrap(other, newDescriptive error) error { - err := &Err{ - previous: other, - cause: newDescriptive, - } - err.SetLocation(1) - return err -} - -// Wrapf changes the Cause of the error, and adds an annotation. The location -// of the Wrap call is also stored in the error stack. -// -// For example: -// if err := SomeFunc(); err != nil { -// return errors.Wrapf(err, simpleErrorType, "invalid value %q", value) -// } -// -func Wrapf(other, newDescriptive error, format string, args ...interface{}) error { - err := &Err{ - message: fmt.Sprintf(format, args...), - previous: other, - cause: newDescriptive, - } - err.SetLocation(1) - return err -} - -// Maskf masks the given error with the given format string and arguments (like -// fmt.Sprintf), returning a new error that maintains the error stack, but -// hides the underlying error type. The error string still contains the full -// annotations. If you want to hide the annotations, call Wrap. -func Maskf(other error, format string, args ...interface{}) error { - if other == nil { - return nil - } - err := &Err{ - message: fmt.Sprintf(format, args...), - previous: other, - } - err.SetLocation(1) - return err -} - -// Mask hides the underlying error type, and records the location of the masking. -func Mask(other error) error { - if other == nil { - return nil - } - err := &Err{ - previous: other, - } - err.SetLocation(1) - return err -} - -// Cause returns the cause of the given error. This will be either the -// original error, or the result of a Wrap or Mask call. -// -// Cause is the usual way to diagnose errors that may have been wrapped by -// the other errors functions. -func Cause(err error) error { - var diag error - if err, ok := err.(causer); ok { - diag = err.Cause() - } - if diag != nil { - return diag - } - return err -} - -type causer interface { - Cause() error -} - -type wrapper interface { - // Message returns the top level error message, - // not including the message from the Previous - // error. - Message() string - - // Underlying returns the Previous error, or nil - // if there is none. 
- Underlying() error -} - -var ( - _ wrapper = (*Err)(nil) - _ Locationer = (*Err)(nil) - _ causer = (*Err)(nil) -) - -// Details returns information about the stack of errors wrapped by err, in -// the format: -// -// [{filename:99: error one} {otherfile:55: cause of error one}] -// -// This is a terse alternative to ErrorStack as it returns a single line. -func Details(err error) string { - if err == nil { - return "[]" - } - var s []byte - s = append(s, '[') - for { - s = append(s, '{') - if err, ok := err.(Locationer); ok { - file, line := err.Location() - if file != "" { - s = append(s, fmt.Sprintf("%s:%d", file, line)...) - s = append(s, ": "...) - } - } - if cerr, ok := err.(wrapper); ok { - s = append(s, cerr.Message()...) - err = cerr.Underlying() - } else { - s = append(s, err.Error()...) - err = nil - } - s = append(s, '}') - if err == nil { - break - } - s = append(s, ' ') - } - s = append(s, ']') - return string(s) -} - -// ErrorStack returns a string representation of the annotated error. If the -// error passed as the parameter is not an annotated error, the result is -// simply the result of the Error() method on that error. -// -// If the error is an annotated error, a multi-line string is returned where -// each line represents one entry in the annotation stack. The full filename -// from the call stack is used in the output. -// -// first error -// github.com/juju/errors/annotation_test.go:193: -// github.com/juju/errors/annotation_test.go:194: annotation -// github.com/juju/errors/annotation_test.go:195: -// github.com/juju/errors/annotation_test.go:196: more context -// github.com/juju/errors/annotation_test.go:197: -func ErrorStack(err error) string { - return strings.Join(errorStack(err), "\n") -} - -func errorStack(err error) []string { - if err == nil { - return nil - } - - // We want the first error first - var lines []string - for { - var buff []byte - if err, ok := err.(Locationer); ok { - file, line := err.Location() - // Strip off the leading GOPATH/src path elements. - if file != "" { - buff = append(buff, fmt.Sprintf("%s:%d", file, line)...) - buff = append(buff, ": "...) - } - } - if cerr, ok := err.(wrapper); ok { - message := cerr.Message() - buff = append(buff, message...) - // If there is a cause for this error, and it is different to the cause - // of the underlying error, then output the error string in the stack trace. - var cause error - if err1, ok := err.(causer); ok { - cause = err1.Cause() - } - err = cerr.Underlying() - if cause != nil && !sameError(Cause(err), cause) { - if message != "" { - buff = append(buff, ": "...) - } - buff = append(buff, cause.Error()...) - } - } else { - buff = append(buff, err.Error()...) - err = nil - } - lines = append(lines, string(buff)) - if err == nil { - break - } - } - // reverse the lines to get the original error, which was at the end of - // the list, back to the start. - var result []string - for i := len(lines); i > 0; i-- { - result = append(result, lines[i-1]) - } - return result -} - -// Unwrap is a proxy for the Unwrap function in Go's standard `errors` library -// (pkg.go.dev/errors). -func Unwrap(err error) error { - return stderrors.Unwrap(err) -} - -// Is is a proxy for the Is function in Go's standard `errors` library -// (pkg.go.dev/errors). 
-func Is(err, target error) bool { - return stderrors.Is(err, target) -} - -// HasType is a function wrapper around AsType dropping the where return value -// from AsType() making a function that can be used like this: -// -// return HasType[*MyError](err) -// -// Or -// -// if HasType[*MyError](err) {} -func HasType[T error](err error) bool { - _, rval := AsType[T](err) - return rval -} - -// As is a proxy for the As function in Go's standard `errors` library -// (pkg.go.dev/errors). -func As(err error, target interface{}) bool { - return stderrors.As(err, target) -} - -// AsType is a convenience method for checking and getting an error from within -// a chain that is of type T. If no error is found of type T in the chain the -// zero value of T is returned with false. If an error in the chain implementes -// As(any) bool then it's As method will be called if it's type is not of type T. - -// AsType finds the first error in err's chain that is assignable to type T, and -// if a match is found, returns that error value and true. Otherwise, it returns -// T's zero value and false. -// -// AsType is equivalent to errors.As, but uses a type parameter and returns -// the target, to avoid having to define a variable before the call. For -// example, callers can replace this: -// -// var pathError *fs.PathError -// if errors.As(err, &pathError) { -// fmt.Println("Failed at path:", pathError.Path) -// } -// -// With: -// -// if pathError, ok := errors.AsType[*fs.PathError](err); ok { -// fmt.Println("Failed at path:", pathError.Path) -// } -func AsType[T error](err error) (T, bool) { - for err != nil { - if e, is := err.(T); is { - return e, true - } - var res T - if x, ok := err.(interface{ As(any) bool }); ok && x.As(&res) { - return res, true - } - err = stderrors.Unwrap(err) - } - var zero T - return zero, false -} - -// SetLocation takes a given error and records where in the stack SetLocation -// was called from and returns the wrapped error with the location information -// set. The returned error implements the Locationer interface. If err is nil -// then a nil error is returned. -func SetLocation(err error, callDepth int) error { - if err == nil { - return nil - } - - return newLocationError(err, callDepth) -} - -// fmtNoop provides an internal type for wrapping errors so they won't be -// printed in fmt type commands. As this type is used by the Hide function it's -// expected that error not be nil. -type fmtNoop struct { - error -} - -// Format implements the fmt.Formatter interface so that the error wrapped by -// fmtNoop will not be printed. -func (*fmtNoop) Format(_ fmt.State, r rune) {} - -// Is implements errors.Is. It useful for us to be able to check if an error -// chain has fmtNoop for formatting purposes. -func (f *fmtNoop) Is(err error) bool { - _, is := err.(*fmtNoop) - return is -} - -// Unwrap implements the errors.Unwrap method returning the error wrapped by -// fmtNoop. 
-func (f *fmtNoop) Unwrap() error { - return f.error -} - -// Hide takes an error and silences it's error string from appearing in fmt -// like -func Hide(err error) error { - if err == nil { - return nil - } - return &fmtNoop{err} -} diff --git a/vendor/github.com/juju/retry/.gitignore b/vendor/github.com/juju/retry/.gitignore deleted file mode 100644 index 9ed3b07c..00000000 --- a/vendor/github.com/juju/retry/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.test diff --git a/vendor/github.com/juju/retry/LICENSE b/vendor/github.com/juju/retry/LICENSE deleted file mode 100644 index ade9307b..00000000 --- a/vendor/github.com/juju/retry/LICENSE +++ /dev/null @@ -1,191 +0,0 @@ -All files in this repository are licensed as follows. If you contribute -to this repository, it is assumed that you license your contribution -under the same license unless you state otherwise. - -All files Copyright (C) 2015 Canonical Ltd. unless otherwise specified in the file. - -This software is licensed under the LGPLv3, included below. - -As a special exception to the GNU Lesser General Public License version 3 -("LGPL3"), the copyright holders of this Library give you permission to -convey to a third party a Combined Work that links statically or dynamically -to this Library without providing any Minimal Corresponding Source or -Minimal Application Code as set out in 4d or providing the installation -information set out in section 4e, provided that you comply with the other -provisions of LGPL3 and provided that you meet, for the Application the -terms and conditions of the license(s) which apply to the Application. - -Except as stated in this special exception, the provisions of LGPL3 will -continue to comply in full to this Library. If you modify this Library, you -may apply this exception to your version of this Library, but you are not -obliged to do so. If you do not wish to do so, delete this exception -statement from your version. This exception does not (and cannot) modify any -license terms which apply to the Application, with which you must still -comply. - - - GNU LESSER GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - - This version of the GNU Lesser General Public License incorporates -the terms and conditions of version 3 of the GNU General Public -License, supplemented by the additional permissions listed below. - - 0. Additional Definitions. - - As used herein, "this License" refers to version 3 of the GNU Lesser -General Public License, and the "GNU GPL" refers to version 3 of the GNU -General Public License. - - "The Library" refers to a covered work governed by this License, -other than an Application or a Combined Work as defined below. - - An "Application" is any work that makes use of an interface provided -by the Library, but which is not otherwise based on the Library. -Defining a subclass of a class defined by the Library is deemed a mode -of using an interface provided by the Library. - - A "Combined Work" is a work produced by combining or linking an -Application with the Library. The particular version of the Library -with which the Combined Work was made is also called the "Linked -Version". 
- - The "Minimal Corresponding Source" for a Combined Work means the -Corresponding Source for the Combined Work, excluding any source code -for portions of the Combined Work that, considered in isolation, are -based on the Application, and not on the Linked Version. - - The "Corresponding Application Code" for a Combined Work means the -object code and/or source code for the Application, including any data -and utility programs needed for reproducing the Combined Work from the -Application, but excluding the System Libraries of the Combined Work. - - 1. Exception to Section 3 of the GNU GPL. - - You may convey a covered work under sections 3 and 4 of this License -without being bound by section 3 of the GNU GPL. - - 2. Conveying Modified Versions. - - If you modify a copy of the Library, and, in your modifications, a -facility refers to a function or data to be supplied by an Application -that uses the facility (other than as an argument passed when the -facility is invoked), then you may convey a copy of the modified -version: - - a) under this License, provided that you make a good faith effort to - ensure that, in the event an Application does not supply the - function or data, the facility still operates, and performs - whatever part of its purpose remains meaningful, or - - b) under the GNU GPL, with none of the additional permissions of - this License applicable to that copy. - - 3. Object Code Incorporating Material from Library Header Files. - - The object code form of an Application may incorporate material from -a header file that is part of the Library. You may convey such object -code under terms of your choice, provided that, if the incorporated -material is not limited to numerical parameters, data structure -layouts and accessors, or small macros, inline functions and templates -(ten or fewer lines in length), you do both of the following: - - a) Give prominent notice with each copy of the object code that the - Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the object code with a copy of the GNU GPL and this license - document. - - 4. Combined Works. - - You may convey a Combined Work under terms of your choice that, -taken together, effectively do not restrict modification of the -portions of the Library contained in the Combined Work and reverse -engineering for debugging such modifications, if you also do each of -the following: - - a) Give prominent notice with each copy of the Combined Work that - the Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the Combined Work with a copy of the GNU GPL and this license - document. - - c) For a Combined Work that displays copyright notices during - execution, include the copyright notice for the Library among - these notices, as well as a reference directing the user to the - copies of the GNU GPL and this license document. - - d) Do one of the following: - - 0) Convey the Minimal Corresponding Source under the terms of this - License, and the Corresponding Application Code in a form - suitable for, and under terms that permit, the user to - recombine or relink the Application with a modified version of - the Linked Version to produce a modified Combined Work, in the - manner specified by section 6 of the GNU GPL for conveying - Corresponding Source. - - 1) Use a suitable shared library mechanism for linking with the - Library. 
A suitable mechanism is one that (a) uses at run time - a copy of the Library already present on the user's computer - system, and (b) will operate properly with a modified version - of the Library that is interface-compatible with the Linked - Version. - - e) Provide Installation Information, but only if you would otherwise - be required to provide such information under section 6 of the - GNU GPL, and only to the extent that such information is - necessary to install and execute a modified version of the - Combined Work produced by recombining or relinking the - Application with a modified version of the Linked Version. (If - you use option 4d0, the Installation Information must accompany - the Minimal Corresponding Source and Corresponding Application - Code. If you use option 4d1, you must provide the Installation - Information in the manner specified by section 6 of the GNU GPL - for conveying Corresponding Source.) - - 5. Combined Libraries. - - You may place library facilities that are a work based on the -Library side by side in a single library together with other library -facilities that are not Applications and are not covered by this -License, and convey such a combined library under terms of your -choice, if you do both of the following: - - a) Accompany the combined library with a copy of the same work based - on the Library, uncombined with any other library facilities, - conveyed under the terms of this License. - - b) Give prominent notice with the combined library that part of it - is a work based on the Library, and explaining where to find the - accompanying uncombined form of the same work. - - 6. Revised Versions of the GNU Lesser General Public License. - - The Free Software Foundation may publish revised and/or new versions -of the GNU Lesser General Public License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. - - Each version is given a distinguishing version number. If the -Library as you received it specifies that a certain numbered version -of the GNU Lesser General Public License "or any later version" -applies to it, you have the option of following the terms and -conditions either of that published version or of any later version -published by the Free Software Foundation. If the Library as you -received it does not specify a version number of the GNU Lesser -General Public License, you may choose any version of the GNU Lesser -General Public License ever published by the Free Software Foundation. - - If the Library as you received it specifies that a proxy can decide -whether future versions of the GNU Lesser General Public License shall -apply, that proxy's public statement of acceptance of any version is -permanent authorization for you to choose that version for the -Library. diff --git a/vendor/github.com/juju/retry/Makefile b/vendor/github.com/juju/retry/Makefile deleted file mode 100644 index 6d36bad0..00000000 --- a/vendor/github.com/juju/retry/Makefile +++ /dev/null @@ -1,15 +0,0 @@ -PROJECT := github.com/juju/retry - -default: check - -check-licence: - @(fgrep -rl "Licensed under the LGPLv3" .;\ - fgrep -rl "MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT" .;\ - find . -name "*.go") | sed -e 's,\./,,' | sort | uniq -u | \ - xargs -I {} echo FAIL: licence missed: {} - -check: check-licence - go test $(PROJECT)/... 
- -docs: - godoc2md $(PROJECT) > README.md diff --git a/vendor/github.com/juju/retry/README.md b/vendor/github.com/juju/retry/README.md deleted file mode 100644 index 1fbe9a47..00000000 --- a/vendor/github.com/juju/retry/README.md +++ /dev/null @@ -1,277 +0,0 @@ - -# retry - import "github.com/juju/retry" - -The retry package encapsulates the mechanism around retrying commands. - -The simple use is to call retry.Call with a function closure. - -```go - - - err := retry.Call(retry.CallArgs{ - Func: func() error { ... }, - Attempts: 5, - Delay: time.Minute, - Clock: clock.WallClock, - }) - -``` - -The bare minimum arguments that need to be specified are: -* Func - the function to call -* Attempts - the number of times to try Func before giving up, or a negative number for unlimited attempts (`retry.UnlimitedAttempts`) -* Delay - how long to wait between each try that returns an error -* Clock - either the wall clock, or some testing clock - -Any error that is returned from the `Func` is considered transient. -In order to identify some errors as fatal, pass in a function for the -`IsFatalError` CallArgs value. - -In order to have the `Delay` change for each iteration, a `BackoffFunc` -needs to be set on the CallArgs. A simple doubling delay function is -provided by `DoubleDelay`. - -An example of a more complex `BackoffFunc` could be a stepped function such -as: - -```go - - - func StepDelay(last time.Duration, attempt int) time.Duration { - switch attempt{ - case 1: - return time.Second - case 2: - return 5 * time.Second - case 3: - return 20 * time.Second - case 4: - return time.Minute - case 5: - return 5 * time.Minute - default: - return 2 * last - } - } - -``` - -Consider some package `foo` that has a `TryAgainError`, which looks something -like this: -```go - - - type TryAgainError struct { - After time.Duration - } - -``` -and we create something that looks like this: - -```go - - - type TryAgainHelper struct { - next time.Duration - } - - func (h *TryAgainHelper) notify(lastError error, attempt int) { - if tryAgain, ok := lastError.(*foo.TryAgainError); ok { - h.next = tryAgain.After - } else { - h.next = 0 - } - } - - func (h *TryAgainHelper) next(last time.Duration) time.Duration { - if h.next != 0 { - return h.next - } - return last - } - -``` - -Then we could do this: -```go - - - helper := TryAgainHelper{} - retry.Call(retry.CallArgs{ - Func: func() error { - return foo.SomeFunc() - }, - NotifyFunc: helper.notify, - BackoffFunc: helper.next, - Attempts: 20, - Delay: 100 * time.Millisecond, - Clock: clock.WallClock, - }) - -``` - - - - -## Constants -``` go -const ( - // UnlimitedAttempts can be used as a value for `Attempts` to clearly - // show to the reader that there is no limit to the number of attempts. - UnlimitedAttempts = -1 -) -``` - - -## func Call -``` go -func Call(args CallArgs) error -``` -Call will repeatedly execute the Func until either the function returns no -error, the retry count is exceeded or the stop channel is closed. - - -## func DoubleDelay -``` go -func DoubleDelay(delay time.Duration, attempt int) time.Duration -``` -DoubleDelay provides a simple function that doubles the duration passed in. -This can then be easily used as the `BackoffFunc` in the `CallArgs` -structure. 
- -## func ExpBackoff -``` go -func ExpBackoff(minDelay, maxDelay time.Duration, exp float64, applyJitter bool) func(time.Duration, int) time.Duration { -``` -ExpBackoff returns a function a which generates time.Duration values using an -exponential back-off algorithm with the specified parameters. The returned value -can then be easily used as the `BackoffFunc` in the `CallArgs` structure. - -The next delay value is calculated using the following formula: - `newDelay = min(minDelay * exp^attempt, maxDelay)` - -If `applyJitter` is set to `true`, the function will randomly select and return -back a value in the `[minDelay, newDelay]` range. - -## func IsAttemptsExceeded -``` go -func IsAttemptsExceeded(err error) bool -``` -IsAttemptsExceeded returns true if the error is the result of the `Call` -function finishing due to hitting the requested number of `Attempts`. - - -## func IsDurationExceeded -``` go -func IsDurationExceeded(err error) bool -``` -IsDurationExceeded returns true if the error is the result of the `Call` -function finishing due to the total duration exceeding the specified -`MaxDuration` value. - - -## func IsRetryStopped -``` go -func IsRetryStopped(err error) bool -``` -IsRetryStopped returns true if the error is the result of the `Call` -function finishing due to the stop channel being closed. - - -## func LastError -``` go -func LastError(err error) error -``` -LastError retrieves the last error returned from `Func` before iteration -was terminated due to the attempt count being exceeded, the maximum -duration being exceeded, or the stop channel being closed. - - - -## type CallArgs -``` go -type CallArgs struct { - // Func is the function that will be retried if it returns an error result. - Func func() error - - // IsFatalError is a function that, if set, will be called for every non- - // nil error result from `Func`. If `IsFatalError` returns true, the error - // is immediately returned breaking out from any further retries. - IsFatalError func(error) bool - - // NotifyFunc is a function that is called if Func fails, and the attempt - // number. The first time this function is called attempt is 1, the second - // time, attempt is 2 and so on. - NotifyFunc func(lastError error, attempt int) - - // Attempts specifies the number of times Func should be retried before - // giving up and returning the `AttemptsExceeded` error. If a negative - // value is specified, the `Call` will retry forever. - Attempts int - - // Delay specifies how long to wait between retries. - Delay time.Duration - - // MaxDelay specifies how longest time to wait between retries. If no - // value is specified there is no maximum delay. - MaxDelay time.Duration - - // MaxDuration specifies the maximum time the `Call` function should spend - // iterating over `Func`. The duration is calculated from the start of the - // `Call` function. If the next delay time would take the total duration - // of the call over MaxDuration, then a DurationExceeded error is - // returned. If no value is specified, Call will continue until the number - // of attempts is complete. - MaxDuration time.Duration - - // BackoffFunc allows the caller to provide a function that alters the - // delay each time through the loop. If this function is not provided the - // delay is the same each iteration. Alternatively a function such as - // `retry.DoubleDelay` can be used that will provide an exponential - // backoff. The first time this function is called attempt is 1, the - // second time, attempt is 2 and so on. 
- BackoffFunc func(delay time.Duration, attempt int) time.Duration - - // Clock provides the mechanism for waiting. Normal program execution is - // expected to use something like clock.WallClock, and tests can override - // this to not actually sleep in tests. - Clock clock.Clock - - // Stop is a channel that can be used to indicate that the waiting should - // be interrupted. If Stop is nil, then the Call function cannot be interrupted. - // If the channel is closed prior to the Call function being executed, the - // Func is still attempted once. - Stop <-chan struct{} -} -``` -CallArgs is a simple structure used to define the behaviour of the Call -function. - - - - - - - - - - - -### func (\*CallArgs) Validate -``` go -func (args *CallArgs) Validate() error -``` -Validate the values are valid. The ensures that the Func, Delay, Attempts -and Clock have been specified. - - - - - - - - - -- - - -Generated by [godoc2md](http://godoc.org/github.com/davecheney/godoc2md) diff --git a/vendor/github.com/juju/retry/clock.go b/vendor/github.com/juju/retry/clock.go deleted file mode 100644 index 3451fbf3..00000000 --- a/vendor/github.com/juju/retry/clock.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2015 Canonical Ltd. -// Licensed under the LGPLv3, see LICENCE file for details. - -package retry - -import "time" - -// Clock provides an interface for dealing with clocks. -type Clock interface { - // Now returns the current clock time. - Now() time.Time - - // After waits for the duration to elapse and then sends the - // current time on the returned channel. - After(time.Duration) <-chan time.Time -} diff --git a/vendor/github.com/juju/retry/doc.go b/vendor/github.com/juju/retry/doc.go deleted file mode 100644 index 8a7393e0..00000000 --- a/vendor/github.com/juju/retry/doc.go +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright 2015 Canonical Ltd. -// Licensed under the LGPLv3, see LICENCE file for details. - -// The retry package encapsulates the mechanism around retrying commands. -// -// The simple use is to call retry.Call with a function closure. -// -// err := retry.Call(retry.CallArgs{ -// Func: func() error { ... }, -// Attempts: 5, -// Delay: time.Minute, -// Clock: clock.WallClock, -// }) -// -// The bare minimum arguments that need to be specified are: -// - Func - the function to call -// - Attempts - the number of times to try Func before giving up, or a negative number for unlimited attempts (`retry.UnlimitedAttempts`) -// - Delay - how long to wait between each try that returns an error -// - Clock - either the wall clock, or some testing clock -// -// Any error that is returned from the Func is considered transient. -// In order to identify some errors as fatal, pass in a function for the -// IsFatalError CallArgs value. -// -// In order to have the Delay change for each iteration, a BackoffFunc -// needs to be set on the CallArgs. A simple doubling delay function is -// provided by DoubleDelay. 
-// -// An example of a more complex BackoffFunc could be a stepped function such -// as: -// -// func StepDelay(last time.Duration, attempt int) time.Duration { -// switch attempt{ -// case 1: -// return time.Second -// case 2: -// return 5 * time.Second -// case 3: -// return 20 * time.Second -// case 4: -// return time.Minute -// case 5: -// return 5 * time.Minute -// default: -// return 2 * last -// } -// } -// -// Consider some package foo that has a TryAgainError, which looks something -// like this: -// -// type TryAgainError struct { -// After time.Duration -// } -// -// and we create something that looks like this: -// -// type TryAgainHelper struct { -// next time.Duration -// } -// -// func (h *TryAgainHelper) notify(lastError error, attempt int) { -// if tryAgain, ok := lastError.(*foo.TryAgainError); ok { -// h.next = tryAgain.After -// } else { -// h.next = 0 -// } -// } -// -// func (h *TryAgainHelper) next(last time.Duration) time.Duration { -// if h.next != 0 { -// return h.next -// } -// return last -// } -// -// Then we could do this: -// -// helper := TryAgainHelper{} -// retry.Call(retry.CallArgs{ -// Func: func() error { -// return foo.SomeFunc() -// }, -// NotifyFunc: helper.notify, -// BackoffFunc: helper.next, -// Attempts: 20, -// Delay: 100 * time.Millisecond, -// Clock: clock.WallClock, -// }) -package retry diff --git a/vendor/github.com/juju/retry/retry.go b/vendor/github.com/juju/retry/retry.go deleted file mode 100644 index d9964d6d..00000000 --- a/vendor/github.com/juju/retry/retry.go +++ /dev/null @@ -1,260 +0,0 @@ -// Copyright 2015 Canonical Ltd. -// Licensed under the LGPLv3, see LICENCE file for details. - -package retry - -import ( - "fmt" - "math" - "math/rand" - "time" - - "github.com/juju/errors" -) - -const ( - // UnlimitedAttempts can be used as a value for Attempts to clearly - // show to the reader that there is no limit to the number of attempts. - UnlimitedAttempts = -1 -) - -// retryStopped is the error that is returned from the Call function -// when the stop channel has been closed. -type retryStopped struct { - lastError error -} - -// Error provides the implementation for the error interface method. -func (e *retryStopped) Error() string { - return fmt.Sprintf("retry stopped") -} - -// attemptsExceeded is the error that is returned when the retry count has -// been hit without the function returning a nil error result. The last error -// returned from the function being retried is available as the LastError -// attribute. -type attemptsExceeded struct { - lastError error -} - -// Error provides the implementation for the error interface method. -func (e *attemptsExceeded) Error() string { - return fmt.Sprintf("attempt count exceeded: %s", e.lastError) -} - -// durationExceeded is the error that is returned when the total time that the -// Call function would have executed exceeds the MaxDuration specified. -// The last error returned from the function being retried is available as the -// LastError attribute. -type durationExceeded struct { - lastError error -} - -// Error provides the implementation for the error interface method. -func (e *durationExceeded) Error() string { - return fmt.Sprintf("max duration exceeded: %s", e.lastError) -} - -// LastError retrieves the last error returned from Func before iteration -// was terminated due to the attempt count being exceeded, the maximum -// duration being exceeded, or the stop channel being closed. 
-func LastError(err error) error { - cause := errors.Cause(err) - switch err := cause.(type) { - case *attemptsExceeded: - return err.lastError - case *retryStopped: - return err.lastError - case *durationExceeded: - return err.lastError - } - return errors.Errorf("unexpected error type: %T, %s", cause, cause) -} - -// IsAttemptsExceeded returns true if the error is the result of the Call -// function finishing due to hitting the requested number of Attempts. -func IsAttemptsExceeded(err error) bool { - cause := errors.Cause(err) - _, ok := cause.(*attemptsExceeded) - return ok -} - -// IsDurationExceeded returns true if the error is the result of the Call -// function finishing due to the total duration exceeding the specified -// MaxDuration value. -func IsDurationExceeded(err error) bool { - cause := errors.Cause(err) - _, ok := cause.(*durationExceeded) - return ok -} - -// IsRetryStopped returns true if the error is the result of the Call -// function finishing due to the stop channel being closed. -func IsRetryStopped(err error) bool { - cause := errors.Cause(err) - _, ok := cause.(*retryStopped) - return ok -} - -// CallArgs is a simple structure used to define the behaviour of the Call -// function. -type CallArgs struct { - // Func is the function that will be retried if it returns an error result. - Func func() error - - // IsFatalError is a function that, if set, will be called for every non- - // nil error result from Func. If IsFatalError returns true, the error - // is immediately returned breaking out from any further retries. - IsFatalError func(error) bool - - // NotifyFunc is a function that is called if Func fails, and the attempt - // number. The first time this function is called attempt is 1, the second - // time, attempt is 2 and so on. - NotifyFunc func(lastError error, attempt int) - - // Attempts specifies the number of times Func should be retried before - // giving up and returning the AttemptsExceeded error. If a negative - // value is specified, the Call will retry forever. - Attempts int - - // Delay specifies how long to wait between retries. - Delay time.Duration - - // MaxDelay specifies how longest time to wait between retries. If no - // value is specified there is no maximum delay. - MaxDelay time.Duration - - // MaxDuration specifies the maximum time the Call function should spend - // iterating over Func. The duration is calculated from the start of the - // Call function. If the next delay time would take the total duration - // of the call over MaxDuration, then a DurationExceeded error is - // returned. If no value is specified, Call will continue until the number - // of attempts is complete. - MaxDuration time.Duration - - // BackoffFunc allows the caller to provide a function that alters the - // delay each time through the loop. If this function is not provided the - // delay is the same each iteration. Alternatively a function such as - // retry.DoubleDelay can be used that will provide an exponential - // backoff. The first time this function is called attempt is 1, the - // second time, attempt is 2 and so on. - BackoffFunc func(delay time.Duration, attempt int) time.Duration - - // Clock provides the mechanism for waiting. Normal program execution is - // expected to use something like clock.WallClock, and tests can override - // this to not actually sleep in tests. - Clock Clock - - // Stop is a channel that can be used to indicate that the waiting should - // be interrupted. If Stop is nil, then the Call function cannot be interrupted. 
- // If the channel is closed prior to the Call function being executed, the - // Func is still attempted once. - Stop <-chan struct{} -} - -// Validate the values are valid. The ensures that the Func, Delay, Attempts -// and Clock have been specified. -func (args *CallArgs) Validate() error { - if args.Func == nil { - return errors.NotValidf("missing Func") - } - if args.Delay == 0 { - return errors.NotValidf("missing Delay") - } - if args.Clock == nil { - return errors.NotValidf("missing Clock") - } - // One of Attempts or MaxDuration need to be specified - if args.Attempts == 0 && args.MaxDuration == 0 { - return errors.NotValidf("missing Attempts or MaxDuration") - } - return nil -} - -// Call will repeatedly execute the Func until either the function returns no -// error, the retry count is exceeded or the stop channel is closed. -func Call(args CallArgs) error { - err := args.Validate() - if err != nil { - return errors.Trace(err) - } - start := args.Clock.Now() - for i := 1; args.Attempts <= 0 || i <= args.Attempts; i++ { - err = args.Func() - if err == nil { - return nil - } - if args.IsFatalError != nil && args.IsFatalError(err) { - return errors.Trace(err) - } - if args.NotifyFunc != nil { - args.NotifyFunc(err, i) - } - if i == args.Attempts && args.Attempts > 0 { - break // don't wait before returning the error - } - - if args.BackoffFunc != nil { - delay := args.BackoffFunc(args.Delay, i) - if delay > args.MaxDelay && args.MaxDelay > 0 { - delay = args.MaxDelay - } - args.Delay = delay - } - elapsedTime := args.Clock.Now().Sub(start) - if args.MaxDuration > 0 && (elapsedTime+args.Delay) > args.MaxDuration { - return errors.Wrap(err, &durationExceeded{err}) - } - - // Wait for the delay, and retry - select { - case <-args.Clock.After(args.Delay): - case <-args.Stop: - return errors.Wrap(err, &retryStopped{err}) - } - } - return errors.Wrap(err, &attemptsExceeded{err}) -} - -// DoubleDelay provides a simple function that doubles the duration passed in. -// This can then be easily used as the BackoffFunc in the CallArgs -// structure. -func DoubleDelay(delay time.Duration, attempt int) time.Duration { - if attempt == 1 { - return delay - } - return delay * 2 -} - -// ExpBackoff returns a function a which generates time.Duration values using -// an exponential back-off algorithm with the specified parameters. The -// returned value can then be easily used as the BackoffFunc in the CallArgs -// structure. -// -// The next delay value is calculated using the following formula: -// -// newDelay = min(minDelay * exp^attempt, maxDelay) -// -// If applyJitter is set to true, the function will randomly select and return -// back a value in the [minDelay, newDelay] range. -func ExpBackoff(minDelay, maxDelay time.Duration, exp float64, applyJitter bool) func(time.Duration, int) time.Duration { - minDelayF := float64(minDelay) - maxDelayF := float64(maxDelay) - return func(_ time.Duration, attempt int) time.Duration { - newDelay := minDelayF * math.Pow(exp, float64(attempt)) - - // Return a random value in the [minDelay, newDelay) range. 
- if applyJitter { - // We want to go +/- 20%, which is a 40% swing, and - // Float64 returns in the range 0-1 - newDelay = (1 + rand.Float64()*0.4 - 0.2) * newDelay - } - if newDelay < minDelayF { - newDelay = minDelayF - } - if newDelay > maxDelayF { - newDelay = maxDelayF - } - return time.Duration(newDelay).Round(time.Millisecond) - } -} diff --git a/vendor/modules.txt b/vendor/modules.txt index f090ee01..1222d730 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -131,19 +131,6 @@ github.com/jinzhu/now # github.com/josharian/intern v1.0.0 ## explicit; go 1.5 github.com/josharian/intern -# github.com/juju/clock v1.1.1 -## explicit; go 1.18 -github.com/juju/clock -# github.com/juju/errors v1.0.0 -## explicit; go 1.18 -github.com/juju/errors -# github.com/juju/loggo v1.0.0 -## explicit; go 1.14 -# github.com/juju/retry v1.0.1 -## explicit; go 1.17 -github.com/juju/retry -# github.com/juju/testing v1.0.2 -## explicit; go 1.17 # github.com/mailru/easyjson v0.9.0 ## explicit; go 1.20 github.com/mailru/easyjson/buffer diff --git a/websocket/client.go b/websocket/client.go index 70777265..820e49ce 100644 --- a/websocket/client.go +++ b/websocket/client.go @@ -16,6 +16,7 @@ package websocket import ( "context" + "errors" "fmt" "log/slog" "net" @@ -24,7 +25,6 @@ import ( "github.com/google/uuid" "github.com/gorilla/websocket" - "github.com/pkg/errors" "github.com/cloudbase/garm/auth" "github.com/cloudbase/garm/database/common" @@ -63,7 +63,7 @@ func NewClient(ctx context.Context, conn *websocket.Conn) (*Client, error) { watcher.WithUserIDFilter(user), ) if err != nil { - return nil, errors.Wrap(err, "registering consumer") + return nil, fmt.Errorf("error registering consumer: %w", err) } return &Client{ id: clientID.String(), From 31ad45eeb6251df91dee72f99f658fc0c9ca0949 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sat, 16 Aug 2025 23:00:55 +0000 Subject: [PATCH 161/179] Allow referencing runners by ID Although runner names are unique, we still have an ID on the model which is used as a primary key. We should allow using that ID to reference a runner in the API. This change allows users to specify ID or runner name. 
Signed-off-by: Gabriel Adrian Samfira --- auth/instance_middleware.go | 2 +- database/common/store.go | 9 ++++----- database/sql/instances.go | 30 ++++++++++++++--------------- database/sql/instances_test.go | 35 ++++++++-------------------------- database/sql/jobs.go | 4 ++-- runner/pool/pool.go | 2 +- runner/runner.go | 4 ++-- 7 files changed, 32 insertions(+), 54 deletions(-) diff --git a/auth/instance_middleware.go b/auth/instance_middleware.go index dc31327e..6d1d66e4 100644 --- a/auth/instance_middleware.go +++ b/auth/instance_middleware.go @@ -120,7 +120,7 @@ func (amw *instanceMiddleware) claimsToContext(ctx context.Context, claims *Inst return nil, runnerErrors.ErrUnauthorized } - instanceInfo, err := amw.store.GetInstanceByName(ctx, claims.Name) + instanceInfo, err := amw.store.GetInstance(ctx, claims.Name) if err != nil { return ctx, runnerErrors.ErrUnauthorized } diff --git a/database/common/store.go b/database/common/store.go index d768f159..0cf5d929 100644 --- a/database/common/store.go +++ b/database/common/store.go @@ -75,7 +75,6 @@ type PoolStore interface { ListPoolInstances(ctx context.Context, poolID string) ([]params.Instance, error) PoolInstanceCount(ctx context.Context, poolID string) (int64, error) - GetPoolInstanceByName(ctx context.Context, poolID string, instanceName string) (params.Instance, error) FindPoolsMatchingAllTags(ctx context.Context, entityType params.ForgeEntityType, entityID string, tags []string) ([]params.Pool, error) } @@ -91,9 +90,9 @@ type UserStore interface { type InstanceStore interface { CreateInstance(ctx context.Context, poolID string, param params.CreateInstanceParams) (params.Instance, error) - DeleteInstance(ctx context.Context, poolID string, instanceName string) error + DeleteInstance(ctx context.Context, poolID string, instanceNameOrID string) error DeleteInstanceByName(ctx context.Context, instanceName string) error - UpdateInstance(ctx context.Context, instanceName string, param params.UpdateInstanceParams) (params.Instance, error) + UpdateInstance(ctx context.Context, instanceNameOrID string, param params.UpdateInstanceParams) (params.Instance, error) // Probably a bad idea without some king of filter or at least pagination // @@ -101,8 +100,8 @@ type InstanceStore interface { // TODO: add filter/pagination ListAllInstances(ctx context.Context) ([]params.Instance, error) - GetInstanceByName(ctx context.Context, instanceName string) (params.Instance, error) - AddInstanceEvent(ctx context.Context, instanceName string, event params.EventType, eventLevel params.EventLevel, eventMessage string) error + GetInstance(ctx context.Context, instanceNameOrID string) (params.Instance, error) + AddInstanceEvent(ctx context.Context, instanceNameOrID string, event params.EventType, eventLevel params.EventLevel, eventMessage string) error } type JobsStore interface { diff --git a/database/sql/instances.go b/database/sql/instances.go index 92194c5e..5f9d018e 100644 --- a/database/sql/instances.go +++ b/database/sql/instances.go @@ -103,9 +103,16 @@ func (s *sqlDatabase) getPoolInstanceByName(poolID string, instanceName string) return instance, nil } -func (s *sqlDatabase) getInstanceByName(_ context.Context, instanceName string, preload ...string) (Instance, error) { +func (s *sqlDatabase) getInstance(_ context.Context, instanceNameOrID string, preload ...string) (Instance, error) { var instance Instance + var whereArg any = instanceNameOrID + whereClause := "name = ?" 
+ id, err := uuid.Parse(instanceNameOrID) + if err == nil { + whereArg = id + whereClause = "id = ?" + } q := s.conn if len(preload) > 0 { @@ -116,7 +123,7 @@ func (s *sqlDatabase) getInstanceByName(_ context.Context, instanceName string, q = q.Model(&Instance{}). Preload(clause.Associations). - Where("name = ?", instanceName). + Where(whereClause, whereArg). First(&instance) if q.Error != nil { if errors.Is(q.Error, gorm.ErrRecordNotFound) { @@ -127,17 +134,8 @@ func (s *sqlDatabase) getInstanceByName(_ context.Context, instanceName string, return instance, nil } -func (s *sqlDatabase) GetPoolInstanceByName(_ context.Context, poolID string, instanceName string) (params.Instance, error) { - instance, err := s.getPoolInstanceByName(poolID, instanceName) - if err != nil { - return params.Instance{}, fmt.Errorf("error fetching instance: %w", err) - } - - return s.sqlToParamsInstance(instance) -} - -func (s *sqlDatabase) GetInstanceByName(ctx context.Context, instanceName string) (params.Instance, error) { - instance, err := s.getInstanceByName(ctx, instanceName, "StatusMessages", "Pool", "ScaleSet") +func (s *sqlDatabase) GetInstance(ctx context.Context, instanceName string) (params.Instance, error) { + instance, err := s.getInstance(ctx, instanceName, "StatusMessages", "Pool", "ScaleSet") if err != nil { return params.Instance{}, fmt.Errorf("error fetching instance: %w", err) } @@ -189,7 +187,7 @@ func (s *sqlDatabase) DeleteInstance(_ context.Context, poolID string, instanceN } func (s *sqlDatabase) DeleteInstanceByName(ctx context.Context, instanceName string) error { - instance, err := s.getInstanceByName(ctx, instanceName, "Pool", "ScaleSet") + instance, err := s.getInstance(ctx, instanceName, "Pool", "ScaleSet") if err != nil { if errors.Is(err, runnerErrors.ErrNotFound) { return nil @@ -231,7 +229,7 @@ func (s *sqlDatabase) DeleteInstanceByName(ctx context.Context, instanceName str } func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string, event params.EventType, eventLevel params.EventLevel, statusMessage string) error { - instance, err := s.getInstanceByName(ctx, instanceName) + instance, err := s.getInstance(ctx, instanceName) if err != nil { return fmt.Errorf("error updating instance: %w", err) } @@ -249,7 +247,7 @@ func (s *sqlDatabase) AddInstanceEvent(ctx context.Context, instanceName string, } func (s *sqlDatabase) UpdateInstance(ctx context.Context, instanceName string, param params.UpdateInstanceParams) (params.Instance, error) { - instance, err := s.getInstanceByName(ctx, instanceName, "Pool", "ScaleSet") + instance, err := s.getInstance(ctx, instanceName, "Pool", "ScaleSet") if err != nil { return params.Instance{}, fmt.Errorf("error updating instance: %w", err) } diff --git a/database/sql/instances_test.go b/database/sql/instances_test.go index c6093327..5ec55107 100644 --- a/database/sql/instances_test.go +++ b/database/sql/instances_test.go @@ -196,7 +196,7 @@ func (s *InstancesTestSuite) TestCreateInstance() { // assertions s.Require().Nil(err) - storeInstance, err := s.Store.GetInstanceByName(s.adminCtx, s.Fixtures.CreateInstanceParams.Name) + storeInstance, err := s.Store.GetInstance(s.adminCtx, s.Fixtures.CreateInstanceParams.Name) if err != nil { s.FailNow(fmt.Sprintf("failed to get instance: %v", err)) } @@ -236,29 +236,10 @@ func (s *InstancesTestSuite) TestCreateInstanceDBCreateErr() { s.Require().Equal("error creating instance: mocked insert instance error", err.Error()) } -func (s *InstancesTestSuite) TestGetPoolInstanceByName() { - 
storeInstance := s.Fixtures.Instances[0] // this is already created in `SetupTest()` - - instance, err := s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) - - s.Require().Nil(err) - s.Require().Equal(storeInstance.Name, instance.Name) - s.Require().Equal(storeInstance.PoolID, instance.PoolID) - s.Require().Equal(storeInstance.OSArch, instance.OSArch) - s.Require().Equal(storeInstance.OSType, instance.OSType) - s.Require().Equal(storeInstance.CallbackURL, instance.CallbackURL) -} - -func (s *InstancesTestSuite) TestGetPoolInstanceByNameNotFound() { - _, err := s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, "not-existent-instance-name") - - s.Require().Equal("error fetching instance: error fetching pool instance by name: not found", err.Error()) -} - func (s *InstancesTestSuite) TestGetInstanceByName() { storeInstance := s.Fixtures.Instances[1] - instance, err := s.Store.GetInstanceByName(s.adminCtx, storeInstance.Name) + instance, err := s.Store.GetInstance(s.adminCtx, storeInstance.Name) s.Require().Nil(err) s.Require().Equal(storeInstance.Name, instance.Name) @@ -269,7 +250,7 @@ func (s *InstancesTestSuite) TestGetInstanceByName() { } func (s *InstancesTestSuite) TestGetInstanceByNameFetchInstanceFailed() { - _, err := s.Store.GetInstanceByName(s.adminCtx, "not-existent-instance-name") + _, err := s.Store.GetInstance(s.adminCtx, "not-existent-instance-name") s.Require().Equal("error fetching instance: error fetching instance by name: not found", err.Error()) } @@ -281,8 +262,8 @@ func (s *InstancesTestSuite) TestDeleteInstance() { s.Require().Nil(err) - _, err = s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) - s.Require().Equal("error fetching instance: error fetching pool instance by name: not found", err.Error()) + _, err = s.Store.GetInstance(s.adminCtx, storeInstance.Name) + s.Require().Equal("error fetching instance: error fetching instance by name: not found", err.Error()) err = s.Store.DeleteInstance(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) s.Require().Nil(err) @@ -295,8 +276,8 @@ func (s *InstancesTestSuite) TestDeleteInstanceByName() { s.Require().Nil(err) - _, err = s.Store.GetPoolInstanceByName(s.adminCtx, s.Fixtures.Pool.ID, storeInstance.Name) - s.Require().Equal("error fetching instance: error fetching pool instance by name: not found", err.Error()) + _, err = s.Store.GetInstance(s.adminCtx, storeInstance.Name) + s.Require().Equal("error fetching instance: error fetching instance by name: not found", err.Error()) err = s.Store.DeleteInstanceByName(s.adminCtx, storeInstance.Name) s.Require().Nil(err) @@ -390,7 +371,7 @@ func (s *InstancesTestSuite) TestAddInstanceEvent() { err := s.Store.AddInstanceEvent(s.adminCtx, storeInstance.Name, params.StatusEvent, params.EventInfo, statusMsg) s.Require().Nil(err) - instance, err := s.Store.GetInstanceByName(s.adminCtx, storeInstance.Name) + instance, err := s.Store.GetInstance(s.adminCtx, storeInstance.Name) if err != nil { s.FailNow(fmt.Sprintf("failed to get db instance: %s", err)) } diff --git a/database/sql/jobs.go b/database/sql/jobs.go index f4d24e42..5740052a 100644 --- a/database/sql/jobs.go +++ b/database/sql/jobs.go @@ -100,7 +100,7 @@ func (s *sqlDatabase) paramsJobToWorkflowJob(ctx context.Context, job params.Job } if job.RunnerName != "" { - instance, err := s.getInstanceByName(s.ctx, job.RunnerName) + instance, err := s.getInstance(s.ctx, job.RunnerName) if err != nil { // This usually is very normal as not all jobs run on our 
runners. slog.DebugContext(ctx, "failed to get instance by name", "instance_name", job.RunnerName) @@ -282,7 +282,7 @@ func (s *sqlDatabase) CreateOrUpdateJob(ctx context.Context, job params.Job) (pa } if job.RunnerName != "" { - instance, err := s.getInstanceByName(ctx, job.RunnerName) + instance, err := s.getInstance(ctx, job.RunnerName) if err == nil { workflowJob.InstanceID = &instance.ID } else { diff --git a/runner/pool/pool.go b/runner/pool/pool.go index 690fed93..8610d4c9 100644 --- a/runner/pool/pool.go +++ b/runner/pool/pool.go @@ -557,7 +557,7 @@ func (r *basePoolManager) cleanupOrphanedGithubRunners(runners []*github.Runner) continue } - dbInstance, err := r.store.GetInstanceByName(r.ctx, *runner.Name) + dbInstance, err := r.store.GetInstance(r.ctx, *runner.Name) if err != nil { if !errors.Is(err, runnerErrors.ErrNotFound) { return fmt.Errorf("error fetching instance from DB: %w", err) diff --git a/runner/runner.go b/runner/runner.go index 2c12071d..bf081522 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -731,7 +731,7 @@ func (r *Runner) GetInstance(ctx context.Context, instanceName string) (params.I return params.Instance{}, runnerErrors.ErrUnauthorized } - instance, err := r.store.GetInstanceByName(ctx, instanceName) + instance, err := r.store.GetInstance(ctx, instanceName) if err != nil { return params.Instance{}, fmt.Errorf("error fetching instance: %w", err) } @@ -852,7 +852,7 @@ func (r *Runner) DeleteRunner(ctx context.Context, instanceName string, forceDel return runnerErrors.ErrUnauthorized } - instance, err := r.store.GetInstanceByName(ctx, instanceName) + instance, err := r.store.GetInstance(ctx, instanceName) if err != nil { return fmt.Errorf("error fetching instance: %w", err) } From f805123a859dc10f5f8c77a8d99c6ee88c7a8652 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sat, 16 Aug 2025 23:13:47 +0000 Subject: [PATCH 162/179] Remove the --all flag for scalesets Display all scalesets by default, similar to runners and pools. 
Signed-off-by: Gabriel Adrian Samfira --- cmd/garm-cli/cmd/scalesets.go | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/cmd/garm-cli/cmd/scalesets.go b/cmd/garm-cli/cmd/scalesets.go index cf64c9fa..a78fe33f 100644 --- a/cmd/garm-cli/cmd/scalesets.go +++ b/cmd/garm-cli/cmd/scalesets.go @@ -47,7 +47,6 @@ var ( scalesetEnterprise string scalesetExtraSpecsFile string scalesetExtraSpecs string - scalesetAll bool scalesetGitHubRunnerGroup string ) @@ -128,12 +127,9 @@ Example: listEnterpriseScaleSetsReq := apiClientEnterprises.NewListEnterpriseScaleSetsParams() listEnterpriseScaleSetsReq.EnterpriseID = scalesetEnterprise response, err = apiCli.Enterprises.ListEnterpriseScaleSets(listEnterpriseScaleSetsReq, authToken) - } else if cmd.Flags().Changed("all") { + } else { listScaleSetsReq := apiClientScaleSets.NewListScalesetsParams() response, err = apiCli.Scalesets.ListScalesets(listScaleSetsReq, authToken) - } else { - cmd.Help() //nolint - os.Exit(0) } default: cmd.Help() //nolint @@ -400,8 +396,7 @@ func init() { scalesetListCmd.Flags().StringVarP(&scalesetRepository, "repo", "r", "", "List all scale sets within this repository.") scalesetListCmd.Flags().StringVarP(&scalesetOrganization, "org", "o", "", "List all scale sets within this organization.") scalesetListCmd.Flags().StringVarP(&scalesetEnterprise, "enterprise", "e", "", "List all scale sets within this enterprise.") - scalesetListCmd.Flags().BoolVarP(&scalesetAll, "all", "a", false, "List all scale sets, regardless of org or repo.") - scalesetListCmd.MarkFlagsMutuallyExclusive("repo", "org", "all", "enterprise") + scalesetListCmd.MarkFlagsMutuallyExclusive("repo", "org", "enterprise") scalesetListCmd.Flags().StringVar(&endpointName, "endpoint", "", "When using the name of an entity, the endpoint must be specified when multiple entities with the same name exist.") scaleSetUpdateCmd.Flags().StringVar(&scalesetImage, "image", "", "The provider-specific image name to use for runners in this scale set.") From 7f647941f6bec46df78dc240c64e0ee6e090d6b7 Mon Sep 17 00:00:00 2001 From: Gabriel Adrian Samfira Date: Sun, 17 Aug 2025 07:34:40 +0000 Subject: [PATCH 163/179] Slightly better error handling Extract error details we get from the API when status code > 2xx. Also, use toast messages to display the error, properly close delete modals and prevent full page display of error messages. 
Signed-off-by: Gabriel Adrian Samfira --- .../assets/_app/immutable/chunks/2p_hWkLJ.js | 1 + .../chunks/{C41YH50Q.js => BE8f1Riw.js} | 2 +- .../chunks/{DDhBTdDt.js => BEoJgOul.js} | 2 +- .../assets/_app/immutable/chunks/BZiHL9L3.js | 1 + .../assets/_app/immutable/chunks/Bgb-frqW.js | 1 + .../chunks/{C9DJVOi1.js => BrNfsPe8.js} | 2 +- .../assets/_app/immutable/chunks/BsqC4UA1.js | 1 + .../chunks/{CLYUNKnN.js => BzlxTz7Q.js} | 2 +- .../assets/_app/immutable/chunks/C89fcOde.js | 1 - .../assets/_app/immutable/chunks/CIBm3n2u.js | 1 + .../assets/_app/immutable/chunks/CRD55Dyg.js | 1 + .../assets/_app/immutable/chunks/CRhkqW2i.js | 1 + .../assets/_app/immutable/chunks/CTf6mQoE.js | 3 - .../assets/_app/immutable/chunks/CclkODgu.js | 1 - .../assets/_app/immutable/chunks/CoIRRsD9.js | 1 - .../assets/_app/immutable/chunks/CwqI2jFH.js | 1 - .../chunks/{B7ITzBt8.js => D4PaGKsV.js} | 2 +- .../assets/_app/immutable/chunks/DQP15tlf.js | 1 - .../assets/_app/immutable/chunks/DXCC0cSN.js | 3 + .../assets/_app/immutable/chunks/Dbd6PPbz.js | 1 - .../chunks/{BE4wujub.js => MCv1Wq2q.js} | 2 +- .../chunks/{BmGWMSQm.js => OpktHEmj.js} | 2 +- .../{app.kAVAdeq9.js => app.OegKHTwb.js} | 4 +- .../_app/immutable/entry/start.CI0Cdear.js | 1 - .../_app/immutable/entry/start.S-sEy6br.js | 1 + .../nodes/{0.DINiyk_8.js => 0.g860C_Ot.js} | 2 +- .../nodes/{1.DcR4nNsi.js => 1.BjWDFEyD.js} | 2 +- .../_app/immutable/nodes/10.Ci2MePhm.js | 1 + .../_app/immutable/nodes/10.LnrIJgIa.js | 1 - .../_app/immutable/nodes/11.BX_bMXWi.js | 1 + .../_app/immutable/nodes/11.Bsn67lBa.js | 1 - .../_app/immutable/nodes/12.B-vC_cmu.js | 1 - .../_app/immutable/nodes/12.C0lS_ubI.js | 1 + .../_app/immutable/nodes/13.Br7HzjXP.js | 1 - .../_app/immutable/nodes/13.CEJ1u9Ql.js | 1 + .../_app/immutable/nodes/14.BJHnbtAi.js | 1 + .../_app/immutable/nodes/14.Cd0DOn96.js | 1 - .../_app/immutable/nodes/15.CkHQugXH.js | 1 - .../_app/immutable/nodes/15.CqYhwqAI.js | 1 + .../_app/immutable/nodes/16.B35VVkOd.js | 1 - .../_app/immutable/nodes/16.BVViOnXd.js | 1 + .../_app/immutable/nodes/17.CCltcs-Z.js | 1 - .../_app/immutable/nodes/17.DLt70sQQ.js | 1 + .../_app/immutable/nodes/18.eu91cRrS.js | 1 + .../_app/immutable/nodes/18.iVIhGVtu.js | 1 - .../assets/_app/immutable/nodes/2.1DFwbmOU.js | 1 + .../assets/_app/immutable/nodes/2.CiT4lj0D.js | 1 - .../assets/_app/immutable/nodes/3.BSFz0YHn.js | 7 -- .../assets/_app/immutable/nodes/3.BWxN3TuB.js | 7 ++ .../assets/_app/immutable/nodes/4.D1IF4qSs.js | 3 + .../assets/_app/immutable/nodes/4.XnVoh6ca.js | 3 - .../assets/_app/immutable/nodes/5.CeMzA7DH.js | 1 + .../assets/_app/immutable/nodes/5.rvsSG-AQ.js | 1 - .../assets/_app/immutable/nodes/6.BPDnwpl3.js | 1 + .../assets/_app/immutable/nodes/6.CtGX0qgG.js | 1 - .../assets/_app/immutable/nodes/7.0w3i9VHx.js | 1 - .../assets/_app/immutable/nodes/7.CaVS6POQ.js | 1 + .../assets/_app/immutable/nodes/8.BiZNKYxk.js | 1 - .../assets/_app/immutable/nodes/8.W6llQu20.js | 1 + .../assets/_app/immutable/nodes/9.DfrxaqP7.js | 1 + .../assets/_app/immutable/nodes/9.DpSfMRgo.js | 1 - webapp/assets/_app/version.json | 2 +- webapp/assets/index.html | 14 +-- .../components/CreateEnterpriseModal.svelte | 8 +- .../components/CreateOrganizationModal.svelte | 7 +- .../src/lib/components/CreatePoolModal.svelte | 7 +- .../components/CreateRepositoryModal.svelte | 7 +- .../lib/components/CreateScaleSetModal.svelte | 7 +- .../lib/components/EntityInformation.svelte | 2 +- .../src/lib/components/EventsSection.svelte | 2 +- .../components/UpdateEnterpriseModal.svelte | 5 +- 
.../lib/components/UpdateEntityModal.svelte | 5 +- .../components/UpdateOrganizationModal.svelte | 5 +- .../src/lib/components/UpdatePoolModal.svelte | 3 +- .../components/UpdateRepositoryModal.svelte | 5 +- .../lib/components/UpdateScaleSetModal.svelte | 3 +- .../src/lib/components/WebhookSection.svelte | 3 +- webapp/src/lib/utils/apiError.ts | 88 +++++++++++++++++++ webapp/src/routes/+page.svelte | 5 +- webapp/src/routes/credentials/+page.svelte | 20 ++--- webapp/src/routes/endpoints/+page.svelte | 21 ++--- webapp/src/routes/enterprises/+page.svelte | 8 +- .../src/routes/enterprises/[id]/+page.svelte | 9 +- webapp/src/routes/init/+page.svelte | 3 +- webapp/src/routes/instances/+page.svelte | 13 ++- webapp/src/routes/instances/[id]/+page.svelte | 6 +- webapp/src/routes/login/+page.svelte | 3 +- webapp/src/routes/organizations/+page.svelte | 16 +++- .../routes/organizations/[id]/+page.svelte | 11 ++- webapp/src/routes/pools/+page.svelte | 35 ++++---- webapp/src/routes/pools/[id]/+page.svelte | 12 ++- webapp/src/routes/repositories/+page.svelte | 22 ++--- .../src/routes/repositories/[id]/+page.svelte | 11 ++- webapp/src/routes/scalesets/+page.svelte | 21 ++--- webapp/src/routes/scalesets/[id]/+page.svelte | 9 +- 95 files changed, 296 insertions(+), 195 deletions(-) create mode 100644 webapp/assets/_app/immutable/chunks/2p_hWkLJ.js rename webapp/assets/_app/immutable/chunks/{C41YH50Q.js => BE8f1Riw.js} (71%) rename webapp/assets/_app/immutable/chunks/{DDhBTdDt.js => BEoJgOul.js} (95%) create mode 100644 webapp/assets/_app/immutable/chunks/BZiHL9L3.js create mode 100644 webapp/assets/_app/immutable/chunks/Bgb-frqW.js rename webapp/assets/_app/immutable/chunks/{C9DJVOi1.js => BrNfsPe8.js} (78%) create mode 100644 webapp/assets/_app/immutable/chunks/BsqC4UA1.js rename webapp/assets/_app/immutable/chunks/{CLYUNKnN.js => BzlxTz7Q.js} (92%) delete mode 100644 webapp/assets/_app/immutable/chunks/C89fcOde.js create mode 100644 webapp/assets/_app/immutable/chunks/CIBm3n2u.js create mode 100644 webapp/assets/_app/immutable/chunks/CRD55Dyg.js create mode 100644 webapp/assets/_app/immutable/chunks/CRhkqW2i.js delete mode 100644 webapp/assets/_app/immutable/chunks/CTf6mQoE.js delete mode 100644 webapp/assets/_app/immutable/chunks/CclkODgu.js delete mode 100644 webapp/assets/_app/immutable/chunks/CoIRRsD9.js delete mode 100644 webapp/assets/_app/immutable/chunks/CwqI2jFH.js rename webapp/assets/_app/immutable/chunks/{B7ITzBt8.js => D4PaGKsV.js} (96%) delete mode 100644 webapp/assets/_app/immutable/chunks/DQP15tlf.js create mode 100644 webapp/assets/_app/immutable/chunks/DXCC0cSN.js delete mode 100644 webapp/assets/_app/immutable/chunks/Dbd6PPbz.js rename webapp/assets/_app/immutable/chunks/{BE4wujub.js => MCv1Wq2q.js} (96%) rename webapp/assets/_app/immutable/chunks/{BmGWMSQm.js => OpktHEmj.js} (97%) rename webapp/assets/_app/immutable/entry/{app.kAVAdeq9.js => app.OegKHTwb.js} (50%) delete mode 100644 webapp/assets/_app/immutable/entry/start.CI0Cdear.js create mode 100644 webapp/assets/_app/immutable/entry/start.S-sEy6br.js rename webapp/assets/_app/immutable/nodes/{0.DINiyk_8.js => 0.g860C_Ot.js} (99%) rename webapp/assets/_app/immutable/nodes/{1.DcR4nNsi.js => 1.BjWDFEyD.js} (87%) create mode 100644 webapp/assets/_app/immutable/nodes/10.Ci2MePhm.js delete mode 100644 webapp/assets/_app/immutable/nodes/10.LnrIJgIa.js create mode 100644 webapp/assets/_app/immutable/nodes/11.BX_bMXWi.js delete mode 100644 webapp/assets/_app/immutable/nodes/11.Bsn67lBa.js delete mode 100644 
webapp/assets/_app/immutable/nodes/12.B-vC_cmu.js create mode 100644 webapp/assets/_app/immutable/nodes/12.C0lS_ubI.js delete mode 100644 webapp/assets/_app/immutable/nodes/13.Br7HzjXP.js create mode 100644 webapp/assets/_app/immutable/nodes/13.CEJ1u9Ql.js create mode 100644 webapp/assets/_app/immutable/nodes/14.BJHnbtAi.js delete mode 100644 webapp/assets/_app/immutable/nodes/14.Cd0DOn96.js delete mode 100644 webapp/assets/_app/immutable/nodes/15.CkHQugXH.js create mode 100644 webapp/assets/_app/immutable/nodes/15.CqYhwqAI.js delete mode 100644 webapp/assets/_app/immutable/nodes/16.B35VVkOd.js create mode 100644 webapp/assets/_app/immutable/nodes/16.BVViOnXd.js delete mode 100644 webapp/assets/_app/immutable/nodes/17.CCltcs-Z.js create mode 100644 webapp/assets/_app/immutable/nodes/17.DLt70sQQ.js create mode 100644 webapp/assets/_app/immutable/nodes/18.eu91cRrS.js delete mode 100644 webapp/assets/_app/immutable/nodes/18.iVIhGVtu.js create mode 100644 webapp/assets/_app/immutable/nodes/2.1DFwbmOU.js delete mode 100644 webapp/assets/_app/immutable/nodes/2.CiT4lj0D.js delete mode 100644 webapp/assets/_app/immutable/nodes/3.BSFz0YHn.js create mode 100644 webapp/assets/_app/immutable/nodes/3.BWxN3TuB.js create mode 100644 webapp/assets/_app/immutable/nodes/4.D1IF4qSs.js delete mode 100644 webapp/assets/_app/immutable/nodes/4.XnVoh6ca.js create mode 100644 webapp/assets/_app/immutable/nodes/5.CeMzA7DH.js delete mode 100644 webapp/assets/_app/immutable/nodes/5.rvsSG-AQ.js create mode 100644 webapp/assets/_app/immutable/nodes/6.BPDnwpl3.js delete mode 100644 webapp/assets/_app/immutable/nodes/6.CtGX0qgG.js delete mode 100644 webapp/assets/_app/immutable/nodes/7.0w3i9VHx.js create mode 100644 webapp/assets/_app/immutable/nodes/7.CaVS6POQ.js delete mode 100644 webapp/assets/_app/immutable/nodes/8.BiZNKYxk.js create mode 100644 webapp/assets/_app/immutable/nodes/8.W6llQu20.js create mode 100644 webapp/assets/_app/immutable/nodes/9.DfrxaqP7.js delete mode 100644 webapp/assets/_app/immutable/nodes/9.DpSfMRgo.js create mode 100644 webapp/src/lib/utils/apiError.ts diff --git a/webapp/assets/_app/immutable/chunks/2p_hWkLJ.js b/webapp/assets/_app/immutable/chunks/2p_hWkLJ.js new file mode 100644 index 00000000..02a9bb5c --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/2p_hWkLJ.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as dr}from"./B3Pzt0F_.js";import{p as or,E as sr,m as i,o as ir,s,f as le,j as d,r as o,k as t,g as r,n as c,u as m,t as f,x as Ge,z as nr,v as _,e as je,c as l,D as R,B as Be,b as $e,d as lr}from"./D8EpLgQ1.js";import{p as ur,i as E}from"./5WA7h8uK.js";import{r as b,b as ze}from"./CiE1LlKV.js";import{b as v,a as br}from"./C6k1Q4We.js";import{p as gr}from"./D4Caz1gY.js";import{M as cr}from"./qB7B8uiS.js";import{e as mr}from"./BZiHL9L3.js";import{J as vr}from"./DZblzgqm.js";var pr=le('

                '),fr=le('
                Updating...
                '),xr=le('

                Scale Set Information

                Provider:
                Entity:

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Extra Specs (JSON)
                ');function Jr(Ce,ue){or(ue,!1);let e=ur(ue,"scaleSet",8);const $=sr();let O=i(!1),I=i(""),J=i(e().name||""),T=i(e().image||""),A=i(e().flavor||""),M=i(e().max_runners),N=i(e().min_idle_runners),P=i(e().runner_bootstrap_timeout),U=i(e().runner_prefix||""),h=i(e().os_type||"linux"),k=i(e().os_arch||"amd64"),D=i(e()["github-runner-group"]||""),G=i(e().enabled),g=i("{}");ir(()=>{if(e().extra_specs)try{if(typeof e().extra_specs=="object")s(g,JSON.stringify(e().extra_specs,null,2));else{const u=JSON.parse(e().extra_specs);s(g,JSON.stringify(u,null,2))}}catch{s(g,e().extra_specs||"{}")}});async function Le(){try{s(O,!0),s(I,"");let u={};if(r(g).trim())try{u=JSON.parse(r(g))}catch{throw new Error("Invalid JSON in extra specs")}const w={name:r(J)!==e().name?r(J):void 0,image:r(T)!==e().image?r(T):void 0,flavor:r(A)!==e().flavor?r(A):void 0,max_runners:r(M)!==e().max_runners?r(M):void 0,min_idle_runners:r(N)!==e().min_idle_runners?r(N):void 0,runner_bootstrap_timeout:r(P)!==e().runner_bootstrap_timeout?r(P):void 0,runner_prefix:r(U)!==e().runner_prefix?r(U):void 0,os_type:r(h)!==e().os_type?r(h):void 0,os_arch:r(k)!==e().os_arch?r(k):void 0,"github-runner-group":r(D)!==e()["github-runner-group"]&&r(D)||void 0,enabled:r(G)!==e().enabled?r(G):void 0,extra_specs:r(g).trim()!==JSON.stringify(e().extra_specs||{},null,2).trim()?u:void 0};Object.keys(w).forEach(p=>{w[p]===void 0&&delete w[p]}),$("submit",w)}catch(u){s(I,mr(u))}finally{s(O,!1)}}dr(),cr(Ce,{$$events:{close:()=>$("close")},children:(u,w)=>{var p=xr(),z=d(p),be=d(z),Fe=d(be);o(be),o(z);var C=t(z,2),ge=d(C);{var He=a=>{var n=pr(),j=d(n),ne=d(j,!0);o(j),o(n),f(()=>_(ne,r(I))),l(a,n)};E(ge,a=>{r(I)&&a(He)})}var L=t(ge,2),ce=t(d(L),2),F=d(ce),me=t(d(F),2),We=d(me,!0);o(me),o(F);var ve=t(F,2),pe=t(d(ve),2),qe=d(pe);{var Ke=a=>{var n=R();f(()=>_(n,`Repository: ${c(e()),m(()=>e().repo_name)??""}`)),l(a,n)},Qe=a=>{var n=Be(),j=$e(n);{var ne=x=>{var S=R();f(()=>_(S,`Organization: ${c(e()),m(()=>e().org_name)??""}`)),l(x,S)},er=x=>{var S=Be(),rr=$e(S);{var ar=y=>{var B=R();f(()=>_(B,`Enterprise: ${c(e()),m(()=>e().enterprise_name)??""}`)),l(y,B)},tr=y=>{var B=R("Unknown Entity");l(y,B)};E(rr,y=>{c(e()),m(()=>e().enterprise_name)?y(ar):y(tr,!1)},!0)}l(x,S)};E(j,x=>{c(e()),m(()=>e().org_name)?x(ne):x(er,!1)},!0)}l(a,n)};E(qe,a=>{c(e()),m(()=>e().repo_name)?a(Ke):a(Qe,!1)})}o(pe),o(ve),o(ce),o(L);var H=t(L,2),fe=t(d(H),2);b(fe),o(H);var W=t(H,2),xe=t(d(W),2),q=d(xe),ye=t(d(q),2);b(ye),o(q);var K=t(q,2),_e=t(d(K),2);b(_e),o(K);var Q=t(K,2),V=t(d(Q),2);f(()=>{r(h),Ge(()=>{})});var X=d(V);X.value=X.__value="linux";var he=t(X);he.value=he.__value="windows",o(V),o(Q);var ke=t(Q,2),Y=t(d(ke),2);f(()=>{r(k),Ge(()=>{})});var Z=d(Y);Z.value=Z.__value="amd64";var we=t(Z);we.value=we.__value="arm64",o(Y),o(ke),o(xe),o(W);var ee=t(W,2),Se=t(d(ee),2),re=d(Se),Re=t(d(re),2);b(Re),o(re);var ae=t(re,2),Ee=t(d(ae),2);b(Ee),o(ae);var Oe=t(ae,2),Ie=t(d(Oe),2);b(Ie),o(Oe),o(Se),o(ee);var te=t(ee,2),de=t(d(te),2),oe=d(de),Je=t(d(oe),2);b(Je),o(oe);var Te=t(oe,2),Ae=t(d(Te),2);b(Ae),o(Te),o(de);var se=t(de,2),Me=d(se),Ve=t(d(Me),2);vr(Ve,{rows:4,placeholder:"{}",get value(){return r(g)},set value(a){s(g,a)},$$legacy:!0}),o(Me),o(se);var Ne=t(se,2),Pe=d(Ne);b(Pe),nr(2),o(Ne),o(te);var Ue=t(te,2),De=d(Ue),ie=t(De,2),Xe=d(ie);{var Ye=a=>{var n=fr();l(a,n)},Ze=a=>{var n=R("Update Scale Set");l(a,n)};E(Xe,a=>{r(O)?a(Ye):a(Ze,!1)})}o(ie),o(Ue),o(C),o(p),f(()=>{_(Fe,`Update Scale Set 
${c(e()),m(()=>e().name)??""}`),_(We,(c(e()),m(()=>e().provider_name))),ie.disabled=r(O)}),v(fe,()=>r(J),a=>s(J,a)),v(ye,()=>r(T),a=>s(T,a)),v(_e,()=>r(A),a=>s(A,a)),ze(V,()=>r(h),a=>s(h,a)),ze(Y,()=>r(k),a=>s(k,a)),v(Re,()=>r(N),a=>s(N,a)),v(Ee,()=>r(M),a=>s(M,a)),v(Ie,()=>r(P),a=>s(P,a)),v(Je,()=>r(U),a=>s(U,a)),v(Ae,()=>r(D),a=>s(D,a)),br(Pe,()=>r(G),a=>s(G,a)),je("click",De,()=>$("close")),je("submit",C,gr(Le)),l(u,p)},$$slots:{default:!0}}),lr()}export{Jr as U}; diff --git a/webapp/assets/_app/immutable/chunks/C41YH50Q.js b/webapp/assets/_app/immutable/chunks/BE8f1Riw.js similarity index 71% rename from webapp/assets/_app/immutable/chunks/C41YH50Q.js rename to webapp/assets/_app/immutable/chunks/BE8f1Riw.js index 9f4c7cfe..027693f0 100644 --- a/webapp/assets/_app/immutable/chunks/C41YH50Q.js +++ b/webapp/assets/_app/immutable/chunks/BE8f1Riw.js @@ -1 +1 @@ -import{s as e}from"./CTf6mQoE.js";const r=()=>{const s=e;return{page:{subscribe:s.page.subscribe},navigating:{subscribe:s.navigating.subscribe},updated:s.updated}},b={subscribe(s){return r().page.subscribe(s)}};export{b as p}; +import{s as e}from"./DXCC0cSN.js";const r=()=>{const s=e;return{page:{subscribe:s.page.subscribe},navigating:{subscribe:s.navigating.subscribe},updated:s.updated}},b={subscribe(s){return r().page.subscribe(s)}};export{b as p}; diff --git a/webapp/assets/_app/immutable/chunks/DDhBTdDt.js b/webapp/assets/_app/immutable/chunks/BEoJgOul.js similarity index 95% rename from webapp/assets/_app/immutable/chunks/DDhBTdDt.js rename to webapp/assets/_app/immutable/chunks/BEoJgOul.js index 7f2ab6df..cd418e15 100644 --- a/webapp/assets/_app/immutable/chunks/DDhBTdDt.js +++ b/webapp/assets/_app/immutable/chunks/BEoJgOul.js @@ -1 +1 @@ -import"./DsnmJJEf.js";import{i as U}from"./B3Pzt0F_.js";import{f as I,j as t,k as p,r as a,t as P,v as b,c as u,z as N,D as A,p as W,u as z,n as H,d as X}from"./D8EpLgQ1.js";import{p as s,i as T}from"./5WA7h8uK.js";import{s as Y,h as Z,B as F,c as $}from"./CiE1LlKV.js";import{b as ee}from"./CoIRRsD9.js";import{D as te,G as ae,a as se}from"./C9DJVOi1.js";import{E as ne}from"./B7ITzBt8.js";import{S as B}from"./BE4wujub.js";var le=I('
                '),ie=I('
                '),re=I('

                ');function ye(L,e){let n=s(e,"title",8),S=s(e,"subtitle",8),_=s(e,"forgeIcon",8,""),f=s(e,"onEdit",8,null),h=s(e,"onDelete",8,null),k=s(e,"editLabel",8,"Edit"),j=s(e,"deleteLabel",8,"Delete"),g=s(e,"titleClass",8,"");var c=re(),v=t(c),m=t(v),y=t(m),C=t(y);{var E=i=>{var r=le(),w=t(r);Z(w,_),a(r),u(i,r)};T(C,i=>{_()&&i(E)})}var l=p(C,2),D=t(l),G=t(D,!0);a(D);var M=p(D,2),V=t(M,!0);a(M),a(l),a(y);var R=p(y,2);{var q=i=>{var r=ie(),w=t(r);{var J=o=>{F(o,{variant:"secondary",size:"md",icon:"",$$events:{click(...d){f()?.apply(this,d)}},children:(d,Q)=>{N();var x=A();P(()=>b(x,k())),u(d,x)},$$slots:{default:!0}})};T(w,o=>{f()&&o(J)})}var K=p(w,2);{var O=o=>{F(o,{variant:"danger",size:"md",icon:"",$$events:{click(...d){h()?.apply(this,d)}},children:(d,Q)=>{N();var x=A();P(()=>b(x,j())),u(d,x)},$$slots:{default:!0}})};T(K,o=>{h()&&o(O)})}a(r),u(i,r)};T(R,i=>{(f()||h())&&i(q)})}a(m),a(v),a(c),P(()=>{Y(D,1,`text-2xl font-bold text-gray-900 dark:text-white ${g()??""}`),b(G,n()),b(V,S())}),u(L,c)}var oe=I('');function xe(L,e){W(e,!1);let n=s(e,"instances",8),S=s(e,"entityType",8),_=s(e,"onDeleteInstance",8);const f=[{key:"name",title:"Name",cellComponent:ne,cellProps:{entityType:"instance",nameField:"name"}},{key:"status",title:"Status",cellComponent:B,cellProps:{statusType:"instance",statusField:"status"}},{key:"runner_status",title:"Runner Status",cellComponent:B,cellProps:{statusType:"instance",statusField:"runner_status"}},{key:"created",title:"Created",cellComponent:ae,cellProps:{field:"created_at",type:"date"}},{key:"actions",title:"Actions",align:"right",cellComponent:se,cellProps:{actions:[{type:"delete",label:"Delete",title:"Delete instance",ariaLabel:"Delete instance",action:"delete"}]}}],h={entityType:"instance",primaryText:{field:"name",isClickable:!0,href:"/instances/{name}"},secondaryText:{field:"provider_id"},badges:[{type:"status",field:"status"}],actions:[{type:"delete",handler:l=>k(l)}]};function k(l){_()(l)}function j(l){k(l.detail.item)}U();var g=oe(),c=t(g),v=t(c),m=t(v),y=t(m);a(m);var C=p(m,2);a(v);var E=p(v,2);te(E,{get columns(){return f},get data(){return n()},loading:!1,error:"",searchTerm:"",showSearch:!1,showPagination:!1,currentPage:1,get perPage(){return H(n()),z(()=>n().length)},totalPages:1,get totalItems(){return H(n()),z(()=>n().length)},itemName:"instances",emptyTitle:"No instances running",get emptyMessage(){return`No instances running for this ${S()??""}.`},emptyIconType:"cog",get mobileCardConfig(){return h},$$events:{delete:j}}),a(c),a(g),P(()=>{b(y,`Instances (${H(n()),z(()=>n().length)??""})`),$(C,"href",`${ee}/instances`)}),u(L,g),X()}export{ye as D,xe as I}; +import"./DsnmJJEf.js";import{i as U}from"./B3Pzt0F_.js";import{f as I,j as t,k as p,r as a,t as P,v as b,c as u,z as N,D as A,p as W,u as z,n as H,d as X}from"./D8EpLgQ1.js";import{p as s,i as T}from"./5WA7h8uK.js";import{s as Y,h as Z,B as F,c as $}from"./CiE1LlKV.js";import{b as ee}from"./CRhkqW2i.js";import{D as te,G as ae,a as se}from"./BrNfsPe8.js";import{E as ne}from"./D4PaGKsV.js";import{S as B}from"./MCv1Wq2q.js";var le=I('
                '),ie=I('
                '),re=I('

                ');function ye(L,e){let n=s(e,"title",8),S=s(e,"subtitle",8),_=s(e,"forgeIcon",8,""),f=s(e,"onEdit",8,null),h=s(e,"onDelete",8,null),k=s(e,"editLabel",8,"Edit"),j=s(e,"deleteLabel",8,"Delete"),g=s(e,"titleClass",8,"");var c=re(),v=t(c),m=t(v),y=t(m),C=t(y);{var E=i=>{var r=le(),w=t(r);Z(w,_),a(r),u(i,r)};T(C,i=>{_()&&i(E)})}var l=p(C,2),D=t(l),G=t(D,!0);a(D);var M=p(D,2),V=t(M,!0);a(M),a(l),a(y);var R=p(y,2);{var q=i=>{var r=ie(),w=t(r);{var J=o=>{F(o,{variant:"secondary",size:"md",icon:"",$$events:{click(...d){f()?.apply(this,d)}},children:(d,Q)=>{N();var x=A();P(()=>b(x,k())),u(d,x)},$$slots:{default:!0}})};T(w,o=>{f()&&o(J)})}var K=p(w,2);{var O=o=>{F(o,{variant:"danger",size:"md",icon:"",$$events:{click(...d){h()?.apply(this,d)}},children:(d,Q)=>{N();var x=A();P(()=>b(x,j())),u(d,x)},$$slots:{default:!0}})};T(K,o=>{h()&&o(O)})}a(r),u(i,r)};T(R,i=>{(f()||h())&&i(q)})}a(m),a(v),a(c),P(()=>{Y(D,1,`text-2xl font-bold text-gray-900 dark:text-white ${g()??""}`),b(G,n()),b(V,S())}),u(L,c)}var oe=I('');function xe(L,e){W(e,!1);let n=s(e,"instances",8),S=s(e,"entityType",8),_=s(e,"onDeleteInstance",8);const f=[{key:"name",title:"Name",cellComponent:ne,cellProps:{entityType:"instance",nameField:"name"}},{key:"status",title:"Status",cellComponent:B,cellProps:{statusType:"instance",statusField:"status"}},{key:"runner_status",title:"Runner Status",cellComponent:B,cellProps:{statusType:"instance",statusField:"runner_status"}},{key:"created",title:"Created",cellComponent:ae,cellProps:{field:"created_at",type:"date"}},{key:"actions",title:"Actions",align:"right",cellComponent:se,cellProps:{actions:[{type:"delete",label:"Delete",title:"Delete instance",ariaLabel:"Delete instance",action:"delete"}]}}],h={entityType:"instance",primaryText:{field:"name",isClickable:!0,href:"/instances/{name}"},secondaryText:{field:"provider_id"},badges:[{type:"status",field:"status"}],actions:[{type:"delete",handler:l=>k(l)}]};function k(l){_()(l)}function j(l){k(l.detail.item)}U();var g=oe(),c=t(g),v=t(c),m=t(v),y=t(m);a(m);var C=p(m,2);a(v);var E=p(v,2);te(E,{get columns(){return f},get data(){return n()},loading:!1,error:"",searchTerm:"",showSearch:!1,showPagination:!1,currentPage:1,get perPage(){return H(n()),z(()=>n().length)},totalPages:1,get totalItems(){return H(n()),z(()=>n().length)},itemName:"instances",emptyTitle:"No instances running",get emptyMessage(){return`No instances running for this ${S()??""}.`},emptyIconType:"cog",get mobileCardConfig(){return h},$$events:{delete:j}}),a(c),a(g),P(()=>{b(y,`Instances (${H(n()),z(()=>n().length)??""})`),$(C,"href",`${ee}/instances`)}),u(L,g),X()}export{ye as D,xe as I}; diff --git a/webapp/assets/_app/immutable/chunks/BZiHL9L3.js b/webapp/assets/_app/immutable/chunks/BZiHL9L3.js new file mode 100644 index 00000000..586e429f --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/BZiHL9L3.js @@ -0,0 +1 @@ +function n(e){let s="An unexpected error occurred";if(e&&typeof e=="object"){if("response"in e&&e.response&&typeof e.response=="object"){const a=e.response;if("data"in a&&a.data&&typeof a.data=="object"){const t=a.data,r=t.error&&t.error.trim()?t.error:"",i=t.details&&t.details.trim()?t.details:"";if(r&&i)return`${r}. 
${i}`;if(r)return r;if(i)return i}if("status"in a){const t=a.status;switch(t){case 400:s="Bad request - please check your input";break;case 401:s="Unauthorized - please log in again";break;case 403:s="Access denied - insufficient permissions";break;case 404:s="Resource not found";break;case 409:s="Conflict - resource already exists or is in use";break;case 422:s="Validation failed - please check your input";break;case 500:s="Internal server error - please try again later";break;default:s=`Request failed with status ${t}`}}}else if(e instanceof Error&&e.message&&!e.message.includes("status code"))return e.message}return s}export{n as e}; diff --git a/webapp/assets/_app/immutable/chunks/Bgb-frqW.js b/webapp/assets/_app/immutable/chunks/Bgb-frqW.js new file mode 100644 index 00000000..1d4c8b61 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/Bgb-frqW.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as cr}from"./B3Pzt0F_.js";import{p as pr,E as vr,m as u,o as fr,s as n,f as E,j as d,r as t,k as a,g as e,t as _,x as He,u as h,z as mr,n as D,v as k,e as w,c as m,D as xr,d as yr}from"./D8EpLgQ1.js";import{p as _r,i as ge,s as hr,a as kr}from"./5WA7h8uK.js";import{e as wr,i as Er}from"./u94nIB4-.js";import{r as c,b as Ke,c as Rr}from"./CiE1LlKV.js";import{b as p,a as $r}from"./C6k1Q4We.js";import{p as Sr}from"./D4Caz1gY.js";import{M as Tr}from"./qB7B8uiS.js";import{J as Or}from"./DZblzgqm.js";import{e as Pr}from"./BZiHL9L3.js";import{e as Jr}from"./wyaP0EDu.js";var Mr=E('

                '),Nr=E(' '),Ur=E('
                '),Ar=E('
                Updating...
                '),Ir=E('

                Pool Information (Read-only)

                Provider:
                Entity:

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Tags
                Extra Specs (JSON)
                ');function Qr(We,ce){pr(ce,!1);const[qe,Qe]=hr(),B=()=>kr(Jr,"$eagerCache",qe);let o=_r(ce,"pool",8);const G=vr();let R=u(!1),$=u(""),S=u(o().image||""),T=u(o().flavor||""),O=u(o().max_runners),P=u(o().min_idle_runners),J=u(o().runner_bootstrap_timeout),M=u(o().priority),N=u(o().runner_prefix||""),x=u(o().os_type||"linux"),y=u(o().os_arch||"amd64"),U=u(o()["github-runner-group"]||""),A=u(o().enabled),g=u((o().tags||[]).map(i=>i.name||"").filter(Boolean)),f=u(""),v=u("{}");function Ve(i){if(i.repo_id){const s=B().repositories.find(l=>l.id===i.repo_id);return s?`${s.owner}/${s.name}`:"Unknown Entity"}if(i.org_id){const s=B().organizations.find(l=>l.id===i.org_id);return s&&s.name?s.name:"Unknown Entity"}if(i.enterprise_id){const s=B().enterprises.find(l=>l.id===i.enterprise_id);return s&&s.name?s.name:"Unknown Entity"}return"Unknown Entity"}function Xe(i){return i.repo_id?"Repository":i.org_id?"Organization":i.enterprise_id?"Enterprise":"Unknown"}fr(()=>{if(o().extra_specs)try{if(typeof o().extra_specs=="object")n(v,JSON.stringify(o().extra_specs,null,2));else{const i=JSON.parse(o().extra_specs);n(v,JSON.stringify(i,null,2))}}catch{n(v,o().extra_specs||"{}")}});function pe(){e(f).trim()&&!e(g).includes(e(f).trim())&&(n(g,[...e(g),e(f).trim()]),n(f,""))}function Ye(i){n(g,e(g).filter((s,l)=>l!==i))}function Ze(i){i.key==="Enter"&&(i.preventDefault(),pe())}async function er(){try{n(R,!0),n($,"");let i={};if(e(v).trim())try{i=JSON.parse(e(v))}catch{throw new Error("Invalid JSON in extra specs")}const s={image:e(S)!==o().image?e(S):void 0,flavor:e(T)!==o().flavor?e(T):void 0,max_runners:e(O)!==o().max_runners?e(O):void 0,min_idle_runners:e(P)!==o().min_idle_runners?e(P):void 0,runner_bootstrap_timeout:e(J)!==o().runner_bootstrap_timeout?e(J):void 0,priority:e(M)!==o().priority?e(M):void 0,runner_prefix:e(N)!==o().runner_prefix?e(N):void 0,os_type:e(x)!==o().os_type?e(x):void 0,os_arch:e(y)!==o().os_arch?e(y):void 0,"github-runner-group":e(U)!==o()["github-runner-group"]&&e(U)||void 0,enabled:e(A)!==o().enabled?e(A):void 0,tags:JSON.stringify(e(g))!==JSON.stringify((o().tags||[]).map(l=>l.name||"").filter(Boolean))?e(g):void 0,extra_specs:e(v).trim()!==JSON.stringify(o().extra_specs||{},null,2).trim()?i:void 0};Object.keys(s).forEach(l=>{s[l]===void 0&&delete s[l]}),G("submit",s)}catch(i){n($,Pr(i))}finally{n(R,!1)}}cr(),Tr(We,{$$events:{close:()=>G("close")},children:(i,s)=>{var l=Ir(),z=d(l),ve=d(z),rr=d(ve);t(ve),t(z);var L=a(z,2),fe=d(L);{var tr=r=>{var b=Mr(),j=d(b),C=d(j,!0);t(j),t(b),_(()=>k(C,e($))),m(r,b)};ge(fe,r=>{e($)&&r(tr)})}var F=a(fe,2),me=a(d(F),2),H=d(me),xe=a(d(H),2),ar=d(xe,!0);t(xe),t(H);var ye=a(H,2),_e=a(d(ye),2),dr=d(_e);t(_e),t(ye),t(me),t(F);var K=a(F,2),he=a(d(K),2),W=d(he),ke=a(d(W),2);c(ke),t(W);var q=a(W,2),we=a(d(q),2);c(we),t(q);var Q=a(q,2),V=a(d(Q),2);_(()=>{e(x),He(()=>{})});var X=d(V);X.value=X.__value="linux";var Ee=a(X);Ee.value=Ee.__value="windows",t(V),t(Q);var Re=a(Q,2),Y=a(d(Re),2);_(()=>{e(y),He(()=>{})});var Z=d(Y);Z.value=Z.__value="amd64";var $e=a(Z);$e.value=$e.__value="arm64",t(Y),t(Re),t(he),t(K);var ee=a(K,2),Se=a(d(ee),2),re=d(Se),Te=a(d(re),2);c(Te),t(re);var te=a(re,2),Oe=a(d(te),2);c(Oe),t(te);var Pe=a(te,2),Je=a(d(Pe),2);c(Je),t(Pe),t(Se),t(ee);var ae=a(ee,2),de=a(d(ae),2),oe=d(de),Me=a(d(oe),2);c(Me),t(oe);var ie=a(oe,2),Ne=a(d(ie),2);c(Ne),t(ie);var Ue=a(ie,2),Ae=a(d(Ue),2);c(Ae),t(Ue),t(de);var ne=a(de,2),Ie=d(ne),je=a(d(Ie),2),se=d(je),I=d(se);c(I);var or=a(I,2);t(se);var ir=a(se,2);{var nr=r=>{var 
b=Ur();wr(b,5,()=>e(g),Er,(j,C,gr)=>{var be=Nr(),Le=d(be),Fe=a(Le);t(be),_(()=>{k(Le,`${e(C)??""} `),Rr(Fe,"aria-label",`Remove tag ${e(C)??""}`)}),w("click",Fe,()=>Ye(gr)),m(j,be)}),t(b),m(r,b)};ge(ir,r=>{e(g),h(()=>e(g).length>0)&&r(nr)})}t(je),t(Ie),t(ne);var le=a(ne,2),Ce=d(le),sr=a(d(Ce),2);Or(sr,{rows:4,placeholder:"{}",get value(){return e(v)},set value(r){n(v,r)},$$legacy:!0}),t(Ce),t(le);var De=a(le,2),Be=d(De);c(Be),mr(2),t(De),t(ae);var Ge=a(ae,2),ze=d(Ge),ue=a(ze,2),lr=d(ue);{var ur=r=>{var b=Ar();m(r,b)},br=r=>{var b=xr("Update Pool");m(r,b)};ge(lr,r=>{e(R)?r(ur):r(br,!1)})}t(ue),t(Ge),t(L),t(l),_((r,b)=>{k(rr,`Update Pool ${D(o()),h(()=>o().id)??""}`),k(ar,(D(o()),h(()=>o().provider_name))),k(dr,`${r??""}: ${b??""}`),ue.disabled=e(R)},[()=>(D(o()),h(()=>Xe(o()))),()=>(D(o()),h(()=>Ve(o())))]),p(ke,()=>e(S),r=>n(S,r)),p(we,()=>e(T),r=>n(T,r)),Ke(V,()=>e(x),r=>n(x,r)),Ke(Y,()=>e(y),r=>n(y,r)),p(Te,()=>e(P),r=>n(P,r)),p(Oe,()=>e(O),r=>n(O,r)),p(Je,()=>e(J),r=>n(J,r)),p(Me,()=>e(N),r=>n(N,r)),p(Ne,()=>e(M),r=>n(M,r)),p(Ae,()=>e(U),r=>n(U,r)),p(I,()=>e(f),r=>n(f,r)),w("keydown",I,Ze),w("click",or,pe),$r(Be,()=>e(A),r=>n(A,r)),w("click",ze,()=>G("close")),w("submit",L,Sr(er)),m(i,l)},$$slots:{default:!0}}),yr(),Qe()}export{Qr as U}; diff --git a/webapp/assets/_app/immutable/chunks/C9DJVOi1.js b/webapp/assets/_app/immutable/chunks/BrNfsPe8.js similarity index 78% rename from webapp/assets/_app/immutable/chunks/C9DJVOi1.js rename to webapp/assets/_app/immutable/chunks/BrNfsPe8.js index 3375b79c..c916e43a 100644 --- a/webapp/assets/_app/immutable/chunks/C9DJVOi1.js +++ b/webapp/assets/_app/immutable/chunks/BrNfsPe8.js @@ -1 +1 @@ -import"./DsnmJJEf.js";import{i as ke}from"./B3Pzt0F_.js";import{V as ut,aU as gt,aV as ft,M as ht,O as mt,P as pt,Q as kt,R as xt,at as _t,K as Xe,L as yt,U as bt,T as wt,a8 as Mt,p as fe,l as $,s as ee,m as te,n as u,a as Le,C as ye,j as s,g as e,r as n,t as R,c as i,d as he,E as be,f as L,e as Ge,u as v,k as j,v as O,z as Se,D as Te,B as Q,b as q,x as Pt,q as X}from"./D8EpLgQ1.js";import{p as o,l as Ye,i as I,b as Ct}from"./5WA7h8uK.js";import{e as de,i as ge}from"./u94nIB4-.js";import{h as Ae,s as pe,f as jt,e as zt,r as Tt,c as Be,B as Ce,b as Lt,d as $e,i as Ht}from"./CiE1LlKV.js";import{c as It}from"./CCSWcuVN.js";import{b as St}from"./C6k1Q4We.js";import{b as At}from"./CoIRRsD9.js";import{B as Bt,g as et,b as Et}from"./BGVHQGl-.js";function tt(S,r,g){Xe&&yt();var a=S,t=_t,p,h,m=null,f=ut()?gt:ft;function M(){p&&bt(p),m!==null&&(m.lastChild.remove(),a.before(m),m=null),p=h}ht(()=>{if(f(t,t=r())){var k=a,A=xt();A&&(m=document.createDocumentFragment(),m.append(k=mt())),h=pt(()=>g(k)),A?kt.add_callback(M):M()}}),Xe&&(a=wt)}function Fe(S,r){var g=S.$$events?.[r.type],a=Mt(g)?g.slice():g==null?[]:[g];for(var t of a)t.call(this,r)}var Dt=ye('');function Vt(S,r){fe(r,!1);const g=te();let a=o(r,"name",8),t=o(r,"class",8,"h-5 w-5");const p={plus:'',edit:'',delete:'',view:'',close:'',check:'',x:'',"chevron-left":'',"chevron-right":'',"chevron-down":'',"chevron-up":'',search:'',refresh:'',menu:'',settings:'',"check-circle":'',"x-circle":'',"exclamation-circle":'',"information-circle":'',loading:'',sun:'',moon:'',document:'',folder:''};$(()=>u(a()),()=>{ee(g,p[a()]||"")}),Le();var h=Dt(),m=s(h);Ae(m,()=>e(g),!0),n(h),R(()=>pe(h,0,`${t()}`)),i(S,h),he()}var Nt=L('');function rt(S,r){const g=Ye(r,["children","$$slots","$$events","$$legacy"]),a=Ye(g,["action","disabled","title","ariaLabel","size"]);fe(r,!1);const 
t=te(),p=te(),h=te(),m=te(),f=te(),M=te(),k=te(),A=te(),U=te(),V=be();let P=o(r,"action",8,"edit"),Z=o(r,"disabled",8,!1),B=o(r,"title",8,""),x=o(r,"ariaLabel",8,""),H=o(r,"size",8,"md");function D(){Z()||V("click")}$(()=>{},()=>{ee(t,"transition-colors focus:outline-none focus:ring-2 focus:ring-offset-2 dark:focus:ring-offset-gray-900 cursor-pointer disabled:cursor-not-allowed disabled:opacity-50")}),$(()=>u(H()),()=>{ee(p,{sm:"p-1",md:"p-2"}[H()])}),$(()=>u(P()),()=>{ee(h,{edit:"text-indigo-600 dark:text-indigo-400 hover:text-indigo-900 dark:hover:text-indigo-300 focus:ring-indigo-500",delete:"text-red-600 dark:text-red-400 hover:text-red-900 dark:hover:text-red-300 focus:ring-red-500",view:"text-gray-600 dark:text-gray-400 hover:text-gray-900 dark:hover:text-gray-300 focus:ring-gray-500",add:"text-green-600 dark:text-green-400 hover:text-green-900 dark:hover:text-green-300 focus:ring-green-500"}[P()])}),$(()=>u(H()),()=>{ee(m,H()==="sm"?"h-4 w-4":"h-5 w-5")}),$(()=>(e(t),e(p),e(h)),()=>{ee(f,[e(t),e(p),e(h)].join(" "))}),$(()=>{},()=>{ee(M,{edit:'',delete:'',view:'',add:''})}),$(()=>{},()=>{ee(k,{edit:"Edit",delete:"Delete",view:"View",add:"Add"})}),$(()=>(u(B()),e(k),u(P())),()=>{ee(A,B()||e(k)[P()])}),$(()=>(u(x()),e(k),u(P())),()=>{ee(U,x()||`${e(k)[P()]} item`)}),Le(),ke();var F=Nt();jt(F,()=>({type:"button",class:e(f),disabled:Z(),title:e(A),"aria-label":e(U),...a}));var J=s(F),l=s(J);Ae(l,()=>(e(M),u(P()),v(()=>e(M)[P()])),!0),n(J),n(F),R(()=>pe(J,0,zt(e(m)))),Ge("click",F,D),i(S,F),he()}var Rt=L('
                ');function Ut(S,r){fe(r,!1);let g=o(r,"value",12,""),a=o(r,"placeholder",8,"Search..."),t=o(r,"disabled",8,!1);const p=be();function h(){p("input",g())}ke();var m=Rt(),f=s(m),M=s(f);Vt(M,{name:"search",class:"h-5 w-5 text-gray-400"}),n(f);var k=j(f,2);Tt(k),n(m),R(()=>{Be(k,"placeholder",a()),k.disabled=t()}),St(k,g),Ge("input",k,h),i(S,m),he()}var qt=L('

                ');function Ot(S,r){let g=o(r,"message",8,"Loading...");var a=qt(),t=j(s(a),2),p=s(t,!0);n(t),n(a),R(()=>O(p,g())),i(S,a)}var Ft=L('
                '),Gt=L('

                ');function Kt(S,r){let g=o(r,"title",8,"Error"),a=o(r,"message",8),t=o(r,"showRetry",8,!1),p=o(r,"onRetry",8,void 0);var h=Gt(),m=s(h),f=s(m),M=j(s(f),2),k=s(M),A=s(k,!0);n(k);var U=j(k,2),V=s(U,!0);n(U);var P=j(U,2);{var Z=B=>{var x=Ft(),H=s(x);Ce(H,{variant:"secondary",size:"sm",icon:"",class:"text-red-700 dark:text-red-200 bg-red-100 dark:bg-red-800 hover:bg-red-200 dark:hover:bg-red-700 focus:outline-none focus:bg-red-200 dark:focus:bg-red-700",$$events:{click(...D){p()?.apply(this,D)}},children:(D,F)=>{Se();var J=Te("Retry");i(D,J)},$$slots:{default:!0}}),n(x),i(B,x)};I(P,B=>{t()&&p()&&B(Z)})}n(M),n(f),n(m),n(h),R(()=>{O(A,g()),O(V,a())}),i(S,h)}var Qt=ye(''),Zt=ye(''),Jt=ye(''),Wt=ye(''),Xt=ye(''),Yt=ye(''),$t=L('

                ');function er(S,r){let g=o(r,"title",8),a=o(r,"message",8),t=o(r,"iconType",8,"document");var p=$t(),h=s(p);{var m=V=>{var P=Qt();i(V,P)},f=V=>{var P=Q(),Z=q(P);{var B=H=>{var D=Zt();i(H,D)},x=H=>{var D=Q(),F=q(D);{var J=d=>{var c=Jt();i(d,c)},l=d=>{var c=Q(),y=q(c);{var N=w=>{var E=Wt();i(w,E)},W=w=>{var E=Q(),z=q(E);{var C=T=>{var G=Xt();i(T,G)},_=T=>{var G=Q(),Y=q(G);{var re=ae=>{var ce=Yt();i(ae,ce)};I(Y,ae=>{t()==="settings"&&ae(re)},!0)}i(T,G)};I(z,T=>{t()==="key"?T(C):T(_,!1)},!0)}i(w,E)};I(y,w=>{t()==="cog"?w(N):w(W,!1)},!0)}i(d,c)};I(F,d=>{t()==="users"?d(J):d(l,!1)},!0)}i(H,D)};I(Z,H=>{t()==="building"?H(B):H(x,!1)},!0)}i(V,P)};I(h,V=>{t()==="document"?V(m):V(f,!1)})}var M=j(h,2),k=s(M,!0);n(M);var A=j(M,2),U=s(A,!0);n(A),n(p),R(()=>{O(k,g()),O(U,a())}),i(S,p)}var tr=L(""),rr=L('
                '),ar=L('
                ');function nr(S,r){fe(r,!1);let g=o(r,"searchTerm",12,""),a=o(r,"perPage",12,25),t=o(r,"placeholder",8,"Search..."),p=o(r,"showPerPageSelector",8,!0),h=o(r,"perPageOptions",24,()=>[25,50,100]);const m=be();function f(){m("search",{term:g()})}function M(){m("perPageChange",{perPage:a()})}ke();var k=ar(),A=s(k),U=s(A),V=s(U),P=j(s(V),2);Ut(P,{get placeholder(){return t()},get value(){return g()},set value(x){g(x)},$$events:{input:f},$$legacy:!0}),n(V),n(U);var Z=j(U,2);{var B=x=>{var H=rr(),D=s(H),F=j(s(D),2);R(()=>{a(),Pt(()=>{h()})}),de(F,5,h,ge,(J,l)=>{var d=tr(),c=s(d,!0);n(d);var y={};R(()=>{O(c,e(l)),y!==(y=e(l))&&(d.value=(d.__value=e(l))??"")}),i(J,d)}),n(F),n(D),n(H),Lt(F,a),Ge("change",F,M),i(x,H)};I(Z,x=>{p()&&x(B)})}n(A),n(k),i(S,k),he()}var ir=L('Showing to of ',1),or=L('
                ');function sr(S,r){fe(r,!1);const g=te(),a=te();let t=o(r,"currentPage",8,1),p=o(r,"totalPages",8,1),h=o(r,"perPage",8,25),m=o(r,"totalItems",8,0),f=o(r,"itemName",8,"results");const M=be();function k(P){P>=1&&P<=p()&&P!==t()&&M("pageChange",{page:P})}$(()=>(u(m()),u(t()),u(h())),()=>{ee(g,m()===0?0:(t()-1)*h()+1)}),$(()=>(u(t()),u(h()),u(m())),()=>{ee(a,Math.min(t()*h(),m()))}),Le(),ke();var A=Q(),U=q(A);{var V=P=>{var Z=or(),B=s(Z),x=s(B);{let z=X(()=>t()===1);Ce(x,{variant:"secondary",get disabled(){return e(z)},$$events:{click:()=>k(t()-1)},children:(C,_)=>{Se();var T=Te("Previous");i(C,T)},$$slots:{default:!0}})}var H=j(x,2);{let z=X(()=>t()===p());Ce(H,{variant:"secondary",get disabled(){return e(z)},class:"ml-3",$$events:{click:()=>k(t()+1)},children:(C,_)=>{Se();var T=Te("Next");i(C,T)},$$slots:{default:!0}})}n(B);var D=j(B,2),F=s(D),J=s(F),l=s(J);{var d=z=>{var C=Te();R(()=>O(C,`No ${f()??""}`)),i(z,C)},c=z=>{var C=ir(),_=j(q(C)),T=s(_,!0);n(_);var G=j(_,2),Y=s(G,!0);n(G);var re=j(G,2),ae=s(re,!0);n(re);var ce=j(re);R(()=>{O(T,e(g)),O(Y,e(a)),O(ae,m()),O(ce,` ${f()??""}`)}),i(z,C)};I(l,z=>{m()===0?z(d):z(c,!1)})}n(J),n(F);var y=j(F,2),N=s(y),W=s(N);{let z=X(()=>t()===1);Ce(W,{variant:"secondary",size:"sm",get disabled(){return e(z)},class:"rounded-r-none","aria-label":"Previous page",icon:"",$$events:{click:()=>k(t()-1)}})}var w=j(W,2);de(w,1,()=>(u(p()),v(()=>Array(p()))),ge,(z,C,_)=>{const T=X(()=>_+1);{let G=X(()=>e(T)===t()?"primary":"secondary");Ce(z,{get variant(){return e(G)},size:"sm",class:"rounded-none border-l-0 first:border-l first:rounded-l-md",$$events:{click:()=>k(e(T))},children:(Y,re)=>{Se();var ae=Te();R(()=>O(ae,e(T))),i(Y,ae)},$$slots:{default:!0}})}});var E=j(w,2);{let z=X(()=>t()===p());Ce(E,{variant:"secondary",size:"sm",get disabled(){return e(z)},class:"rounded-l-none","aria-label":"Next page",icon:"",$$events:{click:()=>k(t()+1)}})}n(N),n(y),n(D),n(Z),i(P,Z)};I(U,P=>{p()>1&&P(V)})}i(S,A),he()}var lr=L('

                '),dr=L('

                '),cr=L('

                '),vr=L('

                '),ur=L('
                '),gr=L('
                '),fr=L('
                '),hr=L(" "),mr=L('
                '),pr=L('
                ');function kr(S,r){fe(r,!1);const g=be();let a=o(r,"item",8),t=o(r,"config",8);function p(){if(!a())return"Unknown";const{field:l,useId:d,showOwner:c}=t().primaryText,y=a()[l];return d&&y?`${y.slice(0,8)}...`:c&&a().owner&&a().name?`${a().owner}/${a().name}`:y||"Unknown"}function h(){if(!t().secondaryText)return"";const{field:l,computedValue:d}=t().secondaryText;return d!==void 0?typeof d=="function"?d(a()):d:a()?.[l]||""}function m(){if(!t().primaryText.href||!a())return"#";let l=t().primaryText.href;return l=l.replace("{id}",a().id||""),l=l.replace("{name}",encodeURIComponent(a().name||"")),`${At}${l}`}function f(l){if(!a())return;const d=t().actions?.find(c=>c.type===l);d&&d.handler(a()),l==="edit"?g("edit",{item:a()}):l==="delete"?g("delete",{item:a()}):g("action",{type:l,item:a()})}function M(l){switch(l.type){case"status":if(t().entityType==="instance"){const c=a()?.[l.field]||"unknown";let y="neutral",N=c.charAt(0).toUpperCase()+c.slice(1);return l.field==="status"?y=c==="running"?"success":c==="pending"||c==="creating"?"info":c==="failed"||c==="error"?"error":"neutral":l.field==="runner_status"&&(y=c==="idle"?"info":c==="active"||c==="running"?"success":c==="failed"||c==="error"?"error":"neutral"),{variant:y,text:N}}return{variant:"neutral",text:a()?.[l.field]||"Unknown"};case"forge":return{variant:"neutral",text:a()?.[l.field]||"unknown"};case"auth":const d=a()?.[l.field]||"pat";return{variant:d==="pat"?"success":"info",text:d.toUpperCase()};case"custom":if(typeof l.value=="function"){const c=l.value(a());return{variant:c?.variant||"neutral",text:c?.text||""}}return{variant:l.value?.variant||"neutral",text:l.value?.text||""};default:return{variant:"neutral",text:""}}}ke();var k=pr(),A=s(k),U=s(A);{var V=l=>{var d=dr(),c=s(d),y=s(c,!0);n(c);var N=j(c,2);{var W=w=>{var E=lr(),z=s(E,!0);n(E),R(C=>O(z,C),[()=>v(h)]),i(w,E)};I(N,w=>{u(t()),v(()=>t().secondaryText)&&w(W)})}n(d),R((w,E)=>{Be(d,"href",w),pe(c,1,`text-sm font-medium text-blue-600 dark:text-blue-400 hover:text-blue-500 dark:hover:text-blue-300 truncate ${u(t()),v(()=>t().primaryText.isMonospace?"font-mono":"")??""}`),O(y,E)},[()=>v(m),()=>v(p)]),i(l,d)},P=l=>{var d=vr(),c=s(d),y=s(c,!0);n(c);var N=j(c,2);{var W=w=>{var E=cr(),z=s(E,!0);n(E),R(C=>O(z,C),[()=>v(h)]),i(w,E)};I(N,w=>{u(t()),v(()=>t().secondaryText)&&w(W)})}n(d),R(w=>O(y,w),[()=>v(p)]),i(l,d)};I(U,l=>{u(t()),v(()=>t().primaryText.isClickable)?l(V):l(P,!1)})}var Z=j(U,2);{var B=l=>{var d=fr(),c=s(d);{var y=w=>{var E=Q(),z=q(E);de(z,1,()=>(u(t()),v(()=>t().customInfo)),ge,(C,_)=>{const T=X(()=>(e(_),u(a()),v(()=>typeof e(_).icon=="function"?e(_).icon(a()):e(_).icon))),G=X(()=>(e(_),u(a()),v(()=>typeof e(_).text=="function"?e(_).text(a()):e(_).text)));var Y=ur(),re=s(Y);{var ae=je=>{var He=Q(),De=q(He);Ae(De,()=>e(T)),i(je,He)};I(re,je=>{e(T)&&je(ae)})}var ce=j(re,2),Ee=s(ce,!0);n(ce),n(Y),R(()=>O(Ee,e(G))),i(C,Y)}),i(w,E)};I(c,w=>{u(t()),v(()=>t().customInfo)&&w(y)})}var N=j(c,2);{var W=w=>{var E=Q(),z=q(E);de(z,1,()=>(u(t()),v(()=>t().badges.filter(C=>C.type==="forge"))),ge,(C,_)=>{var T=gr(),G=s(T);Ae(G,()=>(u(et),e(_),u(a()),v(()=>et(e(_).field?a()?.[e(_).field]||"unknown":a()?.endpoint?.endpoint_type||"unknown"))));var Y=j(G,2),re=s(Y,!0);n(Y),n(T),R(()=>O(re,(u(a()),v(()=>a()?.endpoint?.name||"Unknown")))),i(C,T)}),i(w,E)};I(N,w=>{u(t()),v(()=>t().badges)&&w(W)})}n(d),i(l,d)};I(Z,l=>{u(t()),v(()=>t().customInfo||t().badges?.some(d=>d.type==="forge"))&&l(B)})}n(A);var x=j(A,2),H=s(x);{var D=l=>{var 
d=Q(),c=q(d);de(c,1,()=>(u(t()),v(()=>t().badges.filter(y=>y.type!=="forge"))),ge,(y,N)=>{var W=Q(),w=q(W);{var E=C=>{const _=X(()=>(e(N),v(()=>M(e(N)))));var T=hr(),G=s(T,!0);n(T),R(()=>{pe(T,1,`inline-flex items-center rounded-full px-2 py-1 text-xs font-medium ring-1 ring-inset ${u(e(_)),v(()=>e(_).variant==="success"?"bg-green-50 text-green-700 ring-green-600/20 dark:bg-green-900/50 dark:text-green-300 dark:ring-green-400/20":e(_).variant==="info"?"bg-blue-50 text-blue-700 ring-blue-600/20 dark:bg-blue-900/50 dark:text-blue-300 dark:ring-blue-400/20":e(_).variant==="error"?"bg-red-50 text-red-700 ring-red-600/20 dark:bg-red-900/50 dark:text-red-300 dark:ring-red-400/20":"bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-900/50 dark:text-gray-300 dark:ring-gray-400/20")??""}`),O(G,(u(e(_)),v(()=>e(_).text)))}),i(C,T)},z=C=>{const _=X(()=>(e(N),v(()=>M(e(N)))));Bt(C,{get variant(){return u(e(_)),v(()=>e(_).variant)},get text(){return u(e(_)),v(()=>e(_).text)}})};I(w,C=>{e(N),v(()=>e(N).type==="status")?C(E):C(z,!1)})}i(y,W)}),i(l,d)};I(H,l=>{u(t()),v(()=>t().badges)&&l(D)})}var F=j(H,2);{var J=l=>{var d=mr();de(d,5,()=>(u(t()),v(()=>t().actions)),ge,(c,y)=>{{let N=X(()=>(e(y),u(t()),v(()=>e(y).type==="edit"?`Edit ${t().entityType}`:`Delete ${t().entityType}`))),W=X(()=>(e(y),u(t()),v(()=>e(y).type==="edit"?`Edit ${t().entityType}`:`Delete ${t().entityType}`)));rt(c,{get action(){return e(y),v(()=>e(y).type)},size:"sm",get title(){return e(N)},get ariaLabel(){return e(W)},$$events:{click:()=>f(e(y).type)}})}}),n(d),i(l,d)};I(F,l=>{u(t()),v(()=>t().actions)&&l(J)})}n(x),n(k),i(S,k),he()}var xr=L('
                '),_r=L('
                '),yr=L("
                "),br=L("
                "),wr=L(' ',1),Mr=L('
                ');function Vr(S,r){fe(r,!1);const g=te();let a=o(r,"columns",24,()=>[]),t=o(r,"data",24,()=>[]),p=o(r,"loading",8,!1),h=o(r,"error",8,""),m=o(r,"totalItems",8,0),f=o(r,"itemName",8,"results"),M=o(r,"searchTerm",12,""),k=o(r,"searchPlaceholder",8,"Search..."),A=o(r,"showSearch",8,!0),U=o(r,"currentPage",8,1),V=o(r,"perPage",12,25),P=o(r,"totalPages",8,1),Z=o(r,"showPagination",8,!0),B=o(r,"showPerPageSelector",8,!0),x=o(r,"emptyTitle",8,"No items found"),H=o(r,"emptyMessage",8,""),D=o(r,"emptyIconType",8,"document"),F=o(r,"errorTitle",8,"Error loading data"),J=o(r,"showRetry",8,!1),l=o(r,"showMobileCards",8,!0),d=o(r,"mobileCardConfig",8,null);const c=be();function y(b){c("search",b.detail)}function N(b){c("pageChange",b.detail)}function W(b){c("perPageChange",b.detail)}function w(){c("retry")}function E(b){c("edit",b.detail)}function z(b){c("delete",b.detail)}function C(b){c("action",b.detail)}function _(b){const ve="px-6 py-4 text-sm",Ve=b.align==="right"?"text-right":b.align==="center"?"text-center":"text-left",Ne=b.key==="actions"?"font-medium":"text-gray-900 dark:text-white",Re=b.flexible?"min-w-0":"";return`${ve} ${Ve} ${Ne} ${Re}`.trim()}function T(){return a().map(b=>b.flexible?`${b.flexRatio||1}fr`:"auto").join(" ")}$(()=>(u(H()),u(M()),u(f())),()=>{ee(g,H()||(M()?`No items found matching "${M()}"`:`No ${f()} found`))}),Le(),ke();var G=Mr(),Y=s(G);{var re=b=>{nr(b,{get placeholder(){return k()},get showPerPageSelector(){return B()},get searchTerm(){return M()},set searchTerm(ve){M(ve)},get perPage(){return V()},set perPage(ve){V(ve)},$$events:{search:y,perPageChange:W},$$legacy:!0})};I(Y,b=>{A()&&b(re)})}var ae=j(Y,2),ce=s(ae);{var Ee=b=>{Ot(b,{get message(){return`Loading ${f()??""}...`}})},je=b=>{var ve=Q(),Ve=q(ve);{var Ne=we=>{{let Ie=X(()=>J()?w:void 0);Kt(we,{get title(){return F()},get message(){return h()},get showRetry(){return J()},get onRetry(){return e(Ie)}})}},Re=we=>{var Ie=Q(),at=q(Ie);{var nt=Me=>{er(Me,{get title(){return x()},get message(){return e(g)},get iconType(){return D()}})},it=Me=>{var Ke=wr(),Qe=q(Ke);{var ot=oe=>{var K=_r();de(K,7,t,(le,ne)=>le.id||le.name||ne,(le,ne,qe)=>{var ze=xr(),ie=s(ze);{var Pe=me=>{var xe=Q(),se=q(xe);tt(se,()=>(e(ne),v(()=>`${e(ne).id||e(ne).name}-${e(ne).updated_at}-mobile`)),_e=>{kr(_e,{get item(){return e(ne)},get config(){return d()},$$events:{edit(ue){Fe.call(this,r,ue)},delete(ue){Fe.call(this,r,ue)},action(ue){Fe.call(this,r,ue)}}})}),i(me,xe)},Oe=me=>{var xe=Q(),se=q(xe);$e(se,r,"mobile-card",{get item(){return e(ne)},get index(){return e(qe)}}),i(me,xe)};I(ie,me=>{d()?me(Pe):me(Oe,!1)})}n(ze),i(le,ze)}),n(K),i(oe,K)};I(Qe,oe=>{l()&&oe(ot)})}var Ze=j(Qe,2),Ue=s(Ze),Je=s(Ue);de(Je,1,a,ge,(oe,K)=>{var le=yr(),ne=s(le,!0);n(le),R(()=>{pe(le,1,`px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-300 uppercase tracking-wider bg-gray-50 dark:bg-gray-700 border-b border-gray-200 dark:border-gray-600 ${e(K),v(()=>e(K).align==="right"?"text-right":e(K).align==="center"?"text-center":"text-left")??""}`),O(ne,(e(K),v(()=>e(K).title)))}),i(oe,le)});var st=j(Je,2);de(st,3,t,(oe,K)=>oe.id||oe.name||K,(oe,K,le)=>{var ne=Q(),qe=q(ne);de(qe,1,a,ge,(ze,ie)=>{var Pe=br(),Oe=s(Pe);{var me=se=>{var _e=Q(),ue=q(_e);tt(ue,()=>(e(K),e(ie),v(()=>`${e(K).id||e(K).name}-${e(K).updated_at}-${e(ie).key}`)),lt=>{var We=Q(),dt=q(We);It(dt,()=>e(ie).cellComponent,(ct,vt)=>{vt(ct,Ct({get item(){return e(K)}},()=>e(ie).cellProps,{$$events:{edit:E,delete:z,action:C}}))}),i(lt,We)}),i(se,_e)},xe=se=>{var 
_e=Q(),ue=q(_e);$e(ue,r,"cell",{get item(){return e(K)},get column(){return e(ie)},get index(){return e(le)},get value(){return e(K),e(ie),v(()=>e(K)[e(ie).key])}}),i(se,_e)};I(Oe,se=>{e(ie),v(()=>e(ie).cellComponent)?se(me):se(xe,!1)})}n(Pe),R(se=>pe(Pe,1,`${se??""} border-b border-gray-200 dark:border-gray-700`),[()=>(e(ie),v(()=>_(e(ie))))]),i(ze,Pe)}),i(oe,ne)}),n(Ue),n(Ze),R(oe=>Ht(Ue,`grid-template-columns: ${oe??""}`),[()=>v(T)]),i(Me,Ke)};I(at,Me=>{u(t()),v(()=>t().length===0)?Me(nt):Me(it,!1)},!0)}i(we,Ie)};I(Ve,we=>{h()?we(Ne):we(Re,!1)},!0)}i(b,ve)};I(ce,b=>{p()?b(Ee):b(je,!1)})}var He=j(ce,2);{var De=b=>{sr(b,{get currentPage(){return U()},get totalPages(){return P()},get perPage(){return V()},get totalItems(){return m()},get itemName(){return f()},$$events:{pageChange:N}})};I(He,b=>{u(Z()),u(p()),u(h()),u(t()),v(()=>Z()&&!p()&&!h()&&t().length>0)&&b(De)})}n(ae),n(G),i(S,G),he()}var Pr=L('
                ');function Nr(S,r){fe(r,!1);const g=be();let a=o(r,"item",8),t=o(r,"actions",24,()=>[{type:"edit",title:"Edit",ariaLabel:"Edit item",action:"edit"},{type:"delete",title:"Delete",ariaLabel:"Delete item",action:"delete"}]);function p(m){a()&&(m==="edit"?g("edit",{item:a()}):m==="delete"?g("delete",{item:a()}):g("action",{type:m,item:a()}))}ke();var h=Pr();de(h,5,t,ge,(m,f)=>{{let M=X(()=>(e(f),v(()=>e(f).action||(e(f).type==="edit"?"edit":e(f).type==="delete"?"delete":"view")))),k=X(()=>(e(f),v(()=>e(f).title||(e(f).type==="edit"?"Edit":e(f).type==="delete"?"Delete":e(f).label)))),A=X(()=>(e(f),v(()=>e(f).ariaLabel||(e(f).type==="edit"?"Edit item":e(f).type==="delete"?"Delete item":e(f).label))));rt(m,{get action(){return e(M)},get title(){return e(k)},get ariaLabel(){return e(A)},$$events:{click:()=>p(e(f).type)}})}}),n(h),i(S,h),he()}var Cr=L(" "),jr=L(" ");function Rr(S,r){fe(r,!1);const g=te(),a=te();let t=o(r,"item",8),p=o(r,"field",8),h=o(r,"type",8,"text"),m=o(r,"truncateLength",8,50),f=o(r,"showTitle",8,!1);function M(){return t()&&p().split(".").reduce((B,x)=>B?.[x],t())||""}function k(){return h()==="date"?Et(e(g)):h()==="truncated"&&e(g).length>m()?`${e(g).slice(0,m())}...`:e(g)}function A(){switch(h()){case"code":return"inline-block max-w-full truncate bg-gray-100 dark:bg-gray-700 px-2 py-1 rounded text-xs font-mono";case"description":return"block w-full truncate text-sm text-gray-500 dark:text-gray-300";case"date":return"block w-full truncate text-sm text-gray-900 dark:text-white font-mono";default:return"block w-full truncate text-sm text-gray-900 dark:text-white"}}$(()=>{},()=>{ee(g,M())}),$(()=>{},()=>{ee(a,k())}),Le(),ke();var U=Q(),V=q(U);{var P=B=>{var x=Cr(),H=s(x,!0);n(x),R(D=>{pe(x,1,`${D??""} ${f()?"cursor-default":""}`),Be(x,"title",f()?e(g):""),O(H,e(a))},[()=>v(A)]),i(B,x)},Z=B=>{var x=jr(),H=s(x,!0);n(x),R(D=>{pe(x,1,`${D??""} ${f()?"cursor-default":""}`),Be(x,"title",f()?e(g):""),O(H,e(a))},[()=>v(A)]),i(B,x)};I(V,B=>{h()==="code"?B(P):B(Z,!1)})}i(S,U),he()}export{rt as A,Vr as D,Rr as G,Nr as a,tt as k}; +import"./DsnmJJEf.js";import{i as ke}from"./B3Pzt0F_.js";import{V as ut,aU as gt,aV as ft,M as ht,O as mt,P as pt,Q as kt,R as xt,at as _t,K as Xe,L as yt,U as bt,T as wt,a8 as Mt,p as fe,l as $,s as ee,m as te,n as u,a as Le,C as ye,j as s,g as e,r as n,t as R,c as i,d as he,E as be,f as L,e as Ge,u as v,k as j,v as O,z as Se,D as Te,B as Q,b as q,x as Pt,q as X}from"./D8EpLgQ1.js";import{p as o,l as Ye,i as I,b as Ct}from"./5WA7h8uK.js";import{e as de,i as ge}from"./u94nIB4-.js";import{h as Ae,s as pe,f as jt,e as zt,B as Ce,r as Tt,c as Be,b as Lt,d as $e,i as Ht}from"./CiE1LlKV.js";import{c as It}from"./CCSWcuVN.js";import{b as St}from"./C6k1Q4We.js";import{b as At}from"./CRhkqW2i.js";import{B as Bt,g as et,b as Et}from"./BGVHQGl-.js";function tt(S,r,g){Xe&&yt();var a=S,t=_t,p,h,m=null,f=ut()?gt:ft;function M(){p&&bt(p),m!==null&&(m.lastChild.remove(),a.before(m),m=null),p=h}ht(()=>{if(f(t,t=r())){var k=a,A=xt();A&&(m=document.createDocumentFragment(),m.append(k=mt())),h=pt(()=>g(k)),A?kt.add_callback(M):M()}}),Xe&&(a=wt)}function Fe(S,r){var g=S.$$events?.[r.type],a=Mt(g)?g.slice():g==null?[]:[g];for(var t of a)t.call(this,r)}var Dt=ye('');function Vt(S,r){fe(r,!1);const g=te();let a=o(r,"name",8),t=o(r,"class",8,"h-5 w-5");const 
p={plus:'',edit:'',delete:'',view:'',close:'',check:'',x:'',"chevron-left":'',"chevron-right":'',"chevron-down":'',"chevron-up":'',search:'',refresh:'',menu:'',settings:'',"check-circle":'',"x-circle":'',"exclamation-circle":'',"information-circle":'',loading:'',sun:'',moon:'',document:'',folder:''};$(()=>u(a()),()=>{ee(g,p[a()]||"")}),Le();var h=Dt(),m=s(h);Ae(m,()=>e(g),!0),n(h),R(()=>pe(h,0,`${t()}`)),i(S,h),he()}var Nt=L('');function rt(S,r){const g=Ye(r,["children","$$slots","$$events","$$legacy"]),a=Ye(g,["action","disabled","title","ariaLabel","size"]);fe(r,!1);const t=te(),p=te(),h=te(),m=te(),f=te(),M=te(),k=te(),A=te(),U=te(),V=be();let P=o(r,"action",8,"edit"),Z=o(r,"disabled",8,!1),B=o(r,"title",8,""),x=o(r,"ariaLabel",8,""),H=o(r,"size",8,"md");function D(){Z()||V("click")}$(()=>{},()=>{ee(t,"transition-colors focus:outline-none focus:ring-2 focus:ring-offset-2 dark:focus:ring-offset-gray-900 cursor-pointer disabled:cursor-not-allowed disabled:opacity-50")}),$(()=>u(H()),()=>{ee(p,{sm:"p-1",md:"p-2"}[H()])}),$(()=>u(P()),()=>{ee(h,{edit:"text-indigo-600 dark:text-indigo-400 hover:text-indigo-900 dark:hover:text-indigo-300 focus:ring-indigo-500",delete:"text-red-600 dark:text-red-400 hover:text-red-900 dark:hover:text-red-300 focus:ring-red-500",view:"text-gray-600 dark:text-gray-400 hover:text-gray-900 dark:hover:text-gray-300 focus:ring-gray-500",add:"text-green-600 dark:text-green-400 hover:text-green-900 dark:hover:text-green-300 focus:ring-green-500"}[P()])}),$(()=>u(H()),()=>{ee(m,H()==="sm"?"h-4 w-4":"h-5 w-5")}),$(()=>(e(t),e(p),e(h)),()=>{ee(f,[e(t),e(p),e(h)].join(" "))}),$(()=>{},()=>{ee(M,{edit:'',delete:'',view:'',add:''})}),$(()=>{},()=>{ee(k,{edit:"Edit",delete:"Delete",view:"View",add:"Add"})}),$(()=>(u(B()),e(k),u(P())),()=>{ee(A,B()||e(k)[P()])}),$(()=>(u(x()),e(k),u(P())),()=>{ee(U,x()||`${e(k)[P()]} item`)}),Le(),ke();var F=Nt();jt(F,()=>({type:"button",class:e(f),disabled:Z(),title:e(A),"aria-label":e(U),...a}));var J=s(F),l=s(J);Ae(l,()=>(e(M),u(P()),v(()=>e(M)[P()])),!0),n(J),n(F),R(()=>pe(J,0,zt(e(m)))),Ge("click",F,D),i(S,F),he()}var Rt=L('

                ');function Ut(S,r){let g=o(r,"message",8,"Loading...");var a=Rt(),t=j(s(a),2),p=s(t,!0);n(t),n(a),R(()=>O(p,g())),i(S,a)}var qt=L('
                '),Ot=L('

                ');function Ft(S,r){let g=o(r,"title",8,"Error"),a=o(r,"message",8),t=o(r,"showRetry",8,!1),p=o(r,"onRetry",8,void 0);var h=Ot(),m=s(h),f=s(m),M=j(s(f),2),k=s(M),A=s(k,!0);n(k);var U=j(k,2),V=s(U,!0);n(U);var P=j(U,2);{var Z=B=>{var x=qt(),H=s(x);Ce(H,{variant:"secondary",size:"sm",icon:"",class:"text-red-700 dark:text-red-200 bg-red-100 dark:bg-red-800 hover:bg-red-200 dark:hover:bg-red-700 focus:outline-none focus:bg-red-200 dark:focus:bg-red-700",$$events:{click(...D){p()?.apply(this,D)}},children:(D,F)=>{Se();var J=Te("Retry");i(D,J)},$$slots:{default:!0}}),n(x),i(B,x)};I(P,B=>{t()&&p()&&B(Z)})}n(M),n(f),n(m),n(h),R(()=>{O(A,g()),O(V,a())}),i(S,h)}var Gt=ye(''),Kt=ye(''),Qt=ye(''),Zt=ye(''),Jt=ye(''),Wt=ye(''),Xt=L('

                ');function Yt(S,r){let g=o(r,"title",8),a=o(r,"message",8),t=o(r,"iconType",8,"document");var p=Xt(),h=s(p);{var m=V=>{var P=Gt();i(V,P)},f=V=>{var P=Q(),Z=q(P);{var B=H=>{var D=Kt();i(H,D)},x=H=>{var D=Q(),F=q(D);{var J=d=>{var c=Qt();i(d,c)},l=d=>{var c=Q(),y=q(c);{var N=w=>{var E=Zt();i(w,E)},W=w=>{var E=Q(),z=q(E);{var C=T=>{var G=Jt();i(T,G)},_=T=>{var G=Q(),Y=q(G);{var re=ae=>{var ce=Wt();i(ae,ce)};I(Y,ae=>{t()==="settings"&&ae(re)},!0)}i(T,G)};I(z,T=>{t()==="key"?T(C):T(_,!1)},!0)}i(w,E)};I(y,w=>{t()==="cog"?w(N):w(W,!1)},!0)}i(d,c)};I(F,d=>{t()==="users"?d(J):d(l,!1)},!0)}i(H,D)};I(Z,H=>{t()==="building"?H(B):H(x,!1)},!0)}i(V,P)};I(h,V=>{t()==="document"?V(m):V(f,!1)})}var M=j(h,2),k=s(M,!0);n(M);var A=j(M,2),U=s(A,!0);n(A),n(p),R(()=>{O(k,g()),O(U,a())}),i(S,p)}var $t=L('
                ');function er(S,r){fe(r,!1);let g=o(r,"value",12,""),a=o(r,"placeholder",8,"Search..."),t=o(r,"disabled",8,!1);const p=be();function h(){p("input",g())}ke();var m=$t(),f=s(m),M=s(f);Vt(M,{name:"search",class:"h-5 w-5 text-gray-400"}),n(f);var k=j(f,2);Tt(k),n(m),R(()=>{Be(k,"placeholder",a()),k.disabled=t()}),St(k,g),Ge("input",k,h),i(S,m),he()}var tr=L(""),rr=L('
                '),ar=L('
                ');function nr(S,r){fe(r,!1);let g=o(r,"searchTerm",12,""),a=o(r,"perPage",12,25),t=o(r,"placeholder",8,"Search..."),p=o(r,"showPerPageSelector",8,!0),h=o(r,"perPageOptions",24,()=>[25,50,100]);const m=be();function f(){m("search",{term:g()})}function M(){m("perPageChange",{perPage:a()})}ke();var k=ar(),A=s(k),U=s(A),V=s(U),P=j(s(V),2);er(P,{get placeholder(){return t()},get value(){return g()},set value(x){g(x)},$$events:{input:f},$$legacy:!0}),n(V),n(U);var Z=j(U,2);{var B=x=>{var H=rr(),D=s(H),F=j(s(D),2);R(()=>{a(),Pt(()=>{h()})}),de(F,5,h,ge,(J,l)=>{var d=tr(),c=s(d,!0);n(d);var y={};R(()=>{O(c,e(l)),y!==(y=e(l))&&(d.value=(d.__value=e(l))??"")}),i(J,d)}),n(F),n(D),n(H),Lt(F,a),Ge("change",F,M),i(x,H)};I(Z,x=>{p()&&x(B)})}n(A),n(k),i(S,k),he()}var ir=L('Showing to of ',1),or=L('
                ');function sr(S,r){fe(r,!1);const g=te(),a=te();let t=o(r,"currentPage",8,1),p=o(r,"totalPages",8,1),h=o(r,"perPage",8,25),m=o(r,"totalItems",8,0),f=o(r,"itemName",8,"results");const M=be();function k(P){P>=1&&P<=p()&&P!==t()&&M("pageChange",{page:P})}$(()=>(u(m()),u(t()),u(h())),()=>{ee(g,m()===0?0:(t()-1)*h()+1)}),$(()=>(u(t()),u(h()),u(m())),()=>{ee(a,Math.min(t()*h(),m()))}),Le(),ke();var A=Q(),U=q(A);{var V=P=>{var Z=or(),B=s(Z),x=s(B);{let z=X(()=>t()===1);Ce(x,{variant:"secondary",get disabled(){return e(z)},$$events:{click:()=>k(t()-1)},children:(C,_)=>{Se();var T=Te("Previous");i(C,T)},$$slots:{default:!0}})}var H=j(x,2);{let z=X(()=>t()===p());Ce(H,{variant:"secondary",get disabled(){return e(z)},class:"ml-3",$$events:{click:()=>k(t()+1)},children:(C,_)=>{Se();var T=Te("Next");i(C,T)},$$slots:{default:!0}})}n(B);var D=j(B,2),F=s(D),J=s(F),l=s(J);{var d=z=>{var C=Te();R(()=>O(C,`No ${f()??""}`)),i(z,C)},c=z=>{var C=ir(),_=j(q(C)),T=s(_,!0);n(_);var G=j(_,2),Y=s(G,!0);n(G);var re=j(G,2),ae=s(re,!0);n(re);var ce=j(re);R(()=>{O(T,e(g)),O(Y,e(a)),O(ae,m()),O(ce,` ${f()??""}`)}),i(z,C)};I(l,z=>{m()===0?z(d):z(c,!1)})}n(J),n(F);var y=j(F,2),N=s(y),W=s(N);{let z=X(()=>t()===1);Ce(W,{variant:"secondary",size:"sm",get disabled(){return e(z)},class:"rounded-r-none","aria-label":"Previous page",icon:"",$$events:{click:()=>k(t()-1)}})}var w=j(W,2);de(w,1,()=>(u(p()),v(()=>Array(p()))),ge,(z,C,_)=>{const T=X(()=>_+1);{let G=X(()=>e(T)===t()?"primary":"secondary");Ce(z,{get variant(){return e(G)},size:"sm",class:"rounded-none border-l-0 first:border-l first:rounded-l-md",$$events:{click:()=>k(e(T))},children:(Y,re)=>{Se();var ae=Te();R(()=>O(ae,e(T))),i(Y,ae)},$$slots:{default:!0}})}});var E=j(w,2);{let z=X(()=>t()===p());Ce(E,{variant:"secondary",size:"sm",get disabled(){return e(z)},class:"rounded-l-none","aria-label":"Next page",icon:"",$$events:{click:()=>k(t()+1)}})}n(N),n(y),n(D),n(Z),i(P,Z)};I(U,P=>{p()>1&&P(V)})}i(S,A),he()}var lr=L('

                '),dr=L('

                '),cr=L('

                '),vr=L('

                '),ur=L('
                '),gr=L('
                '),fr=L('
                '),hr=L(" "),mr=L('
                '),pr=L('
                ');function kr(S,r){fe(r,!1);const g=be();let a=o(r,"item",8),t=o(r,"config",8);function p(){if(!a())return"Unknown";const{field:l,useId:d,showOwner:c}=t().primaryText,y=a()[l];return d&&y?`${y.slice(0,8)}...`:c&&a().owner&&a().name?`${a().owner}/${a().name}`:y||"Unknown"}function h(){if(!t().secondaryText)return"";const{field:l,computedValue:d}=t().secondaryText;return d!==void 0?typeof d=="function"?d(a()):d:a()?.[l]||""}function m(){if(!t().primaryText.href||!a())return"#";let l=t().primaryText.href;return l=l.replace("{id}",a().id||""),l=l.replace("{name}",encodeURIComponent(a().name||"")),`${At}${l}`}function f(l){if(!a())return;const d=t().actions?.find(c=>c.type===l);d&&d.handler(a()),l==="edit"?g("edit",{item:a()}):l==="delete"?g("delete",{item:a()}):g("action",{type:l,item:a()})}function M(l){switch(l.type){case"status":if(t().entityType==="instance"){const c=a()?.[l.field]||"unknown";let y="neutral",N=c.charAt(0).toUpperCase()+c.slice(1);return l.field==="status"?y=c==="running"?"success":c==="pending"||c==="creating"?"info":c==="failed"||c==="error"?"error":"neutral":l.field==="runner_status"&&(y=c==="idle"?"info":c==="active"||c==="running"?"success":c==="failed"||c==="error"?"error":"neutral"),{variant:y,text:N}}return{variant:"neutral",text:a()?.[l.field]||"Unknown"};case"forge":return{variant:"neutral",text:a()?.[l.field]||"unknown"};case"auth":const d=a()?.[l.field]||"pat";return{variant:d==="pat"?"success":"info",text:d.toUpperCase()};case"custom":if(typeof l.value=="function"){const c=l.value(a());return{variant:c?.variant||"neutral",text:c?.text||""}}return{variant:l.value?.variant||"neutral",text:l.value?.text||""};default:return{variant:"neutral",text:""}}}ke();var k=pr(),A=s(k),U=s(A);{var V=l=>{var d=dr(),c=s(d),y=s(c,!0);n(c);var N=j(c,2);{var W=w=>{var E=lr(),z=s(E,!0);n(E),R(C=>O(z,C),[()=>v(h)]),i(w,E)};I(N,w=>{u(t()),v(()=>t().secondaryText)&&w(W)})}n(d),R((w,E)=>{Be(d,"href",w),pe(c,1,`text-sm font-medium text-blue-600 dark:text-blue-400 hover:text-blue-500 dark:hover:text-blue-300 truncate ${u(t()),v(()=>t().primaryText.isMonospace?"font-mono":"")??""}`),O(y,E)},[()=>v(m),()=>v(p)]),i(l,d)},P=l=>{var d=vr(),c=s(d),y=s(c,!0);n(c);var N=j(c,2);{var W=w=>{var E=cr(),z=s(E,!0);n(E),R(C=>O(z,C),[()=>v(h)]),i(w,E)};I(N,w=>{u(t()),v(()=>t().secondaryText)&&w(W)})}n(d),R(w=>O(y,w),[()=>v(p)]),i(l,d)};I(U,l=>{u(t()),v(()=>t().primaryText.isClickable)?l(V):l(P,!1)})}var Z=j(U,2);{var B=l=>{var d=fr(),c=s(d);{var y=w=>{var E=Q(),z=q(E);de(z,1,()=>(u(t()),v(()=>t().customInfo)),ge,(C,_)=>{const T=X(()=>(e(_),u(a()),v(()=>typeof e(_).icon=="function"?e(_).icon(a()):e(_).icon))),G=X(()=>(e(_),u(a()),v(()=>typeof e(_).text=="function"?e(_).text(a()):e(_).text)));var Y=ur(),re=s(Y);{var ae=je=>{var He=Q(),De=q(He);Ae(De,()=>e(T)),i(je,He)};I(re,je=>{e(T)&&je(ae)})}var ce=j(re,2),Ee=s(ce,!0);n(ce),n(Y),R(()=>O(Ee,e(G))),i(C,Y)}),i(w,E)};I(c,w=>{u(t()),v(()=>t().customInfo)&&w(y)})}var N=j(c,2);{var W=w=>{var E=Q(),z=q(E);de(z,1,()=>(u(t()),v(()=>t().badges.filter(C=>C.type==="forge"))),ge,(C,_)=>{var T=gr(),G=s(T);Ae(G,()=>(u(et),e(_),u(a()),v(()=>et(e(_).field?a()?.[e(_).field]||"unknown":a()?.endpoint?.endpoint_type||"unknown"))));var Y=j(G,2),re=s(Y,!0);n(Y),n(T),R(()=>O(re,(u(a()),v(()=>a()?.endpoint?.name||"Unknown")))),i(C,T)}),i(w,E)};I(N,w=>{u(t()),v(()=>t().badges)&&w(W)})}n(d),i(l,d)};I(Z,l=>{u(t()),v(()=>t().customInfo||t().badges?.some(d=>d.type==="forge"))&&l(B)})}n(A);var x=j(A,2),H=s(x);{var D=l=>{var 
d=Q(),c=q(d);de(c,1,()=>(u(t()),v(()=>t().badges.filter(y=>y.type!=="forge"))),ge,(y,N)=>{var W=Q(),w=q(W);{var E=C=>{const _=X(()=>(e(N),v(()=>M(e(N)))));var T=hr(),G=s(T,!0);n(T),R(()=>{pe(T,1,`inline-flex items-center rounded-full px-2 py-1 text-xs font-medium ring-1 ring-inset ${u(e(_)),v(()=>e(_).variant==="success"?"bg-green-50 text-green-700 ring-green-600/20 dark:bg-green-900/50 dark:text-green-300 dark:ring-green-400/20":e(_).variant==="info"?"bg-blue-50 text-blue-700 ring-blue-600/20 dark:bg-blue-900/50 dark:text-blue-300 dark:ring-blue-400/20":e(_).variant==="error"?"bg-red-50 text-red-700 ring-red-600/20 dark:bg-red-900/50 dark:text-red-300 dark:ring-red-400/20":"bg-gray-50 text-gray-700 ring-gray-600/20 dark:bg-gray-900/50 dark:text-gray-300 dark:ring-gray-400/20")??""}`),O(G,(u(e(_)),v(()=>e(_).text)))}),i(C,T)},z=C=>{const _=X(()=>(e(N),v(()=>M(e(N)))));Bt(C,{get variant(){return u(e(_)),v(()=>e(_).variant)},get text(){return u(e(_)),v(()=>e(_).text)}})};I(w,C=>{e(N),v(()=>e(N).type==="status")?C(E):C(z,!1)})}i(y,W)}),i(l,d)};I(H,l=>{u(t()),v(()=>t().badges)&&l(D)})}var F=j(H,2);{var J=l=>{var d=mr();de(d,5,()=>(u(t()),v(()=>t().actions)),ge,(c,y)=>{{let N=X(()=>(e(y),u(t()),v(()=>e(y).type==="edit"?`Edit ${t().entityType}`:`Delete ${t().entityType}`))),W=X(()=>(e(y),u(t()),v(()=>e(y).type==="edit"?`Edit ${t().entityType}`:`Delete ${t().entityType}`)));rt(c,{get action(){return e(y),v(()=>e(y).type)},size:"sm",get title(){return e(N)},get ariaLabel(){return e(W)},$$events:{click:()=>f(e(y).type)}})}}),n(d),i(l,d)};I(F,l=>{u(t()),v(()=>t().actions)&&l(J)})}n(x),n(k),i(S,k),he()}var xr=L('
                '),_r=L('
                '),yr=L("
                "),br=L("
                "),wr=L(' ',1),Mr=L('
                ');function Vr(S,r){fe(r,!1);const g=te();let a=o(r,"columns",24,()=>[]),t=o(r,"data",24,()=>[]),p=o(r,"loading",8,!1),h=o(r,"error",8,""),m=o(r,"totalItems",8,0),f=o(r,"itemName",8,"results"),M=o(r,"searchTerm",12,""),k=o(r,"searchPlaceholder",8,"Search..."),A=o(r,"showSearch",8,!0),U=o(r,"currentPage",8,1),V=o(r,"perPage",12,25),P=o(r,"totalPages",8,1),Z=o(r,"showPagination",8,!0),B=o(r,"showPerPageSelector",8,!0),x=o(r,"emptyTitle",8,"No items found"),H=o(r,"emptyMessage",8,""),D=o(r,"emptyIconType",8,"document"),F=o(r,"errorTitle",8,"Error loading data"),J=o(r,"showRetry",8,!1),l=o(r,"showMobileCards",8,!0),d=o(r,"mobileCardConfig",8,null);const c=be();function y(b){c("search",b.detail)}function N(b){c("pageChange",b.detail)}function W(b){c("perPageChange",b.detail)}function w(){c("retry")}function E(b){c("edit",b.detail)}function z(b){c("delete",b.detail)}function C(b){c("action",b.detail)}function _(b){const ve="px-6 py-4 text-sm",Ve=b.align==="right"?"text-right":b.align==="center"?"text-center":"text-left",Ne=b.key==="actions"?"font-medium":"text-gray-900 dark:text-white",Re=b.flexible?"min-w-0":"";return`${ve} ${Ve} ${Ne} ${Re}`.trim()}function T(){return a().map(b=>b.flexible?`${b.flexRatio||1}fr`:"auto").join(" ")}$(()=>(u(H()),u(M()),u(f())),()=>{ee(g,H()||(M()?`No items found matching "${M()}"`:`No ${f()} found`))}),Le(),ke();var G=Mr(),Y=s(G);{var re=b=>{nr(b,{get placeholder(){return k()},get showPerPageSelector(){return B()},get searchTerm(){return M()},set searchTerm(ve){M(ve)},get perPage(){return V()},set perPage(ve){V(ve)},$$events:{search:y,perPageChange:W},$$legacy:!0})};I(Y,b=>{A()&&b(re)})}var ae=j(Y,2),ce=s(ae);{var Ee=b=>{Ut(b,{get message(){return`Loading ${f()??""}...`}})},je=b=>{var ve=Q(),Ve=q(ve);{var Ne=we=>{{let Ie=X(()=>J()?w:void 0);Ft(we,{get title(){return F()},get message(){return h()},get showRetry(){return J()},get onRetry(){return e(Ie)}})}},Re=we=>{var Ie=Q(),at=q(Ie);{var nt=Me=>{Yt(Me,{get title(){return x()},get message(){return e(g)},get iconType(){return D()}})},it=Me=>{var Ke=wr(),Qe=q(Ke);{var ot=oe=>{var K=_r();de(K,7,t,(le,ne)=>le.id||le.name||ne,(le,ne,qe)=>{var ze=xr(),ie=s(ze);{var Pe=me=>{var xe=Q(),se=q(xe);tt(se,()=>(e(ne),v(()=>`${e(ne).id||e(ne).name}-${e(ne).updated_at}-mobile`)),_e=>{kr(_e,{get item(){return e(ne)},get config(){return d()},$$events:{edit(ue){Fe.call(this,r,ue)},delete(ue){Fe.call(this,r,ue)},action(ue){Fe.call(this,r,ue)}}})}),i(me,xe)},Oe=me=>{var xe=Q(),se=q(xe);$e(se,r,"mobile-card",{get item(){return e(ne)},get index(){return e(qe)}}),i(me,xe)};I(ie,me=>{d()?me(Pe):me(Oe,!1)})}n(ze),i(le,ze)}),n(K),i(oe,K)};I(Qe,oe=>{l()&&oe(ot)})}var Ze=j(Qe,2),Ue=s(Ze),Je=s(Ue);de(Je,1,a,ge,(oe,K)=>{var le=yr(),ne=s(le,!0);n(le),R(()=>{pe(le,1,`px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-300 uppercase tracking-wider bg-gray-50 dark:bg-gray-700 border-b border-gray-200 dark:border-gray-600 ${e(K),v(()=>e(K).align==="right"?"text-right":e(K).align==="center"?"text-center":"text-left")??""}`),O(ne,(e(K),v(()=>e(K).title)))}),i(oe,le)});var st=j(Je,2);de(st,3,t,(oe,K)=>oe.id||oe.name||K,(oe,K,le)=>{var ne=Q(),qe=q(ne);de(qe,1,a,ge,(ze,ie)=>{var Pe=br(),Oe=s(Pe);{var me=se=>{var _e=Q(),ue=q(_e);tt(ue,()=>(e(K),e(ie),v(()=>`${e(K).id||e(K).name}-${e(K).updated_at}-${e(ie).key}`)),lt=>{var We=Q(),dt=q(We);It(dt,()=>e(ie).cellComponent,(ct,vt)=>{vt(ct,Ct({get item(){return e(K)}},()=>e(ie).cellProps,{$$events:{edit:E,delete:z,action:C}}))}),i(lt,We)}),i(se,_e)},xe=se=>{var 
_e=Q(),ue=q(_e);$e(ue,r,"cell",{get item(){return e(K)},get column(){return e(ie)},get index(){return e(le)},get value(){return e(K),e(ie),v(()=>e(K)[e(ie).key])}}),i(se,_e)};I(Oe,se=>{e(ie),v(()=>e(ie).cellComponent)?se(me):se(xe,!1)})}n(Pe),R(se=>pe(Pe,1,`${se??""} border-b border-gray-200 dark:border-gray-700`),[()=>(e(ie),v(()=>_(e(ie))))]),i(ze,Pe)}),i(oe,ne)}),n(Ue),n(Ze),R(oe=>Ht(Ue,`grid-template-columns: ${oe??""}`),[()=>v(T)]),i(Me,Ke)};I(at,Me=>{u(t()),v(()=>t().length===0)?Me(nt):Me(it,!1)},!0)}i(we,Ie)};I(Ve,we=>{h()?we(Ne):we(Re,!1)},!0)}i(b,ve)};I(ce,b=>{p()?b(Ee):b(je,!1)})}var He=j(ce,2);{var De=b=>{sr(b,{get currentPage(){return U()},get totalPages(){return P()},get perPage(){return V()},get totalItems(){return m()},get itemName(){return f()},$$events:{pageChange:N}})};I(He,b=>{u(Z()),u(p()),u(h()),u(t()),v(()=>Z()&&!p()&&!h()&&t().length>0)&&b(De)})}n(ae),n(G),i(S,G),he()}var Pr=L('
                ');function Nr(S,r){fe(r,!1);const g=be();let a=o(r,"item",8),t=o(r,"actions",24,()=>[{type:"edit",title:"Edit",ariaLabel:"Edit item",action:"edit"},{type:"delete",title:"Delete",ariaLabel:"Delete item",action:"delete"}]);function p(m){a()&&(m==="edit"?g("edit",{item:a()}):m==="delete"?g("delete",{item:a()}):g("action",{type:m,item:a()}))}ke();var h=Pr();de(h,5,t,ge,(m,f)=>{{let M=X(()=>(e(f),v(()=>e(f).action||(e(f).type==="edit"?"edit":e(f).type==="delete"?"delete":"view")))),k=X(()=>(e(f),v(()=>e(f).title||(e(f).type==="edit"?"Edit":e(f).type==="delete"?"Delete":e(f).label)))),A=X(()=>(e(f),v(()=>e(f).ariaLabel||(e(f).type==="edit"?"Edit item":e(f).type==="delete"?"Delete item":e(f).label))));rt(m,{get action(){return e(M)},get title(){return e(k)},get ariaLabel(){return e(A)},$$events:{click:()=>p(e(f).type)}})}}),n(h),i(S,h),he()}var Cr=L(" "),jr=L(" ");function Rr(S,r){fe(r,!1);const g=te(),a=te();let t=o(r,"item",8),p=o(r,"field",8),h=o(r,"type",8,"text"),m=o(r,"truncateLength",8,50),f=o(r,"showTitle",8,!1);function M(){return t()&&p().split(".").reduce((B,x)=>B?.[x],t())||""}function k(){return h()==="date"?Et(e(g)):h()==="truncated"&&e(g).length>m()?`${e(g).slice(0,m())}...`:e(g)}function A(){switch(h()){case"code":return"inline-block max-w-full truncate bg-gray-100 dark:bg-gray-700 px-2 py-1 rounded text-xs font-mono";case"description":return"block w-full truncate text-sm text-gray-500 dark:text-gray-300";case"date":return"block w-full truncate text-sm text-gray-900 dark:text-white font-mono";default:return"block w-full truncate text-sm text-gray-900 dark:text-white"}}$(()=>{},()=>{ee(g,M())}),$(()=>{},()=>{ee(a,k())}),Le(),ke();var U=Q(),V=q(U);{var P=B=>{var x=Cr(),H=s(x,!0);n(x),R(D=>{pe(x,1,`${D??""} ${f()?"cursor-default":""}`),Be(x,"title",f()?e(g):""),O(H,e(a))},[()=>v(A)]),i(B,x)},Z=B=>{var x=jr(),H=s(x,!0);n(x),R(D=>{pe(x,1,`${D??""} ${f()?"cursor-default":""}`),Be(x,"title",f()?e(g):""),O(H,e(a))},[()=>v(A)]),i(B,x)};I(V,B=>{h()==="code"?B(P):B(Z,!1)})}i(S,U),he()}export{rt as A,Vr as D,Rr as G,Nr as a,tt as k}; diff --git a/webapp/assets/_app/immutable/chunks/BsqC4UA1.js b/webapp/assets/_app/immutable/chunks/BsqC4UA1.js new file mode 100644 index 00000000..706e99fa --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/BsqC4UA1.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as ae}from"./B3Pzt0F_.js";import{p as se,E as re,l as M,n as ie,s as r,g as t,m as k,a as le,f as p,j as v,k as $,r as f,c as l,d as oe,B as T,b as E,z as V,D as q,t as F,v as N,u as ne}from"./D8EpLgQ1.js";import{p as R,i as m}from"./5WA7h8uK.js";import{g as u,B as G}from"./CiE1LlKV.js";import{t as y}from"./BEkVdVE1.js";import{e as de}from"./BZiHL9L3.js";var ce=p('
                Checking...
                '),ve=p('
                '),fe=p('
                Webhook installed
                ',1),ue=p('
                No webhook installed
                '),he=p('

                Webhook Status

                ');function _e(H,g){se(g,!1);const x=k();let h=R(g,"entityType",8),s=R(g,"entityId",8),j=R(g,"entityName",8),i=k(null),o=k(!1),b=k(!0);const A=re();async function _(){if(s())try{r(b,!0),h()==="repository"?r(i,await u.getRepositoryWebhookInfo(s())):r(i,await u.getOrganizationWebhookInfo(s()))}catch(e){e&&typeof e=="object"&&"response"in e&&e.response?.status===404?r(i,null):(console.warn("Failed to check webhook status:",e),r(i,null))}finally{r(b,!1)}}async function J(){if(s())try{r(o,!0),h()==="repository"?await u.installRepositoryWebhook(s()):await u.installOrganizationWebhook(s()),y.success("Webhook Installed",`Webhook for ${h()} ${j()} has been installed successfully.`),await _(),A("webhookStatusChanged",{installed:!0})}catch(e){y.error("Webhook Installation Failed",e instanceof Error?e.message:"Failed to install webhook.")}finally{r(o,!1)}}async function K(){if(s())try{r(o,!0),h()==="repository"?await u.uninstallRepositoryWebhook(s()):await u.uninstallOrganizationWebhook(s()),y.success("Webhook Uninstalled",`Webhook for ${h()} ${j()} has been uninstalled successfully.`),await _(),A("webhookStatusChanged",{installed:!1})}catch(e){y.error("Webhook Uninstall Failed",de(e))}finally{r(o,!1)}}M(()=>ie(s()),()=>{s()&&_()}),M(()=>t(i),()=>{r(x,t(i)&&t(i).active)}),le(),ae();var w=he(),O=v(w),P=v(O),W=v(P),D=$(v(W),2),Q=v(D);{var X=e=>{var d=ce();l(e,d)},Y=e=>{var d=T(),z=E(d);{var I=a=>{var n=fe(),B=$(E(n),2);{var c=C=>{var U=ve(),te=v(U);f(U),F(()=>N(te,`URL: ${t(i),ne(()=>t(i).url||"N/A")??""}`)),l(C,U)};m(B,C=>{t(i)&&C(c)})}l(a,n)},S=a=>{var n=ue();l(a,n)};m(z,a=>{t(x)?a(I):a(S,!1)},!0)}l(e,d)};m(Q,e=>{t(b)?e(X):e(Y,!1)})}f(D),f(W);var L=$(W,2),Z=v(L);{var ee=e=>{var d=T(),z=E(d);{var I=a=>{G(a,{variant:"danger",size:"sm",get disabled(){return t(o)},$$events:{click:K},children:(n,B)=>{V();var c=q();F(()=>N(c,t(o)?"Uninstalling...":"Uninstall")),l(n,c)},$$slots:{default:!0}})},S=a=>{G(a,{variant:"primary",size:"sm",get disabled(){return t(o)},$$events:{click:J},children:(n,B)=>{V();var c=q();F(()=>N(c,t(o)?"Installing...":"Install Webhook")),l(n,c)},$$slots:{default:!0}})};m(z,a=>{t(x)?a(I):a(S,!1)})}l(e,d)};m(Z,e=>{t(b)||e(ee)})}f(L),f(P),f(O),f(w),l(H,w),oe()}export{_e as W}; diff --git a/webapp/assets/_app/immutable/chunks/CLYUNKnN.js b/webapp/assets/_app/immutable/chunks/BzlxTz7Q.js similarity index 92% rename from webapp/assets/_app/immutable/chunks/CLYUNKnN.js rename to webapp/assets/_app/immutable/chunks/BzlxTz7Q.js index e6432af4..78027fdd 100644 --- a/webapp/assets/_app/immutable/chunks/CLYUNKnN.js +++ b/webapp/assets/_app/immutable/chunks/BzlxTz7Q.js @@ -1 +1 @@ -import"./DsnmJJEf.js";import{i as b}from"./B3Pzt0F_.js";import{p as k,f as E,t as C,u as i,n as t,v as n,c as j,d as P,k as z,j as l,r as o}from"./D8EpLgQ1.js";import{c as N}from"./CiE1LlKV.js";import{p as f}from"./5WA7h8uK.js";import"./CoIRRsD9.js";import{j as x,e as c,i as u}from"./BGVHQGl-.js";var T=E('');function G(d,r){k(r,!1);let e=f(r,"item",8),m=f(r,"eagerCache",8,null);b();var s=T(),a=l(s),v=l(a,!0);o(a);var p=z(a,2),g=l(p,!0);o(p),o(s),C((h,y,_)=>{N(a,"href",h),n(v,y),n(g,_)},[()=>(t(x),t(e()),i(()=>x(e()))),()=>(t(c),t(e()),t(m()),i(()=>c(e(),m()))),()=>(t(u),t(e()),i(()=>u(e())))]),j(d,s),P()}export{G as P}; +import"./DsnmJJEf.js";import{i as b}from"./B3Pzt0F_.js";import{p as k,f as E,t as C,u as i,n as t,v as n,c as j,d as P,k as z,j as l,r as o}from"./D8EpLgQ1.js";import{c as N}from"./CiE1LlKV.js";import{p as f}from"./5WA7h8uK.js";import"./CRhkqW2i.js";import{j as x,e as c,i as 
u}from"./BGVHQGl-.js";var T=E('');function G(d,r){k(r,!1);let e=f(r,"item",8),m=f(r,"eagerCache",8,null);b();var s=T(),a=l(s),v=l(a,!0);o(a);var p=z(a,2),g=l(p,!0);o(p),o(s),C((h,y,_)=>{N(a,"href",h),n(v,y),n(g,_)},[()=>(t(x),t(e()),i(()=>x(e()))),()=>(t(c),t(e()),t(m()),i(()=>c(e(),m()))),()=>(t(u),t(e()),i(()=>u(e())))]),j(d,s),P()}export{G as P}; diff --git a/webapp/assets/_app/immutable/chunks/C89fcOde.js b/webapp/assets/_app/immutable/chunks/C89fcOde.js deleted file mode 100644 index 481fd468..00000000 --- a/webapp/assets/_app/immutable/chunks/C89fcOde.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as dr}from"./B3Pzt0F_.js";import{p as or,E as sr,m as i,o as ir,s,f as le,j as d,r as o,k as t,g as r,n as c,u as m,t as f,x as Ge,z as nr,v as _,e as je,c as u,D as R,B as Be,b as $e,d as lr}from"./D8EpLgQ1.js";import{p as ur,i as E}from"./5WA7h8uK.js";import{r as b,b as ze}from"./CiE1LlKV.js";import{b as v,a as br}from"./C6k1Q4We.js";import{p as gr}from"./D4Caz1gY.js";import{M as cr}from"./qB7B8uiS.js";import{J as mr}from"./DZblzgqm.js";var vr=le('

                '),pr=le('
                Updating...
                '),fr=le('

                Scale Set Information

                Provider:
                Entity:

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Extra Specs (JSON)
                ');function Or(Ce,ue){or(ue,!1);let e=ur(ue,"scaleSet",8);const $=sr();let O=i(!1),J=i(""),T=i(e().name||""),I=i(e().image||""),M=i(e().flavor||""),N=i(e().max_runners),A=i(e().min_idle_runners),P=i(e().runner_bootstrap_timeout),U=i(e().runner_prefix||""),h=i(e().os_type||"linux"),k=i(e().os_arch||"amd64"),D=i(e()["github-runner-group"]||""),G=i(e().enabled),g=i("{}");ir(()=>{if(e().extra_specs)try{if(typeof e().extra_specs=="object")s(g,JSON.stringify(e().extra_specs,null,2));else{const l=JSON.parse(e().extra_specs);s(g,JSON.stringify(l,null,2))}}catch{s(g,e().extra_specs||"{}")}});async function Fe(){try{s(O,!0),s(J,"");let l={};if(r(g).trim())try{l=JSON.parse(r(g))}catch{throw new Error("Invalid JSON in extra specs")}const w={name:r(T)!==e().name?r(T):void 0,image:r(I)!==e().image?r(I):void 0,flavor:r(M)!==e().flavor?r(M):void 0,max_runners:r(N)!==e().max_runners?r(N):void 0,min_idle_runners:r(A)!==e().min_idle_runners?r(A):void 0,runner_bootstrap_timeout:r(P)!==e().runner_bootstrap_timeout?r(P):void 0,runner_prefix:r(U)!==e().runner_prefix?r(U):void 0,os_type:r(h)!==e().os_type?r(h):void 0,os_arch:r(k)!==e().os_arch?r(k):void 0,"github-runner-group":r(D)!==e()["github-runner-group"]&&r(D)||void 0,enabled:r(G)!==e().enabled?r(G):void 0,extra_specs:r(g).trim()!==JSON.stringify(e().extra_specs||{},null,2).trim()?l:void 0};Object.keys(w).forEach(p=>{w[p]===void 0&&delete w[p]}),$("submit",w)}catch(l){s(J,l instanceof Error?l.message:"Failed to update scale set")}finally{s(O,!1)}}dr(),cr(Ce,{$$events:{close:()=>$("close")},children:(l,w)=>{var p=fr(),z=d(p),be=d(z),Le=d(be);o(be),o(z);var C=t(z,2),ge=d(C);{var He=a=>{var n=vr(),j=d(n),ne=d(j,!0);o(j),o(n),f(()=>_(ne,r(J))),u(a,n)};E(ge,a=>{r(J)&&a(He)})}var F=t(ge,2),ce=t(d(F),2),L=d(ce),me=t(d(L),2),We=d(me,!0);o(me),o(L);var ve=t(L,2),pe=t(d(ve),2),qe=d(pe);{var Ke=a=>{var n=R();f(()=>_(n,`Repository: ${c(e()),m(()=>e().repo_name)??""}`)),u(a,n)},Qe=a=>{var n=Be(),j=$e(n);{var ne=x=>{var S=R();f(()=>_(S,`Organization: ${c(e()),m(()=>e().org_name)??""}`)),u(x,S)},er=x=>{var S=Be(),rr=$e(S);{var ar=y=>{var B=R();f(()=>_(B,`Enterprise: ${c(e()),m(()=>e().enterprise_name)??""}`)),u(y,B)},tr=y=>{var B=R("Unknown Entity");u(y,B)};E(rr,y=>{c(e()),m(()=>e().enterprise_name)?y(ar):y(tr,!1)},!0)}u(x,S)};E(j,x=>{c(e()),m(()=>e().org_name)?x(ne):x(er,!1)},!0)}u(a,n)};E(qe,a=>{c(e()),m(()=>e().repo_name)?a(Ke):a(Qe,!1)})}o(pe),o(ve),o(ce),o(F);var H=t(F,2),fe=t(d(H),2);b(fe),o(H);var W=t(H,2),xe=t(d(W),2),q=d(xe),ye=t(d(q),2);b(ye),o(q);var K=t(q,2),_e=t(d(K),2);b(_e),o(K);var Q=t(K,2),V=t(d(Q),2);f(()=>{r(h),Ge(()=>{})});var X=d(V);X.value=X.__value="linux";var he=t(X);he.value=he.__value="windows",o(V),o(Q);var ke=t(Q,2),Y=t(d(ke),2);f(()=>{r(k),Ge(()=>{})});var Z=d(Y);Z.value=Z.__value="amd64";var we=t(Z);we.value=we.__value="arm64",o(Y),o(ke),o(xe),o(W);var ee=t(W,2),Se=t(d(ee),2),re=d(Se),Re=t(d(re),2);b(Re),o(re);var ae=t(re,2),Ee=t(d(ae),2);b(Ee),o(ae);var Oe=t(ae,2),Je=t(d(Oe),2);b(Je),o(Oe),o(Se),o(ee);var te=t(ee,2),de=t(d(te),2),oe=d(de),Te=t(d(oe),2);b(Te),o(oe);var Ie=t(oe,2),Me=t(d(Ie),2);b(Me),o(Ie),o(de);var se=t(de,2),Ne=d(se),Ve=t(d(Ne),2);mr(Ve,{rows:4,placeholder:"{}",get value(){return r(g)},set value(a){s(g,a)},$$legacy:!0}),o(Ne),o(se);var Ae=t(se,2),Pe=d(Ae);b(Pe),nr(2),o(Ae),o(te);var Ue=t(te,2),De=d(Ue),ie=t(De,2),Xe=d(ie);{var Ye=a=>{var n=pr();u(a,n)},Ze=a=>{var n=R("Update Scale Set");u(a,n)};E(Xe,a=>{r(O)?a(Ye):a(Ze,!1)})}o(ie),o(Ue),o(C),o(p),f(()=>{_(Le,`Update Scale Set 
${c(e()),m(()=>e().name)??""}`),_(We,(c(e()),m(()=>e().provider_name))),ie.disabled=r(O)}),v(fe,()=>r(T),a=>s(T,a)),v(ye,()=>r(I),a=>s(I,a)),v(_e,()=>r(M),a=>s(M,a)),ze(V,()=>r(h),a=>s(h,a)),ze(Y,()=>r(k),a=>s(k,a)),v(Re,()=>r(A),a=>s(A,a)),v(Ee,()=>r(N),a=>s(N,a)),v(Je,()=>r(P),a=>s(P,a)),v(Te,()=>r(U),a=>s(U,a)),v(Me,()=>r(D),a=>s(D,a)),br(Pe,()=>r(G),a=>s(G,a)),je("click",De,()=>$("close")),je("submit",C,gr(Fe)),u(l,p)},$$slots:{default:!0}}),lr()}export{Or as U}; diff --git a/webapp/assets/_app/immutable/chunks/CIBm3n2u.js b/webapp/assets/_app/immutable/chunks/CIBm3n2u.js new file mode 100644 index 00000000..ec528019 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CIBm3n2u.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as qe}from"./B3Pzt0F_.js";import{p as Fe,E as Ke,o as Ge,f as x,j as t,r,k as s,g as e,m,z as B,t as y,x as ue,u as l,v,n as T,s as i,e as be,c as u,D as He,d as Je}from"./D8EpLgQ1.js";import{p as ge,i as U}from"./5WA7h8uK.js";import{e as Qe,i as Ve}from"./u94nIB4-.js";import{r as me,b as ye,g as Xe}from"./CiE1LlKV.js";import{a as Ye,b as Ze}from"./C6k1Q4We.js";import{p as ea}from"./D4Caz1gY.js";import{e as xe}from"./BZiHL9L3.js";import{M as aa}from"./qB7B8uiS.js";var ta=x('

                '),ra=x('
                Owner:
                '),oa=x('
                '),sa=x(""),na=x(''),ia=x('

                Leave empty to auto-generate a new secret

                '),da=x('
                Updating...
                '),la=x('

                Name:
                Endpoint:
                Current Credentials:
                Current Pool Balancer:

                Leave unchanged to keep current credentials

                Round Robin distributes jobs evenly across pools, Pack fills pools in order

                ');function _a(fe,D){Fe(D,!1);let d=ge(D,"entity",8),w=ge(D,"entityType",8);const $=Ke();let C=m(!1),k=m(""),M=m([]),R=m(!1),f=m(""),_=m(""),h=m(""),b=m(!1);function _e(){if(w()==="repository"){const c=d();return`${c.owner}/${c.name}`}return d().name||""}function W(){return w().charAt(0).toUpperCase()+w().slice(1)}function he(){return w()==="repository"&&d().owner||""}async function ke(){try{i(R,!0),i(M,await Xe.listCredentials())}catch(c){i(k,xe(c))}finally{i(R,!1)}}function we(){i(f,d().credentials_name||""),i(_,d().pool_balancing_type||"roundrobin"),i(h,""),i(b,!1)}async function Ce(){try{i(C,!0),i(k,"");const c={};let E=!1;if(e(f)&&e(f)!==d().credentials_name&&(c.credentials_name=e(f),E=!0),e(_)&&e(_)!==d().pool_balancing_type&&(c.pool_balancer_type=e(_),E=!0),e(b)){if(!e(h).trim()){i(k,"Please enter a webhook secret or uncheck the option to change it");return}c.webhook_secret=e(h),E=!0}if(!E){$("close");return}$("submit",c)}catch(c){i(k,xe(c))}finally{i(C,!1)}}Ge(()=>{ke(),we()}),qe(),aa(fe,{$$events:{close:()=>$("close")},children:(c,E)=>{var j=la(),A=t(j),N=t(A),Ee=t(N);r(N);var Y=s(N,2),Se=t(Y,!0);r(Y),r(A);var z=s(A,2),Z=t(z);{var Pe=a=>{var o=ta(),n=t(o),p=t(n,!0);r(n),r(o),y(()=>v(p,e(k))),u(a,o)};U(Z,a=>{e(k)&&a(Pe)})}var I=s(Z,2),L=t(I),Ue=t(L);r(L);var ee=s(L,2),ae=t(ee);{var $e=a=>{var o=ra(),n=s(t(o),2),p=t(n,!0);r(n),r(o),y(S=>v(p,S),[()=>l(he)]),u(a,o)};U(ae,a=>{w()==="repository"&&a($e)})}var O=s(ae,2),te=s(t(O),2),Be=t(te,!0);r(te),r(O);var q=s(O,2),re=s(t(q),2),Te=t(re,!0);r(re),r(q);var F=s(q,2),oe=s(t(F),2),De=t(oe,!0);r(oe),r(F);var se=s(F,2),ne=s(t(se),2),Me=t(ne,!0);r(ne),r(se),r(ee),r(I);var K=s(I,2),G=t(K),Re=s(t(G),2);{var We=a=>{var o=oa();u(a,o)},je=a=>{var o=na();y(()=>{e(f),ue(()=>{e(M)})});var n=t(o);n.value=n.__value="";var p=s(n);Qe(p,1,()=>e(M),Ve,(S,g)=>{var P=sa(),Oe=t(P);r(P);var pe={};y(()=>{v(Oe,`${e(g),l(()=>e(g).name)??""} (${e(g),l(()=>e(g).endpoint?.name||"Unknown")??""})`),pe!==(pe=(e(g),l(()=>e(g).name)))&&(P.value=(P.__value=(e(g),l(()=>e(g).name)))??"")}),u(S,P)}),r(o),ye(o,()=>e(f),S=>i(f,S)),u(a,o)};U(Re,a=>{e(R)?a(We):a(je,!1)})}B(2),r(G);var H=s(G,2),J=s(t(H),2);y(()=>{e(_),ue(()=>{})});var Q=t(J);Q.value=Q.__value="roundrobin";var ie=s(Q);ie.value=ie.__value="pack",r(J),B(2),r(H);var de=s(H,2),V=t(de),le=t(V);me(le),B(2),r(V);var Ae=s(V,2);{var Ne=a=>{var o=ia(),n=s(t(o),2);me(n),B(2),r(o),y(()=>n.required=e(b)),Ze(n,()=>e(h),p=>i(h,p)),u(a,o)};U(Ae,a=>{e(b)&&a(Ne)})}r(de),r(K);var ce=s(K,2),ve=t(ce),X=s(ve,2),ze=t(X);{var Ie=a=>{var o=da();u(a,o)},Le=a=>{var o=He();y(n=>v(o,`Update ${n??""}`),[()=>l(W)]),u(a,o)};U(ze,a=>{e(C)?a(Ie):a(Le,!1)})}r(X),r(ce),r(z),r(j),y((a,o,n,p)=>{v(Ee,`Update ${a??""}`),v(Se,o),v(Ue,`${n??""} Information`),v(Be,(T(d()),l(()=>d().name))),v(Te,(T(d()),l(()=>d().endpoint?.name))),v(De,(T(d()),l(()=>d().credentials_name))),v(Me,(T(d()),l(()=>d().pool_balancing_type||"roundrobin"))),X.disabled=p},[()=>l(W),()=>l(_e),()=>l(W),()=>(e(C),e(b),e(h),l(()=>e(C)||e(b)&&!e(h).trim()))]),ye(J,()=>e(_),a=>i(_,a)),Ye(le,()=>e(b),a=>i(b,a)),be("click",ve,()=>$("close")),be("submit",z,ea(Ce)),u(c,j)},$$slots:{default:!0}}),Je()}export{_a as U}; diff --git a/webapp/assets/_app/immutable/chunks/CRD55Dyg.js b/webapp/assets/_app/immutable/chunks/CRD55Dyg.js new file mode 100644 index 00000000..0388dfa1 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CRD55Dyg.js @@ -0,0 +1 @@ +import"./DsnmJJEf.js";import{i as Lr}from"./B3Pzt0F_.js";import{p as qr,E as Gr,m as s,o as Jr,f as m,k as r,j as o,g as e,r as 
a,t as v,e as M,c as b,v as T,b as Nr,z as vr,x as W,u as p,s as d,D as je,d as Vr}from"./D8EpLgQ1.js";import{p as pr,i as z}from"./5WA7h8uK.js";import{e as Ae,i as Oe}from"./u94nIB4-.js";import{s as $e,r as h,b as Q,g as C,c as Fr}from"./CiE1LlKV.js";import{b as E,a as Kr}from"./C6k1Q4We.js";import{p as Ur}from"./D4Caz1gY.js";import{M as Wr}from"./qB7B8uiS.js";import{e as He}from"./BZiHL9L3.js";import{J as Qr}from"./DZblzgqm.js";var Xr=m('

                '),Yr=m('
                '),Zr=m(""),et=m(''),rt=m('
                '),tt=m(""),at=m(''),ot=m(' '),dt=m('
                '),it=m('

                Entity & Provider Configuration

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Extra Specs (JSON)
                ',1),st=m('
                Creating...
                '),lt=m('

                Create New Pool

                Entity Level *
                ');function ht(mr,X){qr(X,!1);const Y=Gr();let Be=pr(X,"initialEntityType",8,""),fr=pr(X,"initialEntityId",8,""),G=s(!1),P=s(""),n=s(Be()),I=s([]),Z=s([]),ee=s(!1),re=s(!1),k=s(fr()),S=s(""),B=s(""),D=s(""),te=s(void 0),ae=s(void 0),oe=s(void 0),de=s(100),ie=s("garm"),J=s("linux"),N=s("amd64"),se=s(""),le=s(!0),_=s([]),j=s(""),L=s("{}");async function yr(){try{d(re,!0),d(Z,await C.listProviders())}catch(l){d(P,He(l))}finally{d(re,!1)}}async function De(){if(e(n))try{switch(d(ee,!0),d(I,[]),e(n)){case"repository":d(I,await C.listRepositories());break;case"organization":d(I,await C.listOrganizations());break;case"enterprise":d(I,await C.listEnterprises());break}}catch(l){d(P,He(l))}finally{d(ee,!1)}}function ne(l){e(n)!==l&&(d(n,l),d(k,""),De())}function Le(){e(j).trim()&&!e(_).includes(e(j).trim())&&(d(_,[...e(_),e(j).trim()]),d(j,""))}function xr(l){d(_,e(_).filter((A,w)=>w!==l))}function hr(l){l.key==="Enter"&&(l.preventDefault(),Le())}async function kr(){if(!e(n)||!e(k)||!e(S)||!e(B)||!e(D)){d(P,"Please fill in all required fields");return}try{d(G,!0),d(P,"");let l={};if(e(L).trim())try{l=JSON.parse(e(L))}catch{throw new Error("Invalid JSON in extra specs")}const A={provider_name:e(S),image:e(B),flavor:e(D),max_runners:e(te)||10,min_idle_runners:e(ae)||0,runner_bootstrap_timeout:e(oe)||20,priority:e(de),runner_prefix:e(ie),os_type:e(J),os_arch:e(N),"github-runner-group":e(se)||void 0,enabled:e(le),tags:e(_),extra_specs:e(L).trim()?l:void 0};let w;switch(e(n)){case"repository":w=await C.createRepositoryPool(e(k),A);break;case"organization":w=await C.createOrganizationPool(e(k),A);break;case"enterprise":w=await C.createEnterprisePool(e(k),A);break;default:throw new Error("Invalid entity level")}Y("submit",A)}catch(l){d(P,He(l))}finally{d(G,!1)}}Jr(()=>{yr(),Be()&&De()}),Lr(),Wr(mr,{$$events:{close:()=>Y("close")},children:(l,A)=>{var w=lt(),V=r(o(w),2),qe=o(V);{var _r=c=>{var y=Xr(),O=o(y),F=o(O,!0);a(O),a(y),v(()=>T(F,e(P))),b(c,y)};z(qe,c=>{e(P)&&c(_r)})}var ue=r(qe,2),Ge=r(o(ue),2),be=o(Ge),ce=r(be,2),Je=r(ce,2);a(Ge),a(ue);var Ne=r(ue,2);{var wr=c=>{var y=it(),O=Nr(y),F=r(o(O),2),ve=o(F),pe=o(ve),Rr=o(pe);vr(),a(pe);var Tr=r(pe,2);{var zr=t=>{var u=Yr();b(t,u)},Cr=t=>{var u=et();v(()=>{e(k),W(()=>{e(n),e(I)})});var f=o(u),$=o(f);a(f),f.value=f.__value="";var R=r(f);Ae(R,1,()=>e(I),Oe,(g,i)=>{var x=Zr(),U=o(x);{var Br=H=>{var q=je();v(()=>T(q,`${e(i),p(()=>e(i).owner)??""}/${e(i),p(()=>e(i).name)??""} (${e(i),p(()=>e(i).endpoint?.name)??""})`)),b(H,q)},Dr=H=>{var q=je();v(()=>T(q,`${e(i),p(()=>e(i).name)??""} (${e(i),p(()=>e(i).endpoint?.name)??""})`)),b(H,q)};z(U,H=>{e(n)==="repository"?H(Br):H(Dr,!1)})}a(x);var gr={};v(()=>{gr!==(gr=(e(i),p(()=>e(i).id)))&&(x.value=(x.__value=(e(i),p(()=>e(i).id)))??"")}),b(g,x)}),a(u),v(()=>T($,`Select a ${e(n)??""}`)),Q(u,()=>e(k),g=>d(k,g)),b(t,u)};z(Tr,t=>{e(ee)?t(zr):t(Cr,!1)})}a(ve);var Ke=r(ve,2),Ir=r(o(Ke),2);{var Sr=t=>{var u=rt();b(t,u)},jr=t=>{var u=at();v(()=>{e(S),W(()=>{e(Z)})});var f=o(u);f.value=f.__value="";var $=r(f);Ae($,1,()=>e(Z),Oe,(R,g)=>{var i=tt(),x=o(i,!0);a(i);var U={};v(()=>{T(x,(e(g),p(()=>e(g).name))),U!==(U=(e(g),p(()=>e(g).name)))&&(i.value=(i.__value=(e(g),p(()=>e(g).name)))??"")}),b(R,i)}),a(u),Q(u,()=>e(S),R=>d(S,R)),b(t,u)};z(Ir,t=>{e(re)?t(Sr):t(jr,!1)})}a(Ke),a(F),a(O);var me=r(O,2),Ue=r(o(me),2),fe=o(Ue),We=r(o(fe),2);h(We),a(fe);var ye=r(fe,2),Qe=r(o(ye),2);h(Qe),a(ye);var xe=r(ye,2),he=r(o(xe),2);v(()=>{e(J),W(()=>{})});var ke=o(he);ke.value=ke.__value="linux";var 
Xe=r(ke);Xe.value=Xe.__value="windows",a(he),a(xe);var Ye=r(xe,2),_e=r(o(Ye),2);v(()=>{e(N),W(()=>{})});var we=o(_e);we.value=we.__value="amd64";var Ze=r(we);Ze.value=Ze.__value="arm64",a(_e),a(Ye),a(Ue),a(me);var Ee=r(me,2),er=r(o(Ee),2),Me=o(er),rr=r(o(Me),2);h(rr),a(Me);var Pe=r(Me,2),tr=r(o(Pe),2);h(tr),a(Pe);var ar=r(Pe,2),or=r(o(ar),2);h(or),a(ar),a(er),a(Ee);var dr=r(Ee,2),Re=r(o(dr),2),Te=o(Re),ir=r(o(Te),2);h(ir),a(Te);var ze=r(Te,2),sr=r(o(ze),2);h(sr),a(ze);var lr=r(ze,2),nr=r(o(lr),2);h(nr),a(lr),a(Re);var Ce=r(Re,2),ur=r(o(Ce),2),Ie=o(ur),K=o(Ie);h(K);var Ar=r(K,2);a(Ie);var Or=r(Ie,2);{var $r=t=>{var u=dt();Ae(u,5,()=>e(_),Oe,(f,$,R)=>{var g=ot(),i=o(g),x=r(i);a(g),v(()=>{T(i,`${e($)??""} `),Fr(x,"aria-label",`Remove tag ${e($)}`)}),M("click",x,()=>xr(R)),b(f,g)}),a(u),b(t,u)};z(Or,t=>{e(_),p(()=>e(_).length>0)&&t($r)})}a(ur),a(Ce);var Se=r(Ce,2),Hr=r(o(Se),2);Qr(Hr,{rows:4,placeholder:"{}",get value(){return e(L)},set value(t){d(L,t)},$$legacy:!0}),a(Se);var br=r(Se,2),cr=o(br);h(cr),vr(2),a(br),a(dr),v(t=>T(Rr,`${t??""} `),[()=>(e(n),p(()=>e(n).charAt(0).toUpperCase()+e(n).slice(1)))]),E(We,()=>e(B),t=>d(B,t)),E(Qe,()=>e(D),t=>d(D,t)),Q(he,()=>e(J),t=>d(J,t)),Q(_e,()=>e(N),t=>d(N,t)),E(rr,()=>e(ae),t=>d(ae,t)),E(tr,()=>e(te),t=>d(te,t)),E(or,()=>e(oe),t=>d(oe,t)),E(ir,()=>e(ie),t=>d(ie,t)),E(sr,()=>e(de),t=>d(de,t)),E(nr,()=>e(se),t=>d(se,t)),E(K,()=>e(j),t=>d(j,t)),M("keydown",K,hr),M("click",Ar,Le),Kr(cr,()=>e(le),t=>d(le,t)),b(c,y)};z(Ne,c=>{e(n)&&c(wr)})}var Ve=r(Ne,2),Fe=o(Ve),ge=r(Fe,2),Er=o(ge);{var Mr=c=>{var y=st();b(c,y)},Pr=c=>{var y=je("Create Pool");b(c,y)};z(Er,c=>{e(G)?c(Mr):c(Pr,!1)})}a(ge),a(Ve),a(V),a(w),v(()=>{$e(be,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="repository"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),$e(ce,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="organization"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),$e(Je,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="enterprise"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),ge.disabled=e(G)||!e(n)||!e(k)||!e(S)||!e(B)||!e(D)}),M("click",be,()=>ne("repository")),M("click",ce,()=>ne("organization")),M("click",Je,()=>ne("enterprise")),M("click",Fe,()=>Y("close")),M("submit",V,Ur(kr)),b(l,w)},$$slots:{default:!0}}),Vr()}export{ht as C}; diff --git a/webapp/assets/_app/immutable/chunks/CRhkqW2i.js b/webapp/assets/_app/immutable/chunks/CRhkqW2i.js new file mode 100644 index 00000000..57d21a05 --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/CRhkqW2i.js @@ -0,0 +1 @@ +const s=globalThis.__sveltekit_xtz33p?.base??"/ui",t=globalThis.__sveltekit_xtz33p?.assets??s;export{t as a,s as b}; diff --git a/webapp/assets/_app/immutable/chunks/CTf6mQoE.js b/webapp/assets/_app/immutable/chunks/CTf6mQoE.js deleted file mode 100644 index 15ee7bf7..00000000 --- a/webapp/assets/_app/immutable/chunks/CTf6mQoE.js +++ /dev/null @@ -1,3 +0,0 @@ -import{I as Ee,o as De,aR as T,g as x,s as P,bf as yt,aS as Be}from"./D8EpLgQ1.js";import{a as wt,b as L}from"./CoIRRsD9.js";class le{constructor(t,n){this.status=t,typeof 
n=="string"?this.body={message:n}:n?this.body=n:this.body={message:`Error: ${t}`}}toString(){return JSON.stringify(this.body)}}class Se{constructor(t,n){this.status=t,this.location=n}}class Re extends Error{constructor(t,n,r){super(r),this.status=t,this.text=n}}new URL("sveltekit-internal://");function vt(e,t){return e==="/"||t==="ignore"?e:t==="never"?e.endsWith("/")?e.slice(0,-1):e:t==="always"&&!e.endsWith("/")?e+"/":e}function bt(e){return e.split("%25").map(decodeURI).join("%25")}function kt(e){for(const t in e)e[t]=decodeURIComponent(e[t]);return e}function me({href:e}){return e.split("#")[0]}function At(e,t,n,r=!1){const a=new URL(e);Object.defineProperty(a,"searchParams",{value:new Proxy(a.searchParams,{get(i,o){if(o==="get"||o==="getAll"||o==="has")return f=>(n(f),i[o](f));t();const c=Reflect.get(i,o);return typeof c=="function"?c.bind(i):c}}),enumerable:!0,configurable:!0});const s=["href","pathname","search","toString","toJSON"];r&&s.push("hash");for(const i of s)Object.defineProperty(a,i,{get(){return t(),e[i]},enumerable:!0,configurable:!0});return a}function Et(...e){let t=5381;for(const n of e)if(typeof n=="string"){let r=n.length;for(;r;)t=t*33^n.charCodeAt(--r)}else if(ArrayBuffer.isView(n)){const r=new Uint8Array(n.buffer,n.byteOffset,n.byteLength);let a=r.length;for(;a;)t=t*33^r[--a]}else throw new TypeError("value must be a string or TypedArray");return(t>>>0).toString(36)}function St(e){const t=atob(e),n=new Uint8Array(t.length);for(let r=0;r((e instanceof Request?e.method:t?.method||"GET")!=="GET"&&G.delete(Ie(e)),Rt(e,t));const G=new Map;function It(e,t){const n=Ie(e,t),r=document.querySelector(n);if(r?.textContent){let{body:a,...s}=JSON.parse(r.textContent);const i=r.getAttribute("data-ttl");return i&&G.set(n,{body:a,init:s,ttl:1e3*Number(i)}),r.getAttribute("data-b64")!==null&&(a=St(a)),Promise.resolve(new Response(a,s))}return window.fetch(e,t)}function Ut(e,t,n){if(G.size>0){const r=Ie(e,n),a=G.get(r);if(a){if(performance.now(){const a=/^\[\.\.\.(\w+)(?:=(\w+))?\]$/.exec(r);if(a)return t.push({name:a[1],matcher:a[2],optional:!1,rest:!0,chained:!0}),"(?:/([^]*))?";const s=/^\[\[(\w+)(?:=(\w+))?\]\]$/.exec(r);if(s)return t.push({name:s[1],matcher:s[2],optional:!0,rest:!1,chained:!0}),"(?:/([^/]+))?";if(!r)return;const i=r.split(/\[(.+?)\](?!\])/);return"/"+i.map((c,f)=>{if(f%2){if(c.startsWith("x+"))return _e(String.fromCharCode(parseInt(c.slice(2),16)));if(c.startsWith("u+"))return _e(String.fromCharCode(...c.slice(2).split("-").map(_=>parseInt(_,16))));const d=Lt.exec(c),[,h,u,l,p]=d;return t.push({name:l,matcher:p,optional:!!h,rest:!!u,chained:u?f===1&&i[0]==="":!1}),u?"([^]*?)":h?"([^/]*)?":"([^/]+?)"}return _e(c)}).join("")}).join("")}/?$`),params:t}}function xt(e){return e!==""&&!/^\([^)]+\)$/.test(e)}function Pt(e){return e.slice(1).split("/").filter(xt)}function Ct(e,t,n){const r={},a=e.slice(1),s=a.filter(o=>o!==void 0);let i=0;for(let o=0;od).join("/"),i=0),f===void 0){c.rest&&(r[c.name]="");continue}if(!c.matcher||n[c.matcher](f)){r[c.name]=f;const d=t[o+1],h=a[o+1];d&&!d.rest&&d.optional&&h&&c.chained&&(i=0),!d&&!h&&Object.keys(r).length===s.length&&(i=0);continue}if(c.optional&&c.chained){i++;continue}return}if(!i)return r}function _e(e){return e.normalize().replace(/[[\]]/g,"\\$&").replace(/%/g,"%25").replace(/\//g,"%2[Ff]").replace(/\?/g,"%3[Ff]").replace(/#/g,"%23").replace(/[.*+?^${}()|\\]/g,"\\$&")}function Ot({nodes:e,server_loads:t,dictionary:n,matchers:r}){const a=new Set(t);return 
Object.entries(n).map(([o,[c,f,d]])=>{const{pattern:h,params:u}=Tt(o),l={id:o,exec:p=>{const _=h.exec(p);if(_)return Ct(_,u,r)},errors:[1,...d||[]].map(p=>e[p]),layouts:[0,...f||[]].map(i),leaf:s(c)};return l.errors.length=l.layouts.length=Math.max(l.errors.length,l.layouts.length),l});function s(o){const c=o<0;return c&&(o=~o),[c,e[o]]}function i(o){return o===void 0?o:[a.has(o),e[o]]}}function ze(e,t=JSON.parse){try{return t(sessionStorage[e])}catch{}}function Fe(e,t,n=JSON.stringify){const r=n(t);try{sessionStorage[e]=r}catch{}}const Nt="1755334486454",Xe="sveltekit:snapshot",Ze="sveltekit:scroll",Qe="sveltekit:states",jt="sveltekit:pageurl",F="sveltekit:history",Y="sveltekit:navigation",j={tap:1,hover:2,viewport:3,eager:4,off:-1,false:-1},Z=location.origin;function Ue(e){if(e instanceof URL)return e;let t=document.baseURI;if(!t){const n=document.getElementsByTagName("base");t=n.length?n[0].href:document.URL}return new URL(e,t)}function fe(){return{x:pageXOffset,y:pageYOffset}}function B(e,t){return e.getAttribute(`data-sveltekit-${t}`)}const Ve={...j,"":j.hover};function et(e){let t=e.assignedSlot??e.parentNode;return t?.nodeType===11&&(t=t.host),t}function tt(e,t){for(;e&&e!==t;){if(e.nodeName.toUpperCase()==="A"&&e.hasAttribute("href"))return e;e=et(e)}}function ve(e,t,n){let r;try{if(r=new URL(e instanceof SVGAElement?e.href.baseVal:e.href,document.baseURI),n&&r.hash.match(/^#[^/]/)){const o=location.hash.split("#")[1]||"/";r.hash=`#${o}${r.hash}`}}catch{}const a=e instanceof SVGAElement?e.target.baseVal:e.target,s=!r||!!a||ue(r,t,n)||(e.getAttribute("rel")||"").split(/\s+/).includes("external"),i=r?.origin===Z&&e.hasAttribute("download");return{url:r,external:s,target:a,download:i}}function te(e){let t=null,n=null,r=null,a=null,s=null,i=null,o=e;for(;o&&o!==document.documentElement;)r===null&&(r=B(o,"preload-code")),a===null&&(a=B(o,"preload-data")),t===null&&(t=B(o,"keepfocus")),n===null&&(n=B(o,"noscroll")),s===null&&(s=B(o,"reload")),i===null&&(i=B(o,"replacestate")),o=et(o);function c(f){switch(f){case"":case"true":return!0;case"off":case"false":return!1;default:return}}return{preload_code:Ve[r??"off"],preload_data:Ve[a??"off"],keepfocus:c(t),noscroll:c(n),reload:c(s),replace_state:c(i)}}function qe(e){const t=Ee(e);let n=!0;function r(){n=!0,t.update(i=>i)}function a(i){n=!1,t.set(i)}function s(i){let o;return t.subscribe(c=>{(o===void 0||n&&c!==o)&&i(o=c)})}return{notify:r,set:a,subscribe:s}}const nt={v:()=>{}};function $t(){const{set:e,subscribe:t}=Ee(!1);let n;async function r(){clearTimeout(n);try{const a=await fetch(`${wt}/_app/version.json`,{headers:{pragma:"no-cache","cache-control":"no-cache"}});if(!a.ok)return!1;const i=(await a.json()).version!==Nt;return i&&(e(!0),nt.v(),clearTimeout(n)),i}catch{return!1}}return{subscribe:t,check:r}}function ue(e,t,n){return e.origin!==Z||!e.pathname.startsWith(t)?!0:n?!(e.pathname===t+"/"||e.pathname===t+"/index.html"||e.protocol==="file:"&&e.pathname.replace(/\/[^/]+\.html?$/,"")===t):!1}function kn(e){}function Me(e){const t=Bt(e),n=new ArrayBuffer(t.length),r=new DataView(n);for(let a=0;a>16),t+=String.fromCharCode((n&65280)>>8),t+=String.fromCharCode(n&255),n=r=0);return r===12?(n>>=4,t+=String.fromCharCode(n)):r===18&&(n>>=2,t+=String.fromCharCode((n&65280)>>8),t+=String.fromCharCode(n&255)),t}const Ft=-1,Vt=-2,qt=-3,Mt=-4,Gt=-5,Ht=-6;function Kt(e,t){if(typeof e=="number")return a(e,!0);if(!Array.isArray(e)||e.length===0)throw new Error("Invalid input");const n=e,r=Array(n.length);function 
a(s,i=!1){if(s===Ft)return;if(s===qt)return NaN;if(s===Mt)return 1/0;if(s===Gt)return-1/0;if(s===Ht)return-0;if(i)throw new Error("Invalid input");if(s in r)return r[s];const o=n[s];if(!o||typeof o!="object")r[s]=o;else if(Array.isArray(o))if(typeof o[0]=="string"){const c=o[0],f=t?.[c];if(f)return r[s]=f(a(o[1]));switch(c){case"Date":r[s]=new Date(o[1]);break;case"Set":const d=new Set;r[s]=d;for(let l=1;lt!=null)}const Jt="x-sveltekit-invalidated",zt="x-sveltekit-trailing-slash";function ne(e){return e instanceof le||e instanceof Re?e.status:500}function Xt(e){return e instanceof Re?e.text:"Internal Error"}let E,J,ye;const Zt=De.toString().includes("$$")||/function \w+\(\) \{\}/.test(De.toString());Zt?(E={data:{},form:null,error:null,params:{},route:{id:null},state:{},status:-1,url:new URL("https://example.com")},J={current:null},ye={current:!1}):(E=new class{#e=T({});get data(){return x(this.#e)}set data(t){P(this.#e,t)}#t=T(null);get form(){return x(this.#t)}set form(t){P(this.#t,t)}#n=T(null);get error(){return x(this.#n)}set error(t){P(this.#n,t)}#r=T({});get params(){return x(this.#r)}set params(t){P(this.#r,t)}#a=T({id:null});get route(){return x(this.#a)}set route(t){P(this.#a,t)}#o=T({});get state(){return x(this.#o)}set state(t){P(this.#o,t)}#s=T(-1);get status(){return x(this.#s)}set status(t){P(this.#s,t)}#i=T(new URL("https://example.com"));get url(){return x(this.#i)}set url(t){P(this.#i,t)}},J=new class{#e=T(null);get current(){return x(this.#e)}set current(t){P(this.#e,t)}},ye=new class{#e=T(!1);get current(){return x(this.#e)}set current(t){P(this.#e,t)}},nt.v=()=>ye.current=!0);function Qt(e){Object.assign(E,e)}const en="/__data.json",tn=".html__data.json";function nn(e){return e.endsWith(".html")?e.replace(/\.html$/,tn):e.replace(/\/$/,"")+en}const{tick:rn}=yt,an=new Set(["icon","shortcut icon","apple-touch-icon"]),D=ze(Ze)??{},z=ze(Xe)??{},N={url:qe({}),page:qe({}),navigating:Ee(null),updated:$t()};function Le(e){D[e]=fe()}function on(e,t){let n=e+1;for(;D[n];)delete D[n],n+=1;for(n=t+1;z[n];)delete z[n],n+=1}function q(e){return location.href=e.href,new Promise(()=>{})}async function at(){if("serviceWorker"in navigator){const e=await navigator.serviceWorker.getRegistration(L||"/");e&&await e.update()}}function Ge(){}let Te,be,re,C,ke,v;globalThis.__sveltekit_13hoftk.data;const ae=[],oe=[];let O=null;const ee=new Map,ot=new Set,sn=new Set,H=new Set;let w={branch:[],error:null,url:null},xe=!1,se=!1,He=!0,X=!1,M=!1,st=!1,Pe=!1,it,k,I,$;const K=new Set,Ke=new Map;async function Rn(e,t,n){document.URL!==location.href&&(location.href=location.href),v=e,await e.hooks.init?.(),Te=Ot(e),C=document.documentElement,ke=t,be=e.nodes[0],re=e.nodes[1],be(),re(),k=history.state?.[F],I=history.state?.[Y],k||(k=I=Date.now(),history.replaceState({...history.state,[F]:k,[Y]:I},""));const r=D[k];function a(){r&&(history.scrollRestoration="manual",scrollTo(r.x,r.y))}n?(a(),await _n(ke,n)):(await W({type:"enter",url:Ue(v.hash?wn(new URL(location.href)):location.href),replace_state:!0}),a()),mn()}function cn(){ae.length=0,Pe=!1}function ct(e){oe.some(t=>t?.snapshot)&&(z[e]=oe.map(t=>t?.snapshot?.capture()))}function lt(e){z[e]?.forEach((t,n)=>{oe[n]?.snapshot?.restore(t)})}function We(){Le(k),Fe(Ze,D),ct(I),Fe(Xe,z)}async function Ce(e,t,n,r){let a;const s=await 
W({type:"goto",url:Ue(e),keepfocus:t.keepFocus,noscroll:t.noScroll,replace_state:t.replaceState,state:t.state,redirect_count:n,nav_token:r,accept:()=>{t.invalidateAll&&(Pe=!0,a=[...Ke.keys()]),t.invalidate&&t.invalidate.forEach(gn)}});return t.invalidateAll&&Be().then(Be).then(()=>{Ke.forEach(({resource:i},o)=>{a?.includes(o)&&i.refresh?.()})}),s}async function ln(e){if(e.id!==O?.id){const t={};K.add(t),O={id:e.id,token:t,promise:dt({...e,preload:t}).then(n=>(K.delete(t),n.type==="loaded"&&n.state.error&&(O=null),n))}}return O.promise}async function we(e){const t=(await he(e,!1))?.route;t&&await Promise.all([...t.layouts,t.leaf].map(n=>n?.[1]()))}function ft(e,t,n){w=e.state;const r=document.querySelector("style[data-sveltekit]");if(r&&r.remove(),Object.assign(E,e.props.page),it=new v.root({target:t,props:{...e.props,stores:N,components:oe},hydrate:n,sync:!1}),lt(I),n){const a={from:null,to:{params:w.params,route:{id:w.route?.id??null},url:new URL(location.href)},willUnload:!1,type:"enter",complete:Promise.resolve()};H.forEach(s=>s(a))}se=!0}function ie({url:e,params:t,branch:n,status:r,error:a,route:s,form:i}){let o="never";if(L&&(e.pathname===L||e.pathname===L+"/"))o="always";else for(const l of n)l?.slash!==void 0&&(o=l.slash);e.pathname=vt(e.pathname,o),e.search=e.search;const c={type:"loaded",state:{url:e,params:t,branch:n,error:a,route:s},props:{constructors:Yt(n).map(l=>l.node.component),page:$e(E)}};i!==void 0&&(c.props.form=i);let f={},d=!E,h=0;for(let l=0;l(o&&(c.route=!0),u[l])}),params:new Proxy(r,{get:(u,l)=>(o&&c.params.add(l),u[l])}),data:s?.data??null,url:At(n,()=>{o&&(c.url=!0)},u=>{o&&c.search_params.add(u)},v.hash),async fetch(u,l){u instanceof Request&&(l={body:u.method==="GET"||u.method==="HEAD"?void 0:await u.blob(),cache:u.cache,credentials:u.credentials,headers:[...u.headers].length>0?u?.headers:void 0,integrity:u.integrity,keepalive:u.keepalive,method:u.method,mode:u.mode,redirect:u.redirect,referrer:u.referrer,referrerPolicy:u.referrerPolicy,signal:u.signal,...l});const{resolved:p,promise:_}=ut(u,l,n);return o&&d(p.href),_},setHeaders:()=>{},depends:d,parent(){return o&&(c.parent=!0),t()},untrack(u){o=!1;try{return u()}finally{o=!0}}};i=await f.universal.load.call(null,h)??null}return{node:f,loader:e,server:s,universal:f.universal?.load?{type:"data",data:i,uses:c}:null,data:i??s?.data??null,slash:f.universal?.trailingSlash??s?.slash}}function ut(e,t,n){let r=e instanceof Request?e.url:e;const a=new URL(r,n);a.origin===n.origin&&(r=a.href.slice(n.origin.length));const s=se?Ut(r,a.href,t):It(r,t);return{resolved:a,promise:s}}function Ye(e,t,n,r,a,s){if(Pe)return!0;if(!a)return!1;if(a.parent&&e||a.route&&t||a.url&&n)return!0;for(const i of a.search_params)if(r.has(i))return!0;for(const i of a.params)if(s[i]!==w.params[i])return!0;for(const i of a.dependencies)if(ae.some(o=>o(new URL(i))))return!0;return!1}function Ne(e,t){return e?.type==="data"?e:e?.type==="skip"?t??null:null}function fn(e,t){if(!e)return new Set(t.searchParams.keys());const n=new Set([...e.searchParams.keys(),...t.searchParams.keys()]);for(const r of n){const a=e.searchParams.getAll(r),s=t.searchParams.getAll(r);a.every(i=>s.includes(i))&&s.every(i=>a.includes(i))&&n.delete(r)}return n}function Je({error:e,url:t,route:n,params:r}){return{type:"loaded",state:{error:e,url:t,route:n,params:r,branch:[]},props:{page:$e(E),constructors:[]}}}async function dt({id:e,invalidating:t,url:n,params:r,route:a,preload:s}){if(O?.id===e)return 
K.delete(O.token),O.promise;const{errors:i,layouts:o,leaf:c}=a,f=[...o,c];i.forEach(g=>g?.().catch(()=>{})),f.forEach(g=>g?.[1]().catch(()=>{}));let d=null;const h=w.url?e!==ce(w.url):!1,u=w.route?a.id!==w.route.id:!1,l=fn(w.url,n);let p=!1;const _=f.map((g,y)=>{const b=w.branch[y],A=!!g?.[0]&&(b?.loader!==g[1]||Ye(p,u,h,l,b.server?.uses,r));return A&&(p=!0),A});if(_.some(Boolean)){try{d=await gt(n,_)}catch(g){const y=await V(g,{url:n,params:r,route:{id:e}});return K.has(s)?Je({error:y,url:n,params:r,route:a}):de({status:ne(g),error:y,url:n,route:a})}if(d.type==="redirect")return d}const m=d?.nodes;let R=!1;const S=f.map(async(g,y)=>{if(!g)return;const b=w.branch[y],A=m?.[y];if((!A||A.type==="skip")&&g[1]===b?.loader&&!Ye(R,u,h,l,b.universal?.uses,r))return b;if(R=!0,A?.type==="error")throw A;return Oe({loader:g[1],url:n,params:r,route:a,parent:async()=>{const pe={};for(let ge=0;ge{});const U=[];for(let g=0;gPromise.resolve({}),server_data_node:Ne(s)}),c={node:await re(),loader:re,universal:null,server:null,data:null};return ie({url:n,params:a,branch:[o,c],status:e,error:t,route:null})}catch(o){if(o instanceof Se)return Ce(new URL(o.location,location.href),{},0);throw o}}async function dn(e){const t=e.href;if(ee.has(t))return ee.get(t);let n;try{const r=(async()=>{let a=await v.hooks.reroute({url:new URL(e),fetch:async(s,i)=>ut(s,i,e).promise})??e;if(typeof a=="string"){const s=new URL(e);v.hash?s.hash=a:s.pathname=a,a=s}return a})();ee.set(t,r),n=await r}catch{ee.delete(t);return}return n}async function he(e,t){if(e&&!ue(e,L,v.hash)){const n=await dn(e);if(!n)return;const r=hn(n);for(const a of Te){const s=a.exec(r);if(s)return{id:ce(e),invalidating:t,route:a,params:kt(s),url:e}}}}function hn(e){return bt(v.hash?e.hash.replace(/^#/,"").replace(/[?#].+/,""):e.pathname.slice(L.length))||"/"}function ce(e){return(v.hash?e.hash.replace(/^#/,""):e.pathname)+e.search}function ht({url:e,type:t,intent:n,delta:r}){let a=!1;const s=je(w,n,e,t);r!==void 0&&(s.navigation.delta=r);const i={...s.navigation,cancel:()=>{a=!0,s.reject(new Error("navigation cancelled"))}};return X||ot.forEach(o=>o(i)),a?null:s}async function W({type:e,url:t,popped:n,keepfocus:r,noscroll:a,replace_state:s,state:i={},redirect_count:o=0,nav_token:c={},accept:f=Ge,block:d=Ge}){const h=$;$=c;const u=await he(t,!1),l=e==="enter"?je(w,u,t,e):ht({url:t,type:e,delta:n?.delta,intent:u});if(!l){d(),$===c&&($=h);return}const p=k,_=I;f(),X=!0,se&&l.navigation.type!=="enter"&&N.navigating.set(J.current=l.navigation);let m=u&&await dt(u);if(!m){if(ue(t,L,v.hash))return await q(t);m=await pt(t,{id:null},await V(new Re(404,"Not Found",`Not found: ${t.pathname}`),{url:t,params:{},route:{id:null}}),404)}if(t=u?.url||t,$!==c)return l.reject(new Error("navigation aborted")),!1;if(m.type==="redirect")if(o>=20)m=await de({status:500,error:await V(new Error("Redirect loop"),{url:t,params:{},route:{id:null}}),url:t,route:{id:null}});else return await Ce(new URL(m.location,t).href,{},o+1,c),!1;else m.props.page.status>=400&&await N.updated.check()&&(await at(),await q(t));if(cn(),Le(p),ct(_),m.props.page.url.pathname!==t.pathname&&(t.pathname=m.props.page.url.pathname),i=n?n.state:i,!n){const g=s?0:1,y={[F]:k+=g,[Y]:I+=g,[Qe]:i};(s?history.replaceState:history.pushState).call(history,y,"",t),s||on(k,I)}if(O=null,m.props.page.state=i,se){const g=(await Promise.all(Array.from(sn,y=>y(l.navigation)))).filter(y=>typeof y=="function");if(g.length>0){let 
y=function(){g.forEach(b=>{H.delete(b)})};g.push(y),g.forEach(b=>{H.add(b)})}w=m.state,m.props.page&&(m.props.page.url=t),it.$set(m.props),Qt(m.props.page),st=!0}else ft(m,ke,!1);const{activeElement:R}=document;await rn();const S=n?n.scroll:a?fe():null;if(He){const g=t.hash&&document.getElementById(_t(t));S?scrollTo(S.x,S.y):g?g.scrollIntoView():scrollTo(0,0)}const U=document.activeElement!==R&&document.activeElement!==document.body;!r&&!U&&yn(t),He=!0,m.props.page&&Object.assign(E,m.props.page),X=!1,e==="popstate"&<(I),l.fulfil(void 0),H.forEach(g=>g(l.navigation)),N.navigating.set(J.current=null)}async function pt(e,t,n,r){return e.origin===Z&&e.pathname===location.pathname&&!xe?await de({status:r,error:n,url:e,route:t}):await q(e)}function pn(){let e,t,n;C.addEventListener("mousemove",o=>{const c=o.target;clearTimeout(e),e=setTimeout(()=>{s(c,j.hover)},20)});function r(o){o.defaultPrevented||s(o.composedPath()[0],j.tap)}C.addEventListener("mousedown",r),C.addEventListener("touchstart",r,{passive:!0});const a=new IntersectionObserver(o=>{for(const c of o)c.isIntersecting&&(we(new URL(c.target.href)),a.unobserve(c.target))},{threshold:0});async function s(o,c){const f=tt(o,C),d=f===t&&c>=n;if(!f||d)return;const{url:h,external:u,download:l}=ve(f,L,v.hash);if(u||l)return;const p=te(f),_=h&&ce(w.url)===ce(h);if(!(p.reload||_))if(c<=p.preload_data){t=f,n=j.tap;const m=await he(h,!1);if(!m)return;ln(m)}else c<=p.preload_code&&(t=f,n=c,we(h))}function i(){a.disconnect();for(const o of C.querySelectorAll("a")){const{url:c,external:f,download:d}=ve(o,L,v.hash);if(f||d)continue;const h=te(o);h.reload||(h.preload_code===j.viewport&&a.observe(o),h.preload_code===j.eager&&we(c))}}H.add(i),i()}function V(e,t){if(e instanceof le)return e.body;const n=ne(e),r=Xt(e);return v.hooks.handleError({error:e,event:t,status:n,message:r})??{message:r}}function In(e,t={}){return e=new URL(Ue(e)),e.origin!==Z?Promise.reject(new Error("goto: invalid URL")):Ce(e,t,0)}function gn(e){if(typeof e=="function")ae.push(e);else{const{href:t}=new URL(e,location.href);ae.push(n=>n.href===t)}}function mn(){history.scrollRestoration="manual",addEventListener("beforeunload",t=>{let n=!1;if(We(),!X){const r=je(w,void 0,null,"leave"),a={...r.navigation,cancel:()=>{n=!0,r.reject(new Error("navigation cancelled"))}};ot.forEach(s=>s(a))}n?(t.preventDefault(),t.returnValue=""):history.scrollRestoration="auto"}),addEventListener("visibilitychange",()=>{document.visibilityState==="hidden"&&We()}),navigator.connection?.saveData||pn(),C.addEventListener("click",async t=>{if(t.button||t.which!==1||t.metaKey||t.ctrlKey||t.shiftKey||t.altKey||t.defaultPrevented)return;const n=tt(t.composedPath()[0],C);if(!n)return;const{url:r,external:a,target:s,download:i}=ve(n,L,v.hash);if(!r)return;if(s==="_parent"||s==="_top"){if(window.parent!==window)return}else if(s&&s!=="_self")return;const o=te(n);if(!(n instanceof SVGAElement)&&r.protocol!==location.protocol&&!(r.protocol==="https:"||r.protocol==="http:")||i)return;const[f,d]=(v.hash?r.hash.replace(/^#/,""):r.href).split("#"),h=f===me(location);if(a||o.reload&&(!h||!d)){ht({url:r,type:"link"})?X=!0:t.preventDefault();return}if(d!==void 0&&h){const[,u]=w.url.href.split("#");if(u===d){if(t.preventDefault(),d===""||d==="top"&&n.ownerDocument.getElementById("top")===null)window.scrollTo({top:0});else{const l=n.ownerDocument.getElementById(decodeURIComponent(d));l&&(l.scrollIntoView(),l.focus())}return}if(M=!0,Le(k),e(r),!o.replace_state)return;M=!1}t.preventDefault(),await new 
Promise(u=>{requestAnimationFrame(()=>{setTimeout(u,0)}),setTimeout(u,100)}),await W({type:"link",url:r,keepfocus:o.keepfocus,noscroll:o.noscroll,replace_state:o.replace_state??r.href===location.href})}),C.addEventListener("submit",t=>{if(t.defaultPrevented)return;const n=HTMLFormElement.prototype.cloneNode.call(t.target),r=t.submitter;if((r?.formTarget||n.target)==="_blank"||(r?.formMethod||n.method)!=="get")return;const i=new URL(r?.hasAttribute("formaction")&&r?.formAction||n.action);if(ue(i,L,!1))return;const o=t.target,c=te(o);if(c.reload)return;t.preventDefault(),t.stopPropagation();const f=new FormData(o),d=r?.getAttribute("name");d&&f.append(d,r?.getAttribute("value")??""),i.search=new URLSearchParams(f).toString(),W({type:"form",url:i,keepfocus:c.keepfocus,noscroll:c.noscroll,replace_state:c.replace_state??i.href===location.href})}),addEventListener("popstate",async t=>{if(!Ae){if(t.state?.[F]){const n=t.state[F];if($={},n===k)return;const r=D[n],a=t.state[Qe]??{},s=new URL(t.state[jt]??location.href),i=t.state[Y],o=w.url?me(location)===me(w.url):!1;if(i===I&&(st||o)){a!==E.state&&(E.state=a),e(s),D[k]=fe(),r&&scrollTo(r.x,r.y),k=n;return}const f=n-k;await W({type:"popstate",url:s,popped:{state:a,scroll:r,delta:f},accept:()=>{k=n,I=i},block:()=>{history.go(-f)},nav_token:$})}else if(!M){const n=new URL(location.href);e(n),v.hash&&location.reload()}}}),addEventListener("hashchange",()=>{M&&(M=!1,history.replaceState({...history.state,[F]:++k,[Y]:I},"",location.href))});for(const t of document.querySelectorAll("link"))an.has(t.rel)&&(t.href=t.href);addEventListener("pageshow",t=>{t.persisted&&N.navigating.set(J.current=null)});function e(t){w.url=E.url=t,N.page.set($e(E)),N.page.notify()}}async function _n(e,{status:t=200,error:n,node_ids:r,params:a,route:s,server_route:i,data:o,form:c}){xe=!0;const f=new URL(location.href);let d;({params:a={},route:s={id:null}}=await he(f,!1)||{}),d=Te.find(({id:l})=>l===s.id);let h,u=!0;try{const l=r.map(async(_,m)=>{const R=o[m];return R?.uses&&(R.uses=mt(R.uses)),Oe({loader:v.nodes[_],url:f,params:a,route:s,parent:async()=>{const S={};for(let U=0;Us?"1":"0").join(""));const r=window.fetch,a=await r(n.href,{});if(!a.ok){let s;throw a.headers.get("content-type")?.includes("application/json")?s=await a.json():a.status===404?s="Not Found":a.status===500&&(s="Internal Error"),new le(a.status,s)}return new Promise(async s=>{const i=new Map,o=a.body.getReader(),c=new TextDecoder;function f(h){return Kt(h,{...v.decoders,Promise:u=>new Promise((l,p)=>{i.set(u,{fulfil:l,reject:p})})})}let d="";for(;;){const{done:h,value:u}=await o.read();if(h&&!d)break;for(d+=!u&&d?` -`:c.decode(u,{stream:!0});;){const l=d.indexOf(` -`);if(l===-1)break;const p=JSON.parse(d.slice(0,l));if(d=d.slice(l+1),p.type==="redirect")return s(p);if(p.type==="data")p.nodes?.forEach(_=>{_?.type==="data"&&(_.uses=mt(_.uses),_.data=f(_.data))}),s(p);else if(p.type==="chunk"){const{id:_,data:m,error:R}=p,S=i.get(_);i.delete(_),R?S.reject(f(R)):S.fulfil(f(m))}}}})}function mt(e){return{dependencies:new Set(e?.dependencies??[]),params:new Set(e?.params??[]),parent:!!e?.parent,route:!!e?.route,url:!!e?.url,search_params:new Set(e?.search_params??[])}}let Ae=!1;function yn(e){const t=document.querySelector("[autofocus]");if(t)t.focus();else{const n=_t(e);if(n&&document.getElementById(n)){const{x:a,y:s}=fe();setTimeout(()=>{const i=history.state;Ae=!0,location.replace(`#${n}`),v.hash&&location.replace(e.hash),history.replaceState(i,"",e.hash),scrollTo(a,s),Ae=!1})}else{const 
a=document.body,s=a.getAttribute("tabindex");a.tabIndex=-1,a.focus({preventScroll:!0,focusVisible:!1}),s!==null?a.setAttribute("tabindex",s):a.removeAttribute("tabindex")}const r=getSelection();if(r&&r.type!=="None"){const a=[];for(let s=0;s{if(r.rangeCount===a.length){for(let s=0;s{a=c,s=f});return i.catch(()=>{}),{navigation:{from:{params:e.params,route:{id:e.route?.id??null},url:e.url},to:n&&{params:t?.params??null,route:{id:t?.route?.id??null},url:n},willUnload:!t,type:r,complete:i},fulfil:a,reject:s}}function $e(e){return{data:e.data,error:e.error,form:e.form,params:e.params,route:e.route,state:e.state,status:e.status,url:e.url}}function wn(e){const t=new URL(e);return t.hash=decodeURIComponent(e.hash),t}function _t(e){let t;if(v.hash){const[,,n]=e.hash.split("#",3);t=n??""}else t=e.hash.slice(1);return decodeURIComponent(t)}export{Rn as a,In as g,kn as l,E as p,N as s}; diff --git a/webapp/assets/_app/immutable/chunks/CclkODgu.js b/webapp/assets/_app/immutable/chunks/CclkODgu.js deleted file mode 100644 index f3c6b3c0..00000000 --- a/webapp/assets/_app/immutable/chunks/CclkODgu.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as Oe}from"./B3Pzt0F_.js";import{p as qe,E as Ie,o as Ke,f as x,j as t,r,k as o,g as e,m,z as B,t as y,x as ue,u as c,v,n as T,s as i,e as be,c as u,D as Ge,d as He}from"./D8EpLgQ1.js";import{p as ge,i as $}from"./5WA7h8uK.js";import{e as Je,i as Qe}from"./u94nIB4-.js";import{r as me,b as ye,g as Ve}from"./CiE1LlKV.js";import{a as Xe,b as Ye}from"./C6k1Q4We.js";import{p as Ze}from"./D4Caz1gY.js";import{M as ea}from"./qB7B8uiS.js";var aa=x('

                '),ta=x('
                Owner:
                '),ra=x('
                '),sa=x(""),oa=x(''),na=x('

                Leave empty to auto-generate a new secret

                '),ia=x('
                Updating...
                '),da=x('

                Name:
                Endpoint:
                Current Credentials:
                Current Pool Balancer:

                Leave unchanged to keep current credentials

                Round Robin distributes jobs evenly across pools, Pack fills pools in order

                ');function xa(xe,D){qe(D,!1);let d=ge(D,"entity",8),k=ge(D,"entityType",8);const P=Ie();let C=m(!1),w=m(""),M=m([]),R=m(!1),f=m(""),_=m(""),h=m(""),b=m(!1);function fe(){if(k()==="repository"){const l=d();return`${l.owner}/${l.name}`}return d().name||""}function W(){return k().charAt(0).toUpperCase()+k().slice(1)}function _e(){return k()==="repository"&&d().owner||""}async function he(){try{i(R,!0),i(M,await Ve.listCredentials())}catch(l){i(w,l instanceof Error?l.message:"Failed to load credentials")}finally{i(R,!1)}}function ke(){i(f,d().credentials_name||""),i(_,d().pool_balancing_type||"roundrobin"),i(h,""),i(b,!1)}async function we(){try{i(C,!0),i(w,"");const l={};let E=!1;if(e(f)&&e(f)!==d().credentials_name&&(l.credentials_name=e(f),E=!0),e(_)&&e(_)!==d().pool_balancing_type&&(l.pool_balancer_type=e(_),E=!0),e(b)){if(!e(h).trim()){i(w,"Please enter a webhook secret or uncheck the option to change it");return}l.webhook_secret=e(h),E=!0}if(!E){P("close");return}P("submit",l)}catch(l){i(w,l instanceof Error?l.message:`Failed to update ${k()}`)}finally{i(C,!1)}}Ke(()=>{he(),ke()}),Oe(),ea(xe,{$$events:{close:()=>P("close")},children:(l,E)=>{var j=da(),F=t(j),N=t(F),Ce=t(N);r(N);var Y=o(N,2),Ee=t(Y,!0);r(Y),r(F);var z=o(F,2),Z=t(z);{var Se=a=>{var s=aa(),n=t(s),p=t(n,!0);r(n),r(s),y(()=>v(p,e(w))),u(a,s)};$(Z,a=>{e(w)&&a(Se)})}var A=o(Z,2),L=t(A),Ue=t(L);r(L);var ee=o(L,2),ae=t(ee);{var $e=a=>{var s=ta(),n=o(t(s),2),p=t(n,!0);r(n),r(s),y(S=>v(p,S),[()=>c(_e)]),u(a,s)};$(ae,a=>{k()==="repository"&&a($e)})}var O=o(ae,2),te=o(t(O),2),Pe=t(te,!0);r(te),r(O);var q=o(O,2),re=o(t(q),2),Be=t(re,!0);r(re),r(q);var I=o(q,2),se=o(t(I),2),Te=t(se,!0);r(se),r(I);var oe=o(I,2),ne=o(t(oe),2),De=t(ne,!0);r(ne),r(oe),r(ee),r(A);var K=o(A,2),G=t(K),Me=o(t(G),2);{var Re=a=>{var s=ra();u(a,s)},We=a=>{var s=oa();y(()=>{e(f),ue(()=>{e(M)})});var n=t(s);n.value=n.__value="";var p=o(n);Je(p,1,()=>e(M),Qe,(S,g)=>{var U=sa(),Le=t(U);r(U);var pe={};y(()=>{v(Le,`${e(g),c(()=>e(g).name)??""} (${e(g),c(()=>e(g).endpoint?.name||"Unknown")??""})`),pe!==(pe=(e(g),c(()=>e(g).name)))&&(U.value=(U.__value=(e(g),c(()=>e(g).name)))??"")}),u(S,U)}),r(s),ye(s,()=>e(f),S=>i(f,S)),u(a,s)};$(Me,a=>{e(R)?a(Re):a(We,!1)})}B(2),r(G);var H=o(G,2),J=o(t(H),2);y(()=>{e(_),ue(()=>{})});var Q=t(J);Q.value=Q.__value="roundrobin";var ie=o(Q);ie.value=ie.__value="pack",r(J),B(2),r(H);var de=o(H,2),V=t(de),le=t(V);me(le),B(2),r(V);var je=o(V,2);{var Fe=a=>{var s=na(),n=o(t(s),2);me(n),B(2),r(s),y(()=>n.required=e(b)),Ye(n,()=>e(h),p=>i(h,p)),u(a,s)};$(je,a=>{e(b)&&a(Fe)})}r(de),r(K);var ce=o(K,2),ve=t(ce),X=o(ve,2),Ne=t(X);{var ze=a=>{var s=ia();u(a,s)},Ae=a=>{var s=Ge();y(n=>v(s,`Update ${n??""}`),[()=>c(W)]),u(a,s)};$(Ne,a=>{e(C)?a(ze):a(Ae,!1)})}r(X),r(ce),r(z),r(j),y((a,s,n,p)=>{v(Ce,`Update ${a??""}`),v(Ee,s),v(Ue,`${n??""} Information`),v(Pe,(T(d()),c(()=>d().name))),v(Be,(T(d()),c(()=>d().endpoint?.name))),v(Te,(T(d()),c(()=>d().credentials_name))),v(De,(T(d()),c(()=>d().pool_balancing_type||"roundrobin"))),X.disabled=p},[()=>c(W),()=>c(fe),()=>c(W),()=>(e(C),e(b),e(h),c(()=>e(C)||e(b)&&!e(h).trim()))]),ye(J,()=>e(_),a=>i(_,a)),Xe(le,()=>e(b),a=>i(b,a)),be("click",ve,()=>P("close")),be("submit",z,Ze(we)),u(l,j)},$$slots:{default:!0}}),He()}export{xa as U}; diff --git a/webapp/assets/_app/immutable/chunks/CoIRRsD9.js b/webapp/assets/_app/immutable/chunks/CoIRRsD9.js deleted file mode 100644 index 8cbc8c4b..00000000 --- a/webapp/assets/_app/immutable/chunks/CoIRRsD9.js +++ /dev/null @@ -1 +0,0 @@ -const 
s=globalThis.__sveltekit_13hoftk?.base??"/ui",t=globalThis.__sveltekit_13hoftk?.assets??s;export{t as a,s as b}; diff --git a/webapp/assets/_app/immutable/chunks/CwqI2jFH.js b/webapp/assets/_app/immutable/chunks/CwqI2jFH.js deleted file mode 100644 index 4cf31304..00000000 --- a/webapp/assets/_app/immutable/chunks/CwqI2jFH.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as Dr}from"./B3Pzt0F_.js";import{p as Lr,E as qr,m as s,o as Gr,f as m,k as r,j as o,g as e,r as a,t as v,e as M,c as b,v as T,b as Jr,z as gr,x as W,u as p,s as d,D as Ie,d as Fr}from"./D8EpLgQ1.js";import{p as vr,i as z}from"./5WA7h8uK.js";import{e as Ae,i as $e}from"./u94nIB4-.js";import{s as Oe,r as h,b as Q,g as C,c as Nr}from"./CiE1LlKV.js";import{b as E,a as Vr}from"./C6k1Q4We.js";import{p as Kr}from"./D4Caz1gY.js";import{M as Ur}from"./qB7B8uiS.js";import{J as Wr}from"./DZblzgqm.js";var Qr=m('

                '),Xr=m('
                '),Yr=m(""),Zr=m(''),et=m('
                '),rt=m(""),tt=m(''),at=m(' '),ot=m('
                '),dt=m('

                Entity & Provider Configuration

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Extra Specs (JSON)
                ',1),it=m('
                Creating...
                '),st=m('

                Create New Pool

                Entity Level *
                ');function yt(pr,X){Lr(X,!1);const Y=qr();let He=vr(X,"initialEntityType",8,""),mr=vr(X,"initialEntityId",8,""),G=s(!1),P=s(""),n=s(He()),S=s([]),Z=s([]),ee=s(!1),re=s(!1),k=s(mr()),j=s(""),B=s(""),D=s(""),te=s(void 0),ae=s(void 0),oe=s(void 0),de=s(100),ie=s("garm"),J=s("linux"),F=s("amd64"),se=s(""),le=s(!0),_=s([]),I=s(""),L=s("{}");async function fr(){try{d(re,!0),d(Z,await C.listProviders())}catch(l){d(P,l instanceof Error?l.message:"Failed to load providers")}finally{d(re,!1)}}async function Be(){if(e(n))try{switch(d(ee,!0),d(S,[]),e(n)){case"repository":d(S,await C.listRepositories());break;case"organization":d(S,await C.listOrganizations());break;case"enterprise":d(S,await C.listEnterprises());break}}catch(l){d(P,l instanceof Error?l.message:`Failed to load ${e(n)}s`)}finally{d(ee,!1)}}function ne(l){e(n)!==l&&(d(n,l),d(k,""),Be())}function De(){e(I).trim()&&!e(_).includes(e(I).trim())&&(d(_,[...e(_),e(I).trim()]),d(I,""))}function yr(l){d(_,e(_).filter((A,w)=>w!==l))}function xr(l){l.key==="Enter"&&(l.preventDefault(),De())}async function hr(){if(!e(n)||!e(k)||!e(j)||!e(B)||!e(D)){d(P,"Please fill in all required fields");return}try{d(G,!0),d(P,"");let l={};if(e(L).trim())try{l=JSON.parse(e(L))}catch{throw new Error("Invalid JSON in extra specs")}const A={provider_name:e(j),image:e(B),flavor:e(D),max_runners:e(te)||10,min_idle_runners:e(ae)||0,runner_bootstrap_timeout:e(oe)||20,priority:e(de),runner_prefix:e(ie),os_type:e(J),os_arch:e(F),"github-runner-group":e(se)||void 0,enabled:e(le),tags:e(_),extra_specs:e(L).trim()?l:void 0};let w;switch(e(n)){case"repository":w=await C.createRepositoryPool(e(k),A);break;case"organization":w=await C.createOrganizationPool(e(k),A);break;case"enterprise":w=await C.createEnterprisePool(e(k),A);break;default:throw new Error("Invalid entity level")}Y("submit",A)}catch(l){d(P,l instanceof Error?l.message:"Failed to create pool")}finally{d(G,!1)}}Gr(()=>{fr(),He()&&Be()}),Dr(),Ur(pr,{$$events:{close:()=>Y("close")},children:(l,A)=>{var w=st(),N=r(o(w),2),Le=o(N);{var kr=c=>{var y=Qr(),$=o(y),V=o($,!0);a($),a(y),v(()=>T(V,e(P))),b(c,y)};z(Le,c=>{e(P)&&c(kr)})}var ue=r(Le,2),qe=r(o(ue),2),be=o(qe),ce=r(be,2),Ge=r(ce,2);a(qe),a(ue);var Je=r(ue,2);{var _r=c=>{var y=dt(),$=Jr(y),V=r(o($),2),ve=o(V),pe=o(ve),Pr=o(pe);gr(),a(pe);var Rr=r(pe,2);{var Tr=t=>{var u=Xr();b(t,u)},zr=t=>{var u=Zr();v(()=>{e(k),W(()=>{e(n),e(S)})});var f=o(u),O=o(f);a(f),f.value=f.__value="";var R=r(f);Ae(R,1,()=>e(S),$e,(g,i)=>{var x=Yr(),U=o(x);{var Hr=H=>{var q=Ie();v(()=>T(q,`${e(i),p(()=>e(i).owner)??""}/${e(i),p(()=>e(i).name)??""} (${e(i),p(()=>e(i).endpoint?.name)??""})`)),b(H,q)},Br=H=>{var q=Ie();v(()=>T(q,`${e(i),p(()=>e(i).name)??""} (${e(i),p(()=>e(i).endpoint?.name)??""})`)),b(H,q)};z(U,H=>{e(n)==="repository"?H(Hr):H(Br,!1)})}a(x);var cr={};v(()=>{cr!==(cr=(e(i),p(()=>e(i).id)))&&(x.value=(x.__value=(e(i),p(()=>e(i).id)))??"")}),b(g,x)}),a(u),v(()=>T(O,`Select a ${e(n)??""}`)),Q(u,()=>e(k),g=>d(k,g)),b(t,u)};z(Rr,t=>{e(ee)?t(Tr):t(zr,!1)})}a(ve);var Ve=r(ve,2),Cr=r(o(Ve),2);{var Sr=t=>{var u=et();b(t,u)},jr=t=>{var u=tt();v(()=>{e(j),W(()=>{e(Z)})});var f=o(u);f.value=f.__value="";var O=r(f);Ae(O,1,()=>e(Z),$e,(R,g)=>{var i=rt(),x=o(i,!0);a(i);var U={};v(()=>{T(x,(e(g),p(()=>e(g).name))),U!==(U=(e(g),p(()=>e(g).name)))&&(i.value=(i.__value=(e(g),p(()=>e(g).name)))??"")}),b(R,i)}),a(u),Q(u,()=>e(j),R=>d(j,R)),b(t,u)};z(Cr,t=>{e(re)?t(Sr):t(jr,!1)})}a(Ve),a(V),a($);var me=r($,2),Ke=r(o(me),2),fe=o(Ke),Ue=r(o(fe),2);h(Ue),a(fe);var 
ye=r(fe,2),We=r(o(ye),2);h(We),a(ye);var xe=r(ye,2),he=r(o(xe),2);v(()=>{e(J),W(()=>{})});var ke=o(he);ke.value=ke.__value="linux";var Qe=r(ke);Qe.value=Qe.__value="windows",a(he),a(xe);var Xe=r(xe,2),_e=r(o(Xe),2);v(()=>{e(F),W(()=>{})});var we=o(_e);we.value=we.__value="amd64";var Ye=r(we);Ye.value=Ye.__value="arm64",a(_e),a(Xe),a(Ke),a(me);var Ee=r(me,2),Ze=r(o(Ee),2),Me=o(Ze),er=r(o(Me),2);h(er),a(Me);var Pe=r(Me,2),rr=r(o(Pe),2);h(rr),a(Pe);var tr=r(Pe,2),ar=r(o(tr),2);h(ar),a(tr),a(Ze),a(Ee);var or=r(Ee,2),Re=r(o(or),2),Te=o(Re),dr=r(o(Te),2);h(dr),a(Te);var ze=r(Te,2),ir=r(o(ze),2);h(ir),a(ze);var sr=r(ze,2),lr=r(o(sr),2);h(lr),a(sr),a(Re);var Ce=r(Re,2),nr=r(o(Ce),2),Se=o(nr),K=o(Se);h(K);var Ir=r(K,2);a(Se);var Ar=r(Se,2);{var $r=t=>{var u=ot();Ae(u,5,()=>e(_),$e,(f,O,R)=>{var g=at(),i=o(g),x=r(i);a(g),v(()=>{T(i,`${e(O)??""} `),Nr(x,"aria-label",`Remove tag ${e(O)}`)}),M("click",x,()=>yr(R)),b(f,g)}),a(u),b(t,u)};z(Ar,t=>{e(_),p(()=>e(_).length>0)&&t($r)})}a(nr),a(Ce);var je=r(Ce,2),Or=r(o(je),2);Wr(Or,{rows:4,placeholder:"{}",get value(){return e(L)},set value(t){d(L,t)},$$legacy:!0}),a(je);var ur=r(je,2),br=o(ur);h(br),gr(2),a(ur),a(or),v(t=>T(Pr,`${t??""} `),[()=>(e(n),p(()=>e(n).charAt(0).toUpperCase()+e(n).slice(1)))]),E(Ue,()=>e(B),t=>d(B,t)),E(We,()=>e(D),t=>d(D,t)),Q(he,()=>e(J),t=>d(J,t)),Q(_e,()=>e(F),t=>d(F,t)),E(er,()=>e(ae),t=>d(ae,t)),E(rr,()=>e(te),t=>d(te,t)),E(ar,()=>e(oe),t=>d(oe,t)),E(dr,()=>e(ie),t=>d(ie,t)),E(ir,()=>e(de),t=>d(de,t)),E(lr,()=>e(se),t=>d(se,t)),E(K,()=>e(I),t=>d(I,t)),M("keydown",K,xr),M("click",Ir,De),Vr(br,()=>e(le),t=>d(le,t)),b(c,y)};z(Je,c=>{e(n)&&c(_r)})}var Fe=r(Je,2),Ne=o(Fe),ge=r(Ne,2),wr=o(ge);{var Er=c=>{var y=it();b(c,y)},Mr=c=>{var y=Ie("Create Pool");b(c,y)};z(wr,c=>{e(G)?c(Er):c(Mr,!1)})}a(ge),a(Fe),a(N),a(w),v(()=>{Oe(be,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="repository"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Oe(ce,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="organization"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Oe(Ge,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="enterprise"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),ge.disabled=e(G)||!e(n)||!e(k)||!e(j)||!e(B)||!e(D)}),M("click",be,()=>ne("repository")),M("click",ce,()=>ne("organization")),M("click",Ge,()=>ne("enterprise")),M("click",Ne,()=>Y("close")),M("submit",N,Kr(hr)),b(l,w)},$$slots:{default:!0}}),Fr()}export{yt as C}; diff --git a/webapp/assets/_app/immutable/chunks/B7ITzBt8.js b/webapp/assets/_app/immutable/chunks/D4PaGKsV.js similarity index 96% rename from webapp/assets/_app/immutable/chunks/B7ITzBt8.js rename to webapp/assets/_app/immutable/chunks/D4PaGKsV.js index 8b2103d3..4bd0902d 100644 --- a/webapp/assets/_app/immutable/chunks/B7ITzBt8.js +++ b/webapp/assets/_app/immutable/chunks/D4PaGKsV.js @@ -1 +1 @@ -import"./DsnmJJEf.js";import{i as j}from"./B3Pzt0F_.js";import{p as R,l as w,a as q,f as g,t as v,c as k,d as A,k as B,j as u,s as _,m as y,r as m,n as f,u as b,g as d,v as h}from"./D8EpLgQ1.js";import{p as o,i as D}from"./5WA7h8uK.js";import{c as U,s as 
F}from"./CiE1LlKV.js";import{b as r}from"./CoIRRsD9.js";var G=g('
                '),H=g('');function V(x,n){R(n,!1);const i=y(),p=y();let e=o(n,"item",8),s=o(n,"entityType",8,"repository"),$=o(n,"showOwner",8,!1),E=o(n,"showId",8,!1),I=o(n,"fontMono",8,!1);function z(){if(!e())return"Unknown";switch(s()){case"repository":return $()?`${e().owner||"Unknown"}/${e().name||"Unknown"}`:e().name||"Unknown";case"organization":case"enterprise":return e().name||"Unknown";case"pool":return E()?e().id||"Unknown":e().name||"Unknown";case"scaleset":return e().name||"Unknown";case"instance":return e().name||"Unknown";default:return e().name||e().id||"Unknown"}}function C(){if(!e())return"#";let t;switch(s()){case"instance":t=e().name;break;default:t=e().id||e().name;break}if(!t)return"#";switch(s()){case"repository":return`${r}/repositories/${t}`;case"organization":return`${r}/organizations/${t}`;case"enterprise":return`${r}/enterprises/${t}`;case"pool":return`${r}/pools/${t}`;case"scaleset":return`${r}/scalesets/${t}`;case"instance":return`${r}/instances/${encodeURIComponent(t)}`;default:return"#"}}w(()=>{},()=>{_(i,z())}),w(()=>{},()=>{_(p,C())}),q(),j();var c=H(),a=u(c),M=u(a,!0);m(a);var N=B(a,2);{var O=t=>{var l=G(),T=u(l,!0);m(l),v(()=>h(T,(f(e()),b(()=>e().provider_id)))),k(t,l)};D(N,t=>{f(s()),f(e()),b(()=>s()==="instance"&&e()?.provider_id)&&t(O)})}m(c),v(()=>{U(a,"href",d(p)),F(a,1,`block w-full truncate text-blue-600 dark:text-blue-400 hover:text-blue-500 dark:hover:text-blue-300 ${I()?"font-mono":""}`),U(a,"title",d(i)),h(M,d(i))}),k(x,c),A()}export{V as E}; +import"./DsnmJJEf.js";import{i as j}from"./B3Pzt0F_.js";import{p as R,l as w,a as q,f as g,t as v,c as k,d as A,k as B,j as u,s as _,m as y,r as m,n as f,u as b,g as d,v as h}from"./D8EpLgQ1.js";import{p as o,i as D}from"./5WA7h8uK.js";import{c as U,s as F}from"./CiE1LlKV.js";import{b as r}from"./CRhkqW2i.js";var G=g('
                '),H=g('');function V(x,n){R(n,!1);const i=y(),p=y();let e=o(n,"item",8),s=o(n,"entityType",8,"repository"),$=o(n,"showOwner",8,!1),E=o(n,"showId",8,!1),I=o(n,"fontMono",8,!1);function z(){if(!e())return"Unknown";switch(s()){case"repository":return $()?`${e().owner||"Unknown"}/${e().name||"Unknown"}`:e().name||"Unknown";case"organization":case"enterprise":return e().name||"Unknown";case"pool":return E()?e().id||"Unknown":e().name||"Unknown";case"scaleset":return e().name||"Unknown";case"instance":return e().name||"Unknown";default:return e().name||e().id||"Unknown"}}function C(){if(!e())return"#";let t;switch(s()){case"instance":t=e().name;break;default:t=e().id||e().name;break}if(!t)return"#";switch(s()){case"repository":return`${r}/repositories/${t}`;case"organization":return`${r}/organizations/${t}`;case"enterprise":return`${r}/enterprises/${t}`;case"pool":return`${r}/pools/${t}`;case"scaleset":return`${r}/scalesets/${t}`;case"instance":return`${r}/instances/${encodeURIComponent(t)}`;default:return"#"}}w(()=>{},()=>{_(i,z())}),w(()=>{},()=>{_(p,C())}),q(),j();var c=H(),a=u(c),M=u(a,!0);m(a);var N=B(a,2);{var O=t=>{var l=G(),T=u(l,!0);m(l),v(()=>h(T,(f(e()),b(()=>e().provider_id)))),k(t,l)};D(N,t=>{f(s()),f(e()),b(()=>s()==="instance"&&e()?.provider_id)&&t(O)})}m(c),v(()=>{U(a,"href",d(p)),F(a,1,`block w-full truncate text-blue-600 dark:text-blue-400 hover:text-blue-500 dark:hover:text-blue-300 ${I()?"font-mono":""}`),U(a,"title",d(i)),h(M,d(i))}),k(x,c),A()}export{V as E}; diff --git a/webapp/assets/_app/immutable/chunks/DQP15tlf.js b/webapp/assets/_app/immutable/chunks/DQP15tlf.js deleted file mode 100644 index 73a0c7e8..00000000 --- a/webapp/assets/_app/immutable/chunks/DQP15tlf.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as cr}from"./B3Pzt0F_.js";import{p as pr,E as vr,m as u,o as fr,s as n,f as E,j as d,r as t,k as a,g as e,t as _,x as He,u as h,z as mr,n as D,v as k,e as w,c as m,D as yr,d as xr}from"./D8EpLgQ1.js";import{p as _r,i as ge,s as hr,a as kr}from"./5WA7h8uK.js";import{e as wr,i as Er}from"./u94nIB4-.js";import{r as c,b as Ke,c as Rr}from"./CiE1LlKV.js";import{b as p,a as $r}from"./C6k1Q4We.js";import{p as Sr}from"./D4Caz1gY.js";import{M as Tr}from"./qB7B8uiS.js";import{J as Or}from"./DZblzgqm.js";import{e as Pr}from"./wyaP0EDu.js";var Jr=E('

                '),Mr=E(' '),Nr=E('
                '),Ur=E('
                Updating...
                '),Ar=E('

                Pool Information (Read-only)

                Provider:
                Entity:

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Tags
                Extra Specs (JSON)
                ');function Wr(We,ce){pr(ce,!1);const[qe,Qe]=hr(),B=()=>kr(Pr,"$eagerCache",qe);let o=_r(ce,"pool",8);const G=vr();let R=u(!1),$=u(""),S=u(o().image||""),T=u(o().flavor||""),O=u(o().max_runners),P=u(o().min_idle_runners),J=u(o().runner_bootstrap_timeout),M=u(o().priority),N=u(o().runner_prefix||""),y=u(o().os_type||"linux"),x=u(o().os_arch||"amd64"),U=u(o()["github-runner-group"]||""),A=u(o().enabled),g=u((o().tags||[]).map(i=>i.name||"").filter(Boolean)),f=u(""),v=u("{}");function Ve(i){if(i.repo_id){const s=B().repositories.find(l=>l.id===i.repo_id);return s?`${s.owner}/${s.name}`:"Unknown Entity"}if(i.org_id){const s=B().organizations.find(l=>l.id===i.org_id);return s&&s.name?s.name:"Unknown Entity"}if(i.enterprise_id){const s=B().enterprises.find(l=>l.id===i.enterprise_id);return s&&s.name?s.name:"Unknown Entity"}return"Unknown Entity"}function Xe(i){return i.repo_id?"Repository":i.org_id?"Organization":i.enterprise_id?"Enterprise":"Unknown"}fr(()=>{if(o().extra_specs)try{if(typeof o().extra_specs=="object")n(v,JSON.stringify(o().extra_specs,null,2));else{const i=JSON.parse(o().extra_specs);n(v,JSON.stringify(i,null,2))}}catch{n(v,o().extra_specs||"{}")}});function pe(){e(f).trim()&&!e(g).includes(e(f).trim())&&(n(g,[...e(g),e(f).trim()]),n(f,""))}function Ye(i){n(g,e(g).filter((s,l)=>l!==i))}function Ze(i){i.key==="Enter"&&(i.preventDefault(),pe())}async function er(){try{n(R,!0),n($,"");let i={};if(e(v).trim())try{i=JSON.parse(e(v))}catch{throw new Error("Invalid JSON in extra specs")}const s={image:e(S)!==o().image?e(S):void 0,flavor:e(T)!==o().flavor?e(T):void 0,max_runners:e(O)!==o().max_runners?e(O):void 0,min_idle_runners:e(P)!==o().min_idle_runners?e(P):void 0,runner_bootstrap_timeout:e(J)!==o().runner_bootstrap_timeout?e(J):void 0,priority:e(M)!==o().priority?e(M):void 0,runner_prefix:e(N)!==o().runner_prefix?e(N):void 0,os_type:e(y)!==o().os_type?e(y):void 0,os_arch:e(x)!==o().os_arch?e(x):void 0,"github-runner-group":e(U)!==o()["github-runner-group"]&&e(U)||void 0,enabled:e(A)!==o().enabled?e(A):void 0,tags:JSON.stringify(e(g))!==JSON.stringify((o().tags||[]).map(l=>l.name||"").filter(Boolean))?e(g):void 0,extra_specs:e(v).trim()!==JSON.stringify(o().extra_specs||{},null,2).trim()?i:void 0};Object.keys(s).forEach(l=>{s[l]===void 0&&delete s[l]}),G("submit",s)}catch(i){n($,i instanceof Error?i.message:"Failed to update pool")}finally{n(R,!1)}}cr(),Tr(We,{$$events:{close:()=>G("close")},children:(i,s)=>{var l=Ar(),z=d(l),ve=d(z),rr=d(ve);t(ve),t(z);var L=a(z,2),fe=d(L);{var tr=r=>{var b=Jr(),j=d(b),C=d(j,!0);t(j),t(b),_(()=>k(C,e($))),m(r,b)};ge(fe,r=>{e($)&&r(tr)})}var F=a(fe,2),me=a(d(F),2),H=d(me),ye=a(d(H),2),ar=d(ye,!0);t(ye),t(H);var xe=a(H,2),_e=a(d(xe),2),dr=d(_e);t(_e),t(xe),t(me),t(F);var K=a(F,2),he=a(d(K),2),W=d(he),ke=a(d(W),2);c(ke),t(W);var q=a(W,2),we=a(d(q),2);c(we),t(q);var Q=a(q,2),V=a(d(Q),2);_(()=>{e(y),He(()=>{})});var X=d(V);X.value=X.__value="linux";var Ee=a(X);Ee.value=Ee.__value="windows",t(V),t(Q);var Re=a(Q,2),Y=a(d(Re),2);_(()=>{e(x),He(()=>{})});var Z=d(Y);Z.value=Z.__value="amd64";var $e=a(Z);$e.value=$e.__value="arm64",t(Y),t(Re),t(he),t(K);var ee=a(K,2),Se=a(d(ee),2),re=d(Se),Te=a(d(re),2);c(Te),t(re);var te=a(re,2),Oe=a(d(te),2);c(Oe),t(te);var Pe=a(te,2),Je=a(d(Pe),2);c(Je),t(Pe),t(Se),t(ee);var ae=a(ee,2),de=a(d(ae),2),oe=d(de),Me=a(d(oe),2);c(Me),t(oe);var ie=a(oe,2),Ne=a(d(ie),2);c(Ne),t(ie);var Ue=a(ie,2),Ae=a(d(Ue),2);c(Ae),t(Ue),t(de);var ne=a(de,2),Ie=d(ne),je=a(d(Ie),2),se=d(je),I=d(se);c(I);var or=a(I,2);t(se);var 
ir=a(se,2);{var nr=r=>{var b=Nr();wr(b,5,()=>e(g),Er,(j,C,gr)=>{var be=Mr(),Le=d(be),Fe=a(Le);t(be),_(()=>{k(Le,`${e(C)??""} `),Rr(Fe,"aria-label",`Remove tag ${e(C)??""}`)}),w("click",Fe,()=>Ye(gr)),m(j,be)}),t(b),m(r,b)};ge(ir,r=>{e(g),h(()=>e(g).length>0)&&r(nr)})}t(je),t(Ie),t(ne);var le=a(ne,2),Ce=d(le),sr=a(d(Ce),2);Or(sr,{rows:4,placeholder:"{}",get value(){return e(v)},set value(r){n(v,r)},$$legacy:!0}),t(Ce),t(le);var De=a(le,2),Be=d(De);c(Be),mr(2),t(De),t(ae);var Ge=a(ae,2),ze=d(Ge),ue=a(ze,2),lr=d(ue);{var ur=r=>{var b=Ur();m(r,b)},br=r=>{var b=yr("Update Pool");m(r,b)};ge(lr,r=>{e(R)?r(ur):r(br,!1)})}t(ue),t(Ge),t(L),t(l),_((r,b)=>{k(rr,`Update Pool ${D(o()),h(()=>o().id)??""}`),k(ar,(D(o()),h(()=>o().provider_name))),k(dr,`${r??""}: ${b??""}`),ue.disabled=e(R)},[()=>(D(o()),h(()=>Xe(o()))),()=>(D(o()),h(()=>Ve(o())))]),p(ke,()=>e(S),r=>n(S,r)),p(we,()=>e(T),r=>n(T,r)),Ke(V,()=>e(y),r=>n(y,r)),Ke(Y,()=>e(x),r=>n(x,r)),p(Te,()=>e(P),r=>n(P,r)),p(Oe,()=>e(O),r=>n(O,r)),p(Je,()=>e(J),r=>n(J,r)),p(Me,()=>e(N),r=>n(N,r)),p(Ne,()=>e(M),r=>n(M,r)),p(Ae,()=>e(U),r=>n(U,r)),p(I,()=>e(f),r=>n(f,r)),w("keydown",I,Ze),w("click",or,pe),$r(Be,()=>e(A),r=>n(A,r)),w("click",ze,()=>G("close")),w("submit",L,Sr(er)),m(i,l)},$$slots:{default:!0}}),xr(),Qe()}export{Wr as U}; diff --git a/webapp/assets/_app/immutable/chunks/DXCC0cSN.js b/webapp/assets/_app/immutable/chunks/DXCC0cSN.js new file mode 100644 index 00000000..9618e15f --- /dev/null +++ b/webapp/assets/_app/immutable/chunks/DXCC0cSN.js @@ -0,0 +1,3 @@ +import{I as Ee,o as De,aR as T,g as x,s as P,bf as yt,aS as Be}from"./D8EpLgQ1.js";import{a as wt,b as L}from"./CRhkqW2i.js";class le{constructor(t,n){this.status=t,typeof n=="string"?this.body={message:n}:n?this.body=n:this.body={message:`Error: ${t}`}}toString(){return JSON.stringify(this.body)}}class Se{constructor(t,n){this.status=t,this.location=n}}class Re extends Error{constructor(t,n,r){super(r),this.status=t,this.text=n}}new URL("sveltekit-internal://");function vt(e,t){return e==="/"||t==="ignore"?e:t==="never"?e.endsWith("/")?e.slice(0,-1):e:t==="always"&&!e.endsWith("/")?e+"/":e}function bt(e){return e.split("%25").map(decodeURI).join("%25")}function kt(e){for(const t in e)e[t]=decodeURIComponent(e[t]);return e}function me({href:e}){return e.split("#")[0]}function At(e,t,n,r=!1){const a=new URL(e);Object.defineProperty(a,"searchParams",{value:new Proxy(a.searchParams,{get(i,o){if(o==="get"||o==="getAll"||o==="has")return f=>(n(f),i[o](f));t();const c=Reflect.get(i,o);return typeof c=="function"?c.bind(i):c}}),enumerable:!0,configurable:!0});const s=["href","pathname","search","toString","toJSON"];r&&s.push("hash");for(const i of s)Object.defineProperty(a,i,{get(){return t(),e[i]},enumerable:!0,configurable:!0});return a}function Et(...e){let t=5381;for(const n of e)if(typeof n=="string"){let r=n.length;for(;r;)t=t*33^n.charCodeAt(--r)}else if(ArrayBuffer.isView(n)){const r=new Uint8Array(n.buffer,n.byteOffset,n.byteLength);let a=r.length;for(;a;)t=t*33^r[--a]}else throw new TypeError("value must be a string or TypedArray");return(t>>>0).toString(36)}function St(e){const t=atob(e),n=new Uint8Array(t.length);for(let r=0;r((e instanceof Request?e.method:t?.method||"GET")!=="GET"&&G.delete(Ie(e)),Rt(e,t));const G=new Map;function It(e,t){const n=Ie(e,t),r=document.querySelector(n);if(r?.textContent){let{body:a,...s}=JSON.parse(r.textContent);const i=r.getAttribute("data-ttl");return 
i&&G.set(n,{body:a,init:s,ttl:1e3*Number(i)}),r.getAttribute("data-b64")!==null&&(a=St(a)),Promise.resolve(new Response(a,s))}return window.fetch(e,t)}function Ut(e,t,n){if(G.size>0){const r=Ie(e,n),a=G.get(r);if(a){if(performance.now(){const a=/^\[\.\.\.(\w+)(?:=(\w+))?\]$/.exec(r);if(a)return t.push({name:a[1],matcher:a[2],optional:!1,rest:!0,chained:!0}),"(?:/([^]*))?";const s=/^\[\[(\w+)(?:=(\w+))?\]\]$/.exec(r);if(s)return t.push({name:s[1],matcher:s[2],optional:!0,rest:!1,chained:!0}),"(?:/([^/]+))?";if(!r)return;const i=r.split(/\[(.+?)\](?!\])/);return"/"+i.map((c,f)=>{if(f%2){if(c.startsWith("x+"))return _e(String.fromCharCode(parseInt(c.slice(2),16)));if(c.startsWith("u+"))return _e(String.fromCharCode(...c.slice(2).split("-").map(_=>parseInt(_,16))));const d=Lt.exec(c),[,h,u,l,p]=d;return t.push({name:l,matcher:p,optional:!!h,rest:!!u,chained:u?f===1&&i[0]==="":!1}),u?"([^]*?)":h?"([^/]*)?":"([^/]+?)"}return _e(c)}).join("")}).join("")}/?$`),params:t}}function xt(e){return e!==""&&!/^\([^)]+\)$/.test(e)}function Pt(e){return e.slice(1).split("/").filter(xt)}function Ct(e,t,n){const r={},a=e.slice(1),s=a.filter(o=>o!==void 0);let i=0;for(let o=0;od).join("/"),i=0),f===void 0){c.rest&&(r[c.name]="");continue}if(!c.matcher||n[c.matcher](f)){r[c.name]=f;const d=t[o+1],h=a[o+1];d&&!d.rest&&d.optional&&h&&c.chained&&(i=0),!d&&!h&&Object.keys(r).length===s.length&&(i=0);continue}if(c.optional&&c.chained){i++;continue}return}if(!i)return r}function _e(e){return e.normalize().replace(/[[\]]/g,"\\$&").replace(/%/g,"%25").replace(/\//g,"%2[Ff]").replace(/\?/g,"%3[Ff]").replace(/#/g,"%23").replace(/[.*+?^${}()|\\]/g,"\\$&")}function Ot({nodes:e,server_loads:t,dictionary:n,matchers:r}){const a=new Set(t);return Object.entries(n).map(([o,[c,f,d]])=>{const{pattern:h,params:u}=Tt(o),l={id:o,exec:p=>{const _=h.exec(p);if(_)return Ct(_,u,r)},errors:[1,...d||[]].map(p=>e[p]),layouts:[0,...f||[]].map(i),leaf:s(c)};return l.errors.length=l.layouts.length=Math.max(l.errors.length,l.layouts.length),l});function s(o){const c=o<0;return c&&(o=~o),[c,e[o]]}function i(o){return o===void 0?o:[a.has(o),e[o]]}}function Je(e,t=JSON.parse){try{return t(sessionStorage[e])}catch{}}function Fe(e,t,n=JSON.stringify){const r=n(t);try{sessionStorage[e]=r}catch{}}const Nt="1755415870786",Xe="sveltekit:snapshot",Ze="sveltekit:scroll",Qe="sveltekit:states",jt="sveltekit:pageurl",F="sveltekit:history",Y="sveltekit:navigation",j={tap:1,hover:2,viewport:3,eager:4,off:-1,false:-1},Z=location.origin;function Ue(e){if(e instanceof URL)return e;let t=document.baseURI;if(!t){const n=document.getElementsByTagName("base");t=n.length?n[0].href:document.URL}return new URL(e,t)}function fe(){return{x:pageXOffset,y:pageYOffset}}function B(e,t){return e.getAttribute(`data-sveltekit-${t}`)}const Ve={...j,"":j.hover};function et(e){let t=e.assignedSlot??e.parentNode;return t?.nodeType===11&&(t=t.host),t}function tt(e,t){for(;e&&e!==t;){if(e.nodeName.toUpperCase()==="A"&&e.hasAttribute("href"))return e;e=et(e)}}function ve(e,t,n){let r;try{if(r=new URL(e instanceof SVGAElement?e.href.baseVal:e.href,document.baseURI),n&&r.hash.match(/^#[^/]/)){const o=location.hash.split("#")[1]||"/";r.hash=`#${o}${r.hash}`}}catch{}const a=e instanceof SVGAElement?e.target.baseVal:e.target,s=!r||!!a||ue(r,t,n)||(e.getAttribute("rel")||"").split(/\s+/).includes("external"),i=r?.origin===Z&&e.hasAttribute("download");return{url:r,external:s,target:a,download:i}}function te(e){let 
t=null,n=null,r=null,a=null,s=null,i=null,o=e;for(;o&&o!==document.documentElement;)r===null&&(r=B(o,"preload-code")),a===null&&(a=B(o,"preload-data")),t===null&&(t=B(o,"keepfocus")),n===null&&(n=B(o,"noscroll")),s===null&&(s=B(o,"reload")),i===null&&(i=B(o,"replacestate")),o=et(o);function c(f){switch(f){case"":case"true":return!0;case"off":case"false":return!1;default:return}}return{preload_code:Ve[r??"off"],preload_data:Ve[a??"off"],keepfocus:c(t),noscroll:c(n),reload:c(s),replace_state:c(i)}}function qe(e){const t=Ee(e);let n=!0;function r(){n=!0,t.update(i=>i)}function a(i){n=!1,t.set(i)}function s(i){let o;return t.subscribe(c=>{(o===void 0||n&&c!==o)&&i(o=c)})}return{notify:r,set:a,subscribe:s}}const nt={v:()=>{}};function $t(){const{set:e,subscribe:t}=Ee(!1);let n;async function r(){clearTimeout(n);try{const a=await fetch(`${wt}/_app/version.json`,{headers:{pragma:"no-cache","cache-control":"no-cache"}});if(!a.ok)return!1;const i=(await a.json()).version!==Nt;return i&&(e(!0),nt.v(),clearTimeout(n)),i}catch{return!1}}return{subscribe:t,check:r}}function ue(e,t,n){return e.origin!==Z||!e.pathname.startsWith(t)?!0:n?!(e.pathname===t+"/"||e.pathname===t+"/index.html"||e.protocol==="file:"&&e.pathname.replace(/\/[^/]+\.html?$/,"")===t):!1}function kn(e){}function Me(e){const t=Bt(e),n=new ArrayBuffer(t.length),r=new DataView(n);for(let a=0;a>16),t+=String.fromCharCode((n&65280)>>8),t+=String.fromCharCode(n&255),n=r=0);return r===12?(n>>=4,t+=String.fromCharCode(n)):r===18&&(n>>=2,t+=String.fromCharCode((n&65280)>>8),t+=String.fromCharCode(n&255)),t}const Ft=-1,Vt=-2,qt=-3,Mt=-4,Gt=-5,Ht=-6;function Kt(e,t){if(typeof e=="number")return a(e,!0);if(!Array.isArray(e)||e.length===0)throw new Error("Invalid input");const n=e,r=Array(n.length);function a(s,i=!1){if(s===Ft)return;if(s===qt)return NaN;if(s===Mt)return 1/0;if(s===Gt)return-1/0;if(s===Ht)return-0;if(i)throw new Error("Invalid input");if(s in r)return r[s];const o=n[s];if(!o||typeof o!="object")r[s]=o;else if(Array.isArray(o))if(typeof o[0]=="string"){const c=o[0],f=t?.[c];if(f)return r[s]=f(a(o[1]));switch(c){case"Date":r[s]=new Date(o[1]);break;case"Set":const d=new Set;r[s]=d;for(let l=1;lt!=null)}const zt="x-sveltekit-invalidated",Jt="x-sveltekit-trailing-slash";function ne(e){return e instanceof le||e instanceof Re?e.status:500}function Xt(e){return e instanceof Re?e.text:"Internal Error"}let E,z,ye;const Zt=De.toString().includes("$$")||/function \w+\(\) \{\}/.test(De.toString());Zt?(E={data:{},form:null,error:null,params:{},route:{id:null},state:{},status:-1,url:new URL("https://example.com")},z={current:null},ye={current:!1}):(E=new class{#e=T({});get data(){return x(this.#e)}set data(t){P(this.#e,t)}#t=T(null);get form(){return x(this.#t)}set form(t){P(this.#t,t)}#n=T(null);get error(){return x(this.#n)}set error(t){P(this.#n,t)}#r=T({});get params(){return x(this.#r)}set params(t){P(this.#r,t)}#a=T({id:null});get route(){return x(this.#a)}set route(t){P(this.#a,t)}#o=T({});get state(){return x(this.#o)}set state(t){P(this.#o,t)}#s=T(-1);get status(){return x(this.#s)}set status(t){P(this.#s,t)}#i=T(new URL("https://example.com"));get url(){return x(this.#i)}set url(t){P(this.#i,t)}},z=new class{#e=T(null);get current(){return x(this.#e)}set current(t){P(this.#e,t)}},ye=new class{#e=T(!1);get current(){return x(this.#e)}set current(t){P(this.#e,t)}},nt.v=()=>ye.current=!0);function Qt(e){Object.assign(E,e)}const en="/__data.json",tn=".html__data.json";function nn(e){return 
e.endsWith(".html")?e.replace(/\.html$/,tn):e.replace(/\/$/,"")+en}const{tick:rn}=yt,an=new Set(["icon","shortcut icon","apple-touch-icon"]),D=Je(Ze)??{},J=Je(Xe)??{},N={url:qe({}),page:qe({}),navigating:Ee(null),updated:$t()};function Le(e){D[e]=fe()}function on(e,t){let n=e+1;for(;D[n];)delete D[n],n+=1;for(n=t+1;J[n];)delete J[n],n+=1}function q(e){return location.href=e.href,new Promise(()=>{})}async function at(){if("serviceWorker"in navigator){const e=await navigator.serviceWorker.getRegistration(L||"/");e&&await e.update()}}function Ge(){}let Te,be,re,C,ke,v;globalThis.__sveltekit_xtz33p.data;const ae=[],oe=[];let O=null;const ee=new Map,ot=new Set,sn=new Set,H=new Set;let w={branch:[],error:null,url:null},xe=!1,se=!1,He=!0,X=!1,M=!1,st=!1,Pe=!1,it,k,I,$;const K=new Set,Ke=new Map;async function Rn(e,t,n){document.URL!==location.href&&(location.href=location.href),v=e,await e.hooks.init?.(),Te=Ot(e),C=document.documentElement,ke=t,be=e.nodes[0],re=e.nodes[1],be(),re(),k=history.state?.[F],I=history.state?.[Y],k||(k=I=Date.now(),history.replaceState({...history.state,[F]:k,[Y]:I},""));const r=D[k];function a(){r&&(history.scrollRestoration="manual",scrollTo(r.x,r.y))}n?(a(),await _n(ke,n)):(await W({type:"enter",url:Ue(v.hash?wn(new URL(location.href)):location.href),replace_state:!0}),a()),mn()}function cn(){ae.length=0,Pe=!1}function ct(e){oe.some(t=>t?.snapshot)&&(J[e]=oe.map(t=>t?.snapshot?.capture()))}function lt(e){J[e]?.forEach((t,n)=>{oe[n]?.snapshot?.restore(t)})}function We(){Le(k),Fe(Ze,D),ct(I),Fe(Xe,J)}async function Ce(e,t,n,r){let a;const s=await W({type:"goto",url:Ue(e),keepfocus:t.keepFocus,noscroll:t.noScroll,replace_state:t.replaceState,state:t.state,redirect_count:n,nav_token:r,accept:()=>{t.invalidateAll&&(Pe=!0,a=[...Ke.keys()]),t.invalidate&&t.invalidate.forEach(gn)}});return t.invalidateAll&&Be().then(Be).then(()=>{Ke.forEach(({resource:i},o)=>{a?.includes(o)&&i.refresh?.()})}),s}async function ln(e){if(e.id!==O?.id){const t={};K.add(t),O={id:e.id,token:t,promise:dt({...e,preload:t}).then(n=>(K.delete(t),n.type==="loaded"&&n.state.error&&(O=null),n))}}return O.promise}async function we(e){const t=(await he(e,!1))?.route;t&&await Promise.all([...t.layouts,t.leaf].map(n=>n?.[1]()))}function ft(e,t,n){w=e.state;const r=document.querySelector("style[data-sveltekit]");if(r&&r.remove(),Object.assign(E,e.props.page),it=new v.root({target:t,props:{...e.props,stores:N,components:oe},hydrate:n,sync:!1}),lt(I),n){const a={from:null,to:{params:w.params,route:{id:w.route?.id??null},url:new URL(location.href)},willUnload:!1,type:"enter",complete:Promise.resolve()};H.forEach(s=>s(a))}se=!0}function ie({url:e,params:t,branch:n,status:r,error:a,route:s,form:i}){let o="never";if(L&&(e.pathname===L||e.pathname===L+"/"))o="always";else for(const l of n)l?.slash!==void 0&&(o=l.slash);e.pathname=vt(e.pathname,o),e.search=e.search;const c={type:"loaded",state:{url:e,params:t,branch:n,error:a,route:s},props:{constructors:Yt(n).map(l=>l.node.component),page:$e(E)}};i!==void 0&&(c.props.form=i);let f={},d=!E,h=0;for(let l=0;l(o&&(c.route=!0),u[l])}),params:new Proxy(r,{get:(u,l)=>(o&&c.params.add(l),u[l])}),data:s?.data??null,url:At(n,()=>{o&&(c.url=!0)},u=>{o&&c.search_params.add(u)},v.hash),async fetch(u,l){u instanceof Request&&(l={body:u.method==="GET"||u.method==="HEAD"?void 0:await u.blob(),cache:u.cache,credentials:u.credentials,headers:[...u.headers].length>0?u?.headers:void 
0,integrity:u.integrity,keepalive:u.keepalive,method:u.method,mode:u.mode,redirect:u.redirect,referrer:u.referrer,referrerPolicy:u.referrerPolicy,signal:u.signal,...l});const{resolved:p,promise:_}=ut(u,l,n);return o&&d(p.href),_},setHeaders:()=>{},depends:d,parent(){return o&&(c.parent=!0),t()},untrack(u){o=!1;try{return u()}finally{o=!0}}};i=await f.universal.load.call(null,h)??null}return{node:f,loader:e,server:s,universal:f.universal?.load?{type:"data",data:i,uses:c}:null,data:i??s?.data??null,slash:f.universal?.trailingSlash??s?.slash}}function ut(e,t,n){let r=e instanceof Request?e.url:e;const a=new URL(r,n);a.origin===n.origin&&(r=a.href.slice(n.origin.length));const s=se?Ut(r,a.href,t):It(r,t);return{resolved:a,promise:s}}function Ye(e,t,n,r,a,s){if(Pe)return!0;if(!a)return!1;if(a.parent&&e||a.route&&t||a.url&&n)return!0;for(const i of a.search_params)if(r.has(i))return!0;for(const i of a.params)if(s[i]!==w.params[i])return!0;for(const i of a.dependencies)if(ae.some(o=>o(new URL(i))))return!0;return!1}function Ne(e,t){return e?.type==="data"?e:e?.type==="skip"?t??null:null}function fn(e,t){if(!e)return new Set(t.searchParams.keys());const n=new Set([...e.searchParams.keys(),...t.searchParams.keys()]);for(const r of n){const a=e.searchParams.getAll(r),s=t.searchParams.getAll(r);a.every(i=>s.includes(i))&&s.every(i=>a.includes(i))&&n.delete(r)}return n}function ze({error:e,url:t,route:n,params:r}){return{type:"loaded",state:{error:e,url:t,route:n,params:r,branch:[]},props:{page:$e(E),constructors:[]}}}async function dt({id:e,invalidating:t,url:n,params:r,route:a,preload:s}){if(O?.id===e)return K.delete(O.token),O.promise;const{errors:i,layouts:o,leaf:c}=a,f=[...o,c];i.forEach(g=>g?.().catch(()=>{})),f.forEach(g=>g?.[1]().catch(()=>{}));let d=null;const h=w.url?e!==ce(w.url):!1,u=w.route?a.id!==w.route.id:!1,l=fn(w.url,n);let p=!1;const _=f.map((g,y)=>{const b=w.branch[y],A=!!g?.[0]&&(b?.loader!==g[1]||Ye(p,u,h,l,b.server?.uses,r));return A&&(p=!0),A});if(_.some(Boolean)){try{d=await gt(n,_)}catch(g){const y=await V(g,{url:n,params:r,route:{id:e}});return K.has(s)?ze({error:y,url:n,params:r,route:a}):de({status:ne(g),error:y,url:n,route:a})}if(d.type==="redirect")return d}const m=d?.nodes;let R=!1;const S=f.map(async(g,y)=>{if(!g)return;const b=w.branch[y],A=m?.[y];if((!A||A.type==="skip")&&g[1]===b?.loader&&!Ye(R,u,h,l,b.universal?.uses,r))return b;if(R=!0,A?.type==="error")throw A;return Oe({loader:g[1],url:n,params:r,route:a,parent:async()=>{const pe={};for(let ge=0;ge{});const U=[];for(let g=0;gPromise.resolve({}),server_data_node:Ne(s)}),c={node:await re(),loader:re,universal:null,server:null,data:null};return ie({url:n,params:a,branch:[o,c],status:e,error:t,route:null})}catch(o){if(o instanceof Se)return Ce(new URL(o.location,location.href),{},0);throw o}}async function dn(e){const t=e.href;if(ee.has(t))return ee.get(t);let n;try{const r=(async()=>{let a=await v.hooks.reroute({url:new URL(e),fetch:async(s,i)=>ut(s,i,e).promise})??e;if(typeof a=="string"){const s=new URL(e);v.hash?s.hash=a:s.pathname=a,a=s}return a})();ee.set(t,r),n=await r}catch{ee.delete(t);return}return n}async function he(e,t){if(e&&!ue(e,L,v.hash)){const n=await dn(e);if(!n)return;const r=hn(n);for(const a of Te){const s=a.exec(r);if(s)return{id:ce(e),invalidating:t,route:a,params:kt(s),url:e}}}}function hn(e){return bt(v.hash?e.hash.replace(/^#/,"").replace(/[?#].+/,""):e.pathname.slice(L.length))||"/"}function ce(e){return(v.hash?e.hash.replace(/^#/,""):e.pathname)+e.search}function 
ht({url:e,type:t,intent:n,delta:r}){let a=!1;const s=je(w,n,e,t);r!==void 0&&(s.navigation.delta=r);const i={...s.navigation,cancel:()=>{a=!0,s.reject(new Error("navigation cancelled"))}};return X||ot.forEach(o=>o(i)),a?null:s}async function W({type:e,url:t,popped:n,keepfocus:r,noscroll:a,replace_state:s,state:i={},redirect_count:o=0,nav_token:c={},accept:f=Ge,block:d=Ge}){const h=$;$=c;const u=await he(t,!1),l=e==="enter"?je(w,u,t,e):ht({url:t,type:e,delta:n?.delta,intent:u});if(!l){d(),$===c&&($=h);return}const p=k,_=I;f(),X=!0,se&&l.navigation.type!=="enter"&&N.navigating.set(z.current=l.navigation);let m=u&&await dt(u);if(!m){if(ue(t,L,v.hash))return await q(t);m=await pt(t,{id:null},await V(new Re(404,"Not Found",`Not found: ${t.pathname}`),{url:t,params:{},route:{id:null}}),404)}if(t=u?.url||t,$!==c)return l.reject(new Error("navigation aborted")),!1;if(m.type==="redirect")if(o>=20)m=await de({status:500,error:await V(new Error("Redirect loop"),{url:t,params:{},route:{id:null}}),url:t,route:{id:null}});else return await Ce(new URL(m.location,t).href,{},o+1,c),!1;else m.props.page.status>=400&&await N.updated.check()&&(await at(),await q(t));if(cn(),Le(p),ct(_),m.props.page.url.pathname!==t.pathname&&(t.pathname=m.props.page.url.pathname),i=n?n.state:i,!n){const g=s?0:1,y={[F]:k+=g,[Y]:I+=g,[Qe]:i};(s?history.replaceState:history.pushState).call(history,y,"",t),s||on(k,I)}if(O=null,m.props.page.state=i,se){const g=(await Promise.all(Array.from(sn,y=>y(l.navigation)))).filter(y=>typeof y=="function");if(g.length>0){let y=function(){g.forEach(b=>{H.delete(b)})};g.push(y),g.forEach(b=>{H.add(b)})}w=m.state,m.props.page&&(m.props.page.url=t),it.$set(m.props),Qt(m.props.page),st=!0}else ft(m,ke,!1);const{activeElement:R}=document;await rn();const S=n?n.scroll:a?fe():null;if(He){const g=t.hash&&document.getElementById(_t(t));S?scrollTo(S.x,S.y):g?g.scrollIntoView():scrollTo(0,0)}const U=document.activeElement!==R&&document.activeElement!==document.body;!r&&!U&&yn(t),He=!0,m.props.page&&Object.assign(E,m.props.page),X=!1,e==="popstate"&<(I),l.fulfil(void 0),H.forEach(g=>g(l.navigation)),N.navigating.set(z.current=null)}async function pt(e,t,n,r){return e.origin===Z&&e.pathname===location.pathname&&!xe?await de({status:r,error:n,url:e,route:t}):await q(e)}function pn(){let e,t,n;C.addEventListener("mousemove",o=>{const c=o.target;clearTimeout(e),e=setTimeout(()=>{s(c,j.hover)},20)});function r(o){o.defaultPrevented||s(o.composedPath()[0],j.tap)}C.addEventListener("mousedown",r),C.addEventListener("touchstart",r,{passive:!0});const a=new IntersectionObserver(o=>{for(const c of o)c.isIntersecting&&(we(new URL(c.target.href)),a.unobserve(c.target))},{threshold:0});async function s(o,c){const f=tt(o,C),d=f===t&&c>=n;if(!f||d)return;const{url:h,external:u,download:l}=ve(f,L,v.hash);if(u||l)return;const p=te(f),_=h&&ce(w.url)===ce(h);if(!(p.reload||_))if(c<=p.preload_data){t=f,n=j.tap;const m=await he(h,!1);if(!m)return;ln(m)}else c<=p.preload_code&&(t=f,n=c,we(h))}function i(){a.disconnect();for(const o of C.querySelectorAll("a")){const{url:c,external:f,download:d}=ve(o,L,v.hash);if(f||d)continue;const h=te(o);h.reload||(h.preload_code===j.viewport&&a.observe(o),h.preload_code===j.eager&&we(c))}}H.add(i),i()}function V(e,t){if(e instanceof le)return e.body;const n=ne(e),r=Xt(e);return v.hooks.handleError({error:e,event:t,status:n,message:r})??{message:r}}function In(e,t={}){return e=new URL(Ue(e)),e.origin!==Z?Promise.reject(new Error("goto: invalid URL")):Ce(e,t,0)}function gn(e){if(typeof 
e=="function")ae.push(e);else{const{href:t}=new URL(e,location.href);ae.push(n=>n.href===t)}}function mn(){history.scrollRestoration="manual",addEventListener("beforeunload",t=>{let n=!1;if(We(),!X){const r=je(w,void 0,null,"leave"),a={...r.navigation,cancel:()=>{n=!0,r.reject(new Error("navigation cancelled"))}};ot.forEach(s=>s(a))}n?(t.preventDefault(),t.returnValue=""):history.scrollRestoration="auto"}),addEventListener("visibilitychange",()=>{document.visibilityState==="hidden"&&We()}),navigator.connection?.saveData||pn(),C.addEventListener("click",async t=>{if(t.button||t.which!==1||t.metaKey||t.ctrlKey||t.shiftKey||t.altKey||t.defaultPrevented)return;const n=tt(t.composedPath()[0],C);if(!n)return;const{url:r,external:a,target:s,download:i}=ve(n,L,v.hash);if(!r)return;if(s==="_parent"||s==="_top"){if(window.parent!==window)return}else if(s&&s!=="_self")return;const o=te(n);if(!(n instanceof SVGAElement)&&r.protocol!==location.protocol&&!(r.protocol==="https:"||r.protocol==="http:")||i)return;const[f,d]=(v.hash?r.hash.replace(/^#/,""):r.href).split("#"),h=f===me(location);if(a||o.reload&&(!h||!d)){ht({url:r,type:"link"})?X=!0:t.preventDefault();return}if(d!==void 0&&h){const[,u]=w.url.href.split("#");if(u===d){if(t.preventDefault(),d===""||d==="top"&&n.ownerDocument.getElementById("top")===null)window.scrollTo({top:0});else{const l=n.ownerDocument.getElementById(decodeURIComponent(d));l&&(l.scrollIntoView(),l.focus())}return}if(M=!0,Le(k),e(r),!o.replace_state)return;M=!1}t.preventDefault(),await new Promise(u=>{requestAnimationFrame(()=>{setTimeout(u,0)}),setTimeout(u,100)}),await W({type:"link",url:r,keepfocus:o.keepfocus,noscroll:o.noscroll,replace_state:o.replace_state??r.href===location.href})}),C.addEventListener("submit",t=>{if(t.defaultPrevented)return;const n=HTMLFormElement.prototype.cloneNode.call(t.target),r=t.submitter;if((r?.formTarget||n.target)==="_blank"||(r?.formMethod||n.method)!=="get")return;const i=new URL(r?.hasAttribute("formaction")&&r?.formAction||n.action);if(ue(i,L,!1))return;const o=t.target,c=te(o);if(c.reload)return;t.preventDefault(),t.stopPropagation();const f=new FormData(o),d=r?.getAttribute("name");d&&f.append(d,r?.getAttribute("value")??""),i.search=new URLSearchParams(f).toString(),W({type:"form",url:i,keepfocus:c.keepfocus,noscroll:c.noscroll,replace_state:c.replace_state??i.href===location.href})}),addEventListener("popstate",async t=>{if(!Ae){if(t.state?.[F]){const n=t.state[F];if($={},n===k)return;const r=D[n],a=t.state[Qe]??{},s=new URL(t.state[jt]??location.href),i=t.state[Y],o=w.url?me(location)===me(w.url):!1;if(i===I&&(st||o)){a!==E.state&&(E.state=a),e(s),D[k]=fe(),r&&scrollTo(r.x,r.y),k=n;return}const f=n-k;await W({type:"popstate",url:s,popped:{state:a,scroll:r,delta:f},accept:()=>{k=n,I=i},block:()=>{history.go(-f)},nav_token:$})}else if(!M){const n=new URL(location.href);e(n),v.hash&&location.reload()}}}),addEventListener("hashchange",()=>{M&&(M=!1,history.replaceState({...history.state,[F]:++k,[Y]:I},"",location.href))});for(const t of document.querySelectorAll("link"))an.has(t.rel)&&(t.href=t.href);addEventListener("pageshow",t=>{t.persisted&&N.navigating.set(z.current=null)});function e(t){w.url=E.url=t,N.page.set($e(E)),N.page.notify()}}async function _n(e,{status:t=200,error:n,node_ids:r,params:a,route:s,server_route:i,data:o,form:c}){xe=!0;const f=new URL(location.href);let d;({params:a={},route:s={id:null}}=await he(f,!1)||{}),d=Te.find(({id:l})=>l===s.id);let h,u=!0;try{const l=r.map(async(_,m)=>{const R=o[m];return 
R?.uses&&(R.uses=mt(R.uses)),Oe({loader:v.nodes[_],url:f,params:a,route:s,parent:async()=>{const S={};for(let U=0;Us?"1":"0").join(""));const r=window.fetch,a=await r(n.href,{});if(!a.ok){let s;throw a.headers.get("content-type")?.includes("application/json")?s=await a.json():a.status===404?s="Not Found":a.status===500&&(s="Internal Error"),new le(a.status,s)}return new Promise(async s=>{const i=new Map,o=a.body.getReader(),c=new TextDecoder;function f(h){return Kt(h,{...v.decoders,Promise:u=>new Promise((l,p)=>{i.set(u,{fulfil:l,reject:p})})})}let d="";for(;;){const{done:h,value:u}=await o.read();if(h&&!d)break;for(d+=!u&&d?` +`:c.decode(u,{stream:!0});;){const l=d.indexOf(` +`);if(l===-1)break;const p=JSON.parse(d.slice(0,l));if(d=d.slice(l+1),p.type==="redirect")return s(p);if(p.type==="data")p.nodes?.forEach(_=>{_?.type==="data"&&(_.uses=mt(_.uses),_.data=f(_.data))}),s(p);else if(p.type==="chunk"){const{id:_,data:m,error:R}=p,S=i.get(_);i.delete(_),R?S.reject(f(R)):S.fulfil(f(m))}}}})}function mt(e){return{dependencies:new Set(e?.dependencies??[]),params:new Set(e?.params??[]),parent:!!e?.parent,route:!!e?.route,url:!!e?.url,search_params:new Set(e?.search_params??[])}}let Ae=!1;function yn(e){const t=document.querySelector("[autofocus]");if(t)t.focus();else{const n=_t(e);if(n&&document.getElementById(n)){const{x:a,y:s}=fe();setTimeout(()=>{const i=history.state;Ae=!0,location.replace(`#${n}`),v.hash&&location.replace(e.hash),history.replaceState(i,"",e.hash),scrollTo(a,s),Ae=!1})}else{const a=document.body,s=a.getAttribute("tabindex");a.tabIndex=-1,a.focus({preventScroll:!0,focusVisible:!1}),s!==null?a.setAttribute("tabindex",s):a.removeAttribute("tabindex")}const r=getSelection();if(r&&r.type!=="None"){const a=[];for(let s=0;s{if(r.rangeCount===a.length){for(let s=0;s{a=c,s=f});return i.catch(()=>{}),{navigation:{from:{params:e.params,route:{id:e.route?.id??null},url:e.url},to:n&&{params:t?.params??null,route:{id:t?.route?.id??null},url:n},willUnload:!t,type:r,complete:i},fulfil:a,reject:s}}function $e(e){return{data:e.data,error:e.error,form:e.form,params:e.params,route:e.route,state:e.state,status:e.status,url:e.url}}function wn(e){const t=new URL(e);return t.hash=decodeURIComponent(e.hash),t}function _t(e){let t;if(v.hash){const[,,n]=e.hash.split("#",3);t=n??""}else t=e.hash.slice(1);return decodeURIComponent(t)}export{Rn as a,In as g,kn as l,E as p,N as s}; diff --git a/webapp/assets/_app/immutable/chunks/Dbd6PPbz.js b/webapp/assets/_app/immutable/chunks/Dbd6PPbz.js deleted file mode 100644 index cca64cf0..00000000 --- a/webapp/assets/_app/immutable/chunks/Dbd6PPbz.js +++ /dev/null @@ -1 +0,0 @@ -import"./DsnmJJEf.js";import{i as ae}from"./B3Pzt0F_.js";import{p as se,E as re,l as P,n as ie,s as r,g as t,m as k,a as le,f as p,j as v,k as U,r as f,c as l,d as oe,B as T,b as $,z as V,D as q,t as E,v as N,u as ne}from"./D8EpLgQ1.js";import{p as R,i as m}from"./5WA7h8uK.js";import{g as u,B as G}from"./CiE1LlKV.js";import{t as y}from"./BEkVdVE1.js";var de=p('
                Checking...
                '),ce=p('
                '),ve=p('
                Webhook installed
                ',1),fe=p('
                No webhook installed
                '),ue=p('

                Webhook Status

                ');function ye(H,g){se(g,!1);const x=k();let h=R(g,"entityType",8),s=R(g,"entityId",8),j=R(g,"entityName",8),i=k(null),o=k(!1),b=k(!0);const O=re();async function _(){if(s())try{r(b,!0),h()==="repository"?r(i,await u.getRepositoryWebhookInfo(s())):r(i,await u.getOrganizationWebhookInfo(s()))}catch(e){e&&typeof e=="object"&&"response"in e&&e.response?.status===404?r(i,null):(console.warn("Failed to check webhook status:",e),r(i,null))}finally{r(b,!1)}}async function J(){if(s())try{r(o,!0),h()==="repository"?await u.installRepositoryWebhook(s()):await u.installOrganizationWebhook(s()),y.success("Webhook Installed",`Webhook for ${h()} ${j()} has been installed successfully.`),await _(),O("webhookStatusChanged",{installed:!0})}catch(e){y.error("Webhook Installation Failed",e instanceof Error?e.message:"Failed to install webhook.")}finally{r(o,!1)}}async function K(){if(s())try{r(o,!0),h()==="repository"?await u.uninstallRepositoryWebhook(s()):await u.uninstallOrganizationWebhook(s()),y.success("Webhook Uninstalled",`Webhook for ${h()} ${j()} has been uninstalled successfully.`),await _(),O("webhookStatusChanged",{installed:!1})}catch(e){y.error("Webhook Uninstall Failed",e instanceof Error?e.message:"Failed to uninstall webhook.")}finally{r(o,!1)}}P(()=>ie(s()),()=>{s()&&_()}),P(()=>t(i),()=>{r(x,t(i)&&t(i).active)}),le(),ae();var w=ue(),A=v(w),D=v(A),W=v(D),L=U(v(W),2),Q=v(L);{var X=e=>{var d=de();l(e,d)},Y=e=>{var d=T(),z=$(d);{var I=a=>{var n=ve(),B=U($(n),2);{var c=C=>{var F=ce(),te=v(F);f(F),E(()=>N(te,`URL: ${t(i),ne(()=>t(i).url||"N/A")??""}`)),l(C,F)};m(B,C=>{t(i)&&C(c)})}l(a,n)},S=a=>{var n=fe();l(a,n)};m(z,a=>{t(x)?a(I):a(S,!1)},!0)}l(e,d)};m(Q,e=>{t(b)?e(X):e(Y,!1)})}f(L),f(W);var M=U(W,2),Z=v(M);{var ee=e=>{var d=T(),z=$(d);{var I=a=>{G(a,{variant:"danger",size:"sm",get disabled(){return t(o)},$$events:{click:K},children:(n,B)=>{V();var c=q();E(()=>N(c,t(o)?"Uninstalling...":"Uninstall")),l(n,c)},$$slots:{default:!0}})},S=a=>{G(a,{variant:"primary",size:"sm",get disabled(){return t(o)},$$events:{click:J},children:(n,B)=>{V();var c=q();E(()=>N(c,t(o)?"Installing...":"Install Webhook")),l(n,c)},$$slots:{default:!0}})};m(z,a=>{t(x)?a(I):a(S,!1)})}l(e,d)};m(Z,e=>{t(b)||e(ee)})}f(M),f(D),f(A),f(w),l(H,w),oe()}export{ye as W}; diff --git a/webapp/assets/_app/immutable/chunks/BE4wujub.js b/webapp/assets/_app/immutable/chunks/MCv1Wq2q.js similarity index 96% rename from webapp/assets/_app/immutable/chunks/BE4wujub.js rename to webapp/assets/_app/immutable/chunks/MCv1Wq2q.js index 6b6a2146..5b0cc9f5 100644 --- a/webapp/assets/_app/immutable/chunks/BE4wujub.js +++ b/webapp/assets/_app/immutable/chunks/MCv1Wq2q.js @@ -1 +1 @@ -import"./DsnmJJEf.js";import{i as v}from"./B3Pzt0F_.js";import{p as w,l as m,n as s,g as r,m as g,a as x,B as h,b as T,c as B,d as S,s as k,u}from"./D8EpLgQ1.js";import{k as A}from"./C9DJVOi1.js";import{p as d}from"./5WA7h8uK.js";import{k as b,B as C}from"./BGVHQGl-.js";import{f as E}from"./ow_oMtSd.js";function q(_,i){w(i,!1);const c=g(),n=g();let e=d(i,"item",8),l=d(i,"statusType",8,"entity"),a=d(i,"statusField",8,"status");m(()=>(s(e()),s(a())),()=>{k(c,e()?.[a()]||"unknown")}),m(()=>(s(e()),s(l()),r(c),s(a())),()=>{k(n,(()=>{if(!e())return{variant:"error",text:"Unknown"};switch(l()){case"entity":return b(e());case"instance":let 
t="secondary";switch(r(c).toLowerCase()){case"running":t="success";break;case"stopped":t="info";break;case"creating":case"pending_create":t="warning";break;case"deleting":case"pending_delete":case"pending_force_delete":t="warning";break;case"error":case"deleted":t="error";break;case"active":case"online":t="success";break;case"idle":t="info";break;case"pending":case"installing":t="warning";break;case"failed":case"terminated":case"offline":t="error";break;case"unknown":default:t="secondary";break}return{variant:t,text:E(r(c))};case"enabled":return{variant:e().enabled?"success":"error",text:e().enabled?"Enabled":"Disabled"};case"custom":const o=e()[a()]||"Unknown";if(a()==="auth-type"){const f=o==="pat"||!o?"pat":"app";return{variant:f==="pat"?"success":"info",text:f==="pat"?"PAT":"App"}}return{variant:"info",text:o};default:return b(e())}})())}),x(),v();var p=h(),y=T(p);A(y,()=>(s(e()),s(a()),u(()=>`${e()?.name||"item"}-${e()?.[a()]||"status"}-${e()?.updated_at||"time"}`)),t=>{C(t,{get variant(){return r(n),u(()=>r(n).variant)},get text(){return r(n),u(()=>r(n).text)}})}),B(_,p),S()}export{q as S}; +import"./DsnmJJEf.js";import{i as v}from"./B3Pzt0F_.js";import{p as w,l as m,n as s,g as r,m as g,a as x,B as h,b as T,c as B,d as S,s as k,u}from"./D8EpLgQ1.js";import{k as A}from"./BrNfsPe8.js";import{p as d}from"./5WA7h8uK.js";import{k as b,B as C}from"./BGVHQGl-.js";import{f as E}from"./ow_oMtSd.js";function q(_,i){w(i,!1);const c=g(),n=g();let e=d(i,"item",8),l=d(i,"statusType",8,"entity"),a=d(i,"statusField",8,"status");m(()=>(s(e()),s(a())),()=>{k(c,e()?.[a()]||"unknown")}),m(()=>(s(e()),s(l()),r(c),s(a())),()=>{k(n,(()=>{if(!e())return{variant:"error",text:"Unknown"};switch(l()){case"entity":return b(e());case"instance":let t="secondary";switch(r(c).toLowerCase()){case"running":t="success";break;case"stopped":t="info";break;case"creating":case"pending_create":t="warning";break;case"deleting":case"pending_delete":case"pending_force_delete":t="warning";break;case"error":case"deleted":t="error";break;case"active":case"online":t="success";break;case"idle":t="info";break;case"pending":case"installing":t="warning";break;case"failed":case"terminated":case"offline":t="error";break;case"unknown":default:t="secondary";break}return{variant:t,text:E(r(c))};case"enabled":return{variant:e().enabled?"success":"error",text:e().enabled?"Enabled":"Disabled"};case"custom":const o=e()[a()]||"Unknown";if(a()==="auth-type"){const f=o==="pat"||!o?"pat":"app";return{variant:f==="pat"?"success":"info",text:f==="pat"?"PAT":"App"}}return{variant:"info",text:o};default:return b(e())}})())}),x(),v();var p=h(),y=T(p);A(y,()=>(s(e()),s(a()),u(()=>`${e()?.name||"item"}-${e()?.[a()]||"status"}-${e()?.updated_at||"time"}`)),t=>{C(t,{get variant(){return r(n),u(()=>r(n).variant)},get text(){return r(n),u(()=>r(n).text)}})}),B(_,p),S()}export{q as S}; diff --git a/webapp/assets/_app/immutable/chunks/BmGWMSQm.js b/webapp/assets/_app/immutable/chunks/OpktHEmj.js similarity index 97% rename from webapp/assets/_app/immutable/chunks/BmGWMSQm.js rename to webapp/assets/_app/immutable/chunks/OpktHEmj.js index 7e21970e..78eaac3e 100644 --- a/webapp/assets/_app/immutable/chunks/BmGWMSQm.js +++ b/webapp/assets/_app/immutable/chunks/OpktHEmj.js @@ -1 +1 @@ -import"./DsnmJJEf.js";import{i as K}from"./B3Pzt0F_.js";import{p as O,f as U,j as e,r as t,k as r,n as m,u as o,z as et,t as q,v as p,c as h,d as Q,E as ct,D as mt,B as Y,b as Z,g as P}from"./D8EpLgQ1.js";import{p as T,i as G,s as ut,a as pt}from"./5WA7h8uK.js";import{c as at,B as 
gt}from"./CiE1LlKV.js";import{b as R,B as H,e as xt}from"./BGVHQGl-.js";import{b as ft}from"./CoIRRsD9.js";import{e as yt}from"./wyaP0EDu.js";import{D as ht,G as tt}from"./C9DJVOi1.js";import{E as _t}from"./B7ITzBt8.js";import{S as bt}from"./BE4wujub.js";import{e as kt,i as wt}from"./u94nIB4-.js";import{b as Ct}from"./BAg1iRPq.js";var Pt=U('

                ID
                Created At
                Updated At
                Status
                Pool Balancer Type
                ');function Wt(L,v){O(v,!1);let a=T(v,"entity",8),g=T(v,"entityType",8);function N(){return`${g().charAt(0).toUpperCase()+g().slice(1)} Information`}function n(){if(!a().endpoint?.base_url)return"#";switch(g()){case"repository":const d=a();return`${a().endpoint.base_url}/${d.owner}/${a().name}`;case"organization":return`${a().endpoint.base_url}/${a().name}`;case"enterprise":return`${a().endpoint.base_url}/enterprises/${a().name}`;default:return"#"}}function E(){return`${g().charAt(0).toUpperCase()+g().slice(1)} URL`}function V(){const d=a().pool_balancing_type;if(!d||d===""||d==="none")return"Round Robin (default)";switch(d){case"roundrobin":return"Round Robin";case"pack":return"Pack";default:return d}}K();var c=Pt(),x=e(c),_=e(x),I=e(_,!0);t(_);var b=r(_,2),i=e(b),f=r(e(i),2),k=e(f,!0);t(f),t(i);var u=r(i,2),D=r(e(u),2),M=e(D,!0);t(D),t(u);var w=r(u,2),S=r(e(w),2),$=e(S,!0);t(S),t(w);var s=r(w,2),C=r(e(s),2),l=e(C);{var j=d=>{H(d,{variant:"success",text:"Running"})},z=d=>{H(d,{variant:"error",text:"Stopped"})};G(l,d=>{m(a()),o(()=>a().pool_manager_status?.running)?d(j):d(z,!1)})}t(C),t(s);var B=r(s,2),A=r(e(B),2),y=e(A,!0);t(A),t(B);var W=r(B,2),F=e(W),rt=e(F,!0);t(F);var X=r(F,2),J=e(X),st=e(J);et(),t(J),t(X),t(W),t(b),t(x),t(c),q((d,ot,it,dt,nt,lt,vt)=>{p(I,d),p(k,(m(a()),o(()=>a().id))),p(M,ot),p($,it),p(y,dt),p(rt,nt),at(J,"href",lt),p(st,`${vt??""} `)},[()=>o(N),()=>(m(R),m(a()),o(()=>R(a().created_at))),()=>(m(R),m(a()),o(()=>R(a().updated_at))),()=>o(V),()=>o(E),()=>o(n),()=>o(n)]),h(L,c),Q()}var Tt=U('

                No pools configured

                '),Et=U('');function qt(L,v){O(v,!1);const[a,g]=ut(),N=()=>pt(yt,"$eagerCache",a);let n=T(v,"pools",8),E=T(v,"entityType",8),V=T(v,"entityId",8,""),c=T(v,"entityName",8,"");const x=ct();function _(){x("addPool",{entityType:E(),entityId:V(),entityName:c()})}const I=[{key:"id",title:"ID",flexible:!0,cellComponent:_t,cellProps:{entityType:"pool",showId:!0,fontMono:!0}},{key:"image",title:"Image",flexible:!0,cellComponent:tt,cellProps:{field:"image",type:"code",showTitle:!0}},{key:"provider",title:"Provider",cellComponent:tt,cellProps:{field:"provider_name"}},{key:"status",title:"Status",cellComponent:bt,cellProps:{statusType:"enabled"}}],b={entityType:"pool",primaryText:{field:"id",isClickable:!0,href:"/pools/{id}",useId:!0,isMonospace:!0},secondaryText:{field:"entity_name",computedValue:s=>xt(s,N())},badges:[{type:"custom",value:s=>({variant:s.enabled?"success":"error",text:s.enabled?"Enabled":"Disabled"})}]};K();var i=Et(),f=e(i),k=e(f),u=e(k),D=e(u);t(u);var M=r(u,2);t(k);var w=r(k,2);{var S=s=>{var C=Tt(),l=r(e(C),4),j=e(l);t(l);var z=r(l,2),B=e(z);gt(B,{variant:"primary",size:"sm",$$events:{click:_},children:(A,y)=>{et();var W=mt("Add Pool");h(A,W)},$$slots:{default:!0}}),t(z),t(C),q(()=>p(j,`No pools configured for this ${E()??""}.`)),h(s,C)},$=s=>{ht(s,{get columns(){return I},get data(){return n()},loading:!1,error:"",searchTerm:"",showSearch:!1,showPagination:!1,currentPage:1,get perPage(){return m(n()),o(()=>n().length)},totalPages:1,get totalItems(){return m(n()),o(()=>n().length)},itemName:"pools",emptyTitle:"No pools configured",get emptyMessage(){return`No pools configured for this ${E()??""}.`},emptyIconType:"cog",get mobileCardConfig(){return b}})};G(w,s=>{m(n()),o(()=>n().length===0)?s(S):s($,!1)})}t(f),t(i),q(()=>{p(D,`Pools (${m(n()),o(()=>n().length)??""})`),at(M,"href",`${ft}/pools`)}),h(L,i),Q(),g()}var It=U('

                '),Bt=U('

                Events

                '),Nt=U('

                Events

                No events available

                ');function Ft(L,v){O(v,!1);let a=T(v,"events",8),g=T(v,"eventsContainer",12,void 0);K();var N=Y(),n=Z(N);{var E=c=>{var x=Bt(),_=e(x),I=r(e(_),2);kt(I,5,a,wt,(b,i)=>{var f=It(),k=e(f),u=e(k),D=e(u,!0);t(u);var M=r(u,2),w=e(M);{var S=l=>{H(l,{variant:"error",text:"Error"})},$=l=>{var j=Y(),z=Z(j);{var B=y=>{H(y,{variant:"warning",text:"Warning"})},A=y=>{H(y,{variant:"info",text:"Info"})};G(z,y=>{P(i),o(()=>(P(i).event_level||"info").toLowerCase()==="warning")?y(B):y(A,!1)},!0)}h(l,j)};G(w,l=>{P(i),o(()=>(P(i).event_level||"info").toLowerCase()==="error")?l(S):l($,!1)})}var s=r(w,2),C=e(s,!0);t(s),t(M),t(k),t(f),q(l=>{p(D,(P(i),o(()=>P(i).message))),p(C,l)},[()=>(m(R),P(i),o(()=>R(P(i).created_at)))]),h(b,f)}),t(I),Ct(I,b=>g(b),()=>g()),t(_),t(x),h(c,x)},V=c=>{var x=Nt();h(c,x)};G(n,c=>{m(a()),o(()=>a()&&a().length>0)?c(E):c(V,!1)})}h(L,N),Q()}export{Wt as E,qt as P,Ft as a}; +import"./DsnmJJEf.js";import{i as K}from"./B3Pzt0F_.js";import{p as O,f as U,j as e,r as t,k as r,n as m,u as o,z as et,t as q,v as p,c as h,d as Q,E as ct,D as mt,B as Y,b as Z,g as P}from"./D8EpLgQ1.js";import{p as T,i as G,s as ut,a as pt}from"./5WA7h8uK.js";import{c as at,B as gt}from"./CiE1LlKV.js";import{b as R,B as H,e as xt}from"./BGVHQGl-.js";import{b as ft}from"./CRhkqW2i.js";import{e as yt}from"./wyaP0EDu.js";import{D as ht,G as tt}from"./BrNfsPe8.js";import{E as _t}from"./D4PaGKsV.js";import{S as bt}from"./MCv1Wq2q.js";import{e as kt,i as wt}from"./u94nIB4-.js";import{b as Ct}from"./BAg1iRPq.js";var Pt=U('

                ID
                Created At
                Updated At
                Status
                Pool Balancer Type
                ');function Wt(L,v){O(v,!1);let a=T(v,"entity",8),g=T(v,"entityType",8);function N(){return`${g().charAt(0).toUpperCase()+g().slice(1)} Information`}function n(){if(!a().endpoint?.base_url)return"#";switch(g()){case"repository":const d=a();return`${a().endpoint.base_url}/${d.owner}/${a().name}`;case"organization":return`${a().endpoint.base_url}/${a().name}`;case"enterprise":return`${a().endpoint.base_url}/enterprises/${a().name}`;default:return"#"}}function E(){return`${g().charAt(0).toUpperCase()+g().slice(1)} URL`}function V(){const d=a().pool_balancing_type;if(!d||d===""||d==="none")return"Round Robin (default)";switch(d){case"roundrobin":return"Round Robin";case"pack":return"Pack";default:return d}}K();var c=Pt(),x=e(c),_=e(x),I=e(_,!0);t(_);var b=r(_,2),i=e(b),f=r(e(i),2),k=e(f,!0);t(f),t(i);var u=r(i,2),D=r(e(u),2),M=e(D,!0);t(D),t(u);var w=r(u,2),S=r(e(w),2),$=e(S,!0);t(S),t(w);var s=r(w,2),C=r(e(s),2),l=e(C);{var j=d=>{H(d,{variant:"success",text:"Running"})},z=d=>{H(d,{variant:"error",text:"Stopped"})};G(l,d=>{m(a()),o(()=>a().pool_manager_status?.running)?d(j):d(z,!1)})}t(C),t(s);var B=r(s,2),A=r(e(B),2),y=e(A,!0);t(A),t(B);var W=r(B,2),F=e(W),rt=e(F,!0);t(F);var X=r(F,2),J=e(X),st=e(J);et(),t(J),t(X),t(W),t(b),t(x),t(c),q((d,ot,it,dt,nt,lt,vt)=>{p(I,d),p(k,(m(a()),o(()=>a().id))),p(M,ot),p($,it),p(y,dt),p(rt,nt),at(J,"href",lt),p(st,`${vt??""} `)},[()=>o(N),()=>(m(R),m(a()),o(()=>R(a().created_at))),()=>(m(R),m(a()),o(()=>R(a().updated_at))),()=>o(V),()=>o(E),()=>o(n),()=>o(n)]),h(L,c),Q()}var Tt=U('

                No pools configured

                '),Et=U('');function qt(L,v){O(v,!1);const[a,g]=ut(),N=()=>pt(yt,"$eagerCache",a);let n=T(v,"pools",8),E=T(v,"entityType",8),V=T(v,"entityId",8,""),c=T(v,"entityName",8,"");const x=ct();function _(){x("addPool",{entityType:E(),entityId:V(),entityName:c()})}const I=[{key:"id",title:"ID",flexible:!0,cellComponent:_t,cellProps:{entityType:"pool",showId:!0,fontMono:!0}},{key:"image",title:"Image",flexible:!0,cellComponent:tt,cellProps:{field:"image",type:"code",showTitle:!0}},{key:"provider",title:"Provider",cellComponent:tt,cellProps:{field:"provider_name"}},{key:"status",title:"Status",cellComponent:bt,cellProps:{statusType:"enabled"}}],b={entityType:"pool",primaryText:{field:"id",isClickable:!0,href:"/pools/{id}",useId:!0,isMonospace:!0},secondaryText:{field:"entity_name",computedValue:s=>xt(s,N())},badges:[{type:"custom",value:s=>({variant:s.enabled?"success":"error",text:s.enabled?"Enabled":"Disabled"})}]};K();var i=Et(),f=e(i),k=e(f),u=e(k),D=e(u);t(u);var M=r(u,2);t(k);var w=r(k,2);{var S=s=>{var C=Tt(),l=r(e(C),4),j=e(l);t(l);var z=r(l,2),B=e(z);gt(B,{variant:"primary",size:"sm",$$events:{click:_},children:(A,y)=>{et();var W=mt("Add Pool");h(A,W)},$$slots:{default:!0}}),t(z),t(C),q(()=>p(j,`No pools configured for this ${E()??""}.`)),h(s,C)},$=s=>{ht(s,{get columns(){return I},get data(){return n()},loading:!1,error:"",searchTerm:"",showSearch:!1,showPagination:!1,currentPage:1,get perPage(){return m(n()),o(()=>n().length)},totalPages:1,get totalItems(){return m(n()),o(()=>n().length)},itemName:"pools",emptyTitle:"No pools configured",get emptyMessage(){return`No pools configured for this ${E()??""}.`},emptyIconType:"cog",get mobileCardConfig(){return b}})};G(w,s=>{m(n()),o(()=>n().length===0)?s(S):s($,!1)})}t(f),t(i),q(()=>{p(D,`Pools (${m(n()),o(()=>n().length)??""})`),at(M,"href",`${ft}/pools`)}),h(L,i),Q(),g()}var It=U('

                '),Bt=U('

                Events

                '),Nt=U('

                Events

                No events available

                ');function Ft(L,v){O(v,!1);let a=T(v,"events",8),g=T(v,"eventsContainer",12,void 0);K();var N=Y(),n=Z(N);{var E=c=>{var x=Bt(),_=e(x),I=r(e(_),2);kt(I,5,a,wt,(b,i)=>{var f=It(),k=e(f),u=e(k),D=e(u,!0);t(u);var M=r(u,2),w=e(M);{var S=l=>{H(l,{variant:"error",text:"Error"})},$=l=>{var j=Y(),z=Z(j);{var B=y=>{H(y,{variant:"warning",text:"Warning"})},A=y=>{H(y,{variant:"info",text:"Info"})};G(z,y=>{P(i),o(()=>(P(i).event_level||"info").toLowerCase()==="warning")?y(B):y(A,!1)},!0)}h(l,j)};G(w,l=>{P(i),o(()=>(P(i).event_level||"info").toLowerCase()==="error")?l(S):l($,!1)})}var s=r(w,2),C=e(s,!0);t(s),t(M),t(k),t(f),q(l=>{p(D,(P(i),o(()=>P(i).message))),p(C,l)},[()=>(m(R),P(i),o(()=>R(P(i).created_at)))]),h(b,f)}),t(I),Ct(I,b=>g(b),()=>g()),t(_),t(x),h(c,x)},V=c=>{var x=Nt();h(c,x)};G(n,c=>{m(a()),o(()=>a()&&a().length>0)?c(E):c(V,!1)})}h(L,N),Q()}export{Wt as E,qt as P,Ft as a}; diff --git a/webapp/assets/_app/immutable/entry/app.kAVAdeq9.js b/webapp/assets/_app/immutable/entry/app.OegKHTwb.js similarity index 50% rename from webapp/assets/_app/immutable/entry/app.kAVAdeq9.js rename to webapp/assets/_app/immutable/entry/app.OegKHTwb.js index f6063c71..3f3633bb 100644 --- a/webapp/assets/_app/immutable/entry/app.kAVAdeq9.js +++ b/webapp/assets/_app/immutable/entry/app.OegKHTwb.js @@ -1,2 +1,2 @@ -const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["../nodes/0.DINiyk_8.js","../chunks/DsnmJJEf.js","../chunks/B3Pzt0F_.js","../chunks/D8EpLgQ1.js","../chunks/5WA7h8uK.js","../chunks/CiE1LlKV.js","../chunks/C41YH50Q.js","../chunks/CTf6mQoE.js","../chunks/CoIRRsD9.js","../chunks/duD3WMbl.js","../chunks/u94nIB4-.js","../chunks/BEkVdVE1.js","../assets/0.BPrCR_r7.css","../nodes/1.DcR4nNsi.js","../nodes/2.CiT4lj0D.js","../chunks/wyaP0EDu.js","../chunks/C6k1Q4We.js","../chunks/D4Caz1gY.js","../chunks/qB7B8uiS.js","../nodes/3.BSFz0YHn.js","../chunks/CO4LUyTP.js","../chunks/CNMHKIIK.js","../chunks/BGVHQGl-.js","../chunks/C9DJVOi1.js","../chunks/CCSWcuVN.js","../chunks/CGpPw4EW.js","../chunks/BE4wujub.js","../chunks/ow_oMtSd.js","../nodes/4.XnVoh6ca.js","../nodes/5.rvsSG-AQ.js","../chunks/CclkODgu.js","../chunks/KQ2xQpA3.js","../chunks/B7ITzBt8.js","../nodes/6.CtGX0qgG.js","../chunks/BmGWMSQm.js","../chunks/BAg1iRPq.js","../chunks/DDhBTdDt.js","../chunks/CwqI2jFH.js","../chunks/DZblzgqm.js","../nodes/7.0w3i9VHx.js","../nodes/8.BiZNKYxk.js","../nodes/9.DpSfMRgo.js","../nodes/10.LnrIJgIa.js","../nodes/11.Bsn67lBa.js","../nodes/12.B-vC_cmu.js","../chunks/Dbd6PPbz.js","../nodes/13.Br7HzjXP.js","../chunks/DQP15tlf.js","../chunks/CLYUNKnN.js","../nodes/14.Cd0DOn96.js","../nodes/15.CkHQugXH.js","../nodes/16.B35VVkOd.js","../nodes/17.CCltcs-Z.js","../chunks/C89fcOde.js","../nodes/18.iVIhGVtu.js"])))=>i.map(i=>d[i]); -import{s as A,aL as z,g as f,aN as U,aO as G,aP as Q,ax as W,aQ as Y,m as F,p as H,an as J,ao as K,o as X,aR as b,aS as Z,f as C,b as L,k as $,c as g,d as tt,B as T,j as et,r as rt,aT as D,D as st,t as ot,v as at}from"../chunks/D8EpLgQ1.js";import"../chunks/DsnmJJEf.js";import{p as I,i as V}from"../chunks/5WA7h8uK.js";import{c as w}from"../chunks/CCSWcuVN.js";import{b as k}from"../chunks/BAg1iRPq.js";function nt(c){return class extends it{constructor(t){super({component:c,...t})}}}class it{#e;#t;constructor(t){var a=new Map,u=(r,e)=>{var s=F(e,!1,!1);return a.set(r,s),s};const l=new Proxy({...t.props||{},$$events:{}},{get(r,e){return f(a.get(e)??u(e,Reflect.get(r,e)))},has(r,e){return e===z?!0:(f(a.get(e)??u(e,Reflect.get(r,e))),Reflect.has(r,e))},set(r,e,s){return 
A(a.get(e)??u(e,s),s),Reflect.set(r,e,s)}});this.#t=(t.hydrate?U:G)(t.component,{target:t.target,anchor:t.anchor,props:l,context:t.context,intro:t.intro??!1,recover:t.recover}),(!t?.props?.$$host||t.sync===!1)&&Q(),this.#e=l.$$events;for(const r of Object.keys(this.#t))r==="$set"||r==="$destroy"||r==="$on"||W(this,r,{get(){return this.#t[r]},set(e){this.#t[r]=e},enumerable:!0});this.#t.$set=r=>{Object.assign(l,r)},this.#t.$destroy=()=>{Y(this.#t)}}$set(t){this.#t.$set(t)}$on(t,a){this.#e[t]=this.#e[t]||[];const u=(...l)=>a.call(this,...l);return this.#e[t].push(u),()=>{this.#e[t]=this.#e[t].filter(l=>l!==u)}}$destroy(){this.#t.$destroy()}}const ct="modulepreload",ut=function(c,t){return new URL(c,t).href},j={},o=function(t,a,u){let l=Promise.resolve();if(a&&a.length>0){let O=function(i){return Promise.all(i.map(d=>Promise.resolve(d).then(v=>({status:"fulfilled",value:v}),v=>({status:"rejected",reason:v}))))};const e=document.getElementsByTagName("link"),s=document.querySelector("meta[property=csp-nonce]"),y=s?.nonce||s?.getAttribute("nonce");l=O(a.map(i=>{if(i=ut(i,u),i in j)return;j[i]=!0;const d=i.endsWith(".css"),v=d?'[rel="stylesheet"]':"";if(!!u)for(let n=e.length-1;n>=0;n--){const _=e[n];if(_.href===i&&(!d||_.rel==="stylesheet"))return}else if(document.querySelector(`link[href="${i}"]${v}`))return;const m=document.createElement("link");if(m.rel=d?"stylesheet":ct,d||(m.as="script"),m.crossOrigin="",m.href=i,y&&m.setAttribute("nonce",y),document.head.appendChild(m),d)return new Promise((n,_)=>{m.addEventListener("load",n),m.addEventListener("error",()=>_(new Error(`Unable to preload CSS for ${i}`)))})}))}function r(e){const s=new Event("vite:preloadError",{cancelable:!0});if(s.payload=e,window.dispatchEvent(s),!s.defaultPrevented)throw e}return l.then(e=>{for(const s of e||[])s.status==="rejected"&&r(s.reason);return t().catch(r)})},Rt={};var lt=C('
                '),_t=C(" ",1);function mt(c,t){H(t,!0);let a=I(t,"components",23,()=>[]),u=I(t,"data_0",3,null),l=I(t,"data_1",3,null);J(()=>t.stores.page.set(t.page)),K(()=>{t.stores,t.page,t.constructors,a(),t.form,u(),l(),t.stores.page.notify()});let r=b(!1),e=b(!1),s=b(null);X(()=>{const n=t.stores.page.subscribe(()=>{f(r)&&(A(e,!0),Z().then(()=>{A(s,document.title||"untitled page",!0)}))});return A(r,!0),n});const y=D(()=>t.constructors[1]);var O=_t(),i=L(O);{var d=n=>{const _=D(()=>t.constructors[0]);var h=T(),P=L(h);w(P,()=>f(_),(E,p)=>{k(p(E,{get data(){return u()},get form(){return t.form},get params(){return t.page.params},children:(R,vt)=>{var S=T(),B=L(S);w(B,()=>f(y),(N,M)=>{k(M(N,{get data(){return l()},get form(){return t.form},get params(){return t.page.params}}),q=>a()[1]=q,()=>a()?.[1])}),g(R,S)},$$slots:{default:!0}}),R=>a()[0]=R,()=>a()?.[0])}),g(n,h)},v=n=>{const _=D(()=>t.constructors[0]);var h=T(),P=L(h);w(P,()=>f(_),(E,p)=>{k(p(E,{get data(){return u()},get form(){return t.form},get params(){return t.page.params}}),R=>a()[0]=R,()=>a()?.[0])}),g(n,h)};V(i,n=>{t.constructors[1]?n(d):n(v,!1)})}var x=$(i,2);{var m=n=>{var _=lt(),h=et(_);{var P=E=>{var p=st();ot(()=>at(p,f(s))),g(E,p)};V(h,E=>{f(e)&&E(P)})}rt(_),g(n,_)};V(x,n=>{f(r)&&n(m)})}g(c,O),tt()}const yt=nt(mt),Ot=[()=>o(()=>import("../nodes/0.DINiyk_8.js"),__vite__mapDeps([0,1,2,3,4,5,6,7,8,9,10,11,12]),import.meta.url),()=>o(()=>import("../nodes/1.DcR4nNsi.js"),__vite__mapDeps([13,1,2,3,7,8]),import.meta.url),()=>o(()=>import("../nodes/2.CiT4lj0D.js"),__vite__mapDeps([14,1,2,3,4,10,5,8,15,16,17,18,11]),import.meta.url),()=>o(()=>import("../nodes/3.BSFz0YHn.js"),__vite__mapDeps([19,1,2,3,4,10,5,16,17,20,21,22,23,24,8,15,11,25,26,27]),import.meta.url),()=>o(()=>import("../nodes/4.XnVoh6ca.js"),__vite__mapDeps([28,1,2,3,4,5,16,17,20,21,22,23,10,24,8,15,11,25]),import.meta.url),()=>o(()=>import("../nodes/5.rvsSG-AQ.js"),__vite__mapDeps([29,1,2,3,4,5,8,20,10,16,17,18,15,30,31,11,22,23,24,32,25,26,27]),import.meta.url),()=>o(()=>import("../nodes/6.CtGX0qgG.js"),__vite__mapDeps([33,1,2,3,4,5,6,7,8,30,10,16,17,18,31,34,22,15,23,24,32,26,27,35,36,11,37,38]),import.meta.url),()=>o(()=>import("../nodes/7.0w3i9VHx.js"),__vite__mapDeps([39,1,2,3,4,5,16,17,7,8,9,11]),import.meta.url),()=>o(()=>import("../nodes/8.BiZNKYxk.js"),__vite__mapDeps([40,1,2,3,4,5,31,18,20,10,11,23,24,16,8,22,32,26,27]),import.meta.url),()=>o(()=>import("../nodes/9.DpSfMRgo.js"),__vite__mapDeps([41,1,2,3,4,10,5,35,6,7,8,31,18,27,22]),import.meta.url),()=>o(()=>import("../nodes/10.LnrIJgIa.js"),__vite__mapDeps([42,1,2,3,4,5,16,17,7,8,9]),import.meta.url),()=>o(()=>import("../nodes/11.Bsn67lBa.js"),__vite__mapDeps([43,1,2,3,4,5,8,10,16,17,18,21,22,15,30,31,20,11,23,24,32,25,26,27]),import.meta.url),()=>o(()=>import("../nodes/12.B-vC_cmu.js"),__vite__mapDeps([44,1,2,3,4,5,6,7,8,30,10,16,17,18,31,34,22,15,23,24,32,26,27,35,36,45,11,37,38]),import.meta.url),()=>o(()=>import("../nodes/13.Br7HzjXP.js"),__vite__mapDeps([46,1,2,3,4,5,8,20,37,10,16,17,18,38,47,15,31,11,22,23,24,32,25,26,27,48]),import.meta.url),()=>o(()=>import("../nodes/14.Cd0DOn96.js"),__vite__mapDeps([49,1,2,3,4,10,5,6,7,8,47,16,17,18,38,15,31,36,23,24,22,32,26,27,11]),import.meta.url),()=>o(()=>import("../nodes/15.CkHQugXH.js"),__vite__mapDeps([50,1,2,3,4,5,10,16,17,18,21,22,15,30,31,20,11,23,24,8,32,25,26,27]),import.meta.url),()=>o(()=>import("../nodes/16.B35VVkOd.js"),__vite__mapDeps([51,1,2,3,4,5,6,7,8,30,10,16,17,18,31,34,22,15,23,24,32,26,27,35,36,45,11,37,38]),import.meta.url),()=>o(
()=>import("../nodes/17.CCltcs-Z.js"),__vite__mapDeps([52,1,2,3,4,5,8,20,10,16,17,18,38,53,31,15,11,22,23,24,32,25,26,27,48]),import.meta.url),()=>o(()=>import("../nodes/18.iVIhGVtu.js"),__vite__mapDeps([54,1,2,3,4,5,6,7,8,53,16,17,18,38,31,36,23,10,24,22,32,26,27,11]),import.meta.url)],Lt=[],At={"/":[2],"/credentials":[3],"/endpoints":[4],"/enterprises":[5],"/enterprises/[id]":[6],"/init":[7],"/instances":[8],"/instances/[id]":[9],"/login":[10],"/organizations":[11],"/organizations/[id]":[12],"/pools":[13],"/pools/[id]":[14],"/repositories":[15],"/repositories/[id]":[16],"/scalesets":[17],"/scalesets/[id]":[18]},dt={handleError:({error:c})=>{console.error(c)},reroute:()=>{},transport:{}},ft=Object.fromEntries(Object.entries(dt.transport).map(([c,t])=>[c,t.decode])),bt=!1,Tt=(c,t)=>ft[c](t);export{Tt as decode,ft as decoders,At as dictionary,bt as hash,dt as hooks,Rt as matchers,Ot as nodes,yt as root,Lt as server_loads}; +const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["../nodes/0.g860C_Ot.js","../chunks/DsnmJJEf.js","../chunks/B3Pzt0F_.js","../chunks/D8EpLgQ1.js","../chunks/5WA7h8uK.js","../chunks/CiE1LlKV.js","../chunks/BE8f1Riw.js","../chunks/DXCC0cSN.js","../chunks/CRhkqW2i.js","../chunks/duD3WMbl.js","../chunks/u94nIB4-.js","../chunks/BEkVdVE1.js","../assets/0.BPrCR_r7.css","../nodes/1.BjWDFEyD.js","../nodes/2.1DFwbmOU.js","../chunks/wyaP0EDu.js","../chunks/C6k1Q4We.js","../chunks/D4Caz1gY.js","../chunks/qB7B8uiS.js","../chunks/BZiHL9L3.js","../nodes/3.BWxN3TuB.js","../chunks/CO4LUyTP.js","../chunks/CNMHKIIK.js","../chunks/BGVHQGl-.js","../chunks/BrNfsPe8.js","../chunks/CCSWcuVN.js","../chunks/CGpPw4EW.js","../chunks/MCv1Wq2q.js","../chunks/ow_oMtSd.js","../nodes/4.D1IF4qSs.js","../nodes/5.CeMzA7DH.js","../chunks/CIBm3n2u.js","../chunks/KQ2xQpA3.js","../chunks/D4PaGKsV.js","../nodes/6.BPDnwpl3.js","../chunks/OpktHEmj.js","../chunks/BAg1iRPq.js","../chunks/BEoJgOul.js","../chunks/CRD55Dyg.js","../chunks/DZblzgqm.js","../nodes/7.CaVS6POQ.js","../nodes/8.W6llQu20.js","../nodes/9.DfrxaqP7.js","../nodes/10.Ci2MePhm.js","../nodes/11.BX_bMXWi.js","../nodes/12.C0lS_ubI.js","../chunks/BsqC4UA1.js","../nodes/13.CEJ1u9Ql.js","../chunks/Bgb-frqW.js","../chunks/BzlxTz7Q.js","../nodes/14.BJHnbtAi.js","../nodes/15.CqYhwqAI.js","../nodes/16.BVViOnXd.js","../nodes/17.DLt70sQQ.js","../chunks/2p_hWkLJ.js","../nodes/18.eu91cRrS.js"])))=>i.map(i=>d[i]); +import{s as A,aL as z,g as f,aN as U,aO as G,aP as Q,ax as W,aQ as Y,m as F,p as H,an as J,ao as K,o as X,aR as b,aS as Z,f as C,b as L,k as $,c as g,d as tt,B as T,j as et,r as rt,aT as D,D as st,t as ot,v as at}from"../chunks/D8EpLgQ1.js";import"../chunks/DsnmJJEf.js";import{p as I,i as V}from"../chunks/5WA7h8uK.js";import{c as w}from"../chunks/CCSWcuVN.js";import{b as k}from"../chunks/BAg1iRPq.js";function nt(c){return class extends it{constructor(t){super({component:c,...t})}}}class it{#e;#t;constructor(t){var a=new Map,u=(r,e)=>{var s=F(e,!1,!1);return a.set(r,s),s};const l=new Proxy({...t.props||{},$$events:{}},{get(r,e){return f(a.get(e)??u(e,Reflect.get(r,e)))},has(r,e){return e===z?!0:(f(a.get(e)??u(e,Reflect.get(r,e))),Reflect.has(r,e))},set(r,e,s){return A(a.get(e)??u(e,s),s),Reflect.set(r,e,s)}});this.#t=(t.hydrate?U:G)(t.component,{target:t.target,anchor:t.anchor,props:l,context:t.context,intro:t.intro??!1,recover:t.recover}),(!t?.props?.$$host||t.sync===!1)&&Q(),this.#e=l.$$events;for(const r of Object.keys(this.#t))r==="$set"||r==="$destroy"||r==="$on"||W(this,r,{get(){return 
this.#t[r]},set(e){this.#t[r]=e},enumerable:!0});this.#t.$set=r=>{Object.assign(l,r)},this.#t.$destroy=()=>{Y(this.#t)}}$set(t){this.#t.$set(t)}$on(t,a){this.#e[t]=this.#e[t]||[];const u=(...l)=>a.call(this,...l);return this.#e[t].push(u),()=>{this.#e[t]=this.#e[t].filter(l=>l!==u)}}$destroy(){this.#t.$destroy()}}const ct="modulepreload",ut=function(c,t){return new URL(c,t).href},j={},o=function(t,a,u){let l=Promise.resolve();if(a&&a.length>0){let O=function(i){return Promise.all(i.map(d=>Promise.resolve(d).then(v=>({status:"fulfilled",value:v}),v=>({status:"rejected",reason:v}))))};const e=document.getElementsByTagName("link"),s=document.querySelector("meta[property=csp-nonce]"),y=s?.nonce||s?.getAttribute("nonce");l=O(a.map(i=>{if(i=ut(i,u),i in j)return;j[i]=!0;const d=i.endsWith(".css"),v=d?'[rel="stylesheet"]':"";if(!!u)for(let n=e.length-1;n>=0;n--){const _=e[n];if(_.href===i&&(!d||_.rel==="stylesheet"))return}else if(document.querySelector(`link[href="${i}"]${v}`))return;const m=document.createElement("link");if(m.rel=d?"stylesheet":ct,d||(m.as="script"),m.crossOrigin="",m.href=i,y&&m.setAttribute("nonce",y),document.head.appendChild(m),d)return new Promise((n,_)=>{m.addEventListener("load",n),m.addEventListener("error",()=>_(new Error(`Unable to preload CSS for ${i}`)))})}))}function r(e){const s=new Event("vite:preloadError",{cancelable:!0});if(s.payload=e,window.dispatchEvent(s),!s.defaultPrevented)throw e}return l.then(e=>{for(const s of e||[])s.status==="rejected"&&r(s.reason);return t().catch(r)})},Rt={};var lt=C('
                '),_t=C(" ",1);function mt(c,t){H(t,!0);let a=I(t,"components",23,()=>[]),u=I(t,"data_0",3,null),l=I(t,"data_1",3,null);J(()=>t.stores.page.set(t.page)),K(()=>{t.stores,t.page,t.constructors,a(),t.form,u(),l(),t.stores.page.notify()});let r=b(!1),e=b(!1),s=b(null);X(()=>{const n=t.stores.page.subscribe(()=>{f(r)&&(A(e,!0),Z().then(()=>{A(s,document.title||"untitled page",!0)}))});return A(r,!0),n});const y=D(()=>t.constructors[1]);var O=_t(),i=L(O);{var d=n=>{const _=D(()=>t.constructors[0]);var h=T(),P=L(h);w(P,()=>f(_),(E,p)=>{k(p(E,{get data(){return u()},get form(){return t.form},get params(){return t.page.params},children:(R,vt)=>{var S=T(),B=L(S);w(B,()=>f(y),(N,M)=>{k(M(N,{get data(){return l()},get form(){return t.form},get params(){return t.page.params}}),q=>a()[1]=q,()=>a()?.[1])}),g(R,S)},$$slots:{default:!0}}),R=>a()[0]=R,()=>a()?.[0])}),g(n,h)},v=n=>{const _=D(()=>t.constructors[0]);var h=T(),P=L(h);w(P,()=>f(_),(E,p)=>{k(p(E,{get data(){return u()},get form(){return t.form},get params(){return t.page.params}}),R=>a()[0]=R,()=>a()?.[0])}),g(n,h)};V(i,n=>{t.constructors[1]?n(d):n(v,!1)})}var x=$(i,2);{var m=n=>{var _=lt(),h=et(_);{var P=E=>{var p=st();ot(()=>at(p,f(s))),g(E,p)};V(h,E=>{f(e)&&E(P)})}rt(_),g(n,_)};V(x,n=>{f(r)&&n(m)})}g(c,O),tt()}const yt=nt(mt),Ot=[()=>o(()=>import("../nodes/0.g860C_Ot.js"),__vite__mapDeps([0,1,2,3,4,5,6,7,8,9,10,11,12]),import.meta.url),()=>o(()=>import("../nodes/1.BjWDFEyD.js"),__vite__mapDeps([13,1,2,3,7,8]),import.meta.url),()=>o(()=>import("../nodes/2.1DFwbmOU.js"),__vite__mapDeps([14,1,2,3,4,10,5,8,15,16,17,18,11,19]),import.meta.url),()=>o(()=>import("../nodes/3.BWxN3TuB.js"),__vite__mapDeps([20,1,2,3,4,10,5,16,17,21,22,23,24,25,8,15,11,19,26,27,28]),import.meta.url),()=>o(()=>import("../nodes/4.D1IF4qSs.js"),__vite__mapDeps([29,1,2,3,4,5,16,17,21,22,23,24,10,25,8,15,11,19,26]),import.meta.url),()=>o(()=>import("../nodes/5.CeMzA7DH.js"),__vite__mapDeps([30,1,2,3,4,5,8,21,10,16,17,18,19,15,31,32,11,23,24,25,33,26,27,28]),import.meta.url),()=>o(()=>import("../nodes/6.BPDnwpl3.js"),__vite__mapDeps([34,1,2,3,4,5,6,7,8,31,10,16,17,19,18,32,35,23,15,24,25,33,27,28,36,37,11,38,39]),import.meta.url),()=>o(()=>import("../nodes/7.CaVS6POQ.js"),__vite__mapDeps([40,1,2,3,4,5,16,17,7,8,9,11,19]),import.meta.url),()=>o(()=>import("../nodes/8.W6llQu20.js"),__vite__mapDeps([41,1,2,3,4,5,32,18,21,10,11,24,25,16,8,23,19,33,27,28]),import.meta.url),()=>o(()=>import("../nodes/9.DfrxaqP7.js"),__vite__mapDeps([42,1,2,3,4,10,5,36,6,7,8,32,18,28,23,19]),import.meta.url),()=>o(()=>import("../nodes/10.Ci2MePhm.js"),__vite__mapDeps([43,1,2,3,4,5,16,17,7,8,9,19]),import.meta.url),()=>o(()=>import("../nodes/11.BX_bMXWi.js"),__vite__mapDeps([44,1,2,3,4,5,8,10,16,17,18,22,23,19,15,31,32,21,11,24,25,33,26,27,28]),import.meta.url),()=>o(()=>import("../nodes/12.C0lS_ubI.js"),__vite__mapDeps([45,1,2,3,4,5,6,7,8,31,10,16,17,19,18,32,35,23,15,24,25,33,27,28,36,37,46,11,38,39]),import.meta.url),()=>o(()=>import("../nodes/13.CEJ1u9Ql.js"),__vite__mapDeps([47,1,2,3,4,5,21,38,10,16,17,18,19,39,48,15,32,11,23,24,25,8,33,26,27,28,49]),import.meta.url),()=>o(()=>import("../nodes/14.BJHnbtAi.js"),__vite__mapDeps([50,1,2,3,4,10,5,6,7,8,48,16,17,18,39,19,15,32,37,24,25,23,33,27,28,11]),import.meta.url),()=>o(()=>import("../nodes/15.CqYhwqAI.js"),__vite__mapDeps([51,1,2,3,4,5,10,16,17,18,19,22,23,15,31,32,21,11,24,25,8,33,26,27,28]),import.meta.url),()=>o(()=>import("../nodes/16.BVViOnXd.js"),__vite__mapDeps([52,1,2,3,4,5,6,7,8,31,10,16,17,19,18,32,35,23,15,24,25,33,27,2
8,36,37,46,11,38,39]),import.meta.url),()=>o(()=>import("../nodes/17.DLt70sQQ.js"),__vite__mapDeps([53,1,2,3,4,5,8,21,10,16,17,18,39,19,54,32,15,11,23,24,25,33,26,27,28,49]),import.meta.url),()=>o(()=>import("../nodes/18.eu91cRrS.js"),__vite__mapDeps([55,1,2,3,4,5,6,7,8,54,16,17,18,19,39,32,37,24,10,25,23,33,27,28,11]),import.meta.url)],Lt=[],At={"/":[2],"/credentials":[3],"/endpoints":[4],"/enterprises":[5],"/enterprises/[id]":[6],"/init":[7],"/instances":[8],"/instances/[id]":[9],"/login":[10],"/organizations":[11],"/organizations/[id]":[12],"/pools":[13],"/pools/[id]":[14],"/repositories":[15],"/repositories/[id]":[16],"/scalesets":[17],"/scalesets/[id]":[18]},dt={handleError:({error:c})=>{console.error(c)},reroute:()=>{},transport:{}},ft=Object.fromEntries(Object.entries(dt.transport).map(([c,t])=>[c,t.decode])),bt=!1,Tt=(c,t)=>ft[c](t);export{Tt as decode,ft as decoders,At as dictionary,bt as hash,dt as hooks,Rt as matchers,Ot as nodes,yt as root,Lt as server_loads}; diff --git a/webapp/assets/_app/immutable/entry/start.CI0Cdear.js b/webapp/assets/_app/immutable/entry/start.CI0Cdear.js deleted file mode 100644 index b4e2a9f4..00000000 --- a/webapp/assets/_app/immutable/entry/start.CI0Cdear.js +++ /dev/null @@ -1 +0,0 @@ -import{l as o,a as r}from"../chunks/CTf6mQoE.js";export{o as load_css,r as start}; diff --git a/webapp/assets/_app/immutable/entry/start.S-sEy6br.js b/webapp/assets/_app/immutable/entry/start.S-sEy6br.js new file mode 100644 index 00000000..b348b111 --- /dev/null +++ b/webapp/assets/_app/immutable/entry/start.S-sEy6br.js @@ -0,0 +1 @@ +import{l as o,a as r}from"../chunks/DXCC0cSN.js";export{o as load_css,r as start}; diff --git a/webapp/assets/_app/immutable/nodes/0.DINiyk_8.js b/webapp/assets/_app/immutable/nodes/0.g860C_Ot.js similarity index 99% rename from webapp/assets/_app/immutable/nodes/0.DINiyk_8.js rename to webapp/assets/_app/immutable/nodes/0.g860C_Ot.js index d50c7e62..89f2370a 100644 --- a/webapp/assets/_app/immutable/nodes/0.DINiyk_8.js +++ b/webapp/assets/_app/immutable/nodes/0.g860C_Ot.js @@ -1,4 +1,4 @@ -import"../chunks/DsnmJJEf.js";import{i as He}from"../chunks/B3Pzt0F_.js";import{p as Se,o as De,s as h,m as F,g as e,l as X,a as Le,f as c,b as E,j as o,k as n,r as t,u as i,t as P,v as ge,c as s,B as U,C as Y,e as I,d as Be,q as cr,h as gr,$ as hr}from"../chunks/D8EpLgQ1.js";import{a as me,i as w,s as Ae}from"../chunks/5WA7h8uK.js";import{c as _,s as Q,h as ur,B as fr,d as Ge}from"../chunks/CiE1LlKV.js";import{p as qe}from"../chunks/C41YH50Q.js";import{g as fe}from"../chunks/CTf6mQoE.js";import{b as l}from"../chunks/CoIRRsD9.js";import{b as Ne,a as mr}from"../chunks/duD3WMbl.js";import{e as ne,i as ce,w as xr}from"../chunks/u94nIB4-.js";import{t as Oe}from"../chunks/BEkVdVE1.js";const pr=async({url:Z})=>({url:Z.pathname}),kr=!1,br=!1,va=Object.freeze(Object.defineProperty({__proto__:null,load:pr,prerender:kr,ssr:br},Symbol.toStringTag,{value:"Module"}));var yr=c('
                Live Updates
                '),_r=c('
                Connecting
                '),wr=c('
                Updates Unavailable
                '),Mr=c('
                Manual Refresh
                '),$r=Y(''),jr=Y(''),zr=Y(''),Cr=Y(''),Hr=c(' '),Sr=c(' '),Lr=c('
                '),Br=c('
                '),Ar=c('
                '),Vr=c('
                '),Ir=Y(''),Rr=Y(''),Tr=Y(''),Pr=Y(''),Er=c(' '),Gr=c(' '),Or=c('
                '),Dr=c('
                '),qr=c('
                GARM GARM

                GARM

                ',1);function Nr(Z,ee){Se(ee,!1);const[re,he]=Ae(),M=()=>me(xr,"$websocketStore",re),m=()=>me(qe,"$page",re),u=F(),y=F();let $=F(!1),G=F(!1),f=F(!1);De(()=>{j(),window.matchMedia("(prefers-color-scheme: dark)").addEventListener("change",v)});function j(){const a=localStorage.getItem("theme");a==="dark"?h(f,!0):a==="light"?h(f,!1):h(f,window.matchMedia("(prefers-color-scheme: dark)").matches),p()}function v(a){(!localStorage.getItem("theme")||localStorage.getItem("theme")==="system")&&(h(f,a.matches),p())}function O(){h(f,!e(f)),localStorage.setItem("theme",e(f)?"dark":"light"),p()}function p(){e(f)?document.documentElement.classList.add("dark"):document.documentElement.classList.remove("dark")}function z(){Ne.logout(),h(G,!1)}const le=[{href:`${l}/`,label:"Dashboard",icon:["M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2H5a2 2 0 00-2-2z","M8 5a2 2 0 012-2h4a2 2 0 012 2v2H8V5z"]},{href:`${l}/repositories`,label:"Repositories",icon:["M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2H5a2 2 0 00-2-2z","M8 5a2 2 0 012-2h4a2 2 0 012 2v2H8V5z"]},{href:`${l}/organizations`,label:"Organizations",icon:"M19 21V5a2 2 0 00-2-2H7a2 2 0 00-2 2v16m14 0h2m-2 0h-5m-9 0H3m2 0h5M9 7h1m-1 4h1m4-4h1m-1 4h1m-5 10v-5a1 1 0 011-1h2a1 1 0 011 1v5m-4 0h4"},{href:`${l}/enterprises`,label:"Enterprises",icon:"M19 21V5a2 2 0 00-2-2H7a2 2 0 00-2 2v16m14 0h2m-2 0h-5m-9 0H3m2 0h5M9 7h1m-1 4h1m4-4h1m-1 4h1m-5 10v-5a1 1 0 011-1h2a1 1 0 011 1v5m-4 0h4"},{href:`${l}/pools`,label:"Pools",icon:"M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"},{href:`${l}/scalesets`,label:"Scale Sets",icon:"M4 7v10c0 2.21 3.582 4 8 4s8-1.79 8-4V7M4 7c0 2.21 3.582 4 8 4s8-1.79 8-4M4 7c0-2.21 3.582-4 8-4s8 1.79 8 4m0 5c0 2.21-3.582 4-8 4s-8-1.79-8-4"},{href:`${l}/instances`,label:"Runners",icon:"M9 3v2m6-2v2M9 19v2m6-2v2M5 9H3m2 6H3m18-6h-2m2 6h-2M7 19h10a2 2 0 002-2V7a2 2 0 00-2-2H7a2 2 0 00-2 2v10a2 2 0 002 2zM9 9h6v6H9V9z"}],J=[{href:`${l}/credentials`,label:"Credentials",icon:"M15 7a2 2 0 012 2m4 0a6 6 0 01-7.743 5.743L11 17H9v2H7v2H4a1 1 0 01-1-1v-2.586a1 1 0 01.293-.707l5.964-5.964A6 6 0 1721 9z"},{href:`${l}/endpoints`,label:"Endpoints",icon:"M13 10V3L4 14h7v7l9-11h-7z"}];X(()=>M(),()=>{h(u,M())}),X(()=>m(),()=>{m().url.pathname&&h($,!1)}),X(()=>m(),()=>{h(y,m().url.pathname)}),Le(),He();var D=qr(),V=E(D),q=o(V),ae=o(q),K=o(ae),te=o(K),b=o(te),C=n(b,2);t(te),t(K);var N=n(K,2),oe=o(N),de=o(oe),xe=o(de);{var ke=a=>{var r=yr();s(a,r)},Ue=a=>{var r=U(),g=E(r);{var H=x=>{var S=_r();s(x,S)},R=x=>{var S=U(),W=E(S);{var L=k=>{var T=wr();s(k,T)},B=k=>{var T=Mr();s(k,T)};w(W,k=>{e(u),i(()=>e(u).error)?k(L):k(B,!1)},!0)}s(x,S)};w(g,x=>{e(u),i(()=>e(u).connecting)?x(H):x(R,!1)},!0)}s(a,r)};w(xe,a=>{e(u),i(()=>e(u).connected)?a(ke):a(Ue,!1)})}t(de);var pe=n(de,2),Qe=o(pe);{var Fe=a=>{var r=$r();s(a,r)},Je=a=>{var r=jr();s(a,r)};w(Qe,a=>{e(f)?a(Fe):a(Je,!1)})}t(pe),t(oe),t(N),t(ae);var Ve=n(ae,2),Ie=o(Ve);ne(Ie,1,()=>le,ce,(a,r)=>{var g=Hr(),H=o(g),R=o(H);{var x=L=>{var B=U(),k=E(B);ne(k,1,()=>(e(r),i(()=>e(r).icon)),ce,(T,se)=>{var d=zr();P(()=>_(d,"d",e(se))),s(T,d)}),s(L,B)},S=L=>{var B=Cr();P(()=>_(B,"d",(e(r),i(()=>e(r).icon)))),s(L,B)};w(R,L=>{e(r),i(()=>Array.isArray(e(r).icon))?L(x):L(S,!1)})}t(H);var W=n(H);t(g),P(()=>{_(g,"href",(e(r),i(()=>e(r).href))),Q(g,1,`group flex items-center px-2 py-2 text-sm font-medium rounded-md transition-colors duration-200 +import"../chunks/DsnmJJEf.js";import{i as He}from"../chunks/B3Pzt0F_.js";import{p as Se,o as De,s as h,m 
as F,g as e,l as X,a as Le,f as c,b as E,j as o,k as n,r as t,u as i,t as P,v as ge,c as s,B as U,C as Y,e as I,d as Be,q as cr,h as gr,$ as hr}from"../chunks/D8EpLgQ1.js";import{a as me,i as w,s as Ae}from"../chunks/5WA7h8uK.js";import{c as _,s as Q,h as ur,B as fr,d as Ge}from"../chunks/CiE1LlKV.js";import{p as qe}from"../chunks/BE8f1Riw.js";import{g as fe}from"../chunks/DXCC0cSN.js";import{b as l}from"../chunks/CRhkqW2i.js";import{b as Ne,a as mr}from"../chunks/duD3WMbl.js";import{e as ne,i as ce,w as xr}from"../chunks/u94nIB4-.js";import{t as Oe}from"../chunks/BEkVdVE1.js";const pr=async({url:Z})=>({url:Z.pathname}),kr=!1,br=!1,va=Object.freeze(Object.defineProperty({__proto__:null,load:pr,prerender:kr,ssr:br},Symbol.toStringTag,{value:"Module"}));var yr=c('
                Live Updates
                '),_r=c('
                Connecting
                '),wr=c('
                Updates Unavailable
                '),Mr=c('
                Manual Refresh
                '),$r=Y(''),jr=Y(''),zr=Y(''),Cr=Y(''),Hr=c(' '),Sr=c(' '),Lr=c('
                '),Br=c('
                '),Ar=c('
                '),Vr=c('
                '),Ir=Y(''),Rr=Y(''),Tr=Y(''),Pr=Y(''),Er=c(' '),Gr=c(' '),Or=c('
                '),Dr=c('
                '),qr=c('
                GARM GARM

                GARM

                ',1);function Nr(Z,ee){Se(ee,!1);const[re,he]=Ae(),M=()=>me(xr,"$websocketStore",re),m=()=>me(qe,"$page",re),u=F(),y=F();let $=F(!1),G=F(!1),f=F(!1);De(()=>{j(),window.matchMedia("(prefers-color-scheme: dark)").addEventListener("change",v)});function j(){const a=localStorage.getItem("theme");a==="dark"?h(f,!0):a==="light"?h(f,!1):h(f,window.matchMedia("(prefers-color-scheme: dark)").matches),p()}function v(a){(!localStorage.getItem("theme")||localStorage.getItem("theme")==="system")&&(h(f,a.matches),p())}function O(){h(f,!e(f)),localStorage.setItem("theme",e(f)?"dark":"light"),p()}function p(){e(f)?document.documentElement.classList.add("dark"):document.documentElement.classList.remove("dark")}function z(){Ne.logout(),h(G,!1)}const le=[{href:`${l}/`,label:"Dashboard",icon:["M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2H5a2 2 0 00-2-2z","M8 5a2 2 0 012-2h4a2 2 0 012 2v2H8V5z"]},{href:`${l}/repositories`,label:"Repositories",icon:["M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2H5a2 2 0 00-2-2z","M8 5a2 2 0 012-2h4a2 2 0 012 2v2H8V5z"]},{href:`${l}/organizations`,label:"Organizations",icon:"M19 21V5a2 2 0 00-2-2H7a2 2 0 00-2 2v16m14 0h2m-2 0h-5m-9 0H3m2 0h5M9 7h1m-1 4h1m4-4h1m-1 4h1m-5 10v-5a1 1 0 011-1h2a1 1 0 011 1v5m-4 0h4"},{href:`${l}/enterprises`,label:"Enterprises",icon:"M19 21V5a2 2 0 00-2-2H7a2 2 0 00-2 2v16m14 0h2m-2 0h-5m-9 0H3m2 0h5M9 7h1m-1 4h1m4-4h1m-1 4h1m-5 10v-5a1 1 0 011-1h2a1 1 0 011 1v5m-4 0h4"},{href:`${l}/pools`,label:"Pools",icon:"M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"},{href:`${l}/scalesets`,label:"Scale Sets",icon:"M4 7v10c0 2.21 3.582 4 8 4s8-1.79 8-4V7M4 7c0 2.21 3.582 4 8 4s8-1.79 8-4M4 7c0-2.21 3.582-4 8-4s8 1.79 8 4m0 5c0 2.21-3.582 4-8 4s-8-1.79-8-4"},{href:`${l}/instances`,label:"Runners",icon:"M9 3v2m6-2v2M9 19v2m6-2v2M5 9H3m2 6H3m18-6h-2m2 6h-2M7 19h10a2 2 0 002-2V7a2 2 0 00-2-2H7a2 2 0 00-2 2v10a2 2 0 002 2zM9 9h6v6H9V9z"}],J=[{href:`${l}/credentials`,label:"Credentials",icon:"M15 7a2 2 0 012 2m4 0a6 6 0 01-7.743 5.743L11 17H9v2H7v2H4a1 1 0 01-1-1v-2.586a1 1 0 01.293-.707l5.964-5.964A6 6 0 1721 9z"},{href:`${l}/endpoints`,label:"Endpoints",icon:"M13 10V3L4 14h7v7l9-11h-7z"}];X(()=>M(),()=>{h(u,M())}),X(()=>m(),()=>{m().url.pathname&&h($,!1)}),X(()=>m(),()=>{h(y,m().url.pathname)}),Le(),He();var D=qr(),V=E(D),q=o(V),ae=o(q),K=o(ae),te=o(K),b=o(te),C=n(b,2);t(te),t(K);var N=n(K,2),oe=o(N),de=o(oe),xe=o(de);{var ke=a=>{var r=yr();s(a,r)},Ue=a=>{var r=U(),g=E(r);{var H=x=>{var S=_r();s(x,S)},R=x=>{var S=U(),W=E(S);{var L=k=>{var T=wr();s(k,T)},B=k=>{var T=Mr();s(k,T)};w(W,k=>{e(u),i(()=>e(u).error)?k(L):k(B,!1)},!0)}s(x,S)};w(g,x=>{e(u),i(()=>e(u).connecting)?x(H):x(R,!1)},!0)}s(a,r)};w(xe,a=>{e(u),i(()=>e(u).connected)?a(ke):a(Ue,!1)})}t(de);var pe=n(de,2),Qe=o(pe);{var Fe=a=>{var r=$r();s(a,r)},Je=a=>{var r=jr();s(a,r)};w(Qe,a=>{e(f)?a(Fe):a(Je,!1)})}t(pe),t(oe),t(N),t(ae);var Ve=n(ae,2),Ie=o(Ve);ne(Ie,1,()=>le,ce,(a,r)=>{var g=Hr(),H=o(g),R=o(H);{var x=L=>{var B=U(),k=E(B);ne(k,1,()=>(e(r),i(()=>e(r).icon)),ce,(T,se)=>{var d=zr();P(()=>_(d,"d",e(se))),s(T,d)}),s(L,B)},S=L=>{var B=Cr();P(()=>_(B,"d",(e(r),i(()=>e(r).icon)))),s(L,B)};w(R,L=>{e(r),i(()=>Array.isArray(e(r).icon))?L(x):L(S,!1)})}t(H);var W=n(H);t(g),P(()=>{_(g,"href",(e(r),i(()=>e(r).href))),Q(g,1,`group flex items-center px-2 py-2 text-sm font-medium rounded-md transition-colors duration-200 ${e(y),e(r),i(()=>e(y)===e(r).href?"bg-gray-100 text-gray-900 dark:bg-gray-700 
dark:text-white":"text-gray-600 hover:bg-gray-50 hover:text-gray-900 dark:text-gray-300 dark:hover:bg-gray-700 dark:hover:text-white")??""}`),ge(W,` ${e(r),i(()=>e(r).label)??""}`)}),s(a,g)});var be=n(Ie,2);ne(be,5,()=>J,ce,(a,r)=>{var g=Sr(),H=o(g),R=o(H);t(H);var x=n(H);t(g),P(()=>{_(g,"href",(e(r),i(()=>e(r).href))),Q(g,1,`group flex items-center px-2 py-2 text-sm font-medium rounded-md transition-colors duration-200 ${e(y),e(r),i(()=>e(y)===e(r).href?"bg-gray-100 text-gray-900 dark:bg-gray-700 dark:text-white":"text-gray-600 hover:bg-gray-50 hover:text-gray-900 dark:text-gray-300 dark:hover:bg-gray-700 dark:hover:text-white")??""}`),_(R,"d",(e(r),i(()=>e(r).icon))),ge(x,` ${e(r),i(()=>e(r).label)??""}`)}),s(a,g)}),t(be);var Re=n(be,2),Ke=o(Re);t(Re),t(Ve),t(q),t(V);var ye=n(V,2),_e=o(ye),Te=o(_e),we=n(Te,2),Me=o(we),$e=n(Me,2),Pe=n($e,4),We=o(Pe);{var Xe=a=>{var r=Lr();s(a,r)},Ye=a=>{var r=U(),g=E(r);{var H=x=>{var S=Br();s(x,S)},R=x=>{var S=U(),W=E(S);{var L=k=>{var T=Ar();s(k,T)},B=k=>{var T=Vr();s(k,T)};w(W,k=>{e(u),i(()=>e(u).error)?k(L):k(B,!1)},!0)}s(x,S)};w(g,x=>{e(u),i(()=>e(u).connecting)?x(H):x(R,!1)},!0)}s(a,r)};w(We,a=>{e(u),i(()=>e(u).connected)?a(Xe):a(Ye,!1)})}t(Pe),t(we);var je=n(we,2),Ze=o(je);{var er=a=>{var r=Ir();s(a,r)},rr=a=>{var r=Rr();s(a,r)};w(Ze,a=>{e(f)?a(er):a(rr,!1)})}t(je),t(_e);var ar=n(_e,2);{var tr=a=>{var r=Or(),g=o(r),H=n(g,2),R=o(H),x=o(R);t(R);var S=n(R,2),W=o(S),L=o(W);ne(L,1,()=>le,ce,(se,d)=>{var A=Er(),ie=o(A),ze=o(ie);{var Ce=ve=>{var ue=U(),lr=E(ue);ne(lr,1,()=>(e(d),i(()=>e(d).icon)),ce,(dr,vr)=>{var Ee=Tr();P(()=>_(Ee,"d",e(vr))),s(dr,Ee)}),s(ve,ue)},ir=ve=>{var ue=Pr();P(()=>_(ue,"d",(e(d),i(()=>e(d).icon)))),s(ve,ue)};w(ze,ve=>{e(d),i(()=>Array.isArray(e(d).icon))?ve(Ce):ve(ir,!1)})}t(ie);var nr=n(ie);t(A),P(()=>{_(A,"href",(e(d),i(()=>e(d).href))),Q(A,1,`group flex items-center px-2 py-2 text-base font-medium rounded-md transition-colors duration-200 ${e(y),e(d),i(()=>e(y)===e(d).href?"bg-gray-100 dark:bg-gray-700 text-gray-900 dark:text-white":"text-gray-600 hover:bg-gray-50 hover:text-gray-900 dark:text-gray-300 dark:hover:bg-gray-700 dark:hover:text-white")??""}`),ge(nr,` ${e(d),i(()=>e(d).label)??""}`)}),I("click",A,()=>h($,!1)),s(se,A)});var B=n(L,2);ne(B,5,()=>J,ce,(se,d)=>{var A=Gr(),ie=o(A),ze=o(ie);t(ie);var Ce=n(ie);t(A),P(()=>{_(A,"href",(e(d),i(()=>e(d).href))),Q(A,1,`group flex items-center px-2 py-2 text-base font-medium rounded-md transition-colors duration-200 diff --git a/webapp/assets/_app/immutable/nodes/1.DcR4nNsi.js b/webapp/assets/_app/immutable/nodes/1.BjWDFEyD.js similarity index 87% rename from webapp/assets/_app/immutable/nodes/1.DcR4nNsi.js rename to webapp/assets/_app/immutable/nodes/1.BjWDFEyD.js index 820e3848..73802f5b 100644 --- a/webapp/assets/_app/immutable/nodes/1.DcR4nNsi.js +++ b/webapp/assets/_app/immutable/nodes/1.BjWDFEyD.js @@ -1 +1 @@ -import"../chunks/DsnmJJEf.js";import{i as u}from"../chunks/B3Pzt0F_.js";import{p as h,f as g,b as v,t as d,c as l,d as _,j as s,r as a,k as x,v as o}from"../chunks/D8EpLgQ1.js";import{s as k,p}from"../chunks/CTf6mQoE.js";const $={get error(){return p.error},get status(){return p.status}};k.updated.check;const i=$;var b=g("

                ",1);function y(m,c){h(c,!1),u();var r=b(),t=v(r),n=s(t,!0);a(t);var e=x(t,2),f=s(e,!0);a(e),d(()=>{o(n,i.status),o(f,i.error?.message)}),l(m,r),_()}export{y as component}; +import"../chunks/DsnmJJEf.js";import{i as u}from"../chunks/B3Pzt0F_.js";import{p as h,f as g,b as v,t as d,c as l,d as _,j as s,r as a,k as x,v as o}from"../chunks/D8EpLgQ1.js";import{s as k,p}from"../chunks/DXCC0cSN.js";const $={get error(){return p.error},get status(){return p.status}};k.updated.check;const i=$;var b=g("

                ",1);function y(m,c){h(c,!1),u();var r=b(),t=v(r),n=s(t,!0);a(t);var e=x(t,2),f=s(e,!0);a(e),d(()=>{o(n,i.status),o(f,i.error?.message)}),l(m,r),_()}export{y as component}; diff --git a/webapp/assets/_app/immutable/nodes/10.Ci2MePhm.js b/webapp/assets/_app/immutable/nodes/10.Ci2MePhm.js new file mode 100644 index 00000000..1e54ae7f --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/10.Ci2MePhm.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as X}from"../chunks/B3Pzt0F_.js";import{p as Y,o as Z,l as ee,a as ae,f as C,h as re,t as _,g as a,e as k,c as w,d as te,$ as se,k as d,D as de,m as f,j as r,s as i,r as t,z as B,v as D}from"../chunks/D8EpLgQ1.js";import{i as oe,s as ie,a as le}from"../chunks/5WA7h8uK.js";import{B as ne,r as q,c as I}from"../chunks/CiE1LlKV.js";import{b as T}from"../chunks/C6k1Q4We.js";import{p as ce}from"../chunks/D4Caz1gY.js";import{g as U}from"../chunks/DXCC0cSN.js";import{b as c}from"../chunks/CRhkqW2i.js";import{a as me,b as ue}from"../chunks/duD3WMbl.js";import{e as pe}from"../chunks/BZiHL9L3.js";var ve=C('

                '),fe=C('
                GARM

                Sign in to GARM

                GitHub Actions Runner Manager

                ');function ze(H,K){Y(K,!1);const[W,F]=ie(),$=()=>le(me,"$authStore",W);let m=f(""),u=f(""),o=f(!1),l=f("");Z(()=>{J()});function J(){const e=localStorage.getItem("theme");let s=!1;e==="dark"?s=!0:e==="light"?s=!1:s=window.matchMedia("(prefers-color-scheme: dark)").matches,s?document.documentElement.classList.add("dark"):document.documentElement.classList.remove("dark")}async function M(){if(!a(m)||!a(u)){i(l,"Please enter both username and password");return}i(o,!0),i(l,"");try{await ue.login(a(m),a(u)),U(`${c}/`)}catch(e){i(l,pe(e))}finally{i(o,!1)}}function L(e){e.key==="Enter"&&M()}ee(()=>($(),c),()=>{$().isAuthenticated&&U(`${c}/`)}),ae(),X();var g=fe();re(e=>{se.title="Login - GARM"});var z=r(g),h=r(z),A=r(h),S=r(A),N=d(S,2);t(A),B(4),t(h);var b=d(h,2),x=r(b),y=r(x),p=d(r(y),2);q(p),t(y);var P=d(y,2),v=d(r(P),2);q(v),t(P),t(x);var G=d(x,2);{var O=e=>{var s=ve(),n=r(s),E=d(r(n),2),j=r(E),V=r(j,!0);t(j),t(E),t(n),t(s),_(()=>D(V,a(l))),w(e,s)};oe(G,e=>{a(l)&&e(O)})}var R=d(G,2),Q=r(R);ne(Q,{type:"submit",variant:"primary",size:"md",fullWidth:!0,get disabled(){return a(o)},get loading(){return a(o)},children:(e,s)=>{B();var n=de();_(()=>D(n,a(o)?"Signing in...":"Sign in")),w(e,n)},$$slots:{default:!0}}),t(R),t(b),t(z),t(g),_(()=>{I(S,"src",`${c??""}/assets/garm-light.svg`),I(N,"src",`${c??""}/assets/garm-dark.svg`),p.disabled=a(o),v.disabled=a(o)}),T(p,()=>a(m),e=>i(m,e)),k("keypress",p,L),T(v,()=>a(u),e=>i(u,e)),k("keypress",v,L),k("submit",b,ce(M)),w(H,g),te(),F()}export{ze as component}; diff --git a/webapp/assets/_app/immutable/nodes/10.LnrIJgIa.js b/webapp/assets/_app/immutable/nodes/10.LnrIJgIa.js deleted file mode 100644 index d1f479c8..00000000 --- a/webapp/assets/_app/immutable/nodes/10.LnrIJgIa.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as X}from"../chunks/B3Pzt0F_.js";import{p as Y,o as Z,l as ee,a as ae,f as H,h as re,t as _,g as a,e as k,c as w,d as te,$ as se,k as d,D as de,m as f,j as r,s as i,r as t,z as B,v as D}from"../chunks/D8EpLgQ1.js";import{i as oe,s as ie,a as le}from"../chunks/5WA7h8uK.js";import{B as ne,r as q,c as T}from"../chunks/CiE1LlKV.js";import{b as U}from"../chunks/C6k1Q4We.js";import{p as ce}from"../chunks/D4Caz1gY.js";import{g as C}from"../chunks/CTf6mQoE.js";import{b as c}from"../chunks/CoIRRsD9.js";import{a as me,b as ue}from"../chunks/duD3WMbl.js";var pe=H('

                '),ve=H('
                GARM

                Sign in to GARM

                GitHub Actions Runner Manager

                ');function Le(I,K){Y(K,!1);const[W,F]=ie(),$=()=>le(me,"$authStore",W);let m=f(""),u=f(""),o=f(!1),l=f("");Z(()=>{J()});function J(){const e=localStorage.getItem("theme");let s=!1;e==="dark"?s=!0:e==="light"?s=!1:s=window.matchMedia("(prefers-color-scheme: dark)").matches,s?document.documentElement.classList.add("dark"):document.documentElement.classList.remove("dark")}async function L(){if(!a(m)||!a(u)){i(l,"Please enter both username and password");return}i(o,!0),i(l,"");try{await ue.login(a(m),a(u)),C(`${c}/`)}catch(e){i(l,e instanceof Error?e.message:"Login failed")}finally{i(o,!1)}}function M(e){e.key==="Enter"&&L()}ee(()=>($(),c),()=>{$().isAuthenticated&&C(`${c}/`)}),ae(),X();var g=ve();re(e=>{se.title="Login - GARM"});var z=r(g),h=r(z),S=r(h),A=r(S),N=d(A,2);t(S),B(4),t(h);var b=d(h,2),x=r(b),y=r(x),p=d(r(y),2);q(p),t(y);var G=d(y,2),v=d(r(G),2);q(v),t(G),t(x);var P=d(x,2);{var O=e=>{var s=pe(),n=r(s),E=d(r(n),2),j=r(E),V=r(j,!0);t(j),t(E),t(n),t(s),_(()=>D(V,a(l))),w(e,s)};oe(P,e=>{a(l)&&e(O)})}var R=d(P,2),Q=r(R);ne(Q,{type:"submit",variant:"primary",size:"md",fullWidth:!0,get disabled(){return a(o)},get loading(){return a(o)},children:(e,s)=>{B();var n=de();_(()=>D(n,a(o)?"Signing in...":"Sign in")),w(e,n)},$$slots:{default:!0}}),t(R),t(b),t(z),t(g),_(()=>{T(A,"src",`${c??""}/assets/garm-light.svg`),T(N,"src",`${c??""}/assets/garm-dark.svg`),p.disabled=a(o),v.disabled=a(o)}),U(p,()=>a(m),e=>i(m,e)),k("keypress",p,M),U(v,()=>a(u),e=>i(u,e)),k("keypress",v,M),k("submit",b,ce(L)),w(I,g),te(),F()}export{Le as component}; diff --git a/webapp/assets/_app/immutable/nodes/11.BX_bMXWi.js b/webapp/assets/_app/immutable/nodes/11.BX_bMXWi.js new file mode 100644 index 00000000..90aff430 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/11.BX_bMXWi.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Le}from"../chunks/B3Pzt0F_.js";import{p as Ne,E as Ke,o as je,l as x,s as a,m as s,g as e,y as W,a as qe,f as T,k as i,j as o,r as n,c as E,t as Q,v as ie,x as Ae,u as v,z as Fe,e as $e,d as Ge,h as Qe,b as Xe,$ as Ze,n as P,q as ue}from"../chunks/D8EpLgQ1.js";import{a as He,i as X,s as Je}from"../chunks/5WA7h8uK.js";import{r as ge,b as Ue,h as et,c as tt,g as me}from"../chunks/CiE1LlKV.js";import{b as Ie}from"../chunks/CRhkqW2i.js";import{e as at,i as rt}from"../chunks/u94nIB4-.js";import{b as Re,a as We}from"../chunks/C6k1Q4We.js";import{p as ot}from"../chunks/D4Caz1gY.js";import{M as nt}from"../chunks/qB7B8uiS.js";import{F as it}from"../chunks/CNMHKIIK.js";import{e as Pe}from"../chunks/BZiHL9L3.js";import{e as Ve,a as Me}from"../chunks/wyaP0EDu.js";import{U as st}from"../chunks/CIBm3n2u.js";import{D as lt}from"../chunks/KQ2xQpA3.js";import{P as dt}from"../chunks/CO4LUyTP.js";import{t as K}from"../chunks/BEkVdVE1.js";import{B as ct,k as Ce,g as Oe,l as ut}from"../chunks/BGVHQGl-.js";import{D as gt,A as Be,G as mt,a as pt}from"../chunks/BrNfsPe8.js";import{E as ft}from"../chunks/D4PaGKsV.js";import{E as bt}from"../chunks/CGpPw4EW.js";import{S as vt}from"../chunks/MCv1Wq2q.js";var yt=T('

                '),ht=T('

                Loading...

                '),_t=T(""),xt=T(''),kt=T('

                Webhook secret will be automatically generated

                '),wt=T('
                '),zt=T('

                Create Organization

                ');function $t(pe,fe){Ne(fe,!1);const[be,ve]=Je(),f=()=>He(Ve,"$eagerCache",be),D=s(),w=s(),z=s(),Z=s(),$=Ke();let C=s(!1),b=s(""),y=s("github"),r=s({name:"",credentials_name:"",webhook_secret:"",pool_balancer_type:"roundrobin"}),k=s(!0),p=s(!0);async function A(){if(!f().loaded.credentials&&!f().loading.credentials)try{await Me.getCredentials()}catch(d){a(b,Pe(d))}}function B(d){a(y,d.detail),W(r,e(r).credentials_name="")}function c(){if(e(r).credentials_name){const d=e(D).find(L=>L.name===e(r).credentials_name);d&&d.forge_type&&a(y,d.forge_type)}}function ye(){const d=new Uint8Array(32);return crypto.getRandomValues(d),Array.from(d,L=>L.toString(16).padStart(2,"0")).join("")}async function he(){if(!e(r).name?.trim()){a(b,"Organization name is required");return}if(!e(r).credentials_name){a(b,"Please select credentials");return}try{a(C,!0),a(b,"");const d={...e(r),install_webhook:e(k),auto_generate_secret:e(p)};$("submit",d)}catch(d){a(b,d instanceof Error?d.message:"Failed to create organization"),a(C,!1)}}je(()=>{A()}),x(()=>f(),()=>{a(D,f().credentials)}),x(()=>f(),()=>{a(w,f().loading.credentials)}),x(()=>(e(D),e(y)),()=>{a(z,e(D).filter(d=>e(y)?d.forge_type===e(y):!0))}),x(()=>e(p),()=>{e(p)?W(r,e(r).webhook_secret=ye()):e(p)||W(r,e(r).webhook_secret="")}),x(()=>(e(r),e(p)),()=>{a(Z,e(r).name?.trim()!==""&&e(r).credentials_name!==""&&(e(p)||e(r).webhook_secret&&e(r).webhook_secret.trim()!==""))}),qe(),Le(),nt(pe,{$$events:{close:()=>$("close")},children:(d,L)=>{var ee=zt(),N=i(o(ee),2);{var te=h=>{var _=yt(),F=o(_),U=o(F,!0);n(F),n(_),Q(()=>ie(U,e(b))),E(h,_)};X(N,h=>{e(b)&&h(te)})}var _e=i(N,2);{var xe=h=>{var _=ht();E(h,_)},ke=h=>{var _=wt(),F=o(_);it(F,{get selectedForgeType(){return e(y)},set selectedForgeType(l){a(y,l)},$$events:{select:B},$$legacy:!0});var U=i(F,2),se=i(o(U),2);ge(se),n(U);var j=i(U,2),S=i(o(j),2);Q(()=>{e(r),Ae(()=>{e(z)})});var q=o(S);q.value=q.__value="";var we=i(q);at(we,1,()=>e(z),rt,(l,m)=>{var O=_t(),de=o(O);n(O);var ce={};Q(()=>{ie(de,`${e(m),v(()=>e(m).name)??""} (${e(m),v(()=>e(m).endpoint?.name||"Unknown endpoint")??""})`),ce!==(ce=(e(m),v(()=>e(m).name)))&&(O.value=(O.__value=(e(m),v(()=>e(m).name)))??"")}),E(l,O)}),n(S),n(j);var G=i(j,2),ae=i(o(G),2);Q(()=>{e(r),Ae(()=>{})});var H=o(ae);H.value=H.__value="roundrobin";var le=i(H);le.value=le.__value="pack",n(ae),n(G);var re=i(G,2),oe=o(re),t=o(oe);ge(t),Fe(2),n(oe);var u=i(oe,2),I=o(u),M=o(I);ge(M),Fe(2),n(I);var g=i(I,2);{var J=l=>{var m=xt();ge(m),Re(m,()=>e(r).webhook_secret,O=>W(r,e(r).webhook_secret=O)),E(l,m)},ne=l=>{var m=kt();E(l,m)};X(g,l=>{e(p)?l(ne,!1):l(J)})}n(u),n(re);var V=i(re,2),Y=o(V),R=i(Y,2),ze=o(R,!0);n(R),n(V),n(_),Q(()=>{R.disabled=e(C)||e(w)||!e(Z),ie(ze,e(C)?"Creating...":"Create Organization")}),Re(se,()=>e(r).name,l=>W(r,e(r).name=l)),Ue(S,()=>e(r).credentials_name,l=>W(r,e(r).credentials_name=l)),$e("change",S,c),Ue(ae,()=>e(r).pool_balancer_type,l=>W(r,e(r).pool_balancer_type=l)),We(t,()=>e(k),l=>a(k,l)),We(M,()=>e(p),l=>a(p,l)),$e("click",Y,()=>$("close")),$e("submit",_,ot(he)),E(h,_)};X(_e,h=>{e(C)?h(xe):h(ke,!1)})}n(ee),E(d,ee)},$$slots:{default:!0}}),Ge(),ve()}var Ct=T(''),Ot=T('
                ',1);function Kt(pe,fe){Ne(fe,!1);const[be,ve]=Je(),f=()=>He(Ve,"$eagerCache",be),D=s(),w=s(),z=s(),Z=s();let $=s([]),C=s(!0),b=s(""),y=s(""),r=s(1),k=s(25),p=s(!1),A=s(!1),B=s(!1),c=s(null);function ye(){a(p,!1),a(B,!1),a(A,!1)}async function he(t){try{a(b,"");const u=t.detail,I={name:u.name,credentials_name:u.credentials_name,webhook_secret:u.webhook_secret,pool_balancer_type:u.pool_balancer_type},M=await me.createOrganization(I);if(u.install_webhook&&M.id)try{await me.installOrganizationWebhook(M.id),K.success("Webhook Installed",`Webhook for organization ${M.name} has been installed successfully.`)}catch(g){console.warn("Organization created but webhook installation failed:",g),K.error("Webhook Installation Failed",g instanceof Error?g.message:"Failed to install webhook. You can try installing it manually from the organization details page.")}K.success("Organization Created",`Organization ${M.name} has been created successfully.`),a(p,!1)}catch(u){throw a(b,Pe(u)),u}}async function d(t){if(e(c))try{await me.updateOrganization(e(c).id,t),K.success("Organization Updated",`Organization ${e(c).name} has been updated successfully.`),a(A,!1),a(c,null)}catch(u){throw u}}async function L(){if(e(c))try{a(b,""),await me.deleteOrganization(e(c).id),K.success("Organization Deleted",`Organization ${e(c).name} has been deleted successfully.`),a(c,null)}catch(t){const u=Pe(t);K.error("Delete Failed",u)}finally{ye()}}function ee(){a(p,!0)}function N(t){a(c,t),a(A,!0)}function te(t){a(c,t),a(B,!0)}je(async()=>{try{a(C,!0);const t=await Me.getOrganizations();t&&Array.isArray(t)&&a($,t)}catch(t){console.error("Failed to load organizations:",t),a(b,t instanceof Error?t.message:"Failed to load organizations")}finally{a(C,!1)}});async function _e(){try{await Me.retryResource("organizations")}catch(t){console.error("Retry failed:",t)}}const xe=[{key:"name",title:"Name",cellComponent:ft,cellProps:{entityType:"organization"}},{key:"endpoint",title:"Endpoint",cellComponent:bt},{key:"credentials",title:"Credentials",cellComponent:mt,cellProps:{field:"credentials_name"}},{key:"status",title:"Status",cellComponent:vt,cellProps:{statusType:"entity"}},{key:"actions",title:"Actions",align:"right",cellComponent:pt}],ke={entityType:"organization",primaryText:{field:"name",isClickable:!0,href:"/organizations/{id}"},customInfo:[{icon:t=>Oe(t?.endpoint?.endpoint_type||"unknown"),text:t=>t?.endpoint?.name||"Unknown"}],badges:[{type:"custom",value:t=>Ce(t)}],actions:[{type:"edit",handler:t=>N(t)},{type:"delete",handler:t=>te(t)}]};function h(t){a(y,t.detail.term),a(r,1)}function _(t){a(r,t.detail.page)}function F(t){a(k,t.detail.perPage),a(r,1)}function U(t){N(t.detail.item)}function se(t){te(t.detail.item)}x(()=>(e($),f()),()=>{(!e($).length||f().loaded.organizations)&&a($,f().organizations)}),x(()=>f(),()=>{a(C,f().loading.organizations)}),x(()=>f(),()=>{a(D,f().errorMessages.organizations)}),x(()=>(e($),e(y)),()=>{a(w,ut(e($),e(y)))}),x(()=>(e(w),e(k)),()=>{a(z,Math.ceil(e(w).length/e(k)))}),x(()=>(e(r),e(z)),()=>{e(r)>e(z)&&e(z)>0&&a(r,e(z))}),x(()=>(e(w),e(r),e(k)),()=>{a(Z,e(w).slice((e(r)-1)*e(k),e(r)*e(k)))}),qe(),Le();var j=Ot();Qe(t=>{Ze.title="Organizations - GARM"});var S=Xe(j),q=o(S);dt(q,{title:"Organizations",description:"Manage GitHub and Gitea organizations",actionLabel:"Add Organization",$$events:{action:ee}});var we=i(q,2);{let t=ue(()=>e(D)||e(b)),u=ue(()=>!!e(D));gt(we,{get columns(){return xe},get data(){return e(Z)},get loading(){return e(C)},get error(){return e(t)},get 
searchTerm(){return e(y)},searchPlaceholder:"Search organizations...",get currentPage(){return e(r)},get perPage(){return e(k)},get totalPages(){return e(z)},get totalItems(){return e(w),v(()=>e(w).length)},itemName:"organizations",emptyIconType:"building",get showRetry(){return e(u)},get mobileCardConfig(){return ke},$$events:{search:h,pageChange:_,perPageChange:F,retry:_e,edit:U,delete:se},$$slots:{"mobile-card":(I,M)=>{const g=ue(()=>M.item),J=ue(()=>(P(Ce),P(e(g)),v(()=>Ce(e(g)))));var ne=Ct(),V=o(ne),Y=o(V),R=o(Y),ze=o(R,!0);n(R);var l=i(R,2),m=o(l),O=o(m);et(O,()=>(P(Oe),P(e(g)),v(()=>Oe(e(g).endpoint?.endpoint_type||"unknown"))));var de=i(O,2),ce=o(de,!0);n(de),n(m),n(l),n(Y),n(V);var Ee=i(V,2),Te=o(Ee);ct(Te,{get variant(){return P(e(J)),v(()=>e(J).variant)},get text(){return P(e(J)),v(()=>e(J).text)}});var De=i(Te,2),Se=o(De);Be(Se,{action:"edit",size:"sm",title:"Edit organization",ariaLabel:"Edit organization",$$events:{click:()=>N(e(g))}});var Ye=i(Se,2);Be(Ye,{action:"delete",size:"sm",title:"Delete organization",ariaLabel:"Delete organization",$$events:{click:()=>te(e(g))}}),n(De),n(Ee),n(ne),Q(()=>{tt(Y,"href",(P(Ie),P(e(g)),v(()=>`${Ie}/organizations/${e(g).id}`))),ie(ze,(P(e(g)),v(()=>e(g).name))),ie(ce,(P(e(g)),v(()=>e(g).endpoint?.name||"Unknown")))}),E(I,ne)}}})}n(S);var G=i(S,2);{var ae=t=>{$t(t,{$$events:{close:()=>a(p,!1),submit:he}})};X(G,t=>{e(p)&&t(ae)})}var H=i(G,2);{var le=t=>{st(t,{get entity(){return e(c)},entityType:"organization",$$events:{close:()=>{a(A,!1),a(c,null)},submit:u=>d(u.detail)}})};X(H,t=>{e(A)&&e(c)&&t(le)})}var re=i(H,2);{var oe=t=>{lt(t,{title:"Delete Organization",message:"Are you sure you want to delete this organization? This action cannot be undone.",get itemName(){return e(c),v(()=>e(c).name)},$$events:{close:()=>{a(B,!1),a(c,null)},confirm:L}})};X(re,t=>{e(B)&&e(c)&&t(oe)})}E(pe,j),Ge(),ve()}export{Kt as component}; diff --git a/webapp/assets/_app/immutable/nodes/11.Bsn67lBa.js b/webapp/assets/_app/immutable/nodes/11.Bsn67lBa.js deleted file mode 100644 index 2a8fb7cb..00000000 --- a/webapp/assets/_app/immutable/nodes/11.Bsn67lBa.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Ie}from"../chunks/B3Pzt0F_.js";import{p as Be,E as Ve,o as Le,l as x,s as a,m as s,g as e,y as W,a as Ne,f as T,k as i,j as o,r as n,c as M,t as V,v as se,x as De,u as v,z as Fe,e as $e,d as je,h as Ye,b as Ke,$ as Qe,n as E,q as ue}from"../chunks/D8EpLgQ1.js";import{a as qe,i as Y,s as Ge}from"../chunks/5WA7h8uK.js";import{r as ge,b as Se,h as Xe,c as Ze,g as me}from"../chunks/CiE1LlKV.js";import{b as Ae}from"../chunks/CoIRRsD9.js";import{e as et,i as tt}from"../chunks/u94nIB4-.js";import{b as Ue,a as Re}from"../chunks/C6k1Q4We.js";import{p as at}from"../chunks/D4Caz1gY.js";import{M as rt}from"../chunks/qB7B8uiS.js";import{F as ot}from"../chunks/CNMHKIIK.js";import{e as He,a as Pe}from"../chunks/wyaP0EDu.js";import{U as nt}from"../chunks/CclkODgu.js";import{D as it}from"../chunks/KQ2xQpA3.js";import{P as st}from"../chunks/CO4LUyTP.js";import{t as ie}from"../chunks/BEkVdVE1.js";import{B as lt,k as Ce,g as Oe,l as dt}from"../chunks/BGVHQGl-.js";import{D as ct,A as We,G as ut,a as gt}from"../chunks/C9DJVOi1.js";import{E as mt}from"../chunks/B7ITzBt8.js";import{E as pt}from"../chunks/CGpPw4EW.js";import{S as ft}from"../chunks/BE4wujub.js";var bt=T('

                '),vt=T('

                Loading...

                '),yt=T(""),ht=T(''),_t=T('

                Webhook secret will be automatically generated

                '),xt=T('
                '),kt=T('

                Create Organization

                ');function wt(pe,fe){Be(fe,!1);const[be,ve]=Ge(),p=()=>qe(He,"$eagerCache",be),D=s(),w=s(),z=s(),K=s(),$=Ve();let C=s(!1),f=s(""),y=s("github"),r=s({name:"",credentials_name:"",webhook_secret:"",pool_balancer_type:"roundrobin"}),k=s(!0),b=s(!0);async function I(){if(!p().loaded.credentials&&!p().loading.credentials)try{await Pe.getCredentials()}catch(d){a(f,d instanceof Error?d.message:"Failed to load credentials")}}function B(d){a(y,d.detail),W(r,e(r).credentials_name="")}function c(){if(e(r).credentials_name){const d=e(D).find(L=>L.name===e(r).credentials_name);d&&d.forge_type&&a(y,d.forge_type)}}function ye(){const d=new Uint8Array(32);return crypto.getRandomValues(d),Array.from(d,L=>L.toString(16).padStart(2,"0")).join("")}async function he(){if(!e(r).name?.trim()){a(f,"Organization name is required");return}if(!e(r).credentials_name){a(f,"Please select credentials");return}try{a(C,!0),a(f,"");const d={...e(r),install_webhook:e(k),auto_generate_secret:e(b)};$("submit",d)}catch(d){a(f,d instanceof Error?d.message:"Failed to create organization"),a(C,!1)}}Le(()=>{I()}),x(()=>p(),()=>{a(D,p().credentials)}),x(()=>p(),()=>{a(w,p().loading.credentials)}),x(()=>(e(D),e(y)),()=>{a(z,e(D).filter(d=>e(y)?d.forge_type===e(y):!0))}),x(()=>e(b),()=>{e(b)?W(r,e(r).webhook_secret=ye()):e(b)||W(r,e(r).webhook_secret="")}),x(()=>(e(r),e(b)),()=>{a(K,e(r).name?.trim()!==""&&e(r).credentials_name!==""&&(e(b)||e(r).webhook_secret&&e(r).webhook_secret.trim()!==""))}),Ne(),Ie(),rt(pe,{$$events:{close:()=>$("close")},children:(d,L)=>{var F=kt(),N=i(o(F),2);{var _e=h=>{var _=bt(),S=o(_),A=o(S,!0);n(S),n(_),V(()=>se(A,e(f))),M(h,_)};Y(N,h=>{e(f)&&h(_e)})}var xe=i(N,2);{var ke=h=>{var _=vt();M(h,_)},we=h=>{var _=xt(),S=o(_);ot(S,{get selectedForgeType(){return e(y)},set selectedForgeType(l){a(y,l)},$$events:{select:B},$$legacy:!0});var A=i(S,2),Q=i(o(A),2);ge(Q),n(A);var U=i(A,2),R=i(o(U),2);V(()=>{e(r),De(()=>{e(z)})});var X=o(R);X.value=X.__value="";var le=i(X);et(le,1,()=>e(z),tt,(l,m)=>{var P=yt(),ze=o(P);n(P);var ne={};V(()=>{se(ze,`${e(m),v(()=>e(m).name)??""} (${e(m),v(()=>e(m).endpoint?.name||"Unknown endpoint")??""})`),ne!==(ne=(e(m),v(()=>e(m).name)))&&(P.value=(P.__value=(e(m),v(()=>e(m).name)))??"")}),M(l,P)}),n(R),n(U);var Z=i(U,2),j=i(o(Z),2);V(()=>{e(r),De(()=>{})});var ee=o(j);ee.value=ee.__value="roundrobin";var de=i(ee);de.value=de.__value="pack",n(j),n(Z);var te=i(Z,2),t=o(te),g=o(t);ge(g),Fe(2),n(t);var q=i(t,2),O=o(q),u=o(O);ge(u),Fe(2),n(O);var G=i(O,2);{var ae=l=>{var m=ht();ge(m),Ue(m,()=>e(r).webhook_secret,P=>W(r,e(r).webhook_secret=P)),M(l,m)},re=l=>{var m=_t();M(l,m)};Y(G,l=>{e(b)?l(re,!1):l(ae)})}n(q),n(te);var H=i(te,2),J=o(H),oe=i(J,2),ce=o(oe,!0);n(oe),n(H),n(_),V(()=>{oe.disabled=e(C)||e(w)||!e(K),se(ce,e(C)?"Creating...":"Create Organization")}),Ue(Q,()=>e(r).name,l=>W(r,e(r).name=l)),Se(R,()=>e(r).credentials_name,l=>W(r,e(r).credentials_name=l)),$e("change",R,c),Se(j,()=>e(r).pool_balancer_type,l=>W(r,e(r).pool_balancer_type=l)),Re(g,()=>e(k),l=>a(k,l)),Re(u,()=>e(b),l=>a(b,l)),$e("click",J,()=>$("close")),$e("submit",_,at(he)),M(h,_)};Y(xe,h=>{e(C)?h(ke):h(we,!1)})}n(F),M(d,F)},$$slots:{default:!0}}),je(),ve()}var zt=T(''),$t=T('
                ',1);function Jt(pe,fe){Be(fe,!1);const[be,ve]=Ge(),p=()=>qe(He,"$eagerCache",be),D=s(),w=s(),z=s(),K=s();let $=s([]),C=s(!0),f=s(""),y=s(""),r=s(1),k=s(25),b=s(!1),I=s(!1),B=s(!1),c=s(null);async function ye(t){try{a(f,"");const g=t.detail,q={name:g.name,credentials_name:g.credentials_name,webhook_secret:g.webhook_secret,pool_balancer_type:g.pool_balancer_type},O=await me.createOrganization(q);if(g.install_webhook&&O.id)try{await me.installOrganizationWebhook(O.id),ie.success("Webhook Installed",`Webhook for organization ${O.name} has been installed successfully.`)}catch(u){console.warn("Organization created but webhook installation failed:",u),ie.error("Webhook Installation Failed",u instanceof Error?u.message:"Failed to install webhook. You can try installing it manually from the organization details page.")}ie.success("Organization Created",`Organization ${O.name} has been created successfully.`),a(b,!1)}catch(g){throw a(f,g instanceof Error?g.message:"Failed to create organization"),g}}async function he(t){if(e(c))try{await me.updateOrganization(e(c).id,t),ie.success("Organization Updated",`Organization ${e(c).name} has been updated successfully.`),a(I,!1),a(c,null)}catch(g){throw g}}async function d(){if(e(c))try{a(f,""),await me.deleteOrganization(e(c).id),ie.success("Organization Deleted",`Organization ${e(c).name} has been deleted successfully.`),a(B,!1),a(c,null)}catch(t){a(f,t instanceof Error?t.message:"Failed to delete organization")}}function L(){a(b,!0)}function F(t){a(c,t),a(I,!0)}function N(t){a(c,t),a(B,!0)}Le(async()=>{try{a(C,!0);const t=await Pe.getOrganizations();t&&Array.isArray(t)&&a($,t)}catch(t){console.error("Failed to load organizations:",t),a(f,t instanceof Error?t.message:"Failed to load organizations")}finally{a(C,!1)}});async function _e(){try{await Pe.retryResource("organizations")}catch(t){console.error("Retry failed:",t)}}const xe=[{key:"name",title:"Name",cellComponent:mt,cellProps:{entityType:"organization"}},{key:"endpoint",title:"Endpoint",cellComponent:pt},{key:"credentials",title:"Credentials",cellComponent:ut,cellProps:{field:"credentials_name"}},{key:"status",title:"Status",cellComponent:ft,cellProps:{statusType:"entity"}},{key:"actions",title:"Actions",align:"right",cellComponent:gt}],ke={entityType:"organization",primaryText:{field:"name",isClickable:!0,href:"/organizations/{id}"},customInfo:[{icon:t=>Oe(t?.endpoint?.endpoint_type||"unknown"),text:t=>t?.endpoint?.name||"Unknown"}],badges:[{type:"custom",value:t=>Ce(t)}],actions:[{type:"edit",handler:t=>F(t)},{type:"delete",handler:t=>N(t)}]};function we(t){a(y,t.detail.term),a(r,1)}function h(t){a(r,t.detail.page)}function _(t){a(k,t.detail.perPage),a(r,1)}function S(t){F(t.detail.item)}function A(t){N(t.detail.item)}x(()=>(e($),p()),()=>{(!e($).length||p().loaded.organizations)&&a($,p().organizations)}),x(()=>p(),()=>{a(C,p().loading.organizations)}),x(()=>p(),()=>{a(D,p().errorMessages.organizations)}),x(()=>(e($),e(y)),()=>{a(w,dt(e($),e(y)))}),x(()=>(e(w),e(k)),()=>{a(z,Math.ceil(e(w).length/e(k)))}),x(()=>(e(r),e(z)),()=>{e(r)>e(z)&&e(z)>0&&a(r,e(z))}),x(()=>(e(w),e(r),e(k)),()=>{a(K,e(w).slice((e(r)-1)*e(k),e(r)*e(k)))}),Ne(),Ie();var Q=$t();Ye(t=>{Qe.title="Organizations - GARM"});var U=Ke(Q),R=o(U);st(R,{title:"Organizations",description:"Manage GitHub and Gitea organizations",actionLabel:"Add Organization",$$events:{action:L}});var X=i(R,2);{let t=ue(()=>e(D)||e(f)),g=ue(()=>!!e(D));ct(X,{get columns(){return xe},get data(){return e(K)},get loading(){return e(C)},get 
error(){return e(t)},get searchTerm(){return e(y)},searchPlaceholder:"Search organizations...",get currentPage(){return e(r)},get perPage(){return e(k)},get totalPages(){return e(z)},get totalItems(){return e(w),v(()=>e(w).length)},itemName:"organizations",emptyIconType:"building",get showRetry(){return e(g)},get mobileCardConfig(){return ke},$$events:{search:we,pageChange:h,perPageChange:_,retry:_e,edit:S,delete:A},$$slots:{"mobile-card":(q,O)=>{const u=ue(()=>O.item),G=ue(()=>(E(Ce),E(e(u)),v(()=>Ce(e(u)))));var ae=zt(),re=o(ae),H=o(re),J=o(H),oe=o(J,!0);n(J);var ce=i(J,2),l=o(ce),m=o(l);Xe(m,()=>(E(Oe),E(e(u)),v(()=>Oe(e(u).endpoint?.endpoint_type||"unknown"))));var P=i(m,2),ze=o(P,!0);n(P),n(l),n(ce),n(H),n(re);var ne=i(re,2),Ee=o(ne);lt(Ee,{get variant(){return E(e(G)),v(()=>e(G).variant)},get text(){return E(e(G)),v(()=>e(G).text)}});var Me=i(Ee,2),Te=o(Me);We(Te,{action:"edit",size:"sm",title:"Edit organization",ariaLabel:"Edit organization",$$events:{click:()=>F(e(u))}});var Je=i(Te,2);We(Je,{action:"delete",size:"sm",title:"Delete organization",ariaLabel:"Delete organization",$$events:{click:()=>N(e(u))}}),n(Me),n(ne),n(ae),V(()=>{Ze(H,"href",(E(Ae),E(e(u)),v(()=>`${Ae}/organizations/${e(u).id}`))),se(oe,(E(e(u)),v(()=>e(u).name))),se(ze,(E(e(u)),v(()=>e(u).endpoint?.name||"Unknown")))}),M(q,ae)}}})}n(U);var le=i(U,2);{var Z=t=>{wt(t,{$$events:{close:()=>a(b,!1),submit:ye}})};Y(le,t=>{e(b)&&t(Z)})}var j=i(le,2);{var ee=t=>{nt(t,{get entity(){return e(c)},entityType:"organization",$$events:{close:()=>{a(I,!1),a(c,null)},submit:g=>he(g.detail)}})};Y(j,t=>{e(I)&&e(c)&&t(ee)})}var de=i(j,2);{var te=t=>{it(t,{title:"Delete Organization",message:"Are you sure you want to delete this organization? This action cannot be undone.",get itemName(){return e(c),v(()=>e(c).name)},$$events:{close:()=>{a(B,!1),a(c,null)},confirm:d}})};Y(de,t=>{e(B)&&e(c)&&t(te)})}M(pe,Q),je(),ve()}export{Jt as component}; diff --git a/webapp/assets/_app/immutable/nodes/12.B-vC_cmu.js b/webapp/assets/_app/immutable/nodes/12.B-vC_cmu.js deleted file mode 100644 index 2fef2926..00000000 --- a/webapp/assets/_app/immutable/nodes/12.B-vC_cmu.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Le}from"../chunks/B3Pzt0F_.js";import{p as Ne,o as We,A as qe,l as He,a as je,f as A,h as Ge,b as M,t as q,c as x,d as Re,g as e,m as l,s as o,u as s,$ as Ve,j as f,r as g,k as d,v as le,y as Je,B as de,q as m,n as Ke}from"../chunks/D8EpLgQ1.js";import{i as h,s as Qe,a as Xe}from"../chunks/5WA7h8uK.js";import{c as Ye,g as _}from"../chunks/CiE1LlKV.js";import{p as Ze}from"../chunks/C41YH50Q.js";import{g as ce}from"../chunks/CTf6mQoE.js";import{b as H}from"../chunks/CoIRRsD9.js";import{U as et}from"../chunks/CclkODgu.js";import{D as ue}from"../chunks/KQ2xQpA3.js";import{E as tt,P as at,a as nt}from"../chunks/BmGWMSQm.js";import{D as ot,I as rt}from"../chunks/DDhBTdDt.js";import{g as fe}from"../chunks/BGVHQGl-.js";import{W as it}from"../chunks/Dbd6PPbz.js";import{C as st}from"../chunks/CwqI2jFH.js";import{w as j}from"../chunks/u94nIB4-.js";import{t as C}from"../chunks/BEkVdVE1.js";var lt=A('

                Loading organization...

                '),dt=A('

                '),ct=A(" ",1),ut=A(' ',1);function Tt(ge,me){Ne(me,!1);const[pe,ve]=Qe(),G=()=>Xe(Ze,"$page",pe),w=l();let a=l(null),c=l([]),p=l([]),F=l(!0),I=l(""),O=l(!1),D=l(!1),E=l(!1),T=l(!1),u=l(null),P=null,b=l();async function R(){if(e(w))try{o(F,!0),o(I,"");const[t,n,r]=await Promise.all([_.getOrganization(e(w)),_.listOrganizationPools(e(w)).catch(()=>[]),_.listOrganizationInstances(e(w)).catch(()=>[])]);o(a,t),o(c,n),o(p,r)}catch(t){o(I,t instanceof Error?t.message:"Failed to load organization")}finally{o(F,!1)}}function ye(t,n){const{events:r}=t;return{...n,events:r}}async function he(t){if(e(a))try{await _.updateOrganization(e(a).id,t),await R(),C.success("Organization Updated",`Organization ${e(a).name} has been updated successfully.`),o(O,!1)}catch(n){throw n}}async function _e(){if(e(a)){try{await _.deleteOrganization(e(a).id),ce(`${H}/organizations`)}catch(t){o(I,t instanceof Error?t.message:"Failed to delete organization")}o(D,!1)}}async function be(){if(e(u))try{await _.deleteInstance(e(u).name),C.success("Instance Deleted",`Instance ${e(u).name} has been deleted successfully.`),o(E,!1),o(u,null)}catch(t){const n=t instanceof Error?t.message:"Failed to delete instance";C.error("Delete Failed",n),o(E,!1),o(u,null)}}function ze(t){o(u,t),o(E,!0)}function $e(){o(T,!0)}async function xe(t){try{if(!e(a))return;await _.createOrganizationPool(e(a).id,t.detail),C.success("Pool Created",`Pool has been created successfully for organization ${e(a).name}.`),o(T,!1)}catch(n){throw n}}function V(){e(b)&&Je(b,e(b).scrollTop=e(b).scrollHeight)}function we(t){if(t.operation==="update"){const n=t.payload;if(e(a)&&n.id===e(a).id){const r=e(a).events?.length||0,i=n.events?.length||0;o(a,ye(e(a),n)),i>r&&setTimeout(()=>{V()},100)}}else if(t.operation==="delete"){const n=t.payload.id||t.payload;e(a)&&e(a).id===n&&ce(`${H}/organizations`)}}function Ie(t){if(!e(a))return;const n=t.payload;if(n.org_id===e(a).id){if(t.operation==="create")o(c,[...e(c),n]);else if(t.operation==="update")o(c,e(c).map(r=>r.id===n.id?n:r));else if(t.operation==="delete"){const r=n.id||n;o(c,e(c).filter(i=>i.id!==r))}}}function Ee(t){if(!e(a)||!e(c))return;const n=t.payload;if(e(c).some(i=>i.id===n.pool_id)){if(t.operation==="create")o(p,[...e(p),n]);else if(t.operation==="update")o(p,e(p).map(i=>i.id===n.id?n:i));else if(t.operation==="delete"){const i=n.id||n;o(p,e(p).filter(L=>L.id!==i))}}}We(()=>{R().then(()=>{e(a)?.events?.length&&setTimeout(()=>{V()},100)});const t=j.subscribeToEntity("organization",["update","delete"],we),n=j.subscribeToEntity("pool",["create","update","delete"],Ie),r=j.subscribeToEntity("instance",["create","update","delete"],Ee);P=()=>{t(),n(),r()}}),qe(()=>{P&&(P(),P=null)}),He(()=>G(),()=>{o(w,G().params.id)}),je(),Le();var J=ut();Ge(t=>{q(()=>Ve.title=`${e(a),s(()=>e(a)?`${e(a).name} - Organization Details`:"Organization Details")??""} - GARM`)});var S=M(J),B=f(S),K=f(B),U=f(K),Oe=f(U);g(U);var Q=d(U,2),X=f(Q),Y=d(f(X),2),De=f(Y,!0);g(Y),g(X),g(Q),g(K),g(B);var Te=d(B,2);{var Pe=t=>{var n=lt();x(t,n)},ke=t=>{var n=de(),r=M(n);{var i=z=>{var $=dt(),k=f($),N=f(k,!0);g(k),g($),q(()=>le(N,e(I))),x(z,$)},L=z=>{var $=de(),k=M($);{var N=W=>{var ae=ct(),ne=M(ae);{let v=m(()=>(e(a),s(()=>e(a).name||"Organization"))),y=m(()=>(e(a),s(()=>e(a).endpoint?.name))),Ue=m(()=>(Ke(fe),e(a),s(()=>fe(e(a).endpoint?.endpoint_type||"unknown"))));ot(ne,{get title(){return e(v)},get subtitle(){return`Endpoint: ${e(y)??""}`},get forgeIcon(){return e(Ue)},onEdit:()=>o(O,!0),onDelete:()=>o(D,!0)})}var 
oe=d(ne,2);tt(oe,{get entity(){return e(a)},entityType:"organization"});var re=d(oe,2);{let v=m(()=>(e(a),s(()=>e(a).id||""))),y=m(()=>(e(a),s(()=>e(a).name||"")));it(re,{entityType:"organization",get entityId(){return e(v)},get entityName(){return e(y)}})}var ie=d(re,2);{let v=m(()=>(e(a),s(()=>e(a).id||""))),y=m(()=>(e(a),s(()=>e(a).name||"")));at(ie,{get pools(){return e(c)},entityType:"organization",get entityId(){return e(v)},get entityName(){return e(y)},$$events:{addPool:$e}})}var se=d(ie,2);rt(se,{get instances(){return e(p)},entityType:"organization",onDeleteInstance:ze});var Be=d(se,2);{let v=m(()=>(e(a),s(()=>e(a)?.events)));nt(Be,{get events(){return e(v)},get eventsContainer(){return e(b)},set eventsContainer(y){o(b,y)},$$legacy:!0})}x(W,ae)};h(k,W=>{e(a)&&W(N)},!0)}x(z,$)};h(r,z=>{e(I)?z(i):z(L,!1)},!0)}x(t,n)};h(Te,t=>{e(F)?t(Pe):t(ke,!1)})}g(S);var Z=d(S,2);{var Me=t=>{et(t,{get entity(){return e(a)},entityType:"organization",$$events:{close:()=>o(O,!1),submit:n=>he(n.detail)}})};h(Z,t=>{e(O)&&e(a)&&t(Me)})}var ee=d(Z,2);{var Ce=t=>{ue(t,{title:"Delete Organization",message:"Are you sure you want to delete this organization? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(a),s(()=>e(a).name)},$$events:{close:()=>o(D,!1),confirm:_e}})};h(ee,t=>{e(D)&&e(a)&&t(Ce)})}var te=d(ee,2);{var Ae=t=>{ue(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(u),s(()=>e(u).name)},$$events:{close:()=>{o(E,!1),o(u,null)},confirm:be}})};h(te,t=>{e(E)&&e(u)&&t(Ae)})}var Fe=d(te,2);{var Se=t=>{{let n=m(()=>(e(a),s(()=>e(a).id||"")));st(t,{initialEntityType:"organization",get initialEntityId(){return e(n)},$$events:{close:()=>o(T,!1),submit:xe}})}};h(Fe,t=>{e(T)&&e(a)&&t(Se)})}q(()=>{Ye(Oe,"href",`${H}/organizations`),le(De,(e(a),s(()=>e(a)?e(a).name:"Loading...")))}),x(ge,J),Re(),ve()}export{Tt as component}; diff --git a/webapp/assets/_app/immutable/nodes/12.C0lS_ubI.js b/webapp/assets/_app/immutable/nodes/12.C0lS_ubI.js new file mode 100644 index 00000000..64243495 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/12.C0lS_ubI.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Ne}from"../chunks/B3Pzt0F_.js";import{p as We,o as qe,A as He,l as je,a as Ge,f as A,h as Re,b as C,t as q,c as x,d as Ve,g as e,m as l,s as o,u as s,$ as Je,j as f,r as m,k as d,v as de,y as Ke,B as ce,q as g,n as Qe}from"../chunks/D8EpLgQ1.js";import{i as h,s as Xe,a as Ye}from"../chunks/5WA7h8uK.js";import{c as Ze,g as _}from"../chunks/CiE1LlKV.js";import{p as et}from"../chunks/BE8f1Riw.js";import{g as ue}from"../chunks/DXCC0cSN.js";import{b as H}from"../chunks/CRhkqW2i.js";import{U as tt}from"../chunks/CIBm3n2u.js";import{D as fe}from"../chunks/KQ2xQpA3.js";import{E as at,P as nt,a as ot}from"../chunks/OpktHEmj.js";import{D as rt,I as it}from"../chunks/BEoJgOul.js";import{g as me}from"../chunks/BGVHQGl-.js";import{e as j}from"../chunks/BZiHL9L3.js";import{W as st}from"../chunks/BsqC4UA1.js";import{C as lt}from"../chunks/CRD55Dyg.js";import{w as G}from"../chunks/u94nIB4-.js";import{t as E}from"../chunks/BEkVdVE1.js";var dt=A('

                Loading organization...

                '),ct=A('

                '),ut=A(" ",1),ft=A(' ',1);function Mt(ge,pe){We(pe,!1);const[ve,ye]=Xe(),R=()=>Ye(et,"$page",ve),I=l();let a=l(null),c=l([]),p=l([]),S=l(!0),O=l(""),D=l(!1),T=l(!1),w=l(!1),P=l(!1),u=l(null),M=null,b=l();async function V(){if(e(I))try{o(S,!0),o(O,"");const[t,n,r]=await Promise.all([_.getOrganization(e(I)),_.listOrganizationPools(e(I)).catch(()=>[]),_.listOrganizationInstances(e(I)).catch(()=>[])]);o(a,t),o(c,n),o(p,r)}catch(t){o(O,j(t))}finally{o(S,!1)}}function he(t,n){const{events:r}=t;return{...n,events:r}}async function _e(t){if(e(a))try{await _.updateOrganization(e(a).id,t),await V(),E.success("Organization Updated",`Organization ${e(a).name} has been updated successfully.`),o(D,!1)}catch(n){throw n}}async function be(){if(e(a)){try{await _.deleteOrganization(e(a).id),ue(`${H}/organizations`)}catch(t){const n=j(t);E.error("Delete Failed",n)}o(T,!1)}}async function ze(){if(e(u))try{await _.deleteInstance(e(u).name),E.success("Instance Deleted",`Instance ${e(u).name} has been deleted successfully.`),o(w,!1),o(u,null)}catch(t){const n=j(t);E.error("Delete Failed",n),o(w,!1),o(u,null)}}function $e(t){o(u,t),o(w,!0)}function xe(){o(P,!0)}async function Ie(t){try{if(!e(a))return;await _.createOrganizationPool(e(a).id,t.detail),E.success("Pool Created",`Pool has been created successfully for organization ${e(a).name}.`),o(P,!1)}catch(n){throw n}}function J(){e(b)&&Ke(b,e(b).scrollTop=e(b).scrollHeight)}function we(t){if(t.operation==="update"){const n=t.payload;if(e(a)&&n.id===e(a).id){const r=e(a).events?.length||0,i=n.events?.length||0;o(a,he(e(a),n)),i>r&&setTimeout(()=>{J()},100)}}else if(t.operation==="delete"){const n=t.payload.id||t.payload;e(a)&&e(a).id===n&&ue(`${H}/organizations`)}}function Ee(t){if(!e(a))return;const n=t.payload;if(n.org_id===e(a).id){if(t.operation==="create")o(c,[...e(c),n]);else if(t.operation==="update")o(c,e(c).map(r=>r.id===n.id?n:r));else if(t.operation==="delete"){const r=n.id||n;o(c,e(c).filter(i=>i.id!==r))}}}function Oe(t){if(!e(a)||!e(c))return;const n=t.payload;if(e(c).some(i=>i.id===n.pool_id)){if(t.operation==="create")o(p,[...e(p),n]);else if(t.operation==="update")o(p,e(p).map(i=>i.id===n.id?n:i));else if(t.operation==="delete"){const i=n.id||n;o(p,e(p).filter(L=>L.id!==i))}}}qe(()=>{V().then(()=>{e(a)?.events?.length&&setTimeout(()=>{J()},100)});const t=G.subscribeToEntity("organization",["update","delete"],we),n=G.subscribeToEntity("pool",["create","update","delete"],Ee),r=G.subscribeToEntity("instance",["create","update","delete"],Oe);M=()=>{t(),n(),r()}}),He(()=>{M&&(M(),M=null)}),je(()=>R(),()=>{o(I,R().params.id)}),Ge(),Ne();var K=ft();Re(t=>{q(()=>Je.title=`${e(a),s(()=>e(a)?`${e(a).name} - Organization Details`:"Organization Details")??""} - GARM`)});var B=C(K),U=f(B),Q=f(U),F=f(Q),De=f(F);m(F);var X=d(F,2),Y=f(X),Z=d(f(Y),2),Te=f(Z,!0);m(Z),m(Y),m(X),m(Q),m(U);var Pe=d(U,2);{var Me=t=>{var n=dt();x(t,n)},ke=t=>{var n=ce(),r=C(n);{var i=z=>{var $=ct(),k=f($),N=f(k,!0);m(k),m($),q(()=>de(N,e(O))),x(z,$)},L=z=>{var $=ce(),k=C($);{var N=W=>{var ne=ut(),oe=C(ne);{let v=g(()=>(e(a),s(()=>e(a).name||"Organization"))),y=g(()=>(e(a),s(()=>e(a).endpoint?.name))),Le=g(()=>(Qe(me),e(a),s(()=>me(e(a).endpoint?.endpoint_type||"unknown"))));rt(oe,{get title(){return e(v)},get subtitle(){return`Endpoint: ${e(y)??""}`},get forgeIcon(){return e(Le)},onEdit:()=>o(D,!0),onDelete:()=>o(T,!0)})}var re=d(oe,2);at(re,{get entity(){return e(a)},entityType:"organization"});var ie=d(re,2);{let 
v=g(()=>(e(a),s(()=>e(a).id||""))),y=g(()=>(e(a),s(()=>e(a).name||"")));st(ie,{entityType:"organization",get entityId(){return e(v)},get entityName(){return e(y)}})}var se=d(ie,2);{let v=g(()=>(e(a),s(()=>e(a).id||""))),y=g(()=>(e(a),s(()=>e(a).name||"")));nt(se,{get pools(){return e(c)},entityType:"organization",get entityId(){return e(v)},get entityName(){return e(y)},$$events:{addPool:xe}})}var le=d(se,2);it(le,{get instances(){return e(p)},entityType:"organization",onDeleteInstance:$e});var Fe=d(le,2);{let v=g(()=>(e(a),s(()=>e(a)?.events)));ot(Fe,{get events(){return e(v)},get eventsContainer(){return e(b)},set eventsContainer(y){o(b,y)},$$legacy:!0})}x(W,ne)};h(k,W=>{e(a)&&W(N)},!0)}x(z,$)};h(r,z=>{e(O)?z(i):z(L,!1)},!0)}x(t,n)};h(Pe,t=>{e(S)?t(Me):t(ke,!1)})}m(B);var ee=d(B,2);{var Ce=t=>{tt(t,{get entity(){return e(a)},entityType:"organization",$$events:{close:()=>o(D,!1),submit:n=>_e(n.detail)}})};h(ee,t=>{e(D)&&e(a)&&t(Ce)})}var te=d(ee,2);{var Ae=t=>{fe(t,{title:"Delete Organization",message:"Are you sure you want to delete this organization? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(a),s(()=>e(a).name)},$$events:{close:()=>o(T,!1),confirm:be}})};h(te,t=>{e(T)&&e(a)&&t(Ae)})}var ae=d(te,2);{var Se=t=>{fe(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(u),s(()=>e(u).name)},$$events:{close:()=>{o(w,!1),o(u,null)},confirm:ze}})};h(ae,t=>{e(w)&&e(u)&&t(Se)})}var Be=d(ae,2);{var Ue=t=>{{let n=g(()=>(e(a),s(()=>e(a).id||"")));lt(t,{initialEntityType:"organization",get initialEntityId(){return e(n)},$$events:{close:()=>o(P,!1),submit:Ie}})}};h(Be,t=>{e(P)&&e(a)&&t(Ue)})}q(()=>{Ze(De,"href",`${H}/organizations`),de(Te,(e(a),s(()=>e(a)?e(a).name:"Loading...")))}),x(ge,K),Ve(),ye()}export{Mt as component}; diff --git a/webapp/assets/_app/immutable/nodes/13.Br7HzjXP.js b/webapp/assets/_app/immutable/nodes/13.Br7HzjXP.js deleted file mode 100644 index 7a557f43..00000000 --- a/webapp/assets/_app/immutable/nodes/13.Br7HzjXP.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as re}from"../chunks/B3Pzt0F_.js";import{p as se,o as ne,l as d,a as ie,f as ce,h as de,b as pe,c as me,d as ue,g as t,m as r,$ as fe,j as ge,q as E,u as S,k as v,s as o,r as ye,n as he}from"../chunks/D8EpLgQ1.js";import{i as w,s as Pe,a as Ce}from"../chunks/5WA7h8uK.js";import{g as N}from"../chunks/CiE1LlKV.js";import"../chunks/CoIRRsD9.js";import{P as ve}from"../chunks/CO4LUyTP.js";import{C as $e}from"../chunks/CwqI2jFH.js";import{U as be}from"../chunks/DQP15tlf.js";import{D as _e}from"../chunks/KQ2xQpA3.js";import{e as Me,a as R}from"../chunks/wyaP0EDu.js";import{t as $}from"../chunks/BEkVdVE1.js";import{e as b,h as Ee}from"../chunks/BGVHQGl-.js";import{D as we,G as D,a as De}from"../chunks/C9DJVOi1.js";import{E as Te}from"../chunks/B7ITzBt8.js";import{E as ke}from"../chunks/CGpPw4EW.js";import{S as Ae}from"../chunks/BE4wujub.js";import{P as Fe}from"../chunks/CLYUNKnN.js";var Ue=ce('
                ',1);function Xe(G,q){se(q,!1);const[L,j]=Pe(),s=()=>Ce(Me,"$eagerCache",L),_=r(),i=r(),p=r(),T=r();let m=r([]),g=r(!0),u=r(""),y=r(""),n=r(1),c=r(25),h=r(!1),P=r(!1),f=r(!1),a=r(null);async function H(e){try{o(u,""),o(h,!1)}catch(l){throw o(u,l instanceof Error?l.message:"Failed to create pool"),l}}async function V(e){if(t(a))try{await N.updatePool(t(a).id,e),o(P,!1),$.add({type:"success",title:"Pool Updated",message:`Pool ${t(a).id.slice(0,8)}... has been updated successfully.`}),o(a,null)}catch(l){const C=l instanceof Error?l.message:"Failed to update pool";throw $.add({type:"error",title:"Update Failed",message:C}),l}}async function z(){if(!t(a))return;const e=`Pool ${t(a).id.slice(0,8)}...`;try{await N.deletePool(t(a).id),o(f,!1),$.add({type:"success",title:"Pool Deleted",message:`${e} has been deleted successfully.`}),o(a,null)}catch(l){const C=l instanceof Error?l.message:"Failed to delete pool";o(u,C),$.add({type:"error",title:"Delete Failed",message:C})}o(f,!1),o(a,null)}function B(){o(h,!0)}function k(e){o(a,e),o(P,!0)}function A(e){o(a,e),o(f,!0)}ne(async()=>{try{o(g,!0);const e=await R.getPools();e&&Array.isArray(e)&&o(m,e)}catch(e){console.error("Failed to load pools:",e),o(u,e instanceof Error?e.message:"Failed to load pools")}finally{o(g,!1)}});async function J(){try{await R.retryResource("pools")}catch(e){console.error("Retry failed:",e)}}const K=[{key:"id",title:"ID",flexible:!0,cellComponent:Te,cellProps:{entityType:"pool",showId:!0,fontMono:!0}},{key:"image",title:"Image",flexible:!0,cellComponent:D,cellProps:{field:"image",type:"code",showTitle:!0}},{key:"provider",title:"Provider",cellComponent:D,cellProps:{field:"provider_name"}},{key:"flavor",title:"Flavor",cellComponent:D,cellProps:{field:"flavor"}},{key:"entity",title:"Entity",cellComponent:Fe},{key:"endpoint",title:"Endpoint",cellComponent:ke},{key:"status",title:"Status",cellComponent:Ae,cellProps:{statusType:"enabled"}},{key:"actions",title:"Actions",align:"right",cellComponent:De}],O={entityType:"pool",primaryText:{field:"id",isClickable:!0,href:"/pools/{id}",useId:!0,isMonospace:!0},secondaryText:{field:"entity_name",computedValue:e=>b(e,s())},badges:[{type:"custom",value:e=>({variant:e.enabled?"success":"error",text:e.enabled?"Enabled":"Disabled"})}],actions:[{type:"edit",handler:e=>k(e)},{type:"delete",handler:e=>A(e)}]};function Q(e){o(y,e.detail.term),o(n,1)}function W(e){o(n,e.detail.page)}function X(e){o(c,e.detail.perPage),o(n,1)}function Y(e){k(e.detail.item)}function Z(e){A(e.detail.item)}d(()=>(t(m),s()),()=>{(!t(m).length||s().loaded.pools)&&o(m,s().pools)}),d(()=>s(),()=>{o(g,s().loading.pools)}),d(()=>s(),()=>{o(_,s().errorMessages.pools)}),d(()=>(t(m),t(y),s()),()=>{o(i,Ee(t(m),t(y),e=>b(e,s())))}),d(()=>(t(i),t(c)),()=>{o(p,Math.ceil(t(i).length/t(c)))}),d(()=>(t(n),t(p)),()=>{t(n)>t(p)&&t(p)>0&&o(n,t(p))}),d(()=>(t(i),t(n),t(c)),()=>{o(T,t(i).slice((t(n)-1)*t(c),t(n)*t(c)))}),ie(),re();var F=Ue();de(e=>{fe.title="Pools - GARM"});var M=pe(F),U=ge(M);ve(U,{title:"Pools",description:"Manage runner pools across all entities",actionLabel:"Add Pool",$$events:{action:B}});var ee=v(U,2);{let e=E(()=>t(_)||t(u)),l=E(()=>!!t(_));we(ee,{get columns(){return K},get data(){return t(T)},get loading(){return t(g)},get error(){return t(e)},get searchTerm(){return t(y)},searchPlaceholder:"Search by entity name...",get currentPage(){return t(n)},get perPage(){return t(c)},get totalPages(){return t(p)},get totalItems(){return t(i),S(()=>t(i).length)},itemName:"pools",emptyIconType:"cog",get 
showRetry(){return t(l)},get mobileCardConfig(){return O},$$events:{search:Q,pageChange:W,perPageChange:X,retry:J,edit:Y,delete:Z}})}ye(M);var x=v(M,2);{var te=e=>{$e(e,{$$events:{close:()=>o(h,!1),submit:l=>H(l.detail)}})};w(x,e=>{t(h)&&e(te)})}var I=v(x,2);{var oe=e=>{be(e,{get pool(){return t(a)},$$events:{close:()=>{o(P,!1),o(a,null)},submit:l=>V(l.detail)}})};w(I,e=>{t(P)&&t(a)&&e(oe)})}var ae=v(I,2);{var le=e=>{{let l=E(()=>(t(a),he(b),s(),S(()=>`Pool ${t(a).id.slice(0,8)}... (${b(t(a),s())})`)));_e(e,{title:"Delete Pool",message:"Are you sure you want to delete this pool? This action cannot be undone and will remove all associated runners.",get itemName(){return t(l)},$$events:{close:()=>{o(f,!1),o(a,null)},confirm:z}})}};w(ae,e=>{t(f)&&t(a)&&e(le)})}me(G,F),ue(),j()}export{Xe as component}; diff --git a/webapp/assets/_app/immutable/nodes/13.CEJ1u9Ql.js b/webapp/assets/_app/immutable/nodes/13.CEJ1u9Ql.js new file mode 100644 index 00000000..a938b366 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/13.CEJ1u9Ql.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as se}from"../chunks/B3Pzt0F_.js";import{p as ne,o as ie,l as d,a as ce,f as de,h as pe,b as ue,c as me,d as fe,g as t,m as l,$ as ge,j as ye,q as M,u as F,k as C,s as o,r as he,n as Pe}from"../chunks/D8EpLgQ1.js";import{i as D,s as Ce,a as ve}from"../chunks/5WA7h8uK.js";import{g as N}from"../chunks/CiE1LlKV.js";import{P as $e}from"../chunks/CO4LUyTP.js";import{C as be}from"../chunks/CRD55Dyg.js";import{U as _e}from"../chunks/Bgb-frqW.js";import{D as Me}from"../chunks/KQ2xQpA3.js";import{e as De,a as R}from"../chunks/wyaP0EDu.js";import{t as f}from"../chunks/BEkVdVE1.js";import{e as v,h as Ee}from"../chunks/BGVHQGl-.js";import{e as G}from"../chunks/BZiHL9L3.js";import{D as Te,G as E,a as we}from"../chunks/BrNfsPe8.js";import{E as ke}from"../chunks/D4PaGKsV.js";import{E as Ae}from"../chunks/CGpPw4EW.js";import{S as xe}from"../chunks/MCv1Wq2q.js";import{P as Ie}from"../chunks/BzlxTz7Q.js";import"../chunks/CRhkqW2i.js";var Ue=de('
                ',1);function Ze(q,L){ne(L,!1);const[j,H]=Ce(),r=()=>ve(De,"$eagerCache",j),$=l(),i=l(),p=l(),T=l();let u=l([]),g=l(!0),w=l(""),y=l(""),s=l(1),c=l(25),h=l(!1),P=l(!1),m=l(!1),a=l(null);async function V(){f.success("Pool Created","Pool has been created successfully."),o(h,!1)}async function z(e){if(t(a))try{await N.updatePool(t(a).id,e),o(P,!1),f.add({type:"success",title:"Pool Updated",message:`Pool ${t(a).id.slice(0,8)}... has been updated successfully.`}),o(a,null)}catch(n){const _=G(n);throw f.add({type:"error",title:"Update Failed",message:_}),n}}async function B(){if(!t(a))return;const e=`Pool ${t(a).id.slice(0,8)}...`;try{await N.deletePool(t(a).id),o(m,!1),f.add({type:"success",title:"Pool Deleted",message:`${e} has been deleted successfully.`}),o(a,null)}catch(n){const _=G(n);f.add({type:"error",title:"Delete Failed",message:_})}o(m,!1),o(a,null)}function J(){o(h,!0)}function k(e){o(a,e),o(P,!0)}function A(e){o(a,e),o(m,!0)}ie(async()=>{try{o(g,!0);const e=await R.getPools();e&&Array.isArray(e)&&o(u,e)}catch(e){console.error("Failed to load pools:",e),o(w,e instanceof Error?e.message:"Failed to load pools")}finally{o(g,!1)}});async function K(){try{await R.retryResource("pools")}catch(e){console.error("Retry failed:",e)}}const O=[{key:"id",title:"ID",flexible:!0,cellComponent:ke,cellProps:{entityType:"pool",showId:!0,fontMono:!0}},{key:"image",title:"Image",flexible:!0,cellComponent:E,cellProps:{field:"image",type:"code",showTitle:!0}},{key:"provider",title:"Provider",cellComponent:E,cellProps:{field:"provider_name"}},{key:"flavor",title:"Flavor",cellComponent:E,cellProps:{field:"flavor"}},{key:"entity",title:"Entity",cellComponent:Ie},{key:"endpoint",title:"Endpoint",cellComponent:Ae},{key:"status",title:"Status",cellComponent:xe,cellProps:{statusType:"enabled"}},{key:"actions",title:"Actions",align:"right",cellComponent:we}],Q={entityType:"pool",primaryText:{field:"id",isClickable:!0,href:"/pools/{id}",useId:!0,isMonospace:!0},secondaryText:{field:"entity_name",computedValue:e=>v(e,r())},badges:[{type:"custom",value:e=>({variant:e.enabled?"success":"error",text:e.enabled?"Enabled":"Disabled"})}],actions:[{type:"edit",handler:e=>k(e)},{type:"delete",handler:e=>A(e)}]};function W(e){o(y,e.detail.term),o(s,1)}function X(e){o(s,e.detail.page)}function Y(e){o(c,e.detail.perPage),o(s,1)}function Z(e){k(e.detail.item)}function ee(e){A(e.detail.item)}d(()=>(t(u),r()),()=>{(!t(u).length||r().loaded.pools)&&o(u,r().pools)}),d(()=>r(),()=>{o(g,r().loading.pools)}),d(()=>r(),()=>{o($,r().errorMessages.pools)}),d(()=>(t(u),t(y),r()),()=>{o(i,Ee(t(u),t(y),e=>v(e,r())))}),d(()=>(t(i),t(c)),()=>{o(p,Math.ceil(t(i).length/t(c)))}),d(()=>(t(s),t(p)),()=>{t(s)>t(p)&&t(p)>0&&o(s,t(p))}),d(()=>(t(i),t(s),t(c)),()=>{o(T,t(i).slice((t(s)-1)*t(c),t(s)*t(c)))}),ce(),se();var x=Ue();pe(e=>{ge.title="Pools - GARM"});var b=ue(x),I=ye(b);$e(I,{title:"Pools",description:"Manage runner pools across all entities",actionLabel:"Add Pool",$$events:{action:J}});var te=C(I,2);{let e=M(()=>t($)||t(w)),n=M(()=>!!t($));Te(te,{get columns(){return O},get data(){return t(T)},get loading(){return t(g)},get error(){return t(e)},get searchTerm(){return t(y)},searchPlaceholder:"Search by entity name...",get currentPage(){return t(s)},get perPage(){return t(c)},get totalPages(){return t(p)},get totalItems(){return t(i),F(()=>t(i).length)},itemName:"pools",emptyIconType:"cog",get showRetry(){return t(n)},get mobileCardConfig(){return 
Q},$$events:{search:W,pageChange:X,perPageChange:Y,retry:K,edit:Z,delete:ee}})}he(b);var U=C(b,2);{var oe=e=>{be(e,{$$events:{close:()=>o(h,!1),submit:()=>V()}})};D(U,e=>{t(h)&&e(oe)})}var S=C(U,2);{var ae=e=>{_e(e,{get pool(){return t(a)},$$events:{close:()=>{o(P,!1),o(a,null)},submit:n=>z(n.detail)}})};D(S,e=>{t(P)&&t(a)&&e(ae)})}var le=C(S,2);{var re=e=>{{let n=M(()=>(t(a),Pe(v),r(),F(()=>`Pool ${t(a).id.slice(0,8)}... (${v(t(a),r())})`)));Me(e,{title:"Delete Pool",message:"Are you sure you want to delete this pool? This action cannot be undone and will remove all associated runners.",get itemName(){return t(n)},$$events:{close:()=>{o(m,!1),o(a,null)},confirm:B}})}};D(le,e=>{t(m)&&t(a)&&e(re)})}me(q,x),fe(),H()}export{Ze as component}; diff --git a/webapp/assets/_app/immutable/nodes/14.BJHnbtAi.js b/webapp/assets/_app/immutable/nodes/14.BJHnbtAi.js new file mode 100644 index 00000000..b743db86 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/14.BJHnbtAi.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as He}from"../chunks/B3Pzt0F_.js";import{p as Ve,o as We,A as Ke,l as Qe,a as Xe,f as h,h as Ye,b as B,t as b,c as x,d as Ze,s as n,m as y,u as i,$ as ta,g as t,j as r,r as a,k as s,v as l,y as xt,B as Kt,q as N,n as f}from"../chunks/D8EpLgQ1.js";import{i as u,s as ea,a as aa}from"../chunks/5WA7h8uK.js";import{w as Qt,e as ra,i as da}from"../chunks/u94nIB4-.js";import{c as Xt,g as F,s as sa}from"../chunks/CiE1LlKV.js";import{p as ia}from"../chunks/BE8f1Riw.js";import{g as Yt}from"../chunks/DXCC0cSN.js";import{b as ut}from"../chunks/CRhkqW2i.js";import{U as oa}from"../chunks/Bgb-frqW.js";import{D as Zt}from"../chunks/KQ2xQpA3.js";import{D as na,I as la}from"../chunks/BEoJgOul.js";import{t as D}from"../chunks/BEkVdVE1.js";import{e as P,i as R,j as te,b as C,g as ee}from"../chunks/BGVHQGl-.js";import{e as L}from"../chunks/BZiHL9L3.js";var va=h('

                Loading pool...

                '),ca=h('

                '),ma=h('
                GitHub Runner Group
                '),xa=h(' '),ua=h('
                Tags
                '),ga=h('

                Extra Specifications

                 
                '),pa=h('

                Basic Information

                Pool ID
                Provider
                Image
                Flavor
                Status
                Entity
                Created At
                Updated At

                Configuration

                Max Runners
                Min Idle Runners
                Bootstrap Timeout
                Priority
                Runner Prefix
                OS Type / Architecture
                ',1),fa=h(' ',1);function Sa(ae,re){Ve(re,!1);const[de,se]=ea(),gt=()=>aa(ia,"$page",de),O=y();let e=y(null),G=y(!0),M=y(""),E=y(!1),A=y(!1),T=y(!1),g=y(null),U=null;async function ie(){if(t(O))try{n(G,!0),n(M,""),n(e,await F.getPool(t(O)))}catch(d){n(M,L(d))}finally{n(G,!1)}}async function oe(d){if(t(e))try{const o=await F.updatePool(t(e).id,d);n(e,o),n(E,!1),D.success("Pool Updated",`Pool ${t(e).id} has been updated successfully.`)}catch(o){const _=L(o);D.error("Update Failed",_)}}async function ne(){if(t(e)){try{await F.deletePool(t(e).id),Yt(`${ut}/pools`)}catch(d){const o=L(d);D.error("Delete Failed",o)}n(A,!1)}}async function le(){if(t(g)){try{await F.deleteInstance(t(g).name),D.success("Instance Deleted",`Instance ${t(g).name} has been deleted successfully.`)}catch(d){const o=L(d);D.error("Delete Failed",o)}n(T,!1),n(g,null)}}function ve(d){n(g,d),n(T,!0)}function ce(d){if(!d)return"{}";try{if(typeof d=="string"){const o=JSON.parse(d);return JSON.stringify(o,null,2)}return JSON.stringify(d,null,2)}catch{return d.toString()}}function me(d){if(d.operation==="update"){const o=d.payload;t(e)&&o.id===t(e).id&&n(e,o)}else if(d.operation==="delete"){const o=d.payload.id||d.payload;t(e)&&t(e).id===o&&Yt(`${ut}/pools`)}}function xe(d){if(!t(e)||!t(e).instances)return;const o=d.payload;if(o.pool_id===t(e).id){if(d.operation==="create")xt(e,t(e).instances=[...t(e).instances,o]);else if(d.operation==="update")xt(e,t(e).instances=t(e).instances.map(_=>_.id===o.id?o:_));else if(d.operation==="delete"){const _=o.id||o;xt(e,t(e).instances=t(e).instances.filter(z=>z.id!==_))}n(e,t(e))}}We(()=>{ie();const d=Qt.subscribeToEntity("pool",["update","delete"],me),o=Qt.subscribeToEntity("instance",["create","update","delete"],xe);U=()=>{d(),o()}}),Ke(()=>{U&&(U(),U=null)}),Qe(()=>gt(),()=>{n(O,gt().params.id)}),Xe(),He();var pt=fa();Ye(d=>{b(()=>ta.title=`${t(e),i(()=>t(e)?`Pool ${t(e).id} - Pool Details`:"Pool Details")??""} - GARM`)});var J=B(pt),j=r(J),ft=r(j),q=r(ft),ue=r(q);a(q);var _t=s(q,2),yt=r(_t),ht=s(r(yt),2),ge=r(ht,!0);a(ht),a(yt),a(_t),a(ft),a(j);var pe=s(j,2);{var fe=d=>{var o=va();x(d,o)},_e=d=>{var o=Kt(),_=B(o);{var z=k=>{var w=ca(),S=r(w),H=r(S,!0);a(S),a(w),b(()=>l(H,t(M))),x(k,w)},we=k=>{var w=Kt(),S=B(w);{var H=V=>{var wt=pa(),$t=B(wt);{let v=N(()=>(f(P),t(e),i(()=>P(t(e))))),c=N(()=>(f(R),t(e),i(()=>R(t(e))))),m=N(()=>(f(ee),t(e),i(()=>ee(t(e).endpoint?.endpoint_type||"unknown"))));na($t,{get title(){return t(e),i(()=>t(e).id)},get subtitle(){return`Pool for ${t(v)??""} (${t(c)??""})`},get forgeIcon(){return t(m)},onEdit:()=>n(E,!0),onDelete:()=>n(A,!0)})}var W=s($t,2),K=r(W),Pt=r(K),It=s(r(Pt),2),Q=r(It),Dt=s(r(Q),2),$e=r(Dt,!0);a(Dt),a(Q);var X=s(Q,2),Mt=s(r(X),2),Pe=r(Mt,!0);a(Mt),a(X);var Y=s(X,2),Et=s(r(Y),2),At=r(Et),Ie=r(At,!0);a(At),a(Et),a(Y);var Z=s(Y,2),Tt=s(r(Z),2),De=r(Tt,!0);a(Tt),a(Z);var tt=s(Z,2),Ut=s(r(tt),2),et=r(Ut),Me=r(et,!0);a(et),a(Ut),a(tt);var at=s(tt,2),St=s(r(at),2),Bt=r(St),rt=r(Bt),Ee=r(rt,!0);a(rt);var dt=s(rt,2),Ae=r(dt,!0);a(dt),a(Bt),a(St),a(at);var st=s(at,2),Nt=s(r(st),2),Te=r(Nt,!0);a(Nt),a(st);var Ft=s(st,2),Rt=s(r(Ft),2),Ue=r(Rt,!0);a(Rt),a(Ft),a(It),a(Pt),a(K);var Ct=s(K,2),Lt=r(Ct),Ot=s(r(Lt),2),it=r(Ot),Gt=s(r(it),2),Se=r(Gt,!0);a(Gt),a(it);var ot=s(it,2),Jt=s(r(ot),2),Be=r(Jt,!0);a(Jt),a(ot);var nt=s(ot,2),jt=s(r(nt),2),Ne=r(jt);a(jt),a(nt);var lt=s(nt,2),qt=s(r(lt),2),Fe=r(qt,!0);a(qt),a(lt);var vt=s(lt,2),zt=s(r(vt),2),Re=r(zt,!0);a(zt),a(vt);var ct=s(vt,2),Ht=s(r(ct),2),Ce=r(Ht);a(Ht),a(ct);var Vt=s(ct,2);{var Le=v=>{var 
c=ma(),m=s(r(c),2),p=r(m,!0);a(m),a(c),b(()=>l(p,(t(e),i(()=>t(e)["github-runner-group"])))),x(v,c)};u(Vt,v=>{t(e),i(()=>t(e)["github-runner-group"])&&v(Le)})}var Oe=s(Vt,2);{var Ge=v=>{var c=ua(),m=s(r(c),2),p=r(m);ra(p,5,()=>(t(e),i(()=>t(e).tags)),da,(I,$)=>{var mt=xa(),ze=r(mt,!0);a(mt),b(()=>l(ze,(t($),i(()=>typeof t($)=="string"?t($):t($).name)))),x(I,mt)}),a(p),a(m),a(c),x(v,c)};u(Oe,v=>{t(e),i(()=>t(e).tags&&t(e).tags.length>0)&&v(Ge)})}a(Ot),a(Lt),a(Ct),a(W);var Wt=s(W,2);{var Je=v=>{var c=ga(),m=r(c),p=s(r(m),2),I=r(p,!0);a(p),a(m),a(c),b($=>l(I,$),[()=>(t(e),i(()=>ce(t(e).extra_specs)))]),x(v,c)};u(Wt,v=>{t(e),i(()=>t(e).extra_specs)&&v(Je)})}var je=s(Wt,2);{var qe=v=>{la(v,{get instances(){return t(e),i(()=>t(e).instances)},entityType:"repository",onDeleteInstance:ve})};u(je,v=>{t(e),i(()=>t(e).instances)&&v(qe)})}b((v,c,m,p,I)=>{l($e,(t(e),i(()=>t(e).id))),l(Pe,(t(e),i(()=>t(e).provider_name))),l(Ie,(t(e),i(()=>t(e).image))),l(De,(t(e),i(()=>t(e).flavor))),sa(et,1,`inline-flex px-2 py-1 text-xs font-medium rounded-full ${t(e),i(()=>t(e).enabled?"bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200":"bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200")??""}`),l(Me,(t(e),i(()=>t(e).enabled?"Enabled":"Disabled"))),l(Ee,v),Xt(dt,"href",c),l(Ae,m),l(Te,p),l(Ue,I),l(Se,(t(e),i(()=>t(e).max_runners))),l(Be,(t(e),i(()=>t(e).min_idle_runners))),l(Ne,`${t(e),i(()=>t(e).runner_bootstrap_timeout)??""} minutes`),l(Fe,(t(e),i(()=>t(e).priority))),l(Re,(t(e),i(()=>t(e).runner_prefix||"garm"))),l(Ce,`${t(e),i(()=>t(e).os_type)??""} / ${t(e),i(()=>t(e).os_arch)??""}`)},[()=>(f(R),t(e),i(()=>R(t(e)))),()=>(f(te),t(e),i(()=>te(t(e)))),()=>(f(P),t(e),i(()=>P(t(e)))),()=>(f(C),t(e),i(()=>C(t(e).created_at||""))),()=>(f(C),t(e),i(()=>C(t(e).updated_at||"")))]),x(V,wt)};u(S,V=>{t(e)&&V(H)},!0)}x(k,w)};u(_,k=>{t(M)?k(z):k(we,!1)},!0)}x(d,o)};u(pe,d=>{t(G)?d(fe):d(_e,!1)})}a(J);var bt=s(J,2);{var ye=d=>{oa(d,{get pool(){return t(e)},$$events:{close:()=>n(E,!1),submit:o=>oe(o.detail)}})};u(bt,d=>{t(E)&&t(e)&&d(ye)})}var kt=s(bt,2);{var he=d=>{{let o=N(()=>(t(e),f(P),i(()=>`Pool ${t(e).id} (${P(t(e))})`)));Zt(d,{title:"Delete Pool",message:"Are you sure you want to delete this pool? This action cannot be undone and will remove all associated runners.",get itemName(){return t(o)},$$events:{close:()=>n(A,!1),confirm:ne}})}};u(kt,d=>{t(A)&&t(e)&&d(he)})}var be=s(kt,2);{var ke=d=>{Zt(d,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return t(g),i(()=>t(g).name)},$$events:{close:()=>{n(T,!1),n(g,null)},confirm:le}})};u(be,d=>{t(T)&&t(g)&&d(ke)})}b(()=>{Xt(ue,"href",`${ut}/pools`),l(ge,(t(e),i(()=>t(e)?t(e).id:"Loading...")))}),x(ae,pt),Ze(),se()}export{Sa as component}; diff --git a/webapp/assets/_app/immutable/nodes/14.Cd0DOn96.js b/webapp/assets/_app/immutable/nodes/14.Cd0DOn96.js deleted file mode 100644 index c682684d..00000000 --- a/webapp/assets/_app/immutable/nodes/14.Cd0DOn96.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as ze}from"../chunks/B3Pzt0F_.js";import{p as He,o as Ve,A as We,l as Ke,a as Qe,f as h,h as Xe,b as S,t as b,c as u,d as Ye,s as n,m as y,u as i,$ as Ze,g as t,j as r,r as a,k as s,v as l,y as mt,B as Wt,q as B,n as f}from"../chunks/D8EpLgQ1.js";import{i as g,s as ta,a as ea}from"../chunks/5WA7h8uK.js";import{w as Kt,e as aa,i as ra}from"../chunks/u94nIB4-.js";import{c as Qt,g as N,s as da}from"../chunks/CiE1LlKV.js";import{p as sa}from"../chunks/C41YH50Q.js";import{g as Xt}from"../chunks/CTf6mQoE.js";import{b as xt}from"../chunks/CoIRRsD9.js";import{U as ia}from"../chunks/DQP15tlf.js";import{D as Yt}from"../chunks/KQ2xQpA3.js";import{D as oa,I as na}from"../chunks/DDhBTdDt.js";import{t as E}from"../chunks/BEkVdVE1.js";import{e as P,i as R,j as Zt,b as C,g as te}from"../chunks/BGVHQGl-.js";var la=h('

                Loading pool...

                '),va=h('

                '),ca=h('
                GitHub Runner Group
                '),ma=h(' '),xa=h('
                Tags
                '),ua=h('

                Extra Specifications

                 
                '),ga=h('

                Basic Information

                Pool ID
                Provider
                Image
                Flavor
                Status
                Entity
                Created At
                Updated At

                Configuration

                Max Runners
                Min Idle Runners
                Bootstrap Timeout
                Priority
                Runner Prefix
                OS Type / Architecture
                ',1),pa=h(' ',1);function Ta(ee,ae){He(ae,!1);const[re,de]=ta(),ut=()=>ea(sa,"$page",re),L=y();let e=y(null),O=y(!0),M=y(""),F=y(!1),T=y(!1),D=y(!1),x=y(null),A=null;async function se(){if(t(L))try{n(O,!0),n(M,""),n(e,await N.getPool(t(L)))}catch(d){n(M,d instanceof Error?d.message:"Failed to load pool")}finally{n(O,!1)}}async function ie(d){if(t(e))try{const o=await N.updatePool(t(e).id,d);n(e,o),n(F,!1),E.success("Pool Updated",`Pool ${t(e).id} has been updated successfully.`)}catch(o){const _=o instanceof Error?o.message:"Failed to update pool";E.error("Update Failed",_)}}async function oe(){if(t(e)){try{await N.deletePool(t(e).id),Xt(`${xt}/pools`)}catch(d){const o=d instanceof Error?d.message:"Failed to delete pool";E.error("Delete Failed",o)}n(T,!1)}}async function ne(){if(t(x)){try{await N.deleteInstance(t(x).name),E.success("Instance Deleted",`Instance ${t(x).name} has been deleted successfully.`),n(D,!1),n(x,null)}catch(d){const o=d instanceof Error?d.message:"Failed to delete instance";E.error("Delete Failed",o)}n(D,!1),n(x,null)}}function le(d){n(x,d),n(D,!0)}function ve(d){if(!d)return"{}";try{if(typeof d=="string"){const o=JSON.parse(d);return JSON.stringify(o,null,2)}return JSON.stringify(d,null,2)}catch{return d.toString()}}function ce(d){if(d.operation==="update"){const o=d.payload;t(e)&&o.id===t(e).id&&n(e,o)}else if(d.operation==="delete"){const o=d.payload.id||d.payload;t(e)&&t(e).id===o&&Xt(`${xt}/pools`)}}function me(d){if(!t(e)||!t(e).instances)return;const o=d.payload;if(o.pool_id===t(e).id){if(d.operation==="create")mt(e,t(e).instances=[...t(e).instances,o]);else if(d.operation==="update")mt(e,t(e).instances=t(e).instances.map(_=>_.id===o.id?o:_));else if(d.operation==="delete"){const _=o.id||o;mt(e,t(e).instances=t(e).instances.filter(q=>q.id!==_))}n(e,t(e))}}Ve(()=>{se();const d=Kt.subscribeToEntity("pool",["update","delete"],ce),o=Kt.subscribeToEntity("instance",["create","update","delete"],me);A=()=>{d(),o()}}),We(()=>{A&&(A(),A=null)}),Ke(()=>ut(),()=>{n(L,ut().params.id)}),Qe(),ze();var gt=pa();Xe(d=>{b(()=>Ze.title=`${t(e),i(()=>t(e)?`Pool ${t(e).id} - Pool Details`:"Pool Details")??""} - GARM`)});var G=S(gt),J=r(G),pt=r(J),j=r(pt),xe=r(j);a(j);var ft=s(j,2),_t=r(ft),yt=s(r(_t),2),ue=r(yt,!0);a(yt),a(_t),a(ft),a(pt),a(J);var ge=s(J,2);{var pe=d=>{var o=la();u(d,o)},fe=d=>{var o=Wt(),_=S(o);{var q=k=>{var w=va(),U=r(w),z=r(U,!0);a(U),a(w),b(()=>l(z,t(M))),u(k,w)},ke=k=>{var w=Wt(),U=S(w);{var z=H=>{var kt=ga(),wt=S(kt);{let v=B(()=>(f(P),t(e),i(()=>P(t(e))))),c=B(()=>(f(R),t(e),i(()=>R(t(e))))),m=B(()=>(f(te),t(e),i(()=>te(t(e).endpoint?.endpoint_type||"unknown"))));oa(wt,{get title(){return t(e),i(()=>t(e).id)},get subtitle(){return`Pool for ${t(v)??""} (${t(c)??""})`},get forgeIcon(){return t(m)},onEdit:()=>n(F,!0),onDelete:()=>n(T,!0)})}var V=s(wt,2),W=r(V),$t=r(W),Pt=s(r($t),2),K=r(Pt),Dt=s(r(K),2),we=r(Dt,!0);a(Dt),a(K);var Q=s(K,2),It=s(r(Q),2),$e=r(It,!0);a(It),a(Q);var X=s(Q,2),Et=s(r(X),2),Mt=r(Et),Pe=r(Mt,!0);a(Mt),a(Et),a(X);var Y=s(X,2),Ft=s(r(Y),2),De=r(Ft,!0);a(Ft),a(Y);var Z=s(Y,2),Tt=s(r(Z),2),tt=r(Tt),Ie=r(tt,!0);a(tt),a(Tt),a(Z);var et=s(Z,2),At=s(r(et),2),Ut=r(At),at=r(Ut),Ee=r(at,!0);a(at);var rt=s(at,2),Me=r(rt,!0);a(rt),a(Ut),a(At),a(et);var dt=s(et,2),St=s(r(dt),2),Fe=r(St,!0);a(St),a(dt);var Bt=s(dt,2),Nt=s(r(Bt),2),Te=r(Nt,!0);a(Nt),a(Bt),a(Pt),a($t),a(W);var Rt=s(W,2),Ct=r(Rt),Lt=s(r(Ct),2),st=r(Lt),Ot=s(r(st),2),Ae=r(Ot,!0);a(Ot),a(st);var it=s(st,2),Gt=s(r(it),2),Ue=r(Gt,!0);a(Gt),a(it);var 
ot=s(it,2),Jt=s(r(ot),2),Se=r(Jt);a(Jt),a(ot);var nt=s(ot,2),jt=s(r(nt),2),Be=r(jt,!0);a(jt),a(nt);var lt=s(nt,2),qt=s(r(lt),2),Ne=r(qt,!0);a(qt),a(lt);var vt=s(lt,2),zt=s(r(vt),2),Re=r(zt);a(zt),a(vt);var Ht=s(vt,2);{var Ce=v=>{var c=ca(),m=s(r(c),2),p=r(m,!0);a(m),a(c),b(()=>l(p,(t(e),i(()=>t(e)["github-runner-group"])))),u(v,c)};g(Ht,v=>{t(e),i(()=>t(e)["github-runner-group"])&&v(Ce)})}var Le=s(Ht,2);{var Oe=v=>{var c=xa(),m=s(r(c),2),p=r(m);aa(p,5,()=>(t(e),i(()=>t(e).tags)),ra,(I,$)=>{var ct=ma(),qe=r(ct,!0);a(ct),b(()=>l(qe,(t($),i(()=>typeof t($)=="string"?t($):t($).name)))),u(I,ct)}),a(p),a(m),a(c),u(v,c)};g(Le,v=>{t(e),i(()=>t(e).tags&&t(e).tags.length>0)&&v(Oe)})}a(Lt),a(Ct),a(Rt),a(V);var Vt=s(V,2);{var Ge=v=>{var c=ua(),m=r(c),p=s(r(m),2),I=r(p,!0);a(p),a(m),a(c),b($=>l(I,$),[()=>(t(e),i(()=>ve(t(e).extra_specs)))]),u(v,c)};g(Vt,v=>{t(e),i(()=>t(e).extra_specs)&&v(Ge)})}var Je=s(Vt,2);{var je=v=>{na(v,{get instances(){return t(e),i(()=>t(e).instances)},entityType:"repository",onDeleteInstance:le})};g(Je,v=>{t(e),i(()=>t(e).instances)&&v(je)})}b((v,c,m,p,I)=>{l(we,(t(e),i(()=>t(e).id))),l($e,(t(e),i(()=>t(e).provider_name))),l(Pe,(t(e),i(()=>t(e).image))),l(De,(t(e),i(()=>t(e).flavor))),da(tt,1,`inline-flex px-2 py-1 text-xs font-medium rounded-full ${t(e),i(()=>t(e).enabled?"bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200":"bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200")??""}`),l(Ie,(t(e),i(()=>t(e).enabled?"Enabled":"Disabled"))),l(Ee,v),Qt(rt,"href",c),l(Me,m),l(Fe,p),l(Te,I),l(Ae,(t(e),i(()=>t(e).max_runners))),l(Ue,(t(e),i(()=>t(e).min_idle_runners))),l(Se,`${t(e),i(()=>t(e).runner_bootstrap_timeout)??""} minutes`),l(Be,(t(e),i(()=>t(e).priority))),l(Ne,(t(e),i(()=>t(e).runner_prefix||"garm"))),l(Re,`${t(e),i(()=>t(e).os_type)??""} / ${t(e),i(()=>t(e).os_arch)??""}`)},[()=>(f(R),t(e),i(()=>R(t(e)))),()=>(f(Zt),t(e),i(()=>Zt(t(e)))),()=>(f(P),t(e),i(()=>P(t(e)))),()=>(f(C),t(e),i(()=>C(t(e).created_at||""))),()=>(f(C),t(e),i(()=>C(t(e).updated_at||"")))]),u(H,kt)};g(U,H=>{t(e)&&H(z)},!0)}u(k,w)};g(_,k=>{t(M)?k(q):k(ke,!1)},!0)}u(d,o)};g(ge,d=>{t(O)?d(pe):d(fe,!1)})}a(G);var ht=s(G,2);{var _e=d=>{ia(d,{get pool(){return t(e)},$$events:{close:()=>n(F,!1),submit:o=>ie(o.detail)}})};g(ht,d=>{t(F)&&t(e)&&d(_e)})}var bt=s(ht,2);{var ye=d=>{{let o=B(()=>(t(e),f(P),i(()=>`Pool ${t(e).id} (${P(t(e))})`)));Yt(d,{title:"Delete Pool",message:"Are you sure you want to delete this pool? This action cannot be undone and will remove all associated runners.",get itemName(){return t(o)},$$events:{close:()=>n(T,!1),confirm:oe}})}};g(bt,d=>{t(T)&&t(e)&&d(ye)})}var he=s(bt,2);{var be=d=>{Yt(d,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return t(x),i(()=>t(x).name)},$$events:{close:()=>{n(D,!1),n(x,null)},confirm:ne}})};g(he,d=>{t(D)&&t(x)&&d(be)})}b(()=>{Qt(xe,"href",`${xt}/pools`),l(ue,(t(e),i(()=>t(e)?t(e).id:"Loading...")))}),u(ee,gt),Ye(),de()}export{Ta as component}; diff --git a/webapp/assets/_app/immutable/nodes/15.CkHQugXH.js b/webapp/assets/_app/immutable/nodes/15.CkHQugXH.js deleted file mode 100644 index 2e6c5196..00000000 --- a/webapp/assets/_app/immutable/nodes/15.CkHQugXH.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Ee}from"../chunks/B3Pzt0F_.js";import{p as Me,E as Le,o as Te,l as w,s as r,m as a,g as e,y as T,a as Fe,f as S,k as l,j as c,r as u,c as F,t as K,v as fe,x as xe,u as j,z as $e,e as be,d as Se,h as Ne,b as ze,$ as Be,q as Re}from"../chunks/D8EpLgQ1.js";import{a as De,i as J,s as Ae}from"../chunks/5WA7h8uK.js";import{r as Q,b as Ce,g as oe}from"../chunks/CiE1LlKV.js";import{e as Oe,i as He}from"../chunks/u94nIB4-.js";import{b as ye,a as Pe}from"../chunks/C6k1Q4We.js";import{p as Je}from"../chunks/D4Caz1gY.js";import{M as Ve}from"../chunks/qB7B8uiS.js";import{F as Ye}from"../chunks/CNMHKIIK.js";import{e as Ie,a as ve}from"../chunks/wyaP0EDu.js";import{U as Ke}from"../chunks/CclkODgu.js";import{D as Qe}from"../chunks/KQ2xQpA3.js";import{P as Xe}from"../chunks/CO4LUyTP.js";import{t as X}from"../chunks/BEkVdVE1.js";import{k as Ze,g as et,c as tt,m as rt,p as ot}from"../chunks/BGVHQGl-.js";import{D as at,G as st,a as nt}from"../chunks/C9DJVOi1.js";import{E as it}from"../chunks/B7ITzBt8.js";import{E as lt}from"../chunks/CGpPw4EW.js";import{S as dt}from"../chunks/BE4wujub.js";import"../chunks/CoIRRsD9.js";var ct=S('

                '),ut=S('

                Loading...

                '),pt=S(""),mt=S(''),gt=S('

                Webhook secret will be automatically generated

                '),ft=S('
                '),bt=S('

                Create Repository

                ');function yt(ae,se){Me(se,!1);const[ne,ie]=Ae(),p=()=>De(Ie,"$eagerCache",ne),R=a(),k=a(),G=a(),x=a(),C=Le();let g=a(!1),f=a(""),b=a("github"),o=a({name:"",owner:"",credentials_name:"",webhook_secret:"",pool_balancer_type:"roundrobin"}),D=a(!0),d=a(!0);async function y(){if(!p().loaded.credentials&&!p().loading.credentials)try{await ve.getCredentials()}catch(s){r(f,s instanceof Error?s.message:"Failed to load credentials")}}function _(s){r(b,s.detail),T(o,e(o).credentials_name="")}function P(){if(e(o).credentials_name){const s=e(R).find(A=>A.name===e(o).credentials_name);s&&s.forge_type&&r(b,s.forge_type)}}function E(){const s=new Uint8Array(32);return crypto.getRandomValues(s),Array.from(s,A=>A.toString(16).padStart(2,"0")).join("")}Te(()=>{y()});async function le(){if(!e(o).name?.trim()){r(f,"Repository name is required");return}if(!e(o).owner?.trim()){r(f,"Repository owner is required");return}if(!e(o).credentials_name){r(f,"Please select credentials");return}try{r(g,!0),r(f,"");const s={...e(o),install_webhook:e(D),auto_generate_secret:e(d)};C("submit",s)}catch(s){r(f,s instanceof Error?s.message:"Failed to create repository"),r(g,!1)}}w(()=>p(),()=>{r(R,p().credentials)}),w(()=>p(),()=>{r(k,p().loading.credentials)}),w(()=>(e(R),e(b)),()=>{r(G,e(R).filter(s=>e(b)?s.forge_type===e(b):!0))}),w(()=>e(d),()=>{e(d)?T(o,e(o).webhook_secret=E()):e(d)||T(o,e(o).webhook_secret="")}),w(()=>(e(o),e(d)),()=>{r(x,e(o).name?.trim()!==""&&e(o).owner?.trim()!==""&&e(o).credentials_name!==""&&(e(d)||e(o).webhook_secret?.trim()!==""))}),Fe(),Ee(),Ve(ae,{$$events:{close:()=>C("close")},children:(s,A)=>{var M=bt(),Z=l(c(M),2);{var de=v=>{var h=ct(),I=c(h),W=c(I,!0);u(I),u(h),K(()=>fe(W,e(f))),F(v,h)};J(Z,v=>{e(f)&&v(de)})}var ce=l(Z,2);{var ue=v=>{var h=ut();F(v,h)},pe=v=>{var h=ft(),I=c(h);Ye(I,{get selectedForgeType(){return e(b)},set selectedForgeType(i){r(b,i)},$$events:{select:_},$$legacy:!0});var W=l(I,2),ee=l(c(W),2);Q(ee),u(W);var L=l(W,2),N=l(c(L),2);Q(N),u(L);var z=l(L,2),B=l(c(z),2);K(()=>{e(o),xe(()=>{e(G)})});var O=c(B);O.value=O.__value="";var me=l(O);Oe(me,1,()=>e(G),He,(i,m)=>{var U=pt(),Ge=c(U);u(U);var ke={};K(()=>{fe(Ge,`${e(m),j(()=>e(m).name)??""} (${e(m),j(()=>e(m).endpoint?.name)??""})`),ke!==(ke=(e(m),j(()=>e(m).name)))&&(U.value=(U.__value=(e(m),j(()=>e(m).name)))??"")}),F(i,U)}),u(B),u(z);var H=l(z,2),V=l(c(H),2);K(()=>{e(o),xe(()=>{})});var Y=c(V);Y.value=Y.__value="roundrobin";var te=l(Y);te.value=te.__value="pack",u(V),u(H);var t=l(H,2),n=c(t),re=c(n);Q(re),$e(2),u(n);var $=l(n,2),q=c($),he=c(q);Q(he),$e(2),u(q);var We=l(q,2);{var qe=i=>{var m=mt();Q(m),ye(m,()=>e(o).webhook_secret,U=>T(o,e(o).webhook_secret=U)),F(i,m)},Ue=i=>{var m=gt();F(i,m)};J(We,i=>{e(d)?i(Ue,!1):i(qe)})}u($),u(t);var _e=l(t,2),we=c(_e),ge=l(we,2),je=c(ge,!0);u(ge),u(_e),u(h),K(()=>{ge.disabled=e(g)||e(k)||!e(x),fe(je,e(g)?"Creating...":"Create Repository")}),ye(ee,()=>e(o).name,i=>T(o,e(o).name=i)),ye(N,()=>e(o).owner,i=>T(o,e(o).owner=i)),Ce(B,()=>e(o).credentials_name,i=>T(o,e(o).credentials_name=i)),be("change",B,P),Ce(V,()=>e(o).pool_balancer_type,i=>T(o,e(o).pool_balancer_type=i)),Pe(re,()=>e(D),i=>r(D,i)),Pe(he,()=>e(d),i=>r(d,i)),be("click",we,()=>C("close")),be("submit",h,Je(le)),F(v,h)};J(ce,v=>{e(g)?v(ue):v(pe,!1)})}u(M),F(s,M)},$$slots:{default:!0}}),Se(),ie()}var vt=S('
                ',1);function Gt(ae,se){Me(se,!1);const[ne,ie]=Ae(),p=()=>De(Ie,"$eagerCache",ne),R=a(),k=a(),G=a();let x=a([]),C=a(!0),g=a(""),f=a(""),b=a(!1),o=a(!1),D=a(!1),d=a(null),y=a(null),_=a(1),P=a(25),E=a(1);Te(async()=>{try{r(C,!0);const t=await ve.getRepositories();t&&Array.isArray(t)&&r(x,t)}catch(t){console.error("Failed to load repositories:",t),r(g,t instanceof Error?t.message:"Failed to load repositories")}finally{r(C,!1)}});async function le(){try{await ve.retryResource("repositories")}catch(t){console.error("Retry failed:",t)}}function s(t){r(d,t),r(o,!0)}function A(t){r(y,t),r(D,!0)}function M(){r(b,!1),r(o,!1),r(D,!1),r(d,null),r(y,null),r(g,"")}async function Z(t){try{r(g,"");const n=t.detail,re={name:n.name,owner:n.owner,credentials_name:n.credentials_name,webhook_secret:n.webhook_secret},$=await oe.createRepository(re);if(n.install_webhook&&$.id)try{await oe.installRepoWebhook($.id),X.success("Webhook Installed",`Webhook for repository ${$.owner}/${$.name} has been installed successfully.`)}catch(q){console.warn("Repository created but webhook installation failed:",q),X.error("Webhook Installation Failed",q instanceof Error?q.message:"Failed to install webhook. You can try installing it manually from the repository details page.")}r(b,!1),X.success("Repository Created",`Repository ${$.owner}/${$.name} has been created successfully.`)}catch(n){throw r(g,n instanceof Error?n.message:"Failed to create repository"),n}}async function de(t){if(e(d))try{await oe.updateRepository(e(d).id,t),X.success("Repository Updated",`Repository ${e(d).owner}/${e(d).name} has been updated successfully.`),M()}catch(n){throw n}}async function ce(){if(e(y))try{r(g,""),await oe.deleteRepository(e(y).id),X.success("Repository Deleted",`Repository ${e(y).owner}/${e(y).name} has been deleted successfully.`),M()}catch(t){r(g,t instanceof Error?t.message:"Failed to delete repository")}}const ue=[{key:"repository",title:"Repository",cellComponent:it,cellProps:{entityType:"repository",showOwner:!0}},{key:"endpoint",title:"Endpoint",cellComponent:lt},{key:"credentials",title:"Credentials",cellComponent:st,cellProps:{field:"credentials_name"}},{key:"status",title:"Status",cellComponent:dt,cellProps:{statusType:"entity"}},{key:"actions",title:"Actions",align:"right",cellComponent:nt}],pe={entityType:"repository",primaryText:{field:"name",isClickable:!0,href:"/repositories/{id}",showOwner:!0},customInfo:[{icon:t=>et(t?.endpoint?.endpoint_type||"unknown"),text:t=>t?.endpoint?.name||"Unknown"}],badges:[{type:"custom",value:t=>Ze(t)}],actions:[{type:"edit",handler:t=>s(t)},{type:"delete",handler:t=>A(t)}]};function v(t){r(f,t.detail.term),r(_,1)}function h(t){r(_,t.detail.page)}function I(t){const n=tt(t.detail.perPage);r(P,n.newPerPage),r(_,n.newCurrentPage)}function W(t){s(t.detail.item)}function ee(t){A(t.detail.item)}w(()=>(e(x),p()),()=>{(!e(x).length||p().loaded.repositories)&&r(x,p().repositories)}),w(()=>p(),()=>{r(C,p().loading.repositories)}),w(()=>p(),()=>{r(R,p().errorMessages.repositories)}),w(()=>(e(x),e(f)),()=>{r(k,rt(e(x),e(f)))}),w(()=>(e(E),e(k),e(P),e(_)),()=>{r(E,Math.ceil(e(k).length/e(P))),e(_)>e(E)&&e(E)>0&&r(_,e(E))}),w(()=>(e(k),e(_),e(P)),()=>{r(G,ot(e(k),e(_),e(P)))}),Fe(),Ee();var L=vt();Ne(t=>{Be.title="Repositories - GARM"});var N=ze(L),z=c(N);Xe(z,{title:"Repositories",description:"Manage your GitHub repositories and their runners",actionLabel:"Add Repository",$$events:{action:()=>{r(b,!0)}}});var B=l(z,2);{let t=Re(()=>e(R)||e(g)),n=Re(()=>!!e(R));at(B,{get 
columns(){return ue},get data(){return e(G)},get loading(){return e(C)},get error(){return e(t)},get searchTerm(){return e(f)},searchPlaceholder:"Search repositories by name or owner...",get currentPage(){return e(_)},get perPage(){return e(P)},get totalPages(){return e(E)},get totalItems(){return e(k),j(()=>e(k).length)},itemName:"repositories",emptyIconType:"building",get showRetry(){return e(n)},get mobileCardConfig(){return pe},$$events:{search:v,pageChange:h,perPageChange:I,retry:le,edit:W,delete:ee}})}u(N);var O=l(N,2);{var me=t=>{yt(t,{$$events:{close:()=>r(b,!1),submit:Z}})};J(O,t=>{e(b)&&t(me)})}var H=l(O,2);{var V=t=>{Ke(t,{get entity(){return e(d)},entityType:"repository",$$events:{close:M,submit:n=>de(n.detail)}})};J(H,t=>{e(o)&&e(d)&&t(V)})}var Y=l(H,2);{var te=t=>{Qe(t,{title:"Delete Repository",message:"Are you sure you want to delete this repository? This action cannot be undone and will remove all associated pools and runners.",get itemName(){return`${e(y),j(()=>e(y).owner)??""}/${e(y),j(()=>e(y).name)??""}`},$$events:{close:M,confirm:ce}})};J(Y,t=>{e(D)&&e(y)&&t(te)})}F(ae,L),Se(),ie()}export{Gt as component}; diff --git a/webapp/assets/_app/immutable/nodes/15.CqYhwqAI.js b/webapp/assets/_app/immutable/nodes/15.CqYhwqAI.js new file mode 100644 index 00000000..82a2bac2 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/15.CqYhwqAI.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Me}from"../chunks/B3Pzt0F_.js";import{p as Te,E as Ne,o as De,l as w,s as r,m as a,g as e,y as T,a as Se,f as S,k as l,j as c,r as u,c as D,t as Q,v as fe,x as $e,u as j,z as Re,e as ye,d as Fe,h as ze,b as Be,$ as Oe,q as Ce}from"../chunks/D8EpLgQ1.js";import{a as Ae,i as V,s as Ie}from"../chunks/5WA7h8uK.js";import{r as X,b as Pe,g as oe}from"../chunks/CiE1LlKV.js";import{e as He,i as Je}from"../chunks/u94nIB4-.js";import{b as ve,a as Ee}from"../chunks/C6k1Q4We.js";import{p as Ve}from"../chunks/D4Caz1gY.js";import{M as Ye}from"../chunks/qB7B8uiS.js";import{e as ae}from"../chunks/BZiHL9L3.js";import{F as Ke}from"../chunks/CNMHKIIK.js";import{e as We,a as he}from"../chunks/wyaP0EDu.js";import{U as Qe}from"../chunks/CIBm3n2u.js";import{D as Xe}from"../chunks/KQ2xQpA3.js";import{P as Ze}from"../chunks/CO4LUyTP.js";import{t as J}from"../chunks/BEkVdVE1.js";import{k as et,g as tt,c as rt,m as ot,p as at}from"../chunks/BGVHQGl-.js";import{D as st,G as nt,a as it}from"../chunks/BrNfsPe8.js";import{E as lt}from"../chunks/D4PaGKsV.js";import{E as dt}from"../chunks/CGpPw4EW.js";import{S as ct}from"../chunks/MCv1Wq2q.js";import"../chunks/CRhkqW2i.js";var ut=S('

                '),pt=S('

                Loading...

                '),mt=S(""),gt=S(''),bt=S('

                Webhook secret will be automatically generated

                '),ft=S('
                '),yt=S('

                Create Repository

                ');function vt(se,ne){Te(ne,!1);const[ie,le]=Ie(),p=()=>Ae(We,"$eagerCache",ie),R=a(),k=a(),G=a(),x=a(),C=Ne();let y=a(!1),g=a(""),b=a("github"),o=a({name:"",owner:"",credentials_name:"",webhook_secret:"",pool_balancer_type:"roundrobin"}),F=a(!0),d=a(!0);async function f(){if(!p().loaded.credentials&&!p().loading.credentials)try{await he.getCredentials()}catch(i){r(g,ae(i))}}function _(i){r(b,i.detail),T(o,e(o).credentials_name="")}function P(){if(e(o).credentials_name){const i=e(R).find(A=>A.name===e(o).credentials_name);i&&i.forge_type&&r(b,i.forge_type)}}function E(){const i=new Uint8Array(32);return crypto.getRandomValues(i),Array.from(i,A=>A.toString(16).padStart(2,"0")).join("")}De(()=>{f()});async function de(){if(!e(o).name?.trim()){r(g,"Repository name is required");return}if(!e(o).owner?.trim()){r(g,"Repository owner is required");return}if(!e(o).credentials_name){r(g,"Please select credentials");return}try{r(y,!0),r(g,"");const i={...e(o),install_webhook:e(F),auto_generate_secret:e(d)};C("submit",i)}catch(i){r(g,ae(i)),r(y,!1)}}w(()=>p(),()=>{r(R,p().credentials)}),w(()=>p(),()=>{r(k,p().loading.credentials)}),w(()=>(e(R),e(b)),()=>{r(G,e(R).filter(i=>e(b)?i.forge_type===e(b):!0))}),w(()=>e(d),()=>{e(d)?T(o,e(o).webhook_secret=E()):e(d)||T(o,e(o).webhook_secret="")}),w(()=>(e(o),e(d)),()=>{r(x,e(o).name?.trim()!==""&&e(o).owner?.trim()!==""&&e(o).credentials_name!==""&&(e(d)||e(o).webhook_secret?.trim()!==""))}),Se(),Me(),Ye(se,{$$events:{close:()=>C("close")},children:(i,A)=>{var M=yt(),Z=l(c(M),2);{var ce=v=>{var h=ut(),I=c(h),W=c(I,!0);u(I),u(h),Q(()=>fe(W,e(g))),D(v,h)};V(Z,v=>{e(g)&&v(ce)})}var ue=l(Z,2);{var pe=v=>{var h=pt();D(v,h)},me=v=>{var h=ft(),I=c(h);Ke(I,{get selectedForgeType(){return e(b)},set selectedForgeType(n){r(b,n)},$$events:{select:_},$$legacy:!0});var W=l(I,2),ee=l(c(W),2);X(ee),u(W);var L=l(W,2),N=l(c(L),2);X(N),u(L);var z=l(L,2),B=l(c(z),2);Q(()=>{e(o),$e(()=>{e(G)})});var O=c(B);O.value=O.__value="";var ge=l(O);He(ge,1,()=>e(G),Je,(n,m)=>{var U=mt(),Le=c(U);u(U);var xe={};Q(()=>{fe(Le,`${e(m),j(()=>e(m).name)??""} (${e(m),j(()=>e(m).endpoint?.name)??""})`),xe!==(xe=(e(m),j(()=>e(m).name)))&&(U.value=(U.__value=(e(m),j(()=>e(m).name)))??"")}),D(n,U)}),u(B),u(z);var H=l(z,2),Y=l(c(H),2);Q(()=>{e(o),$e(()=>{})});var K=c(Y);K.value=K.__value="roundrobin";var te=l(K);te.value=te.__value="pack",u(Y),u(H);var t=l(H,2),s=c(t),re=c(s);X(re),Re(2),u(s);var $=l(s,2),q=c($),_e=c(q);X(_e),Re(2),u(q);var qe=l(q,2);{var Ue=n=>{var m=gt();X(m),ve(m,()=>e(o).webhook_secret,U=>T(o,e(o).webhook_secret=U)),D(n,m)},je=n=>{var m=bt();D(n,m)};V(qe,n=>{e(d)?n(je,!1):n(Ue)})}u($),u(t);var we=l(t,2),ke=c(we),be=l(ke,2),Ge=c(be,!0);u(be),u(we),u(h),Q(()=>{be.disabled=e(y)||e(k)||!e(x),fe(Ge,e(y)?"Creating...":"Create Repository")}),ve(ee,()=>e(o).name,n=>T(o,e(o).name=n)),ve(N,()=>e(o).owner,n=>T(o,e(o).owner=n)),Pe(B,()=>e(o).credentials_name,n=>T(o,e(o).credentials_name=n)),ye("change",B,P),Pe(Y,()=>e(o).pool_balancer_type,n=>T(o,e(o).pool_balancer_type=n)),Ee(re,()=>e(F),n=>r(F,n)),Ee(_e,()=>e(d),n=>r(d,n)),ye("click",ke,()=>C("close")),ye("submit",h,Ve(de)),D(v,h)};V(ue,v=>{e(y)?v(pe):v(me,!1)})}u(M),D(i,M)},$$slots:{default:!0}}),Fe(),le()}var ht=S('
                ',1);function Nt(se,ne){Te(ne,!1);const[ie,le]=Ie(),p=()=>Ae(We,"$eagerCache",ie),R=a(),k=a(),G=a();let x=a([]),C=a(!0),y=a(""),g=a(""),b=a(!1),o=a(!1),F=a(!1),d=a(null),f=a(null),_=a(1),P=a(25),E=a(1);De(async()=>{try{r(C,!0);const t=await he.getRepositories();t&&Array.isArray(t)&&r(x,t)}catch(t){console.error("Failed to load repositories:",t),r(y,t instanceof Error?t.message:"Failed to load repositories")}finally{r(C,!1)}});async function de(){try{await he.retryResource("repositories")}catch(t){console.error("Retry failed:",t)}}function i(t){r(d,t),r(o,!0)}function A(t){r(f,t),r(F,!0)}function M(){r(b,!1),r(o,!1),r(F,!1),r(d,null),r(f,null),r(y,"")}async function Z(t){try{r(y,"");const s=t.detail,re={name:s.name,owner:s.owner,credentials_name:s.credentials_name,webhook_secret:s.webhook_secret},$=await oe.createRepository(re);if(s.install_webhook&&$.id)try{await oe.installRepoWebhook($.id),J.success("Webhook Installed",`Webhook for repository ${$.owner}/${$.name} has been installed successfully.`)}catch(q){console.warn("Repository created but webhook installation failed:",q),J.error("Webhook Installation Failed",q instanceof Error?q.message:"Failed to install webhook. You can try installing it manually from the repository details page.")}r(b,!1),J.success("Repository Created",`Repository ${$.owner}/${$.name} has been created successfully.`)}catch(s){throw r(y,ae(s)),s}}async function ce(t){if(e(d))try{await oe.updateRepository(e(d).id,t),J.success("Repository Updated",`Repository ${e(d).owner}/${e(d).name} has been updated successfully.`),M()}catch(s){throw s}}async function ue(){if(e(f))try{r(y,""),await oe.deleteRepository(e(f).id),J.success("Repository Deleted",`Repository ${e(f).owner}/${e(f).name} has been deleted successfully.`)}catch(t){const s=ae(t);J.error("Delete Failed",s)}finally{M()}}const pe=[{key:"repository",title:"Repository",cellComponent:lt,cellProps:{entityType:"repository",showOwner:!0}},{key:"endpoint",title:"Endpoint",cellComponent:dt},{key:"credentials",title:"Credentials",cellComponent:nt,cellProps:{field:"credentials_name"}},{key:"status",title:"Status",cellComponent:ct,cellProps:{statusType:"entity"}},{key:"actions",title:"Actions",align:"right",cellComponent:it}],me={entityType:"repository",primaryText:{field:"name",isClickable:!0,href:"/repositories/{id}",showOwner:!0},customInfo:[{icon:t=>tt(t?.endpoint?.endpoint_type||"unknown"),text:t=>t?.endpoint?.name||"Unknown"}],badges:[{type:"custom",value:t=>et(t)}],actions:[{type:"edit",handler:t=>i(t)},{type:"delete",handler:t=>A(t)}]};function v(t){r(g,t.detail.term),r(_,1)}function h(t){r(_,t.detail.page)}function I(t){const s=rt(t.detail.perPage);r(P,s.newPerPage),r(_,s.newCurrentPage)}function W(t){i(t.detail.item)}function ee(t){A(t.detail.item)}w(()=>(e(x),p()),()=>{(!e(x).length||p().loaded.repositories)&&r(x,p().repositories)}),w(()=>p(),()=>{r(C,p().loading.repositories)}),w(()=>p(),()=>{r(R,p().errorMessages.repositories)}),w(()=>(e(x),e(g)),()=>{r(k,ot(e(x),e(g)))}),w(()=>(e(E),e(k),e(P),e(_)),()=>{r(E,Math.ceil(e(k).length/e(P))),e(_)>e(E)&&e(E)>0&&r(_,e(E))}),w(()=>(e(k),e(_),e(P)),()=>{r(G,at(e(k),e(_),e(P)))}),Se(),Me();var L=ht();ze(t=>{Oe.title="Repositories - GARM"});var N=Be(L),z=c(N);Ze(z,{title:"Repositories",description:"Manage your GitHub repositories and their runners",actionLabel:"Add Repository",$$events:{action:()=>{r(b,!0)}}});var B=l(z,2);{let t=Ce(()=>e(R)||e(y)),s=Ce(()=>!!e(R));st(B,{get columns(){return pe},get data(){return e(G)},get loading(){return e(C)},get 
error(){return e(t)},get searchTerm(){return e(g)},searchPlaceholder:"Search repositories by name or owner...",get currentPage(){return e(_)},get perPage(){return e(P)},get totalPages(){return e(E)},get totalItems(){return e(k),j(()=>e(k).length)},itemName:"repositories",emptyIconType:"building",get showRetry(){return e(s)},get mobileCardConfig(){return me},$$events:{search:v,pageChange:h,perPageChange:I,retry:de,edit:W,delete:ee}})}u(N);var O=l(N,2);{var ge=t=>{vt(t,{$$events:{close:()=>r(b,!1),submit:Z}})};V(O,t=>{e(b)&&t(ge)})}var H=l(O,2);{var Y=t=>{Qe(t,{get entity(){return e(d)},entityType:"repository",$$events:{close:M,submit:s=>ce(s.detail)}})};V(H,t=>{e(o)&&e(d)&&t(Y)})}var K=l(H,2);{var te=t=>{Xe(t,{title:"Delete Repository",message:"Are you sure you want to delete this repository? This action cannot be undone and will remove all associated pools and runners.",get itemName(){return`${e(f),j(()=>e(f).owner)??""}/${e(f),j(()=>e(f).name)??""}`},$$events:{close:M,confirm:ue}})};V(K,t=>{e(F)&&e(f)&&t(te)})}D(se,L),Fe(),le()}export{Nt as component}; diff --git a/webapp/assets/_app/immutable/nodes/16.B35VVkOd.js b/webapp/assets/_app/immutable/nodes/16.B35VVkOd.js deleted file mode 100644 index 5ee0941c..00000000 --- a/webapp/assets/_app/immutable/nodes/16.B35VVkOd.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as We}from"../chunks/B3Pzt0F_.js";import{p as qe,o as ze,A as He,l as je,a as Ge,f as A,h as Oe,b as M,t as z,c as w,d as Ve,g as e,m as l,s as r,u as s,$ as Je,j as u,r as f,k as d,v as le,y as Ke,B as de,q as m,n as Qe}from"../chunks/D8EpLgQ1.js";import{i as g,s as Xe,a as Ye}from"../chunks/5WA7h8uK.js";import{c as Ze,g as h}from"../chunks/CiE1LlKV.js";import{p as et}from"../chunks/C41YH50Q.js";import{g as ce}from"../chunks/CTf6mQoE.js";import{b as H}from"../chunks/CoIRRsD9.js";import{U as tt}from"../chunks/CclkODgu.js";import{D as pe}from"../chunks/KQ2xQpA3.js";import{E as ot,P as at,a as rt}from"../chunks/BmGWMSQm.js";import{D as st,I as nt}from"../chunks/DDhBTdDt.js";import{g as ue}from"../chunks/BGVHQGl-.js";import{W as it}from"../chunks/Dbd6PPbz.js";import{C as lt}from"../chunks/CwqI2jFH.js";import{w as j}from"../chunks/u94nIB4-.js";import{t as C}from"../chunks/BEkVdVE1.js";var dt=A('

                Loading repository...

                '),ct=A('

                '),pt=A(" ",1),ut=A(' ',1);function kt(fe,me){qe(me,!1);const[ye,ve]=Xe(),G=()=>Ye(et,"$page",ye),x=l();let t=l(null),c=l([]),y=l([]),F=l(!0),I=l(""),R=l(!1),D=l(!1),E=l(!1),T=l(!1),p=l(null),P=null,_=l();async function O(){if(e(x))try{r(F,!0),r(I,"");const[o,a,n]=await Promise.all([h.getRepository(e(x)),h.listRepositoryPools(e(x)).catch(()=>[]),h.listRepositoryInstances(e(x)).catch(()=>[])]);r(t,o),r(c,a),r(y,n)}catch(o){r(I,o instanceof Error?o.message:"Failed to load repository")}finally{r(F,!1)}}function ge(o,a){const{events:n}=o;return{...a,events:n}}async function he(o){if(e(t))try{await h.updateRepository(e(t).id,o),await O(),C.success("Repository Updated",`Repository ${e(t).owner}/${e(t).name} has been updated successfully.`),r(R,!1)}catch(a){throw a}}async function _e(){if(e(t)){try{await h.deleteRepository(e(t).id),ce(`${H}/repositories`)}catch(o){r(I,o instanceof Error?o.message:"Failed to delete repository")}r(D,!1)}}async function $e(){if(e(p))try{await h.deleteInstance(e(p).name),C.success("Instance Deleted",`Instance ${e(p).name} has been deleted successfully.`),r(E,!1),r(p,null)}catch(o){const a=o instanceof Error?o.message:"Failed to delete instance";C.error("Delete Failed",a),r(E,!1),r(p,null)}}function be(o){r(p,o),r(E,!0)}function we(){r(T,!0)}async function xe(o){try{if(!e(t))return;await h.createRepositoryPool(e(t).id,o.detail),C.success("Pool Created",`Pool has been created successfully for repository ${e(t).owner}/${e(t).name}.`),r(T,!1)}catch(a){throw a}}function V(){e(_)&&Ke(_,e(_).scrollTop=e(_).scrollHeight)}function Ie(o){if(o.operation==="update"){const a=o.payload;if(e(t)&&a.id===e(t).id){const n=e(t).events?.length||0,i=a.events?.length||0;r(t,ge(e(t),a)),i>n&&setTimeout(()=>{V()},100)}}else if(o.operation==="delete"){const a=o.payload.id||o.payload;e(t)&&e(t).id===a&&ce(`${H}/repositories`)}}function Ee(o){if(!e(t))return;const a=o.payload;if(a.repo_id===e(t).id){if(o.operation==="create")r(c,[...e(c),a]);else if(o.operation==="update")r(c,e(c).map(n=>n.id===a.id?a:n));else if(o.operation==="delete"){const n=a.id||a;r(c,e(c).filter(i=>i.id!==n))}}}function Re(o){if(!e(t)||!e(c))return;const a=o.payload;if(e(c).some(i=>i.id===a.pool_id)){if(o.operation==="create")r(y,[...e(y),a]);else if(o.operation==="update")r(y,e(y).map(i=>i.id===a.id?a:i));else if(o.operation==="delete"){const i=a.id||a;r(y,e(y).filter(L=>L.id!==i))}}}ze(()=>{O().then(()=>{e(t)?.events?.length&&setTimeout(()=>{V()},100)});const o=j.subscribeToEntity("repository",["update","delete"],Ie),a=j.subscribeToEntity("pool",["create","update","delete"],Ee),n=j.subscribeToEntity("instance",["create","update","delete"],Re);P=()=>{o(),a(),n()}}),He(()=>{P&&(P(),P=null)}),je(()=>G(),()=>{r(x,G().params.id)}),Ge(),We();var J=ut();Oe(o=>{z(()=>Je.title=`${e(t),s(()=>e(t)?`${e(t).name} - Repository Details`:"Repository Details")??""} - GARM`)});var S=M(J),B=u(S),K=u(B),U=u(K),De=u(U);f(U);var Q=d(U,2),X=u(Q),Y=d(u(X),2),Te=u(Y,!0);f(Y),f(X),f(Q),f(K),f(B);var Pe=d(B,2);{var ke=o=>{var a=dt();w(o,a)},Me=o=>{var a=de(),n=M(a);{var i=$=>{var b=ct(),k=u(b),N=u(k,!0);f(k),f(b),z(()=>le(N,e(I))),w($,b)},L=$=>{var b=de(),k=M(b);{var N=W=>{var oe=pt(),ae=M(oe);{let v=m(()=>(e(t),s(()=>e(t).name||"Repository"))),q=m(()=>(e(t),s(()=>e(t).owner))),Le=m(()=>(e(t),s(()=>e(t).endpoint?.name))),Ne=m(()=>(Qe(ue),e(t),s(()=>ue(e(t).endpoint?.endpoint_type||"unknown"))));st(ae,{get title(){return e(v)},get subtitle(){return`Owner: ${e(q)??""} • Endpoint: ${e(Le)??""}`},get forgeIcon(){return 
e(Ne)},onEdit:()=>r(R,!0),onDelete:()=>r(D,!0)})}var re=d(ae,2);ot(re,{get entity(){return e(t)},entityType:"repository"});var se=d(re,2);{let v=m(()=>(e(t),s(()=>e(t).id||"")));it(se,{entityType:"repository",get entityId(){return e(v)},get entityName(){return`${e(t),s(()=>e(t).owner)??""}/${e(t),s(()=>e(t).name)??""}`}})}var ne=d(se,2);{let v=m(()=>(e(t),s(()=>e(t).id||"")));at(ne,{get pools(){return e(c)},entityType:"repository",get entityId(){return e(v)},get entityName(){return`${e(t),s(()=>e(t).owner)??""}/${e(t),s(()=>e(t).name)??""}`},$$events:{addPool:we}})}var ie=d(ne,2);nt(ie,{get instances(){return e(y)},entityType:"repository",onDeleteInstance:be});var Ue=d(ie,2);{let v=m(()=>(e(t),s(()=>e(t)?.events)));rt(Ue,{get events(){return e(v)},get eventsContainer(){return e(_)},set eventsContainer(q){r(_,q)},$$legacy:!0})}w(W,oe)};g(k,W=>{e(t)&&W(N)},!0)}w($,b)};g(n,$=>{e(I)?$(i):$(L,!1)},!0)}w(o,a)};g(Pe,o=>{e(F)?o(ke):o(Me,!1)})}f(S);var Z=d(S,2);{var Ce=o=>{tt(o,{get entity(){return e(t)},entityType:"repository",$$events:{close:()=>r(R,!1),submit:a=>he(a.detail)}})};g(Z,o=>{e(R)&&e(t)&&o(Ce)})}var ee=d(Z,2);{var Ae=o=>{{let a=m(()=>(e(t),s(()=>`${e(t).owner}/${e(t).name}`)));pe(o,{title:"Delete Repository",message:"Are you sure you want to delete this repository? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(a)},$$events:{close:()=>r(D,!1),confirm:_e}})}};g(ee,o=>{e(D)&&e(t)&&o(Ae)})}var te=d(ee,2);{var Fe=o=>{pe(o,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(p),s(()=>e(p).name)},$$events:{close:()=>{r(E,!1),r(p,null)},confirm:$e}})};g(te,o=>{e(E)&&e(p)&&o(Fe)})}var Se=d(te,2);{var Be=o=>{{let a=m(()=>(e(t),s(()=>e(t).id||"")));lt(o,{initialEntityType:"repository",get initialEntityId(){return e(a)},$$events:{close:()=>r(T,!1),submit:xe}})}};g(Se,o=>{e(T)&&e(t)&&o(Be)})}z(()=>{Ze(De,"href",`${H}/repositories`),le(Te,(e(t),s(()=>e(t)?e(t).name:"Loading...")))}),w(fe,J),Ve(),ve()}export{kt as component}; diff --git a/webapp/assets/_app/immutable/nodes/16.BVViOnXd.js b/webapp/assets/_app/immutable/nodes/16.BVViOnXd.js new file mode 100644 index 00000000..b2928fba --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/16.BVViOnXd.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as qe}from"../chunks/B3Pzt0F_.js";import{p as ze,o as He,A as je,l as Ge,a as Oe,f as A,h as Ve,b as C,t as z,c as w,d as Je,g as e,m as l,s as r,u as s,$ as Ke,j as u,r as f,k as d,v as de,y as Qe,B as ce,q as m,n as Xe}from"../chunks/D8EpLgQ1.js";import{i as g,s as Ye,a as Ze}from"../chunks/5WA7h8uK.js";import{c as et,g as h}from"../chunks/CiE1LlKV.js";import{p as tt}from"../chunks/BE8f1Riw.js";import{g as pe}from"../chunks/DXCC0cSN.js";import{b as H}from"../chunks/CRhkqW2i.js";import{U as ot}from"../chunks/CIBm3n2u.js";import{D as ue}from"../chunks/KQ2xQpA3.js";import{E as at,P as rt,a as st}from"../chunks/OpktHEmj.js";import{D as nt,I as it}from"../chunks/BEoJgOul.js";import{g as fe}from"../chunks/BGVHQGl-.js";import{e as j}from"../chunks/BZiHL9L3.js";import{W as lt}from"../chunks/BsqC4UA1.js";import{C as dt}from"../chunks/CRD55Dyg.js";import{w as G}from"../chunks/u94nIB4-.js";import{t as R}from"../chunks/BEkVdVE1.js";var ct=A('

                Loading repository...

                '),pt=A('

                '),ut=A(" ",1),ft=A(' ',1);function Ct(me,ye){ze(ye,!1);const[ve,ge]=Ye(),O=()=>Ze(tt,"$page",ve),x=l();let t=l(null),c=l([]),y=l([]),S=l(!0),E=l(""),D=l(!1),T=l(!1),I=l(!1),P=l(!1),p=l(null),M=null,_=l();async function V(){if(e(x))try{r(S,!0),r(E,"");const[o,a,n]=await Promise.all([h.getRepository(e(x)),h.listRepositoryPools(e(x)).catch(()=>[]),h.listRepositoryInstances(e(x)).catch(()=>[])]);r(t,o),r(c,a),r(y,n)}catch(o){r(E,j(o))}finally{r(S,!1)}}function he(o,a){const{events:n}=o;return{...a,events:n}}async function _e(o){if(e(t))try{await h.updateRepository(e(t).id,o),await V(),R.success("Repository Updated",`Repository ${e(t).owner}/${e(t).name} has been updated successfully.`),r(D,!1)}catch(a){throw a}}async function $e(){if(e(t)){try{await h.deleteRepository(e(t).id),pe(`${H}/repositories`)}catch(o){const a=j(o);R.error("Delete Failed",a)}r(T,!1)}}async function be(){if(e(p))try{await h.deleteInstance(e(p).name),R.success("Instance Deleted",`Instance ${e(p).name} has been deleted successfully.`),r(I,!1),r(p,null)}catch(o){const a=j(o);R.error("Delete Failed",a),r(I,!1),r(p,null)}}function we(o){r(p,o),r(I,!0)}function xe(){r(P,!0)}async function Ie(o){try{if(!e(t))return;await h.createRepositoryPool(e(t).id,o.detail),R.success("Pool Created",`Pool has been created successfully for repository ${e(t).owner}/${e(t).name}.`),r(P,!1)}catch(a){throw a}}function J(){e(_)&&Qe(_,e(_).scrollTop=e(_).scrollHeight)}function Re(o){if(o.operation==="update"){const a=o.payload;if(e(t)&&a.id===e(t).id){const n=e(t).events?.length||0,i=a.events?.length||0;r(t,he(e(t),a)),i>n&&setTimeout(()=>{J()},100)}}else if(o.operation==="delete"){const a=o.payload.id||o.payload;e(t)&&e(t).id===a&&pe(`${H}/repositories`)}}function Ee(o){if(!e(t))return;const a=o.payload;if(a.repo_id===e(t).id){if(o.operation==="create")r(c,[...e(c),a]);else if(o.operation==="update")r(c,e(c).map(n=>n.id===a.id?a:n));else if(o.operation==="delete"){const n=a.id||a;r(c,e(c).filter(i=>i.id!==n))}}}function De(o){if(!e(t)||!e(c))return;const a=o.payload;if(e(c).some(i=>i.id===a.pool_id)){if(o.operation==="create")r(y,[...e(y),a]);else if(o.operation==="update")r(y,e(y).map(i=>i.id===a.id?a:i));else if(o.operation==="delete"){const i=a.id||a;r(y,e(y).filter(L=>L.id!==i))}}}He(()=>{V().then(()=>{e(t)?.events?.length&&setTimeout(()=>{J()},100)});const o=G.subscribeToEntity("repository",["update","delete"],Re),a=G.subscribeToEntity("pool",["create","update","delete"],Ee),n=G.subscribeToEntity("instance",["create","update","delete"],De);M=()=>{o(),a(),n()}}),je(()=>{M&&(M(),M=null)}),Ge(()=>O(),()=>{r(x,O().params.id)}),Oe(),qe();var K=ft();Ve(o=>{z(()=>Ke.title=`${e(t),s(()=>e(t)?`${e(t).name} - Repository Details`:"Repository Details")??""} - GARM`)});var B=C(K),U=u(B),Q=u(U),F=u(Q),Te=u(F);f(F);var X=d(F,2),Y=u(X),Z=d(u(Y),2),Pe=u(Z,!0);f(Z),f(Y),f(X),f(Q),f(U);var Me=d(U,2);{var ke=o=>{var a=ct();w(o,a)},Ce=o=>{var a=ce(),n=C(a);{var i=$=>{var b=pt(),k=u(b),N=u(k,!0);f(k),f(b),z(()=>de(N,e(E))),w($,b)},L=$=>{var b=ce(),k=C(b);{var N=W=>{var ae=ut(),re=C(ae);{let v=m(()=>(e(t),s(()=>e(t).name||"Repository"))),q=m(()=>(e(t),s(()=>e(t).owner))),Ne=m(()=>(e(t),s(()=>e(t).endpoint?.name))),We=m(()=>(Xe(fe),e(t),s(()=>fe(e(t).endpoint?.endpoint_type||"unknown"))));nt(re,{get title(){return e(v)},get subtitle(){return`Owner: ${e(q)??""} • Endpoint: ${e(Ne)??""}`},get forgeIcon(){return e(We)},onEdit:()=>r(D,!0),onDelete:()=>r(T,!0)})}var se=d(re,2);at(se,{get entity(){return e(t)},entityType:"repository"});var ne=d(se,2);{let 
v=m(()=>(e(t),s(()=>e(t).id||"")));lt(ne,{entityType:"repository",get entityId(){return e(v)},get entityName(){return`${e(t),s(()=>e(t).owner)??""}/${e(t),s(()=>e(t).name)??""}`}})}var ie=d(ne,2);{let v=m(()=>(e(t),s(()=>e(t).id||"")));rt(ie,{get pools(){return e(c)},entityType:"repository",get entityId(){return e(v)},get entityName(){return`${e(t),s(()=>e(t).owner)??""}/${e(t),s(()=>e(t).name)??""}`},$$events:{addPool:xe}})}var le=d(ie,2);it(le,{get instances(){return e(y)},entityType:"repository",onDeleteInstance:we});var Le=d(le,2);{let v=m(()=>(e(t),s(()=>e(t)?.events)));st(Le,{get events(){return e(v)},get eventsContainer(){return e(_)},set eventsContainer(q){r(_,q)},$$legacy:!0})}w(W,ae)};g(k,W=>{e(t)&&W(N)},!0)}w($,b)};g(n,$=>{e(E)?$(i):$(L,!1)},!0)}w(o,a)};g(Me,o=>{e(S)?o(ke):o(Ce,!1)})}f(B);var ee=d(B,2);{var Ae=o=>{ot(o,{get entity(){return e(t)},entityType:"repository",$$events:{close:()=>r(D,!1),submit:a=>_e(a.detail)}})};g(ee,o=>{e(D)&&e(t)&&o(Ae)})}var te=d(ee,2);{var Se=o=>{{let a=m(()=>(e(t),s(()=>`${e(t).owner}/${e(t).name}`)));ue(o,{title:"Delete Repository",message:"Are you sure you want to delete this repository? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(a)},$$events:{close:()=>r(T,!1),confirm:$e}})}};g(te,o=>{e(T)&&e(t)&&o(Se)})}var oe=d(te,2);{var Be=o=>{ue(o,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(p),s(()=>e(p).name)},$$events:{close:()=>{r(I,!1),r(p,null)},confirm:be}})};g(oe,o=>{e(I)&&e(p)&&o(Be)})}var Ue=d(oe,2);{var Fe=o=>{{let a=m(()=>(e(t),s(()=>e(t).id||"")));dt(o,{initialEntityType:"repository",get initialEntityId(){return e(a)},$$events:{close:()=>r(P,!1),submit:Ie}})}};g(Ue,o=>{e(P)&&e(t)&&o(Fe)})}z(()=>{et(Te,"href",`${H}/repositories`),de(Pe,(e(t),s(()=>e(t)?e(t).name:"Loading...")))}),w(me,K),Je(),ge()}export{Ct as component}; diff --git a/webapp/assets/_app/immutable/nodes/17.CCltcs-Z.js b/webapp/assets/_app/immutable/nodes/17.CCltcs-Z.js deleted file mode 100644 index 731517fe..00000000 --- a/webapp/assets/_app/immutable/nodes/17.CCltcs-Z.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as yr}from"../chunks/B3Pzt0F_.js";import{p as hr,E as Gr,o as xr,f as w,k as a,j as o,g as e,m as i,r as s,t as k,s as r,e as de,c as f,v as X,b as kr,z as br,x as ye,D as qe,d as _r,l as U,a as Hr,h as jr,$ as Fr,q as Oe,u as mr,n as Nr}from"../chunks/D8EpLgQ1.js";import{i as D,s as Ur,a as qr}from"../chunks/5WA7h8uK.js";import{r as T,s as Le,b as he,g as R}from"../chunks/CiE1LlKV.js";import"../chunks/CoIRRsD9.js";import{P as Or}from"../chunks/CO4LUyTP.js";import{e as pr,i as vr}from"../chunks/u94nIB4-.js";import{b as H,a as Lr}from"../chunks/C6k1Q4We.js";import{p as Br}from"../chunks/D4Caz1gY.js";import{M as Jr}from"../chunks/qB7B8uiS.js";import{J as Vr}from"../chunks/DZblzgqm.js";import{U as Wr}from"../chunks/C89fcOde.js";import{D as Kr}from"../chunks/KQ2xQpA3.js";import{e as Qr,a as fr}from"../chunks/wyaP0EDu.js";import{t as Be}from"../chunks/BEkVdVE1.js";import{e as ne,h as Xr}from"../chunks/BGVHQGl-.js";import{D as Yr,G as Je,a as Zr}from"../chunks/C9DJVOi1.js";import{E as et}from"../chunks/B7ITzBt8.js";import{E as rt}from"../chunks/CGpPw4EW.js";import{S as tt}from"../chunks/BE4wujub.js";import{P as at}from"../chunks/CLYUNKnN.js";var ot=w('

                '),st=w('
                '),lt=w(""),it=w(''),dt=w('
                '),nt=w(""),ct=w(''),ut=w('

                Entity & Provider Configuration

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Extra Specs (JSON)
                ',1),gt=w('
                Creating...
                '),bt=w('

                Create New Scale Set

                Scale sets are only available for GitHub endpoints

                Entity Level *
                ');function mt(xe,ke){hr(ke,!1);const Y=Gr();let q=i(!1),m=i(""),n=i(""),v=i([]),S=i([]),O=i(!1),C=i(!1),$=i(""),p=i(""),_=i(""),u=i(""),y=i(""),A=i(void 0),z=i(void 0),I=i(void 0),d=i("garm"),L=i("linux"),B=i("amd64"),Z=i(""),ee=i(!0),G=i("{}");async function ce(){try{r(C,!0),r(S,await R.listProviders())}catch(c){r(m,c instanceof Error?c.message:"Failed to load providers")}finally{r(C,!1)}}async function _e(){if(e(n))try{switch(r(O,!0),r(v,[]),e(n)){case"repository":r(v,await R.listRepositories());break;case"organization":r(v,await R.listOrganizations());break;case"enterprise":r(v,await R.listEnterprises());break}}catch(c){r(m,c instanceof Error?c.message:`Failed to load ${e(n)}s`)}finally{r(O,!1)}}function re(c){e(n)!==c&&(r(n,c),r(p,""),_e())}async function we(){if(!e($)||!e(n)||!e(p)||!e(_)||!e(u)||!e(y)){r(m,"Please fill in all required fields");return}try{r(q,!0),r(m,"");let c={};if(e(G).trim())try{c=JSON.parse(e(G))}catch{throw new Error("Invalid JSON in extra specs")}const J={name:e($),provider_name:e(_),image:e(u),flavor:e(y),max_runners:e(A)||10,min_idle_runners:e(z)||0,runner_bootstrap_timeout:e(I)||20,runner_prefix:e(d),os_type:e(L),os_arch:e(B),"github-runner-group":e(Z)||void 0,enabled:e(ee),extra_specs:e(G).trim()?c:void 0};let E;switch(e(n)){case"repository":E=await R.createRepositoryScaleSet(e(p),J);break;case"organization":E=await R.createOrganizationScaleSet(e(p),J);break;case"enterprise":E=await R.createEnterpriseScaleSet(e(p),J);break;default:throw new Error("Invalid entity level selected")}Y("submit",E)}catch(c){r(m,c instanceof Error?c.message:"Failed to create scale set")}finally{r(q,!1)}}xr(()=>{ce()}),yr(),Jr(xe,{$$events:{close:()=>Y("close")},children:(c,J)=>{var E=bt(),V=a(o(E),2),ue=o(V);{var ge=b=>{var P=ot(),K=o(P),ve=o(K,!0);s(K),s(P),k(()=>X(ve,e(m))),f(b,P)};D(ue,b=>{e(m)&&b(ge)})}var j=a(ue,2),te=a(o(j),2);T(te),s(j);var ae=a(j,2),oe=o(ae),be=a(o(oe),2),W=o(be),se=a(W,2),me=a(se,2);s(be),s(oe),s(ae);var pe=a(ae,2);{var t=b=>{var P=ut(),K=kr(P),ve=a(o(K),2),Ce=o(ve),$e=o(Ce),$r=o($e);br(),s($e);var Er=a($e,2);{var Pr=l=>{var h=st();f(l,h)},Mr=l=>{var h=it();k(()=>{e(p),ye(()=>{e(n),e(v)})});var M=o(h),Ue=o(M);s(M),M.value=M.__value="";var le=a(M);pr(le,1,()=>e(v),vr,(F,x)=>{var N=lt(),fe=o(N);{var zr=Q=>{var ie=qe();k(()=>X(ie,`${e(x).owner??""}/${e(x).name??""} (${e(x).endpoint?.name||"Unknown endpoint"})`)),f(Q,ie)},Ir=Q=>{var ie=qe();k(()=>X(ie,`${e(x).name??""} (${e(x).endpoint?.name||"Unknown endpoint"})`)),f(Q,ie)};D(fe,Q=>{e(n)==="repository"?Q(zr):Q(Ir,!1)})}s(N);var gr={};k(()=>{gr!==(gr=e(x).id)&&(N.value=(N.__value=e(x).id)??"")}),f(F,N)}),s(h),k(()=>X(Ue,`Select a ${e(n)??""}`)),he(h,()=>e(p),F=>r(p,F)),f(l,h)};D(Er,l=>{e(O)?l(Pr):l(Mr,!1)})}s(Ce);var We=a(Ce,2),Tr=a(o(We),2);{var Rr=l=>{var h=dt();f(l,h)},Dr=l=>{var h=ct();k(()=>{e(_),ye(()=>{e(S)})});var M=o(h);M.value=M.__value="";var Ue=a(M);pr(Ue,1,()=>e(S),vr,(le,F)=>{var x=nt(),N=o(x,!0);s(x);var fe={};k(()=>{X(N,e(F).name),fe!==(fe=e(F).name)&&(x.value=(x.__value=e(F).name)??"")}),f(le,x)}),s(h),he(h,()=>e(_),le=>r(_,le)),f(l,h)};D(Tr,l=>{e(C)?l(Rr):l(Dr,!1)})}s(We),s(ve),s(K);var Ee=a(K,2),Ke=a(o(Ee),2),Pe=o(Ke),Qe=a(o(Pe),2);T(Qe),s(Pe);var Me=a(Pe,2),Xe=a(o(Me),2);T(Xe),s(Me);var Te=a(Me,2),Re=a(o(Te),2);k(()=>{e(L),ye(()=>{})});var De=o(Re);De.value=De.__value="linux";var Ye=a(De);Ye.value=Ye.__value="windows",s(Re),s(Te);var Ze=a(Te,2),Ae=a(o(Ze),2);k(()=>{e(B),ye(()=>{})});var ze=o(Ae);ze.value=ze.__value="amd64";var 
er=a(ze);er.value=er.__value="arm64",s(Ae),s(Ze),s(Ke),s(Ee);var Ie=a(Ee,2),rr=a(o(Ie),2),Ge=o(rr),tr=a(o(Ge),2);T(tr),s(Ge);var He=a(Ge,2),ar=a(o(He),2);T(ar),s(He);var or=a(He,2),sr=a(o(or),2);T(sr),s(or),s(rr),s(Ie);var lr=a(Ie,2),je=a(o(lr),2),Fe=o(je),ir=a(o(Fe),2);T(ir),s(Fe);var dr=a(Fe,2),nr=a(o(dr),2);T(nr),s(dr),s(je);var Ne=a(je,2),Ar=a(o(Ne),2);Vr(Ar,{rows:4,placeholder:"{}",get value(){return e(G)},set value(l){r(G,l)},$$legacy:!0}),s(Ne);var cr=a(Ne,2),ur=o(cr);T(ur),br(2),s(cr),s(lr),k(l=>X($r,`${l??""} `),[()=>e(n).charAt(0).toUpperCase()+e(n).slice(1)]),H(Qe,()=>e(u),l=>r(u,l)),H(Xe,()=>e(y),l=>r(y,l)),he(Re,()=>e(L),l=>r(L,l)),he(Ae,()=>e(B),l=>r(B,l)),H(tr,()=>e(z),l=>r(z,l)),H(ar,()=>e(A),l=>r(A,l)),H(sr,()=>e(I),l=>r(I,l)),H(ir,()=>e(d),l=>r(d,l)),H(nr,()=>e(Z),l=>r(Z,l)),Lr(ur,()=>e(ee),l=>r(ee,l)),f(b,P)};D(pe,b=>{e(n)&&b(t)})}var g=a(pe,2),Ve=o(g),Se=a(Ve,2),wr=o(Se);{var Sr=b=>{var P=gt();f(b,P)},Cr=b=>{var P=qe("Create Scale Set");f(b,P)};D(wr,b=>{e(q)?b(Sr):b(Cr,!1)})}s(Se),s(g),s(V),s(E),k(()=>{Le(W,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="repository"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Le(se,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="organization"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Le(me,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="enterprise"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Se.disabled=e(q)||!e($)||!e(n)||!e(p)||!e(_)||!e(u)||!e(y)}),H(te,()=>e($),b=>r($,b)),de("click",W,()=>re("repository")),de("click",se,()=>re("organization")),de("click",me,()=>re("enterprise")),de("click",Ve,()=>Y("close")),de("submit",V,Br(we)),f(c,E)},$$slots:{default:!0}}),_r()}var pt=w('
                ',1);function jt(xe,ke){hr(ke,!1);const[Y,q]=Ur(),m=()=>qr(Qr,"$eagerCache",Y),n=i(),v=i(),S=i(),O=i();let C=i([]),$=i(!0),p=i(""),_=i(""),u=i(1),y=i(25),A=i(!1),z=i(!1),I=i(!1),d=i(null);async function L(t){try{r(p,""),r(A,!1),Be.success("Scale Set Created","Scale set has been created successfully.")}catch(g){throw r(p,g instanceof Error?g.message:"Failed to create scale set"),g}}async function B(t){if(e(d))try{await R.updateScaleSet(e(d).id,t),Be.success("Scale Set Updated",`Scale set ${e(d).name} has been updated successfully.`),r(z,!1),r(d,null)}catch(g){throw g}}async function Z(){if(e(d))try{await R.deleteScaleSet(e(d).id),Be.success("Scale Set Deleted",`Scale set ${e(d).name} has been deleted successfully.`),r(I,!1),r(d,null)}catch(t){r(p,t instanceof Error?t.message:"Failed to delete scale set")}}function ee(){r(A,!0)}function G(t){r(d,t),r(z,!0)}function ce(t){r(d,t),r(I,!0)}xr(async()=>{try{r($,!0);const t=await fr.getScaleSets();t&&Array.isArray(t)&&r(C,t)}catch(t){console.error("Failed to load scale sets:",t),r(p,t instanceof Error?t.message:"Failed to load scale sets")}finally{r($,!1)}});async function _e(){try{await fr.retryResource("scalesets")}catch(t){console.error("Retry failed:",t)}}const re=[{key:"name",title:"Name",cellComponent:et,cellProps:{entityType:"scaleset"}},{key:"image",title:"Image",cellComponent:Je,cellProps:{field:"image",type:"code",showTitle:!0}},{key:"provider",title:"Provider",cellComponent:Je,cellProps:{field:"provider_name"}},{key:"flavor",title:"Flavor",cellComponent:Je,cellProps:{field:"flavor"}},{key:"entity",title:"Entity",cellComponent:at},{key:"endpoint",title:"Endpoint",cellComponent:rt},{key:"status",title:"Status",cellComponent:tt,cellProps:{statusType:"enabled"}},{key:"actions",title:"Actions",align:"right",cellComponent:Zr}],we={entityType:"scaleset",primaryText:{field:"name",isClickable:!0,href:"/scalesets/{id}"},secondaryText:{field:"entity_name",computedValue:t=>ne(t)},badges:[{type:"custom",value:t=>({variant:t.enabled?"success":"error",text:t.enabled?"Enabled":"Disabled"})}],actions:[{type:"edit",handler:t=>G(t)},{type:"delete",handler:t=>ce(t)}]};function c(t){r(_,t.detail.term),r(u,1)}function J(t){r(u,t.detail.page)}function E(t){r(y,t.detail.perPage),r(u,1)}function V(t){G(t.detail.item)}function ue(t){ce(t.detail.item)}U(()=>(e(C),m()),()=>{(!e(C).length||m().loaded.scalesets)&&r(C,m().scalesets)}),U(()=>m(),()=>{r($,m().loading.scalesets)}),U(()=>m(),()=>{r(n,m().errorMessages.scalesets)}),U(()=>(e(C),e(_),ne),()=>{r(v,Xr(e(C),e(_),t=>ne(t)))}),U(()=>(e(v),e(y)),()=>{r(S,Math.ceil(e(v).length/e(y)))}),U(()=>(e(u),e(S)),()=>{e(u)>e(S)&&e(S)>0&&r(u,e(S))}),U(()=>(e(v),e(u),e(y)),()=>{r(O,e(v).slice((e(u)-1)*e(y),e(u)*e(y)))}),Hr(),yr();var ge=pt();jr(t=>{Fr.title="Scale Sets - GARM"});var j=kr(ge),te=o(j);Or(te,{title:"Scale Sets",description:"Manage GitHub runner scale sets",actionLabel:"Add Scale Set",$$events:{action:ee}});var ae=a(te,2);{let t=Oe(()=>e(n)||e(p)),g=Oe(()=>!!e(n));Yr(ae,{get columns(){return re},get data(){return e(O)},get loading(){return e($)},get error(){return e(t)},get searchTerm(){return e(_)},searchPlaceholder:"Search by entity name...",get currentPage(){return e(u)},get perPage(){return e(y)},get totalPages(){return e(S)},get totalItems(){return e(v),mr(()=>e(v).length)},itemName:"scale sets",emptyIconType:"cog",get showRetry(){return e(g)},get mobileCardConfig(){return we},$$events:{search:c,pageChange:J,perPageChange:E,retry:_e,edit:V,delete:ue}})}s(j);var oe=a(j,2);{var 
be=t=>{mt(t,{$$events:{close:()=>r(A,!1),submit:g=>L(g.detail)}})};D(oe,t=>{e(A)&&t(be)})}var W=a(oe,2);{var se=t=>{Wr(t,{get scaleSet(){return e(d)},$$events:{close:()=>{r(z,!1),r(d,null)},submit:g=>B(g.detail)}})};D(W,t=>{e(z)&&e(d)&&t(se)})}var me=a(W,2);{var pe=t=>{{let g=Oe(()=>(e(d),Nr(ne),mr(()=>`Scale Set ${e(d).name} (${ne(e(d))})`)));Kr(t,{title:"Delete Scale Set",message:"Are you sure you want to delete this scale set? This action cannot be undone and will remove all associated runners.",get itemName(){return e(g)},$$events:{close:()=>{r(I,!1),r(d,null)},confirm:Z}})}};D(me,t=>{e(I)&&e(d)&&t(pe)})}f(xe,ge),_r(),q()}export{jt as component}; diff --git a/webapp/assets/_app/immutable/nodes/17.DLt70sQQ.js b/webapp/assets/_app/immutable/nodes/17.DLt70sQQ.js new file mode 100644 index 00000000..2b2f21ca --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/17.DLt70sQQ.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as hr}from"../chunks/B3Pzt0F_.js";import{p as xr,E as Hr,o as kr,f as w,k as a,j as o,g as e,m as i,r as s,t as k,s as r,e as ne,c as f,v as X,b as _r,z as pr,x as he,D as Be,d as wr,l as q,a as jr,h as Nr,$ as Ur,q as Je,u as mr,n as qr}from"../chunks/D8EpLgQ1.js";import{i as D,s as Or,a as Lr}from"../chunks/5WA7h8uK.js";import{r as T,s as Fe,b as xe,g as R}from"../chunks/CiE1LlKV.js";import"../chunks/CRhkqW2i.js";import{P as Br}from"../chunks/CO4LUyTP.js";import{e as vr,i as fr}from"../chunks/u94nIB4-.js";import{b as H,a as Jr}from"../chunks/C6k1Q4We.js";import{p as Fr}from"../chunks/D4Caz1gY.js";import{M as Vr}from"../chunks/qB7B8uiS.js";import{J as Wr}from"../chunks/DZblzgqm.js";import{e as Y}from"../chunks/BZiHL9L3.js";import{U as Kr}from"../chunks/2p_hWkLJ.js";import{D as Qr}from"../chunks/KQ2xQpA3.js";import{e as Xr,a as yr}from"../chunks/wyaP0EDu.js";import{t as ke}from"../chunks/BEkVdVE1.js";import{e as ce,h as Yr}from"../chunks/BGVHQGl-.js";import{D as Zr,G as Ve,a as et}from"../chunks/BrNfsPe8.js";import{E as rt}from"../chunks/D4PaGKsV.js";import{E as tt}from"../chunks/CGpPw4EW.js";import{S as at}from"../chunks/MCv1Wq2q.js";import{P as ot}from"../chunks/BzlxTz7Q.js";var st=w('

                '),lt=w('
                '),it=w(""),dt=w(''),nt=w('
                '),ct=w(""),ut=w(''),gt=w('

                Entity & Provider Configuration

                Image & OS Configuration

                Runner Limits & Timing

                Advanced Settings

                Extra Specs (JSON)
                ',1),bt=w('
                Creating...
                '),pt=w('

                Create New Scale Set

                Scale sets are only available for GitHub endpoints

                Entity Level *
                ');function mt(_e,we){xr(we,!1);const Z=Hr();let O=i(!1),b=i(""),n=i(""),m=i([]),S=i([]),L=i(!1),C=i(!1),$=i(""),v=i(""),_=i(""),u=i(""),y=i(""),A=i(void 0),z=i(void 0),I=i(void 0),d=i("garm"),B=i("linux"),J=i("amd64"),ee=i(""),re=i(!0),G=i("{}");async function ue(){try{r(C,!0),r(S,await R.listProviders())}catch(p){r(b,Y(p))}finally{r(C,!1)}}async function Se(){if(e(n))try{switch(r(L,!0),r(m,[]),e(n)){case"repository":r(m,await R.listRepositories());break;case"organization":r(m,await R.listOrganizations());break;case"enterprise":r(m,await R.listEnterprises());break}}catch(p){r(b,Y(p))}finally{r(L,!1)}}function te(p){e(n)!==p&&(r(n,p),r(v,""),Se())}async function Ce(){if(!e($)||!e(n)||!e(v)||!e(_)||!e(u)||!e(y)){r(b,"Please fill in all required fields");return}try{r(O,!0),r(b,"");let p={};if(e(G).trim())try{p=JSON.parse(e(G))}catch{throw new Error("Invalid JSON in extra specs")}const F={name:e($),provider_name:e(_),image:e(u),flavor:e(y),max_runners:e(A)||10,min_idle_runners:e(z)||0,runner_bootstrap_timeout:e(I)||20,runner_prefix:e(d),os_type:e(B),os_arch:e(J),"github-runner-group":e(ee)||void 0,enabled:e(re),extra_specs:e(G).trim()?p:void 0};let P;switch(e(n)){case"repository":P=await R.createRepositoryScaleSet(e(v),F);break;case"organization":P=await R.createOrganizationScaleSet(e(v),F);break;case"enterprise":P=await R.createEnterpriseScaleSet(e(v),F);break;default:throw new Error("Invalid entity level selected")}Z("submit",P)}catch(p){r(b,Y(p))}finally{r(O,!1)}}kr(()=>{ue()}),hr(),Vr(_e,{$$events:{close:()=>Z("close")},children:(p,F)=>{var P=pt(),V=a(o(P),2),ge=o(V);{var be=g=>{var M=st(),K=o(M),fe=o(K,!0);s(K),s(M),k(()=>X(fe,e(b))),f(g,M)};D(ge,g=>{e(b)&&g(be)})}var j=a(ge,2),ae=a(o(j),2);T(ae),s(j);var oe=a(j,2),se=o(oe),pe=a(o(se),2),W=o(pe),le=a(W,2),me=a(le,2);s(pe),s(se),s(oe);var ve=a(oe,2);{var t=g=>{var M=gt(),K=_r(M),fe=a(o(K),2),Pe=o(fe),Me=o(Pe),Pr=o(Me);pr(),s(Me);var Mr=a(Me,2);{var Er=l=>{var h=lt();f(l,h)},Tr=l=>{var h=dt();k(()=>{e(v),he(()=>{e(n),e(m)})});var E=o(h),Le=o(E);s(E),E.value=E.__value="";var ie=a(E);vr(ie,1,()=>e(m),fr,(N,x)=>{var U=it(),ye=o(U);{var Ir=Q=>{var de=Be();k(()=>X(de,`${e(x).owner??""}/${e(x).name??""} (${e(x).endpoint?.name||"Unknown endpoint"})`)),f(Q,de)},Gr=Q=>{var de=Be();k(()=>X(de,`${e(x).name??""} (${e(x).endpoint?.name||"Unknown endpoint"})`)),f(Q,de)};D(ye,Q=>{e(n)==="repository"?Q(Ir):Q(Gr,!1)})}s(U);var br={};k(()=>{br!==(br=e(x).id)&&(U.value=(U.__value=e(x).id)??"")}),f(N,U)}),s(h),k(()=>X(Le,`Select a ${e(n)??""}`)),xe(h,()=>e(v),N=>r(v,N)),f(l,h)};D(Mr,l=>{e(L)?l(Er):l(Tr,!1)})}s(Pe);var Ke=a(Pe,2),Rr=a(o(Ke),2);{var Dr=l=>{var h=nt();f(l,h)},Ar=l=>{var h=ut();k(()=>{e(_),he(()=>{e(S)})});var E=o(h);E.value=E.__value="";var Le=a(E);vr(Le,1,()=>e(S),fr,(ie,N)=>{var x=ct(),U=o(x,!0);s(x);var ye={};k(()=>{X(U,e(N).name),ye!==(ye=e(N).name)&&(x.value=(x.__value=e(N).name)??"")}),f(ie,x)}),s(h),xe(h,()=>e(_),ie=>r(_,ie)),f(l,h)};D(Rr,l=>{e(C)?l(Dr):l(Ar,!1)})}s(Ke),s(fe),s(K);var Ee=a(K,2),Qe=a(o(Ee),2),Te=o(Qe),Xe=a(o(Te),2);T(Xe),s(Te);var Re=a(Te,2),Ye=a(o(Re),2);T(Ye),s(Re);var De=a(Re,2),Ae=a(o(De),2);k(()=>{e(B),he(()=>{})});var ze=o(Ae);ze.value=ze.__value="linux";var Ze=a(ze);Ze.value=Ze.__value="windows",s(Ae),s(De);var er=a(De,2),Ie=a(o(er),2);k(()=>{e(J),he(()=>{})});var Ge=o(Ie);Ge.value=Ge.__value="amd64";var rr=a(Ge);rr.value=rr.__value="arm64",s(Ie),s(er),s(Qe),s(Ee);var He=a(Ee,2),tr=a(o(He),2),je=o(tr),ar=a(o(je),2);T(ar),s(je);var Ne=a(je,2),or=a(o(Ne),2);T(or),s(Ne);var 
sr=a(Ne,2),lr=a(o(sr),2);T(lr),s(sr),s(tr),s(He);var ir=a(He,2),Ue=a(o(ir),2),qe=o(Ue),dr=a(o(qe),2);T(dr),s(qe);var nr=a(qe,2),cr=a(o(nr),2);T(cr),s(nr),s(Ue);var Oe=a(Ue,2),zr=a(o(Oe),2);Wr(zr,{rows:4,placeholder:"{}",get value(){return e(G)},set value(l){r(G,l)},$$legacy:!0}),s(Oe);var ur=a(Oe,2),gr=o(ur);T(gr),pr(2),s(ur),s(ir),k(l=>X(Pr,`${l??""} `),[()=>e(n).charAt(0).toUpperCase()+e(n).slice(1)]),H(Xe,()=>e(u),l=>r(u,l)),H(Ye,()=>e(y),l=>r(y,l)),xe(Ae,()=>e(B),l=>r(B,l)),xe(Ie,()=>e(J),l=>r(J,l)),H(ar,()=>e(z),l=>r(z,l)),H(or,()=>e(A),l=>r(A,l)),H(lr,()=>e(I),l=>r(I,l)),H(dr,()=>e(d),l=>r(d,l)),H(cr,()=>e(ee),l=>r(ee,l)),Jr(gr,()=>e(re),l=>r(re,l)),f(g,M)};D(ve,g=>{e(n)&&g(t)})}var c=a(ve,2),We=o(c),$e=a(We,2),Sr=o($e);{var Cr=g=>{var M=bt();f(g,M)},$r=g=>{var M=Be("Create Scale Set");f(g,M)};D(Sr,g=>{e(O)?g(Cr):g($r,!1)})}s($e),s(c),s(V),s(P),k(()=>{Fe(W,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="repository"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Fe(le,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="organization"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),Fe(me,1,`flex flex-col items-center justify-center p-4 border-2 rounded-lg transition-colors cursor-pointer ${e(n)==="enterprise"?"border-blue-500 bg-blue-50 dark:bg-blue-900":"border-gray-300 dark:border-gray-600 hover:border-gray-400 dark:hover:border-gray-500"}`),$e.disabled=e(O)||!e($)||!e(n)||!e(v)||!e(_)||!e(u)||!e(y)}),H(ae,()=>e($),g=>r($,g)),ne("click",W,()=>te("repository")),ne("click",le,()=>te("organization")),ne("click",me,()=>te("enterprise")),ne("click",We,()=>Z("close")),ne("submit",V,Fr(Ce)),f(p,P)},$$slots:{default:!0}}),wr()}var vt=w('
                ',1);function Ut(_e,we){xr(we,!1);const[Z,O]=Or(),b=()=>Lr(Xr,"$eagerCache",Z),n=i(),m=i(),S=i(),L=i();let C=i([]),$=i(!0),v=i(""),_=i(""),u=i(1),y=i(25),A=i(!1),z=i(!1),I=i(!1),d=i(null);async function B(t){try{r(v,""),r(A,!1),ke.success("Scale Set Created","Scale set has been created successfully.")}catch(c){throw r(v,Y(c)),c}}async function J(t){if(e(d))try{await R.updateScaleSet(e(d).id,t),ke.success("Scale Set Updated",`Scale set ${e(d).name} has been updated successfully.`),r(z,!1),r(d,null)}catch(c){throw c}}async function ee(){if(e(d))try{await R.deleteScaleSet(e(d).id),ke.success("Scale Set Deleted",`Scale set ${e(d).name} has been deleted successfully.`)}catch(t){const c=Y(t);ke.error("Delete Failed",c)}finally{r(I,!1),r(d,null)}}function re(){r(A,!0)}function G(t){r(d,t),r(z,!0)}function ue(t){r(d,t),r(I,!0)}kr(async()=>{try{r($,!0);const t=await yr.getScaleSets();t&&Array.isArray(t)&&r(C,t)}catch(t){console.error("Failed to load scale sets:",t),r(v,Y(t))}finally{r($,!1)}});async function Se(){try{await yr.retryResource("scalesets")}catch(t){console.error("Retry failed:",t)}}const te=[{key:"name",title:"Name",cellComponent:rt,cellProps:{entityType:"scaleset"}},{key:"image",title:"Image",cellComponent:Ve,cellProps:{field:"image",type:"code",showTitle:!0}},{key:"provider",title:"Provider",cellComponent:Ve,cellProps:{field:"provider_name"}},{key:"flavor",title:"Flavor",cellComponent:Ve,cellProps:{field:"flavor"}},{key:"entity",title:"Entity",cellComponent:ot},{key:"endpoint",title:"Endpoint",cellComponent:tt},{key:"status",title:"Status",cellComponent:at,cellProps:{statusType:"enabled"}},{key:"actions",title:"Actions",align:"right",cellComponent:et}],Ce={entityType:"scaleset",primaryText:{field:"name",isClickable:!0,href:"/scalesets/{id}"},secondaryText:{field:"entity_name",computedValue:t=>ce(t)},badges:[{type:"custom",value:t=>({variant:t.enabled?"success":"error",text:t.enabled?"Enabled":"Disabled"})}],actions:[{type:"edit",handler:t=>G(t)},{type:"delete",handler:t=>ue(t)}]};function p(t){r(_,t.detail.term),r(u,1)}function F(t){r(u,t.detail.page)}function P(t){r(y,t.detail.perPage),r(u,1)}function V(t){G(t.detail.item)}function ge(t){ue(t.detail.item)}q(()=>(e(C),b()),()=>{(!e(C).length||b().loaded.scalesets)&&r(C,b().scalesets)}),q(()=>b(),()=>{r($,b().loading.scalesets)}),q(()=>b(),()=>{r(n,b().errorMessages.scalesets)}),q(()=>(e(C),e(_),ce),()=>{r(m,Yr(e(C),e(_),t=>ce(t)))}),q(()=>(e(m),e(y)),()=>{r(S,Math.ceil(e(m).length/e(y)))}),q(()=>(e(u),e(S)),()=>{e(u)>e(S)&&e(S)>0&&r(u,e(S))}),q(()=>(e(m),e(u),e(y)),()=>{r(L,e(m).slice((e(u)-1)*e(y),e(u)*e(y)))}),jr(),hr();var be=vt();Nr(t=>{Ur.title="Scale Sets - GARM"});var j=_r(be),ae=o(j);Br(ae,{title:"Scale Sets",description:"Manage GitHub runner scale sets",actionLabel:"Add Scale Set",$$events:{action:re}});var oe=a(ae,2);{let t=Je(()=>e(n)||e(v)),c=Je(()=>!!e(n));Zr(oe,{get columns(){return te},get data(){return e(L)},get loading(){return e($)},get error(){return e(t)},get searchTerm(){return e(_)},searchPlaceholder:"Search by entity name...",get currentPage(){return e(u)},get perPage(){return e(y)},get totalPages(){return e(S)},get totalItems(){return e(m),mr(()=>e(m).length)},itemName:"scale sets",emptyIconType:"cog",get showRetry(){return e(c)},get mobileCardConfig(){return Ce},$$events:{search:p,pageChange:F,perPageChange:P,retry:Se,edit:V,delete:ge}})}s(j);var se=a(j,2);{var pe=t=>{mt(t,{$$events:{close:()=>r(A,!1),submit:c=>B(c.detail)}})};D(se,t=>{e(A)&&t(pe)})}var W=a(se,2);{var le=t=>{Kr(t,{get 
scaleSet(){return e(d)},$$events:{close:()=>{r(z,!1),r(d,null)},submit:c=>J(c.detail)}})};D(W,t=>{e(z)&&e(d)&&t(le)})}var me=a(W,2);{var ve=t=>{{let c=Je(()=>(e(d),qr(ce),mr(()=>`Scale Set ${e(d).name} (${ce(e(d))})`)));Qr(t,{title:"Delete Scale Set",message:"Are you sure you want to delete this scale set? This action cannot be undone and will remove all associated runners.",get itemName(){return e(c)},$$events:{close:()=>{r(I,!1),r(d,null)},confirm:ee}})}};D(me,t=>{e(I)&&e(d)&&t(ve)})}f(_e,be),wr(),O()}export{Ut as component}; diff --git a/webapp/assets/_app/immutable/nodes/18.eu91cRrS.js b/webapp/assets/_app/immutable/nodes/18.eu91cRrS.js new file mode 100644 index 00000000..2b1238f3 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/18.eu91cRrS.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Je}from"../chunks/B3Pzt0F_.js";import{p as Pe,o as je,A as qe,l as ze,a as Ve,f as w,h as We,b as U,t as k,c as p,d as Ke,s as l,m as _,u as i,$ as Qe,g as t,j as a,r,k as d,v as o,y as ct,B as zt,q as $,n as x}from"../chunks/D8EpLgQ1.js";import{i as g,s as Xe,a as Ye}from"../chunks/5WA7h8uK.js";import{c as Vt,g as B,s as Ze}from"../chunks/CiE1LlKV.js";import{p as ta}from"../chunks/BE8f1Riw.js";import{g as Wt}from"../chunks/DXCC0cSN.js";import{b as vt}from"../chunks/CRhkqW2i.js";import{U as ea}from"../chunks/2p_hWkLJ.js";import{D as Kt}from"../chunks/KQ2xQpA3.js";import{D as aa,I as ra}from"../chunks/BEoJgOul.js";import{w as Qt}from"../chunks/u94nIB4-.js";import{t as R}from"../chunks/BEkVdVE1.js";import{e as Xt}from"../chunks/BZiHL9L3.js";import{e as S,i as F,j as Yt,b as C,g as Zt}from"../chunks/BGVHQGl-.js";var sa=w('

                Loading scale set...

                '),da=w('

                '),ia=w('
                GitHub Runner Group
                '),na=w('

                Extra Specifications

                 
                '),la=w('

                Basic Information

                Scale Set ID
                Name
                Provider
                Image
                Flavor
                Status
                Entity
                Created At
                Updated At

                Configuration

                Max Runners
                Min Idle Runners
                Bootstrap Timeout
                Runner Prefix
                OS Type / Architecture
                ',1),oa=w(' ',1);function $a(te,ee){Pe(ee,!1);const[ae,re]=Xe(),mt=()=>Ye(ta,"$page",ae),I=_();let e=_(null),G=_(!0),D=_(""),E=_(!1),M=_(!1),A=_(!1),m=_(null),N=null;async function ut(){if(!(!t(I)||isNaN(t(I))))try{l(G,!0),l(D,""),l(e,await B.getScaleSet(t(I)))}catch(s){l(D,s instanceof Error?s.message:"Failed to load scale set")}finally{l(G,!1)}}async function se(s){if(t(e))try{await B.updateScaleSet(t(e).id,s),await ut(),R.success("Scale Set Updated",`Scale Set ${t(e).name} has been updated successfully.`),l(E,!1)}catch(n){throw n}}async function de(){if(t(e)){try{await B.deleteScaleSet(t(e).id),Wt(`${vt}/scalesets`)}catch(s){const n=Xt(s);R.error("Delete Failed",n)}l(M,!1)}}async function ie(){if(t(m)){try{await B.deleteInstance(t(m).name),R.success("Instance Deleted",`Instance ${t(m).name} has been deleted successfully.`)}catch(s){const n=Xt(s);R.error("Delete Failed",n)}l(A,!1),l(m,null)}}function ne(s){l(m,s),l(A,!0)}function le(s){if(!s)return"{}";try{if(typeof s=="string"){const n=JSON.parse(s);return JSON.stringify(n,null,2)}return JSON.stringify(s,null,2)}catch{return s.toString()}}function oe(s){if(s.operation==="update"){const n=s.payload;t(e)&&n.id===t(e).id&&l(e,n)}else if(s.operation==="delete"){const n=s.payload.id||s.payload;t(e)&&t(e).id===n&&Wt(`${vt}/scalesets`)}}function ce(s){if(!t(e)||!t(e).instances)return;const n=s.payload;if(n.scale_set_id===t(e).id){if(s.operation==="create")ct(e,t(e).instances=[...t(e).instances,n]);else if(s.operation==="update")ct(e,t(e).instances=t(e).instances.map(y=>y.id===n.id?n:y));else if(s.operation==="delete"){const y=n.id||n;ct(e,t(e).instances=t(e).instances.filter(J=>J.id!==y))}l(e,t(e))}}je(()=>{ut();const s=Qt.subscribeToEntity("scaleset",["update","delete"],oe),n=Qt.subscribeToEntity("instance",["create","update","delete"],ce);N=()=>{s(),n()}}),qe(()=>{N&&(N(),N=null)}),ze(()=>mt(),()=>{l(I,parseInt(mt().params.id||"0"))}),Ve(),Je();var xt=oa();We(s=>{k(()=>Qe.title=`${t(e),i(()=>t(e)?`${t(e).name} - Scale Set Details`:"Scale Set Details")??""} - GARM`)});var L=U(xt),O=a(L),gt=a(O),H=a(gt),ve=a(H);r(H);var ft=d(H,2),pt=a(ft),_t=d(a(pt),2),me=a(_t,!0);r(_t),r(pt),r(ft),r(gt),r(O);var ue=d(O,2);{var xe=s=>{var n=sa();p(s,n)},ge=s=>{var n=zt(),y=U(n);{var J=h=>{var b=da(),T=a(b),P=a(T,!0);r(T),r(b),k(()=>o(P,t(D))),p(h,b)},he=h=>{var b=zt(),T=U(b);{var P=j=>{var bt=la(),kt=U(bt);{let c=$(()=>(t(e),i(()=>t(e).name||"Scale Set"))),v=$(()=>(x(S),t(e),i(()=>S(t(e))))),u=$(()=>(x(F),t(e),i(()=>F(t(e))))),f=$(()=>(x(Zt),i(()=>Zt("github"))));aa(kt,{get title(){return t(c)},get subtitle(){return`Scale set for ${t(v)??""} (${t(u)??""}) • GitHub Runner Scale Set`},get forgeIcon(){return t(f)},onEdit:()=>l(E,!0),onDelete:()=>l(M,!0)})}var q=d(kt,2),z=a(q),St=a(z),wt=d(a(St),2),V=a(wt),$t=d(a(V),2),be=a($t,!0);r($t),r(V);var W=d(V,2),It=d(a(W),2),ke=a(It,!0);r(It),r(W);var K=d(W,2),Dt=d(a(K),2),Se=a(Dt,!0);r(Dt),r(K);var Q=d(K,2),Et=d(a(Q),2),Mt=a(Et),we=a(Mt,!0);r(Mt),r(Et),r(Q);var X=d(Q,2),At=d(a(X),2),$e=a(At,!0);r(At),r(X);var Y=d(X,2),Nt=d(a(Y),2),Z=a(Nt),Ie=a(Z,!0);r(Z),r(Nt),r(Y);var tt=d(Y,2),Tt=d(a(tt),2),Ut=a(Tt),et=a(Ut),De=a(et,!0);r(et);var at=d(et,2),Ee=a(at,!0);r(at),r(Ut),r(Tt),r(tt);var rt=d(tt,2),Bt=d(a(rt),2),Me=a(Bt,!0);r(Bt),r(rt);var Rt=d(rt,2),Ft=d(a(Rt),2),Ae=a(Ft,!0);r(Ft),r(Rt),r(wt),r(St),r(z);var Ct=d(z,2),Gt=a(Ct),Lt=d(a(Gt),2),st=a(Lt),Ot=d(a(st),2),Ne=a(Ot,!0);r(Ot),r(st);var dt=d(st,2),Ht=d(a(dt),2),Te=a(Ht,!0);r(Ht),r(dt);var it=d(dt,2),Jt=d(a(it),2),Ue=a(Jt);r(Jt),r(it);var 
nt=d(it,2),Pt=d(a(nt),2),Be=a(Pt,!0);r(Pt),r(nt);var lt=d(nt,2),jt=d(a(lt),2),Re=a(jt);r(jt),r(lt);var Fe=d(lt,2);{var Ce=c=>{var v=ia(),u=d(a(v),2),f=a(u,!0);r(u),r(v),k(()=>o(f,(t(e),i(()=>t(e)["github-runner-group"])))),p(c,v)};g(Fe,c=>{t(e),i(()=>t(e)["github-runner-group"])&&c(Ce)})}r(Lt),r(Gt),r(Ct),r(q);var qt=d(q,2);{var Ge=c=>{var v=na(),u=a(v),f=d(a(u),2),ot=a(f,!0);r(f),r(u),r(v),k(He=>o(ot,He),[()=>(t(e),i(()=>le(t(e).extra_specs)))]),p(c,v)};g(qt,c=>{t(e),i(()=>t(e).extra_specs)&&c(Ge)})}var Le=d(qt,2);{var Oe=c=>{ra(c,{get instances(){return t(e),i(()=>t(e).instances)},entityType:"scaleset",onDeleteInstance:ne})};g(Le,c=>{t(e),i(()=>t(e).instances)&&c(Oe)})}k((c,v,u,f,ot)=>{o(be,(t(e),i(()=>t(e).id))),o(ke,(t(e),i(()=>t(e).name))),o(Se,(t(e),i(()=>t(e).provider_name))),o(we,(t(e),i(()=>t(e).image))),o($e,(t(e),i(()=>t(e).flavor))),Ze(Z,1,`inline-flex px-2 py-1 text-xs font-medium rounded-full ${t(e),i(()=>t(e).enabled?"bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200":"bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200")??""}`),o(Ie,(t(e),i(()=>t(e).enabled?"Enabled":"Disabled"))),o(De,c),Vt(at,"href",v),o(Ee,u),o(Me,f),o(Ae,ot),o(Ne,(t(e),i(()=>t(e).max_runners))),o(Te,(t(e),i(()=>t(e).min_idle_runners))),o(Ue,`${t(e),i(()=>t(e).runner_bootstrap_timeout)??""} minutes`),o(Be,(t(e),i(()=>t(e).runner_prefix||"garm"))),o(Re,`${t(e),i(()=>t(e).os_type)??""} / ${t(e),i(()=>t(e).os_arch)??""}`)},[()=>(x(F),t(e),i(()=>F(t(e)))),()=>(x(Yt),t(e),i(()=>Yt(t(e)))),()=>(x(S),t(e),i(()=>S(t(e)))),()=>(x(C),t(e),i(()=>C(t(e).created_at||""))),()=>(x(C),t(e),i(()=>C(t(e).updated_at||"")))]),p(j,bt)};g(T,j=>{t(e)&&j(P)},!0)}p(h,b)};g(y,h=>{t(D)?h(J):h(he,!1)},!0)}p(s,n)};g(ue,s=>{t(G)?s(xe):s(ge,!1)})}r(L);var yt=d(L,2);{var fe=s=>{ea(s,{get scaleSet(){return t(e)},$$events:{close:()=>l(E,!1),submit:n=>se(n.detail)}})};g(yt,s=>{t(E)&&t(e)&&s(fe)})}var ht=d(yt,2);{var pe=s=>{{let n=$(()=>(t(e),x(S),i(()=>`Scale Set ${t(e).name} (${S(t(e))})`)));Kt(s,{title:"Delete Scale Set",message:"Are you sure you want to delete this scale set? This action cannot be undone and will remove all associated runners.",get itemName(){return t(n)},$$events:{close:()=>l(M,!1),confirm:de}})}};g(ht,s=>{t(M)&&t(e)&&s(pe)})}var _e=d(ht,2);{var ye=s=>{Kt(s,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return t(m),i(()=>t(m).name)},$$events:{close:()=>{l(A,!1),l(m,null)},confirm:ie}})};g(_e,s=>{t(A)&&t(m)&&s(ye)})}k(()=>{Vt(ve,"href",`${vt}/scalesets`),o(me,(t(e),i(()=>t(e)?t(e).name:"Loading...")))}),p(te,xt),Ke(),re()}export{$a as component}; diff --git a/webapp/assets/_app/immutable/nodes/18.iVIhGVtu.js b/webapp/assets/_app/immutable/nodes/18.iVIhGVtu.js deleted file mode 100644 index ad26b0dc..00000000 --- a/webapp/assets/_app/immutable/nodes/18.iVIhGVtu.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Ht}from"../chunks/B3Pzt0F_.js";import{p as Jt,o as jt,A as qt,l as zt,a as Pt,f as w,h as Vt,b as F,t as k,c as p,d as Wt,s as l,m as _,u as i,$ as Kt,g as e,j as a,r,k as d,v as o,y as ve,B as Pe,q as I,n as x}from"../chunks/D8EpLgQ1.js";import{i as g,s as Qt,a as Xt}from"../chunks/5WA7h8uK.js";import{c as Ve,g as U,s as Yt}from"../chunks/CiE1LlKV.js";import{p as Zt}from"../chunks/C41YH50Q.js";import{g as We}from"../chunks/CTf6mQoE.js";import{b as me}from"../chunks/CoIRRsD9.js";import{U as ea}from"../chunks/C89fcOde.js";import{D as Ke}from"../chunks/KQ2xQpA3.js";import{D as ta,I as aa}from"../chunks/DDhBTdDt.js";import{w as Qe}from"../chunks/u94nIB4-.js";import{t as B}from"../chunks/BEkVdVE1.js";import{e as S,i as R,j as Xe,b as C,g as Ye}from"../chunks/BGVHQGl-.js";var ra=w('

                Loading scale set...

                '),sa=w('

                '),da=w('
                GitHub Runner Group
                '),ia=w('

                Extra Specifications

                 
                '),na=w('

                Basic Information

                Scale Set ID
                Name
                Provider
                Image
                Flavor
                Status
                Entity
                Created At
                Updated At

                Configuration

                Max Runners
                Min Idle Runners
                Bootstrap Timeout
                Runner Prefix
                OS Type / Architecture
                ',1),la=w(' ',1);function Sa(Ze,et){Jt(et,!1);const[tt,at]=Qt(),ue=()=>Xt(Zt,"$page",tt),D=_();let t=_(null),G=_(!0),E=_(""),M=_(!1),N=_(!1),$=_(!1),m=_(null),A=null;async function L(){if(!(!e(D)||isNaN(e(D))))try{l(G,!0),l(E,""),l(t,await U.getScaleSet(e(D)))}catch(s){l(E,s instanceof Error?s.message:"Failed to load scale set")}finally{l(G,!1)}}async function rt(s){if(e(t))try{await U.updateScaleSet(e(t).id,s),await L(),B.success("Scale Set Updated",`Scale Set ${e(t).name} has been updated successfully.`),l(M,!1)}catch(n){throw n}}async function st(){if(e(t)){try{await U.deleteScaleSet(e(t).id),We(`${me}/scalesets`)}catch(s){const n=s instanceof Error?s.message:"Failed to delete scale set";B.error("Delete Failed",n)}l(N,!1)}}async function dt(){if(e(m)){try{await U.deleteInstance(e(m).name),B.success("Instance Deleted",`Instance ${e(m).name} has been deleted successfully.`),await L(),l($,!1),l(m,null)}catch(s){const n=s instanceof Error?s.message:"Failed to delete instance";B.error("Delete Failed",n)}l($,!1),l(m,null)}}function it(s){l(m,s),l($,!0)}function nt(s){if(!s)return"{}";try{if(typeof s=="string"){const n=JSON.parse(s);return JSON.stringify(n,null,2)}return JSON.stringify(s,null,2)}catch{return s.toString()}}function lt(s){if(s.operation==="update"){const n=s.payload;e(t)&&n.id===e(t).id&&l(t,n)}else if(s.operation==="delete"){const n=s.payload.id||s.payload;e(t)&&e(t).id===n&&We(`${me}/scalesets`)}}function ot(s){if(!e(t)||!e(t).instances)return;const n=s.payload;if(n.scale_set_id===e(t).id){if(s.operation==="create")ve(t,e(t).instances=[...e(t).instances,n]);else if(s.operation==="update")ve(t,e(t).instances=e(t).instances.map(y=>y.id===n.id?n:y));else if(s.operation==="delete"){const y=n.id||n;ve(t,e(t).instances=e(t).instances.filter(j=>j.id!==y))}l(t,e(t))}}jt(()=>{L();const s=Qe.subscribeToEntity("scaleset",["update","delete"],lt),n=Qe.subscribeToEntity("instance",["create","update","delete"],ot);A=()=>{s(),n()}}),qt(()=>{A&&(A(),A=null)}),zt(()=>ue(),()=>{l(D,parseInt(ue().params.id||"0"))}),Pt(),Ht();var xe=la();Vt(s=>{k(()=>Kt.title=`${e(t),i(()=>e(t)?`${e(t).name} - Scale Set Details`:"Scale Set Details")??""} - GARM`)});var O=F(xe),H=a(O),ge=a(H),J=a(ge),ct=a(J);r(J);var fe=d(J,2),pe=a(fe),_e=d(a(pe),2),vt=a(_e,!0);r(_e),r(pe),r(fe),r(ge),r(H);var mt=d(H,2);{var ut=s=>{var n=ra();p(s,n)},xt=s=>{var n=Pe(),y=F(n);{var j=h=>{var b=sa(),T=a(b),q=a(T,!0);r(T),r(b),k(()=>o(q,e(E))),p(h,b)},yt=h=>{var b=Pe(),T=F(b);{var q=z=>{var be=na(),ke=F(be);{let c=I(()=>(e(t),i(()=>e(t).name||"Scale Set"))),v=I(()=>(x(S),e(t),i(()=>S(e(t))))),u=I(()=>(x(R),e(t),i(()=>R(e(t))))),f=I(()=>(x(Ye),i(()=>Ye("github"))));ta(ke,{get title(){return e(c)},get subtitle(){return`Scale set for ${e(v)??""} (${e(u)??""}) • GitHub Runner Scale Set`},get forgeIcon(){return e(f)},onEdit:()=>l(M,!0),onDelete:()=>l(N,!0)})}var P=d(ke,2),V=a(P),Se=a(V),we=d(a(Se),2),W=a(we),$e=d(a(W),2),ht=a($e,!0);r($e),r(W);var K=d(W,2),Ie=d(a(K),2),bt=a(Ie,!0);r(Ie),r(K);var Q=d(K,2),De=d(a(Q),2),kt=a(De,!0);r(De),r(Q);var X=d(Q,2),Ee=d(a(X),2),Me=a(Ee),St=a(Me,!0);r(Me),r(Ee),r(X);var Y=d(X,2),Ne=d(a(Y),2),wt=a(Ne,!0);r(Ne),r(Y);var Z=d(Y,2),Ae=d(a(Z),2),ee=a(Ae),$t=a(ee,!0);r(ee),r(Ae),r(Z);var te=d(Z,2),Te=d(a(te),2),Fe=a(Te),ae=a(Fe),It=a(ae,!0);r(ae);var re=d(ae,2),Dt=a(re,!0);r(re),r(Fe),r(Te),r(te);var se=d(te,2),Ue=d(a(se),2),Et=a(Ue,!0);r(Ue),r(se);var Be=d(se,2),Re=d(a(Be),2),Mt=a(Re,!0);r(Re),r(Be),r(we),r(Se),r(V);var 
Ce=d(V,2),Ge=a(Ce),Le=d(a(Ge),2),de=a(Le),Oe=d(a(de),2),Nt=a(Oe,!0);r(Oe),r(de);var ie=d(de,2),He=d(a(ie),2),At=a(He,!0);r(He),r(ie);var ne=d(ie,2),Je=d(a(ne),2),Tt=a(Je);r(Je),r(ne);var le=d(ne,2),je=d(a(le),2),Ft=a(je,!0);r(je),r(le);var oe=d(le,2),qe=d(a(oe),2),Ut=a(qe);r(qe),r(oe);var Bt=d(oe,2);{var Rt=c=>{var v=da(),u=d(a(v),2),f=a(u,!0);r(u),r(v),k(()=>o(f,(e(t),i(()=>e(t)["github-runner-group"])))),p(c,v)};g(Bt,c=>{e(t),i(()=>e(t)["github-runner-group"])&&c(Rt)})}r(Le),r(Ge),r(Ce),r(P);var ze=d(P,2);{var Ct=c=>{var v=ia(),u=a(v),f=d(a(u),2),ce=a(f,!0);r(f),r(u),r(v),k(Ot=>o(ce,Ot),[()=>(e(t),i(()=>nt(e(t).extra_specs)))]),p(c,v)};g(ze,c=>{e(t),i(()=>e(t).extra_specs)&&c(Ct)})}var Gt=d(ze,2);{var Lt=c=>{aa(c,{get instances(){return e(t),i(()=>e(t).instances)},entityType:"scaleset",onDeleteInstance:it})};g(Gt,c=>{e(t),i(()=>e(t).instances)&&c(Lt)})}k((c,v,u,f,ce)=>{o(ht,(e(t),i(()=>e(t).id))),o(bt,(e(t),i(()=>e(t).name))),o(kt,(e(t),i(()=>e(t).provider_name))),o(St,(e(t),i(()=>e(t).image))),o(wt,(e(t),i(()=>e(t).flavor))),Yt(ee,1,`inline-flex px-2 py-1 text-xs font-medium rounded-full ${e(t),i(()=>e(t).enabled?"bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200":"bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200")??""}`),o($t,(e(t),i(()=>e(t).enabled?"Enabled":"Disabled"))),o(It,c),Ve(re,"href",v),o(Dt,u),o(Et,f),o(Mt,ce),o(Nt,(e(t),i(()=>e(t).max_runners))),o(At,(e(t),i(()=>e(t).min_idle_runners))),o(Tt,`${e(t),i(()=>e(t).runner_bootstrap_timeout)??""} minutes`),o(Ft,(e(t),i(()=>e(t).runner_prefix||"garm"))),o(Ut,`${e(t),i(()=>e(t).os_type)??""} / ${e(t),i(()=>e(t).os_arch)??""}`)},[()=>(x(R),e(t),i(()=>R(e(t)))),()=>(x(Xe),e(t),i(()=>Xe(e(t)))),()=>(x(S),e(t),i(()=>S(e(t)))),()=>(x(C),e(t),i(()=>C(e(t).created_at||""))),()=>(x(C),e(t),i(()=>C(e(t).updated_at||"")))]),p(z,be)};g(T,z=>{e(t)&&z(q)},!0)}p(h,b)};g(y,h=>{e(E)?h(j):h(yt,!1)},!0)}p(s,n)};g(mt,s=>{e(G)?s(ut):s(xt,!1)})}r(O);var ye=d(O,2);{var gt=s=>{ea(s,{get scaleSet(){return e(t)},$$events:{close:()=>l(M,!1),submit:n=>rt(n.detail)}})};g(ye,s=>{e(M)&&e(t)&&s(gt)})}var he=d(ye,2);{var ft=s=>{{let n=I(()=>(e(t),x(S),i(()=>`Scale Set ${e(t).name} (${S(e(t))})`)));Ke(s,{title:"Delete Scale Set",message:"Are you sure you want to delete this scale set? This action cannot be undone and will remove all associated runners.",get itemName(){return e(n)},$$events:{close:()=>l(N,!1),confirm:st}})}};g(he,s=>{e(N)&&e(t)&&s(ft)})}var pt=d(he,2);{var _t=s=>{Ke(s,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return e(m),i(()=>e(m).name)},$$events:{close:()=>{l($,!1),l(m,null)},confirm:dt}})};g(pt,s=>{e($)&&e(m)&&s(_t)})}k(()=>{Ve(ct,"href",`${me}/scalesets`),o(vt,(e(t),i(()=>e(t)?e(t).name:"Loading...")))}),p(Ze,xe),Wt(),at()}export{Sa as component}; diff --git a/webapp/assets/_app/immutable/nodes/2.1DFwbmOU.js b/webapp/assets/_app/immutable/nodes/2.1DFwbmOU.js new file mode 100644 index 00000000..333497f1 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/2.1DFwbmOU.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Fe}from"../chunks/B3Pzt0F_.js";import{f as h,k as a,j as r,r as t,t as T,v as z,c as m,B as He,b as ze,p as Oe,E as vt,l as Le,s as v,m as I,g as e,a as Ne,C as ut,z as ve,n as B,u,d as Qe,e as Ge,o as mt,A as gt,h as bt,$ as pt,y as N}from"../chunks/D8EpLgQ1.js";import{p as fe,i as C,s as ft,a as xt}from"../chunks/5WA7h8uK.js";import{e as ht,w as Me,i as kt}from"../chunks/u94nIB4-.js";import{s as xe,B as yt,r as Ue,g as Ke,c as ue}from"../chunks/CiE1LlKV.js";import{b as Q}from"../chunks/CRhkqW2i.js";import{e as _t,a as Ce}from"../chunks/wyaP0EDu.js";import{b as Re}from"../chunks/C6k1Q4We.js";import{p as wt}from"../chunks/D4Caz1gY.js";import{M as Mt}from"../chunks/qB7B8uiS.js";import{t as Je}from"../chunks/BEkVdVE1.js";import{e as Ut}from"../chunks/BZiHL9L3.js";var Ct=h('
                '),Rt=h('
                '),zt=h('
                '),Lt=h('
                '),$t=h('
                ');function pe(me,D){let f=fe(D,"title",8),re=fe(D,"content",8),s=fe(D,"position",8,"top"),ae=fe(D,"width",8,"w-80");var i=$t(),k=a(r(i),2),p=r(k),y=r(p,!0);t(p);var d=a(p,2),R=r(d,!0);t(d);var oe=a(d,2);{var ge=E=>{var P=Ct();m(E,P)},se=E=>{var P=He(),W=ze(P);{var J=L=>{var q=Rt();m(L,q)},K=L=>{var q=He(),ie=ze(q);{var le=$=>{var H=zt();m($,H)},ne=$=>{var H=He(),X=ze(H);{var de=l=>{var o=Lt();m(l,o)};C(X,l=>{s()==="right"&&l(de)},!0)}m($,H)};C(ie,$=>{s()==="left"?$(le):$(ne,!1)},!0)}m(L,q)};C(W,L=>{s()==="bottom"?L(J):L(K,!1)},!0)}m(E,P)};C(oe,E=>{s()==="top"?E(ge):E(se,!1)})}t(k),t(i),T(()=>{xe(k,1,`absolute ${s()==="top"?"bottom-full":s()==="bottom"?"top-full":s()==="left"?"right-full top-1/2 -translate-y-1/2":"left-full top-1/2 -translate-y-1/2"} left-1/2 transform -translate-x-1/2 ${s()==="top"?"mb-2":s()==="bottom"?"mt-2":"mx-2"} ${ae()??""} p-3 bg-gray-900 text-white text-xs rounded-lg shadow-lg opacity-0 invisible group-hover:opacity-100 group-hover:visible transition-all duration-200 z-50`),z(y,f()),z(R,re())}),m(me,i)}var St=ut(' Settings',1),jt=h('
                Metadata
                '),Bt=h('
                Callback
                '),At=h('
                Webhook
                '),It=h('

                No URLs configured

                '),Et=h('
                Controller Webhook URL

                Use this URL in your GitHub organization/repository webhook settings

                '),Ht=h('

                Please enter a valid URL

                '),Gt=h('

                Please enter a valid URL

                '),Pt=h('

                Please enter a valid URL

                '),qt=h('

                Controller Settings

                URL where runners can fetch metadata and setup information

                URL where runners send status updates and lifecycle events

                URL where GitHub/Gitea will send webhook events for job notifications

                Time to wait before spinning up a runner for a new job (0 = immediate)

                '),Tt=h('

                Controller Information

                Identity

                Controller ID
                Hostname
                Job Age Backoff

                Integration URLs

                ',1);function Dt(me,D){Oe(D,!1);const f=I(),re=I();let s=fe(D,"controllerInfo",12);const ae=vt();let i=I(!1),k=I(!1),p=I(""),y=I(""),d=I(""),R=I(null);function oe(){v(p,s().metadata_url||""),v(y,s().callback_url||""),v(d,s().webhook_url||""),v(R,s().minimum_job_age_backoff||null),v(i,!0)}async function ge(){try{v(k,!0);const n={};e(p).trim()&&(n.metadata_url=e(p).trim()),e(y).trim()&&(n.callback_url=e(y).trim()),e(d).trim()&&(n.webhook_url=e(d).trim()),e(R)!==null&&e(R)>=0&&(n.minimum_job_age_backoff=e(R));const c=await Ke.updateController(n);Je.success("Settings Updated","Controller settings have been updated successfully."),v(i,!1),s(c),ae("updated",c)}catch(n){Je.error("Update Failed",n instanceof Error?n.message:"Failed to update controller settings")}finally{v(k,!1)}}function se(){v(i,!1),v(p,""),v(y,""),v(d,""),v(R,null)}Le(()=>{},()=>{v(f,n=>{if(!n.trim())return!0;try{return new URL(n),!0}catch{return!1}})}),Le(()=>(e(f),e(p),e(y),e(d),e(R)),()=>{v(re,e(f)(e(p))&&e(f)(e(y))&&e(f)(e(d))&&(e(R)===null||e(R)>=0))}),Ne(),Fe();var E=Tt(),P=ze(E),W=r(P),J=r(W),K=r(J),L=a(r(K),2),q=a(r(L),2),ie=r(q),le=r(ie);t(ie),t(q),t(L),t(K);var ne=a(K,2);yt(ne,{variant:"secondary",size:"sm",$$events:{click:oe},children:(n,c)=>{var b=St();ve(),m(n,b)},$$slots:{default:!0}}),t(J);var $=a(J,2),H=r($),X=r(H),de=a(r(X),2),l=r(de),o=a(r(l),2),S=r(o,!0);t(o),t(l);var _=a(l,2),x=a(r(_),2),w=r(x,!0);t(x),t(_);var A=a(_,2),V=r(A),Y=a(r(V),2),Z=r(Y);pe(Z,{title:"Job Age Backoff",content:"Time in seconds GARM waits after receiving a new job before spinning up a runner. This delay allows existing idle runners to pick up jobs first, preventing unnecessary runner creation. Set to 0 for immediate response."}),t(Y),t(V);var F=a(V,2),O=r(F);t(F),t(A),t(de),t(X),t(H);var ee=a(H,2),ce=r(ee),he=a(r(ce),2),ke=r(he);{var $e=n=>{var c=jt(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Metadata URL",content:"URL where runners retrieve setup information and metadata. Runners must be able to connect to this URL during their initialization process. Usually accessible at /api/v1/metadata endpoint."}),t(M),t(b);var U=a(b,2),j=r(U,!0);t(U),t(c),T(()=>z(j,(B(s()),u(()=>s().metadata_url)))),m(n,c)};C(ke,n=>{B(s()),u(()=>s().metadata_url)&&n($e)})}var Pe=a(ke,2);{var Xe=n=>{var c=Bt(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Callback URL",content:"URL where runners send status updates and system information (OS version, runner agent ID, etc.) to the controller. Runners must be able to connect to this URL. Usually accessible at /api/v1/callbacks endpoint."}),t(M),t(b);var U=a(b,2),j=r(U,!0);t(U),t(c),T(()=>z(j,(B(s()),u(()=>s().callback_url)))),m(n,c)};C(Pe,n=>{B(s()),u(()=>s().callback_url)&&n(Xe)})}var qe=a(Pe,2);{var Ye=n=>{var c=At(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Webhook Base URL",content:"Base URL for webhooks where GitHub sends job notifications. GARM needs to receive these webhooks to know when to create new runners for jobs. GitHub must be able to connect to this URL. Usually accessible at /webhooks endpoint."}),t(M),t(b);var U=a(b,2),j=r(U,!0);t(U),t(c),T(()=>z(j,(B(s()),u(()=>s().webhook_url)))),m(n,c)};C(qe,n=>{B(s()),u(()=>s().webhook_url)&&n(Ye)})}var Ze=a(qe,2);{var et=n=>{var c=It(),b=a(r(c),4);t(c),Ge("click",b,oe),m(n,c)};C(Ze,n=>{B(s()),u(()=>!s().metadata_url&&!s().callback_url&&!s().webhook_url)&&n(et)})}t(he),t(ce),t(ee),t($);var tt=a($,2);{var rt=n=>{var c=Et(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Controller Webhook URL",content:"Unique webhook URL for this GARM controller. 
This is the preferred URL to use in GitHub webhook settings as it's controller-specific and allows multiple GARM controllers to work with the same repository. Automatically combines the webhook base URL with the controller ID."}),t(M),t(b);var U=a(b,2),j=r(U),be=a(r(j),2),ye=r(be),Se=r(ye,!0);t(ye),ve(2),t(be),t(j),t(U),t(c),T(()=>z(Se,(B(s()),u(()=>s().controller_webhook_url)))),m(n,c)};C(tt,n=>{B(s()),u(()=>s().controller_webhook_url)&&n(rt)})}t(W),t(P);var at=a(P,2);{var ot=n=>{Mt(n,{$$events:{close:se},children:(c,b)=>{var M=qt(),G=a(r(M),2),U=r(G),j=a(r(U),2);Ue(j);let be;var ye=a(j,2);{var Se=g=>{var te=Ht();m(g,te)};C(ye,g=>{e(f),e(p),u(()=>!e(f)(e(p)))&&g(Se)})}ve(2),t(U);var je=a(U,2),_e=a(r(je),2);Ue(_e);let Te;var st=a(_e,2);{var it=g=>{var te=Gt();m(g,te)};C(st,g=>{e(f),e(y),u(()=>!e(f)(e(y)))&&g(it)})}ve(2),t(je);var Be=a(je,2),we=a(r(Be),2);Ue(we);let De;var lt=a(we,2);{var nt=g=>{var te=Pt();m(g,te)};C(lt,g=>{e(f),e(d),u(()=>!e(f)(e(d)))&&g(nt)})}ve(2),t(Be);var Ae=a(Be,2),Ve=a(r(Ae),2);Ue(Ve),ve(2),t(Ae);var We=a(Ae,2),Ie=r(We),Ee=a(Ie,2),dt=r(Ee,!0);t(Ee),t(We),t(G),t(M),T((g,te,ct)=>{be=xe(j,1,"block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 bg-white dark:bg-gray-700 text-gray-900 dark:text-white sm:text-sm",null,be,g),Te=xe(_e,1,"block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 bg-white dark:bg-gray-700 text-gray-900 dark:text-white sm:text-sm",null,Te,te),De=xe(we,1,"block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 bg-white dark:bg-gray-700 text-gray-900 dark:text-white sm:text-sm",null,De,ct),Ie.disabled=e(k),Ee.disabled=!e(re)||e(k),z(dt,e(k)?"Saving...":"Save Changes")},[()=>({"border-red-300":!e(f)(e(p))}),()=>({"border-red-300":!e(f)(e(y))}),()=>({"border-red-300":!e(f)(e(d))})]),Re(j,()=>e(p),g=>v(p,g)),Re(_e,()=>e(y),g=>v(y,g)),Re(we,()=>e(d),g=>v(d,g)),Re(Ve,()=>e(R),g=>v(R,g)),Ge("click",Ie,se),Ge("submit",G,wt(ge)),m(c,M)},$$slots:{default:!0}})};C(at,n=>{e(i)&&n(ot)})}T(n=>{z(le,`v${n??""}`),z(S,(B(s()),u(()=>s().controller_id))),z(w,(B(s()),u(()=>s().hostname||"Unknown"))),z(O,`${B(s()),u(()=>s().minimum_job_age_backoff||30)??""}s`)},[()=>(B(s()),u(()=>s().version?.replace(/^v/,"")||"Unknown"))]),m(me,E),Qe()}var Vt=h('

                Error loading dashboard

                '),Wt=h('
                '),Jt=h('

                Dashboard

                Welcome to GARM - GitHub Actions Runner Manager

                ');function sr(me,D){Oe(D,!1);const[f,re]=ft(),s=()=>xt(_t,"$eagerCache",f),ae=I();let i=I({repositories:0,organizations:0,pools:0,instances:0}),k=I(null),p=I(""),y=[];function d(l,o,S=1e3){const _=parseInt(l.textContent||"0"),x=(o-_)/(S/16);let w=_;const A=()=>{if(w+=x,x>0&&w>=o||x<0&&w<=o){l.textContent=o.toString();return}l.textContent=Math.floor(w).toString(),requestAnimationFrame(A)};_!==o&&requestAnimationFrame(A)}mt(async()=>{try{const[x,w,A,V,Y]=await Promise.all([Ce.getRepositories(),Ce.getOrganizations(),Ce.getPools(),Ke.listInstances(),Ce.getControllerInfo()]);setTimeout(()=>{const Z=document.querySelector('[data-stat="repositories"]'),F=document.querySelector('[data-stat="organizations"]'),O=document.querySelector('[data-stat="pools"]'),ee=document.querySelector('[data-stat="instances"]');Z&&d(Z,x.length),F&&d(F,w.length),O&&d(O,A.length),ee&&d(ee,V.length)},100),v(i,{repositories:x.length,organizations:w.length,pools:A.length,instances:V.length}),Y&&v(k,Y)}catch(x){v(p,Ut(x)),console.error("Dashboard error:",x)}const l=Me.subscribeToEntity("repository",["create","delete"],R),o=Me.subscribeToEntity("organization",["create","delete"],oe),S=Me.subscribeToEntity("pool",["create","delete"],ge),_=Me.subscribeToEntity("instance",["create","delete"],se);y=[l,o,S,_]}),gt(()=>{y.forEach(l=>l())});function R(l){const o=document.querySelector('[data-stat="repositories"]');l.operation==="create"?(N(i,e(i).repositories++),o&&d(o,e(i).repositories,500)):l.operation==="delete"&&(N(i,e(i).repositories=Math.max(0,e(i).repositories-1)),o&&d(o,e(i).repositories,500))}function oe(l){const o=document.querySelector('[data-stat="organizations"]');l.operation==="create"?(N(i,e(i).organizations++),o&&d(o,e(i).organizations,500)):l.operation==="delete"&&(N(i,e(i).organizations=Math.max(0,e(i).organizations-1)),o&&d(o,e(i).organizations,500))}function ge(l){const o=document.querySelector('[data-stat="pools"]');l.operation==="create"?(N(i,e(i).pools++),o&&d(o,e(i).pools,500)):l.operation==="delete"&&(N(i,e(i).pools=Math.max(0,e(i).pools-1)),o&&d(o,e(i).pools,500))}function se(l){const o=document.querySelector('[data-stat="instances"]');l.operation==="create"?(N(i,e(i).instances++),o&&d(o,e(i).instances,500)):l.operation==="delete"&&(N(i,e(i).instances=Math.max(0,e(i).instances-1)),o&&d(o,e(i).instances,500))}function E(l){v(k,l.detail)}function P(l){return{blue:"bg-blue-500 text-white",green:"bg-green-500 text-white",purple:"bg-purple-500 text-white",yellow:"bg-yellow-500 text-white"}[l]||"bg-gray-500 text-white"}Le(()=>(e(k),s()),()=>{(!e(k)||s().loaded.controllerInfo)&&v(k,s().controllerInfo)}),Le(()=>(e(i),Q),()=>{v(ae,[{title:"Repositories",value:e(i).repositories,icon:"M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z",color:"blue",href:`${Q}/repositories`},{title:"Organizations",value:e(i).organizations,icon:"M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z",color:"green",href:`${Q}/organizations`},{title:"Pools",value:e(i).pools,icon:"M4.318 6.318a4.5 4.5 0 000 6.364L12 20.364l7.682-7.682a4.5 4.5 0 00-6.364-6.364L12 7.636l-1.318-1.318a4.5 4.5 0 00-6.364 0z",color:"purple",href:`${Q}/pools`},{title:"Instances",value:e(i).instances,icon:"M9 3v2m6-2v2M9 19v2m6-2v2M5 9H3m2 6H3m18-6h-2m2 6h-2M7 19h10a2 2 0 
002-2V7a2 2 0 00-2-2H7a2 2 0 00-2 2v10a2 2 0 002 2zM9 9h6v6H9V9z",color:"yellow",href:`${Q}/instances`}])}),Ne(),Fe();var W=Jt();bt(l=>{pt.title="Dashboard - GARM"});var J=a(r(W),2);{var K=l=>{var o=Vt(),S=r(o),_=a(r(S),2),x=a(r(_),2),w=r(x,!0);t(x),t(_),t(S),t(o),T(()=>z(w,e(p))),m(l,o)};C(J,l=>{e(p)&&l(K)})}var L=a(J,2);ht(L,5,()=>e(ae),kt,(l,o)=>{var S=Wt(),_=r(S),x=r(_),w=r(x),A=r(w),V=r(A),Y=r(V);t(V),t(A),t(w);var Z=a(w,2),F=r(Z),O=r(F),ee=r(O,!0);t(O);var ce=a(O,2),he=r(ce,!0);t(ce),t(F),t(Z),t(x),t(_),t(S),T((ke,$e)=>{ue(S,"href",(e(o),u(()=>e(o).href))),xe(A,1,`w-8 h-8 rounded-md ${ke??""} flex items-center justify-center`),ue(Y,"d",(e(o),u(()=>e(o).icon))),z(ee,(e(o),u(()=>e(o).title))),ue(ce,"data-stat",$e),z(he,(e(o),u(()=>e(o).value)))},[()=>(e(o),u(()=>P(e(o).color))),()=>(e(o),u(()=>e(o).title.toLowerCase()))]),m(l,S)}),t(L);var q=a(L,2);{var ie=l=>{Dt(l,{get controllerInfo(){return e(k)},$$events:{updated:E}})};C(q,l=>{e(k)&&l(ie)})}var le=a(q,2),ne=r(le),$=a(r(ne),4),H=r($),X=a(H,2),de=a(X,2);t($),t(ne),t(le),t(W),T(()=>{ue(H,"href",`${Q??""}/repositories`),ue(X,"href",`${Q??""}/pools`),ue(de,"href",`${Q??""}/instances`)}),m(me,W),Qe(),re()}export{sr as component}; diff --git a/webapp/assets/_app/immutable/nodes/2.CiT4lj0D.js b/webapp/assets/_app/immutable/nodes/2.CiT4lj0D.js deleted file mode 100644 index b08a6de1..00000000 --- a/webapp/assets/_app/immutable/nodes/2.CiT4lj0D.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Je}from"../chunks/B3Pzt0F_.js";import{f as h,k as a,j as r,r as t,t as T,v as z,c as m,B as He,b as ze,p as Oe,E as vt,l as Le,s as v,m as I,g as e,a as Ne,C as ut,z as ve,n as B,u,d as Qe,e as Ge,o as mt,A as gt,h as bt,$ as pt,y as N}from"../chunks/D8EpLgQ1.js";import{p as fe,i as C,s as ft,a as xt}from"../chunks/5WA7h8uK.js";import{e as ht,w as Me,i as kt}from"../chunks/u94nIB4-.js";import{s as xe,B as yt,r as Ue,g as Ke,c as ue}from"../chunks/CiE1LlKV.js";import{b as Q}from"../chunks/CoIRRsD9.js";import{e as _t,a as Ce}from"../chunks/wyaP0EDu.js";import{b as Re}from"../chunks/C6k1Q4We.js";import{p as wt}from"../chunks/D4Caz1gY.js";import{M as Mt}from"../chunks/qB7B8uiS.js";import{t as Fe}from"../chunks/BEkVdVE1.js";var Ut=h('
                '),Ct=h('
                '),Rt=h('
                '),zt=h('
                '),Lt=h('
                ');function pe(me,D){let x=fe(D,"title",8),re=fe(D,"content",8),s=fe(D,"position",8,"top"),ae=fe(D,"width",8,"w-80");var i=Lt(),k=a(r(i),2),p=r(k),y=r(p,!0);t(p);var d=a(p,2),R=r(d,!0);t(d);var oe=a(d,2);{var ge=E=>{var q=Ut();m(E,q)},se=E=>{var q=He(),W=ze(q);{var F=L=>{var P=Ct();m(L,P)},K=L=>{var P=He(),ie=ze(P);{var le=$=>{var H=Rt();m($,H)},ne=$=>{var H=He(),X=ze(H);{var de=l=>{var o=zt();m(l,o)};C(X,l=>{s()==="right"&&l(de)},!0)}m($,H)};C(ie,$=>{s()==="left"?$(le):$(ne,!1)},!0)}m(L,P)};C(W,L=>{s()==="bottom"?L(F):L(K,!1)},!0)}m(E,q)};C(oe,E=>{s()==="top"?E(ge):E(se,!1)})}t(k),t(i),T(()=>{xe(k,1,`absolute ${s()==="top"?"bottom-full":s()==="bottom"?"top-full":s()==="left"?"right-full top-1/2 -translate-y-1/2":"left-full top-1/2 -translate-y-1/2"} left-1/2 transform -translate-x-1/2 ${s()==="top"?"mb-2":s()==="bottom"?"mt-2":"mx-2"} ${ae()??""} p-3 bg-gray-900 text-white text-xs rounded-lg shadow-lg opacity-0 invisible group-hover:opacity-100 group-hover:visible transition-all duration-200 z-50`),z(y,x()),z(R,re())}),m(me,i)}var $t=ut(' Settings',1),St=h('
                Metadata
                '),jt=h('
                Callback
                '),Bt=h('
                Webhook
                '),At=h('

                No URLs configured

                '),It=h('
                Controller Webhook URL

                Use this URL in your GitHub organization/repository webhook settings

                '),Et=h('

                Please enter a valid URL

                '),Ht=h('

                Please enter a valid URL

                '),Gt=h('

                Please enter a valid URL

                '),qt=h('

                Controller Settings

                URL where runners can fetch metadata and setup information

                URL where runners send status updates and lifecycle events

                URL where GitHub/Gitea will send webhook events for job notifications

                Time to wait before spinning up a runner for a new job (0 = immediate)

                '),Pt=h('

                Controller Information

                Identity

                Controller ID
                Hostname
                Job Age Backoff

                Integration URLs

                ',1);function Tt(me,D){Oe(D,!1);const x=I(),re=I();let s=fe(D,"controllerInfo",12);const ae=vt();let i=I(!1),k=I(!1),p=I(""),y=I(""),d=I(""),R=I(null);function oe(){v(p,s().metadata_url||""),v(y,s().callback_url||""),v(d,s().webhook_url||""),v(R,s().minimum_job_age_backoff||null),v(i,!0)}async function ge(){try{v(k,!0);const n={};e(p).trim()&&(n.metadata_url=e(p).trim()),e(y).trim()&&(n.callback_url=e(y).trim()),e(d).trim()&&(n.webhook_url=e(d).trim()),e(R)!==null&&e(R)>=0&&(n.minimum_job_age_backoff=e(R));const c=await Ke.updateController(n);Fe.success("Settings Updated","Controller settings have been updated successfully."),v(i,!1),s(c),ae("updated",c)}catch(n){Fe.error("Update Failed",n instanceof Error?n.message:"Failed to update controller settings")}finally{v(k,!1)}}function se(){v(i,!1),v(p,""),v(y,""),v(d,""),v(R,null)}Le(()=>{},()=>{v(x,n=>{if(!n.trim())return!0;try{return new URL(n),!0}catch{return!1}})}),Le(()=>(e(x),e(p),e(y),e(d),e(R)),()=>{v(re,e(x)(e(p))&&e(x)(e(y))&&e(x)(e(d))&&(e(R)===null||e(R)>=0))}),Ne(),Je();var E=Pt(),q=ze(E),W=r(q),F=r(W),K=r(F),L=a(r(K),2),P=a(r(L),2),ie=r(P),le=r(ie);t(ie),t(P),t(L),t(K);var ne=a(K,2);yt(ne,{variant:"secondary",size:"sm",$$events:{click:oe},children:(n,c)=>{var b=$t();ve(),m(n,b)},$$slots:{default:!0}}),t(F);var $=a(F,2),H=r($),X=r(H),de=a(r(X),2),l=r(de),o=a(r(l),2),S=r(o,!0);t(o),t(l);var _=a(l,2),f=a(r(_),2),w=r(f,!0);t(f),t(_);var A=a(_,2),V=r(A),Y=a(r(V),2),Z=r(Y);pe(Z,{title:"Job Age Backoff",content:"Time in seconds GARM waits after receiving a new job before spinning up a runner. This delay allows existing idle runners to pick up jobs first, preventing unnecessary runner creation. Set to 0 for immediate response."}),t(Y),t(V);var J=a(V,2),O=r(J);t(J),t(A),t(de),t(X),t(H);var ee=a(H,2),ce=r(ee),he=a(r(ce),2),ke=r(he);{var $e=n=>{var c=St(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Metadata URL",content:"URL where runners retrieve setup information and metadata. Runners must be able to connect to this URL during their initialization process. Usually accessible at /api/v1/metadata endpoint."}),t(M),t(b);var U=a(b,2),j=r(U,!0);t(U),t(c),T(()=>z(j,(B(s()),u(()=>s().metadata_url)))),m(n,c)};C(ke,n=>{B(s()),u(()=>s().metadata_url)&&n($e)})}var qe=a(ke,2);{var Xe=n=>{var c=jt(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Callback URL",content:"URL where runners send status updates and system information (OS version, runner agent ID, etc.) to the controller. Runners must be able to connect to this URL. Usually accessible at /api/v1/callbacks endpoint."}),t(M),t(b);var U=a(b,2),j=r(U,!0);t(U),t(c),T(()=>z(j,(B(s()),u(()=>s().callback_url)))),m(n,c)};C(qe,n=>{B(s()),u(()=>s().callback_url)&&n(Xe)})}var Pe=a(qe,2);{var Ye=n=>{var c=Bt(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Webhook Base URL",content:"Base URL for webhooks where GitHub sends job notifications. GARM needs to receive these webhooks to know when to create new runners for jobs. GitHub must be able to connect to this URL. Usually accessible at /webhooks endpoint."}),t(M),t(b);var U=a(b,2),j=r(U,!0);t(U),t(c),T(()=>z(j,(B(s()),u(()=>s().webhook_url)))),m(n,c)};C(Pe,n=>{B(s()),u(()=>s().webhook_url)&&n(Ye)})}var Ze=a(Pe,2);{var et=n=>{var c=At(),b=a(r(c),4);t(c),Ge("click",b,oe),m(n,c)};C(Ze,n=>{B(s()),u(()=>!s().metadata_url&&!s().callback_url&&!s().webhook_url)&&n(et)})}t(he),t(ce),t(ee),t($);var tt=a($,2);{var rt=n=>{var c=It(),b=r(c),M=a(r(b),2),G=r(M);pe(G,{title:"Controller Webhook URL",content:"Unique webhook URL for this GARM controller. 
This is the preferred URL to use in GitHub webhook settings as it's controller-specific and allows multiple GARM controllers to work with the same repository. Automatically combines the webhook base URL with the controller ID."}),t(M),t(b);var U=a(b,2),j=r(U),be=a(r(j),2),ye=r(be),Se=r(ye,!0);t(ye),ve(2),t(be),t(j),t(U),t(c),T(()=>z(Se,(B(s()),u(()=>s().controller_webhook_url)))),m(n,c)};C(tt,n=>{B(s()),u(()=>s().controller_webhook_url)&&n(rt)})}t(W),t(q);var at=a(q,2);{var ot=n=>{Mt(n,{$$events:{close:se},children:(c,b)=>{var M=qt(),G=a(r(M),2),U=r(G),j=a(r(U),2);Ue(j);let be;var ye=a(j,2);{var Se=g=>{var te=Et();m(g,te)};C(ye,g=>{e(x),e(p),u(()=>!e(x)(e(p)))&&g(Se)})}ve(2),t(U);var je=a(U,2),_e=a(r(je),2);Ue(_e);let Te;var st=a(_e,2);{var it=g=>{var te=Ht();m(g,te)};C(st,g=>{e(x),e(y),u(()=>!e(x)(e(y)))&&g(it)})}ve(2),t(je);var Be=a(je,2),we=a(r(Be),2);Ue(we);let De;var lt=a(we,2);{var nt=g=>{var te=Gt();m(g,te)};C(lt,g=>{e(x),e(d),u(()=>!e(x)(e(d)))&&g(nt)})}ve(2),t(Be);var Ae=a(Be,2),Ve=a(r(Ae),2);Ue(Ve),ve(2),t(Ae);var We=a(Ae,2),Ie=r(We),Ee=a(Ie,2),dt=r(Ee,!0);t(Ee),t(We),t(G),t(M),T((g,te,ct)=>{be=xe(j,1,"block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 bg-white dark:bg-gray-700 text-gray-900 dark:text-white sm:text-sm",null,be,g),Te=xe(_e,1,"block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 bg-white dark:bg-gray-700 text-gray-900 dark:text-white sm:text-sm",null,Te,te),De=xe(we,1,"block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 bg-white dark:bg-gray-700 text-gray-900 dark:text-white sm:text-sm",null,De,ct),Ie.disabled=e(k),Ee.disabled=!e(re)||e(k),z(dt,e(k)?"Saving...":"Save Changes")},[()=>({"border-red-300":!e(x)(e(p))}),()=>({"border-red-300":!e(x)(e(y))}),()=>({"border-red-300":!e(x)(e(d))})]),Re(j,()=>e(p),g=>v(p,g)),Re(_e,()=>e(y),g=>v(y,g)),Re(we,()=>e(d),g=>v(d,g)),Re(Ve,()=>e(R),g=>v(R,g)),Ge("click",Ie,se),Ge("submit",G,wt(ge)),m(c,M)},$$slots:{default:!0}})};C(at,n=>{e(i)&&n(ot)})}T(n=>{z(le,`v${n??""}`),z(S,(B(s()),u(()=>s().controller_id))),z(w,(B(s()),u(()=>s().hostname||"Unknown"))),z(O,`${B(s()),u(()=>s().minimum_job_age_backoff||30)??""}s`)},[()=>(B(s()),u(()=>s().version?.replace(/^v/,"")||"Unknown"))]),m(me,E),Qe()}var Dt=h('

                Error loading dashboard

                '),Vt=h('
                '),Wt=h('

                Dashboard

                Welcome to GARM - GitHub Actions Runner Manager

                ');function ar(me,D){Oe(D,!1);const[x,re]=ft(),s=()=>xt(_t,"$eagerCache",x),ae=I();let i=I({repositories:0,organizations:0,pools:0,instances:0}),k=I(null),p=I(""),y=[];function d(l,o,S=1e3){const _=parseInt(l.textContent||"0"),f=(o-_)/(S/16);let w=_;const A=()=>{if(w+=f,f>0&&w>=o||f<0&&w<=o){l.textContent=o.toString();return}l.textContent=Math.floor(w).toString(),requestAnimationFrame(A)};_!==o&&requestAnimationFrame(A)}mt(async()=>{try{const[f,w,A,V,Y]=await Promise.all([Ce.getRepositories(),Ce.getOrganizations(),Ce.getPools(),Ke.listInstances(),Ce.getControllerInfo()]);setTimeout(()=>{const Z=document.querySelector('[data-stat="repositories"]'),J=document.querySelector('[data-stat="organizations"]'),O=document.querySelector('[data-stat="pools"]'),ee=document.querySelector('[data-stat="instances"]');Z&&d(Z,f.length),J&&d(J,w.length),O&&d(O,A.length),ee&&d(ee,V.length)},100),v(i,{repositories:f.length,organizations:w.length,pools:A.length,instances:V.length}),Y&&v(k,Y)}catch(f){v(p,f instanceof Error?f.message:"Failed to load dashboard data"),console.error("Dashboard error:",f)}const l=Me.subscribeToEntity("repository",["create","delete"],R),o=Me.subscribeToEntity("organization",["create","delete"],oe),S=Me.subscribeToEntity("pool",["create","delete"],ge),_=Me.subscribeToEntity("instance",["create","delete"],se);y=[l,o,S,_]}),gt(()=>{y.forEach(l=>l())});function R(l){const o=document.querySelector('[data-stat="repositories"]');l.operation==="create"?(N(i,e(i).repositories++),o&&d(o,e(i).repositories,500)):l.operation==="delete"&&(N(i,e(i).repositories=Math.max(0,e(i).repositories-1)),o&&d(o,e(i).repositories,500))}function oe(l){const o=document.querySelector('[data-stat="organizations"]');l.operation==="create"?(N(i,e(i).organizations++),o&&d(o,e(i).organizations,500)):l.operation==="delete"&&(N(i,e(i).organizations=Math.max(0,e(i).organizations-1)),o&&d(o,e(i).organizations,500))}function ge(l){const o=document.querySelector('[data-stat="pools"]');l.operation==="create"?(N(i,e(i).pools++),o&&d(o,e(i).pools,500)):l.operation==="delete"&&(N(i,e(i).pools=Math.max(0,e(i).pools-1)),o&&d(o,e(i).pools,500))}function se(l){const o=document.querySelector('[data-stat="instances"]');l.operation==="create"?(N(i,e(i).instances++),o&&d(o,e(i).instances,500)):l.operation==="delete"&&(N(i,e(i).instances=Math.max(0,e(i).instances-1)),o&&d(o,e(i).instances,500))}function E(l){v(k,l.detail)}function q(l){return{blue:"bg-blue-500 text-white",green:"bg-green-500 text-white",purple:"bg-purple-500 text-white",yellow:"bg-yellow-500 text-white"}[l]||"bg-gray-500 text-white"}Le(()=>(e(k),s()),()=>{(!e(k)||s().loaded.controllerInfo)&&v(k,s().controllerInfo)}),Le(()=>(e(i),Q),()=>{v(ae,[{title:"Repositories",value:e(i).repositories,icon:"M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z",color:"blue",href:`${Q}/repositories`},{title:"Organizations",value:e(i).organizations,icon:"M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z",color:"green",href:`${Q}/organizations`},{title:"Pools",value:e(i).pools,icon:"M4.318 6.318a4.5 4.5 0 000 6.364L12 20.364l7.682-7.682a4.5 4.5 0 00-6.364-6.364L12 7.636l-1.318-1.318a4.5 4.5 0 00-6.364 0z",color:"purple",href:`${Q}/pools`},{title:"Instances",value:e(i).instances,icon:"M9 3v2m6-2v2M9 
19v2m6-2v2M5 9H3m2 6H3m18-6h-2m2 6h-2M7 19h10a2 2 0 002-2V7a2 2 0 00-2-2H7a2 2 0 00-2 2v10a2 2 0 002 2zM9 9h6v6H9V9z",color:"yellow",href:`${Q}/instances`}])}),Ne(),Je();var W=Wt();bt(l=>{pt.title="Dashboard - GARM"});var F=a(r(W),2);{var K=l=>{var o=Dt(),S=r(o),_=a(r(S),2),f=a(r(_),2),w=r(f,!0);t(f),t(_),t(S),t(o),T(()=>z(w,e(p))),m(l,o)};C(F,l=>{e(p)&&l(K)})}var L=a(F,2);ht(L,5,()=>e(ae),kt,(l,o)=>{var S=Vt(),_=r(S),f=r(_),w=r(f),A=r(w),V=r(A),Y=r(V);t(V),t(A),t(w);var Z=a(w,2),J=r(Z),O=r(J),ee=r(O,!0);t(O);var ce=a(O,2),he=r(ce,!0);t(ce),t(J),t(Z),t(f),t(_),t(S),T((ke,$e)=>{ue(S,"href",(e(o),u(()=>e(o).href))),xe(A,1,`w-8 h-8 rounded-md ${ke??""} flex items-center justify-center`),ue(Y,"d",(e(o),u(()=>e(o).icon))),z(ee,(e(o),u(()=>e(o).title))),ue(ce,"data-stat",$e),z(he,(e(o),u(()=>e(o).value)))},[()=>(e(o),u(()=>q(e(o).color))),()=>(e(o),u(()=>e(o).title.toLowerCase()))]),m(l,S)}),t(L);var P=a(L,2);{var ie=l=>{Tt(l,{get controllerInfo(){return e(k)},$$events:{updated:E}})};C(P,l=>{e(k)&&l(ie)})}var le=a(P,2),ne=r(le),$=a(r(ne),4),H=r($),X=a(H,2),de=a(X,2);t($),t(ne),t(le),t(W),T(()=>{ue(H,"href",`${Q??""}/repositories`),ue(X,"href",`${Q??""}/pools`),ue(de,"href",`${Q??""}/instances`)}),m(me,W),Qe(),re()}export{ar as component}; diff --git a/webapp/assets/_app/immutable/nodes/3.BSFz0YHn.js b/webapp/assets/_app/immutable/nodes/3.BSFz0YHn.js deleted file mode 100644 index f89efad2..00000000 --- a/webapp/assets/_app/immutable/nodes/3.BSFz0YHn.js +++ /dev/null @@ -1,7 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as It}from"../chunks/B3Pzt0F_.js";import{p as jt,g as e,o as Gt,l as Q,a as Ut,f as E,e as f,h as qt,b as Fe,c as $,d as zt,$ as Bt,m as b,s as n,i as St,j as i,k as a,r as o,u,n as ye,q as Le,t as se,v as W,w as tt,x as Lt,y as _,z as X}from"../chunks/D8EpLgQ1.js";import{i as G,s as Nt,a as Kt}from"../chunks/5WA7h8uK.js";import{e as Vt,i as Ht}from"../chunks/u94nIB4-.js";import{h as Rt,r as L,s as Ie,b as Yt,a as Ot,g as ve}from"../chunks/CiE1LlKV.js";import{b as N,a as Jt}from"../chunks/C6k1Q4We.js";import{p as rt}from"../chunks/D4Caz1gY.js";import{P as Qt}from"../chunks/CO4LUyTP.js";import{F as Wt}from"../chunks/CNMHKIIK.js";import{D as Xt,A as at,G as it,a as Zt}from"../chunks/C9DJVOi1.js";import{e as er,a as Ne}from"../chunks/wyaP0EDu.js";import{t as je}from"../chunks/BEkVdVE1.js";import{f as tr,p as rr,g as Ke,B as ot,c as ar}from"../chunks/BGVHQGl-.js";import"../chunks/CoIRRsD9.js";import{E as ir}from"../chunks/CGpPw4EW.js";import{S as or}from"../chunks/BE4wujub.js";var nr=E('

                '),dr=E(""),sr=E('

                '),lr=E('

                Gitea only supports PAT authentication

                '),cr=E('
                '),ur=E('

                or drag and drop

                PEM, KEY files only

                ',1),pr=E(''),gr=E('
                '),br=E('

                or drag and drop

                PEM, KEY files only. Upload new private key.

                ',1),yr=E(" ",1),vr=E(''),fr=E(''),mr=E('
                ',1);function Ur(nt,dt){jt(dt,!1);const[st,lt]=Nt(),U=()=>Kt(er,"$eagerCache",st),Ge=b(),Z=b(),Ve=b(),Ue=b(),p={PAT:"pat",APP:"app"};let we=b(!0),le=b([]),J=b([]),fe=b(""),Ce=b(""),K=b(1),ce=b(25),ue=b(1),Pe=b(!1),Ae=b(!1),Te=b(!1),D=b(p.PAT),g=b(null),M=b(null),r=b({name:"",description:"",endpoint:"",auth_type:p.PAT,oauth2_token:"",app_id:"",installation_id:"",private_key_bytes:""}),$e={...e(r)},ee=b(!1);function ct(t){t.key==="Escape"&&(e(Pe)||e(Ae)||e(Te))&&P()}Gt(async()=>{try{n(we,!0);const[t,s]=await Promise.all([Ne.getCredentials(),Ne.getEndpoints()]);t&&Array.isArray(t)&&n(le,t),s&&Array.isArray(s)&&n(J,s)}catch(t){console.error("Failed to load credentials:",t),n(fe,t instanceof Error?t.message:"Failed to load credentials")}finally{n(we,!1)}});async function ut(){try{await Ne.retryResource("credentials")}catch(t){console.error("Retry failed:",t)}}async function pt(){He(),n(Pe,!0),n(x,"github")}let x=b("");function gt(t){n(x,t.detail),_(r,e(r).auth_type=p.PAT),n(D,p.PAT)}async function qe(t){n(g,t),n(r,{name:t.name||"",description:t.description||"",endpoint:t.endpoint?.name||"",auth_type:t["auth-type"]||p.PAT,oauth2_token:"",app_id:"",installation_id:"",private_key_bytes:""}),n(D,t["auth-type"]||p.PAT),$e={...e(r)},n(ee,!1),n(Ae,!0)}function ze(t){n(M,t),n(Te,!0)}function He(){n(r,{name:"",description:"",endpoint:"",auth_type:p.PAT,oauth2_token:"",app_id:"",installation_id:"",private_key_bytes:""}),$e={...e(r)},n(D,p.PAT),n(ee,!1)}function P(){n(Pe,!1),n(Ae,!1),n(Te,!1),n(g,null),n(M,null),n(x,""),He()}function Re(t){n(D,t),_(r,e(r).auth_type=t)}function bt(){const t={};if(e(r).name!==$e.name&&e(r).name.trim()!==""&&(t.name=e(r).name.trim()),e(r).description!==$e.description&&e(r).description.trim()!==""&&(t.description=e(r).description.trim()),e(ee)&&e(g))if(e(g)["auth-type"]===p.PAT)e(r).oauth2_token.trim()!==""&&(t.pat={oauth2_token:e(r).oauth2_token.trim()});else{const s={};let y=!1;if(e(r).app_id.trim()!==""&&(s.app_id=parseInt(e(r).app_id.trim()),y=!0),e(r).installation_id.trim()!==""&&(s.installation_id=parseInt(e(r).installation_id.trim()),y=!0),e(r).private_key_bytes!=="")try{const m=atob(e(r).private_key_bytes);s.private_key_bytes=Array.from(m,l=>l.charCodeAt(0)),y=!0}catch{}y&&(t.app=s)}return t}async function yt(){try{if(e(x)==="github")await ve.createGithubCredentials(e(r));else if(e(x)==="gitea")await ve.createGiteaCredentials(e(r));else throw new Error("Please select a forge type");je.success("Credentials Created",`Credentials ${e(r).name} have been created successfully.`),P()}catch(t){n(fe,t instanceof Error?t.message:"Failed to create credentials")}}async function vt(){if(!(!e(g)||!e(g).id))try{const t=bt();if(Object.keys(t).length===0){je.info("No Changes","No fields were modified."),P();return}e(g).forge_type==="github"?await ve.updateGithubCredentials(e(g).id,t):await ve.updateGiteaCredentials(e(g).id,t),je.success("Credentials Updated",`Credentials ${e(g)?.name||"Unknown"} have been updated successfully.`),P()}catch(t){n(fe,t instanceof Error?t.message:"Failed to update credentials")}}async function ft(){if(!(!e(M)||!e(M).id))try{e(M).forge_type==="github"?await ve.deleteGithubCredentials(e(M).id):await ve.deleteGiteaCredentials(e(M).id),je.success("Credentials Deleted",`Credentials ${e(M)?.name||"Unknown"} have been deleted successfully.`),P()}catch(t){n(fe,t instanceof Error?t.message:"Failed to delete credentials")}}function Ye(t){const y=t.target.files?.[0];if(!y){_(r,e(r).private_key_bytes="");return}const m=new FileReader;m.onload=l=>{const 
h=l.target?.result;_(r,e(r).private_key_bytes=btoa(h))},m.readAsText(y)}function Oe(){return!e(r).name||!e(r).description||!e(r).endpoint?!1:e(r).auth_type===p.PAT?!!e(r).oauth2_token:!!e(r).app_id&&!!e(r).installation_id&&!!e(r).private_key_bytes}function Je(){return!e(r).name.trim()||!e(r).description.trim()?!1:e(ee)&&e(g)?e(g)["auth-type"]===p.PAT?!!e(r).oauth2_token.trim():!!e(r).app_id.trim()&&!!e(r).installation_id.trim()&&!!e(r).private_key_bytes:!0}function mt(t){return e(J).find(y=>y.name===t)?.endpoint_type||""}function xt(t){return mt(t)==="gitea"}const _t=[{key:"name",title:"Name",cellComponent:it,cellProps:{field:"name"}},{key:"description",title:"Description",cellComponent:it,cellProps:{field:"description",type:"description"}},{key:"endpoint",title:"Endpoint",cellComponent:ir},{key:"auth_type",title:"Auth Type",cellComponent:or,cellProps:{statusType:"custom",statusField:"auth-type"}},{key:"actions",title:"Actions",align:"right",cellComponent:Zt}],ht={entityType:"credential",primaryText:{field:"name",isClickable:!1},secondaryText:{field:"description"},customInfo:[{icon:t=>Ke(t?.forge_type||"unknown"),text:t=>t?.endpoint?.name||"Unknown"}],badges:[{type:"auth",field:"auth-type"}],actions:[{type:"edit",handler:t=>qe(t)},{type:"delete",handler:t=>ze(t)}]};function kt(t){n(Ce,t.detail.term),n(K,1)}function wt(t){n(K,t.detail.page)}function Ct(t){const s=ar(t.detail.perPage);n(ce,s.newPerPage),n(K,s.newCurrentPage)}function Pt(t){qe(t.detail.item)}function At(t){ze(t.detail.item)}Q(()=>(e(le),U()),()=>{(!e(le).length||U().loaded.credentials)&&n(le,U().credentials)}),Q(()=>U(),()=>{n(we,U().loading.credentials)}),Q(()=>U(),()=>{n(Ge,U().errorMessages.credentials)}),Q(()=>(e(J),U()),()=>{(!e(J).length||U().loaded.endpoints)&&n(J,U().endpoints)}),Q(()=>(e(le),e(Ce)),()=>{n(Z,tr(e(le),e(Ce)))}),Q(()=>(e(ue),e(Z),e(ce),e(K)),()=>{n(ue,Math.ceil(e(Z).length/e(ce))),e(K)>e(ue)&&e(ue)>0&&n(K,e(ue))}),Q(()=>(e(Z),e(K),e(ce)),()=>{n(Ve,rr(e(Z),e(K),e(ce)))}),Q(()=>(e(x),e(J)),()=>{n(Ue,e(x)?e(J).filter(t=>t.endpoint_type===e(x)):e(J))}),Ut(),It();var Qe=mr();f("keydown",St,ct),qt(t=>{Bt.title="Credentials - GARM"});var Be=Fe(Qe),We=i(Be);Qt(We,{title:"Credentials",description:"Manage authentication credentials for your GitHub and Gitea endpoints.",actionLabel:"Add Credentials",$$events:{action:pt}});var Tt=a(We,2);{let t=Le(()=>e(Ge)||e(fe)),s=Le(()=>!!e(Ge));Xt(Tt,{get columns(){return _t},get data(){return e(Ve)},get loading(){return e(we)},get error(){return e(t)},get searchTerm(){return e(Ce)},searchPlaceholder:"Search credentials by name, description, or endpoint...",get currentPage(){return e(K)},get perPage(){return e(ce)},get totalPages(){return e(ue)},get totalItems(){return e(Z),u(()=>e(Z).length)},itemName:"credentials",emptyIconType:"key",get showRetry(){return e(s)},get mobileCardConfig(){return ht},$$events:{search:kt,pageChange:wt,perPageChange:Ct,retry:ut,edit:Pt,delete:At},$$slots:{"mobile-card":(y,m)=>{const l=Le(()=>m.item);var h=nr(),A=i(h),q=i(A),F=i(q),I=i(F,!0);o(F);var k=a(F,2),V=i(k,!0);o(k);var z=a(k,2),B=i(z),S=i(B);Rt(S,()=>(ye(Ke),ye(e(l)),u(()=>Ke(e(l).forge_type||"unknown"))));var te=a(S,2),re=i(te,!0);o(te),o(B),o(z),o(q),o(A);var ae=a(A,2),H=i(ae);{var ie=j=>{ot(j,{variant:"success",text:"PAT"})},R=j=>{ot(j,{variant:"info",text:"App"})};G(H,j=>{ye(e(l)),u(()=>(e(l)["auth-type"]||"pat")==="pat")?j(ie):j(R,!1)})}var Y=a(H,2),oe=i(Y);at(oe,{action:"edit",size:"sm",title:"Edit credentials",ariaLabel:"Edit credentials",$$events:{click:()=>qe(e(l))}});var 
pe=a(oe,2);at(pe,{action:"delete",size:"sm",title:"Delete credentials",ariaLabel:"Delete credentials",$$events:{click:()=>ze(e(l))}}),o(Y),o(ae),o(h),se(()=>{W(I,(ye(e(l)),u(()=>e(l).name))),W(V,(ye(e(l)),u(()=>e(l).description))),W(re,(ye(e(l)),u(()=>e(l).endpoint?.name||"Unknown")))}),$(y,h)}}})}o(Be);var Xe=a(Be,2);{var $t=t=>{var s=pr(),y=i(s),m=a(y,2),l=i(m),h=a(i(l),2);o(l);var A=a(l,2),q=i(A);Wt(q,{get selectedForgeType(){return e(x)},set selectedForgeType(d){n(x,d)},$$events:{select:gt},$$legacy:!0});var F=a(q,2),I=a(i(F),2);L(I),o(F);var k=a(F,2),V=a(i(k),2);tt(V),o(k);var z=a(k,2),B=a(i(z),2);se(()=>{e(r),Lt(()=>{e(Ue)})});var S=i(B);S.value=S.__value="";var te=a(S);Vt(te,1,()=>e(Ue),Ht,(d,c)=>{var v=dr(),C=i(v);o(v);var T={};se(()=>{W(C,`${e(c),u(()=>e(c).name)??""} (${e(c),u(()=>e(c).endpoint_type)??""})`),T!==(T=(e(c),u(()=>e(c).name)))&&(v.value=(v.__value=(e(c),u(()=>e(c).name)))??"")}),$(d,v)}),o(B);var re=a(B,2);{var ae=d=>{var c=sr(),v=i(c);o(c),se(()=>W(v,`Showing only ${e(x)??""} endpoints`)),$(d,c)};G(re,d=>{e(x)&&d(ae)})}o(z);var H=a(z,2),ie=a(i(H),2),R=i(ie),Y=a(R,2);o(ie);var oe=a(ie,2);{var pe=d=>{var c=lr();$(d,c)};G(oe,d=>{e(x)==="gitea"&&d(pe)})}o(H);var j=a(H,2);{var Ee=d=>{var c=cr(),v=a(i(c),2);L(v),o(c),N(v,()=>e(r).oauth2_token,C=>_(r,e(r).oauth2_token=C)),$(d,c)};G(j,d=>{e(D),u(()=>e(D)===p.PAT)&&d(Ee)})}var me=a(j,2);{var De=d=>{var c=ur(),v=Fe(c),C=a(i(v),2);L(C),o(v);var T=a(v,2),O=a(i(T),2);L(O),o(T);var de=a(T,2),ge=a(i(de),2),_e=i(ge),he=a(_e,2),ke=a(i(he),2),Me=i(ke);X(),o(ke),X(2),o(he),o(ge),o(de),N(C,()=>e(r).app_id,be=>_(r,e(r).app_id=be)),N(O,()=>e(r).installation_id,be=>_(r,e(r).installation_id=be)),f("change",_e,Ye),f("click",Me,()=>document.getElementById("private_key")?.click()),$(d,c)};G(me,d=>{e(D),u(()=>e(D)===p.APP)&&d(De)})}var w=a(me,2),ne=i(w),xe=a(ne,2);o(w),o(A),o(m),o(s),se((d,c,v)=>{Ie(R,1,`flex-1 py-2 px-4 text-sm font-medium rounded-md border focus:outline-none focus:ring-2 focus:ring-blue-500 cursor-pointer - ${e(D),u(()=>e(D)===p.PAT?"bg-blue-600 text-white border-blue-600":"bg-white dark:bg-gray-700 text-gray-700 dark:text-gray-300 border-gray-300 dark:border-gray-600 hover:bg-gray-50 dark:hover:bg-gray-600")??""} - ${d??""}`),Y.disabled=e(x)==="gitea",Ie(Y,1,`flex-1 py-2 px-4 text-sm font-medium rounded-md border focus:outline-none focus:ring-2 focus:ring-blue-500 - ${e(D),u(()=>e(D)===p.APP?"bg-blue-600 text-white border-blue-600":"bg-white dark:bg-gray-700 text-gray-700 dark:text-gray-300 border-gray-300 dark:border-gray-600 hover:bg-gray-50 dark:hover:bg-gray-600")??""} - ${e(x)==="gitea"?"opacity-50 cursor-not-allowed":"cursor-pointer"}`),xe.disabled=c,Ie(xe,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors - ${v??""}`)},[()=>(e(r),u(()=>(e(r).endpoint&&xt(e(r).endpoint),""))),()=>u(()=>!Oe()),()=>u(()=>Oe()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),f("click",y,P),f("click",h,P),N(I,()=>e(r).name,d=>_(r,e(r).name=d)),N(V,()=>e(r).description,d=>_(r,e(r).description=d)),Yt(B,()=>e(r).endpoint,d=>_(r,e(r).endpoint=d)),f("click",R,()=>Re(p.PAT)),f("click",Y,()=>Re(p.APP)),f("click",ne,P),f("submit",A,rt(yt)),$(t,s)};G(Xe,t=>{e(Pe)&&t($t)})}var Ze=a(Xe,2);{var Et=t=>{var s=vr(),y=i(s),m=a(y,2),l=i(m),h=i(l),A=a(i(h),2),q=i(A);o(A),o(h);var F=a(h,2);o(l);var I=a(l,2),k=i(I),V=a(i(k),2);L(V),o(k);var z=a(k,2),B=a(i(z),2);tt(B),o(z);var S=a(z,2),te=a(i(S),2);L(te),X(2),o(S);var 
re=a(S,2),ae=a(i(re),2),H=i(ae),ie=i(H,!0);o(H),o(ae),X(2),o(re);var R=a(re,2),Y=i(R),oe=i(Y);L(oe),X(2),o(Y),X(2),o(R);var pe=a(R,2);{var j=w=>{var ne=yr(),xe=Fe(ne);{var d=C=>{var T=gr(),O=a(i(T),2);L(O),o(T),N(O,()=>e(r).oauth2_token,de=>_(r,e(r).oauth2_token=de)),$(C,T)};G(xe,C=>{e(g),u(()=>e(g)["auth-type"]===p.PAT)&&C(d)})}var c=a(xe,2);{var v=C=>{var T=br(),O=Fe(T),de=a(i(O),2);L(de),o(O);var ge=a(O,2),_e=a(i(ge),2);L(_e),o(ge);var he=a(ge,2),ke=a(i(he),2),Me=i(ke),be=a(Me,2),et=a(i(be),2),Ft=i(et);X(),o(et),X(2),o(be),o(ke),o(he),N(de,()=>e(r).app_id,Se=>_(r,e(r).app_id=Se)),N(_e,()=>e(r).installation_id,Se=>_(r,e(r).installation_id=Se)),f("change",Me,Ye),f("click",Ft,()=>document.getElementById("edit_private_key")?.click()),$(C,T)};G(c,C=>{e(g),u(()=>e(g)["auth-type"]===p.APP)&&C(v)})}$(w,ne)};G(pe,w=>{e(ee)&&w(j)})}var Ee=a(pe,2),me=i(Ee),De=a(me,2);o(Ee),o(I),o(m),o(s),se((w,ne)=>{W(q,`Update credentials for ${e(g),u(()=>e(g)?.name||"Unknown")??""}`),Ot(te,(e(r),u(()=>e(r).endpoint))),W(ie,(e(g),u(()=>(e(g)?.["auth-type"]||p.PAT)===p.PAT?"Personal Access Token (PAT)":"GitHub App"))),De.disabled=w,Ie(De,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors - ${ne??""}`)},[()=>u(()=>!Je()),()=>u(()=>Je()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),f("click",y,P),f("click",F,P),N(V,()=>e(r).name,w=>_(r,e(r).name=w)),N(B,()=>e(r).description,w=>_(r,e(r).description=w)),Jt(oe,()=>e(ee),w=>n(ee,w)),f("click",me,P),f("submit",I,rt(vt)),$(t,s)};G(Ze,t=>{e(Ae)&&e(g)&&t(Et)})}var Dt=a(Ze,2);{var Mt=t=>{var s=fr(),y=i(s),m=a(y,2),l=i(m),h=i(l),A=a(i(h),2),q=a(i(A),2),F=i(q);o(q),o(A),o(h),o(l);var I=a(l,2),k=i(I),V=a(k,2);o(I),o(m),o(s),se(()=>W(F,`Are you sure you want to delete the credentials "${e(M),u(()=>e(M)?.name||"Unknown")??""}"? This action cannot be undone.`)),f("click",y,P),f("click",k,P),f("click",V,ft),$(t,s)};G(Dt,t=>{e(Te)&&e(M)&&t(Mt)})}$(nt,Qe),zt(),lt()}export{Ur as component}; diff --git a/webapp/assets/_app/immutable/nodes/3.BWxN3TuB.js b/webapp/assets/_app/immutable/nodes/3.BWxN3TuB.js new file mode 100644 index 00000000..f96b2bd9 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/3.BWxN3TuB.js @@ -0,0 +1,7 @@ +import"../chunks/DsnmJJEf.js";import{i as jt}from"../chunks/B3Pzt0F_.js";import{p as Gt,g as e,o as Ut,l as Q,a as qt,f as E,e as f,h as zt,b as Fe,c as $,d as Bt,$ as St,m as y,s as n,i as Lt,j as o,k as a,r as i,u,n as be,q as Le,t as se,v as W,w as rt,x as Nt,y as _,z as X}from"../chunks/D8EpLgQ1.js";import{i as G,s as Kt,a as Vt}from"../chunks/5WA7h8uK.js";import{e as Ht,i as Rt}from"../chunks/u94nIB4-.js";import{h as Yt,r as L,s as je,b as Ot,a as Jt,g as ve}from"../chunks/CiE1LlKV.js";import{b as N,a as Qt}from"../chunks/C6k1Q4We.js";import{p as at}from"../chunks/D4Caz1gY.js";import{P as Wt}from"../chunks/CO4LUyTP.js";import{F as Xt}from"../chunks/CNMHKIIK.js";import{D as Zt,A as ot,G as it,a as er}from"../chunks/BrNfsPe8.js";import{e as tr,a as Ne}from"../chunks/wyaP0EDu.js";import{t as ke}from"../chunks/BEkVdVE1.js";import{f as rr,p as ar,g as Ke,B as nt,c as or}from"../chunks/BGVHQGl-.js";import{e as Ve}from"../chunks/BZiHL9L3.js";import"../chunks/CRhkqW2i.js";import{E as ir}from"../chunks/CGpPw4EW.js";import{S as nr}from"../chunks/MCv1Wq2q.js";var dr=E('

                '),sr=E(""),lr=E('

                '),cr=E('

                Gitea only supports PAT authentication

                '),ur=E('
                '),pr=E('

                or drag and drop

                PEM, KEY files only

                ',1),gr=E(''),yr=E('
                '),br=E('

                or drag and drop

                PEM, KEY files only. Upload new private key.

                ',1),vr=E(" ",1),fr=E(''),mr=E(''),xr=E('
                ',1);function zr(dt,st){Gt(st,!1);const[lt,ct]=Kt(),U=()=>Vt(tr,"$eagerCache",lt),Ge=y(),Z=y(),He=y(),Ue=y(),p={PAT:"pat",APP:"app"};let we=y(!0),le=y([]),J=y([]),Ce=y(""),Pe=y(""),K=y(1),ce=y(25),ue=y(1),Ae=y(!1),Te=y(!1),$e=y(!1),D=y(p.PAT),g=y(null),M=y(null),t=y({name:"",description:"",endpoint:"",auth_type:p.PAT,oauth2_token:"",app_id:"",installation_id:"",private_key_bytes:""}),Ee={...e(t)},ee=y(!1);function ut(r){r.key==="Escape"&&(e(Ae)||e(Te)||e($e))&&P()}Ut(async()=>{try{n(we,!0);const[r,d]=await Promise.all([Ne.getCredentials(),Ne.getEndpoints()]);r&&Array.isArray(r)&&n(le,r),d&&Array.isArray(d)&&n(J,d)}catch(r){console.error("Failed to load credentials:",r),n(Ce,r instanceof Error?r.message:"Failed to load credentials")}finally{n(we,!1)}});async function pt(){try{await Ne.retryResource("credentials")}catch(r){console.error("Retry failed:",r)}}async function gt(){Re(),n(Ae,!0),n(x,"github")}let x=y("");function yt(r){n(x,r.detail),_(t,e(t).auth_type=p.PAT),n(D,p.PAT)}async function qe(r){n(g,r),n(t,{name:r.name||"",description:r.description||"",endpoint:r.endpoint?.name||"",auth_type:r["auth-type"]||p.PAT,oauth2_token:"",app_id:"",installation_id:"",private_key_bytes:""}),n(D,r["auth-type"]||p.PAT),Ee={...e(t)},n(ee,!1),n(Te,!0)}function ze(r){n(M,r),n($e,!0)}function Re(){n(t,{name:"",description:"",endpoint:"",auth_type:p.PAT,oauth2_token:"",app_id:"",installation_id:"",private_key_bytes:""}),Ee={...e(t)},n(D,p.PAT),n(ee,!1)}function P(){n(Ae,!1),n(Te,!1),n($e,!1),n(g,null),n(M,null),n(x,""),Re()}function Ye(r){n(D,r),_(t,e(t).auth_type=r)}function bt(){const r={};if(e(t).name!==Ee.name&&e(t).name.trim()!==""&&(r.name=e(t).name.trim()),e(t).description!==Ee.description&&e(t).description.trim()!==""&&(r.description=e(t).description.trim()),e(ee)&&e(g))if(e(g)["auth-type"]===p.PAT)e(t).oauth2_token.trim()!==""&&(r.pat={oauth2_token:e(t).oauth2_token.trim()});else{const d={};let b=!1;if(e(t).app_id.trim()!==""&&(d.app_id=parseInt(e(t).app_id.trim()),b=!0),e(t).installation_id.trim()!==""&&(d.installation_id=parseInt(e(t).installation_id.trim()),b=!0),e(t).private_key_bytes!=="")try{const m=atob(e(t).private_key_bytes);d.private_key_bytes=Array.from(m,l=>l.charCodeAt(0)),b=!0}catch{}b&&(r.app=d)}return r}async function vt(){try{if(e(x)==="github")await ve.createGithubCredentials(e(t));else if(e(x)==="gitea")await ve.createGiteaCredentials(e(t));else throw new Error("Please select a forge type");ke.success("Credentials Created",`Credentials ${e(t).name} have been created successfully.`),P()}catch(r){n(Ce,Ve(r))}}async function ft(){if(!(!e(g)||!e(g).id))try{const r=bt();if(Object.keys(r).length===0){ke.info("No Changes","No fields were modified."),P();return}e(g).forge_type==="github"?await ve.updateGithubCredentials(e(g).id,r):await ve.updateGiteaCredentials(e(g).id,r),ke.success("Credentials Updated",`Credentials ${e(g)?.name||"Unknown"} have been updated successfully.`),P()}catch(r){n(Ce,Ve(r))}}async function mt(){if(!(!e(M)||!e(M).id))try{e(M).forge_type==="github"?await ve.deleteGithubCredentials(e(M).id):await ve.deleteGiteaCredentials(e(M).id),ke.success("Credentials Deleted",`Credentials ${e(M)?.name||"Unknown"} have been deleted successfully.`)}catch(r){const d=Ve(r);ke.error("Delete Failed",d)}finally{P()}}function Oe(r){const b=r.target.files?.[0];if(!b){_(t,e(t).private_key_bytes="");return}const m=new FileReader;m.onload=l=>{const h=l.target?.result;_(t,e(t).private_key_bytes=btoa(h))},m.readAsText(b)}function 
Je(){return!e(t).name||!e(t).description||!e(t).endpoint?!1:e(t).auth_type===p.PAT?!!e(t).oauth2_token:!!e(t).app_id&&!!e(t).installation_id&&!!e(t).private_key_bytes}function Qe(){return!e(t).name.trim()||!e(t).description.trim()?!1:e(ee)&&e(g)?e(g)["auth-type"]===p.PAT?!!e(t).oauth2_token.trim():!!e(t).app_id.trim()&&!!e(t).installation_id.trim()&&!!e(t).private_key_bytes:!0}function xt(r){return e(J).find(b=>b.name===r)?.endpoint_type||""}function _t(r){return xt(r)==="gitea"}const ht=[{key:"name",title:"Name",cellComponent:it,cellProps:{field:"name"}},{key:"description",title:"Description",cellComponent:it,cellProps:{field:"description",type:"description"}},{key:"endpoint",title:"Endpoint",cellComponent:ir},{key:"auth_type",title:"Auth Type",cellComponent:nr,cellProps:{statusType:"custom",statusField:"auth-type"}},{key:"actions",title:"Actions",align:"right",cellComponent:er}],kt={entityType:"credential",primaryText:{field:"name",isClickable:!1},secondaryText:{field:"description"},customInfo:[{icon:r=>Ke(r?.forge_type||"unknown"),text:r=>r?.endpoint?.name||"Unknown"}],badges:[{type:"auth",field:"auth-type"}],actions:[{type:"edit",handler:r=>qe(r)},{type:"delete",handler:r=>ze(r)}]};function wt(r){n(Pe,r.detail.term),n(K,1)}function Ct(r){n(K,r.detail.page)}function Pt(r){const d=or(r.detail.perPage);n(ce,d.newPerPage),n(K,d.newCurrentPage)}function At(r){qe(r.detail.item)}function Tt(r){ze(r.detail.item)}Q(()=>(e(le),U()),()=>{(!e(le).length||U().loaded.credentials)&&n(le,U().credentials)}),Q(()=>U(),()=>{n(we,U().loading.credentials)}),Q(()=>U(),()=>{n(Ge,U().errorMessages.credentials)}),Q(()=>(e(J),U()),()=>{(!e(J).length||U().loaded.endpoints)&&n(J,U().endpoints)}),Q(()=>(e(le),e(Pe)),()=>{n(Z,rr(e(le),e(Pe)))}),Q(()=>(e(ue),e(Z),e(ce),e(K)),()=>{n(ue,Math.ceil(e(Z).length/e(ce))),e(K)>e(ue)&&e(ue)>0&&n(K,e(ue))}),Q(()=>(e(Z),e(K),e(ce)),()=>{n(He,ar(e(Z),e(K),e(ce)))}),Q(()=>(e(x),e(J)),()=>{n(Ue,e(x)?e(J).filter(r=>r.endpoint_type===e(x)):e(J))}),qt(),jt();var We=xr();f("keydown",Lt,ut),zt(r=>{St.title="Credentials - GARM"});var Be=Fe(We),Xe=o(Be);Wt(Xe,{title:"Credentials",description:"Manage authentication credentials for your GitHub and Gitea endpoints.",actionLabel:"Add Credentials",$$events:{action:gt}});var $t=a(Xe,2);{let r=Le(()=>e(Ge)||e(Ce)),d=Le(()=>!!e(Ge));Zt($t,{get columns(){return ht},get data(){return e(He)},get loading(){return e(we)},get error(){return e(r)},get searchTerm(){return e(Pe)},searchPlaceholder:"Search credentials by name, description, or endpoint...",get currentPage(){return e(K)},get perPage(){return e(ce)},get totalPages(){return e(ue)},get totalItems(){return e(Z),u(()=>e(Z).length)},itemName:"credentials",emptyIconType:"key",get showRetry(){return e(d)},get mobileCardConfig(){return kt},$$events:{search:wt,pageChange:Ct,perPageChange:Pt,retry:pt,edit:At,delete:Tt},$$slots:{"mobile-card":(b,m)=>{const l=Le(()=>m.item);var h=dr(),A=o(h),q=o(A),I=o(q),F=o(I,!0);i(I);var k=a(I,2),V=o(k,!0);i(k);var z=a(k,2),B=o(z),S=o(B);Yt(S,()=>(be(Ke),be(e(l)),u(()=>Ke(e(l).forge_type||"unknown"))));var te=a(S,2),re=o(te,!0);i(te),i(B),i(z),i(q),i(A);var ae=a(A,2),H=o(ae);{var oe=j=>{nt(j,{variant:"success",text:"PAT"})},R=j=>{nt(j,{variant:"info",text:"App"})};G(H,j=>{be(e(l)),u(()=>(e(l)["auth-type"]||"pat")==="pat")?j(oe):j(R,!1)})}var Y=a(H,2),ie=o(Y);ot(ie,{action:"edit",size:"sm",title:"Edit credentials",ariaLabel:"Edit credentials",$$events:{click:()=>qe(e(l))}});var pe=a(ie,2);ot(pe,{action:"delete",size:"sm",title:"Delete credentials",ariaLabel:"Delete 
credentials",$$events:{click:()=>ze(e(l))}}),i(Y),i(ae),i(h),se(()=>{W(F,(be(e(l)),u(()=>e(l).name))),W(V,(be(e(l)),u(()=>e(l).description))),W(re,(be(e(l)),u(()=>e(l).endpoint?.name||"Unknown")))}),$(b,h)}}})}i(Be);var Ze=a(Be,2);{var Et=r=>{var d=gr(),b=o(d),m=a(b,2),l=o(m),h=a(o(l),2);i(l);var A=a(l,2),q=o(A);Xt(q,{get selectedForgeType(){return e(x)},set selectedForgeType(s){n(x,s)},$$events:{select:yt},$$legacy:!0});var I=a(q,2),F=a(o(I),2);L(F),i(I);var k=a(I,2),V=a(o(k),2);rt(V),i(k);var z=a(k,2),B=a(o(z),2);se(()=>{e(t),Nt(()=>{e(Ue)})});var S=o(B);S.value=S.__value="";var te=a(S);Ht(te,1,()=>e(Ue),Rt,(s,c)=>{var v=sr(),C=o(v);i(v);var T={};se(()=>{W(C,`${e(c),u(()=>e(c).name)??""} (${e(c),u(()=>e(c).endpoint_type)??""})`),T!==(T=(e(c),u(()=>e(c).name)))&&(v.value=(v.__value=(e(c),u(()=>e(c).name)))??"")}),$(s,v)}),i(B);var re=a(B,2);{var ae=s=>{var c=lr(),v=o(c);i(c),se(()=>W(v,`Showing only ${e(x)??""} endpoints`)),$(s,c)};G(re,s=>{e(x)&&s(ae)})}i(z);var H=a(z,2),oe=a(o(H),2),R=o(oe),Y=a(R,2);i(oe);var ie=a(oe,2);{var pe=s=>{var c=cr();$(s,c)};G(ie,s=>{e(x)==="gitea"&&s(pe)})}i(H);var j=a(H,2);{var De=s=>{var c=ur(),v=a(o(c),2);L(v),i(c),N(v,()=>e(t).oauth2_token,C=>_(t,e(t).oauth2_token=C)),$(s,c)};G(j,s=>{e(D),u(()=>e(D)===p.PAT)&&s(De)})}var fe=a(j,2);{var Me=s=>{var c=pr(),v=Fe(c),C=a(o(v),2);L(C),i(v);var T=a(v,2),O=a(o(T),2);L(O),i(T);var de=a(T,2),ge=a(o(de),2),xe=o(ge),_e=a(xe,2),he=a(o(_e),2),Ie=o(he);X(),i(he),X(2),i(_e),i(ge),i(de),N(C,()=>e(t).app_id,ye=>_(t,e(t).app_id=ye)),N(O,()=>e(t).installation_id,ye=>_(t,e(t).installation_id=ye)),f("change",xe,Oe),f("click",Ie,()=>document.getElementById("private_key")?.click()),$(s,c)};G(fe,s=>{e(D),u(()=>e(D)===p.APP)&&s(Me)})}var w=a(fe,2),ne=o(w),me=a(ne,2);i(w),i(A),i(m),i(d),se((s,c,v)=>{je(R,1,`flex-1 py-2 px-4 text-sm font-medium rounded-md border focus:outline-none focus:ring-2 focus:ring-blue-500 cursor-pointer + ${e(D),u(()=>e(D)===p.PAT?"bg-blue-600 text-white border-blue-600":"bg-white dark:bg-gray-700 text-gray-700 dark:text-gray-300 border-gray-300 dark:border-gray-600 hover:bg-gray-50 dark:hover:bg-gray-600")??""} + ${s??""}`),Y.disabled=e(x)==="gitea",je(Y,1,`flex-1 py-2 px-4 text-sm font-medium rounded-md border focus:outline-none focus:ring-2 focus:ring-blue-500 + ${e(D),u(()=>e(D)===p.APP?"bg-blue-600 text-white border-blue-600":"bg-white dark:bg-gray-700 text-gray-700 dark:text-gray-300 border-gray-300 dark:border-gray-600 hover:bg-gray-50 dark:hover:bg-gray-600")??""} + ${e(x)==="gitea"?"opacity-50 cursor-not-allowed":"cursor-pointer"}`),me.disabled=c,je(me,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors + ${v??""}`)},[()=>(e(t),u(()=>(e(t).endpoint&&_t(e(t).endpoint),""))),()=>u(()=>!Je()),()=>u(()=>Je()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),f("click",b,P),f("click",h,P),N(F,()=>e(t).name,s=>_(t,e(t).name=s)),N(V,()=>e(t).description,s=>_(t,e(t).description=s)),Ot(B,()=>e(t).endpoint,s=>_(t,e(t).endpoint=s)),f("click",R,()=>Ye(p.PAT)),f("click",Y,()=>Ye(p.APP)),f("click",ne,P),f("submit",A,at(vt)),$(r,d)};G(Ze,r=>{e(Ae)&&r(Et)})}var et=a(Ze,2);{var Dt=r=>{var d=fr(),b=o(d),m=a(b,2),l=o(m),h=o(l),A=a(o(h),2),q=o(A);i(A),i(h);var I=a(h,2);i(l);var F=a(l,2),k=o(F),V=a(o(k),2);L(V),i(k);var z=a(k,2),B=a(o(z),2);rt(B),i(z);var S=a(z,2),te=a(o(S),2);L(te),X(2),i(S);var re=a(S,2),ae=a(o(re),2),H=o(ae),oe=o(H,!0);i(H),i(ae),X(2),i(re);var 
R=a(re,2),Y=o(R),ie=o(Y);L(ie),X(2),i(Y),X(2),i(R);var pe=a(R,2);{var j=w=>{var ne=vr(),me=Fe(ne);{var s=C=>{var T=yr(),O=a(o(T),2);L(O),i(T),N(O,()=>e(t).oauth2_token,de=>_(t,e(t).oauth2_token=de)),$(C,T)};G(me,C=>{e(g),u(()=>e(g)["auth-type"]===p.PAT)&&C(s)})}var c=a(me,2);{var v=C=>{var T=br(),O=Fe(T),de=a(o(O),2);L(de),i(O);var ge=a(O,2),xe=a(o(ge),2);L(xe),i(ge);var _e=a(ge,2),he=a(o(_e),2),Ie=o(he),ye=a(Ie,2),tt=a(o(ye),2),Ft=o(tt);X(),i(tt),X(2),i(ye),i(he),i(_e),N(de,()=>e(t).app_id,Se=>_(t,e(t).app_id=Se)),N(xe,()=>e(t).installation_id,Se=>_(t,e(t).installation_id=Se)),f("change",Ie,Oe),f("click",Ft,()=>document.getElementById("edit_private_key")?.click()),$(C,T)};G(c,C=>{e(g),u(()=>e(g)["auth-type"]===p.APP)&&C(v)})}$(w,ne)};G(pe,w=>{e(ee)&&w(j)})}var De=a(pe,2),fe=o(De),Me=a(fe,2);i(De),i(F),i(m),i(d),se((w,ne)=>{W(q,`Update credentials for ${e(g),u(()=>e(g)?.name||"Unknown")??""}`),Jt(te,(e(t),u(()=>e(t).endpoint))),W(oe,(e(g),u(()=>(e(g)?.["auth-type"]||p.PAT)===p.PAT?"Personal Access Token (PAT)":"GitHub App"))),Me.disabled=w,je(Me,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors + ${ne??""}`)},[()=>u(()=>!Qe()),()=>u(()=>Qe()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),f("click",b,P),f("click",I,P),N(V,()=>e(t).name,w=>_(t,e(t).name=w)),N(B,()=>e(t).description,w=>_(t,e(t).description=w)),Qt(ie,()=>e(ee),w=>n(ee,w)),f("click",fe,P),f("submit",F,at(ft)),$(r,d)};G(et,r=>{e(Te)&&e(g)&&r(Dt)})}var Mt=a(et,2);{var It=r=>{var d=mr(),b=o(d),m=a(b,2),l=o(m),h=o(l),A=a(o(h),2),q=a(o(A),2),I=o(q);i(q),i(A),i(h),i(l);var F=a(l,2),k=o(F),V=a(k,2);i(F),i(m),i(d),se(()=>W(I,`Are you sure you want to delete the credentials "${e(M),u(()=>e(M)?.name||"Unknown")??""}"? This action cannot be undone.`)),f("click",b,P),f("click",k,P),f("click",V,mt),$(r,d)};G(Mt,r=>{e($e)&&e(M)&&r(It)})}$(dt,We),Bt(),ct()}export{zr as component}; diff --git a/webapp/assets/_app/immutable/nodes/4.D1IF4qSs.js b/webapp/assets/_app/immutable/nodes/4.D1IF4qSs.js new file mode 100644 index 00000000..ce0d8aa9 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/4.D1IF4qSs.js @@ -0,0 +1,3 @@ +import"../chunks/DsnmJJEf.js";import{i as bt}from"../chunks/B3Pzt0F_.js";import{p as gt,g as e,o as ft,l as Z,a as _t,f as j,e as b,h as mt,b as Ce,c as I,d as yt,m as p,i as vt,$ as xt,j as o,q as Ee,k as a,t as ye,u as C,s,r as i,n as ie,v as se,w as Ie,z as V,y as g}from"../chunks/D8EpLgQ1.js";import{i as ne,s as ht,a as kt}from"../chunks/5WA7h8uK.js";import{h as wt,r as U,c as je,s as Ge,g as ee}from"../chunks/CiE1LlKV.js";import{b as E}from"../chunks/C6k1Q4We.js";import{p as ze}from"../chunks/D4Caz1gY.js";import{P as Ct}from"../chunks/CO4LUyTP.js";import{F as Et}from"../chunks/CNMHKIIK.js";import{D as Pt,A as qe,G as Pe,a as $t}from"../chunks/BrNfsPe8.js";import{e as At,a as He}from"../chunks/wyaP0EDu.js";import{t as de}from"../chunks/BEkVdVE1.js";import{g as $e,c as Rt,a as Bt,p as Ut}from"../chunks/BGVHQGl-.js";import{e as Ae}from"../chunks/BZiHL9L3.js";import"../chunks/CRhkqW2i.js";import{E as Lt}from"../chunks/CGpPw4EW.js";var Mt=j('

                '),Tt=j('
                ',1),Dt=j('

                If empty, Base URL will be used as API Base URL

                '),Ft=j(''),It=j('
                ',1),jt=j('

                If empty, Base URL will be used as API Base URL

                '),Gt=j(''),zt=j(''),qt=j('
                ',1);function or(Ne,Se){gt(Se,!1);const[Ve,Oe]=ht(),q=()=>kt(At,"$eagerCache",Ve),ve=p(),H=p(),Re=p();let le=p(!0),O=p([]),ue=p(""),ce=p(""),L=p(1),K=p(25),J=p(1),pe=p(!1),be=p(!1),ge=p(!1),G=p("github"),m=p(null),A=p(null),r=p({name:"",description:"",endpoint_type:"",base_url:"",api_base_url:"",upload_base_url:"",ca_cert_bundle:""}),k={...e(r)};ft(async()=>{try{s(le,!0);const t=await He.getEndpoints();t&&Array.isArray(t)&&s(O,t)}catch(t){console.error("Failed to load endpoints:",t),s(ue,t instanceof Error?t.message:"Failed to load endpoints")}finally{s(le,!1)}});async function Ke(){try{await He.retryResource("endpoints")}catch(t){console.error("Retry failed:",t)}}const Je=[{key:"name",title:"Name",cellComponent:Pe,cellProps:{field:"name"}},{key:"description",title:"Description",cellComponent:Pe,cellProps:{field:"description"}},{key:"api_url",title:"API URL",cellComponent:Pe,cellProps:{field:"api_base_url",fallbackField:"base_url"}},{key:"forge_type",title:"Forge Type",cellComponent:Lt},{key:"actions",title:"Actions",align:"right",cellComponent:$t}],Qe={entityType:"endpoint",primaryText:{field:"name",isClickable:!1},secondaryText:{field:"description"},customInfo:[{icon:t=>$e(t?.endpoint_type||"unknown"),text:t=>t?.api_base_url||"Unknown"}],actions:[{type:"edit",handler:t=>xe(t)},{type:"delete",handler:t=>he(t)}]};function We(t){s(ce,t.detail.term),s(L,1)}function Xe(t){s(L,t.detail.page)}function Ye(t){const n=Rt(t.detail.perPage);s(K,n.newPerPage),s(L,n.newCurrentPage)}function Ze(t){xe(t.detail.item)}function et(t){he(t.detail.item)}function tt(){s(G,"github"),Be(),s(pe,!0)}function rt(t){s(G,t.detail),g(r,e(r).endpoint_type=t.detail)}function xe(t){s(m,t),s(r,{name:t.name||"",description:t.description||"",endpoint_type:t.endpoint_type||"",base_url:t.base_url||"",api_base_url:t.api_base_url||"",upload_base_url:t.upload_base_url||"",ca_cert_bundle:typeof t.ca_cert_bundle=="string"?t.ca_cert_bundle:""}),k={...e(r)},s(be,!0)}function he(t){s(A,t),s(ge,!0)}function Be(){s(r,{name:"",description:"",endpoint_type:"",base_url:"",api_base_url:"",upload_base_url:"",ca_cert_bundle:""}),k={...e(r)}}function at(t){t.key==="Escape"&&(e(pe)||e(be)||e(ge))&&y()}function y(){s(pe,!1),s(be,!1),s(ge,!1),s(G,"github"),s(m,null),s(A,null),Be()}function ot(){const t={};if(e(r).description!==k.description&&(e(r).description.trim()!==""||k.description!=="")&&(t.description=e(r).description.trim()),e(r).base_url!==k.base_url&&e(r).base_url.trim()!==""&&(t.base_url=e(r).base_url.trim()),e(r).api_base_url!==k.api_base_url&&(e(r).api_base_url.trim()!==""||k.api_base_url!=="")&&(t.api_base_url=e(r).api_base_url.trim()),e(m)?.endpoint_type==="github"&&e(r).upload_base_url!==k.upload_base_url&&(e(r).upload_base_url.trim()!==""||k.upload_base_url!=="")&&(t.upload_base_url=e(r).upload_base_url.trim()),e(r).ca_cert_bundle!==k.ca_cert_bundle)if(e(r).ca_cert_bundle!=="")try{const n=atob(e(r).ca_cert_bundle);t.ca_cert_bundle=Array.from(n,c=>c.charCodeAt(0))}catch{k.ca_cert_bundle!==""&&(t.ca_cert_bundle=[])}else k.ca_cert_bundle!==""&&(t.ca_cert_bundle=[]);return t}async function it(){try{const t={name:e(r).name,description:e(r).description,endpoint_type:e(r).endpoint_type,base_url:e(r).base_url,api_base_url:e(r).api_base_url,upload_base_url:e(r).upload_base_url};if(e(r).ca_cert_bundle&&e(r).ca_cert_bundle.trim()!=="")try{const n=atob(e(r).ca_cert_bundle);t.ca_cert_bundle=Array.from(n,c=>c.charCodeAt(0))}catch{}e(r).endpoint_type==="github"?await ee.createGithubEndpoint(t):await 
ee.createGiteaEndpoint(t),de.success("Endpoint Created",`Endpoint ${e(r).name} has been created successfully.`),y()}catch(t){s(ue,Ae(t))}}async function st(){if(e(m))try{const t=ot();if(Object.keys(t).length===0){de.info("No Changes","No fields were modified."),y();return}e(m).endpoint_type==="github"?await ee.updateGithubEndpoint(e(m).name,t):await ee.updateGiteaEndpoint(e(m).name,t),de.success("Endpoint Updated",`Endpoint ${e(m).name} has been updated successfully.`),y()}catch(t){s(ue,Ae(t))}}async function nt(){if(e(A)){try{e(A).endpoint_type==="github"?await ee.deleteGithubEndpoint(e(A).name):await ee.deleteGiteaEndpoint(e(A).name),de.success("Endpoint Deleted",`Endpoint ${e(A).name} has been deleted successfully.`)}catch(t){const n=Ae(t);de.error("Delete Failed",n)}y()}}function Ue(t){const c=t.target.files?.[0];if(!c){g(r,e(r).ca_cert_bundle="");return}const v=new FileReader;v.onload=d=>{const f=d.target?.result;g(r,e(r).ca_cert_bundle=btoa(f))},v.readAsText(c)}function fe(){return!(!e(r).name||!e(r).description||!e(r).base_url||e(r).endpoint_type==="github"&&!e(r).api_base_url)}Z(()=>(e(O),q()),()=>{(!e(O).length||q().loaded.endpoints)&&s(O,q().endpoints)}),Z(()=>q(),()=>{s(le,q().loading.endpoints)}),Z(()=>q(),()=>{s(ve,q().errorMessages.endpoints)}),Z(()=>(e(O),e(ce)),()=>{s(H,Bt(e(O),e(ce)))}),Z(()=>(e(J),e(H),e(K),e(L)),()=>{s(J,Math.ceil(e(H).length/e(K))),e(L)>e(J)&&e(J)>0&&s(L,e(J))}),Z(()=>(e(H),e(L),e(K)),()=>{s(Re,Ut(e(H),e(L),e(K)))}),_t(),bt();var Le=qt();b("keydown",vt,at),mt(t=>{xt.title="Endpoints - GARM"});var ke=Ce(Le),Me=o(ke);Ct(Me,{title:"Endpoints",description:"Manage your GitHub and Gitea endpoints for runner management.",actionLabel:"Add Endpoint",$$events:{action:tt}});var dt=a(Me,2);{let t=Ee(()=>e(ve)||e(ue)),n=Ee(()=>!!e(ve));Pt(dt,{get columns(){return Je},get data(){return e(Re)},get loading(){return e(le)},get error(){return e(t)},get searchTerm(){return e(ce)},searchPlaceholder:"Search endpoints by name, description, or URL...",get currentPage(){return e(L)},get perPage(){return e(K)},get totalPages(){return e(J)},get totalItems(){return e(H),C(()=>e(H).length)},itemName:"endpoints",emptyIconType:"settings",get showRetry(){return e(n)},get mobileCardConfig(){return Qe},$$events:{search:We,pageChange:Xe,perPageChange:Ye,retry:Ke,edit:Ze,delete:et},$$slots:{"mobile-card":(c,v)=>{const d=Ee(()=>v.item);var f=Mt(),x=o(f),R=o(x),P=o(R),w=o(P,!0);i(P);var _=a(P,2),M=o(_,!0);i(_);var B=a(_,2),T=o(B);wt(T,()=>(ie($e),ie(e(d)),C(()=>$e(e(d).endpoint_type||"","w-5 h-5"))));var D=a(T,2),Q=o(D,!0);i(D),i(B),i(R),i(x);var N=a(x,2),z=o(N);qe(z,{action:"edit",size:"sm",title:"Edit endpoint",ariaLabel:"Edit endpoint",$$events:{click:()=>xe(e(d))}});var W=a(z,2);qe(W,{action:"delete",size:"sm",title:"Delete endpoint",ariaLabel:"Delete endpoint",$$events:{click:()=>he(e(d))}}),i(N),i(f),ye(()=>{se(w,(ie(e(d)),C(()=>e(d).name))),se(M,(ie(e(d)),C(()=>e(d).description))),se(Q,(ie(e(d)),C(()=>e(d).endpoint_type)))}),I(c,f)}}})}i(ke);var Te=a(ke,2);{var lt=t=>{var n=Ft(),c=o(n),v=a(c,2),d=o(v),f=a(o(d),2);i(d);var x=a(d,2),R=o(x);Et(R,{get selectedForgeType(){return e(G)},set selectedForgeType(u){s(G,u)},$$events:{select:rt},$$legacy:!0});var P=a(R,2),w=a(o(P),2);U(w),i(P);var _=a(P,2),M=a(o(_),2);Ie(M),i(_);var B=a(_,2),T=a(o(B),2);U(T),i(B);var D=a(B,2);{var Q=u=>{var $=Tt(),l=Ce($),h=a(o(l),2);U(h),i(l);var F=a(l,2),S=a(o(F),2);U(S),i(F),E(h,()=>e(r).api_base_url,Y=>g(r,e(r).api_base_url=Y)),E(S,()=>e(r).upload_base_url,Y=>g(r,e(r).upload_base_url=Y)),I(u,$)},N=u=>{var 
$=Dt(),l=a(o($),2);U(l),V(2),i($),E(l,()=>e(r).api_base_url,h=>g(r,e(r).api_base_url=h)),I(u,$)};ne(D,u=>{e(G)==="github"?u(Q):u(N,!1)})}var z=a(D,2),W=a(o(z),2),X=o(W),te=a(X,2),re=a(o(te),2),_e=o(re);V(),i(re),V(2),i(te),i(W),i(z);var ae=a(z,2),me=o(ae),oe=a(me,2);i(ae),i(x),i(v),i(n),ye((u,$)=>{je(w,"placeholder",e(G)==="github"?"e.g., github-enterprise or github-com":"e.g., gitea-main or my-gitea"),je(T,"placeholder",e(G)==="github"?"https://github.com or https://github.example.com":"https://gitea.example.com"),oe.disabled=u,Ge(oe,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors + ${$??""}`)},[()=>C(()=>!fe()),()=>C(()=>fe()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),b("click",c,y),b("click",f,y),E(w,()=>e(r).name,u=>g(r,e(r).name=u)),E(M,()=>e(r).description,u=>g(r,e(r).description=u)),E(T,()=>e(r).base_url,u=>g(r,e(r).base_url=u)),b("change",X,Ue),b("click",_e,()=>document.getElementById("ca_cert_file")?.click()),b("click",me,y),b("submit",x,ze(it)),I(t,n)};ne(Te,t=>{e(pe)&&t(lt)})}var De=a(Te,2);{var ut=t=>{var n=Gt(),c=o(n),v=a(c,2),d=o(v),f=o(d),x=o(f),R=o(x);i(x),V(2),i(f);var P=a(f,2);i(d);var w=a(d,2),_=o(w),M=a(o(_),2);U(M),i(_);var B=a(_,2),T=a(o(B),2);Ie(T),i(B);var D=a(B,2),Q=a(o(D),2);U(Q),i(D);var N=a(D,2);{var z=l=>{var h=It(),F=Ce(h),S=a(o(F),2);U(S),i(F);var Y=a(F,2),Fe=a(o(Y),2);U(Fe),i(Y),E(S,()=>e(r).api_base_url,we=>g(r,e(r).api_base_url=we)),E(Fe,()=>e(r).upload_base_url,we=>g(r,e(r).upload_base_url=we)),I(l,h)},W=l=>{var h=jt(),F=a(o(h),2);U(F),V(2),i(h),E(F,()=>e(r).api_base_url,S=>g(r,e(r).api_base_url=S)),I(l,h)};ne(N,l=>{e(m),C(()=>e(m).endpoint_type==="github")?l(z):l(W,!1)})}var X=a(N,2),te=a(o(X),2),re=o(te),_e=a(re,2),ae=a(o(_e),2),me=o(ae);V(),i(ae),V(2),i(_e),i(te),i(X);var oe=a(X,2),u=o(oe),$=a(u,2);i(oe),i(w),i(v),i(n),ye((l,h)=>{se(R,`Edit ${e(m),C(()=>e(m).endpoint_type==="github"?"GitHub":"Gitea")??""} Endpoint`),$.disabled=l,Ge($,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors + ${h??""}`)},[()=>C(()=>!fe()),()=>C(()=>fe()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),b("click",c,y),b("click",P,y),E(M,()=>e(r).name,l=>g(r,e(r).name=l)),E(T,()=>e(r).description,l=>g(r,e(r).description=l)),E(Q,()=>e(r).base_url,l=>g(r,e(r).base_url=l)),b("change",re,Ue),b("click",me,()=>document.getElementById("edit_ca_cert_file")?.click()),b("click",u,y),b("submit",w,ze(st)),I(t,n)};ne(De,t=>{e(be)&&e(m)&&t(ut)})}var ct=a(De,2);{var pt=t=>{var n=zt(),c=o(n),v=a(c,2),d=o(v),f=o(d),x=a(o(f),2),R=a(o(x),2),P=o(R);i(R),i(x),i(f),i(d);var w=a(d,2),_=o(w),M=a(_,2);i(w),i(v),i(n),ye(()=>se(P,`Are you sure you want to delete the endpoint "${e(A),C(()=>e(A).name)??""}"? 
This action cannot be undone.`)),b("click",c,y),b("click",_,y),b("click",M,nt),I(t,n)};ne(ct,t=>{e(ge)&&e(A)&&t(pt)})}I(Ne,Le),yt(),Oe()}export{or as component}; diff --git a/webapp/assets/_app/immutable/nodes/4.XnVoh6ca.js b/webapp/assets/_app/immutable/nodes/4.XnVoh6ca.js deleted file mode 100644 index 36e9bd97..00000000 --- a/webapp/assets/_app/immutable/nodes/4.XnVoh6ca.js +++ /dev/null @@ -1,3 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as pt}from"../chunks/B3Pzt0F_.js";import{p as bt,g as e,o as gt,l as Z,a as ft,f as I,e as b,h as _t,b as Ee,c as j,d as mt,m as p,i as yt,$ as vt,j as o,q as Ce,k as a,t as me,u as E,s,r as i,n as se,v as ne,w as De,z as V,y as g}from"../chunks/D8EpLgQ1.js";import{i as de,s as xt,a as ht}from"../chunks/5WA7h8uK.js";import{h as kt,r as U,c as je,s as Ie,g as ee}from"../chunks/CiE1LlKV.js";import{b as C}from"../chunks/C6k1Q4We.js";import{p as Ge}from"../chunks/D4Caz1gY.js";import{P as wt}from"../chunks/CO4LUyTP.js";import{F as Et}from"../chunks/CNMHKIIK.js";import{D as Ct,A as ze,G as Pe,a as Pt}from"../chunks/C9DJVOi1.js";import{e as $t,a as qe}from"../chunks/wyaP0EDu.js";import{t as ye}from"../chunks/BEkVdVE1.js";import{g as $e,c as Rt,a as At,p as Bt}from"../chunks/BGVHQGl-.js";import"../chunks/CoIRRsD9.js";import{E as Ut}from"../chunks/CGpPw4EW.js";var Lt=I('

                '),Mt=I('
                ',1),Tt=I('

                If empty, Base URL will be used as API Base URL

                '),Ft=I(''),Dt=I('
                ',1),jt=I('

                If empty, Base URL will be used as API Base URL

                '),It=I(''),Gt=I(''),zt=I('
                ',1);function rr(He,Ne){bt(Ne,!1);const[Se,Ve]=xt(),q=()=>ht($t,"$eagerCache",Se),ve=p(),H=p(),Re=p();let le=p(!0),O=p([]),te=p(""),ue=p(""),L=p(1),K=p(25),J=p(1),ce=p(!1),pe=p(!1),be=p(!1),G=p("github"),m=p(null),R=p(null),r=p({name:"",description:"",endpoint_type:"",base_url:"",api_base_url:"",upload_base_url:"",ca_cert_bundle:""}),k={...e(r)};gt(async()=>{try{s(le,!0);const t=await qe.getEndpoints();t&&Array.isArray(t)&&s(O,t)}catch(t){console.error("Failed to load endpoints:",t),s(te,t instanceof Error?t.message:"Failed to load endpoints")}finally{s(le,!1)}});async function Oe(){try{await qe.retryResource("endpoints")}catch(t){console.error("Retry failed:",t)}}const Ke=[{key:"name",title:"Name",cellComponent:Pe,cellProps:{field:"name"}},{key:"description",title:"Description",cellComponent:Pe,cellProps:{field:"description"}},{key:"api_url",title:"API URL",cellComponent:Pe,cellProps:{field:"api_base_url",fallbackField:"base_url"}},{key:"forge_type",title:"Forge Type",cellComponent:Ut},{key:"actions",title:"Actions",align:"right",cellComponent:Pt}],Je={entityType:"endpoint",primaryText:{field:"name",isClickable:!1},secondaryText:{field:"description"},customInfo:[{icon:t=>$e(t?.endpoint_type||"unknown"),text:t=>t?.api_base_url||"Unknown"}],actions:[{type:"edit",handler:t=>xe(t)},{type:"delete",handler:t=>he(t)}]};function Qe(t){s(ue,t.detail.term),s(L,1)}function We(t){s(L,t.detail.page)}function Xe(t){const d=Rt(t.detail.perPage);s(K,d.newPerPage),s(L,d.newCurrentPage)}function Ye(t){xe(t.detail.item)}function Ze(t){he(t.detail.item)}function et(){s(G,"github"),Ae(),s(ce,!0)}function tt(t){s(G,t.detail),g(r,e(r).endpoint_type=t.detail)}function xe(t){s(m,t),s(r,{name:t.name||"",description:t.description||"",endpoint_type:t.endpoint_type||"",base_url:t.base_url||"",api_base_url:t.api_base_url||"",upload_base_url:t.upload_base_url||"",ca_cert_bundle:typeof t.ca_cert_bundle=="string"?t.ca_cert_bundle:""}),k={...e(r)},s(pe,!0)}function he(t){s(R,t),s(be,!0)}function Ae(){s(r,{name:"",description:"",endpoint_type:"",base_url:"",api_base_url:"",upload_base_url:"",ca_cert_bundle:""}),k={...e(r)}}function rt(t){t.key==="Escape"&&(e(ce)||e(pe)||e(be))&&y()}function y(){s(ce,!1),s(pe,!1),s(be,!1),s(G,"github"),s(m,null),s(R,null),Ae()}function at(){const t={};if(e(r).description!==k.description&&(e(r).description.trim()!==""||k.description!=="")&&(t.description=e(r).description.trim()),e(r).base_url!==k.base_url&&e(r).base_url.trim()!==""&&(t.base_url=e(r).base_url.trim()),e(r).api_base_url!==k.api_base_url&&(e(r).api_base_url.trim()!==""||k.api_base_url!=="")&&(t.api_base_url=e(r).api_base_url.trim()),e(m)?.endpoint_type==="github"&&e(r).upload_base_url!==k.upload_base_url&&(e(r).upload_base_url.trim()!==""||k.upload_base_url!=="")&&(t.upload_base_url=e(r).upload_base_url.trim()),e(r).ca_cert_bundle!==k.ca_cert_bundle)if(e(r).ca_cert_bundle!=="")try{const d=atob(e(r).ca_cert_bundle);t.ca_cert_bundle=Array.from(d,c=>c.charCodeAt(0))}catch{k.ca_cert_bundle!==""&&(t.ca_cert_bundle=[])}else k.ca_cert_bundle!==""&&(t.ca_cert_bundle=[]);return t}async function ot(){try{const t={name:e(r).name,description:e(r).description,endpoint_type:e(r).endpoint_type,base_url:e(r).base_url,api_base_url:e(r).api_base_url,upload_base_url:e(r).upload_base_url};if(e(r).ca_cert_bundle&&e(r).ca_cert_bundle.trim()!=="")try{const d=atob(e(r).ca_cert_bundle);t.ca_cert_bundle=Array.from(d,c=>c.charCodeAt(0))}catch{}e(r).endpoint_type==="github"?await ee.createGithubEndpoint(t):await 
ee.createGiteaEndpoint(t),ye.success("Endpoint Created",`Endpoint ${e(r).name} has been created successfully.`),y()}catch(t){s(te,t instanceof Error?t.message:"Failed to create endpoint")}}async function it(){if(e(m))try{const t=at();if(Object.keys(t).length===0){ye.info("No Changes","No fields were modified."),y();return}e(m).endpoint_type==="github"?await ee.updateGithubEndpoint(e(m).name,t):await ee.updateGiteaEndpoint(e(m).name,t),ye.success("Endpoint Updated",`Endpoint ${e(m).name} has been updated successfully.`),y()}catch(t){s(te,t instanceof Error?t.message:"Failed to update endpoint")}}async function st(){if(e(R))try{e(R).endpoint_type==="github"?await ee.deleteGithubEndpoint(e(R).name):await ee.deleteGiteaEndpoint(e(R).name),ye.success("Endpoint Deleted",`Endpoint ${e(R).name} has been deleted successfully.`),y()}catch(t){s(te,t instanceof Error?t.message:"Failed to delete endpoint")}}function Be(t){const c=t.target.files?.[0];if(!c){g(r,e(r).ca_cert_bundle="");return}const v=new FileReader;v.onload=n=>{const f=n.target?.result;g(r,e(r).ca_cert_bundle=btoa(f))},v.readAsText(c)}function ge(){return!(!e(r).name||!e(r).description||!e(r).base_url||e(r).endpoint_type==="github"&&!e(r).api_base_url)}Z(()=>(e(O),q()),()=>{(!e(O).length||q().loaded.endpoints)&&s(O,q().endpoints)}),Z(()=>q(),()=>{s(le,q().loading.endpoints)}),Z(()=>q(),()=>{s(ve,q().errorMessages.endpoints)}),Z(()=>(e(O),e(ue)),()=>{s(H,At(e(O),e(ue)))}),Z(()=>(e(J),e(H),e(K),e(L)),()=>{s(J,Math.ceil(e(H).length/e(K))),e(L)>e(J)&&e(J)>0&&s(L,e(J))}),Z(()=>(e(H),e(L),e(K)),()=>{s(Re,Bt(e(H),e(L),e(K)))}),ft(),pt();var Ue=zt();b("keydown",yt,rt),_t(t=>{vt.title="Endpoints - GARM"});var ke=Ee(Ue),Le=o(ke);wt(Le,{title:"Endpoints",description:"Manage your GitHub and Gitea endpoints for runner management.",actionLabel:"Add Endpoint",$$events:{action:et}});var nt=a(Le,2);{let t=Ce(()=>e(ve)||e(te)),d=Ce(()=>!!e(ve));Ct(nt,{get columns(){return Ke},get data(){return e(Re)},get loading(){return e(le)},get error(){return e(t)},get searchTerm(){return e(ue)},searchPlaceholder:"Search endpoints by name, description, or URL...",get currentPage(){return e(L)},get perPage(){return e(K)},get totalPages(){return e(J)},get totalItems(){return e(H),E(()=>e(H).length)},itemName:"endpoints",emptyIconType:"settings",get showRetry(){return e(d)},get mobileCardConfig(){return Je},$$events:{search:Qe,pageChange:We,perPageChange:Xe,retry:Oe,edit:Ye,delete:Ze},$$slots:{"mobile-card":(c,v)=>{const n=Ce(()=>v.item);var f=Lt(),x=o(f),A=o(x),P=o(A),w=o(P,!0);i(P);var _=a(P,2),M=o(_,!0);i(_);var B=a(_,2),T=o(B);kt(T,()=>(se($e),se(e(n)),E(()=>$e(e(n).endpoint_type||"","w-5 h-5"))));var F=a(T,2),Q=o(F,!0);i(F),i(B),i(A),i(x);var N=a(x,2),z=o(N);ze(z,{action:"edit",size:"sm",title:"Edit endpoint",ariaLabel:"Edit endpoint",$$events:{click:()=>xe(e(n))}});var W=a(z,2);ze(W,{action:"delete",size:"sm",title:"Delete endpoint",ariaLabel:"Delete endpoint",$$events:{click:()=>he(e(n))}}),i(N),i(f),me(()=>{ne(w,(se(e(n)),E(()=>e(n).name))),ne(M,(se(e(n)),E(()=>e(n).description))),ne(Q,(se(e(n)),E(()=>e(n).endpoint_type)))}),j(c,f)}}})}i(ke);var Me=a(ke,2);{var dt=t=>{var d=Ft(),c=o(d),v=a(c,2),n=o(v),f=a(o(n),2);i(n);var x=a(n,2),A=o(x);Et(A,{get selectedForgeType(){return e(G)},set selectedForgeType(u){s(G,u)},$$events:{select:tt},$$legacy:!0});var P=a(A,2),w=a(o(P),2);U(w),i(P);var _=a(P,2),M=a(o(_),2);De(M),i(_);var B=a(_,2),T=a(o(B),2);U(T),i(B);var F=a(B,2);{var Q=u=>{var $=Mt(),l=Ee($),h=a(o(l),2);U(h),i(l);var 
D=a(l,2),S=a(o(D),2);U(S),i(D),C(h,()=>e(r).api_base_url,Y=>g(r,e(r).api_base_url=Y)),C(S,()=>e(r).upload_base_url,Y=>g(r,e(r).upload_base_url=Y)),j(u,$)},N=u=>{var $=Tt(),l=a(o($),2);U(l),V(2),i($),C(l,()=>e(r).api_base_url,h=>g(r,e(r).api_base_url=h)),j(u,$)};de(F,u=>{e(G)==="github"?u(Q):u(N,!1)})}var z=a(F,2),W=a(o(z),2),X=o(W),re=a(X,2),ae=a(o(re),2),fe=o(ae);V(),i(ae),V(2),i(re),i(W),i(z);var oe=a(z,2),_e=o(oe),ie=a(_e,2);i(oe),i(x),i(v),i(d),me((u,$)=>{je(w,"placeholder",e(G)==="github"?"e.g., github-enterprise or github-com":"e.g., gitea-main or my-gitea"),je(T,"placeholder",e(G)==="github"?"https://github.com or https://github.example.com":"https://gitea.example.com"),ie.disabled=u,Ie(ie,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors - ${$??""}`)},[()=>E(()=>!ge()),()=>E(()=>ge()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),b("click",c,y),b("click",f,y),C(w,()=>e(r).name,u=>g(r,e(r).name=u)),C(M,()=>e(r).description,u=>g(r,e(r).description=u)),C(T,()=>e(r).base_url,u=>g(r,e(r).base_url=u)),b("change",X,Be),b("click",fe,()=>document.getElementById("ca_cert_file")?.click()),b("click",_e,y),b("submit",x,Ge(ot)),j(t,d)};de(Me,t=>{e(ce)&&t(dt)})}var Te=a(Me,2);{var lt=t=>{var d=It(),c=o(d),v=a(c,2),n=o(v),f=o(n),x=o(f),A=o(x);i(x),V(2),i(f);var P=a(f,2);i(n);var w=a(n,2),_=o(w),M=a(o(_),2);U(M),i(_);var B=a(_,2),T=a(o(B),2);De(T),i(B);var F=a(B,2),Q=a(o(F),2);U(Q),i(F);var N=a(F,2);{var z=l=>{var h=Dt(),D=Ee(h),S=a(o(D),2);U(S),i(D);var Y=a(D,2),Fe=a(o(Y),2);U(Fe),i(Y),C(S,()=>e(r).api_base_url,we=>g(r,e(r).api_base_url=we)),C(Fe,()=>e(r).upload_base_url,we=>g(r,e(r).upload_base_url=we)),j(l,h)},W=l=>{var h=jt(),D=a(o(h),2);U(D),V(2),i(h),C(D,()=>e(r).api_base_url,S=>g(r,e(r).api_base_url=S)),j(l,h)};de(N,l=>{e(m),E(()=>e(m).endpoint_type==="github")?l(z):l(W,!1)})}var X=a(N,2),re=a(o(X),2),ae=o(re),fe=a(ae,2),oe=a(o(fe),2),_e=o(oe);V(),i(oe),V(2),i(fe),i(re),i(X);var ie=a(X,2),u=o(ie),$=a(u,2);i(ie),i(w),i(v),i(d),me((l,h)=>{ne(A,`Edit ${e(m),E(()=>e(m).endpoint_type==="github"?"GitHub":"Gitea")??""} Endpoint`),$.disabled=l,Ie($,1,`px-4 py-2 text-sm font-medium text-white rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 transition-colors - ${h??""}`)},[()=>E(()=>!ge()),()=>E(()=>ge()?"bg-blue-600 hover:bg-blue-700 focus:ring-blue-500 cursor-pointer":"bg-gray-400 cursor-not-allowed")]),b("click",c,y),b("click",P,y),C(M,()=>e(r).name,l=>g(r,e(r).name=l)),C(T,()=>e(r).description,l=>g(r,e(r).description=l)),C(Q,()=>e(r).base_url,l=>g(r,e(r).base_url=l)),b("change",ae,Be),b("click",_e,()=>document.getElementById("edit_ca_cert_file")?.click()),b("click",u,y),b("submit",w,Ge(it)),j(t,d)};de(Te,t=>{e(pe)&&e(m)&&t(lt)})}var ut=a(Te,2);{var ct=t=>{var d=Gt(),c=o(d),v=a(c,2),n=o(v),f=o(n),x=a(o(f),2),A=a(o(x),2),P=o(A);i(A),i(x),i(f),i(n);var w=a(n,2),_=o(w),M=a(_,2);i(w),i(v),i(d),me(()=>ne(P,`Are you sure you want to delete the endpoint "${e(R),E(()=>e(R).name)??""}"? 
This action cannot be undone.`)),b("click",c,y),b("click",_,y),b("click",M,st),j(t,d)};de(ut,t=>{e(be)&&e(R)&&t(ct)})}j(He,Ue),mt(),Ve()}export{rr as component}; diff --git a/webapp/assets/_app/immutable/nodes/5.CeMzA7DH.js b/webapp/assets/_app/immutable/nodes/5.CeMzA7DH.js new file mode 100644 index 00000000..06f4aeca --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/5.CeMzA7DH.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Ge}from"../chunks/B3Pzt0F_.js";import{p as Se,E as Ie,o as Be,l as w,s as r,m as n,g as e,a as He,f as M,k as o,j as i,r as l,c as $,t as N,v as K,x as $e,u as v,z as Je,y as Z,e as Ce,d as Re,B as Ve,b as Ue,h as We,$ as Ye,n as A,q as ee}from"../chunks/D8EpLgQ1.js";import{a as Le,i as B,s as Ne}from"../chunks/5WA7h8uK.js";import{r as Pe,b as Me,c as Ke,g as ve}from"../chunks/CiE1LlKV.js";import{b as De}from"../chunks/CRhkqW2i.js";import{P as Oe}from"../chunks/CO4LUyTP.js";import{e as Qe,i as Xe}from"../chunks/u94nIB4-.js";import{b as Te}from"../chunks/C6k1Q4We.js";import{p as Ze}from"../chunks/D4Caz1gY.js";import{M as et}from"../chunks/qB7B8uiS.js";import{e as re}from"../chunks/BZiHL9L3.js";import{e as qe,a as he}from"../chunks/wyaP0EDu.js";import{U as tt}from"../chunks/CIBm3n2u.js";import{D as rt}from"../chunks/KQ2xQpA3.js";import{t as te}from"../chunks/BEkVdVE1.js";import{B as at,k as ye,l as st}from"../chunks/BGVHQGl-.js";import{D as ot,A as Ae,G as nt,a as it}from"../chunks/BrNfsPe8.js";import{E as lt}from"../chunks/D4PaGKsV.js";import{E as dt}from"../chunks/CGpPw4EW.js";import{S as ct}from"../chunks/MCv1Wq2q.js";var ut=M('

                '),pt=M('

                Loading...

                '),mt=M(""),ft=M('

                Loading credentials...

                '),gt=M('

                No GitHub credentials found. Please create GitHub credentials first.

                '),bt=M(`

                You'll need to manually configure this secret in GitHub's enterprise webhook settings.

                `),vt=M('

                Create Enterprise

                Enterprises are only available for GitHub endpoints.

                ');function yt(ae,se){Se(se,!1);const[oe,ne]=Ne(),u=()=>Le(qe,"$eagerCache",oe),G=n(),h=n(),p=n(),H=n(),E=Ie();let x=n(!1),y=n(""),a=n({name:"",credentials_name:"",webhook_secret:"",pool_balancer_type:"roundrobin"});async function _(){if(!u().loaded.credentials&&!u().loading.credentials)try{await he.getCredentials()}catch(m){r(y,re(m))}}async function C(){if(!e(a).name||!e(a).name.trim()){r(y,"Enterprise name is required");return}if(!e(a).credentials_name){r(y,"Please select credentials");return}try{r(x,!0),r(y,"");const m={...e(a)};E("submit",m)}catch(m){r(y,re(m)),r(x,!1)}}Be(()=>{_()}),w(()=>u(),()=>{r(G,u().credentials)}),w(()=>u(),()=>{r(h,u().loading.credentials)}),w(()=>e(G),()=>{r(p,e(G).filter(m=>m.forge_type==="github"))}),w(()=>e(a),()=>{r(H,e(a).name&&e(a).name.trim()!==""&&e(a).credentials_name!==""&&e(a).webhook_secret&&e(a).webhook_secret.trim()!=="")}),He(),Ge(),et(ae,{$$events:{close:()=>E("close")},children:(m,q)=>{var D=vt(),d=o(i(D),4);{var ie=f=>{var g=ut(),T=i(g),R=i(T,!0);l(T),l(g),N(()=>K(R,e(y))),$(f,g)};B(d,f=>{e(y)&&f(ie)})}var le=o(d,2);{var de=f=>{var g=pt();$(f,g)},ce=f=>{var g=bt(),T=i(g),R=o(i(T),2);Pe(R),l(T);var z=o(T,2),U=o(i(z),2);N(()=>{e(a),$e(()=>{e(p)})});var j=i(U);j.value=j.__value="";var ue=o(j);Qe(ue,1,()=>e(p),Xe,(s,c)=>{var P=mt(),b=i(P);l(P);var k={};N(()=>{K(b,`${e(c),v(()=>e(c).name)??""} (${e(c),v(()=>e(c).endpoint?.name||"Unknown endpoint")??""})`),k!==(k=(e(c),v(()=>e(c).name)))&&(P.value=(P.__value=(e(c),v(()=>e(c).name)))??"")}),$(s,P)}),l(U);var pe=o(U,2);{var me=s=>{var c=ft();$(s,c)},O=s=>{var c=Ve(),P=Ue(c);{var b=k=>{var Y=gt();$(k,Y)};B(P,k=>{e(p),v(()=>e(p).length===0)&&k(b)},!0)}$(s,c)};B(pe,s=>{e(h)?s(me):s(O,!1)})}l(z);var S=o(z,2),L=o(i(S),2);N(()=>{e(a),$e(()=>{})});var F=i(L);F.value=F.__value="roundrobin";var I=o(F);I.value=I.__value="pack",l(L),l(S);var J=o(S,2),V=o(i(J),2);Pe(V),Je(2),l(J);var Q=o(J,2),X=i(Q),W=o(X,2),t=i(W,!0);l(W),l(Q),l(g),N(()=>{W.disabled=(e(x),e(h),e(H),e(p),v(()=>e(x)||e(h)||!e(H)||e(p).length===0)),K(t,e(x)?"Creating...":"Create Enterprise")}),Te(R,()=>e(a).name,s=>Z(a,e(a).name=s)),Me(U,()=>e(a).credentials_name,s=>Z(a,e(a).credentials_name=s)),Me(L,()=>e(a).pool_balancer_type,s=>Z(a,e(a).pool_balancer_type=s)),Te(V,()=>e(a).webhook_secret,s=>Z(a,e(a).webhook_secret=s)),Ce("click",X,()=>E("close")),Ce("submit",g,Ze(C)),$(f,g)};B(le,f=>{e(x)?f(de):f(ce,!1)})}l(D),$(m,D)},$$slots:{default:!0}}),Re(),ne()}var ht=M(''),_t=M('
                ',1);function jt(ae,se){Se(se,!1);const[oe,ne]=Ne(),u=()=>Le(qe,"$eagerCache",oe),G=n(),h=n(),p=n(),H=n();let E=n([]),x=n(!0),y=n(""),a=n(""),_=n(1),C=n(25),m=n(!1),q=n(!1),D=n(!1),d=n(null);async function ie(t){try{r(y,""),await ve.createEnterprise(t),te.success("Enterprise Created",`Enterprise ${t.name} has been created successfully.`),r(m,!1)}catch(s){throw r(y,re(s)),s}}async function le(t){if(e(d))try{await ve.updateEnterprise(e(d).id,t),te.success("Enterprise Updated",`Enterprise ${e(d).name} has been updated successfully.`),r(q,!1),r(d,null)}catch(s){throw s}}async function de(){if(e(d))try{r(y,""),await ve.deleteEnterprise(e(d).id),te.success("Enterprise Deleted",`Enterprise ${e(d).name} has been deleted successfully.`),r(D,!1),r(d,null)}catch(t){const s=re(t);te.error("Delete Failed",s)}}function ce(){r(m,!0)}function f(t){r(d,t),r(q,!0)}function g(t){r(d,t),r(D,!0)}Be(async()=>{try{r(x,!0);const t=await he.getEnterprises();t&&Array.isArray(t)&&r(E,t)}catch(t){console.error("Failed to load enterprises:",t),r(y,t instanceof Error?t.message:"Failed to load enterprises")}finally{r(x,!1)}});async function T(){try{await he.retryResource("enterprises")}catch(t){console.error("Retry failed:",t)}}const R=[{key:"name",title:"Name",cellComponent:lt,cellProps:{entityType:"enterprise"}},{key:"endpoint",title:"Endpoint",cellComponent:dt},{key:"credentials",title:"Credentials",cellComponent:nt,cellProps:{field:"credentials_name"}},{key:"status",title:"Status",cellComponent:ct,cellProps:{statusType:"entity"}},{key:"actions",title:"Actions",align:"right",cellComponent:it}],z={entityType:"enterprise",primaryText:{field:"name",isClickable:!0,href:"/enterprises/{id}"},secondaryText:{field:"credentials_name"},badges:[{type:"custom",value:t=>ye(t)}],actions:[{type:"edit",handler:t=>f(t)},{type:"delete",handler:t=>g(t)}]};function U(t){r(a,t.detail.term),r(_,1)}function j(t){r(_,t.detail.page)}function ue(t){r(C,t.detail.perPage),r(_,1)}function pe(t){f(t.detail.item)}function me(t){g(t.detail.item)}w(()=>(e(E),u()),()=>{(!e(E).length||u().loaded.enterprises)&&r(E,u().enterprises)}),w(()=>u(),()=>{r(x,u().loading.enterprises)}),w(()=>u(),()=>{r(G,u().errorMessages.enterprises)}),w(()=>(e(E),e(a)),()=>{r(h,st(e(E),e(a)))}),w(()=>(e(h),e(C)),()=>{r(p,Math.ceil(e(h).length/e(C)))}),w(()=>(e(_),e(p)),()=>{e(_)>e(p)&&e(p)>0&&r(_,e(p))}),w(()=>(e(h),e(_),e(C)),()=>{r(H,e(h).slice((e(_)-1)*e(C),e(_)*e(C)))}),He(),Ge();var O=_t();We(t=>{Ye.title="Enterprises - GARM"});var S=Ue(O),L=i(S);Oe(L,{title:"Enterprises",description:"Manage GitHub enterprises",actionLabel:"Add Enterprise",$$events:{action:ce}});var F=o(L,2);{let t=ee(()=>e(G)||e(y)),s=ee(()=>!!e(G));ot(F,{get columns(){return R},get data(){return e(H)},get loading(){return e(x)},get error(){return e(t)},get searchTerm(){return e(a)},searchPlaceholder:"Search enterprises...",get currentPage(){return e(_)},get perPage(){return e(C)},get totalPages(){return e(p)},get totalItems(){return e(h),v(()=>e(h).length)},itemName:"enterprises",emptyIconType:"building",get showRetry(){return e(s)},get mobileCardConfig(){return z},$$events:{search:U,pageChange:j,perPageChange:ue,retry:T,edit:pe,delete:me},$$slots:{"mobile-card":(c,P)=>{const b=ee(()=>P.item),k=ee(()=>(A(ye),A(e(b)),v(()=>ye(e(b)))));var Y=ht(),fe=i(Y),ge=i(fe),be=i(ge),ze=i(be,!0);l(be);var _e=o(be,2),je=i(_e,!0);l(_e),l(ge),l(fe);var xe=o(fe,2),ke=i(xe);at(ke,{get variant(){return A(e(k)),v(()=>e(k).variant)},get text(){return A(e(k)),v(()=>e(k).text)}});var 
we=o(ke,2),Ee=i(we);Ae(Ee,{action:"edit",size:"sm",title:"Edit enterprise",ariaLabel:"Edit enterprise",$$events:{click:()=>f(e(b))}});var Fe=o(Ee,2);Ae(Fe,{action:"delete",size:"sm",title:"Delete enterprise",ariaLabel:"Delete enterprise",$$events:{click:()=>g(e(b))}}),l(we),l(xe),l(Y),N(()=>{Ke(ge,"href",(A(De),A(e(b)),v(()=>`${De}/enterprises/${e(b).id}`))),K(ze,(A(e(b)),v(()=>e(b).name))),K(je,(A(e(b)),v(()=>e(b).credentials_name)))}),$(c,Y)}}})}l(S);var I=o(S,2);{var J=t=>{yt(t,{$$events:{close:()=>r(m,!1),submit:s=>ie(s.detail)}})};B(I,t=>{e(m)&&t(J)})}var V=o(I,2);{var Q=t=>{tt(t,{get entity(){return e(d)},entityType:"enterprise",$$events:{close:()=>{r(q,!1),r(d,null)},submit:s=>le(s.detail)}})};B(V,t=>{e(q)&&e(d)&&t(Q)})}var X=o(V,2);{var W=t=>{rt(t,{title:"Delete Enterprise",message:"Are you sure you want to delete this enterprise? This action cannot be undone.",get itemName(){return e(d),v(()=>e(d).name)},$$events:{close:()=>{r(D,!1),r(d,null)},confirm:de}})};B(X,t=>{e(D)&&e(d)&&t(W)})}$(ae,O),Re(),ne()}export{jt as component}; diff --git a/webapp/assets/_app/immutable/nodes/5.rvsSG-AQ.js b/webapp/assets/_app/immutable/nodes/5.rvsSG-AQ.js deleted file mode 100644 index 5f4f52cc..00000000 --- a/webapp/assets/_app/immutable/nodes/5.rvsSG-AQ.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Ae}from"../chunks/B3Pzt0F_.js";import{p as Ge,E as je,o as Se,l as w,s as r,m as n,g as e,a as Be,f as M,k as o,j as i,r as l,c as $,t as L,v as K,x as Ee,u as y,z as Ie,y as Z,e as $e,d as He,B as Je,b as Re,h as Ve,$ as We,n as A,q as ee}from"../chunks/D8EpLgQ1.js";import{a as Ue,i as B,s as Fe}from"../chunks/5WA7h8uK.js";import{r as Ce,b as Pe,c as Ye,g as ge}from"../chunks/CiE1LlKV.js";import{b as Me}from"../chunks/CoIRRsD9.js";import{P as Ke}from"../chunks/CO4LUyTP.js";import{e as Oe,i as Qe}from"../chunks/u94nIB4-.js";import{b as De}from"../chunks/C6k1Q4We.js";import{p as Xe}from"../chunks/D4Caz1gY.js";import{M as Ze}from"../chunks/qB7B8uiS.js";import{e as Le,a as ye}from"../chunks/wyaP0EDu.js";import{U as et}from"../chunks/CclkODgu.js";import{D as tt}from"../chunks/KQ2xQpA3.js";import{t as be}from"../chunks/BEkVdVE1.js";import{B as rt,k as ve,l as at}from"../chunks/BGVHQGl-.js";import{D as st,A as Te,G as ot,a as nt}from"../chunks/C9DJVOi1.js";import{E as it}from"../chunks/B7ITzBt8.js";import{E as lt}from"../chunks/CGpPw4EW.js";import{S as dt}from"../chunks/BE4wujub.js";var ct=M('

                '),ut=M('

                Loading...

                '),pt=M(""),mt=M('

                Loading credentials...

                '),ft=M('

                No GitHub credentials found. Please create GitHub credentials first.

                '),gt=M(`

                You'll need to manually configure this secret in GitHub's enterprise webhook settings.

                `),bt=M('

                Create Enterprise

                Enterprises are only available for GitHub endpoints.

                ');function vt(te,re){Ge(re,!1);const[ae,se]=Fe(),p=()=>Ue(Le,"$eagerCache",ae),G=n(),h=n(),m=n(),H=n(),E=je();let x=n(!1),f=n(""),a=n({name:"",credentials_name:"",webhook_secret:"",pool_balancer_type:"roundrobin"});async function _(){if(!p().loaded.credentials&&!p().loading.credentials)try{await ye.getCredentials()}catch(c){r(f,c instanceof Error?c.message:"Failed to load credentials")}}async function C(){if(!e(a).name||!e(a).name.trim()){r(f,"Enterprise name is required");return}if(!e(a).credentials_name){r(f,"Please select credentials");return}try{r(x,!0),r(f,"");const c={...e(a)};E("submit",c)}catch(c){r(f,c instanceof Error?c.message:"Failed to create enterprise"),r(x,!1)}}Se(()=>{_()}),w(()=>p(),()=>{r(G,p().credentials)}),w(()=>p(),()=>{r(h,p().loading.credentials)}),w(()=>e(G),()=>{r(m,e(G).filter(c=>c.forge_type==="github"))}),w(()=>e(a),()=>{r(H,e(a).name&&e(a).name.trim()!==""&&e(a).credentials_name!==""&&e(a).webhook_secret&&e(a).webhook_secret.trim()!=="")}),Be(),Ae(),Ze(te,{$$events:{close:()=>E("close")},children:(c,N)=>{var D=bt(),d=o(i(D),4);{var oe=g=>{var b=ct(),T=i(b),R=i(T,!0);l(T),l(b),L(()=>K(R,e(f))),$(g,b)};B(d,g=>{e(f)&&g(oe)})}var ne=o(d,2);{var ie=g=>{var b=ut();$(g,b)},le=g=>{var b=gt(),T=i(b),R=o(i(T),2);Ce(R),l(T);var q=o(T,2),U=o(i(q),2);L(()=>{e(a),Ee(()=>{e(m)})});var z=i(U);z.value=z.__value="";var de=o(z);Oe(de,1,()=>e(m),Qe,(s,u)=>{var P=pt(),v=i(P);l(P);var k={};L(()=>{K(v,`${e(u),y(()=>e(u).name)??""} (${e(u),y(()=>e(u).endpoint?.name||"Unknown endpoint")??""})`),k!==(k=(e(u),y(()=>e(u).name)))&&(P.value=(P.__value=(e(u),y(()=>e(u).name)))??"")}),$(s,P)}),l(U);var ce=o(U,2);{var ue=s=>{var u=mt();$(s,u)},O=s=>{var u=Je(),P=Re(u);{var v=k=>{var Y=ft();$(k,Y)};B(P,k=>{e(m),y(()=>e(m).length===0)&&k(v)},!0)}$(s,u)};B(ce,s=>{e(h)?s(ue):s(O,!1)})}l(q);var S=o(q,2),F=o(i(S),2);L(()=>{e(a),Ee(()=>{})});var j=i(F);j.value=j.__value="roundrobin";var I=o(j);I.value=I.__value="pack",l(F),l(S);var J=o(S,2),V=o(i(J),2);Ce(V),Ie(2),l(J);var Q=o(J,2),X=i(Q),W=o(X,2),t=i(W,!0);l(W),l(Q),l(b),L(()=>{W.disabled=(e(x),e(h),e(H),e(m),y(()=>e(x)||e(h)||!e(H)||e(m).length===0)),K(t,e(x)?"Creating...":"Create Enterprise")}),De(R,()=>e(a).name,s=>Z(a,e(a).name=s)),Pe(U,()=>e(a).credentials_name,s=>Z(a,e(a).credentials_name=s)),Pe(F,()=>e(a).pool_balancer_type,s=>Z(a,e(a).pool_balancer_type=s)),De(V,()=>e(a).webhook_secret,s=>Z(a,e(a).webhook_secret=s)),$e("click",X,()=>E("close")),$e("submit",b,Xe(C)),$(g,b)};B(ne,g=>{e(x)?g(ie):g(le,!1)})}l(D),$(c,D)},$$slots:{default:!0}}),He(),se()}var yt=M(''),ht=M('
                ',1);function Nt(te,re){Ge(re,!1);const[ae,se]=Fe(),p=()=>Ue(Le,"$eagerCache",ae),G=n(),h=n(),m=n(),H=n();let E=n([]),x=n(!0),f=n(""),a=n(""),_=n(1),C=n(25),c=n(!1),N=n(!1),D=n(!1),d=n(null);async function oe(t){try{r(f,""),await ge.createEnterprise(t),be.success("Enterprise Created",`Enterprise ${t.name} has been created successfully.`),r(c,!1)}catch(s){throw r(f,s instanceof Error?s.message:"Failed to create enterprise"),s}}async function ne(t){if(e(d))try{await ge.updateEnterprise(e(d).id,t),be.success("Enterprise Updated",`Enterprise ${e(d).name} has been updated successfully.`),r(N,!1),r(d,null)}catch(s){throw s}}async function ie(){if(e(d))try{r(f,""),await ge.deleteEnterprise(e(d).id),be.success("Enterprise Deleted",`Enterprise ${e(d).name} has been deleted successfully.`),r(D,!1),r(d,null)}catch(t){r(f,t instanceof Error?t.message:"Failed to delete enterprise")}}function le(){r(c,!0)}function g(t){r(d,t),r(N,!0)}function b(t){r(d,t),r(D,!0)}Se(async()=>{try{r(x,!0);const t=await ye.getEnterprises();t&&Array.isArray(t)&&r(E,t)}catch(t){console.error("Failed to load enterprises:",t),r(f,t instanceof Error?t.message:"Failed to load enterprises")}finally{r(x,!1)}});async function T(){try{await ye.retryResource("enterprises")}catch(t){console.error("Retry failed:",t)}}const R=[{key:"name",title:"Name",cellComponent:it,cellProps:{entityType:"enterprise"}},{key:"endpoint",title:"Endpoint",cellComponent:lt},{key:"credentials",title:"Credentials",cellComponent:ot,cellProps:{field:"credentials_name"}},{key:"status",title:"Status",cellComponent:dt,cellProps:{statusType:"entity"}},{key:"actions",title:"Actions",align:"right",cellComponent:nt}],q={entityType:"enterprise",primaryText:{field:"name",isClickable:!0,href:"/enterprises/{id}"},secondaryText:{field:"credentials_name"},badges:[{type:"custom",value:t=>ve(t)}],actions:[{type:"edit",handler:t=>g(t)},{type:"delete",handler:t=>b(t)}]};function U(t){r(a,t.detail.term),r(_,1)}function z(t){r(_,t.detail.page)}function de(t){r(C,t.detail.perPage),r(_,1)}function ce(t){g(t.detail.item)}function ue(t){b(t.detail.item)}w(()=>(e(E),p()),()=>{(!e(E).length||p().loaded.enterprises)&&r(E,p().enterprises)}),w(()=>p(),()=>{r(x,p().loading.enterprises)}),w(()=>p(),()=>{r(G,p().errorMessages.enterprises)}),w(()=>(e(E),e(a)),()=>{r(h,at(e(E),e(a)))}),w(()=>(e(h),e(C)),()=>{r(m,Math.ceil(e(h).length/e(C)))}),w(()=>(e(_),e(m)),()=>{e(_)>e(m)&&e(m)>0&&r(_,e(m))}),w(()=>(e(h),e(_),e(C)),()=>{r(H,e(h).slice((e(_)-1)*e(C),e(_)*e(C)))}),Be(),Ae();var O=ht();Ve(t=>{We.title="Enterprises - GARM"});var S=Re(O),F=i(S);Ke(F,{title:"Enterprises",description:"Manage GitHub enterprises",actionLabel:"Add Enterprise",$$events:{action:le}});var j=o(F,2);{let t=ee(()=>e(G)||e(f)),s=ee(()=>!!e(G));st(j,{get columns(){return R},get data(){return e(H)},get loading(){return e(x)},get error(){return e(t)},get searchTerm(){return e(a)},searchPlaceholder:"Search enterprises...",get currentPage(){return e(_)},get perPage(){return e(C)},get totalPages(){return e(m)},get totalItems(){return e(h),y(()=>e(h).length)},itemName:"enterprises",emptyIconType:"building",get showRetry(){return e(s)},get mobileCardConfig(){return q},$$events:{search:U,pageChange:z,perPageChange:de,retry:T,edit:ce,delete:ue},$$slots:{"mobile-card":(u,P)=>{const v=ee(()=>P.item),k=ee(()=>(A(ve),A(e(v)),y(()=>ve(e(v)))));var Y=yt(),pe=i(Y),me=i(pe),fe=i(me),Ne=i(fe,!0);l(fe);var he=o(fe,2),qe=i(he,!0);l(he),l(me),l(pe);var _e=o(pe,2),xe=i(_e);rt(xe,{get variant(){return 
A(e(k)),y(()=>e(k).variant)},get text(){return A(e(k)),y(()=>e(k).text)}});var ke=o(xe,2),we=i(ke);Te(we,{action:"edit",size:"sm",title:"Edit enterprise",ariaLabel:"Edit enterprise",$$events:{click:()=>g(e(v))}});var ze=o(we,2);Te(ze,{action:"delete",size:"sm",title:"Delete enterprise",ariaLabel:"Delete enterprise",$$events:{click:()=>b(e(v))}}),l(ke),l(_e),l(Y),L(()=>{Ye(me,"href",(A(Me),A(e(v)),y(()=>`${Me}/enterprises/${e(v).id}`))),K(Ne,(A(e(v)),y(()=>e(v).name))),K(qe,(A(e(v)),y(()=>e(v).credentials_name)))}),$(u,Y)}}})}l(S);var I=o(S,2);{var J=t=>{vt(t,{$$events:{close:()=>r(c,!1),submit:s=>oe(s.detail)}})};B(I,t=>{e(c)&&t(J)})}var V=o(I,2);{var Q=t=>{et(t,{get entity(){return e(d)},entityType:"enterprise",$$events:{close:()=>{r(N,!1),r(d,null)},submit:s=>ne(s.detail)}})};B(V,t=>{e(N)&&e(d)&&t(Q)})}var X=o(V,2);{var W=t=>{tt(t,{title:"Delete Enterprise",message:"Are you sure you want to delete this enterprise? This action cannot be undone.",get itemName(){return e(d),y(()=>e(d).name)},$$events:{close:()=>{r(D,!1),r(d,null)},confirm:ie}})};B(X,t=>{e(D)&&e(d)&&t(W)})}$(te,O),He(),se()}export{Nt as component}; diff --git a/webapp/assets/_app/immutable/nodes/6.BPDnwpl3.js b/webapp/assets/_app/immutable/nodes/6.BPDnwpl3.js new file mode 100644 index 00000000..d1a00717 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/6.BPDnwpl3.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Ne}from"../chunks/B3Pzt0F_.js";import{p as qe,o as ze,A as Ge,l as je,a as Re,f as F,h as Ve,b as B,t as G,c as E,d as We,g as e,m as i,s,u as l,$ as Je,j as u,r as f,k as d,v as ie,y as Ke,B as le,q as v,n as Oe}from"../chunks/D8EpLgQ1.js";import{i as g,s as Qe,a as Xe}from"../chunks/5WA7h8uK.js";import{c as Ye,g as y}from"../chunks/CiE1LlKV.js";import{p as Ze}from"../chunks/BE8f1Riw.js";import{g as de}from"../chunks/DXCC0cSN.js";import{b as j}from"../chunks/CRhkqW2i.js";import{U as et}from"../chunks/CIBm3n2u.js";import{D as ce}from"../chunks/KQ2xQpA3.js";import{E as tt,P as at,a as rt}from"../chunks/OpktHEmj.js";import{D as st,I as nt}from"../chunks/BEoJgOul.js";import{g as pe}from"../chunks/BGVHQGl-.js";import{w as R}from"../chunks/u94nIB4-.js";import{t as D}from"../chunks/BEkVdVE1.js";import{C as ot}from"../chunks/CRD55Dyg.js";import{e as ue}from"../chunks/BZiHL9L3.js";var it=F('

                Loading enterprise...

                '),lt=F('

                '),dt=F(" ",1),ct=F(' ',1);function Tt(fe,me){qe(me,!1);const[ve,ge]=Qe(),V=()=>Xe(Ze,"$page",ve),$=i();let a=i(null),c=i([]),m=i([]),S=i(!0),P=i(""),T=i(!1),M=i(!1),x=i(!1),C=i(!1),p=i(null),k=null,h=i();async function W(){if(e($))try{s(S,!0),s(P,"");const[t,r,n]=await Promise.all([y.getEnterprise(e($)),y.listEnterprisePools(e($)).catch(()=>[]),y.listEnterpriseInstances(e($)).catch(()=>[])]);s(a,t),s(c,r),s(m,n)}catch(t){s(P,t instanceof Error?t.message:"Failed to load enterprise")}finally{s(S,!1)}}function ye(t,r){const{events:n}=t;return{...r,events:n}}async function he(t){if(e(a))try{await y.updateEnterprise(e(a).id,t),await W(),D.success("Enterprise Updated",`Enterprise ${e(a).name} has been updated successfully.`),s(T,!1)}catch(r){throw r}}async function be(){if(e(a)){try{await y.deleteEnterprise(e(a).id),de(`${j}/enterprises`)}catch(t){const r=ue(t);D.error("Delete Failed",r)}s(M,!1)}}async function _e(){if(e(p))try{await y.deleteInstance(e(p).name),D.success("Instance Deleted",`Instance ${e(p).name} has been deleted successfully.`),s(x,!1),s(p,null)}catch(t){const r=ue(t);D.error("Delete Failed",r),s(x,!1),s(p,null)}}function Ee(t){s(p,t),s(x,!0)}function $e(){s(C,!0)}async function xe(t){try{if(!e(a))return;await y.createEnterprisePool(e(a).id,t.detail),D.success("Pool Created",`Pool has been created successfully for enterprise ${e(a).name}.`),s(C,!1)}catch(r){throw r}}function J(){e(h)&&Ke(h,e(h).scrollTop=e(h).scrollHeight)}function Ie(t){if(t.operation==="update"){const r=t.payload;if(e(a)&&r.id===e(a).id){const n=e(a).events?.length||0,o=r.events?.length||0;s(a,ye(e(a),r)),o>n&&setTimeout(()=>{J()},100)}}else if(t.operation==="delete"){const r=t.payload.id||t.payload;e(a)&&e(a).id===r&&de(`${j}/enterprises`)}}function we(t){if(!e(a))return;const r=t.payload;if(r.enterprise_id===e(a).id){if(t.operation==="create")s(c,[...e(c),r]);else if(t.operation==="update")s(c,e(c).map(n=>n.id===r.id?r:n));else if(t.operation==="delete"){const n=r.id||r;s(c,e(c).filter(o=>o.id!==n))}}}function De(t){if(!e(a)||!e(c))return;const r=t.payload;if(e(c).some(o=>o.id===r.pool_id)){if(t.operation==="create")s(m,[...e(m),r]);else if(t.operation==="update")s(m,e(m).map(o=>o.id===r.id?r:o));else if(t.operation==="delete"){const o=r.id||r;s(m,e(m).filter(N=>N.id!==o))}}}ze(()=>{W().then(()=>{e(a)?.events?.length&&setTimeout(()=>{J()},100)});const t=R.subscribeToEntity("enterprise",["update","delete"],Ie),r=R.subscribeToEntity("pool",["create","update","delete"],we),n=R.subscribeToEntity("instance",["create","update","delete"],De);k=()=>{t(),r(),n()}}),Ge(()=>{k&&(k(),k=null)}),je(()=>V(),()=>{s($,V().params.id)}),Re(),Ne();var K=ct();Ve(t=>{G(()=>Je.title=`${e(a),l(()=>e(a)?`${e(a).name} - Enterprise Details`:"Enterprise Details")??""} - GARM`)});var U=B(K),L=u(U),O=u(L),H=u(O),Pe=u(H);f(H);var Q=d(H,2),X=u(Q),Y=d(u(X),2),Te=u(Y,!0);f(Y),f(X),f(Q),f(O),f(L);var Me=d(L,2);{var Ce=t=>{var r=it();E(t,r)},ke=t=>{var r=le(),n=B(r);{var o=b=>{var _=lt(),A=u(_),q=u(A,!0);f(A),f(_),G(()=>ie(q,e(P))),E(b,_)},N=b=>{var _=le(),A=B(_);{var q=z=>{var ae=dt(),re=B(ae);{let I=v(()=>(e(a),l(()=>e(a).name||"Enterprise"))),w=v(()=>(e(a),l(()=>e(a).endpoint?.name))),He=v(()=>(Oe(pe),l(()=>pe("github"))));st(re,{get title(){return e(I)},get subtitle(){return`Endpoint: ${e(w)??""} • GitHub Enterprise`},get forgeIcon(){return e(He)},onEdit:()=>s(T,!0),onDelete:()=>s(M,!0)})}var se=d(re,2);tt(se,{get entity(){return e(a)},entityType:"enterprise"});var ne=d(se,2);{let 
I=v(()=>(e(a),l(()=>e(a).id||""))),w=v(()=>(e(a),l(()=>e(a).name||"")));at(ne,{get pools(){return e(c)},entityType:"enterprise",get entityId(){return e(I)},get entityName(){return e(w)},$$events:{addPool:$e}})}var oe=d(ne,2);nt(oe,{get instances(){return e(m)},entityType:"enterprise",onDeleteInstance:Ee});var Le=d(oe,2);{let I=v(()=>(e(a),l(()=>e(a)?.events)));rt(Le,{get events(){return e(I)},get eventsContainer(){return e(h)},set eventsContainer(w){s(h,w)},$$legacy:!0})}E(z,ae)};g(A,z=>{e(a)&&z(q)},!0)}E(b,_)};g(n,b=>{e(P)?b(o):b(N,!1)},!0)}E(t,r)};g(Me,t=>{e(S)?t(Ce):t(ke,!1)})}f(U);var Z=d(U,2);{var Ae=t=>{et(t,{get entity(){return e(a)},entityType:"enterprise",$$events:{close:()=>s(T,!1),submit:r=>he(r.detail)}})};g(Z,t=>{e(T)&&e(a)&&t(Ae)})}var ee=d(Z,2);{var Be=t=>{ce(t,{title:"Delete Enterprise",message:"Are you sure you want to delete this enterprise? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(a),l(()=>e(a).name)},$$events:{close:()=>s(M,!1),confirm:be}})};g(ee,t=>{e(M)&&e(a)&&t(Be)})}var te=d(ee,2);{var Fe=t=>{ce(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(p),l(()=>e(p).name)},$$events:{close:()=>{s(x,!1),s(p,null)},confirm:_e}})};g(te,t=>{e(x)&&e(p)&&t(Fe)})}var Se=d(te,2);{var Ue=t=>{{let r=v(()=>(e(a),l(()=>e(a).id||"")));ot(t,{initialEntityType:"enterprise",get initialEntityId(){return e(r)},$$events:{close:()=>s(C,!1),submit:xe}})}};g(Se,t=>{e(C)&&e(a)&&t(Ue)})}G(()=>{Ye(Pe,"href",`${j}/enterprises`),ie(Te,(e(a),l(()=>e(a)?e(a).name:"Loading...")))}),E(fe,K),We(),ge()}export{Tt as component}; diff --git a/webapp/assets/_app/immutable/nodes/6.CtGX0qgG.js b/webapp/assets/_app/immutable/nodes/6.CtGX0qgG.js deleted file mode 100644 index 03f28a9f..00000000 --- a/webapp/assets/_app/immutable/nodes/6.CtGX0qgG.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as He}from"../chunks/B3Pzt0F_.js";import{p as Ne,o as qe,A as ze,l as Ge,a as je,f as B,h as Re,b as A,t as G,c as E,d as Ve,g as e,m as o,s,u as l,$ as We,j as u,r as f,k as d,v as oe,y as Je,B as le,q as v,n as Ke}from"../chunks/D8EpLgQ1.js";import{i as g,s as Oe,a as Qe}from"../chunks/5WA7h8uK.js";import{c as Xe,g as y}from"../chunks/CiE1LlKV.js";import{p as Ye}from"../chunks/C41YH50Q.js";import{g as de}from"../chunks/CTf6mQoE.js";import{b as j}from"../chunks/CoIRRsD9.js";import{U as Ze}from"../chunks/CclkODgu.js";import{D as ce}from"../chunks/KQ2xQpA3.js";import{E as et,P as tt,a as at}from"../chunks/BmGWMSQm.js";import{D as rt,I as st}from"../chunks/DDhBTdDt.js";import{g as pe}from"../chunks/BGVHQGl-.js";import{w as R}from"../chunks/u94nIB4-.js";import{t as F}from"../chunks/BEkVdVE1.js";import{C as nt}from"../chunks/CwqI2jFH.js";var it=B('

                Loading enterprise...

                '),ot=B('

                '),lt=B(" ",1),dt=B(' ',1);function Dt(ue,fe){Ne(fe,!1);const[me,ve]=Oe(),V=()=>Qe(Ye,"$page",me),$=o();let a=o(null),c=o([]),m=o([]),S=o(!0),x=o(""),T=o(!1),P=o(!1),w=o(!1),M=o(!1),p=o(null),C=null,h=o();async function W(){if(e($))try{s(S,!0),s(x,"");const[t,r,n]=await Promise.all([y.getEnterprise(e($)),y.listEnterprisePools(e($)).catch(()=>[]),y.listEnterpriseInstances(e($)).catch(()=>[])]);s(a,t),s(c,r),s(m,n)}catch(t){s(x,t instanceof Error?t.message:"Failed to load enterprise")}finally{s(S,!1)}}function ge(t,r){const{events:n}=t;return{...r,events:n}}async function ye(t){if(e(a))try{await y.updateEnterprise(e(a).id,t),await W(),F.success("Enterprise Updated",`Enterprise ${e(a).name} has been updated successfully.`),s(T,!1)}catch(r){throw r}}async function he(){if(e(a)){try{await y.deleteEnterprise(e(a).id),de(`${j}/enterprises`)}catch(t){s(x,t instanceof Error?t.message:"Failed to delete enterprise")}s(P,!1)}}async function be(){if(e(p))try{await y.deleteInstance(e(p).name),F.success("Instance Deleted",`Instance ${e(p).name} has been deleted successfully.`),s(w,!1),s(p,null)}catch(t){const r=t instanceof Error?t.message:"Failed to delete instance";F.error("Delete Failed",r),s(w,!1),s(p,null)}}function _e(t){s(p,t),s(w,!0)}function Ee(){s(M,!0)}async function $e(t){try{if(!e(a))return;await y.createEnterprisePool(e(a).id,t.detail),F.success("Pool Created",`Pool has been created successfully for enterprise ${e(a).name}.`),s(M,!1)}catch(r){throw r}}function J(){e(h)&&Je(h,e(h).scrollTop=e(h).scrollHeight)}function xe(t){if(t.operation==="update"){const r=t.payload;if(e(a)&&r.id===e(a).id){const n=e(a).events?.length||0,i=r.events?.length||0;s(a,ge(e(a),r)),i>n&&setTimeout(()=>{J()},100)}}else if(t.operation==="delete"){const r=t.payload.id||t.payload;e(a)&&e(a).id===r&&de(`${j}/enterprises`)}}function we(t){if(!e(a))return;const r=t.payload;if(r.enterprise_id===e(a).id){if(t.operation==="create")s(c,[...e(c),r]);else if(t.operation==="update")s(c,e(c).map(n=>n.id===r.id?r:n));else if(t.operation==="delete"){const n=r.id||r;s(c,e(c).filter(i=>i.id!==n))}}}function Ie(t){if(!e(a)||!e(c))return;const r=t.payload;if(e(c).some(i=>i.id===r.pool_id)){if(t.operation==="create")s(m,[...e(m),r]);else if(t.operation==="update")s(m,e(m).map(i=>i.id===r.id?r:i));else if(t.operation==="delete"){const i=r.id||r;s(m,e(m).filter(N=>N.id!==i))}}}qe(()=>{W().then(()=>{e(a)?.events?.length&&setTimeout(()=>{J()},100)});const t=R.subscribeToEntity("enterprise",["update","delete"],xe),r=R.subscribeToEntity("pool",["create","update","delete"],we),n=R.subscribeToEntity("instance",["create","update","delete"],Ie);C=()=>{t(),r(),n()}}),ze(()=>{C&&(C(),C=null)}),Ge(()=>V(),()=>{s($,V().params.id)}),je(),He();var K=dt();Re(t=>{G(()=>We.title=`${e(a),l(()=>e(a)?`${e(a).name} - Enterprise Details`:"Enterprise Details")??""} - GARM`)});var U=A(K),L=u(U),O=u(L),H=u(O),De=u(H);f(H);var Q=d(H,2),X=u(Q),Y=d(u(X),2),Te=u(Y,!0);f(Y),f(X),f(Q),f(O),f(L);var Pe=d(L,2);{var Me=t=>{var r=it();E(t,r)},Ce=t=>{var r=le(),n=A(r);{var i=b=>{var _=ot(),k=u(_),q=u(k,!0);f(k),f(_),G(()=>oe(q,e(x))),E(b,_)},N=b=>{var _=le(),k=A(_);{var q=z=>{var ae=lt(),re=A(ae);{let I=v(()=>(e(a),l(()=>e(a).name||"Enterprise"))),D=v(()=>(e(a),l(()=>e(a).endpoint?.name))),Le=v(()=>(Ke(pe),l(()=>pe("github"))));rt(re,{get title(){return e(I)},get subtitle(){return`Endpoint: ${e(D)??""} • GitHub Enterprise`},get forgeIcon(){return e(Le)},onEdit:()=>s(T,!0),onDelete:()=>s(P,!0)})}var se=d(re,2);et(se,{get entity(){return 
e(a)},entityType:"enterprise"});var ne=d(se,2);{let I=v(()=>(e(a),l(()=>e(a).id||""))),D=v(()=>(e(a),l(()=>e(a).name||"")));tt(ne,{get pools(){return e(c)},entityType:"enterprise",get entityId(){return e(I)},get entityName(){return e(D)},$$events:{addPool:Ee}})}var ie=d(ne,2);st(ie,{get instances(){return e(m)},entityType:"enterprise",onDeleteInstance:_e});var Ue=d(ie,2);{let I=v(()=>(e(a),l(()=>e(a)?.events)));at(Ue,{get events(){return e(I)},get eventsContainer(){return e(h)},set eventsContainer(D){s(h,D)},$$legacy:!0})}E(z,ae)};g(k,z=>{e(a)&&z(q)},!0)}E(b,_)};g(n,b=>{e(x)?b(i):b(N,!1)},!0)}E(t,r)};g(Pe,t=>{e(S)?t(Me):t(Ce,!1)})}f(U);var Z=d(U,2);{var ke=t=>{Ze(t,{get entity(){return e(a)},entityType:"enterprise",$$events:{close:()=>s(T,!1),submit:r=>ye(r.detail)}})};g(Z,t=>{e(T)&&e(a)&&t(ke)})}var ee=d(Z,2);{var Ae=t=>{ce(t,{title:"Delete Enterprise",message:"Are you sure you want to delete this enterprise? This action cannot be undone and will remove all associated pools and instances.",get itemName(){return e(a),l(()=>e(a).name)},$$events:{close:()=>s(P,!1),confirm:he}})};g(ee,t=>{e(P)&&e(a)&&t(Ae)})}var te=d(ee,2);{var Fe=t=>{ce(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return e(p),l(()=>e(p).name)},$$events:{close:()=>{s(w,!1),s(p,null)},confirm:be}})};g(te,t=>{e(w)&&e(p)&&t(Fe)})}var Be=d(te,2);{var Se=t=>{{let r=v(()=>(e(a),l(()=>e(a).id||"")));nt(t,{initialEntityType:"enterprise",get initialEntityId(){return e(r)},$$events:{close:()=>s(M,!1),submit:$e}})}};g(Be,t=>{e(M)&&e(a)&&t(Se)})}G(()=>{Xe(De,"href",`${j}/enterprises`),oe(Te,(e(a),l(()=>e(a)?e(a).name:"Loading...")))}),E(ue,K),Ve(),ve()}export{Dt as component}; diff --git a/webapp/assets/_app/immutable/nodes/7.0w3i9VHx.js b/webapp/assets/_app/immutable/nodes/7.0w3i9VHx.js deleted file mode 100644 index 3b5d5acd..00000000 --- a/webapp/assets/_app/immutable/nodes/7.0w3i9VHx.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Xe}from"../chunks/B3Pzt0F_.js";import{p as Ye,o as Ze,l as R,a as er,f as m,h as rr,t as re,g as e,e as ar,c as n,d as tr,m as i,$ as dr,j as a,k as d,s,r as t,z as L,u as h,C as lr,b as sr,D as or,v as $e,q as ir}from"../chunks/D8EpLgQ1.js";import{i as u,s as nr,a as vr}from"../chunks/5WA7h8uK.js";import{B as Ue,r as A,s as F,c as ze}from"../chunks/CiE1LlKV.js";import{b as P}from"../chunks/C6k1Q4We.js";import{p as mr}from"../chunks/D4Caz1gY.js";import{g as W}from"../chunks/CTf6mQoE.js";import{b as q}from"../chunks/CoIRRsD9.js";import{a as cr,b as ur}from"../chunks/duD3WMbl.js";import{t as pr}from"../chunks/BEkVdVE1.js";var br=m('

                Username is required

                '),gr=m('

                Please enter a valid email address

                '),fr=m('

                Full name is required

                '),xr=m('

                Password must be at least 8 characters long

                '),hr=m('

                Passwords do not match

                '),yr=lr(' Advanced Configuration (Optional)',1),kr=m('

                URL where runners can fetch metadata and setup information.

                URL where runners send status updates and lifecycle events.

                URL where GitHub/Gitea will send webhook events for job notifications.

                '),_r=m("
              • Enter a username
              • "),wr=m("
              • Enter a valid email address
              • "),$r=m("
              • Enter your full name
              • "),Ur=m("
              • Enter a password with at least 8 characters
              • "),zr=m("
              • Confirm your password
              • "),Mr=m('

                Please complete all required fields

                '),Rr=m('

                '),Ar=m('
                GARM

                Welcome to GARM

                Complete the first-run setup to get started

                First-Run Initialization

                GARM needs to be initialized before first use. This will create the admin user and generate a unique controller ID for this installation.

                This will create the admin user, generate a unique controller ID, and configure the required URLs for your GARM installation.
                Make sure to remember these credentials as they cannot be recovered.

                ');function Sr(Me,Re){Ye(Re,!1);const[Ae,Pe]=nr(),C=()=>vr(cr,"$authStore",Ae),k=i(),_=i(),w=i(),$=i(),U=i(),N=i();let g=i("admin"),p=i("admin@garm.local"),c=i(""),f=i(""),x=i("Administrator"),S=i(!1),H=i(""),T=i(!1),E=i(""),I=i(""),V=i("");async function qe(){if(e(N))try{s(S,!0),s(H,""),await ur.initialize(e(g).trim(),e(p).trim(),e(c),e(x).trim(),{callbackUrl:e(E).trim()||void 0,metadataUrl:e(I).trim()||void 0,webhookUrl:e(V).trim()||void 0}),pr.success("GARM Initialized","GARM has been successfully initialized. Welcome!"),W(`${q}/`)}catch(r){s(H,r instanceof Error?r.message:"Failed to initialize GARM")}finally{s(S,!1)}}Ze(()=>{if(C().isAuthenticated){W(`${q}/`);return}!C().needsInitialization&&!C().loading&&W(`${q}/login`)}),R(()=>(e(E),e(I),e(V)),()=>{if(typeof window<"u"){const r=window.location.origin;e(E)||s(E,`${r}/api/v1/callbacks`),e(I)||s(I,`${r}/api/v1/metadata`),e(V)||s(V,`${r}/webhooks`)}}),R(()=>e(p),()=>{s(k,e(p).trim()!==""&&e(p).includes("@"))}),R(()=>e(c),()=>{s(_,e(c).length>=8)}),R(()=>(e(f),e(c)),()=>{s(w,e(f).length>0&&e(c)===e(f))}),R(()=>e(g),()=>{s($,e(g).trim()!=="")}),R(()=>e(x),()=>{s(U,e(x).trim()!=="")}),R(()=>(e($),e(k),e(U),e(_),e(w)),()=>{s(N,e($)&&e(k)&&e(U)&&e(_)&&e(w))}),R(()=>(C(),q),()=>{C().isAuthenticated?W(`${q}/`):!C().needsInitialization&&!C().loading&&W(`${q}/login`)}),er(),Xe();var ae=Ar();rr(r=>{dr.title="Initialize GARM - First Run Setup"});var te=a(ae),me=a(te),ce=a(me),Ce=d(ce,2);t(me),L(4),t(te);var ue=d(te,2),pe=d(a(ue),2),de=a(pe),le=a(de),be=d(a(le),2),O=a(be);A(O);var Ge=d(O,2);{var Le=r=>{var l=br();n(r,l)};u(Ge,r=>{e($),e(g),h(()=>!e($)&&e(g).length>0)&&r(Le)})}t(be),t(le);var se=d(le,2),ge=d(a(se),2),J=a(ge);A(J);var Ee=d(J,2);{var Ie=r=>{var l=gr();n(r,l)};u(Ee,r=>{e(k),e(p),h(()=>!e(k)&&e(p).length>0)&&r(Ie)})}t(ge),t(se);var oe=d(se,2),fe=d(a(oe),2),K=a(fe);A(K);var Ve=d(K,2);{var Be=r=>{var l=fr();n(r,l)};u(Ve,r=>{e(U),e(x),h(()=>!e(U)&&e(x).length>0)&&r(Be)})}t(fe),t(oe);var ie=d(oe,2),xe=d(a(ie),2),Q=a(xe);A(Q);var Fe=d(Q,2);{var Ne=r=>{var l=xr();n(r,l)};u(Fe,r=>{e(_),e(c),h(()=>!e(_)&&e(c).length>0)&&r(Ne)})}t(xe),t(ie);var ne=d(ie,2),he=d(a(ne),2),X=a(he);A(X);var Se=d(X,2);{var je=r=>{var l=hr();n(r,l)};u(Se,r=>{e(w),e(f),h(()=>!e(w)&&e(f).length>0)&&r(je)})}t(he),t(ne);var ve=d(ne,2),ye=a(ve);Ue(ye,{type:"button",variant:"ghost",size:"sm",$$events:{click:()=>s(T,!e(T))},children:(r,l)=>{var b=yr(),v=sr(b);L(),re(()=>F(v,0,`w-4 h-4 mr-2 transition-transform ${e(T)?"rotate-90":""}`)),n(r,b)},$$slots:{default:!0}});var De=d(ye,2);{var We=r=>{var l=kr(),b=a(l),v=a(b),z=d(a(v),2),G=a(z);A(G),L(2),t(z),t(v);var B=d(v,2),Y=d(a(B),2),j=a(Y);A(j),L(2),t(Y),t(B);var Z=d(B,2),D=d(a(Z),2),ee=a(D);A(ee),L(2),t(D),t(Z),t(b),t(l),P(G,()=>e(I),M=>s(I,M)),P(j,()=>e(E),M=>s(E,M)),P(ee,()=>e(V),M=>s(V,M)),n(r,l)};u(De,r=>{e(T)&&r(We)})}t(ve);var ke=d(ve,2);{var He=r=>{var l=Mr(),b=a(l),v=d(a(b),2),z=d(a(v),2),G=a(z),B=a(G);{var Y=o=>{var y=_r();n(o,y)};u(B,o=>{e($)||o(Y)})}var j=d(B,2);{var Z=o=>{var y=wr();n(o,y)};u(j,o=>{e(k)||o(Z)})}var D=d(j,2);{var ee=o=>{var y=$r();n(o,y)};u(D,o=>{e(U)||o(ee)})}var M=d(D,2);{var Je=o=>{var y=Ur();n(o,y)};u(M,o=>{e(_)||o(Je)})}var Ke=d(M,2);{var Qe=o=>{var y=zr();n(o,y)};u(Ke,o=>{e(w)||o(Qe)})}t(G),t(z),t(v),t(b),t(l),n(r,l)};u(ke,r=>{e(N),e(g),e(p),e(x),e(c),e(f),h(()=>!e(N)&&(e(g).length>0||e(p).length>0||e(x).length>0||e(c).length>0||e(f).length>0))&&r(He)})}var _e=d(ke,2);{var Te=r=>{var 
l=Rr(),b=a(l),v=d(a(b),2),z=a(v),G=a(z,!0);t(z),t(v),t(b),t(l),re(()=>$e(G,e(H))),n(r,l)};u(_e,r=>{e(H)&&r(Te)})}var we=d(_e,2),Oe=a(we);{let r=ir(()=>!e(N)||e(S));Ue(Oe,{type:"submit",variant:"primary",size:"lg",fullWidth:!0,get loading(){return e(S)},get disabled(){return e(r)},children:(l,b)=>{L();var v=or();re(()=>$e(v,e(S)?"Initializing...":"Initialize GARM")),n(l,v)},$$slots:{default:!0}})}t(we),t(de),L(2),t(pe),t(ue),t(ae),re(()=>{ze(ce,"src",`${q??""}/assets/garm-light.svg`),ze(Ce,"src",`${q??""}/assets/garm-dark.svg`),F(O,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e($),e(g),h(()=>!e($)&&e(g).length>0?"border-red-300 dark:border-red-600":"")??""}`),F(J,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(k),e(p),h(()=>!e(k)&&e(p).length>0?"border-red-300 dark:border-red-600":"")??""}`),F(K,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(U),e(x),h(()=>!e(U)&&e(x).length>0?"border-red-300 dark:border-red-600":"")??""}`),F(Q,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(_),e(c),h(()=>!e(_)&&e(c).length>0?"border-red-300 dark:border-red-600":"")??""}`),F(X,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(w),e(f),h(()=>!e(w)&&e(f).length>0?"border-red-300 dark:border-red-600":"")??""}`)}),P(O,()=>e(g),r=>s(g,r)),P(J,()=>e(p),r=>s(p,r)),P(K,()=>e(x),r=>s(x,r)),P(Q,()=>e(c),r=>s(c,r)),P(X,()=>e(f),r=>s(f,r)),ar("submit",de,mr(qe)),n(Me,ae),tr(),Pe()}export{Sr as component}; diff --git a/webapp/assets/_app/immutable/nodes/7.CaVS6POQ.js b/webapp/assets/_app/immutable/nodes/7.CaVS6POQ.js new file mode 100644 index 00000000..93f6a61b --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/7.CaVS6POQ.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Xe}from"../chunks/B3Pzt0F_.js";import{p as Ye,o as Ze,l as R,a as er,f as m,h as rr,t as re,g as e,e as ar,c as n,d as tr,m as i,$ as dr,j as a,k as d,s,r as t,z as L,u as h,C as lr,b as sr,D as or,v as $e,q as ir}from"../chunks/D8EpLgQ1.js";import{i as u,s as nr,a as vr}from"../chunks/5WA7h8uK.js";import{B as Ue,r as A,s as N,c as ze}from"../chunks/CiE1LlKV.js";import{b as P}from"../chunks/C6k1Q4We.js";import{p as mr}from"../chunks/D4Caz1gY.js";import{g as W}from"../chunks/DXCC0cSN.js";import{b as q}from"../chunks/CRhkqW2i.js";import{a as cr,b as ur}from"../chunks/duD3WMbl.js";import{t as pr}from"../chunks/BEkVdVE1.js";import{e as br}from"../chunks/BZiHL9L3.js";var gr=m('

                Username is required

                '),fr=m('

                Please enter a valid email address

                '),xr=m('

                Full name is required

                '),hr=m('

                Password must be at least 8 characters long

                '),yr=m('

                Passwords do not match

                '),kr=lr(' Advanced Configuration (Optional)',1),_r=m('

                URL where runners can fetch metadata and setup information.

                URL where runners send status updates and lifecycle events.

                URL where GitHub/Gitea will send webhook events for job notifications.

                '),wr=m("
              • Enter a username
              • "),$r=m("
              • Enter a valid email address
              • "),Ur=m("
              • Enter your full name
              • "),zr=m("
              • Enter a password with at least 8 characters
              • "),Mr=m("
              • Confirm your password
              • "),Rr=m('

                Please complete all required fields

                '),Ar=m('

                '),Pr=m('
                GARM

                Welcome to GARM

                Complete the first-run setup to get started

                First-Run Initialization

                GARM needs to be initialized before first use. This will create the admin user and generate a unique controller ID for this installation.

                This will create the admin user, generate a unique controller ID, and configure the required URLs for your GARM installation.
                Make sure to remember these credentials as they cannot be recovered.

                ');function Dr(Me,Re){Ye(Re,!1);const[Ae,Pe]=nr(),C=()=>vr(cr,"$authStore",Ae),k=i(),_=i(),w=i(),$=i(),U=i(),S=i();let g=i("admin"),p=i("admin@garm.local"),c=i(""),f=i(""),x=i("Administrator"),j=i(!1),H=i(""),T=i(!1),I=i(""),E=i(""),V=i("");async function qe(){if(e(S))try{s(j,!0),s(H,""),await ur.initialize(e(g).trim(),e(p).trim(),e(c),e(x).trim(),{callbackUrl:e(I).trim()||void 0,metadataUrl:e(E).trim()||void 0,webhookUrl:e(V).trim()||void 0}),pr.success("GARM Initialized","GARM has been successfully initialized. Welcome!"),W(`${q}/`)}catch(r){s(H,br(r))}finally{s(j,!1)}}Ze(()=>{if(C().isAuthenticated){W(`${q}/`);return}!C().needsInitialization&&!C().loading&&W(`${q}/login`)}),R(()=>(e(I),e(E),e(V)),()=>{if(typeof window<"u"){const r=window.location.origin;e(I)||s(I,`${r}/api/v1/callbacks`),e(E)||s(E,`${r}/api/v1/metadata`),e(V)||s(V,`${r}/webhooks`)}}),R(()=>e(p),()=>{s(k,e(p).trim()!==""&&e(p).includes("@"))}),R(()=>e(c),()=>{s(_,e(c).length>=8)}),R(()=>(e(f),e(c)),()=>{s(w,e(f).length>0&&e(c)===e(f))}),R(()=>e(g),()=>{s($,e(g).trim()!=="")}),R(()=>e(x),()=>{s(U,e(x).trim()!=="")}),R(()=>(e($),e(k),e(U),e(_),e(w)),()=>{s(S,e($)&&e(k)&&e(U)&&e(_)&&e(w))}),R(()=>(C(),q),()=>{C().isAuthenticated?W(`${q}/`):!C().needsInitialization&&!C().loading&&W(`${q}/login`)}),er(),Xe();var ae=Pr();rr(r=>{dr.title="Initialize GARM - First Run Setup"});var te=a(ae),me=a(te),ce=a(me),Ce=d(ce,2);t(me),L(4),t(te);var ue=d(te,2),pe=d(a(ue),2),de=a(pe),le=a(de),be=d(a(le),2),O=a(be);A(O);var Ge=d(O,2);{var Le=r=>{var l=gr();n(r,l)};u(Ge,r=>{e($),e(g),h(()=>!e($)&&e(g).length>0)&&r(Le)})}t(be),t(le);var se=d(le,2),ge=d(a(se),2),J=a(ge);A(J);var Ie=d(J,2);{var Ee=r=>{var l=fr();n(r,l)};u(Ie,r=>{e(k),e(p),h(()=>!e(k)&&e(p).length>0)&&r(Ee)})}t(ge),t(se);var oe=d(se,2),fe=d(a(oe),2),K=a(fe);A(K);var Ve=d(K,2);{var Be=r=>{var l=xr();n(r,l)};u(Ve,r=>{e(U),e(x),h(()=>!e(U)&&e(x).length>0)&&r(Be)})}t(fe),t(oe);var ie=d(oe,2),xe=d(a(ie),2),Q=a(xe);A(Q);var Ne=d(Q,2);{var Se=r=>{var l=hr();n(r,l)};u(Ne,r=>{e(_),e(c),h(()=>!e(_)&&e(c).length>0)&&r(Se)})}t(xe),t(ie);var ne=d(ie,2),he=d(a(ne),2),X=a(he);A(X);var je=d(X,2);{var Fe=r=>{var l=yr();n(r,l)};u(je,r=>{e(w),e(f),h(()=>!e(w)&&e(f).length>0)&&r(Fe)})}t(he),t(ne);var ve=d(ne,2),ye=a(ve);Ue(ye,{type:"button",variant:"ghost",size:"sm",$$events:{click:()=>s(T,!e(T))},children:(r,l)=>{var b=kr(),v=sr(b);L(),re(()=>N(v,0,`w-4 h-4 mr-2 transition-transform ${e(T)?"rotate-90":""}`)),n(r,b)},$$slots:{default:!0}});var De=d(ye,2);{var We=r=>{var l=_r(),b=a(l),v=a(b),z=d(a(v),2),G=a(z);A(G),L(2),t(z),t(v);var B=d(v,2),Y=d(a(B),2),F=a(Y);A(F),L(2),t(Y),t(B);var Z=d(B,2),D=d(a(Z),2),ee=a(D);A(ee),L(2),t(D),t(Z),t(b),t(l),P(G,()=>e(E),M=>s(E,M)),P(F,()=>e(I),M=>s(I,M)),P(ee,()=>e(V),M=>s(V,M)),n(r,l)};u(De,r=>{e(T)&&r(We)})}t(ve);var ke=d(ve,2);{var He=r=>{var l=Rr(),b=a(l),v=d(a(b),2),z=d(a(v),2),G=a(z),B=a(G);{var Y=o=>{var y=wr();n(o,y)};u(B,o=>{e($)||o(Y)})}var F=d(B,2);{var Z=o=>{var y=$r();n(o,y)};u(F,o=>{e(k)||o(Z)})}var D=d(F,2);{var ee=o=>{var y=Ur();n(o,y)};u(D,o=>{e(U)||o(ee)})}var M=d(D,2);{var Je=o=>{var y=zr();n(o,y)};u(M,o=>{e(_)||o(Je)})}var Ke=d(M,2);{var Qe=o=>{var y=Mr();n(o,y)};u(Ke,o=>{e(w)||o(Qe)})}t(G),t(z),t(v),t(b),t(l),n(r,l)};u(ke,r=>{e(S),e(g),e(p),e(x),e(c),e(f),h(()=>!e(S)&&(e(g).length>0||e(p).length>0||e(x).length>0||e(c).length>0||e(f).length>0))&&r(He)})}var _e=d(ke,2);{var Te=r=>{var l=Ar(),b=a(l),v=d(a(b),2),z=a(v),G=a(z,!0);t(z),t(v),t(b),t(l),re(()=>$e(G,e(H))),n(r,l)};u(_e,r=>{e(H)&&r(Te)})}var we=d(_e,2),Oe=a(we);{let 
r=ir(()=>!e(S)||e(j));Ue(Oe,{type:"submit",variant:"primary",size:"lg",fullWidth:!0,get loading(){return e(j)},get disabled(){return e(r)},children:(l,b)=>{L();var v=or();re(()=>$e(v,e(j)?"Initializing...":"Initialize GARM")),n(l,v)},$$slots:{default:!0}})}t(we),t(de),L(2),t(pe),t(ue),t(ae),re(()=>{ze(ce,"src",`${q??""}/assets/garm-light.svg`),ze(Ce,"src",`${q??""}/assets/garm-dark.svg`),N(O,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e($),e(g),h(()=>!e($)&&e(g).length>0?"border-red-300 dark:border-red-600":"")??""}`),N(J,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(k),e(p),h(()=>!e(k)&&e(p).length>0?"border-red-300 dark:border-red-600":"")??""}`),N(K,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(U),e(x),h(()=>!e(U)&&e(x).length>0?"border-red-300 dark:border-red-600":"")??""}`),N(Q,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(_),e(c),h(()=>!e(_)&&e(c).length>0?"border-red-300 dark:border-red-600":"")??""}`),N(X,1,`appearance-none block w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 dark:placeholder-gray-500 focus:outline-none focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:text-white sm:text-sm ${e(w),e(f),h(()=>!e(w)&&e(f).length>0?"border-red-300 dark:border-red-600":"")??""}`)}),P(O,()=>e(g),r=>s(g,r)),P(J,()=>e(p),r=>s(p,r)),P(K,()=>e(x),r=>s(x,r)),P(Q,()=>e(c),r=>s(c,r)),P(X,()=>e(f),r=>s(f,r)),ar("submit",de,mr(qe)),n(Me,ae),tr(),Pe()}export{Dr as component}; diff --git a/webapp/assets/_app/immutable/nodes/8.BiZNKYxk.js b/webapp/assets/_app/immutable/nodes/8.BiZNKYxk.js deleted file mode 100644 index 38d013cb..00000000 --- a/webapp/assets/_app/immutable/nodes/8.BiZNKYxk.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as z}from"../chunks/B3Pzt0F_.js";import{p as J,f as C,c as x,d as K,j as y,n as _,u as f,r as b,t as A,v as L,B as ce,b as O,o as de,A as ue,l as w,a as me,h as pe,g as e,m as d,$ as fe,q as ge,k as S,s}from"../chunks/D8EpLgQ1.js";import{p as ve,i as D}from"../chunks/5WA7h8uK.js";import{c as T,g as B}from"../chunks/CiE1LlKV.js";import{D as he}from"../chunks/KQ2xQpA3.js";import{P as _e}from"../chunks/CO4LUyTP.js";import{w as ye}from"../chunks/u94nIB4-.js";import{t as be}from"../chunks/BEkVdVE1.js";import{D as xe,G as Ce,a as ke}from"../chunks/C9DJVOi1.js";import{E as Pe}from"../chunks/B7ITzBt8.js";import{S as H}from"../chunks/BE4wujub.js";import{b as W}from"../chunks/CoIRRsD9.js";var Ie=C(' '),we=C(' '),Se=C('-'),Te=C('
                ');function De($,P){J(P,!1);let a=ve(P,"item",8);z();var p=Te(),I=y(p);{var u=r=>{var n=Ie(),h=y(n);b(n),A(()=>{T(n,"href",`${W??""}/pools/${_(a()),f(()=>a().pool_id)??""}`),T(n,"title",`Pool: ${_(a()),f(()=>a().pool_id)??""}`),L(h,`Pool: ${_(a()),f(()=>a().pool_id)??""}`)}),x(r,n)},k=r=>{var n=ce(),h=O(n);{var o=l=>{var i=we(),m=y(i);b(i),A(()=>{T(i,"href",`${W??""}/scalesets/${_(a()),f(()=>a().scale_set_id)??""}`),T(i,"title",`Scale Set: ${_(a()),f(()=>a().scale_set_id)??""}`),L(m,`Scale Set: ${_(a()),f(()=>a().scale_set_id)??""}`)}),x(l,i)},g=l=>{var i=Se();x(l,i)};D(h,l=>{_(a()),f(()=>a()?.scale_set_id)?l(o):l(g,!1)},!0)}x(r,n)};D(I,r=>{_(a()),f(()=>a()?.pool_id)?r(u):r(k,!1)})}b(p),x($,p),K()}var $e=C('

                Error

                '),Ee=C('
                ',1);function Je($,P){J(P,!1);const a=d(),p=d(),I=d();let u=d([]),k=d(!0),r=d(""),n=d(""),h=null,o=d(1),g=d(25),l=d(""),i=d(!1),m=d(null);async function M(){try{s(k,!0),s(r,""),s(u,await B.listInstances())}catch(t){s(r,t instanceof Error?t.message:"Failed to load instances")}finally{s(k,!1)}}function F(t){s(m,t),s(i,!0)}async function Q(){if(e(m))try{await B.deleteInstance(e(m).name),be.success("Instance Deleted",`Instance ${e(m).name} has been deleted successfully.`),s(i,!1),s(m,null)}catch(t){s(r,t instanceof Error?t.message:"Failed to delete instance")}}const U=[{key:"name",title:"Name",cellComponent:Pe,cellProps:{entityType:"instance",showId:!0}},{key:"pool_scale_set",title:"Pool/Scale Set",flexible:!0,cellComponent:De},{key:"created",title:"Created",cellComponent:Ce,cellProps:{field:"created_at",type:"date"}},{key:"status",title:"Status",cellComponent:H,cellProps:{statusType:"instance",statusField:"status"}},{key:"runner_status",title:"Runner Status",cellComponent:H,cellProps:{statusType:"instance",statusField:"runner_status"}},{key:"actions",title:"Actions",align:"right",cellComponent:ke,cellProps:{actions:[{type:"delete",title:"Delete",ariaLabel:"Delete instance",action:"delete"}]}}],V={entityType:"instance",primaryText:{field:"name",isClickable:!0,href:"/instances/{name}"},secondaryText:{field:"provider_id"},badges:[{type:"status",field:"status"},{type:"status",field:"runner_status"}],actions:[{type:"delete",handler:t=>F(t)}]};function X(t){s(l,t.detail.term),s(o,1)}function Y(t){s(o,t.detail.page)}function Z(t){s(g,t.detail.perPage),s(o,1)}async function ee(){try{await M()}catch(t){console.error("Retry failed:",t)}}function te(t){}function ae(t){F(t.detail.item)}function se(t){if(t.operation==="create"){const c=t.payload;s(u,[...e(u),c])}else if(t.operation==="update"){const c=t.payload;s(u,e(u).map(v=>v.name===c.name?c:v))}else if(t.operation==="delete"){const c=t.payload.name||t.payload;s(u,e(u).filter(v=>v.name!==c))}}de(()=>{M(),h=ye.subscribeToEntity("instance",["create","update","delete"],se)}),ue(()=>{h&&(h(),h=null)}),w(()=>(e(u),e(l),e(n)),()=>{s(a,e(u).filter(t=>{const c=e(l)===""||t.name?.toLowerCase().includes(e(l).toLowerCase())||t.provider_id?.toLowerCase().includes(e(l).toLowerCase()),v=e(n)===""||t.status===e(n)||t.runner_status===e(n);return c&&v}))}),w(()=>(e(a),e(g)),()=>{s(p,Math.ceil(e(a).length/e(g)))}),w(()=>(e(o),e(p)),()=>{e(o)>e(p)&&e(p)>0&&s(o,e(p))}),w(()=>(e(a),e(o),e(g)),()=>{s(I,e(a).slice((e(o)-1)*e(g),e(o)*e(g)))}),me(),z();var R=Ee();pe(t=>{fe.title="Instances - GARM"});var E=O(R),N=y(E);_e(N,{title:"Runner Instances",description:"Monitor your running instances",showAction:!1});var G=S(N,2);{var re=t=>{var c=$e(),v=y(c),q=y(v),j=S(y(q),2),ie=y(j,!0);b(j),b(q),b(v),b(c),A(()=>L(ie,e(r))),x(t,c)};D(G,t=>{e(r)&&t(re)})}var ne=S(G,2);{let t=ge(()=>!!e(r));xe(ne,{get columns(){return U},get data(){return e(I)},get loading(){return e(k)},get error(){return e(r)},get searchTerm(){return e(l)},searchPlaceholder:"Search instances...",get currentPage(){return e(o)},get perPage(){return e(g)},get totalPages(){return e(p)},get totalItems(){return e(a),f(()=>e(a).length)},itemName:"instances",emptyIconType:"cog",get showRetry(){return e(t)},get mobileCardConfig(){return V},$$events:{search:X,pageChange:Y,perPageChange:Z,retry:ee,edit:te,delete:ae}})}b(E);var le=S(E,2);{var oe=t=>{he(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return e(m),f(()=>e(m).name)},$$events:{close:()=>{s(i,!1),s(m,null)},confirm:Q}})};D(le,t=>{e(i)&&e(m)&&t(oe)})}x($,R),K()}export{Je as component}; diff --git a/webapp/assets/_app/immutable/nodes/8.W6llQu20.js b/webapp/assets/_app/immutable/nodes/8.W6llQu20.js new file mode 100644 index 00000000..2b4194a5 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/8.W6llQu20.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as z}from"../chunks/B3Pzt0F_.js";import{p as J,f as C,c as x,d as K,j as y,n as _,u as f,r as b,t as E,v as L,B as ce,b as O,o as de,A as ue,l as S,a as me,h as pe,g as e,m as u,$ as fe,q as ge,k as w,s}from"../chunks/D8EpLgQ1.js";import{p as ve,i as D}from"../chunks/5WA7h8uK.js";import{c as T,g as B}from"../chunks/CiE1LlKV.js";import{D as he}from"../chunks/KQ2xQpA3.js";import{P as _e}from"../chunks/CO4LUyTP.js";import{w as ye}from"../chunks/u94nIB4-.js";import{t as be}from"../chunks/BEkVdVE1.js";import{D as xe,G as Ce,a as Pe}from"../chunks/BrNfsPe8.js";import{e as ke}from"../chunks/BZiHL9L3.js";import{E as Ie}from"../chunks/D4PaGKsV.js";import{S as H}from"../chunks/MCv1Wq2q.js";import{b as W}from"../chunks/CRhkqW2i.js";var Se=C(' '),we=C(' '),Te=C('-'),De=C('
                ');function $e($,k){J(k,!1);let a=ve(k,"item",8);z();var m=De(),I=y(m);{var i=r=>{var p=Se(),v=y(p);b(p),E(()=>{T(p,"href",`${W??""}/pools/${_(a()),f(()=>a().pool_id)??""}`),T(p,"title",`Pool: ${_(a()),f(()=>a().pool_id)??""}`),L(v,`Pool: ${_(a()),f(()=>a().pool_id)??""}`)}),x(r,p)},P=r=>{var p=ce(),v=O(p);{var l=n=>{var o=we(),c=y(o);b(o),E(()=>{T(o,"href",`${W??""}/scalesets/${_(a()),f(()=>a().scale_set_id)??""}`),T(o,"title",`Scale Set: ${_(a()),f(()=>a().scale_set_id)??""}`),L(c,`Scale Set: ${_(a()),f(()=>a().scale_set_id)??""}`)}),x(n,o)},g=n=>{var o=Te();x(n,o)};D(v,n=>{_(a()),f(()=>a()?.scale_set_id)?n(l):n(g,!1)},!0)}x(r,p)};D(I,r=>{_(a()),f(()=>a()?.pool_id)?r(i):r(P,!1)})}b(m),x($,m),K()}var Ae=C('

                Error

                '),Ee=C('
                ',1);function Oe($,k){J(k,!1);const a=u(),m=u(),I=u();let i=u([]),P=u(!0),r=u(""),p="",v=null,l=u(1),g=u(25),n=u(""),o=u(!1),c=u(null);async function M(){try{s(P,!0),s(r,""),s(i,await B.listInstances())}catch(t){s(r,t instanceof Error?t.message:"Failed to load instances")}finally{s(P,!1)}}function R(t){s(c,t),s(o,!0)}async function Q(){if(e(c))try{await B.deleteInstance(e(c).name),be.success("Instance Deleted",`Instance ${e(c).name} has been deleted successfully.`)}catch(t){s(r,ke(t))}finally{s(o,!1),s(c,null)}}const U=[{key:"name",title:"Name",cellComponent:Ie,cellProps:{entityType:"instance",showId:!0}},{key:"pool_scale_set",title:"Pool/Scale Set",flexible:!0,cellComponent:$e},{key:"created",title:"Created",cellComponent:Ce,cellProps:{field:"created_at",type:"date"}},{key:"status",title:"Status",cellComponent:H,cellProps:{statusType:"instance",statusField:"status"}},{key:"runner_status",title:"Runner Status",cellComponent:H,cellProps:{statusType:"instance",statusField:"runner_status"}},{key:"actions",title:"Actions",align:"right",cellComponent:Pe,cellProps:{actions:[{type:"delete",title:"Delete",ariaLabel:"Delete instance",action:"delete"}]}}],V={entityType:"instance",primaryText:{field:"name",isClickable:!0,href:"/instances/{name}"},secondaryText:{field:"provider_id"},badges:[{type:"status",field:"status"},{type:"status",field:"runner_status"}],actions:[{type:"delete",handler:t=>R(t)}]};function X(t){s(n,t.detail.term),s(l,1)}function Y(t){s(l,t.detail.page)}function Z(t){s(g,t.detail.perPage),s(l,1)}async function ee(){try{await M()}catch(t){console.error("Retry failed:",t)}}function te(t){}function ae(t){R(t.detail.item)}function se(t){if(t.operation==="create"){const d=t.payload;s(i,[...e(i),d])}else if(t.operation==="update"){const d=t.payload;s(i,e(i).map(h=>h.name===d.name?d:h))}else if(t.operation==="delete"){const d=t.payload.name||t.payload;s(i,e(i).filter(h=>h.name!==d))}}de(()=>{M(),v=ye.subscribeToEntity("instance",["create","update","delete"],se)}),ue(()=>{v&&(v(),v=null)}),S(()=>(e(i),e(n)),()=>{s(a,e(i).filter(t=>(e(n)===""||t.name?.toLowerCase().includes(e(n).toLowerCase())||t.provider_id?.toLowerCase().includes(e(n).toLowerCase()))&&p===""))}),S(()=>(e(a),e(g)),()=>{s(m,Math.ceil(e(a).length/e(g)))}),S(()=>(e(l),e(m)),()=>{e(l)>e(m)&&e(m)>0&&s(l,e(m))}),S(()=>(e(a),e(l),e(g)),()=>{s(I,e(a).slice((e(l)-1)*e(g),e(l)*e(g)))}),me(),z();var F=Ee();pe(t=>{fe.title="Instances - GARM"});var A=O(F),N=y(A);_e(N,{title:"Runner Instances",description:"Monitor your running instances",showAction:!1});var G=w(N,2);{var re=t=>{var d=Ae(),h=y(d),q=y(h),j=w(y(q),2),ie=y(j,!0);b(j),b(q),b(h),b(d),E(()=>L(ie,e(r))),x(t,d)};D(G,t=>{e(r)&&t(re)})}var ne=w(G,2);{let t=ge(()=>!!e(r));xe(ne,{get columns(){return U},get data(){return e(I)},get loading(){return e(P)},get error(){return e(r)},get searchTerm(){return e(n)},searchPlaceholder:"Search instances...",get currentPage(){return e(l)},get perPage(){return e(g)},get totalPages(){return e(m)},get totalItems(){return e(a),f(()=>e(a).length)},itemName:"instances",emptyIconType:"cog",get showRetry(){return e(t)},get mobileCardConfig(){return V},$$events:{search:X,pageChange:Y,perPageChange:Z,retry:ee,edit:te,delete:ae}})}b(A);var le=w(A,2);{var oe=t=>{he(t,{title:"Delete Instance",message:"Are you sure you want to delete this instance? 
This action cannot be undone.",get itemName(){return e(c),f(()=>e(c).name)},$$events:{close:()=>{s(o,!1),s(c,null)},confirm:Q}})};D(le,t=>{e(o)&&e(c)&&t(oe)})}x($,F),K()}export{Oe as component}; diff --git a/webapp/assets/_app/immutable/nodes/9.DfrxaqP7.js b/webapp/assets/_app/immutable/nodes/9.DfrxaqP7.js new file mode 100644 index 00000000..6f22cc95 --- /dev/null +++ b/webapp/assets/_app/immutable/nodes/9.DfrxaqP7.js @@ -0,0 +1 @@ +import"../chunks/DsnmJJEf.js";import{i as Te}from"../chunks/B3Pzt0F_.js";import{p as Ue,o as Le,A as Oe,l as Pe,a as Ve,f as g,h as ze,b as O,t as f,c as v,d as Re,g as t,m as M,s as p,u as i,$ as qe,j as s,r as a,k as r,v as c,B as Et,n as b,e as Fe,q as Tt,D as Ut}from"../chunks/D8EpLgQ1.js";import{i as _,s as Ge,a as He}from"../chunks/5WA7h8uK.js";import{w as We,e as Lt,i as Ot}from"../chunks/u94nIB4-.js";import{c as nt,g as Pt,s as Vt}from"../chunks/CiE1LlKV.js";import{b as Je}from"../chunks/BAg1iRPq.js";import{p as Ke}from"../chunks/BE8f1Riw.js";import{g as zt}from"../chunks/DXCC0cSN.js";import{b as $}from"../chunks/CRhkqW2i.js";import{D as Qe}from"../chunks/KQ2xQpA3.js";import{g as P,f as V}from"../chunks/ow_oMtSd.js";import{s as Rt,b as A,B as qt,d as Ft}from"../chunks/BGVHQGl-.js";import{e as Xe}from"../chunks/BZiHL9L3.js";var Ye=g('

                Error

                '),Ze=g('

                Loading instance details...

                '),ta=g(' '),ea=g(' '),aa=g('-'),sa=g('
                Updated At:
                '),ra=g('
                '),da=g('
                Network Addresses:
                '),ia=g('
                Network Addresses:
                No addresses available
                '),na=g('
                OS Type:
                '),oa=g('
                OS Name:
                '),la=g('
                OS Version:
                '),va=g('
                OS Architecture:
                '),xa=g('

                '),ca=g('

                Status Messages

                '),ma=g('

                Status Messages

                No status messages available

                '),ga=g('

                Instance Information

                ID:
                Name:
                Provider ID:
                Provider:
                Pool/Scale Set:
                Agent ID:
                Created At:

                Status & Network

                Instance Status:
                Runner Status:
                ',1),_a=g('
                Instance not found.
                '),ua=g(' ',1);function $a(Gt,Ht){Ue(Ht,!1);const[Wt,Jt]=Ge(),ot=()=>He(Ke,"$page",Wt),z=M();let e=M(null),R=M(!0),B=M(""),N=M(!1),C=null,E=M();async function Kt(){if(t(z))try{p(R,!0),p(B,""),p(e,await Pt.getInstance(t(z)))}catch(o){p(B,o instanceof Error?o.message:"Failed to load instance")}finally{p(R,!1)}}async function Qt(){if(t(e)){try{await Pt.deleteInstance(t(e).name),zt(`${$}/instances`)}catch(o){p(B,Xe(o))}p(N,!1)}}function Xt(o){if(t(e))if(o.operation==="update"&&o.payload.id===t(e).id){const h=t(e).status_messages?.length||0,j={...t(e),...o.payload},S=j.status_messages?.length||0;p(e,j),S>h&&setTimeout(()=>{Rt(t(E))},100)}else o.operation==="delete"&&(o.payload.id||o.payload)===t(e).id&&zt(`${$}/instances`)}Le(()=>{Kt().then(()=>{t(e)?.status_messages?.length&&setTimeout(()=>{Rt(t(E))},100)}),C=We.subscribeToEntity("instance",["update","delete"],Xt)}),Oe(()=>{C&&(C(),C=null)}),Pe(()=>ot(),()=>{p(z,decodeURIComponent(ot().params.id||""))}),Ve(),Te();var lt=ua();ze(o=>{f(()=>qe.title=`${t(e),i(()=>t(e)?`${t(e).name} - Instance Details`:"Instance Details")??""} - GARM`)});var q=O(lt),F=s(q),vt=s(F),G=s(vt),Yt=s(G);a(G);var xt=r(G,2),ct=s(xt),mt=r(s(ct),2),Zt=s(mt,!0);a(mt),a(ct),a(xt),a(vt),a(F);var gt=r(F,2);{var te=o=>{var h=Ye(),j=s(h),S=s(j),T=r(s(S),2),I=s(T,!0);a(T),a(S),a(j),a(h),f(()=>c(I,t(B))),v(o,h)};_(gt,o=>{t(B)&&o(te)})}var ee=r(gt,2);{var ae=o=>{var h=Ze();v(o,h)},se=o=>{var h=Et(),j=O(h);{var S=I=>{var U=ga(),H=O(U),W=s(H),J=s(W),_t=r(s(J),2),ie=s(_t);a(_t),a(J);var ut=r(J,2),K=s(ut),ft=r(s(K),2),ne=s(ft,!0);a(ft),a(K);var Q=r(K,2),yt=r(s(Q),2),oe=s(yt,!0);a(yt),a(Q);var X=r(Q,2),pt=r(s(X),2),le=s(pt,!0);a(pt),a(X);var Y=r(X,2),ht=r(s(Y),2),ve=s(ht,!0);a(ht),a(Y);var Z=r(Y,2),kt=r(s(Z),2),xe=s(kt);{var ce=d=>{var n=ta(),l=s(n,!0);a(n),f(()=>{nt(n,"href",`${$??""}/pools/${t(e),i(()=>t(e).pool_id)??""}`),c(l,(t(e),i(()=>t(e).pool_id)))}),v(d,n)},me=d=>{var n=Et(),l=O(n);{var m=u=>{var y=ea(),D=s(y,!0);a(y),f(()=>{nt(y,"href",`${$??""}/scalesets/${t(e),i(()=>t(e).scale_set_id)??""}`),c(D,(t(e),i(()=>t(e).scale_set_id)))}),v(u,y)},x=u=>{var y=aa();v(u,y)};_(l,u=>{t(e),i(()=>t(e).scale_set_id)?u(m):u(x,!1)},!0)}v(d,n)};_(xe,d=>{t(e),i(()=>t(e).pool_id)?d(ce):d(me,!1)})}a(kt),a(Z);var tt=r(Z,2),bt=r(s(tt),2),ge=s(bt,!0);a(bt),a(tt);var et=r(tt,2),wt=r(s(et),2),_e=s(wt,!0);a(wt),a(et);var ue=r(et,2);{var fe=d=>{var n=sa(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(x=>c(m,x),[()=>(b(A),t(e),i(()=>A(t(e).updated_at)))]),v(d,n)};_(ue,d=>{t(e),i(()=>t(e).updated_at&&t(e).updated_at!==t(e).created_at)&&d(fe)})}a(ut),a(W);var It=r(W,2),jt=r(s(It),2),at=s(jt),Dt=r(s(at),2),st=s(Dt),ye=s(st,!0);a(st),a(Dt),a(at);var rt=r(at,2),St=r(s(rt),2),dt=s(St),pe=s(dt,!0);a(dt),a(St),a(rt);var Mt=r(rt,2);{var he=d=>{var n=da(),l=r(s(n),2);Lt(l,5,()=>(t(e),i(()=>t(e).addresses)),Ot,(m,x)=>{var u=ra(),y=s(u),D=s(y,!0);a(y);var it=r(y,2);{let L=Tt(()=>(t(x),i(()=>t(x).type||"Unknown")));qt(it,{variant:"info",get text(){return t(L)}})}a(u),f(()=>c(D,(t(x),i(()=>t(x).address)))),v(m,u)}),a(l),a(n),v(d,n)},ke=d=>{var n=ia();v(d,n)};_(Mt,d=>{t(e),i(()=>t(e).addresses&&t(e).addresses.length>0)?d(he):d(ke,!1)})}var At=r(Mt,2);{var be=d=>{var n=na(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_type)))),v(d,n)};_(At,d=>{t(e),i(()=>t(e).os_type)&&d(be)})}var Bt=r(At,2);{var we=d=>{var n=oa(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_name)))),v(d,n)};_(Bt,d=>{t(e),i(()=>t(e).os_name)&&d(we)})}var $t=r(Bt,2);{var Ie=d=>{var 
n=la(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_version)))),v(d,n)};_($t,d=>{t(e),i(()=>t(e).os_version)&&d(Ie)})}var je=r($t,2);{var De=d=>{var n=va(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_arch)))),v(d,n)};_(je,d=>{t(e),i(()=>t(e).os_arch)&&d(De)})}a(jt),a(It),a(H);var Se=r(H,2);{var Me=d=>{var n=ca(),l=r(s(n),2);Lt(l,5,()=>(t(e),i(()=>t(e).status_messages)),Ot,(m,x)=>{var u=xa(),y=s(u),D=s(y),it=s(D,!0);a(D);var L=r(D,2),Nt=s(L);{var Be=k=>{const w=Tt(()=>(b(Ft),t(x),i(()=>Ft(t(x).event_level))));qt(k,{get variant(){return b(t(w)),i(()=>t(w).variant)},get text(){return b(t(w)),i(()=>t(w).text)}})};_(Nt,k=>{t(x),i(()=>t(x).event_level)&&k(Be)})}var Ct=r(Nt,2),$e=s(Ct);{var Ne=k=>{var w=Ut();f(Ee=>c(w,Ee),[()=>(b(A),t(x),i(()=>A(t(x).created_at)))]),v(k,w)},Ce=k=>{var w=Ut("Unknown date");v(k,w)};_($e,k=>{t(x),i(()=>t(x).created_at)?k(Ne):k(Ce,!1)})}a(Ct),a(L),a(y),a(u),f(()=>c(it,(t(x),i(()=>t(x).message)))),v(m,u)}),a(l),Je(l,m=>p(E,m),()=>t(E)),a(n),v(d,n)},Ae=d=>{var n=ma();v(d,n)};_(Se,d=>{t(e),i(()=>t(e).status_messages&&t(e).status_messages.length>0)?d(Me):d(Ae,!1)})}f((d,n,l,m,x)=>{c(ne,(t(e),i(()=>t(e).id))),c(oe,(t(e),i(()=>t(e).name))),c(le,(t(e),i(()=>t(e).provider_id))),c(ve,(t(e),i(()=>t(e).provider_name||"Unknown"))),c(ge,(t(e),i(()=>t(e).agent_id||"Not assigned"))),c(_e,d),Vt(st,1,`inline-flex px-2 py-1 text-xs font-semibold rounded-full ring-1 ring-inset ${n??""}`),c(ye,l),Vt(dt,1,`inline-flex px-2 py-1 text-xs font-semibold rounded-full ring-1 ring-inset ${m??""}`),c(pe,x)},[()=>(b(A),t(e),i(()=>A(t(e).created_at))),()=>(b(P),t(e),i(()=>P(t(e).status||"unknown"))),()=>(b(V),t(e),i(()=>V(t(e).status||"unknown"))),()=>(b(P),t(e),i(()=>P(t(e).runner_status||"unknown"))),()=>(b(V),t(e),i(()=>V(t(e).runner_status||"unknown")))]),Fe("click",ie,()=>p(N,!0)),v(I,U)},T=I=>{var U=_a();v(I,U)};_(j,I=>{t(e)?I(S):I(T,!1)},!0)}v(o,h)};_(ee,o=>{t(R)?o(ae):o(se,!1)})}a(q);var re=r(q,2);{var de=o=>{Qe(o,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return t(e),i(()=>t(e).name)},$$events:{close:()=>p(N,!1),confirm:Qt}})};_(re,o=>{t(N)&&t(e)&&o(de)})}f(()=>{nt(Yt,"href",`${$}/instances`),c(Zt,(t(e),i(()=>t(e)?t(e).name:"Instance Details")))}),v(Gt,lt),Re(),Jt()}export{$a as component}; diff --git a/webapp/assets/_app/immutable/nodes/9.DpSfMRgo.js b/webapp/assets/_app/immutable/nodes/9.DpSfMRgo.js deleted file mode 100644 index 54eb6b8a..00000000 --- a/webapp/assets/_app/immutable/nodes/9.DpSfMRgo.js +++ /dev/null @@ -1 +0,0 @@ -import"../chunks/DsnmJJEf.js";import{i as Te}from"../chunks/B3Pzt0F_.js";import{p as Ue,o as Le,A as Oe,l as Ve,a as ze,f as g,h as Pe,b as O,t as f,c as v,d as Re,g as t,m as M,s as p,u as i,$ as qe,j as s,r as a,k as r,v as c,B as Et,n as b,e as Fe,q as Tt,D as Ut}from"../chunks/D8EpLgQ1.js";import{i as _,s as Ge,a as He}from"../chunks/5WA7h8uK.js";import{w as We,e as Lt,i as Ot}from"../chunks/u94nIB4-.js";import{c as nt,g as Vt,s as zt}from"../chunks/CiE1LlKV.js";import{b as Je}from"../chunks/BAg1iRPq.js";import{p as Ke}from"../chunks/C41YH50Q.js";import{g as Pt}from"../chunks/CTf6mQoE.js";import{b as A}from"../chunks/CoIRRsD9.js";import{D as Qe}from"../chunks/KQ2xQpA3.js";import{g as V,f as z}from"../chunks/ow_oMtSd.js";import{s as Rt,b as B,B as qt,d as Ft}from"../chunks/BGVHQGl-.js";var Xe=g('

                Error

                '),Ye=g('

                Loading instance details...

                '),Ze=g(' '),ta=g(' '),ea=g('-'),aa=g('
                Updated At:
                '),sa=g('
                '),ra=g('
                Network Addresses:
                '),da=g('
                Network Addresses:
                No addresses available
                '),ia=g('
                OS Type:
                '),na=g('
                OS Name:
                '),oa=g('
                OS Version:
                '),la=g('
                OS Architecture:
                '),va=g('

                '),xa=g('

                Status Messages

                '),ca=g('

                Status Messages

                No status messages available

                '),ma=g('

                Instance Information

                ID:
                Name:
                Provider ID:
                Provider:
                Pool/Scale Set:
                Agent ID:
                Created At:

                Status & Network

                Instance Status:
                Runner Status:
                ',1),ga=g('
                Instance not found.
                '),_a=g(' ',1);function Ba(Gt,Ht){Ue(Ht,!1);const[Wt,Jt]=Ge(),ot=()=>He(Ke,"$page",Wt),P=M();let e=M(null),R=M(!0),$=M(""),N=M(!1),C=null,E=M();async function Kt(){if(t(P))try{p(R,!0),p($,""),p(e,await Vt.getInstance(t(P)))}catch(o){p($,o instanceof Error?o.message:"Failed to load instance")}finally{p(R,!1)}}async function Qt(){if(t(e)){try{await Vt.deleteInstance(t(e).name),Pt(`${A}/instances`)}catch(o){p($,o instanceof Error?o.message:"Failed to delete instance")}p(N,!1)}}function Xt(o){if(t(e))if(o.operation==="update"&&o.payload.id===t(e).id){const h=t(e).status_messages?.length||0,j={...t(e),...o.payload},S=j.status_messages?.length||0;p(e,j),S>h&&setTimeout(()=>{Rt(t(E))},100)}else o.operation==="delete"&&(o.payload.id||o.payload)===t(e).id&&Pt(`${A}/instances`)}Le(()=>{Kt().then(()=>{t(e)?.status_messages?.length&&setTimeout(()=>{Rt(t(E))},100)}),C=We.subscribeToEntity("instance",["update","delete"],Xt)}),Oe(()=>{C&&(C(),C=null)}),Ve(()=>ot(),()=>{p(P,decodeURIComponent(ot().params.id||""))}),ze(),Te();var lt=_a();Pe(o=>{f(()=>qe.title=`${t(e),i(()=>t(e)?`${t(e).name} - Instance Details`:"Instance Details")??""} - GARM`)});var q=O(lt),F=s(q),vt=s(F),G=s(vt),Yt=s(G);a(G);var xt=r(G,2),ct=s(xt),mt=r(s(ct),2),Zt=s(mt,!0);a(mt),a(ct),a(xt),a(vt),a(F);var gt=r(F,2);{var te=o=>{var h=Xe(),j=s(h),S=s(j),T=r(s(S),2),I=s(T,!0);a(T),a(S),a(j),a(h),f(()=>c(I,t($))),v(o,h)};_(gt,o=>{t($)&&o(te)})}var ee=r(gt,2);{var ae=o=>{var h=Ye();v(o,h)},se=o=>{var h=Et(),j=O(h);{var S=I=>{var U=ma(),H=O(U),W=s(H),J=s(W),_t=r(s(J),2),ie=s(_t);a(_t),a(J);var ut=r(J,2),K=s(ut),ft=r(s(K),2),ne=s(ft,!0);a(ft),a(K);var Q=r(K,2),yt=r(s(Q),2),oe=s(yt,!0);a(yt),a(Q);var X=r(Q,2),pt=r(s(X),2),le=s(pt,!0);a(pt),a(X);var Y=r(X,2),ht=r(s(Y),2),ve=s(ht,!0);a(ht),a(Y);var Z=r(Y,2),kt=r(s(Z),2),xe=s(kt);{var ce=d=>{var n=Ze(),l=s(n,!0);a(n),f(()=>{nt(n,"href",`${A??""}/pools/${t(e),i(()=>t(e).pool_id)??""}`),c(l,(t(e),i(()=>t(e).pool_id)))}),v(d,n)},me=d=>{var n=Et(),l=O(n);{var m=u=>{var y=ta(),D=s(y,!0);a(y),f(()=>{nt(y,"href",`${A??""}/scalesets/${t(e),i(()=>t(e).scale_set_id)??""}`),c(D,(t(e),i(()=>t(e).scale_set_id)))}),v(u,y)},x=u=>{var y=ea();v(u,y)};_(l,u=>{t(e),i(()=>t(e).scale_set_id)?u(m):u(x,!1)},!0)}v(d,n)};_(xe,d=>{t(e),i(()=>t(e).pool_id)?d(ce):d(me,!1)})}a(kt),a(Z);var tt=r(Z,2),bt=r(s(tt),2),ge=s(bt,!0);a(bt),a(tt);var et=r(tt,2),wt=r(s(et),2),_e=s(wt,!0);a(wt),a(et);var ue=r(et,2);{var fe=d=>{var n=aa(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(x=>c(m,x),[()=>(b(B),t(e),i(()=>B(t(e).updated_at)))]),v(d,n)};_(ue,d=>{t(e),i(()=>t(e).updated_at&&t(e).updated_at!==t(e).created_at)&&d(fe)})}a(ut),a(W);var It=r(W,2),jt=r(s(It),2),at=s(jt),Dt=r(s(at),2),st=s(Dt),ye=s(st,!0);a(st),a(Dt),a(at);var rt=r(at,2),St=r(s(rt),2),dt=s(St),pe=s(dt,!0);a(dt),a(St),a(rt);var Mt=r(rt,2);{var he=d=>{var n=ra(),l=r(s(n),2);Lt(l,5,()=>(t(e),i(()=>t(e).addresses)),Ot,(m,x)=>{var u=sa(),y=s(u),D=s(y,!0);a(y);var it=r(y,2);{let L=Tt(()=>(t(x),i(()=>t(x).type||"Unknown")));qt(it,{variant:"info",get text(){return t(L)}})}a(u),f(()=>c(D,(t(x),i(()=>t(x).address)))),v(m,u)}),a(l),a(n),v(d,n)},ke=d=>{var n=da();v(d,n)};_(Mt,d=>{t(e),i(()=>t(e).addresses&&t(e).addresses.length>0)?d(he):d(ke,!1)})}var Bt=r(Mt,2);{var be=d=>{var n=ia(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_type)))),v(d,n)};_(Bt,d=>{t(e),i(()=>t(e).os_type)&&d(be)})}var $t=r(Bt,2);{var we=d=>{var n=na(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_name)))),v(d,n)};_($t,d=>{t(e),i(()=>t(e).os_name)&&d(we)})}var 
At=r($t,2);{var Ie=d=>{var n=oa(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_version)))),v(d,n)};_(At,d=>{t(e),i(()=>t(e).os_version)&&d(Ie)})}var je=r(At,2);{var De=d=>{var n=la(),l=r(s(n),2),m=s(l,!0);a(l),a(n),f(()=>c(m,(t(e),i(()=>t(e).os_arch)))),v(d,n)};_(je,d=>{t(e),i(()=>t(e).os_arch)&&d(De)})}a(jt),a(It),a(H);var Se=r(H,2);{var Me=d=>{var n=xa(),l=r(s(n),2);Lt(l,5,()=>(t(e),i(()=>t(e).status_messages)),Ot,(m,x)=>{var u=va(),y=s(u),D=s(y),it=s(D,!0);a(D);var L=r(D,2),Nt=s(L);{var $e=k=>{const w=Tt(()=>(b(Ft),t(x),i(()=>Ft(t(x).event_level))));qt(k,{get variant(){return b(t(w)),i(()=>t(w).variant)},get text(){return b(t(w)),i(()=>t(w).text)}})};_(Nt,k=>{t(x),i(()=>t(x).event_level)&&k($e)})}var Ct=r(Nt,2),Ae=s(Ct);{var Ne=k=>{var w=Ut();f(Ee=>c(w,Ee),[()=>(b(B),t(x),i(()=>B(t(x).created_at)))]),v(k,w)},Ce=k=>{var w=Ut("Unknown date");v(k,w)};_(Ae,k=>{t(x),i(()=>t(x).created_at)?k(Ne):k(Ce,!1)})}a(Ct),a(L),a(y),a(u),f(()=>c(it,(t(x),i(()=>t(x).message)))),v(m,u)}),a(l),Je(l,m=>p(E,m),()=>t(E)),a(n),v(d,n)},Be=d=>{var n=ca();v(d,n)};_(Se,d=>{t(e),i(()=>t(e).status_messages&&t(e).status_messages.length>0)?d(Me):d(Be,!1)})}f((d,n,l,m,x)=>{c(ne,(t(e),i(()=>t(e).id))),c(oe,(t(e),i(()=>t(e).name))),c(le,(t(e),i(()=>t(e).provider_id))),c(ve,(t(e),i(()=>t(e).provider_name||"Unknown"))),c(ge,(t(e),i(()=>t(e).agent_id||"Not assigned"))),c(_e,d),zt(st,1,`inline-flex px-2 py-1 text-xs font-semibold rounded-full ring-1 ring-inset ${n??""}`),c(ye,l),zt(dt,1,`inline-flex px-2 py-1 text-xs font-semibold rounded-full ring-1 ring-inset ${m??""}`),c(pe,x)},[()=>(b(B),t(e),i(()=>B(t(e).created_at))),()=>(b(V),t(e),i(()=>V(t(e).status||"unknown"))),()=>(b(z),t(e),i(()=>z(t(e).status||"unknown"))),()=>(b(V),t(e),i(()=>V(t(e).runner_status||"unknown"))),()=>(b(z),t(e),i(()=>z(t(e).runner_status||"unknown")))]),Fe("click",ie,()=>p(N,!0)),v(I,U)},T=I=>{var U=ga();v(I,U)};_(j,I=>{t(e)?I(S):I(T,!1)},!0)}v(o,h)};_(ee,o=>{t(R)?o(ae):o(se,!1)})}a(q);var re=r(q,2);{var de=o=>{Qe(o,{title:"Delete Instance",message:"Are you sure you want to delete this instance? This action cannot be undone.",get itemName(){return t(e),i(()=>t(e).name)},$$events:{close:()=>p(N,!1),confirm:Qt}})};_(re,o=>{t(N)&&t(e)&&o(de)})}f(()=>{nt(Yt,"href",`${A}/instances`),c(Zt,(t(e),i(()=>t(e)?t(e).name:"Instance Details")))}),v(Gt,lt),Re(),Jt()}export{Ba as component}; diff --git a/webapp/assets/_app/version.json b/webapp/assets/_app/version.json index 6268ff48..0a3e848e 100644 --- a/webapp/assets/_app/version.json +++ b/webapp/assets/_app/version.json @@ -1 +1 @@ -{"version":"1755334486454"} \ No newline at end of file +{"version":"1755415870786"} \ No newline at end of file diff --git a/webapp/assets/index.html b/webapp/assets/index.html index fa125d3f..8c3850e6 100644 --- a/webapp/assets/index.html +++ b/webapp/assets/index.html @@ -71,11 +71,11 @@ })(); - - + + - - + + @@ -85,7 +85,7 @@
                - - + + - - + + @@ -85,7 +85,7 @@
                - - + + - - + + + @@ -85,7 +86,7 @@
                - - - - - - + + + + + - - - + + +
                {#if item?.pool_id} - + Pool: {item.pool_id} {:else if item?.scale_set_id} - + Scale Set: {item.scale_set_id} {:else} diff --git a/webapp/src/lib/components/cells/PoolEntityCell.svelte b/webapp/src/lib/components/cells/PoolEntityCell.svelte index 0861d11c..e8ed933d 100644 --- a/webapp/src/lib/components/cells/PoolEntityCell.svelte +++ b/webapp/src/lib/components/cells/PoolEntityCell.svelte @@ -1,5 +1,4 @@ diff --git a/webapp/src/routes/+page.svelte b/webapp/src/routes/+page.svelte index 258f0cf3..07df6498 100644 --- a/webapp/src/routes/+page.svelte +++ b/webapp/src/routes/+page.svelte @@ -1,6 +1,6 @@ @@ -107,12 +107,12 @@
                GARM diff --git a/webapp/src/routes/instances/[id]/+page.svelte b/webapp/src/routes/instances/[id]/+page.svelte index e7024404..40fa3be5 100644 --- a/webapp/src/routes/instances/[id]/+page.svelte +++ b/webapp/src/routes/instances/[id]/+page.svelte @@ -4,7 +4,7 @@ import { goto } from '$app/navigation'; import { garmApi } from '$lib/api/client.js'; import type { Instance } from '$lib/api/generated/api.js'; - import { base } from '$app/paths'; + import { resolve } from '$app/paths'; import DeleteModal from '$lib/components/DeleteModal.svelte'; import { websocketStore, type WebSocketEvent } from '$lib/stores/websocket.js'; import { formatStatusText, getStatusBadgeClass } from '$lib/utils/status.js'; @@ -40,7 +40,7 @@ if (!instance) return; try { await garmApi.deleteInstance(instance.name!); - goto(`${base}/instances`); + goto(resolve('/instances')); } catch (err) { error = extractAPIError(err); } @@ -71,7 +71,7 @@ // Instance was deleted - redirect to list page const instanceId = event.payload.id || event.payload; if (instanceId === instance.id) { - goto(`${base}/instances`); + goto(resolve('/instances')); } } } @@ -112,7 +112,7 @@